| code (string, 3-1.05M chars) | repo_name (string, 5-104 chars) | path (string, 4-251 chars) | language (1 class) | license (15 classes) | size (int64, 3-1.05M) |
|---|---|---|---|---|---|
import json
import logging
import re
from urllib.parse import unquote_plus
from streamlink.plugin import Plugin, PluginError
from streamlink.plugin.api import validate
from streamlink.stream import HLSStream
log = logging.getLogger(__name__)
QUALITY_WEIGHTS = {
"src": 1080,
}
class DLive(Plugin):
_re_url = re.compile(r"""
https?://(?:www\.)?dlive\.tv/
(?:
(?:p/(?P<video>[^/]+))
|
(?P<channel>[^/]+)
)
""", re.VERBOSE)
_re_videoPlaybackUrl = re.compile(r'"playbackUrl"\s*:\s*"([^"]+\.m3u8)"')
_schema_userByDisplayName = validate.Schema({
"data": {
"userByDisplayName": {
"livestream": validate.any(None, {
"title": validate.text
}),
"username": validate.text
}
}},
validate.get("data"),
validate.get("userByDisplayName")
)
_schema_videoPlaybackUrl = validate.Schema(
validate.transform(_re_videoPlaybackUrl.search),
validate.any(None, validate.all(
validate.get(1),
validate.transform(unquote_plus),
validate.transform(lambda url: bytes(url, "utf-8").decode("unicode_escape")),
validate.url()
))
)
@classmethod
def can_handle_url(cls, url):
return cls._re_url.match(url)
@classmethod
def stream_weight(cls, key):
weight = QUALITY_WEIGHTS.get(key)
if weight:
return weight, "dlive"
return Plugin.stream_weight(key)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.author = None
self.title = None
match = self._re_url.match(self.url)
self.video = match.group("video")
self.channel = match.group("channel")
def get_author(self):
return self.author
def get_title(self):
return self.title
def _get_streams_video(self):
log.debug("Getting video HLS streams for {0}".format(self.video))
try:
hls_url = self.session.http.get(self.url, schema=self._schema_videoPlaybackUrl)
if hls_url is None:
return
except PluginError:
return
return HLSStream.parse_variant_playlist(self.session, hls_url)
def _get_streams_live(self):
log.debug("Getting live HLS streams for {0}".format(self.channel))
try:
data = json.dumps({"query": """query {{
userByDisplayName(displayname:"{displayname}") {{
livestream {{
title
}}
username
}}
}}""".format(displayname=self.channel)})
res = self.session.http.post("https://graphigo.prd.dlive.tv/", data=data)
res = self.session.http.json(res, schema=self._schema_userByDisplayName)
if res["livestream"] is None:
return
except PluginError:
return
self.author = self.channel
self.title = res["livestream"]["title"]
hls_url = "https://live.prd.dlive.tv/hls/live/{0}.m3u8".format(res["username"])
return HLSStream.parse_variant_playlist(self.session, hls_url)
def _get_streams(self):
if self.video:
return self._get_streams_video()
elif self.channel:
return self._get_streams_live()
__plugin__ = DLive
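# A minimal sketch (not part of the plugin) of how the URL regex above
# separates VOD links from channel links; both example URLs are hypothetical:
# DLive._re_url.match("https://dlive.tv/p/user+video").group("video") -> "user+video"
# DLive._re_url.match("https://dlive.tv/somechannel").group("channel") -> "somechannel"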
| beardypig/streamlink | src/streamlink/plugins/dlive.py | Python | bsd-2-clause | 3,485 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('app', '0005_waypoint_event_dependency'),
]
operations = [
migrations.AlterField(
model_name='events',
name='destination_point',
field=models.CharField(help_text=b'Indica el punto de destino del viaje', max_length=250, null=True, verbose_name=b'Punto de destino', blank=True),
),
migrations.AlterField(
model_name='events',
name='meeting_point',
field=models.CharField(help_text=b'Indica el punto de reunion para tu evento', max_length=250, null=True, verbose_name=b'Punto de reunion', blank=True),
),
migrations.AlterField(
model_name='my_groups',
name='color',
field=models.CharField(default=b'E38A33', max_length=25),
),
]
| flipjack/misrutas | project/app/migrations/0006_auto_20151006_1038.py | Python | mit | 973 |
"""OpenID 2.0 - Requesting Authentication
Ref: https://openid.net/specs/openid-authentication-2_0.html#requesting_authentication
"""
from urllib.parse import urlencode
from uuid import uuid4
from datetime import datetime, timezone
from requests import get
from .utils import create_return_to
class Authentication:
"""Authentication initialization
Note:
Based on OpenID specification
https://openid.net/specs/openid-authentication-2_0.html
Args:
mode: OpenID request mode (defaults to 'checkid_setup')
ns: OpenID 2.0 protocol namespace
identity: OP-Local Identifier (defaults to identifier_select)
claimed_id: Claimed Identifier (defaults to identifier_select)
return_to: URL the provider redirects back to after authentication
request_id: unique id used to recover this transaction on return
Attributes:
mode
ns
identity
claimed_id
return_to
request_id
"""
def __init__(self, mode=None, ns=None, identity=None,
claimed_id=None, return_to=None, request_id=None):
self.mode = mode or 'checkid_setup'
self.ns = ns or 'http://specs.openid.net/auth/2.0'
self.identity = identity or 'http://specs.openid.net/auth/2.0/' \
'identifier_select'
self.claimed_id = claimed_id or 'http://specs.openid.net/auth/2.0/' \
'identifier_select'
self.request_id = request_id or uuid4().hex
self.return_to = return_to or create_return_to(self.request_id)
def authenticate(self, where, request_id=None):
"""Process to authenticate a request based on few data
On this step, the most important information is the request_id.
This parameter will allow us to recover this transaction on
return url.
"""
response = get(self.destination(where), allow_redirects=False)
return response.headers['Location']
@property
def payload(self):
"""Prepare the OpenID payload to authenticate this request"""
return {
'openid.mode': self.mode,
'openid.ns': self.ns,
'openid.identity': self.identity,
'openid.claimed_id': self.claimed_id,
'openid.return_to': self.return_to,
}
def convert(self, payload):
"""Convert the OpenID payload on QueryString format"""
return urlencode(payload)
def destination(self, base):
"""Full destination URL to send the payload"""
return base + '?' + self.convert(self.payload)
@property
def evidence(self):
"""This function could be used to get an evidence about what requests
were sent.
Example:
{
'openid.claimed_id': 'http://specs.openid.net/auth/2.0/identifier_select',
'openid.identity': 'http://specs.openid.net/auth/2.0/identifier_select',
'openid.mode': 'checkid_setup',
'openid.ns': 'http://specs.openid.net/auth/2.0',
'openid.return_to': 'https://requestb.in/1e7ing31?request_id=07c52d8bb36c4412a4f7e133be9b08ee',
'request_id': '07c52d8bb36c4412a4f7e133be9b08ee',
'timestamp': datetime.datetime(2017, 8, 9, 12, 12, 36, 735736, tzinfo=datetime.timezone.utc)
}
"""
evidence = {}
evidence.update(self.payload)
evidence.update({'request_id': self.request_id})
evidence.update({'timestamp': datetime.now(timezone.utc)})
return evidence
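# A minimal usage sketch (the provider endpoint below is hypothetical):
# auth = Authentication()
# auth.destination("https://provider.example.com/openid")
# -> "https://provider.example.com/openid?openid.mode=checkid_setup&openid.ns=..."
# auth.request_id then identifies this transaction when the provider
# redirects back to return_to.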
| mac-developer/openid-wargaming | openid_wargaming/authentication.py | Python | mit | 3,304 |
def test_salt_contrib_installed(File):
grains = File('/srv/salt/_grains')
assert grains.exists
assert grains.is_symlink
contrib_repo = File('/etc/salt/contrib')
assert contrib_repo.exists
assert contrib_repo.is_directory
| mitodl/salt-ops | tests/test_salt_contrib.py | Python | bsd-3-clause | 245 |
import googlemaps
from google_accounts import accounts
app = accounts["torino"]
gmaps = googlemaps.Client(key=app["api_key"])
reverse_result = gmaps.reverse_geocode((40.6413111,-73.77813909999999))
for result in reverse_result:
print(result["types"], result["formatted_address"])
| bmtgoncalves/TorinoCourse | Lecture IV/google_reverse_geocode.py | Python | mit | 283 |
"""
Tests for `bx.align.score`.
"""
import bx.align.score
import bx.align.maf
import StringIO
import unittest
import sys
from numpy import array, cumsum, allclose
aligns = [ ( "CCACTAGTTTTTAAATAATCTACTATCAAATAAAAGATTTGTTAATAATAAATTTTAAATCATTAACACTT",
"CCATTTGGGTTCAAAAATTGATCTATCA----------TGGTGGATTATTATTTAGCCATTAAGGACAAAT",
-111 ),
( "CCACTAGTTTTTAAATAATCTAC-----AATAAAAGATTTGTTAATAAT---AAATTTTAAATCATTAA-----CACTT",
"CCATTTGGGTTCAAAAATTGATCTATCA----------TGGTGGAT---TATTATTT-----AGCCATTAAGGACAAAT",
-3626 ),
( "CCACTAGTTTTTGATTC",
"CCATTTGGGTTC-----",
-299 ),
( "CTTAGTTTTTGATCACC",
"-----CTTGGGTTTACC",
-299 ),
( "gggaattgaacaatgagaacacatggacacaggaaggggaacatcacacacc----------ggggcctgttgtggggtggggggaag",
"ggaactagaacaagggagacacatacaaacaacaacaacaacaacacagcccttcccttcaaagagcttatagtctgatggaggagag",
1690 )
]
mafs = """##maf
a score=2883.0
s hg17.chr1 6734 30 + 245522847 CTACCTCAGTGTGGAAGGTGGGCAGTTCTG
s rheMac1.SCAFFOLD71394 9319 30 - 13789 CTACCTCAGTGTGGAAGGTGGGCAGTTCTG
a score=8167.0
s hg17.chr1 41401 40 + 245522847 TGTGTGATTAATGCCTGAGACTGTGTGAAGTAAGAGATGG
s panTro1.chr1 49673 40 + 229575298 TGCGTGATTAATGCCTGAGATTGTGTGAAGTAAAAGATGG
s rheMac1.SCAFFOLD45837 26063 33 - 31516 TGTGTGATTAATGCCTGAGATTGTGTGAAGTAA-------
"""
nonsymm_scheme = bx.align.score.build_scoring_scheme ( """ A C G T
91 0 -31 -123
-114 100 -125 -31
-31 -125 100 -114
-123 -31 -114 91 """, 400, 30 )
aligns_for_nonsymm_scheme = [ ( "AAAACCCCGGGGTTTT",
"ACGTACGTACGTACGT",
-580 )
]
asymm_scheme = bx.align.score.build_scoring_scheme ( """ 01 02 A C G T
01 200 -200 -50 100 -50 100
02 -200 200 100 -50 100 -50 """,
0, 0, gap1='\x00' )
aligns_for_asymm_scheme = [ ( "\x01\x01\x01\x01\x01\x01",
"ACGT\x01\x02",
100 )
]
class BasicTests( unittest.TestCase ):
def test_scoring_text( self ):
ss = bx.align.score.hox70
for t1, t2, score in aligns:
self.assertEquals( bx.align.score.score_texts( ss, t1, t2 ), score )
def test_align( self ):
ss = bx.align.score.hox70
for block in bx.align.maf.Reader( StringIO.StringIO( mafs ) ):
self.assertEquals( bx.align.score.score_alignment( ss, block ), float( block.score ) )
def test_accumulate( self ):
ss = bx.align.score.hox70
self.assert_( allclose( bx.align.score.accumulate_scores( ss, "-----CTTT", "CTTAGTTTA" ),
cumsum( array( [ -430, -30, -30, -30, -30, -31, 91, 91, -123 ] ) ) ) )
self.assert_( allclose( bx.align.score.accumulate_scores( ss, "-----CTTT", "CTTAGTTTA", skip_ref_gaps=True ),
cumsum( array( [ -581, 91, 91, -123 ] ) ) ) )
def test_nonsymm_scoring( self ):
ss = nonsymm_scheme
for t1, t2, score in aligns_for_nonsymm_scheme:
self.assertEquals( bx.align.score.score_texts( ss, t1, t2 ), score )
def test_asymm_scoring( self ):
ss = asymm_scheme
for t1, t2, score in aligns_for_asymm_scheme:
self.assertEquals( bx.align.score.score_texts( ss, t1, t2 ), score )
test_classes = [ BasicTests ]
suite = unittest.TestSuite( [ unittest.makeSuite( c ) for c in test_classes ] )
| dnanexus/rseqc | rseqc/lib/bx/align/score_tests.py | Python | gpl-3.0 | 4,010 |
# -*- coding: utf-8 -*-
#
# gmtasks documentation build configuration file, created by
# sphinx-quickstart on Thu Jan 19 17:32:25 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'gmtasks'
copyright = u'2012, Chris Petersen'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.3'
# The full version, including alpha/beta/rc tags.
release = '0.3'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'gmtasksdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'gmtasks.tex', u'gmtasks Documentation',
u'Chris Petersen', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'gmtasks', u'gmtasks Documentation',
[u'Chris Petersen'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'gmtasks', u'gmtasks Documentation',
u'Chris Petersen', 'gmtasks', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
'python': ('http://docs.python.org/', None),
'gearman': ('http://packages.python.org/gearman', None),
}
| ex-nerd/gmtasks | doc/source/conf.py | Python | bsd-3-clause | 8,027 |
from flask import render_template, redirect, request, url_for, flash
from flask.ext.login import login_user, logout_user, login_required, current_user
from . import auth
from .. import db
from ..models import User
from ..email import send_email
from .forms import LoginForm, RegistrationForm, ChangePasswordForm,\
PasswordResetRequestForm, PasswordResetForm, ChangeEmailForm
@auth.before_app_request
def before_request():
if current_user.is_authenticated \
and not current_user.confirmed \
and request.endpoint[:5] != 'auth.' \
and request.endpoint != 'static':
return redirect(url_for('auth.unconfirmed'))
@auth.route('/unconfirmed')
def unconfirmed():
if current_user.is_anonymous or current_user.confirmed:
return redirect(url_for('main.index'))
return render_template('auth/unconfirmed.html')
@auth.route('/confirm/<token>')
@login_required
def confirm(token):
if current_user.confirmed:
return redirect(url_for('main.index'))
if current_user.confirm(token):
flash('You have confirmed your account. Thanks!')
else:
flash('The confirmation link is invalid or has expired.')
return redirect(url_for('main.index'))
@auth.route('/confirm')
@login_required
def resend_confirmation():
token = current_user.generate_confirmation_token()
send_email(current_user.email, 'Confirm Your Account',
'auth/email/confirm', user=current_user, token=token)
flash('A new confirmation email has been sent to you by email.')
return redirect(url_for('main.index'))
@auth.route('/login', methods=['GET', 'POST'])
def login():
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user is not None and user.verify_password(form.password.data):
login_user(user, form.remember_me.data)
return redirect(request.args.get('next') or url_for('main.index'))
flash('Invalid username or password.')
return render_template('auth/login.html', form=form)
@auth.route('/logout')
@login_required
def logout():
logout_user()
flash('You have been logged out.')
return redirect(url_for('main.index'))
@auth.route('/register', methods=['GET', 'POST'])
def register():
form = RegistrationForm()
if form.validate_on_submit():
user = User(email=form.email.data,
username=form.username.data,
password=form.password.data)
db.session.add(user)
db.session.commit()
token = user.generate_confirmation_token()
send_email(user.email, 'Confirm Your Account',
'auth/email/confirm', user=user, token=token)
flash('A confirmation email has been sent to you by email.')
return redirect(url_for('auth.login'))
return render_template('auth/register.html', form=form)
@auth.route('/change-password', methods=['GET', 'POST'])
@login_required
def change_password():
form = ChangePasswordForm()
if form.validate_on_submit():
if current_user.verify_password(form.old_password.data):
current_user.password = form.password.data
db.session.add(current_user)
flash('Your password has been updated.')
return redirect(url_for('main.index'))
else:
flash('Invalid password.')
return render_template("auth/change_password.html", form=form)
@auth.route('/reset', methods=['GET', 'POST'])
def password_reset_request():
if not current_user.is_anonymous:
return redirect(url_for('main.index'))
form = PasswordResetRequestForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user:
token = user.generate_reset_token()
send_email(user.email, 'Reset Your Password',
'auth/email/reset_password',
user=user, token=token,
next=request.args.get('next'))
flash('An email with instructions to reset your password has been '
'sent to you.')
return redirect(url_for('auth.login'))
return render_template('auth/reset_password.html', form=form)
@auth.route('/reset/<token>', methods=['GET', 'POST'])
def password_reset(token):
if not current_user.is_anonymous:
return redirect(url_for('main.index'))
form = PasswordResetForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user is None:
return redirect(url_for('main.index'))
if user.reset_password(token, form.password.data):
flash('Your password has been updated.')
return redirect(url_for('auth.login'))
else:
return redirect(url_for('main.index'))
return render_template('auth/reset_password.html', form=form)
@auth.route('/change-email', methods=['GET', 'POST'])
@login_required
def change_email_request():
form = ChangeEmailForm()
if form.validate_on_submit():
if current_user.verify_password(form.password.data):
new_email = form.email.data
token = current_user.generate_email_change_token(new_email)
send_email(new_email, 'Confirm your email address',
'auth/email/change_email',
user=current_user, token=token)
flash('An email with instructions to confirm your new email '
'address has been sent to you.')
return redirect(url_for('main.index'))
else:
flash('Invalid email or password.')
return render_template("auth/change_email.html", form=form)
@auth.route('/change-email/<token>')
@login_required
def change_email(token):
if current_user.change_email(token):
flash('Your email address has been updated.')
else:
flash('Invalid request.')
return redirect(url_for('main.index'))
| Lirean/Elibrary | app/auth/views.py | Python | gpl-3.0 | 6,011 |
#!/usr/bin/env python
"""
tests for mozfile.load
"""
import mozhttpd
import os
import tempfile
import unittest
from mozfile import load
class TestLoad(unittest.TestCase):
"""test the load function"""
def test_http(self):
"""test with mozhttpd and a http:// URL"""
def example(request):
"""example request handler"""
body = 'example'
return (200, {'Content-type': 'text/plain',
'Content-length': len(body)
}, body)
host = '127.0.0.1'
httpd = mozhttpd.MozHttpd(host=host,
urlhandlers=[{'method': 'GET',
'path': '.*',
'function': example}])
try:
httpd.start(block=False)
content = load(httpd.get_url()).read()
self.assertEqual(content, 'example')
finally:
httpd.stop()
def test_file_path(self):
"""test loading from file path"""
try:
# create a temporary file
tmp = tempfile.NamedTemporaryFile(delete=False)
tmp.write('foo bar')
tmp.close()
# read the file
contents = file(tmp.name).read()
self.assertEqual(contents, 'foo bar')
# read the file with load and a file path
self.assertEqual(load(tmp.name).read(), contents)
# read the file with load and a file URL
self.assertEqual(load('file://%s' % tmp.name).read(), contents)
finally:
# remove the tempfile
if os.path.exists(tmp.name):
os.remove(tmp.name)
if __name__ == '__main__':
unittest.main()
| vladikoff/fxa-mochitest | tests/mozbase/mozfile/tests/test_load.py | Python | mpl-2.0 | 1,783 |
###############################################################################
# ilastik: interactive learning and segmentation toolkit
#
# Copyright (C) 2011-2014, the ilastik developers
# <team@ilastik.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# In addition, as a special exception, the copyright holders of
# ilastik give you permission to combine ilastik with applets,
# workflows and plugins which are not covered under the GNU
# General Public License.
#
# See the LICENSE file for details. License information is also available
# on the ilastik web site at:
# http://ilastik.org/license.html
###############################################################################
"""
This module defines a set of global settings that can be used to "hack in" special features,
particularly one-off features that it's not worth redesigning the architecture for.
"""
class ImportOptions(object):
# Used in the DataSelection applet serializer for importing.
default_axis_order = None
| nielsbuwen/ilastik | ilastik/utility/globals.py | Python | gpl-3.0 | 1,249 |
from DisplayTarget import DisplayTarget
from layout import Unit
from utils.DOM import DOM
from utils.datatypes import *
from utils import dialog
from utils import vfs
import utils
import gtk
try:
from utils import svg
except ImportError:
import sys
log("Could not import svg module!")
sys.exit(1)
#
# Target where sensors can draw on.
#
class TargetCanvas(DisplayTarget):
def __init__(self, name, parent):
# a mini DOM for accessing the SVG data
self.__dom = None
# the previous size of the widget; used to detect resizings
self.__old_size = (0, 0)
# the size of the image
self.__image_size = (100, 100)
DisplayTarget.__init__(self, name, parent)
self.__widget = gtk.Image()
self.__widget.show()
# the "graphics" property is not readable because otherwise it could
# be used to spy out files on the user's system after loading them into
# "uri"
self._register_property("graphics", TYPE_STRING,
self._setp_graphics, None)
self._register_property("dom", TYPE_OBJECT,
None, self._getp_dom)
self._register_property("uri", TYPE_STRING,
self._setp_uri, self._getp)
self._setp("graphics", "")
# watch the element for geometry changes
self.add_observer(self.__on_observe_size)
def get_widget(self): return self.__widget
def delete(self):
del self.__dom
del self.__widget
DisplayTarget.delete(self)
#
# Observer for size.
#
def __on_observe_size(self, src, cmd, *args):
x, y, w, h = src.get_geometry()
if (cmd == src.OBS_GEOMETRY and
(w.as_px(), h.as_px()) != self.__old_size):
utils.request_call(self.__redraw)
self.__old_size = (w.as_px(), h.as_px())
#
# Transforms the given coordinates into buffer space.
#
def __transform_coords(self, x, y):
width, height = self.get_geometry()[2:4]
tx = (width.as_px() / 2.0) * (1.0 + float(x))
ty = (height.as_px() / 2.0) * (1.0 - float(y))
return (tx, ty)
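# e.g. on a 200x100 px canvas, (0, 0) maps to the centre (100.0, 50.0),
# (-1, 1) to the top-left corner and (1, -1) to the bottom-right corner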
def __make_style(self, foreground, fill):
s = "stroke:" + foreground
if (fill): s+= ";fill:" + foreground
else: s+= ";fill:none"
out = "style='%s'" % s
return out
#
# Performs the given drawing operations. This is used for backwards
# compatibility. New code should directly send SVG data.
#
def __draw_svg(self, commands):
w, h = self.get_geometry()[2:4]
out = "<svg width='%d' height='%d'>" % (w.as_px(), h.as_px())
current_fg = "rgb(0, 0, 0)"
for c in commands:
if (not c.strip()): continue
parts = c.split()
cmd, args = parts[0], parts[1:]
if (cmd == "color"):
color = args[0]
gdkcolor = gtk.gdk.color_parse(color)
current_fg = "rgb(%d, %d, %d)" \
% (gdkcolor.red >> 8, gdkcolor.green >> 8, gdkcolor.blue >> 8)
elif (cmd == "line"):
x1, y1, x2, y2 = args
x1, y1 = self.__transform_coords(x1, y1)
x2, y2 = self.__transform_coords(x2, y2)
style = self.__make_style(current_fg, False)
out += "<line x1='%f' y1='%f' x2='%f' y2='%f' %s/>" \
% (x1, y1, x2, y2, style)
elif (cmd == "polygon"):
fill = int(args[-1])
style = self.__make_style(current_fg, fill)
points = [ self.__transform_coords(args[i], args[i+1])
for i in range(0,len(args)-1, 2) ]
if (points): path = "M%f %f " % (points.pop(0))
while (points):
path += "L%f %f " % (points.pop(0))
out += "<path d='%s' %s/>" % (path, style)
elif (cmd == "rectangle"):
x1, y1, x2, y2, fill = args
style = self.__make_style(current_fg, int(fill))
x1, y1 = self.__transform_coords(x1, y1)
x2, y2 = self.__transform_coords(x2, y2)
w = x2 - x1
h = y2 - y1
out += "<rect x='%f' y='%f' width='%f' height='%f' %s/>" \
% (x1, y1, w, h, style)
#end if
#end for
out += "</svg>"
self.__render(out)
#
# Renders the given SVG data.
#
def __render(self, data):
def f():
utils.request_call(self.__redraw)
if (not data): return
self.__dom = DOM(data).get_root()
if (not self.__dom): return
self.__dom.set_update_handler(f)
# check if the SVG has dynamic or static size
try:
self.__image_size = \
int(float(self.__dom["width"])), int(float(self.__dom["height"]))
except KeyError:
log("Error: width and/or height not given\n")
except UserError:
log("Error: Desklet contains errors. Please contact the author!\n No width and/or height given in the SVG root element (in a Canvas element).")
except ValueError:
try:
self.__image_size = \
Unit.Unit(string = self.__dom["width"]).as_px(), \
Unit.Unit(string = self.__dom["height"]).as_px()
except KeyError:
pass
self.__redraw()
#
# Redraws the canvas.
#
def __redraw(self):
if (not self.__dom): return
w, h = self.__widget.size_request()
imgw, imgh = self.__image_size
if (imgw == 0 or imgh == 0):
log ("Warning: The desklet is broken. Image height or width is 0",
"Please contact the author to fix the problem.")
return
# crappy SVG needs the size to be given; just set it here so that it
# dynamically takes the correct size
self.__dom["width"] = str(w or 100)
self.__dom["height"] = str(h or 100)
# so that's why the XML parser inserted an empty <g> node... :)
g = self.__dom.get_children()[0]
g["transform"] = "scale(%f, %f)" % (float(w) / imgw,
float(h) / imgh)
svg.render(self.__widget, w, h, str(self.__dom))
#
# "graphics" property.
#
def _setp_graphics(self, key, value):
# native SVG
if (value and value.lstrip()[0] == "<"):
self.__render(value)
# legacy graphics language
else:
value = value.split(",")
self.__draw_svg(value)
self._setp(key, value)
#
# Returns the DOM object of the graphics.
#
def _getp_dom(self, key): return self.__dom
#
# Loads SVG from the given URI.
#
def _setp_uri(self, key, value):
uri = self._get_display().get_full_path(value)
try:
data = vfs.read_entire_file(uri)
except:
log("Couldn't read file %s.\n" % uri)
return
self.__render(data)
self._setp(key, value)
| RaumZeit/gdesklets-core | display/TargetCanvas.py | Python | gpl-2.0 | 7,277 |
# coding: utf-8
import os,logging
from flask import send_from_directory, current_app, request,redirect,url_for,abort,flash
from flask_menu.classy import register_flaskview
from flask_menu import MenuEntryMixin,current_menu
from flask_classful import FlaskView,route
from flask_security import roles_accepted,current_user,login_required
from unifispot.core.views import (
UserAPI,AdminAPI,ClientAPI,AdminManage,
ClientManage,AdminDashboard,WifisiteAPI,
WifisiteManage,SiteDashboard,LandingpageAPI,
LandingpageManage,FileAPI,LandingpagePreview,
GuestViewAPI,GuestDataManage,
AccountAPI,AccountManage,NotificationAPI,
MailOptionsAPI,MailOptionsManage,TestEmail
)
from unifispot.core.forms import UserForm,get_wifisite_form
def media(filename):
return send_from_directory(current_app.config.get('MEDIA_ROOT'), filename)
def static_from_root():
return send_from_directory(current_app.static_folder, request.path[1:])
class IndexView(FlaskView):
decorators = [login_required]
def index(self):
if current_user.type == 'admin':
return redirect(url_for('AdminDashboard:index'))
elif current_user.type == 'client':
return redirect(url_for('AdminDashboard:index'))
else:
current_app.logger.error("Unknown User Type!! for ID:%s"%current_user.id)
abort(400)
def configure(app):
#index view, redirect to corresponding dashboard
IndexView.register(app, route_base='/')
AdminDashboard.register(app, route_base='/a/dashboard')
register_flaskview(app, AdminDashboard)
#common user api for changing profile details
UserAPI.register(app, route_base='/user')
@app.context_processor
def inject_userform():
if current_user and current_user.is_authenticated:
userform = UserForm()
userform.populate()
return dict(userform=userform)
else:
return {}
@app.context_processor
def inject_newsiteform():
if current_user and current_user.is_authenticated:
newsiteform = get_wifisite_form(baseform=True)
newsiteform.populate()
return dict(newsiteform=newsiteform)
else:
return {}
#adminAPI
AdminAPI.register(app, route_base='/admin/')
#client API
ClientAPI.register(app, route_base='/client/')
#admin manage view
AdminManage.register(app, route_base='/a/manage/admin/')
register_flaskview(app, AdminManage)
#client manage view
ClientManage.register(app, route_base='/a/manage/client/')
register_flaskview(app, ClientManage)
#account settings API
AccountAPI.register(app, route_base='/settings/')
#account manage view
AccountManage.register(app, route_base='/a/manage/settings/')
register_flaskview(app, AccountManage)
#optionsAPI
MailOptionsAPI.register(app, route_base='/mailoptions/api')
MailOptionsManage.register(app, route_base='/a/manage/mailoptions/')
register_flaskview(app, MailOptionsManage)
#testemail API
TestEmail.register(app, route_base='/testemail/api')
#notifications API
NotificationAPI.register(app, route_base='/notifications/')
#wifisite API
WifisiteAPI.register(app, route_base='/site/')
WifisiteManage.register(app, route_base='/s/manage/site/<siteid>')
register_flaskview(app, WifisiteManage)
SiteDashboard.register(app, route_base='/s/dashboard/<siteid>')
register_flaskview(app, SiteDashboard)
#-------------------Landing page --------------------------------
LandingpageAPI.register(app, route_base='/s/manage/landingpage/<siteid>')
FileAPI.register(app, route_base='/s/upload/file/<siteid>')
LandingpageManage.register(app, route_base='/s/landing/<siteid>')
register_flaskview(app, LandingpageManage)
LandingpagePreview.register(app, route_base='/s/preview/<siteid>')
#-------------------Guest Data --------------------------------
GuestViewAPI.register(app, route_base='/s/api/guest/<siteid>')
GuestDataManage.register(app, route_base='/s/data/guest/<siteid>')
register_flaskview(app, GuestDataManage)
| Spotipo/spotipo | unifispot/ext/routes.py | Python | agpl-3.0 | 4,225 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('tasks', '0005_auto_20170228_0037'),
]
operations = [
migrations.AlterUniqueTogether(
name='persondeduplication',
unique_together=set([('person1_id', 'person2_id')]),
),
]
| dchaplinsky/pep.org.ua | pepdb/tasks/migrations/0006_auto_20170228_0051.py | Python | mit | 403 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim: set expandtab:ts=4:sw=4:setfiletype python
import os
import stat
from waflib import Context
from waflib.Build import BuildContext
import atexit
def options(self):
pass
packages = [
# "Pillow==2.2.1",
# "docutils==0.11",
# "actdiag==0.5.0",
# "nwdiag==1.0.0",
# "seqdiag==0.9.0",
# "blockdiag==1.3.0",
"Pygments==2.1.3",
"six==1.4.1",
"cython==0.20.2",
# "Sphinx==1.1.3",
"Jinja2==2.7.1",
"termcolor==1.1.0",
"python-dateutil==2.1",
"pyelftools",
# "pydot",
"tmuxp",
"numpy==1.9.1",
"scipy==0.12.1",
"numexpr==2.4",
"pandas==0.14.1",
"tables==3.1.1",
"future"
]
"""
packages_for_ipython
"cov-core==1.7",
"coverage==3.7.1",
"nose==1.3.0",
"nose-cov==1.6",
"MarkupSafe==0.18",
#"basemap==1.0.7", -> World map views, external dependency, maybe?
"funcparserlib==0.3.6",
"numpydoc==0.4",
"openpyxl==1.7.0",
"pandas==0.14.1",
"pyparsing==2.0.1",
"pytz==2013.8",
"pyzmq==13.1.0",
"tornado==3.1.1",
"version==0.1.0",
"webcolors==1.4",
"xlrd==0.9.2",
#"python-qt==0.50",
"pyside==1.2.2",
#"sip",
"matplotlib==1.3.1",
"ipython==1.1.0",
"pyuv==0.10.11",
"patsy==0.3.0",
"statsmodels==0.5.0",
]
"""
ACTIVATE = """
#!/bin/bash
. ~/.bashrc
. %(venv)s/bin/activate
export LD_LIBRARY_PATH="%(libpath)s:$LD_LIBRARY_PATH"
export PYTHONPATH="%(pythonpath)s:$PYTHONPATH"
export PATH="%(path)s:$PATH"
alias deactivate=exit
"""
def configure(self):
for package in packages:
self.python_get(package)
def vexecute(self, cmd = ""):
path = []
libpath = []
pythonpath = []
for key in list(self.env.table.keys()):
if key.startswith("PATH_"):
for val in self.env[key]:
if not val in path:
path.append(val)
if key.startswith("LIBPATH_") and not key == "LIBPATH_ST":
for val in self.env[key]:
if not val in libpath:
libpath.append(val)
if key.startswith("PYTHONPATH_"):
for val in self.env[key]:
if not val in pythonpath:
pythonpath.append(val)
path.append(os.path.join(self.srcnode.abspath(), "tools"))
pythonpath.append(self.srcnode.abspath())
activate = self.bldnode.find_or_declare("activate")
activate.write(ACTIVATE % {"path" : ':'.join(path), "libpath" : ':'.join(libpath), "pythonpath" : ':'.join(pythonpath), "venv": self.env.VENV_PATH})
activate.chmod(stat.S_IXUSR | stat.S_IXGRP | stat.S_IRUSR | stat.S_IRGRP | stat.S_IWUSR | stat.S_IWGRP)
def eexit():
os.execve(self.env.BASH, [self.env.BASH , '--rcfile', activate.abspath(), '-i'], os.environ)
atexit.register(eexit)
def bash(self):
vexecute(self)
class Bash(BuildContext):
cmd = 'bash'
fun = 'bash'
setattr(Context.g_module, 'bash', bash)
""" # For IPython
def console(self):
vexecute(self, "ipython qtconsole --colors=linux")
class Console(BuildContext):
cmd = 'console'
fun = 'console'
setattr(Context.g_module, 'console', console)
def xconsole(self):
vexecute(self, "ipython qtconsole --colors=linux")
class XConsole(BuildContext):
cmd = 'xconsole'
fun = 'xconsole'
setattr(Context.g_module, 'xconsole', console)
"""
| socrocket/pysc | waf/shell.py | Python | lgpl-3.0 | 3,376 |
import _plotly_utils.basevalidators
class XaxisValidator(_plotly_utils.basevalidators.SubplotidValidator):
def __init__(self, plotly_name="xaxis", parent_name="waterfall", **kwargs):
super(XaxisValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
dflt=kwargs.pop("dflt", "x"),
edit_type=kwargs.pop("edit_type", "calc+clearAxisTypes"),
**kwargs
)
| plotly/plotly.py | packages/python/plotly/plotly/validators/waterfall/_xaxis.py | Python | mit | 452 |
def statustype():
errors = ('EXPNEEDED', 'NOERROR', 'WARNING', 'EXITERROR')
return errors
def errortype():
return {} # TODO
class RadarStatus(): # TODO finish the class when we determine how to log and what information
# from the driver that we would like to pass back to the experiment / user.
# Suggested information: confirmed ctrfreq's and sampling rate's from the driver
# third-stage sampling rate (rate of result data)
"""Class to define transmit specifications of a certain frequency, beam, and pulse sequence.
errors = ('EXPNEEDED', 'NOERROR', 'WARNING', 'EXITERROR')
Probably will be phased out once administrator is working
"""
def __init__(self):
"""
A RadarStatus is only initiated on startup of radar_control so we need an experiment
at this point.
"""
self.status = 'EXPNEEDED'  # needs a control program.
self.errorcode = None
self.logs_for_user = []
# def warning(self, warning_data):
# self.status = 'WARNING'
# self.logs_for_user.append(warning_data)
| SuperDARNCanada/borealis | radar_status/radar_status.py | Python | gpl-3.0 | 1,108 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2018 João Pedro Rodrigues
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Removes all non-coordinate records from the file.
Keeps only MODEL, ENDMDL, END, ATOM, HETATM, CONECT.
Usage:
python pdb_keepcoord.py <pdb file>
Example:
python pdb_keepcoord.py 1CTF.pdb
This program is part of the `pdb-tools` suite of utilities and should not be
distributed in isolation. The `pdb-tools` were created to quickly manipulate PDB
files using the terminal, and can be used sequentially, with one tool streaming
data to another. They are based on old FORTRAN77 code that was taking too much
effort to maintain and compile. RIP.
"""
import os
import sys
__author__ = "Joao Rodrigues"
__email__ = "j.p.g.l.m.rodrigues@gmail.com"
def check_input(args):
"""Checks whether to read from stdin/file and validates user input/options.
"""
# Defaults
fh = sys.stdin # file handle
if not len(args):
# Reading from pipe with default option
if sys.stdin.isatty():
sys.stderr.write(__doc__)
sys.exit(1)
elif len(args) == 1:
if not os.path.isfile(args[0]):
emsg = 'ERROR!! File not found or not readable: \'{}\'\n'
sys.stderr.write(emsg.format(args[0]))
sys.stderr.write(__doc__)
sys.exit(1)
fh = open(args[0], 'r')
else: # Whatever ...
emsg = 'ERROR!! Script takes 1 argument, not \'{}\'\n'
sys.stderr.write(emsg.format(len(args)))
sys.stderr.write(__doc__)
sys.exit(1)
return fh
def keep_coordinates(fhandle):
"""Keeps only coordinate records in the PDB file.
"""
records = ('MODEL ', 'ATOM ', 'HETATM',
'ENDMDL', 'END ',
'TER ', 'CONECT')
for line in fhandle:
if line.startswith(records):
yield line
def main():
# Check Input
pdbfh = check_input(sys.argv[1:])
# Do the job
new_pdb = keep_coordinates(pdbfh)
try:
_buffer = []
_buffer_size = 5000 # write N lines at a time
for lineno, line in enumerate(new_pdb):
if not (lineno % _buffer_size):
sys.stdout.write(''.join(_buffer))
_buffer = []
_buffer.append(line)
sys.stdout.write(''.join(_buffer))
sys.stdout.flush()
except IOError:
# This is here to catch Broken Pipes
# for example to use 'head' or 'tail' without
# the error message showing up
pass
# last line of the script
# We can close it even if it is sys.stdin
pdbfh.close()
sys.exit(0)
if __name__ == '__main__':
main()
| JoaoRodrigues/pdb-tools | pdbtools/pdb_keepcoord.py | Python | apache-2.0 | 3,212 |
from scipy.stats import betabinom, hypergeom, bernoulli, boltzmann
import numpy as np
from numpy.testing import assert_almost_equal, assert_equal, assert_allclose
def test_hypergeom_logpmf():
# symmetries test
# f(k,N,K,n) = f(n-k,N,N-K,n) = f(K-k,N,K,N-n) = f(k,N,n,K)
k = 5
N = 50
K = 10
n = 5
logpmf1 = hypergeom.logpmf(k, N, K, n)
logpmf2 = hypergeom.logpmf(n - k, N, N - K, n)
logpmf3 = hypergeom.logpmf(K - k, N, K, N - n)
logpmf4 = hypergeom.logpmf(k, N, n, K)
assert_almost_equal(logpmf1, logpmf2, decimal=12)
assert_almost_equal(logpmf1, logpmf3, decimal=12)
assert_almost_equal(logpmf1, logpmf4, decimal=12)
# test related distribution
# Bernoulli distribution if n = 1
k = 1
N = 10
K = 7
n = 1
hypergeom_logpmf = hypergeom.logpmf(k, N, K, n)
bernoulli_logpmf = bernoulli.logpmf(k, K/N)
assert_almost_equal(hypergeom_logpmf, bernoulli_logpmf, decimal=12)
def test_boltzmann_upper_bound():
k = np.arange(-3, 5)
N = 1
p = boltzmann.pmf(k, 0.123, N)
expected = k == 0
assert_equal(p, expected)
lam = np.log(2)
N = 3
p = boltzmann.pmf(k, lam, N)
expected = [0, 0, 0, 4/7, 2/7, 1/7, 0, 0]
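# with lam = log(2) the unnormalised weights exp(-lam*k) for k = 0, 1, 2
# are 1, 1/2, 1/4; dividing by their sum 7/4 yields 4/7, 2/7, 1/7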
assert_allclose(p, expected, rtol=1e-13)
c = boltzmann.cdf(k, lam, N)
expected = [0, 0, 0, 4/7, 6/7, 1, 1, 1]
assert_allclose(c, expected, rtol=1e-13)
def test_betabinom_a_and_b_unity():
# test limiting case that betabinom(n, 1, 1) is a discrete uniform
# distribution from 0 to n
n = 20
k = np.arange(n + 1)
p = betabinom(n, 1, 1).pmf(k)
expected = np.repeat(1 / (n + 1), n + 1)
assert_almost_equal(p, expected)
def test_betabinom_bernoulli():
# test limiting case that betabinom(1, a, b) = bernoulli(a / (a + b))
a = 2.3
b = 0.63
k = np.arange(2)
p = betabinom(1, a, b).pmf(k)
expected = bernoulli(a / (a + b)).pmf(k)
assert_almost_equal(p, expected)
| pizzathief/scipy | scipy/stats/tests/test_discrete_distns.py | Python | bsd-3-clause | 1,955 |
# -*- coding: utf-8 -*-
# pylint: disable-msg=E1101, E0102, E0202
"""
Created on Mon Feb 27 12:08:29 2012
Module with definition of classes to work with annotations in the MIT format.
@author: T. Teijeiro
"""
import struct
class MITAnnotation(object):
"""
This class represents an annotation in the MIT format. Currently only
standard 2-byte annotations are supported.
"""
#We use slots instead of __dict__ to save memory space when a lot of
#annotations are created and managed.
__slots__ = ('code', 'time', 'subtype', 'chan', 'num', 'aux')
def __init__(self, code=0, time=0, subtype=0, chan=0, num=0, aux=None):
self.code = code
self.time = time
self.subtype = subtype
self.chan = chan
self.num = num
self.aux = aux
def __str__(self):
return '{0} {1} {2} {3} {4} {5}'.format(self.time, self.code,
self.subtype, self.chan,
self.num, repr(self.aux))
def __repr__(self):
return str(self)
def __lt__(self, other):
return self.time < other.time
#MIT format special codes
SKIP_TIME = 1023
AUX_CODE = 63
SKIP_CODE = 59
NUM_CODE = 60
SUB_CODE = 61
CHN_CODE = 62
def is_qrs_annotation(annot):
"""
Checks if an annotation corresponds to a QRS annotation.
NORMAL
UNKNOWN
SVPB
FUSION
VESC
LEARN
AESC
NESC
SVESC
PVC
BBB
LBBB
ABERR
RBBB
NPC
PACE
PFUS
APC
RONT
"""
return annot.code in (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13,
25, 30, 34, 35, 38, 41)
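# e.g. is_qrs_annotation(MITAnnotation(code=1)) is True (a NORMAL beat),
# while auxiliary codes such as SKIP_CODE (59) are not QRS annotations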
def read_annotations(path):
"""
Reads an annotation file in the MIT format.
See: http://www.physionet.org/physiotools/wag/annot-5.htm
Parameters
----------
path:
Path to the file containing the annotations.
Returns
-------
out:
List for annotation objects
"""
result = []
f = open(path, 'rb')
num = 0
chan = 0
displ = 0
while True:
bann = f.read(2)
if not bann:
break
(b0, b1) = struct.unpack('bb', bann)
A = (b1 & 0xff) >> 2
I = ((b1 & 0x03) << 8) | (0xff & b0)
#Special codes parsing
if A == SKIP_CODE and I == 0:
(b0, b1, b2, b3) = struct.unpack('4b', f.read(4))
displ = ((b1 << 24) | ((b0 & 0xff) << 16) |
((b3 & 0xff) << 8) | (b2 & 0xff))
elif A == NUM_CODE:
num = I
result[-1].num = num
elif A == SUB_CODE:
result[-1].subtype = I
elif A == CHN_CODE:
chan = I
result[-1].chan = chan
elif A == AUX_CODE:
result[-1].aux = f.read(I)
if I % 2 != 0:
f.read(1)
elif A == I == 0:
break
else:
result.append(MITAnnotation(code=A, time=I+displ, chan=chan, num=num))
displ = 0
f.close()
#Now, for each annotation we put the absolute time
abs_time = 0
for annot in result:
abs_time += annot.time
annot.time = max(0, abs_time)
return result
def save_annotations(annots, path):
"""
Saves a list of annotations in a file, in the MIT format.
See: http://www.physionet.org/physiotools/wag/annot-5.htm
Parameters
----------
annots: List of MITAnnotation objects to be saved. It is sorted before
the writing operation.
path: Path to the file where the list is saved.
"""
annots = sorted(annots)
f = open(path, 'wb')
prev_time = 0
prev_num = 0
prev_chn = 0
for anot in annots:
rel_time = anot.time - prev_time
#If the distance is greater than 1023 (what we can express with 10
#bits), we should write an skip code in the file.
if rel_time > SKIP_TIME:
#A = SKIP_CODE, I = 0; Then 4 byte PDP-11 long integer
f.write(struct.pack('>H', SKIP_CODE << 2))
f.write(struct.pack('<H', rel_time >> 16))
f.write(struct.pack('<H', rel_time & 0xFFFF))
#The next written position is 0
rel_time = 0
#We write the annotation code and the timestamp
f.write(struct.pack('<H', anot.code << 10 | rel_time))
prev_time = anot.time
#Write the NUM annotation, if changes
if anot.num != prev_num:
f.write(struct.pack('<H', NUM_CODE << 10 | anot.num))
prev_num = anot.num
#Write the SUBTYPE annotation, if != 0
if anot.subtype != 0:
f.write(struct.pack('<H', SUB_CODE << 10 | anot.subtype))
#Write the CHAN annotation, if changes
if anot.chan != prev_chn:
f.write(struct.pack('<H', CHN_CODE << 10 | anot.chan))
prev_chn = anot.chan
#Write the AUX field, if present
if anot.aux is not None:
f.write(struct.pack('<H', AUX_CODE << 10 | len(anot.aux)))
aux = (anot.aux if isinstance(anot.aux, bytes)
else bytes(anot.aux, encoding='utf-8'))
f.write(aux)
if len(anot.aux) % 2 != 0:
f.write(struct.pack('<b', 0))
#Finish the file with a 00
f.write(struct.pack('<h', 0))
f.close()
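# A minimal round-trip sketch (the file name is hypothetical):
# beats = [MITAnnotation(code=1, time=t) for t in (360, 720, 2000)]
# save_annotations(beats, 'example.atr')
# [a.time for a in read_annotations('example.atr')] -> [360, 720, 2000]
# (the 1280-sample gap before the last beat exercises the SKIP_CODE branch)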
def convert_annots_freq(spath, sfreq, dpath, dfreq):
"""
Converts the frequency of an annotations file.
Parameters
----------
spath:
Path of the input annotations file.
sfreq:
Frequency in Hz used in the input annotations timing.
dpath:
Path where the new annotations will be stored.
dfreq:
Frequency in Hz used in the output annotations timing.
"""
annots = read_annotations(spath)
for ann in annots:
ann.time = int(ann.time/float(sfreq)*float(dfreq))
save_annotations(annots, dpath)
if __name__ == "__main__":
pass
|
citiususc/construe
|
construe/utils/MIT/MITAnnotation.py
|
Python
|
agpl-3.0
| 6,004
|
import unittest
import ray
import ray.rllib.agents.ppo as ppo
from ray.rllib.policy.policy import LEARNER_STATS_KEY
from ray.rllib.policy.sample_batch import DEFAULT_POLICY_ID
from ray.rllib.utils.test_utils import check_compute_single_action, \
framework_iterator
class TestDDPPO(unittest.TestCase):
@classmethod
def setUpClass(cls):
ray.init()
@classmethod
def tearDownClass(cls):
ray.shutdown()
def test_ddppo_compilation(self):
"""Test whether a DDPPOTrainer can be built with both frameworks."""
config = ppo.ddppo.DEFAULT_CONFIG.copy()
config["num_gpus_per_worker"] = 0
num_iterations = 2
for _ in framework_iterator(config, "torch"):
trainer = ppo.ddppo.DDPPOTrainer(config=config, env="CartPole-v0")
for i in range(num_iterations):
trainer.train()
check_compute_single_action(trainer)
trainer.stop()
def test_ddppo_schedule(self):
"""Test whether lr_schedule will anneal lr to 0"""
config = ppo.ddppo.DEFAULT_CONFIG.copy()
config["num_gpus_per_worker"] = 0
config["lr_schedule"] = [[0, config["lr"]], [1000, 0.0]]
num_iterations = 3
for _ in framework_iterator(config, "torch"):
trainer = ppo.ddppo.DDPPOTrainer(config=config, env="CartPole-v0")
for _ in range(num_iterations):
result = trainer.train()
lr = result["info"]["learner"][DEFAULT_POLICY_ID][
LEARNER_STATS_KEY]["cur_lr"]
trainer.stop()
assert lr == 0.0, "lr should anneal to 0.0"
if __name__ == "__main__":
import pytest
import sys
sys.exit(pytest.main(["-v", __file__]))
| pcmoritz/ray-1 | rllib/agents/ppo/tests/test_ddppo.py | Python | apache-2.0 | 1,760 |
import os
from kolekto.db import MoviesMetadata
class Command(object):
"""
Base class for all commands.
:cvar help: the help for the command
"""
help = ''
def __init__(self, name, profile, aparser_subs):
self._aparser = aparser_subs.add_parser(name, help=self.help)
self._profile = profile
self._aparser.set_defaults(command=self.run, command_name=name)
@property
def profile(self):
return self._profile
def add_arg(self, *args, **kwargs):
""" Add an argument to the command argument parser.
"""
self._aparser.add_argument(*args, **kwargs)
def prepare(self):
""" Method to override, executed before to parse arguments from command
line. This is a good place to call :meth:`add_arg`.
"""
pass
def run(self, args, config):
""" Method to override, executed if command has been selected.
:param args: parsed arguments
:param config: parsed configuration
"""
pass
def get_metadata_db(self, tree):
return MoviesMetadata(os.path.join(tree, '.kolekto', 'metadata.db'),
object_class=self.profile.object_class)
| NaPs/Kolekto | kolekto/commands/__init__.py | Python | mit | 1,232 |
from blueman.services.meta import NetworkService
from blueman.Sdp import GN_SVCLASS_ID
class GroupNetwork(NetworkService):
__svclass_id__ = GN_SVCLASS_ID
__icon__ = "network-wireless"
__priority__ = 80
| blueman-project/blueman | blueman/services/GroupNetwork.py | Python | gpl-3.0 | 216 |
# -*- coding: utf-8 -*-
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import networkx as nx
class DeploymentGraph(object):
def __init__(self, tasks):
self.tasks = tasks
self.graph = self._create_graph()
def _create_graph(self):
"""Create graph from tasks
:return: directed graph
"""
graph = nx.DiGraph()
for task in self.tasks:
task_id = task['id']
graph.add_node(task_id, **task)
if 'required_for' in task:
for req in task['required_for']:
graph.add_edge(task_id, req)
if 'requires' in task:
for req in task['requires']:
graph.add_edge(req, task_id)
if 'groups' in task:
for req in task['groups']:
# check if group is defined as regular expression
if req.startswith('/'):
continue
graph.add_edge(task_id, req)
if 'tasks' in task:
for req in task['tasks']:
graph.add_edge(req, task_id)
return graph
def find_cycles(self):
"""Find cycles in graph.
:return: list of cycles in graph
"""
cycles = []
for cycle in nx.simple_cycles(self.graph):
cycles.append(cycle)
return cycles
def is_connected(self):
"""Check if graph is connected.
:return: bool
"""
return nx.is_weakly_connected(self.graph)
def find_empty_nodes(self):
"""Find empty nodes in graph.
:return: list of empty nodes in graph
"""
empty_nodes = []
for node_name, node in self.graph.node.items():
if node == {}:
empty_nodes.append(node_name)
return empty_nodes
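# A minimal usage sketch with two hypothetical tasks:
# graph = DeploymentGraph([{'id': 'setup'}, {'id': 'deploy', 'requires': ['setup']}])
# graph.find_cycles() -> [] and graph.is_connected() -> True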
| xarses/fuel-library | utils/fuel-tasklib/tasklib/graph.py | Python | apache-2.0 | 2,428 |
# -*- coding: utf-8 -*-
"""
Created on Mon Nov 16 10:33:52 2015
@author: anderson
"""
import numpy as np
import time
from pyhfo.core import EventList
def find_max(data, thr=None):
'''
return the index of the local maximum
'''
value = (np.diff(np.sign(np.diff(data))) < 0).nonzero()[0] + 1
if thr is not None:
value = [x for x in value if data[x] > thr]
return value
def find_min(data, thr=None):
'''
return the index of the local minimum
'''
value = (np.diff(np.sign(np.diff(data))) > 0).nonzero()[0] + 1
if thr is not None:
value = [x for x in value if data[x] < thr]
return value
def find_maxandmin(data, thr=None):
'''
return the index of the local maximum and minimum
'''
value = np.diff(np.sign(np.diff(data))).nonzero()[0] + 1
if thr is not None:
value = [x for x in value if abs(data[x]) > abs(thr)]
return value
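# A quick sketch of these helpers on a toy signal:
# data = np.array([0, 2, 1, 3, 0])
# find_max(data) -> indices [1, 3]; find_min(data) -> index [2]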
class Timer(object):
def __init__(self, name=None):
self.name = name
def __enter__(self):
self.tstart = time.time()
def __exit__(self, type, value, traceback):
if self.name:
print '[%s]' % self.name,
print 'Elapsed: %.4f seconds' % (time.time() - self.tstart)
def merge_lists(EvList1,EvList2):
ch_labels1 = EvList1.ch_labels
ch_labels2 = EvList2.ch_labels
if set(ch_labels1) != set(ch_labels2):
raise ValueError('Merge should be from same channels list')
time_edge1 = EvList1.time_edge
time_edge2 = EvList2.time_edge
if time_edge1[1]<time_edge2[0]:
new_time = time_edge1[0],time_edge2[1]
else:
new_time = time_edge2[0],time_edge1[1]
NewEvList = EventList(ch_labels1,new_time)
for ev in EvList1:
NewEvList.__addEvent__(ev)
for ev in EvList2:
NewEvList.__addEvent__(ev)
return NewEvList
| britodasilva/pyhfo | pyhfo/core/aux_func.py | Python | mit | 1,885 |
# Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from blinkbuild.name_style_converter import NameStyleConverter
def cpp_bool(value):
if value is True:
return 'true'
if value is False:
return 'false'
# Return value as is, which for example may be a platform-dependent constant
# such as "defaultSelectTrailingWhitespaceEnabled".
return value
def cpp_name(entry):
return entry['ImplementedAs'] or entry['name'].original
def enum_key_for_css_keyword(keyword):
return 'k' + _upper_camel_case(keyword)
def enum_key_for_css_property(property_name):
return 'k' + _upper_camel_case(property_name)
def enum_key_for_css_property_alias(property_name):
return 'kAlias' + property_name.to_upper_camel_case()
# This id is used to build function names returning CSS properties (e.g.
# GetCSSPropertyX(), GetCSSPropertyXInternal(), etc.)
def id_for_css_property(property_name):
return 'CSSProperty' + _upper_camel_case(property_name)
def id_for_css_property_alias(property_name):
return 'CSSPropertyAlias' + property_name.to_upper_camel_case()
def _upper_camel_case(property_name):
converter = NameStyleConverter(property_name) if isinstance(
property_name, str) else property_name
return converter.to_upper_camel_case()
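# Worked example (a sketch, not part of the original file); it assumes
# NameStyleConverter turns hyphenated CSS names into UpperCamelCase:
if __name__ == '__main__':
    print(enum_key_for_css_property('background-color'))  # expected: kBackgroundColor
    print(id_for_css_property('font-size'))               # expected: CSSPropertyFontSize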
|
endlessm/chromium-browser
|
third_party/blink/renderer/build/scripts/name_utilities.py
|
Python
|
bsd-3-clause
| 2,778
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
r_li_pielou_ascii.py
--------------------
Date : February 2016
Copyright : (C) 2016 by Médéric Ribreux
Email : medspx at medspx dot fr
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Médéric Ribreux'
__date__ = 'February 2016'
__copyright__ = '(C) 2016, Médéric Ribreux'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from .r_li import checkMovingWindow, configFile, moveOutputTxtFile
def checkParameterValuesBeforeExecuting(alg, parameters, context):
return checkMovingWindow(alg, parameters, context, True)
def processCommand(alg, parameters, context):
configFile(alg, parameters, context, True)
def processOutputs(alg, parameters, context):
moveOutputTxtFile(alg, parameters, context)
|
stevenmizuno/QGIS
|
python/plugins/processing/algs/grass7/ext/r_li_pielou_ascii.py
|
Python
|
gpl-2.0
| 1,510
|
import random
import unittest2 as unittest
from config import *
from tweepy import API, OAuthHandler
class TweepyAuthTests(unittest.TestCase):
def testoauth(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
# test getting access token
auth_url = auth.get_authorization_url()
print 'Please authorize: ' + auth_url
verifier = raw_input('PIN: ').strip()
self.assert_(len(verifier) > 0)
access_token = auth.get_access_token(verifier)
self.assert_(access_token is not None)
# build api object test using oauth
api = API(auth)
s = api.update_status('test %i' % random.randint(0, 1000))
api.destroy_status(s.id)
|
abhishekgahlot/tweepy
|
tests/test_auth.py
|
Python
|
mit
| 717
|
from django.conf.urls import url
from timeline import views
urlpatterns = [
url(r'^$', views.timelines, name='timelines'),
]
|
fredwulei/fredsneverland
|
fredsneverland/timeline/urls.py
|
Python
|
mit
| 130
|
# Embedded file name: /usr/lib/enigma2/python/Components/Converter/TunerInfo.py
from Components.Sources.TunerInfo import TunerInfo as TunerInfoSource
from Components.Converter.Converter import Converter
from Components.Element import cached
class TunerInfo(Converter, object):
def __init__(self, type):
Converter.__init__(self, type)
self.type = {'TunerUseMask': TunerInfoSource.FE_USE_MASK}[type]
@cached
def getBoolean(self):
if self.type == TunerInfoSource.FE_USE_MASK:
return self.source.getTunerUseMask() and True or False
boolean = property(getBoolean)
@cached
def getText(self):
if self.type == TunerInfoSource.FE_USE_MASK:
return str(self.source.getTunerUseMask())
return ''
text = property(getText)
@cached
def getValue(self):
if self.type == TunerInfoSource.FE_USE_MASK:
return self.source.getTunerUseMask()
return -1
value = property(getValue)
def changed(self, what):
if what[0] != self.CHANGED_SPECIFIC or what[1] == self.type:
Converter.changed(self, what)
|
kingvuplus/boom2
|
lib/python/Components/Converter/TunerInfo.py
|
Python
|
gpl-2.0
| 1,171
|
#!/usr/bin/env python
"""ngo-admin で呼ばれる管理用コマンドモジュール."""
import os
import sys
from ngo.backends import NgoTemplate
def startproject(project_name):
"""ngoプロジェクトを作成する."""
import ngo
top_dir = os.getcwd()
origin_project_path = os.path.join(ngo.__path__[0], 'project_template')
# Create manage.py
manage_py_path = os.path.join(origin_project_path, 'manage')
with open(manage_py_path, 'r') as fp:
src = fp.read()
template = NgoTemplate(src)
src = template.render(
{'project_name': project_name}
)
new_file_path = os.path.join(top_dir, 'manage.py')
with open(new_file_path, 'w') as fp:
fp.write(src)
top_dir = os.path.join(top_dir, project_name)
# Create the project directory
os.makedirs(top_dir)
# Create settings.py, urls.py and wsgi.py
for file in ['settings', 'urls', 'wsgi']:
file_path = os.path.join(origin_project_path, file)
with open(file_path, 'r') as fp:
src = fp.read()
template = NgoTemplate(src)
src = template.render(
{'project_name': project_name}
)
new_file_path = os.path.join(top_dir, file+'.py')
with open(new_file_path, 'w') as fp:
fp.write(src)
def main():
"""main."""
function_name, args = sys.argv[1], sys.argv[2:]
function = globals()[function_name]
function(*args)
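# Usage sketch (not part of the original file): main() dispatches on the
# first argv token via globals(), so the expected invocation is e.g.
#
#   $ ngo-admin startproject myproject
#
# which calls startproject('myproject'), writing manage.py in the current
# directory and settings.py/urls.py/wsgi.py inside myproject/.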
|
naritotakizawa/ngo
|
ngo/admin.py
|
Python
|
mit
| 1,524
|
from sympy.physics.mechanics import (dynamicsymbols, ReferenceFrame, Point,
RigidBody, LagrangesMethod, Particle,
inertia, Lagrangian)
from sympy import symbols, pi, sin, cos, tan, simplify, Function, \
Derivative, Matrix
def test_disc_on_an_incline_plane():
# Disc rolling on an inclined plane
# First the generalized coordinates are created. The mass center of the
# disc is located from top vertex of the inclined plane by the generalized
# coordinate 'y'. The orientation of the disc is defined by the angle
# 'theta'. The mass of the disc is 'm' and its radius is 'R'. The length of
# the inclined path is 'l', the angle of inclination is 'alpha'. 'g' is the
# gravitational constant.
y, theta = dynamicsymbols('y theta')
yd, thetad = dynamicsymbols('y theta', 1)
m, g, R, l, alpha = symbols('m g R l alpha')
# Next, we create the inertial reference frame 'N'. A reference frame 'A'
# is attached to the inclined plane. Finally a frame is created which is attached to the disk.
N = ReferenceFrame('N')
A = N.orientnew('A', 'Axis', [pi/2 - alpha, N.z])
B = A.orientnew('B', 'Axis', [-theta, A.z])
# Creating the disc 'D'; we create the point that represents the mass
# center of the disc and set its velocity. The inertia dyadic of the disc
# is created. Finally, we create the disc.
Do = Point('Do')
Do.set_vel(N, yd * A.x)
I = m * R**2 / 2 * B.z | B.z
D = RigidBody('D', Do, B, m, (I, Do))
# To construct the Lagrangian, 'L', of the disc, we determine its kinetic
# and potential energies, T and U, respectively. L is defined as the
# difference between T and U.
D.set_potential_energy(m * g * (l - y) * sin(alpha))
L = Lagrangian(N, D)
# We then create the list of generalized coordinates and constraint
# equations. The constraint arises due to the disc rolling without slip on
# on the inclined path. We then invoke the 'LagrangesMethod' class and
# supply it the necessary arguments and generate the equations of motion.
# The'rhs' method solves for the q_double_dots (i.e. the second derivative
# with respect to time of the generalized coordinates and the lagrange
# multiplers.
q = [y, theta]
hol_coneqs = [y - R * theta]
m = LagrangesMethod(L, q, hol_coneqs=hol_coneqs)
m.form_lagranges_equations()
rhs = m.rhs()
rhs.simplify()
assert rhs[2] == 2*g*sin(alpha)/3
def test_simp_pen():
# This tests that the equations generated by LagrangesMethod are identical
# to those obtained by hand calculations. The system under consideration is
# the simple pendulum.
# We begin by creating the generalized coordinates as per the requirements
# of LagrangesMethod. Also we created the associate symbols
# that characterize the system: 'm' is the mass of the bob, l is the length
# of the massless rigid rod connecting the bob to a point O fixed in the
# inertial frame.
q, u = dynamicsymbols('q u')
qd, ud = dynamicsymbols('q u ', 1)
l, m, g = symbols('l m g')
# We then create the inertial frame and a frame attached to the massless
# string following which we define the inertial angular velocity of the
# string.
N = ReferenceFrame('N')
A = N.orientnew('A', 'Axis', [q, N.z])
A.set_ang_vel(N, qd * N.z)
# Next, we create the point O and fix it in the inertial frame. We then
# locate the point P to which the bob is attached. Its corresponding
# velocity is then determined by the 'two point formula'.
O = Point('O')
O.set_vel(N, 0)
P = O.locatenew('P', l * A.x)
P.v2pt_theory(O, N, A)
# The 'Particle' which represents the bob is then created and its
# Lagrangian generated.
Pa = Particle('Pa', P, m)
Pa.set_potential_energy(- m * g * l * cos(q))
L = Lagrangian(N, Pa)
# The 'LagrangesMethod' class is invoked to obtain equations of motion.
lm = LagrangesMethod(L, [q])
lm.form_lagranges_equations()
RHS = lm.rhs()
assert RHS[1] == -g*sin(q)/l
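# Hand calculation that the assertion above encodes (for reference): with
# L = T - U = m*l**2*qd**2/2 + m*g*l*cos(q), Lagrange's equation
# d/dt(dL/dqd) - dL/dq = m*l**2*qdd + m*g*l*sin(q) = 0
# gives qdd = -g*sin(q)/l, which is exactly RHS[1].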
def test_nonminimal_pendulum():
q1, q2 = dynamicsymbols('q1:3')
q1d, q2d = dynamicsymbols('q1:3', level=1)
L, m, t = symbols('L, m, t')
g = 9.8
# Compose World Frame
N = ReferenceFrame('N')
pN = Point('N*')
pN.set_vel(N, 0)
# Create point P, the pendulum mass
P = pN.locatenew('P1', q1*N.x + q2*N.y)
P.set_vel(N, P.pos_from(pN).dt(N))
pP = Particle('pP', P, m)
# Constraint Equations
f_c = Matrix([q1**2 + q2**2 - L**2])
# Calculate the lagrangian, and form the equations of motion
Lag = Lagrangian(N, pP)
LM = LagrangesMethod(Lag, [q1, q2], hol_coneqs=f_c,
forcelist=[(P, m*g*N.x)], frame=N)
LM.form_lagranges_equations()
# Check solution
lam1 = LM.lam_vec[0, 0]
eom_sol = Matrix([[m*Derivative(q1, t, t) - 9.8*m + 2*lam1*q1],
[m*Derivative(q2, t, t) + 2*lam1*q2]])
assert LM.eom == eom_sol
# Check multiplier solution
lam_sol = Matrix([(19.6*q1 + 2*q1d**2 + 2*q2d**2)/(4*q1**2/m + 4*q2**2/m)])
assert LM.solve_multipliers(sol_type='Matrix') == lam_sol
def test_dub_pen():
# The system considered is the double pendulum. Like in the
# test of the simple pendulum above, we begin by creating the generalized
# coordinates and the simple generalized speeds and accelerations which
# will be used later. Following this we create frames and points necessary
# for the kinematics. The procedure isn't explicitly explained as this is
# similar to the simple pendulum. Also this is documented on the pydy.org
# website.
q1, q2 = dynamicsymbols('q1 q2')
q1d, q2d = dynamicsymbols('q1 q2', 1)
q1dd, q2dd = dynamicsymbols('q1 q2', 2)
u1, u2 = dynamicsymbols('u1 u2')
u1d, u2d = dynamicsymbols('u1 u2', 1)
l, m, g = symbols('l m g')
N = ReferenceFrame('N')
A = N.orientnew('A', 'Axis', [q1, N.z])
B = N.orientnew('B', 'Axis', [q2, N.z])
A.set_ang_vel(N, q1d * A.z)
B.set_ang_vel(N, q2d * A.z)
O = Point('O')
P = O.locatenew('P', l * A.x)
R = P.locatenew('R', l * B.x)
O.set_vel(N, 0)
P.v2pt_theory(O, N, A)
R.v2pt_theory(P, N, B)
ParP = Particle('ParP', P, m)
ParR = Particle('ParR', R, m)
ParP.set_potential_energy(- m * g * l * cos(q1))
ParR.set_potential_energy(- m * g * l * cos(q1) - m * g * l * cos(q2))
L = Lagrangian(N, ParP, ParR)
lm = LagrangesMethod(L, [q1, q2])
lm.form_lagranges_equations()
assert simplify(l*m*(2*g*sin(q1) + l*sin(q1)*sin(q2)*q2dd
+ l*sin(q1)*cos(q2)*q2d**2 - l*sin(q2)*cos(q1)*q2d**2
+ l*cos(q1)*cos(q2)*q2dd + 2*l*q1dd) - lm.eom[0]) == 0
assert simplify(l*m*(g*sin(q2) + l*sin(q1)*sin(q2)*q1dd
- l*sin(q1)*cos(q2)*q1d**2 + l*sin(q2)*cos(q1)*q1d**2
+ l*cos(q1)*cos(q2)*q1dd + l*q2dd) - lm.eom[1]) == 0
def test_rolling_disc():
# Rolling Disc Example
# Here the rolling disc is formed from the contact point up, removing the
# need to introduce generalized speeds. Only 3 configuration and 3
# speed variables are need to describe this system, along with the
# disc's mass and radius, and the local gravity.
q1, q2, q3 = dynamicsymbols('q1 q2 q3')
q1d, q2d, q3d = dynamicsymbols('q1 q2 q3', 1)
r, m, g = symbols('r m g')
# The kinematics are formed by a series of simple rotations. Each simple
# rotation creates a new frame, and the next rotation is defined by the new
# frame's basis vectors. This example uses a 3-1-2 series of rotations, or
# Z, X, Y series of rotations. Angular velocity for this is defined using
# the second frame's basis (the lean frame).
N = ReferenceFrame('N')
Y = N.orientnew('Y', 'Axis', [q1, N.z])
L = Y.orientnew('L', 'Axis', [q2, Y.x])
R = L.orientnew('R', 'Axis', [q3, L.y])
# This is the translational kinematics. We create a point with no velocity
# in N; this is the contact point between the disc and ground. Next we form
# the position vector from the contact point to the disc's center of mass.
# Finally we form the velocity and acceleration of the disc.
C = Point('C')
C.set_vel(N, 0)
Dmc = C.locatenew('Dmc', r * L.z)
Dmc.v2pt_theory(C, N, R)
# Forming the inertia dyadic.
I = inertia(L, m / 4 * r**2, m / 2 * r**2, m / 4 * r**2)
BodyD = RigidBody('BodyD', Dmc, R, m, (I, Dmc))
# Finally we form the equations of motion, using the same steps we did
# before. Supply the Lagrangian, the generalized speeds.
BodyD.set_potential_energy(- m * g * r * cos(q2))
Lag = Lagrangian(N, BodyD)
q = [q1, q2, q3]
q1 = Function('q1')
q2 = Function('q2')
q3 = Function('q3')
l = LagrangesMethod(Lag, q)
l.form_lagranges_equations()
RHS = l.rhs()
RHS.simplify()
t = symbols('t')
assert (l.mass_matrix[3:6] == [0, 5*m*r**2/4, 0])
assert RHS[4].simplify() == (
(-8*g*sin(q2(t)) + r*(5*sin(2*q2(t))*Derivative(q1(t), t) +
12*cos(q2(t))*Derivative(q3(t), t))*Derivative(q1(t), t))/(10*r))
assert RHS[5] == (-5*cos(q2(t))*Derivative(q1(t), t) + 6*tan(q2(t)
)*Derivative(q3(t), t) + 4*Derivative(q1(t), t)/cos(q2(t))
)*Derivative(q2(t), t)
|
grevutiu-gabriel/sympy
|
sympy/physics/mechanics/tests/test_lagrange.py
|
Python
|
bsd-3-clause
| 9,351
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class OperationEntity(Model):
"""The operation supported by Cognitive Services.
:param name: Operation name: {provider}/{resource}/{operation}.
:type name: str
:param display: The operation supported by Cognitive Services.
:type display: :class:`OperationDisplayInfo
<azure.mgmt.cognitiveservices.models.OperationDisplayInfo>`
:param origin: The origin of the operation.
:type origin: str
:param properties: Additional properties.
:type properties: object
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display': {'key': 'display', 'type': 'OperationDisplayInfo'},
'origin': {'key': 'origin', 'type': 'str'},
'properties': {'key': 'properties', 'type': 'object'},
}
def __init__(self, name=None, display=None, origin=None, properties=None):
self.name = name
self.display = display
self.origin = origin
self.properties = properties
|
v-iam/azure-sdk-for-python
|
azure-mgmt-cognitiveservices/azure/mgmt/cognitiveservices/models/operation_entity.py
|
Python
|
mit
| 1,487
|
from __future__ import print_function, division
from imgaug import augmenters as iaa
from scipy import misc, ndimage
import numpy as np
from skimage import data
import cv2
from itertools import cycle
POINT_SIZE = 5
DEG_PER_STEP = 1
TIME_PER_STEP = 10
def main():
image = data.astronaut()
cv2.namedWindow("aug", cv2.WINDOW_NORMAL)
cv2.imshow("aug", image)
cv2.waitKey(TIME_PER_STEP)
height, width = image.shape[0], image.shape[1]
center_x = width // 2
center_y = height // 2
r = int(min(image.shape[0], image.shape[1]) / 3)
for deg in cycle(np.arange(0, 360, DEG_PER_STEP)):
rad = np.deg2rad(deg-90)
#print(deg, rad)
point_x = int(center_x + r * np.cos(rad))
point_y = int(center_y + r * np.sin(rad))
direction = deg / 360
aug = iaa.DirectedEdgeDetect(alpha=1.0, direction=direction)
img_aug = aug.augment_image(image)
img_aug[point_y-POINT_SIZE:point_y+POINT_SIZE+1, point_x-POINT_SIZE:point_x+POINT_SIZE+1, :] = np.array([0, 255, 0])
#print(point_x, point_y)
cv2.imshow("aug", img_aug)
cv2.waitKey(TIME_PER_STEP)
if __name__ == "__main__":
main()
|
nektor211/imgaug
|
tests/check_directed_edge_detect.py
|
Python
|
mit
| 1,184
|
class TextrankRuntimeError(RuntimeError):
pass
|
summanlp/textrank
|
summa/exception/textrank_runtime_error.py
|
Python
|
mit
| 50
|
# Copyright (c) 2016 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sahara.plugins.cdh import plugin_utils as pu
from sahara.plugins.cdh.v5_11_0 import config_helper
class PluginUtilsV5110(pu.AbstractPluginUtils):
def __init__(self):
self.c_helper = config_helper.ConfigHelperV5110()
|
shakamunyi/sahara
|
sahara/plugins/cdh/v5_11_0/plugin_utils.py
|
Python
|
apache-2.0
| 819
|
#----------------------------------------------------------------
# Author: Jason Gors <jasonDOTgorsATgmail>
# Creation Date: 09-20-2013
# Purpose: this is the point of call from the bep script.
# License: BSD
#----------------------------------------------------------------
import sys
pyversion = sys.version_info
if pyversion < (2, 7):
raise SystemExit("Requires Python >= 2.7; Current version is %s" % sys.version.split()[0])
def _run_bep():
from Bep.run import main
main()
|
b-e-p/bep
|
Bep/__init__.py
|
Python
|
bsd-3-clause
| 487
|
# -*- coding: utf-8 -*-
import os.path
import sys
try:
import secret
except ImportError:
sys.stderr.write(""">> You should create secret.py next to config.py to define the secret key:
# -*- coding: utf-8 -*-
SECRET_KEY = 'qwerty123456'
""")
raise
LANGUAGES = ['fr', 'en', 'zh']
BABEL_DEFAULT_LOCALE = 'fr'
SECRET_KEY = secret.SECRET_KEY
# Bower configuration
BOWER_PATH = os.path.join(os.path.dirname(__file__),
'static/bower_components')
# SQLAlchemy
SQLALCHEMY_DATABASE_URI = 'sqlite:///data.db'
SQLALCHEMY_TRACK_MODIFICATIONS = False
|
FrenchTech-Shanghai/ces-asia
|
ces_asia/config.py
|
Python
|
bsd-3-clause
| 579
|
# -*- coding: utf-8 -*-
#
# Sibyl: A modular Python chat bot framework
# Copyright (c) 2015-2017 Joshua Haas <jahschwa.com>
#
# This file is part of Sibyl.
#
# Sibyl is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
import time,smtplib,imaplib,email
from email.mime.text import MIMEText
from threading import Thread
from Queue import Queue
from sibyl.lib.protocol import User,Room,Message,Protocol
from sibyl.lib.decorators import botconf
################################################################################
# Config options
################################################################################
@botconf
def conf(bot):
return [
{'name':'username','req':True},
{'name':'password','req':True},
{'name':'delete','default':True,'parse':bot.conf.parse_bool},
{'name':'imap'},
{'name':'smtp'},
{'name':'key','parse':bot.conf.parse_pass}
]
################################################################################
# User sub-class
################################################################################
class MailUser(User):
# called on object init; the following are already created by __init__:
# self.protocol = (Protocol) name of this User's protocol as a str
# self.typ = (int) either Message.PRIVATE or Message.GROUP
# self.real = (User) the "real" User behind this user (defaults to self)
# @param user (object) a full username
def parse(self,user):
self.user = user
# @return (str) the username in private chat or the nick name in a room
def get_name(self):
return self.user
# @return (str) the username without resource identifier
def get_base(self):
return self.user
# @param other (object) you must check for class equivalence
# @return (bool) True if self==other (including resource)
def __eq__(self,other):
if not isinstance(other,MailUser):
return False
return self.user==other.user
# @return (str) the full username
def __str__(self):
return self.user
################################################################################
# Room class
################################################################################
class MailRoom(Room):
# called on object init; the following are already created by __init__:
# self.protocol = name of this Room's protocol as a str
# self.nick = the nick name to use in the room (defaults to None)
# self.pword = the password for this room (defaults to None)
# @param name (object) a full roomid
def parse(self,name):
self.name = name
# the return value must be the same for equal Rooms and unique for different
# @return (str) the name of this Room
def get_name(self):
return self.name
# @param other (object) you must check for class equivalence
# @return (bool) true if other is the same room (ignore nick/pword if present)
def __eq__(self,other):
if not isinstance(other,MailRoom):
return False
return self.name==other.name
################################################################################
# Protocol sub-class
################################################################################
class MailProtocol(Protocol):
# called on bot init; the following are already created by __init__:
# self.bot = SibylBot instance
# self.log = the logger you should use
def setup(self):
self.thread = IMAPThread(self)
self.thread.start()
# @raise (ConnectFailure) if can't connect to server
# @raise (AuthFailure) if failed to authenticate to server
def connect(self):
# we use IMAP to get new messages from the server
# it runs in its own thread, appending messages asynchronously to a Queue
self.log.debug('Attempting IMAP connection')
self._connect_imap()
self.log.info('IMAP successful')
# we use SMTP to send emails
self.log.debug('Attempting SMTP connection')
self._connect_smtp()
self.log.info('SMTP successful')
# receive/process messages and call bot._cb_message()
# must ignore msgs from myself and from users not in any of our rooms
# @call bot._cb_message(Message) upon receiving a valid status or message
# @raise (PingTimeout) if implemented
# @raise (ConnectFailure) if disconnected
# @raise (ServerShutdown) if server shutdown
def process(self):
# every time SibylBot calls process(), this method synchronously checks for
# new messages that the IMAPThread added while we were doing other things
while not self.thread.msgs.empty():
# check if there was a problem connecting and raise it syncronously
mail = self.thread.msgs.get()
if isinstance(mail,Exception):
raise mail
# parse the sender
frm = email.utils.parseaddr(mail['From'])[1]
user = MailUser(self,frm)
# handle multi-part messages
body = mail.get_payload()
if isinstance(body,list):
for b in body:
if b.get_content_type()=='text/plain':
body = b.get_payload().replace('\r','').strip()
if isinstance(body,list):
self.log.warning('Ignoring multi-part from "%s"; no plaintext' % frm)
self._send('Unable to process multi-part message; no plaintext',user)
return
# check for authentication key if configured
if self.opt('email.key') and self.opt('email.key').get() not in body:
self.log.warning('Invalid key from "%s"; dropping message' % user)
self._send('Invalid or missing key; commands forbidden',user)
continue
# finish parsing the e-mail and send it
body = body.split('\n')[0].strip()
msg = Message(user,body)
ellip = ('...' if len(body)>20 else '')
self.log.debug('mail from "%s" with body "%.20s%s"' % (user,body,ellip))
# pass the message on to the bot for command execution
self.bot._cb_message(msg)
# called when the bot is exiting for whatever reason
# NOTE: sibylbot will already call part_room() on every room in get_rooms()
def shutdown(self):
pass
# send a message to a user
# @param mess (Message) message to be sent
# @raise (ConnectFailure) if failed to send message
# Check: get_emote()
# REF: http://stackoverflow.com/a/14678470
def send(self,mess):
(text,to) = (mess.get_text(),mess.get_to())
# SMTP connections are short-lived, so we might need to reconnect
try:
status = self.smtp.noop()[0]
except:
status = -1
if status!=250:
self._connect_smtp()
msg = MIMEText(text)
msg['Subject'] = 'Sibyl reply'
msg['From'] = self.opt('email.username')
msg['To'] = str(to)
self.smtp.sendmail(msg['From'],msg['To'],msg.as_string())
# send a message with text to every user in a room
# optionally note that the broadcast was requested by a specific User
# @param mess (Message) the message to broadcast
# @return (str,unicode) the text that was actually sent
# Check: get_user(), get_users()
def broadcast(self,mess):
pass
# join the specified room using the specified nick and password
# @param room (Room) the room to join
# @call bot._cb_join_room_success(room) on successful join
# @call bot._cb_join_room_failure(room,error) on failed join
def join_room(self,room):
self.bot._cb_join_room_failure(room,'Not supported')
# part the specified room
# @param room (Room) the room to leave
def part_room(self,room):
pass
# helper function for get_rooms() for protocol-specific flags
# only needs to handle: FLAG_PARTED, FLAG_PENDING, FLAG_IN, FLAG_ALL
# @param flag (int) one of Room.FLAG_* enums
# @return (list of Room) rooms matching the flag
def _get_rooms(self,flag):
return []
# @param room (Room) the room to query
# @return (list of User) the Users in the specified room
def get_occupants(self,room):
return []
# @param room (Room) the room to query
# @return (str) the nick name we are using in the specified room
def get_nick(self,room):
return None
# @param room (Room) the room to query
# @param nick (str) the nick to examine
# @return (User) the "real" User behind the specified nick/room
def get_real(self,room,nick):
return nick
# @return (User) our username
def get_user(self):
return MailUser(self,self.opt('email.username'))
# @param user (str) a user id to parse
# @param typ (int) [Message.PRIVATE] either Message.GROUP or Message.PRIVATE
# @param real (User) [self] the "real" user behind this user
# @return (User) a new instance of this protocol's User subclass
def new_user(self,user,typ=None,real=None):
return MailUser(self,user,typ,real)
# @param name (object) the identifier for this Room
# @param nick (str) [None] the nick name to use in this Room
# @param pword (str) [None] the password for joining this Room
# @return (Room) a new instance of this protocol's Room subclass
def new_room(self,name,nick=None,pword=None):
return MailRoom(self,name,nick,pword)
################################################################################
# Helper functions
################################################################################
# convenience function for connecting the IMAP thread
def _connect_imap(self):
self.thread.connect()
# convenience function for connecting to the SMTP server
def _connect_smtp(self):
# all major email providers support SSL, so use it
try:
self.smtp = smtplib.SMTP(self._get_smtp(),port=587)
self.smtp.starttls()
self.smtp.ehlo()
except:
raise self.ConnectFailure('SMTP')
# if the protocol raises AuthFailure, SibylBot will never try to reconnect
try:
self.smtp.login(self.opt('email.username'),self.opt('email.password'))
except:
raise self.AuthFailure('SMTP')
# convenience wrapper for sending stuff
def _send(self,text,to):
msg = Message(self.get_user(),text,to=to)
self.bot.send(msg)
# wrapper for imap server in case config opts get edited
def _get_imap(self):
server = self.opt('email.username').split('@')[-1]
return (self.opt('email.imap') or ('imap.'+server))
# wrapper for smtp server in case config opts get edited
def _get_smtp(self):
server = self.opt('email.username').split('@')[-1]
return (self.opt('email.smtp') or ('smtp.'+server))
################################################################################
# IMAPThread class
################################################################################
class IMAPThread(Thread):
def __init__(self,proto):
super(IMAPThread,self).__init__()
self.daemon = True
self.proto = proto
self.imap = None
self.msgs = Queue()
# this method is called when doing IMAPThread().start()
# we will be using IMAP IDLE push notifications as described at:
# https://tools.ietf.org/html/rfc2177
# which allows us to near-instantly respond to new messages without polling
# REF: http://stackoverflow.com/a/18103279
def run(self):
while True:
# reconnect logic is handled in SibylBot, until that happens do nothing
if not self.imap:
time.sleep(1)
continue
# wait for the server to send us a new notification (this command blocks)
line = self.imap.readline().strip()
# if the line is blank, the server closed the connection
if not line:
try:
self.connect()
# raising exceptions in a Thread is messy, so we'll queue it instead
except self.proto.ProtocolError as e:
self.imap = None
self.msgs.put(e)
# if the line ends with "EXISTS" then there is a new message waiting
elif line.endswith('EXISTS'):
# to end the IDLE state and actually get the message we issue "DONE"
self.cmd('DONE')
# after we get the new message(s) and Queue them, we enter IDLE again
self.get_mail()
self.cmd('IDLE')
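# For reference, the IDLE exchange from RFC 2177 that run() relies on looks
# like this (tag values illustrative):
#   C: a001 IDLE
#   S: + idling
#   S: * 43 EXISTS        <- new-message notification matched above
#   C: DONE
#   S: a001 OK IDLE terminated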
def connect(self):
try:
self.imap = imaplib.IMAP4_SSL(self.proto._get_imap())
except:
raise self.proto.ConnectFailure('IMAP')
try:
self.imap.login(self.proto.opt('email.username'),
self.proto.opt('email.password'))
except:
raise self.proto.AuthFailure('IMAP')
# we have to specify which Inbox to use, and then enter the IDLE state
self.imap.select()
self.cmd('IDLE')
# @param s (str) the IMAP command to send
def cmd(self,s):
self.imap.send("%s %s\r\n"%(self.imap._new_tag(),s))
def get_mail(self):
# only look at messages that don't have the "\\Seen" flag
(status,nums) = self.imap.search('utf8','UNSEEN')
# get new messages and add them to our Queue
for n in nums[0].split(' '):
msg = self.imap.fetch(n,'(RFC822)')[1][0][1]
self.msgs.put(email.message_from_string(msg))
# flag messages for deletion if configured to do so
if self.proto.opt('email.delete'):
self.imap.store(n,'+FLAGS','\\Deleted')
# this tells the server to actually delete all flagged messages
self.imap.expunge()
|
jfrederickson/sibyl
|
protocols/sibyl_email.py
|
Python
|
gpl-3.0
| 13,650
|
##########################################################################
#
# Copyright (c) 2016, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import Gaffer
import GafferScene
Gaffer.Metadata.registerNode(
GafferScene.ShaderBall,
"description",
"""
Generates scenes suitable for rendering shader balls.
""",
"childNodesAreReadOnly", True,
plugs = {
"shader" : [
"description",
"""
The shader to be rendered.
""",
"noduleLayout:section", "left",
"nodule:type", "GafferUI::StandardNodule",
],
"resolution" : [
"description",
"""
The resolution of the shader ball image, which
is always a square.
""",
],
}
)
|
hradec/gaffer
|
python/GafferSceneUI/ShaderBallUI.py
|
Python
|
bsd-3-clause
| 2,328
|
from __future__ import print_function, division
import matplotlib
import logging
from sys import stdout
matplotlib.use('Agg') # Must be before importing matplotlib.pyplot or pylab!
from neuralnilm import (Net, RealApplianceSource,
BLSTMLayer, DimshuffleLayer,
BidirectionalRecurrentLayer)
from neuralnilm.source import (standardise, discretize, fdiff, power_and_fdiff,
RandomSegments, RandomSegmentsInMemory,
SameLocation)
from neuralnilm.experiment import run_experiment, init_experiment
from neuralnilm.net import TrainingError
from neuralnilm.layers import (MixtureDensityLayer, DeConv1DLayer,
SharedWeightsDenseLayer)
from neuralnilm.objectives import (scaled_cost, mdn_nll,
scaled_cost_ignore_inactive, ignore_inactive,
scaled_cost3)
from neuralnilm.plot import MDNPlotter, CentralOutputPlotter, Plotter
from neuralnilm.updates import clipped_nesterov_momentum
from neuralnilm.disaggregate import disaggregate
from lasagne.nonlinearities import sigmoid, rectify, tanh, identity
from lasagne.objectives import mse, binary_crossentropy
from lasagne.init import Uniform, Normal, Identity
from lasagne.layers import (LSTMLayer, DenseLayer, Conv1DLayer,
ReshapeLayer, FeaturePoolLayer, RecurrentLayer)
from lasagne.layers.batch_norm import BatchNormLayer
from lasagne.updates import nesterov_momentum, momentum
from functools import partial
import os
import __main__
from copy import deepcopy
from math import sqrt
import numpy as np
import theano.tensor as T
import gc
"""
447: first attempt at disaggregation
"""
NAME = os.path.splitext(os.path.split(__main__.__file__)[1])[0]
#PATH = "/homes/dk3810/workspace/python/neuralnilm/figures"
PATH = "/data/dk3810/figures"
SAVE_PLOT_INTERVAL = 1000
N_SEQ_PER_BATCH = 64
source_dict = dict(
filename='/data/dk3810/ukdale.h5',
window=("2013-03-18", None),
train_buildings=[1, 2, 3, 4, 5],
validation_buildings=[1, 2, 3, 4, 5],
n_seq_per_batch=N_SEQ_PER_BATCH,
standardise_input=True,
standardise_targets=True,
independently_center_inputs=True,
ignore_incomplete=True,
offset_probability=0.5,
ignore_offset_activations=True
)
net_dict = dict(
save_plot_interval=SAVE_PLOT_INTERVAL,
# loss_function=partial(ignore_inactive, loss_func=mdn_nll, seq_length=SEQ_LENGTH),
# loss_function=lambda x, t: mdn_nll(x, t).mean(),
# loss_function=lambda x, t: (mse(x, t) * MASK).mean(),
loss_function=lambda x, t: mse(x, t).mean(),
# loss_function=lambda x, t: binary_crossentropy(x, t).mean(),
# loss_function=partial(scaled_cost, loss_func=mse),
# loss_function=ignore_inactive,
# loss_function=partial(scaled_cost3, ignore_inactive=False),
# updates_func=momentum,
updates_func=clipped_nesterov_momentum,
updates_kwargs={'clip_range': (0, 10)},
learning_rate=1e-2,
learning_rate_changes_by_iteration={
1000: 1e-3,
5000: 1e-4
},
do_save_activations=True,
auto_reshape=False,
# plotter=CentralOutputPlotter
plotter=Plotter(n_seq_to_plot=32)
)
def exp_a(name, target_appliance, seq_length):
global source
source_dict_copy = deepcopy(source_dict)
source_dict_copy.update(dict(
target_appliance=target_appliance,
logger=logging.getLogger(name),
seq_length=seq_length
))
source = SameLocation(**source_dict_copy)
net_dict_copy = deepcopy(net_dict)
net_dict_copy.update(dict(
experiment_name=name,
source=source
))
NUM_FILTERS = 4
net_dict_copy['layers_config'] = [
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1) # (batch, features, time)
},
{
'label': 'conv0',
'type': Conv1DLayer, # convolve over the time axis
'num_filters': NUM_FILTERS,
'filter_length': 4,
'stride': 1,
'nonlinearity': None,
'border_mode': 'valid'
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1) # back to (batch, time, features)
},
{
'label': 'dense0',
'type': DenseLayer,
'num_units': (seq_length - 3) * NUM_FILTERS,
'nonlinearity': rectify
},
{
'label': 'dense1',
'type': DenseLayer,
'num_units': seq_length,
'nonlinearity': rectify
},
{
'label': 'dense2',
'type': DenseLayer,
'num_units': 128,
'nonlinearity': rectify
},
{
'label': 'dense3',
'type': DenseLayer,
'num_units': seq_length,
'nonlinearity': rectify
},
{
'type': DenseLayer,
'num_units': (seq_length - 3) * NUM_FILTERS,
'nonlinearity': rectify
},
{
'type': ReshapeLayer,
'shape': (N_SEQ_PER_BATCH, seq_length - 3, NUM_FILTERS)
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1) # (batch, features, time)
},
{
'type': DeConv1DLayer,
'num_output_channels': 1,
'filter_length': 4,
'stride': 1,
'nonlinearity': None,
'border_mode': 'full'
},
{
'type': DimshuffleLayer,
'pattern': (0, 2, 1) # back to (batch, time, features)
}
]
net = Net(**net_dict_copy)
return net
def main():
APPLIANCES = [
('a', ['fridge freezer', 'fridge', 'freezer'], 800),
('b', "'coffee maker'", 512),
('c', "'dish washer'", 2000),
('d', "'hair dryer'", 256),
('e', "'kettle'", 256),
('f', "'oven'", 2000),
('g', "'toaster'", 256),
('h', "'light'", 2000),
('i', ['washer dryer', 'washing machine'], 1024)
]
for experiment, appliance, seq_length in APPLIANCES[-1:]:
full_exp_name = NAME + experiment
func_call = init_experiment(PATH, 'a', full_exp_name)
func_call = func_call[:-1] + ", {}, {})".format(appliance, seq_length)
logger = logging.getLogger(full_exp_name)
try:
net = eval(func_call)
run_experiment(net, epochs=None)
except KeyboardInterrupt:
logger.info("KeyboardInterrupt")
break
except Exception as exception:
logger.exception("Exception")
# raise
else:
del net.source
del net
gc.collect()
finally:
logging.shutdown()
if __name__ == "__main__":
main()
"""
Emacs variables
Local Variables:
compile-command: "cp /home/jack/workspace/python/neuralnilm/scripts/e475.py /mnt/sshfs/imperial/workspace/python/neuralnilm/scripts/"
End:
"""
|
mmottahedi/neuralnilm_prototype
|
scripts/e475.py
|
Python
|
mit
| 7,035
|
# Copyright (C) 2013 Midokura PTE LTD
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Rossella Sblendido, Midokura Japan KK
# @author: Tomoe Sugihara, Midokura Japan KK
# @author: Ryu Ishimoto, Midokura Japan KK
from neutron.agent.linux import dhcp
from neutron.openstack.common import log as logging
from neutron.plugins.midonet.common import config # noqa
LOG = logging.getLogger(__name__)
class DhcpNoOpDriver(dhcp.DhcpLocalProcess):
@classmethod
def existing_dhcp_networks(cls, conf, root_helper):
"""Return a list of existing networks ids that we have configs for."""
return []
@classmethod
def check_version(cls):
"""Execute version checks on DHCP server."""
return float(1.0)
def disable(self, retain_port=False):
"""Disable DHCP for this network."""
if not retain_port:
self.device_manager.destroy(self.network, self.interface_name)
self._remove_config_files()
def reload_allocations(self):
"""Force the DHCP server to reload the assignment database."""
pass
def spawn_process(self):
pass
|
virtualopensystems/neutron
|
neutron/plugins/midonet/agent/midonet_driver.py
|
Python
|
apache-2.0
| 1,689
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for class Step."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import numpy
from tensorflow.contrib.distribute.python import combinations
from tensorflow.contrib.distribute.python.single_loss_example import single_loss_example
from tensorflow.python.eager import context
from tensorflow.python.eager import test
from tensorflow.python.ops import variables
class SingleLossStepTest(test.TestCase, parameterized.TestCase):
@combinations.generate(
combinations.times(
combinations.distributions_and_v1_optimizers(),
combinations.combine(mode=combinations.graph_and_eager_modes),
combinations.combine(is_tpu=[False])) +
combinations.combine(
distribution=[combinations.tpu_strategy],
optimizer_fn=combinations.optimizers_v1,
mode=["graph"],
is_tpu=[True]))
def testTrainNetwork(self, distribution, optimizer_fn, is_tpu):
with distribution.scope():
single_loss_step, layer = single_loss_example(
optimizer_fn, distribution, use_bias=True, iterations_per_step=2)
if context.executing_eagerly():
single_loss_step.initialize()
run_step = single_loss_step
else:
with self.cached_session() as sess:
sess.run(single_loss_step.initialize())
run_step = sess.make_callable(single_loss_step())
self.evaluate(variables.global_variables_initializer())
weights, biases = [], []
for _ in range(5):
run_step()
weights.append(self.evaluate(layer.kernel))
biases.append(self.evaluate(layer.bias))
error = abs(numpy.add(numpy.squeeze(weights), numpy.squeeze(biases)) - 1)
is_not_increasing = all(y <= x for x, y in zip(error, error[1:]))
self.assertTrue(is_not_increasing)
if __name__ == "__main__":
test.main()
|
jbedorf/tensorflow
|
tensorflow/contrib/distribute/python/step_fn_test.py
|
Python
|
apache-2.0
| 2,626
|
from django import forms
from crispy_forms.helper import FormHelper, Layout
from crispy_forms.layout import Field, Div, Row, HTML
from crispy_forms.bootstrap import FormActions, TabHolder, Tab
from ..models import UnidadMedida
from django.utils.translation import ugettext_lazy as _
from apps.utils.forms import smtSave, btnCancel, btnReset
from django.utils.text import capfirst, get_text_list
from unicodedata import normalize
class UnidadMedidaForm(forms.ModelForm):
"""Class UnidadMedidaForm."""
class Meta:
model = UnidadMedida
exclude = ('',)
widgets = {
'codigo': forms.TextInput(attrs={'class': 'form-control', 'required':'true', 'placeholder': 'Ingrese codigo'}),
'nombre': forms.TextInput(attrs={'class': 'form-control', 'required':'true', 'placeholder': 'Ingrese nombre'})
}
|
upeu-jul-20161-epis-ads2/MedicFast
|
apps/atencion/forms/UnidadMedidaForm.py
|
Python
|
bsd-3-clause
| 851
|
#+
# Distutils script to build and install Pycairo. To avoid ending
# up with files in this directory belonging to root instead of
# the current user, do the build/install in two steps. First, as
# an ordinary user:
#
# python3 setup.py build
#
# then:
#
# sudo python3 setup.py install
#
# To get rid of build products afterwards, do
#
# python3 setup.py clean --all
#-
import sys
import os
import subprocess
import distutils.core as dic
from distutils.command.build import \
build as std_build
from distutils.command.clean import \
clean as std_clean
pycairo_version = '1.10.1'
cairo_version_required = '1.10.2'
python_version_required = (3,0)
pkgconfig_file = 'py3cairo.pc'
config_file = 'src/config.h'
module_constants_file = "src/cairomodule_constants.h"
class my_build(std_build) :
"customization of build to generate additional files."
def run(self) :
create_module_constants_file()
createConfigFile(config_file)
super().run()
createPcFile(pkgconfig_file)
#end run
#end my_build
class my_clean(std_clean) :
"customization of clean to remove additional files and directories that I generate."
def run(self) :
super().run()
for \
dir \
in \
(
"doc/_build",
) \
:
if os.path.isdir(dir) :
for root, subdirs, subfiles in os.walk(dir, topdown = False) :
for item in subfiles :
os.unlink(os.path.join(root, item))
#end for
for item in subdirs :
os.rmdir(os.path.join(root, item))
#end for
#end for
os.rmdir(dir)
#end if
#end for
for \
item \
in \
(
pkgconfig_file,
config_file,
module_constants_file,
) \
:
try :
os.unlink(item)
except OSError :
pass # assume ENOENT
#end try
#end for
#end run
#end my_clean
def call(command):
pipe = subprocess.Popen(command, shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
pipe.wait()
return pipe
def pkg_config_version_check(pkg, version):
check = '%s >= %s' % (pkg, version)
pipe = call("pkg-config --print-errors --exists '%s'" % (check,))
if pipe.returncode == 0:
print(check, ' Successful')
else:
print(check, ' Failed')
raise SystemExit(pipe.stderr.read().decode())
def pkg_config_parse(opt, pkg):
check = "pkg-config %s %s" % (opt, pkg)
pipe = call("pkg-config %s %s" % (opt, pkg))
if pipe.returncode != 0:
print(check, ' Failed')
raise SystemExit(pipe.stderr.read().decode())
output = pipe.stdout.read()
output = output.decode() # get the str
opt = opt[-2:]
return [x.lstrip(opt) for x in output.split()]
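# Worked example (a sketch, not from the original file): for a typical cairo
# install, pkg_config_parse('--cflags-only-I', 'cairo') reads output such as
# '-I/usr/include/cairo -I/usr/include/freetype2' and, after splitting and
# lstrip()-ing the '-I' prefix characters, returns
# ['/usr/include/cairo', '/usr/include/freetype2'].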
def createPcFile(PcFile):
print('creating %s' % PcFile)
with open(PcFile, 'w') as fo:
fo.write ("""\
prefix=%s
Name: Pycairo
Description: Python 3 bindings for cairo
Version: %s
Requires: cairo
Cflags: -I${prefix}/include/pycairo
Libs:
""" % (sys.prefix, pycairo_version)
)
def createConfigFile(ConfigFile):
print('creating %s' % ConfigFile)
v = pycairo_version.split('.')
with open(ConfigFile, 'w') as fo:
fo.write ("""\
// Configuration header created by setup.py - do not edit
#ifndef _CONFIG_H
#define _CONFIG_H 1
#define PYCAIRO_VERSION_MAJOR %s
#define PYCAIRO_VERSION_MINOR %s
#define PYCAIRO_VERSION_MICRO %s
#define VERSION "%s"
#endif // _CONFIG_H
""" % (v[0], v[1], v[2], pycairo_version)
)
if sys.version_info < python_version_required:
raise SystemExit('Error: Python >= %s is required' %
(python_version_required,))
pkg_config_version_check ('cairo', cairo_version_required)
if sys.platform == 'win32':
runtime_library_dirs = []
else:
runtime_library_dirs = pkg_config_parse('--libs-only-L', 'cairo')
def create_module_constants_file() :
"generates C source that wraps all the repetitive CAIRO_HAS_xxx constants."
out = open(module_constants_file, "w")
out.write(" /* constants */\n")
for \
name \
in \
(
"ATSUI_FONT",
"FT_FONT",
"FC_FONT",
"GLITZ_SURFACE",
"IMAGE_SURFACE",
"PDF_SURFACE",
"PNG_FUNCTIONS",
"PS_SURFACE",
"RECORDING_SURFACE",
"SVG_SURFACE",
"USER_FONT",
"QUARTZ_SURFACE",
"WIN32_FONT",
"WIN32_SURFACE",
"XCB_SURFACE",
"XLIB_SURFACE",
) \
:
out.write \
(
"#if CAIRO_HAS_%(name)s\n"
" PyModule_AddIntConstant(m, \"HAS_%(name)s\", 1);\n"
"#else\n"
" PyModule_AddIntConstant(m, \"HAS_%(name)s\", 0);\n"
"#endif\n"
%
{
"name" : name,
}
)
#end for
out.flush()
#end create_module_constants_file
cairo = dic.Extension(
name = 'cairo._cairo',
sources = ['src/cairomodule.c',
'src/context.c',
'src/font.c',
'src/matrix.c',
'src/path.c',
'src/pattern.c',
'src/region.c',
'src/surface.c',
],
include_dirs = pkg_config_parse('--cflags-only-I', 'cairo'),
library_dirs = pkg_config_parse('--libs-only-L', 'cairo'),
libraries = pkg_config_parse('--libs-only-l', 'cairo'),
runtime_library_dirs = runtime_library_dirs,
)
dic.setup \
(
cmdclass =
{
"build" : my_build,
"clean" : my_clean,
},
name = "pycairo",
version = pycairo_version,
description = "python interface for cairo",
ext_modules = [cairo],
package_dir = {"cairo" : "src"},
packages = ["cairo"],
data_files =
[
('include/pycairo', ['src/py3cairo.h']),
('lib/pkgconfig', [pkgconfig_file]),
],
)
|
ldo/pycairo
|
setup.py
|
Python
|
gpl-3.0
| 6,235
|
# ------------------------------------------------------------------------------
# This file is part of Appy, a framework for building applications in the Python
# language. Copyright (C) 2007 Gaetan Delannay
# Appy is free software; you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation; either version 3 of the License, or (at your option) any later
# version.
# Appy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along with
# Appy. If not, see <http://www.gnu.org/licenses/>.
# ------------------------------------------------------------------------------
from appy.fields import Field
from appy.px import Px
# ------------------------------------------------------------------------------
class Integer(Field):
pxView = pxCell = Px('''
<x><x>:value</x>
<input type="hidden" if="masterCss"
class=":masterCss" value=":value" name=":name" id=":name"/>
</x>''')
pxEdit = Px('''
<input id=":name" name=":name" size=":field.width"
maxlength=":field.maxChars"
value=":inRequest and requestValue or value" type="text"/>''')
pxSearch = Px('''
<!-- From -->
<x var="fromName='%s*int' % widgetName">
<label lfor=":fromName">:_('search_from')</label>
<input type="text" name=":fromName" maxlength=":field.maxChars"
value=":field.sdefault[0]" size=":field.swidth"/>
</x>
<!-- To -->
<x var="toName='%s_to' % name">
<label lfor=":toName">:_('search_to')</label>
<input type="text" name=":toName" maxlength=":field.maxChars"
value=":field.sdefault[1]" size=":field.swidth"/>
</x><br/>''')
def __init__(self, validator=None, multiplicity=(0,1), default=None,
show=True, page='main', group=None, layouts=None, move=0,
indexed=False, searchable=False, specificReadPermission=False,
specificWritePermission=False, width=5, height=None,
maxChars=13, colspan=1, master=None, masterValue=None,
focus=False, historized=False, mapping=None, label=None,
sdefault=('',''), scolspan=1, swidth=None, sheight=None,
persist=True):
Field.__init__(self, validator, multiplicity, default, show, page,
group, layouts, move, indexed, searchable,
specificReadPermission, specificWritePermission, width,
height, maxChars, colspan, master, masterValue, focus,
historized, mapping, label, sdefault, scolspan, swidth,
sheight, persist)
self.pythonType = int
def validateValue(self, obj, value):
try:
value = self.pythonType(value)
except ValueError:
return obj.translate('bad_%s' % self.pythonType.__name__)
def getStorableValue(self, value):
if not self.isEmptyValue(value): return self.pythonType(value)
def getFormattedValue(self, obj, value, showChanges=False):
if self.isEmptyValue(value): return ''
return str(value)
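# Usage sketch (not part of the original file; the option values are
# illustrative): in an Appy class one would declare a field such as
#
#   age = Integer(multiplicity=(1,1), width=3, maxChars=3)
#
# which renders pxEdit on edit layouts, pxView/pxCell elsewhere, and
# validates/stores values through the int conversions above.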
# ------------------------------------------------------------------------------
|
Eveler/libs
|
__Python__/ufms_blanks/appy3/fields/integer.py
|
Python
|
gpl-3.0
| 3,493
|
# Copyright 2013 Thierry Carrez <thierry@openstack.org>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from collections import OrderedDict
from django.http import HttpResponse
from django.shortcuts import render
from django.views.decorators.http import require_POST
from storyboard.projects.models import Project
from storyboard.projects import utils
from storyboard.stories.models import Task
def default_list(request):
return render(request, "projects.list.html", {
'projects': Project.objects.all(),
})
def dashboard(request, projectname, group=False):
ref, projects = utils.retrieve_projects(projectname, group)
bugcount = Task.objects.filter(project__in=projects,
story__is_bug=True,
story__priority=0).count()
if group:
return render(request, "projects.group.html", {
'ref': ref,
'is_group': group,
'bugtriagecount': bugcount,
})
return render(request, "projects.dashboard.html", {
'ref': ref,
'is_group': group,
'bugtriagecount': bugcount,
})
def list_featuretasks(request, projectname, group=False):
page_type = "featuretasks"
ref, projects = utils.retrieve_projects(projectname, group)
bugcount = Task.objects.filter(project__in=projects,
story__is_bug=True,
story__priority=0).count()
featuretasks = Task.objects.filter(project__in=projects,
story__is_bug=False,
status__in=['T', 'R'])
featuretasks = utils.order_results(request, page_type, featuretasks)
p_size, p_count, p_number = utils.get_pagination(request,
len(featuretasks))
if p_size != -1:
featuretasks = featuretasks[p_number * p_size: (p_number + 1) * p_size]
arrow_object = utils.build_order_arrows_object(request, page_type)
return render(request, "projects.list_tasks.html", {
'title': "Active feature tasks",
'page_type': page_type,
'ref': ref,
'is_group': group,
'name': projectname,
'bugtriagecount': bugcount,
'tasks': featuretasks,
'page_count': p_count,
'page_number': p_number,
'page_size': p_size,
'arrow_object': arrow_object,
'is_bug': False
})
def list_bugtasks(request, projectname, group=False):
page_type = "bugtasks"
ref, projects = utils.retrieve_projects(projectname, group)
bugcount = Task.objects.filter(project__in=projects,
story__is_bug=True,
story__priority=0).count()
bugtasks = Task.objects.filter(project__in=projects,
story__is_bug=True,
status__in=['T', 'R'])
bugtasks = utils.order_results(request, page_type, bugtasks)
p_size, p_count, p_number = utils.get_pagination(request, len(bugtasks))
if p_size != -1:
bugtasks = bugtasks[p_number * p_size: (p_number + 1) * p_size]
arrow_object = utils.build_order_arrows_object(request, page_type)
return render(request, "projects.list_tasks.html", {
'title': "Active bug tasks",
'page_type': page_type,
'ref': ref,
'is_group': group,
'bugtriagecount': bugcount,
'tasks': bugtasks,
'page_count': p_count,
'page_number': p_number,
'page_size': p_size,
'arrow_object': arrow_object,
'is_bug': True,
})
def list_bugtriage(request, projectname, group=False):
page_type = "bugtriage"
ref, projects = utils.retrieve_projects(projectname, group)
tasks = Task.objects.filter(project__in=projects,
story__is_bug=True,
story__priority=0)
tasks = utils.order_results(request, page_type, tasks)
bugcount = tasks.count()
p_size, p_count, p_number = utils.get_pagination(request, len(tasks))
if p_size != -1:
tasks = tasks[p_number * p_size: (p_number + 1) * p_size]
arrow_object = utils.build_order_arrows_object(request, page_type)
return render(request, "projects.list_tasks.html", {
'title': "Bugs needing triage",
'page_type': page_type,
'ref': ref,
'is_group': group,
'bugtriagecount': bugcount,
'tasks': tasks,
'page_count': p_count,
'page_number': p_number,
'page_size': p_size,
'arrow_object': arrow_object,
'is_bug': True,
})
@require_POST
def set_order(request):
order_dict = request.session.get("order_dict", dict())
page_type = request.POST.get("page_type")
order_field = request.POST.get("order_field")
    # multi-field ordering will be implemented later with search requests
multi_field = request.POST.get("is_multi_field")
if not order_field:
raise RuntimeError("order_field is not set")
if page_type not in order_dict:
order_dict[page_type] = utils.build_default_order_dict()
order_type = order_dict[page_type].get(order_field)
if not multi_field:
order_dict[page_type] = OrderedDict()
if not order_type:
order_type = "desc"
else:
order_type = "asc" if order_type == "desc" else "desc"
order_dict[page_type][order_field] = order_type
    # Write the dict back so the session persists it (it may be newly created
    # or mutated in place, and in-place mutation alone won't mark it dirty)
request.session["order_dict"] = order_dict
return HttpResponse(status=202)
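# Illustrative walk-through (not part of the original view code), assuming
# the default ordering dict from utils.build_default_order_dict() does not
# already contain the fields used below. Starting from an empty session,
# three single-field POSTs for page_type "bugtasks" evolve the stored
# ordering like this:
#   POST order_field=priority  ->  {"bugtasks": {"priority": "desc"}}
#   POST order_field=priority  ->  {"bugtasks": {"priority": "asc"}}
#   POST order_field=title     ->  {"bugtasks": {"title": "desc"}}
# i.e. repeating a field flips its direction, while a new field (in
# single-field mode) replaces the previous ordering entirely.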
|
Konovalov-Nik/storyboard
|
storyboard/projects/views.py
|
Python
|
apache-2.0
| 6,194
|
"""
Common interface(s) to Attelo classifiers.
"""
from abc import ABCMeta, abstractmethod
from six import with_metaclass
class AttachClassifier(with_metaclass(ABCMeta, object)):
'''
    An attachment classifier associates samples with attachment
probabilities. Attachment classifiers are agnostic to
whether their samples correspond to directed or undirected
edges (the only difference as far as the classifiers are
concerned may be that there are a little over half as many
edges in the undirected case)
Attributes
----------
can_predict_proba: bool
True if scores should be interpreted as probabilities
'''
@abstractmethod
def fit(self, dpacks, targets):
"""
Learns a classifier from a collection of datapacks
Parameters
----------
dpacks: [DataPack]
one datapack per document, each with its own set of
features (we expect multiple datapacks because this
allows for a notion of structured learning that
somehow exploits the grouping of samples)
targets: [[int]]
For each datapack, a list of ternary values encoded
as ints (-1: not attached, 0: unknown, 1: attached).
            Each list must have the same number of items as there
            are samples in its datapack counterpart.
Returns
-------
self: object
"""
raise NotImplementedError
@abstractmethod
def predict_score(self, dpack):
"""
Parameters
----------
dpack: DataPack
A single document for which we would like to predict
labels
Returns
-------
scores: array(float)
An array (one score per sample)
"""
        raise NotImplementedError
class LabelClassifier(with_metaclass(ABCMeta, object)):
'''
A label classifier associates labels with scores.
    Scores are returned as a (sample x label) array,
    one score per candidate label for each sample
Attributes
----------
can_predict_proba: bool
True if scores should be interpreted as probabilities
'''
@abstractmethod
def fit(self, dpacks, targets):
"""
Learns a classifier and the labels attribute from a
multipack of documents
Parameters
----------
dpacks: [DataPack]
A list of documents
targets: [[int]]
For each datapack, a list of label numbers, one per
sample. All datapacks are expected to use the same
label numbering scheme. Use `DataPack.get_label`
to recover the string values.
            Each list must have the same number of items as there
            are samples in its datapack counterpart.
Returns
-------
self: object
"""
raise NotImplementedError
@abstractmethod
def predict_score(self, dpack):
"""
Parameters
----------
dpack: DataPack
A single document for which we would like to predict
labels
Returns
-------
weights: array(float)
A 2D array (sample x label) associating each label with
a score. Mind your array dimensions.
"""
        raise NotImplementedError
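# A minimal illustrative implementation of the attachment interface above
# (a sketch, not part of attelo proper): it ignores the features entirely
# and returns the empirical attachment rate observed at fit time for every
# sample. It assumes len(dpack) yields the number of samples in a datapack.
class ConstantAttachClassifier(AttachClassifier):
    '''Baseline classifier scoring every sample with one constant.'''
    can_predict_proba = True
    def __init__(self):
        self._rate = 0.5
    def fit(self, dpacks, targets):
        # count attached (+1) samples across all documents
        attached = sum(sum(1 for t in ts if t == 1) for ts in targets)
        total = sum(len(ts) for ts in targets)
        self._rate = float(attached) / total if total else 0.5
        return self
    def predict_score(self, dpack):
        return [self._rate] * len(dpack)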
|
kowey/attelo
|
attelo/learning/interface.py
|
Python
|
gpl-3.0
| 3,346
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
__author__ = 'ar'
import json
import keras
from keras.layers import Convolution1D, Convolution2D, Convolution3D
from keras.layers import MaxPooling1D, MaxPooling2D, MaxPooling3D,\
AveragePooling1D, AveragePooling2D, AveragePooling3D
from keras.layers import UpSampling1D, UpSampling2D, UpSampling3D
from keras.layers import InputLayer, Activation, Flatten, Dense, Dropout
####################################
dictNonlinFunJson2Keras = {
"SoftPlus" : "softplus",
"SoftSign" : "softsign",
"ReLU" : "relu",
"Tanh" : "tanh",
"Sigmoid" : "sigmoid",
"Hard Sigmoid" : "hard_sigmoid",
"Linear" : "linear"
}
def nonlinFunJson2Keras(strJson):
# if strJson in dictNonlinFunJson2Keras.keys():
# return dictNonlinFunJson2Keras[strJson]
# return 'relu'
    #FIXME: currently we believe that the str-name of the function is a Keras-available name (see: https://keras.io/activations/)
return strJson
####################################
def buildLayerConvolution1D(cfgNode):
tcfg=cfgNode.jsonParams
# assert (cfgNode.jsonCfg['layerType']=='convolution1d')
numberFilters = int(tcfg['filtersCount']) if tcfg['filtersCount'] else 1
filterSizeX = int(tcfg['filterWidth']) if tcfg['filterWidth'] else 1
strNonLinFunc = tcfg['activationFunction']
    # keep an explicit False; fall back to True only when the flag is unset
    isTrainable = tcfg['isTrainable'] if tcfg['isTrainable'] is not None else True
    # Convolution1D (Keras 1.x) expects a scalar subsample_length, not a 2D stride tuple
    paramStride = 1
    strBorderMode = 'same'
    tmpLayer = Convolution1D(numberFilters, filterSizeX,
                             subsample_length=paramStride,
                             border_mode=strBorderMode,
                             activation=nonlinFunJson2Keras(strNonLinFunc))
tmpLayer.trainable = isTrainable
return tmpLayer
def buildLayerConvolution2D(cfgNode):
tcfg=cfgNode.jsonParams
# assert (cfgNode.jsonCfg['layerType']=='convolution2d')
numberFilters = int(tcfg['filtersCount']) if tcfg['filtersCount'] else 1
# FIXME: add stride to layer config in Builder
paramStride = (1, 1)
filterSizeX = int(tcfg['filterWidth']) if tcfg['filterWidth'] else 1
filterSizeY = int(tcfg['filterHeight']) if tcfg['filterHeight'] else 1
strNonLinFunc = tcfg['activationFunction']
    isTrainable = tcfg['isTrainable'] if tcfg['isTrainable'] is not None else True  # keep an explicit False
# FIXME: parameter selection currently not implemented in WEB-UI !!!
strBorderMode = 'same'
tmpLayer = Convolution2D(numberFilters, filterSizeX, filterSizeY,
subsample=paramStride,
border_mode=strBorderMode,
activation=nonlinFunJson2Keras(strNonLinFunc))
tmpLayer.trainable = isTrainable
return tmpLayer
def buildLayerConvolution3D(cfgNode):
tcfg=cfgNode.jsonParams
# assert (cfgNode.jsonCfg['layerType']=='convolution3d')
numberFilters = int(tcfg['filtersCount']) if tcfg['filtersCount'] else 1
filterSizeX = int(tcfg['filterWidth']) if tcfg['filterWidth'] else 1
filterSizeY = int(tcfg['filterHeight']) if tcfg['filterHeight'] else 1
filterSizeZ = int(tcfg['filterDepth']) if tcfg['filterDepth'] else 1
strNonLinFunc = tcfg['activationFunction']
    isTrainable = tcfg['isTrainable'] if tcfg['isTrainable'] is not None else True  # keep an explicit False
    # FIXME: parameter selection currently not implemented in WEB-UI !!!
    paramStride = (1, 1, 1)  # Convolution3D expects a 3-tuple stride
strBorderMode = 'same'
tmpLayer = Convolution3D(numberFilters, filterSizeX, filterSizeY, filterSizeZ,
subsample=paramStride,
border_mode=strBorderMode,
activation=nonlinFunJson2Keras(strNonLinFunc))
tmpLayer.trainable = isTrainable
return tmpLayer
def buildLayerPooling1D(cfgNode):
tcfg = cfgNode.jsonParams
# assert ( cfgNode.jsonCfg['layerType'] == 'pooling1d' )
subsamplingSizeWidth = tcfg['subsamplingSizeWidth']
subsamplingType = tcfg['subsamplingType']
#FIXME: parameter selection currently not implemented in WEB-UI !!!
parStrides = None
parBorderMode = 'valid'
if subsamplingType=='max_pooling':
        # 1D pooling layers in Keras 1.x take `stride` (singular), not `strides`
        tmpLayer = MaxPooling1D(pool_length=subsamplingSizeWidth,
                                stride=parStrides,
                                border_mode=parBorderMode)
    else:
        tmpLayer = AveragePooling1D(pool_length=subsamplingSizeWidth,
                                    stride=parStrides,
                                    border_mode=parBorderMode)
return tmpLayer
def buildLayerPooling2D(cfgNode):
tcfg = cfgNode.jsonParams
# assert ( cfgNode.jsonCfg['layerType'] == 'pooling2d' )
subsamplingSizeWidth = tcfg['subsamplingSizeWidth']
subsamplingSizeHeight = tcfg['subsamplingSizeHeight']
subsamplingType = tcfg['subsamplingType']
parPoolSize = (subsamplingSizeWidth, subsamplingSizeHeight)
#FIXME: parameter selection currently not implemented in WEB-UI !!!
parStrides = None
parBorderMode = 'valid'
if subsamplingType=='max_pooling':
tmpLayer = MaxPooling2D(pool_size=parPoolSize,
strides=parStrides,
border_mode=parBorderMode)
else:
        tmpLayer = AveragePooling2D(pool_size=parPoolSize,
                                    strides=parStrides,
                                    border_mode=parBorderMode)
return tmpLayer
def buildLayerPooling3D(cfgNode):
# assert ( cfgNode.jsonCfg['layerType'] == 'pooling3d' )
tcfg = cfgNode.jsonParams
subsamplingSizeWidth = tcfg['subsamplingSizeWidth']
subsamplingSizeHeight = tcfg['subsamplingSizeHeight']
subsamplingSizeDepth = tcfg['subsamplingSizeDepth']
subsamplingType = tcfg['subsamplingType']
parPoolSize = (subsamplingSizeWidth, subsamplingSizeHeight, subsamplingSizeDepth)
#FIXME: parameter selection currently not implemented in WEB-UI !!!
parStrides = None
parBorderMode = 'valid'
if subsamplingType=='max_pooling':
tmpLayer = MaxPooling3D(pool_size=parPoolSize,
strides=parStrides,
border_mode=parBorderMode)
else:
        tmpLayer = AveragePooling3D(pool_size=parPoolSize,
                                    strides=parStrides,
                                    border_mode=parBorderMode)
return tmpLayer
def buildLayerActivation(cfgNode):
# assert (cfgNode.jsonCfg['layerType'] == 'activation')
tcfg = cfgNode.jsonParams
activationFunction = tcfg['activationFunction']
activationFunction = nonlinFunJson2Keras(activationFunction)
tmpLayer = Activation(activation=activationFunction)
return tmpLayer
def buildLayerFlatten(cfgNode):
# assert (cfgNode.jsonCfg['layerType'] == 'flatten')
return Flatten()
def buildLayerDense(cfgNode):
# assert (cfgNode.jsonCfg['layerType'] == 'dense')
tcfg = cfgNode.jsonParams
neuronsCount = tcfg['neuronsCount']
activationFunction = tcfg['activationFunction']
activationFunction = nonlinFunJson2Keras(activationFunction)
isTrainable = tcfg['isTrainable']
tmpLayer = Dense(output_dim=neuronsCount,
activation=activationFunction)
tmpLayer.trainable = isTrainable
return tmpLayer
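# Illustrative usage sketch (not part of the original module): the builder
# functions above only need an object exposing a `jsonParams` dict, so a
# minimal stand-in node is enough to build a single Keras layer. The field
# names below mirror the keys read by buildLayerConvolution2D.
class _FakeCfgNode(object):
    def __init__(self, jsonParams):
        self.jsonParams = jsonParams
def _demoBuildConvolution2D():
    node = _FakeCfgNode({
        'filtersCount': '16',
        'filterWidth': '3',
        'filterHeight': '3',
        'activationFunction': 'relu',
        'isTrainable': True,
    })
    return buildLayerConvolution2D(node)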
if __name__ == '__main__':
pass
|
SummaLabs/DLS
|
app/backend/core/models/flow_parser_v2_helper_bk.py
|
Python
|
mit
| 7,298
|
# -*- coding: utf-8 -*-
# Copyright (C) 2006-2008 Vodafone España, S.A.
# Copyright (C) 2008-2009 Warp Networks, S.L.
# Author: Pablo Martí
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Unittests for the encoding module"""
from twisted.trial import unittest
from wader.common.encoding import (CONTROL_0, CONTROL_1, LATIN_EX_A,
LATIN_EX_B, check_if_ucs2,
pack_ucs2_bytes, unpack_ucs2_bytes,
unpack_ucs2_bytes_in_ts31101_80,
unpack_ucs2_bytes_in_ts31101_81,
unpack_ucs2_bytes_in_ts31101_82)
CTL_0 = '007F'
CTL_1 = '00FF'
LTN_A = '017F'
LTN_B = '024F'
class TestEncoding(unittest.TestCase):
"""Tests for encoding"""
def test_check_if_ucs2(self):
self.assertEqual(check_if_ucs2(CTL_0), True)
self.assertEqual(check_if_ucs2(CTL_1), True)
self.assertEqual(check_if_ucs2(LTN_A), True)
self.assertEqual(check_if_ucs2(LTN_B), True)
self.assertEqual(check_if_ucs2('6C34'), True)
self.assertEqual(
check_if_ucs2('0056006F006400610066006F006E0065'), True)
self.assertEqual(check_if_ucs2('003'), False)
# XXX: This should be invalid but our code fails at the moment
# XXX: Disable as people expect tests to run cleanly unless something
# just broke, and there's no way to express 'known failure'
#self.assertEqual(check_if_ucs2('D834DD1E'), False)
def test_check_if_ucs2_limit_control_0(self):
self.assertEqual(check_if_ucs2(CTL_0, limit=CONTROL_0), True)
self.assertEqual(check_if_ucs2(CTL_1, limit=CONTROL_0), False)
self.assertEqual(check_if_ucs2(LTN_A, limit=CONTROL_0), False)
self.assertEqual(check_if_ucs2(LTN_B, limit=CONTROL_0), False)
self.assertEqual(check_if_ucs2('6C34', limit=CONTROL_0), False)
self.assertEqual(
check_if_ucs2(CTL_0 + CTL_0 + CTL_0, limit=CONTROL_0), True)
self.assertEqual(
check_if_ucs2('6C34' + CTL_0 + CTL_0, limit=CONTROL_0), False)
self.assertEqual(
check_if_ucs2(CTL_0 + '6C34' + CTL_0, limit=CONTROL_0), False)
self.assertEqual(
check_if_ucs2(CTL_0 + CTL_0 + '6C34', limit=CONTROL_0), False)
def test_check_if_ucs2_limit_control_1(self):
self.assertEqual(check_if_ucs2(CTL_0, limit=CONTROL_1), True)
self.assertEqual(check_if_ucs2(CTL_1, limit=CONTROL_1), True)
self.assertEqual(check_if_ucs2(LTN_A, limit=CONTROL_1), False)
self.assertEqual(check_if_ucs2(LTN_B, limit=CONTROL_1), False)
self.assertEqual(check_if_ucs2('6C34', limit=CONTROL_1), False)
self.assertEqual(
check_if_ucs2(CTL_1 + CTL_1 + CTL_1, limit=CONTROL_1), True)
self.assertEqual(
check_if_ucs2('6C34' + CTL_1 + CTL_1, limit=CONTROL_1), False)
self.assertEqual(
check_if_ucs2(CTL_1 + '6C34' + CTL_1, limit=CONTROL_1), False)
self.assertEqual(
check_if_ucs2(CTL_1 + CTL_1 + '6C34', limit=CONTROL_1), False)
def test_check_if_ucs2_limit_extended_latin_a(self):
self.assertEqual(check_if_ucs2(CTL_0, limit=LATIN_EX_A), True)
self.assertEqual(check_if_ucs2(CTL_1, limit=LATIN_EX_A), True)
self.assertEqual(check_if_ucs2(LTN_A, limit=LATIN_EX_A), True)
self.assertEqual(check_if_ucs2(LTN_B, limit=LATIN_EX_A), False)
self.assertEqual(check_if_ucs2('6C34', limit=LATIN_EX_A), False)
self.assertEqual(
check_if_ucs2(LTN_A + LTN_A + LTN_A, limit=LATIN_EX_A), True)
self.assertEqual(
check_if_ucs2('6C34' + LTN_A + LTN_A, limit=LATIN_EX_A), False)
self.assertEqual(
check_if_ucs2(LTN_A + '6C34' + LTN_A, limit=LATIN_EX_A), False)
self.assertEqual(
check_if_ucs2(LTN_A + LTN_A + '6C34', limit=LATIN_EX_A), False)
def test_check_if_ucs2_limit_extended_latin_b(self):
self.assertEqual(check_if_ucs2(CTL_0, limit=LATIN_EX_B), True)
self.assertEqual(check_if_ucs2(CTL_1, limit=LATIN_EX_B), True)
self.assertEqual(check_if_ucs2(LTN_A, limit=LATIN_EX_B), True)
self.assertEqual(check_if_ucs2(LTN_B, limit=LATIN_EX_B), True)
self.assertEqual(check_if_ucs2('6C34', limit=LATIN_EX_B), False)
self.assertEqual(
check_if_ucs2(LTN_B + LTN_B + LTN_B, limit=LATIN_EX_B), True)
self.assertEqual(
check_if_ucs2('6C34' + LTN_B + LTN_B, limit=LATIN_EX_B), False)
self.assertEqual(
check_if_ucs2(LTN_B + '6C34' + LTN_B, limit=LATIN_EX_B), False)
self.assertEqual(
check_if_ucs2(LTN_B + LTN_B + '6C34', limit=LATIN_EX_B), False)
def test_pack_ucs2_bytes(self):
# 07911356131313F311000A9260214365870008AA080068006F006C0061
self.assertEqual(pack_ucs2_bytes('hola'), '0068006F006C0061')
# 07911356131313F311000A9260214365870008AA0A0068006F006C00610073
self.assertEqual(pack_ucs2_bytes('holas'), '0068006F006C00610073')
self.assertEqual(pack_ucs2_bytes(u"中华人民共和国"),
'4E2D534E4EBA6C115171548C56FD')
def test_unpack_ucs2_bytes(self):
self.assertEqual(unpack_ucs2_bytes('0068006F006C0061'), 'hola')
resp = 'holas'
self.assertEqual(unpack_ucs2_bytes('0068006F006C00610073'), resp)
def test_unpack_ucs2_bytes_in_ts31101_80(self):
# From Huawei example
self.assertEqual(
unpack_ucs2_bytes_in_ts31101_80('534E4E3A'), u'华为')
def test_unpack_ucs2_bytes_in_ts31101_81(self):
# From our original Huawei contacts code
self.assertEqual(
unpack_ucs2_bytes_in_ts31101_81('0602A46563746F72FF'), u'Ĥector')
# From Android code
self.assertEqual(
unpack_ucs2_bytes_in_ts31101_81('0A01566FEC6365204DE0696CFFFFFF'),
u'Vo\u00ECce M\u00E0il')
# From TS102221
# Byte 4 indicates GSM Default Alphabet character '53', i.e. 'S'.
# Byte 5 indicates a UCS2 character offset to the base pointer of '15',
# expressed in binary as follows 001 0101, which, when added
# to the base pointer value results in a sixteen bit value of
# 0000 1001 1001 0101, i.e. '0995', which is the Bengali
# letter KA.
# Byte 6 / 7 were not defined in TS102221 example, so just repeated 5
# Byte 8 contains the value 'FF', but as the string length is 5, this
# is a valid character in the string, where the bit pattern
# 111 1111 is added to the base pointer, yielding a sixteen
# bit value of 0000 1001 1111 1111 for the UCS2 character
# (i.e. '09FF').
self.assertEqual(
unpack_ucs2_bytes_in_ts31101_81('051353959595FFFF'), u'Sককক\u09FF')
def test_unpack_ucs2_bytes_in_ts31101_82(self):
# From TS102221
self.assertEqual(
unpack_ucs2_bytes_in_ts31101_82('0505302D82D32D31'), u'-Բփ-1')
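# Illustrative sketch (not part of wader.common.encoding) of the 0x81
# coding-scheme arithmetic exercised above: the first octet is the
# character count, the second holds bits 15..8 of a 16-bit base pointer
# (so base = octet << 7), and each data octet is either a GSM-default
# character (MSB clear; approximated here with its ASCII value) or the
# UCS2 code point base + (octet & 0x7F) (MSB set).
def _decode_ts31101_81_sketch(hexstr):
    octets = [int(hexstr[i:i + 2], 16) for i in range(0, len(hexstr), 2)]
    length, base = octets[0], octets[1] << 7
    chars = []
    for octet in octets[2:2 + length]:
        if octet & 0x80:
            chars.append(unichr(base + (octet & 0x7F)))
        else:
            chars.append(unichr(octet))
    return u''.join(chars)
# e.g. _decode_ts31101_81_sketch('0602A46563746F72FF') == u'Ĥector'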
|
andrewbird/wader
|
test/test_encoding.py
|
Python
|
gpl-2.0
| 7,859
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""code generator for GLES2 command buffers."""
import itertools
import os
import os.path
import sys
import re
import platform
from optparse import OptionParser
from subprocess import call
_SIZE_OF_UINT32 = 4
_SIZE_OF_COMMAND_HEADER = 4
_FIRST_SPECIFIC_COMMAND_ID = 256
_LICENSE = """// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
"""
_DO_NOT_EDIT_WARNING = """// This file is auto-generated from
// gpu/command_buffer/build_gles2_cmd_buffer.py
// It's formatted by clang-format using chromium coding style:
// clang-format -i -style=chromium filename
// DO NOT EDIT!
"""
# This string is copied directly out of the gl2.h file from GLES2.0
#
# Edits:
#
# *) Any argument that is a resourceID has been changed to GLid<Type>.
# (not pointer arguments) and if it's allowed to be zero it's GLidZero<Type>
# If it's allowed to not exist it's GLidBind<Type>
#
# *) All GLenums have been changed to GLenumTypeOfEnum
#
_GL_TYPES = {
'GLenum': 'unsigned int',
'GLboolean': 'unsigned char',
'GLbitfield': 'unsigned int',
'GLbyte': 'signed char',
'GLshort': 'short',
'GLint': 'int',
'GLsizei': 'int',
'GLubyte': 'unsigned char',
'GLushort': 'unsigned short',
'GLuint': 'unsigned int',
'GLfloat': 'float',
'GLclampf': 'float',
'GLvoid': 'void',
'GLfixed': 'int',
'GLclampx': 'int'
}
_GL_TYPES_32 = {
'GLintptr': 'long int',
'GLsizeiptr': 'long int'
}
_GL_TYPES_64 = {
'GLintptr': 'long long int',
'GLsizeiptr': 'long long int'
}
# Capabilities selected with glEnable
_CAPABILITY_FLAGS = [
{'name': 'blend'},
{'name': 'cull_face'},
{'name': 'depth_test', 'state_flag': 'framebuffer_state_.clear_state_dirty'},
{'name': 'dither', 'default': True},
{'name': 'polygon_offset_fill'},
{'name': 'sample_alpha_to_coverage'},
{'name': 'sample_coverage'},
{'name': 'scissor_test'},
{'name': 'stencil_test',
'state_flag': 'framebuffer_state_.clear_state_dirty'},
{'name': 'rasterizer_discard', 'es3': True},
{'name': 'primitive_restart_fixed_index', 'es3': True},
]
_STATES = {
'ClearColor': {
'type': 'Normal',
'func': 'ClearColor',
'enum': 'GL_COLOR_CLEAR_VALUE',
'states': [
{'name': 'color_clear_red', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'color_clear_green', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'color_clear_blue', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'color_clear_alpha', 'type': 'GLfloat', 'default': '0.0f'},
],
},
'ClearDepthf': {
'type': 'Normal',
'func': 'ClearDepth',
'enum': 'GL_DEPTH_CLEAR_VALUE',
'states': [
{'name': 'depth_clear', 'type': 'GLclampf', 'default': '1.0f'},
],
},
'ColorMask': {
'type': 'Normal',
'func': 'ColorMask',
'enum': 'GL_COLOR_WRITEMASK',
'states': [
{
'name': 'color_mask_red',
'type': 'GLboolean',
'default': 'true',
'cached': True
},
{
'name': 'color_mask_green',
'type': 'GLboolean',
'default': 'true',
'cached': True
},
{
'name': 'color_mask_blue',
'type': 'GLboolean',
'default': 'true',
'cached': True
},
{
'name': 'color_mask_alpha',
'type': 'GLboolean',
'default': 'true',
'cached': True
},
],
'state_flag': 'framebuffer_state_.clear_state_dirty',
},
'ClearStencil': {
'type': 'Normal',
'func': 'ClearStencil',
'enum': 'GL_STENCIL_CLEAR_VALUE',
'states': [
{'name': 'stencil_clear', 'type': 'GLint', 'default': '0'},
],
},
'BlendColor': {
'type': 'Normal',
'func': 'BlendColor',
'enum': 'GL_BLEND_COLOR',
'states': [
{'name': 'blend_color_red', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'blend_color_green', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'blend_color_blue', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'blend_color_alpha', 'type': 'GLfloat', 'default': '0.0f'},
],
},
'BlendEquation': {
'type': 'SrcDst',
'func': 'BlendEquationSeparate',
'states': [
{
'name': 'blend_equation_rgb',
'type': 'GLenum',
'enum': 'GL_BLEND_EQUATION_RGB',
'default': 'GL_FUNC_ADD',
},
{
'name': 'blend_equation_alpha',
'type': 'GLenum',
'enum': 'GL_BLEND_EQUATION_ALPHA',
'default': 'GL_FUNC_ADD',
},
],
},
'BlendFunc': {
'type': 'SrcDst',
'func': 'BlendFuncSeparate',
'states': [
{
'name': 'blend_source_rgb',
'type': 'GLenum',
'enum': 'GL_BLEND_SRC_RGB',
'default': 'GL_ONE',
},
{
'name': 'blend_dest_rgb',
'type': 'GLenum',
'enum': 'GL_BLEND_DST_RGB',
'default': 'GL_ZERO',
},
{
'name': 'blend_source_alpha',
'type': 'GLenum',
'enum': 'GL_BLEND_SRC_ALPHA',
'default': 'GL_ONE',
},
{
'name': 'blend_dest_alpha',
'type': 'GLenum',
'enum': 'GL_BLEND_DST_ALPHA',
'default': 'GL_ZERO',
},
],
},
'PolygonOffset': {
'type': 'Normal',
'func': 'PolygonOffset',
'states': [
{
'name': 'polygon_offset_factor',
'type': 'GLfloat',
'enum': 'GL_POLYGON_OFFSET_FACTOR',
'default': '0.0f',
},
{
'name': 'polygon_offset_units',
'type': 'GLfloat',
'enum': 'GL_POLYGON_OFFSET_UNITS',
'default': '0.0f',
},
],
},
'CullFace': {
'type': 'Normal',
'func': 'CullFace',
'enum': 'GL_CULL_FACE_MODE',
'states': [
{
'name': 'cull_mode',
'type': 'GLenum',
'default': 'GL_BACK',
},
],
},
'FrontFace': {
'type': 'Normal',
'func': 'FrontFace',
'enum': 'GL_FRONT_FACE',
'states': [{'name': 'front_face', 'type': 'GLenum', 'default': 'GL_CCW'}],
},
'DepthFunc': {
'type': 'Normal',
'func': 'DepthFunc',
'enum': 'GL_DEPTH_FUNC',
'states': [{'name': 'depth_func', 'type': 'GLenum', 'default': 'GL_LESS'}],
},
'DepthRange': {
'type': 'Normal',
'func': 'DepthRange',
'enum': 'GL_DEPTH_RANGE',
'states': [
{'name': 'z_near', 'type': 'GLclampf', 'default': '0.0f'},
{'name': 'z_far', 'type': 'GLclampf', 'default': '1.0f'},
],
},
'SampleCoverage': {
'type': 'Normal',
'func': 'SampleCoverage',
'states': [
{
'name': 'sample_coverage_value',
'type': 'GLclampf',
'enum': 'GL_SAMPLE_COVERAGE_VALUE',
'default': '1.0f',
},
{
'name': 'sample_coverage_invert',
'type': 'GLboolean',
'enum': 'GL_SAMPLE_COVERAGE_INVERT',
'default': 'false',
},
],
},
'StencilMask': {
'type': 'FrontBack',
'func': 'StencilMaskSeparate',
'state_flag': 'framebuffer_state_.clear_state_dirty',
'states': [
{
'name': 'stencil_front_writemask',
'type': 'GLuint',
'enum': 'GL_STENCIL_WRITEMASK',
'default': '0xFFFFFFFFU',
'cached': True,
},
{
'name': 'stencil_back_writemask',
'type': 'GLuint',
'enum': 'GL_STENCIL_BACK_WRITEMASK',
'default': '0xFFFFFFFFU',
'cached': True,
},
],
},
'StencilOp': {
'type': 'FrontBack',
'func': 'StencilOpSeparate',
'states': [
{
'name': 'stencil_front_fail_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_FAIL',
'default': 'GL_KEEP',
},
{
'name': 'stencil_front_z_fail_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_PASS_DEPTH_FAIL',
'default': 'GL_KEEP',
},
{
'name': 'stencil_front_z_pass_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_PASS_DEPTH_PASS',
'default': 'GL_KEEP',
},
{
'name': 'stencil_back_fail_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_BACK_FAIL',
'default': 'GL_KEEP',
},
{
'name': 'stencil_back_z_fail_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_BACK_PASS_DEPTH_FAIL',
'default': 'GL_KEEP',
},
{
'name': 'stencil_back_z_pass_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_BACK_PASS_DEPTH_PASS',
'default': 'GL_KEEP',
},
],
},
'StencilFunc': {
'type': 'FrontBack',
'func': 'StencilFuncSeparate',
'states': [
{
'name': 'stencil_front_func',
'type': 'GLenum',
'enum': 'GL_STENCIL_FUNC',
'default': 'GL_ALWAYS',
},
{
'name': 'stencil_front_ref',
'type': 'GLint',
'enum': 'GL_STENCIL_REF',
'default': '0',
},
{
'name': 'stencil_front_mask',
'type': 'GLuint',
'enum': 'GL_STENCIL_VALUE_MASK',
'default': '0xFFFFFFFFU',
},
{
'name': 'stencil_back_func',
'type': 'GLenum',
'enum': 'GL_STENCIL_BACK_FUNC',
'default': 'GL_ALWAYS',
},
{
'name': 'stencil_back_ref',
'type': 'GLint',
'enum': 'GL_STENCIL_BACK_REF',
'default': '0',
},
{
'name': 'stencil_back_mask',
'type': 'GLuint',
'enum': 'GL_STENCIL_BACK_VALUE_MASK',
'default': '0xFFFFFFFFU',
},
],
},
'Hint': {
'type': 'NamedParameter',
'func': 'Hint',
'states': [
{
'name': 'hint_generate_mipmap',
'type': 'GLenum',
'enum': 'GL_GENERATE_MIPMAP_HINT',
'default': 'GL_DONT_CARE',
'gl_version_flag': '!is_desktop_core_profile'
},
{
'name': 'hint_fragment_shader_derivative',
'type': 'GLenum',
'enum': 'GL_FRAGMENT_SHADER_DERIVATIVE_HINT_OES',
'default': 'GL_DONT_CARE',
'extension_flag': 'oes_standard_derivatives'
}
],
},
'PixelStore': {
'type': 'NamedParameter',
'func': 'PixelStorei',
'states': [
{
'name': 'pack_alignment',
'type': 'GLint',
'enum': 'GL_PACK_ALIGNMENT',
'default': '4'
},
{
'name': 'unpack_alignment',
'type': 'GLint',
'enum': 'GL_UNPACK_ALIGNMENT',
'default': '4'
}
],
},
  # TODO: Consider implementing these states
# GL_ACTIVE_TEXTURE
'LineWidth': {
'type': 'Normal',
'func': 'LineWidth',
'enum': 'GL_LINE_WIDTH',
'states': [
{
'name': 'line_width',
'type': 'GLfloat',
'default': '1.0f',
'range_checks': [{'check': "<= 0.0f", 'test_value': "0.0f"}],
'nan_check': True,
}],
},
'DepthMask': {
'type': 'Normal',
'func': 'DepthMask',
'enum': 'GL_DEPTH_WRITEMASK',
'states': [
{
'name': 'depth_mask',
'type': 'GLboolean',
'default': 'true',
'cached': True
},
],
'state_flag': 'framebuffer_state_.clear_state_dirty',
},
'Scissor': {
'type': 'Normal',
'func': 'Scissor',
'enum': 'GL_SCISSOR_BOX',
'states': [
# NOTE: These defaults reset at GLES2DecoderImpl::Initialization.
{
'name': 'scissor_x',
'type': 'GLint',
'default': '0',
'expected': 'kViewportX',
},
{
'name': 'scissor_y',
'type': 'GLint',
'default': '0',
'expected': 'kViewportY',
},
{
'name': 'scissor_width',
'type': 'GLsizei',
'default': '1',
'expected': 'kViewportWidth',
},
{
'name': 'scissor_height',
'type': 'GLsizei',
'default': '1',
'expected': 'kViewportHeight',
},
],
},
'Viewport': {
'type': 'Normal',
'func': 'Viewport',
'enum': 'GL_VIEWPORT',
'states': [
# NOTE: These defaults reset at GLES2DecoderImpl::Initialization.
{
'name': 'viewport_x',
'type': 'GLint',
'default': '0',
'expected': 'kViewportX',
},
{
'name': 'viewport_y',
'type': 'GLint',
'default': '0',
'expected': 'kViewportY',
},
{
'name': 'viewport_width',
'type': 'GLsizei',
'default': '1',
'expected': 'kViewportWidth',
},
{
'name': 'viewport_height',
'type': 'GLsizei',
'default': '1',
'expected': 'kViewportHeight',
},
],
},
'MatrixValuesCHROMIUM': {
'type': 'NamedParameter',
'func': 'MatrixLoadfEXT',
'states': [
{ 'enum': 'GL_PATH_MODELVIEW_MATRIX_CHROMIUM',
'enum_set': 'GL_PATH_MODELVIEW_CHROMIUM',
'name': 'modelview_matrix',
'type': 'GLfloat',
'default': [
'1.0f', '0.0f','0.0f','0.0f',
'0.0f', '1.0f','0.0f','0.0f',
'0.0f', '0.0f','1.0f','0.0f',
'0.0f', '0.0f','0.0f','1.0f',
],
'extension_flag': 'chromium_path_rendering',
},
{ 'enum': 'GL_PATH_PROJECTION_MATRIX_CHROMIUM',
'enum_set': 'GL_PATH_PROJECTION_CHROMIUM',
'name': 'projection_matrix',
'type': 'GLfloat',
'default': [
'1.0f', '0.0f','0.0f','0.0f',
'0.0f', '1.0f','0.0f','0.0f',
'0.0f', '0.0f','1.0f','0.0f',
'0.0f', '0.0f','0.0f','1.0f',
],
'extension_flag': 'chromium_path_rendering',
},
],
},
}
# Named type info object represents a named type that is used in OpenGL call
# arguments. Each named type defines a set of valid OpenGL call arguments. The
# named types are used in 'cmd_buffer_functions.txt'.
# type: The actual GL type of the named type.
# valid: The list of values that are valid for both the client and the service.
# valid_es3: The list of values that are valid in OpenGL ES 3, but not ES 2.
# invalid: Examples of invalid values for the type. At least these values
# should be tested to be invalid.
# deprecated_es3: The list of values that are valid in OpenGL ES 2, but
# deprecated in ES 3.
# is_complete: The list of valid values for the type is final and will not be
#     modified at runtime.
_NAMED_TYPE_INFO = {
'BlitFilter': {
'type': 'GLenum',
'valid': [
'GL_NEAREST',
'GL_LINEAR',
],
'invalid': [
'GL_LINEAR_MIPMAP_LINEAR',
],
},
'FrameBufferTarget': {
'type': 'GLenum',
'valid': [
'GL_FRAMEBUFFER',
],
'valid_es3': [
'GL_DRAW_FRAMEBUFFER' ,
'GL_READ_FRAMEBUFFER' ,
],
'invalid': [
'GL_RENDERBUFFER',
],
},
'InvalidateFrameBufferTarget': {
'type': 'GLenum',
'valid': [
'GL_FRAMEBUFFER',
],
'invalid': [
'GL_DRAW_FRAMEBUFFER' ,
'GL_READ_FRAMEBUFFER' ,
],
},
'RenderBufferTarget': {
'type': 'GLenum',
'valid': [
'GL_RENDERBUFFER',
],
'invalid': [
'GL_FRAMEBUFFER',
],
},
'BufferTarget': {
'type': 'GLenum',
'valid': [
'GL_ARRAY_BUFFER',
'GL_ELEMENT_ARRAY_BUFFER',
],
'valid_es3': [
'GL_COPY_READ_BUFFER',
'GL_COPY_WRITE_BUFFER',
'GL_PIXEL_PACK_BUFFER',
'GL_PIXEL_UNPACK_BUFFER',
'GL_TRANSFORM_FEEDBACK_BUFFER',
'GL_UNIFORM_BUFFER',
],
'invalid': [
'GL_RENDERBUFFER',
],
},
'IndexedBufferTarget': {
'type': 'GLenum',
'valid': [
'GL_TRANSFORM_FEEDBACK_BUFFER',
'GL_UNIFORM_BUFFER',
],
'invalid': [
'GL_RENDERBUFFER',
],
},
'MapBufferAccess': {
'type': 'GLenum',
'valid': [
'GL_MAP_READ_BIT',
'GL_MAP_WRITE_BIT',
'GL_MAP_INVALIDATE_RANGE_BIT',
'GL_MAP_INVALIDATE_BUFFER_BIT',
'GL_MAP_FLUSH_EXPLICIT_BIT',
'GL_MAP_UNSYNCHRONIZED_BIT',
],
'invalid': [
'GL_SYNC_FLUSH_COMMANDS_BIT',
],
},
'Bufferiv': {
'type': 'GLenum',
'valid': [
'GL_COLOR',
'GL_STENCIL',
],
'invalid': [
'GL_RENDERBUFFER',
],
},
'Bufferuiv': {
'type': 'GLenum',
'valid': [
'GL_COLOR',
],
'invalid': [
'GL_RENDERBUFFER',
],
},
'Bufferfv': {
'type': 'GLenum',
'valid': [
'GL_COLOR',
'GL_DEPTH',
],
'invalid': [
'GL_RENDERBUFFER',
],
},
'Bufferfi': {
'type': 'GLenum',
'valid': [
'GL_DEPTH_STENCIL',
],
'invalid': [
'GL_RENDERBUFFER',
],
},
'BufferUsage': {
'type': 'GLenum',
'valid': [
'GL_STREAM_DRAW',
'GL_STATIC_DRAW',
'GL_DYNAMIC_DRAW',
],
'valid_es3': [
'GL_STREAM_READ',
'GL_STREAM_COPY',
'GL_STATIC_READ',
'GL_STATIC_COPY',
'GL_DYNAMIC_READ',
'GL_DYNAMIC_COPY',
],
'invalid': [
'GL_NONE',
],
},
'CompressedTextureFormat': {
'type': 'GLenum',
'valid': [
],
'valid_es3': [
'GL_COMPRESSED_R11_EAC',
'GL_COMPRESSED_SIGNED_R11_EAC',
'GL_COMPRESSED_RG11_EAC',
'GL_COMPRESSED_SIGNED_RG11_EAC',
'GL_COMPRESSED_RGB8_ETC2',
'GL_COMPRESSED_SRGB8_ETC2',
'GL_COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2',
'GL_COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_ETC2',
'GL_COMPRESSED_RGBA8_ETC2_EAC',
'GL_COMPRESSED_SRGB8_ALPHA8_ETC2_EAC',
],
},
'GLState': {
'type': 'GLenum',
'valid': [
      # NOTE: State and Capability entries added later.
'GL_ACTIVE_TEXTURE',
'GL_ALIASED_LINE_WIDTH_RANGE',
'GL_ALIASED_POINT_SIZE_RANGE',
'GL_ALPHA_BITS',
'GL_ARRAY_BUFFER_BINDING',
'GL_BLUE_BITS',
'GL_COMPRESSED_TEXTURE_FORMATS',
'GL_CURRENT_PROGRAM',
'GL_DEPTH_BITS',
'GL_DEPTH_RANGE',
'GL_ELEMENT_ARRAY_BUFFER_BINDING',
'GL_FRAMEBUFFER_BINDING',
'GL_GENERATE_MIPMAP_HINT',
'GL_GREEN_BITS',
'GL_IMPLEMENTATION_COLOR_READ_FORMAT',
'GL_IMPLEMENTATION_COLOR_READ_TYPE',
'GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS',
'GL_MAX_CUBE_MAP_TEXTURE_SIZE',
'GL_MAX_FRAGMENT_UNIFORM_VECTORS',
'GL_MAX_RENDERBUFFER_SIZE',
'GL_MAX_TEXTURE_IMAGE_UNITS',
'GL_MAX_TEXTURE_SIZE',
'GL_MAX_VARYING_VECTORS',
'GL_MAX_VERTEX_ATTRIBS',
'GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS',
'GL_MAX_VERTEX_UNIFORM_VECTORS',
'GL_MAX_VIEWPORT_DIMS',
'GL_NUM_COMPRESSED_TEXTURE_FORMATS',
'GL_NUM_SHADER_BINARY_FORMATS',
'GL_PACK_ALIGNMENT',
'GL_RED_BITS',
'GL_RENDERBUFFER_BINDING',
'GL_SAMPLE_BUFFERS',
'GL_SAMPLE_COVERAGE_INVERT',
'GL_SAMPLE_COVERAGE_VALUE',
'GL_SAMPLES',
'GL_SCISSOR_BOX',
'GL_SHADER_BINARY_FORMATS',
'GL_SHADER_COMPILER',
'GL_SUBPIXEL_BITS',
'GL_STENCIL_BITS',
'GL_TEXTURE_BINDING_2D',
'GL_TEXTURE_BINDING_CUBE_MAP',
'GL_UNPACK_ALIGNMENT',
'GL_BIND_GENERATES_RESOURCE_CHROMIUM',
# we can add this because we emulate it if the driver does not support it.
'GL_VERTEX_ARRAY_BINDING_OES',
'GL_VIEWPORT',
],
'valid_es3': [
'GL_COPY_READ_BUFFER_BINDING',
'GL_COPY_WRITE_BUFFER_BINDING',
'GL_DRAW_BUFFER0',
'GL_DRAW_BUFFER1',
'GL_DRAW_BUFFER2',
'GL_DRAW_BUFFER3',
'GL_DRAW_BUFFER4',
'GL_DRAW_BUFFER5',
'GL_DRAW_BUFFER6',
'GL_DRAW_BUFFER7',
'GL_DRAW_BUFFER8',
'GL_DRAW_BUFFER9',
'GL_DRAW_BUFFER10',
'GL_DRAW_BUFFER11',
'GL_DRAW_BUFFER12',
'GL_DRAW_BUFFER13',
'GL_DRAW_BUFFER14',
'GL_DRAW_BUFFER15',
'GL_DRAW_FRAMEBUFFER_BINDING',
'GL_FRAGMENT_SHADER_DERIVATIVE_HINT',
'GL_MAJOR_VERSION',
'GL_MAX_3D_TEXTURE_SIZE',
'GL_MAX_ARRAY_TEXTURE_LAYERS',
'GL_MAX_COLOR_ATTACHMENTS',
'GL_MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS',
'GL_MAX_COMBINED_UNIFORM_BLOCKS',
'GL_MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS',
'GL_MAX_DRAW_BUFFERS',
'GL_MAX_ELEMENT_INDEX',
'GL_MAX_ELEMENTS_INDICES',
'GL_MAX_ELEMENTS_VERTICES',
'GL_MAX_FRAGMENT_INPUT_COMPONENTS',
'GL_MAX_FRAGMENT_UNIFORM_BLOCKS',
'GL_MAX_FRAGMENT_UNIFORM_COMPONENTS',
'GL_MAX_PROGRAM_TEXEL_OFFSET',
'GL_MAX_SAMPLES',
'GL_MAX_SERVER_WAIT_TIMEOUT',
'GL_MAX_TEXTURE_LOD_BIAS',
'GL_MAX_TRANSFORM_FEEDBACK_INTERLEAVED_COMPONENTS',
'GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_ATTRIBS',
'GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_COMPONENTS',
'GL_MAX_UNIFORM_BLOCK_SIZE',
'GL_MAX_UNIFORM_BUFFER_BINDINGS',
'GL_MAX_VARYING_COMPONENTS',
'GL_MAX_VERTEX_OUTPUT_COMPONENTS',
'GL_MAX_VERTEX_UNIFORM_BLOCKS',
'GL_MAX_VERTEX_UNIFORM_COMPONENTS',
'GL_MIN_PROGRAM_TEXEL_OFFSET',
'GL_MINOR_VERSION',
'GL_NUM_EXTENSIONS',
'GL_NUM_PROGRAM_BINARY_FORMATS',
'GL_PACK_ROW_LENGTH',
'GL_PACK_SKIP_PIXELS',
'GL_PACK_SKIP_ROWS',
'GL_PIXEL_PACK_BUFFER_BINDING',
'GL_PIXEL_UNPACK_BUFFER_BINDING',
'GL_PROGRAM_BINARY_FORMATS',
'GL_READ_BUFFER',
'GL_READ_FRAMEBUFFER_BINDING',
'GL_SAMPLER_BINDING',
'GL_TEXTURE_BINDING_2D_ARRAY',
'GL_TEXTURE_BINDING_3D',
'GL_TRANSFORM_FEEDBACK_BINDING',
'GL_TRANSFORM_FEEDBACK_ACTIVE',
'GL_TRANSFORM_FEEDBACK_BUFFER_BINDING',
'GL_TRANSFORM_FEEDBACK_PAUSED',
'GL_TRANSFORM_FEEDBACK_BUFFER_SIZE',
'GL_TRANSFORM_FEEDBACK_BUFFER_START',
'GL_UNIFORM_BUFFER_BINDING',
'GL_UNIFORM_BUFFER_OFFSET_ALIGNMENT',
'GL_UNIFORM_BUFFER_SIZE',
'GL_UNIFORM_BUFFER_START',
'GL_UNPACK_IMAGE_HEIGHT',
'GL_UNPACK_ROW_LENGTH',
'GL_UNPACK_SKIP_IMAGES',
'GL_UNPACK_SKIP_PIXELS',
'GL_UNPACK_SKIP_ROWS',
# GL_VERTEX_ARRAY_BINDING is the same as GL_VERTEX_ARRAY_BINDING_OES
# 'GL_VERTEX_ARRAY_BINDING',
],
'invalid': [
'GL_FOG_HINT',
],
},
'IndexedGLState': {
'type': 'GLenum',
'valid': [
'GL_TRANSFORM_FEEDBACK_BUFFER_BINDING',
'GL_TRANSFORM_FEEDBACK_BUFFER_SIZE',
'GL_TRANSFORM_FEEDBACK_BUFFER_START',
'GL_UNIFORM_BUFFER_BINDING',
'GL_UNIFORM_BUFFER_SIZE',
'GL_UNIFORM_BUFFER_START',
],
'invalid': [
'GL_FOG_HINT',
],
},
'GetTexParamTarget': {
'type': 'GLenum',
'valid': [
'GL_TEXTURE_2D',
'GL_TEXTURE_CUBE_MAP',
],
'valid_es3': [
'GL_TEXTURE_2D_ARRAY',
'GL_TEXTURE_3D',
],
'invalid': [
'GL_PROXY_TEXTURE_CUBE_MAP',
]
},
'TextureTarget': {
'type': 'GLenum',
'valid': [
'GL_TEXTURE_2D',
'GL_TEXTURE_CUBE_MAP_POSITIVE_X',
'GL_TEXTURE_CUBE_MAP_NEGATIVE_X',
'GL_TEXTURE_CUBE_MAP_POSITIVE_Y',
'GL_TEXTURE_CUBE_MAP_NEGATIVE_Y',
'GL_TEXTURE_CUBE_MAP_POSITIVE_Z',
'GL_TEXTURE_CUBE_MAP_NEGATIVE_Z',
],
'invalid': [
'GL_PROXY_TEXTURE_CUBE_MAP',
]
},
'Texture3DTarget': {
'type': 'GLenum',
'valid': [
'GL_TEXTURE_3D',
'GL_TEXTURE_2D_ARRAY',
],
'invalid': [
'GL_TEXTURE_2D',
]
},
'TextureBindTarget': {
'type': 'GLenum',
'valid': [
'GL_TEXTURE_2D',
'GL_TEXTURE_CUBE_MAP',
],
'valid_es3': [
'GL_TEXTURE_3D',
'GL_TEXTURE_2D_ARRAY',
],
'invalid': [
'GL_TEXTURE_1D',
'GL_TEXTURE_3D',
],
},
'TransformFeedbackBindTarget': {
'type': 'GLenum',
'valid': [
'GL_TRANSFORM_FEEDBACK',
],
'invalid': [
'GL_TEXTURE_2D',
],
},
'TransformFeedbackPrimitiveMode': {
'type': 'GLenum',
'valid': [
'GL_POINTS',
'GL_LINES',
'GL_TRIANGLES',
],
'invalid': [
'GL_LINE_LOOP',
],
},
'ShaderType': {
'type': 'GLenum',
'valid': [
'GL_VERTEX_SHADER',
'GL_FRAGMENT_SHADER',
],
'invalid': [
'GL_GEOMETRY_SHADER',
],
},
'FaceType': {
'type': 'GLenum',
'valid': [
'GL_FRONT',
'GL_BACK',
'GL_FRONT_AND_BACK',
],
},
'FaceMode': {
'type': 'GLenum',
'valid': [
'GL_CW',
'GL_CCW',
],
},
'CmpFunction': {
'type': 'GLenum',
'valid': [
'GL_NEVER',
'GL_LESS',
'GL_EQUAL',
'GL_LEQUAL',
'GL_GREATER',
'GL_NOTEQUAL',
'GL_GEQUAL',
'GL_ALWAYS',
],
},
'Equation': {
'type': 'GLenum',
'valid': [
'GL_FUNC_ADD',
'GL_FUNC_SUBTRACT',
'GL_FUNC_REVERSE_SUBTRACT',
],
'valid_es3': [
'GL_MIN',
'GL_MAX',
],
'invalid': [
'GL_NONE',
],
},
'SrcBlendFactor': {
'type': 'GLenum',
'valid': [
'GL_ZERO',
'GL_ONE',
'GL_SRC_COLOR',
'GL_ONE_MINUS_SRC_COLOR',
'GL_DST_COLOR',
'GL_ONE_MINUS_DST_COLOR',
'GL_SRC_ALPHA',
'GL_ONE_MINUS_SRC_ALPHA',
'GL_DST_ALPHA',
'GL_ONE_MINUS_DST_ALPHA',
'GL_CONSTANT_COLOR',
'GL_ONE_MINUS_CONSTANT_COLOR',
'GL_CONSTANT_ALPHA',
'GL_ONE_MINUS_CONSTANT_ALPHA',
'GL_SRC_ALPHA_SATURATE',
],
},
'DstBlendFactor': {
'type': 'GLenum',
'valid': [
'GL_ZERO',
'GL_ONE',
'GL_SRC_COLOR',
'GL_ONE_MINUS_SRC_COLOR',
'GL_DST_COLOR',
'GL_ONE_MINUS_DST_COLOR',
'GL_SRC_ALPHA',
'GL_ONE_MINUS_SRC_ALPHA',
'GL_DST_ALPHA',
'GL_ONE_MINUS_DST_ALPHA',
'GL_CONSTANT_COLOR',
'GL_ONE_MINUS_CONSTANT_COLOR',
'GL_CONSTANT_ALPHA',
'GL_ONE_MINUS_CONSTANT_ALPHA',
],
},
'Capability': {
'type': 'GLenum',
'valid': ["GL_%s" % cap['name'].upper() for cap in _CAPABILITY_FLAGS
if 'es3' not in cap or cap['es3'] != True],
'valid_es3': ["GL_%s" % cap['name'].upper() for cap in _CAPABILITY_FLAGS
if 'es3' in cap and cap['es3'] == True],
'invalid': [
'GL_CLIP_PLANE0',
'GL_POINT_SPRITE',
],
},
'DrawMode': {
'type': 'GLenum',
'valid': [
'GL_POINTS',
'GL_LINE_STRIP',
'GL_LINE_LOOP',
'GL_LINES',
'GL_TRIANGLE_STRIP',
'GL_TRIANGLE_FAN',
'GL_TRIANGLES',
],
'invalid': [
'GL_QUADS',
'GL_POLYGON',
],
},
'IndexType': {
'type': 'GLenum',
'valid': [
'GL_UNSIGNED_BYTE',
'GL_UNSIGNED_SHORT',
],
'valid_es3': [
'GL_UNSIGNED_INT',
],
'invalid': [
'GL_INT',
],
},
'GetMaxIndexType': {
'type': 'GLenum',
'valid': [
'GL_UNSIGNED_BYTE',
'GL_UNSIGNED_SHORT',
'GL_UNSIGNED_INT',
],
'invalid': [
'GL_INT',
],
},
'Attachment': {
'type': 'GLenum',
'valid': [
'GL_COLOR_ATTACHMENT0',
'GL_DEPTH_ATTACHMENT',
'GL_STENCIL_ATTACHMENT',
],
'valid_es3': [
'GL_DEPTH_STENCIL_ATTACHMENT',
],
},
'BackbufferAttachment': {
'type': 'GLenum',
'valid': [
'GL_COLOR_EXT',
'GL_DEPTH_EXT',
'GL_STENCIL_EXT',
],
},
'BufferParameter': {
'type': 'GLenum',
'valid': [
'GL_BUFFER_SIZE',
'GL_BUFFER_USAGE',
],
'valid_es3': [
'GL_BUFFER_ACCESS_FLAGS',
'GL_BUFFER_MAPPED',
'GL_BUFFER_MAP_LENGTH',
'GL_BUFFER_MAP_OFFSET',
],
'invalid': [
'GL_PIXEL_PACK_BUFFER',
],
},
'BufferMode': {
'type': 'GLenum',
'valid': [
'GL_INTERLEAVED_ATTRIBS',
'GL_SEPARATE_ATTRIBS',
],
'invalid': [
'GL_PIXEL_PACK_BUFFER',
],
},
'FrameBufferParameter': {
'type': 'GLenum',
'valid': [
'GL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE',
'GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME',
'GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL',
'GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE',
],
'valid_es3': [
'GL_FRAMEBUFFER_ATTACHMENT_RED_SIZE',
'GL_FRAMEBUFFER_ATTACHMENT_GREEN_SIZE',
'GL_FRAMEBUFFER_ATTACHMENT_BLUE_SIZE',
'GL_FRAMEBUFFER_ATTACHMENT_ALPHA_SIZE',
'GL_FRAMEBUFFER_ATTACHMENT_DEPTH_SIZE',
'GL_FRAMEBUFFER_ATTACHMENT_STENCIL_SIZE',
'GL_FRAMEBUFFER_ATTACHMENT_COMPONENT_TYPE',
'GL_FRAMEBUFFER_ATTACHMENT_COLOR_ENCODING',
'GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LAYER',
],
},
'MatrixMode': {
'type': 'GLenum',
'valid': [
'GL_PATH_PROJECTION_CHROMIUM',
'GL_PATH_MODELVIEW_CHROMIUM',
],
},
'ProgramParameter': {
'type': 'GLenum',
'valid': [
'GL_DELETE_STATUS',
'GL_LINK_STATUS',
'GL_VALIDATE_STATUS',
'GL_INFO_LOG_LENGTH',
'GL_ATTACHED_SHADERS',
'GL_ACTIVE_ATTRIBUTES',
'GL_ACTIVE_ATTRIBUTE_MAX_LENGTH',
'GL_ACTIVE_UNIFORMS',
'GL_ACTIVE_UNIFORM_MAX_LENGTH',
],
'valid_es3': [
'GL_ACTIVE_UNIFORM_BLOCKS',
'GL_ACTIVE_UNIFORM_BLOCK_MAX_NAME_LENGTH',
'GL_TRANSFORM_FEEDBACK_BUFFER_MODE',
'GL_TRANSFORM_FEEDBACK_VARYINGS',
'GL_TRANSFORM_FEEDBACK_VARYING_MAX_LENGTH',
],
'invalid': [
'GL_PROGRAM_BINARY_RETRIEVABLE_HINT', # not supported in Chromium.
],
},
'QueryObjectParameter': {
'type': 'GLenum',
'valid': [
'GL_QUERY_RESULT_EXT',
'GL_QUERY_RESULT_AVAILABLE_EXT',
],
},
'QueryParameter': {
'type': 'GLenum',
'valid': [
'GL_CURRENT_QUERY_EXT',
],
},
'QueryTarget': {
'type': 'GLenum',
'valid': [
'GL_ANY_SAMPLES_PASSED_EXT',
'GL_ANY_SAMPLES_PASSED_CONSERVATIVE_EXT',
'GL_COMMANDS_ISSUED_CHROMIUM',
'GL_LATENCY_QUERY_CHROMIUM',
'GL_ASYNC_PIXEL_UNPACK_COMPLETED_CHROMIUM',
'GL_ASYNC_PIXEL_PACK_COMPLETED_CHROMIUM',
'GL_COMMANDS_COMPLETED_CHROMIUM',
],
},
'RenderBufferParameter': {
'type': 'GLenum',
'valid': [
'GL_RENDERBUFFER_RED_SIZE',
'GL_RENDERBUFFER_GREEN_SIZE',
'GL_RENDERBUFFER_BLUE_SIZE',
'GL_RENDERBUFFER_ALPHA_SIZE',
'GL_RENDERBUFFER_DEPTH_SIZE',
'GL_RENDERBUFFER_STENCIL_SIZE',
'GL_RENDERBUFFER_WIDTH',
'GL_RENDERBUFFER_HEIGHT',
'GL_RENDERBUFFER_INTERNAL_FORMAT',
],
'valid_es3': [
'GL_RENDERBUFFER_SAMPLES',
],
},
'InternalFormatParameter': {
'type': 'GLenum',
'valid': [
'GL_NUM_SAMPLE_COUNTS',
'GL_SAMPLES',
],
},
'SamplerParameter': {
'type': 'GLenum',
'valid': [
'GL_TEXTURE_MAG_FILTER',
'GL_TEXTURE_MIN_FILTER',
'GL_TEXTURE_MIN_LOD',
'GL_TEXTURE_MAX_LOD',
'GL_TEXTURE_WRAP_S',
'GL_TEXTURE_WRAP_T',
'GL_TEXTURE_WRAP_R',
'GL_TEXTURE_COMPARE_MODE',
'GL_TEXTURE_COMPARE_FUNC',
],
'invalid': [
'GL_GENERATE_MIPMAP',
],
},
'ShaderParameter': {
'type': 'GLenum',
'valid': [
'GL_SHADER_TYPE',
'GL_DELETE_STATUS',
'GL_COMPILE_STATUS',
'GL_INFO_LOG_LENGTH',
'GL_SHADER_SOURCE_LENGTH',
'GL_TRANSLATED_SHADER_SOURCE_LENGTH_ANGLE',
],
},
'ShaderPrecision': {
'type': 'GLenum',
'valid': [
'GL_LOW_FLOAT',
'GL_MEDIUM_FLOAT',
'GL_HIGH_FLOAT',
'GL_LOW_INT',
'GL_MEDIUM_INT',
'GL_HIGH_INT',
],
},
'StringType': {
'type': 'GLenum',
'valid': [
'GL_VENDOR',
'GL_RENDERER',
'GL_VERSION',
'GL_SHADING_LANGUAGE_VERSION',
'GL_EXTENSIONS',
],
},
'TextureParameter': {
'type': 'GLenum',
'valid': [
'GL_TEXTURE_MAG_FILTER',
'GL_TEXTURE_MIN_FILTER',
'GL_TEXTURE_POOL_CHROMIUM',
'GL_TEXTURE_WRAP_S',
'GL_TEXTURE_WRAP_T',
],
'valid_es3': [
'GL_TEXTURE_BASE_LEVEL',
'GL_TEXTURE_COMPARE_FUNC',
'GL_TEXTURE_COMPARE_MODE',
'GL_TEXTURE_IMMUTABLE_FORMAT',
'GL_TEXTURE_IMMUTABLE_LEVELS',
'GL_TEXTURE_MAX_LEVEL',
'GL_TEXTURE_MAX_LOD',
'GL_TEXTURE_MIN_LOD',
'GL_TEXTURE_WRAP_R',
],
'invalid': [
'GL_GENERATE_MIPMAP',
],
},
'TexturePool': {
'type': 'GLenum',
'valid': [
'GL_TEXTURE_POOL_MANAGED_CHROMIUM',
'GL_TEXTURE_POOL_UNMANAGED_CHROMIUM',
],
},
'TextureWrapMode': {
'type': 'GLenum',
'valid': [
'GL_CLAMP_TO_EDGE',
'GL_MIRRORED_REPEAT',
'GL_REPEAT',
],
},
'TextureMinFilterMode': {
'type': 'GLenum',
'valid': [
'GL_NEAREST',
'GL_LINEAR',
'GL_NEAREST_MIPMAP_NEAREST',
'GL_LINEAR_MIPMAP_NEAREST',
'GL_NEAREST_MIPMAP_LINEAR',
'GL_LINEAR_MIPMAP_LINEAR',
],
},
'TextureMagFilterMode': {
'type': 'GLenum',
'valid': [
'GL_NEAREST',
'GL_LINEAR',
],
},
'TextureCompareFunc': {
'type': 'GLenum',
'valid': [
'GL_LEQUAL',
'GL_GEQUAL',
'GL_LESS',
'GL_GREATER',
'GL_EQUAL',
'GL_NOTEQUAL',
'GL_ALWAYS',
'GL_NEVER',
],
},
'TextureCompareMode': {
'type': 'GLenum',
'valid': [
'GL_NONE',
'GL_COMPARE_REF_TO_TEXTURE',
],
},
'TextureUsage': {
'type': 'GLenum',
'valid': [
'GL_NONE',
'GL_FRAMEBUFFER_ATTACHMENT_ANGLE',
],
},
'VertexAttribute': {
'type': 'GLenum',
'valid': [
# some enum that the decoder actually passes through to GL needs
# to be the first listed here since it's used in unit tests.
'GL_VERTEX_ATTRIB_ARRAY_NORMALIZED',
'GL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING',
'GL_VERTEX_ATTRIB_ARRAY_ENABLED',
'GL_VERTEX_ATTRIB_ARRAY_SIZE',
'GL_VERTEX_ATTRIB_ARRAY_STRIDE',
'GL_VERTEX_ATTRIB_ARRAY_TYPE',
'GL_CURRENT_VERTEX_ATTRIB',
],
'valid_es3': [
'GL_VERTEX_ATTRIB_ARRAY_INTEGER',
'GL_VERTEX_ATTRIB_ARRAY_DIVISOR',
],
},
'VertexPointer': {
'type': 'GLenum',
'valid': [
'GL_VERTEX_ATTRIB_ARRAY_POINTER',
],
},
'HintTarget': {
'type': 'GLenum',
'valid': [
'GL_GENERATE_MIPMAP_HINT',
],
'valid_es3': [
'GL_FRAGMENT_SHADER_DERIVATIVE_HINT',
],
'invalid': [
'GL_PERSPECTIVE_CORRECTION_HINT',
],
},
'HintMode': {
'type': 'GLenum',
'valid': [
'GL_FASTEST',
'GL_NICEST',
'GL_DONT_CARE',
],
},
'PixelStore': {
'type': 'GLenum',
'valid': [
'GL_PACK_ALIGNMENT',
'GL_UNPACK_ALIGNMENT',
],
'valid_es3': [
'GL_PACK_ROW_LENGTH',
'GL_PACK_SKIP_PIXELS',
'GL_PACK_SKIP_ROWS',
'GL_UNPACK_ROW_LENGTH',
'GL_UNPACK_IMAGE_HEIGHT',
'GL_UNPACK_SKIP_PIXELS',
'GL_UNPACK_SKIP_ROWS',
'GL_UNPACK_SKIP_IMAGES',
],
'invalid': [
'GL_PACK_SWAP_BYTES',
'GL_UNPACK_SWAP_BYTES',
],
},
'PixelStoreAlignment': {
'type': 'GLint',
'valid': [
'1',
'2',
'4',
'8',
],
'invalid': [
'3',
'9',
],
},
'ReadPixelFormat': {
'type': 'GLenum',
'valid': [
'GL_ALPHA',
'GL_RGB',
'GL_RGBA',
],
'valid_es3': [
'GL_RGBA_INTEGER',
],
'deprecated_es3': [
'GL_ALPHA',
'GL_RGB',
],
},
'PixelType': {
'type': 'GLenum',
'valid': [
'GL_UNSIGNED_BYTE',
'GL_UNSIGNED_SHORT_5_6_5',
'GL_UNSIGNED_SHORT_4_4_4_4',
'GL_UNSIGNED_SHORT_5_5_5_1',
],
'valid_es3': [
'GL_BYTE',
'GL_UNSIGNED_SHORT',
'GL_SHORT',
'GL_UNSIGNED_INT',
'GL_INT',
'GL_HALF_FLOAT',
'GL_FLOAT',
'GL_UNSIGNED_INT_2_10_10_10_REV',
'GL_UNSIGNED_INT_10F_11F_11F_REV',
'GL_UNSIGNED_INT_5_9_9_9_REV',
'GL_UNSIGNED_INT_24_8',
'GL_FLOAT_32_UNSIGNED_INT_24_8_REV',
],
'invalid': [
'GL_UNSIGNED_BYTE_3_3_2',
],
},
'ReadPixelType': {
'type': 'GLenum',
'valid': [
'GL_UNSIGNED_BYTE',
'GL_UNSIGNED_SHORT_5_6_5',
'GL_UNSIGNED_SHORT_4_4_4_4',
'GL_UNSIGNED_SHORT_5_5_5_1',
],
'invalid': [
'GL_SHORT',
],
'valid_es3': [
'GL_UNSIGNED_INT',
'GL_INT',
'GL_FLOAT',
],
'deprecated_es3': [
'GL_UNSIGNED_SHORT_5_6_5',
'GL_UNSIGNED_SHORT_4_4_4_4',
'GL_UNSIGNED_SHORT_5_5_5_1',
],
},
'RenderBufferFormat': {
'type': 'GLenum',
'valid': [
'GL_RGBA4',
'GL_RGB565',
'GL_RGB5_A1',
'GL_DEPTH_COMPONENT16',
'GL_STENCIL_INDEX8',
],
'valid_es3': [
'GL_R8',
'GL_R8UI',
'GL_R8I',
'GL_R16UI',
'GL_R16I',
'GL_R32UI',
'GL_R32I',
'GL_RG8',
'GL_RG8UI',
'GL_RG8I',
'GL_RG16UI',
'GL_RG16I',
'GL_RG32UI',
'GL_RG32I',
'GL_RGB8',
'GL_RGBA8',
'GL_SRGB8_ALPHA8',
'GL_RGB10_A2',
'GL_RGBA8UI',
'GL_RGBA8I',
'GL_RGB10_A2UI',
'GL_RGBA16UI',
'GL_RGBA16I',
'GL_RGBA32UI',
'GL_RGBA32I',
'GL_DEPTH_COMPONENT24',
'GL_DEPTH_COMPONENT32F',
'GL_DEPTH24_STENCIL8',
'GL_DEPTH32F_STENCIL8',
],
},
'ShaderBinaryFormat': {
'type': 'GLenum',
'valid': [
],
},
'StencilOp': {
'type': 'GLenum',
'valid': [
'GL_KEEP',
'GL_ZERO',
'GL_REPLACE',
'GL_INCR',
'GL_INCR_WRAP',
'GL_DECR',
'GL_DECR_WRAP',
'GL_INVERT',
],
},
'TextureFormat': {
'type': 'GLenum',
'valid': [
'GL_ALPHA',
'GL_LUMINANCE',
'GL_LUMINANCE_ALPHA',
'GL_RGB',
'GL_RGBA',
],
'valid_es3': [
'GL_RED',
'GL_RED_INTEGER',
'GL_RG',
'GL_RG_INTEGER',
'GL_RGB_INTEGER',
'GL_RGBA_INTEGER',
'GL_DEPTH_COMPONENT',
'GL_DEPTH_STENCIL',
],
'invalid': [
'GL_BGRA',
'GL_BGR',
],
},
'TextureInternalFormat': {
'type': 'GLenum',
'valid': [
'GL_ALPHA',
'GL_LUMINANCE',
'GL_LUMINANCE_ALPHA',
'GL_RGB',
'GL_RGBA',
],
'valid_es3': [
'GL_R8',
'GL_R8_SNORM',
'GL_R16F',
'GL_R32F',
'GL_R8UI',
'GL_R8I',
'GL_R16UI',
'GL_R16I',
'GL_R32UI',
'GL_R32I',
'GL_RG8',
'GL_RG8_SNORM',
'GL_RG16F',
'GL_RG32F',
'GL_RG8UI',
'GL_RG8I',
'GL_RG16UI',
'GL_RG16I',
'GL_RG32UI',
'GL_RG32I',
'GL_RGB8',
'GL_SRGB8',
'GL_RGB565',
'GL_RGB8_SNORM',
'GL_R11F_G11F_B10F',
'GL_RGB9_E5',
'GL_RGB16F',
'GL_RGB32F',
'GL_RGB8UI',
'GL_RGB8I',
'GL_RGB16UI',
'GL_RGB16I',
'GL_RGB32UI',
'GL_RGB32I',
'GL_RGBA8',
'GL_SRGB8_ALPHA8',
'GL_RGBA8_SNORM',
'GL_RGB5_A1',
'GL_RGBA4',
'GL_RGB10_A2',
'GL_RGBA16F',
'GL_RGBA32F',
'GL_RGBA8UI',
'GL_RGBA8I',
'GL_RGB10_A2UI',
'GL_RGBA16UI',
'GL_RGBA16I',
'GL_RGBA32UI',
'GL_RGBA32I',
# The DEPTH/STENCIL formats are not supported in CopyTexImage2D.
# We will reject them dynamically in GPU command buffer.
'GL_DEPTH_COMPONENT16',
'GL_DEPTH_COMPONENT24',
'GL_DEPTH_COMPONENT32F',
'GL_DEPTH24_STENCIL8',
'GL_DEPTH32F_STENCIL8',
],
'invalid': [
'GL_BGRA',
'GL_BGR',
],
},
'TextureInternalFormatStorage': {
'type': 'GLenum',
'valid': [
'GL_RGB565',
'GL_RGBA4',
'GL_RGB5_A1',
'GL_ALPHA8_EXT',
'GL_LUMINANCE8_EXT',
'GL_LUMINANCE8_ALPHA8_EXT',
'GL_RGB8_OES',
'GL_RGBA8_OES',
],
'valid_es3': [
'GL_R8',
'GL_R8_SNORM',
'GL_R16F',
'GL_R32F',
'GL_R8UI',
'GL_R8I',
'GL_R16UI',
'GL_R16I',
'GL_R32UI',
'GL_R32I',
'GL_RG8',
'GL_RG8_SNORM',
'GL_RG16F',
'GL_RG32F',
'GL_RG8UI',
'GL_RG8I',
'GL_RG16UI',
'GL_RG16I',
'GL_RG32UI',
'GL_RG32I',
'GL_SRGB8',
'GL_RGB8_SNORM',
'GL_R11F_G11F_B10F',
'GL_RGB9_E5',
'GL_RGB16F',
'GL_RGB32F',
'GL_RGB8UI',
'GL_RGB8I',
'GL_RGB16UI',
'GL_RGB16I',
'GL_RGB32UI',
'GL_RGB32I',
'GL_SRGB8_ALPHA8',
'GL_RGBA8_SNORM',
'GL_RGB10_A2',
'GL_RGBA16F',
'GL_RGBA32F',
'GL_RGBA8UI',
'GL_RGBA8I',
'GL_RGB10_A2UI',
'GL_RGBA16UI',
'GL_RGBA16I',
'GL_RGBA32UI',
'GL_RGBA32I',
'GL_DEPTH_COMPONENT16',
'GL_DEPTH_COMPONENT24',
'GL_DEPTH_COMPONENT32F',
'GL_DEPTH24_STENCIL8',
'GL_DEPTH32F_STENCIL8',
'GL_COMPRESSED_R11_EAC',
'GL_COMPRESSED_SIGNED_R11_EAC',
'GL_COMPRESSED_RG11_EAC',
'GL_COMPRESSED_SIGNED_RG11_EAC',
'GL_COMPRESSED_RGB8_ETC2',
'GL_COMPRESSED_SRGB8_ETC2',
'GL_COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2',
'GL_COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_ETC2',
'GL_COMPRESSED_RGBA8_ETC2_EAC',
'GL_COMPRESSED_SRGB8_ALPHA8_ETC2_EAC',
],
'deprecated_es3': [
'GL_ALPHA8_EXT',
'GL_LUMINANCE8_EXT',
'GL_LUMINANCE8_ALPHA8_EXT',
'GL_ALPHA16F_EXT',
'GL_LUMINANCE16F_EXT',
'GL_LUMINANCE_ALPHA16F_EXT',
'GL_ALPHA32F_EXT',
'GL_LUMINANCE32F_EXT',
'GL_LUMINANCE_ALPHA32F_EXT',
],
},
'ImageInternalFormat': {
'type': 'GLenum',
'valid': [
'GL_RGB',
'GL_RGB_YUV_420_CHROMIUM',
'GL_RGBA',
],
},
'ImageUsage': {
'type': 'GLenum',
'valid': [
'GL_MAP_CHROMIUM',
'GL_SCANOUT_CHROMIUM'
],
},
'ValueBufferTarget': {
'type': 'GLenum',
'valid': [
'GL_SUBSCRIBED_VALUES_BUFFER_CHROMIUM',
],
},
'SubscriptionTarget': {
'type': 'GLenum',
'valid': [
'GL_MOUSE_POSITION_CHROMIUM',
],
},
'UniformParameter': {
'type': 'GLenum',
'valid': [
'GL_UNIFORM_SIZE',
'GL_UNIFORM_TYPE',
'GL_UNIFORM_NAME_LENGTH',
'GL_UNIFORM_BLOCK_INDEX',
'GL_UNIFORM_OFFSET',
'GL_UNIFORM_ARRAY_STRIDE',
'GL_UNIFORM_MATRIX_STRIDE',
'GL_UNIFORM_IS_ROW_MAJOR',
],
'invalid': [
'GL_UNIFORM_BLOCK_NAME_LENGTH',
],
},
'UniformBlockParameter': {
'type': 'GLenum',
'valid': [
'GL_UNIFORM_BLOCK_BINDING',
'GL_UNIFORM_BLOCK_DATA_SIZE',
'GL_UNIFORM_BLOCK_NAME_LENGTH',
'GL_UNIFORM_BLOCK_ACTIVE_UNIFORMS',
'GL_UNIFORM_BLOCK_ACTIVE_UNIFORM_INDICES',
'GL_UNIFORM_BLOCK_REFERENCED_BY_VERTEX_SHADER',
'GL_UNIFORM_BLOCK_REFERENCED_BY_FRAGMENT_SHADER',
],
'invalid': [
'GL_NEAREST',
],
},
'VertexAttribType': {
'type': 'GLenum',
'valid': [
'GL_BYTE',
'GL_UNSIGNED_BYTE',
'GL_SHORT',
'GL_UNSIGNED_SHORT',
# 'GL_FIXED', // This is not available on Desktop GL.
'GL_FLOAT',
],
'valid_es3': [
'GL_INT',
'GL_UNSIGNED_INT',
'GL_HALF_FLOAT',
'GL_INT_2_10_10_10_REV',
'GL_UNSIGNED_INT_2_10_10_10_REV',
],
'invalid': [
'GL_DOUBLE',
],
},
'VertexAttribIType': {
'type': 'GLenum',
'valid': [
'GL_BYTE',
'GL_UNSIGNED_BYTE',
'GL_SHORT',
'GL_UNSIGNED_SHORT',
'GL_INT',
'GL_UNSIGNED_INT',
],
'invalid': [
'GL_FLOAT',
'GL_DOUBLE',
],
},
'TextureBorder': {
'type': 'GLint',
'is_complete': True,
'valid': [
'0',
],
'invalid': [
'1',
],
},
'VertexAttribSize': {
'type': 'GLint',
'valid': [
'1',
'2',
'3',
'4',
],
'invalid': [
'0',
'5',
],
},
'ZeroOnly': {
'type': 'GLint',
'is_complete': True,
'valid': [
'0',
],
'invalid': [
'1',
],
},
'FalseOnly': {
'type': 'GLboolean',
'is_complete': True,
'valid': [
'false',
],
'invalid': [
'true',
],
},
'ResetStatus': {
'type': 'GLenum',
'valid': [
'GL_GUILTY_CONTEXT_RESET_ARB',
'GL_INNOCENT_CONTEXT_RESET_ARB',
'GL_UNKNOWN_CONTEXT_RESET_ARB',
],
},
'SyncCondition': {
'type': 'GLenum',
'is_complete': True,
'valid': [
'GL_SYNC_GPU_COMMANDS_COMPLETE',
],
'invalid': [
'0',
],
},
'SyncFlags': {
'type': 'GLbitfield',
'is_complete': True,
'valid': [
'0',
],
'invalid': [
'1',
],
},
'SyncFlushFlags': {
'type': 'GLbitfield',
'valid': [
'GL_SYNC_FLUSH_COMMANDS_BIT',
'0',
],
'invalid': [
'0xFFFFFFFF',
],
},
'SyncParameter': {
'type': 'GLenum',
'valid': [
'GL_SYNC_STATUS', # This needs to be the 1st; all others are cached.
'GL_OBJECT_TYPE',
'GL_SYNC_CONDITION',
'GL_SYNC_FLAGS',
],
'invalid': [
'GL_SYNC_FENCE',
],
},
}
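# Illustrative helper (a sketch, not part of this generator) showing how a
# named type entry above can be checked against an argument value: values in
# 'valid_es3' only become legal in an ES3 context, while 'deprecated_es3'
# values stop being legal there. Both keys are optional.
def _is_valid_for_named_type(type_name, value, es3=False):
  info = _NAMED_TYPE_INFO[type_name]
  allowed = set(info.get('valid', []))
  if es3:
    allowed |= set(info.get('valid_es3', []))
    allowed -= set(info.get('deprecated_es3', []))
  return value in allowed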
# This table specifies the different pepper interfaces that are supported for
# GL commands. 'dev' is true if it's a dev interface.
_PEPPER_INTERFACES = [
{'name': '', 'dev': False},
{'name': 'InstancedArrays', 'dev': False},
{'name': 'FramebufferBlit', 'dev': False},
{'name': 'FramebufferMultisample', 'dev': False},
{'name': 'ChromiumEnableFeature', 'dev': False},
{'name': 'ChromiumMapSub', 'dev': False},
{'name': 'Query', 'dev': False},
{'name': 'VertexArrayObject', 'dev': False},
{'name': 'DrawBuffers', 'dev': True},
]
# A function info object specifies the type and other special data for the
# command that will be generated. A base function info object is generated by
# parsing the "cmd_buffer_functions.txt", one for each function in the
# file. These function info objects can be augmented and their values can be
# overridden by adding an object to the table below.
#
# Must match function names specified in "cmd_buffer_functions.txt".
#
# cmd_comment: A comment added to the cmd format.
# type: defines which handler will be used to generate code.
# decoder_func: defines which function to call in the decoder to execute the
# corresponding GL command. If not specified the GL command will
# be called directly.
# gl_test_func: GL function that is expected to be called when testing.
# cmd_args: The arguments to use for the command. This overrides generating
# them based on the GL function arguments.
# gen_cmd: Whether or not this function generates a command. Default = True.
# data_transfer_methods: Array of methods that are used for transferring the
# pointer data. Possible values: 'immediate', 'shm', 'bucket'.
# The default is 'immediate' if the command has one pointer
# argument, otherwise 'shm'. One command is generated for each
# transfer method. Affects only commands which are not of type
# 'HandWritten', 'GETn' or 'GLcharN'.
# Note: the command arguments that affect this are the final args,
# taking cmd_args override into consideration.
# impl_func: Whether or not to generate the GLES2Implementation part of this
# command.
# impl_decl: Whether or not to generate the GLES2Implementation declaration
# for this command.
# needs_size: If True a data_size field is added to the command.
# count: The number of units per element. For PUTn or PUT types.
# use_count_func: If True the actual data count needs to be computed; the count
# argument specifies the maximum count.
# unit_test: If False no service side unit test will be generated.
# client_test: If False no client side unit test will be generated.
# expectation: If False the unit test will have no expected calls.
# gen_func: Name of function that generates GL resource for corresponding
# bind function.
# states: array of states that get set by this function corresponding to
# the given arguments
# state_flag: name of flag that is set to true when function is called.
# no_gl: no GL function is called.
# valid_args: A dictionary of argument indices to args to use in unit tests
# when they can not be automatically determined.
# pepper_interface: The pepper interface that is used for this extension
# pepper_name: The name of the function as exposed to pepper.
# pepper_args: A string representing the argument list (what would appear in
# C/C++ between the parentheses for the function declaration)
# that the Pepper API expects for this function. Use this only if
# the stable Pepper API differs from the GLES2 argument list.
# invalid_test: False if no invalid test needed.
# shadowed: True = the value is shadowed so no glGetXXX call will be made.
# first_element_only: For PUT types, True if only the first element of an
# array is used and we end up calling the single value
# corresponding function. eg. TexParameteriv -> TexParameteri
# extension: Function is an extension to GL and should not be exposed to
# pepper unless pepper_interface is defined.
# extension_flag: Function is an extension and should be enabled only when
# the corresponding feature info flag is enabled. Implies
# 'extension': True.
# not_shared: For GENn types, True if objects can't be shared between contexts
# unsafe: True = no validation is implemented on the service side and the
# command is only available with --enable-unsafe-es3-apis.
# id_mapping: A list of resource type names whose client side IDs need to be
# mapped to service side IDs. This is only used for unsafe APIs.
_FUNCTION_INFO = {
'ActiveTexture': {
'decoder_func': 'DoActiveTexture',
'unit_test': False,
'impl_func': False,
'client_test': False,
},
'AttachShader': {'decoder_func': 'DoAttachShader'},
'BindAttribLocation': {
'type': 'GLchar',
'data_transfer_methods': ['bucket'],
'needs_size': True,
},
'BindBuffer': {
'type': 'Bind',
'decoder_func': 'DoBindBuffer',
'gen_func': 'GenBuffersARB',
},
'BindBufferBase': {
'type': 'Bind',
'id_mapping': [ 'Buffer' ],
'gen_func': 'GenBuffersARB',
'unsafe': True,
},
'BindBufferRange': {
'type': 'Bind',
'id_mapping': [ 'Buffer' ],
'gen_func': 'GenBuffersARB',
'valid_args': {
'3': '4',
'4': '4'
},
'unsafe': True,
},
'BindFramebuffer': {
'type': 'Bind',
'decoder_func': 'DoBindFramebuffer',
'gl_test_func': 'glBindFramebufferEXT',
'gen_func': 'GenFramebuffersEXT',
'trace_level': 1,
},
'BindRenderbuffer': {
'type': 'Bind',
'decoder_func': 'DoBindRenderbuffer',
'gl_test_func': 'glBindRenderbufferEXT',
'gen_func': 'GenRenderbuffersEXT',
},
'BindSampler': {
'type': 'Bind',
'id_mapping': [ 'Sampler' ],
'unsafe': True,
},
'BindTexture': {
'type': 'Bind',
'decoder_func': 'DoBindTexture',
'gen_func': 'GenTextures',
# TODO(gman): remove this once client side caching works.
'client_test': False,
'trace_level': 2,
},
'BindTransformFeedback': {
'type': 'Bind',
'id_mapping': [ 'TransformFeedback' ],
'unsafe': True,
},
'BlitFramebufferCHROMIUM': {
'decoder_func': 'DoBlitFramebufferCHROMIUM',
'unit_test': False,
'extension_flag': 'chromium_framebuffer_multisample',
'pepper_interface': 'FramebufferBlit',
'pepper_name': 'BlitFramebufferEXT',
'defer_reads': True,
'defer_draws': True,
'trace_level': 1,
},
'BufferData': {
'type': 'Manual',
'data_transfer_methods': ['shm'],
'client_test': False,
'trace_level': 2,
},
'BufferSubData': {
'type': 'Data',
'client_test': False,
'decoder_func': 'DoBufferSubData',
'data_transfer_methods': ['shm'],
'trace_level': 2,
},
'CheckFramebufferStatus': {
'type': 'Is',
'decoder_func': 'DoCheckFramebufferStatus',
'gl_test_func': 'glCheckFramebufferStatusEXT',
'error_value': 'GL_FRAMEBUFFER_UNSUPPORTED',
'result': ['GLenum'],
},
'Clear': {
'decoder_func': 'DoClear',
'defer_draws': True,
'trace_level': 2,
},
'ClearBufferiv': {
'type': 'PUT',
'use_count_func': True,
'count': 4,
'unsafe': True,
'trace_level': 2,
},
'ClearBufferuiv': {
'type': 'PUT',
'count': 4,
'unsafe': True,
'trace_level': 2,
},
'ClearBufferfv': {
'type': 'PUT',
'use_count_func': True,
'count': 4,
'unsafe': True,
'trace_level': 2,
},
'ClearBufferfi': {
'unsafe': True,
'trace_level': 2,
},
'ClearColor': {
'type': 'StateSet',
'state': 'ClearColor',
},
'ClearDepthf': {
'type': 'StateSet',
'state': 'ClearDepthf',
'decoder_func': 'glClearDepth',
'gl_test_func': 'glClearDepth',
'valid_args': {
'0': '0.5f'
},
},
'ClientWaitSync': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args': 'GLuint sync, GLbitfieldSyncFlushFlags flags, '
'GLuint timeout_0, GLuint timeout_1, GLenum* result',
'unsafe': True,
'result': ['GLenum'],
'trace_level': 2,
},
'ColorMask': {
'type': 'StateSet',
'state': 'ColorMask',
'no_gl': True,
'expectation': False,
},
'ConsumeTextureCHROMIUM': {
'decoder_func': 'DoConsumeTextureCHROMIUM',
'impl_func': False,
'type': 'PUT',
'count': 64, # GL_MAILBOX_SIZE_CHROMIUM
'unit_test': False,
'client_test': False,
'extension': "CHROMIUM_texture_mailbox",
'chromium': True,
'trace_level': 2,
},
'CopyBufferSubData': {
'unsafe': True,
},
'CreateAndConsumeTextureCHROMIUM': {
'decoder_func': 'DoCreateAndConsumeTextureCHROMIUM',
'impl_func': False,
'type': 'HandWritten',
'data_transfer_methods': ['immediate'],
'unit_test': False,
'client_test': False,
'extension': "CHROMIUM_texture_mailbox",
'chromium': True,
'trace_level': 2,
},
'GenValuebuffersCHROMIUM': {
'type': 'GENn',
'gl_test_func': 'glGenValuebuffersCHROMIUM',
'resource_type': 'Valuebuffer',
'resource_types': 'Valuebuffers',
'unit_test': False,
'extension': True,
'chromium': True,
},
'DeleteValuebuffersCHROMIUM': {
'type': 'DELn',
'gl_test_func': 'glDeleteValuebuffersCHROMIUM',
'resource_type': 'Valuebuffer',
'resource_types': 'Valuebuffers',
'unit_test': False,
'extension': True,
'chromium': True,
},
'IsValuebufferCHROMIUM': {
'type': 'Is',
'decoder_func': 'DoIsValuebufferCHROMIUM',
'expectation': False,
'extension': True,
'chromium': True,
},
'BindValuebufferCHROMIUM': {
'type': 'Bind',
'decoder_func': 'DoBindValueBufferCHROMIUM',
'gen_func': 'GenValueBuffersCHROMIUM',
'unit_test': False,
'extension': True,
'chromium': True,
},
'SubscribeValueCHROMIUM': {
'decoder_func': 'DoSubscribeValueCHROMIUM',
'unit_test': False,
'extension': True,
'chromium': True,
},
'PopulateSubscribedValuesCHROMIUM': {
'decoder_func': 'DoPopulateSubscribedValuesCHROMIUM',
'unit_test': False,
'extension': True,
'chromium': True,
},
'UniformValuebufferCHROMIUM': {
'decoder_func': 'DoUniformValueBufferCHROMIUM',
'unit_test': False,
'extension': True,
'chromium': True,
},
'ClearStencil': {
'type': 'StateSet',
'state': 'ClearStencil',
},
'EnableFeatureCHROMIUM': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'decoder_func': 'DoEnableFeatureCHROMIUM',
'expectation': False,
'cmd_args': 'GLuint bucket_id, GLint* result',
'result': ['GLint'],
'extension': True,
'chromium': True,
'pepper_interface': 'ChromiumEnableFeature',
},
'CompileShader': {'decoder_func': 'DoCompileShader', 'unit_test': False},
'CompressedTexImage2D': {
'type': 'Manual',
'data_transfer_methods': ['bucket', 'shm'],
'trace_level': 1,
},
'CompressedTexSubImage2D': {
'type': 'Data',
'data_transfer_methods': ['bucket', 'shm'],
'decoder_func': 'DoCompressedTexSubImage2D',
'trace_level': 1,
},
'CopyTexImage2D': {
'decoder_func': 'DoCopyTexImage2D',
'unit_test': False,
'defer_reads': True,
'trace_level': 1,
},
'CopyTexSubImage2D': {
'decoder_func': 'DoCopyTexSubImage2D',
'defer_reads': True,
'trace_level': 1,
},
'CompressedTexImage3D': {
'type': 'Manual',
'data_transfer_methods': ['bucket', 'shm'],
'unsafe': True,
'trace_level': 1,
},
'CompressedTexSubImage3D': {
'type': 'Data',
'data_transfer_methods': ['bucket', 'shm'],
'decoder_func': 'DoCompressedTexSubImage3D',
'unsafe': True,
'trace_level': 1,
},
'CopyTexSubImage3D': {
'defer_reads': True,
'unsafe': True,
'trace_level': 1,
},
'CreateImageCHROMIUM': {
'type': 'Manual',
'cmd_args':
'ClientBuffer buffer, GLsizei width, GLsizei height, '
'GLenum internalformat',
'result': ['GLuint'],
'client_test': False,
'gen_cmd': False,
'expectation': False,
'extension': "CHROMIUM_image",
'chromium': True,
'trace_level': 1,
},
'DestroyImageCHROMIUM': {
'type': 'Manual',
'client_test': False,
'gen_cmd': False,
'extension': "CHROMIUM_image",
'chromium': True,
'trace_level': 1,
},
'CreateGpuMemoryBufferImageCHROMIUM': {
'type': 'Manual',
'cmd_args':
'GLsizei width, GLsizei height, GLenum internalformat, GLenum usage',
'result': ['GLuint'],
'client_test': False,
'gen_cmd': False,
'expectation': False,
'extension': "CHROMIUM_image",
'chromium': True,
'trace_level': 1,
},
'CreateProgram': {
'type': 'Create',
'client_test': False,
},
'CreateShader': {
'type': 'Create',
'client_test': False,
},
'BlendColor': {
'type': 'StateSet',
'state': 'BlendColor',
},
'BlendEquation': {
'type': 'StateSetRGBAlpha',
'state': 'BlendEquation',
'valid_args': {
'0': 'GL_FUNC_SUBTRACT'
},
},
'BlendEquationSeparate': {
'type': 'StateSet',
'state': 'BlendEquation',
'valid_args': {
'0': 'GL_FUNC_SUBTRACT'
},
},
'BlendFunc': {
'type': 'StateSetRGBAlpha',
'state': 'BlendFunc',
},
'BlendFuncSeparate': {
'type': 'StateSet',
'state': 'BlendFunc',
},
'BlendBarrierKHR': {
'gl_test_func': 'glBlendBarrierKHR',
'extension': True,
'extension_flag': 'blend_equation_advanced',
'client_test': False,
},
'SampleCoverage': {'decoder_func': 'DoSampleCoverage'},
'StencilFunc': {
'type': 'StateSetFrontBack',
'state': 'StencilFunc',
},
'StencilFuncSeparate': {
'type': 'StateSetFrontBackSeparate',
'state': 'StencilFunc',
},
'StencilOp': {
'type': 'StateSetFrontBack',
'state': 'StencilOp',
'valid_args': {
'1': 'GL_INCR'
},
},
'StencilOpSeparate': {
'type': 'StateSetFrontBackSeparate',
'state': 'StencilOp',
'valid_args': {
'1': 'GL_INCR'
},
},
'Hint': {
'type': 'StateSetNamedParameter',
'state': 'Hint',
},
'CullFace': {'type': 'StateSet', 'state': 'CullFace'},
'FrontFace': {'type': 'StateSet', 'state': 'FrontFace'},
'DepthFunc': {'type': 'StateSet', 'state': 'DepthFunc'},
'LineWidth': {
'type': 'StateSet',
'state': 'LineWidth',
'valid_args': {
'0': '0.5f'
},
},
'PolygonOffset': {
'type': 'StateSet',
'state': 'PolygonOffset',
},
'DeleteBuffers': {
'type': 'DELn',
'gl_test_func': 'glDeleteBuffersARB',
'resource_type': 'Buffer',
'resource_types': 'Buffers',
},
'DeleteFramebuffers': {
'type': 'DELn',
'gl_test_func': 'glDeleteFramebuffersEXT',
'resource_type': 'Framebuffer',
'resource_types': 'Framebuffers',
'trace_level': 2,
},
'DeleteProgram': { 'type': 'Delete' },
'DeleteRenderbuffers': {
'type': 'DELn',
'gl_test_func': 'glDeleteRenderbuffersEXT',
'resource_type': 'Renderbuffer',
'resource_types': 'Renderbuffers',
'trace_level': 2,
},
'DeleteSamplers': {
'type': 'DELn',
'resource_type': 'Sampler',
'resource_types': 'Samplers',
'unsafe': True,
},
'DeleteShader': { 'type': 'Delete' },
'DeleteSync': {
'type': 'Delete',
'cmd_args': 'GLuint sync',
'resource_type': 'Sync',
'unsafe': True,
},
'DeleteTextures': {
'type': 'DELn',
'resource_type': 'Texture',
'resource_types': 'Textures',
},
'DeleteTransformFeedbacks': {
'type': 'DELn',
'resource_type': 'TransformFeedback',
'resource_types': 'TransformFeedbacks',
'unsafe': True,
},
'DepthRangef': {
'decoder_func': 'DoDepthRangef',
'gl_test_func': 'glDepthRange',
},
'DepthMask': {
'type': 'StateSet',
'state': 'DepthMask',
'no_gl': True,
'expectation': False,
},
'DetachShader': {'decoder_func': 'DoDetachShader'},
'Disable': {
'decoder_func': 'DoDisable',
'impl_func': False,
'client_test': False,
},
'DisableVertexAttribArray': {
'decoder_func': 'DoDisableVertexAttribArray',
'impl_decl': False,
},
'DrawArrays': {
'type': 'Manual',
'cmd_args': 'GLenumDrawMode mode, GLint first, GLsizei count',
'defer_draws': True,
'trace_level': 2,
},
'DrawElements': {
'type': 'Manual',
'cmd_args': 'GLenumDrawMode mode, GLsizei count, '
'GLenumIndexType type, GLuint index_offset',
'client_test': False,
'defer_draws': True,
'trace_level': 2,
},
'DrawRangeElements': {
'type': 'Manual',
'gen_cmd': 'False',
'unsafe': True,
},
'Enable': {
'decoder_func': 'DoEnable',
'impl_func': False,
'client_test': False,
},
'EnableVertexAttribArray': {
'decoder_func': 'DoEnableVertexAttribArray',
'impl_decl': False,
},
'FenceSync': {
'type': 'Create',
'client_test': False,
'unsafe': True,
'trace_level': 1,
},
'Finish': {
'impl_func': False,
'client_test': False,
'decoder_func': 'DoFinish',
'defer_reads': True,
'trace_level': 1,
},
'Flush': {
'impl_func': False,
'decoder_func': 'DoFlush',
'trace_level': 1,
},
'FramebufferRenderbuffer': {
'decoder_func': 'DoFramebufferRenderbuffer',
'gl_test_func': 'glFramebufferRenderbufferEXT',
'trace_level': 1,
},
'FramebufferTexture2D': {
'decoder_func': 'DoFramebufferTexture2D',
'gl_test_func': 'glFramebufferTexture2DEXT',
'trace_level': 1,
},
'FramebufferTexture2DMultisampleEXT': {
'decoder_func': 'DoFramebufferTexture2DMultisample',
'gl_test_func': 'glFramebufferTexture2DMultisampleEXT',
'expectation': False,
'unit_test': False,
'extension_flag': 'multisampled_render_to_texture',
'trace_level': 1,
},
'FramebufferTextureLayer': {
'decoder_func': 'DoFramebufferTextureLayer',
'unsafe': True,
'trace_level': 1,
},
'GenerateMipmap': {
'decoder_func': 'DoGenerateMipmap',
'gl_test_func': 'glGenerateMipmapEXT',
'trace_level': 1,
},
'GenBuffers': {
'type': 'GENn',
'gl_test_func': 'glGenBuffersARB',
'resource_type': 'Buffer',
'resource_types': 'Buffers',
},
'GenMailboxCHROMIUM': {
'type': 'HandWritten',
'impl_func': False,
'extension': "CHROMIUM_texture_mailbox",
'chromium': True,
},
'GenFramebuffers': {
'type': 'GENn',
'gl_test_func': 'glGenFramebuffersEXT',
'resource_type': 'Framebuffer',
'resource_types': 'Framebuffers',
},
'GenRenderbuffers': {
'type': 'GENn', 'gl_test_func': 'glGenRenderbuffersEXT',
'resource_type': 'Renderbuffer',
'resource_types': 'Renderbuffers',
},
'GenSamplers': {
'type': 'GENn',
'gl_test_func': 'glGenSamplers',
'resource_type': 'Sampler',
'resource_types': 'Samplers',
'unsafe': True,
},
'GenTextures': {
'type': 'GENn',
'gl_test_func': 'glGenTextures',
'resource_type': 'Texture',
'resource_types': 'Textures',
},
'GenTransformFeedbacks': {
'type': 'GENn',
'gl_test_func': 'glGenTransformFeedbacks',
'resource_type': 'TransformFeedback',
'resource_types': 'TransformFeedbacks',
'unsafe': True,
},
'GetActiveAttrib': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLidProgram program, GLuint index, uint32_t name_bucket_id, '
'void* result',
'result': [
'int32_t success',
'int32_t size',
'uint32_t type',
],
},
'GetActiveUniform': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLidProgram program, GLuint index, uint32_t name_bucket_id, '
'void* result',
'result': [
'int32_t success',
'int32_t size',
'uint32_t type',
],
},
'GetActiveUniformBlockiv': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'result': ['SizedResult<GLint>'],
'unsafe': True,
},
'GetActiveUniformBlockName': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLidProgram program, GLuint index, uint32_t name_bucket_id, '
'void* result',
'result': ['int32_t'],
'unsafe': True,
},
'GetActiveUniformsiv': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLidProgram program, uint32_t indices_bucket_id, GLenum pname, '
'GLint* params',
'result': ['SizedResult<GLint>'],
'unsafe': True,
},
'GetAttachedShaders': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args': 'GLidProgram program, void* result, uint32_t result_size',
'result': ['SizedResult<GLuint>'],
},
'GetAttribLocation': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLidProgram program, uint32_t name_bucket_id, GLint* location',
'result': ['GLint'],
'error_return': -1,
},
'GetFragDataLocation': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLidProgram program, uint32_t name_bucket_id, GLint* location',
'result': ['GLint'],
'error_return': -1,
'unsafe': True,
},
'GetBooleanv': {
'type': 'GETn',
'result': ['SizedResult<GLboolean>'],
'decoder_func': 'DoGetBooleanv',
'gl_test_func': 'glGetBooleanv',
},
'GetBufferParameteriv': {
'type': 'GETn',
'result': ['SizedResult<GLint>'],
'decoder_func': 'DoGetBufferParameteriv',
'expectation': False,
'shadowed': True,
},
'GetError': {
'type': 'Is',
'decoder_func': 'GetErrorState()->GetGLError',
'impl_func': False,
'result': ['GLenum'],
'client_test': False,
},
'GetFloatv': {
'type': 'GETn',
'result': ['SizedResult<GLfloat>'],
'decoder_func': 'DoGetFloatv',
'gl_test_func': 'glGetFloatv',
},
'GetFramebufferAttachmentParameteriv': {
'type': 'GETn',
'decoder_func': 'DoGetFramebufferAttachmentParameteriv',
'gl_test_func': 'glGetFramebufferAttachmentParameterivEXT',
'result': ['SizedResult<GLint>'],
},
'GetGraphicsResetStatusKHR': {
'extension': True,
'client_test': False,
'gen_cmd': False,
'trace_level': 1,
},
'GetInteger64v': {
'type': 'GETn',
'result': ['SizedResult<GLint64>'],
'client_test': False,
'decoder_func': 'DoGetInteger64v',
'unsafe': True
},
'GetIntegerv': {
'type': 'GETn',
'result': ['SizedResult<GLint>'],
'decoder_func': 'DoGetIntegerv',
'client_test': False,
},
'GetInteger64i_v': {
'type': 'GETn',
'result': ['SizedResult<GLint64>'],
'client_test': False,
'unsafe': True
},
'GetIntegeri_v': {
'type': 'GETn',
'result': ['SizedResult<GLint>'],
'client_test': False,
'unsafe': True
},
'GetInternalformativ': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'result': ['SizedResult<GLint>'],
'cmd_args':
'GLenumRenderBufferTarget target, GLenumRenderBufferFormat format, '
'GLenumInternalFormatParameter pname, GLint* params',
'unsafe': True,
},
'GetMaxValueInBufferCHROMIUM': {
'type': 'Is',
'decoder_func': 'DoGetMaxValueInBufferCHROMIUM',
'result': ['GLuint'],
'unit_test': False,
'client_test': False,
'extension': True,
'chromium': True,
'impl_func': False,
},
'GetProgramiv': {
'type': 'GETn',
'decoder_func': 'DoGetProgramiv',
'result': ['SizedResult<GLint>'],
'expectation': False,
},
'GetProgramInfoCHROMIUM': {
'type': 'Custom',
'expectation': False,
'impl_func': False,
'extension': True,
'chromium': True,
'client_test': False,
'cmd_args': 'GLidProgram program, uint32_t bucket_id',
'result': [
'uint32_t link_status',
'uint32_t num_attribs',
'uint32_t num_uniforms',
],
},
'GetProgramInfoLog': {
'type': 'STRn',
'expectation': False,
},
'GetRenderbufferParameteriv': {
'type': 'GETn',
'decoder_func': 'DoGetRenderbufferParameteriv',
'gl_test_func': 'glGetRenderbufferParameterivEXT',
'result': ['SizedResult<GLint>'],
},
'GetSamplerParameterfv': {
'type': 'GETn',
'result': ['SizedResult<GLfloat>'],
'id_mapping': [ 'Sampler' ],
'unsafe': True,
},
'GetSamplerParameteriv': {
'type': 'GETn',
'result': ['SizedResult<GLint>'],
'id_mapping': [ 'Sampler' ],
'unsafe': True,
},
'GetShaderiv': {
'type': 'GETn',
'decoder_func': 'DoGetShaderiv',
'result': ['SizedResult<GLint>'],
},
'GetShaderInfoLog': {
'type': 'STRn',
'get_len_func': 'glGetShaderiv',
'get_len_enum': 'GL_INFO_LOG_LENGTH',
'unit_test': False,
},
'GetShaderPrecisionFormat': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLenumShaderType shadertype, GLenumShaderPrecision precisiontype, '
'void* result',
'result': [
'int32_t success',
'int32_t min_range',
'int32_t max_range',
'int32_t precision',
],
},
'GetShaderSource': {
'type': 'STRn',
'get_len_func': 'DoGetShaderiv',
'get_len_enum': 'GL_SHADER_SOURCE_LENGTH',
'unit_test': False,
'client_test': False,
},
'GetString': {
'type': 'Custom',
'client_test': False,
'cmd_args': 'GLenumStringType name, uint32_t bucket_id',
},
'GetSynciv': {
'type': 'GETn',
'cmd_args': 'GLuint sync, GLenumSyncParameter pname, void* values',
'result': ['SizedResult<GLint>'],
'id_mapping': ['Sync'],
'unsafe': True,
},
'GetTexParameterfv': {
'type': 'GETn',
'decoder_func': 'DoGetTexParameterfv',
'result': ['SizedResult<GLfloat>']
},
'GetTexParameteriv': {
'type': 'GETn',
'decoder_func': 'DoGetTexParameteriv',
'result': ['SizedResult<GLint>']
},
'GetTranslatedShaderSourceANGLE': {
'type': 'STRn',
'get_len_func': 'DoGetShaderiv',
'get_len_enum': 'GL_TRANSLATED_SHADER_SOURCE_LENGTH_ANGLE',
'unit_test': False,
'extension': True,
},
'GetUniformBlockIndex': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLidProgram program, uint32_t name_bucket_id, GLuint* index',
'result': ['GLuint'],
'error_return': 'GL_INVALID_INDEX',
'unsafe': True,
},
'GetUniformBlocksCHROMIUM': {
'type': 'Custom',
'expectation': False,
'impl_func': False,
'extension': True,
'chromium': True,
'client_test': False,
'cmd_args': 'GLidProgram program, uint32_t bucket_id',
'result': ['uint32_t'],
'unsafe': True,
},
'GetUniformsES3CHROMIUM': {
'type': 'Custom',
'expectation': False,
'impl_func': False,
'extension': True,
'chromium': True,
'client_test': False,
'cmd_args': 'GLidProgram program, uint32_t bucket_id',
'result': ['uint32_t'],
'unsafe': True,
},
'GetTransformFeedbackVarying': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLidProgram program, GLuint index, uint32_t name_bucket_id, '
'void* result',
'result': [
'int32_t success',
'int32_t size',
'uint32_t type',
],
'unsafe': True,
},
'GetTransformFeedbackVaryingsCHROMIUM': {
'type': 'Custom',
'expectation': False,
'impl_func': False,
'extension': True,
'chromium': True,
'client_test': False,
'cmd_args': 'GLidProgram program, uint32_t bucket_id',
'result': ['uint32_t'],
'unsafe': True,
},
'GetUniformfv': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'result': ['SizedResult<GLfloat>'],
},
'GetUniformiv': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'result': ['SizedResult<GLint>'],
},
'GetUniformuiv': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'result': ['SizedResult<GLuint>'],
'unsafe': True,
},
'GetUniformIndices': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'result': ['SizedResult<GLuint>'],
'cmd_args': 'GLidProgram program, uint32_t names_bucket_id, '
'GLuint* indices',
'unsafe': True,
},
'GetUniformLocation': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args':
'GLidProgram program, uint32_t name_bucket_id, GLint* location',
'result': ['GLint'],
'error_return': -1, # http://www.opengl.org/sdk/docs/man/xhtml/glGetUniformLocation.xml
},
'GetVertexAttribfv': {
'type': 'GETn',
'result': ['SizedResult<GLfloat>'],
'impl_decl': False,
'decoder_func': 'DoGetVertexAttribfv',
'expectation': False,
'client_test': False,
},
'GetVertexAttribiv': {
'type': 'GETn',
'result': ['SizedResult<GLint>'],
'impl_decl': False,
'decoder_func': 'DoGetVertexAttribiv',
'expectation': False,
'client_test': False,
},
'GetVertexAttribIiv': {
'type': 'GETn',
'result': ['SizedResult<GLint>'],
'impl_decl': False,
'decoder_func': 'DoGetVertexAttribIiv',
'expectation': False,
'client_test': False,
'unsafe': True,
},
'GetVertexAttribIuiv': {
'type': 'GETn',
'result': ['SizedResult<GLuint>'],
'impl_decl': False,
'decoder_func': 'DoGetVertexAttribIuiv',
'expectation': False,
'client_test': False,
'unsafe': True,
},
'GetVertexAttribPointerv': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'result': ['SizedResult<GLuint>'],
'client_test': False,
},
'InvalidateFramebuffer': {
'type': 'PUTn',
'count': 1,
'client_test': False,
'unit_test': False,
'unsafe': True,
},
'InvalidateSubFramebuffer': {
'type': 'PUTn',
'count': 1,
'client_test': False,
'unit_test': False,
'unsafe': True,
},
'IsBuffer': {
'type': 'Is',
'decoder_func': 'DoIsBuffer',
'expectation': False,
},
'IsEnabled': {
'type': 'Is',
'decoder_func': 'DoIsEnabled',
'client_test': False,
'impl_func': False,
'expectation': False,
},
'IsFramebuffer': {
'type': 'Is',
'decoder_func': 'DoIsFramebuffer',
'expectation': False,
},
'IsProgram': {
'type': 'Is',
'decoder_func': 'DoIsProgram',
'expectation': False,
},
'IsRenderbuffer': {
'type': 'Is',
'decoder_func': 'DoIsRenderbuffer',
'expectation': False,
},
'IsShader': {
'type': 'Is',
'decoder_func': 'DoIsShader',
'expectation': False,
},
'IsSampler': {
'type': 'Is',
'id_mapping': [ 'Sampler' ],
'expectation': False,
'unsafe': True,
},
'IsSync': {
'type': 'Is',
'id_mapping': [ 'Sync' ],
'cmd_args': 'GLuint sync',
'expectation': False,
'unsafe': True,
},
'IsTexture': {
'type': 'Is',
'decoder_func': 'DoIsTexture',
'expectation': False,
},
'IsTransformFeedback': {
'type': 'Is',
'id_mapping': [ 'TransformFeedback' ],
'expectation': False,
'unsafe': True,
},
'LinkProgram': {
'decoder_func': 'DoLinkProgram',
'impl_func': False,
'trace_level': 1,
},
'MapBufferCHROMIUM': {
'gen_cmd': False,
'extension': "CHROMIUM_pixel_transfer_buffer_object",
'chromium': True,
'client_test': False,
'trace_level': 1,
},
'MapBufferSubDataCHROMIUM': {
'gen_cmd': False,
'extension': True,
'chromium': True,
'client_test': False,
'pepper_interface': 'ChromiumMapSub',
'trace_level': 1,
},
'MapTexSubImage2DCHROMIUM': {
'gen_cmd': False,
'extension': "CHROMIUM_sub_image",
'chromium': True,
'client_test': False,
'pepper_interface': 'ChromiumMapSub',
'trace_level': 1,
},
'MapBufferRange': {
'type': 'Custom',
'data_transfer_methods': ['shm'],
'cmd_args': 'GLenumBufferTarget target, GLintptrNotNegative offset, '
'GLsizeiptr size, GLbitfieldMapBufferAccess access, '
'uint32_t data_shm_id, uint32_t data_shm_offset, '
'uint32_t result_shm_id, uint32_t result_shm_offset',
'unsafe': True,
'result': ['uint32_t'],
'trace_level': 1,
},
'PauseTransformFeedback': {
'unsafe': True,
},
'PixelStorei': {'type': 'Manual'},
'PostSubBufferCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'unit_test': False,
'client_test': False,
'extension': True,
'chromium': True,
},
'ProduceTextureCHROMIUM': {
'decoder_func': 'DoProduceTextureCHROMIUM',
'impl_func': False,
'type': 'PUT',
'count': 64, # GL_MAILBOX_SIZE_CHROMIUM
'unit_test': False,
'client_test': False,
'extension': "CHROMIUM_texture_mailbox",
'chromium': True,
'trace_level': 1,
},
'ProduceTextureDirectCHROMIUM': {
'decoder_func': 'DoProduceTextureDirectCHROMIUM',
'impl_func': False,
'type': 'PUT',
'count': 64, # GL_MAILBOX_SIZE_CHROMIUM
'unit_test': False,
'client_test': False,
'extension': "CHROMIUM_texture_mailbox",
'chromium': True,
'trace_level': 1,
},
'RenderbufferStorage': {
'decoder_func': 'DoRenderbufferStorage',
'gl_test_func': 'glRenderbufferStorageEXT',
'expectation': False,
'trace_level': 1,
},
'RenderbufferStorageMultisampleCHROMIUM': {
'cmd_comment':
'// GL_CHROMIUM_framebuffer_multisample\n',
'decoder_func': 'DoRenderbufferStorageMultisampleCHROMIUM',
'gl_test_func': 'glRenderbufferStorageMultisampleCHROMIUM',
'expectation': False,
'unit_test': False,
'extension_flag': 'chromium_framebuffer_multisample',
'pepper_interface': 'FramebufferMultisample',
'pepper_name': 'RenderbufferStorageMultisampleEXT',
'trace_level': 1,
},
'RenderbufferStorageMultisampleEXT': {
'cmd_comment':
'// GL_EXT_multisampled_render_to_texture\n',
'decoder_func': 'DoRenderbufferStorageMultisampleEXT',
'gl_test_func': 'glRenderbufferStorageMultisampleEXT',
'expectation': False,
'unit_test': False,
'extension_flag': 'multisampled_render_to_texture',
'trace_level': 1,
},
'ReadBuffer': {
'unsafe': True,
'trace_level': 1,
},
'ReadPixels': {
'cmd_comment':
'// ReadPixels has the result separated from the pixel buffer so that\n'
'// it is easier to specify the result going to some specific place\n'
'// that exactly fits the rectangle of pixels.\n',
'type': 'Custom',
'data_transfer_methods': ['shm'],
'impl_func': False,
'client_test': False,
'cmd_args':
'GLint x, GLint y, GLsizei width, GLsizei height, '
'GLenumReadPixelFormat format, GLenumReadPixelType type, '
'uint32_t pixels_shm_id, uint32_t pixels_shm_offset, '
'uint32_t result_shm_id, uint32_t result_shm_offset, '
'GLboolean async',
'result': ['uint32_t'],
'defer_reads': True,
'trace_level': 1,
},
'ReleaseShaderCompiler': {
'decoder_func': 'DoReleaseShaderCompiler',
'unit_test': False,
},
'ResumeTransformFeedback': {
'unsafe': True,
},
'SamplerParameterf': {
'valid_args': {
'2': 'GL_NEAREST'
},
'id_mapping': [ 'Sampler' ],
'unsafe': True,
},
'SamplerParameterfv': {
'type': 'PUT',
'data_value': 'GL_NEAREST',
'count': 1,
'gl_test_func': 'glSamplerParameterf',
'decoder_func': 'DoSamplerParameterfv',
'first_element_only': True,
'id_mapping': [ 'Sampler' ],
'unsafe': True,
},
'SamplerParameteri': {
'valid_args': {
'2': 'GL_NEAREST'
},
'id_mapping': [ 'Sampler' ],
'unsafe': True,
},
'SamplerParameteriv': {
'type': 'PUT',
'data_value': 'GL_NEAREST',
'count': 1,
'gl_test_func': 'glSamplerParameteri',
'decoder_func': 'DoSamplerParameteriv',
'first_element_only': True,
'unsafe': True,
},
'ShaderBinary': {
'type': 'Custom',
'client_test': False,
},
'ShaderSource': {
'type': 'PUTSTR',
'decoder_func': 'DoShaderSource',
'expectation': False,
'data_transfer_methods': ['bucket'],
'cmd_args':
'GLuint shader, const char** str',
'pepper_args':
'GLuint shader, GLsizei count, const char** str, const GLint* length',
},
'StencilMask': {
'type': 'StateSetFrontBack',
'state': 'StencilMask',
'no_gl': True,
'expectation': False,
},
'StencilMaskSeparate': {
'type': 'StateSetFrontBackSeparate',
'state': 'StencilMask',
'no_gl': True,
'expectation': False,
},
'SwapBuffers': {
'impl_func': False,
'decoder_func': 'DoSwapBuffers',
'unit_test': False,
'client_test': False,
'extension': True,
'trace_level': 1,
},
'SwapInterval': {
'impl_func': False,
'decoder_func': 'DoSwapInterval',
'unit_test': False,
'client_test': False,
'extension': True,
'trace_level': 1,
},
'TexImage2D': {
'type': 'Manual',
'data_transfer_methods': ['shm'],
'client_test': False,
'trace_level': 2,
},
'TexImage3D': {
'type': 'Manual',
'data_transfer_methods': ['shm'],
'client_test': False,
'unsafe': True,
'trace_level': 2,
},
'TexParameterf': {
'decoder_func': 'DoTexParameterf',
'valid_args': {
'2': 'GL_NEAREST'
},
},
'TexParameteri': {
'decoder_func': 'DoTexParameteri',
'valid_args': {
'2': 'GL_NEAREST'
},
},
'TexParameterfv': {
'type': 'PUT',
'data_value': 'GL_NEAREST',
'count': 1,
'decoder_func': 'DoTexParameterfv',
'gl_test_func': 'glTexParameterf',
'first_element_only': True,
},
'TexParameteriv': {
'type': 'PUT',
'data_value': 'GL_NEAREST',
'count': 1,
'decoder_func': 'DoTexParameteriv',
'gl_test_func': 'glTexParameteri',
'first_element_only': True,
},
'TexStorage3D': {
'unsafe': True,
'trace_level': 2,
},
'TexSubImage2D': {
'type': 'Manual',
'data_transfer_methods': ['shm'],
'client_test': False,
'trace_level': 2,
'cmd_args': 'GLenumTextureTarget target, GLint level, '
'GLint xoffset, GLint yoffset, '
'GLsizei width, GLsizei height, '
'GLenumTextureFormat format, GLenumPixelType type, '
'const void* pixels, GLboolean internal'
},
'TexSubImage3D': {
'type': 'Manual',
'data_transfer_methods': ['shm'],
'client_test': False,
'trace_level': 2,
'cmd_args': 'GLenumTextureTarget target, GLint level, '
'GLint xoffset, GLint yoffset, GLint zoffset, '
'GLsizei width, GLsizei height, GLsizei depth, '
'GLenumTextureFormat format, GLenumPixelType type, '
'const void* pixels, GLboolean internal',
'unsafe': True,
},
'TransformFeedbackVaryings': {
'type': 'PUTSTR',
'data_transfer_methods': ['bucket'],
'decoder_func': 'DoTransformFeedbackVaryings',
'cmd_args':
'GLuint program, const char** varyings, GLenum buffermode',
'unsafe': True,
},
'Uniform1f': {'type': 'PUTXn', 'count': 1},
'Uniform1fv': {
'type': 'PUTn',
'count': 1,
'decoder_func': 'DoUniform1fv',
},
'Uniform1i': {'decoder_func': 'DoUniform1i', 'unit_test': False},
'Uniform1iv': {
'type': 'PUTn',
'count': 1,
'decoder_func': 'DoUniform1iv',
'unit_test': False,
},
'Uniform1ui': {
'type': 'PUTXn',
'count': 1,
'unsafe': True,
},
'Uniform1uiv': {
'type': 'PUTn',
'count': 1,
'unsafe': True,
},
'Uniform2i': {'type': 'PUTXn', 'count': 2},
'Uniform2f': {'type': 'PUTXn', 'count': 2},
'Uniform2fv': {
'type': 'PUTn',
'count': 2,
'decoder_func': 'DoUniform2fv',
},
'Uniform2iv': {
'type': 'PUTn',
'count': 2,
'decoder_func': 'DoUniform2iv',
},
'Uniform2ui': {
'type': 'PUTXn',
'count': 2,
'unsafe': True,
},
'Uniform2uiv': {
'type': 'PUTn',
'count': 2,
'unsafe': True,
},
'Uniform3i': {'type': 'PUTXn', 'count': 3},
'Uniform3f': {'type': 'PUTXn', 'count': 3},
'Uniform3fv': {
'type': 'PUTn',
'count': 3,
'decoder_func': 'DoUniform3fv',
},
'Uniform3iv': {
'type': 'PUTn',
'count': 3,
'decoder_func': 'DoUniform3iv',
},
'Uniform3ui': {
'type': 'PUTXn',
'count': 3,
'unsafe': True,
},
'Uniform3uiv': {
'type': 'PUTn',
'count': 3,
'unsafe': True,
},
'Uniform4i': {'type': 'PUTXn', 'count': 4},
'Uniform4f': {'type': 'PUTXn', 'count': 4},
'Uniform4fv': {
'type': 'PUTn',
'count': 4,
'decoder_func': 'DoUniform4fv',
},
'Uniform4iv': {
'type': 'PUTn',
'count': 4,
'decoder_func': 'DoUniform4iv',
},
'Uniform4ui': {
'type': 'PUTXn',
'count': 4,
'unsafe': True,
},
'Uniform4uiv': {
'type': 'PUTn',
'count': 4,
'unsafe': True,
},
'UniformMatrix2fv': {
'type': 'PUTn',
'count': 4,
'decoder_func': 'DoUniformMatrix2fv',
},
'UniformMatrix2x3fv': {
'type': 'PUTn',
'count': 6,
'unsafe': True,
},
'UniformMatrix2x4fv': {
'type': 'PUTn',
'count': 8,
'unsafe': True,
},
'UniformMatrix3fv': {
'type': 'PUTn',
'count': 9,
'decoder_func': 'DoUniformMatrix3fv',
},
'UniformMatrix3x2fv': {
'type': 'PUTn',
'count': 6,
'unsafe': True,
},
'UniformMatrix3x4fv': {
'type': 'PUTn',
'count': 12,
'unsafe': True,
},
'UniformMatrix4fv': {
'type': 'PUTn',
'count': 16,
'decoder_func': 'DoUniformMatrix4fv',
},
'UniformMatrix4x2fv': {
'type': 'PUTn',
'count': 8,
'unsafe': True,
},
'UniformMatrix4x3fv': {
'type': 'PUTn',
'count': 12,
'unsafe': True,
},
'UniformBlockBinding': {
'type': 'Custom',
'impl_func': False,
'unsafe': True,
},
'UnmapBufferCHROMIUM': {
'gen_cmd': False,
'extension': "CHROMIUM_pixel_transfer_buffer_object",
'chromium': True,
'client_test': False,
'trace_level': 1,
},
'UnmapBufferSubDataCHROMIUM': {
'gen_cmd': False,
'extension': True,
'chromium': True,
'client_test': False,
'pepper_interface': 'ChromiumMapSub',
'trace_level': 1,
},
'UnmapBuffer': {
'type': 'Custom',
'unsafe': True,
'trace_level': 1,
},
'UnmapTexSubImage2DCHROMIUM': {
'gen_cmd': False,
'extension': "CHROMIUM_sub_image",
'chromium': True,
'client_test': False,
'pepper_interface': 'ChromiumMapSub',
'trace_level': 1,
},
'UseProgram': {
'type': 'Bind',
'decoder_func': 'DoUseProgram',
},
'ValidateProgram': {'decoder_func': 'DoValidateProgram'},
'VertexAttrib1f': {'decoder_func': 'DoVertexAttrib1f'},
'VertexAttrib1fv': {
'type': 'PUT',
'count': 1,
'decoder_func': 'DoVertexAttrib1fv',
},
'VertexAttrib2f': {'decoder_func': 'DoVertexAttrib2f'},
'VertexAttrib2fv': {
'type': 'PUT',
'count': 2,
'decoder_func': 'DoVertexAttrib2fv',
},
'VertexAttrib3f': {'decoder_func': 'DoVertexAttrib3f'},
'VertexAttrib3fv': {
'type': 'PUT',
'count': 3,
'decoder_func': 'DoVertexAttrib3fv',
},
'VertexAttrib4f': {'decoder_func': 'DoVertexAttrib4f'},
'VertexAttrib4fv': {
'type': 'PUT',
'count': 4,
'decoder_func': 'DoVertexAttrib4fv',
},
'VertexAttribI4i': {
'unsafe': True,
'decoder_func': 'DoVertexAttribI4i',
},
'VertexAttribI4iv': {
'type': 'PUT',
'count': 4,
'unsafe': True,
'decoder_func': 'DoVertexAttribI4iv',
},
'VertexAttribI4ui': {
'unsafe': True,
'decoder_func': 'DoVertexAttribI4ui',
},
'VertexAttribI4uiv': {
'type': 'PUT',
'count': 4,
'unsafe': True,
'decoder_func': 'DoVertexAttribI4uiv',
},
'VertexAttribIPointer': {
'type': 'Manual',
'cmd_args': 'GLuint indx, GLintVertexAttribSize size, '
'GLenumVertexAttribIType type, GLsizei stride, '
'GLuint offset',
'client_test': False,
'unsafe': True,
},
'VertexAttribPointer': {
'type': 'Manual',
'cmd_args': 'GLuint indx, GLintVertexAttribSize size, '
'GLenumVertexAttribType type, GLboolean normalized, '
'GLsizei stride, GLuint offset',
'client_test': False,
},
'WaitSync': {
'type': 'Custom',
'cmd_args': 'GLuint sync, GLbitfieldSyncFlushFlags flags, '
'GLuint timeout_0, GLuint timeout_1',
'impl_func': False,
'client_test': False,
'unsafe': True,
'trace_level': 1,
},
'Scissor': {
'type': 'StateSet',
'state': 'Scissor',
},
'Viewport': {
'decoder_func': 'DoViewport',
},
'ResizeCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'unit_test': False,
'extension': True,
'chromium': True,
'trace_level': 1,
},
'GetRequestableExtensionsCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'cmd_args': 'uint32_t bucket_id',
'extension': True,
'chromium': True,
},
'RequestExtensionCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'client_test': False,
'cmd_args': 'uint32_t bucket_id',
'extension': True,
'chromium': True,
},
'RateLimitOffscreenContextCHROMIUM': {
'gen_cmd': False,
'extension': True,
'chromium': True,
'client_test': False,
},
'CreateStreamTextureCHROMIUM': {
'type': 'HandWritten',
'impl_func': False,
'gen_cmd': False,
'extension': True,
'chromium': True,
'trace_level': 1,
},
'TexImageIOSurface2DCHROMIUM': {
'decoder_func': 'DoTexImageIOSurface2DCHROMIUM',
'unit_test': False,
'extension': True,
'chromium': True,
'trace_level': 1,
},
'CopyTextureCHROMIUM': {
'decoder_func': 'DoCopyTextureCHROMIUM',
'unit_test': False,
'extension': "CHROMIUM_copy_texture",
'chromium': True,
'trace_level': 2,
},
'CopySubTextureCHROMIUM': {
'decoder_func': 'DoCopySubTextureCHROMIUM',
'unit_test': False,
'extension': "CHROMIUM_copy_texture",
'chromium': True,
'trace_level': 2,
},
'CompressedCopyTextureCHROMIUM': {
'decoder_func': 'DoCompressedCopyTextureCHROMIUM',
'unit_test': False,
'extension': True,
'chromium': True,
},
'TexStorage2DEXT': {
'unit_test': False,
'extension': True,
'decoder_func': 'DoTexStorage2DEXT',
'trace_level': 2,
},
'DrawArraysInstancedANGLE': {
'type': 'Manual',
'cmd_args': 'GLenumDrawMode mode, GLint first, GLsizei count, '
'GLsizei primcount',
'extension': True,
'unit_test': False,
'pepper_interface': 'InstancedArrays',
'defer_draws': True,
'trace_level': 2,
},
'DrawBuffersEXT': {
'type': 'PUTn',
'decoder_func': 'DoDrawBuffersEXT',
'count': 1,
'client_test': False,
'unit_test': False,
# could use 'extension_flag': 'ext_draw_buffers' but currently expected to
# work without.
'extension': True,
'pepper_interface': 'DrawBuffers',
'trace_level': 2,
},
'DrawElementsInstancedANGLE': {
'type': 'Manual',
'cmd_args': 'GLenumDrawMode mode, GLsizei count, '
'GLenumIndexType type, GLuint index_offset, GLsizei primcount',
'extension': True,
'unit_test': False,
'client_test': False,
'pepper_interface': 'InstancedArrays',
'defer_draws': True,
'trace_level': 2,
},
'VertexAttribDivisorANGLE': {
'type': 'Manual',
'cmd_args': 'GLuint index, GLuint divisor',
'extension': True,
'unit_test': False,
'pepper_interface': 'InstancedArrays',
},
'GenQueriesEXT': {
'type': 'GENn',
'gl_test_func': 'glGenQueriesARB',
'resource_type': 'Query',
'resource_types': 'Queries',
'unit_test': False,
'pepper_interface': 'Query',
'not_shared': 'True',
'extension': "occlusion_query_EXT",
},
'DeleteQueriesEXT': {
'type': 'DELn',
'gl_test_func': 'glDeleteQueriesARB',
'resource_type': 'Query',
'resource_types': 'Queries',
'unit_test': False,
'pepper_interface': 'Query',
'extension': "occlusion_query_EXT",
},
'IsQueryEXT': {
'gen_cmd': False,
'client_test': False,
'pepper_interface': 'Query',
'extension': "occlusion_query_EXT",
},
'BeginQueryEXT': {
'type': 'Manual',
'cmd_args': 'GLenumQueryTarget target, GLidQuery id, void* sync_data',
'data_transfer_methods': ['shm'],
'gl_test_func': 'glBeginQuery',
'pepper_interface': 'Query',
'extension': "occlusion_query_EXT",
},
'BeginTransformFeedback': {
'unsafe': True,
},
'EndQueryEXT': {
'type': 'Manual',
'cmd_args': 'GLenumQueryTarget target, GLuint submit_count',
    'gl_test_func': 'glEndQuery',
'client_test': False,
'pepper_interface': 'Query',
'extension': "occlusion_query_EXT",
},
'EndTransformFeedback': {
'unsafe': True,
},
'GetQueryivEXT': {
'gen_cmd': False,
'client_test': False,
'gl_test_func': 'glGetQueryiv',
'pepper_interface': 'Query',
'extension': "occlusion_query_EXT",
},
'GetQueryObjectuivEXT': {
'gen_cmd': False,
'client_test': False,
'gl_test_func': 'glGetQueryObjectuiv',
'pepper_interface': 'Query',
'extension': "occlusion_query_EXT",
},
'BindUniformLocationCHROMIUM': {
'type': 'GLchar',
'extension': True,
'data_transfer_methods': ['bucket'],
'needs_size': True,
'gl_test_func': 'DoBindUniformLocationCHROMIUM',
},
'InsertEventMarkerEXT': {
'type': 'GLcharN',
'decoder_func': 'DoInsertEventMarkerEXT',
'expectation': False,
'extension': True,
},
'PushGroupMarkerEXT': {
'type': 'GLcharN',
'decoder_func': 'DoPushGroupMarkerEXT',
'expectation': False,
'extension': True,
},
'PopGroupMarkerEXT': {
'decoder_func': 'DoPopGroupMarkerEXT',
'expectation': False,
'extension': True,
'impl_func': False,
},
'GenVertexArraysOES': {
'type': 'GENn',
'extension': True,
'gl_test_func': 'glGenVertexArraysOES',
'resource_type': 'VertexArray',
'resource_types': 'VertexArrays',
'unit_test': False,
'pepper_interface': 'VertexArrayObject',
},
'BindVertexArrayOES': {
'type': 'Bind',
'extension': True,
'gl_test_func': 'glBindVertexArrayOES',
'decoder_func': 'DoBindVertexArrayOES',
'gen_func': 'GenVertexArraysOES',
'unit_test': False,
'client_test': False,
'pepper_interface': 'VertexArrayObject',
},
'DeleteVertexArraysOES': {
'type': 'DELn',
'extension': True,
'gl_test_func': 'glDeleteVertexArraysOES',
'resource_type': 'VertexArray',
'resource_types': 'VertexArrays',
'unit_test': False,
'pepper_interface': 'VertexArrayObject',
},
'IsVertexArrayOES': {
'type': 'Is',
'extension': True,
'gl_test_func': 'glIsVertexArrayOES',
'decoder_func': 'DoIsVertexArrayOES',
'expectation': False,
'unit_test': False,
'pepper_interface': 'VertexArrayObject',
},
'BindTexImage2DCHROMIUM': {
'decoder_func': 'DoBindTexImage2DCHROMIUM',
'unit_test': False,
'extension': "CHROMIUM_image",
'chromium': True,
},
'ReleaseTexImage2DCHROMIUM': {
'decoder_func': 'DoReleaseTexImage2DCHROMIUM',
'unit_test': False,
'extension': "CHROMIUM_image",
'chromium': True,
},
'ShallowFinishCHROMIUM': {
'impl_func': False,
'gen_cmd': False,
'extension': True,
'chromium': True,
'client_test': False,
},
'ShallowFlushCHROMIUM': {
'impl_func': False,
'gen_cmd': False,
'extension': "CHROMIUM_miscellaneous",
'chromium': True,
'client_test': False,
},
'OrderingBarrierCHROMIUM': {
'impl_func': False,
'gen_cmd': False,
'extension': True,
'chromium': True,
'client_test': False,
},
'TraceBeginCHROMIUM': {
'type': 'Custom',
'impl_func': False,
'client_test': False,
'cmd_args': 'GLuint category_bucket_id, GLuint name_bucket_id',
'extension': True,
'chromium': True,
},
'TraceEndCHROMIUM': {
'impl_func': False,
'client_test': False,
'decoder_func': 'DoTraceEndCHROMIUM',
'unit_test': False,
'extension': True,
'chromium': True,
},
'AsyncTexImage2DCHROMIUM': {
'type': 'Manual',
'data_transfer_methods': ['shm'],
'client_test': False,
'cmd_args': 'GLenumTextureTarget target, GLint level, '
'GLintTextureInternalFormat internalformat, '
'GLsizei width, GLsizei height, '
'GLintTextureBorder border, '
'GLenumTextureFormat format, GLenumPixelType type, '
'const void* pixels, '
'uint32_t async_upload_token, '
'void* sync_data',
'extension': True,
'chromium': True,
'trace_level': 2,
},
'AsyncTexSubImage2DCHROMIUM': {
'type': 'Manual',
'data_transfer_methods': ['shm'],
'client_test': False,
'cmd_args': 'GLenumTextureTarget target, GLint level, '
'GLint xoffset, GLint yoffset, '
'GLsizei width, GLsizei height, '
'GLenumTextureFormat format, GLenumPixelType type, '
'const void* data, '
'uint32_t async_upload_token, '
'void* sync_data',
'extension': True,
'chromium': True,
'trace_level': 2,
},
'WaitAsyncTexImage2DCHROMIUM': {
'type': 'Manual',
'client_test': False,
'extension': True,
'chromium': True,
'trace_level': 1,
},
'WaitAllAsyncTexImage2DCHROMIUM': {
'type': 'Manual',
'client_test': False,
'extension': True,
'chromium': True,
'trace_level': 1,
},
'DiscardFramebufferEXT': {
'type': 'PUTn',
'count': 1,
'decoder_func': 'DoDiscardFramebufferEXT',
'unit_test': False,
'client_test': False,
'extension_flag': 'ext_discard_framebuffer',
'trace_level': 2,
},
'LoseContextCHROMIUM': {
'decoder_func': 'DoLoseContextCHROMIUM',
'unit_test': False,
'extension': True,
'chromium': True,
'trace_level': 1,
},
'InsertSyncPointCHROMIUM': {
'type': 'HandWritten',
'impl_func': False,
'extension': "CHROMIUM_sync_point",
'chromium': True,
'trace_level': 1,
},
'WaitSyncPointCHROMIUM': {
'type': 'Custom',
'impl_func': True,
'extension': "CHROMIUM_sync_point",
'chromium': True,
'trace_level': 1,
},
'DiscardBackbufferCHROMIUM': {
'type': 'Custom',
'impl_func': True,
'extension': True,
'chromium': True,
'trace_level': 2,
},
'ScheduleOverlayPlaneCHROMIUM': {
'type': 'Custom',
'impl_func': True,
'unit_test': False,
'client_test': False,
'extension': True,
'chromium': True,
},
'MatrixLoadfCHROMIUM': {
'type': 'PUT',
'count': 16,
'data_type': 'GLfloat',
'decoder_func': 'DoMatrixLoadfCHROMIUM',
'gl_test_func': 'glMatrixLoadfEXT',
'chromium': True,
'extension': True,
'extension_flag': 'chromium_path_rendering',
},
'MatrixLoadIdentityCHROMIUM': {
'decoder_func': 'DoMatrixLoadIdentityCHROMIUM',
'gl_test_func': 'glMatrixLoadIdentityEXT',
'chromium': True,
'extension': True,
'extension_flag': 'chromium_path_rendering',
},
}


def Grouper(n, iterable, fillvalue=None):
"""Collect data into fixed-length chunks or blocks"""
args = [iter(iterable)] * n
return itertools.izip_longest(fillvalue=fillvalue, *args)
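# Illustrative doctest-style example of Grouper (not executed by this
# script); note that itertools.izip_longest is the Python 2 spelling of
# itertools.zip_longest:
#   >>> list(Grouper(3, 'ABCDEFG', 'x'))
#   [('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')]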


def SplitWords(input_string):
"""Split by '_' if found, otherwise split at uppercase/numeric chars.
Will split "some_TEXT" into ["some", "TEXT"], "CamelCase" into ["Camel",
"Case"], and "Vector3" into ["Vector", "3"].
"""
if input_string.find('_') > -1:
# 'some_TEXT_' -> 'some TEXT'
return input_string.replace('_', ' ').strip().split()
else:
if re.search('[A-Z]', input_string) and re.search('[a-z]', input_string):
# mixed case.
# look for capitalization to cut input_strings
# 'SomeText' -> 'Some Text'
input_string = re.sub('([A-Z])', r' \1', input_string).strip()
# 'Vector3' -> 'Vector 3'
input_string = re.sub('([^0-9])([0-9])', r'\1 \2', input_string)
return input_string.split()


def ToUnderscore(input_string):
"""converts CamelCase to camel_case."""
words = SplitWords(input_string)
return '_'.join([word.lower() for word in words])
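# Illustrative examples of ToUnderscore (not executed by this script):
#   >>> ToUnderscore('BindBufferRange')
#   'bind_buffer_range'
#   >>> ToUnderscore('TexImage2D')
#   'tex_image_2_d'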


def CachedStateName(item):
if item.get('cached', False):
return 'cached_' + item['name']
return item['name']
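# Illustrative example of CachedStateName; the item dict is hypothetical:
#   >>> CachedStateName({'name': 'blend_color', 'cached': True})
#   'cached_blend_color'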


def ToGLExtensionString(extension_flag):
"""Returns GL-type extension string of a extension flag."""
if extension_flag == "oes_compressed_etc1_rgb8_texture":
return "OES_compressed_ETC1_RGB8_texture" # Fixup inconsitency with rgb8,
# unfortunate.
uppercase_words = [ 'img', 'ext', 'arb', 'chromium', 'oes', 'amd', 'bgra8888',
'egl', 'atc', 'etc1', 'angle']
parts = extension_flag.split('_')
return "_".join(
[part.upper() if part in uppercase_words else part for part in parts])
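# Illustrative examples of ToGLExtensionString (not executed by this script):
#   >>> ToGLExtensionString('chromium_framebuffer_multisample')
#   'CHROMIUM_framebuffer_multisample'
#   >>> ToGLExtensionString('ext_draw_buffers')
#   'EXT_draw_buffers'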


def ToCamelCase(input_string):
"""converts ABC_underscore_case to ABCUnderscoreCase."""
return ''.join(w[0].upper() + w[1:] for w in input_string.split('_'))
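# Illustrative example of ToCamelCase; note that letters after the first in
# each word keep their original case:
#   >>> ToCamelCase('bind_buffer_range')
#   'BindBufferRange'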


def GetGLGetTypeConversion(result_type, value_type, value):
"""Makes a gl compatible type conversion string for accessing state variables.
Useful when accessing state variables through glGetXXX calls.
glGet documetation (for example, the manual pages):
[...] If glGetIntegerv is called, [...] most floating-point values are
rounded to the nearest integer value. [...]
Args:
result_type: the gl type to be obtained
value_type: the GL type of the state variable
value: the name of the state variable
Returns:
String that converts the state variable to desired GL type according to GL
rules.
"""
if result_type == 'GLint':
if value_type == 'GLfloat':
return 'static_cast<GLint>(round(%s))' % value
return 'static_cast<%s>(%s)' % (result_type, value)
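# Illustrative examples of GetGLGetTypeConversion; the state variable name is
# hypothetical:
#   >>> GetGLGetTypeConversion('GLint', 'GLfloat', 'state.line_width')
#   'static_cast<GLint>(round(state.line_width))'
#   >>> GetGLGetTypeConversion('GLfloat', 'GLint', 'state.line_width')
#   'static_cast<GLfloat>(state.line_width)'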


class CWriter(object):
"""Writes to a file formatting it for Google's style guidelines."""
def __init__(self, filename):
self.filename = filename
self.content = []
def Write(self, string):
"""Writes a string to a file spliting if it's > 80 characters."""
lines = string.splitlines()
num_lines = len(lines)
for ii in range(0, num_lines):
self.content.append(lines[ii])
if ii < (num_lines - 1) or string[-1] == '\n':
self.content.append('\n')
def Close(self):
"""Close the file."""
content = "".join(self.content)
write_file = True
if os.path.exists(self.filename):
old_file = open(self.filename, "rb");
old_content = old_file.read()
old_file.close();
if content == old_content:
write_file = False
if write_file:
file = open(self.filename, "wb")
file.write(content)
file.close()
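# Typical CWriter usage (sketch; the filename is hypothetical):
#   writer = CWriter('some_autogen.h')
#   writer.Write('// some generated code\n')
#   writer.Close()
# Close() only rewrites the file when its content actually changed, which
# presumably keeps timestamps stable and avoids triggering spurious rebuilds.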


class CHeaderWriter(CWriter):
"""Writes a C Header file."""
_non_alnum_re = re.compile(r'[^a-zA-Z0-9]')
  def __init__(self, filename, file_comment=None):
CWriter.__init__(self, filename)
base = os.path.abspath(filename)
while os.path.basename(base) != 'src':
new_base = os.path.dirname(base)
assert new_base != base # Prevent infinite loop.
base = new_base
hpath = os.path.relpath(filename, base)
self.guard = self._non_alnum_re.sub('_', hpath).upper() + '_'
self.Write(_LICENSE)
self.Write(_DO_NOT_EDIT_WARNING)
    if file_comment is not None:
self.Write(file_comment)
self.Write("#ifndef %s\n" % self.guard)
self.Write("#define %s\n\n" % self.guard)
def Close(self):
self.Write("#endif // %s\n\n" % self.guard)
CWriter.Close(self)
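# CHeaderWriter derives the include guard from the header's path relative to
# the 'src' directory, with non-alphanumeric characters replaced by
# underscores. For an illustrative path:
#   src/gpu/command_buffer/common/foo_autogen.h
#     -> GPU_COMMAND_BUFFER_COMMON_FOO_AUTOGEN_H_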


class TypeHandler(object):
"""This class emits code for a particular type of function."""
_remove_expected_call_re = re.compile(r' EXPECT_CALL.*?;\n', re.S)
def __init__(self):
pass
def InitFunction(self, func):
"""Add or adjust anything type specific for this function."""
if func.GetInfo('needs_size') and not func.name.endswith('Bucket'):
func.AddCmdArg(DataSizeArgument('data_size'))
def NeedsDataTransferFunction(self, func):
"""Overriden from TypeHandler."""
return func.num_pointer_args >= 1
def WriteStruct(self, func, file):
"""Writes a structure that matches the arguments to a function."""
comment = func.GetInfo('cmd_comment')
    if comment is not None:
file.Write(comment)
file.Write("struct %s {\n" % func.name)
file.Write(" typedef %s ValueType;\n" % func.name)
file.Write(" static const CommandId kCmdId = k%s;\n" % func.name)
func.WriteCmdArgFlag(file)
func.WriteCmdFlag(file)
file.Write("\n")
result = func.GetInfo('result')
    if result is not None:
if len(result) == 1:
file.Write(" typedef %s Result;\n\n" % result[0])
else:
file.Write(" struct Result {\n")
for line in result:
file.Write(" %s;\n" % line)
file.Write(" };\n\n")
func.WriteCmdComputeSize(file)
func.WriteCmdSetHeader(file)
func.WriteCmdInit(file)
func.WriteCmdSet(file)
file.Write(" gpu::CommandHeader header;\n")
args = func.GetCmdArgs()
for arg in args:
file.Write(" %s %s;\n" % (arg.cmd_type, arg.name))
consts = func.GetCmdConstants()
for const in consts:
file.Write(" static const %s %s = %s;\n" %
(const.cmd_type, const.name, const.GetConstantValue()))
file.Write("};\n")
file.Write("\n")
size = len(args) * _SIZE_OF_UINT32 + _SIZE_OF_COMMAND_HEADER
file.Write("static_assert(sizeof(%s) == %d,\n" % (func.name, size))
file.Write(" \"size of %s should be %d\");\n" %
(func.name, size))
file.Write("static_assert(offsetof(%s, header) == 0,\n" % func.name)
file.Write(" \"offset of %s header should be 0\");\n" %
func.name)
offset = _SIZE_OF_COMMAND_HEADER
for arg in args:
file.Write("static_assert(offsetof(%s, %s) == %d,\n" %
(func.name, arg.name, offset))
file.Write(" \"offset of %s %s should be %d\");\n" %
(func.name, arg.name, offset))
offset += _SIZE_OF_UINT32
    if result is not None and len(result) > 1:
      offset = 0
for line in result:
parts = line.split()
name = parts[-1]
check = """
static_assert(offsetof(%(cmd_name)s::Result, %(field_name)s) == %(offset)d,
"offset of %(cmd_name)s Result %(field_name)s should be "
"%(offset)d");
"""
file.Write((check.strip() + "\n") % {
'cmd_name': func.name,
'field_name': name,
'offset': offset,
})
offset += _SIZE_OF_UINT32
file.Write("\n")
def WriteHandlerImplementation(self, func, file):
"""Writes the handler implementation for this command."""
if func.IsUnsafe() and func.GetInfo('id_mapping'):
code_no_gen = """ if (!group_->Get%(type)sServiceId(
%(var)s, &%(service_var)s)) {
LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, "%(func)s", "invalid %(var)s id");
return error::kNoError;
}
"""
code_gen = """ if (!group_->Get%(type)sServiceId(
%(var)s, &%(service_var)s)) {
if (!group_->bind_generates_resource()) {
LOCAL_SET_GL_ERROR(
GL_INVALID_OPERATION, "%(func)s", "invalid %(var)s id");
return error::kNoError;
}
GLuint client_id = %(var)s;
gl%(gen_func)s(1, &%(service_var)s);
Create%(type)s(client_id, %(service_var)s);
}
"""
gen_func = func.GetInfo('gen_func')
for id_type in func.GetInfo('id_mapping'):
service_var = id_type.lower()
if id_type == 'Sync':
service_var = "service_%s" % service_var
file.Write(" GLsync %s = 0;\n" % service_var)
if gen_func and id_type in gen_func:
file.Write(code_gen % { 'type': id_type,
'var': id_type.lower(),
'service_var': service_var,
'func': func.GetGLFunctionName(),
'gen_func': gen_func })
else:
file.Write(code_no_gen % { 'type': id_type,
'var': id_type.lower(),
'service_var': service_var,
'func': func.GetGLFunctionName() })
args = []
for arg in func.GetOriginalArgs():
if arg.type == "GLsync":
args.append("service_%s" % arg.name)
elif arg.name.endswith("size") and arg.type == "GLsizei":
args.append("num_%s" % func.GetLastOriginalArg().name)
elif arg.name == "length":
args.append("nullptr")
else:
args.append(arg.name)
file.Write(" %s(%s);\n" %
(func.GetGLFunctionName(), ", ".join(args)))
def WriteCmdSizeTest(self, func, file):
"""Writes the size test for a command."""
file.Write(" EXPECT_EQ(sizeof(cmd), cmd.header.size * 4u);\n")
def WriteFormatTest(self, func, file):
"""Writes a format test for a command."""
file.Write("TEST_F(GLES2FormatTest, %s) {\n" % func.name)
file.Write(" cmds::%s& cmd = *GetBufferAs<cmds::%s>();\n" %
(func.name, func.name))
file.Write(" void* next_cmd = cmd.Set(\n")
file.Write(" &cmd")
args = func.GetCmdArgs()
for value, arg in enumerate(args):
file.Write(",\n static_cast<%s>(%d)" % (arg.type, value + 11))
file.Write(");\n")
file.Write(" EXPECT_EQ(static_cast<uint32_t>(cmds::%s::kCmdId),\n" %
func.name)
file.Write(" cmd.header.command);\n")
func.type_handler.WriteCmdSizeTest(func, file)
for value, arg in enumerate(args):
file.Write(" EXPECT_EQ(static_cast<%s>(%d), cmd.%s);\n" %
(arg.type, value + 11, arg.name))
file.Write(" CheckBytesWrittenMatchesExpectedSize(\n")
file.Write(" next_cmd, sizeof(cmd));\n")
file.Write("}\n")
file.Write("\n")
def WriteImmediateFormatTest(self, func, file):
"""Writes a format test for an immediate version of a command."""
pass
def WriteBucketFormatTest(self, func, file):
"""Writes a format test for a bucket version of a command."""
pass
def WriteGetDataSizeCode(self, func, file):
"""Writes the code to set data_size used in validation"""
pass
def WriteImmediateCmdSizeTest(self, func, file):
"""Writes a size test for an immediate version of a command."""
file.Write(" // TODO(gman): Compute correct size.\n")
file.Write(" EXPECT_EQ(sizeof(cmd), cmd.header.size * 4u);\n")
def __WriteIdMapping(self, func, file):
"""Writes client side / service side ID mapping."""
if not func.IsUnsafe() or not func.GetInfo('id_mapping'):
return
for id_type in func.GetInfo('id_mapping'):
file.Write(" group_->Get%sServiceId(%s, &%s);\n" %
(id_type, id_type.lower(), id_type.lower()))
  def WriteImmediateHandlerImplementation(self, func, file):
"""Writes the handler impl for the immediate version of a command."""
self.__WriteIdMapping(func, file)
file.Write(" %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
  def WriteBucketHandlerImplementation(self, func, file):
"""Writes the handler impl for the bucket version of a command."""
self.__WriteIdMapping(func, file)
file.Write(" %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
def WriteServiceHandlerFunctionHeader(self, func, file):
"""Writes function header for service implementation handlers."""
file.Write("""error::Error GLES2DecoderImpl::Handle%(name)s(
uint32_t immediate_data_size, const void* cmd_data) {
""" % {'name': func.name})
if func.IsUnsafe():
file.Write("""if (!unsafe_es3_apis_enabled())
return error::kUnknownCommand;
""")
file.Write("""const gles2::cmds::%(name)s& c =
*static_cast<const gles2::cmds::%(name)s*>(cmd_data);
(void)c;
""" % {'name': func.name})
def WriteServiceImplementation(self, func, file):
"""Writes the service implementation for a command."""
self.WriteServiceHandlerFunctionHeader(func, file)
self.WriteHandlerExtensionCheck(func, file)
    self.WriteHandlerDeferReadWrite(func, file)
if len(func.GetOriginalArgs()) > 0:
last_arg = func.GetLastOriginalArg()
all_but_last_arg = func.GetOriginalArgs()[:-1]
for arg in all_but_last_arg:
arg.WriteGetCode(file)
self.WriteGetDataSizeCode(func, file)
last_arg.WriteGetCode(file)
func.WriteHandlerValidation(file)
func.WriteHandlerImplementation(file)
file.Write(" return error::kNoError;\n")
file.Write("}\n")
file.Write("\n")
def WriteImmediateServiceImplementation(self, func, file):
"""Writes the service implementation for an immediate version of command."""
self.WriteServiceHandlerFunctionHeader(func, file)
self.WriteHandlerExtensionCheck(func, file)
    self.WriteHandlerDeferReadWrite(func, file)
for arg in func.GetOriginalArgs():
if arg.IsPointer():
self.WriteGetDataSizeCode(func, file)
arg.WriteGetCode(file)
func.WriteHandlerValidation(file)
func.WriteHandlerImplementation(file)
file.Write(" return error::kNoError;\n")
file.Write("}\n")
file.Write("\n")
def WriteBucketServiceImplementation(self, func, file):
"""Writes the service implementation for a bucket version of command."""
self.WriteServiceHandlerFunctionHeader(func, file)
self.WriteHandlerExtensionCheck(func, file)
    self.WriteHandlerDeferReadWrite(func, file)
for arg in func.GetCmdArgs():
arg.WriteGetCode(file)
func.WriteHandlerValidation(file)
func.WriteHandlerImplementation(file)
file.Write(" return error::kNoError;\n")
file.Write("}\n")
file.Write("\n")
def WriteHandlerExtensionCheck(self, func, file):
if func.GetInfo('extension_flag'):
file.Write(" if (!features().%s) {\n" % func.GetInfo('extension_flag'))
file.Write(" LOCAL_SET_GL_ERROR(GL_INVALID_OPERATION, \"gl%s\","
" \"function not available\");\n" % func.original_name)
file.Write(" return error::kNoError;")
file.Write(" }\n\n")
def WriteHandlerDeferReadWrite(self, func, file):
"""Writes the code to handle deferring reads or writes."""
defer_draws = func.GetInfo('defer_draws')
defer_reads = func.GetInfo('defer_reads')
if defer_draws or defer_reads:
file.Write(" error::Error error;\n")
if defer_draws:
file.Write(" error = WillAccessBoundFramebufferForDraw();\n")
file.Write(" if (error != error::kNoError)\n")
file.Write(" return error;\n")
if defer_reads:
file.Write(" error = WillAccessBoundFramebufferForRead();\n")
file.Write(" if (error != error::kNoError)\n")
file.Write(" return error;\n")
def WriteValidUnitTest(self, func, file, test, *extras):
"""Writes a valid unit test for the service implementation."""
if func.GetInfo('expectation') == False:
test = self._remove_expected_call_re.sub('', test)
name = func.name
    arg_strings = [
      arg.GetValidArg(func)
      for arg in func.GetOriginalArgs() if not arg.IsConstant()
    ]
    gl_arg_strings = [
      arg.GetValidGLArg(func)
      for arg in func.GetOriginalArgs()
    ]
gl_func_name = func.GetGLTestFunctionName()
vars = {
'name':name,
'gl_func_name': gl_func_name,
'args': ", ".join(arg_strings),
'gl_args': ", ".join(gl_arg_strings),
}
for extra in extras:
vars.update(extra)
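    # Expand to a fixed point: a substituted value may itself contain
    # %(...)s placeholders (e.g. injected via the extras above), so keep
    # applying vars until the template stops changing.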
old_test = ""
while (old_test != test):
old_test = test
test = test % vars
file.Write(test % vars)
def WriteInvalidUnitTest(self, func, file, test, *extras):
"""Writes an invalid unit test for the service implementation."""
if func.IsUnsafe():
return
for invalid_arg_index, invalid_arg in enumerate(func.GetOriginalArgs()):
      # The service implementation does not test constants, because they are
      # not part of the command on the service side.
if invalid_arg.IsConstant():
continue
num_invalid_values = invalid_arg.GetNumInvalidValues(func)
for value_index in range(0, num_invalid_values):
arg_strings = []
parse_result = "kNoError"
gl_error = None
for arg in func.GetOriginalArgs():
if arg.IsConstant():
continue
if invalid_arg is arg:
(arg_string, parse_result, gl_error) = arg.GetInvalidArg(
value_index)
else:
arg_string = arg.GetValidArg(func)
arg_strings.append(arg_string)
gl_arg_strings = []
for arg in func.GetOriginalArgs():
gl_arg_strings.append("_")
gl_func_name = func.GetGLTestFunctionName()
gl_error_test = ''
        if gl_error is not None:
gl_error_test = '\n EXPECT_EQ(%s, GetGLError());' % gl_error
vars = {
'name': func.name,
'arg_index': invalid_arg_index,
'value_index': value_index,
'gl_func_name': gl_func_name,
'args': ", ".join(arg_strings),
'all_but_last_args': ", ".join(arg_strings[:-1]),
'gl_args': ", ".join(gl_arg_strings),
'parse_result': parse_result,
'gl_error_test': gl_error_test,
}
for extra in extras:
vars.update(extra)
file.Write(test % vars)
def WriteServiceUnitTest(self, func, file, *extras):
"""Writes the service unit test for a command."""
if func.name == 'Enable':
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
SetupExpectationsForEnableDisable(%(gl_args)s, true);
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);"""
elif func.name == 'Disable':
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
SetupExpectationsForEnableDisable(%(gl_args)s, false);
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);"""
else:
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
decoder_->set_unsafe_es3_apis_enabled(false);
EXPECT_EQ(error::kUnknownCommand, ExecuteCmd(cmd));
}
"""
else:
valid_test += """
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
self.WriteValidUnitTest(func, file, valid_test, *extras)
if not func.IsUnsafe():
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, file, invalid_test, *extras)
def WriteImmediateServiceUnitTest(self, func, file, *extras):
"""Writes the service unit test for an immediate command."""
file.Write("// TODO(gman): %s\n" % func.name)
def WriteImmediateValidationCode(self, func, file):
"""Writes the validation code for an immediate version of a command."""
pass
def WriteBucketServiceUnitTest(self, func, file, *extras):
"""Writes the service unit test for a bucket command."""
file.Write("// TODO(gman): %s\n" % func.name)
def WriteBucketValidationCode(self, func, file):
"""Writes the validation code for a bucket version of a command."""
file.Write("// TODO(gman): %s\n" % func.name)
def WriteGLES2ImplementationDeclaration(self, func, file):
"""Writes the GLES2 Implemention declaration."""
impl_decl = func.GetInfo('impl_decl')
if impl_decl == None or impl_decl == True:
file.Write("%s %s(%s) override;\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write("\n")
def WriteGLES2CLibImplementation(self, func, file):
file.Write("%s GLES2%s(%s) {\n" %
(func.return_type, func.name,
func.MakeTypedOriginalArgString("")))
result_string = "return "
if func.return_type == "void":
result_string = ""
file.Write(" %sgles2::GetGLContext()->%s(%s);\n" %
(result_string, func.original_name,
func.MakeOriginalArgString("")))
file.Write("}\n")
def WriteGLES2Header(self, func, file):
"""Writes a re-write macro for GLES"""
file.Write("#define gl%s GLES2_GET_FUN(%s)\n" %(func.name, func.name))
def WriteClientGLCallLog(self, func, file):
"""Writes a logging macro for the client side code."""
comma = ""
if len(func.GetOriginalArgs()):
comma = " << "
file.Write(
' GPU_CLIENT_LOG("[" << GetLogPrefix() << "] gl%s("%s%s << ")");\n' %
(func.original_name, comma, func.MakeLogArgString()))
def WriteClientGLReturnLog(self, func, file):
"""Writes the return value logging code."""
if func.return_type != "void":
file.Write(' GPU_CLIENT_LOG("return:" << result)\n')
def WriteGLES2ImplementationHeader(self, func, file):
"""Writes the GLES2 Implemention."""
self.WriteGLES2ImplementationDeclaration(func, file)
def WriteGLES2TraceImplementationHeader(self, func, file):
"""Writes the GLES2 Trace Implemention header."""
file.Write("%s %s(%s) override;\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
def WriteGLES2TraceImplementation(self, func, file):
"""Writes the GLES2 Trace Implemention."""
file.Write("%s GLES2TraceImplementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
result_string = "return "
if func.return_type == "void":
result_string = ""
file.Write(' TRACE_EVENT_BINARY_EFFICIENT0("gpu", "GLES2Trace::%s");\n' %
func.name)
file.Write(" %sgl_->%s(%s);\n" %
(result_string, func.name, func.MakeOriginalArgString("")))
file.Write("}\n")
file.Write("\n")
def WriteGLES2Implementation(self, func, file):
"""Writes the GLES2 Implemention."""
impl_func = func.GetInfo('impl_func')
impl_decl = func.GetInfo('impl_decl')
gen_cmd = func.GetInfo('gen_cmd')
if (func.can_auto_generate and
(impl_func == None or impl_func == True) and
(impl_decl == None or impl_decl == True) and
(gen_cmd == None or gen_cmd == True)):
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
self.WriteClientGLCallLog(func, file)
func.WriteDestinationInitalizationValidation(file)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
file.Write(" helper_->%s(%s);\n" %
(func.name, func.MakeHelperArgString("")))
file.Write(" CheckGLError();\n")
self.WriteClientGLReturnLog(func, file)
file.Write("}\n")
file.Write("\n")
def WriteGLES2InterfaceHeader(self, func, file):
"""Writes the GLES2 Interface."""
file.Write("virtual %s %s(%s) = 0;\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
def WriteMojoGLES2ImplHeader(self, func, file):
"""Writes the Mojo GLES2 implementation header."""
file.Write("%s %s(%s) override;\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
def WriteMojoGLES2Impl(self, func, file):
"""Writes the Mojo GLES2 implementation."""
file.Write("%s MojoGLES2Impl::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
extensions = ["CHROMIUM_sync_point", "CHROMIUM_texture_mailbox",
"CHROMIUM_sub_image", "CHROMIUM_miscellaneous",
"occlusion_query_EXT", "CHROMIUM_image",
"CHROMIUM_copy_texture",
"CHROMIUM_pixel_transfer_buffer_object"]
if func.IsCoreGLFunction() or func.GetInfo("extension") in extensions:
file.Write("MojoGLES2MakeCurrent(context_);");
func_return = "gl" + func.original_name + "(" + \
func.MakeOriginalArgString("") + ");"
if func.return_type == "void":
        file.Write(func_return)
else:
file.Write("return " + func_return);
else:
file.Write("NOTREACHED() << \"Unimplemented %s.\";\n" %
func.original_name);
if func.return_type != "void":
file.Write("return 0;")
file.Write("}")
def WriteGLES2InterfaceStub(self, func, file):
"""Writes the GLES2 Interface stub declaration."""
file.Write("%s %s(%s) override;\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
def WriteGLES2InterfaceStubImpl(self, func, file):
"""Writes the GLES2 Interface stub declaration."""
args = func.GetOriginalArgs()
arg_string = ", ".join(
["%s /* %s */" % (arg.type, arg.name) for arg in args])
file.Write("%s GLES2InterfaceStub::%s(%s) {\n" %
(func.return_type, func.original_name, arg_string))
if func.return_type != "void":
file.Write(" return 0;\n")
file.Write("}\n")
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Writes the GLES2 Implemention unit test."""
client_test = func.GetInfo('client_test')
if (func.can_auto_generate and
(client_test == None or client_test == True)):
code = """
TEST_F(GLES2ImplementationTest, %(name)s) {
struct Cmds {
cmds::%(name)s cmd;
};
Cmds expected;
expected.cmd.Init(%(cmd_args)s);
gl_->%(name)s(%(args)s);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}
"""
cmd_arg_strings = [
arg.GetValidClientSideCmdArg(func) for arg in func.GetCmdArgs()
]
gl_arg_strings = [
arg.GetValidClientSideArg(func) for arg in func.GetOriginalArgs()
]
file.Write(code % {
'name': func.name,
'args': ", ".join(gl_arg_strings),
'cmd_args': ", ".join(cmd_arg_strings),
})
# Test constants for invalid values, as they are not tested by the
# service.
constants = [arg for arg in func.GetOriginalArgs() if arg.IsConstant()]
if constants:
code = """
TEST_F(GLES2ImplementationTest, %(name)sInvalidConstantArg%(invalid_index)d) {
gl_->%(name)s(%(args)s);
EXPECT_TRUE(NoCommandsWritten());
EXPECT_EQ(%(gl_error)s, CheckError());
}
"""
for invalid_arg in constants:
gl_arg_strings = []
invalid = invalid_arg.GetInvalidArg(func)
for arg in func.GetOriginalArgs():
if arg is invalid_arg:
gl_arg_strings.append(invalid[0])
else:
gl_arg_strings.append(arg.GetValidClientSideArg(func))
file.Write(code % {
'name': func.name,
'invalid_index': func.GetOriginalArgs().index(invalid_arg),
'args': ", ".join(gl_arg_strings),
'gl_error': invalid[2],
})
else:
if client_test != False:
file.Write("// TODO(zmo): Implement unit test for %s\n" % func.name)
def WriteDestinationInitalizationValidation(self, func, file):
"""Writes the client side destintion initialization validation."""
for arg in func.GetOriginalArgs():
arg.WriteDestinationInitalizationValidation(file, func)
def WriteTraceEvent(self, func, file):
file.Write(' TRACE_EVENT0("gpu", "GLES2Implementation::%s");\n' %
func.original_name)
def WriteImmediateCmdComputeSize(self, func, file):
"""Writes the size computation code for the immediate version of a cmd."""
file.Write(" static uint32_t ComputeSize(uint32_t size_in_bytes) {\n")
file.Write(" return static_cast<uint32_t>(\n")
file.Write(" sizeof(ValueType) + // NOLINT\n")
file.Write(" RoundSizeToMultipleOfEntries(size_in_bytes));\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSetHeader(self, func, file):
"""Writes the SetHeader function for the immediate version of a cmd."""
file.Write(" void SetHeader(uint32_t size_in_bytes) {\n")
file.Write(" header.SetCmdByTotalSize<ValueType>(size_in_bytes);\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdInit(self, func, file):
"""Writes the Init function for the immediate version of a command."""
raise NotImplementedError(func.name)
def WriteImmediateCmdSet(self, func, file):
"""Writes the Set function for the immediate version of a command."""
raise NotImplementedError(func.name)
def WriteCmdHelper(self, func, file):
"""Writes the cmd helper definition for a cmd."""
code = """ void %(name)s(%(typed_args)s) {
gles2::cmds::%(name)s* c = GetCmdSpace<gles2::cmds::%(name)s>();
if (c) {
c->Init(%(args)s);
}
}
"""
file.Write(code % {
"name": func.name,
"typed_args": func.MakeTypedCmdArgString(""),
"args": func.MakeCmdArgString(""),
})
def WriteImmediateCmdHelper(self, func, file):
"""Writes the cmd helper definition for the immediate version of a cmd."""
code = """ void %(name)s(%(typed_args)s) {
const uint32_t s = 0; // TODO(gman): compute correct size
gles2::cmds::%(name)s* c =
GetImmediateCmdSpaceTotalSize<gles2::cmds::%(name)s>(s);
if (c) {
c->Init(%(args)s);
}
}
"""
file.Write(code % {
"name": func.name,
"typed_args": func.MakeTypedCmdArgString(""),
"args": func.MakeCmdArgString(""),
})
class StateSetHandler(TypeHandler):
"""Handler for commands that simply set state."""
def __init__(self):
TypeHandler.__init__(self)
def WriteHandlerImplementation(self, func, file):
"""Overrriden from TypeHandler."""
state_name = func.GetInfo('state')
state = _STATES[state_name]
states = state['states']
args = func.GetOriginalArgs()
for ndx,item in enumerate(states):
code = []
if 'range_checks' in item:
for range_check in item['range_checks']:
code.append("%s %s" % (args[ndx].name, range_check['check']))
if 'nan_check' in item:
# Drivers might generate an INVALID_VALUE error when a value is set
# to NaN. This is allowed behavior under GLES 3.0 section 2.1.1 or
# OpenGL 4.5 section 2.3.4.1 - providing NaN allows undefined results.
# Make this behavior consistent within Chromium, and avoid leaking GL
# errors by generating the error in the command buffer instead of
# letting the GL driver generate it.
code.append("std::isnan(%s)" % args[ndx].name)
if len(code):
file.Write(" if (%s) {\n" % " ||\n ".join(code))
file.Write(
' LOCAL_SET_GL_ERROR(GL_INVALID_VALUE,'
' "%s", "%s out of range");\n' %
(func.name, args[ndx].name))
file.Write(" return error::kNoError;\n")
file.Write(" }\n")
code = []
for ndx,item in enumerate(states):
code.append("state_.%s != %s" % (item['name'], args[ndx].name))
file.Write(" if (%s) {\n" % " ||\n ".join(code))
for ndx,item in enumerate(states):
file.Write(" state_.%s = %s;\n" % (item['name'], args[ndx].name))
if 'state_flag' in state:
file.Write(" %s = true;\n" % state['state_flag'])
if not func.GetInfo("no_gl"):
for ndx,item in enumerate(states):
if item.get('cached', False):
file.Write(" state_.%s = %s;\n" %
(CachedStateName(item), args[ndx].name))
file.Write(" %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
file.Write(" }\n")
def WriteServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
TypeHandler.WriteServiceUnitTest(self, func, file, *extras)
state_name = func.GetInfo('state')
state = _STATES[state_name]
states = state['states']
for ndx,item in enumerate(states):
if 'range_checks' in item:
for check_ndx, range_check in enumerate(item['range_checks']):
valid_test = """
TEST_P(%(test_name)s, %(name)sInvalidValue%(ndx)d_%(check_ndx)d) {
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_INVALID_VALUE, GetGLError());
}
"""
name = func.name
arg_strings = [
arg.GetValidArg(func) \
for arg in func.GetOriginalArgs() if not arg.IsConstant()
]
arg_strings[ndx] = range_check['test_value']
vars = {
'name': name,
'ndx': ndx,
'check_ndx': check_ndx,
'args': ", ".join(arg_strings),
}
for extra in extras:
vars.update(extra)
file.Write(valid_test % vars)
if 'nan_check' in item:
valid_test = """
TEST_P(%(test_name)s, %(name)sNaNValue%(ndx)d) {
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_INVALID_VALUE, GetGLError());
}
"""
name = func.name
arg_strings = [
arg.GetValidArg(func) \
for arg in func.GetOriginalArgs() if not arg.IsConstant()
]
arg_strings[ndx] = 'nanf("")'
vars = {
'name': name,
'ndx': ndx,
'args': ", ".join(arg_strings),
}
for extra in extras:
vars.update(extra)
file.Write(valid_test % vars)
class StateSetRGBAlphaHandler(TypeHandler):
"""Handler for commands that simply set state that have rgb/alpha."""
def __init__(self):
TypeHandler.__init__(self)
def WriteHandlerImplementation(self, func, file):
"""Overrriden from TypeHandler."""
state_name = func.GetInfo('state')
state = _STATES[state_name]
states = state['states']
args = func.GetOriginalArgs()
num_args = len(args)
code = []
for ndx,item in enumerate(states):
code.append("state_.%s != %s" % (item['name'], args[ndx % num_args].name))
file.Write(" if (%s) {\n" % " ||\n ".join(code))
for ndx, item in enumerate(states):
file.Write(" state_.%s = %s;\n" %
(item['name'], args[ndx % num_args].name))
if 'state_flag' in state:
file.Write(" %s = true;\n" % state['state_flag'])
if not func.GetInfo("no_gl"):
file.Write(" %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
file.Write(" }\n")
class StateSetFrontBackSeparateHandler(TypeHandler):
"""Handler for commands that simply set state that have front/back."""
def __init__(self):
TypeHandler.__init__(self)
def WriteHandlerImplementation(self, func, file):
"""Overrriden from TypeHandler."""
state_name = func.GetInfo('state')
state = _STATES[state_name]
states = state['states']
args = func.GetOriginalArgs()
face = args[0].name
num_args = len(args)
file.Write(" bool changed = false;\n")
for group_ndx, group in enumerate(Grouper(num_args - 1, states)):
file.Write(" if (%s == %s || %s == GL_FRONT_AND_BACK) {\n" %
(face, ('GL_FRONT', 'GL_BACK')[group_ndx], face))
code = []
for ndx, item in enumerate(group):
code.append("state_.%s != %s" % (item['name'], args[ndx + 1].name))
file.Write(" changed |= %s;\n" % " ||\n ".join(code))
file.Write(" }\n")
file.Write(" if (changed) {\n")
for group_ndx, group in enumerate(Grouper(num_args - 1, states)):
file.Write(" if (%s == %s || %s == GL_FRONT_AND_BACK) {\n" %
(face, ('GL_FRONT', 'GL_BACK')[group_ndx], face))
for ndx, item in enumerate(group):
file.Write(" state_.%s = %s;\n" %
(item['name'], args[ndx + 1].name))
file.Write(" }\n")
if 'state_flag' in state:
file.Write(" %s = true;\n" % state['state_flag'])
if not func.GetInfo("no_gl"):
file.Write(" %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
file.Write(" }\n")
class StateSetFrontBackHandler(TypeHandler):
"""Handler for commands that simply set state that set both front/back."""
def __init__(self):
TypeHandler.__init__(self)
def WriteHandlerImplementation(self, func, file):
"""Overrriden from TypeHandler."""
state_name = func.GetInfo('state')
state = _STATES[state_name]
states = state['states']
args = func.GetOriginalArgs()
num_args = len(args)
code = []
for group_ndx, group in enumerate(Grouper(num_args, states)):
for ndx, item in enumerate(group):
code.append("state_.%s != %s" % (item['name'], args[ndx].name))
file.Write(" if (%s) {\n" % " ||\n ".join(code))
for group_ndx, group in enumerate(Grouper(num_args, states)):
for ndx, item in enumerate(group):
file.Write(" state_.%s = %s;\n" % (item['name'], args[ndx].name))
if 'state_flag' in state:
file.Write(" %s = true;\n" % state['state_flag'])
if not func.GetInfo("no_gl"):
file.Write(" %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
file.Write(" }\n")
class StateSetNamedParameter(TypeHandler):
"""Handler for commands that set a state chosen with an enum parameter."""
def __init__(self):
TypeHandler.__init__(self)
def WriteHandlerImplementation(self, func, file):
"""Overridden from TypeHandler."""
state_name = func.GetInfo('state')
state = _STATES[state_name]
states = state['states']
args = func.GetOriginalArgs()
num_args = len(args)
assert num_args == 2
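    # The pname should already have passed enum validation by the time the
    # generated switch runs; the default: NOTREACHED() case only guards
    # against skew between the validator and this state table.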
file.Write(" switch (%s) {\n" % args[0].name)
for state in states:
file.Write(" case %s:\n" % state['enum'])
file.Write(" if (state_.%s != %s) {\n" %
(state['name'], args[1].name))
file.Write(" state_.%s = %s;\n" % (state['name'], args[1].name))
if not func.GetInfo("no_gl"):
file.Write(" %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
file.Write(" }\n")
file.Write(" break;\n")
file.Write(" default:\n")
file.Write(" NOTREACHED();\n")
file.Write(" }\n")
class CustomHandler(TypeHandler):
"""Handler for commands that are auto-generated but require minor tweaks."""
def __init__(self):
TypeHandler.__init__(self)
def WriteServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteBucketServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): %s\n\n" % func.name)
def WriteImmediateServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): %s\n\n" % func.name)
def WriteImmediateCmdGetTotalSize(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(
" uint32_t total_size = 0; // TODO(gman): get correct size.\n")
def WriteImmediateCmdInit(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" void Init(%s) {\n" % func.MakeTypedCmdArgString("_"))
self.WriteImmediateCmdGetTotalSize(func, file)
file.Write(" SetHeader(total_size);\n")
args = func.GetCmdArgs()
for arg in args:
file.Write(" %s = _%s;\n" % (arg.name, arg.name))
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSet(self, func, file):
"""Overrriden from TypeHandler."""
copy_args = func.MakeCmdArgString("_", False)
file.Write(" void* Set(void* cmd%s) {\n" %
func.MakeTypedCmdArgString("_", True))
self.WriteImmediateCmdGetTotalSize(func, file)
file.Write(" static_cast<ValueType*>(cmd)->Init(%s);\n" % copy_args)
file.Write(" return NextImmediateCmdAddressTotalSize<ValueType>("
"cmd, total_size);\n")
file.Write(" }\n")
file.Write("\n")
class TodoHandler(CustomHandler):
"""Handle for commands that are not yet implemented."""
def NeedsDataTransferFunction(self, func):
"""Overriden from TypeHandler."""
return False
def WriteImmediateFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" // TODO: for now this is a no-op\n")
file.Write(
" SetGLError("
"GL_INVALID_OPERATION, \"gl%s\", \"not implemented\");\n" %
func.name)
if func.return_type != "void":
file.Write(" return 0;\n")
file.Write("}\n")
file.Write("\n")
def WriteServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
self.WriteServiceHandlerFunctionHeader(func, file)
file.Write(" // TODO: for now this is a no-op\n")
file.Write(
" LOCAL_SET_GL_ERROR("
"GL_INVALID_OPERATION, \"gl%s\", \"not implemented\");\n" %
func.name)
file.Write(" return error::kNoError;\n")
file.Write("}\n")
file.Write("\n")
class HandWrittenHandler(CustomHandler):
"""Handler for comands where everything must be written by hand."""
def InitFunction(self, func):
"""Add or adjust anything type specific for this function."""
CustomHandler.InitFunction(self, func)
func.can_auto_generate = False
def NeedsDataTransferFunction(self, func):
"""Overriden from TypeHandler."""
# If specified explicitly, force the data transfer method.
if func.GetInfo('data_transfer_methods'):
return True
return False
def WriteStruct(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteDocs(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): %s\n\n" % func.name)
def WriteImmediateServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): %s\n\n" % func.name)
def WriteBucketServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): %s\n\n" % func.name)
def WriteServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteBucketServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateCmdHelper(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteCmdHelper(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): Write test for %s\n" % func.name)
def WriteImmediateFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): Write test for %s\n" % func.name)
def WriteBucketFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): Write test for %s\n" % func.name)
class ManualHandler(CustomHandler):
"""Handler for commands who's handlers must be written by hand."""
def __init__(self):
CustomHandler.__init__(self)
def InitFunction(self, func):
"""Overrriden from TypeHandler."""
if (func.name == 'CompressedTexImage2DBucket' or
func.name == 'CompressedTexImage3DBucket'):
func.cmd_args = func.cmd_args[:-1]
func.AddCmdArg(Argument('bucket_id', 'GLuint'))
else:
CustomHandler.InitFunction(self, func)
def WriteServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteBucketServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): %s\n\n" % func.name)
def WriteImmediateServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): %s\n\n" % func.name)
def WriteImmediateServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): Implement test for %s\n" % func.name)
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
if func.GetInfo('impl_func'):
super(ManualHandler, self).WriteGLES2Implementation(func, file)
def WriteGLES2ImplementationHeader(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("%s %s(%s) override;\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write("\n")
def WriteImmediateCmdGetTotalSize(self, func, file):
"""Overrriden from TypeHandler."""
# TODO(gman): Move this data to _FUNCTION_INFO?
CustomHandler.WriteImmediateCmdGetTotalSize(self, func, file)
class DataHandler(TypeHandler):
"""Handler for glBufferData, glBufferSubData, glTexImage*D, glTexSubImage*D,
glCompressedTexImage*D, glCompressedTexImageSub*D."""
def __init__(self):
TypeHandler.__init__(self)
def InitFunction(self, func):
"""Overrriden from TypeHandler."""
if (func.name == 'CompressedTexSubImage2DBucket' or
func.name == 'CompressedTexSubImage3DBucket'):
func.cmd_args = func.cmd_args[:-1]
func.AddCmdArg(Argument('bucket_id', 'GLuint'))
def WriteGetDataSizeCode(self, func, file):
"""Overrriden from TypeHandler."""
# TODO(gman): Move this data to _FUNCTION_INFO?
name = func.name
if name.endswith("Immediate"):
name = name[0:-9]
if name == 'BufferData' or name == 'BufferSubData':
file.Write(" uint32_t data_size = size;\n")
elif (name == 'CompressedTexImage2D' or
name == 'CompressedTexSubImage2D' or
name == 'CompressedTexImage3D' or
name == 'CompressedTexSubImage3D'):
file.Write(" uint32_t data_size = imageSize;\n")
elif (name == 'CompressedTexSubImage2DBucket' or
name == 'CompressedTexSubImage3DBucket'):
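      # Bucket variants carry their payload in a shared-memory bucket
      # (filled by earlier bucket upload commands) instead of inline data,
      # so the size is read from the bucket itself.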
file.Write(" Bucket* bucket = GetBucket(c.bucket_id);\n")
file.Write(" uint32_t data_size = bucket->size();\n")
file.Write(" GLsizei imageSize = data_size;\n")
elif name == 'TexImage2D' or name == 'TexSubImage2D':
code = """ uint32_t data_size;
if (!GLES2Util::ComputeImageDataSize(
width, height, format, type, unpack_alignment_, &data_size)) {
return error::kOutOfBounds;
}
"""
file.Write(code)
else:
file.Write(
"// uint32_t data_size = 0; // TODO(gman): get correct size!\n")
def WriteImmediateCmdGetTotalSize(self, func, file):
"""Overrriden from TypeHandler."""
pass
def WriteImmediateCmdSizeTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" EXPECT_EQ(sizeof(cmd), total_size);\n")
def WriteImmediateCmdInit(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" void Init(%s) {\n" % func.MakeTypedCmdArgString("_"))
self.WriteImmediateCmdGetTotalSize(func, file)
file.Write(" SetHeader(total_size);\n")
args = func.GetCmdArgs()
for arg in args:
file.Write(" %s = _%s;\n" % (arg.name, arg.name))
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSet(self, func, file):
"""Overrriden from TypeHandler."""
copy_args = func.MakeCmdArgString("_", False)
file.Write(" void* Set(void* cmd%s) {\n" %
func.MakeTypedCmdArgString("_", True))
self.WriteImmediateCmdGetTotalSize(func, file)
file.Write(" static_cast<ValueType*>(cmd)->Init(%s);\n" % copy_args)
file.Write(" return NextImmediateCmdAddressTotalSize<ValueType>("
"cmd, total_size);\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
# TODO(gman): Remove this exception.
file.Write("// TODO(gman): Implement test for %s\n" % func.name)
return
def WriteServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): %s\n\n" % func.name)
def WriteImmediateServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
file.Write("// TODO(gman): %s\n\n" % func.name)
def WriteBucketServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
    if func.name not in ('CompressedTexSubImage2DBucket',
                         'CompressedTexSubImage3DBucket'):
      TypeHandler.WriteBucketServiceImplementation(self, func, file)
class BindHandler(TypeHandler):
"""Handler for glBind___ type functions."""
def __init__(self):
TypeHandler.__init__(self)
def WriteServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
if len(func.GetOriginalArgs()) == 1:
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
decoder_->set_unsafe_es3_apis_enabled(false);
EXPECT_EQ(error::kUnknownCommand, ExecuteCmd(cmd));
}
"""
else:
valid_test += """
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
if func.GetInfo("gen_func"):
valid_test += """
TEST_P(%(test_name)s, %(name)sValidArgsNewId) {
EXPECT_CALL(*gl_, %(gl_func_name)s(kNewServiceId));
EXPECT_CALL(*gl_, %(gl_gen_func_name)s(1, _))
.WillOnce(SetArgumentPointee<1>(kNewServiceId));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(kNewClientId);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
EXPECT_TRUE(Get%(resource_type)s(kNewClientId) != NULL);
}
"""
self.WriteValidUnitTest(func, file, valid_test, {
'resource_type': func.GetOriginalArgs()[0].resource_type,
'gl_gen_func_name': func.GetInfo("gen_func"),
}, *extras)
else:
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
decoder_->set_unsafe_es3_apis_enabled(false);
EXPECT_EQ(error::kUnknownCommand, ExecuteCmd(cmd));
}
"""
else:
valid_test += """
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
if func.GetInfo("gen_func"):
valid_test += """
TEST_P(%(test_name)s, %(name)sValidArgsNewId) {
EXPECT_CALL(*gl_,
%(gl_func_name)s(%(gl_args_with_new_id)s));
EXPECT_CALL(*gl_, %(gl_gen_func_name)s(1, _))
.WillOnce(SetArgumentPointee<1>(kNewServiceId));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args_with_new_id)s);"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
EXPECT_TRUE(Get%(resource_type)s(kNewClientId) != NULL);
decoder_->set_unsafe_es3_apis_enabled(false);
EXPECT_EQ(error::kUnknownCommand, ExecuteCmd(cmd));
}
"""
else:
valid_test += """
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
EXPECT_TRUE(Get%(resource_type)s(kNewClientId) != NULL);
}
"""
gl_args_with_new_id = []
args_with_new_id = []
for arg in func.GetOriginalArgs():
if hasattr(arg, 'resource_type'):
gl_args_with_new_id.append('kNewServiceId')
args_with_new_id.append('kNewClientId')
else:
gl_args_with_new_id.append(arg.GetValidGLArg(func))
args_with_new_id.append(arg.GetValidArg(func))
self.WriteValidUnitTest(func, file, valid_test, {
'args_with_new_id': ", ".join(args_with_new_id),
'gl_args_with_new_id': ", ".join(gl_args_with_new_id),
'resource_type': func.GetResourceIdArg().resource_type,
'gl_gen_func_name': func.GetInfo("gen_func"),
}, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, file, invalid_test, *extras)
def WriteGLES2Implementation(self, func, file):
"""Writes the GLES2 Implemention."""
impl_func = func.GetInfo('impl_func')
impl_decl = func.GetInfo('impl_decl')
if (func.can_auto_generate and
(impl_func == None or impl_func == True) and
(impl_decl == None or impl_decl == True)):
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
code = """ if (Is%(type)sReservedId(%(id)s)) {
SetGLError(GL_INVALID_OPERATION, "%(name)s\", \"%(id)s reserved id");
return;
}
%(name)sHelper(%(arg_string)s);
CheckGLError();
}
"""
name_arg = func.GetResourceIdArg()
file.Write(code % {
'name': func.name,
'arg_string': func.MakeOriginalArgString(""),
'id': name_arg.name,
'type': name_arg.resource_type,
'lc_type': name_arg.resource_type.lower(),
})
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
client_test = func.GetInfo('client_test')
if client_test == False:
return
code = """
TEST_F(GLES2ImplementationTest, %(name)s) {
struct Cmds {
cmds::%(name)s cmd;
};
Cmds expected;
expected.cmd.Init(%(cmd_args)s);
gl_->%(name)s(%(args)s);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));"""
if not func.IsUnsafe():
code += """
ClearCommands();
gl_->%(name)s(%(args)s);
EXPECT_TRUE(NoCommandsWritten());"""
code += """
}
"""
cmd_arg_strings = [
arg.GetValidClientSideCmdArg(func) for arg in func.GetCmdArgs()
]
gl_arg_strings = [
arg.GetValidClientSideArg(func) for arg in func.GetOriginalArgs()
]
file.Write(code % {
'name': func.name,
'args': ", ".join(gl_arg_strings),
'cmd_args': ", ".join(cmd_arg_strings),
})
class GENnHandler(TypeHandler):
"""Handler for glGen___ type functions."""
def __init__(self):
TypeHandler.__init__(self)
def InitFunction(self, func):
"""Overrriden from TypeHandler."""
pass
def WriteGetDataSizeCode(self, func, file):
"""Overrriden from TypeHandler."""
code = """ uint32_t data_size;
if (!SafeMultiplyUint32(n, sizeof(GLuint), &data_size)) {
return error::kOutOfBounds;
}
"""
file.Write(code)
  def WriteHandlerImplementation(self, func, file):
    """Overridden from TypeHandler."""
file.Write(" if (!%sHelper(n, %s)) {\n"
" return error::kInvalidArguments;\n"
" }\n" %
(func.name, func.GetLastOriginalArg().name))
def WriteImmediateHandlerImplementation(self, func, file):
"""Overrriden from TypeHandler."""
if func.IsUnsafe():
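      # Unsafe (ES3) ids live in an explicit client->service map on the
      # context group: reject client ids that are already mapped, generate
      # fresh service ids, then record each pair.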
file.Write(""" for (GLsizei ii = 0; ii < n; ++ii) {
if (group_->Get%(resource_name)sServiceId(%(last_arg_name)s[ii], NULL)) {
return error::kInvalidArguments;
}
}
scoped_ptr<GLuint[]> service_ids(new GLuint[n]);
gl%(func_name)s(n, service_ids.get());
for (GLsizei ii = 0; ii < n; ++ii) {
group_->Add%(resource_name)sId(%(last_arg_name)s[ii], service_ids[ii]);
}
""" % { 'func_name': func.original_name,
'last_arg_name': func.GetLastOriginalArg().name,
'resource_name': func.GetInfo('resource_type') })
else:
file.Write(" if (!%sHelper(n, %s)) {\n"
" return error::kInvalidArguments;\n"
" }\n" %
(func.original_name, func.GetLastOriginalArg().name))
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
log_code = (""" GPU_CLIENT_LOG_CODE_BLOCK({
for (GLsizei i = 0; i < n; ++i) {
GPU_CLIENT_LOG(" " << i << ": " << %s[i]);
}
});""" % func.GetOriginalArgs()[1].name)
args = {
'log_code': log_code,
'return_type': func.return_type,
'name': func.original_name,
'typed_args': func.MakeTypedOriginalArgString(""),
'args': func.MakeOriginalArgString(""),
'resource_types': func.GetInfo('resource_types'),
'count_name': func.GetOriginalArgs()[0].name,
}
file.Write(
"%(return_type)s GLES2Implementation::%(name)s(%(typed_args)s) {\n" %
args)
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
not_shared = func.GetInfo('not_shared')
if not_shared:
alloc_code = (
""" IdAllocator* id_allocator = GetIdAllocator(id_namespaces::k%s);
for (GLsizei ii = 0; ii < n; ++ii)
%s[ii] = id_allocator->AllocateID();""" %
(func.GetInfo('resource_types'), func.GetOriginalArgs()[1].name))
else:
alloc_code = (""" GetIdHandler(id_namespaces::k%(resource_types)s)->
MakeIds(this, 0, %(args)s);""" % args)
args['alloc_code'] = alloc_code
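    # The extra Flush when bind_generates_resource is enabled is presumably
    # there so the service (and other contexts in the share group) learns
    # about locally generated ids promptly.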
code = """ GPU_CLIENT_SINGLE_THREAD_CHECK();
%(alloc_code)s
%(name)sHelper(%(args)s);
helper_->%(name)sImmediate(%(args)s);
if (share_group_->bind_generates_resource())
helper_->CommandBufferHelper::Flush();
%(log_code)s
CheckGLError();
}
"""
file.Write(code % args)
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
code = """
TEST_F(GLES2ImplementationTest, %(name)s) {
GLuint ids[2] = { 0, };
struct Cmds {
cmds::%(name)sImmediate gen;
GLuint data[2];
};
Cmds expected;
expected.gen.Init(arraysize(ids), &ids[0]);
expected.data[0] = k%(types)sStartId;
expected.data[1] = k%(types)sStartId + 1;
gl_->%(name)s(arraysize(ids), &ids[0]);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
EXPECT_EQ(k%(types)sStartId, ids[0]);
EXPECT_EQ(k%(types)sStartId + 1, ids[1]);
}
"""
file.Write(code % {
'name': func.name,
'types': func.GetInfo('resource_types'),
})
def WriteServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(1, _))
.WillOnce(SetArgumentPointee<1>(kNewServiceId));
GetSharedMemoryAs<GLuint*>()[0] = kNewClientId;
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());"""
if func.IsUnsafe():
valid_test += """
GLuint service_id;
EXPECT_TRUE(Get%(resource_name)sServiceId(kNewClientId, &service_id));
  EXPECT_EQ(kNewServiceId, service_id);
}
"""
else:
valid_test += """
  EXPECT_TRUE(Get%(resource_name)s(kNewClientId) != NULL);
}
"""
self.WriteValidUnitTest(func, file, valid_test, {
'resource_name': func.GetInfo('resource_type'),
}, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(_, _)).Times(0);
GetSharedMemoryAs<GLuint*>()[0] = client_%(resource_name)s_id_;
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kInvalidArguments, ExecuteCmd(cmd));
}
"""
self.WriteValidUnitTest(func, file, invalid_test, {
'resource_name': func.GetInfo('resource_type').lower(),
}, *extras)
def WriteImmediateServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(1, _))
.WillOnce(SetArgumentPointee<1>(kNewServiceId));
cmds::%(name)s* cmd = GetImmediateAs<cmds::%(name)s>();
GLuint temp = kNewClientId;
SpecializedSetup<cmds::%(name)s, 0>(true);"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);"""
valid_test += """
cmd->Init(1, &temp);
EXPECT_EQ(error::kNoError,
ExecuteImmediateCmd(*cmd, sizeof(temp)));
EXPECT_EQ(GL_NO_ERROR, GetGLError());"""
if func.IsUnsafe():
valid_test += """
GLuint service_id;
EXPECT_TRUE(Get%(resource_name)sServiceId(kNewClientId, &service_id));
EXPECT_EQ(kNewServiceId, service_id);
decoder_->set_unsafe_es3_apis_enabled(false);
EXPECT_EQ(error::kUnknownCommand,
ExecuteImmediateCmd(*cmd, sizeof(temp)));
}
"""
else:
valid_test += """
EXPECT_TRUE(Get%(resource_name)s(kNewClientId) != NULL);
}
"""
self.WriteValidUnitTest(func, file, valid_test, {
'resource_name': func.GetInfo('resource_type'),
}, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(_, _)).Times(0);
cmds::%(name)s* cmd = GetImmediateAs<cmds::%(name)s>();
SpecializedSetup<cmds::%(name)s, 0>(false);
cmd->Init(1, &client_%(resource_name)s_id_);"""
if func.IsUnsafe():
invalid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);
EXPECT_EQ(error::kInvalidArguments,
ExecuteImmediateCmd(*cmd, sizeof(&client_%(resource_name)s_id_)));
decoder_->set_unsafe_es3_apis_enabled(false);
}
"""
else:
invalid_test += """
EXPECT_EQ(error::kInvalidArguments,
ExecuteImmediateCmd(*cmd, sizeof(&client_%(resource_name)s_id_)));
}
"""
self.WriteValidUnitTest(func, file, invalid_test, {
'resource_name': func.GetInfo('resource_type').lower(),
}, *extras)
def WriteImmediateCmdComputeSize(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" static uint32_t ComputeDataSize(GLsizei n) {\n")
file.Write(
" return static_cast<uint32_t>(sizeof(GLuint) * n); // NOLINT\n")
file.Write(" }\n")
file.Write("\n")
file.Write(" static uint32_t ComputeSize(GLsizei n) {\n")
file.Write(" return static_cast<uint32_t>(\n")
file.Write(" sizeof(ValueType) + ComputeDataSize(n)); // NOLINT\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSetHeader(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" void SetHeader(GLsizei n) {\n")
file.Write(" header.SetCmdByTotalSize<ValueType>(ComputeSize(n));\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdInit(self, func, file):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
file.Write(" void Init(%s, %s _%s) {\n" %
(func.MakeTypedCmdArgString("_"),
last_arg.type, last_arg.name))
file.Write(" SetHeader(_n);\n")
args = func.GetCmdArgs()
for arg in args:
file.Write(" %s = _%s;\n" % (arg.name, arg.name))
file.Write(" memcpy(ImmediateDataAddress(this),\n")
file.Write(" _%s, ComputeDataSize(_n));\n" % last_arg.name)
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSet(self, func, file):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
copy_args = func.MakeCmdArgString("_", False)
file.Write(" void* Set(void* cmd%s, %s _%s) {\n" %
(func.MakeTypedCmdArgString("_", True),
last_arg.type, last_arg.name))
file.Write(" static_cast<ValueType*>(cmd)->Init(%s, _%s);\n" %
(copy_args, last_arg.name))
file.Write(" const uint32_t size = ComputeSize(_n);\n")
file.Write(" return NextImmediateCmdAddressTotalSize<ValueType>("
"cmd, size);\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdHelper(self, func, file):
"""Overrriden from TypeHandler."""
code = """ void %(name)s(%(typed_args)s) {
const uint32_t size = gles2::cmds::%(name)s::ComputeSize(n);
gles2::cmds::%(name)s* c =
GetImmediateCmdSpaceTotalSize<gles2::cmds::%(name)s>(size);
if (c) {
c->Init(%(args)s);
}
}
"""
file.Write(code % {
"name": func.name,
"typed_args": func.MakeTypedOriginalArgString(""),
"args": func.MakeOriginalArgString(""),
})
def WriteImmediateFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("TEST_F(GLES2FormatTest, %s) {\n" % func.name)
file.Write(" static GLuint ids[] = { 12, 23, 34, };\n")
file.Write(" cmds::%s& cmd = *GetBufferAs<cmds::%s>();\n" %
(func.name, func.name))
file.Write(" void* next_cmd = cmd.Set(\n")
file.Write(" &cmd, static_cast<GLsizei>(arraysize(ids)), ids);\n")
file.Write(" EXPECT_EQ(static_cast<uint32_t>(cmds::%s::kCmdId),\n" %
func.name)
file.Write(" cmd.header.command);\n")
file.Write(" EXPECT_EQ(sizeof(cmd) +\n")
file.Write(" RoundSizeToMultipleOfEntries(cmd.n * 4u),\n")
file.Write(" cmd.header.size * 4u);\n")
file.Write(" EXPECT_EQ(static_cast<GLsizei>(arraysize(ids)), cmd.n);\n");
file.Write(" CheckBytesWrittenMatchesExpectedSize(\n")
file.Write(" next_cmd, sizeof(cmd) +\n")
file.Write(" RoundSizeToMultipleOfEntries(arraysize(ids) * 4u));\n")
file.Write(" // TODO(gman): Check that ids were inserted;\n")
file.Write("}\n")
file.Write("\n")
class CreateHandler(TypeHandler):
"""Handler for glCreate___ type functions."""
def __init__(self):
TypeHandler.__init__(self)
def InitFunction(self, func):
"""Overrriden from TypeHandler."""
func.AddCmdArg(Argument("client_id", 'uint32_t'))
def __GetResourceType(self, func):
if func.return_type == "GLsync":
return "Sync"
else:
      return func.name[6:]  # Strip the "Create" prefix.
def WriteServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
%(id_type_cast)sEXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s))
.WillOnce(Return(%(const_service_id)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s%(comma)skNewClientId);"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);"""
valid_test += """
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());"""
if func.IsUnsafe():
valid_test += """
%(return_type)s service_id = 0;
EXPECT_TRUE(Get%(resource_type)sServiceId(kNewClientId, &service_id));
EXPECT_EQ(%(const_service_id)s, service_id);
decoder_->set_unsafe_es3_apis_enabled(false);
EXPECT_EQ(error::kUnknownCommand, ExecuteCmd(cmd));
}
"""
else:
valid_test += """
EXPECT_TRUE(Get%(resource_type)s(kNewClientId));
}
"""
comma = ""
cmd_arg_count = 0
for arg in func.GetOriginalArgs():
if not arg.IsConstant():
cmd_arg_count += 1
if cmd_arg_count:
comma = ", "
if func.return_type == 'GLsync':
id_type_cast = ("const GLsync kNewServiceIdGLuint = reinterpret_cast"
"<GLsync>(kNewServiceId);\n ")
const_service_id = "kNewServiceIdGLuint"
else:
id_type_cast = ""
const_service_id = "kNewServiceId"
self.WriteValidUnitTest(func, file, valid_test, {
'comma': comma,
'resource_type': self.__GetResourceType(func),
'return_type': func.return_type,
'id_type_cast': id_type_cast,
'const_service_id': const_service_id,
}, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s%(comma)skNewClientId);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, file, invalid_test, {
'comma': comma,
}, *extras)
  def WriteHandlerImplementation(self, func, file):
    """Overridden from TypeHandler."""
if func.IsUnsafe():
code = """ uint32_t client_id = c.client_id;
%(return_type)s service_id = 0;
if (group_->Get%(resource_name)sServiceId(client_id, &service_id)) {
return error::kInvalidArguments;
}
service_id = %(gl_func_name)s(%(gl_args)s);
if (service_id) {
group_->Add%(resource_name)sId(client_id, service_id);
}
"""
else:
code = """ uint32_t client_id = c.client_id;
if (Get%(resource_name)s(client_id)) {
return error::kInvalidArguments;
}
%(return_type)s service_id = %(gl_func_name)s(%(gl_args)s);
if (service_id) {
Create%(resource_name)s(client_id, service_id%(gl_args_with_comma)s);
}
"""
file.Write(code % {
'resource_name': self.__GetResourceType(func),
'return_type': func.return_type,
'gl_func_name': func.GetGLFunctionName(),
'gl_args': func.MakeOriginalArgString(""),
'gl_args_with_comma': func.MakeOriginalArgString("", True) })
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
file.Write(" GLuint client_id;\n")
if func.return_type == "GLsync":
file.Write(
" GetIdHandler(id_namespaces::kSyncs)->\n")
else:
file.Write(
" GetIdHandler(id_namespaces::kProgramsAndShaders)->\n")
file.Write(" MakeIds(this, 0, 1, &client_id);\n")
file.Write(" helper_->%s(%s);\n" %
(func.name, func.MakeCmdArgString("")))
file.Write(' GPU_CLIENT_LOG("returned " << client_id);\n')
file.Write(" CheckGLError();\n")
if func.return_type == "GLsync":
file.Write(" return reinterpret_cast<GLsync>(client_id);\n")
else:
file.Write(" return client_id;\n")
file.Write("}\n")
file.Write("\n")
class DeleteHandler(TypeHandler):
"""Handler for glDelete___ single resource type functions."""
def __init__(self):
TypeHandler.__init__(self)
def WriteServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
    if func.IsUnsafe():
      TypeHandler.WriteServiceImplementation(self, func, file)
    # For the safe variants, HandleDeleteShader and HandleDeleteProgram are
    # written by hand, so nothing is emitted here.
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
file.Write(
" GPU_CLIENT_DCHECK(%s != 0);\n" % func.GetOriginalArgs()[-1].name)
file.Write(" %sHelper(%s);\n" %
(func.original_name, func.GetOriginalArgs()[-1].name))
file.Write(" CheckGLError();\n")
file.Write("}\n")
file.Write("\n")
  def WriteHandlerImplementation(self, func, file):
    """Overridden from TypeHandler."""
assert len(func.GetOriginalArgs()) == 1
arg = func.GetOriginalArgs()[0]
if func.IsUnsafe():
file.Write(""" %(arg_type)s service_id = 0;
if (group_->Get%(resource_type)sServiceId(%(arg_name)s, &service_id)) {
glDelete%(resource_type)s(service_id);
group_->Remove%(resource_type)sId(%(arg_name)s);
} else {
LOCAL_SET_GL_ERROR(
GL_INVALID_VALUE, "gl%(func_name)s", "unknown %(arg_name)s");
}
""" % { 'resource_type': func.GetInfo('resource_type'),
'arg_name': arg.name,
'arg_type': arg.type,
'func_name': func.original_name })
else:
file.Write(" %sHelper(%s);\n" % (func.original_name, arg.name))
class DELnHandler(TypeHandler):
"""Handler for glDelete___ type functions."""
def __init__(self):
TypeHandler.__init__(self)
def WriteGetDataSizeCode(self, func, file):
"""Overrriden from TypeHandler."""
code = """ uint32_t data_size;
if (!SafeMultiplyUint32(n, sizeof(GLuint), &data_size)) {
return error::kOutOfBounds;
}
"""
file.Write(code)
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
code = """
TEST_F(GLES2ImplementationTest, %(name)s) {
GLuint ids[2] = { k%(types)sStartId, k%(types)sStartId + 1 };
struct Cmds {
cmds::%(name)sImmediate del;
GLuint data[2];
};
Cmds expected;
expected.del.Init(arraysize(ids), &ids[0]);
expected.data[0] = k%(types)sStartId;
expected.data[1] = k%(types)sStartId + 1;
gl_->%(name)s(arraysize(ids), &ids[0]);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}
"""
file.Write(code % {
'name': func.name,
'types': func.GetInfo('resource_types'),
})
def WriteServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(
*gl_,
%(gl_func_name)s(1, Pointee(kService%(upper_resource_name)sId)))
.Times(1);
GetSharedMemoryAs<GLuint*>()[0] = client_%(resource_name)s_id_;
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
EXPECT_TRUE(
Get%(upper_resource_name)s(client_%(resource_name)s_id_) == NULL);
}
"""
self.WriteValidUnitTest(func, file, valid_test, {
'resource_name': func.GetInfo('resource_type').lower(),
'upper_resource_name': func.GetInfo('resource_type'),
}, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs) {
GetSharedMemoryAs<GLuint*>()[0] = kInvalidClientId;
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
}
"""
self.WriteValidUnitTest(func, file, invalid_test, *extras)
def WriteImmediateServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(
*gl_,
%(gl_func_name)s(1, Pointee(kService%(upper_resource_name)sId)))
.Times(1);
cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();
SpecializedSetup<cmds::%(name)s, 0>(true);
cmd.Init(1, &client_%(resource_name)s_id_);"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);"""
valid_test += """
EXPECT_EQ(error::kNoError,
ExecuteImmediateCmd(cmd, sizeof(client_%(resource_name)s_id_)));
EXPECT_EQ(GL_NO_ERROR, GetGLError());"""
if func.IsUnsafe():
valid_test += """
EXPECT_FALSE(Get%(upper_resource_name)sServiceId(
client_%(resource_name)s_id_, NULL));
decoder_->set_unsafe_es3_apis_enabled(false);
EXPECT_EQ(error::kUnknownCommand,
ExecuteImmediateCmd(cmd, sizeof(client_%(resource_name)s_id_)));
}
"""
else:
valid_test += """
EXPECT_TRUE(
Get%(upper_resource_name)s(client_%(resource_name)s_id_) == NULL);
}
"""
self.WriteValidUnitTest(func, file, valid_test, {
'resource_name': func.GetInfo('resource_type').lower(),
'upper_resource_name': func.GetInfo('resource_type'),
}, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs) {
cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();
SpecializedSetup<cmds::%(name)s, 0>(false);
GLuint temp = kInvalidClientId;
cmd.Init(1, &temp);"""
if func.IsUnsafe():
invalid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);
EXPECT_EQ(error::kNoError,
ExecuteImmediateCmd(cmd, sizeof(temp)));
decoder_->set_unsafe_es3_apis_enabled(false);
EXPECT_EQ(error::kUnknownCommand,
ExecuteImmediateCmd(cmd, sizeof(temp)));
}
"""
else:
invalid_test += """
EXPECT_EQ(error::kNoError,
ExecuteImmediateCmd(cmd, sizeof(temp)));
}
"""
self.WriteValidUnitTest(func, file, invalid_test, *extras)
  def WriteHandlerImplementation(self, func, file):
    """Overridden from TypeHandler."""
file.Write(" %sHelper(n, %s);\n" %
(func.name, func.GetLastOriginalArg().name))
  def WriteImmediateHandlerImplementation(self, func, file):
    """Overridden from TypeHandler."""
if func.IsUnsafe():
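      # The unsafe path deletes ids one at a time because client ids that
      # were never mapped for this context group must be skipped rather
      # than forwarded to GL.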
file.Write(""" for (GLsizei ii = 0; ii < n; ++ii) {
GLuint service_id = 0;
if (group_->Get%(resource_type)sServiceId(
%(last_arg_name)s[ii], &service_id)) {
glDelete%(resource_type)ss(1, &service_id);
group_->Remove%(resource_type)sId(%(last_arg_name)s[ii]);
}
}
""" % { 'resource_type': func.GetInfo('resource_type'),
'last_arg_name': func.GetLastOriginalArg().name })
else:
file.Write(" %sHelper(n, %s);\n" %
(func.original_name, func.GetLastOriginalArg().name))
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
impl_decl = func.GetInfo('impl_decl')
if impl_decl == None or impl_decl == True:
args = {
'return_type': func.return_type,
'name': func.original_name,
'typed_args': func.MakeTypedOriginalArgString(""),
'args': func.MakeOriginalArgString(""),
'resource_type': func.GetInfo('resource_type').lower(),
'count_name': func.GetOriginalArgs()[0].name,
}
file.Write(
"%(return_type)s GLES2Implementation::%(name)s(%(typed_args)s) {\n" %
args)
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
file.Write(""" GPU_CLIENT_LOG_CODE_BLOCK({
for (GLsizei i = 0; i < n; ++i) {
GPU_CLIENT_LOG(" " << i << ": " << %s[i]);
}
});
""" % func.GetOriginalArgs()[1].name)
file.Write(""" GPU_CLIENT_DCHECK_CODE_BLOCK({
for (GLsizei i = 0; i < n; ++i) {
DCHECK(%s[i] != 0);
}
});
""" % func.GetOriginalArgs()[1].name)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
code = """ %(name)sHelper(%(args)s);
CheckGLError();
}
"""
file.Write(code % args)
def WriteImmediateCmdComputeSize(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" static uint32_t ComputeDataSize(GLsizei n) {\n")
file.Write(
" return static_cast<uint32_t>(sizeof(GLuint) * n); // NOLINT\n")
file.Write(" }\n")
file.Write("\n")
file.Write(" static uint32_t ComputeSize(GLsizei n) {\n")
file.Write(" return static_cast<uint32_t>(\n")
file.Write(" sizeof(ValueType) + ComputeDataSize(n)); // NOLINT\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSetHeader(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" void SetHeader(GLsizei n) {\n")
file.Write(" header.SetCmdByTotalSize<ValueType>(ComputeSize(n));\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdInit(self, func, file):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
file.Write(" void Init(%s, %s _%s) {\n" %
(func.MakeTypedCmdArgString("_"),
last_arg.type, last_arg.name))
file.Write(" SetHeader(_n);\n")
args = func.GetCmdArgs()
for arg in args:
file.Write(" %s = _%s;\n" % (arg.name, arg.name))
file.Write(" memcpy(ImmediateDataAddress(this),\n")
file.Write(" _%s, ComputeDataSize(_n));\n" % last_arg.name)
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSet(self, func, file):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
copy_args = func.MakeCmdArgString("_", False)
file.Write(" void* Set(void* cmd%s, %s _%s) {\n" %
(func.MakeTypedCmdArgString("_", True),
last_arg.type, last_arg.name))
file.Write(" static_cast<ValueType*>(cmd)->Init(%s, _%s);\n" %
(copy_args, last_arg.name))
file.Write(" const uint32_t size = ComputeSize(_n);\n")
file.Write(" return NextImmediateCmdAddressTotalSize<ValueType>("
"cmd, size);\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdHelper(self, func, file):
"""Overrriden from TypeHandler."""
code = """ void %(name)s(%(typed_args)s) {
const uint32_t size = gles2::cmds::%(name)s::ComputeSize(n);
gles2::cmds::%(name)s* c =
GetImmediateCmdSpaceTotalSize<gles2::cmds::%(name)s>(size);
if (c) {
c->Init(%(args)s);
}
}
"""
file.Write(code % {
"name": func.name,
"typed_args": func.MakeTypedOriginalArgString(""),
"args": func.MakeOriginalArgString(""),
})
def WriteImmediateFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("TEST_F(GLES2FormatTest, %s) {\n" % func.name)
file.Write(" static GLuint ids[] = { 12, 23, 34, };\n")
file.Write(" cmds::%s& cmd = *GetBufferAs<cmds::%s>();\n" %
(func.name, func.name))
file.Write(" void* next_cmd = cmd.Set(\n")
file.Write(" &cmd, static_cast<GLsizei>(arraysize(ids)), ids);\n")
file.Write(" EXPECT_EQ(static_cast<uint32_t>(cmds::%s::kCmdId),\n" %
func.name)
file.Write(" cmd.header.command);\n")
file.Write(" EXPECT_EQ(sizeof(cmd) +\n")
file.Write(" RoundSizeToMultipleOfEntries(cmd.n * 4u),\n")
file.Write(" cmd.header.size * 4u);\n")
file.Write(" EXPECT_EQ(static_cast<GLsizei>(arraysize(ids)), cmd.n);\n");
file.Write(" CheckBytesWrittenMatchesExpectedSize(\n")
file.Write(" next_cmd, sizeof(cmd) +\n")
file.Write(" RoundSizeToMultipleOfEntries(arraysize(ids) * 4u));\n")
file.Write(" // TODO(gman): Check that ids were inserted;\n")
file.Write("}\n")
file.Write("\n")
class GETnHandler(TypeHandler):
"""Handler for GETn for glGetBooleanv, glGetFloatv, ... type functions."""
def __init__(self):
TypeHandler.__init__(self)
def NeedsDataTransferFunction(self, func):
"""Overriden from TypeHandler."""
return False
def WriteServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
self.WriteServiceHandlerFunctionHeader(func, file)
last_arg = func.GetLastOriginalArg()
# All except shm_id and shm_offset.
all_but_last_args = func.GetCmdArgs()[:-2]
for arg in all_but_last_args:
arg.WriteGetCode(file)
code = """ typedef cmds::%(func_name)s::Result Result;
GLsizei num_values = 0;
GetNumValuesReturnedForGLGet(pname, &num_values);
Result* result = GetSharedMemoryAs<Result*>(
c.%(last_arg_name)s_shm_id, c.%(last_arg_name)s_shm_offset,
Result::ComputeSize(num_values));
%(last_arg_type)s %(last_arg_name)s = result ? result->GetData() : NULL;
"""
file.Write(code % {
'last_arg_type': last_arg.type,
'last_arg_name': last_arg.name,
'func_name': func.name,
})
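    # Illustratively, for GetIntegerv the block above queries how many values
    # the pname returns, then maps 'params' into shared memory with
    # GetSharedMemoryAs<Result*>, which yields NULL on a bad shm id/offset.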
func.WriteHandlerValidation(file)
code = """ // Check that the client initialized the result.
if (result->size != 0) {
return error::kInvalidArguments;
}
"""
shadowed = func.GetInfo('shadowed')
if not shadowed:
file.Write(' LOCAL_COPY_REAL_GL_ERRORS_TO_WRAPPER("%s");\n' % func.name)
file.Write(code)
func.WriteHandlerImplementation(file)
if shadowed:
code = """ result->SetNumResults(num_values);
return error::kNoError;
}
"""
else:
code = """ GLenum error = LOCAL_PEEK_GL_ERROR("%(func_name)s");
if (error == GL_NO_ERROR) {
result->SetNumResults(num_values);
}
return error::kNoError;
}
"""
file.Write(code % {'func_name': func.name})
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
impl_decl = func.GetInfo('impl_decl')
if impl_decl == None or impl_decl == True:
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
all_but_last_args = func.GetOriginalArgs()[:-1]
args = []
has_length_arg = False
for arg in all_but_last_args:
if arg.type == 'GLsync':
args.append('ToGLuint(%s)' % arg.name)
elif arg.name.endswith('size') and arg.type == 'GLsizei':
continue
elif arg.name == 'length':
has_length_arg = True
continue
else:
args.append(arg.name)
arg_string = ", ".join(args)
all_arg_string = (
", ".join([
"%s" % arg.name
for arg in func.GetOriginalArgs() if not arg.IsConstant()]))
self.WriteTraceEvent(func, file)
code = """ if (%(func_name)sHelper(%(all_arg_string)s)) {
return;
}
typedef cmds::%(func_name)s::Result Result;
Result* result = GetResultAs<Result*>();
if (!result) {
return;
}
result->SetNumResults(0);
helper_->%(func_name)s(%(arg_string)s,
GetResultShmId(), GetResultShmOffset());
WaitForCmd();
result->CopyResult(%(last_arg_name)s);
GPU_CLIENT_LOG_CODE_BLOCK({
for (int32_t i = 0; i < result->GetNumResults(); ++i) {
GPU_CLIENT_LOG(" " << i << ": " << result->GetData()[i]);
}
});"""
if has_length_arg:
code += """
if (length) {
*length = result->GetNumResults();
}"""
code += """
CheckGLError();
}
"""
file.Write(code % {
'func_name': func.name,
'arg_string': arg_string,
'all_arg_string': all_arg_string,
'last_arg_name': func.GetLastOriginalArg().name,
})
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Writes the GLES2 Implemention unit test."""
code = """
TEST_F(GLES2ImplementationTest, %(name)s) {
struct Cmds {
cmds::%(name)s cmd;
};
typedef cmds::%(name)s::Result::Type ResultType;
ResultType result = 0;
Cmds expected;
ExpectedMemoryInfo result1 = GetExpectedResultMemory(
sizeof(uint32_t) + sizeof(ResultType));
expected.cmd.Init(%(cmd_args)s, result1.id, result1.offset);
EXPECT_CALL(*command_buffer(), OnFlush())
.WillOnce(SetMemory(result1.ptr, SizedResultHelper<ResultType>(1)))
.RetiresOnSaturation();
gl_->%(name)s(%(args)s, &result);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
EXPECT_EQ(static_cast<ResultType>(1), result);
}
"""
first_cmd_arg = func.GetCmdArgs()[0].GetValidNonCachedClientSideCmdArg(func)
if not first_cmd_arg:
return
first_gl_arg = func.GetOriginalArgs()[0].GetValidNonCachedClientSideArg(
func)
cmd_arg_strings = [first_cmd_arg]
for arg in func.GetCmdArgs()[1:-2]:
cmd_arg_strings.append(arg.GetValidClientSideCmdArg(func))
gl_arg_strings = [first_gl_arg]
for arg in func.GetOriginalArgs()[1:-1]:
gl_arg_strings.append(arg.GetValidClientSideArg(func))
file.Write(code % {
'name': func.name,
'args': ", ".join(gl_arg_strings),
'cmd_args': ", ".join(cmd_arg_strings),
})
def WriteServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, GetError())
.WillOnce(Return(GL_NO_ERROR))
.WillOnce(Return(GL_NO_ERROR))
.RetiresOnSaturation();
SpecializedSetup<cmds::%(name)s, 0>(true);
typedef cmds::%(name)s::Result Result;
Result* result = static_cast<Result*>(shared_memory_address_);
EXPECT_CALL(*gl_, %(gl_func_name)s(%(local_gl_args)s));
result->size = 0;
cmds::%(name)s cmd;
cmd.Init(%(cmd_args)s);"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);"""
valid_test += """
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(decoder_->GetGLES2Util()->GLGetNumValuesReturned(
%(valid_pname)s),
result->GetNumResults());
EXPECT_EQ(GL_NO_ERROR, GetGLError());"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(false);
EXPECT_EQ(error::kUnknownCommand, ExecuteCmd(cmd));"""
valid_test += """
}
"""
gl_arg_strings = []
cmd_arg_strings = []
valid_pname = ''
for arg in func.GetOriginalArgs()[:-1]:
if arg.name == 'length':
gl_arg_value = 'nullptr'
elif arg.name.endswith('size'):
gl_arg_value = ("decoder_->GetGLES2Util()->GLGetNumValuesReturned(%s)" %
valid_pname)
elif arg.type == 'GLsync':
gl_arg_value = 'reinterpret_cast<GLsync>(kServiceSyncId)'
else:
gl_arg_value = arg.GetValidGLArg(func)
gl_arg_strings.append(gl_arg_value)
if arg.name == 'pname':
valid_pname = gl_arg_value
if arg.name.endswith('size') or arg.name == 'length':
continue
if arg.type == 'GLsync':
arg_value = 'client_sync_id_'
else:
arg_value = arg.GetValidArg(func)
cmd_arg_strings.append(arg_value)
if func.GetInfo('gl_test_func') == 'glGetIntegerv':
gl_arg_strings.append("_")
else:
gl_arg_strings.append("result->GetData()")
cmd_arg_strings.append("shared_memory_id_")
cmd_arg_strings.append("shared_memory_offset_")
self.WriteValidUnitTest(func, file, valid_test, {
'local_gl_args': ", ".join(gl_arg_strings),
'cmd_args': ", ".join(cmd_arg_strings),
'valid_pname': valid_pname,
}, *extras)
if not func.IsUnsafe():
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s::Result* result =
static_cast<cmds::%(name)s::Result*>(shared_memory_address_);
result->size = 0;
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));
EXPECT_EQ(0u, result->size);%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, file, invalid_test, *extras)
class ArrayArgTypeHandler(TypeHandler):
"""Base class for type handlers that handle args that are arrays"""
def __init__(self):
TypeHandler.__init__(self)
def GetArrayType(self, func):
"""Returns the type of the element in the element array being PUT to."""
for arg in func.GetOriginalArgs():
if arg.IsPointer():
element_type = arg.GetPointedType()
return element_type
# Special case: array type handler is used for a function that is forwarded
    # to the actual array type implementation.
element_type = func.GetOriginalArgs()[-1].type
assert all(arg.type == element_type \
for arg in func.GetOriginalArgs()[-self.GetArrayCount(func):])
return element_type
def GetArrayCount(self, func):
"""Returns the count of the elements in the array being PUT to."""
return func.GetInfo('count')
class PUTHandler(ArrayArgTypeHandler):
"""Handler for glTexParameter_v, glVertexAttrib_v functions."""
def __init__(self):
ArrayArgTypeHandler.__init__(self)
def WriteServiceUnitTest(self, func, file, *extras):
"""Writes the service unit test for a command."""
expected_call = "EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));"
if func.GetInfo("first_element_only"):
gl_arg_strings = [
arg.GetValidGLArg(func) for arg in func.GetOriginalArgs()
]
gl_arg_strings[-1] = "*" + gl_arg_strings[-1]
expected_call = ("EXPECT_CALL(*gl_, %%(gl_func_name)s(%s));" %
", ".join(gl_arg_strings))
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
GetSharedMemoryAs<%(data_type)s*>()[0] = %(data_value)s;
%(expected_call)s
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
extra = {
'data_type': self.GetArrayType(func),
'data_value': func.GetInfo('data_value') or '0',
'expected_call': expected_call,
}
self.WriteValidUnitTest(func, file, valid_test, extra, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
GetSharedMemoryAs<%(data_type)s*>()[0] = %(data_value)s;
EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, file, invalid_test, extra, *extras)
def WriteImmediateServiceUnitTest(self, func, file, *extras):
"""Writes the service unit test for a command."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();
SpecializedSetup<cmds::%(name)s, 0>(true);
%(data_type)s temp[%(data_count)s] = { %(data_value)s, };
cmd.Init(%(gl_args)s, &temp[0]);
EXPECT_CALL(
*gl_,
%(gl_func_name)s(%(gl_args)s, %(data_ref)sreinterpret_cast<
%(data_type)s*>(ImmediateDataAddress(&cmd))));"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);"""
valid_test += """
EXPECT_EQ(error::kNoError,
ExecuteImmediateCmd(cmd, sizeof(temp)));
EXPECT_EQ(GL_NO_ERROR, GetGLError());"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(false);
EXPECT_EQ(error::kUnknownCommand,
ExecuteImmediateCmd(cmd, sizeof(temp)));"""
valid_test += """
}
"""
gl_arg_strings = [
arg.GetValidGLArg(func) for arg in func.GetOriginalArgs()[0:-1]
]
gl_any_strings = ["_"] * len(gl_arg_strings)
extra = {
'data_ref': ("*" if func.GetInfo('first_element_only') else ""),
'data_type': self.GetArrayType(func),
'data_count': self.GetArrayCount(func),
'data_value': func.GetInfo('data_value') or '0',
'gl_args': ", ".join(gl_arg_strings),
'gl_any_args': ", ".join(gl_any_strings),
}
self.WriteValidUnitTest(func, file, valid_test, extra, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();"""
if func.IsUnsafe():
invalid_test += """
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_any_args)s, _)).Times(1);
"""
else:
invalid_test += """
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_any_args)s, _)).Times(0);
"""
invalid_test += """
SpecializedSetup<cmds::%(name)s, 0>(false);
%(data_type)s temp[%(data_count)s] = { %(data_value)s, };
cmd.Init(%(all_but_last_args)s, &temp[0]);"""
if func.IsUnsafe():
invalid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);
EXPECT_EQ(error::%(parse_result)s,
ExecuteImmediateCmd(cmd, sizeof(temp)));
decoder_->set_unsafe_es3_apis_enabled(false);
}
"""
else:
invalid_test += """
EXPECT_EQ(error::%(parse_result)s,
ExecuteImmediateCmd(cmd, sizeof(temp)));
%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, file, invalid_test, extra, *extras)
def WriteGetDataSizeCode(self, func, file):
"""Overrriden from TypeHandler."""
code = """ uint32_t data_size;
if (!ComputeDataSize(1, sizeof(%s), %d, &data_size)) {
return error::kOutOfBounds;
}
"""
file.Write(code % (self.GetArrayType(func), self.GetArrayCount(func)))
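    # Illustrative expansion of the template above for a 4-element GLfloat PUT:
    #   uint32_t data_size;
    #   if (!ComputeDataSize(1, sizeof(GLfloat), 4, &data_size)) {
    #     return error::kOutOfBounds;
    #   }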
if func.IsImmediate():
file.Write(" if (data_size > immediate_data_size) {\n")
file.Write(" return error::kOutOfBounds;\n")
file.Write(" }\n")
def __NeedsToCalcDataCount(self, func):
use_count_func = func.GetInfo('use_count_func')
return use_count_func != None and use_count_func != False
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
impl_func = func.GetInfo('impl_func')
if (impl_func != None and impl_func != True):
      return
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
if self.__NeedsToCalcDataCount(func):
file.Write(" size_t count = GLES2Util::Calc%sDataCount(%s);\n" %
(func.name, func.GetOriginalArgs()[0].name))
file.Write(" DCHECK_LE(count, %du);\n" % self.GetArrayCount(func))
else:
file.Write(" size_t count = %d;" % self.GetArrayCount(func))
file.Write(" for (size_t ii = 0; ii < count; ++ii)\n")
file.Write(' GPU_CLIENT_LOG("value[" << ii << "]: " << %s[ii]);\n' %
func.GetLastOriginalArg().name)
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
file.Write(" helper_->%sImmediate(%s);\n" %
(func.name, func.MakeOriginalArgString("")))
file.Write(" CheckGLError();\n")
file.Write("}\n")
file.Write("\n")
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Writes the GLES2 Implemention unit test."""
client_test = func.GetInfo('client_test')
if (client_test != None and client_test != True):
      return
code = """
TEST_F(GLES2ImplementationTest, %(name)s) {
%(type)s data[%(count)d] = {0};
struct Cmds {
cmds::%(name)sImmediate cmd;
%(type)s data[%(count)d];
};
for (int jj = 0; jj < %(count)d; ++jj) {
data[jj] = static_cast<%(type)s>(jj);
}
Cmds expected;
expected.cmd.Init(%(cmd_args)s, &data[0]);
gl_->%(name)s(%(args)s, &data[0]);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}
"""
cmd_arg_strings = [
arg.GetValidClientSideCmdArg(func) for arg in func.GetCmdArgs()[0:-2]
]
gl_arg_strings = [
arg.GetValidClientSideArg(func) for arg in func.GetOriginalArgs()[0:-1]
]
file.Write(code % {
'name': func.name,
'type': self.GetArrayType(func),
'count': self.GetArrayCount(func),
'args': ", ".join(gl_arg_strings),
'cmd_args': ", ".join(cmd_arg_strings),
})
def WriteImmediateCmdComputeSize(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" static uint32_t ComputeDataSize() {\n")
file.Write(" return static_cast<uint32_t>(\n")
file.Write(" sizeof(%s) * %d);\n" %
(self.GetArrayType(func), self.GetArrayCount(func)))
file.Write(" }\n")
file.Write("\n")
if self.__NeedsToCalcDataCount(func):
file.Write(" static uint32_t ComputeEffectiveDataSize(%s %s) {\n" %
(func.GetOriginalArgs()[0].type,
func.GetOriginalArgs()[0].name))
file.Write(" return static_cast<uint32_t>(\n")
file.Write(" sizeof(%s) * GLES2Util::Calc%sDataCount(%s));\n" %
(self.GetArrayType(func), func.original_name,
func.GetOriginalArgs()[0].name))
file.Write(" }\n")
file.Write("\n")
file.Write(" static uint32_t ComputeSize() {\n")
file.Write(" return static_cast<uint32_t>(\n")
file.Write(
" sizeof(ValueType) + ComputeDataSize());\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSetHeader(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" void SetHeader() {\n")
file.Write(
" header.SetCmdByTotalSize<ValueType>(ComputeSize());\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdInit(self, func, file):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
file.Write(" void Init(%s, %s _%s) {\n" %
(func.MakeTypedCmdArgString("_"),
last_arg.type, last_arg.name))
file.Write(" SetHeader();\n")
args = func.GetCmdArgs()
for arg in args:
file.Write(" %s = _%s;\n" % (arg.name, arg.name))
file.Write(" memcpy(ImmediateDataAddress(this),\n")
if self.__NeedsToCalcDataCount(func):
file.Write(" _%s, ComputeEffectiveDataSize(%s));" %
(last_arg.name, func.GetOriginalArgs()[0].name))
file.Write("""
DCHECK_GE(ComputeDataSize(), ComputeEffectiveDataSize(%(arg)s));
char* pointer = reinterpret_cast<char*>(ImmediateDataAddress(this)) +
ComputeEffectiveDataSize(%(arg)s);
memset(pointer, 0, ComputeDataSize() - ComputeEffectiveDataSize(%(arg)s));
""" % { 'arg': func.GetOriginalArgs()[0].name, })
else:
file.Write(" _%s, ComputeDataSize());\n" % last_arg.name)
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSet(self, func, file):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
copy_args = func.MakeCmdArgString("_", False)
file.Write(" void* Set(void* cmd%s, %s _%s) {\n" %
(func.MakeTypedCmdArgString("_", True),
last_arg.type, last_arg.name))
file.Write(" static_cast<ValueType*>(cmd)->Init(%s, _%s);\n" %
(copy_args, last_arg.name))
file.Write(" const uint32_t size = ComputeSize();\n")
file.Write(" return NextImmediateCmdAddressTotalSize<ValueType>("
"cmd, size);\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdHelper(self, func, file):
"""Overrriden from TypeHandler."""
code = """ void %(name)s(%(typed_args)s) {
const uint32_t size = gles2::cmds::%(name)s::ComputeSize();
gles2::cmds::%(name)s* c =
GetImmediateCmdSpaceTotalSize<gles2::cmds::%(name)s>(size);
if (c) {
c->Init(%(args)s);
}
}
"""
file.Write(code % {
"name": func.name,
"typed_args": func.MakeTypedOriginalArgString(""),
"args": func.MakeOriginalArgString(""),
})
def WriteImmediateFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("TEST_F(GLES2FormatTest, %s) {\n" % func.name)
file.Write(" const int kSomeBaseValueToTestWith = 51;\n")
file.Write(" static %s data[] = {\n" % self.GetArrayType(func))
for v in range(0, self.GetArrayCount(func)):
file.Write(" static_cast<%s>(kSomeBaseValueToTestWith + %d),\n" %
(self.GetArrayType(func), v))
file.Write(" };\n")
file.Write(" cmds::%s& cmd = *GetBufferAs<cmds::%s>();\n" %
(func.name, func.name))
file.Write(" void* next_cmd = cmd.Set(\n")
file.Write(" &cmd")
args = func.GetCmdArgs()
for value, arg in enumerate(args):
file.Write(",\n static_cast<%s>(%d)" % (arg.type, value + 11))
file.Write(",\n data);\n")
args = func.GetCmdArgs()
file.Write(" EXPECT_EQ(static_cast<uint32_t>(cmds::%s::kCmdId),\n"
% func.name)
file.Write(" cmd.header.command);\n")
file.Write(" EXPECT_EQ(sizeof(cmd) +\n")
file.Write(" RoundSizeToMultipleOfEntries(sizeof(data)),\n")
file.Write(" cmd.header.size * 4u);\n")
for value, arg in enumerate(args):
file.Write(" EXPECT_EQ(static_cast<%s>(%d), cmd.%s);\n" %
(arg.type, value + 11, arg.name))
file.Write(" CheckBytesWrittenMatchesExpectedSize(\n")
file.Write(" next_cmd, sizeof(cmd) +\n")
file.Write(" RoundSizeToMultipleOfEntries(sizeof(data)));\n")
file.Write(" // TODO(gman): Check that data was inserted;\n")
file.Write("}\n")
file.Write("\n")
class PUTnHandler(ArrayArgTypeHandler):
"""Handler for PUTn 'glUniform__v' type functions."""
def __init__(self):
ArrayArgTypeHandler.__init__(self)
def WriteServiceUnitTest(self, func, file, *extras):
"""Overridden from TypeHandler."""
ArrayArgTypeHandler.WriteServiceUnitTest(self, func, file, *extras)
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgsCountTooLarge) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
gl_arg_strings = []
arg_strings = []
for count, arg in enumerate(func.GetOriginalArgs()):
# hardcoded to match unit tests.
if count == 0:
# the location of the second element of the 2nd uniform.
# defined in GLES2DecoderBase::SetupShaderForUniform
gl_arg_strings.append("3")
arg_strings.append("ProgramManager::MakeFakeLocation(1, 1)")
elif count == 1:
# the number of elements that gl will be called with.
gl_arg_strings.append("3")
# the number of elements requested in the command.
arg_strings.append("5")
else:
gl_arg_strings.append(arg.GetValidGLArg(func))
if not arg.IsConstant():
arg_strings.append(arg.GetValidArg(func))
extra = {
'gl_args': ", ".join(gl_arg_strings),
'args': ", ".join(arg_strings),
}
self.WriteValidUnitTest(func, file, valid_test, extra, *extras)
def WriteImmediateServiceUnitTest(self, func, file, *extras):
"""Overridden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();
EXPECT_CALL(
*gl_,
%(gl_func_name)s(%(gl_args)s,
reinterpret_cast<%(data_type)s*>(ImmediateDataAddress(&cmd))));
SpecializedSetup<cmds::%(name)s, 0>(true);
%(data_type)s temp[%(data_count)s * 2] = { 0, };
cmd.Init(%(args)s, &temp[0]);"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);"""
valid_test += """
EXPECT_EQ(error::kNoError,
ExecuteImmediateCmd(cmd, sizeof(temp)));
EXPECT_EQ(GL_NO_ERROR, GetGLError());"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(false);
EXPECT_EQ(error::kUnknownCommand,
ExecuteImmediateCmd(cmd, sizeof(temp)));"""
valid_test += """
}
"""
gl_arg_strings = []
gl_any_strings = []
arg_strings = []
for arg in func.GetOriginalArgs()[0:-1]:
gl_arg_strings.append(arg.GetValidGLArg(func))
gl_any_strings.append("_")
if not arg.IsConstant():
arg_strings.append(arg.GetValidArg(func))
extra = {
'data_type': self.GetArrayType(func),
'data_count': self.GetArrayCount(func),
'args': ", ".join(arg_strings),
'gl_args': ", ".join(gl_arg_strings),
'gl_any_args': ", ".join(gl_any_strings),
}
self.WriteValidUnitTest(func, file, valid_test, extra, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_any_args)s, _)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
%(data_type)s temp[%(data_count)s * 2] = { 0, };
cmd.Init(%(all_but_last_args)s, &temp[0]);
EXPECT_EQ(error::%(parse_result)s,
ExecuteImmediateCmd(cmd, sizeof(temp)));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, file, invalid_test, extra, *extras)
def WriteGetDataSizeCode(self, func, file):
"""Overrriden from TypeHandler."""
code = """ uint32_t data_size;
if (!ComputeDataSize(count, sizeof(%s), %d, &data_size)) {
return error::kOutOfBounds;
}
"""
file.Write(code % (self.GetArrayType(func), self.GetArrayCount(func)))
if func.IsImmediate():
file.Write(" if (data_size > immediate_data_size) {\n")
file.Write(" return error::kOutOfBounds;\n")
file.Write(" }\n")
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
last_pointer_name = func.GetLastOriginalPointerArg().name
file.Write(""" GPU_CLIENT_LOG_CODE_BLOCK({
for (GLsizei i = 0; i < count; ++i) {
""")
values_str = ' << ", " << '.join(
["%s[%d + i * %d]" % (
last_pointer_name, ndx, self.GetArrayCount(func)) for ndx in range(
0, self.GetArrayCount(func))])
file.Write(' GPU_CLIENT_LOG(" " << i << ": " << %s);\n' % values_str)
file.Write(" }\n });\n")
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
file.Write(" helper_->%sImmediate(%s);\n" %
(func.name, func.MakeInitString("")))
file.Write(" CheckGLError();\n")
file.Write("}\n")
file.Write("\n")
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Writes the GLES2 Implemention unit test."""
code = """
TEST_F(GLES2ImplementationTest, %(name)s) {
%(type)s data[%(count_param)d][%(count)d] = {{0}};
struct Cmds {
cmds::%(name)sImmediate cmd;
%(type)s data[%(count_param)d][%(count)d];
};
Cmds expected;
for (int ii = 0; ii < %(count_param)d; ++ii) {
for (int jj = 0; jj < %(count)d; ++jj) {
data[ii][jj] = static_cast<%(type)s>(ii * %(count)d + jj);
}
}
expected.cmd.Init(%(cmd_args)s);
gl_->%(name)s(%(args)s);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}
"""
cmd_arg_strings = []
for arg in func.GetCmdArgs():
if arg.name.endswith("_shm_id"):
cmd_arg_strings.append("&data[0][0]")
elif arg.name.endswith("_shm_offset"):
continue
else:
cmd_arg_strings.append(arg.GetValidClientSideCmdArg(func))
gl_arg_strings = []
count_param = 0
for arg in func.GetOriginalArgs():
if arg.IsPointer():
valid_value = "&data[0][0]"
else:
valid_value = arg.GetValidClientSideArg(func)
gl_arg_strings.append(valid_value)
if arg.name == "count":
count_param = int(valid_value)
file.Write(code % {
'name': func.name,
'type': self.GetArrayType(func),
'count': self.GetArrayCount(func),
'args': ", ".join(gl_arg_strings),
'cmd_args': ", ".join(cmd_arg_strings),
'count_param': count_param,
})
# Test constants for invalid values, as they are not tested by the
# service.
constants = [
arg for arg in func.GetOriginalArgs()[0:-1] if arg.IsConstant()
]
if not constants:
return
code = """
TEST_F(GLES2ImplementationTest, %(name)sInvalidConstantArg%(invalid_index)d) {
%(type)s data[%(count_param)d][%(count)d] = {{0}};
for (int ii = 0; ii < %(count_param)d; ++ii) {
for (int jj = 0; jj < %(count)d; ++jj) {
data[ii][jj] = static_cast<%(type)s>(ii * %(count)d + jj);
}
}
gl_->%(name)s(%(args)s);
EXPECT_TRUE(NoCommandsWritten());
EXPECT_EQ(%(gl_error)s, CheckError());
}
"""
for invalid_arg in constants:
gl_arg_strings = []
invalid = invalid_arg.GetInvalidArg(func)
for arg in func.GetOriginalArgs():
if arg is invalid_arg:
gl_arg_strings.append(invalid[0])
elif arg.IsPointer():
gl_arg_strings.append("&data[0][0]")
else:
valid_value = arg.GetValidClientSideArg(func)
gl_arg_strings.append(valid_value)
if arg.name == "count":
count_param = int(valid_value)
file.Write(code % {
'name': func.name,
'invalid_index': func.GetOriginalArgs().index(invalid_arg),
'type': self.GetArrayType(func),
'count': self.GetArrayCount(func),
'args': ", ".join(gl_arg_strings),
'gl_error': invalid[2],
'count_param': count_param,
})
def WriteImmediateCmdComputeSize(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" static uint32_t ComputeDataSize(GLsizei count) {\n")
file.Write(" return static_cast<uint32_t>(\n")
file.Write(" sizeof(%s) * %d * count); // NOLINT\n" %
(self.GetArrayType(func), self.GetArrayCount(func)))
file.Write(" }\n")
file.Write("\n")
file.Write(" static uint32_t ComputeSize(GLsizei count) {\n")
file.Write(" return static_cast<uint32_t>(\n")
file.Write(
" sizeof(ValueType) + ComputeDataSize(count)); // NOLINT\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSetHeader(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" void SetHeader(GLsizei count) {\n")
file.Write(
" header.SetCmdByTotalSize<ValueType>(ComputeSize(count));\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdInit(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" void Init(%s) {\n" %
func.MakeTypedInitString("_"))
file.Write(" SetHeader(_count);\n")
args = func.GetCmdArgs()
for arg in args:
file.Write(" %s = _%s;\n" % (arg.name, arg.name))
file.Write(" memcpy(ImmediateDataAddress(this),\n")
pointer_arg = func.GetLastOriginalPointerArg()
file.Write(" _%s, ComputeDataSize(_count));\n" % pointer_arg.name)
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdSet(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" void* Set(void* cmd%s) {\n" %
func.MakeTypedInitString("_", True))
file.Write(" static_cast<ValueType*>(cmd)->Init(%s);\n" %
func.MakeInitString("_"))
file.Write(" const uint32_t size = ComputeSize(_count);\n")
file.Write(" return NextImmediateCmdAddressTotalSize<ValueType>("
"cmd, size);\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdHelper(self, func, file):
"""Overrriden from TypeHandler."""
code = """ void %(name)s(%(typed_args)s) {
const uint32_t size = gles2::cmds::%(name)s::ComputeSize(count);
gles2::cmds::%(name)s* c =
GetImmediateCmdSpaceTotalSize<gles2::cmds::%(name)s>(size);
if (c) {
c->Init(%(args)s);
}
}
"""
file.Write(code % {
"name": func.name,
"typed_args": func.MakeTypedInitString(""),
"args": func.MakeInitString("")
})
def WriteImmediateFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
args = func.GetOriginalArgs()
count_param = 0
for arg in args:
if arg.name == "count":
count_param = int(arg.GetValidClientSideCmdArg(func))
file.Write("TEST_F(GLES2FormatTest, %s) {\n" % func.name)
file.Write(" const int kSomeBaseValueToTestWith = 51;\n")
file.Write(" static %s data[] = {\n" % self.GetArrayType(func))
for v in range(0, self.GetArrayCount(func) * count_param):
file.Write(" static_cast<%s>(kSomeBaseValueToTestWith + %d),\n" %
(self.GetArrayType(func), v))
file.Write(" };\n")
file.Write(" cmds::%s& cmd = *GetBufferAs<cmds::%s>();\n" %
(func.name, func.name))
file.Write(" const GLsizei kNumElements = %d;\n" % count_param)
file.Write(" const size_t kExpectedCmdSize =\n")
file.Write(" sizeof(cmd) + kNumElements * sizeof(%s) * %d;\n" %
(self.GetArrayType(func), self.GetArrayCount(func)))
file.Write(" void* next_cmd = cmd.Set(\n")
file.Write(" &cmd")
for value, arg in enumerate(args):
if arg.IsPointer():
file.Write(",\n data")
elif arg.IsConstant():
continue
else:
file.Write(",\n static_cast<%s>(%d)" % (arg.type, value + 1))
file.Write(");\n")
file.Write(" EXPECT_EQ(static_cast<uint32_t>(cmds::%s::kCmdId),\n" %
func.name)
file.Write(" cmd.header.command);\n")
file.Write(" EXPECT_EQ(kExpectedCmdSize, cmd.header.size * 4u);\n")
for value, arg in enumerate(args):
if arg.IsPointer() or arg.IsConstant():
continue
file.Write(" EXPECT_EQ(static_cast<%s>(%d), cmd.%s);\n" %
(arg.type, value + 1, arg.name))
file.Write(" CheckBytesWrittenMatchesExpectedSize(\n")
file.Write(" next_cmd, sizeof(cmd) +\n")
file.Write(" RoundSizeToMultipleOfEntries(sizeof(data)));\n")
file.Write(" // TODO(gman): Check that data was inserted;\n")
file.Write("}\n")
file.Write("\n")
class PUTSTRHandler(ArrayArgTypeHandler):
"""Handler for functions that pass a string array."""
def __init__(self):
ArrayArgTypeHandler.__init__(self)
def __GetDataArg(self, func):
"""Return the argument that points to the 2D char arrays"""
for arg in func.GetOriginalArgs():
if arg.IsPointer2D():
return arg
return None
def __GetLengthArg(self, func):
"""Return the argument that holds length for each char array"""
for arg in func.GetOriginalArgs():
if arg.IsPointer() and not arg.IsPointer2D():
return arg
return None
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
data_arg = self.__GetDataArg(func)
length_arg = self.__GetLengthArg(func)
log_code_block = """ GPU_CLIENT_LOG_CODE_BLOCK({
for (GLsizei ii = 0; ii < count; ++ii) {
if (%(data)s[ii]) {"""
if length_arg == None:
log_code_block += """
GPU_CLIENT_LOG(" " << ii << ": ---\\n" << %(data)s[ii] << "\\n---");"""
else:
log_code_block += """
if (%(length)s && %(length)s[ii] >= 0) {
const std::string my_str(%(data)s[ii], %(length)s[ii]);
GPU_CLIENT_LOG(" " << ii << ": ---\\n" << my_str << "\\n---");
} else {
GPU_CLIENT_LOG(" " << ii << ": ---\\n" << %(data)s[ii] << "\\n---");
}"""
log_code_block += """
} else {
GPU_CLIENT_LOG(" " << ii << ": NULL");
}
}
});
"""
file.Write(log_code_block % {
'data': data_arg.name,
'length': length_arg.name if not length_arg == None else ''
})
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(file, func)
bucket_args = []
for arg in func.GetOriginalArgs():
if arg.name == 'count' or arg == self.__GetLengthArg(func):
continue
if arg == self.__GetDataArg(func):
bucket_args.append('kResultBucketId')
else:
bucket_args.append(arg.name)
code_block = """
if (!PackStringsToBucket(count, %(data)s, %(length)s, "gl%(func_name)s")) {
return;
}
helper_->%(func_name)sBucket(%(bucket_args)s);
helper_->SetBucketSize(kResultBucketId, 0);
CheckGLError();
}
"""
file.Write(code_block % {
'data': data_arg.name,
'length': length_arg.name if not length_arg == None else 'NULL',
'func_name': func.name,
'bucket_args': ', '.join(bucket_args),
})
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
code = """
TEST_F(GLES2ImplementationTest, %(name)s) {
const uint32 kBucketId = GLES2Implementation::kResultBucketId;
const char* kString1 = "happy";
const char* kString2 = "ending";
const size_t kString1Size = ::strlen(kString1) + 1;
const size_t kString2Size = ::strlen(kString2) + 1;
const size_t kHeaderSize = sizeof(GLint) * 3;
const size_t kSourceSize = kHeaderSize + kString1Size + kString2Size;
const size_t kPaddedHeaderSize =
transfer_buffer_->RoundToAlignment(kHeaderSize);
const size_t kPaddedString1Size =
transfer_buffer_->RoundToAlignment(kString1Size);
const size_t kPaddedString2Size =
transfer_buffer_->RoundToAlignment(kString2Size);
struct Cmds {
cmd::SetBucketSize set_bucket_size;
cmd::SetBucketData set_bucket_header;
cmd::SetToken set_token1;
cmd::SetBucketData set_bucket_data1;
cmd::SetToken set_token2;
cmd::SetBucketData set_bucket_data2;
cmd::SetToken set_token3;
cmds::%(name)sBucket cmd_bucket;
cmd::SetBucketSize clear_bucket_size;
};
ExpectedMemoryInfo mem0 = GetExpectedMemory(kPaddedHeaderSize);
ExpectedMemoryInfo mem1 = GetExpectedMemory(kPaddedString1Size);
ExpectedMemoryInfo mem2 = GetExpectedMemory(kPaddedString2Size);
Cmds expected;
expected.set_bucket_size.Init(kBucketId, kSourceSize);
expected.set_bucket_header.Init(
kBucketId, 0, kHeaderSize, mem0.id, mem0.offset);
expected.set_token1.Init(GetNextToken());
expected.set_bucket_data1.Init(
kBucketId, kHeaderSize, kString1Size, mem1.id, mem1.offset);
expected.set_token2.Init(GetNextToken());
expected.set_bucket_data2.Init(
kBucketId, kHeaderSize + kString1Size, kString2Size, mem2.id,
mem2.offset);
expected.set_token3.Init(GetNextToken());
expected.cmd_bucket.Init(%(bucket_args)s);
expected.clear_bucket_size.Init(kBucketId, 0);
const char* kStrings[] = { kString1, kString2 };
gl_->%(name)s(%(gl_args)s);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}
"""
gl_args = []
bucket_args = []
for arg in func.GetOriginalArgs():
if arg == self.__GetDataArg(func):
gl_args.append('kStrings')
bucket_args.append('kBucketId')
elif arg == self.__GetLengthArg(func):
gl_args.append('NULL')
elif arg.name == 'count':
gl_args.append('2')
else:
gl_args.append(arg.GetValidClientSideArg(func))
bucket_args.append(arg.GetValidClientSideArg(func))
file.Write(code % {
'name': func.name,
'gl_args': ", ".join(gl_args),
'bucket_args': ", ".join(bucket_args),
})
if self.__GetLengthArg(func) == None:
return
code = """
TEST_F(GLES2ImplementationTest, %(name)sWithLength) {
const uint32 kBucketId = GLES2Implementation::kResultBucketId;
const char* kString = "foobar******";
const size_t kStringSize = 6; // We only need "foobar".
const size_t kHeaderSize = sizeof(GLint) * 2;
const size_t kSourceSize = kHeaderSize + kStringSize + 1;
const size_t kPaddedHeaderSize =
transfer_buffer_->RoundToAlignment(kHeaderSize);
const size_t kPaddedStringSize =
transfer_buffer_->RoundToAlignment(kStringSize + 1);
struct Cmds {
cmd::SetBucketSize set_bucket_size;
cmd::SetBucketData set_bucket_header;
cmd::SetToken set_token1;
cmd::SetBucketData set_bucket_data;
cmd::SetToken set_token2;
cmds::ShaderSourceBucket shader_source_bucket;
cmd::SetBucketSize clear_bucket_size;
};
ExpectedMemoryInfo mem0 = GetExpectedMemory(kPaddedHeaderSize);
ExpectedMemoryInfo mem1 = GetExpectedMemory(kPaddedStringSize);
Cmds expected;
expected.set_bucket_size.Init(kBucketId, kSourceSize);
expected.set_bucket_header.Init(
kBucketId, 0, kHeaderSize, mem0.id, mem0.offset);
expected.set_token1.Init(GetNextToken());
expected.set_bucket_data.Init(
kBucketId, kHeaderSize, kStringSize + 1, mem1.id, mem1.offset);
expected.set_token2.Init(GetNextToken());
expected.shader_source_bucket.Init(%(bucket_args)s);
expected.clear_bucket_size.Init(kBucketId, 0);
const char* kStrings[] = { kString };
const GLint kLength[] = { kStringSize };
gl_->%(name)s(%(gl_args)s);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}
"""
gl_args = []
for arg in func.GetOriginalArgs():
if arg == self.__GetDataArg(func):
gl_args.append('kStrings')
elif arg == self.__GetLengthArg(func):
gl_args.append('kLength')
elif arg.name == 'count':
gl_args.append('1')
else:
gl_args.append(arg.GetValidClientSideArg(func))
file.Write(code % {
'name': func.name,
'gl_args': ", ".join(gl_args),
'bucket_args': ", ".join(bucket_args),
})
def WriteBucketServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
cmd_args = []
cmd_args_with_invalid_id = []
gl_args = []
for index, arg in enumerate(func.GetOriginalArgs()):
if arg == self.__GetLengthArg(func):
gl_args.append('_')
elif arg.name == 'count':
gl_args.append('1')
elif arg == self.__GetDataArg(func):
cmd_args.append('kBucketId')
cmd_args_with_invalid_id.append('kBucketId')
gl_args.append('_')
elif index == 0: # Resource ID arg
cmd_args.append(arg.GetValidArg(func))
cmd_args_with_invalid_id.append('kInvalidClientId')
gl_args.append(arg.GetValidGLArg(func))
else:
cmd_args.append(arg.GetValidArg(func))
cmd_args_with_invalid_id.append(arg.GetValidArg(func))
gl_args.append(arg.GetValidGLArg(func))
test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
const uint32 kBucketId = 123;
const char kSource0[] = "hello";
const char* kSource[] = { kSource0 };
const char kValidStrEnd = 0;
SetBucketAsCStrings(kBucketId, 1, kSource, 1, kValidStrEnd);
cmds::%(name)s cmd;
cmd.Init(%(cmd_args)s);
decoder_->set_unsafe_es3_apis_enabled(true);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));"""
if func.IsUnsafe():
test += """
decoder_->set_unsafe_es3_apis_enabled(false);
EXPECT_EQ(error::kUnknownCommand, ExecuteCmd(cmd));
"""
test += """
}
"""
self.WriteValidUnitTest(func, file, test, {
'cmd_args': ", ".join(cmd_args),
'gl_args': ", ".join(gl_args),
}, *extras)
test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs) {
const uint32 kBucketId = 123;
const char kSource0[] = "hello";
const char* kSource[] = { kSource0 };
const char kValidStrEnd = 0;
decoder_->set_unsafe_es3_apis_enabled(true);
cmds::%(name)s cmd;
// Test no bucket.
cmd.Init(%(cmd_args)s);
EXPECT_NE(error::kNoError, ExecuteCmd(cmd));
// Test invalid client.
SetBucketAsCStrings(kBucketId, 1, kSource, 1, kValidStrEnd);
cmd.Init(%(cmd_args_with_invalid_id)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_INVALID_VALUE, GetGLError());
}
"""
self.WriteValidUnitTest(func, file, test, {
'cmd_args': ", ".join(cmd_args),
'cmd_args_with_invalid_id': ", ".join(cmd_args_with_invalid_id),
}, *extras)
test = """
TEST_P(%(test_name)s, %(name)sInvalidHeader) {
const uint32 kBucketId = 123;
const char kSource0[] = "hello";
const char* kSource[] = { kSource0 };
const char kValidStrEnd = 0;
const GLsizei kCount = static_cast<GLsizei>(arraysize(kSource));
const GLsizei kTests[] = {
kCount + 1,
0,
std::numeric_limits<GLsizei>::max(),
-1,
};
decoder_->set_unsafe_es3_apis_enabled(true);
for (size_t ii = 0; ii < arraysize(kTests); ++ii) {
SetBucketAsCStrings(kBucketId, 1, kSource, kTests[ii], kValidStrEnd);
cmds::%(name)s cmd;
cmd.Init(%(cmd_args)s);
EXPECT_EQ(error::kInvalidArguments, ExecuteCmd(cmd));
}
}
"""
self.WriteValidUnitTest(func, file, test, {
'cmd_args': ", ".join(cmd_args),
}, *extras)
test = """
TEST_P(%(test_name)s, %(name)sInvalidStringEnding) {
const uint32 kBucketId = 123;
const char kSource0[] = "hello";
const char* kSource[] = { kSource0 };
const char kInvalidStrEnd = '*';
SetBucketAsCStrings(kBucketId, 1, kSource, 1, kInvalidStrEnd);
cmds::%(name)s cmd;
cmd.Init(%(cmd_args)s);
decoder_->set_unsafe_es3_apis_enabled(true);
EXPECT_EQ(error::kInvalidArguments, ExecuteCmd(cmd));
}
"""
self.WriteValidUnitTest(func, file, test, {
'cmd_args': ", ".join(cmd_args),
}, *extras)
class PUTXnHandler(ArrayArgTypeHandler):
"""Handler for glUniform?f functions."""
def __init__(self):
ArrayArgTypeHandler.__init__(self)
def WriteHandlerImplementation(self, func, file):
"""Overrriden from TypeHandler."""
code = """ %(type)s temp[%(count)s] = { %(values)s};"""
if func.IsUnsafe():
code += """
gl%(name)sv(%(location)s, 1, &temp[0]);
"""
else:
code += """
Do%(name)sv(%(location)s, 1, &temp[0]);
"""
values = ""
args = func.GetOriginalArgs()
count = int(self.GetArrayCount(func))
num_args = len(args)
for ii in range(count):
values += "%s, " % args[len(args) - count + ii].name
file.Write(code % {
'name': func.name,
'count': self.GetArrayCount(func),
'type': self.GetArrayType(func),
'location': args[0].name,
'args': func.MakeOriginalArgString(""),
'values': values,
})
def WriteServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(name)sv(%(local_args)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);"""
valid_test += """
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(false);
EXPECT_EQ(error::kUnknownCommand, ExecuteCmd(cmd));"""
valid_test += """
}
"""
args = func.GetOriginalArgs()
local_args = "%s, 1, _" % args[0].GetValidGLArg(func)
self.WriteValidUnitTest(func, file, valid_test, {
'name': func.name,
'count': self.GetArrayCount(func),
'local_args': local_args,
}, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
  EXPECT_CALL(*gl_, %(name)sv(_, _, _)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, file, invalid_test, {
'name': func.GetInfo('name'),
'count': self.GetArrayCount(func),
})
class GLcharHandler(CustomHandler):
"""Handler for functions that pass a single string ."""
def __init__(self):
CustomHandler.__init__(self)
def WriteImmediateCmdComputeSize(self, func, file):
"""Overrriden from TypeHandler."""
file.Write(" static uint32_t ComputeSize(uint32_t data_size) {\n")
file.Write(" return static_cast<uint32_t>(\n")
file.Write(" sizeof(ValueType) + data_size); // NOLINT\n")
file.Write(" }\n")
def WriteImmediateCmdSetHeader(self, func, file):
"""Overrriden from TypeHandler."""
code = """
void SetHeader(uint32_t data_size) {
header.SetCmdBySize<ValueType>(data_size);
}
"""
file.Write(code)
def WriteImmediateCmdInit(self, func, file):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
args = func.GetCmdArgs()
set_code = []
for arg in args:
set_code.append(" %s = _%s;" % (arg.name, arg.name))
code = """
void Init(%(typed_args)s, uint32_t _data_size) {
SetHeader(_data_size);
%(set_code)s
memcpy(ImmediateDataAddress(this), _%(last_arg)s, _data_size);
}
"""
file.Write(code % {
"typed_args": func.MakeTypedArgString("_"),
"set_code": "\n".join(set_code),
"last_arg": last_arg.name
})
def WriteImmediateCmdSet(self, func, file):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
file.Write(" void* Set(void* cmd%s, uint32_t _data_size) {\n" %
func.MakeTypedCmdArgString("_", True))
file.Write(" static_cast<ValueType*>(cmd)->Init(%s, _data_size);\n" %
func.MakeCmdArgString("_"))
file.Write(" return NextImmediateCmdAddress<ValueType>("
"cmd, _data_size);\n")
file.Write(" }\n")
file.Write("\n")
def WriteImmediateCmdHelper(self, func, file):
"""Overrriden from TypeHandler."""
code = """ void %(name)s(%(typed_args)s) {
const uint32_t data_size = strlen(name);
gles2::cmds::%(name)s* c =
GetImmediateCmdSpace<gles2::cmds::%(name)s>(data_size);
if (c) {
c->Init(%(args)s, data_size);
}
}
"""
file.Write(code % {
"name": func.name,
"typed_args": func.MakeTypedOriginalArgString(""),
"args": func.MakeOriginalArgString(""),
})
def WriteImmediateFormatTest(self, func, file):
"""Overrriden from TypeHandler."""
init_code = []
check_code = []
all_but_last_arg = func.GetCmdArgs()[:-1]
for value, arg in enumerate(all_but_last_arg):
init_code.append(" static_cast<%s>(%d)," % (arg.type, value + 11))
for value, arg in enumerate(all_but_last_arg):
check_code.append(" EXPECT_EQ(static_cast<%s>(%d), cmd.%s);" %
(arg.type, value + 11, arg.name))
code = """
TEST_F(GLES2FormatTest, %(func_name)s) {
cmds::%(func_name)s& cmd = *GetBufferAs<cmds::%(func_name)s>();
static const char* const test_str = \"test string\";
void* next_cmd = cmd.Set(
&cmd,
%(init_code)s
test_str,
strlen(test_str));
EXPECT_EQ(static_cast<uint32_t>(cmds::%(func_name)s::kCmdId),
cmd.header.command);
EXPECT_EQ(sizeof(cmd) +
RoundSizeToMultipleOfEntries(strlen(test_str)),
cmd.header.size * 4u);
EXPECT_EQ(static_cast<char*>(next_cmd),
reinterpret_cast<char*>(&cmd) + sizeof(cmd) +
RoundSizeToMultipleOfEntries(strlen(test_str)));
%(check_code)s
EXPECT_EQ(static_cast<uint32_t>(strlen(test_str)), cmd.data_size);
EXPECT_EQ(0, memcmp(test_str, ImmediateDataAddress(&cmd), strlen(test_str)));
CheckBytesWritten(
next_cmd,
sizeof(cmd) + RoundSizeToMultipleOfEntries(strlen(test_str)),
sizeof(cmd) + strlen(test_str));
}
"""
file.Write(code % {
'func_name': func.name,
'init_code': "\n".join(init_code),
'check_code': "\n".join(check_code),
})
class GLcharNHandler(CustomHandler):
"""Handler for functions that pass a single string with an optional len."""
def __init__(self):
CustomHandler.__init__(self)
def InitFunction(self, func):
"""Overrriden from TypeHandler."""
func.cmd_args = []
func.AddCmdArg(Argument('bucket_id', 'GLuint'))
def NeedsDataTransferFunction(self, func):
"""Overriden from TypeHandler."""
return False
def AddBucketFunction(self, generator, func):
"""Overrriden from TypeHandler."""
pass
def WriteServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
self.WriteServiceHandlerFunctionHeader(func, file)
file.Write("""
GLuint bucket_id = static_cast<GLuint>(c.%(bucket_id)s);
Bucket* bucket = GetBucket(bucket_id);
if (!bucket || bucket->size() == 0) {
return error::kInvalidArguments;
}
std::string str;
if (!bucket->GetAsString(&str)) {
return error::kInvalidArguments;
}
%(gl_func_name)s(0, str.c_str());
return error::kNoError;
}
""" % {
'name': func.name,
'gl_func_name': func.GetGLFunctionName(),
'bucket_id': func.cmd_args[0].name,
})
class IsHandler(TypeHandler):
"""Handler for glIs____ type and glGetError functions."""
def __init__(self):
TypeHandler.__init__(self)
def InitFunction(self, func):
"""Overrriden from TypeHandler."""
func.AddCmdArg(Argument("result_shm_id", 'uint32_t'))
func.AddCmdArg(Argument("result_shm_offset", 'uint32_t'))
if func.GetInfo('result') == None:
func.AddInfo('result', ['uint32_t'])
def WriteServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s%(comma)sshared_memory_id_, shared_memory_offset_);"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);"""
valid_test += """
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());"""
if func.IsUnsafe():
valid_test += """
decoder_->set_unsafe_es3_apis_enabled(false);
EXPECT_EQ(error::kUnknownCommand, ExecuteCmd(cmd));"""
valid_test += """
}
"""
comma = ""
if len(func.GetOriginalArgs()):
comma =", "
self.WriteValidUnitTest(func, file, valid_test, {
'comma': comma,
}, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s%(comma)sshared_memory_id_, shared_memory_offset_);
EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
self.WriteInvalidUnitTest(func, file, invalid_test, {
'comma': comma,
}, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgsBadSharedMemoryId) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);"""
if func.IsUnsafe():
invalid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);"""
invalid_test += """
cmds::%(name)s cmd;
cmd.Init(%(args)s%(comma)skInvalidSharedMemoryId, shared_memory_offset_);
EXPECT_EQ(error::kOutOfBounds, ExecuteCmd(cmd));
cmd.Init(%(args)s%(comma)sshared_memory_id_, kInvalidSharedMemoryOffset);
EXPECT_EQ(error::kOutOfBounds, ExecuteCmd(cmd));"""
if func.IsUnsafe():
invalid_test += """
decoder_->set_unsafe_es3_apis_enabled(true);"""
invalid_test += """
}
"""
self.WriteValidUnitTest(func, file, invalid_test, {
'comma': comma,
}, *extras)
def WriteServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
self.WriteServiceHandlerFunctionHeader(func, file)
args = func.GetOriginalArgs()
for arg in args:
arg.WriteGetCode(file)
code = """ typedef cmds::%(func_name)s::Result Result;
Result* result_dst = GetSharedMemoryAs<Result*>(
c.result_shm_id, c.result_shm_offset, sizeof(*result_dst));
if (!result_dst) {
return error::kOutOfBounds;
}
"""
file.Write(code % {'func_name': func.name})
func.WriteHandlerValidation(file)
if func.IsUnsafe():
assert func.GetInfo('id_mapping')
assert len(func.GetInfo('id_mapping')) == 1
assert len(args) == 1
id_type = func.GetInfo('id_mapping')[0]
file.Write(" %s service_%s = 0;\n" % (args[0].type, id_type.lower()))
file.Write(" *result_dst = group_->Get%sServiceId(%s, &service_%s);\n" %
(id_type, id_type.lower(), id_type.lower()))
else:
file.Write(" *result_dst = %s(%s);\n" %
(func.GetGLFunctionName(), func.MakeOriginalArgString("")))
file.Write(" return error::kNoError;\n")
file.Write("}\n")
file.Write("\n")
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
impl_func = func.GetInfo('impl_func')
if impl_func == None or impl_func == True:
error_value = func.GetInfo("error_value") or "GL_FALSE"
file.Write("%s GLES2Implementation::%s(%s) {\n" %
(func.return_type, func.original_name,
func.MakeTypedOriginalArgString("")))
file.Write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
self.WriteTraceEvent(func, file)
func.WriteDestinationInitalizationValidation(file)
self.WriteClientGLCallLog(func, file)
file.Write(" typedef cmds::%s::Result Result;\n" % func.name)
file.Write(" Result* result = GetResultAs<Result*>();\n")
file.Write(" if (!result) {\n")
file.Write(" return %s;\n" % error_value)
file.Write(" }\n")
file.Write(" *result = 0;\n")
assert len(func.GetOriginalArgs()) == 1
id_arg = func.GetOriginalArgs()[0]
if id_arg.type == 'GLsync':
arg_string = "ToGLuint(%s)" % func.MakeOriginalArgString("")
else:
arg_string = func.MakeOriginalArgString("")
file.Write(
" helper_->%s(%s, GetResultShmId(), GetResultShmOffset());\n" %
(func.name, arg_string))
file.Write(" WaitForCmd();\n")
file.Write(" %s result_value = *result" % func.return_type)
if func.return_type == "GLboolean":
file.Write(" != 0")
file.Write(';\n GPU_CLIENT_LOG("returned " << result_value);\n')
file.Write(" CheckGLError();\n")
file.Write(" return result_value;\n")
file.Write("}\n")
file.Write("\n")
def WriteGLES2ImplementationUnitTest(self, func, file):
"""Overrriden from TypeHandler."""
client_test = func.GetInfo('client_test')
if client_test == None or client_test == True:
code = """
TEST_F(GLES2ImplementationTest, %(name)s) {
struct Cmds {
cmds::%(name)s cmd;
};
Cmds expected;
ExpectedMemoryInfo result1 =
GetExpectedResultMemory(sizeof(cmds::%(name)s::Result));
expected.cmd.Init(%(cmd_id_value)s, result1.id, result1.offset);
EXPECT_CALL(*command_buffer(), OnFlush())
.WillOnce(SetMemory(result1.ptr, uint32_t(GL_TRUE)))
.RetiresOnSaturation();
GLboolean result = gl_->%(name)s(%(gl_id_value)s);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
EXPECT_TRUE(result);
}
"""
args = func.GetOriginalArgs()
assert len(args) == 1
file.Write(code % {
'name': func.name,
'cmd_id_value': args[0].GetValidClientSideCmdArg(func),
'gl_id_value': args[0].GetValidClientSideArg(func) })
class STRnHandler(TypeHandler):
"""Handler for GetProgramInfoLog, GetShaderInfoLog, GetShaderSource, and
GetTranslatedShaderSourceANGLE."""
def __init__(self):
TypeHandler.__init__(self)
def InitFunction(self, func):
"""Overrriden from TypeHandler."""
# remove all but the first cmd args.
cmd_args = func.GetCmdArgs()
func.ClearCmdArgs()
func.AddCmdArg(cmd_args[0])
# add on a bucket id.
func.AddCmdArg(Argument('bucket_id', 'uint32_t'))
def WriteGLES2Implementation(self, func, file):
"""Overrriden from TypeHandler."""
code_1 = """%(return_type)s GLES2Implementation::%(func_name)s(%(args)s) {
GPU_CLIENT_SINGLE_THREAD_CHECK();
"""
code_2 = """ GPU_CLIENT_LOG("[" << GetLogPrefix()
<< "] gl%(func_name)s" << "("
<< %(arg0)s << ", "
<< %(arg1)s << ", "
<< static_cast<void*>(%(arg2)s) << ", "
<< static_cast<void*>(%(arg3)s) << ")");
helper_->SetBucketSize(kResultBucketId, 0);
helper_->%(func_name)s(%(id_name)s, kResultBucketId);
std::string str;
GLsizei max_size = 0;
if (GetBucketAsString(kResultBucketId, &str)) {
if (bufsize > 0) {
max_size =
std::min(static_cast<size_t>(%(bufsize_name)s) - 1, str.size());
memcpy(%(dest_name)s, str.c_str(), max_size);
%(dest_name)s[max_size] = '\\0';
GPU_CLIENT_LOG("------\\n" << %(dest_name)s << "\\n------");
}
}
if (%(length_name)s != NULL) {
*%(length_name)s = max_size;
}
CheckGLError();
}
"""
args = func.GetOriginalArgs()
str_args = {
'return_type': func.return_type,
'func_name': func.original_name,
'args': func.MakeTypedOriginalArgString(""),
'id_name': args[0].name,
'bufsize_name': args[1].name,
'length_name': args[2].name,
'dest_name': args[3].name,
'arg0': args[0].name,
'arg1': args[1].name,
'arg2': args[2].name,
'arg3': args[3].name,
}
file.Write(code_1 % str_args)
func.WriteDestinationInitalizationValidation(file)
file.Write(code_2 % str_args)
def WriteServiceUnitTest(self, func, file, *extras):
"""Overrriden from TypeHandler."""
valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
const char* kInfo = "hello";
const uint32_t kBucketId = 123;
SpecializedSetup<cmds::%(name)s, 0>(true);
%(expect_len_code)s
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s))
.WillOnce(DoAll(SetArgumentPointee<2>(strlen(kInfo)),
SetArrayArgument<3>(kInfo, kInfo + strlen(kInfo) + 1)));
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
CommonDecoder::Bucket* bucket = decoder_->GetBucket(kBucketId);
ASSERT_TRUE(bucket != NULL);
EXPECT_EQ(strlen(kInfo) + 1, bucket->size());
EXPECT_EQ(0, memcmp(bucket->GetData(0, bucket->size()), kInfo,
bucket->size()));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
args = func.GetOriginalArgs()
id_name = args[0].GetValidGLArg(func)
get_len_func = func.GetInfo('get_len_func')
get_len_enum = func.GetInfo('get_len_enum')
sub = {
'id_name': id_name,
'get_len_func': get_len_func,
'get_len_enum': get_len_enum,
'gl_args': '%s, strlen(kInfo) + 1, _, _' %
args[0].GetValidGLArg(func),
'args': '%s, kBucketId' % args[0].GetValidArg(func),
'expect_len_code': '',
}
if get_len_func and get_len_func[0:2] == 'gl':
sub['expect_len_code'] = (
" EXPECT_CALL(*gl_, %s(%s, %s, _))\n"
" .WillOnce(SetArgumentPointee<2>(strlen(kInfo) + 1));") % (
get_len_func[2:], id_name, get_len_enum)
self.WriteValidUnitTest(func, file, valid_test, sub, *extras)
invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs) {
const uint32_t kBucketId = 123;
EXPECT_CALL(*gl_, %(gl_func_name)s(_, _, _, _))
.Times(0);
cmds::%(name)s cmd;
cmd.Init(kInvalidClientId, kBucketId);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_INVALID_VALUE, GetGLError());
}
"""
self.WriteValidUnitTest(func, file, invalid_test, *extras)
def WriteServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
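
# NamedType (below) wraps one entry of the _NAMED_TYPE_INFO table. A typical
# entry looks roughly like this (abbreviated, illustrative):
#
#   'FaceType': {
#     'type': 'GLenum',
#     'valid': ['GL_FRONT', 'GL_BACK', 'GL_FRONT_AND_BACK'],
#     'invalid': ['GL_TEXTURE_2D'],
#   },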
class NamedType(object):
"""A class that represents a type of an argument in a client function.
A type of an argument that is to be passed through in the command buffer
  command. Currently used only for the arguments that are specifically named in
the 'cmd_buffer_functions.txt' file, mostly enums.
"""
def __init__(self, info):
    assert 'is_complete' not in info or info['is_complete'] == True
self.info = info
self.valid = info['valid']
if 'invalid' in info:
self.invalid = info['invalid']
else:
self.invalid = []
if 'valid_es3' in info:
self.valid_es3 = info['valid_es3']
else:
self.valid_es3 = []
if 'deprecated_es3' in info:
self.deprecated_es3 = info['deprecated_es3']
else:
self.deprecated_es3 = []
def GetType(self):
return self.info['type']
def GetInvalidValues(self):
return self.invalid
def GetValidValues(self):
return self.valid
def GetValidValuesES3(self):
return self.valid_es3
def GetDeprecatedValuesES3(self):
return self.deprecated_es3
def IsConstant(self):
    if 'is_complete' not in self.info:
return False
return len(self.GetValidValues()) == 1
def GetConstantValue(self):
return self.GetValidValues()[0]
class Argument(object):
"""A class that represents a function argument."""
cmd_type_map_ = {
'GLenum': 'uint32_t',
'GLint': 'int32_t',
'GLintptr': 'int32_t',
'GLsizei': 'int32_t',
'GLsizeiptr': 'int32_t',
'GLfloat': 'float',
'GLclampf': 'float',
}
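  # Any type not listed above travels as uint32_t in the command struct (see
  # __init__ below); e.g. a GLfloat argument becomes a float field while a
  # GLuint argument becomes a uint32_t field.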
need_validation_ = ['GLsizei*', 'GLboolean*', 'GLenum*', 'GLint*']
def __init__(self, name, type):
self.name = name
self.optional = type.endswith("Optional*")
if self.optional:
type = type[:-9] + "*"
self.type = type
if type in self.cmd_type_map_:
self.cmd_type = self.cmd_type_map_[type]
else:
self.cmd_type = 'uint32_t'
def IsPointer(self):
"""Returns true if argument is a pointer."""
return False
def IsPointer2D(self):
"""Returns true if argument is a 2D pointer."""
return False
def IsConstant(self):
"""Returns true if the argument has only one valid value."""
return False
def AddCmdArgs(self, args):
"""Adds command arguments for this argument to the given list."""
if not self.IsConstant():
return args.append(self)
def AddInitArgs(self, args):
"""Adds init arguments for this argument to the given list."""
if not self.IsConstant():
return args.append(self)
def GetValidArg(self, func):
"""Gets a valid value for this argument."""
valid_arg = func.GetValidArg(self)
if valid_arg != None:
return valid_arg
index = func.GetOriginalArgs().index(self)
return str(index + 1)
def GetValidClientSideArg(self, func):
"""Gets a valid value for this argument."""
valid_arg = func.GetValidArg(self)
if valid_arg != None:
return valid_arg
if self.IsPointer():
return 'nullptr'
index = func.GetOriginalArgs().index(self)
if self.type == 'GLsync':
return ("reinterpret_cast<GLsync>(%d)" % (index + 1))
return str(index + 1)
def GetValidClientSideCmdArg(self, func):
"""Gets a valid value for this argument."""
valid_arg = func.GetValidArg(self)
if valid_arg != None:
return valid_arg
try:
index = func.GetOriginalArgs().index(self)
return str(index + 1)
except ValueError:
pass
index = func.GetCmdArgs().index(self)
return str(index + 1)
def GetValidGLArg(self, func):
"""Gets a valid GL value for this argument."""
value = self.GetValidArg(func)
if self.type == 'GLsync':
return ("reinterpret_cast<GLsync>(%s)" % value)
return value
def GetValidNonCachedClientSideArg(self, func):
"""Returns a valid value for this argument in a GL call.
Using the value will produce a command buffer service invocation.
Returns None if there is no such value."""
value = '123'
if self.type == 'GLsync':
return ("reinterpret_cast<GLsync>(%s)" % value)
return value
def GetValidNonCachedClientSideCmdArg(self, func):
"""Returns a valid value for this argument in a command buffer command.
Calling the GL function with the value returned by
GetValidNonCachedClientSideArg will result in a command buffer command
that contains the value returned by this function. """
return '123'
def GetNumInvalidValues(self, func):
"""returns the number of invalid values to be tested."""
return 0
def GetInvalidArg(self, index):
"""returns an invalid value and expected parse result by index."""
return ("---ERROR0---", "---ERROR2---", None)
def GetLogArg(self):
"""Get argument appropriate for LOG macro."""
if self.type == 'GLboolean':
return 'GLES2Util::GetStringBool(%s)' % self.name
if self.type == 'GLenum':
return 'GLES2Util::GetStringEnum(%s)' % self.name
return self.name
def WriteGetCode(self, file):
"""Writes the code to get an argument from a command structure."""
if self.type == 'GLsync':
my_type = 'GLuint'
else:
my_type = self.type
file.Write(" %s %s = static_cast<%s>(c.%s);\n" %
(my_type, self.name, my_type, self.name))
def WriteValidationCode(self, file, func):
"""Writes the validation code for an argument."""
pass
def WriteClientSideValidationCode(self, file, func):
"""Writes the validation code for an argument."""
pass
def WriteDestinationInitalizationValidation(self, file, func):
"""Writes the client side destintion initialization validation."""
pass
def WriteDestinationInitalizationValidatationIfNeeded(self, file, func):
"""Writes the client side destintion initialization validation if needed."""
parts = self.type.split(" ")
if len(parts) > 1:
return
if parts[0] in self.need_validation_:
file.Write(
" GPU_CLIENT_VALIDATE_DESTINATION_%sINITALIZATION(%s, %s);\n" %
("OPTIONAL_" if self.optional else "", self.type[:-1], self.name))
def WriteGetAddress(self, file):
"""Writes the code to get the address this argument refers to."""
pass
def GetImmediateVersion(self):
"""Gets the immediate version of this argument."""
return self
def GetBucketVersion(self):
"""Gets the bucket version of this argument."""
return self
class BoolArgument(Argument):
"""class for GLboolean"""
def __init__(self, name, type):
Argument.__init__(self, name, 'GLboolean')
def GetValidArg(self, func):
"""Gets a valid value for this argument."""
return 'true'
def GetValidClientSideArg(self, func):
"""Gets a valid value for this argument."""
return 'true'
def GetValidClientSideCmdArg(self, func):
"""Gets a valid value for this argument."""
return 'true'
def GetValidGLArg(self, func):
"""Gets a valid GL value for this argument."""
return 'true'
class UniformLocationArgument(Argument):
"""class for uniform locations."""
def __init__(self, name):
Argument.__init__(self, name, "GLint")
def WriteGetCode(self, file):
"""Writes the code to get an argument from a command structure."""
code = """ %s %s = static_cast<%s>(c.%s);
"""
file.Write(code % (self.type, self.name, self.type, self.name))
class DataSizeArgument(Argument):
"""class for data_size which Bucket commands do not need."""
def __init__(self, name):
Argument.__init__(self, name, "uint32_t")
def GetBucketVersion(self):
return None
class SizeArgument(Argument):
"""class for GLsizei and GLsizeiptr."""
def __init__(self, name, type):
Argument.__init__(self, name, type)
def GetNumInvalidValues(self, func):
"""overridden from Argument."""
if func.IsImmediate():
return 0
return 1
def GetInvalidArg(self, index):
"""overridden from Argument."""
return ("-1", "kNoError", "GL_INVALID_VALUE")
def WriteValidationCode(self, file, func):
"""overridden from Argument."""
if func.IsUnsafe():
return
code = """ if (%(var_name)s < 0) {
LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "gl%(func_name)s", "%(var_name)s < 0");
return error::kNoError;
}
"""
file.Write(code % {
"var_name": self.name,
"func_name": func.original_name,
})
def WriteClientSideValidationCode(self, file, func):
"""overridden from Argument."""
code = """ if (%(var_name)s < 0) {
SetGLError(GL_INVALID_VALUE, "gl%(func_name)s", "%(var_name)s < 0");
return;
}
"""
file.Write(code % {
"var_name": self.name,
"func_name": func.original_name,
})
class SizeNotNegativeArgument(SizeArgument):
"""class for GLsizeiNotNegative. It's NEVER allowed to be negative"""
def __init__(self, name, type, gl_type):
SizeArgument.__init__(self, name, gl_type)
def GetInvalidArg(self, index):
"""overridden from SizeArgument."""
return ("-1", "kOutOfBounds", "GL_NO_ERROR")
def WriteValidationCode(self, file, func):
"""overridden from SizeArgument."""
pass
class EnumBaseArgument(Argument):
"""Base class for EnumArgument, IntArgument, BitfieldArgument, and
ValidatedBoolArgument."""
def __init__(self, name, gl_type, type, gl_error):
Argument.__init__(self, name, gl_type)
self.local_type = type
self.gl_error = gl_error
name = type[len(gl_type):]
self.type_name = name
self.named_type = NamedType(_NAMED_TYPE_INFO[name])
def IsConstant(self):
return self.named_type.IsConstant()
def GetConstantValue(self):
return self.named_type.GetConstantValue()
def WriteValidationCode(self, file, func):
if func.IsUnsafe():
return
if self.named_type.IsConstant():
return
file.Write(" if (!validators_->%s.IsValid(%s)) {\n" %
(ToUnderscore(self.type_name), self.name))
if self.gl_error == "GL_INVALID_ENUM":
file.Write(
" LOCAL_SET_GL_ERROR_INVALID_ENUM(\"gl%s\", %s, \"%s\");\n" %
(func.original_name, self.name, self.name))
else:
file.Write(
" LOCAL_SET_GL_ERROR(%s, \"gl%s\", \"%s %s\");\n" %
(self.gl_error, func.original_name, self.name, self.gl_error))
file.Write(" return error::kNoError;\n")
file.Write(" }\n")
def WriteClientSideValidationCode(self, file, func):
if not self.named_type.IsConstant():
return
file.Write(" if (%s != %s) {" % (self.name,
self.GetConstantValue()))
file.Write(
" SetGLError(%s, \"gl%s\", \"%s %s\");\n" %
(self.gl_error, func.original_name, self.name, self.gl_error))
if func.return_type == "void":
file.Write(" return;\n")
else:
file.Write(" return %s;\n" % func.GetErrorReturnString())
file.Write(" }\n")
def GetValidArg(self, func):
valid_arg = func.GetValidArg(self)
if valid_arg != None:
return valid_arg
valid = self.named_type.GetValidValues()
if valid:
return valid[0]
index = func.GetOriginalArgs().index(self)
return str(index + 1)
def GetValidClientSideArg(self, func):
"""Gets a valid value for this argument."""
return self.GetValidArg(func)
def GetValidClientSideCmdArg(self, func):
"""Gets a valid value for this argument."""
valid_arg = func.GetValidArg(self)
if valid_arg != None:
return valid_arg
valid = self.named_type.GetValidValues()
if valid:
return valid[0]
try:
index = func.GetOriginalArgs().index(self)
return str(index + 1)
except ValueError:
pass
index = func.GetCmdArgs().index(self)
return str(index + 1)
def GetValidGLArg(self, func):
"""Gets a valid value for this argument."""
return self.GetValidArg(func)
def GetNumInvalidValues(self, func):
"""returns the number of invalid values to be tested."""
return len(self.named_type.GetInvalidValues())
def GetInvalidArg(self, index):
"""returns an invalid value by index."""
invalid = self.named_type.GetInvalidValues()
if invalid:
num_invalid = len(invalid)
if index >= num_invalid:
index = num_invalid - 1
return (invalid[index], "kNoError", self.gl_error)
return ("---ERROR1---", "kNoError", self.gl_error)
class EnumArgument(EnumBaseArgument):
"""A class that represents a GLenum argument"""
def __init__(self, name, type):
EnumBaseArgument.__init__(self, name, "GLenum", type, "GL_INVALID_ENUM")
def GetLogArg(self):
"""Overridden from Argument."""
return ("GLES2Util::GetString%s(%s)" %
(self.type_name, self.name))
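  # e.g. GetLogArg() for a "GLenumTextureTarget target" argument (illustrative)
  # yields GLES2Util::GetStringTextureTarget(target), since type_name strips
  # the leading "GLenum".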
class IntArgument(EnumBaseArgument):
"""A class for a GLint argument that can only accept specific values.
For example glTexImage2D takes a GLint for its internalformat
argument instead of a GLenum.
"""
def __init__(self, name, type):
EnumBaseArgument.__init__(self, name, "GLint", type, "GL_INVALID_VALUE")
class ValidatedBoolArgument(EnumBaseArgument):
"""A class for a GLboolean argument that can only accept specific values.
  For example glUniformMatrix takes a GLboolean for its transpose but it
must be false.
"""
def __init__(self, name, type):
EnumBaseArgument.__init__(self, name, "GLboolean", type, "GL_INVALID_VALUE")
def GetLogArg(self):
"""Overridden from Argument."""
return 'GLES2Util::GetStringBool(%s)' % self.name
class BitFieldArgument(EnumBaseArgument):
"""A class for a GLbitfield argument that can only accept specific values.
  For example glFenceSync takes a GLbitfield for its flags argument but it
must be 0.
"""
def __init__(self, name, type):
EnumBaseArgument.__init__(self, name, "GLbitfield", type,
"GL_INVALID_VALUE")
class ImmediatePointerArgument(Argument):
"""A class that represents an immediate argument to a function.
An immediate argument is one where the data follows the command.
"""
def __init__(self, name, type):
Argument.__init__(self, name, type)
def IsPointer(self):
return True
def GetPointedType(self):
    match = re.match(r'(const\s+)?(?P<element_type>[\w]+)\s*\*', self.type)
assert match
return match.groupdict()['element_type']
def AddCmdArgs(self, args):
"""Overridden from Argument."""
pass
def WriteGetCode(self, file):
"""Overridden from Argument."""
file.Write(
" %s %s = GetImmediateDataAs<%s>(\n" %
(self.type, self.name, self.type))
file.Write(" c, data_size, immediate_data_size);\n")
def WriteValidationCode(self, file, func):
"""Overridden from Argument."""
if self.optional:
return
file.Write(" if (%s == NULL) {\n" % self.name)
file.Write(" return error::kOutOfBounds;\n")
file.Write(" }\n")
def GetImmediateVersion(self):
"""Overridden from Argument."""
return None
def WriteDestinationInitalizationValidation(self, file, func):
"""Overridden from Argument."""
self.WriteDestinationInitalizationValidatationIfNeeded(file, func)
def GetLogArg(self):
"""Overridden from Argument."""
return "static_cast<const void*>(%s)" % self.name
class PointerArgument(Argument):
"""A class that represents a pointer argument to a function."""
def __init__(self, name, type):
Argument.__init__(self, name, type)
def IsPointer(self):
"""Overridden from Argument."""
return True
def IsPointer2D(self):
"""Overridden from Argument."""
return self.type.count('*') == 2
def GetPointedType(self):
    match = re.match(r'(const\s+)?(?P<element_type>[\w]+)\s*\*', self.type)
assert match
return match.groupdict()['element_type']
def GetValidArg(self, func):
"""Overridden from Argument."""
return "shared_memory_id_, shared_memory_offset_"
def GetValidGLArg(self, func):
"""Overridden from Argument."""
return "reinterpret_cast<%s>(shared_memory_address_)" % self.type
def GetNumInvalidValues(self, func):
"""Overridden from Argument."""
return 2
def GetInvalidArg(self, index):
"""Overridden from Argument."""
if index == 0:
return ("kInvalidSharedMemoryId, 0", "kOutOfBounds", None)
else:
return ("shared_memory_id_, kInvalidSharedMemoryOffset",
"kOutOfBounds", None)
def GetLogArg(self):
"""Overridden from Argument."""
return "static_cast<const void*>(%s)" % self.name
def AddCmdArgs(self, args):
"""Overridden from Argument."""
args.append(Argument("%s_shm_id" % self.name, 'uint32_t'))
args.append(Argument("%s_shm_offset" % self.name, 'uint32_t'))
def WriteGetCode(self, file):
"""Overridden from Argument."""
file.Write(
" %s %s = GetSharedMemoryAs<%s>(\n" %
(self.type, self.name, self.type))
file.Write(
" c.%s_shm_id, c.%s_shm_offset, data_size);\n" %
(self.name, self.name))
def WriteGetAddress(self, file):
"""Overridden from Argument."""
file.Write(
" %s %s = GetSharedMemoryAs<%s>(\n" %
(self.type, self.name, self.type))
file.Write(
" %s_shm_id, %s_shm_offset, %s_size);\n" %
(self.name, self.name, self.name))
def WriteValidationCode(self, file, func):
"""Overridden from Argument."""
if self.optional:
return
file.Write(" if (%s == NULL) {\n" % self.name)
file.Write(" return error::kOutOfBounds;\n")
file.Write(" }\n")
def GetImmediateVersion(self):
"""Overridden from Argument."""
return ImmediatePointerArgument(self.name, self.type)
def GetBucketVersion(self):
"""Overridden from Argument."""
if self.type.find('char') >= 0:
if self.IsPointer2D():
return InputStringArrayBucketArgument(self.name, self.type)
return InputStringBucketArgument(self.name, self.type)
return BucketPointerArgument(self.name, self.type)
def WriteDestinationInitalizationValidation(self, file, func):
"""Overridden from Argument."""
self.WriteDestinationInitalizationValidatationIfNeeded(file, func)
class BucketPointerArgument(PointerArgument):
"""A class that represents an bucket argument to a function."""
def __init__(self, name, type):
Argument.__init__(self, name, type)
def AddCmdArgs(self, args):
"""Overridden from Argument."""
pass
def WriteGetCode(self, file):
"""Overridden from Argument."""
file.Write(
" %s %s = bucket->GetData(0, data_size);\n" %
(self.type, self.name))
def WriteValidationCode(self, file, func):
"""Overridden from Argument."""
pass
def GetImmediateVersion(self):
"""Overridden from Argument."""
return None
def WriteDestinationInitalizationValidation(self, file, func):
"""Overridden from Argument."""
self.WriteDestinationInitalizationValidatationIfNeeded(file, func)
def GetLogArg(self):
"""Overridden from Argument."""
return "static_cast<const void*>(%s)" % self.name
class InputStringBucketArgument(Argument):
"""A string input argument where the string is passed in a bucket."""
def __init__(self, name, type):
Argument.__init__(self, name + "_bucket_id", "uint32_t")
def IsPointer(self):
"""Overridden from Argument."""
return True
def IsPointer2D(self):
"""Overridden from Argument."""
return False
class InputStringArrayBucketArgument(Argument):
"""A string array input argument where the strings are passed in a bucket."""
def __init__(self, name, type):
Argument.__init__(self, name + "_bucket_id", "uint32_t")
self._original_name = name
def WriteGetCode(self, file):
"""Overridden from Argument."""
code = """
Bucket* bucket = GetBucket(c.%(name)s);
if (!bucket) {
return error::kInvalidArguments;
}
GLsizei count = 0;
std::vector<char*> strs;
std::vector<GLint> len;
if (!bucket->GetAsStrings(&count, &strs, &len)) {
return error::kInvalidArguments;
}
const char** %(original_name)s =
strs.size() > 0 ? const_cast<const char**>(&strs[0]) : NULL;
const GLint* length =
len.size() > 0 ? const_cast<const GLint*>(&len[0]) : NULL;
(void)length;
"""
file.Write(code % {
'name': self.name,
'original_name': self._original_name,
})
def GetValidArg(self, func):
return "kNameBucketId"
def GetValidGLArg(self, func):
return "_"
def IsPointer(self):
"""Overridden from Argument."""
return True
def IsPointer2D(self):
"""Overridden from Argument."""
return True
class ResourceIdArgument(Argument):
"""A class that represents a resource id argument to a function."""
def __init__(self, name, type):
match = re.match("(GLid\w+)", type)
self.resource_type = match.group(1)[4:]
if self.resource_type == "Sync":
type = type.replace(match.group(1), "GLsync")
else:
type = type.replace(match.group(1), "GLuint")
Argument.__init__(self, name, type)
def WriteGetCode(self, file):
"""Overridden from Argument."""
if self.type == "GLsync":
my_type = "GLuint"
else:
my_type = self.type
file.Write(" %s %s = c.%s;\n" % (my_type, self.name, self.name))
def GetValidArg(self, func):
return "client_%s_id_" % self.resource_type.lower()
def GetValidGLArg(self, func):
if self.resource_type == "Sync":
return "reinterpret_cast<GLsync>(kService%sId)" % self.resource_type
return "kService%sId" % self.resource_type
class ResourceIdBindArgument(Argument):
"""Represents a resource id argument to a bind function."""
def __init__(self, name, type):
match = re.match("(GLidBind\w+)", type)
self.resource_type = match.group(1)[8:]
type = type.replace(match.group(1), "GLuint")
Argument.__init__(self, name, type)
def WriteGetCode(self, file):
"""Overridden from Argument."""
code = """ %(type)s %(name)s = c.%(name)s;
"""
file.Write(code % {'type': self.type, 'name': self.name})
def GetValidArg(self, func):
return "client_%s_id_" % self.resource_type.lower()
def GetValidGLArg(self, func):
return "kService%sId" % self.resource_type
class ResourceIdZeroArgument(Argument):
"""Represents a resource id argument to a function that can be zero."""
def __init__(self, name, type):
match = re.match("(GLidZero\w+)", type)
self.resource_type = match.group(1)[8:]
type = type.replace(match.group(1), "GLuint")
Argument.__init__(self, name, type)
def WriteGetCode(self, file):
"""Overridden from Argument."""
file.Write(" %s %s = c.%s;\n" % (self.type, self.name, self.name))
def GetValidArg(self, func):
return "client_%s_id_" % self.resource_type.lower()
def GetValidGLArg(self, func):
return "kService%sId" % self.resource_type
def GetNumInvalidValues(self, func):
"""returns the number of invalid values to be tested."""
return 1
def GetInvalidArg(self, index):
"""returns an invalid value by index."""
return ("kInvalidClientId", "kNoError", "GL_INVALID_VALUE")
class Function(object):
"""A class that represents a function."""
type_handlers = {
'': TypeHandler(),
'Bind': BindHandler(),
'Create': CreateHandler(),
'Custom': CustomHandler(),
'Data': DataHandler(),
'Delete': DeleteHandler(),
'DELn': DELnHandler(),
'GENn': GENnHandler(),
'GETn': GETnHandler(),
'GLchar': GLcharHandler(),
'GLcharN': GLcharNHandler(),
'HandWritten': HandWrittenHandler(),
'Is': IsHandler(),
'Manual': ManualHandler(),
'PUT': PUTHandler(),
'PUTn': PUTnHandler(),
'PUTSTR': PUTSTRHandler(),
'PUTXn': PUTXnHandler(),
'StateSet': StateSetHandler(),
'StateSetRGBAlpha': StateSetRGBAlphaHandler(),
'StateSetFrontBack': StateSetFrontBackHandler(),
'StateSetFrontBackSeparate': StateSetFrontBackSeparateHandler(),
'StateSetNamedParameter': StateSetNamedParameter(),
'STRn': STRnHandler(),
'Todo': TodoHandler(),
}
def __init__(self, name, info):
self.name = name
self.original_name = info['original_name']
self.original_args = self.ParseArgs(info['original_args'])
if 'cmd_args' in info:
self.args_for_cmds = self.ParseArgs(info['cmd_args'])
else:
self.args_for_cmds = self.original_args[:]
self.return_type = info['return_type']
if self.return_type != 'void':
self.return_arg = CreateArg(info['return_type'] + " result")
else:
self.return_arg = None
self.num_pointer_args = sum(
[1 for arg in self.args_for_cmds if arg.IsPointer()])
if self.num_pointer_args > 0:
for arg in reversed(self.original_args):
if arg.IsPointer():
self.last_original_pointer_arg = arg
break
else:
self.last_original_pointer_arg = None
self.info = info
self.type_handler = self.type_handlers[info['type']]
self.can_auto_generate = (self.num_pointer_args == 0 and
info['return_type'] == "void")
self.InitFunction()
def ParseArgs(self, arg_string):
"""Parses a function arg string."""
args = []
parts = arg_string.split(',')
for arg_string in parts:
arg = CreateArg(arg_string)
if arg:
args.append(arg)
return args
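  # e.g. (illustrative) ParseArgs("GLenumTextureTarget target, "
  # "GLidBindTexture texture") yields an EnumArgument and a
  # ResourceIdBindArgument; see CreateArg() near the end of this file.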
def IsType(self, type_name):
"""Returns true if function is a certain type."""
return self.info['type'] == type_name
def InitFunction(self):
"""Creates command args and calls the init function for the type handler.
    Creates argument lists for command buffer commands, e.g. self.cmd_args and
self.init_args.
Calls the type function initialization.
Override to create different kind of command buffer command argument lists.
"""
self.cmd_args = []
for arg in self.args_for_cmds:
arg.AddCmdArgs(self.cmd_args)
self.init_args = []
for arg in self.args_for_cmds:
arg.AddInitArgs(self.init_args)
if self.return_arg:
self.init_args.append(self.return_arg)
self.type_handler.InitFunction(self)
def IsImmediate(self):
"""Returns whether the function is immediate data function or not."""
return False
def IsUnsafe(self):
"""Returns whether the function has service side validation or not."""
return self.GetInfo('unsafe', False)
def GetInfo(self, name, default = None):
"""Returns a value from the function info for this function."""
if name in self.info:
return self.info[name]
return default
def GetValidArg(self, arg):
"""Gets a valid argument value for the parameter arg from the function info
if one exists."""
try:
index = self.GetOriginalArgs().index(arg)
except ValueError:
return None
valid_args = self.GetInfo('valid_args')
if valid_args and str(index) in valid_args:
return valid_args[str(index)]
return None
def AddInfo(self, name, value):
"""Adds an info."""
self.info[name] = value
def IsExtension(self):
return self.GetInfo('extension') or self.GetInfo('extension_flag')
def IsCoreGLFunction(self):
return (not self.IsExtension() and
not self.GetInfo('pepper_interface') and
not self.IsUnsafe())
def InPepperInterface(self, interface):
ext = self.GetInfo('pepper_interface')
if not interface.GetName():
return self.IsCoreGLFunction()
return ext == interface.GetName()
def InAnyPepperExtension(self):
return self.IsCoreGLFunction() or self.GetInfo('pepper_interface')
def GetErrorReturnString(self):
if self.GetInfo("error_return"):
return self.GetInfo("error_return")
elif self.return_type == "GLboolean":
return "GL_FALSE"
elif "*" in self.return_type:
return "NULL"
return "0"
def GetGLFunctionName(self):
"""Gets the function to call to execute GL for this command."""
if self.GetInfo('decoder_func'):
return self.GetInfo('decoder_func')
return "gl%s" % self.original_name
def GetGLTestFunctionName(self):
gl_func_name = self.GetInfo('gl_test_func')
if gl_func_name == None:
gl_func_name = self.GetGLFunctionName()
if gl_func_name.startswith("gl"):
gl_func_name = gl_func_name[2:]
else:
gl_func_name = self.original_name
return gl_func_name
def GetDataTransferMethods(self):
return self.GetInfo('data_transfer_methods',
['immediate' if self.num_pointer_args == 1 else 'shm'])
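  # Note on GetDataTransferMethods(): a function with exactly one pointer arg
  # defaults to 'immediate' (data inline after the command); anything else
  # defaults to 'shm' (data in shared memory). 'bucket' transfer has to be
  # requested explicitly through the function info.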
def AddCmdArg(self, arg):
"""Adds a cmd argument to this function."""
self.cmd_args.append(arg)
def GetCmdArgs(self):
"""Gets the command args for this function."""
return self.cmd_args
def ClearCmdArgs(self):
"""Clears the command args for this function."""
self.cmd_args = []
def GetCmdConstants(self):
"""Gets the constants for this function."""
return [arg for arg in self.args_for_cmds if arg.IsConstant()]
def GetInitArgs(self):
"""Gets the init args for this function."""
return self.init_args
def GetOriginalArgs(self):
"""Gets the original arguments to this function."""
return self.original_args
def GetLastOriginalArg(self):
"""Gets the last original argument to this function."""
    return self.original_args[-1]
def GetLastOriginalPointerArg(self):
return self.last_original_pointer_arg
def GetResourceIdArg(self):
for arg in self.original_args:
if hasattr(arg, 'resource_type'):
return arg
return None
def _MaybePrependComma(self, arg_string, add_comma):
"""Adds a comma if arg_string is not empty and add_comma is true."""
comma = ""
if add_comma and len(arg_string):
comma = ", "
return "%s%s" % (comma, arg_string)
def MakeTypedOriginalArgString(self, prefix, add_comma = False):
"""Gets a list of arguments as they are in GL."""
args = self.GetOriginalArgs()
arg_string = ", ".join(
["%s %s%s" % (arg.type, prefix, arg.name) for arg in args])
return self._MaybePrependComma(arg_string, add_comma)
def MakeOriginalArgString(self, prefix, add_comma = False, separator = ", "):
"""Gets the list of arguments as they are in GL."""
args = self.GetOriginalArgs()
arg_string = separator.join(
["%s%s" % (prefix, arg.name) for arg in args])
return self._MaybePrependComma(arg_string, add_comma)
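  # e.g. (illustrative) for glUniform1i(GLint location, GLint x):
  #   MakeTypedOriginalArgString("")  -> "GLint location, GLint x"
  #   MakeOriginalArgString("_")      -> "_location, _x"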
def MakeTypedHelperArgString(self, prefix, add_comma = False):
"""Gets a list of typed GL arguments after removing unneeded arguments."""
args = self.GetOriginalArgs()
arg_string = ", ".join(
["%s %s%s" % (
arg.type,
prefix,
arg.name,
) for arg in args if not arg.IsConstant()])
return self._MaybePrependComma(arg_string, add_comma)
def MakeHelperArgString(self, prefix, add_comma = False, separator = ", "):
"""Gets a list of GL arguments after removing unneeded arguments."""
args = self.GetOriginalArgs()
arg_string = separator.join(
["%s%s" % (prefix, arg.name)
for arg in args if not arg.IsConstant()])
return self._MaybePrependComma(arg_string, add_comma)
def MakeTypedPepperArgString(self, prefix):
"""Gets a list of arguments as they need to be for Pepper."""
if self.GetInfo("pepper_args"):
return self.GetInfo("pepper_args")
else:
return self.MakeTypedOriginalArgString(prefix, False)
def MapCTypeToPepperIdlType(self, ctype, is_for_return_type=False):
"""Converts a C type name to the corresponding Pepper IDL type."""
idltype = {
'char*': '[out] str_t',
'const GLchar* const*': '[out] cstr_t',
'const char*': 'cstr_t',
'const void*': 'mem_t',
'void*': '[out] mem_t',
'void**': '[out] mem_ptr_t',
}.get(ctype, ctype)
# We use "GLxxx_ptr_t" for "GLxxx*".
matched = re.match(r'(const )?(GL\w+)\*$', ctype)
if matched:
idltype = matched.group(2) + '_ptr_t'
if not matched.group(1):
idltype = '[out] ' + idltype
# If an in/out specifier is not specified yet, prepend [in].
if idltype[0] != '[':
idltype = '[in] ' + idltype
# Strip the in/out specifier for a return type.
if is_for_return_type:
idltype = re.sub(r'\[\w+\] ', '', idltype)
return idltype
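  # Illustrative MapCTypeToPepperIdlType() examples:
  #   "const GLfloat*" -> "[in] GLfloat_ptr_t"
  #   "GLuint*"        -> "[out] GLuint_ptr_t"
  #   "GLint"          -> "[in] GLint"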
def MakeTypedPepperIdlArgStrings(self):
"""Gets a list of arguments as they need to be for Pepper IDL."""
args = self.GetOriginalArgs()
return ["%s %s" % (self.MapCTypeToPepperIdlType(arg.type), arg.name)
for arg in args]
def GetPepperName(self):
if self.GetInfo("pepper_name"):
return self.GetInfo("pepper_name")
return self.name
def MakeTypedCmdArgString(self, prefix, add_comma = False):
"""Gets a typed list of arguments as they need to be for command buffers."""
args = self.GetCmdArgs()
arg_string = ", ".join(
["%s %s%s" % (arg.type, prefix, arg.name) for arg in args])
return self._MaybePrependComma(arg_string, add_comma)
def MakeCmdArgString(self, prefix, add_comma = False):
"""Gets the list of arguments as they need to be for command buffers."""
args = self.GetCmdArgs()
arg_string = ", ".join(
["%s%s" % (prefix, arg.name) for arg in args])
return self._MaybePrependComma(arg_string, add_comma)
def MakeTypedInitString(self, prefix, add_comma = False):
"""Gets a typed list of arguments as they need to be for cmd Init/Set."""
args = self.GetInitArgs()
arg_string = ", ".join(
["%s %s%s" % (arg.type, prefix, arg.name) for arg in args])
return self._MaybePrependComma(arg_string, add_comma)
def MakeInitString(self, prefix, add_comma = False):
"""Gets the list of arguments as they need to be for cmd Init/Set."""
args = self.GetInitArgs()
arg_string = ", ".join(
["%s%s" % (prefix, arg.name) for arg in args])
return self._MaybePrependComma(arg_string, add_comma)
def MakeLogArgString(self):
"""Makes a string of the arguments for the LOG macros"""
args = self.GetOriginalArgs()
return ' << ", " << '.join([arg.GetLogArg() for arg in args])
def WriteCommandDescription(self, file):
"""Writes a description of the command."""
file.Write("//! Command that corresponds to gl%s.\n" % self.original_name)
def WriteHandlerValidation(self, file):
"""Writes validation code for the function."""
for arg in self.GetOriginalArgs():
arg.WriteValidationCode(file, self)
self.WriteValidationCode(file)
def WriteHandlerImplementation(self, file):
"""Writes the handler implementation for this command."""
self.type_handler.WriteHandlerImplementation(self, file)
def WriteValidationCode(self, file):
"""Writes the validation code for a command."""
pass
def WriteCmdFlag(self, file):
"""Writes the cmd cmd_flags constant."""
flags = []
# By default trace only at the highest level 3.
trace_level = int(self.GetInfo('trace_level', default = 3))
if trace_level not in xrange(0, 4):
raise KeyError("Unhandled trace_level: %d" % trace_level)
flags.append('CMD_FLAG_SET_TRACE_LEVEL(%d)' % trace_level)
if len(flags) > 0:
cmd_flags = ' | '.join(flags)
else:
cmd_flags = 0
file.Write(" static const uint8 cmd_flags = %s;\n" % cmd_flags)
def WriteCmdArgFlag(self, file):
"""Writes the cmd kArgFlags constant."""
file.Write(" static const cmd::ArgFlags kArgFlags = cmd::kFixed;\n")
def WriteCmdComputeSize(self, file):
"""Writes the ComputeSize function for the command."""
file.Write(" static uint32_t ComputeSize() {\n")
file.Write(
" return static_cast<uint32_t>(sizeof(ValueType)); // NOLINT\n")
file.Write(" }\n")
file.Write("\n")
def WriteCmdSetHeader(self, file):
"""Writes the cmd's SetHeader function."""
file.Write(" void SetHeader() {\n")
file.Write(" header.SetCmd<ValueType>();\n")
file.Write(" }\n")
file.Write("\n")
def WriteCmdInit(self, file):
"""Writes the cmd's Init function."""
file.Write(" void Init(%s) {\n" % self.MakeTypedCmdArgString("_"))
file.Write(" SetHeader();\n")
args = self.GetCmdArgs()
for arg in args:
file.Write(" %s = _%s;\n" % (arg.name, arg.name))
file.Write(" }\n")
file.Write("\n")
def WriteCmdSet(self, file):
"""Writes the cmd's Set function."""
copy_args = self.MakeCmdArgString("_", False)
file.Write(" void* Set(void* cmd%s) {\n" %
self.MakeTypedCmdArgString("_", True))
file.Write(" static_cast<ValueType*>(cmd)->Init(%s);\n" % copy_args)
file.Write(" return NextCmdAddress<ValueType>(cmd);\n")
file.Write(" }\n")
file.Write("\n")
def WriteStruct(self, file):
self.type_handler.WriteStruct(self, file)
def WriteDocs(self, file):
self.type_handler.WriteDocs(self, file)
def WriteCmdHelper(self, file):
"""Writes the cmd's helper."""
self.type_handler.WriteCmdHelper(self, file)
def WriteServiceImplementation(self, file):
"""Writes the service implementation for a command."""
self.type_handler.WriteServiceImplementation(self, file)
def WriteServiceUnitTest(self, file, *extras):
"""Writes the service implementation for a command."""
self.type_handler.WriteServiceUnitTest(self, file, *extras)
def WriteGLES2CLibImplementation(self, file):
"""Writes the GLES2 C Lib Implemention."""
self.type_handler.WriteGLES2CLibImplementation(self, file)
def WriteGLES2InterfaceHeader(self, file):
"""Writes the GLES2 Interface declaration."""
self.type_handler.WriteGLES2InterfaceHeader(self, file)
def WriteMojoGLES2ImplHeader(self, file):
"""Writes the Mojo GLES2 implementation header declaration."""
self.type_handler.WriteMojoGLES2ImplHeader(self, file)
def WriteMojoGLES2Impl(self, file):
"""Writes the Mojo GLES2 implementation declaration."""
self.type_handler.WriteMojoGLES2Impl(self, file)
def WriteGLES2InterfaceStub(self, file):
"""Writes the GLES2 Interface Stub declaration."""
self.type_handler.WriteGLES2InterfaceStub(self, file)
def WriteGLES2InterfaceStubImpl(self, file):
"""Writes the GLES2 Interface Stub declaration."""
self.type_handler.WriteGLES2InterfaceStubImpl(self, file)
def WriteGLES2ImplementationHeader(self, file):
"""Writes the GLES2 Implemention declaration."""
self.type_handler.WriteGLES2ImplementationHeader(self, file)
def WriteGLES2Implementation(self, file):
"""Writes the GLES2 Implemention definition."""
self.type_handler.WriteGLES2Implementation(self, file)
def WriteGLES2TraceImplementationHeader(self, file):
"""Writes the GLES2 Trace Implemention declaration."""
self.type_handler.WriteGLES2TraceImplementationHeader(self, file)
def WriteGLES2TraceImplementation(self, file):
"""Writes the GLES2 Trace Implemention definition."""
self.type_handler.WriteGLES2TraceImplementation(self, file)
def WriteGLES2Header(self, file):
"""Writes the GLES2 Implemention unit test."""
self.type_handler.WriteGLES2Header(self, file)
def WriteGLES2ImplementationUnitTest(self, file):
"""Writes the GLES2 Implemention unit test."""
self.type_handler.WriteGLES2ImplementationUnitTest(self, file)
def WriteDestinationInitalizationValidation(self, file):
"""Writes the client side destintion initialization validation."""
self.type_handler.WriteDestinationInitalizationValidation(self, file)
def WriteFormatTest(self, file):
"""Writes the cmd's format test."""
self.type_handler.WriteFormatTest(self, file)
class PepperInterface(object):
"""A class that represents a function."""
def __init__(self, info):
self.name = info["name"]
self.dev = info["dev"]
def GetName(self):
return self.name
def GetInterfaceName(self):
upperint = ""
dev = ""
if self.name:
upperint = "_" + self.name.upper()
if self.dev:
dev = "_DEV"
return "PPB_OPENGLES2%s%s_INTERFACE" % (upperint, dev)
def GetInterfaceString(self):
dev = ""
if self.dev:
dev = "(Dev)"
return "PPB_OpenGLES2%s%s" % (self.name, dev)
def GetStructName(self):
dev = ""
if self.dev:
dev = "_Dev"
return "PPB_OpenGLES2%s%s" % (self.name, dev)
class ImmediateFunction(Function):
"""A class that represnets an immediate function command."""
def __init__(self, func):
Function.__init__(
self,
"%sImmediate" % func.name,
func.info)
def InitFunction(self):
# Override args in original_args and args_for_cmds with immediate versions
# of the args.
new_original_args = []
for arg in self.original_args:
new_arg = arg.GetImmediateVersion()
if new_arg:
new_original_args.append(new_arg)
self.original_args = new_original_args
new_args_for_cmds = []
for arg in self.args_for_cmds:
new_arg = arg.GetImmediateVersion()
if new_arg:
new_args_for_cmds.append(new_arg)
self.args_for_cmds = new_args_for_cmds
Function.InitFunction(self)
def IsImmediate(self):
return True
def WriteCommandDescription(self, file):
"""Overridden from Function"""
file.Write("//! Immediate version of command that corresponds to gl%s.\n" %
self.original_name)
def WriteServiceImplementation(self, file):
"""Overridden from Function"""
self.type_handler.WriteImmediateServiceImplementation(self, file)
def WriteHandlerImplementation(self, file):
"""Overridden from Function"""
self.type_handler.WriteImmediateHandlerImplementation(self, file)
def WriteServiceUnitTest(self, file, *extras):
"""Writes the service implementation for a command."""
self.type_handler.WriteImmediateServiceUnitTest(self, file, *extras)
def WriteValidationCode(self, file):
"""Overridden from Function"""
self.type_handler.WriteImmediateValidationCode(self, file)
def WriteCmdArgFlag(self, file):
"""Overridden from Function"""
file.Write(" static const cmd::ArgFlags kArgFlags = cmd::kAtLeastN;\n")
def WriteCmdComputeSize(self, file):
"""Overridden from Function"""
self.type_handler.WriteImmediateCmdComputeSize(self, file)
def WriteCmdSetHeader(self, file):
"""Overridden from Function"""
self.type_handler.WriteImmediateCmdSetHeader(self, file)
def WriteCmdInit(self, file):
"""Overridden from Function"""
self.type_handler.WriteImmediateCmdInit(self, file)
def WriteCmdSet(self, file):
"""Overridden from Function"""
self.type_handler.WriteImmediateCmdSet(self, file)
def WriteCmdHelper(self, file):
"""Overridden from Function"""
self.type_handler.WriteImmediateCmdHelper(self, file)
def WriteFormatTest(self, file):
"""Overridden from Function"""
self.type_handler.WriteImmediateFormatTest(self, file)
class BucketFunction(Function):
"""A class that represnets a bucket version of a function command."""
def __init__(self, func):
Function.__init__(
self,
"%sBucket" % func.name,
func.info)
def InitFunction(self):
# Override args in original_args and args_for_cmds with bucket versions
# of the args.
new_original_args = []
for arg in self.original_args:
new_arg = arg.GetBucketVersion()
if new_arg:
new_original_args.append(new_arg)
self.original_args = new_original_args
new_args_for_cmds = []
for arg in self.args_for_cmds:
new_arg = arg.GetBucketVersion()
if new_arg:
new_args_for_cmds.append(new_arg)
self.args_for_cmds = new_args_for_cmds
Function.InitFunction(self)
def WriteCommandDescription(self, file):
"""Overridden from Function"""
file.Write("//! Bucket version of command that corresponds to gl%s.\n" %
self.original_name)
def WriteServiceImplementation(self, file):
"""Overridden from Function"""
self.type_handler.WriteBucketServiceImplementation(self, file)
def WriteHandlerImplementation(self, file):
"""Overridden from Function"""
self.type_handler.WriteBucketHandlerImplementation(self, file)
def WriteServiceUnitTest(self, file, *extras):
"""Overridden from Function"""
self.type_handler.WriteBucketServiceUnitTest(self, file, *extras)
def MakeOriginalArgString(self, prefix, add_comma = False, separator = ", "):
"""Overridden from Function"""
args = self.GetOriginalArgs()
arg_string = separator.join(
["%s%s" % (prefix, arg.name[0:-10] if arg.name.endswith("_bucket_id")
else arg.name) for arg in args])
return super(BucketFunction, self)._MaybePrependComma(arg_string, add_comma)
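  # e.g. an argument renamed "name_bucket_id" by its bucket version is printed
  # back as "name" here, so the generated GL-facing call matches the original
  # signature.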
def CreateArg(arg_string):
"""Creates an Argument."""
arg_parts = arg_string.split()
if len(arg_parts) == 1 and arg_parts[0] == 'void':
return None
# Is this a pointer argument?
elif arg_string.find('*') >= 0:
return PointerArgument(
arg_parts[-1],
" ".join(arg_parts[0:-1]))
# Is this a resource argument? Must come after pointer check.
elif arg_parts[0].startswith('GLidBind'):
return ResourceIdBindArgument(arg_parts[-1], " ".join(arg_parts[0:-1]))
elif arg_parts[0].startswith('GLidZero'):
return ResourceIdZeroArgument(arg_parts[-1], " ".join(arg_parts[0:-1]))
elif arg_parts[0].startswith('GLid'):
return ResourceIdArgument(arg_parts[-1], " ".join(arg_parts[0:-1]))
elif arg_parts[0].startswith('GLenum') and len(arg_parts[0]) > 6:
return EnumArgument(arg_parts[-1], " ".join(arg_parts[0:-1]))
elif arg_parts[0].startswith('GLbitfield') and len(arg_parts[0]) > 10:
return BitFieldArgument(arg_parts[-1], " ".join(arg_parts[0:-1]))
elif arg_parts[0].startswith('GLboolean') and len(arg_parts[0]) > 9:
return ValidatedBoolArgument(arg_parts[-1], " ".join(arg_parts[0:-1]))
elif arg_parts[0].startswith('GLboolean'):
return BoolArgument(arg_parts[-1], " ".join(arg_parts[0:-1]))
elif arg_parts[0].startswith('GLintUniformLocation'):
return UniformLocationArgument(arg_parts[-1])
elif (arg_parts[0].startswith('GLint') and len(arg_parts[0]) > 5 and
not arg_parts[0].startswith('GLintptr')):
return IntArgument(arg_parts[-1], " ".join(arg_parts[0:-1]))
elif (arg_parts[0].startswith('GLsizeiNotNegative') or
arg_parts[0].startswith('GLintptrNotNegative')):
return SizeNotNegativeArgument(arg_parts[-1],
" ".join(arg_parts[0:-1]),
arg_parts[0][0:-11])
elif arg_parts[0].startswith('GLsize'):
return SizeArgument(arg_parts[-1], " ".join(arg_parts[0:-1]))
else:
return Argument(arg_parts[-1], " ".join(arg_parts[0:-1]))
class GLGenerator(object):
"""A class to generate GL command buffers."""
_function_re = re.compile(r'GL_APICALL(.*?)GL_APIENTRY (.*?) \((.*?)\);')
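  # _function_re matches one-line prototypes from cmd_buffer_functions.txt,
  # e.g. (illustrative):
  #   GL_APICALL void GL_APIENTRY glGenBuffers (GLsizei n, GLuint* buffers);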
def __init__(self, verbose):
self.original_functions = []
self.functions = []
self.verbose = verbose
self.errors = 0
self.pepper_interfaces = []
self.interface_info = {}
self.generated_cpp_filenames = []
for interface in _PEPPER_INTERFACES:
interface = PepperInterface(interface)
self.pepper_interfaces.append(interface)
self.interface_info[interface.GetName()] = interface
def AddFunction(self, func):
"""Adds a function."""
self.functions.append(func)
def GetFunctionInfo(self, name):
"""Gets a type info for the given function name."""
if name in _FUNCTION_INFO:
func_info = _FUNCTION_INFO[name].copy()
else:
func_info = {}
    if 'type' not in func_info:
func_info['type'] = ''
return func_info
def Log(self, msg):
"""Prints something if verbose is true."""
if self.verbose:
print msg
def Error(self, msg):
"""Prints an error."""
print "Error: %s" % msg
self.errors += 1
def WriteLicense(self, file):
"""Writes the license."""
file.Write(_LICENSE)
def WriteNamespaceOpen(self, file):
"""Writes the code for the namespace."""
file.Write("namespace gpu {\n")
file.Write("namespace gles2 {\n")
file.Write("\n")
def WriteNamespaceClose(self, file):
"""Writes the code to close the namespace."""
file.Write("} // namespace gles2\n")
file.Write("} // namespace gpu\n")
file.Write("\n")
def ParseGLH(self, filename):
"""Parses the cmd_buffer_functions.txt file and extracts the functions"""
f = open(filename, "r")
functions = f.read()
f.close()
for line in functions.splitlines():
match = self._function_re.match(line)
if match:
func_name = match.group(2)[2:]
func_info = self.GetFunctionInfo(func_name)
if func_info['type'] == 'Noop':
continue
parsed_func_info = {
'original_name': func_name,
'original_args': match.group(3),
'return_type': match.group(1).strip(),
}
for k in parsed_func_info.keys():
          if k not in func_info:
func_info[k] = parsed_func_info[k]
f = Function(func_name, func_info)
self.original_functions.append(f)
#for arg in f.GetOriginalArgs():
# if not isinstance(arg, EnumArgument) and arg.type == 'GLenum':
# self.Log("%s uses bare GLenum %s." % (func_name, arg.name))
gen_cmd = f.GetInfo('gen_cmd')
if gen_cmd == True or gen_cmd == None:
if f.type_handler.NeedsDataTransferFunction(f):
methods = f.GetDataTransferMethods()
if 'immediate' in methods:
self.AddFunction(ImmediateFunction(f))
if 'bucket' in methods:
self.AddFunction(BucketFunction(f))
if 'shm' in methods:
self.AddFunction(f)
else:
self.AddFunction(f)
self.Log("Auto Generated Functions : %d" %
len([f for f in self.functions if f.can_auto_generate or
(not f.IsType('') and not f.IsType('Custom') and
not f.IsType('Todo'))]))
funcs = [f for f in self.functions if not f.can_auto_generate and
(f.IsType('') or f.IsType('Custom') or f.IsType('Todo'))]
self.Log("Non Auto Generated Functions: %d" % len(funcs))
for f in funcs:
self.Log(" %-10s %-20s gl%s" % (f.info['type'], f.return_type, f.name))
def WriteCommandIds(self, filename):
"""Writes the command buffer format"""
file = CHeaderWriter(filename)
file.Write("#define GLES2_COMMAND_LIST(OP) \\\n")
id = 256
for func in self.functions:
file.Write(" %-60s /* %d */ \\\n" %
("OP(%s)" % func.name, id))
id += 1
file.Write("\n")
file.Write("enum CommandId {\n")
file.Write(" kStartPoint = cmd::kLastCommonId, "
"// All GLES2 commands start after this.\n")
file.Write("#define GLES2_CMD_OP(name) k ## name,\n")
file.Write(" GLES2_COMMAND_LIST(GLES2_CMD_OP)\n")
file.Write("#undef GLES2_CMD_OP\n")
file.Write(" kNumCommands\n")
file.Write("};\n")
file.Write("\n")
file.Close()
self.generated_cpp_filenames.append(file.filename)
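  # WriteCommandIds() produces a header shaped roughly like (illustrative):
  #   #define GLES2_COMMAND_LIST(OP) \
  #     OP(ActiveTexture)  /* 256 */ \
  #     OP(AttachShader)   /* 257 */ \
  #     ...
  #   enum CommandId { ... };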
def WriteFormat(self, filename):
"""Writes the command buffer format"""
file = CHeaderWriter(filename)
# Forward declaration of a few enums used in constant argument
# to avoid including GL header files.
enum_defines = {
'GL_SYNC_GPU_COMMANDS_COMPLETE': '0x9117',
'GL_SYNC_FLUSH_COMMANDS_BIT': '0x00000001',
}
file.Write('\n')
for enum in enum_defines:
file.Write("#define %s %s\n" % (enum, enum_defines[enum]))
file.Write('\n')
for func in self.functions:
if True:
#gen_cmd = func.GetInfo('gen_cmd')
#if gen_cmd == True or gen_cmd == None:
func.WriteStruct(file)
file.Write("\n")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteDocs(self, filename):
"""Writes the command buffer doc version of the commands"""
file = CWriter(filename)
for func in self.functions:
if True:
#gen_cmd = func.GetInfo('gen_cmd')
#if gen_cmd == True or gen_cmd == None:
func.WriteDocs(file)
file.Write("\n")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteFormatTest(self, filename):
"""Writes the command buffer format test."""
file = CHeaderWriter(
filename,
"// This file contains unit tests for gles2 commmands\n"
"// It is included by gles2_cmd_format_test.cc\n"
"\n")
for func in self.functions:
if True:
#gen_cmd = func.GetInfo('gen_cmd')
#if gen_cmd == True or gen_cmd == None:
func.WriteFormatTest(file)
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteCmdHelperHeader(self, filename):
"""Writes the gles2 command helper."""
file = CHeaderWriter(filename)
for func in self.functions:
if True:
#gen_cmd = func.GetInfo('gen_cmd')
#if gen_cmd == True or gen_cmd == None:
func.WriteCmdHelper(file)
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteServiceContextStateHeader(self, filename):
"""Writes the service context state header."""
file = CHeaderWriter(
filename,
"// It is included by context_state.h\n")
file.Write("struct EnableFlags {\n")
file.Write(" EnableFlags();\n")
for capability in _CAPABILITY_FLAGS:
file.Write(" bool %s;\n" % capability['name'])
file.Write(" bool cached_%s;\n" % capability['name'])
file.Write("};\n\n")
for state_name in sorted(_STATES.keys()):
state = _STATES[state_name]
for item in state['states']:
if isinstance(item['default'], list):
file.Write("%s %s[%d];\n" % (item['type'], item['name'],
len(item['default'])))
else:
file.Write("%s %s;\n" % (item['type'], item['name']))
if item.get('cached', False):
if isinstance(item['default'], list):
file.Write("%s cached_%s[%d];\n" % (item['type'], item['name'],
len(item['default'])))
else:
file.Write("%s cached_%s;\n" % (item['type'], item['name']))
file.Write("\n")
file.Write("""
inline void SetDeviceCapabilityState(GLenum cap, bool enable) {
switch (cap) {
""")
for capability in _CAPABILITY_FLAGS:
file.Write("""\
case GL_%s:
""" % capability['name'].upper())
file.Write("""\
if (enable_flags.cached_%(name)s == enable &&
!ignore_cached_state)
return;
enable_flags.cached_%(name)s = enable;
break;
""" % capability)
file.Write("""\
default:
NOTREACHED();
return;
}
if (enable)
glEnable(cap);
else
glDisable(cap);
}
""")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteClientContextStateHeader(self, filename):
"""Writes the client context state header."""
file = CHeaderWriter(
filename,
"// It is included by client_context_state.h\n")
file.Write("struct EnableFlags {\n")
file.Write(" EnableFlags();\n")
for capability in _CAPABILITY_FLAGS:
file.Write(" bool %s;\n" % capability['name'])
file.Write("};\n\n")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteContextStateGetters(self, file, class_name):
"""Writes the state getters."""
for gl_type in ["GLint", "GLfloat"]:
file.Write("""
bool %s::GetStateAs%s(
GLenum pname, %s* params, GLsizei* num_written) const {
switch (pname) {
""" % (class_name, gl_type, gl_type))
for state_name in sorted(_STATES.keys()):
state = _STATES[state_name]
if 'enum' in state:
file.Write(" case %s:\n" % state['enum'])
file.Write(" *num_written = %d;\n" % len(state['states']))
file.Write(" if (params) {\n")
          for ndx, item in enumerate(state['states']):
file.Write(" params[%d] = static_cast<%s>(%s);\n" %
(ndx, gl_type, item['name']))
file.Write(" }\n")
file.Write(" return true;\n")
else:
for item in state['states']:
file.Write(" case %s:\n" % item['enum'])
if isinstance(item['default'], list):
item_len = len(item['default'])
file.Write(" *num_written = %d;\n" % item_len)
file.Write(" if (params) {\n")
if item['type'] == gl_type:
file.Write(" memcpy(params, %s, sizeof(%s) * %d);\n" %
(item['name'], item['type'], item_len))
else:
file.Write(" for (size_t i = 0; i < %s; ++i) {\n" %
item_len)
file.Write(" params[i] = %s;\n" %
(GetGLGetTypeConversion(gl_type, item['type'],
"%s[i]" % item['name'])))
file.Write(" }\n");
else:
file.Write(" *num_written = 1;\n")
file.Write(" if (params) {\n")
file.Write(" params[0] = %s;\n" %
(GetGLGetTypeConversion(gl_type, item['type'],
item['name'])))
file.Write(" }\n")
file.Write(" return true;\n")
for capability in _CAPABILITY_FLAGS:
file.Write(" case GL_%s:\n" % capability['name'].upper())
file.Write(" *num_written = 1;\n")
file.Write(" if (params) {\n")
file.Write(
" params[0] = static_cast<%s>(enable_flags.%s);\n" %
(gl_type, capability['name']))
file.Write(" }\n")
file.Write(" return true;\n")
file.Write(""" default:
return false;
}
}
""")
def WriteServiceContextStateImpl(self, filename):
"""Writes the context state service implementation."""
file = CHeaderWriter(
filename,
"// It is included by context_state.cc\n")
code = []
for capability in _CAPABILITY_FLAGS:
code.append("%s(%s)" %
(capability['name'],
('false', 'true')['default' in capability]))
code.append("cached_%s(%s)" %
(capability['name'],
('false', 'true')['default' in capability]))
file.Write("ContextState::EnableFlags::EnableFlags()\n : %s {\n}\n" %
",\n ".join(code))
file.Write("\n")
file.Write("void ContextState::Initialize() {\n")
for state_name in sorted(_STATES.keys()):
state = _STATES[state_name]
for item in state['states']:
if isinstance(item['default'], list):
for ndx, value in enumerate(item['default']):
file.Write(" %s[%d] = %s;\n" % (item['name'], ndx, value))
else:
file.Write(" %s = %s;\n" % (item['name'], item['default']))
if item.get('cached', False):
if isinstance(item['default'], list):
for ndx, value in enumerate(item['default']):
file.Write(" cached_%s[%d] = %s;\n" % (item['name'], ndx, value))
else:
file.Write(" cached_%s = %s;\n" % (item['name'], item['default']))
file.Write("}\n")
file.Write("""
void ContextState::InitCapabilities(const ContextState* prev_state) const {
""")
def WriteCapabilities(test_prev, es3_caps):
for capability in _CAPABILITY_FLAGS:
capability_name = capability['name']
capability_es3 = 'es3' in capability and capability['es3'] == True
        if ((capability_es3 and not es3_caps) or
            (not capability_es3 and es3_caps)):
continue
if test_prev:
file.Write(""" if (prev_state->enable_flags.cached_%s !=
enable_flags.cached_%s) {\n""" %
(capability_name, capability_name))
file.Write(" EnableDisable(GL_%s, enable_flags.cached_%s);\n" %
(capability_name.upper(), capability_name))
if test_prev:
file.Write(" }")
file.Write(" if (prev_state) {")
WriteCapabilities(True, False)
file.Write(" if (feature_info_->IsES3Capable()) {\n")
WriteCapabilities(True, True)
file.Write(" }\n")
file.Write(" } else {")
WriteCapabilities(False, False)
file.Write(" if (feature_info_->IsES3Capable()) {\n")
WriteCapabilities(False, True)
file.Write(" }\n")
file.Write(" }")
file.Write("""}
void ContextState::InitState(const ContextState *prev_state) const {
""")
def WriteStates(test_prev):
# We need to sort the keys so the expectations match
for state_name in sorted(_STATES.keys()):
state = _STATES[state_name]
if state['type'] == 'FrontBack':
num_states = len(state['states'])
for ndx, group in enumerate(Grouper(num_states / 2, state['states'])):
if test_prev:
file.Write(" if (")
args = []
for place, item in enumerate(group):
item_name = CachedStateName(item)
args.append('%s' % item_name)
if test_prev:
if place > 0:
file.Write(' ||\n')
file.Write("(%s != prev_state->%s)" % (item_name, item_name))
if test_prev:
file.Write(")\n")
file.Write(
" gl%s(%s, %s);\n" %
(state['func'], ('GL_FRONT', 'GL_BACK')[ndx], ", ".join(args)))
elif state['type'] == 'NamedParameter':
for item in state['states']:
item_name = CachedStateName(item)
if 'extension_flag' in item:
file.Write(" if (feature_info_->feature_flags().%s) {\n " %
item['extension_flag'])
if test_prev:
if isinstance(item['default'], list):
file.Write(" if (memcmp(prev_state->%s, %s, "
"sizeof(%s) * %d)) {\n" %
(item_name, item_name, item['type'],
len(item['default'])))
else:
file.Write(" if (prev_state->%s != %s) {\n " %
(item_name, item_name))
if 'gl_version_flag' in item:
item_name = item['gl_version_flag']
inverted = ''
if item_name[0] == '!':
inverted = '!'
item_name = item_name[1:]
file.Write(" if (%sfeature_info_->gl_version_info().%s) {\n" %
(inverted, item_name))
file.Write(" gl%s(%s, %s);\n" %
(state['func'],
(item['enum_set']
if 'enum_set' in item else item['enum']),
item['name']))
if 'gl_version_flag' in item:
file.Write(" }\n")
if test_prev:
if 'extension_flag' in item:
file.Write(" ")
file.Write(" }")
if 'extension_flag' in item:
file.Write(" }")
else:
if 'extension_flag' in state:
file.Write(" if (feature_info_->feature_flags().%s)\n " %
state['extension_flag'])
if test_prev:
file.Write(" if (")
args = []
for place, item in enumerate(state['states']):
item_name = CachedStateName(item)
args.append('%s' % item_name)
if test_prev:
if place > 0:
file.Write(' ||\n')
file.Write("(%s != prev_state->%s)" %
(item_name, item_name))
if test_prev:
file.Write(" )\n")
file.Write(" gl%s(%s);\n" % (state['func'], ", ".join(args)))
file.Write(" if (prev_state) {")
WriteStates(True)
file.Write(" } else {")
WriteStates(False)
file.Write(" }")
file.Write("}\n")
file.Write("""bool ContextState::GetEnabled(GLenum cap) const {
switch (cap) {
""")
for capability in _CAPABILITY_FLAGS:
file.Write(" case GL_%s:\n" % capability['name'].upper())
file.Write(" return enable_flags.%s;\n" % capability['name'])
file.Write(""" default:
NOTREACHED();
return false;
}
}
""")
self.WriteContextStateGetters(file, "ContextState")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteClientContextStateImpl(self, filename):
"""Writes the context state client side implementation."""
file = CHeaderWriter(
filename,
"// It is included by client_context_state.cc\n")
code = []
for capability in _CAPABILITY_FLAGS:
code.append("%s(%s)" %
(capability['name'],
('false', 'true')['default' in capability]))
file.Write(
"ClientContextState::EnableFlags::EnableFlags()\n : %s {\n}\n" %
",\n ".join(code))
file.Write("\n")
file.Write("""
bool ClientContextState::SetCapabilityState(
GLenum cap, bool enabled, bool* changed) {
*changed = false;
switch (cap) {
""")
for capability in _CAPABILITY_FLAGS:
file.Write(" case GL_%s:\n" % capability['name'].upper())
file.Write(""" if (enable_flags.%(name)s != enabled) {
*changed = true;
enable_flags.%(name)s = enabled;
}
return true;
""" % capability)
file.Write(""" default:
return false;
}
}
""")
file.Write("""bool ClientContextState::GetEnabled(
GLenum cap, bool* enabled) const {
switch (cap) {
""")
for capability in _CAPABILITY_FLAGS:
file.Write(" case GL_%s:\n" % capability['name'].upper())
file.Write(" *enabled = enable_flags.%s;\n" % capability['name'])
file.Write(" return true;\n")
file.Write(""" default:
return false;
}
}
""")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteServiceImplementation(self, filename):
"""Writes the service decorder implementation."""
file = CHeaderWriter(
filename,
"// It is included by gles2_cmd_decoder.cc\n")
for func in self.functions:
if True:
#gen_cmd = func.GetInfo('gen_cmd')
#if gen_cmd == True or gen_cmd == None:
func.WriteServiceImplementation(file)
file.Write("""
bool GLES2DecoderImpl::SetCapabilityState(GLenum cap, bool enabled) {
switch (cap) {
""")
for capability in _CAPABILITY_FLAGS:
file.Write(" case GL_%s:\n" % capability['name'].upper())
if 'state_flag' in capability:
file.Write("""\
state_.enable_flags.%(name)s = enabled;
if (state_.enable_flags.cached_%(name)s != enabled
|| state_.ignore_cached_state) {
%(state_flag)s = true;
}
return false;
""" % capability)
else:
file.Write("""\
state_.enable_flags.%(name)s = enabled;
if (state_.enable_flags.cached_%(name)s != enabled
|| state_.ignore_cached_state) {
state_.enable_flags.cached_%(name)s = enabled;
return true;
}
return false;
""" % capability)
file.Write(""" default:
NOTREACHED();
return false;
}
}
""")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteServiceUnitTests(self, filename):
"""Writes the service decorder unit tests."""
num_tests = len(self.functions)
FUNCTIONS_PER_FILE = 98 # hard code this so it doesn't change.
count = 0
for test_num in range(0, num_tests, FUNCTIONS_PER_FILE):
count += 1
name = filename % count
file = CHeaderWriter(
name,
"// It is included by gles2_cmd_decoder_unittest_%d.cc\n" % count)
test_name = 'GLES2DecoderTest%d' % count
end = test_num + FUNCTIONS_PER_FILE
if end > num_tests:
end = num_tests
for idx in range(test_num, end):
func = self.functions[idx]
# Do any filtering of the functions here, so that the functions
# will not move between the numbered files if filtering properties
# are changed.
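# For example (illustrative): with FUNCTIONS_PER_FILE = 98, the function at
# index 150 always lands in gles2_cmd_decoder_unittest_2.cc. Functions are
# partitioned by index before any filtering is applied, so a filtered
# function is simply skipped (or emitted as a TODO) in place and later
# functions never shift between the numbered files.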
if func.GetInfo('extension_flag'):
continue
if True:
#gen_cmd = func.GetInfo('gen_cmd')
#if gen_cmd == True or gen_cmd == None:
if func.GetInfo('unit_test') == False:
file.Write("// TODO(gman): %s\n" % func.name)
else:
func.WriteServiceUnitTest(file, {
'test_name': test_name
})
file.Close()
self.generated_cpp_filenames.append(file.filename)
file = CHeaderWriter(
filename % 0,
"// It is included by gles2_cmd_decoder_unittest_base.cc\n")
file.Write(
"""void GLES2DecoderTestBase::SetupInitCapabilitiesExpectations(
bool es3_capable) {""")
for capability in _CAPABILITY_FLAGS:
capability_es3 = 'es3' in capability and capability['es3'] == True
if not capability_es3:
file.Write(" ExpectEnableDisable(GL_%s, %s);\n" %
(capability['name'].upper(),
('false', 'true')['default' in capability]))
file.Write(" if (es3_capable) {")
for capability in _CAPABILITY_FLAGS:
capability_es3 = 'es3' in capability and capability['es3'] == True
if capability_es3:
file.Write(" ExpectEnableDisable(GL_%s, %s);\n" %
(capability['name'].upper(),
('false', 'true')['default' in capability]))
file.Write(""" }
}
void GLES2DecoderTestBase::SetupInitStateExpectations() {
""")
# We need to sort the keys so the expectations match
for state_name in sorted(_STATES.keys()):
state = _STATES[state_name]
if state['type'] == 'FrontBack':
num_states = len(state['states'])
for ndx, group in enumerate(Grouper(num_states / 2, state['states'])):
args = []
for item in group:
if 'expected' in item:
args.append(item['expected'])
else:
args.append(item['default'])
file.Write(
" EXPECT_CALL(*gl_, %s(%s, %s))\n" %
(state['func'], ('GL_FRONT', 'GL_BACK')[ndx], ", ".join(args)))
file.Write(" .Times(1)\n")
file.Write(" .RetiresOnSaturation();\n")
elif state['type'] == 'NamedParameter':
for item in state['states']:
if 'extension_flag' in item:
file.Write(" if (group_->feature_info()->feature_flags().%s) {\n" %
item['extension_flag'])
file.Write(" ")
expect_value = item['default']
if isinstance(expect_value, list):
# TODO: Currently we do not check array values.
expect_value = "_"
file.Write(
" EXPECT_CALL(*gl_, %s(%s, %s))\n" %
(state['func'],
(item['enum_set']
if 'enum_set' in item else item['enum']),
expect_value))
file.Write(" .Times(1)\n")
file.Write(" .RetiresOnSaturation();\n")
if 'extension_flag' in item:
file.Write(" }\n")
else:
if 'extension_flag' in state:
file.Write(" if (group_->feature_info()->feature_flags().%s) {\n" %
state['extension_flag'])
file.Write(" ")
args = []
for item in state['states']:
if 'expected' in item:
args.append(item['expected'])
else:
args.append(item['default'])
# TODO: Currently we do not check array values.
args = ["_" if isinstance(arg, list) else arg for arg in args]
file.Write(" EXPECT_CALL(*gl_, %s(%s))\n" %
(state['func'], ", ".join(args)))
file.Write(" .Times(1)\n")
file.Write(" .RetiresOnSaturation();\n")
if 'extension_flag' in state:
file.Write(" }\n")
file.Write("""}
""")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteServiceUnitTestsForExtensions(self, filename):
"""Writes the service decorder unit tests for functions with extension_flag.
The functions are special in that they need a specific unit test
baseclass to turn on the extension.
"""
functions = [f for f in self.functions if f.GetInfo('extension_flag')]
file = CHeaderWriter(
filename,
"// It is included by gles2_cmd_decoder_unittest_extensions.cc\n")
for func in functions:
if True:
if func.GetInfo('unit_test') == False:
file.Write("// TODO(gman): %s\n" % func.name)
else:
extension = ToCamelCase(
ToGLExtensionString(func.GetInfo('extension_flag')))
func.WriteServiceUnitTest(file, {
'test_name': 'GLES2DecoderTestWith%s' % extension
})
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteGLES2Header(self, filename):
"""Writes the GLES2 header."""
file = CHeaderWriter(
filename,
"// This file contains Chromium-specific GLES2 declarations.\n\n")
for func in self.original_functions:
func.WriteGLES2Header(file)
file.Write("\n")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteGLES2CLibImplementation(self, filename):
"""Writes the GLES2 c lib implementation."""
file = CHeaderWriter(
filename,
"// These functions emulate GLES2 over command buffers.\n")
for func in self.original_functions:
func.WriteGLES2CLibImplementation(file)
file.Write("""
namespace gles2 {
extern const NameToFunc g_gles2_function_table[] = {
""")
for func in self.original_functions:
file.Write(
' { "gl%s", reinterpret_cast<GLES2FunctionPointer>(gl%s), },\n' %
(func.name, func.name))
file.Write(""" { NULL, NULL, },
};
} // namespace gles2
""")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteGLES2InterfaceHeader(self, filename):
"""Writes the GLES2 interface header."""
file = CHeaderWriter(
filename,
"// This file is included by gles2_interface.h to declare the\n"
"// GL api functions.\n")
for func in self.original_functions:
func.WriteGLES2InterfaceHeader(file)
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteMojoGLES2ImplHeader(self, filename):
"""Writes the Mojo GLES2 implementation header."""
file = CHeaderWriter(
filename,
"// This file is included by gles2_interface.h to declare the\n"
"// GL api functions.\n")
code = """
#include "gpu/command_buffer/client/gles2_interface.h"
#include "third_party/mojo/src/mojo/public/c/gles2/gles2.h"
namespace mojo {
class MojoGLES2Impl : public gpu::gles2::GLES2Interface {
public:
explicit MojoGLES2Impl(MojoGLES2Context context) {
context_ = context;
}
~MojoGLES2Impl() override {}
"""
file.Write(code)
for func in self.original_functions:
func.WriteMojoGLES2ImplHeader(file)
code = """
private:
MojoGLES2Context context_;
};
} // namespace mojo
"""
file.Write(code)
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteMojoGLES2Impl(self, filename):
"""Writes the Mojo GLES2 implementation."""
file = CWriter(filename)
file.Write(_LICENSE)
file.Write(_DO_NOT_EDIT_WARNING)
code = """
#include "mojo/gpu/mojo_gles2_impl_autogen.h"
#include "base/logging.h"
#include "third_party/mojo/src/mojo/public/c/gles2/chromium_copy_texture.h"
#include "third_party/mojo/src/mojo/public/c/gles2/chromium_image.h"
#include "third_party/mojo/src/mojo/public/c/gles2/chromium_miscellaneous.h"
#include "third_party/mojo/src/mojo/public/c/gles2/chromium_pixel_transfer_buffer_object.h"
#include "third_party/mojo/src/mojo/public/c/gles2/chromium_sub_image.h"
#include "third_party/mojo/src/mojo/public/c/gles2/chromium_sync_point.h"
#include "third_party/mojo/src/mojo/public/c/gles2/chromium_texture_mailbox.h"
#include "third_party/mojo/src/mojo/public/c/gles2/gles2.h"
#include "third_party/mojo/src/mojo/public/c/gles2/occlusion_query_ext.h"
namespace mojo {
"""
file.Write(code)
for func in self.original_functions:
func.WriteMojoGLES2Impl(file)
code = """
} // namespace mojo
"""
file.Write(code)
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteGLES2InterfaceStub(self, filename):
"""Writes the GLES2 interface stub header."""
file = CHeaderWriter(
filename,
"// This file is included by gles2_interface_stub.h.\n")
for func in self.original_functions:
func.WriteGLES2InterfaceStub(file)
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteGLES2InterfaceStubImpl(self, filename):
"""Writes the GLES2 interface header."""
file = CHeaderWriter(
filename,
"// This file is included by gles2_interface_stub.cc.\n")
for func in self.original_functions:
func.WriteGLES2InterfaceStubImpl(file)
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteGLES2ImplementationHeader(self, filename):
"""Writes the GLES2 Implementation header."""
file = CHeaderWriter(
filename,
"// This file is included by gles2_implementation.h to declare the\n"
"// GL api functions.\n")
for func in self.original_functions:
func.WriteGLES2ImplementationHeader(file)
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteGLES2Implementation(self, filename):
"""Writes the GLES2 Implementation."""
file = CHeaderWriter(
filename,
"// This file is included by gles2_implementation.cc to define the\n"
"// GL api functions.\n")
for func in self.original_functions:
func.WriteGLES2Implementation(file)
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteGLES2TraceImplementationHeader(self, filename):
"""Writes the GLES2 Trace Implementation header."""
file = CHeaderWriter(
filename,
"// This file is included by gles2_trace_implementation.h\n")
for func in self.original_functions:
func.WriteGLES2TraceImplementationHeader(file)
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteGLES2TraceImplementation(self, filename):
"""Writes the GLES2 Trace Implementation."""
file = CHeaderWriter(
filename,
"// This file is included by gles2_trace_implementation.cc\n")
for func in self.original_functions:
func.WriteGLES2TraceImplementation(file)
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteGLES2ImplementationUnitTests(self, filename):
"""Writes the GLES2 helper header."""
file = CHeaderWriter(
filename,
"// This file is included by gles2_implementation.h to declare the\n"
"// GL api functions.\n")
for func in self.original_functions:
func.WriteGLES2ImplementationUnitTest(file)
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteServiceUtilsHeader(self, filename):
"""Writes the gles2 auto generated utility header."""
file = CHeaderWriter(filename)
for name in sorted(_NAMED_TYPE_INFO.keys()):
named_type = NamedType(_NAMED_TYPE_INFO[name])
if named_type.IsConstant():
continue
file.Write("ValueValidator<%s> %s;\n" %
(named_type.GetType(), ToUnderscore(name)))
file.Write("\n")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteServiceUtilsImplementation(self, filename):
"""Writes the gles2 auto generated utility implementation."""
file = CHeaderWriter(filename)
names = sorted(_NAMED_TYPE_INFO.keys())
for name in names:
named_type = NamedType(_NAMED_TYPE_INFO[name])
if named_type.IsConstant():
continue
if named_type.GetValidValues():
file.Write("static const %s valid_%s_table[] = {\n" %
(named_type.GetType(), ToUnderscore(name)))
for value in named_type.GetValidValues():
file.Write(" %s,\n" % value)
file.Write("};\n")
file.Write("\n")
if named_type.GetValidValuesES3():
file.Write("static const %s valid_%s_table_es3[] = {\n" %
(named_type.GetType(), ToUnderscore(name)))
for value in named_type.GetValidValuesES3():
file.Write(" %s,\n" % value)
file.Write("};\n")
file.Write("\n")
if named_type.GetDeprecatedValuesES3():
file.Write("static const %s deprecated_%s_table_es3[] = {\n" %
(named_type.GetType(), ToUnderscore(name)))
for value in named_type.GetDeprecatedValuesES3():
file.Write(" %s,\n" % value)
file.Write("};\n")
file.Write("\n")
file.Write("Validators::Validators()")
pre = ' : '
for count, name in enumerate(names):
named_type = NamedType(_NAMED_TYPE_INFO[name])
if named_type.IsConstant():
continue
if named_type.GetValidValues():
code = """%(pre)s%(name)s(
valid_%(name)s_table, arraysize(valid_%(name)s_table))"""
else:
code = "%(pre)s%(name)s()"
file.Write(code % {
'name': ToUnderscore(name),
'pre': pre,
})
pre = ',\n '
file.Write(" {\n");
file.Write("}\n\n");
file.Write("void Validators::UpdateValuesES3() {\n")
for name in names:
named_type = NamedType(_NAMED_TYPE_INFO[name])
if named_type.GetDeprecatedValuesES3():
code = """ %(name)s.RemoveValues(
deprecated_%(name)s_table_es3, arraysize(deprecated_%(name)s_table_es3));
"""
file.Write(code % {
'name': ToUnderscore(name),
})
if named_type.GetValidValuesES3():
code = """ %(name)s.AddValues(
valid_%(name)s_table_es3, arraysize(valid_%(name)s_table_es3));
"""
file.Write(code % {
'name': ToUnderscore(name),
})
file.Write("}\n\n");
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteCommonUtilsHeader(self, filename):
"""Writes the gles2 common utility header."""
file = CHeaderWriter(filename)
type_infos = sorted(_NAMED_TYPE_INFO.keys())
for type_info in type_infos:
if _NAMED_TYPE_INFO[type_info]['type'] == 'GLenum':
file.Write("static std::string GetString%s(uint32_t value);\n" %
type_info)
file.Write("\n")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteCommonUtilsImpl(self, filename):
"""Writes the gles2 common utility header."""
enum_re = re.compile(r'\#define\s+(GL_[a-zA-Z0-9_]+)\s+([0-9A-Fa-fx]+)')
dict = {}
for fname in ['third_party/khronos/GLES2/gl2.h',
'third_party/khronos/GLES2/gl2ext.h',
'third_party/khronos/GLES3/gl3.h',
'gpu/GLES2/gl2chromium.h',
'gpu/GLES2/gl2extchromium.h']:
lines = open(fname).readlines()
for line in lines:
m = enum_re.match(line)
if m:
name = m.group(1)
value = m.group(2)
if len(value) <= 10:
if not value in dict:
dict[value] = name
# Check whether our own _CHROMIUM macros conflict with the Khronos GL headers.
elif dict[value] != name and (name.endswith('_CHROMIUM') or
dict[value].endswith('_CHROMIUM')):
self.Error("code collision: %s and %s have the same code %s" %
(dict[value], name, value))
file = CHeaderWriter(filename)
file.Write("static const GLES2Util::EnumToString "
"enum_to_string_table[] = {\n")
for value in dict:
file.Write(' { %s, "%s", },\n' % (value, dict[value]))
file.Write("""};
const GLES2Util::EnumToString* const GLES2Util::enum_to_string_table_ =
enum_to_string_table;
const size_t GLES2Util::enum_to_string_table_len_ =
sizeof(enum_to_string_table) / sizeof(enum_to_string_table[0]);
""")
enums = sorted(_NAMED_TYPE_INFO.keys())
for enum in enums:
if _NAMED_TYPE_INFO[enum]['type'] == 'GLenum':
file.Write("std::string GLES2Util::GetString%s(uint32_t value) {\n" %
enum)
valid_list = _NAMED_TYPE_INFO[enum]['valid']
if 'valid_es3' in _NAMED_TYPE_INFO[enum]:
valid_list = valid_list + _NAMED_TYPE_INFO[enum]['valid_es3']
assert len(valid_list) == len(set(valid_list))
if len(valid_list) > 0:
file.Write(" static const EnumToString string_table[] = {\n")
for value in valid_list:
file.Write(' { %s, "%s" },\n' % (value, value))
file.Write(""" };
return GLES2Util::GetQualifiedEnumString(
string_table, arraysize(string_table), value);
}
""")
else:
file.Write(""" return GLES2Util::GetQualifiedEnumString(
NULL, 0, value);
}
""")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WritePepperGLES2Interface(self, filename, dev):
"""Writes the Pepper OpenGLES interface definition."""
file = CWriter(filename)
file.Write(_LICENSE)
file.Write(_DO_NOT_EDIT_WARNING)
file.Write("label Chrome {\n")
file.Write(" M39 = 1.0\n")
file.Write("};\n\n")
if not dev:
# Declare GL types.
file.Write("[version=1.0]\n")
file.Write("describe {\n")
for gltype in ['GLbitfield', 'GLboolean', 'GLbyte', 'GLclampf',
'GLclampx', 'GLenum', 'GLfixed', 'GLfloat', 'GLint',
'GLintptr', 'GLshort', 'GLsizei', 'GLsizeiptr',
'GLubyte', 'GLuint', 'GLushort']:
file.Write(" %s;\n" % gltype)
file.Write(" %s_ptr_t;\n" % gltype)
file.Write("};\n\n")
# C level typedefs.
file.Write("#inline c\n")
file.Write("#include \"ppapi/c/pp_resource.h\"\n")
if dev:
file.Write("#include \"ppapi/c/ppb_opengles2.h\"\n\n")
else:
file.Write("\n#ifndef __gl2_h_\n")
for (k, v) in _GL_TYPES.iteritems():
file.Write("typedef %s %s;\n" % (v, k))
file.Write("#ifdef _WIN64\n")
for (k, v) in _GL_TYPES_64.iteritems():
file.Write("typedef %s %s;\n" % (v, k))
file.Write("#else\n")
for (k, v) in _GL_TYPES_32.iteritems():
file.Write("typedef %s %s;\n" % (v, k))
file.Write("#endif // _WIN64\n")
file.Write("#endif // __gl2_h_\n\n")
file.Write("#endinl\n")
for interface in self.pepper_interfaces:
if interface.dev != dev:
continue
# Historically, we have provided the OpenGLES2 interfaces within a struct
# namespace. To avoid breaking code that refers to the interface as
# "struct OpenGLES2", we keep it in the struct namespace.
file.Write('\n[macro="%s", force_struct_namespace]\n' %
interface.GetInterfaceName())
file.Write("interface %s {\n" % interface.GetStructName())
for func in self.original_functions:
if not func.InPepperInterface(interface):
continue
ret_type = func.MapCTypeToPepperIdlType(func.return_type,
is_for_return_type=True)
func_prefix = " %s %s(" % (ret_type, func.GetPepperName())
file.Write(func_prefix)
file.Write("[in] PP_Resource context")
for arg in func.MakeTypedPepperIdlArgStrings():
file.Write(",\n" + " " * len(func_prefix) + arg)
file.Write(");\n")
file.Write("};\n\n")
file.Close()
def WritePepperGLES2Implementation(self, filename):
"""Writes the Pepper OpenGLES interface implementation."""
file = CWriter(filename)
file.Write(_LICENSE)
file.Write(_DO_NOT_EDIT_WARNING)
file.Write("#include \"ppapi/shared_impl/ppb_opengles2_shared.h\"\n\n")
file.Write("#include \"base/logging.h\"\n")
file.Write("#include \"gpu/command_buffer/client/gles2_implementation.h\"\n")
file.Write("#include \"ppapi/shared_impl/ppb_graphics_3d_shared.h\"\n")
file.Write("#include \"ppapi/thunk/enter.h\"\n\n")
file.Write("namespace ppapi {\n\n")
file.Write("namespace {\n\n")
file.Write("typedef thunk::EnterResource<thunk::PPB_Graphics3D_API>"
" Enter3D;\n\n")
file.Write("gpu::gles2::GLES2Implementation* ToGles2Impl(Enter3D*"
" enter) {\n")
file.Write(" DCHECK(enter);\n")
file.Write(" DCHECK(enter->succeeded());\n")
file.Write(" return static_cast<PPB_Graphics3D_Shared*>(enter->object())->"
"gles2_impl();\n");
file.Write("}\n\n");
for func in self.original_functions:
if not func.InAnyPepperExtension():
continue
original_arg = func.MakeTypedPepperArgString("")
context_arg = "PP_Resource context_id"
if len(original_arg):
arg = context_arg + ", " + original_arg
else:
arg = context_arg
file.Write("%s %s(%s) {\n" %
(func.return_type, func.GetPepperName(), arg))
file.Write(" Enter3D enter(context_id, true);\n")
file.Write(" if (enter.succeeded()) {\n")
return_str = "" if func.return_type == "void" else "return "
file.Write(" %sToGles2Impl(&enter)->%s(%s);\n" %
(return_str, func.original_name,
func.MakeOriginalArgString("")))
file.Write(" }")
if func.return_type == "void":
file.Write("\n")
else:
file.Write(" else {\n")
file.Write(" return %s;\n" % func.GetErrorReturnString())
file.Write(" }\n")
file.Write("}\n\n")
file.Write("} // namespace\n")
for interface in self.pepper_interfaces:
file.Write("const %s* PPB_OpenGLES2_Shared::Get%sInterface() {\n" %
(interface.GetStructName(), interface.GetName()))
file.Write(" static const struct %s "
"ppb_opengles2 = {\n" % interface.GetStructName())
file.Write(" &")
file.Write(",\n &".join(
f.GetPepperName() for f in self.original_functions
if f.InPepperInterface(interface)))
file.Write("\n")
file.Write(" };\n")
file.Write(" return &ppb_opengles2;\n")
file.Write("}\n")
file.Write("} // namespace ppapi\n")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteGLES2ToPPAPIBridge(self, filename):
"""Connects GLES2 helper library to PPB_OpenGLES2 interface"""
file = CWriter(filename)
file.Write(_LICENSE)
file.Write(_DO_NOT_EDIT_WARNING)
file.Write("#ifndef GL_GLEXT_PROTOTYPES\n")
file.Write("#define GL_GLEXT_PROTOTYPES\n")
file.Write("#endif\n")
file.Write("#include <GLES2/gl2.h>\n")
file.Write("#include <GLES2/gl2ext.h>\n")
file.Write("#include \"ppapi/lib/gl/gles2/gl2ext_ppapi.h\"\n\n")
for func in self.original_functions:
if not func.InAnyPepperExtension():
continue
interface = self.interface_info[func.GetInfo('pepper_interface') or '']
file.Write("%s GL_APIENTRY gl%s(%s) {\n" %
(func.return_type, func.GetPepperName(),
func.MakeTypedPepperArgString("")))
return_str = "" if func.return_type == "void" else "return "
interface_str = "glGet%sInterfacePPAPI()" % interface.GetName()
original_arg = func.MakeOriginalArgString("")
context_arg = "glGetCurrentContextPPAPI()"
if len(original_arg):
arg = context_arg + ", " + original_arg
else:
arg = context_arg
if interface.GetName():
file.Write(" const struct %s* ext = %s;\n" %
(interface.GetStructName(), interface_str))
file.Write(" if (ext)\n")
file.Write(" %sext->%s(%s);\n" %
(return_str, func.GetPepperName(), arg))
if return_str:
file.Write(" %s0;\n" % return_str)
else:
file.Write(" %s%s->%s(%s);\n" %
(return_str, interface_str, func.GetPepperName(), arg))
file.Write("}\n\n")
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteMojoGLCallVisitor(self, filename):
"""Provides the GL implementation for mojo"""
file = CWriter(filename)
file.Write(_LICENSE)
file.Write(_DO_NOT_EDIT_WARNING)
for func in self.original_functions:
if not func.IsCoreGLFunction():
continue
file.Write("VISIT_GL_CALL(%s, %s, (%s), (%s))\n" %
(func.name, func.return_type,
func.MakeTypedOriginalArgString(""),
func.MakeOriginalArgString("")))
file.Close()
self.generated_cpp_filenames.append(file.filename)
def WriteMojoGLCallVisitorForExtension(self, filename, extension):
"""Provides the GL implementation for mojo for a particular extension"""
file = CWriter(filename)
file.Write(_LICENSE)
file.Write(_DO_NOT_EDIT_WARNING)
for func in self.original_functions:
if func.GetInfo("extension") != extension:
continue
file.Write("VISIT_GL_CALL(%s, %s, (%s), (%s))\n" %
(func.name, func.return_type,
func.MakeTypedOriginalArgString(""),
func.MakeOriginalArgString("")))
file.Close()
self.generated_cpp_filenames.append(file.filename)
def Format(generated_files):
formatter = "clang-format"
if platform.system() == "Windows":
formatter += ".bat"
for filename in generated_files:
call([formatter, "-i", "-style=chromium", filename])
def main(argv):
"""This is the main function."""
parser = OptionParser()
parser.add_option(
"--output-dir",
help="base directory for resulting files, under chrome/src. default is "
"empty. Use this if you want the result stored under gen.")
parser.add_option(
"-v", "--verbose", action="store_true",
help="prints more output.")
(options, args) = parser.parse_args(args=argv)
# Add in states and capabilities to GLState.
gl_state_valid = _NAMED_TYPE_INFO['GLState']['valid']
for state_name in sorted(_STATES.keys()):
state = _STATES[state_name]
if 'extension_flag' in state:
continue
if 'enum' in state:
if not state['enum'] in gl_state_valid:
gl_state_valid.append(state['enum'])
else:
for item in state['states']:
if 'extension_flag' in item:
continue
if not item['enum'] in gl_state_valid:
gl_state_valid.append(item['enum'])
for capability in _CAPABILITY_FLAGS:
valid_value = "GL_%s" % capability['name'].upper()
if not valid_value in gl_state_valid:
gl_state_valid.append(valid_value)
# This script lives under gpu/command_buffer, cd to base directory.
os.chdir(os.path.dirname(__file__) + "/../..")
base_dir = os.getcwd()
gen = GLGenerator(options.verbose)
gen.ParseGLH("gpu/command_buffer/cmd_buffer_functions.txt")
# Support generating files under gen/
if options.output_dir != None:
os.chdir(options.output_dir)
gen.WritePepperGLES2Interface("ppapi/api/ppb_opengles2.idl", False)
gen.WritePepperGLES2Interface("ppapi/api/dev/ppb_opengles2ext_dev.idl", True)
gen.WriteGLES2ToPPAPIBridge("ppapi/lib/gl/gles2/gles2.c")
gen.WritePepperGLES2Implementation(
"ppapi/shared_impl/ppb_opengles2_shared.cc")
os.chdir(base_dir)
gen.WriteCommandIds("gpu/command_buffer/common/gles2_cmd_ids_autogen.h")
gen.WriteFormat("gpu/command_buffer/common/gles2_cmd_format_autogen.h")
gen.WriteFormatTest(
"gpu/command_buffer/common/gles2_cmd_format_test_autogen.h")
gen.WriteGLES2InterfaceHeader(
"gpu/command_buffer/client/gles2_interface_autogen.h")
gen.WriteMojoGLES2ImplHeader(
"mojo/gpu/mojo_gles2_impl_autogen.h")
gen.WriteMojoGLES2Impl(
"mojo/gpu/mojo_gles2_impl_autogen.cc")
gen.WriteGLES2InterfaceStub(
"gpu/command_buffer/client/gles2_interface_stub_autogen.h")
gen.WriteGLES2InterfaceStubImpl(
"gpu/command_buffer/client/gles2_interface_stub_impl_autogen.h")
gen.WriteGLES2ImplementationHeader(
"gpu/command_buffer/client/gles2_implementation_autogen.h")
gen.WriteGLES2Implementation(
"gpu/command_buffer/client/gles2_implementation_impl_autogen.h")
gen.WriteGLES2ImplementationUnitTests(
"gpu/command_buffer/client/gles2_implementation_unittest_autogen.h")
gen.WriteGLES2TraceImplementationHeader(
"gpu/command_buffer/client/gles2_trace_implementation_autogen.h")
gen.WriteGLES2TraceImplementation(
"gpu/command_buffer/client/gles2_trace_implementation_impl_autogen.h")
gen.WriteGLES2CLibImplementation(
"gpu/command_buffer/client/gles2_c_lib_autogen.h")
gen.WriteCmdHelperHeader(
"gpu/command_buffer/client/gles2_cmd_helper_autogen.h")
gen.WriteServiceImplementation(
"gpu/command_buffer/service/gles2_cmd_decoder_autogen.h")
gen.WriteServiceContextStateHeader(
"gpu/command_buffer/service/context_state_autogen.h")
gen.WriteServiceContextStateImpl(
"gpu/command_buffer/service/context_state_impl_autogen.h")
gen.WriteClientContextStateHeader(
"gpu/command_buffer/client/client_context_state_autogen.h")
gen.WriteClientContextStateImpl(
"gpu/command_buffer/client/client_context_state_impl_autogen.h")
gen.WriteServiceUnitTests(
"gpu/command_buffer/service/gles2_cmd_decoder_unittest_%d_autogen.h")
gen.WriteServiceUnitTestsForExtensions(
"gpu/command_buffer/service/"
"gles2_cmd_decoder_unittest_extensions_autogen.h")
gen.WriteServiceUtilsHeader(
"gpu/command_buffer/service/gles2_cmd_validation_autogen.h")
gen.WriteServiceUtilsImplementation(
"gpu/command_buffer/service/"
"gles2_cmd_validation_implementation_autogen.h")
gen.WriteCommonUtilsHeader(
"gpu/command_buffer/common/gles2_cmd_utils_autogen.h")
gen.WriteCommonUtilsImpl(
"gpu/command_buffer/common/gles2_cmd_utils_implementation_autogen.h")
gen.WriteGLES2Header("gpu/GLES2/gl2chromium_autogen.h")
mojo_gles2_prefix = ("third_party/mojo/src/mojo/public/c/gles2/"
"gles2_call_visitor")
gen.WriteMojoGLCallVisitor(mojo_gles2_prefix + "_autogen.h")
gen.WriteMojoGLCallVisitorForExtension(
mojo_gles2_prefix + "_chromium_texture_mailbox_autogen.h",
"CHROMIUM_texture_mailbox")
gen.WriteMojoGLCallVisitorForExtension(
mojo_gles2_prefix + "_chromium_sync_point_autogen.h",
"CHROMIUM_sync_point")
gen.WriteMojoGLCallVisitorForExtension(
mojo_gles2_prefix + "_chromium_sub_image_autogen.h",
"CHROMIUM_sub_image")
gen.WriteMojoGLCallVisitorForExtension(
mojo_gles2_prefix + "_chromium_miscellaneous_autogen.h",
"CHROMIUM_miscellaneous")
gen.WriteMojoGLCallVisitorForExtension(
mojo_gles2_prefix + "_occlusion_query_ext_autogen.h",
"occlusion_query_EXT")
gen.WriteMojoGLCallVisitorForExtension(
mojo_gles2_prefix + "_chromium_image_autogen.h",
"CHROMIUM_image")
gen.WriteMojoGLCallVisitorForExtension(
mojo_gles2_prefix + "_chromium_copy_texture_autogen.h",
"CHROMIUM_copy_texture")
gen.WriteMojoGLCallVisitorForExtension(
mojo_gles2_prefix + "_chromium_pixel_transfer_buffer_object_autogen.h",
"CHROMIUM_pixel_transfer_buffer_object")
Format(gen.generated_cpp_filenames)
if gen.errors > 0:
print "%d errors" % gen.errors
return 1
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
SaschaMester/delicium
|
gpu/command_buffer/build_gles2_cmd_buffer.py
|
Python
|
bsd-3-clause
| 350,071
|
"""
=============================================
Compute MxNE with time-frequency sparse prior
=============================================
The TF-MxNE solver is a distributed inverse method (like dSPM or sLORETA)
that promotes focal (sparse) sources, as dipole fitting techniques do.
The benefits of this approach are that:
- it is spatio-temporal without assuming stationarity (source properties
can vary over time);
- activations are localized in space, time and frequency in one step;
- thanks to a built-in filtering process based on a short-time Fourier
transform (STFT), the data does not need to be low-passed (just high-passed
to make the signals zero mean);
- the solver solves a convex optimization problem, hence it cannot be
trapped in local minima.
References:
A. Gramfort, D. Strohmeier, J. Haueisen, M. Hamalainen, M. Kowalski
Time-Frequency Mixed-Norm Estimates: Sparse M/EEG imaging with
non-stationary source activations
Neuroimage, Volume 70, 15 April 2013, Pages 410-422, ISSN 1053-8119,
DOI: 10.1016/j.neuroimage.2012.12.051.
A. Gramfort, D. Strohmeier, J. Haueisen, M. Hamalainen, M. Kowalski
Functional Brain Imaging with M/EEG Using Structured Sparsity in
Time-Frequency Dictionaries
Proceedings Information Processing in Medical Imaging
Lecture Notes in Computer Science, 2011, Volume 6801/2011,
600-611, DOI: 10.1007/978-3-642-22092-0_49
https://doi.org/10.1007/978-3-642-22092-0_49
"""
# Author: Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
#
# License: BSD (3-clause)
import mne
from mne.datasets import sample
from mne.minimum_norm import make_inverse_operator, apply_inverse
from mne.inverse_sparse import tf_mixed_norm
from mne.viz import plot_sparse_source_estimates
print(__doc__)
data_path = sample.data_path()
subjects_dir = data_path + '/subjects'
fwd_fname = data_path + '/MEG/sample/sample_audvis-meg-eeg-oct-6-fwd.fif'
ave_fname = data_path + '/MEG/sample/sample_audvis-no-filter-ave.fif'
cov_fname = data_path + '/MEG/sample/sample_audvis-shrunk-cov.fif'
# Read noise covariance matrix
cov = mne.read_cov(cov_fname)
# Handling average file
condition = 'Left visual'
evoked = mne.read_evokeds(ave_fname, condition=condition, baseline=(None, 0))
evoked = mne.pick_channels_evoked(evoked)
# We make the window slightly larger than what you'll eventually be interested
# in ([-0.05, 0.3]) to avoid edge effects.
evoked.crop(tmin=-0.1, tmax=0.4)
# Handling forward solution
forward = mne.read_forward_solution(fwd_fname, force_fixed=False,
surf_ori=True)
###############################################################################
# Run solver
# alpha_space regularization parameter is between 0 and 100 (100 is high)
alpha_space = 50. # spatial regularization parameter
# alpha_time parameter promotes temporal smoothness
# (0 means no temporal regularization)
alpha_time = 1. # temporal regularization parameter
loose, depth = 0.2, 0.9 # loose orientation & depth weighting
# Compute dSPM solution to be used as weights in MxNE
inverse_operator = make_inverse_operator(evoked.info, forward, cov,
loose=loose, depth=depth)
stc_dspm = apply_inverse(evoked, inverse_operator, lambda2=1. / 9.,
method='dSPM')
# Compute TF-MxNE inverse solution
stc, residual = tf_mixed_norm(evoked, forward, cov, alpha_space, alpha_time,
loose=loose, depth=depth, maxit=200, tol=1e-4,
weights=stc_dspm, weights_min=8., debias=True,
wsize=16, tstep=4, window=0.05,
return_residual=True)
# Crop to remove edges
stc.crop(tmin=-0.05, tmax=0.3)
evoked.crop(tmin=-0.05, tmax=0.3)
residual.crop(tmin=-0.05, tmax=0.3)
# Show the evoked response and the residual for gradiometers
ylim = dict(grad=[-120, 120])
evoked.pick_types(meg='grad', exclude='bads')
evoked.plot(titles=dict(grad='Evoked Response: Gradiometers'), ylim=ylim,
proj=True)
residual.pick_types(meg='grad', exclude='bads')
residual.plot(titles=dict(grad='Residuals: Gradiometers'), ylim=ylim,
proj=True)
###############################################################################
# View in 2D and 3D ("glass" brain like 3D plot)
plot_sparse_source_estimates(forward['src'], stc, bgcolor=(1, 1, 1),
opacity=0.1, fig_name="TF-MxNE (cond %s)"
% condition, modes=['sphere'], scale_factors=[1.])
time_label = 'TF-MxNE time=%0.2f ms'
clim = dict(kind='value', lims=[10e-9, 15e-9, 20e-9])
brain = stc.plot('sample', 'inflated', 'rh', clim=clim, time_label=time_label,
smoothing_steps=5, subjects_dir=subjects_dir)
brain.show_view('medial')
brain.set_data_time_index(120)
brain.add_label("V1", color="yellow", scalar_thresh=.5, borders=True)
brain.add_label("V2", color="red", scalar_thresh=.5, borders=True)
|
mne-tools/mne-tools.github.io
|
0.12/_downloads/plot_time_frequency_mixed_norm_inverse.py
|
Python
|
bsd-3-clause
| 4,959
|
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing classes related to AWS VM networking.
The Firewall class provides a way of opening VM ports. The Network class allows
VMs to communicate via internal IPs and isolates PerfKitBenchmarker VMs from
others in the same project. See https://aws.amazon.com/documentation/vpc/
for more information about AWS Virtual Private Clouds.
"""
import json
import logging
import threading
import uuid
from perfkitbenchmarker import flags
from perfkitbenchmarker import network
from perfkitbenchmarker import resource
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.aws import util
FLAGS = flags.FLAGS
class AwsFirewall(network.BaseFirewall):
"""An object representing the AWS Firewall."""
def __init__(self, project):
self.firewall_set = set()
self._lock = threading.Lock()
def __getstate__(self):
"""Implements getstate to allow pickling (since locks can't be pickled)."""
d = self.__dict__.copy()
del d['_lock']
return d
def __setstate__(self, state):
"""Restores the lock after the object is unpickled."""
self.__dict__ = state
self._lock = threading.Lock()
def AllowPort(self, vm, port):
"""Opens a port on the firewall.
Args:
vm: The BaseVirtualMachine object to open the port for.
port: The local port to open.
"""
if vm.is_static:
return
entry = (port, vm.group_id)
if entry in self.firewall_set:
return
with self._lock:
if entry in self.firewall_set:
return
authorize_cmd = util.AWS_PREFIX + [
'ec2',
'authorize-security-group-ingress',
'--region=%s' % vm.region,
'--group-id=%s' % vm.group_id,
'--port=%s' % port,
'--cidr=0.0.0.0/0']
util.IssueRetryableCommand(
authorize_cmd + ['--protocol=tcp'])
util.IssueRetryableCommand(
authorize_cmd + ['--protocol=udp'])
self.firewall_set.add(entry)
def DisallowAllPorts(self):
"""Closes all ports on the firewall."""
pass
class AwsVpc(resource.BaseResource):
"""An object representing an Aws VPC."""
def __init__(self, region):
super(AwsVpc, self).__init__()
self.region = region
self.id = None
def _Create(self):
"""Creates the VPC."""
create_cmd = util.AWS_PREFIX + [
'ec2',
'create-vpc',
'--region=%s' % self.region,
'--cidr-block=10.0.0.0/16']
stdout, _, _ = vm_util.IssueCommand(create_cmd)
response = json.loads(stdout)
self.id = response['Vpc']['VpcId']
self._EnableDnsHostnames()
util.AddDefaultTags(self.id, self.region)
def _Exists(self):
"""Returns true if the VPC exists."""
describe_cmd = util.AWS_PREFIX + [
'ec2',
'describe-vpcs',
'--region=%s' % self.region,
'--filter=Name=vpc-id,Values=%s' % self.id]
stdout, _ = util.IssueRetryableCommand(describe_cmd)
response = json.loads(stdout)
vpcs = response['Vpcs']
assert len(vpcs) < 2, 'Too many VPCs.'
return len(vpcs) > 0
def _EnableDnsHostnames(self):
"""Sets the enableDnsHostnames attribute of this VPC to True.
By default, instances launched in non-default VPCs are assigned an
unresolvable hostname. This breaks the Hadoop benchmark. Setting the
enableDnsHostnames attribute to 'true' on the VPC resolves this. See:
http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_DHCP_Options.html
"""
enable_hostnames_command = util.AWS_PREFIX + [
'ec2',
'modify-vpc-attribute',
'--region=%s' % self.region,
'--vpc-id', self.id,
'--enable-dns-hostnames',
'{ "Value": true }']
util.IssueRetryableCommand(enable_hostnames_command)
def _Delete(self):
"""Delete's the VPC."""
delete_cmd = util.AWS_PREFIX + [
'ec2',
'delete-vpc',
'--region=%s' % self.region,
'--vpc-id=%s' % self.id]
vm_util.IssueCommand(delete_cmd)
class AwsSubnet(resource.BaseResource):
"""An object representing an Aws subnet."""
def __init__(self, zone, vpc_id):
super(AwsSubnet, self).__init__()
self.zone = zone
self.region = zone[:-1]
self.vpc_id = vpc_id
self.id = None
def _Create(self):
"""Creates the subnet."""
create_cmd = util.AWS_PREFIX + [
'ec2',
'create-subnet',
'--region=%s' % self.region,
'--vpc-id=%s' % self.vpc_id,
'--cidr-block=10.0.0.0/24',
'--availability-zone=%s' % self.zone]
stdout, _, _ = vm_util.IssueCommand(create_cmd)
response = json.loads(stdout)
self.id = response['Subnet']['SubnetId']
util.AddDefaultTags(self.id, self.region)
def _Delete(self):
"""Deletes the subnet."""
logging.info('Deleting subnet %s. This may fail if instances in the '
'subnet have not yet completed termination, but it will be retried.',
self.id)
delete_cmd = util.AWS_PREFIX + [
'ec2',
'delete-subnet',
'--region=%s' % self.region,
'--subnet-id=%s' % self.id]
vm_util.IssueCommand(delete_cmd)
def _Exists(self):
"""Returns true if the subnet exists."""
describe_cmd = util.AWS_PREFIX + [
'ec2',
'describe-subnets',
'--region=%s' % self.region,
'--filter=Name=subnet-id,Values=%s' % self.id]
stdout, _ = util.IssueRetryableCommand(describe_cmd)
response = json.loads(stdout)
subnets = response['Subnets']
assert len(subnets) < 2, 'Too many subnets.'
return len(subnets) > 0
class AwsInternetGateway(resource.BaseResource):
"""An object representing an Aws Internet Gateway."""
def __init__(self, region):
super(AwsInternetGateway, self).__init__()
self.region = region
self.vpc_id = None
self.id = None
self.attached = False
def _Create(self):
"""Creates the internet gateway."""
create_cmd = util.AWS_PREFIX + [
'ec2',
'create-internet-gateway',
'--region=%s' % self.region]
stdout, _, _ = vm_util.IssueCommand(create_cmd)
response = json.loads(stdout)
self.id = response['InternetGateway']['InternetGatewayId']
util.AddDefaultTags(self.id, self.region)
def _Delete(self):
"""Deletes the internet gateway."""
delete_cmd = util.AWS_PREFIX + [
'ec2',
'delete-internet-gateway',
'--region=%s' % self.region,
'--internet-gateway-id=%s' % self.id]
vm_util.IssueCommand(delete_cmd)
def _Exists(self):
"""Returns true if the internet gateway exists."""
describe_cmd = util.AWS_PREFIX + [
'ec2',
'describe-internet-gateways',
'--region=%s' % self.region,
'--filter=Name=internet-gateway-id,Values=%s' % self.id]
stdout, _ = util.IssueRetryableCommand(describe_cmd)
response = json.loads(stdout)
internet_gateways = response['InternetGateways']
assert len(internet_gateways) < 2, 'Too many internet gateways.'
return len(internet_gateways) > 0
def Attach(self, vpc_id):
"""Attaches the internetgateway to the VPC."""
if not self.attached:
self.vpc_id = vpc_id
attach_cmd = util.AWS_PREFIX + [
'ec2',
'attach-internet-gateway',
'--region=%s' % self.region,
'--internet-gateway-id=%s' % self.id,
'--vpc-id=%s' % self.vpc_id]
util.IssueRetryableCommand(attach_cmd)
self.attached = True
def Detach(self):
"""Detaches the internetgateway from the VPC."""
if self.attached:
detach_cmd = util.AWS_PREFIX + [
'ec2',
'detach-internet-gateway',
'--region=%s' % self.region,
'--internet-gateway-id=%s' % self.id,
'--vpc-id=%s' % self.vpc_id]
util.IssueRetryableCommand(detach_cmd)
self.attached = False
class AwsRouteTable(resource.BaseResource):
"""An object representing a route table."""
def __init__(self, region, vpc_id):
super(AwsRouteTable, self).__init__()
self.region = region
self.vpc_id = vpc_id
def _Create(self):
"""Creates the route table.
This is a no-op since every VPC has a default route table.
"""
pass
def _Delete(self):
"""Deletes the route table.
This is a no-op since the default route table gets deleted with the VPC.
"""
pass
@vm_util.Retry()
def _PostCreate(self):
"""Gets data about the route table."""
describe_cmd = util.AWS_PREFIX + [
'ec2',
'describe-route-tables',
'--region=%s' % self.region,
'--filters=Name=vpc-id,Values=%s' % self.vpc_id]
stdout, _ = util.IssueRetryableCommand(describe_cmd)
response = json.loads(stdout)
self.id = response['RouteTables'][0]['RouteTableId']
def CreateRoute(self, internet_gateway_id):
"""Adds a route to the internet gateway."""
create_cmd = util.AWS_PREFIX + [
'ec2',
'create-route',
'--region=%s' % self.region,
'--route-table-id=%s' % self.id,
'--gateway-id=%s' % internet_gateway_id,
'--destination-cidr-block=0.0.0.0/0']
util.IssueRetryableCommand(create_cmd)
class AwsPlacementGroup(resource.BaseResource):
"""Object representing an AWS Placement Group.
Attributes:
region: The AWS region the Placement Group is in.
name: The name of the Placement Group.
"""
def __init__(self, region):
"""Init method for AwsPlacementGroup.
Args:
region: A string containing the AWS region of the Placement Group.
"""
super(AwsPlacementGroup, self).__init__()
self.name = (
'perfkit-%s-%s' % (FLAGS.run_uri, str(uuid.uuid4())[-12:]))
self.region = region
def _Create(self):
"""Creates the Placement Group."""
create_cmd = util.AWS_PREFIX + [
'ec2',
'create-placement-group',
'--region=%s' % self.region,
'--group-name=%s' % self.name,
'--strategy=cluster']
vm_util.IssueCommand(create_cmd)
def _Delete(self):
"""Deletes the Placement Group."""
delete_cmd = util.AWS_PREFIX + [
'ec2',
'delete-placement-group',
'--region=%s' % self.region,
'--group-name=%s' % self.name]
vm_util.IssueCommand(delete_cmd)
def _Exists(self):
"""Returns true if the Placement Group exists."""
describe_cmd = util.AWS_PREFIX + [
'ec2',
'describe-placement-groups',
'--region=%s' % self.region,
'--filter=Name=group-name,Values=%s' % self.name]
stdout, _ = util.IssueRetryableCommand(describe_cmd)
response = json.loads(stdout)
placement_groups = response['PlacementGroups']
assert len(placement_groups) < 2, 'Too many placement groups.'
return len(placement_groups) > 0
class AwsNetwork(network.BaseNetwork):
"""Object representing an AWS Network.
Attributes:
region: The AWS region the Network is in.
vpc_id: The id of the Network's Virtual Private Cloud (VPC).
subnet_id: The id of the Subnet of the Network's VPC.
internet_gateway_id: The id of the Network's Internet Gateway.
route_table_id: The id of the Route Table of the Network's VPC.
"""
def __init__(self, zone):
"""Initializes AwsNetwork instances.
Args:
zone: The Availability Zone that the Network corresponds to.
"""
super(AwsNetwork, self).__init__(zone)
self.region = zone[:-1]
self.vpc = AwsVpc(self.region)
self.internet_gateway = AwsInternetGateway(self.region)
self.subnet = None
self.route_table = None
self.placement_group = AwsPlacementGroup(self.region)
def Create(self):
"""Creates the network."""
self.vpc.Create()
self.internet_gateway.Create()
self.internet_gateway.Attach(self.vpc.id)
if self.route_table is None:
self.route_table = AwsRouteTable(self.region, self.vpc.id)
self.route_table.Create()
self.route_table.CreateRoute(self.internet_gateway.id)
if self.subnet is None:
self.subnet = AwsSubnet(self.zone, self.vpc.id)
self.subnet.Create()
self.placement_group.Create()
def Delete(self):
"""Deletes the network."""
self.placement_group.Delete()
if self.subnet:
self.subnet.Delete()
self.internet_gateway.Detach()
self.internet_gateway.Delete()
self.vpc.Delete()
|
tvansteenburgh/PerfKitBenchmarker
|
perfkitbenchmarker/aws/aws_network.py
|
Python
|
apache-2.0
| 12,866
|
##
# Copyright 2013-2017 Ghent University
#
# This file is triple-licensed under GPLv2 (see below), MIT, and
# BSD three-clause licenses.
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for gmvolf compiler toolchain (includes GCC, MVAPICH2, OpenBLAS, LAPACK, ScaLAPACK and FFTW).
:author: Dmitri Gribenko (National Technical University of Ukraine "KPI")
"""
from easybuild.toolchains.gmvapich2 import Gmvapich2
from easybuild.toolchains.fft.fftw import Fftw
from easybuild.toolchains.linalg.openblas import OpenBLAS
from easybuild.toolchains.linalg.scalapack import ScaLAPACK
class Gmvolf(Gmvapich2, OpenBLAS, ScaLAPACK, Fftw):
"""Compiler toolchain with GCC, MVAPICH2, OpenBLAS, ScaLAPACK and FFTW."""
NAME = 'gmvolf'
SUBTOOLCHAIN = Gmvapich2.NAME
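# A hypothetical easyconfig snippet selecting this toolchain (the version
# string below is illustrative, not a known release):
#
#   toolchain = {'name': 'gmvolf', 'version': '2016a'}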
|
ULHPC/easybuild-framework
|
easybuild/toolchains/gmvolf.py
|
Python
|
gpl-2.0
| 1,741
|
from .dev import * # noqa
INSTALLED_APPS.append('wagtail_pgsearchbackend') # noqa: F405
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'postgres',
'USER': 'postgres',
'HOST': 'db',
'PORT': '5432',
}
}
WAGTAILSEARCH_BACKENDS = {
'default': {
'BACKEND': 'wagtail_pgsearchbackend.backend',
'SEARCH_CONFIG': 'english'
}
}
STATIC_ROOT = '/app/static'
MEDIA_ROOT = '/app/media'
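# Illustrative note: this settings module is typically selected via the
# environment, e.g. DJANGO_SETTINGS_MODULE=filmfest.settings.docker (module
# path inferred from this file's location; the exact invocation depends on
# the deployment).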
|
kinaklub/next.filmfest.by
|
filmfest/settings/docker.py
|
Python
|
unlicense
| 491
|
# Copyright 2013 NetApp
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The shares api."""
import ast
from oslo_log import log
from oslo_utils import strutils
from oslo_utils import uuidutils
import six
import webob
from webob import exc
from manila.api import common
from manila.api.openstack import wsgi
from manila.api.views import share_accesses as share_access_views
from manila.api.views import shares as share_views
from manila.common import constants
from manila import db
from manila import exception
from manila.i18n import _, _LI
from manila import share
from manila.share import share_types
LOG = log.getLogger(__name__)
class ShareMixin(object):
"""Mixin class for Share API Controllers."""
def _update(self, *args, **kwargs):
db.share_update(*args, **kwargs)
def _get(self, *args, **kwargs):
return self.share_api.get(*args, **kwargs)
def _delete(self, *args, **kwargs):
return self.share_api.delete(*args, **kwargs)
def _migrate(self, *args, **kwargs):
return self.share_api.migrate_share(*args, **kwargs)
def show(self, req, id):
"""Return data about the given share."""
context = req.environ['manila.context']
try:
share = self.share_api.get(context, id)
except exception.NotFound:
raise exc.HTTPNotFound()
return self._view_builder.detail(req, share)
def delete(self, req, id):
"""Delete a share."""
context = req.environ['manila.context']
LOG.info(_LI("Delete share with id: %s"), id, context=context)
try:
share = self.share_api.get(context, id)
# NOTE(ameade): If the share is in a share group, we require its
# id be specified as a param.
sg_id_key = 'share_group_id'
if share.get(sg_id_key):
share_group_id = req.params.get(sg_id_key)
if not share_group_id:
msg = _("Must provide '%s' as a request "
"parameter when deleting a share in a share "
"group.") % sg_id_key
raise exc.HTTPBadRequest(explanation=msg)
elif share_group_id != share.get(sg_id_key):
msg = _("The specified '%s' does not match "
"the share group id of the share.") % sg_id_key
raise exc.HTTPBadRequest(explanation=msg)
self.share_api.delete(context, share)
except exception.NotFound:
raise exc.HTTPNotFound()
except exception.InvalidShare as e:
raise exc.HTTPForbidden(explanation=six.text_type(e))
except exception.Conflict as e:
raise exc.HTTPConflict(explanation=six.text_type(e))
return webob.Response(status_int=202)
def index(self, req):
"""Returns a summary list of shares."""
return self._get_shares(req, is_detail=False)
def detail(self, req):
"""Returns a detailed list of shares."""
return self._get_shares(req, is_detail=True)
def _get_shares(self, req, is_detail):
"""Returns a list of shares, transformed through view builder."""
context = req.environ['manila.context']
search_opts = {}
search_opts.update(req.GET)
# Remove keys that are not related to share attrs
search_opts.pop('limit', None)
search_opts.pop('offset', None)
sort_key = search_opts.pop('sort_key', 'created_at')
sort_dir = search_opts.pop('sort_dir', 'desc')
# Deserialize dicts
if 'metadata' in search_opts:
search_opts['metadata'] = ast.literal_eval(search_opts['metadata'])
if 'extra_specs' in search_opts:
search_opts['extra_specs'] = ast.literal_eval(
search_opts['extra_specs'])
# NOTE(vponomaryov): Manila stores the key 'display_name' in the DB, but
# allows both the 'name' and 'display_name' keys to be used. This is a
# leftover from the Cinder v1 and v2 APIs.
if 'name' in search_opts:
search_opts['display_name'] = search_opts.pop('name')
if sort_key == 'name':
sort_key = 'display_name'
common.remove_invalid_options(
context, search_opts, self._get_share_search_options())
shares = self.share_api.get_all(
context, search_opts=search_opts, sort_key=sort_key,
sort_dir=sort_dir)
limited_list = common.limited(shares, req)
if is_detail:
shares = self._view_builder.detail_list(req, limited_list)
else:
shares = self._view_builder.summary_list(req, limited_list)
return shares
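# Illustrative mapping (hypothetical values): a request such as
#   GET /shares?name=myshare&sort_key=name&metadata={'k': 'v'}
# is translated above into search_opts={'display_name': 'myshare',
# 'metadata': {'k': 'v'}} with sort_key='display_name', since 'name' is an
# alias for 'display_name' and dict-valued params are parsed via
# ast.literal_eval.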
def _get_share_search_options(self):
"""Return share search options allowed by non-admin."""
# NOTE(vponomaryov): share_server_id depends on policy, so allow searching
# by it for non-admins in case the policy has changed.
# Also allow searching by extra_specs in case its policy
# allows non-admin access.
return (
'display_name', 'status', 'share_server_id', 'volume_type_id',
'share_type_id', 'snapshot_id', 'host', 'share_network_id',
'is_public', 'metadata', 'extra_specs', 'sort_key', 'sort_dir',
'share_group_id', 'share_group_snapshot_id'
)
def update(self, req, id, body):
"""Update a share."""
context = req.environ['manila.context']
if not body or 'share' not in body:
raise exc.HTTPUnprocessableEntity()
share_data = body['share']
valid_update_keys = (
'display_name',
'display_description',
'is_public',
)
update_dict = {key: share_data[key]
for key in valid_update_keys
if key in share_data}
try:
share = self.share_api.get(context, id)
except exception.NotFound:
raise exc.HTTPNotFound()
share = self.share_api.update(context, share, update_dict)
share.update(update_dict)
return self._view_builder.detail(req, share)
def create(self, req, body):
# Remove share group attributes
body.get('share', {}).pop('share_group_id', None)
share = self._create(req, body)
return share
def _create(self, req, body,
check_create_share_from_snapshot_support=False):
"""Creates a new share."""
context = req.environ['manila.context']
if not self.is_valid_body(body, 'share'):
raise exc.HTTPUnprocessableEntity()
share = body['share']
# NOTE(rushiagr): Manila API allows 'name' instead of 'display_name'.
if share.get('name'):
share['display_name'] = share.get('name')
del share['name']
# NOTE(rushiagr): Manila API allows 'description' instead of
# 'display_description'.
if share.get('description'):
share['display_description'] = share.get('description')
del share['description']
size = share['size']
share_proto = share['share_proto'].upper()
msg = (_LI("Create %(share_proto)s share of %(size)s GB") %
{'share_proto': share_proto, 'size': size})
LOG.info(msg, context=context)
availability_zone = share.get('availability_zone')
if availability_zone:
try:
db.availability_zone_get(context, availability_zone)
except exception.AvailabilityZoneNotFound as e:
raise exc.HTTPNotFound(explanation=six.text_type(e))
kwargs = {
'availability_zone': availability_zone,
'metadata': share.get('metadata'),
'is_public': share.get('is_public', False),
'share_group_id': share.get('share_group_id')
}
snapshot_id = share.get('snapshot_id')
if snapshot_id:
snapshot = self.share_api.get_snapshot(context, snapshot_id)
else:
snapshot = None
kwargs['snapshot_id'] = snapshot_id
share_network_id = share.get('share_network_id')
if snapshot:
# Check that the share_network_id from the snapshot's parent share
# equals the share_network_id from the args. If share_network_id is
# empty, then update it with the parent share's share_network_id.
parent_share = self.share_api.get(context, snapshot['share_id'])
parent_share_net_id = parent_share.instance['share_network_id']
if share_network_id:
if share_network_id != parent_share_net_id:
msg = "Share network ID should be the same as snapshot's" \
" parent share's or empty"
raise exc.HTTPBadRequest(explanation=msg)
elif parent_share_net_id:
share_network_id = parent_share_net_id
# Verify that share can be created from a snapshot
if (check_create_share_from_snapshot_support and
not parent_share['create_share_from_snapshot_support']):
msg = (_("A new share may not be created from snapshot '%s', "
"because the snapshot's parent share does not have "
"that capability.")
% snapshot_id)
LOG.error(msg)
raise exc.HTTPBadRequest(explanation=msg)
if share_network_id:
try:
self.share_api.get_share_network(
context,
share_network_id)
except exception.ShareNetworkNotFound as e:
raise exc.HTTPNotFound(explanation=six.text_type(e))
kwargs['share_network_id'] = share_network_id
display_name = share.get('display_name')
display_description = share.get('display_description')
if 'share_type' in share and 'volume_type' in share:
msg = 'Cannot specify both share_type and volume_type'
raise exc.HTTPBadRequest(explanation=msg)
req_share_type = share.get('share_type', share.get('volume_type'))
share_type = None
if req_share_type:
try:
if not uuidutils.is_uuid_like(req_share_type):
share_type = share_types.get_share_type_by_name(
context, req_share_type)
else:
share_type = share_types.get_share_type(
context, req_share_type)
except exception.ShareTypeNotFound:
msg = _("Share type not found.")
raise exc.HTTPNotFound(explanation=msg)
elif not snapshot:
def_share_type = share_types.get_default_share_type()
if def_share_type:
share_type = def_share_type
        # Only used by the create-share flow. Creating a share from a
        # snapshot or creating a share in a share group does not need
        # this check.
if (not share_network_id and not snapshot
and not share.get('share_group_id')
and share_type and share_type.get('extra_specs')
and (strutils.bool_from_string(share_type.get('extra_specs').
get('driver_handles_share_servers')))):
msg = _('Share network must be set when the '
'driver_handles_share_servers is true.')
raise exc.HTTPBadRequest(explanation=msg)
if share_type:
kwargs['share_type'] = share_type
new_share = self.share_api.create(context,
share_proto,
size,
display_name,
display_description,
**kwargs)
return self._view_builder.detail(req, new_share)
@staticmethod
def _any_instance_has_errored_rules(share):
for instance in share['instances']:
access_rules_status = instance['access_rules_status']
if access_rules_status == constants.SHARE_INSTANCE_RULES_ERROR:
return True
return False
@wsgi.Controller.authorize('allow_access')
def _allow_access(self, req, id, body, enable_ceph=False,
allow_on_error_status=False):
"""Add share access rule."""
context = req.environ['manila.context']
access_data = body.get('allow_access', body.get('os-allow_access'))
share = self.share_api.get(context, id)
if (not allow_on_error_status and
self._any_instance_has_errored_rules(share)):
msg = _("Access rules cannot be added while the share or any of "
"its replicas or migration copies has its "
"access_rules_status set to %(instance_rules_status)s. "
"Deny any rules in %(rule_state)s state and try "
"again.") % {
'instance_rules_status': constants.SHARE_INSTANCE_RULES_ERROR,
'rule_state': constants.ACCESS_STATE_ERROR,
}
raise webob.exc.HTTPBadRequest(explanation=msg)
access_type = access_data['access_type']
access_to = access_data['access_to']
common.validate_access(access_type=access_type,
access_to=access_to,
enable_ceph=enable_ceph)
try:
access = self.share_api.allow_access(
context, share, access_type, access_to,
access_data.get('access_level'))
except exception.ShareAccessExists as e:
raise webob.exc.HTTPBadRequest(explanation=e.msg)
return self._access_view_builder.view(req, access)
@wsgi.Controller.authorize('deny_access')
def _deny_access(self, req, id, body):
"""Remove share access rule."""
context = req.environ['manila.context']
access_id = body.get(
'deny_access', body.get('os-deny_access'))['access_id']
try:
access = self.share_api.access_get(context, access_id)
if access.share_id != id:
raise exception.NotFound()
share = self.share_api.get(context, id)
except exception.NotFound as error:
raise webob.exc.HTTPNotFound(explanation=six.text_type(error))
self.share_api.deny_access(context, share, access)
return webob.Response(status_int=202)
def _access_list(self, req, id, body):
"""list share access rules."""
context = req.environ['manila.context']
share = self.share_api.get(context, id)
access_rules = self.share_api.access_get_all(context, share)
return self._access_view_builder.list_view(req, access_rules)
def _extend(self, req, id, body):
"""Extend size of a share."""
context = req.environ['manila.context']
share, size = self._get_valid_resize_parameters(
context, id, body, 'os-extend')
try:
self.share_api.extend(context, share, size)
except (exception.InvalidInput, exception.InvalidShare) as e:
raise webob.exc.HTTPBadRequest(explanation=six.text_type(e))
except exception.ShareSizeExceedsAvailableQuota as e:
raise webob.exc.HTTPForbidden(explanation=six.text_type(e))
return webob.Response(status_int=202)
def _shrink(self, req, id, body):
"""Shrink size of a share."""
context = req.environ['manila.context']
share, size = self._get_valid_resize_parameters(
context, id, body, 'os-shrink')
try:
self.share_api.shrink(context, share, size)
except (exception.InvalidInput, exception.InvalidShare) as e:
raise webob.exc.HTTPBadRequest(explanation=six.text_type(e))
return webob.Response(status_int=202)
def _get_valid_resize_parameters(self, context, id, body, action):
try:
share = self.share_api.get(context, id)
except exception.NotFound as e:
raise webob.exc.HTTPNotFound(explanation=six.text_type(e))
try:
size = int(body.get(action,
body.get(action.split('os-')[-1]))['new_size'])
except (KeyError, ValueError, TypeError):
msg = _("New share size must be specified as an integer.")
raise webob.exc.HTTPBadRequest(explanation=msg)
return share, size
class ShareController(wsgi.Controller, ShareMixin, wsgi.AdminActionsMixin):
"""The Shares API v1 controller for the OpenStack API."""
resource_name = 'share'
_view_builder_class = share_views.ViewBuilder
def __init__(self):
        super(ShareController, self).__init__()
self.share_api = share.API()
self._access_view_builder = share_access_views.ViewBuilder()
@wsgi.action('os-reset_status')
def share_reset_status(self, req, id, body):
"""Reset status of a share."""
return self._reset_status(req, id, body)
@wsgi.action('os-force_delete')
def share_force_delete(self, req, id, body):
"""Delete a share, bypassing the check for status."""
return self._force_delete(req, id, body)
@wsgi.action('os-allow_access')
def allow_access(self, req, id, body):
"""Add share access rule."""
return self._allow_access(req, id, body)
@wsgi.action('os-deny_access')
def deny_access(self, req, id, body):
"""Remove share access rule."""
return self._deny_access(req, id, body)
@wsgi.action('os-access_list')
def access_list(self, req, id, body):
"""List share access rules."""
return self._access_list(req, id, body)
@wsgi.action('os-extend')
def extend(self, req, id, body):
"""Extend size of a share."""
return self._extend(req, id, body)
@wsgi.action('os-shrink')
def shrink(self, req, id, body):
"""Shrink size of a share."""
return self._shrink(req, id, body)
def create_resource():
return wsgi.Resource(ShareController())
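

# Hedged usage sketch (not part of the original module): minimal JSON bodies
# for the custom share actions defined above. The "os-"-prefixed keys match
# the @wsgi.action names; _get_valid_resize_parameters() also accepts the
# unprefixed form because it falls back to action.split('os-')[-1]. All
# values below are illustrative assumptions.
EXAMPLE_ACTION_BODIES = {
    'extend': {'os-extend': {'new_size': 2}},          # size in GB, integer
    'shrink': {'os-shrink': {'new_size': 1}},
    'allow_access': {'os-allow_access': {'access_type': 'ip',
                                         'access_to': '10.0.0.0/24',
                                         'access_level': 'rw'}},
    'deny_access': {'os-deny_access': {'access_id': 'example-rule-id'}},
}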
|
vponomaryov/manila
|
manila/api/v1/shares.py
|
Python
|
apache-2.0
| 18,956
|
#!/usr/bin/env python
# example togglebutton.py
import pygtk
pygtk.require('2.0')
import gtk
class ToggleButton:
# Our callback.
# The data passed to this method is printed to stdout
def callback(self, widget, data=None):
print "%s was toggled %s" % (data, ("OFF", "ON")[widget.get_active()])
# This callback quits the program
def delete_event(self, widget, event, data=None):
gtk.main_quit()
return False
def __init__(self):
# Create a new window
self.window = gtk.Window(gtk.WINDOW_TOPLEVEL)
# Set the window title
self.window.set_title("Toggle Button")
# Set a handler for delete_event that immediately
# exits GTK.
self.window.connect("delete_event", self.delete_event)
# Sets the border width of the window.
self.window.set_border_width(20)
# Create a vertical box
vbox = gtk.VBox(True, 2)
# Put the vbox in the main window
self.window.add(vbox)
# Create first button
button = gtk.ToggleButton("toggle button 1")
# When the button is toggled, we call the "callback" method
# with a pointer to "button" as its argument
button.connect("toggled", self.callback, "toggle button 1")
# Insert button 1
vbox.pack_start(button, True, True, 2)
button.show()
# Create second button
button = gtk.ToggleButton("toggle button 2")
# When the button is toggled, we call the "callback" method
# with a pointer to "button 2" as its argument
button.connect("toggled", self.callback, "toggle button 2")
# Insert button 2
vbox.pack_start(button, True, True, 2)
button.show()
# Create "Quit" button
button = gtk.Button("Quit")
# When the button is clicked, we call the main_quit function
# and the program exits
button.connect("clicked", lambda wid: gtk.main_quit())
# Insert the quit button
vbox.pack_start(button, True, True, 2)
button.show()
vbox.show()
self.window.show()
def main():
gtk.main()
return 0
if __name__ == "__main__":
ToggleButton()
main()
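
# Hedged note (not part of the original demo): this file requires Python 2
# with PyGTK 2 installed; under Python 3 both the print statement and the
# pygtk import fail. A GTK 3 port of the same widget would look roughly like:
#
#     import gi
#     gi.require_version('Gtk', '3.0')
#     from gi.repository import Gtk
#
#     button = Gtk.ToggleButton(label="toggle button 1")
#     button.connect("toggled", callback, "toggle button 1")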
|
certik/pyjamas
|
pygtkweb/demos/008-togglebutton.py
|
Python
|
apache-2.0
| 2,249
|
# Copyright (C) 2020 Renato Lima - Akretion <renato.lima@akretion.com.br>
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
ISSQN_ELIGIBILITY = [
('1', 'Exigível'),
('2', 'Não incidência'),
('3', 'Isenção'),
('4', 'Exportação'),
('5', 'Imunidade'),
('6', 'Exigibilidade Suspensa por Decisão Judicial'),
('7', 'Exigibilidade Suspensa por Processo Administrativo'),
]
ISSQN_ELIGIBILITY_DEFAULT = '2'
ISSQN_INCENTIVE = [
('1', 'Sim'),
('2', 'Não'),
]
ISSQN_INCENTIVE_DEFAULT = '2'
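

# Hedged usage sketch (not part of this constants module): these tuples are
# shaped for Odoo Selection fields. The model and field names below are
# illustrative assumptions, not definitions from l10n_br_fiscal. Kept as a
# comment so this module stays importable without Odoo installed.
#
#     from odoo import fields, models
#
#     class DocumentLineExample(models.Model):
#         _name = 'l10n_br_fiscal.document.line.example'
#
#         issqn_eligibility = fields.Selection(
#             selection=ISSQN_ELIGIBILITY,
#             default=ISSQN_ELIGIBILITY_DEFAULT,
#             string='ISSQN Eligibility')
#         issqn_incentive = fields.Selection(
#             selection=ISSQN_INCENTIVE,
#             default=ISSQN_INCENTIVE_DEFAULT,
#             string='ISSQN Incentive')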
|
kmee/l10n-brazil
|
l10n_br_fiscal/constants/issqn.py
|
Python
|
agpl-3.0
| 545
|
import json
import sys
from typing import TYPE_CHECKING
from vnpy.api.rest import Request, RestClient
if TYPE_CHECKING:
from vnpy.gateway.oanda import OandaGateway
_ = lambda x: x # noqa
class OandaApiBase(RestClient):
"""
Oanda Base API
"""
def __init__(self, gateway: "OandaGateway"):
super().__init__()
self.gateway = gateway
self.gateway_name = gateway.gateway_name
self.key = ""
self.secret = b""
def sign(self, request):
"""
        Attach the Oanda authorization headers to the request.
"""
if request.data:
request.data = json.dumps(request.data)
request.headers = {
"Authorization": f"Bearer {self.key}",
"Content-Type": "application/json",
}
return request
def is_request_success(self, data: dict, request: "Request"):
        # The error message should be checked too,
        # but checking only this is enough for us.
return super().is_request_success(data, request) and 'errorMessage' not in data
def on_error(
self, exception_type: type, exception_value: Exception, tb, request: Request
):
"""
Callback to handler request exception.
"""
msg = f"触发异常,状态码:{exception_type},信息:{exception_value}"
self.gateway.write_log(msg)
sys.stderr.write(
self.exception_detail(exception_type, exception_value, tb, request)
)
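

# Hedged sketch (not part of the original module): a minimal concrete client
# built on OandaApiBase. The host URL, the account path, and the
# init()/start()/add_request() wiring are assumptions based on vnpy's
# RestClient; the real gateway configures these elsewhere.
class OandaExampleApi(OandaApiBase):
    """Illustrative subclass that fetches account details."""

    def connect(self, key: str, session_number: int = 3):
        # sign() reads self.key when it builds the Authorization header.
        self.key = key
        self.init("https://api-fxpractice.oanda.com")  # practice host (assumed)
        self.start(session_number)

    def query_account(self, account_id: str):
        # The request is signed in sign() before it is sent.
        self.add_request(
            "GET",
            f"/v3/accounts/{account_id}",
            callback=self.on_query_account,
        )

    def on_query_account(self, data: dict, request: "Request"):
        self.gateway.write_log(f"account data received: {data}")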
|
msincenselee/vnpy
|
vnpy/gateway/oanda/oanda_api_base.py
|
Python
|
mit
| 1,465
|
"""Implementations of assessment abstract base class managers."""
# pylint: disable=invalid-name
# Method names comply with OSID specification.
# pylint: disable=no-init
# Abstract classes do not define __init__.
# pylint: disable=too-few-public-methods
# Some interfaces are specified as 'markers' and include no methods.
# pylint: disable=too-many-public-methods
# Number of methods are defined in specification
# pylint: disable=too-many-ancestors
# Inheritance defined in specification
# pylint: disable=too-many-arguments
# Argument signature defined in specification.
# pylint: disable=duplicate-code
# All apparent duplicates have been inspected. They aren't.
import abc
class AssessmentProfile:
"""The ``AssessmentProfile`` describes the interoperability among assessment services."""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def supports_visible_federation(self):
"""Tests if federation is visible.
        :return: ``true`` if visible federation is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_my_assessment_taken(self):
"""Tests if a session is available to lookup taken assessments for the authenticated agent.
:return: ``true`` if my assessment taken session is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment(self):
"""Tests for the availability of a assessment service which is the service for taking and examining assessments taken.
:return: ``true`` if assessment is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment_results(self):
"""Tests for the availability of an assessment rsults service.
:return: ``true`` if assessment results is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_item_lookup(self):
"""Tests if an item lookup service is supported.
:return: true if item lookup is supported, false otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_item_query(self):
"""Tests if an item query service is supported.
:return: ``true`` if item query is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_item_search(self):
"""Tests if an item search service is supported.
:return: ``true`` if item search is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_item_admin(self):
"""Tests if an item administrative service is supported.
:return: ``true`` if item admin is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_item_notification(self):
"""Tests if item notification is supported.
Messages may be sent when items are created, modified, or
deleted.
:return: ``true`` if item notification is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_item_bank(self):
"""Tests if an item to bank lookup session is available.
:return: ``true`` if item bank lookup session is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_item_bank_assignment(self):
"""Tests if an item to bank assignment session is available.
:return: ``true`` if item bank assignment is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_item_smart_bank(self):
"""Tests if an item smart bank session is available.
:return: ``true`` if item smart bank session is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment_lookup(self):
"""Tests if an assessment lookup service is supported.
An assessment lookup service defines methods to access
assessments.
:return: true if assessment lookup is supported, false otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment_query(self):
"""Tests if an assessment query service is supported.
:return: ``true`` if assessment query is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment_search(self):
"""Tests if an assessment search service is supported.
:return: ``true`` if assessment search is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment_admin(self):
"""Tests if an assessment administrative service is supported.
:return: ``true`` if assessment admin is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment_notification(self):
"""Tests if assessment notification is supported.
Messages may be sent when assessments are created, modified, or
deleted.
:return: ``true`` if assessment notification is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment_bank(self):
"""Tests if an assessment to bank lookup session is available.
:return: ``true`` if assessment bank lookup session is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment_bank_assignment(self):
"""Tests if an assessment to bank assignment session is available.
:return: ``true`` if assessment bank assignment is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment_smart_bank(self):
"""Tests if an assessment smart bank session is available.
:return: ``true`` if assessment smart bank session is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment_basic_authoring(self):
"""Tests if an assessment basic authoring session is available.
:return: ``true`` if assessment basic authoring is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment_offered_lookup(self):
"""Tests if an assessment offered lookup service is supported.
:return: true if assessment offered lookup is supported, false otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment_offered_query(self):
"""Tests if an assessment offered query service is supported.
:return: ``true`` if assessment offered query is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment_offered_search(self):
"""Tests if an assessment offered search service is supported.
:return: ``true`` if assessment offered search is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment_offered_admin(self):
"""Tests if an assessment offered administrative service is supported.
:return: ``true`` if assessment offered admin is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment_offered_notification(self):
"""Tests if assessment offered notification is supported.
Messages may be sent when offered assessments are created,
modified, or deleted.
:return: ``true`` if assessment offered notification is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment_offered_bank(self):
"""Tests if an assessment offered to bank lookup session is available.
:return: ``true`` if assessment offered bank lookup session is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment_offered_bank_assignment(self):
"""Tests if an assessment offered to bank assignment session is available.
:return: ``true`` if assessment offered bank assignment is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment_offered_smart_bank(self):
"""Tests if an assessment offered smart bank session is available.
:return: ``true`` if assessment offered smart bank session is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment_taken_lookup(self):
"""Tests if an assessment taken lookup service is supported.
:return: ``true`` if assessment taken lookup is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment_taken_query(self):
"""Tests if an assessment taken query service is supported.
:return: ``true`` if assessment taken query is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment_taken_search(self):
"""Tests if an assessment taken search service is supported.
:return: ``true`` if assessment taken search is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment_taken_admin(self):
"""Tests if an assessment taken administrative service is supported which is used to instantiate an assessment offered.
:return: ``true`` if assessment taken admin is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment_taken_notification(self):
"""Tests if assessment taken notification is supported.
Messages may be sent when items are created, modified, or
deleted.
        :return: ``true`` if assessment taken notification is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment_taken_bank(self):
"""Tests if an assessment taken to bank lookup session is available.
:return: ``true`` if assessment taken bank lookup session is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment_taken_bank_assignment(self):
"""Tests if an assessment taken to bank assignment session is available.
:return: ``true`` if assessment taken bank assignment is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment_taken_smart_bank(self):
"""Tests if an assessment taken smart bank session is available.
:return: ``true`` if assessment taken smart bank session is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_bank_lookup(self):
"""Tests if a bank lookup service is supported.
A bank lookup service defines methods to access assessment
banks.
:return: ``true`` if bank lookup is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_bank_query(self):
"""Tests if a bank query service is supported.
:return: ``true`` if bank query is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_bank_search(self):
"""Tests if a bank search service is supported.
:return: ``true`` if bank search is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_bank_admin(self):
"""Tests if a banlk administrative service is supported.
:return: ``true`` if bank admin is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_bank_notification(self):
"""Tests if bank notification is supported.
Messages may be sent when items are created, modified, or
deleted.
        :return: ``true`` if bank notification is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_bank_hierarchy(self):
"""Tests if a bank hierarchy traversal is supported.
:return: ``true`` if a bank hierarchy traversal is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_bank_hierarchy_design(self):
"""Tests if bank hierarchy design is supported.
:return: ``true`` if a bank hierarchy design is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment_authoring(self):
"""Tests if an assessment authoring service is supported.
:return: ``true`` if an assessment authoring is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def supports_assessment_batch(self):
"""Tests if an assessment batch service is supported.
:return: ``true`` if an assessment batch service is supported, ``false`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_item_record_types(self):
"""Gets the supported ``Item`` record types.
:return: a list containing the supported ``Item`` record types
:rtype: ``osid.type.TypeList``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.TypeList
item_record_types = property(fget=get_item_record_types)
@abc.abstractmethod
def supports_item_record_type(self, item_record_type):
"""Tests if the given ``Item`` record type is supported.
:param item_record_type: a ``Type`` indicating a ``Item`` record type
:type item_record_type: ``osid.type.Type``
:return: ``true`` if the given Type is supported, ``false`` otherwise
:rtype: ``boolean``
:raise: ``NullArgument`` -- ``item_record_type`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_item_search_record_types(self):
"""Gets the supported ``Item`` search record types.
:return: a list containing the supported ``Item`` search record types
:rtype: ``osid.type.TypeList``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.TypeList
item_search_record_types = property(fget=get_item_search_record_types)
@abc.abstractmethod
def supports_item_search_record_type(self, item_search_record_type):
"""Tests if the given ``Item`` search record type is supported.
:param item_search_record_type: a ``Type`` indicating an ``Item`` search record type
:type item_search_record_type: ``osid.type.Type``
:return: ``true`` if the given Type is supported, ``false`` otherwise
:rtype: ``boolean``
:raise: ``NullArgument`` -- ``item_search_record_type`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_assessment_record_types(self):
"""Gets the supported ``Assessment`` record types.
:return: a list containing the supported ``Assessment`` record types
:rtype: ``osid.type.TypeList``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.TypeList
assessment_record_types = property(fget=get_assessment_record_types)
@abc.abstractmethod
def supports_assessment_record_type(self, assessment_record_type):
"""Tests if the given ``Assessment`` record type is supported.
:param assessment_record_type: a ``Type`` indicating an ``Assessment`` record type
:type assessment_record_type: ``osid.type.Type``
:return: ``true`` if the given Type is supported, ``false`` otherwise
:rtype: ``boolean``
:raise: ``NullArgument`` -- ``assessment_record_type`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_assessment_search_record_types(self):
"""Gets the supported ``Assessment`` search record types.
:return: a list containing the supported assessment search record types
:rtype: ``osid.type.TypeList``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.TypeList
assessment_search_record_types = property(fget=get_assessment_search_record_types)
@abc.abstractmethod
def supports_assessment_search_record_type(self, assessment_search_record_type):
"""Tests if the given assessment search record type is supported.
:param assessment_search_record_type: a ``Type`` indicating an assessment search record type
:type assessment_search_record_type: ``osid.type.Type``
:return: ``true`` if the given search record Type is supported, ``false`` otherwise
:rtype: ``boolean``
:raise: ``NullArgument`` -- ``assessment_search_record_type`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_assessment_offered_record_types(self):
"""Gets the supported ``AssessmentOffered`` record types.
:return: a list containing the supported ``AssessmentOffered`` record types
:rtype: ``osid.type.TypeList``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.TypeList
assessment_offered_record_types = property(fget=get_assessment_offered_record_types)
@abc.abstractmethod
def supports_assessment_offered_record_type(self, assessment_offered_record_type):
"""Tests if the given ``AssessmentOffered`` record type is supported.
:param assessment_offered_record_type: a ``Type`` indicating an ``AssessmentOffered`` record type
:type assessment_offered_record_type: ``osid.type.Type``
:return: ``true`` if the given Type is supported, ``false`` otherwise
:rtype: ``boolean``
:raise: ``NullArgument`` -- ``assessment_offered_record_type`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_assessment_offered_search_record_types(self):
"""Gets the supported ``AssessmentOffered`` search record types.
:return: a list containing the supported ``AssessmentOffered`` search record types
:rtype: ``osid.type.TypeList``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.TypeList
assessment_offered_search_record_types = property(fget=get_assessment_offered_search_record_types)
@abc.abstractmethod
def supports_assessment_offered_search_record_type(self, assessment_offered_search_record_type):
"""Tests if the given ``AssessmentOffered`` search record type is supported.
:param assessment_offered_search_record_type: a ``Type`` indicating an ``AssessmentOffered`` search record type
:type assessment_offered_search_record_type: ``osid.type.Type``
:return: ``true`` if the given Type is supported, ``false`` otherwise
:rtype: ``boolean``
:raise: ``NullArgument`` -- ``assessment_offered_search_record_type`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_assessment_taken_record_types(self):
"""Gets the supported ``AssessmentTaken`` record types.
:return: a list containing the supported ``AssessmentTaken`` record types
:rtype: ``osid.type.TypeList``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.TypeList
assessment_taken_record_types = property(fget=get_assessment_taken_record_types)
@abc.abstractmethod
def supports_assessment_taken_record_type(self, assessment_taken_record_type):
"""Tests if the given ``AssessmentTaken`` record type is supported.
:param assessment_taken_record_type: a ``Type`` indicating an ``AssessmentTaken`` record type
:type assessment_taken_record_type: ``osid.type.Type``
:return: ``true`` if the given Type is supported, ``false`` otherwise
:rtype: ``boolean``
:raise: ``NullArgument`` -- ``assessment_taken_record_type`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_assessment_taken_search_record_types(self):
"""Gets the supported ``AssessmentTaken`` search record types.
:return: a list containing the supported ``AssessmentTaken`` search record types
:rtype: ``osid.type.TypeList``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.TypeList
assessment_taken_search_record_types = property(fget=get_assessment_taken_search_record_types)
@abc.abstractmethod
def supports_assessment_taken_search_record_type(self, assessment_taken_search_record_type):
"""Tests if the given ``AssessmentTaken`` search record type is supported.
:param assessment_taken_search_record_type: a ``Type`` indicating an ``AssessmentTaken`` search record type
:type assessment_taken_search_record_type: ``osid.type.Type``
:return: ``true`` if the given Type is supported, ``false`` otherwise
:rtype: ``boolean``
:raise: ``NullArgument`` -- ``assessment_taken_search_record_type`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_assessment_section_record_types(self):
"""Gets the supported ``AssessmentSection`` record types.
:return: a list containing the supported ``AssessmentSection`` record types
:rtype: ``osid.type.TypeList``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.TypeList
assessment_section_record_types = property(fget=get_assessment_section_record_types)
@abc.abstractmethod
def supports_assessment_section_record_type(self, assessment_section_record_type):
"""Tests if the given ``AssessmentSection`` record type is supported.
:param assessment_section_record_type: a ``Type`` indicating an ``AssessmentSection`` record type
:type assessment_section_record_type: ``osid.type.Type``
:return: ``true`` if the given Type is supported, ``false`` otherwise
:rtype: ``boolean``
:raise: ``NullArgument`` -- ``assessment_section_record_type`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_bank_record_types(self):
"""Gets the supported ``Bank`` record types.
:return: a list containing the supported ``Bank`` record types
:rtype: ``osid.type.TypeList``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.TypeList
bank_record_types = property(fget=get_bank_record_types)
@abc.abstractmethod
def supports_bank_record_type(self, bank_record_type):
"""Tests if the given ``Bank`` record type is supported.
:param bank_record_type: a ``Type`` indicating a ``Bank`` type
:type bank_record_type: ``osid.type.Type``
:return: ``true`` if the given key record ``Type`` is supported, ``false`` otherwise
:rtype: ``boolean``
:raise: ``NullArgument`` -- ``bank_record_type`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_bank_search_record_types(self):
"""Gets the supported bank search record types.
:return: a list containing the supported ``Bank`` search record types
:rtype: ``osid.type.TypeList``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.TypeList
bank_search_record_types = property(fget=get_bank_search_record_types)
@abc.abstractmethod
def supports_bank_search_record_type(self, bank_search_record_type):
"""Tests if the given bank search record type is supported.
:param bank_search_record_type: a ``Type`` indicating a ``Bank`` search record type
:type bank_search_record_type: ``osid.type.Type``
:return: ``true`` if the given search record ``Type`` is supported, ``false`` otherwise
:rtype: ``boolean``
:raise: ``NullArgument`` -- ``bank_search_record_type`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
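

# Hedged sketch (not part of the OSID specification): a minimal, partial
# concrete profile. A real adapter answers every supports_*() method from
# its actual feature set; the hard-coded values below are illustrative
# assumptions only, and the remaining abstract methods are left
# unimplemented here for brevity.
class ExampleAssessmentProfile(AssessmentProfile):

    def supports_visible_federation(self):
        # This illustrative profile exposes a single, non-federated bank.
        return False

    def supports_item_lookup(self):
        return True

    def supports_item_record_type(self, item_record_type):
        # Mirrors the NullArgument contract from the abstract docstring.
        if item_record_type is None:
            raise ValueError('item_record_type is null')
        return False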
class AssessmentManager:
"""The assessment manager provides access to assessment sessions and provides interoperability tests for various aspects of this service.
The sessions included in this manager are:
* ``MyAssessmentTakenSession:`` a session to get taken or in
progress assessments for the current agent
* ``AssessmentSession:`` a session to be assessed and examine
assessments taken
* ``AssessmentResultsSession:`` a session to retrieve assessment
results
* ``ItemLookupSession:`` a session to look up ``Items``
* ``ItemQuerySession`` : a session to query ``Items``
* ``ItemSearchSession:`` a session to search ``Items``
* ``ItemAdminSession:`` a session to create, modify and delete
``Items``
    * ``ItemNotificationSession:`` a session to receive messages
pertaining to ``Item`` changes
* ``ItemBankSession:`` a session for looking up item and bank
mappings
* ``ItemBankAssignmentSession:`` a session for managing item and
bank mappings
* ``ItemSmartBankSession:`` a session for managing dynamic banks
* ``AssessmentLookupSession:`` a session to look up
``Assessments``
* ``AssessmentQuerySession:`` a session to query ``Assessments``
* ``AssessmentSearchSession:`` a session to search ``Assessments``
* ``AssessmentAdminSession:`` a session to create, modify and
delete ``Assessments``
    * ``AssessmentNotificationSession:`` a session to receive messages
pertaining to ``Assessment`` changes
* ``AssessmentBankSession:`` a session for looking up assessment
and bank mappings
* ``AssessmentBankAssignmentSession:`` a session for managing
assessment and bank mappings
* ``AssessmentSmartBankSession:`` a session for managing dynamic
banks
* ``AssessmentBasicAuthoringSession:`` a session for making simple
mappings of assessment items to assessments
* ``AssessmentOfferedLookupSession:`` a session to look up
``AssessmentsOffered``
* ``AssessmentOfferedQuerySession:`` a session to query
``AssessmentsOffered``
* ``AssessmentOfferedSearchSession`` : a session to search
``AssessmentsOffered``
* ``AssessmentOfferedAdminSession:`` a session to create, modify
and delete ``AssessmentsOffered``
    * ``AssessmentOfferedNotificationSession:`` a session to receive
messages pertaining to ``AssessmentOffered`` changes
* ``AssessmentOfferedBankSession:`` a session for looking up
assessments offered and bank mappings
* ``AssessmentOfferedBankAssignmentSession:`` a session for
managing assessments offered and bank mappings
* ``AssessmentOfferedSmartBankSession`` : a session to manage
dynamic banks of assessments offered
* ``AssessmentTakenLookupSession:`` a session to look up
``Assessments``
* ``AssessmentTakenQuerySession:`` a session to query
``Assessments``
* ``AssessmentTakenSearchSession:`` a session to search
Assessments
* ``AssessmentTakenAdminSession:`` a session to create, modify and
delete ``AssessmentsTaken``
    * ``AssessmentTakenNotificationSession:`` a session to receive
messages pertaining to ``AssessmentTaken`` changes
* ``AssessmentTakenBankSession:`` a session for looking up
assessments taken and bank mappings
    * ``AssessmentTakenBankAssignmentSession:`` a session for
managing assessments taken and bank mappings
* ``AssessmentTakenSmartBankSession:`` a session to manage dynamic
banks of assessments taken
* ``BankLookupSession:`` a session to lookup banks
* ``BankQuerySession`` : a session to query banks
* ``BankSearchSession:`` a session to search banks
* ``BankAdminSession`` : a session to create, modify and delete
banks
* ``BankNotificationSession`` : a session to receive messages
pertaining to ``Bank`` changes
* ``BankHierarchySession`` : a session to traverse the ``Bank``
hierarchy
* ``BankHierarchyDesignSession`` : a session to manage the
``Bank`` hierarchy
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def get_my_assessment_taken_session(self):
"""Gets a ``MyAssessmentTakenSession`` to retrieve assessments taken for the current agent.
:return: a ``MyAssessmentTakenSession``
:rtype: ``osid.assessment.MyAssessmentTakenSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_my_assessment_taken()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_my_assessment_taken()`` is ``true``.*
"""
return # osid.assessment.MyAssessmentTakenSession
my_assessment_taken_session = property(fget=get_my_assessment_taken_session)
@abc.abstractmethod
def get_my_assessment_taken_session_for_bank(self, bank_id):
"""Gets a ``MyAssessmentTakenSession`` to retrieve assessments taken for the current agent for the given bank ``Id``.
:param bank_id: the ``Id`` of a bank
:type bank_id: ``osid.id.Id``
:return: a ``MyAssessmentTakenSession``
:rtype: ``osid.assessment.MyAssessmentTakenSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_my_assessment_taken()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_my_assessment_taken()`` is ``true``.*
"""
return # osid.assessment.MyAssessmentTakenSession
@abc.abstractmethod
def get_assessment_session(self):
"""Gets an ``AssessmentSession`` which is responsible for taking assessments and examining responses from assessments taken.
:return: an assessment session for this service
:rtype: ``osid.assessment.AssessmentSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment()`` is ``true``.*
"""
return # osid.assessment.AssessmentSession
assessment_session = property(fget=get_assessment_session)
@abc.abstractmethod
def get_assessment_session_for_bank(self, bank_id):
"""Gets an ``AssessmentSession`` which is responsible for performing assessments for the given bank ``Id``.
:param bank_id: the ``Id`` of a bank
:type bank_id: ``osid.id.Id``
:return: an assessment session for this service
:rtype: ``osid.assessment.AssessmentSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment()`` is ``true``.*
"""
return # osid.assessment.AssessmentSession
@abc.abstractmethod
def get_assessment_results_session(self):
"""Gets an ``AssessmentResultsSession`` to retrieve assessment results.
:return: an assessment results session for this service
:rtype: ``osid.assessment.AssessmentResultsSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_results()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_results()`` is ``true``.*
"""
return # osid.assessment.AssessmentResultsSession
assessment_results_session = property(fget=get_assessment_results_session)
@abc.abstractmethod
def get_assessment_results_session_for_bank(self, bank_id):
"""Gets an ``AssessmentResultsSession`` to retrieve assessment results for the given bank.
        :param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:return: an assessment results session for this service
:rtype: ``osid.assessment.AssessmentResultsSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_results()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_results()`` is ``true``.*
"""
return # osid.assessment.AssessmentResultsSession
@abc.abstractmethod
def get_item_lookup_session(self):
"""Gets the ``OsidSession`` associated with the item lookup service.
:return: an ``ItemLookupSession``
:rtype: ``osid.assessment.ItemLookupSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_item_lookup()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_item_lookup()`` is ``true``.*
"""
return # osid.assessment.ItemLookupSession
item_lookup_session = property(fget=get_item_lookup_session)
@abc.abstractmethod
def get_item_lookup_session_for_bank(self, bank_id):
"""Gets the ``OsidSession`` associated with the item lookup service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
        :return: an ``ItemLookupSession``
:rtype: ``osid.assessment.ItemLookupSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_item_lookup()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_item_lookup()`` and ``supports_visible_federation()``
are ``true``.*
"""
return # osid.assessment.ItemLookupSession
@abc.abstractmethod
def get_item_query_session(self):
"""Gets the ``OsidSession`` associated with the item query service.
:return: an ``ItemQuerySession``
:rtype: ``osid.assessment.ItemQuerySession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_item_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_item_query()`` is ``true``.*
"""
return # osid.assessment.ItemQuerySession
item_query_session = property(fget=get_item_query_session)
@abc.abstractmethod
def get_item_query_session_for_bank(self, bank_id):
"""Gets the ``OsidSession`` associated with the item query service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
        :return: an ``ItemQuerySession``
:rtype: ``osid.assessment.ItemQuerySession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_item_query()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_item_query()`` and ``supports_visible_federation()``
are ``true``.*
"""
return # osid.assessment.ItemQuerySession
@abc.abstractmethod
def get_item_search_session(self):
"""Gets the ``OsidSession`` associated with the item search service.
:return: an ``ItemSearchSession``
:rtype: ``osid.assessment.ItemSearchSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_item_search()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_item_search()`` is ``true``.*
"""
return # osid.assessment.ItemSearchSession
item_search_session = property(fget=get_item_search_session)
@abc.abstractmethod
def get_item_search_session_for_bank(self, bank_id):
"""Gets the ``OsidSession`` associated with the item search service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
        :return: an ``ItemSearchSession``
:rtype: ``osid.assessment.ItemSearchSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_item_search()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_item_search()`` and ``supports_visible_federation()``
are ``true``.*
"""
return # osid.assessment.ItemSearchSession
@abc.abstractmethod
def get_item_admin_session(self):
"""Gets the ``OsidSession`` associated with the item administration service.
:return: an ``ItemAdminSession``
:rtype: ``osid.assessment.ItemAdminSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_item_admin()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_item_admin()`` is ``true``.*
"""
return # osid.assessment.ItemAdminSession
item_admin_session = property(fget=get_item_admin_session)
@abc.abstractmethod
def get_item_admin_session_for_bank(self, bank_id):
"""Gets the ``OsidSession`` associated with the item admin service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
        :return: an ``ItemAdminSession``
:rtype: ``osid.assessment.ItemAdminSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_item_admin()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_item_admin()`` and ``supports_visible_federation()``
are ``true``.*
"""
return # osid.assessment.ItemAdminSession
@abc.abstractmethod
def get_item_notification_session(self, item_receiver):
"""Gets the notification session for notifications pertaining to item changes.
:param item_receiver: the item receiver interface
:type item_receiver: ``osid.assessment.ItemReceiver``
:return: an ``ItemNotificationSession``
:rtype: ``osid.assessment.ItemNotificationSession``
:raise: ``NullArgument`` -- ``item_receiver`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_item_notification()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_item_notification()`` is ``true``.*
"""
return # osid.assessment.ItemNotificationSession
@abc.abstractmethod
def get_item_notification_session_for_bank(self, item_receiver, bank_id):
"""Gets the ``OsidSession`` associated with the item notification service for the given bank.
:param item_receiver: the item receiver interface
:type item_receiver: ``osid.assessment.ItemReceiver``
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
        :return: an ``ItemNotificationSession``
        :rtype: ``osid.assessment.ItemNotificationSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``item_receiver`` or ``bank_id`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_item_notification()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_item_notification()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.ItemNotificationSession
@abc.abstractmethod
def get_item_bank_session(self):
"""Gets the ``OsidSession`` associated with the item banking service.
:return: an ``ItemBankSession``
:rtype: ``osid.assessment.ItemBankSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_item_bank()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_item_bank()`` is ``true``.*
"""
return # osid.assessment.ItemBankSession
item_bank_session = property(fget=get_item_bank_session)
@abc.abstractmethod
def get_item_bank_assignment_session(self):
"""Gets the ``OsidSession`` associated with the item bank assignment service.
:return: an ``ItemBankAssignmentSession``
:rtype: ``osid.assessment.ItemBankAssignmentSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_item_bank_assignment()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_item_bank_assignment()`` is ``true``.*
"""
return # osid.assessment.ItemBankAssignmentSession
item_bank_assignment_session = property(fget=get_item_bank_assignment_session)
@abc.abstractmethod
def get_item_smart_bank_session(self, bank_id):
"""Gets the ``OsidSession`` associated with the item smart banking service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:return: an ``ItemSmartBankSession``
:rtype: ``osid.assessment.ItemSmartBankSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_item_smart_bank()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_item_smart_bank()`` and
        ``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.ItemSmartBankSession
@abc.abstractmethod
def get_assessment_lookup_session(self):
"""Gets the ``OsidSession`` associated with the assessment lookup service.
:return: an ``AssessmentLookupSession``
:rtype: ``osid.assessment.AssessmentLookupSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_lookup()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_lookup()`` is ``true``.*
"""
return # osid.assessment.AssessmentLookupSession
assessment_lookup_session = property(fget=get_assessment_lookup_session)
@abc.abstractmethod
def get_assessment_lookup_session_for_bank(self, bank_id):
"""Gets the ``OsidSession`` associated with the assessment lookup service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
        :return: an ``AssessmentLookupSession``
:rtype: ``osid.assessment.AssessmentLookupSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_lookup()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_lookup()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentLookupSession
@abc.abstractmethod
def get_assessment_query_session(self):
"""Gets the ``OsidSession`` associated with the assessment query service.
:return: an ``AssessmentQuerySession``
:rtype: ``osid.assessment.AssessmentQuerySession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_query()`` is ``true``.*
"""
return # osid.assessment.AssessmentQuerySession
assessment_query_session = property(fget=get_assessment_query_session)
@abc.abstractmethod
def get_assessment_query_session_for_bank(self, bank_id):
"""Gets the ``OsidSession`` associated with the assessment query service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
        :return: an ``AssessmentQuerySession``
:rtype: ``osid.assessment.AssessmentQuerySession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_query()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_query()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentQuerySession
@abc.abstractmethod
def get_assessment_search_session(self):
"""Gets the ``OsidSession`` associated with the assessment search service.
:return: an ``AssessmentSearchSession``
:rtype: ``osid.assessment.AssessmentSearchSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_search()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_search()`` is ``true``.*
"""
return # osid.assessment.AssessmentSearchSession
assessment_search_session = property(fget=get_assessment_search_session)
@abc.abstractmethod
def get_assessment_search_session_for_bank(self, bank_id):
"""Gets the ``OsidSession`` associated with the assessment search service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
        :return: an ``AssessmentSearchSession``
:rtype: ``osid.assessment.AssessmentSearchSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_search()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_search()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentSearchSession
@abc.abstractmethod
def get_assessment_admin_session(self):
"""Gets the ``OsidSession`` associated with the assessment administration service.
:return: an ``AssessmentAdminSession``
:rtype: ``osid.assessment.AssessmentAdminSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_admin()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_admin()`` is ``true``.*
"""
return # osid.assessment.AssessmentAdminSession
assessment_admin_session = property(fget=get_assessment_admin_session)
@abc.abstractmethod
def get_assessment_admin_session_for_bank(self, bank_id):
"""Gets the ``OsidSession`` associated with the assessment admin service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
        :return: an ``AssessmentAdminSession``
:rtype: ``osid.assessment.AssessmentAdminSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_admin()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_admin()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentAdminSession
@abc.abstractmethod
def get_assessment_notification_session(self, assessment_receiver):
"""Gets the notification session for notifications pertaining to assessment changes.
:param assessment_receiver: the assessment receiver interface
:type assessment_receiver: ``osid.assessment.AssessmentReceiver``
:return: an ``AssessmentNotificationSession``
:rtype: ``osid.assessment.AssessmentNotificationSession``
:raise: ``NullArgument`` -- ``assessment_receiver`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_notification()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_notification()`` is ``true``.*
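        Example (a non-normative sketch -- ``manager`` is assumed, and
        ``MyAssessmentReceiver`` stands in for any class implementing
        ``osid.assessment.AssessmentReceiver``; both are illustrative)::

            receiver = MyAssessmentReceiver()
            if manager.supports_assessment_notification():
                session = manager.get_assessment_notification_session(receiver)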
"""
        return # osid.assessment.AssessmentNotificationSession
@abc.abstractmethod
def get_assessment_notification_session_for_bank(self, assessment_receiver, bank_id):
"""Gets the ``OsidSession`` associated with the assessment notification service for the given bank.
:param assessment_receiver: the assessment receiver interface
:type assessment_receiver: ``osid.assessment.AssessmentReceiver``
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
        :return: an ``AssessmentNotificationSession``
:rtype: ``osid.assessment.AssessmentNotificationSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``assessment_receiver`` or ``bank_id`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_notification()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_notification()`` and
``supports_visible_federation()`` are ``true``.*
"""
        return # osid.assessment.AssessmentNotificationSession
@abc.abstractmethod
def get_assessment_bank_session(self):
"""Gets the ``OsidSession`` associated with the assessment banking service.
:return: an ``AssessmentBankSession``
:rtype: ``osid.assessment.AssessmentBankSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_bank()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_bank()`` is ``true``.*
"""
return # osid.assessment.AssessmentBankSession
assessment_bank_session = property(fget=get_assessment_bank_session)
@abc.abstractmethod
def get_assessment_bank_assignment_session(self):
"""Gets the ``OsidSession`` associated with the assessment bank assignment service.
:return: an ``AssessmentBankAssignmentSession``
:rtype: ``osid.assessment.AssessmentBankAssignmentSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_bank_assignment()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_bank_assignment()`` is ``true``.*
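        Example (a non-normative sketch -- ``manager``, ``assessment_id`` and
        ``bank_id`` are assumed; the assignment call follows the OSID
        ``AssessmentBankAssignmentSession`` contract and is illustrative)::

            if manager.supports_assessment_bank_assignment():
                session = manager.get_assessment_bank_assignment_session()
                session.assign_assessment_to_bank(assessment_id, bank_id)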
"""
return # osid.assessment.AssessmentBankAssignmentSession
assessment_bank_assignment_session = property(fget=get_assessment_bank_assignment_session)
@abc.abstractmethod
def get_assessment_smart_bank_session(self, bank_id):
"""Gets the ``OsidSession`` associated with the assessment smart banking service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:return: an ``AssessmentSmartBankSession``
:rtype: ``osid.assessment.AssessmentSmartBankSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_smart_bank()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_smart_bank()`` and
        ``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentSmartBankSession
@abc.abstractmethod
def get_assessment_basic_authoring_session(self):
"""Gets the ``OsidSession`` associated with the assessment authoring service.
:return: an ``AssessmentBasicAuthoringSession``
:rtype: ``osid.assessment.AssessmentBasicAuthoringSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_basic_authoring()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_basic_authoring()`` is ``true``.*
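        Example (a non-normative sketch -- ``manager``, ``assessment_id`` and
        ``item_id`` are assumed; ``add_item`` is the basic-authoring call for
        mapping an item onto an assessment and is illustrative here)::

            if manager.supports_assessment_basic_authoring():
                session = manager.get_assessment_basic_authoring_session()
                session.add_item(assessment_id, item_id)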
"""
return # osid.assessment.AssessmentBasicAuthoringSession
assessment_basic_authoring_session = property(fget=get_assessment_basic_authoring_session)
@abc.abstractmethod
def get_assessment_basic_authoring_session_for_bank(self, bank_id):
"""Gets the ``OsidSession`` associated with the assessment authoring service for the given bank.
:param bank_id: the ``Id`` of a bank
:type bank_id: ``osid.id.Id``
:return: an ``AssessmentBasicAuthoringSession``
:rtype: ``osid.assessment.AssessmentBasicAuthoringSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_assessment_basic_authoring()`` or ``supports_visible_federation()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_assessment_basic_authoring()`` and
        ``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentBasicAuthoringSession
@abc.abstractmethod
def get_assessment_offered_lookup_session(self):
"""Gets the ``OsidSession`` associated with the assessment offered lookup service.
:return: an ``AssessmentOfferedLookupSession``
:rtype: ``osid.assessment.AssessmentOfferedLookupSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_offered_lookup()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_offered_lookup()`` is ``true``.*
"""
return # osid.assessment.AssessmentOfferedLookupSession
assessment_offered_lookup_session = property(fget=get_assessment_offered_lookup_session)
@abc.abstractmethod
def get_assessment_offered_lookup_session_for_bank(self, bank_id):
"""Gets the ``OsidSession`` associated with the assessment offered lookup service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:return: an ``AssessmentOfferedLookupSession``
:rtype: ``osid.assessment.AssessmentOfferedLookupSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_offered_lookup()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_offered_lookup()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentOfferedLookupSession
@abc.abstractmethod
def get_assessment_offered_query_session(self):
"""Gets the ``OsidSession`` associated with the assessment offered query service.
:return: an ``AssessmentOfferedQuerySession``
:rtype: ``osid.assessment.AssessmentOfferedQuerySession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_offered_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_offered_query()`` is ``true``.*
"""
return # osid.assessment.AssessmentOfferedQuerySession
assessment_offered_query_session = property(fget=get_assessment_offered_query_session)
@abc.abstractmethod
def get_assessment_offered_query_session_for_bank(self, bank_id):
"""Gets the ``OsidSession`` associated with the assessment offered query service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:return: an ``AssessmentOfferedQuerySession``
:rtype: ``osid.assessment.AssessmentOfferedQuerySession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_offered_query()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_offered_query()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentOfferedQuerySession
@abc.abstractmethod
def get_assessment_offered_search_session(self):
"""Gets the ``OsidSession`` associated with the assessment offered search service.
:return: an ``AssessmentOfferedSearchSession``
:rtype: ``osid.assessment.AssessmentOfferedSearchSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_offered_search()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_offered_search()`` is ``true``.*
"""
return # osid.assessment.AssessmentOfferedSearchSession
assessment_offered_search_session = property(fget=get_assessment_offered_search_session)
@abc.abstractmethod
def get_assessment_offered_search_session_for_bank(self, bank_id):
"""Gets the ``OsidSession`` associated with the assessment offered search service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:return: an ``AssessmentOfferedSearchSession``
:rtype: ``osid.assessment.AssessmentOfferedSearchSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_offered_search()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_offered_search()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentOfferedSearchSession
@abc.abstractmethod
def get_assessment_offered_admin_session(self):
"""Gets the ``OsidSession`` associated with the assessment offered administration service.
:return: an ``AssessmentOfferedAdminSession``
:rtype: ``osid.assessment.AssessmentOfferedAdminSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_offered_admin()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_offered_admin()`` is ``true``.*
"""
return # osid.assessment.AssessmentOfferedAdminSession
assessment_offered_admin_session = property(fget=get_assessment_offered_admin_session)
@abc.abstractmethod
def get_assessment_offered_admin_session_for_bank(self, bank_id):
"""Gets the ``OsidSession`` associated with the assessment offered admin service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:return: an ``AssessmentOfferedAdminSession``
:rtype: ``osid.assessment.AssessmentOfferedAdminSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_offered_admin()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_offered_admin()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentOfferedAdminSession
@abc.abstractmethod
def get_assessment_offered_notification_session(self, assessment_offered_receiver):
"""Gets the notification session for notifications pertaining to offered assessment changes.
:param assessment_offered_receiver: the assessment offered receiver interface
:type assessment_offered_receiver: ``osid.assessment.AssessmentOfferedReceiver``
:return: an ``AssessmentOfferedNotificationSession``
:rtype: ``osid.assessment.AssessmentOfferedNotificationSession``
:raise: ``NullArgument`` -- ``assessment_offered_receiver`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_offered_notification()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_offered_notification()`` is ``true``.*
"""
return # osid.assessment.AssessmentOfferedNotificationSession
@abc.abstractmethod
def get_assessment_offered_notification_session_for_bank(self, assessment_offered_receiver, bank_id):
"""Gets the ``OsidSession`` associated with the offered assessment notification service for the given bank.
:param assessment_offered_receiver: the assessment offered receiver interface
:type assessment_offered_receiver: ``osid.assessment.AssessmentOfferedReceiver``
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
        :return: an ``AssessmentOfferedNotificationSession``
:rtype: ``osid.assessment.AssessmentOfferedNotificationSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``assessment_offered_receiver`` or ``bank_id`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_offered_notification()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_offered_notification()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentOfferedNotificationSession
@abc.abstractmethod
def get_assessment_offered_bank_session(self):
"""Gets the session for retrieving offered assessments to bank mappings.
:return: an ``AssessmentOfferedBankSession``
:rtype: ``osid.assessment.AssessmentOfferedBankSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_offered_bank()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_offered_bank()`` is ``true``.*
"""
return # osid.assessment.AssessmentOfferedBankSession
assessment_offered_bank_session = property(fget=get_assessment_offered_bank_session)
@abc.abstractmethod
def get_assessment_offered_bank_assignment_session(self):
"""Gets the session for assigning offered assessments to bank mappings.
:return: an ``AssessmentOfferedBankAssignmentSession``
:rtype: ``osid.assessment.AssessmentOfferedBankAssignmentSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_offered_bank_assignment()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_offered_bank_assignment()`` is ``true``.*
"""
return # osid.assessment.AssessmentOfferedBankAssignmentSession
assessment_offered_bank_assignment_session = property(fget=get_assessment_offered_bank_assignment_session)
@abc.abstractmethod
def get_assessment_offered_smart_bank_session(self, bank_id):
"""Gets the ``OsidSession`` associated with the assessment offered smart banking service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:return: an ``AssessmentOfferedSmartBankSession``
:rtype: ``osid.assessment.AssessmentOfferedSmartBankSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_offered_smart_bank()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_offered_smart_bank()`` and
        ``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentOfferedSmartBankSession
@abc.abstractmethod
def get_assessment_taken_lookup_session(self):
"""Gets the ``OsidSession`` associated with the assessment taken lookup service.
:return: an ``AssessmentTakenLookupSession``
:rtype: ``osid.assessment.AssessmentTakenLookupSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_taken_lookup()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_taken_lookup()`` is ``true``.*
"""
return # osid.assessment.AssessmentTakenLookupSession
assessment_taken_lookup_session = property(fget=get_assessment_taken_lookup_session)
@abc.abstractmethod
def get_assessment_taken_lookup_session_for_bank(self, bank_id):
"""Gets the ``OsidSession`` associated with the assessment taken lookup service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:return: an ``AssessmentTakenLookupSession``
:rtype: ``osid.assessment.AssessmentTakenLookupSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_taken_lookup()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_taken_lookup()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentTakenLookupSession
@abc.abstractmethod
def get_assessment_taken_query_session(self):
"""Gets the ``OsidSession`` associated with the assessment taken query service.
:return: an ``AssessmentTakenQuerySession``
:rtype: ``osid.assessment.AssessmentTakenQuerySession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_taken_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_taken_query()`` is ``true``.*
"""
return # osid.assessment.AssessmentTakenQuerySession
assessment_taken_query_session = property(fget=get_assessment_taken_query_session)
@abc.abstractmethod
def get_assessment_taken_query_session_for_bank(self, bank_id):
"""Gets the ``OsidSession`` associated with the assessment taken query service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:return: an ``AssessmentTakenQuerySession``
:rtype: ``osid.assessment.AssessmentTakenQuerySession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_taken_query()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_taken_query()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentTakenQuerySession
@abc.abstractmethod
def get_assessment_taken_search_session(self):
"""Gets the ``OsidSession`` associated with the assessment taken search service.
:return: an ``AssessmentTakenSearchSession``
:rtype: ``osid.assessment.AssessmentTakenSearchSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_taken_search()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_taken_search()`` is ``true``.*
"""
return # osid.assessment.AssessmentTakenSearchSession
assessment_taken_search_session = property(fget=get_assessment_taken_search_session)
@abc.abstractmethod
def get_assessment_taken_search_session_for_bank(self, bank_id):
"""Gets the ``OsidSession`` associated with the assessment taken search service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:return: an ``AssessmentTakenSearchSession``
:rtype: ``osid.assessment.AssessmentTakenSearchSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_taken_search()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_taken_search()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentTakenSearchSession
@abc.abstractmethod
def get_assessment_taken_admin_session(self):
"""Gets the ``OsidSession`` associated with the assessment taken administration service.
:return: an ``AssessmentTakenAdminSession``
:rtype: ``osid.assessment.AssessmentTakenAdminSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_taken_admin()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_taken_admin()`` is ``true``.*
"""
return # osid.assessment.AssessmentTakenAdminSession
assessment_taken_admin_session = property(fget=get_assessment_taken_admin_session)
@abc.abstractmethod
def get_assessment_taken_admin_session_for_bank(self, bank_id):
"""Gets the ``OsidSession`` associated with the assessment taken admin service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
        :return: an ``AssessmentTakenAdminSession``
:rtype: ``osid.assessment.AssessmentTakenAdminSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_taken_admin()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_taken_admin()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentTakenAdminSession
@abc.abstractmethod
def get_assessment_taken_notification_session(self, assessment_taken_receiver):
"""Gets the notification session for notifications pertaining to taken assessment changes.
:param assessment_taken_receiver: the assessment taken receiver interface
:type assessment_taken_receiver: ``osid.assessment.AssessmentTakenReceiver``
:return: an ``AssessmentTakenNotificationSession``
:rtype: ``osid.assessment.AssessmentTakenNotificationSession``
:raise: ``NullArgument`` -- ``assessment_taken_receiver`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_taken_notification()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_taken_notification()`` is ``true``.*
"""
return # osid.assessment.AssessmentTakenNotificationSession
@abc.abstractmethod
def get_assessment_taken_notification_session_for_bank(self, assessment_taken_receiver, bank_id):
"""Gets the ``OsidSession`` associated with the taken assessment notification service for the given bank.
:param assessment_taken_receiver: the assessment taken receiver interface
:type assessment_taken_receiver: ``osid.assessment.AssessmentTakenReceiver``
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:return: an ``AssessmentTakenNotificationSession``
:rtype: ``osid.assessment.AssessmentTakenNotificationSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``assessment_taken_receiver`` or ``bank_id`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_taken_notification()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_taken_notification()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentTakenNotificationSession
@abc.abstractmethod
def get_assessment_taken_bank_session(self):
"""Gets the session for retrieving taken assessments to bank mappings.
:return: an ``AssessmentTakenBankSession``
:rtype: ``osid.assessment.AssessmentTakenBankSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_taken_bank()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_taken_bank()`` is ``true``.*
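        Example (a non-normative sketch -- ``manager`` and ``bank_id`` are
        assumed; the retrieval call follows the OSID
        ``AssessmentTakenBankSession`` contract and is illustrative here)::

            if manager.supports_assessment_taken_bank():
                session = manager.get_assessment_taken_bank_session()
                taken = session.get_assessments_taken_by_bank(bank_id)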
"""
return # osid.assessment.AssessmentTakenBankSession
assessment_taken_bank_session = property(fget=get_assessment_taken_bank_session)
@abc.abstractmethod
def get_assessment_taken_bank_assignment_session(self):
"""Gets the session for assigning taken assessments to bank mappings.
:return: an ``AssessmentTakenBankAssignmentSession``
:rtype: ``osid.assessment.AssessmentTakenBankAssignmentSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_taken_bank_assignment()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_taken_bank_assignment()`` is ``true``.*
"""
return # osid.assessment.AssessmentTakenBankAssignmentSession
assessment_taken_bank_assignment_session = property(fget=get_assessment_taken_bank_assignment_session)
@abc.abstractmethod
def get_assessment_taken_smart_bank_session(self, bank_id):
"""Gets the ``OsidSession`` associated with the assessment taken smart banking service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:return: an ``AssessmentTakenSmartBankSession``
:rtype: ``osid.assessment.AssessmentTakenSmartBankSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_taken_smart_bank()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_taken_smart_bank()`` and
        ``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentTakenSmartBankSession
@abc.abstractmethod
def get_bank_lookup_session(self):
"""Gets the OsidSession associated with the bank lookup service.
:return: a ``BankLookupSession``
:rtype: ``osid.assessment.BankLookupSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_bank_lookup() is false``
*compliance: optional -- This method must be implemented if
``supports_bank_lookup()`` is true.*
"""
return # osid.assessment.BankLookupSession
bank_lookup_session = property(fget=get_bank_lookup_session)
@abc.abstractmethod
def get_bank_query_session(self):
"""Gets the OsidSession associated with the bank query service.
:return: a ``BankQuerySession``
:rtype: ``osid.assessment.BankQuerySession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_bank_query() is false``
*compliance: optional -- This method must be implemented if
``supports_bank_query()`` is true.*
"""
return # osid.assessment.BankQuerySession
bank_query_session = property(fget=get_bank_query_session)
@abc.abstractmethod
def get_bank_search_session(self):
"""Gets the OsidSession associated with the bank search service.
:return: a ``BankSearchSession``
:rtype: ``osid.assessment.BankSearchSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_bank_search() is false``
*compliance: optional -- This method must be implemented if
``supports_bank_search()`` is true.*
"""
return # osid.assessment.BankSearchSession
bank_search_session = property(fget=get_bank_search_session)
@abc.abstractmethod
def get_bank_admin_session(self):
"""Gets the OsidSession associated with the bank administration service.
:return: a ``BankAdminSession``
:rtype: ``osid.assessment.BankAdminSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_bank_admin() is false``
*compliance: optional -- This method must be implemented if
``supports_bank_admin()`` is true.*
"""
return # osid.assessment.BankAdminSession
bank_admin_session = property(fget=get_bank_admin_session)
@abc.abstractmethod
    def get_bank_notification_session(self, bank_receiver):
        """Gets the notification session for notifications pertaining to bank service changes.
        :param bank_receiver: the bank receiver interface
        :type bank_receiver: ``osid.assessment.BankReceiver``
        :return: a ``BankNotificationSession``
        :rtype: ``osid.assessment.BankNotificationSession``
        :raise: ``NullArgument`` -- ``bank_receiver`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_bank_notification()`` is ``false``
        *compliance: optional -- This method must be implemented if
        ``supports_bank_notification()`` is ``true``.*
"""
return # osid.assessment.BankNotificationSession
@abc.abstractmethod
def get_bank_hierarchy_session(self):
"""Gets the session traversing bank hierarchies.
:return: a ``BankHierarchySession``
:rtype: ``osid.assessment.BankHierarchySession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_bank_hierarchy() is false``
*compliance: optional -- This method must be implemented if
``supports_bank_hierarchy()`` is true.*
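        Example (a non-normative sketch -- ``manager`` is assumed; traversal
        calls such as ``get_root_banks()`` follow the OSID hierarchy-session
        pattern and are illustrative here)::

            if manager.supports_bank_hierarchy():
                session = manager.get_bank_hierarchy_session()
                roots = session.get_root_banks()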
"""
return # osid.assessment.BankHierarchySession
bank_hierarchy_session = property(fget=get_bank_hierarchy_session)
@abc.abstractmethod
def get_bank_hierarchy_design_session(self):
"""Gets the session designing bank hierarchies.
:return: a ``BankHierarchySession``
:rtype: ``osid.assessment.BankHierarchyDesignSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_bank_hierarchy_design() is false``
*compliance: optional -- This method must be implemented if
``supports_bank_hierarchy_design()`` is true.*
"""
return # osid.assessment.BankHierarchyDesignSession
bank_hierarchy_design_session = property(fget=get_bank_hierarchy_design_session)
@abc.abstractmethod
def get_assessment_authoring_manager(self):
"""Gets an ``AssessmentAuthoringManager``.
:return: an ``AssessmentAuthoringManager``
:rtype: ``osid.assessment.authoring.AssessmentAuthoringManager``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_authoring() is false``
*compliance: optional -- This method must be implemented if
``supports_assessment_authoring()`` is true.*
"""
return # osid.assessment.authoring.AssessmentAuthoringManager
assessment_authoring_manager = property(fget=get_assessment_authoring_manager)
@abc.abstractmethod
def get_assessment_batch_manager(self):
"""Gets an ``AssessmentBatchManager``.
:return: an ``AssessmentBatchManager``
:rtype: ``osid.assessment.batch.AssessmentBatchManager``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_batch() is false``
*compliance: optional -- This method must be implemented if
``supports_assessment_batch()`` is true.*
"""
return # osid.assessment.batch.AssessmentBatchManager
assessment_batch_manager = property(fget=get_assessment_batch_manager)
class AssessmentProxyManager:
"""The assessment manager provides access to assessment sessions and provides interoperability tests for various aspects of this service.
Methods in this manager support the passing of a ``Proxy`` object.
The sessions included in this manager are:
* ``MyAssessmentTakenSession:`` a session to get taken or in
progress assessments for the current agent
* ``AssessmentSession:`` a session to be assessed and examine
assessments taken
* ``AssessmentResultsSession:`` a session to retrieve assessment
results
* ``ItemLookupSession:`` a session to look up ``Items``
* ``ItemQuerySession`` : a session to query ``Items``
* ``ItemSearchSession:`` a session to search ``Items``
* ``ItemAdminSession:`` a session to create, modify and delete
``Items``
      * ``ItemNotificationSession:`` a session to receive messages
pertaining to ``Item`` changes
* ``ItemBankSession:`` a session for looking up item and bank
mappings
* ``ItemBankAssignmentSession:`` a session for managing item and
bank mappings
* ``ItemSmartBankSession:`` a session for managing dynamic banks
* ``AssessmentLookupSession:`` a session to look up
``Assessments``
* ``AssessmentQuerySession:`` a session to query ``Assessments``
* ``AssessmentSearchSession:`` a session to search ``Assessments``
* ``AssessmentAdminSession:`` a session to create, modify and
delete ``Assessments``
      * ``AssessmentNotificationSession:`` a session to receive messages
pertaining to ``Assessment`` changes
* ``AssessmentBankSession:`` a session for looking up assessment
and bank mappings
* ``AssessmentBankAssignmentSession:`` a session for managing
assessment and bank mappings
* ``AssessmentSmartBankSession:`` a session for managing dynamic
banks
* ``AssessmentBasicAuthoringSession:`` a session for making simple
mappings of assessment items to assessments
* ``AssessmentOfferedLookupSession:`` a session to look up
``Assessments``
* ``AssessmentOfferedQuerySession:`` a session to query
``Assessments``
* ``AssessmentOfferedSearchSession`` : a session to search
``Assessments``
* ``AssessmentOfferedAdminSession:`` a session to create, modify
and delete ``Assessments``
      * ``AssessmentOfferedNotificationSession:`` a session to receive
messages pertaining to ``Assessment`` changes
* ``AssessmentOfferedBankSession:`` a session for looking up
assessment and bank mappings
* ``AssessmentOfferedBankAssignmentSession:`` a session for
managing assessment and bank mappings
* ``AssessmentOfferedSmartBankSession`` : a session to manage
dynamic banks
* ``AssessmentTakenLookupSession:`` a session to look up
``Assessments``
* ``AssessmentTakenQuerySession:`` a session to query
``Assessments``
* ``AssessmentTakenSearchSession:`` a session to search
        ``Assessments``
* ``AssessmentTakenAdminSession:`` a session to create, modify and
delete ``AssessmentsTaken``
      * ``AssessmentTakenNotificationSession:`` a session to receive
messages pertaining to ``AssessmentTaken`` changes
* ``AssessmentTakenBankSession:`` a session for looking up
assessments taken and bank mappings
      * ``AssessmentTakenBankAssignmentSession:`` a session for
managing assessments taken and bank mappings
* ``AssessmentTakenSmartBankSession:`` a session to manage dynamic
banks of assessments taken
* ``BankLookupSession:`` a session to lookup banks
* ``BankQuerySession`` : a session to query banks
* ``BankSearchSession:`` a session to search banks
* ``BankAdminSession`` : a session to create, modify and delete
banks
* ``BankNotificationSession`` : a session to receive messages
pertaining to ``Bank`` changes
* ``BankHierarchySession`` : a session to traverse the ``Bank``
hierarchy
* ``BankHierarchyDesignSession`` : a session to manage the
``Bank`` hierarchy
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def get_my_assessment_taken_session(self, proxy):
"""Gets a ``MyAssessmentTakenSession`` to retrieve assessments taken for the current agent.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: a ``MyAssessmentTakenSession``
:rtype: ``osid.assessment.MyAssessmentTakenSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_my_assessment_taken()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_my_assessment_taken()`` is ``true``.*
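        Example (a non-normative sketch -- ``proxy_manager`` is an assumed
        concrete provider implementation and ``proxy`` an
        ``osid.proxy.Proxy`` obtained from a proxy service; neither is
        defined here)::

            if proxy_manager.supports_my_assessment_taken():
                session = proxy_manager.get_my_assessment_taken_session(proxy)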
"""
return # osid.assessment.MyAssessmentTakenSession
@abc.abstractmethod
def get_my_assessment_taken_session_for_bank(self, bank_id, proxy):
"""Gets a ``MyAssessmentTakenSession`` to retrieve assessments taken for the current agent for the given bank ``Id``.
:param bank_id: the ``Id`` of a bank
:type bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: a ``MyAssessmentTakenSession``
:rtype: ``osid.assessment.MyAssessmentTakenSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_my_assessment_taken()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_my_assessment_taken()`` is ``true``.*
"""
return # osid.assessment.MyAssessmentTakenSession
@abc.abstractmethod
def get_assessment_session(self, proxy):
"""Gets an ``AssessmentSession`` which is responsible for taking assessments and examining responses from assessments taken.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an assessment session for this service
:rtype: ``osid.assessment.AssessmentSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment()`` is ``true``.*
"""
return # osid.assessment.AssessmentSession
@abc.abstractmethod
def get_assessment_session_for_bank(self, bank_id, proxy):
"""Gets an ``AssessmentSession`` which is responsible for performing assessments for the given bank ``Id``.
:param bank_id: the ``Id`` of a bank
:type bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an assessment session for this service
:rtype: ``osid.assessment.AssessmentSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment()`` is ``true``.*
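        Example (a non-normative sketch -- ``proxy_manager``, ``bank_id`` and
        ``proxy`` are assumed, not defined here)::

            if proxy_manager.supports_assessment():
                session = proxy_manager.get_assessment_session_for_bank(bank_id, proxy)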
"""
return # osid.assessment.AssessmentSession
@abc.abstractmethod
def get_assessment_results_session(self, proxy):
"""Gets an ``AssessmentResultsSession`` to retrieve assessment results.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an assessment results session for this service
:rtype: ``osid.assessment.AssessmentResultsSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_results()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_results()`` is ``true``.*
"""
return # osid.assessment.AssessmentResultsSession
@abc.abstractmethod
def get_assessment_results_session_for_bank(self, bank_id, proxy):
"""Gets an ``AssessmentResultsSession`` to retrieve assessment results for the given bank.
:param bank_id: the ``Id`` of the assessment taken
:type bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an assessment results session for this service
:rtype: ``osid.assessment.AssessmentResultsSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_results()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_results()`` is ``true``.*
"""
return # osid.assessment.AssessmentResultsSession
@abc.abstractmethod
def get_item_lookup_session(self, proxy):
"""Gets the ``OsidSession`` associated with the item lookup service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``ItemLookupSession``
:rtype: ``osid.assessment.ItemLookupSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_item_lookup()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_item_lookup()`` is ``true``.*
"""
return # osid.assessment.ItemLookupSession
@abc.abstractmethod
def get_item_lookup_session_for_bank(self, bank_id, proxy):
"""Gets the ``OsidSession`` associated with the item lookup service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
        :return: an ``ItemLookupSession``
:rtype: ``osid.assessment.ItemLookupSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_item_lookup()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_item_lookup()`` and ``supports_visible_federation()``
are ``true``.*
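        Example (a non-normative sketch -- ``proxy_manager``, ``bank_id`` and
        ``proxy`` are assumed; both capability checks gate the call, as the
        compliance note above requires)::

            if (proxy_manager.supports_item_lookup()
                    and proxy_manager.supports_visible_federation()):
                session = proxy_manager.get_item_lookup_session_for_bank(bank_id, proxy)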
"""
return # osid.assessment.ItemLookupSession
@abc.abstractmethod
def get_item_query_session(self, proxy):
"""Gets the ``OsidSession`` associated with the item query service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``ItemQuerySession``
:rtype: ``osid.assessment.ItemQuerySession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_item_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_item_query()`` is ``true``.*
"""
return # osid.assessment.ItemQuerySession
@abc.abstractmethod
def get_item_query_session_for_bank(self, bank_id, proxy):
"""Gets the ``OsidSession`` associated with the item query service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
        :return: an ``ItemQuerySession``
:rtype: ``osid.assessment.ItemQuerySession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_item_query()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_item_query()`` and ``supports_visible_federation()``
are ``true``.*
"""
return # osid.assessment.ItemQuerySession
@abc.abstractmethod
def get_item_search_session(self, proxy):
"""Gets the ``OsidSession`` associated with the item search service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``ItemSearchSession``
:rtype: ``osid.assessment.ItemSearchSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_item_search()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_item_search()`` is ``true``.*
"""
return # osid.assessment.ItemSearchSession
@abc.abstractmethod
def get_item_search_session_for_bank(self, bank_id, proxy):
"""Gets the ``OsidSession`` associated with the item search service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
        :return: an ``ItemSearchSession``
        :rtype: ``osid.assessment.ItemSearchSession``
        :raise: ``NotFound`` -- ``bank_id`` not found
        :raise: ``NullArgument`` -- ``bank_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_item_search()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_item_search()`` and ``supports_visible_federation()``
are ``true``.*
"""
return # osid.assessment.ItemSearchSession
@abc.abstractmethod
def get_item_admin_session(self, proxy):
"""Gets the ``OsidSession`` associated with the item administration service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``ItemAdminSession``
:rtype: ``osid.assessment.ItemAdminSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_item_admin()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_item_admin()`` is ``true``.*
"""
return # osid.assessment.ItemAdminSession
@abc.abstractmethod
def get_item_admin_session_for_bank(self, bank_id, proxy):
"""Gets the ``OsidSession`` associated with the item admin service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
        :return: an ``ItemAdminSession``
:rtype: ``osid.assessment.ItemAdminSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_item_admin()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_item_admin()`` and ``supports_visible_federation()``
are ``true``.*
"""
return # osid.assessment.ItemAdminSession
@abc.abstractmethod
def get_item_notification_session(self, item_receiver, proxy):
"""Gets the notification session for notifications pertaining to item changes.
:param item_receiver: the item receiver interface
:type item_receiver: ``osid.assessment.ItemReceiver``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``ItemNotificationSession``
:rtype: ``osid.assessment.ItemNotificationSession``
:raise: ``NullArgument`` -- ``item_receiver`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_item_notification()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_item_notification()`` is ``true``.*
"""
return # osid.assessment.ItemNotificationSession
@abc.abstractmethod
def get_item_notification_session_for_bank(self, item_receiver, bank_id, proxy):
"""Gets the ``OsidSession`` associated with the item notification service for the given bank.
:param item_receiver: the item receiver interface
:type item_receiver: ``osid.assessment.ItemReceiver``
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
        :return: an ``ItemNotificationSession``
        :rtype: ``osid.assessment.ItemNotificationSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``item_receiver, bank_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_item_notification()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_item_notification()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.ItemNotificationSession
@abc.abstractmethod
def get_item_bank_session(self, proxy):
"""Gets the ``OsidSession`` associated with the item banking service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``ItemBankSession``
:rtype: ``osid.assessment.ItemBankSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_item_bank()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_item_bank()`` is ``true``.*
"""
return # osid.assessment.ItemBankSession
@abc.abstractmethod
def get_item_bank_assignment_session(self, proxy):
"""Gets the ``OsidSession`` associated with the item bank assignment service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``ItemBankAssignmentSession``
:rtype: ``osid.assessment.ItemBankAssignmentSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_item_bank_assignment()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_item_bank_assignment()`` is ``true``.*
"""
return # osid.assessment.ItemBankAssignmentSession
@abc.abstractmethod
def get_item_smart_bank_session(self, bank_id, proxy):
"""Gets the ``OsidSession`` associated with the item smart banking service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``ItemSmartBankSession``
:rtype: ``osid.assessment.ItemSmartBankSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_item_smart_bank()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_item_smart_bank()`` and
        ``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.ItemSmartBankSession
@abc.abstractmethod
def get_assessment_lookup_session(self, proxy):
"""Gets the ``OsidSession`` associated with the assessment lookup service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentLookupSession``
:rtype: ``osid.assessment.AssessmentLookupSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_lookup()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_lookup()`` is ``true``.*
"""
return # osid.assessment.AssessmentLookupSession
@abc.abstractmethod
def get_assessment_lookup_session_for_bank(self, bank_id, proxy):
"""Gets the ``OsidSession`` associated with the assessment lookup service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
        :return: an ``AssessmentLookupSession``
:rtype: ``osid.assessment.AssessmentLookupSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_lookup()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_lookup()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentLookupSession
@abc.abstractmethod
def get_assessment_query_session(self, proxy):
"""Gets the ``OsidSession`` associated with the assessment query service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentQuerySession``
:rtype: ``osid.assessment.AssessmentQuerySession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_query()`` is ``true``.*
"""
return # osid.assessment.AssessmentQuerySession
@abc.abstractmethod
def get_assessment_query_session_for_bank(self, bank_id, proxy):
"""Gets the ``OsidSession`` associated with the assessment query service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
        :return: an ``AssessmentQuerySession``
:rtype: ``osid.assessment.AssessmentQuerySession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_query()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_query()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentQuerySession
@abc.abstractmethod
def get_assessment_search_session(self, proxy):
"""Gets the ``OsidSession`` associated with the assessment search service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentSearchSession``
:rtype: ``osid.assessment.AssessmentSearchSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_search()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_search()`` is ``true``.*
"""
return # osid.assessment.AssessmentSearchSession
@abc.abstractmethod
def get_assessment_search_session_for_bank(self, bank_id, proxy):
"""Gets the ``OsidSession`` associated with the assessment search service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
        :return: an ``AssessmentSearchSession``
:rtype: ``osid.assessment.AssessmentSearchSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_search()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_search()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentSearchSession
@abc.abstractmethod
def get_assessment_admin_session(self, proxy):
"""Gets the ``OsidSession`` associated with the assessment administration service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentAdminSession``
:rtype: ``osid.assessment.AssessmentAdminSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_admin()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_admin()`` is ``true``.*
"""
return # osid.assessment.AssessmentAdminSession
@abc.abstractmethod
def get_assessment_admin_session_for_bank(self, bank_id, proxy):
"""Gets the ``OsidSession`` associated with the assessment admin service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
        :return: an ``AssessmentAdminSession``
:rtype: ``osid.assessment.AssessmentAdminSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_admin()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_admin()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentAdminSession
@abc.abstractmethod
def get_assessment_notification_session(self, assessment_receiver, proxy):
"""Gets the notification session for notifications pertaining to assessment changes.
:param assessment_receiver: the assessment receiver interface
:type assessment_receiver: ``osid.assessment.AssessmentReceiver``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentNotificationSession``
:rtype: ``osid.assessment.AssessmentNotificationSession``
:raise: ``NullArgument`` -- ``assessment_receiver`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_notification()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_notification()`` is ``true``.*
"""
        return # osid.assessment.AssessmentNotificationSession
@abc.abstractmethod
def get_assessment_notification_session_for_bank(self, assessment_receiver, bank_id, proxy):
"""Gets the ``OsidSession`` associated with the assessment notification service for the given bank.
:param assessment_receiver: the assessment receiver interface
:type assessment_receiver: ``osid.assessment.AssessmentReceiver``
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
        :return: an ``AssessmentNotificationSession``
:rtype: ``osid.assessment.AssessmentNotificationSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``assessment_receiver, bank_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_notification()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_notification()`` and
``supports_visible_federation()`` are ``true``.*
"""
        return # osid.assessment.AssessmentNotificationSession
@abc.abstractmethod
def get_assessment_bank_session(self, proxy):
"""Gets the ``OsidSession`` associated with the assessment banking service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentBankSession``
:rtype: ``osid.assessment.AssessmentBankSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_bank()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_bank()`` is ``true``.*
"""
return # osid.assessment.AssessmentBankSession
@abc.abstractmethod
def get_assessment_bank_assignment_session(self, proxy):
"""Gets the ``OsidSession`` associated with the assessment bank assignment service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentBankAssignmentSession``
:rtype: ``osid.assessment.AssessmentBankAssignmentSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_bank_assignment()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_bank_assignment()`` is ``true``.*
"""
return # osid.assessment.AssessmentBankAssignmentSession
@abc.abstractmethod
def get_assessment_smart_bank_session(self, bank_id, proxy):
"""Gets the ``OsidSession`` associated with the assessment smart banking service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentSmartBankSession``
:rtype: ``osid.assessment.AssessmentSmartBankSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_smart_bank()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_smart_bank()`` and
        ``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentSmartBankSession
@abc.abstractmethod
def get_assessment_basic_authoring_session(self, proxy):
"""Gets the ``OsidSession`` associated with the assessment authoring service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentBasicAuthoringSession``
:rtype: ``osid.assessment.AssessmentBasicAuthoringSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_basic_authoring()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_basic_authoring()`` is ``true``.*
"""
return # osid.assessment.AssessmentBasicAuthoringSession
@abc.abstractmethod
def get_assessment_basic_authoring_session_for_bank(self, bank_id, proxy):
"""Gets the ``OsidSession`` associated with the assessment authoring service for the given bank.
:param bank_id: the ``Id`` of a bank
:type bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentBasicAuthoringSession``
:rtype: ``osid.assessment.AssessmentBasicAuthoringSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_assessment_basic_authoring()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_basic_authoring()`` and
        ``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentBasicAuthoringSession
@abc.abstractmethod
def get_assessment_offered_lookup_session(self, proxy):
"""Gets the ``OsidSession`` associated with the assessment offered lookup service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentOfferedLookupSession``
:rtype: ``osid.assessment.AssessmentOfferedLookupSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_offered_lookup()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_offered_lookup()`` is ``true``.*
"""
return # osid.assessment.AssessmentOfferedLookupSession
@abc.abstractmethod
def get_assessment_offered_lookup_session_for_bank(self, bank_id, proxy):
"""Gets the ``OsidSession`` associated with the assessment offered lookup service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentOfferedLookupSession``
:rtype: ``osid.assessment.AssessmentOfferedLookupSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_offered_lookup()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_offered_lookup()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentOfferedLookupSession
@abc.abstractmethod
def get_assessment_offered_query_session(self, proxy):
"""Gets the ``OsidSession`` associated with the assessment offered query service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentOfferedQuerySession``
:rtype: ``osid.assessment.AssessmentOfferedQuerySession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_offered_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_offered_query()`` is ``true``.*
"""
return # osid.assessment.AssessmentOfferedQuerySession
@abc.abstractmethod
def get_assessment_offered_query_session_for_bank(self, bank_id, proxy):
"""Gets the ``OsidSession`` associated with the assessment offered query service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentOfferedQuerySession``
:rtype: ``osid.assessment.AssessmentOfferedQuerySession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_offered_query()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_offered_query()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentOfferedQuerySession
@abc.abstractmethod
def get_assessment_offered_search_session(self, proxy):
"""Gets the ``OsidSession`` associated with the assessment offered search service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentOfferedSearchSession``
:rtype: ``osid.assessment.AssessmentOfferedSearchSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_offered_search()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_offered_search()`` is ``true``.*
"""
return # osid.assessment.AssessmentOfferedSearchSession
@abc.abstractmethod
def get_assessment_offered_search_session_for_bank(self, bank_id, proxy):
"""Gets the ``OsidSession`` associated with the assessment offered search service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentOfferedSearchSession``
:rtype: ``osid.assessment.AssessmentOfferedSearchSession``
:raise: ``NotFound`` -- ``bank_id`` not found
        :raise: ``NullArgument`` -- ``bank_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_offered_search()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_offered_search()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentOfferedSearchSession
@abc.abstractmethod
def get_assessment_offered_admin_session(self, proxy):
"""Gets the ``OsidSession`` associated with the assessment offered administration service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentOfferedAdminSession``
:rtype: ``osid.assessment.AssessmentOfferedAdminSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_offered_admin()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_offered_admin()`` is ``true``.*
"""
return # osid.assessment.AssessmentOfferedAdminSession
@abc.abstractmethod
def get_assessment_offered_admin_session_for_bank(self, bank_id, proxy):
"""Gets the ``OsidSession`` associated with the assessment offered admin service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentOfferedAdminSession``
:rtype: ``osid.assessment.AssessmentOfferedAdminSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_offered_admin()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_offered_admin()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentOfferedAdminSession
@abc.abstractmethod
def get_assessment_offered_notification_session(self, assessment_offered_receiver, proxy):
"""Gets the notification session for notifications pertaining to offered assessment changes.
:param assessment_offered_receiver: the assessment offered receiver interface
:type assessment_offered_receiver: ``osid.assessment.AssessmentOfferedReceiver``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentOfferedNotificationSession``
:rtype: ``osid.assessment.AssessmentOfferedNotificationSession``
:raise: ``NullArgument`` -- ``assessment_offered_receiver`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_offered_notification()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_offered_notification()`` is ``true``.*
"""
return # osid.assessment.AssessmentOfferedNotificationSession
@abc.abstractmethod
def get_assessment_offered_notification_session_for_bank(self, assessment_offered_receiver, bank_id, proxy):
"""Gets the ``OsidSession`` associated with the offered assessment notification service for the given bank.
:param assessment_offered_receiver: the assessment offered receiver interface
:type assessment_offered_receiver: ``osid.assessment.AssessmentOfferedReceiver``
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
        :return: an ``AssessmentOfferedNotificationSession``
:rtype: ``osid.assessment.AssessmentOfferedNotificationSession``
        :raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``assessment_offered_receiver, bank_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_offered_notification()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_offered_notification()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentOfferedNotificationSession
@abc.abstractmethod
def get_assessment_offered_bank_session(self, proxy):
"""Gets the session for retrieving offered assessments to bank mappings.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentOfferedBankSession``
:rtype: ``osid.assessment.AssessmentOfferedBankSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_offered_bank()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_offered_bank()`` is ``true``.*
"""
return # osid.assessment.AssessmentOfferedBankSession
@abc.abstractmethod
def get_assessment_offered_bank_assignment_session(self, proxy):
"""Gets the session for assigning offered assessments to bank mappings.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentOfferedBankAssignmentSession``
:rtype: ``osid.assessment.AssessmentOfferedBankAssignmentSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_offered_bank_assignment()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_offered_bank_assignment()`` is ``true``.*
"""
return # osid.assessment.AssessmentOfferedBankAssignmentSession
@abc.abstractmethod
def get_assessment_offered_smart_bank_session(self, bank_id, proxy):
"""Gets the ``OsidSession`` associated with the assessment offered smart banking service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentOfferedSmartBankSession``
:rtype: ``osid.assessment.AssessmentOfferedSmartBankSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_offered_smart_bank()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_offered_smart_bank()`` and
        ``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentOfferedSmartBankSession
@abc.abstractmethod
def get_assessment_taken_lookup_session(self, proxy):
"""Gets the ``OsidSession`` associated with the assessment taken lookup service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentTakenLookupSession``
:rtype: ``osid.assessment.AssessmentTakenLookupSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_taken_lookup()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_taken_lookup()`` is ``true``.*
"""
return # osid.assessment.AssessmentTakenLookupSession
@abc.abstractmethod
def get_assessment_taken_lookup_session_for_bank(self, bank_id, proxy):
"""Gets the ``OsidSession`` associated with the assessment taken lookup service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentTakenLookupSession``
:rtype: ``osid.assessment.AssessmentTakenLookupSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_taken_lookup()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_taken_lookup()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentTakenLookupSession
@abc.abstractmethod
def get_assessment_taken_query_session(self, proxy):
"""Gets the ``OsidSession`` associated with the assessment taken query service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentTakenQuerySession``
:rtype: ``osid.assessment.AssessmentTakenQuerySession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_taken_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_taken_query()`` is ``true``.*
"""
return # osid.assessment.AssessmentTakenQuerySession
@abc.abstractmethod
def get_assessment_taken_query_session_for_bank(self, bank_id, proxy):
"""Gets the ``OsidSession`` associated with the assessment taken query service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentTakenQuerySession``
:rtype: ``osid.assessment.AssessmentTakenQuerySession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_taken_query()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_taken_query()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentTakenQuerySession
@abc.abstractmethod
def get_assessment_taken_search_session(self, proxy):
"""Gets the ``OsidSession`` associated with the assessment taken search service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentTakenSearchSession``
:rtype: ``osid.assessment.AssessmentTakenSearchSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_taken_search()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_taken_search()`` is ``true``.*
"""
return # osid.assessment.AssessmentTakenSearchSession
@abc.abstractmethod
def get_assessment_taken_search_session_for_bank(self, bank_id, proxy):
"""Gets the ``OsidSession`` associated with the assessment taken search service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentTakenSearchSession``
:rtype: ``osid.assessment.AssessmentTakenSearchSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_taken_search()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_taken_search()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentTakenSearchSession
@abc.abstractmethod
def get_assessment_taken_admin_session(self, proxy):
"""Gets the ``OsidSession`` associated with the assessment taken administration service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentTakenAdminSession``
:rtype: ``osid.assessment.AssessmentTakenAdminSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_taken_admin()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_taken_admin()`` is ``true``.*
"""
return # osid.assessment.AssessmentTakenAdminSession
@abc.abstractmethod
def get_assessment_taken_admin_session_for_bank(self, bank_id, proxy):
"""Gets the ``OsidSession`` associated with the assessment taken admin service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
        :return: an ``AssessmentTakenAdminSession``
:rtype: ``osid.assessment.AssessmentTakenAdminSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_taken_admin()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_taken_admin()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentTakenAdminSession
@abc.abstractmethod
def get_assessment_taken_notification_session(self, assessment_taken_receiver, proxy):
"""Gets the notification session for notifications pertaining to taken assessment changes.
:param assessment_taken_receiver: the assessment taken receiver interface
:type assessment_taken_receiver: ``osid.assessment.AssessmentTakenReceiver``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentTakenNotificationSession``
:rtype: ``osid.assessment.AssessmentTakenNotificationSession``
:raise: ``NullArgument`` -- ``assessment_taken_receiver`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_taken_notification()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_taken_notification()`` is ``true``.*
"""
return # osid.assessment.AssessmentTakenNotificationSession
@abc.abstractmethod
def get_assessment_taken_notification_session_for_bank(self, assessment_taken_receiver, bank_id, proxy):
"""Gets the ``OsidSession`` associated with the taken assessment notification service for the given bank.
:param assessment_taken_receiver: the assessment taken receiver interface
:type assessment_taken_receiver: ``osid.assessment.AssessmentTakenReceiver``
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentTakenNotificationSession``
:rtype: ``osid.assessment.AssessmentTakenNotificationSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``assessment_taken_receiver, bank_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- ``unable to complete request``
:raise: ``Unimplemented`` -- ``supports_assessment_taken_notification()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_taken_notification()`` and
``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentTakenNotificationSession
@abc.abstractmethod
def get_assessment_taken_bank_session(self, proxy):
"""Gets the session for retrieving taken assessments to bank mappings.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentTakenBankSession``
:rtype: ``osid.assessment.AssessmentTakenBankSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_taken_bank()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_taken_bank()`` is ``true``.*
"""
return # osid.assessment.AssessmentTakenBankSession
@abc.abstractmethod
def get_assessment_taken_bank_assignment_session(self, proxy):
"""Gets the session for assigning taken assessments to bank mappings.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentTakenBankAssignmentSession``
:rtype: ``osid.assessment.AssessmentTakenBankAssignmentSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_taken_bank_assignment()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_taken_bank_assignment()`` is ``true``.*
"""
return # osid.assessment.AssessmentTakenBankAssignmentSession
@abc.abstractmethod
def get_assessment_taken_smart_bank_session(self, bank_id, proxy):
"""Gets the ``OsidSession`` associated with the assessment taken smart banking service for the given bank.
:param bank_id: the ``Id`` of the bank
:type bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``AssessmentTakenSmartBankSession``
:rtype: ``osid.assessment.AssessmentTakenSmartBankSession``
:raise: ``NotFound`` -- ``bank_id`` not found
:raise: ``NullArgument`` -- ``bank_id`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_taken_smart_bank()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_assessment_taken_smart_bank()`` and
        ``supports_visible_federation()`` are ``true``.*
"""
return # osid.assessment.AssessmentTakenSmartBankSession
@abc.abstractmethod
def get_bank_lookup_session(self, proxy):
"""Gets the OsidSession associated with the bank lookup service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: a ``BankLookupSession``
:rtype: ``osid.assessment.BankLookupSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_bank_lookup() is false``
*compliance: optional -- This method must be implemented if
``supports_bank_lookup()`` is true.*
"""
return # osid.assessment.BankLookupSession
@abc.abstractmethod
def get_bank_query_session(self, proxy):
"""Gets the OsidSession associated with the bank query service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: a ``BankQuerySession``
:rtype: ``osid.assessment.BankQuerySession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_bank_query() is false``
*compliance: optional -- This method must be implemented if
``supports_bank_query()`` is true.*
"""
return # osid.assessment.BankQuerySession
@abc.abstractmethod
def get_bank_search_session(self, proxy):
"""Gets the OsidSession associated with the bank search service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: a ``BankSearchSession``
:rtype: ``osid.assessment.BankSearchSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_bank_search() is false``
*compliance: optional -- This method must be implemented if
``supports_bank_search()`` is true.*
"""
return # osid.assessment.BankSearchSession
@abc.abstractmethod
def get_bank_admin_session(self, proxy):
"""Gets the OsidSession associated with the bank administration service.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: a ``BankAdminSession``
:rtype: ``osid.assessment.BankAdminSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_bank_admin() is false``
*compliance: optional -- This method must be implemented if
``supports_bank_admin()`` is true.*
"""
return # osid.assessment.BankAdminSession
@abc.abstractmethod
def get_bank_notification_session(self, bank_receiver, proxy):
"""Gets the notification session for notifications pertaining to bank service changes.
:param bank_receiver: the bank receiver interface
:type bank_receiver: ``osid.assessment.BankReceiver``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: a ``BankNotificationSession``
:rtype: ``osid.assessment.BankNotificationSession``
:raise: ``NullArgument`` -- ``bank_receiver`` or ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_bank_notification() is false``
*compliance: optional -- This method must be implemented if
``supports_bank_notification()`` is true.*
"""
return # osid.assessment.BankNotificationSession
@abc.abstractmethod
def get_bank_hierarchy_session(self, proxy):
"""Gets the session traversing bank hierarchies.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: a ``BankHierarchySession``
:rtype: ``osid.assessment.BankHierarchySession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_bank_hierarchy() is false``
*compliance: optional -- This method must be implemented if
``supports_bank_hierarchy()`` is true.*
"""
return # osid.assessment.BankHierarchySession
@abc.abstractmethod
def get_bank_hierarchy_design_session(self, proxy):
"""Gets the session designing bank hierarchies.
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
        :return: a ``BankHierarchyDesignSession``
:rtype: ``osid.assessment.BankHierarchyDesignSession``
:raise: ``NullArgument`` -- ``proxy`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_bank_hierarchy_design() is false``
*compliance: optional -- This method must be implemented if
``supports_bank_hierarchy_design()`` is true.*
"""
return # osid.assessment.BankHierarchyDesignSession
@abc.abstractmethod
def get_assessment_authoring_proxy_manager(self):
"""Gets an ``AssessmentAuthoringProxyManager``.
:return: an ``AssessmentAuthoringProxyManager``
:rtype: ``osid.assessment.authoring.AssessmentAuthoringProxyManager``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_authoring() is false``
*compliance: optional -- This method must be implemented if
``supports_assessment_authoring()`` is true.*
"""
return # osid.assessment.authoring.AssessmentAuthoringProxyManager
assessment_authoring_proxy_manager = property(fget=get_assessment_authoring_proxy_manager)
@abc.abstractmethod
def get_assessment_batch_proxy_manager(self):
"""Gets an ``AssessmentBatchProxyManager``.
:return: an ``AssessmentBatchProxyManager``
:rtype: ``osid.assessment.batch.AssessmentBatchProxyManager``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_assessment_batch() is false``
*compliance: optional -- This method must be implemented if
``supports_assessment_batch()`` is true.*
"""
return # osid.assessment.batch.AssessmentBatchProxyManager
assessment_batch_proxy_manager = property(fget=get_assessment_batch_proxy_manager)
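# A minimal, hypothetical usage sketch for the proxy manager defined above (not
# part of the OSID specification). ``AssessmentProxyManager`` stands in for a
# concrete provider class, and ``my_proxy``/``bank_id`` are assumed to come from
# the runtime; only the accessor and profile method names are taken from the
# abstract API documented above:
#
#     manager = AssessmentProxyManager()
#     if manager.supports_assessment_admin():
#         admin = manager.get_assessment_admin_session_for_bank(bank_id, my_proxy)
#     bank_lookup = manager.get_bank_lookup_session(my_proxy)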
|
mitsei/dlkit
|
dlkit/abstract_osid/assessment/managers.py
|
Python
|
mit
| 156,558
|
# Copyright (c) 2014 Cisco Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from nova.scheduler import solvers
from nova.scheduler.solvers.constraints import active_hosts_constraint
from nova import test
from nova.tests.scheduler import solver_scheduler_fakes as fakes
class TestActiveHostsConstraint(test.NoDBTestCase):
def setUp(self):
super(TestActiveHostsConstraint, self).setUp()
self.constraint_cls = active_hosts_constraint.ActiveHostsConstraint
self._generate_fake_constraint_input()
def _generate_fake_constraint_input(self):
self.fake_variables = solvers.BaseVariables()
self.fake_variables.host_instance_matrix = [
['h0i0', 'h0i1', 'h0i2'],
['h1i0', 'h1i1', 'h1i2']]
self.fake_filter_properties = {
'instance_uuids': ['fake_uuid_%s' % x for x in range(3)],
'num_instances': 3}
host1 = fakes.FakeSolverSchedulerHostState('host1', 'node1', {})
host2 = fakes.FakeSolverSchedulerHostState('host2', 'node1', {})
self.fake_hosts = [host1, host2]
@mock.patch('nova.scheduler.solvers.constraints.'
'active_hosts_constraint.ActiveHostsConstraint.'
'host_filter_cls')
def test_active_hosts_constraint_get_components(self, mock_filter_cls):
expected_cons_vars = [['h1i0'], ['h1i1'], ['h1i2']]
expected_cons_coeffs = [[1], [1], [1]]
expected_cons_consts = [0, 0, 0]
expected_cons_ops = ['==', '==', '==']
mock_filter = mock_filter_cls.return_value
mock_filter.host_passes.side_effect = [True, False]
cons_vars, cons_coeffs, cons_consts, cons_ops = (
self.constraint_cls().get_components(self.fake_variables,
self.fake_hosts, self.fake_filter_properties))
self.assertEqual(expected_cons_vars, cons_vars)
self.assertEqual(expected_cons_coeffs, cons_coeffs)
self.assertEqual(expected_cons_consts, cons_consts)
self.assertEqual(expected_cons_ops, cons_ops)
|
CiscoSystems/nova-solver-scheduler
|
nova/tests/scheduler/solvers/constraints/test_active_hosts_constraint.py
|
Python
|
apache-2.0
| 2,639
|
#!/usr/bin/env python
"""
Regex-based word tokenizers.
Note that small/full/half-width character variants are *not* covered.
If a text were to contain such characters, normalize it first.
"""
from __future__ import absolute_import, unicode_literals
import codecs
try:
from html import unescape
except ImportError:
# Python <= 3.3 doesn't have html.unescape
try:
from html.parser import HTMLParser
except ImportError:
# Python 2.x
from HTMLParser import HTMLParser
unescape = HTMLParser().unescape
from regex import compile, UNICODE, VERBOSE
try:
from segtok.segmenter import SENTENCE_TERMINALS, HYPHENS
except ImportError:
# if used as command-line tool
# noinspection PyUnresolvedReferences
from .segmenter import SENTENCE_TERMINALS, HYPHENS
__author__ = 'Florian Leitner <florian.leitner@gmail.com>'
APOSTROPHES = '\'\u02BC\u2019\u2032'
"""All apostrophe-like marks, including the ASCII "single quote"."""
APOSTROPHE = r"[\u02BC\u2019\u2032]"
"""Any apostrophe-like marks, including "prime" but not the ASCII "single quote"."""
LINEBREAK = r'(?:\r\n|\n|\r|\u2028)'
"""Any valid linebreak sequence (Windows, Unix, Mac, or U+2028)."""
LETTER = r'[\p{Ll}\p{Lm}\p{Lt}\p{Lu}]'
"""Any Unicode letter character that can form part of a word: Ll, Lm, Lt, Lu."""
NUMBER = r'[\p{Nd}\p{Nl}]'
"""Any Unicode number character: Nd or Nl."""
POWER = r'\u207B?[\u00B9\u00B2\u00B3]'
"""Superscript 1, 2, and 3, optionally prefixed with a minus sign."""
SUBDIGIT = r'[\u2080-\u2089]'
"""Subscript digits."""
ALNUM = LETTER[:-1] + NUMBER[1:]
"""Any alphanumeric Unicode character: letter or number."""
HYPHEN = r'[%s]' % HYPHENS
SPACE = r'[\p{Zs}\t]'
"""Any unicode space character plus the (horizontal) tab."""
APO_MATCHER = compile(APOSTROPHE, UNICODE)
"""Matcher for any apostrophe."""
HYPHENATED_LINEBREAK = compile(
r'({alnum}{hyphen}){space}*?{linebreak}{space}*?({alnum})'.format(
alnum=ALNUM, hyphen=HYPHEN, linebreak=LINEBREAK, space=SPACE
), UNICODE
)
"""
The pattern matches any alphanumeric Unicode character, followed by a hyphen,
a single line-break surrounded by optional (non-breaking) spaces,
and terminates with an alphanumeric character on the next line.
The opening char and hyphen as well as the terminating char are captured in two groups.
"""
IS_POSSESSIVE = compile(r"{alnum}+(?:{hyphen}{alnum}+)*(?:{apo}[sS]|[sS]{apo})$".format(
alnum=ALNUM, hyphen=HYPHEN, apo="['" + APOSTROPHE[1:]
), UNICODE
)
"""A pattern that matches English words with a possessive s terminal form."""
IS_CONTRACTION = compile(r"{alnum}+(?:{hyphen}{alnum}+)*{apo}(?:d|ll|m|re|s|t|ve)$".format(
alnum=ALNUM, hyphen=HYPHEN, apo="['" + APOSTROPHE[1:]
), UNICODE
)
"""A pattern that matches tokens with valid English contractions ``'(d|ll|m|re|s|t|ve)``."""
def split_possessive_markers(tokens):
"""
A function to split possessive markers at the end of alphanumeric (and hyphenated) tokens.
    Takes the output of any of the tokenizer functions and produces an updated list.
To use it, simply wrap the tokenizer function, for example::
>>> my_sentence = "This is Fred's latest book."
>>> split_possessive_markers(word_tokenizer(my_sentence))
['This', 'is', 'Fred', "'s", 'latest', 'book', '.']
:param tokens: a list of tokens
:returns: an updated list if a split was made or the original list otherwise
"""
idx = -1
for token in list(tokens):
idx += 1
if IS_POSSESSIVE.match(token) is not None:
if token[-1].lower() == 's' and token[-2] in APOSTROPHES:
tokens.insert(idx, token[:-2])
idx += 1
tokens[idx] = token[-2:]
elif token[-2].lower() == 's' and token[-1] in APOSTROPHES:
tokens.insert(idx, token[:-1])
idx += 1
tokens[idx] = token[-1:]
return tokens
def split_contractions(tokens):
"""
A function to split apostrophe contractions at the end of alphanumeric (and hyphenated) tokens.
    Takes the output of any of the tokenizer functions and produces an updated list.
:param tokens: a list of tokens
:returns: an updated list if a split was made or the original list otherwise
"""
idx = -1
for token in list(tokens):
idx += 1
if IS_CONTRACTION.match(token) is not None:
length = len(token)
if length > 1:
for pos in range(length - 1, -1, -1):
if token[pos] in APOSTROPHES:
if 2 < length and pos + 2 == length and token[-1] == 't' and token[pos - 1] == 'n':
pos -= 1
tokens.insert(idx, token[:pos])
idx += 1
tokens[idx] = token[pos:]
return tokens
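# A brief illustrative sketch mirroring the doctest shown for
# ``split_possessive_markers`` above (the sentence is made up for illustration):
#
#     >>> split_contractions(word_tokenizer("We don't know."))
#     ['We', 'do', "n't", 'know', '.']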
def _matches(regex):
"""Regular expression compiling function decorator."""
def match_decorator(fn):
automaton = compile(regex, UNICODE | VERBOSE)
fn.split = automaton.split
fn.match = automaton.match
return fn
return match_decorator
@_matches(r'\s+')
def space_tokenizer(sentence):
"""
For a given input `sentence`, return a list of its tokens.
Split on Unicode spaces ``\\s+`` (i.e., any kind of **Unicode** space character).
The separating space characters are not included in the resulting token list.
"""
return [token for token in space_tokenizer.split(sentence) if token]
@_matches(r'(%s+)' % ALNUM)
def symbol_tokenizer(sentence):
"""
The symbol tokenizer extends the :func:`space_tokenizer` by separating alphanumerics.
Separates alphanumeric Unicode character sequences in already space-split tokens.
"""
return [token for span in space_tokenizer(sentence) for
token in symbol_tokenizer.split(span) if token]
@_matches(r"""((?:
    # Dots, except ellipsis
    {alnum} \. (?!\.\.)
    | # Comma, surrounded by digits (e.g., chemicals) or letters
    {alnum} , (?={alnum})
    | # Colon, surrounded by digits (e.g., time, references)
    {number} : (?={number})
    | # Hyphen, surrounded by digits (e.g., DNA endings: "5'-ACGT-3'") or letters
    {alnum} {apo}? {hyphen} (?={alnum}) # incl. optional apostrophe for DNA segments
    | # Apostrophes, non-consecutive
    {apo} (?!{apo})
    | # ASCII single quote, surrounded by digits or letters (no dangling allowed)
    {alnum} ' (?={alnum})
    | # ASCII single quote after an s and at the token's end
    s ' $
    | # Terminal dimensions (superscript minus, 1, 2, and 3) attached to physical units
    # size-prefix unit-acronym dimension
    \b [yzafpn\u00B5mcdhkMGTPEZY]? {letter}{{1,3}} {power} $
    | # Atom counts (subscript numbers) and ionization states (optional superscript
    # 2 or 3 followed by a + or -) are attached to valid fragments of a chemical formula
    \b (?:[A-Z][a-z]?|[\)\]])+ {subdigit}+ (?:[\u00B2\u00B3]?[\u207A\u207B])?
    | # Any (Unicode) letter, digit, or the underscore
    {alnum}
    )+)""".format(alnum=ALNUM, apo=APOSTROPHE, power=POWER, subdigit=SUBDIGIT,
                  hyphen=HYPHEN, letter=LETTER, number=NUMBER))
def word_tokenizer(sentence):
"""
This tokenizer extends the alphanumeric :func:`symbol_tokenizer` by splitting fewer cases:
1. Dots appearing after a letter are maintained as part of the word, except for the last word
in a sentence if that dot is the sentence terminal. Therefore, abbreviation marks (words
containing or ending in a ``.``, like "i.e.") remain intact and URL or ID segments remain
complete ("www.ex-ample.com", "EC1.2.3.4.5", etc.). The only dots that never are attached
are triple dots (``...``; ellipsis).
2. Commas surrounded by alphanumeric characters are maintained in the word, too, e.g. ``a,b``.
Colons surrounded by digits are maintained, e.g., 'at 12:30pm' or 'Isaiah 12:3'.
Commas, semi-colons, and colons dangling at the end of a token are always spliced off.
3. Any two alphanumeric letters that are separated by a single hyphen are joined together;
Those "inner" hyphens may optionally be followed by a linebreak surrounded by spaces;
The spaces will be removed, however. For example, ``Hel- \\r\\n \t lo`` contains a (Windows)
linebreak and will be returned as ``Hel-lo``.
4. Apostrophes are always allowed in words as long as they are not repeated; The single quote
ASCII letter ``'`` is only allowed as a terminal apostrophe after the letter ``s``,
    otherwise it must be surrounded by letters. To support DNA and chemicals, an apostrophe
(prime) may be located before the hyphen, as in the single token "5'-ACGT-3'" (if any
non-ASCII hyphens are used instead of the shown single quote).
5. Superscript 1, 2, and 3, optionally prefixed with a superscript minus, are attached to a
word if it is no longer than 3 letters (optionally 4 if the first letter is a power prefix
in the range from yocto, y (10^-24) to yotta, Y (10^+24)).
6. Subscript digits are attached if prefixed with letters that look like a chemical formula.
"""
pruned = HYPHENATED_LINEBREAK.sub(r'\1\2', sentence)
tokens = [token for span in space_tokenizer(pruned) for
token in word_tokenizer.split(span) if token]
# splice the sentence terminal off the last word/token if it has any at its borders
# only look for the sentence terminal in the last three tokens
for idx, word in enumerate(reversed(tokens[-3:]), 1):
if (word_tokenizer.match(word) and not APO_MATCHER.match(word)) or \
any(t in word for t in SENTENCE_TERMINALS):
last = len(word) - 1
if 0 == last or u'...' == word:
# any case of "..." or any single char (last == 0)
pass # leave the token as it is
elif any(word.rfind(t) == last for t in SENTENCE_TERMINALS):
# "stuff."
tokens[-idx] = word[:-1]
tokens.insert(len(tokens) - idx + 1, word[-1])
elif any(word.find(t) == 0 for t in SENTENCE_TERMINALS):
# ".stuff"
tokens[-idx] = word[0]
                tokens.insert(len(tokens) - idx + 1, word[1:])
break
# keep splicing off any dangling commas and (semi-) colons
dirty = True
while dirty:
dirty = False
for idx, word in enumerate(reversed(tokens), 1):
while len(word) > 1 and word[-1] in u',;:':
char = word[-1] # the dangling comma/colon
word = word[:-1]
tokens[-idx] = word
tokens.insert(len(tokens) - idx + 1, char)
idx += 1
dirty = True
if dirty:
break # restart check to avoid index errors
return tokens
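# An illustrative sketch of rules 1 and 3 above (made-up input; the final dot is
# treated as the sentence terminal and spliced off):
#
#     >>> word_tokenizer("Hel- \r\n lo maps www.ex-ample.com.")
#     ['Hel-lo', 'maps', 'www.ex-ample.com', '.']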
@_matches(r"""
(?<=^|[\s<"'(\[{]) # visual border
( # RFC3986-like URIs:
    [A-Za-z]+               # required scheme
:// # required hier-part
(?:[^@]+@)? # optional user
(?:[\w-]+\.)+\w+ # required host
(?::\d+)? # optional port
(?:\/[^?\#\s'">)\]}]+)? # optional path
(?:\?[^\#\s'">)\]}]+)? # optional query
(?:\#[^\s'">)\]}]+)? # optional fragment
| # simplified e-Mail addresses:
[\w.#$%&'*+/=!?^`{|}~-]+ # local part
@ # klammeraffe
(?:[\w-]+\.)+ # (sub-)domain(s)
\w+ # TLD
)(?=[\s>"')\]}]|$) # visual border
""")
def web_tokenizer(sentence):
"""
The web tokenizer works like the :func:`word_tokenizer`, but does not split URIs or
e-mail addresses. It also un-escapes all escape sequences (except in URIs or email addresses).
"""
return [token for i, span in enumerate(web_tokenizer.split(sentence))
for token in ((span,) if i % 2 else word_tokenizer(unescape(span)))]
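# An illustrative sketch (hypothetical URL) of how URIs survive tokenization
# while the surrounding text is word-tokenized:
#
#     >>> web_tokenizer("see http://example.com/a?b=1 for details")
#     ['see', 'http://example.com/a?b=1', 'for', 'details']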
def main():
# tokenize one sentence per line input
from argparse import ArgumentParser
from sys import argv, stdout, stdin, stderr, getdefaultencoding, version_info
from os import path, linesep
def _tokenize(sentence, tokenizer):
sep = None
for token in tokenizer(sentence):
if sep is not None:
stdout.write(sep)
stdout.write(token)
sep = ' '
stdout.write(linesep)
NUM_TOKENIZERS = 4
SPACE, ALNUM, TOKEN, WEB = list(range(NUM_TOKENIZERS))
TOKENIZER = [None] * NUM_TOKENIZERS
TOKENIZER[SPACE] = space_tokenizer
TOKENIZER[ALNUM] = symbol_tokenizer
TOKENIZER[TOKEN] = word_tokenizer
TOKENIZER[WEB] = web_tokenizer
parser = ArgumentParser(usage='%(prog)s [--mode] [FILE ...]',
description=__doc__, prog=path.basename(argv[0]),
epilog='default tokenizer: token; default encoding: ' +
getdefaultencoding())
parser.add_argument('files', metavar='FILE', nargs='*',
help='One-Sentence-Per-Line file; if absent, read from STDIN')
parser.add_argument('--possessive-marker', '-p', action='store_true', # TODO
help='split off the possessive marker from alphanumeric tokens')
parser.add_argument('--split-contractions', '-c', action='store_true', # TODO
help='split contractions like "don\'t" in alphanumeric tokens in two')
parser.add_argument('--encoding', '-e', help='define encoding to use')
mode = parser.add_mutually_exclusive_group()
parser.set_defaults(mode=TOKEN)
mode.add_argument('--space', '-s', action='store_const', dest='mode', const=SPACE,
help=space_tokenizer.__doc__)
mode.add_argument('--alnum', '-a', action='store_const', dest='mode', const=ALNUM,
help=symbol_tokenizer.__doc__)
mode.add_argument('--token', '-t', action='store_const', dest='mode', const=TOKEN,
help=word_tokenizer.__doc__)
mode.add_argument('--web', '-w', action='store_const', dest='mode', const=WEB,
help=web_tokenizer.__doc__)
args = parser.parse_args()
tokenizer_func = TOKENIZER[args.mode]
# fix broken Unicode handling in Python 2.x
# see http://www.macfreek.nl/memory/Encoding_of_Python_stdout
if args.encoding or version_info < (3, 0):
if version_info >= (3, 0):
stdout = stdout.buffer
stdin = stdin.buffer
stdout = codecs.getwriter(args.encoding or 'utf-8')(stdout, 'xmlcharrefreplace')
stdin = codecs.getreader(args.encoding or 'utf-8')(stdin, 'xmlcharrefreplace')
if not args.encoding:
stderr.write('wrapped tokenizer stdio with UTF-8 de/encoders')
stderr.write(linesep)
if args.split_contractions:
tokenizer = lambda sentence: split_contractions(tokenizer_func(sentence))
elif args.possessive_marker:
tokenizer = lambda sentence: split_possessive_markers(tokenizer_func(sentence))
else:
tokenizer = tokenizer_func
if args.files:
for txt_file_path in args.files:
with codecs.open(txt_file_path, 'r', encoding=(args.encoding or 'utf-8')) as fp:
for line in fp:
_tokenize(line, tokenizer)
else:
for line in stdin:
_tokenize(line, tokenizer)
if __name__ == '__main__':
main()
|
ethancaballero/neural-engineers-first-attempt
|
load_d2c_data/segtok/tokenizer.py
|
Python
|
mit
| 15,681
|
"""Domain models for the application."""
from .move import Move, Moves
from .turn import Turn
from .player import Player, Players
from .board import Board
from .game import Game
|
jacebrowning/gridcommand
|
gridcommand/domain/__init__.py
|
Python
|
lgpl-3.0
| 179
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/api/source_info.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
b'\n\x1cgoogle/api/source_info.proto\x12\ngoogle.api\x1a\x19google/protobuf/any.proto"8\n\nSourceInfo\x12*\n\x0csource_files\x18\x01 \x03(\x0b\x32\x14.google.protobuf.AnyBq\n\x0e\x63om.google.apiB\x0fSourceInfoProtoP\x01ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\xa2\x02\x04GAPIb\x06proto3'
)
_SOURCEINFO = DESCRIPTOR.message_types_by_name["SourceInfo"]
SourceInfo = _reflection.GeneratedProtocolMessageType(
"SourceInfo",
(_message.Message,),
{
"DESCRIPTOR": _SOURCEINFO,
"__module__": "google.api.source_info_pb2"
# @@protoc_insertion_point(class_scope:google.api.SourceInfo)
},
)
_sym_db.RegisterMessage(SourceInfo)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b"\n\016com.google.apiB\017SourceInfoProtoP\001ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\242\002\004GAPI"
_SOURCEINFO._serialized_start = 71
_SOURCEINFO._serialized_end = 127
# @@protoc_insertion_point(module_scope)
|
googleapis/python-api-common-protos
|
google/api/source_info_pb2.py
|
Python
|
apache-2.0
| 2,292
|
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
import os
import logging
logger = logging.getLogger(__name__)
def create_directory(path):
"""
Creates a directory if it doesn't already exist.
"""
# Check if path is a file_path or a dir_path. Dir path is a string that
# ends with os.sep
if path[-1] != os.sep:
path, file_name = os.path.split(path)
if not os.path.exists(path):
logger.info("Creating directory: %s", path)
os.makedirs(path)
def get_unique_postfix(file_path, extension):
postfix = 0
new_path = file_path + str(postfix) + extension
while os.path.isfile(new_path):
postfix += 1
new_path = file_path + str(postfix) + extension
return new_path
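# A small illustrative sketch (hypothetical paths): if /tmp/report0.csv and
# /tmp/report1.csv already exist, the call below returns the first free name.
#
#     get_unique_postfix('/tmp/report', '.csv')  # -> '/tmp/report2.csv'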
def delete_directory_contents(path):
for file_name in os.listdir(path):
os.remove(path + os.sep + file_name)
|
NejcZupec/ggrc-core
|
test/selenium/src/lib/file_ops.py
|
Python
|
apache-2.0
| 886
|
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import auth
from tempest import config
from tempest import exceptions
CONF = config.CONF
class Manager(object):
"""
Base manager class
Manager objects are responsible for providing a configuration object
and a client object for a test case to use in performing actions.
"""
def __init__(self, credentials=None):
"""
We allow overriding of the credentials used within the various
client classes managed by the Manager object. Left as None, the
standard username/password/tenant_name[/domain_name] is used.
:param credentials: Override of the credentials
"""
self.auth_version = CONF.identity.auth_version
if credentials is None:
self.credentials = auth.get_default_credentials('user')
else:
self.credentials = credentials
# Check if passed or default credentials are valid
if not self.credentials.is_valid():
raise exceptions.InvalidCredentials()
# Creates an auth provider for the credentials
self.auth_provider = self.get_auth_provider(self.credentials)
# FIXME(andreaf) unused
self.client_attr_names = []
@classmethod
def get_auth_provider_class(cls, credentials):
if isinstance(credentials, auth.KeystoneV3Credentials):
return auth.KeystoneV3AuthProvider
else:
return auth.KeystoneV2AuthProvider
def get_auth_provider(self, credentials):
if credentials is None:
raise exceptions.InvalidCredentials(
'Credentials must be specified')
auth_provider_class = self.get_auth_provider_class(credentials)
return auth_provider_class(
interface=getattr(self, 'interface', None),
credentials=credentials)
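# A minimal usage sketch (assuming a configured tempest environment; the
# variable names are illustrative). Passing no credentials falls back to the
# defaults:
#
#     manager = Manager()  # uses auth.get_default_credentials('user')
#     provider = manager.auth_provider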
|
afaheem88/tempest_neutron
|
tempest/manager.py
|
Python
|
apache-2.0
| 2,462
|
l = int(input())
h = int(input())
t = str(input())
alphabet = [str(input()) for i in range(h)]
for i in range(h):
for char in t.lower():
if char >= 'a' and char <= 'z':
x = ord(char) - ord('a')
else:
x = ord('z') - ord('a') + 1
        print(alphabet[i][x*l : x*l+l], end='') # print this glyph's slice, from x*l to x*l+l
print('')
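# Worked example (hypothetical values): with l = 4 and the glyph for 'E' at
# alphabet index x = 4, each row prints the slice alphabet[i][16:20], i.e. the
# four columns belonging to that letter.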
|
izanbf1803/Codingame-medium-solutions
|
easy/ASCII Art.py
|
Python
|
mit
| 367
|
#@PydevCodeAnalysisIgnore
from django.utils.translation import ugettext_lazy as _
from django.conf.urls import patterns, url
urlpatterns = patterns('onlinejudge.views',
url(_(r"^$"), "index", {}, "index"),
url(_(r"^challenge/(?P<slug>[-\w]+)/$"), "challenge", {}, "challenge"),
url(_(r"^challenge/(?P<slug>[-\w]+)/update/$"), "challenge_update", {}, "challenge_update"),
url(_(r"^challenge/(?P<slug>[-\w]+)/report/$"), "challenge_report", {}, "challenge_report"),
url(_(r"^contest/(?P<slug>[-\w]+)/report/$"), "contest_report", {}, "contest_report"),
url(_(r"^submission/(?P<id>[\d]+)/grade/$"), "grade_submission", {}, "grade_submission"),
# ajax
url(_(r"^challenge/(?P<slug>[-\w]+)/template/$"), "challenge_submission_template", {}, "challenge_submission_template"),
url(_(r"^challenge/(?P<slug>[-\w]+)/submission/test/$"), "submission_test", {}, "submission_test"),
url(_(r"^challenge/(?P<slug>[-\w]+)/submission/results/$"), "submission_results", {}, "submission_results"),
)
|
darbula/django-onlinejudge
|
onlinejudge/urls.py
|
Python
|
bsd-3-clause
| 1,275
|
# -*- coding: UTF-8 -*-
"""
This module contains functions for calculating evaluation metrics for the generated service recommendations.
"""
import numpy
import pandas
runtime_metrics = ["Training time", "Overall testing time", "Individual testing time"]
quality_metrics = ["Recall", "Precision", "F1", "# of recommendations"]
def results_as_dataframe(user_actions, recommendations):
"""
Converts the recommendation results into a pandas dataframe for easier evaluation.
@param user_actions: A list of the actually performed user actions.
@param recommendations: For each of the performed actions the list of calculated service recommendations.
@return: A pandas dataframe that has as index the performed user actions (there is one row per action). The first
column contains for each action the highest scoring recommendation, the second column contains the second best
recommendation etc.
"""
results = pandas.DataFrame(recommendations, index=pandas.Index(user_actions, name="Actual action"))
results.columns = [(r+1) for r in range(len(results.columns))]
return results
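# A minimal sketch (illustrative action names) of the frame this produces:
#
#     >>> results_as_dataframe(["tv_on"], [["tv_on", "radio_on"]])
#                        1         2
#     Actual action
#     tv_on          tv_on  radio_on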
class QualityMetricsCalculator():
"""
This is a utility class that contains a number of methods for calculating overall quality metrics for the produced
recommendations. In general these methods produce pandas dataframes with several rows, where each row corresponds
to one "cutoff" point. For example, a cutoff "4" means that the system cuts the number of recommendations at four,
i.e. the user is shown at most four recommendations. If some post-processing method was used (e.g. show fewer
recommendations if the recommendation conflict is low), then it can happen that fewer than four recommendations
are shown. For reference, the column "# of recommendations" lists the average of the number of recommendations that
were actually shown to the user.
"""
def __init__(self, actual_actions, recommendations):
"""
        Initialize the calculation of the quality metrics.
@param actual_actions: A list of strings, each representing one actual user action.
@param recommendations: A list of lists of strings with the same length as actual_actions. Each list of
strings contains the calculated recommendations for the corresponding actual user action.
@return:
"""
self.results = results_as_dataframe(actual_actions, recommendations)
def __unique_actions__(self):
"""
It can happen that one potential user action never happened, but that the corresponding service was recommended.
To be able to count these false positives, we must calculate the list of all potential actions.
"""
occurring_actions = set(self.results.index.values)
occurring_services = pandas.melt(self.results).dropna()["value"]
occurring_services = set(occurring_services.unique())
return sorted(occurring_actions | occurring_services)
def true_positives(self, action):
"""
Counts how often the given action was recommended correctly (true positives, TP).
@param action: The name of the user action for which to count true positives.
@return: A pandas dataset with column TP and several rows, first row lists #TP at cutoff "1", the second row at
cutoff "2", etc.
"""
#get all rows where the actual action corresponds to the given action
r = self.results[self.results.index == action]
if len(r) == 0:
#if there are no such rows, then we have zero true positives, fill result dataframe with zeroes
true_positives = pandas.Series(0.0, index=self.results.columns)
else:
#if recommendation matches the action, set column to "1" (true positive), else set to "0" (false negative)
r = r.applymap(lambda col: 1 if col == action else 0).fillna(0)
#count how many true positives there are in each column
r = r.sum()
#if have a true positive for n-th recommendation, then also have true positive for n+1, n+2 etc
#-> calculate cumulative sum
true_positives = r.cumsum(axis=0).apply(float)
true_positives = pandas.DataFrame(true_positives, columns=["TP"])
true_positives.index.name = "cutoff"
return true_positives
def true_positives_for_all(self):
"""
Create a matrix that contains information about true positives for all possible actions.
        @return: A pandas dataframe with one column for each action, first row lists #TP at cutoff "1", the second row at
cutoff "2", etc.
"""
tp = [self.true_positives(action) for action in self.__unique_actions__()]
tp = pandas.concat(tp, axis=1)
tp.columns = self.__unique_actions__()
return tp
def false_negatives(self, action):
"""
Counts how often the given action was not recommended correctly (false negatives, FN).
@param action: The name of the user action for which to count false negatives.
        @return: A pandas dataset with column FN and several rows, first row lists #FN at cutoff "1", the second row at
cutoff "2", etc.
"""
#the amount of false negatives corresponds to the difference between the total number of occurrences of the
#action and the number of false positives
true_positives = self.true_positives(action)
total_occurrences = len(self.results[self.results.index == action])
total_occurrences = pandas.Series(total_occurrences, index=true_positives.index)
false_negatives = total_occurrences - true_positives["TP"]
false_negatives = pandas.DataFrame(false_negatives, columns=["FN"])
false_negatives.index.name = "cutoff"
return false_negatives
def false_positives(self, action):
"""
Counts how often the given action was recommended even though it didn't occur (false positives, FP).
@param action: The name of the user action for which to count false positives.
@return: A pandas dataset with column FP and several rows, first row lists #FP at cutoff "1", the second row at
cutoff "2", etc.
"""
#get all rows where the actual service does NOT correspond to the given action
r = self.results[self.results.index != action]
if len(r) == 0:
#if there are no such rows, then we have zero false positives, fill result dataframe with zeroes
false_positives = pandas.Series(0.0, index=self.results.columns)
else:
#if recommendation matches the action, set column to "1" (false positive), else set to "0" (true negative)
r = r.applymap(lambda col: 1 if col == action else 0)
#count how many false positives there are in each column
r = r.sum()
#if have a false positive for n-th recommendation, then also have false positive for n+1, n+2 etc
#-> calculate cumulative sum
false_positives = r.cumsum(axis=0).apply(float)
false_positives = pandas.DataFrame(false_positives, columns=["FP"])
false_positives.index.name = "cutoff"
return false_positives
@staticmethod
def precision(counts):
"""
Calculate the precision as (true positives)/(true positives + false positives).
@param counts: A dataframe that contains a column "TP" with true positives and "FP" with false positives.
@return: A pandas dataframe with one column "Precision". The first row lists the achieved precision at cutoff
"1", the second row at cutoff "2", etc.
"""
p = counts["TP"]/(counts["TP"] + counts["FP"])
p = pandas.DataFrame({"Precision": p}).fillna(0.0)
return p
@staticmethod
def recall(counts):
"""
Calculate the recall as (true positives)/(true positives + false negatives).
@param counts: A dataframe that contains a column "TP" with true positives and "FN" with false negatives.
@return: A pandas dataframe with one column "Recall". The first row lists the achieved recall at cutoff "1",
the second row at cutoff "2", etc.
"""
p = counts["TP"]/(counts["TP"] + counts["FN"])
p = pandas.DataFrame({"Recall": p}).fillna(0.0)
return p
@staticmethod
def f1(metrics):
"""
Calculate the F1 as the harmonic mean of precision and recall.
@param metrics: A dataframe with a column "Precision" and a column "Recall"
@return: A pandas dataframe with one column "F1". The first row lists the achieved F1 at cutoff "1", the second
row at cutoff "2", etc.
"""
f = (2.0*metrics["Precision"]*metrics["Recall"]) / (metrics["Precision"]+metrics["Recall"])
f = pandas.DataFrame({"F1": f}).fillna(0.0)
return f
def number_of_recommendations(self):
"""
Count how many recommendations the user was actually shown (e.g. when using a dynamic cutoff such as "show
        fewer recommendations when recommendation conflict is low"). The number of recommendations is not a quality
        metric but fits here conceptually.
@return: A pandas dataframe with one column "# of recommendations". The first row lists the # at cutoff "1", the
second row at cutoff "2", etc.
"""
n = (self.results.count(axis=0)/float(len(self.results))).cumsum()
n = pandas.DataFrame({"# of recommendations": n})
n.index.name = "cutoff"
return n
def calculate_for_action(self, action):
"""
Calculate precision, recall and F1 for one action (= one possible user action)
@param action: Which user action to calculate the metrics for.
@return: A pandas dataframe containing columns for "Precision", "Recall", "F1". The first row lists
calculated metrics at cutoff "1", the second row at cutoff "2", etc. A fourth column "action" simply lists the
action name in all rows, this column is necessary for later merging the metrics of all actions.
"""
#count how many true positives, false positives and false negatives occurred for this action
counts = pandas.concat([self.true_positives(action),
self.false_negatives(action),
self.false_positives(action)],
axis=1)
#use these counts to calculate the relevant metrics
metrics = pandas.concat([self.precision(counts),
self.recall(counts)],
axis=1)
metrics["F1"] = self.f1(metrics)["F1"]
#add column that contains name of the action in all rows, to prepare for merging the metrics for all actions
metrics["action"] = pandas.Series(action, index=metrics.index)
return metrics
def calculate(self):
"""
Performs the actual calculation of the weighted average of precision, recall and F1 over all actions and counts
        the number of recommendations that were actually shown to the user.
@return: A pandas dataframe containing one column for each of the four quality metrics. The first row lists
        calculated metrics at cutoff "1", the second row at cutoff "2", etc.
"""
#make one big matrix with the metrics for all actions
actions = self.__unique_actions__()
metrics = pandas.concat([self.calculate_for_action(action) for action in actions])
#count for each action how often the corresponding action actually occurred
        occurrences = pandas.Series(self.results.index.values).value_counts()
occurrences = occurrences.reindex(actions).fillna(0)
#calculate the weighted average for each of the metrics (i.e. actions that occur more often have a higher
        #influence on the overall results for "Precision", "Recall" and "F1")
actions_as_index = lambda group: group.set_index("action").reindex(actions).fillna(0.0)
weighted_average_for_column = lambda col: numpy.average(col.values, weights=occurrences.values)
weighted_average = lambda group: actions_as_index(group).apply(weighted_average_for_column)
metrics = metrics.groupby(level="cutoff").aggregate(weighted_average)
del(metrics["action"]) #get rid of now unnecessary "action" column
#do not need weighted average for # of recommendations, simply add counts as fourth column
metrics["# of recommendations"] = self.number_of_recommendations()
return metrics
def confusion_matrix(self):
"""
Calculate a confusion matrix: for each action count how often each service was recommended
        @return: A pandas dataframe with one row for each possible action and one column for each possible
        service recommendation. Each matrix item counts how often the service was recommended when the action happened.
"""
cutoff = 1 #only makes sense for cutoff=1
def confusions_for_action(action):
r = self.results[self.results.index == action][cutoff]
return r.groupby(r).count()
actions = self.__unique_actions__()
matrix = [confusions_for_action(action) for action in actions]
matrix = pandas.concat(matrix, axis=1).reindex(actions).transpose()
matrix.index = actions
matrix = matrix.reindex(actions).fillna(0)
matrix.index.name = "Actual action"
return matrix
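# Illustrative usage sketch (not part of the original module): the action and
# recommendation names below are hypothetical. Given the actually performed
# actions and the ranked recommendations computed for each of them, the
# calculator returns one row of weighted-average quality metrics per cutoff.
if __name__ == "__main__":
    actual = ["lights_on", "heating_on", "lights_on"]
    recommended = [["lights_on", "tv_on"],    # correct at cutoff 1
                   ["tv_on", "heating_on"],   # correct only at cutoff 2
                   ["tv_on", "radio_on"]]     # never correct
    calculator = QualityMetricsCalculator(actual, recommended)
    print(calculator.calculate())            # Precision/Recall/F1/# of recommendations per cutoff
    print(calculator.confusion_matrix())     # recommendation counts at cutoff 1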
|
krasch/smart-assistants
|
evaluation/metrics.py
|
Python
|
mit
| 13,611
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sqlalchemy
def upgrade(migrate_engine):
meta = sqlalchemy.MetaData(bind=migrate_engine)
stack = sqlalchemy.Table('software_deployment', meta, autoload=True)
# Align with 64 character length used in keystone project table
stack_user_project_id = sqlalchemy.Column('stack_user_project_id',
sqlalchemy.String(length=64))
stack_user_project_id.create(stack)
def downgrade(migrate_engine):
meta = sqlalchemy.MetaData(bind=migrate_engine)
stack = sqlalchemy.Table('software_deployment', meta, autoload=True)
stack.c.stack_user_project_id.drop()
|
redhat-openstack/heat
|
heat/db/sqlalchemy/migrate_repo/versions/042_software_deployment_domain_project.py
|
Python
|
apache-2.0
| 1,204
|
import uuid
from datetime import datetime
from django.test import TestCase
from casexml.apps.case.mock import CaseBlock, IndexAttrs
from pillowtop.es_utils import initialize_index_and_mapping
from corehq.apps.domain.shortcuts import create_domain
from corehq.apps.es.case_search import CaseSearchES
from corehq.apps.es.tests.utils import es_test
from corehq.elastic import get_es_new, send_to_elasticsearch
from corehq.form_processor.models import CommCareCase
from corehq.pillows.case_search import transform_case_for_elasticsearch
from corehq.pillows.mappings.case_search_mapping import CASE_SEARCH_INDEX_INFO
from corehq.util.elastic import ensure_index_deleted
from corehq.util.test_utils import trap_extra_setup
from ..api.core import serialize_case, serialize_es_case
from ..utils import submit_case_blocks
@es_test
class TestAPISerialization(TestCase):
domain = 'test-update-cases'
maxDiff = None
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.domain_obj = create_domain(cls.domain)
cls.parent_case_id = str(uuid.uuid4())
case_id = str(uuid.uuid4())
xform, cases = submit_case_blocks([
CaseBlock(
case_id=cls.parent_case_id,
case_type='player',
case_name='Elizabeth Harmon',
external_id='1',
owner_id='methuen_home',
create=True,
update={
'sport': 'chess',
'rank': '1600',
'dob': '1948-11-02',
}
).as_text(),
CaseBlock(
case_id=case_id,
case_type='match',
case_name='Harmon/Luchenko',
owner_id='harmon',
external_id='14',
create=True,
update={
'winner': 'Harmon',
'accuracy': '84.3',
},
index={
'parent': IndexAttrs(case_type='player', case_id=cls.parent_case_id, relationship='child')
},
).as_text()
], domain=cls.domain)
cls.parent_case = CommCareCase.objects.get_case(cls.parent_case_id, cls.domain)
cls.case = CommCareCase.objects.get_case(case_id, cls.domain)
for case in [cls.case, cls.parent_case]:
# Patch datetimes for test consistency
case.opened_on = datetime(2021, 2, 18, 10, 59)
case.modified_on = datetime(2021, 2, 18, 10, 59)
case.server_modified_on = datetime(2021, 2, 18, 10, 59)
cls.es = get_es_new()
with trap_extra_setup(ConnectionError):
initialize_index_and_mapping(cls.es, CASE_SEARCH_INDEX_INFO)
for case in [cls.case, cls.parent_case]:
send_to_elasticsearch(
'case_search',
                transform_case_for_elasticsearch(case.to_json())
)
cls.es.indices.refresh(CASE_SEARCH_INDEX_INFO.index)
@classmethod
def tearDownClass(cls):
cls.domain_obj.delete()
ensure_index_deleted(CASE_SEARCH_INDEX_INFO.index)
super().tearDownClass()
def test_serialization(self):
self.assertEqual(
serialize_case(self.case),
{
"domain": self.domain,
"case_id": self.case.case_id,
"case_type": "match",
"case_name": "Harmon/Luchenko",
"external_id": "14",
"owner_id": "harmon",
"date_opened": "2021-02-18T10:59:00.000000Z",
"last_modified": "2021-02-18T10:59:00.000000Z",
"server_last_modified": "2021-02-18T10:59:00.000000Z",
"closed": False,
"date_closed": None,
"properties": {
"winner": "Harmon",
'accuracy': '84.3',
},
"indices": {
"parent": {
"case_id": self.parent_case_id,
"case_type": "player",
"relationship": "child",
}
}
}
)
def test_es_serialization(self):
es_case = CaseSearchES().doc_id(self.case.case_id).run().hits[0]
self.assertEqual(serialize_case(self.case), serialize_es_case(es_case))
|
dimagi/commcare-hq
|
corehq/apps/hqcase/tests/test_serialization.py
|
Python
|
bsd-3-clause
| 4,423
|
import os
import multiprocessing
bind = '0.0.0.0:5000'
backlog = 2048
worker_class = 'gevent'
workers = 2
threads = 1
worker_connections = 1000
timeout = 30
keepalive = 2
max_requests = 1000
max_requests_jitter = 50
spew = False
daemon = False
pidfile = None
umask = 666
user = os.getenv('USER')
group = os.getenv('USER')
tmp_upload_dir = None
errorlog = '-'
loglevel = 'info'
accesslog = '-'
access_log_format = '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"'
proc_name = None
#
# Server hooks
#
# post_fork - Called just after a worker has been forked.
#
# A callable that takes a server and worker instance
# as arguments.
#
# pre_fork - Called just prior to forking the worker subprocess.
#
#       A callable that accepts the same arguments as post_fork
#
# pre_exec - Called just prior to forking off a secondary
# master process during things like config reloading.
#
# A callable that takes a server instance as the sole argument.
#
def post_fork(server, worker):
server.log.info("Worker spawned (pid: %s)", worker.pid)
def pre_fork(server, worker):
pass
def pre_exec(server):
server.log.info("Forked child, re-executing.")
def when_ready(server):
server.log.info("Server is ready. Spawning workers")
def worker_int(worker):
worker.log.info("worker received INT or QUIT signal")
## get traceback info
import threading, sys, traceback
id2name = dict([(th.ident, th.name) for th in threading.enumerate()])
code = []
for threadId, stack in sys._current_frames().items():
code.append("\n# Thread: %s(%d)" % (id2name.get(threadId,""),
threadId))
for filename, lineno, name, line in traceback.extract_stack(stack):
code.append('File: "%s", line %d, in %s' % (filename,
lineno, name))
if line:
code.append(" %s" % (line.strip()))
worker.log.debug("\n".join(code))
def worker_abort(worker):
worker.log.info("worker received SIGABRT signal")
|
benlaken/EEMiniService
|
python_app/gunicorn.py
|
Python
|
mit
| 2,032
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-10-06 08:14
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('subscriptions', '0004_auto_20160923_0900'),
('subscriptions', '0005_filter'),
]
operations = [
]
|
stadtgestalten/stadtgestalten
|
grouprise/features/subscriptions/migrations/0006_merge_20161006_1014.py
|
Python
|
agpl-3.0
| 338
|
"""Classes for handling connection to StatusDB."""
import couchdb
import logging
from datetime import datetime
logger = logging.getLogger(__name__)
class StatusdbSession(object):
"""Wrapper class for couchdb."""
def __init__(self, config, db=None):
user = config.get('username')
password = config.get('password')
port = config.get('port')
url = config.get('url')
url_string = 'http://{}:{}@{}:{}'.format(user, password, url, port)
display_url_string = 'http://{}:{}@{}:{}'.format(user, '*********', url, port)
self.connection = couchdb.Server(url=url_string)
if not self.connection:
raise Exception('Couchdb connection failed for url {}'.format(display_url_string))
if db:
self.db_connection = self.connection[db]
def get_entry(self, name, use_id_view=False):
"""Retrieve entry from a given db for a given name.
:param name: unique name identifier (primary key, not the uuid)
"""
if use_id_view:
view = self.id_view
else:
view = self.name_view
if not view.get(name, None):
return None
return self.db.get(view.get(name))
def save_db_doc(self, doc, db=None):
try:
db = db or self.db
db.save(doc)
except Exception as e:
raise Exception('Failed saving document due to {}'.format(e))
def get_project_flowcell(self, project_id, open_date='2015-01-01', date_format='%Y-%m-%d'):
"""From information available in flowcell db connection,
        collect the flowcells this project was sequenced on.
:param project_id: NGI project ID to get the flowcells
:param open_date: Open date of project to skip the check for all flowcells
:param date_format: The format of specified open_date
"""
try:
open_date = datetime.strptime(open_date, date_format)
        except (TypeError, ValueError):
open_date = datetime.strptime('2015-01-01', '%Y-%m-%d')
project_flowcells = {}
date_sorted_fcs = sorted(list(self.proj_list.keys()), key=lambda k: datetime.strptime(k.split('_')[0], '%y%m%d'), reverse=True)
for fc in date_sorted_fcs:
fc_date, fc_name = fc.split('_')
if datetime.strptime(fc_date,'%y%m%d') < open_date:
break
if project_id in self.proj_list[fc] and fc_name not in project_flowcells.keys():
project_flowcells[fc_name] = {'name':fc_name,'run_name':fc, 'date':fc_date, 'db':self.db.name}
return project_flowcells
class ProjectSummaryConnection(StatusdbSession):
def __init__(self, config, dbname='projects'):
super(ProjectSummaryConnection, self).__init__(config)
self.db = self.connection[dbname]
self.name_view = {k.key: k.id for k in self.db.view('project/project_name', reduce=False)}
self.id_view = {k.key: k.id for k in self.db.view('project/project_id', reduce=False)}
class FlowcellRunMetricsConnection(StatusdbSession):
def __init__(self, config, dbname='flowcells'):
super(FlowcellRunMetricsConnection, self).__init__(config)
self.db = self.connection[dbname]
self.name_view = {k.key:k.id for k in self.db.view('names/name', reduce=False)}
self.proj_list = {k.key:k.value for k in self.db.view('names/project_ids_list', reduce=False) if k.key}
class X_FlowcellRunMetricsConnection(StatusdbSession):
def __init__(self, config, dbname='x_flowcells'):
super(X_FlowcellRunMetricsConnection, self).__init__(config)
self.db = self.connection[dbname]
self.name_view = {k.key:k.id for k in self.db.view('names/name', reduce=False)}
self.proj_list = {k.key:k.value for k in self.db.view('names/project_ids_list', reduce=False) if k.key}
def update_doc(db, obj, over_write_db_entry=False):
view = db.view('info/name')
if len(view[obj['name']].rows) == 1:
remote_doc = view[obj['name']].rows[0].value
doc_id = remote_doc.pop('_id')
doc_rev = remote_doc.pop('_rev')
if remote_doc != obj:
if not over_write_db_entry:
obj = merge_dicts(obj, remote_doc)
obj['_id'] = doc_id
obj['_rev'] = doc_rev
db[doc_id] = obj
logger.info('Updating {}'.format(obj['name']))
elif len(view[obj['name']].rows) == 0:
db.save(obj)
logger.info('Saving {}'.format(obj['name']))
else:
        logger.warning('More than one row with name {} found'.format(obj['name']))
def merge_dicts(d1, d2):
"""Merge dictionary d2 into dictionary d1.
If the same key is found, the one in d1 will be used.
"""
for key in d2:
if key in d1:
if isinstance(d1[key], dict) and isinstance(d2[key], dict):
                merge_dicts(d1[key], d2[key])
elif d1[key] == d2[key]:
pass # same leaf value
else:
logger.debug('Values for key {key} in d1 and d2 differ, '
'using the value of d1'.format(key=key))
else:
d1[key] = d2[key]
return d1
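# Quick sanity check for merge_dicts (illustrative, not part of the original
# module): on key conflicts the value from d1 wins, and nested dicts are
# merged recursively.
if __name__ == '__main__':
    d1 = {'a': 1, 'nested': {'x': 1}}
    d2 = {'a': 2, 'b': 3, 'nested': {'y': 2}}
    print(merge_dicts(d1, d2))  # -> {'a': 1, 'nested': {'x': 1, 'y': 2}, 'b': 3}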
|
SciLifeLab/TACA
|
taca/utils/statusdb.py
|
Python
|
mit
| 5,182
|
# Tai Sakuma <tai.sakuma@gmail.com>
import itertools
import operator
import numpy as np
##__________________________________________________________________||
class DeltaR(object):
def __init__(self,
obj1_eta_phi_names=('eta', 'phi'),
obj2_eta_phi_names=('eta', 'phi')
):
self.obj1_eta_phi_names = obj1_eta_phi_names
self.obj2_eta_phi_names = obj2_eta_phi_names
def __repr__(self):
name_value_pairs = (
('obj1_eta_phi_names', self.obj1_eta_phi_names),
('obj2_eta_phi_names', self.obj2_eta_phi_names),
)
return '{}({})'.format(
self.__class__.__name__,
', '.join(['{}={!r}'.format(n, v) for n, v in name_value_pairs]),
)
def __call__(self, obj1, obj2):
return self._deltaR(
getattr(obj1, self.obj1_eta_phi_names[0]),
getattr(obj1, self.obj1_eta_phi_names[1]),
getattr(obj2, self.obj2_eta_phi_names[0]),
getattr(obj2, self.obj2_eta_phi_names[1]),
)
def _deltaR(self, eta1, phi1, eta2, phi2):
deta = eta1 - eta2
dphi = self._deltaPhi(phi1, phi2)
return np.sqrt(deta*deta + dphi*dphi)
def _deltaPhi(self, phi1, phi2):
ret = phi1 - phi2
        # wrap the difference into [-pi, pi] so it measures the true angular separation
        while ret > np.pi:
            ret -= 2*np.pi
        while ret < -np.pi:
            ret += 2*np.pi
return ret
##__________________________________________________________________||
class ObjectMatch(object):
def __init__(self, in_obj1, in_obj2,
out_obj1_matched,
out_obj2_matched_sorted=None,
out_obj1_unmatched=None,
out_obj2_unmatched=None,
distance_func=DeltaR(),
max_distance=0.4
):
self.obj1_name = in_obj1
self.obj2_name = in_obj2
self.obj1_matched_name = out_obj1_matched
self.obj2_matched_sorted_name = out_obj2_matched_sorted
self.obj1_unmatched_name = out_obj1_unmatched
self.obj2_unmatched_name = out_obj2_unmatched
self.distance_func = distance_func
self.max_distance = max_distance
def __repr__(self):
name_value_pairs = (
('in_obj1_name', self.obj1_name),
('in_obj2_name', self.obj2_name),
('out_obj1_matched_name', self.obj1_matched_name),
('out_obj2_matched_sorted_name', self.obj2_matched_sorted_name),
('out_obj1_unmatched_name', self.obj1_unmatched_name),
('out_obj2_unmatched_name', self.obj2_unmatched_name),
('distance_func', self.distance_func),
('max_distance', self.max_distance),
)
return '{}({})'.format(
self.__class__.__name__,
', '.join(['{}={!r}'.format(n, v) for n, v in name_value_pairs]),
)
def begin(self, event):
self.obj1_matched = [ ]
self.obj2_matched_sorted = [ ]
self.obj1_unmatched = [ ]
self.obj2_unmatched = [ ]
self._attach_to_event(event)
def _attach_to_event(self, event):
setattr(event, self.obj1_matched_name, self.obj1_matched)
if self.obj2_matched_sorted_name is not None:
setattr(event, self.obj2_matched_sorted_name, self.obj2_matched_sorted)
if self.obj1_unmatched_name is not None:
setattr(event, self.obj1_unmatched_name, self.obj1_unmatched)
if self.obj2_unmatched_name is not None:
setattr(event, self.obj2_unmatched_name, self.obj2_unmatched)
def event(self, event):
self._attach_to_event(event)
obj1 = getattr(event, self.obj1_name)
obj2 = getattr(event, self.obj2_name)
m1, m2, u1, u2 = split_matched_unmatched(obj1, obj2, self.distance_func, self.max_distance)
self.obj1_matched[:] = m1
self.obj2_matched_sorted[:] = m2
self.obj1_unmatched[:] = u1
self.obj2_unmatched[:] = u2
def end(self):
self.obj1_matched = None
self.obj1_unmatched = None
self.obj2_matched_sorted = None
self.obj2_unmatched = None
##__________________________________________________________________||
def split_matched_unmatched(obj1, obj2, distance_func, max_distance):
distances = [[(i1, i2, distance_func(o1, o2)) for i1, o1 in enumerate(obj1)] for i2, o2 in enumerate(obj2)]
# a list of lists of (index1, index2, distance) grouped by index2
# e.g.,
# [
# [(0, 0, 13.0), (1, 0, 10.0), (2, 0, 7.0), (3, 0, 4.0)],
# [(0, 1, 6.5), (1, 1, 3.5), (2, 1, 0.5), (3, 1, 2.5)],
# [(0, 2, 5.0), (1, 2, 2.0), (2, 2, 1.0), (3, 2, 4.0)],
# [(0, 3, 2.0), (1, 3, 1.0), (2, 3, 4.0), (3, 3, 7.0)],
# [(0, 4, 1.0), (1, 4, 2.0), (2, 4, 5.0), (3, 4, 8.0)]
# ]
distances = [l for l in distances if l]
# remove empty sublists
distances = (min(l, key=operator.itemgetter(2)) for l in distances)
# select one with the minimum distance in each sublist
# e.g., [(3, 0, 4.0), (2, 1, 0.5), (2, 2, 1.0), (1, 3, 1.0), (0, 4, 1.0)]
distances = (l for l in distances if l[2] <= max_distance)
# remove ones with distances greater than maximum distances
# e.g., [(2, 1, 0.5), (2, 2, 1.0), (1, 3, 1.0), (0, 4, 1.0)]
# note index1 == 2 happens twice
distances = sorted(distances, key=operator.itemgetter(0))
# sort by index1
# e.g., [(0, 4, 1.0), (1, 3, 1.0), (2, 1, 0.5), (2, 2, 1.0)]
distances = [list(g) for _, g in itertools.groupby(distances, key=operator.itemgetter(0))]
# group by index1
# e.g., [[(0, 4, 1.0)], [(1, 3, 1.0)], [(2, 1, 0.5), (2, 2, 1.0)]]
distances = [min(l, key=operator.itemgetter(2)) for l in distances]
# select one with the minimum distance in each sublist
# e.g., [(0, 4, 1.0), (1, 3, 1.0), (2, 1, 0.5)]
obj1_matched = [obj1[i] for i, j, d in distances]
obj2_matched_sorted = [obj2[j] for i, j, d in distances]
obj1_unmatched = [o for o in obj1 if o not in obj1_matched]
obj2_unmatched = [o for o in obj2 if o not in obj2_matched_sorted]
return obj1_matched, obj2_matched_sorted, obj1_unmatched, obj2_unmatched
##__________________________________________________________________||
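# Minimal demonstration of split_matched_unmatched (illustrative, not part of
# the original module), using plain floats and absolute difference as the
# distance function.
if __name__ == '__main__':
    obj1 = [0.0, 1.0, 5.0]
    obj2 = [0.1, 4.8, 9.0]
    distance = lambda a, b: abs(a - b)
    m1, m2, u1, u2 = split_matched_unmatched(obj1, obj2, distance, max_distance=0.5)
    print(m1)  # [0.0, 5.0]
    print(m2)  # [0.1, 4.8]
    print(u1)  # [1.0]
    print(u2)  # [9.0]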
|
TaiSakuma/scribblers
|
scribblers/match.py
|
Python
|
bsd-3-clause
| 6,247
|
'''
from flask.ext.wtf import Form, TextField, BooleanField
from flask.ext.wtf import Required
class LoginForm(Form):
username = TextField('username', validators = [Required()])
password = TextField('password', validators = [Required()])
'''
|
matthewhughes/really-scrapable-web-app
|
app/forms.py
|
Python
|
mit
| 255
|
# Copyright 2014 Google Inc. All Rights Reserved.
"""Helper methods for record-set transactions."""
import os
from dns import rdatatype
import yaml
from googlecloudapis.dns.v1 import dns_v1_messages as messages
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.core.util import resource_printer
from googlecloudsdk.dns.lib import import_util
from googlecloudsdk.dns.lib import util
DEFAULT_PATH = 'transaction.yaml'
def WriteToYamlFile(yaml_file, change):
"""Writes the given change in yaml format to the given file.
Args:
yaml_file: file, File into which the change should be written.
change: Change, Change to be written out.
"""
printer = resource_printer.YamlPrinter(yaml_file)
printer.AddRecord(change)
def _RecordSetsFromDictionaries(record_set_dictionaries):
"""Converts list of record-set dictionaries into list of ResourceRecordSets.
Args:
record_set_dictionaries: [{str:str}], list of record-sets as dictionaries.
Returns:
list of ResourceRecordSets equivalent to given list of yaml record-sets
"""
record_sets = []
for record_set_dict in record_set_dictionaries:
record_set = messages.ResourceRecordSet()
# Need to assign kind to default value for useful equals comparisons.
record_set.kind = record_set.kind
record_set.name = record_set_dict['name']
record_set.ttl = record_set_dict['ttl']
record_set.type = record_set_dict['type']
record_set.rrdatas = record_set_dict['rrdatas']
record_sets.append(record_set)
return record_sets
def ChangeFromYamlFile(yaml_file):
"""Returns the change contained in the given yaml file.
Args:
yaml_file: file, A yaml file with change.
Returns:
Change, the change contained in the given yaml file.
"""
change_dict = yaml.safe_load(yaml_file)
change = messages.Change()
change.additions = _RecordSetsFromDictionaries(change_dict['additions'])
change.deletions = _RecordSetsFromDictionaries(change_dict['deletions'])
return change
def CreateRecordSetFromArgs(args):
"""Creates and returns a record-set from the given args.
Args:
args: The arguments to use to create the record-set.
Raises:
ToolException: If given record-set type is not supported
Returns:
ResourceRecordSet, the record-set created from the given args.
"""
rd_type = rdatatype.from_text(args.type)
if rd_type not in import_util.RDATA_TRANSLATIONS:
raise exceptions.ToolException(
'unsupported record-set type [{0}]'.format(args.type))
record_set = messages.ResourceRecordSet()
# Need to assign kind to default value for useful equals comparisons.
record_set.kind = record_set.kind
record_set.name = util.AppendTrailingDot(args.name)
record_set.ttl = args.ttl
record_set.type = args.type
record_set.rrdatas = args.data
if rd_type is rdatatype.TXT or rd_type is rdatatype.SPF:
record_set.rrdatas = [import_util.QuotedText(datum) for datum in args.data]
return record_set
class TransactionFile(object):
"""Context for reading/writing from/to a transaction file."""
def __init__(self, trans_file_path, mode='r'):
if not os.path.isfile(trans_file_path):
raise exceptions.ToolException(
'transaction not found at [{0}]'.format(trans_file_path))
self.__trans_file_path = trans_file_path
try:
self.__trans_file = open(trans_file_path, mode)
except IOError as exp:
msg = 'unable to open transaction [{0}] because [{1}]'
msg = msg.format(trans_file_path, exp)
raise exceptions.ToolException(msg)
def __enter__(self):
return self.__trans_file
def __exit__(self, typ, value, traceback):
self.__trans_file.close()
if typ is IOError or typ is yaml.YAMLError:
msg = 'unable to read/write transaction [{0}] because [{1}]'
msg = msg.format(self.__trans_file_path, value)
raise exceptions.ToolException(msg)
|
wemanuel/smry
|
smry/server-auth/ls/google-cloud-sdk/lib/googlecloudsdk/dns/lib/transaction_util.py
|
Python
|
apache-2.0
| 3,899
|
#!/usr/bin/env python
__copyright__ = '2008, Kovid Goyal kovid@kovidgoyal.net'
__docformat__ = 'restructuredtext en'
__license__ = 'GPL v3'
import json, os, traceback
from PyQt4.Qt import (Qt, QDialog, QDialogButtonBox, QSyntaxHighlighter, QFont,
QRegExp, QApplication, QTextCharFormat, QColor, QCursor,
QIcon, QSize)
from calibre import sanitize_file_name_unicode
from calibre.constants import config_dir
from calibre.gui2.dialogs.template_dialog_ui import Ui_TemplateDialog
from calibre.utils.formatter_functions import formatter_functions
from calibre.utils.icu import sort_key
from calibre.ebooks.metadata.book.base import Metadata
from calibre.ebooks.metadata.book.formatter import SafeFormat
from calibre.library.coloring import (displayable_columns, color_row_key)
from calibre.gui2 import error_dialog, choose_files, pixmap_to_data
class ParenPosition:
def __init__(self, block, pos, paren):
self.block = block
self.pos = pos
self.paren = paren
self.highlight = False
def set_highlight(self, to_what):
self.highlight = to_what
class TemplateHighlighter(QSyntaxHighlighter):
Config = {}
Rules = []
Formats = {}
BN_FACTOR = 1000
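    # Block number and in-block position are packed into a single integer key
    # for paren_pos_map: key = block_number * BN_FACTOR + position_in_block
    # (this assumes lines stay shorter than BN_FACTOR characters).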
KEYWORDS = ["program"]
def __init__(self, parent=None):
super(TemplateHighlighter, self).__init__(parent)
self.initializeFormats()
TemplateHighlighter.Rules.append((QRegExp(
"|".join([r"\b%s\b" % keyword for keyword in self.KEYWORDS])),
"keyword"))
TemplateHighlighter.Rules.append((QRegExp(
"|".join([r"\b%s\b" % builtin for builtin in
formatter_functions().get_builtins()])),
"builtin"))
TemplateHighlighter.Rules.append((QRegExp(
r"\b[+-]?[0-9]+[lL]?\b"
r"|\b[+-]?0[xX][0-9A-Fa-f]+[lL]?\b"
r"|\b[+-]?[0-9]+(?:\.[0-9]+)?(?:[eE][+-]?[0-9]+)?\b"),
"number"))
stringRe = QRegExp(r"""(?:[^:]'[^']*'|"[^"]*")""")
stringRe.setMinimal(True)
TemplateHighlighter.Rules.append((stringRe, "string"))
lparenRe = QRegExp(r'\(')
lparenRe.setMinimal(True)
TemplateHighlighter.Rules.append((lparenRe, "lparen"))
rparenRe = QRegExp(r'\)')
rparenRe.setMinimal(True)
TemplateHighlighter.Rules.append((rparenRe, "rparen"))
self.regenerate_paren_positions()
self.highlighted_paren = False
def initializeFormats(self):
Config = self.Config
Config["fontfamily"] = "monospace"
#Config["fontsize"] = 10
for name, color, bold, italic in (
("normal", "#000000", False, False),
("keyword", "#000080", True, False),
("builtin", "#0000A0", False, False),
("comment", "#007F00", False, True),
("string", "#808000", False, False),
("number", "#924900", False, False),
("lparen", "#000000", True, True),
("rparen", "#000000", True, True)):
Config["%sfontcolor" % name] = color
Config["%sfontbold" % name] = bold
Config["%sfontitalic" % name] = italic
baseFormat = QTextCharFormat()
baseFormat.setFontFamily(Config["fontfamily"])
#baseFormat.setFontPointSize(Config["fontsize"])
for name in ("normal", "keyword", "builtin", "comment",
"string", "number", "lparen", "rparen"):
format = QTextCharFormat(baseFormat)
format.setForeground(QColor(Config["%sfontcolor" % name]))
if Config["%sfontbold" % name]:
format.setFontWeight(QFont.Bold)
format.setFontItalic(Config["%sfontitalic" % name])
self.Formats[name] = format
def find_paren(self, bn, pos):
dex = bn * self.BN_FACTOR + pos
return self.paren_pos_map.get(dex, None)
def highlightBlock(self, text):
bn = self.currentBlock().blockNumber()
textLength = text.length()
self.setFormat(0, textLength, self.Formats["normal"])
if text.isEmpty():
pass
elif text[0] == "#":
self.setFormat(0, text.length(), self.Formats["comment"])
return
for regex, format in TemplateHighlighter.Rules:
i = regex.indexIn(text)
while i >= 0:
length = regex.matchedLength()
if format in ['lparen', 'rparen']:
pp = self.find_paren(bn, i)
if pp and pp.highlight:
self.setFormat(i, length, self.Formats[format])
else:
self.setFormat(i, length, self.Formats[format])
i = regex.indexIn(text, i + length)
if self.generate_paren_positions:
t = unicode(text)
i = 0
foundQuote = False
while i < len(t):
c = t[i]
if c == ':':
# Deal with the funky syntax of template program mode.
# This won't work if there are more than one template
# expression in the document.
if not foundQuote and i+1 < len(t) and t[i+1] == "'":
i += 2
elif c in ["'", '"']:
foundQuote = True
i += 1
j = t[i:].find(c)
if j < 0:
i = len(t)
else:
i = i + j
elif c in ['(', ')']:
pp = ParenPosition(bn, i, c)
self.paren_positions.append(pp)
self.paren_pos_map[bn*self.BN_FACTOR+i] = pp
i += 1
def rehighlight(self):
QApplication.setOverrideCursor(QCursor(Qt.WaitCursor))
QSyntaxHighlighter.rehighlight(self)
QApplication.restoreOverrideCursor()
def check_cursor_pos(self, chr, block, pos_in_block):
found_pp = -1
for i, pp in enumerate(self.paren_positions):
pp.set_highlight(False)
if pp.block == block and pp.pos == pos_in_block:
found_pp = i
if chr not in ['(', ')']:
if self.highlighted_paren:
self.rehighlight()
self.highlighted_paren = False
return
if found_pp >= 0:
stack = 0
if chr == '(':
list = self.paren_positions[found_pp+1:]
else:
list = reversed(self.paren_positions[0:found_pp])
for pp in list:
if pp.paren == chr:
stack += 1
elif stack:
stack -= 1
else:
pp.set_highlight(True)
self.paren_positions[found_pp].set_highlight(True)
break
self.highlighted_paren = True
self.rehighlight()
def regenerate_paren_positions(self):
self.generate_paren_positions = True
self.paren_positions = []
self.paren_pos_map = {}
self.rehighlight()
self.generate_paren_positions = False
class TemplateDialog(QDialog, Ui_TemplateDialog):
def __init__(self, parent, text, mi=None, fm=None, color_field=None,
icon_field_key=None, icon_rule_kind=None):
QDialog.__init__(self, parent)
Ui_TemplateDialog.__init__(self)
self.setupUi(self)
self.coloring = color_field is not None
self.iconing = icon_field_key is not None
cols = []
if fm is not None:
for key in sorted(displayable_columns(fm),
key=lambda(k): sort_key(fm[k]['name']) if k != color_row_key else 0):
if key == color_row_key and not self.coloring:
continue
from calibre.gui2.preferences.coloring import all_columns_string
name = all_columns_string if key == color_row_key else fm[key]['name']
if name:
cols.append((name, key))
self.color_layout.setVisible(False)
self.icon_layout.setVisible(False)
if self.coloring:
self.color_layout.setVisible(True)
for n1, k1 in cols:
self.colored_field.addItem(n1, k1)
self.colored_field.setCurrentIndex(self.colored_field.findData(color_field))
colors = QColor.colorNames()
colors.sort()
self.color_name.addItems(colors)
elif self.iconing:
self.icon_layout.setVisible(True)
for n1, k1 in cols:
self.icon_field.addItem(n1, k1)
self.icon_file_names = []
d = os.path.join(config_dir, 'cc_icons')
if os.path.exists(d):
for icon_file in os.listdir(d):
icon_file = icu_lower(icon_file)
if os.path.exists(os.path.join(d, icon_file)):
if icon_file.endswith('.png'):
self.icon_file_names.append(icon_file)
self.icon_file_names.sort(key=sort_key)
self.update_filename_box()
self.icon_with_text.setChecked(True)
if icon_rule_kind == 'icon_only':
self.icon_without_text.setChecked(True)
self.icon_field.setCurrentIndex(self.icon_field.findData(icon_field_key))
if mi:
self.mi = mi
else:
self.mi = Metadata(_('Title'), [_('Author')])
self.mi.author_sort = _('Author Sort')
self.mi.series = _('Series')
self.mi.series_index = 3
self.mi.rating = 4.0
self.mi.tags = [_('Tag 1'), _('Tag 2')]
self.mi.languages = ['eng']
if fm is not None:
self.mi.set_all_user_metadata(fm.custom_field_metadata())
# Remove help icon on title bar
icon = self.windowIcon()
self.setWindowFlags(self.windowFlags()&(~Qt.WindowContextHelpButtonHint))
self.setWindowIcon(icon)
self.last_text = ''
self.highlighter = TemplateHighlighter(self.textbox.document())
self.textbox.cursorPositionChanged.connect(self.text_cursor_changed)
self.textbox.textChanged.connect(self.textbox_changed)
self.textbox.setTabStopWidth(10)
self.source_code.setTabStopWidth(10)
self.documentation.setReadOnly(True)
self.source_code.setReadOnly(True)
if text is not None:
self.textbox.setPlainText(text)
self.buttonBox.button(QDialogButtonBox.Ok).setText(_('&OK'))
self.buttonBox.button(QDialogButtonBox.Cancel).setText(_('&Cancel'))
self.color_copy_button.clicked.connect(self.color_to_clipboard)
self.filename_button.clicked.connect(self.filename_button_clicked)
self.icon_copy_button.clicked.connect(self.icon_to_clipboard)
try:
with open(P('template-functions.json'), 'rb') as f:
self.builtin_source_dict = json.load(f, encoding='utf-8')
except:
self.builtin_source_dict = {}
self.funcs = formatter_functions().get_functions()
self.builtins = formatter_functions().get_builtins()
func_names = sorted(self.funcs)
self.function.clear()
self.function.addItem('')
self.function.addItems(func_names)
self.function.setCurrentIndex(0)
self.function.currentIndexChanged[str].connect(self.function_changed)
self.textbox_changed()
self.rule = (None, '')
tt = _('Template language tutorial')
self.template_tutorial.setText(
'<a href="http://manual.calibre-ebook.com/template_lang.html">'
'%s</a>'%tt)
tt = _('Template function reference')
self.template_func_reference.setText(
'<a href="http://manual.calibre-ebook.com/template_ref.html">'
'%s</a>'%tt)
def filename_button_clicked(self):
try:
path = choose_files(self, 'choose_category_icon',
_('Select Icon'), filters=[
('Images', ['png', 'gif', 'jpg', 'jpeg'])],
all_files=False, select_only_single_file=True)
if path:
icon_path = path[0]
icon_name = sanitize_file_name_unicode(
os.path.splitext(
os.path.basename(icon_path))[0]+'.png')
if icon_name not in self.icon_file_names:
self.icon_file_names.append(icon_name)
self.update_filename_box()
try:
p = QIcon(icon_path).pixmap(QSize(128, 128))
d = os.path.join(config_dir, 'cc_icons')
if not os.path.exists(os.path.join(d, icon_name)):
if not os.path.exists(d):
os.makedirs(d)
with open(os.path.join(d, icon_name), 'wb') as f:
f.write(pixmap_to_data(p, format='PNG'))
except:
traceback.print_exc()
self.icon_files.setCurrentIndex(self.icon_files.findText(icon_name))
self.icon_files.adjustSize()
except:
traceback.print_exc()
return
def update_filename_box(self):
self.icon_files.clear()
self.icon_file_names.sort(key=sort_key)
self.icon_files.addItem('')
self.icon_files.addItems(self.icon_file_names)
for i,filename in enumerate(self.icon_file_names):
icon = QIcon(os.path.join(config_dir, 'cc_icons', filename))
self.icon_files.setItemIcon(i+1, icon)
def color_to_clipboard(self):
app = QApplication.instance()
c = app.clipboard()
c.setText(unicode(self.color_name.currentText()))
def icon_to_clipboard(self):
app = QApplication.instance()
c = app.clipboard()
c.setText(unicode(self.icon_files.currentText()))
def textbox_changed(self):
cur_text = unicode(self.textbox.toPlainText())
if self.last_text != cur_text:
self.last_text = cur_text
self.highlighter.regenerate_paren_positions()
self.text_cursor_changed()
self.template_value.setText(
SafeFormat().safe_format(cur_text, self.mi,
_('EXCEPTION: '), self.mi))
def text_cursor_changed(self):
cursor = self.textbox.textCursor()
position = cursor.position()
t = unicode(self.textbox.toPlainText())
if position > 0 and position <= len(t):
block_number = cursor.blockNumber()
pos_in_block = cursor.positionInBlock() - 1
self.highlighter.check_cursor_pos(t[position-1], block_number,
pos_in_block)
def function_changed(self, toWhat):
name = unicode(toWhat)
self.source_code.clear()
self.documentation.clear()
if name in self.funcs:
self.documentation.setPlainText(self.funcs[name].doc)
if name in self.builtins and name in self.builtin_source_dict:
self.source_code.setPlainText(self.builtin_source_dict[name])
else:
self.source_code.setPlainText(self.funcs[name].program_text)
def accept(self):
txt = unicode(self.textbox.toPlainText()).rstrip()
if self.coloring:
if self.colored_field.currentIndex() == -1:
error_dialog(self, _('No column chosen'),
_('You must specify a column to be colored'), show=True)
return
if not txt:
error_dialog(self, _('No template provided'),
_('The template box cannot be empty'), show=True)
return
self.rule = (unicode(self.colored_field.itemData(
self.colored_field.currentIndex()).toString()), txt)
elif self.iconing:
rt = 'icon' if self.icon_with_text.isChecked() else 'icon_only'
self.rule = (rt,
unicode(self.icon_field.itemData(
self.icon_field.currentIndex()).toString()),
txt)
else:
self.rule = ('', txt)
QDialog.accept(self)
|
pra85/calibre
|
src/calibre/gui2/dialogs/template_dialog.py
|
Python
|
gpl-3.0
| 16,701
|
# -*- coding: utf-8 -*-
"""Markov Decision Process (MDP) Toolbox: ``example`` module
=========================================================
The ``example`` module provides functions to generate valid MDP transition and
reward matrices.
Available functions
-------------------
:func:`~mdptoolbox.example.forest`
A simple forest management example
:func:`~mdptoolbox.example.rand`
A random example
:func:`~mdptoolbox.example.small`
A very small example
"""
# Copyright (c) 2011-2014 Steven A. W. Cordwell
# Copyright (c) 2009 INRA
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the <ORGANIZATION> nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import numpy as _np
import scipy.sparse as _sp
def forest(S=3, r1=4, r2=2, p=0.1, is_sparse=False):
"""Generate a MDP example based on a simple forest management scenario.
This function is used to generate a transition probability
(``A`` × ``S`` × ``S``) array ``P`` and a reward (``S`` × ``A``) matrix
``R`` that model the following problem. A forest is managed by two actions:
'Wait' and 'Cut'. An action is decided each year with first the objective
to maintain an old forest for wildlife and second to make money selling cut
wood. Each year there is a probability ``p`` that a fire burns the forest.
Here is how the problem is modelled.
Let {0, 1 . . . ``S``-1 } be the states of the forest, with ``S``-1 being
the oldest. Let 'Wait' be action 0 and 'Cut' be action 1.
After a fire, the forest is in the youngest state, that is state 0.
The transition matrix ``P`` of the problem can then be defined as follows::
| p 1-p 0.......0 |
| . 0 1-p 0....0 |
P[0,:,:] = | . . 0 . |
| . . . |
| . . 1-p |
| p 0 0....0 1-p |
| 1 0..........0 |
| . . . |
P[1,:,:] = | . . . |
| . . . |
| . . . |
| 1 0..........0 |
The reward matrix R is defined as follows::
| 0 |
| . |
R[:,0] = | . |
| . |
| 0 |
| r1 |
| 0 |
| 1 |
R[:,1] = | . |
| . |
| 1 |
| r2 |
Parameters
    ----------
S : int, optional
The number of states, which should be an integer greater than 1.
Default: 3.
r1 : float, optional
The reward when the forest is in its oldest state and action 'Wait' is
performed. Default: 4.
r2 : float, optional
The reward when the forest is in its oldest state and action 'Cut' is
performed. Default: 2.
p : float, optional
        The probability of wild fire occurrence, in the range ]0, 1[. Default:
0.1.
is_sparse : bool, optional
If True, then the probability transition matrices will be returned in
sparse format, otherwise they will be in dense format. Default: False.
Returns
-------
out : tuple
``out[0]`` contains the transition probability matrix P and ``out[1]``
contains the reward matrix R. If ``is_sparse=False`` then P is a numpy
array with a shape of ``(A, S, S)`` and R is a numpy array with a shape
of ``(S, A)``. If ``is_sparse=True`` then P is a tuple of length ``A``
where each ``P[a]`` is a scipy sparse CSR format matrix of shape
``(S, S)``; R remains the same as in the case of ``is_sparse=False``.
Examples
--------
>>> import mdptoolbox.example
>>> P, R = mdptoolbox.example.forest()
>>> P
array([[[ 0.1, 0.9, 0. ],
[ 0.1, 0. , 0.9],
[ 0.1, 0. , 0.9]],
<BLANKLINE>
[[ 1. , 0. , 0. ],
[ 1. , 0. , 0. ],
[ 1. , 0. , 0. ]]])
>>> R
array([[ 0., 0.],
[ 0., 1.],
[ 4., 2.]])
>>> Psp, Rsp = mdptoolbox.example.forest(is_sparse=True)
>>> len(Psp)
2
>>> Psp[0]
<3x3 sparse matrix of type '<... 'numpy.float64'>'
with 6 stored elements in Compressed Sparse Row format>
>>> Psp[1]
<3x3 sparse matrix of type '<... 'numpy.int64'>'
with 3 stored elements in Compressed Sparse Row format>
>>> Rsp
array([[ 0., 0.],
[ 0., 1.],
[ 4., 2.]])
>>> (Psp[0].todense() == P[0]).all()
True
>>> (Rsp == R).all()
True
"""
assert S > 1, "The number of states S must be greater than 1."
    assert (r1 > 0) and (r2 > 0), "The rewards must be positive."
assert 0 <= p <= 1, "The probability p must be in [0; 1]."
# Definition of Transition matrix
if is_sparse:
P = []
rows = list(range(S)) * 2
cols = [0] * S + list(range(1, S)) + [S - 1]
vals = [p] * S + [1-p] * S
P.append(_sp.coo_matrix((vals, (rows, cols)), shape=(S, S)).tocsr())
rows = list(range(S))
cols = [0] * S
vals = [1] * S
P.append(_sp.coo_matrix((vals, (rows, cols)), shape=(S, S)).tocsr())
else:
P = _np.zeros((2, S, S))
P[0, :, :] = (1 - p) * _np.diag(_np.ones(S - 1), 1)
P[0, :, 0] = p
P[0, S - 1, S - 1] = (1 - p)
P[1, :, :] = _np.zeros((S, S))
P[1, :, 0] = 1
# Definition of Reward matrix
R = _np.zeros((S, 2))
R[S - 1, 0] = r1
R[:, 1] = _np.ones(S)
R[0, 1] = 0
R[S - 1, 1] = r2
return(P, R)
def _randDense(states, actions, mask):
"""Generate random dense ``P`` and ``R``. See ``rand`` for details.
"""
# definition of transition matrix : square stochastic matrix
P = _np.zeros((actions, states, states))
# definition of reward matrix (values between -1 and +1)
R = _np.zeros((actions, states, states))
for action in range(actions):
for state in range(states):
# create our own random mask if there is no user supplied one
if mask is None:
m = _np.random.random(states)
r = _np.random.random()
m[m <= r] = 0
m[m > r] = 1
elif mask.shape == (actions, states, states):
m = mask[action][state] # mask[action, state, :]
else:
m = mask[state]
# Make sure that there is atleast one transition in each state
if m.sum() == 0:
m[_np.random.randint(0, states)] = 1
P[action][state] = m * _np.random.random(states)
P[action][state] = P[action][state] / P[action][state].sum()
R[action][state] = (m * (2 * _np.random.random(states) -
_np.ones(states, dtype=int)))
return(P, R)
def _randSparse(states, actions, mask):
"""Generate random sparse ``P`` and ``R``. See ``rand`` for details.
"""
# definition of transition matrix : square stochastic matrix
P = [None] * actions
# definition of reward matrix (values between -1 and +1)
R = [None] * actions
for action in range(actions):
# it may be more efficient to implement this by constructing lists
# of rows, columns and values then creating a coo_matrix, but this
# works for now
PP = _sp.dok_matrix((states, states))
RR = _sp.dok_matrix((states, states))
for state in range(states):
if mask is None:
m = _np.random.random(states)
m[m <= 2/3.0] = 0
m[m > 2/3.0] = 1
elif mask.shape == (actions, states, states):
m = mask[action][state] # mask[action, state, :]
else:
m = mask[state]
n = int(m.sum()) # m[state, :]
if n == 0:
m[_np.random.randint(0, states)] = 1
n = 1
# find the columns of the vector that have non-zero elements
nz = m.nonzero()
if len(nz) == 1:
cols = nz[0]
else:
cols = nz[1]
vals = _np.random.random(n)
vals = vals / vals.sum()
reward = 2*_np.random.random(n) - _np.ones(n)
PP[state, cols] = vals
RR[state, cols] = reward
# PP.tocsr() takes the same amount of time as PP.tocoo().tocsr()
# so constructing PP and RR as coo_matrix in the first place is
# probably "better"
P[action] = PP.tocsr()
R[action] = RR.tocsr()
return(P, R)
def rand(S, A, is_sparse=False, mask=None):
"""Generate a random Markov Decision Process.
Parameters
----------
S : int
Number of states (> 1)
A : int
Number of actions (> 1)
is_sparse : bool, optional
False to have matrices in dense format, True to have sparse matrices.
Default: False.
mask : array, optional
Array with 0 and 1 (0 indicates a place for a zero probability), shape
can be ``(S, S)`` or ``(A, S, S)``. Default: random.
Returns
-------
out : tuple
``out[0]`` contains the transition probability matrix P and ``out[1]``
contains the reward matrix R. If ``is_sparse=False`` then P is a numpy
array with a shape of ``(A, S, S)`` and R is a numpy array with a shape
of ``(S, A)``. If ``is_sparse=True`` then P and R are tuples of length
``A``, where each ``P[a]`` is a scipy sparse CSR format matrix of shape
``(S, S)`` and each ``R[a]`` is a scipy sparse csr format matrix of
shape ``(S, 1)``.
Examples
--------
>>> import numpy, mdptoolbox.example
>>> numpy.random.seed(0) # Needed to get the output below
>>> P, R = mdptoolbox.example.rand(4, 3)
>>> P
array([[[ 0.21977283, 0.14889403, 0.30343592, 0.32789723],
[ 1. , 0. , 0. , 0. ],
[ 0. , 0.43718772, 0.54480359, 0.01800869],
[ 0.39766289, 0.39997167, 0.12547318, 0.07689227]],
<BLANKLINE>
[[ 1. , 0. , 0. , 0. ],
[ 0.32261337, 0.15483812, 0.32271303, 0.19983549],
[ 0.33816885, 0.2766999 , 0.12960299, 0.25552826],
[ 0.41299411, 0. , 0.58369957, 0.00330633]],
<BLANKLINE>
[[ 0.32343037, 0.15178596, 0.28733094, 0.23745272],
[ 0.36348538, 0.24483321, 0.16114188, 0.23053953],
[ 1. , 0. , 0. , 0. ],
[ 0. , 0. , 1. , 0. ]]])
>>> R
array([[[-0.23311696, 0.58345008, 0.05778984, 0.13608912],
[-0.07704128, 0. , -0. , 0. ],
[ 0. , 0.22419145, 0.23386799, 0.88749616],
[-0.3691433 , -0.27257846, 0.14039354, -0.12279697]],
<BLANKLINE>
[[-0.77924972, 0. , -0. , -0. ],
[ 0.47852716, -0.92162442, -0.43438607, -0.75960688],
[-0.81211898, 0.15189299, 0.8585924 , -0.3628621 ],
[ 0.35563307, -0. , 0.47038804, 0.92437709]],
<BLANKLINE>
[[-0.4051261 , 0.62759564, -0.20698852, 0.76220639],
[-0.9616136 , -0.39685037, 0.32034707, -0.41984479],
[-0.13716313, 0. , -0. , -0. ],
[ 0. , -0. , 0.55810204, 0. ]]])
>>> numpy.random.seed(0) # Needed to get the output below
>>> Psp, Rsp = mdptoolbox.example.rand(100, 5, is_sparse=True)
>>> len(Psp), len(Rsp)
(5, 5)
>>> Psp[0]
<100x100 sparse matrix of type '<... 'numpy.float64'>'
with 3296 stored elements in Compressed Sparse Row format>
>>> Rsp[0]
<100x100 sparse matrix of type '<... 'numpy.float64'>'
with 3296 stored elements in Compressed Sparse Row format>
>>> # The number of non-zero elements (nnz) in P and R are equal
>>> Psp[1].nnz == Rsp[1].nnz
True
"""
# making sure the states and actions are more than one
assert S > 1, "The number of states S must be greater than 1."
assert A > 1, "The number of actions A must be greater than 1."
# if the user hasn't specified a mask, then we will make a random one now
if mask is not None:
# the mask needs to be SxS or AxSxS
try:
assert mask.shape in ((S, S), (A, S, S)), (
"'mask' must have dimensions S×S or A×S×S."
)
except AttributeError:
raise TypeError("'mask' must be a numpy array or matrix.")
# generate the transition and reward matrices based on S, A and mask
if is_sparse:
P, R = _randSparse(S, A, mask)
else:
P, R = _randDense(S, A, mask)
return(P, R)
def small():
"""A very small Markov decision process.
The probability transition matrices are::
| | 0.5 0.5 | |
| | 0.8 0.2 | |
P = | |
| | 0.0 1.0 | |
| | 0.1 0.9 | |
The reward matrix is::
R = | 5 10 |
| -1 2 |
Returns
=======
out : tuple
        ``out[0]`` is a numpy array of the probability transition matrices.
        ``out[1]`` is a numpy array of the reward matrix.
Examples
========
>>> import mdptoolbox.example
>>> P, R = mdptoolbox.example.small()
>>> P
array([[[ 0.5, 0.5],
[ 0.8, 0.2]],
<BLANKLINE>
[[ 0. , 1. ],
[ 0.1, 0.9]]])
>>> R
array([[ 5, 10],
[-1, 2]])
"""
P = _np.array([[[0.5, 0.5], [0.8, 0.2]], [[0, 1], [0.1, 0.9]]])
R = _np.array([[5, 10], [-1, 2]])
return(P, R)
|
McCabeJM/pymdptoolbox
|
src/mdptoolbox/example.py
|
Python
|
bsd-3-clause
| 15,234
|
# -*- coding: utf-8 -*-
from django.test import TestCase
from django.core.urlresolvers import reverse
from openbudgets.apps.pages import factories
class PageTestCase(TestCase):
"""Tests for pages.Page objects and their related views. urls, etc."""
def setUp(self):
self.page = factories.Page.create()
def test_page_detailview(self):
detailview = reverse('page', args=(self.page.slug,))
response = self.client.get(detailview)
self.assertEqual(response.status_code, 200)
self.assertTrue('page' in response.context)
def test_404_page(self):
"""Does the 404 page work?"""
response = self.client.get('/this-page-can-not-possibly-exist-here/')
self.assertEqual(response.status_code, 404)
|
shaib/openbudgets
|
openbudgets/apps/pages/tests.py
|
Python
|
bsd-3-clause
| 771
|
#coding=utf-8
import time
import logging
import os
class BasePage():
    '''Base page class.'''
    # Initialization
def __init__(self,selenium_driver,base_url):
self.driver = selenium_driver
self.base_url = base_url
#self.pagetitle = pagetitle
self.timeout = 10
    # Open different sub-pages
def _open(self, url):
#print("The url is %s" % url)
self.driver.get(url)
self.driver.maximize_window()
time.sleep(2)
#assert self.driver.current_url == url, 'Did not load on %s' % url
def open(self):
self._open(self.base_url)
    def implicitly_wait(self, time):  # implicit wait time
self.driver.implicitly_wait(time)
    # Wrapper around element locating
def find_element(self, *loc):
return self.driver.find_element(*loc)
    def hadle(self):
        now_handle = self.driver.current_window_handle  # get the current window handle
        return now_handle
def switch(self):
self.driver.switch_to_frame("g_iframe")
    # Screenshot method
def get_windows_img(self):
logger = logging.getLogger(__name__)
file_path = 'D:/screenshots/'
rq = time.strftime('%Y%m%d%H%M%S', time.localtime(time.time()))
screen_name = file_path + rq + '.png'
print(screen_name)
try:
self.driver.get_screenshot_as_file(screen_name)
logger.info("Had take screenshot and save to folder : /screenshots")
except NameError as e:
logger.error("Failed to take screenshot! %s" % e)
|
gtla6224422/atx_script
|
page_object/BasePage.py
|
Python
|
apache-2.0
| 1,539
|
"""Function-like object creating triclinic lattices.
The following lattice creator is defined:
Triclinic
"""
from ase.lattice.bravais import Bravais
import numpy as np
from ase.data import reference_states as _refstate
class TriclinicFactory(Bravais):
"A factory for creating triclinic lattices."
# The name of the crystal structure in ChemicalElements
xtal_name = "triclinic"
# The natural basis vectors of the crystal structure
int_basis = np.array([[1, 0, 0],
[0, 1, 0],
[0, 0, 1]])
basis_factor = 1.0
# Converts the natural basis back to the crystallographic basis
inverse_basis = np.array([[1, 0, 0],
[0, 1, 0],
[0, 0, 1]])
inverse_basis_factor = 1.0
def get_lattice_constant(self):
"Get the lattice constant of an element with triclinic crystal structure."
if _refstate[self.atomicnumber]['symmetry'] != self.xtal_name:
raise ValueError(('Cannot guess the %s lattice constant of'
+ ' an element with crystal structure %s.')
% (self.xtal_name,
_refstate[self.atomicnumber]['symmetry']))
return _refstate[self.atomicnumber].copy()
def make_crystal_basis(self):
"Make the basis matrix for the crystal unit cell and the system unit cell."
lattice = self.latticeconstant
if type(lattice) == type({}):
a = lattice['a']
try:
b = lattice['b']
except KeyError:
b = a * lattice['b/a']
try:
c = lattice['c']
except KeyError:
c = a * lattice['c/a']
alpha = lattice['alpha']
beta = lattice['beta']
gamma = lattice['gamma']
else:
if len(lattice) == 6:
(a,b,c,alpha,beta,gamma) = lattice
else:
raise ValueError, "Improper lattice constants for triclinic crystal."
degree = np.pi / 180.0
cosa = np.cos(alpha*degree)
cosb = np.cos(beta*degree)
sinb = np.sin(beta*degree)
cosg = np.cos(gamma*degree)
sing = np.sin(gamma*degree)
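        # Standard crystallographic construction: a lies along x, b lies in
        # the xy-plane at angle gamma to a, and c is oriented so the pairwise
        # dot products reproduce cos(alpha), cos(beta) and cos(gamma)
        # (verified by the asserts below).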
lattice = np.array([[a,0,0],
[b*cosg, b*sing,0],
[c*cosb, c*(cosa-cosb*cosg)/sing,
c*np.sqrt(sinb**2 - ((cosa-cosb*cosg)/sing)**2)]])
self.latticeconstant = lattice
self.miller_basis = lattice
self.crystal_basis = (self.basis_factor *
np.dot(self.int_basis, lattice))
self.basis = np.dot(self.directions, self.crystal_basis)
assert abs(np.dot(lattice[0],lattice[1]) - a*b*cosg) < 1e-5
assert abs(np.dot(lattice[0],lattice[2]) - a*c*cosb) < 1e-5
assert abs(np.dot(lattice[1],lattice[2]) - b*c*cosa) < 1e-5
assert abs(np.dot(lattice[0],lattice[0]) - a*a) < 1e-5
assert abs(np.dot(lattice[1],lattice[1]) - b*b) < 1e-5
assert abs(np.dot(lattice[2],lattice[2]) - c*c) < 1e-5
Triclinic = TriclinicFactory()
|
grhawk/ASE
|
tools/ase/lattice/triclinic.py
|
Python
|
gpl-2.0
| 3,214
|
from flask import Flask, request, redirect, render_template, session, flash
from mysqlconnection import MySQLConnector
import re
app = Flask(__name__)
mysql = MySQLConnector(app, 'emailval')
app.secret_key = 'secret'
EMAIL_REGEX = re.compile(r'^[a-zA-Z0-9.+_-]+@[a-zA-Z0-9._-]+\.[a-zA-Z]+$')
@app.route('/')
def validation():
return render_template('validation.html')
@app.route('/emails', methods=['POST'])
def email():
if not EMAIL_REGEX.match(request.form['buttonbox']):
        flash('invalid email')
return redirect('/')
else:
        flash('Great Job!')
query = "INSERT INTO email (email,updated_at,created_at) VALUES (:email,NOW(),NOW())"
data = {'email':request.form['buttonbox']}
mysql.query_db(query,data)
query = "SELECT created_at FROM email"
query = "SELECT * FROM email"
email = mysql.query_db(query)
# if len(request.form['buttonbox']) < 1:
# flash('need a proper emale')
return render_template('email.html', email = email)
# @app.route('/emails')
# def show(email_id):
# query = "SELECT * FROM email WHERE id = :specific_id"
# data = {'specific_id': email_id}
# emails = mysql.query_db(query, data)
# return render_template('email.html', email = email)
@app.route('/delete/<id>')
def delete(id):
query = "DELETE FROM email WHERE id = :id"
data = {'id': id}
mysql.query_db(query, data)
flash("The email address ID {} has been deleted".format(id))
query = "SELECT * FROM email"
email = mysql.query_db(query)
return render_template('email.html', email = email)
app.run(debug=True)
|
jiobert/python
|
Smith_Ben/Assignments/email_validation/emai.py
|
Python
|
mit
| 1,552
|
import argparse
import pandas
import os
import numpy as np
import matplotlib.pyplot as plt
import functools
from collections import Counter
import math
import random
def accuracy(decision_vector):
ok = len([s[2] for s in decision_vector if confusion_matrix_label(s) in ['TP', 'TN']])
return float(ok) / len(decision_vector)
def determine_bias(mu0, mu1, weights, J, f=accuracy):
bias = 0
dr = functools.partial(decision_rule, bias=bias)
predictor = np.vectorize(dr)
D, S, S0, S1 = calculate_decision_vector(predictor, mu1, mu0, weights, J)
decision_vector = np.column_stack((D,J,S,S,D))
confusion_matrix, labels = calculate_confusion_matrix(decision_vector)
max_v = 0
max_i = 0
for i, e in enumerate(decision_vector):
if labels[i] in ['FP', 'FN']:
dr = functools.partial(decision_rule, bias=e[3])
predictor = np.vectorize(dr)
D, S, S0, S1 = calculate_decision_vector(predictor, mu1, mu0, weights, J)
dv = np.column_stack((D,J,S,S,D))
confusion_matrix, labels = calculate_confusion_matrix(dv)
v = f(dv)
max_v = max_v if max_v > v else v
max_i = max_i if max_v > v else i
#print('{}/{} - {} | {}'.format(i, len(decision_vector), max_v, decision_vector[max_i][3]))
return decision_vector[max_i][3]
def decision_rule(s, eta1=0, eta0=0, l1=1, l0=0, bias=0):
if s > bias:
if s > eta1:
return 1
else:
return l1
else:
if s < eta0:
return 0
else:
return l0
def confusion_matrix_label(o,i=0):
if o[1] == 1:
return 'TP' if o[i] == 1 else 'FN'
else:
return 'TN' if o[i] == 0 else 'FP'
def calculate_confusion_matrix(decision_vector):
    cf_label = np.array(list(map(confusion_matrix_label, decision_vector)))
return Counter(cf_label), cf_label
def calculate_decision_vector(predictor, mu1, mu0, weights, J):
S1 = np.matmul(weights, mu1)
S0 = np.matmul(weights, mu0)
S = S1 - S0
D = predictor(S)
return D, S, S0, S1
def main(args):
weights = pandas.read_table(args.weights, delim_whitespace=True, header=None)
mu0 = pandas.read_table(args.mu0, delim_whitespace=True, header=None)
mu1 = pandas.read_table(args.mu1, delim_whitespace=True, header=None)
J = pandas.read_table(args.outcomes, delim_whitespace=True, header=None)[:len(weights)]
weights = weights.values
mu0 = mu0.values
mu1 = mu1.values
J = J.values
bias = determine_bias(mu0, mu1, weights, J)
print(bias)
def parse_args(parser):
args = parser.parse_args()
# Check path
return args
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Adjust model')
parser.add_argument('--weights', type=str)
parser.add_argument('--mu0', type=str)
parser.add_argument('--mu1', type=str)
parser.add_argument('--outcomes', type=str)
parser.add_argument('--l1', default=1, type=int)
parser.add_argument('--l0', default=0, type=int)
parser.add_argument('--eta1', default=0., type=float)
parser.add_argument('--eta0', default=0., type=float)
args = parse_args(parser)
main(args)
|
aquemy/HCBR
|
script/biais.py
|
Python
|
mit
| 3,235
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import eventlet
from oslo_config import cfg
from eventlet import wsgi
from st2common import log as logging
from st2common.service_setup import setup as common_setup
from st2common.service_setup import teardown as common_teardown
from st2common.util.wsgi import shutdown_server_kill_pending_requests
from st2stream.signal_handlers import register_stream_signal_handlers
from st2stream.listener import get_listener_if_set
from st2stream import config
config.register_opts()
from st2stream import app
__all__ = [
'main'
]
eventlet.monkey_patch(
os=True,
select=True,
socket=True,
thread=False if '--use-debugger' in sys.argv else True,
time=True)
LOG = logging.getLogger(__name__)
# How much time to give to the request in progress to finish in seconds before killing them
WSGI_SERVER_REQUEST_SHUTDOWN_TIME = 2
def _setup():
common_setup(service='stream', config=config, setup_db=True, register_mq_exchanges=True,
register_signal_handlers=True, register_internal_trigger_types=False,
run_migrations=False)
def _run_server():
host = cfg.CONF.stream.host
port = cfg.CONF.stream.port
LOG.info('(PID=%s) ST2 Stream API is serving on http://%s:%s.', os.getpid(), host, port)
max_pool_size = eventlet.wsgi.DEFAULT_MAX_SIMULTANEOUS_REQUESTS
worker_pool = eventlet.GreenPool(max_pool_size)
sock = eventlet.listen((host, port))
def queue_shutdown(signal_number, stack_frame):
eventlet.spawn_n(shutdown_server_kill_pending_requests, sock=sock,
worker_pool=worker_pool, wait_time=WSGI_SERVER_REQUEST_SHUTDOWN_TIME)
# We register a custom SIGINT handler which allows us to kill long running active requests.
# Note: Eventually we will support draining (waiting for short-running requests), but we
# will still want to kill long running stream requests.
register_stream_signal_handlers(handler_func=queue_shutdown)
wsgi.server(sock, app.setup_app(), custom_pool=worker_pool)
return 0
def _teardown():
common_teardown()
def main():
try:
_setup()
return _run_server()
except SystemExit as exit_code:
sys.exit(exit_code)
except KeyboardInterrupt:
listener = get_listener_if_set()
if listener:
listener.shutdown()
except Exception:
LOG.exception('(PID=%s) ST2 Stream API quit due to exception.', os.getpid())
return 1
finally:
_teardown()
|
punalpatel/st2
|
st2stream/st2stream/cmd/api.py
|
Python
|
apache-2.0
| 3,288
|
#!/usr/bin/env python
import sys
import re
import getopt
from feed_maker_util import IO
def main():
state = 0
url_prefix = "https://spring.io"
result_list = []
num_of_recent_feeds = 1000
optlist, _ = getopt.getopt(sys.argv[1:], "n:")
for o, a in optlist:
if o == '-n':
num_of_recent_feeds = int(a)
for line in IO.read_stdin_as_line_list():
if state == 0:
if re.search(r'<h2 class="blog--title">', line):
state = 1
elif state == 1:
m = re.search(r'<a href="(?P<link>[^"]+)">(?P<title>.*)</a>', line)
if m:
link = url_prefix + m.group("link")
title = m.group("title")
if re.search(r'([Bb]ootiful ([Pp]odcast|GCP)|[Aa]vailable|[Rr]eleased|(\d+\.\d+\.\d+(.| )(M\d+|RC\d+|RELEASE)\)?$)|This [Ww]eek|now GA|goes (GA|RC\d+)|is out|SpringOne2GX|[Ww]ebinar|SR\d)', title):
state = 0
continue
                title = re.sub(r'&amp;', '&', title)
state = 2
elif state == 2:
m = re.search(r'<time class=("|\')date("|\')[^>]*datetime="(?P<date>20\d+-\d+-\d+) ', line)
if m:
date = m.group("date")
title = date + " " + title
result_list.append((link, title))
state = 0
for (link, title) in result_list[:num_of_recent_feeds]:
print("%s\t%s" % (link, title))
if __name__ == "__main__":
sys.exit(main())
|
terzeron/FeedMakerApplications
|
study/_spring/capture_item_link_title.py
|
Python
|
gpl-2.0
| 1,528
|
"""
Scripts to automate running the DEIMOS spec2d pipeline (in IDL)
"""
from __future__ import division, print_function
import os
import sys
import time
invoke_spec2d_templ = "et_domask,'{planfn}'\nexit\n"
def invoke_spec2d(path, maskname):
"""
Runs spec2d in the given path, assuming there's a {maskname}.plan file
Note that you have to manually close the returned proc.stdout!
"""
import subprocess
planfn = os.path.abspath(os.path.join(path, maskname + '.plan'))
logfn = os.path.abspath(os.path.join(path, maskname + '.log'))
if not os.path.isdir(path):
raise IOError('"{0}" is not a directory!'.format(path))
if not os.path.isfile(planfn):
raise IOError('Plan file "{0}" does not exist!'.format(planfn))
logf = open(logfn, 'w')
proc = subprocess.Popen('idl', cwd=path, stdin=subprocess.PIPE,
stdout=logf, stderr=subprocess.STDOUT)
proc.stdin.write(invoke_spec2d_templ.format(**locals()))
# proc = subprocess.Popen('ls', cwd=path, stdin=None,
# stdout=logf, stderr=subprocess.STDOUT)
proc.maskname = maskname
return proc
def try_finish_spec2d(proc):
if proc.poll() is None:
return False
else:
if proc.returncode != 0:
print('The process for plan file "{0}" returned {1}... '
'possible problem? Check logs.'.format(proc.maskname, proc.returncode))
if proc.stdout is not None and not proc.stdout.closed:
proc.stdout.close()
if proc.stderr is not None and not proc.stderr.closed:
proc.stderr.close()
return True
def find_unfinished_planfiles(msknames):
planfiles = []
for nm in msknames:
if os.path.isfile(nm):
planfiles.append(nm)
elif os.path.isdir(nm):
path, name = os.path.split(nm)
if name == '':
nm = path
path, name = os.path.split(nm)
planfiles.append(os.path.join(path, name, name + '.plan'))
for i, pf in reversed(list(enumerate(planfiles))):
path, name = os.path.split(pf)
if os.path.isfile(os.path.join(path, 'doneprocessing.txt')):
print("doneprocessing was found for", name, 'skipping!')
del planfiles[i]
return planfiles
def scatter_spec2ds(planpaths, maxtorun=2, waittime=1, verbose=True):
"""
`planpaths` is list of planfiles
`maxtorun` is the number of simultaneous processes to run
`waittime` is the time in sec to wait between polling
"""
procsdone = []
procsrunning = []
toinvoke = []
for plp in planpaths:
if plp.endswith('.plan'):
plp = plp[:-5]
path, name = os.path.split(plp)
toinvoke.append((path, name))
sleepsdone = 0
while len(toinvoke) > 0 or len(procsrunning) > 0:
#first check if any are running that have finished
for i, p in reversed(list(enumerate(procsrunning))):
if try_finish_spec2d(p): # True -> proc done
if verbose:
print('\nFinished spec2d for', p.maskname)
del procsrunning[i]
procsdone.append(p)
sleepsdone = 0
#now try to invoke any that remain to be invoked
rem_from_toinvoke = []
for i, (path, name) in enumerate(toinvoke):
if len(procsrunning) < maxtorun:
if verbose:
print('\nInvoking spec2d for', path, name)
procsrunning.append(invoke_spec2d(path, name))
rem_from_toinvoke.append(i)
sleepsdone = 0
for i in reversed(sorted(rem_from_toinvoke)):
del toinvoke[i]
if verbose:
sys.stdout.write('Sleeping for {0} sec\r'.format(waittime*sleepsdone))
sys.stdout.flush()
time.sleep(waittime)
sleepsdone += 1
return dict([(p.maskname, p) for p in procsdone])
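# A minimal usage sketch (hypothetical paths, not from the original module):
# run spec2d over two masks with at most two concurrent IDL processes.
#
# plans = find_unfinished_planfiles(['masks/maskA', 'masks/maskB'])
# procs = scatter_spec2ds(plans, maxtorun=2, waittime=5)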
|
eteq/erikutils
|
erikutils/spec2d_runner.py
|
Python
|
mit
| 3,998
|
from django.core import urlresolvers
from django.utils.translation import ugettext_lazy as _
from django.template.defaultfilters import timesince, floatformat
from horizon import tables
from horizon.templatetags.sizeformat import mbformat
class CSVSummary(tables.LinkAction):
name = "csv_summary"
verbose_name = _("Download CSV Summary")
classes = ("btn-download",)
iconfont = "iconfont icon-folderadd media-object"
card = "card card-blue"
def get_link_url(self, usage=None):
return self.table.kwargs['usage'].csv_link()
class BaseUsageTable(tables.DataTable):
vcpus = tables.Column('vcpus', verbose_name=_("VCPUs"))
disk = tables.Column('local_gb', verbose_name=_("Disk"))
memory = tables.Column('memory_mb',
verbose_name=_("RAM"),
filters=(mbformat,),
attrs={"data-type": "size"})
hours = tables.Column('vcpu_hours', verbose_name=_("VCPU Hours"),
filters=(lambda v: floatformat(v, 2),))
class GlobalUsageTable(BaseUsageTable):
tenant = tables.Column('tenant_name', verbose_name=_("Project Name"))
disk_hours = tables.Column('disk_gb_hours',
verbose_name=_("Disk GB Hours"),
filters=(lambda v: floatformat(v, 2),))
def get_object_id(self, datum):
return datum.tenant_id
class Meta:
name = "global_usage"
verbose_name = _("Usage Summary")
columns = ("tenant", "vcpus", "disk", "memory",
"hours", "disk_hours")
table_actions = (CSVSummary,)
multi_select = False
def get_instance_link(datum):
view = "horizon:project:instances:detail"
if datum.get('instance_id', False):
return urlresolvers.reverse(view, args=(datum.get('instance_id'),))
else:
return None
class TenantUsageTable(BaseUsageTable):
instance = tables.Column('name',
verbose_name=_("Instance Name"),
link=get_instance_link)
uptime = tables.Column('uptime_at',
verbose_name=_("Uptime"),
filters=(timesince,))
def get_object_id(self, datum):
return datum.get('instance_id', id(datum))
class Meta:
name = "tenant_usage"
verbose_name = _("Usage Summary")
columns = ("instance", "vcpus", "disk", "memory", "uptime")
table_actions = ()
multi_select = False
|
MKTCloud/MKTCloud
|
openstack_dashboard/usage/tables.py
|
Python
|
apache-2.0
| 2,531
|
# Copyright (c) 2011-2015 Rackspace US, Inc.
# All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Incubator utilities for :mod:`simpl.rest`."""
import traceback
import bottle
import six # pylint: disable=wrong-import-order
import voluptuous as volup # pylint: disable=wrong-import-order
from simpl import rest as simpl_rest
class MultiValidationError(Exception):
"""Basically a re-imagining of a `voluptuous.MultipleInvalid` error.
    Reformats multiple error messages for easy debugging of invalid
Checkmatefiles.
"""
def __init__(self, errors):
"""MultiValidationError constructor.
:param errors:
List of `voluptuous.Invalid` or `voluptuous.MultipleInvalid`
exception objects.
"""
self.errors = errors
self.message = self._generate_message()
def __str__(self):
"""Just return the pre-computed message.
See :meth:`_generate_message`.
"""
return self.message
def __repr__(self):
"""Simple representation of the exception, with the full message."""
indented_message = '\n'.join(
sorted('\t' + x for x in self.message.split('\n'))
)
return (
'%(cls_name)s(\n%(message)s\n)'
% dict(cls_name=self.__class__.__name__, message=indented_message)
)
def _generate_message(self):
"""Reformat `path` attributes of each `error` and create a new message.
Join `path` attributes together in a more readable way, to enable easy
debugging of an invalid Checkmatefile.
:returns:
Reformatted error paths and messages, as a multi-line string.
"""
reformatted_paths = (
''.join(
"[%s]" % str(x)
# If it's not a string, don't put quotes around it. We do this,
# for example, when the value is an int, in the case of a list
# index.
if isinstance(x, six.integer_types)
# Otherwise, assume the path node is a string and put quotes
# around the key name, as if we were drilling down into a
# nested dict.
else "['%s']" % str(x)
for x in error.path
)
for error in self.errors
)
messages = (error.msg for error in self.errors)
# combine each path with its message:
zipped = zip(reformatted_paths, messages)
combined_messages = (
'%(path)s: %(messages)s' % dict(path=path, messages=message)
for path, message in zipped
)
return '\n'.join(sorted(combined_messages))
def coerce_one(schema=str):
"""Expect the input sequence to contain a single value.
:keyword schema:
Custom schema to apply to the input value. Defaults to just string,
since this is designed for query params.
"""
def validate(val):
"""Unpack a single item from the inputs sequence and run validation.
NOTE(larsbutler): This code is highly opinionated for bottle, since
bottle query params are wrapped in a list, even if there is just a
single value for a given parameter.
"""
[value] = val
return volup.Coerce(schema)(value)
return validate
def coerce_many(schema=str):
"""Expect the input to be a sequence of items which conform to `schema`."""
def validate(val):
"""Apply schema check/version to each item."""
return [volup.Coerce(schema)(x) for x in val]
return validate
def schema(body_schema=None, body_required=False, query_schema=None, # noqa
content_types=None, default_body=None):
"""Decorator to parse and validate API body and query string.
This decorator allows one to define the entire 'schema' for an API
endpoint.
:keyword body_schema:
Callable that accepts raw data and returns the coerced (or unchanged)
body content if it is valid. Otherwise, an error should be raised.
:keyword body_required:
`True` if some body content is required by the request. Defaults to
`False`.
:keyword query_schema:
Callable that accepts raw data and returns the coerced (or unchanged)
query string content if it is valid. Otherwise, an error should be
raised.
:keyword content_types:
List of allowed contents types for request body contents. Defaults to
`['application/json']`.
:keyword default_body:
Default body value to pass to the endpoint handler if `body_required`
is `True` but no body was given. This can be useful for specifying
complex request body defaults.
"""
if not content_types:
content_types = ['application/json']
if not all('json' in t for t in content_types):
raise NotImplementedError("Only 'json' body supported.")
def deco(func):
"""Return a decorated callable."""
def wrapped(*args, **kwargs):
"""Validate/coerce request body and parameters."""
try:
# validate the request body per the schema (if applicable):
try:
body = bottle.request.json
except ValueError as exc:
raise simpl_rest.HTTPError(
body=str(exc),
status=400,
exception=exc,
traceback=traceback.format_exc(),
)
if body is None:
body = default_body
if body_required and not body:
raise simpl_rest.HTTPError(
body='Request body cannot be empty.',
status=400,
)
if body_schema:
try:
body = body_schema(body)
except volup.MultipleInvalid as exc:
raise MultiValidationError(exc.errors)
                # validate the query string per the schema (if applicable):
query = bottle.request.query.dict # pylint: disable=no-member
if query_schema is not None:
try:
query = query_schema(query)
except volup.MultipleInvalid as exc:
raise MultiValidationError(exc.errors)
if not query:
# If the query dict is empty, just set it to None.
query = None
# Conditionally add 'body' or 'schema' to kwargs.
if any([body_schema, body_required, default_body]):
kwargs['body'] = body
if query_schema:
kwargs['query'] = query
return func(
*args,
**kwargs
)
except MultiValidationError as exc:
raise simpl_rest.HTTPError(
body=str(exc),
status=400,
exception=exc,
traceback=traceback.format_exc(),
)
return wrapped
return deco
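# A minimal usage sketch (an assumption, not from the simpl source tree):
# wiring the schema decorator onto a bottle route, with coerce_one applied
# to a single-valued query parameter.
#
# @bottle.get('/widgets')
# @schema(query_schema=volup.Schema({'limit': coerce_one(int)}))
# def list_widgets(query=None):
#     limit = (query or {}).get('limit', 10)
#     ...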
|
samstav/simpl
|
simpl/incubator/rest.py
|
Python
|
apache-2.0
| 7,797
|
class InitDefaulter():
_init_defaults = {}
def __init__(self, *args, **kwargs):
for name, value in self._init_defaults.items():
if name not in kwargs:
kwargs[name] = value
super().__init__(*args, **kwargs)
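# A minimal usage sketch (hypothetical class names, not from this project):
# mix InitDefaulter in ahead of a base class whose __init__ accepts kwargs.
#
# class Chord(InitDefaulter, BaseChord):
#     _init_defaults = {'capo': 0}
#
# Chord()          # behaves like Chord(capo=0)
# Chord(capo=3)    # an explicit value wins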
|
gitaarik/jazzchords
|
apps/core/helpers/init_defaulter.py
|
Python
|
gpl-3.0
| 262
|
import numpy as np
from .shared import StaticContainerStore, StaticContainer
import mdtraj
from openpathsampling.netcdfplus import WeakLRUCache
variables = ['statics']
lazy = ['statics']
storables = ['statics']
dimensions = ['n_atoms', 'n_spatial']
def netcdfplus_init(store):
static_store = StaticContainerStore()
static_store.set_caching(WeakLRUCache(10000))
name = store.prefix + 'statics'
static_store.set_dimension_prefix_store(store)
store.storage.create_store(name, static_store, False)
store.create_variable(
'statics',
'lazyobj.' + name,
description="the snapshot index (0..n_configuration-1) of "
"snapshot '{idx}'.")
@property
def coordinates(snapshot):
"""
Returns
-------
coordinates: numpy.ndarray, shape=(atoms, 3), dtype=numpy.float32
the atomic coordinates of the configuration. The coordinates are
wrapped in a `simtk.unit.Unit`.
"""
if snapshot.statics is not None:
return snapshot.statics.coordinates
return None
@coordinates.setter
def coordinates(self, value):
if value is not None:
sc = StaticContainer(coordinates=value, box_vectors=self.box_vectors)
else:
sc = None
self.statics = sc
@property
def box_vectors(snapshot):
"""
Returns
-------
box_vectors: numpy.ndarray, shape=(3, 3), dtype=numpy.float32
the box_vectors of the configuration. The coordinates are wrapped in a
simtk.unit.Unit.
"""
if snapshot.statics is not None:
return snapshot.statics.box_vectors
return None
@box_vectors.setter
def box_vectors(self, value):
if value is not None:
sc = StaticContainer(box_vectors=value, coordinates=self.coordinates)
else:
sc = None
self.statics = sc
@property
def md(snapshot):
"""
Returns
-------
md : mdtraj.Trajectory
the actual trajectory object. Can be used with all functions from mdtraj
Notes
-----
Rather slow since the topology has to be made each time. Try to avoid it
"""
if snapshot.statics is not None:
n_atoms = snapshot.coordinates.shape[0]
output = np.zeros([1, n_atoms, 3], np.float32)
output[0, :, :] = snapshot.coordinates
return mdtraj.Trajectory(output, snapshot.topology.mdtraj)
@property
def xyz(snapshot):
"""
Returns
-------
xyz : numpy.ndarray, shape=(atoms, 3), dtype=numpy.float32
atomic coordinates without dimensions. Be careful.
"""
import simtk.unit as u
coord = snapshot.coordinates
if type(coord) is u.Quantity:
return coord._value
else:
return coord
|
jhprinz/openpathsampling
|
openpathsampling/engines/features/statics.py
|
Python
|
lgpl-2.1
| 2,708
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# source: http://code.activestate.com/recipes/439094-get-the-ip-address-associated-with-a-network-inter/
import socket
import fcntl
import struct
import sl_metro
import time
def param(ifname):
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
return socket.inet_ntoa(fcntl.ioctl(
s.fileno(),
0x8915, # SIOCGIFADDR
struct.pack('256s', ifname[:15])
)[20:24])
except IOError:
return False
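# Usage sketch (the interface name is an assumption): param('eth0') returns a
# dotted-quad string such as '192.168.1.10' on success, or False when the
# interface has no address yet; waiting() below polls for exactly that.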
def waiting(ifname):
metro = sl_metro.Metro(0.5)
is_waiting = True
while is_waiting:
if metro.update():
if param(ifname) == False:
print "waiting for IP"
else:
is_waiting = False
print "Got ip is : " , param(ifname)
time.sleep(3)
|
DiamondOhana/jphacks
|
rpi_main/sonilab/get_ip.py
|
Python
|
mit
| 806
|
from enum import Enum
#####################################################################################
# This file is generated by Json2Class (https://github.com/DragonSpawn/Json2Class) #
# Modifications to this file will be lost the next time you run the tool. #
# #
#####################################################################################
class Gender(Enum):
Unspecified = 0
Male = 1
Female = 2
|
HenrikPoulsen/Json2Class
|
test/SampleProjects/PySample/Generated/gender.py
|
Python
|
mit
| 522
|
#!/usr/bin/env python
# THIS FILE IS PART OF THE CYLC SUITE ENGINE.
# Copyright (C) 2008-2016 NIWA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Write task job files."""
import os
import re
import stat
from cylc.batch_sys_manager import BATCH_SYS_MANAGER
from cylc.cfgspec.globalcfg import GLOBAL_CFG
import cylc.flags
from cylc.task_id import TaskID
from cylc.task_message import TaskMessage
from cylc.task_outputs import (
TASK_OUTPUT_STARTED, TASK_OUTPUT_SUCCEEDED, TASK_OUTPUT_FAILED)
class JobFile(object):
"""Write task job files."""
_INSTANCE = None
@classmethod
def get_inst(cls):
"""Return a unique instance of this class."""
if cls._INSTANCE is None:
cls._INSTANCE = cls()
return cls._INSTANCE
def __init__(self):
self.suite_env = {}
def set_suite_env(self, suite_env):
"""Configure suite environment for all job files."""
self.suite_env.clear()
self.suite_env.update(suite_env)
def write(self, local_job_file_path, job_conf):
"""Write each job script section in turn."""
# ########### !!!!!!!! WARNING !!!!!!!!!!! #####################
# BE EXTREMELY WARY OF CHANGING THE ORDER OF JOB SCRIPT SECTIONS
# Users may be relying on the existing order (see for example
# the comment below on suite bin path being required before
# task runtime environment setup).
# ##############################################################
# Access to cylc must be configured before user environment so
# that cylc commands can be used in defining user environment
# variables: NEXT_CYCLE=$( cylc cycle-point --offset-hours=6 )
handle = open(local_job_file_path, 'wb')
self._write_header(handle, job_conf)
self._write_directives(handle, job_conf)
self._write_prelude(handle, job_conf)
self._write_err_trap(handle, job_conf)
self._write_init_script(handle, job_conf)
self._write_environment_1(handle, job_conf)
self._write_env_script(handle, job_conf)
# suite bin access must be before runtime environment
# because suite bin commands may be used in variable
# assignment expressions: FOO=$(command args).
self._write_suite_bin_access(handle, job_conf)
self._write_environment_2(handle, job_conf)
self._write_task_started(handle, job_conf)
self._write_identity_script(handle, job_conf)
self._write_script(handle, job_conf)
self._write_epilogue(handle, job_conf)
handle.close()
# make it executable
mode = (
os.stat(local_job_file_path).st_mode |
stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
os.chmod(local_job_file_path, mode)
@classmethod
def _write_header(cls, handle, job_conf):
"""Write job script header."""
handle.write("#!" + job_conf['job script shell'])
handle.write("\n#\n# ++++ THIS IS A CYLC TASK JOB SCRIPT ++++")
for prefix, value in [
("# Suite: ", job_conf['suite name']),
("# Task: ", job_conf['task id']),
(BATCH_SYS_MANAGER.LINE_PREFIX_JOB_LOG_DIR,
job_conf['job log dir']),
(BATCH_SYS_MANAGER.LINE_PREFIX_BATCH_SYS_NAME,
job_conf['batch system name']),
(BATCH_SYS_MANAGER.LINE_PREFIX_BATCH_SUBMIT_CMD_TMPL,
job_conf['batch submit command template']),
(BATCH_SYS_MANAGER.LINE_PREFIX_EXECUTION_TIME_LIMIT,
job_conf['execution time limit'])]:
if value:
handle.write("\n%s%s" % (prefix, value))
@classmethod
def _write_directives(cls, handle, job_conf):
"""Job directives."""
lines = BATCH_SYS_MANAGER.format_directives(job_conf)
if lines:
handle.write('\n\n# DIRECTIVES:')
for line in lines:
handle.write('\n' + line)
@classmethod
def _write_prelude(cls, handle, job_conf):
"""Job script prelude."""
if cylc.flags.debug:
if 'bash' in job_conf['job script shell']:
handle.write("\n\nPS4='+[\D{%Y%m%dT%H%M%S%z}]\u@\h '")
handle.write('\n\nset -x')
handle.write('\n\necho "JOB SCRIPT STARTING"')
# set cylc version and source profile scripts before turning on
# error trapping so that profile errors do not abort the job
handle.write('\n\nprelude() {')
keys = GLOBAL_CFG.get_host_item(
'copyable environment variables',
job_conf['host'], job_conf['owner'])
for key in keys + ['CYLC_DIR', 'CYLC_VERSION']:
if key in os.environ:
handle.write("\n export %s='%s'" % (key, os.environ[key]))
handle.write(
r'''
for FILE_NAME in \
"${HOME}/.cylc/job-init-env.sh" \
"${CYLC_DIR}/conf/job-init-env.sh" \
"${CYLC_DIR}/conf/job-init-env-default.sh"
do
if [[ -f "${FILE_NAME}" ]]; then
. "${FILE_NAME}" 1>/dev/null 2>&1
break
fi
done
}
prelude''')
@classmethod
def _write_err_trap(cls, handle, job_conf):
"""Write error trap.
Note that the job script must be bash- and ksh-compatible, hence use of
"typeset" below instead of the more sensible but bash-specific "local".
"""
args = {
"signals_str": " ".join(
BATCH_SYS_MANAGER.get_fail_signals(job_conf)),
"priority": TaskMessage.CRITICAL,
"message1": TASK_OUTPUT_FAILED,
"message2": TaskMessage.FAIL_MESSAGE_PREFIX}
handle.write(r"""
# TRAP ERROR SIGNALS:
set -u # Fail when using an undefined variable
FAIL_SIGNALS='%(signals_str)s'
TRAP_FAIL_SIGNAL() {
typeset SIGNAL=$1
echo "Received signal $SIGNAL" >&2
typeset S=
for S in ${VACATION_SIGNALS:-} $FAIL_SIGNALS; do
trap "" $S
done
if [[ -n "${CYLC_TASK_MESSAGE_STARTED_PID:-}" ]]; then
wait "${CYLC_TASK_MESSAGE_STARTED_PID}" 2>/dev/null || true
fi
cylc task message -p '%(priority)s' "%(message2)s$SIGNAL" '%(message1)s'
exit 1
}
for S in $FAIL_SIGNALS; do
trap "TRAP_FAIL_SIGNAL $S" $S
done
unset S""" % args)
vacation_signal = BATCH_SYS_MANAGER.get_vacation_signal(job_conf)
if vacation_signal:
args = {
"signals_str": vacation_signal,
"priority": TaskMessage.WARNING,
"message": TaskMessage.VACATION_MESSAGE_PREFIX}
handle.write(r"""
# TRAP VACATION SIGNALS:
VACATION_SIGNALS='%(signals_str)s'
TRAP_VACATION_SIGNAL() {
typeset SIGNAL=$1
echo "Received signal $SIGNAL" >&2
typeset S=
for S in $VACATION_SIGNALS $FAIL_SIGNALS; do
trap "" $S
done
if [[ -n "${CYLC_TASK_MESSAGE_STARTED_PID:-}" ]]; then
wait "${CYLC_TASK_MESSAGE_STARTED_PID}" 2>/dev/null || true
fi
cylc task message -p '%(priority)s' "%(message)s$SIGNAL"
exit 1
}
S=
for S in $VACATION_SIGNALS; do
trap "TRAP_VACATION_SIGNAL $S" $S
done
unset S""" % args)
if 'bash' in job_conf['job script shell']:
handle.write("\nset -o pipefail")
@classmethod
def _write_init_script(cls, handle, job_conf):
"""Init-script."""
global_init_script = GLOBAL_CFG.get_host_item(
'global init-script', job_conf["host"], job_conf["owner"])
if global_init_script:
handle.write("\n\n# GLOBAL INIT-SCRIPT:\n")
handle.write(global_init_script)
if not job_conf['init-script']:
return
handle.write("\n\n# INIT-SCRIPT:\n")
handle.write(job_conf['init-script'])
def _write_environment_1(self, handle, job_conf):
"""Suite and task environment."""
handle.write("\n\n# CYLC SUITE ENVIRONMENT:")
# write the static suite variables
for var, val in sorted(self.suite_env.items()):
handle.write("\nexport " + var + "=" + str(val))
if str(self.suite_env.get('CYLC_UTC')) == 'True':
handle.write("\nexport TZ=UTC")
handle.write("\n")
# override and write task-host-specific suite variables
suite_work_dir = GLOBAL_CFG.get_derived_host_item(
job_conf['suite name'], 'suite work directory',
job_conf['host'], job_conf['owner'])
st_env = {}
st_env['CYLC_SUITE_RUN_DIR'] = GLOBAL_CFG.get_derived_host_item(
job_conf['suite name'], 'suite run directory',
job_conf['host'], job_conf['owner'])
st_env['CYLC_SUITE_WORK_DIR'] = suite_work_dir
st_env['CYLC_SUITE_SHARE_DIR'] = GLOBAL_CFG.get_derived_host_item(
job_conf['suite name'], 'suite share directory',
job_conf['host'], job_conf['owner'])
# DEPRECATED
st_env['CYLC_SUITE_SHARE_PATH'] = '$CYLC_SUITE_SHARE_DIR'
rsp = job_conf['remote suite path']
if rsp:
st_env['CYLC_SUITE_DEF_PATH'] = rsp
else:
# replace home dir with '$HOME' for evaluation on the task host
st_env['CYLC_SUITE_DEF_PATH'] = re.sub(
os.environ['HOME'], '$HOME',
self.suite_env['CYLC_SUITE_DEF_PATH_ON_SUITE_HOST'])
for var, val in sorted(st_env.items()):
handle.write("\nexport " + var + "=" + str(val))
task_work_dir = os.path.join(
suite_work_dir, job_conf['work sub-directory'])
use_login_shell = GLOBAL_CFG.get_host_item(
'use login shell', job_conf['host'], job_conf['owner'])
comms = GLOBAL_CFG.get_host_item(
'task communication method', job_conf['host'], job_conf['owner'])
task_name, point_string = TaskID.split(job_conf['task id'])
handle.write("\n\n# CYLC TASK ENVIRONMENT:")
handle.write("\nexport CYLC_TASK_COMMS_METHOD=" + comms)
handle.write("\nexport CYLC_TASK_CYCLE_POINT=" + point_string)
handle.write("\nexport CYLC_TASK_CYCLE_TIME=" + point_string)
handle.write("\nexport CYLC_TASK_ID=" + job_conf['task id'])
handle.write(
"\nexport CYLC_TASK_IS_COLDSTART=" +
str(job_conf['is cold-start']))
handle.write(
"\nexport CYLC_TASK_LOG_ROOT=" + job_conf['job file path'])
handle.write(
"\nexport CYLC_TASK_MSG_MAX_TRIES=" +
str(GLOBAL_CFG.get(['task messaging', 'maximum number of tries'])))
handle.write(
"\nexport CYLC_TASK_MSG_RETRY_INTVL=%f" %
GLOBAL_CFG.get(['task messaging', 'retry interval']))
handle.write(
"\nexport CYLC_TASK_MSG_TIMEOUT=%f" %
GLOBAL_CFG.get(['task messaging', 'connection timeout']))
handle.write("\nexport CYLC_TASK_NAME=" + task_name)
handle.write(
'\nexport CYLC_TASK_NAMESPACE_HIERARCHY="' +
' '.join(job_conf['namespace hierarchy']) + '"')
handle.write(
"\nexport CYLC_TASK_SSH_LOGIN_SHELL=" + str(use_login_shell))
handle.write(
"\nexport CYLC_TASK_SUBMIT_NUMBER=" + str(job_conf['submit num']))
handle.write(
"\nexport CYLC_TASK_TRY_NUMBER=" +
str(job_conf['try number']))
handle.write("\nexport CYLC_TASK_WORK_DIR=" + task_work_dir)
# DEPRECATED
handle.write("\nexport CYLC_TASK_WORK_PATH=$CYLC_TASK_WORK_DIR")
handle.write("\nexport %s=$$" % (TaskMessage.CYLC_JOB_PID))
@classmethod
def _write_env_script(cls, handle, job_conf):
"""Env-script."""
if not job_conf['env-script']:
return
handle.write("\n\n# ENV-SCRIPT:\n")
handle.write(job_conf['env-script'])
@classmethod
def _write_suite_bin_access(cls, handle, _):
"""Suite bin/ directory access."""
handle.write(
"\n\n# ACCESS TO THE SUITE BIN DIRECTORY:" +
"\nexport PATH=$CYLC_SUITE_DEF_PATH/bin:$PATH")
def _write_environment_2(self, handle, job_conf):
"""Run time environment part 2."""
env = job_conf['runtime environment']
if not env:
return
# generate variable assignment expressions
handle.write("\n\n# TASK RUNTIME ENVIRONMENT:")
for var, val in env.items():
handle.write(self._get_var_assign(var, val))
# export them all now (see note)
handle.write("\nexport")
for var in env:
handle.write(" " + var)
@classmethod
def _get_var_assign(cls, var, value):
"""Generate an environment variable assignment expression 'var=value'.
Values are quoted to handle internal spaces, but escape initial tilde
(quoting disables tilde expansion).
"""
value = str(value) # (needed?)
match = re.match(r"^(~[^/\s]*/)(.*)$", value)
if match:
# ~foo/bar or ~/bar
# write as ~foo/"bar" or ~/"bar"
head, tail = match.groups()
expr = '\n%s=%s"%s"' % (var, head, tail)
elif re.match(r"^~[^\s]*$", value):
# plain ~foo or just ~
# just leave unquoted as subsequent spaces don't
# make sense in this case anyway
expr = '\n%s=%s' % (var, value)
else:
# Non tilde values - quote the lot.
# This gets values like "~one ~two" too, but these
# (in variable values) aren't expanded by the shell
# anyway so it doesn't matter.
expr = '\n%s="%s"' % (var, value)
# NOTE ON TILDE EXPANSION:
# The code above handles the following correctly:
# | ~foo/bar
# | ~/bar
# | ~/filename with spaces
# | ~foo
# | ~
# NOTE: the reason for separate export of user-specified
# variables is this: inline export does not activate the
# error trap if sub-expressions fail, e.g. (note typo in
# 'echo' command name):
# export FOO=$( ecko foo ) # error not trapped!
# FOO=$( ecko foo ) # error trapped
return expr
@classmethod
def _write_task_started(cls, handle, _):
"""Script to send start message and create work directory."""
handle.write(r"""
# SEND TASK STARTED MESSAGE:
cylc task message '%(message)s' &
CYLC_TASK_MESSAGE_STARTED_PID=$!
# SHARE DIRECTORY CREATE:
mkdir -p $CYLC_SUITE_SHARE_DIR || true
# WORK DIRECTORY CREATE:
mkdir -p $(dirname $CYLC_TASK_WORK_DIR) || true
mkdir -p $CYLC_TASK_WORK_DIR
cd $CYLC_TASK_WORK_DIR""" % {"message": TASK_OUTPUT_STARTED})
@classmethod
def _write_identity_script(cls, handle, _):
"""Write script for suite and task identity."""
handle.write(r"""
# TASK SELF-IDENTIFY:
echo "cylc Suite and Task Identity:"
echo " Suite Name : $CYLC_SUITE_NAME"
echo " Suite Host : $CYLC_SUITE_HOST"
echo " Suite Port : $CYLC_SUITE_PORT"
echo " Suite Owner : $CYLC_SUITE_OWNER"
echo " Task ID : $CYLC_TASK_ID"
if [[ $(uname) == AIX ]]; then
# on AIX the hostname command has no '-f' option
__TMP_DOMAIN=$(namerslv -sn 2>/dev/null | awk '{print $2}')
echo " Task Host : $(hostname).${__TMP_DOMAIN}"
else
echo " Task Host : $(hostname -f)"
fi
echo " Task Owner : $USER"
echo " Task Submit No.: $CYLC_TASK_SUBMIT_NUMBER"
echo " Task Try No.: $CYLC_TASK_TRY_NUMBER"
echo""")
@classmethod
def _write_script(cls, handle, job_conf):
"""Write pre-script, script, and post-script."""
for prefix in ['pre-', '', 'post-']:
value = job_conf[prefix + 'script']
if value:
handle.write("\n\n# %sSCRIPT:\n%s" % (
prefix.upper(), value))
@classmethod
def _write_epilogue(cls, handle, job_conf):
"""Write epilogue."""
handle.write(r"""
# EMPTY WORK DIRECTORY REMOVE:
cd
rmdir $CYLC_TASK_WORK_DIR 2>/dev/null || true
# SEND TASK SUCCEEDED MESSAGE:
wait "${CYLC_TASK_MESSAGE_STARTED_PID}" 2>/dev/null || true
cylc task message '%(message)s'
echo 'JOB SCRIPT EXITING (TASK SUCCEEDED)'
trap '' EXIT
""" % {"message": TASK_OUTPUT_SUCCEEDED})
handle.write("%s%s\n" % (
BATCH_SYS_MANAGER.LINE_PREFIX_EOF, job_conf['job log dir']))
|
benfitzpatrick/cylc
|
lib/cylc/job_file.py
|
Python
|
gpl-3.0
| 16,967
|
# -*- coding: utf8 -*-
#
# Copyright (C) 2014 NDP Systèmes (<http://www.ndp-systemes.fr>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
#
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
{
'name': 'Automatic Move Processing',
'version': '0.1',
'author': 'NDP Systèmes',
'maintainer': 'NDP Systèmes',
'category': 'Warehouse',
'depends': ['stock'],
'description': """
Automatic Move Processing
=========================
This module adds the possibility to have moves automatically processed as soon as the products are available in the
move source location.
It also adds the possibility to define the move as being automatic in a procurement rule.
Automatic moves are triggered by previous move when the move is chained or by the scheduler otherwise.
""",
'website': 'http://www.ndp-systemes.fr',
'data': [
'stock_auto_move_view.xml',
],
'demo': [
'stock_auto_move_demo.xml',
],
'test': [],
'installable': True,
'auto_install': False,
'license': 'AGPL-3',
'application': False,
}
|
ndp-systemes/odoo-addons
|
stock_auto_move/__openerp__.py
|
Python
|
agpl-3.0
| 1,664
|
#!/usr/bin/env python
import os,sys,time
definepath=os.getcwd()
sys.path.append("%s/bin/ftsrc/" % (definepath))
import include
try:
import psyco
psyco.full()
except ImportError:
pass
definepath=os.getcwd()
while 1==1:
include.print_banner()
menu=raw_input("""
Microsoft SQL Attack Tools
1. MSSQL Injector
2. MSSQL Bruter
3. SQLPwnage
(q)uit
Enter your choice : """)
if menu == 'q': break
# MSSQL Injector Start
if menu == '1':
sys.path.append("%s/bin/ftsrc/" % (definepath))
try:
try:
reload(sqlinjector)
except Exception: pass
import sqlinjector
except Exception,e:
print e
# Start SQL Bruter
if menu == '2':
sys.path.append("%s/bin/ftsrc/" % (definepath))
try:
try:
reload(sqlbrute)
except Exception: pass
import sqlbrute
except Exception,e:
print e
#SQLPwnage Start
if menu == '3':
sys.path.append("%s/bin/ftsrc/sqlpwnage/" % (definepath))
try:
try:
reload(sqlpwnage)
except Exception: pass
import sqlpwnage
except Exception,e:
print e
# Cancel
if menu == '4':
break
|
goofwear/raspberry_pwn
|
src/pentest/fasttrack/bin/menu/mssqlattacks.py
|
Python
|
gpl-3.0
| 1,202
|
config = {
"interfaces": {
"google.cloud.dataproc.v1beta2.WorkflowTemplateService": {
"retry_codes": {
"idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
"non_idempotent": []
},
"retry_params": {
"default": {
"initial_retry_delay_millis": 100,
"retry_delay_multiplier": 1.3,
"max_retry_delay_millis": 60000,
"initial_rpc_timeout_millis": 20000,
"rpc_timeout_multiplier": 1.0,
"max_rpc_timeout_millis": 20000,
"total_timeout_millis": 600000
}
},
"methods": {
"CreateWorkflowTemplate": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default"
},
"GetWorkflowTemplate": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default"
},
"InstantiateWorkflowTemplate": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default"
},
"UpdateWorkflowTemplate": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default"
},
"ListWorkflowTemplates": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default"
},
"DeleteWorkflowTemplate": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default"
}
}
}
}
}
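# An illustrative sketch (not part of the generated file): how the
# retry_params above are typically interpreted by the GAPIC retry layer.
# Each retry waits initial_retry_delay_millis, grown by
# retry_delay_multiplier per attempt and capped at max_retry_delay_millis,
# until total_timeout_millis is exhausted.
#
# delay = 100                              # initial_retry_delay_millis
# while elapsed_millis < 600000:           # total_timeout_millis
#     wait(delay)
#     delay = min(delay * 1.3, 60000)      # multiplier / max delay cap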
|
jonparrott/google-cloud-python
|
dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client_config.py
|
Python
|
apache-2.0
| 2,053
|
import argparse, fileinput, os, sys, subprocess
import random
import cPickle
#caffe_root = '../../caffe/' #PATH TO CAFFE ROOT
#sys.path.insert(0,caffe_root + 'python')
#import caffe
#caffe.set_device(0)
#caffe.set_mode_gpu()
#Parse the arguments
def ParseInputArguments():
parser = argparse.ArgumentParser()
# Parse input arguments
parser.add_argument('net', help = 'path to network architecture')
parser.add_argument('train', help = 'path to training data')
parser.add_argument('test', help = 'path to test data')
parser.add_argument('snapshot_prefix', help = 'prefix of the output network')
parser.add_argument('max_iter', type = int, help = 'total number of iterations')
parser.add_argument('--init', help = 'path to pre-trained model')
parser.add_argument('--init_type', choices = ['fin','res'], help = "fin: for finetuning, res: for resuming training")
parser.add_argument('--base_lr', default = 0.001, type = float, help = 'initial learning rate')
parser.add_argument('--display', default = 20, type = int, help = 'display output every #display iterations')
parser.add_argument('--test_interval', default = 500 , type = int, help = 'test every #test_interval iterations')
parser.add_argument('--snapshot', default = 500, type = int, help = 'produce an output every #snapshot iterations')
parser.add_argument('--type', default = 'SGD', choices = ['SGD','AdaDelta','AdaGrad','Adam','Nesterov','RMSProp'], help = 'back-propagation algorithm')
parser.add_argument('--momentum',default = 0.9, type = float, help = ' weight of the previous update')
parser.add_argument('--lr_policy',default = 'step',choices=['step','fixed','exp','inv','multistep','poly','sigmoid'] ,help = 'learning rate decay policy')
parser.add_argument('--test_iter', default = 75 , type = int, help = 'perform #test_iter iterations when testing')
parser.add_argument('--stepsize', default = 700 , type = int, help = 'reduce learning rate every #stepsize iterations')
parser.add_argument('--gamma', default = 0.1, type = float, help = 'reduce learning rate to an order of #gamma')
parser.add_argument('--weight_decay', default = 0.005, type = float, help = 'regularization term of the neural net')
parser.add_argument('--solver_mode', default = 'CPU', choices = ['CPU','GPU'], help = 'where to run the program')
parser.add_argument('--batch_size', default = 128, type = int, help = 'size of input batch')
#parser.add_argument('--device_id', default = 0, type = int, choices=[0,1], help = '0:for CPU, 1: for GPU')
args = parser.parse_args()
solver = PrintSolverSetup(args)
return args,solver
# Create the solver file. The solver file also serves as a descriptor for the experiment; extra information is written as comments.
def PrintSolverSetup(args):
mode = 0
solver = args.snapshot_prefix+"_solver.prototxt"
print "Export experiment parameters to solver file:",solver
fsetup = open(args.snapshot_prefix+"_solver.prototxt", 'w')
for arg in vars(args):
        if arg == 'type':
if str(getattr(args, arg)) == "AdaGrad":
mode=1
        if mode == 1 and arg == 'momentum':
continue
if arg in ['init','train','test','init_type','batch_size']:
fsetup.write('#')
        if (type(getattr(args, arg)) is str) and arg != 'solver_mode':
fsetup.write(arg +': "'+ str(getattr(args, arg))+'"\n')
continue
fsetup.write(arg +': '+ str(getattr(args, arg))+'\n')
fsetup.write("test_state: { stage: 'test-on-test' }"+'\n')
fsetup.write("test_initialization: false"+'\n')
fsetup.write("random_seed: 1701")
fsetup.close()
return solver
# Change the paths to training and test data in the NETWORK.prototxt file
def ChangeNetworkDataRoots(train,test,ftrain,ftest,batch_size):
for line in fileinput.input(args.net, inplace=True):
tmp = line.split(':')
initstring = tmp[0]
if tmp[0].strip() =='phase':
phase = tmp[1].strip()
if tmp[0].strip() == 'source':
if phase.upper() == 'TRAIN':
print initstring+": \""+ftrain+"\"\n",
else:
print initstring+": \""+ftest+"\"\n",
continue
if tmp[0].strip() =='root_folder':
if phase.upper() == 'TRAIN':
print initstring+": \""+train+'/\"\n',
else:
print initstring+": \""+test+'/\"\n',
continue
if tmp[0].strip() =='batch_size':
print initstring+":"+ str(batch_size)+"\n",
continue
print line,
# Create Source Files
def CreateResourceFiles(snapshot_prefix,train,test):
allLabels = list(set(os.listdir(train)+os.listdir(test)))
allLabels = sorted(allLabels)
StringTrain = []
StringTest = []
# Create Train Source File
fnameTrain = snapshot_prefix + "_TrainSource.txt"
train_file = open(fnameTrain, "w")
for idx,label in enumerate(allLabels):
datadir = "/".join((train,label))
if os.path.exists(datadir):
trainSamples = os.listdir(datadir)
for sample in trainSamples:
StringTrain.append('/'.join((label,sample))+' '+str(idx))
# Create Test Source File
fnameTest = snapshot_prefix + "_TestSource.txt"
test_file = open(fnameTest, "w")
for idx,label in enumerate(allLabels):
datadir = "/".join((test,label))
if os.path.exists(datadir):
testSamples = os.listdir(datadir)
for sample in testSamples:
StringTest.append('/'.join((label,sample))+' '+str(idx))
random.shuffle(StringTrain)
random.shuffle(StringTest)
for s in StringTrain:
train_file.write(s+'\n')
for s in StringTest:
test_file.write(s+'\n')
train_file.close()
test_file.close()
cPickle.dump(allLabels, open(snapshot_prefix + "_classNames", 'wb'))
return fnameTrain,fnameTest
# Modify execution file
def train(solver_prototxt_filename, init, init_type):
for line in fileinput.input('train_net.sh', inplace=True):
if '-solver' in line:
tmp = line.split('-solver')
if init==None:
print tmp[0]+" -solver "+ solver_prototxt_filename
elif init_type == 'fin':
                print tmp[0]+" -solver "+ solver_prototxt_filename +" -weights " + init # .caffemodel file required for finetuning
elif init_type == 'res':
                print tmp[0]+" -solver "+ solver_prototxt_filename +" -snapshot " + init # .solverstate file required for resuming training
else:
raise ValueError("No specific init_type defined for pre-trained network "+init)
else:
print line,
os.system("chmod +x train_net.sh")
os.system('./train_net.sh')
if __name__ == "__main__":
args,solver = ParseInputArguments()
ftrain,ftest = CreateResourceFiles(args.snapshot_prefix,args.train,args.test)
ChangeNetworkDataRoots(args.train,args.test,ftrain,ftest,args.batch_size)
train(solver,args.init,args.init_type)
|
MikeMpapa/CNNs-Speech-Music-Discrimination
|
trainCNN.py
|
Python
|
mit
| 7,063
|
"""
@author: Maneesh D
@email: maneeshd77@gmail.com
"""
def is_prime(num):
limit = int(num ** 0.5) + 1
for i in range(2, limit):
if num % i == 0:
return False
return True
def sum_primes(upto_num):
total = 2
for num in range(3, upto_num+1, 2):
if is_prime(num):
total += num
return total
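# A quick sanity check (not part of the original script):
# sum_primes(10) == 17   # 2 + 3 + 5 + 7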
def main():
print("The Sum of prime numbers till 2 million is %d" % sum_primes(2000000))
if __name__ == '__main__':
from timeit import Timer
t = Timer(stmt="main()", setup="from primes_single_thread import main")
print("Execution Time= %.3f Seconds" % t.timeit(number=1))
|
maneeshd/PyTutorial
|
Advanced/Multi-Threading & Processing/threading/primes_single_thread.py
|
Python
|
mit
| 647
|
#%%
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
import matplotlib.cm as cm
from tqdm import trange, tqdm
from sklearn.metrics import adjusted_rand_score, f1_score
from argparse import ArgumentParser
from util.config_parser import ConfigParser_with_eval
import warnings
warnings.filterwarnings('ignore')
#%% parse arguments
def arg_check(value, default):
return value if value else default
default_hypparams_model = "hypparams/model.config"
parser = ArgumentParser()
parser.add_argument("--model", help=f"hyper parameters of model, default is [{default_hypparams_model}]")
args = parser.parse_args()
hypparams_model = arg_check(args.model, default_hypparams_model)
#%%
def load_config(filename):
cp = ConfigParser_with_eval()
cp.read(filename)
return cp
#%%
def get_names():
return np.loadtxt("files.txt", dtype=str)
def get_labels(names):
letter_labels = [np.loadtxt("LABEL/" + name + ".lab") for name in names]
word_labels = [np.loadtxt("LABEL/" + name + ".lab2") for name in names]
return letter_labels, word_labels
def get_datas_and_length(names):
datas = [np.loadtxt("DATA/" + name + ".txt") for name in names]
length = [len(d) for d in datas]
return datas, length
def get_results_of_word(names, length):
return _joblib_get_results(names, length, "s")
def get_results_of_letter(names, length):
return _joblib_get_results(names, length, "l")
def get_results_of_duration(names, length):
return _joblib_get_results(names, length, "d")
def _get_results(names, lengths, c):
return [np.loadtxt("results/" + name + "_" + c + ".txt").reshape((-1, l)) for name, l in zip(names, lengths)]
def _joblib_get_results(names, lengths, c):
from joblib import Parallel, delayed
def _component(name, length, c):
return np.loadtxt("results/" + name + "_" + c + ".txt").reshape((-1, length))
return Parallel(n_jobs=-1)([delayed(_component)(n, l, c) for n, l in zip(names, lengths)])
def _convert_label(truth, predict, N):
converted_label = np.full_like(truth, N)
for true_lab in range(N):
counted = [np.sum(predict[truth == true_lab] == pred) for pred in range(N)]
pred_lab = np.argmax(counted)
converted_label[predict == pred_lab] = true_lab
return converted_label
def calc_f1_score(truth, predict, N, **kwargs):
converted_predict = _convert_label(truth, predict, N)
return f1_score(truth, converted_predict, labels=np.unique(converted_predict), **kwargs )
def _boundary(label):
diff = np.diff(label)
diff[diff!=0] = 1
return np.concatenate((diff, [0]))
def _plot_discreate_sequence(true_data, title, sample_data, cmap=None, cmap2=None, label_cmap=None):
ax = plt.subplot2grid((10, 1), (1, 0))
plt.sca(ax)
if label_cmap is None:
label_cmap = cmap
ax.matshow([true_data], aspect='auto', cmap=label_cmap)
plt.ylabel('Truth Label')
#label matrix
ax = plt.subplot2grid((10, 1), (2, 0), rowspan = 8)
plt.suptitle(title)
plt.sca(ax)
if cmap2 is not None:
cmap = cmap2
ax.matshow(sample_data, aspect='auto', cmap=cmap)
#write x&y label
plt.xlabel('Frame')
plt.ylabel('Iteration')
plt.xticks(())
#%%
Path("figures").mkdir(exist_ok=True)
Path("summary_files").mkdir(exist_ok=True)
#%% config parse
print("Loading model config...")
config_parser = load_config(hypparams_model)
section = config_parser["model"]
word_num = section["word_num"]
letter_num = section["letter_num"]
print("Done!")
#%%
print("Loading results....")
names = get_names()
datas, length = get_datas_and_length(names)
l_labels, w_labels = get_labels(names)
concat_l_l = np.concatenate(l_labels, axis=0)
concat_w_l = np.concatenate(w_labels, axis=0)
l_results = get_results_of_letter(names, length)
w_results = get_results_of_word(names, length)
d_results = get_results_of_duration(names, length)
concat_l_r = np.concatenate(l_results, axis=1)
concat_w_r = np.concatenate(w_results, axis=1)
log_likelihood = np.loadtxt("summary_files/log_likelihood.txt")
resample_times = np.loadtxt("summary_files/resample_times.txt")
print("Done!")
train_iter = l_results[0].shape[0]
#%%
letter_ARI = np.zeros(train_iter)
letter_macro_f1_score = np.zeros(train_iter)
letter_micro_f1_score = np.zeros(train_iter)
word_ARI = np.zeros(train_iter)
word_macro_f1_score = np.zeros(train_iter)
word_micro_f1_score = np.zeros(train_iter)
#%%
lcolors = ListedColormap([cm.tab20(float(i)/letter_num) for i in range(letter_num)])
wcolors = ListedColormap([cm.tab20(float(i)/word_num) for i in range(word_num)])
#%%
print("Plot results...")
for i, name in enumerate(tqdm(names)):
plt.clf()
# _plot_discreate_sequence(l_labels[i], name + "_l", l_results[i], cmap=lcolors)
_plot_discreate_sequence(_boundary(l_labels[i]), name + "_l", l_results[i], cmap=lcolors, label_cmap=cm.binary)
plt.savefig("figures/" + name + "_l.png")
plt.clf()
# _plot_discreate_sequence(w_labels[i], name + "_s", w_results[i], cmap=wcolors)
_plot_discreate_sequence(_boundary(w_labels[i]), name + "_s", w_results[i], cmap=wcolors, label_cmap=cm.binary)
plt.savefig("figures/" + name + "_s.png")
plt.clf()
# _plot_discreate_sequence(w_labels[i], name + "_d", d_results[i], cmap=wcolors, cmap2=cm.binary)
_plot_discreate_sequence(_boundary(w_labels[i]), name + "_d", d_results[i], cmap2=cm.binary, label_cmap=cm.binary)
plt.savefig("figures/" + name + "_d.png")
print("Done!")
#%% calculate ARI
print("Calculating ARI...")
for t in trange(train_iter):
letter_ARI[t] = adjusted_rand_score(concat_l_l, concat_l_r[t])
letter_macro_f1_score[t] = calc_f1_score(concat_l_l, concat_l_r[t], letter_num, average="macro")
letter_micro_f1_score[t] = calc_f1_score(concat_l_l, concat_l_r[t], letter_num, average="micro")
word_ARI[t] = adjusted_rand_score(concat_w_l, concat_w_r[t])
word_macro_f1_score[t] = calc_f1_score(concat_w_l, concat_w_r[t], word_num, average="macro")
word_micro_f1_score[t] = calc_f1_score(concat_w_l, concat_w_r[t], word_num, average="micro")
print("Done!")
#%% plot ARIs.
plt.clf()
plt.title("Letter ARI")
plt.plot(range(train_iter), letter_ARI, ".-")
#%%
plt.title("Word ARI")
plt.clf()
plt.plot(range(train_iter), word_ARI, ".-")
#%%
plt.clf()
plt.title("Log likelihood")
plt.plot(range(train_iter+1), log_likelihood, ".-")
plt.savefig("figures/Log_likelihood.png")
#%%
plt.clf()
plt.title("Resample times")
plt.plot(range(train_iter), resample_times, ".-")
plt.savefig("figures/Resample_times.png")
#%%
np.savetxt("summary_files/Letter_ARI.txt", letter_ARI)
np.savetxt("summary_files/Letter_macro_F1_score.txt", letter_macro_f1_score)
np.savetxt("summary_files/Letter_micro_F1_score.txt", letter_micro_f1_score)
np.savetxt("summary_files/Word_ARI.txt", word_ARI)
np.savetxt("summary_files/Word_macro_F1_score.txt", word_macro_f1_score)
np.savetxt("summary_files/Word_micro_F1_score.txt", word_micro_f1_score)
with open("summary_files/Sum_of_resample_times.txt", "w") as f:
f.write(str(np.sum(resample_times)))
|
EmergentSystemLabStudent/NPB_DAA
|
sample/summary_and_plot.py
|
Python
|
mit
| 7,089
|
# -*- coding: utf-8 -*-
"""
Created on Mon Aug 17 15:48:31 2015
@author: thomas.douenne
"""
# The aim is to describe how the TICPE excise duty amounts have evolved since 1993
# Import functions specific to Openfisca Indirect Taxation
from openfisca_france_indirect_taxation.examples.utils_example import graph_builder_bar_list
from openfisca_france_indirect_taxation.examples.dataframes_from_legislation.get_accises import \
get_accise_ticpe_majoree
# Look up the legislation parameters
liste = ['ticpe_gazole', 'ticpe_super9598', 'super_plombe_ticpe']
df_accises = get_accise_ticpe_majoree()
# Build the graphs
graph_builder_bar_list(df_accises['accise majoree sans plomb'], 1, 1)
graph_builder_bar_list(df_accises['accise majoree diesel'], 1, 1)
graph_builder_bar_list(df_accises['accise majoree super plombe'], 1, 1)
|
benjello/openfisca-france-indirect-taxation
|
openfisca_france_indirect_taxation/examples/transports/plot_legislation/plot_ticpe_accises.py
|
Python
|
agpl-3.0
| 865
|