| repo_name (string, 5-100 chars) | ref (string, 12-67 chars) | path (string, 4-244 chars) | copies (string, 1-8 chars) | content (string, 0-1.05M chars, nullable) |
|---|---|---|---|---|
gsauthof/imapdl
|
refs/heads/master
|
ci/gen-coverage.py
|
1
|
#!/usr/bin/env python3
# 2017, Georg Sauthoff <mail@gms.tf>, GPLv3
import argparse
import logging
import os
import subprocess
import shutil
import sys
sys.path.insert(0, os.path.dirname(__file__))
import filterbr
log = logging.getLogger(__name__)
ex_path = [ '/usr/include/*', 'unittest/*', 'lib*/*', '*@exe/*', 'example/*' ]
brflag = ['--rc', 'lcov_branch_coverage=1']
lcov = 'lcov'
base = os.path.abspath('.')
cov_init_raw = 'coverage_init_raw.info'
cov_post_raw = 'coverage_post_raw.info'
cov_init = 'coverage_init.info'
cov_post = 'coverage_post.info'
cov_br = 'coverage.info'
cov = 'coverage.info'
report_dir = 'coverage'
def setup_logging():
log_format = '{rel_secs:6.1f} {lvl} {message}'
log_date_format = '%Y-%m-%d %H:%M:%S'
class Relative_Formatter(logging.Formatter):
level_dict = { 10 : 'DBG', 20 : 'INF', 30 : 'WRN', 40 : 'ERR',
50 : 'CRI' }
def format(self, rec):
rec.rel_secs = rec.relativeCreated/1000.0
rec.lvl = self.level_dict[rec.levelno]
return super(Relative_Formatter, self).format(rec)
log = logging.getLogger() # root logger
log.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.INFO)
ch.setFormatter(Relative_Formatter(log_format, log_date_format, style='{'))
log.addHandler(ch)
def mk_arg_parser():
p = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
        description='Capture lcov coverage data and generate a coverage report',
epilog='...')
p.add_argument('--html', action='store_true', default=True,
help='generate HTML report (default: on)')
p.add_argument('--no-html', dest='html', action='store_false',
help='disable html report generation')
p.add_argument('--filter-br', action='store_true', default=True,
help='filter branch coverage data (default: on)')
p.add_argument('--no-filter-br', dest='filter_br', action='store_false',
help='disable branch filtering')
return p
def parse_args(*a):
arg_parser = mk_arg_parser()
args = arg_parser.parse_args(*a)
global cov_br
if args.filter_br:
cov_br = 'coverage_br.info'
return args
def run(*args, **kw):
log.info('Executing: ' + ' '.join(map(lambda s:"'"+s+"'", args[0])))
return subprocess.run(*args, **kw, check=True)
def main(args):
run([lcov, '--directory', base, '--capture', '-o', cov_post_raw] + brflag )
run([lcov, '--directory', base, '--capture', '--initial', '-o', cov_init_raw])
for i, o in [ (cov_init_raw, cov_init), (cov_post_raw, cov_post) ]:
run([lcov, '--remove', i] + ex_path + [ '-o', o] + brflag)
run([lcov, '-a', cov_init, '-a', cov_post, '-o', cov_br] + brflag)
if args.filter_br:
log.info('Filtering branch coverage data ({} -> {})'.format(cov_br, cov))
with open(cov, 'w') as f:
filterbr.filter_lcov_trace_file(cov_br, f)
if args.html:
shutil.rmtree(report_dir, ignore_errors=True)
run(['genhtml', cov, '--branch-coverage', '-o', report_dir])
return 0
if __name__ == '__main__':
setup_logging()
args = parse_args()
sys.exit(main(args))
|
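
The gen-coverage.py script above is a four-step lcov pipeline: capture post-test counters, capture a zeroed baseline, strip system and test paths from both, then merge and render. A minimal standalone sketch of the same pipeline, assuming only that lcov and genhtml are installed (the repo's filterbr helper is omitted here):

import subprocess

def coverage_report(build_dir, out='coverage.info', html_dir='coverage'):
    def run(cmd):
        subprocess.run(cmd, check=True)
    # Post-test capture plus a zeroed baseline, so never-executed files
    # still show up in the report at 0% instead of disappearing.
    run(['lcov', '--directory', build_dir, '--capture', '-o', 'post.info'])
    run(['lcov', '--directory', build_dir, '--capture', '--initial',
         '-o', 'init.info'])
    # Drop system headers from both traces before merging.
    for raw, cooked in (('init.info', 'init_f.info'),
                        ('post.info', 'post_f.info')):
        run(['lcov', '--remove', raw, '/usr/include/*', '-o', cooked])
    run(['lcov', '-a', 'init_f.info', '-a', 'post_f.info', '-o', out])
    run(['genhtml', out, '-o', html_dir])
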
laperry1/android_external_chromium_org
|
refs/heads/cm-12.1
|
tools/win/link_limiter/build_link_limiter.py
|
169
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import glob
import os
import shutil
import subprocess
import sys
import tempfile
BUILD_DIR = 'build'
def run_with_vsvars(cmd, tmpdir=None):
fd, filename = tempfile.mkstemp('.bat', text=True)
with os.fdopen(fd, 'w') as f:
print >> f, '@echo off'
print >> f, r'call "%VS100COMNTOOLS%\vsvars32.bat"'
if tmpdir:
print >> f, r'cd %s' % tmpdir
print >> f, cmd
try:
p = subprocess.Popen([filename], shell=True, stdout=subprocess.PIPE,
universal_newlines=True)
out, _ = p.communicate()
return p.returncode, out
finally:
os.unlink(filename)
def get_vc_dir():
_, out = run_with_vsvars('echo VCINSTALLDIR=%VCINSTALLDIR%')
for line in out.splitlines(): # pylint: disable-msg=E1103
if line.startswith('VCINSTALLDIR='):
return line[len('VCINSTALLDIR='):]
return None
def build(infile):
if not os.path.exists(BUILD_DIR):
os.makedirs(BUILD_DIR)
outfile = 'limiter.exe'
outpath = os.path.join(BUILD_DIR, outfile)
cpptime = os.path.getmtime(infile)
if not os.path.exists(outpath) or cpptime > os.path.getmtime(outpath):
print 'Building %s...' % outfile
rc, out = run_with_vsvars(
'cl /nologo /Ox /Zi /W4 /WX /D_UNICODE /DUNICODE'
' /D_CRT_SECURE_NO_WARNINGS /EHsc %s /link /out:%s'
% (os.path.join('..', infile), outfile), BUILD_DIR)
if rc:
print out
print 'Failed to build %s' % outfile
sys.exit(1)
else:
print '%s already built' % outfile
return outpath
def main():
# Switch to our own dir.
os.chdir(os.path.dirname(os.path.abspath(__file__)))
if sys.argv[-1] == 'clean':
if os.path.exists(BUILD_DIR):
shutil.rmtree(BUILD_DIR)
for exe in glob.glob('*.exe'):
os.unlink(exe)
return 0
vcdir = os.environ.get('VCINSTALLDIR')
if not vcdir:
vcdir = get_vc_dir()
if not vcdir:
print 'Could not get VCINSTALLDIR. Run vsvars32.bat?'
return 1
os.environ['PATH'] += (';' + os.path.join(vcdir, 'bin') +
';' + os.path.join(vcdir, r'..\Common7\IDE'))
# Verify that we can find link.exe.
link = os.path.join(vcdir, 'bin', 'link.exe')
if not os.path.exists(link):
print 'link.exe not found at %s' % link
return 1
exe_name = build('limiter.cc')
for shim_exe in ('lib.exe', 'link.exe'):
newpath = '%s__LIMITER.exe' % shim_exe
shutil.copyfile(exe_name, newpath)
print '%s shim built. Use with msbuild like: "/p:LinkToolExe=%s"' \
% (shim_exe, os.path.abspath(newpath))
return 0
if __name__ == '__main__':
sys.exit(main())
|
jkshaver/virtualenv-1.8.2
|
refs/heads/master
|
env/lib/python2.7/site-packages/distribute-0.6.28-py2.7.egg/setuptools/command/install_lib.py
|
454
|
from distutils.command.install_lib import install_lib as _install_lib
import os
class install_lib(_install_lib):
"""Don't add compiled flags to filenames of non-Python files"""
def _bytecode_filenames (self, py_filenames):
bytecode_files = []
for py_file in py_filenames:
if not py_file.endswith('.py'):
continue
if self.compile:
bytecode_files.append(py_file + "c")
if self.optimize > 0:
bytecode_files.append(py_file + "o")
return bytecode_files
def run(self):
self.build()
outfiles = self.install()
if outfiles is not None:
# always compile, in case we have any extension stubs to deal with
self.byte_compile(outfiles)
def get_exclusions(self):
exclude = {}
nsp = self.distribution.namespace_packages
if (nsp and self.get_finalized_command('install')
.single_version_externally_managed
):
for pkg in nsp:
parts = pkg.split('.')
while parts:
pkgdir = os.path.join(self.install_dir, *parts)
for f in '__init__.py', '__init__.pyc', '__init__.pyo':
exclude[os.path.join(pkgdir,f)] = 1
parts.pop()
return exclude
def copy_tree(
self, infile, outfile,
preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
):
assert preserve_mode and preserve_times and not preserve_symlinks
exclude = self.get_exclusions()
if not exclude:
return _install_lib.copy_tree(self, infile, outfile)
# Exclude namespace package __init__.py* files from the output
from setuptools.archive_util import unpack_directory
from distutils import log
outfiles = []
def pf(src, dst):
if dst in exclude:
log.warn("Skipping installation of %s (namespace package)",dst)
return False
log.info("copying %s -> %s", src, os.path.dirname(dst))
outfiles.append(dst)
return dst
unpack_directory(infile, outfile, pf)
return outfiles
def get_outputs(self):
outputs = _install_lib.get_outputs(self)
exclude = self.get_exclusions()
if exclude:
return [f for f in outputs if f not in exclude]
return outputs
|
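
The _bytecode_filenames override above exists because the stock distutils version appends 'c'/'o' to every installed filename, including data files; this subclass only maps real .py sources, as its docstring says. A standalone illustration of the intended mapping (the file names are hypothetical):

# '.py' sources gain 'c' (and 'o' when optimizing); other files are skipped.
def bytecode_filenames(py_filenames, do_compile=True, optimize=1):
    out = []
    for name in py_filenames:
        if not name.endswith('.py'):
            continue  # e.g. packaged data files
        if do_compile:
            out.append(name + 'c')
        if optimize > 0:
            out.append(name + 'o')
    return out

assert bytecode_filenames(['pkg/a.py', 'pkg/data.txt']) == ['pkg/a.pyc', 'pkg/a.pyo']
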
edgarcosta92/ns3
|
refs/heads/master
|
src/netanim/bindings/callbacks_list.py
|
664
|
callback_classes = [
['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<ns3::Packet const>', 'unsigned short', 'ns3::Address const&', 'ns3::Address const&', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
]
|
poppogbr/genropy
|
refs/heads/master
|
packages/showcase/lib/importer.py
|
1
|
#!/usr/bin/env python
# encoding: utf-8
"""
importer.py
Created by Saverio Porcari on 2008-07-28.
Copyright (c) 2008 __MyCompanyName__. All rights reserved.
"""
from gnr.core.gnrbag import Bag, BagResolver, BagCbResolver, DirectoryResolver
from gnr.app.gnrapp import GnrApp
def importPeople(db, dataBag):
tblObj = db.table('showcase.person')
for item in dataBag['people']:
record = {}
record['name'] = item.getAttr('name')
record['year'] = item.getAttr('year')
record['nationality'] = item.getAttr('nationality')
record['number'] = item.getAttr('id')
tblObj.insert(record)
def importMovie(db, dataBag):
tblObj = db.table('showcase.movie')
for item in dataBag['movie']:
record = {}
record['title'] = item.getAttr('title')
record['year'] = item.getAttr('year')
record['nationality'] = item.getAttr('nationality')
record['number'] = item.getAttr('id')
record['genre'] = item.getAttr('genre')
tblObj.insert(record)
def importCast(db, dataBag):
tblObj = db.table('showcase.cast')
movies = db.table('showcase.movie').query(columns='$id').fetch()
people = db.table('showcase.person').query(columns='$id').fetch()
for item in dataBag['cast']:
record = {}
record['person_id'] = people[int(item.getAttr('person_id'))]['id']
record['movie_id'] = movies[int(item.getAttr('movie_id'))]['id']
record['role'] = item.getAttr('role')
record['prizes'] = item.getAttr('prizes')
tblObj.insert(record)
if __name__ == '__main__':
db = GnrApp('testgarden').db
dataBag = Bag('data.xml')
importPeople(db, dataBag)
importMovie(db, dataBag)
importCast(db, dataBag)
db.commit()
|
mohammed-alfatih/servo
|
refs/heads/master
|
tests/wpt/harness/wptrunner/vcs.py
|
156
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import subprocess
from functools import partial
from mozlog import get_default_logger
logger = None
def vcs(bin_name):
def inner(command, *args, **kwargs):
global logger
if logger is None:
logger = get_default_logger("vcs")
repo = kwargs.pop("repo", None)
log_error = kwargs.pop("log_error", True)
if kwargs:
raise TypeError, kwargs
args = list(args)
proc_kwargs = {}
if repo is not None:
proc_kwargs["cwd"] = repo
command_line = [bin_name, command] + args
logger.debug(" ".join(command_line))
try:
return subprocess.check_output(command_line, stderr=subprocess.STDOUT, **proc_kwargs)
except subprocess.CalledProcessError as e:
if log_error:
logger.error(e.output)
raise
return inner
git = vcs("git")
hg = vcs("hg")
def bind_to_repo(vcs_func, repo):
return partial(vcs_func, repo=repo)
def is_git_root(path):
try:
rv = git("rev-parse", "--show-cdup", repo=path)
except subprocess.CalledProcessError:
return False
return rv == "\n"
|
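
vcs() above is a closure factory: the returned inner function prepends the binary and subcommand, forwards a repo= keyword as the subprocess working directory, and logs then re-raises on failure; is_git_root works because `git rev-parse --show-cdup` prints only an empty line at the repository root. A hedged usage sketch of those wrappers (the checkout path is made up for illustration):

# Uses git, hg, bind_to_repo and is_git_root as defined in the record above.
repo = "/path/to/checkout"
git_here = bind_to_repo(git, repo)      # partial(git, repo=repo)
print(git_here("status", "--short"))    # runs `git status --short` in repo
print(is_git_root(repo))                # True iff repo is the top-level dir
print(hg("version"))                    # the same factory wraps hg
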
ldirer/scikit-learn
|
refs/heads/master
|
sklearn/decomposition/tests/test_truncated_svd.py
|
66
|
"""Test truncated SVD transformer."""
import numpy as np
import scipy.sparse as sp
from sklearn.decomposition import TruncatedSVD
from sklearn.utils import check_random_state
from sklearn.utils.testing import (assert_array_almost_equal, assert_equal,
assert_raises, assert_greater,
assert_array_less)
# Make an X that looks somewhat like a small tf-idf matrix.
# XXX newer versions of SciPy have scipy.sparse.rand for this.
shape = 60, 55
n_samples, n_features = shape
rng = check_random_state(42)
X = rng.randint(-100, 20, np.product(shape)).reshape(shape)
X = sp.csr_matrix(np.maximum(X, 0), dtype=np.float64)
X.data[:] = 1 + np.log(X.data)
Xdense = X.A
def test_algorithms():
svd_a = TruncatedSVD(30, algorithm="arpack")
svd_r = TruncatedSVD(30, algorithm="randomized", random_state=42)
Xa = svd_a.fit_transform(X)[:, :6]
Xr = svd_r.fit_transform(X)[:, :6]
assert_array_almost_equal(Xa, Xr, decimal=5)
comp_a = np.abs(svd_a.components_)
comp_r = np.abs(svd_r.components_)
# All elements are equal, but some elements are more equal than others.
assert_array_almost_equal(comp_a[:9], comp_r[:9])
assert_array_almost_equal(comp_a[9:], comp_r[9:], decimal=2)
def test_attributes():
for n_components in (10, 25, 41):
tsvd = TruncatedSVD(n_components).fit(X)
assert_equal(tsvd.n_components, n_components)
assert_equal(tsvd.components_.shape, (n_components, n_features))
def test_too_many_components():
for algorithm in ["arpack", "randomized"]:
for n_components in (n_features, n_features + 1):
tsvd = TruncatedSVD(n_components=n_components, algorithm=algorithm)
assert_raises(ValueError, tsvd.fit, X)
def test_sparse_formats():
for fmt in ("array", "csr", "csc", "coo", "lil"):
        Xfmt = Xdense if fmt == "array" else getattr(X, "to" + fmt)()
tsvd = TruncatedSVD(n_components=11)
Xtrans = tsvd.fit_transform(Xfmt)
assert_equal(Xtrans.shape, (n_samples, 11))
Xtrans = tsvd.transform(Xfmt)
assert_equal(Xtrans.shape, (n_samples, 11))
def test_inverse_transform():
for algo in ("arpack", "randomized"):
# We need a lot of components for the reconstruction to be "almost
# equal" in all positions. XXX Test means or sums instead?
tsvd = TruncatedSVD(n_components=52, random_state=42, algorithm=algo)
Xt = tsvd.fit_transform(X)
Xinv = tsvd.inverse_transform(Xt)
assert_array_almost_equal(Xinv, Xdense, decimal=1)
def test_integers():
Xint = X.astype(np.int64)
tsvd = TruncatedSVD(n_components=6)
Xtrans = tsvd.fit_transform(Xint)
assert_equal(Xtrans.shape, (n_samples, tsvd.n_components))
def test_explained_variance():
# Test sparse data
svd_a_10_sp = TruncatedSVD(10, algorithm="arpack")
svd_r_10_sp = TruncatedSVD(10, algorithm="randomized", random_state=42)
svd_a_20_sp = TruncatedSVD(20, algorithm="arpack")
svd_r_20_sp = TruncatedSVD(20, algorithm="randomized", random_state=42)
X_trans_a_10_sp = svd_a_10_sp.fit_transform(X)
X_trans_r_10_sp = svd_r_10_sp.fit_transform(X)
X_trans_a_20_sp = svd_a_20_sp.fit_transform(X)
X_trans_r_20_sp = svd_r_20_sp.fit_transform(X)
# Test dense data
svd_a_10_de = TruncatedSVD(10, algorithm="arpack")
svd_r_10_de = TruncatedSVD(10, algorithm="randomized", random_state=42)
svd_a_20_de = TruncatedSVD(20, algorithm="arpack")
svd_r_20_de = TruncatedSVD(20, algorithm="randomized", random_state=42)
X_trans_a_10_de = svd_a_10_de.fit_transform(X.toarray())
X_trans_r_10_de = svd_r_10_de.fit_transform(X.toarray())
X_trans_a_20_de = svd_a_20_de.fit_transform(X.toarray())
X_trans_r_20_de = svd_r_20_de.fit_transform(X.toarray())
# helper arrays for tests below
svds = (svd_a_10_sp, svd_r_10_sp, svd_a_20_sp, svd_r_20_sp, svd_a_10_de,
svd_r_10_de, svd_a_20_de, svd_r_20_de)
svds_trans = (
(svd_a_10_sp, X_trans_a_10_sp),
(svd_r_10_sp, X_trans_r_10_sp),
(svd_a_20_sp, X_trans_a_20_sp),
(svd_r_20_sp, X_trans_r_20_sp),
(svd_a_10_de, X_trans_a_10_de),
(svd_r_10_de, X_trans_r_10_de),
(svd_a_20_de, X_trans_a_20_de),
(svd_r_20_de, X_trans_r_20_de),
)
svds_10_v_20 = (
(svd_a_10_sp, svd_a_20_sp),
(svd_r_10_sp, svd_r_20_sp),
(svd_a_10_de, svd_a_20_de),
(svd_r_10_de, svd_r_20_de),
)
svds_sparse_v_dense = (
(svd_a_10_sp, svd_a_10_de),
(svd_a_20_sp, svd_a_20_de),
(svd_r_10_sp, svd_r_10_de),
(svd_r_20_sp, svd_r_20_de),
)
# Assert the 1st component is equal
for svd_10, svd_20 in svds_10_v_20:
assert_array_almost_equal(
svd_10.explained_variance_ratio_,
svd_20.explained_variance_ratio_[:10],
decimal=5,
)
# Assert that 20 components has higher explained variance than 10
for svd_10, svd_20 in svds_10_v_20:
assert_greater(
svd_20.explained_variance_ratio_.sum(),
svd_10.explained_variance_ratio_.sum(),
)
# Assert that all the values are greater than 0
for svd in svds:
assert_array_less(0.0, svd.explained_variance_ratio_)
# Assert that total explained variance is less than 1
for svd in svds:
assert_array_less(svd.explained_variance_ratio_.sum(), 1.0)
# Compare sparse vs. dense
for svd_sparse, svd_dense in svds_sparse_v_dense:
assert_array_almost_equal(svd_sparse.explained_variance_ratio_,
svd_dense.explained_variance_ratio_)
# Test that explained_variance is correct
for svd, transformed in svds_trans:
total_variance = np.var(X.toarray(), axis=0).sum()
variances = np.var(transformed, axis=0)
true_explained_variance_ratio = variances / total_variance
assert_array_almost_equal(
svd.explained_variance_ratio_,
true_explained_variance_ratio,
)
def test_singular_values():
# Check that the TruncatedSVD output has the correct singular values
rng = np.random.RandomState(0)
n_samples = 100
n_features = 80
X = rng.randn(n_samples, n_features)
apca = TruncatedSVD(n_components=2, algorithm='arpack',
random_state=rng).fit(X)
    rpca = TruncatedSVD(n_components=2, algorithm='randomized',
                        random_state=rng).fit(X)
assert_array_almost_equal(apca.singular_values_, rpca.singular_values_, 12)
# Compare to the Frobenius norm
X_apca = apca.transform(X)
X_rpca = rpca.transform(X)
assert_array_almost_equal(np.sum(apca.singular_values_**2.0),
np.linalg.norm(X_apca, "fro")**2.0, 12)
assert_array_almost_equal(np.sum(rpca.singular_values_**2.0),
np.linalg.norm(X_rpca, "fro")**2.0, 12)
# Compare to the 2-norms of the score vectors
assert_array_almost_equal(apca.singular_values_,
np.sqrt(np.sum(X_apca**2.0, axis=0)), 12)
assert_array_almost_equal(rpca.singular_values_,
np.sqrt(np.sum(X_rpca**2.0, axis=0)), 12)
# Set the singular values and see what we get back
rng = np.random.RandomState(0)
n_samples = 100
n_features = 110
X = rng.randn(n_samples, n_features)
apca = TruncatedSVD(n_components=3, algorithm='arpack',
random_state=rng)
rpca = TruncatedSVD(n_components=3, algorithm='randomized',
random_state=rng)
X_apca = apca.fit_transform(X)
X_rpca = rpca.fit_transform(X)
X_apca /= np.sqrt(np.sum(X_apca**2.0, axis=0))
X_rpca /= np.sqrt(np.sum(X_rpca**2.0, axis=0))
X_apca[:, 0] *= 3.142
X_apca[:, 1] *= 2.718
X_rpca[:, 0] *= 3.142
X_rpca[:, 1] *= 2.718
X_hat_apca = np.dot(X_apca, apca.components_)
X_hat_rpca = np.dot(X_rpca, rpca.components_)
apca.fit(X_hat_apca)
rpca.fit(X_hat_rpca)
assert_array_almost_equal(apca.singular_values_, [3.142, 2.718, 1.0], 14)
assert_array_almost_equal(rpca.singular_values_, [3.142, 2.718, 1.0], 14)
|
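
For readers skimming the test file above, the API under test is small; a minimal sketch of typical TruncatedSVD usage on sparse input, with shapes chosen to match the tests:

import scipy.sparse as sp
from sklearn.decomposition import TruncatedSVD

# Random sparse matrix standing in for the tf-idf-like X in the tests.
X = sp.random(60, 55, density=0.1, format='csr', random_state=0)
svd = TruncatedSVD(n_components=10, algorithm='randomized', random_state=42)
Xt = svd.fit_transform(X)  # dense, shape (60, 10)
print(Xt.shape, svd.explained_variance_ratio_.sum())
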
alshedivat/tensorflow
|
refs/heads/master
|
tensorflow/contrib/quantization/python/__init__.py
|
179
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A module containing TensorFlow ops whose API may change in the future."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import,wildcard-import
from tensorflow.contrib.quantization.python.array_ops import *
from tensorflow.contrib.quantization.python.math_ops import *
from tensorflow.contrib.quantization.python.nn_ops import *
# pylint: enable=unused-import,wildcard-import
|
lightningkay/NoahGameFrame
|
refs/heads/master
|
Dependencies/googletest-release-1.8.0/googlemock/scripts/gmock_doctor.py
|
346
|
#!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Converts compiler's errors in code using Google Mock to plain English."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import re
import sys
_VERSION = '1.0.3'
_EMAIL = 'googlemock@googlegroups.com'
_COMMON_GMOCK_SYMBOLS = [
# Matchers
'_',
'A',
'AddressSatisfies',
'AllOf',
'An',
'AnyOf',
'ContainerEq',
'Contains',
'ContainsRegex',
'DoubleEq',
'ElementsAre',
'ElementsAreArray',
'EndsWith',
'Eq',
'Field',
'FloatEq',
'Ge',
'Gt',
'HasSubstr',
'IsInitializedProto',
'Le',
'Lt',
'MatcherCast',
'Matches',
'MatchesRegex',
'NanSensitiveDoubleEq',
'NanSensitiveFloatEq',
'Ne',
'Not',
'NotNull',
'Pointee',
'Property',
'Ref',
'ResultOf',
'SafeMatcherCast',
'StartsWith',
'StrCaseEq',
'StrCaseNe',
'StrEq',
'StrNe',
'Truly',
'TypedEq',
'Value',
# Actions
'Assign',
'ByRef',
'DeleteArg',
'DoAll',
'DoDefault',
'IgnoreResult',
'Invoke',
'InvokeArgument',
'InvokeWithoutArgs',
'Return',
'ReturnNew',
'ReturnNull',
'ReturnRef',
'SaveArg',
'SetArgReferee',
'SetArgPointee',
'SetArgumentPointee',
'SetArrayArgument',
'SetErrnoAndReturn',
'Throw',
'WithArg',
'WithArgs',
'WithoutArgs',
# Cardinalities
'AnyNumber',
'AtLeast',
'AtMost',
'Between',
'Exactly',
# Sequences
'InSequence',
'Sequence',
# Misc
'DefaultValue',
'Mock',
]
# Regex for matching source file path and line number in the compiler's errors.
_GCC_FILE_LINE_RE = r'(?P<file>.*):(?P<line>\d+):(\d+:)?\s+'
_CLANG_FILE_LINE_RE = r'(?P<file>.*):(?P<line>\d+):(?P<column>\d+):\s+'
_CLANG_NON_GMOCK_FILE_LINE_RE = (
r'(?P<file>.*[/\\^](?!gmock-)[^/\\]+):(?P<line>\d+):(?P<column>\d+):\s+')
def _FindAllMatches(regex, s):
"""Generates all matches of regex in string s."""
r = re.compile(regex)
return r.finditer(s)
def _GenericDiagnoser(short_name, long_name, diagnoses, msg):
"""Diagnoses the given disease by pattern matching.
Can provide different diagnoses for different patterns.
Args:
short_name: Short name of the disease.
long_name: Long name of the disease.
diagnoses: A list of pairs (regex, pattern for formatting the diagnosis
for matching regex).
msg: Compiler's error messages.
Yields:
Tuples of the form
(short name of disease, long name of disease, diagnosis).
"""
for regex, diagnosis in diagnoses:
if re.search(regex, msg):
diagnosis = '%(file)s:%(line)s:' + diagnosis
for m in _FindAllMatches(regex, msg):
yield (short_name, long_name, diagnosis % m.groupdict())
def _NeedToReturnReferenceDiagnoser(msg):
"""Diagnoses the NRR disease, given the error messages by the compiler."""
gcc_regex = (r'In member function \'testing::internal::ReturnAction<R>.*\n'
+ _GCC_FILE_LINE_RE + r'instantiated from here\n'
r'.*gmock-actions\.h.*error: creating array with negative size')
clang_regex = (r'error:.*array.*negative.*\r?\n'
r'(.*\n)*?' +
_CLANG_NON_GMOCK_FILE_LINE_RE +
r'note: in instantiation of function template specialization '
r'\'testing::internal::ReturnAction<(?P<type>.*)>'
r'::operator Action<.*>\' requested here')
clang11_re = (r'use_ReturnRef_instead_of_Return_to_return_a_reference.*'
r'(.*\n)*?' + _CLANG_NON_GMOCK_FILE_LINE_RE)
diagnosis = """
You are using a Return() action in a function that returns a reference to
%(type)s. Please use ReturnRef() instead."""
return _GenericDiagnoser('NRR', 'Need to Return Reference',
[(clang_regex, diagnosis),
(clang11_re, diagnosis % {'type': 'a type'}),
(gcc_regex, diagnosis % {'type': 'a type'})],
msg)
def _NeedToReturnSomethingDiagnoser(msg):
"""Diagnoses the NRS disease, given the error messages by the compiler."""
gcc_regex = (_GCC_FILE_LINE_RE + r'(instantiated from here\n.'
r'*gmock.*actions\.h.*error: void value not ignored)'
r'|(error: control reaches end of non-void function)')
clang_regex1 = (_CLANG_FILE_LINE_RE +
r'error: cannot initialize return object '
r'of type \'Result\' \(aka \'(?P<return_type>.*)\'\) '
r'with an rvalue of type \'void\'')
clang_regex2 = (_CLANG_FILE_LINE_RE +
r'error: cannot initialize return object '
r'of type \'(?P<return_type>.*)\' '
r'with an rvalue of type \'void\'')
diagnosis = """
You are using an action that returns void, but it needs to return
%(return_type)s. Please tell it *what* to return. Perhaps you can use
the pattern DoAll(some_action, Return(some_value))?"""
return _GenericDiagnoser(
'NRS',
'Need to Return Something',
[(gcc_regex, diagnosis % {'return_type': '*something*'}),
(clang_regex1, diagnosis),
(clang_regex2, diagnosis)],
msg)
def _NeedToReturnNothingDiagnoser(msg):
"""Diagnoses the NRN disease, given the error messages by the compiler."""
gcc_regex = (_GCC_FILE_LINE_RE + r'instantiated from here\n'
r'.*gmock-actions\.h.*error: instantiation of '
r'\'testing::internal::ReturnAction<R>::Impl<F>::value_\' '
r'as type \'void\'')
clang_regex1 = (r'error: field has incomplete type '
r'\'Result\' \(aka \'void\'\)(\r)?\n'
r'(.*\n)*?' +
_CLANG_NON_GMOCK_FILE_LINE_RE + r'note: in instantiation '
r'of function template specialization '
r'\'testing::internal::ReturnAction<(?P<return_type>.*)>'
r'::operator Action<void \(.*\)>\' requested here')
clang_regex2 = (r'error: field has incomplete type '
r'\'Result\' \(aka \'void\'\)(\r)?\n'
r'(.*\n)*?' +
_CLANG_NON_GMOCK_FILE_LINE_RE + r'note: in instantiation '
r'of function template specialization '
r'\'testing::internal::DoBothAction<.*>'
r'::operator Action<(?P<return_type>.*) \(.*\)>\' '
r'requested here')
diagnosis = """
You are using an action that returns %(return_type)s, but it needs to return
void. Please use a void-returning action instead.
All actions but the last in DoAll(...) must return void. Perhaps you need
to re-arrange the order of actions in a DoAll(), if you are using one?"""
return _GenericDiagnoser(
'NRN',
'Need to Return Nothing',
[(gcc_regex, diagnosis % {'return_type': '*something*'}),
(clang_regex1, diagnosis),
(clang_regex2, diagnosis)],
msg)
def _IncompleteByReferenceArgumentDiagnoser(msg):
"""Diagnoses the IBRA disease, given the error messages by the compiler."""
gcc_regex = (_GCC_FILE_LINE_RE + r'instantiated from here\n'
r'.*gtest-printers\.h.*error: invalid application of '
r'\'sizeof\' to incomplete type \'(?P<type>.*)\'')
clang_regex = (r'.*gtest-printers\.h.*error: invalid application of '
r'\'sizeof\' to an incomplete type '
r'\'(?P<type>.*)( const)?\'\r?\n'
r'(.*\n)*?' +
_CLANG_NON_GMOCK_FILE_LINE_RE +
r'note: in instantiation of member function '
r'\'testing::internal2::TypeWithoutFormatter<.*>::'
r'PrintValue\' requested here')
diagnosis = """
In order to mock this function, Google Mock needs to see the definition
of type "%(type)s" - declaration alone is not enough. Either #include
the header that defines it, or change the argument to be passed
by pointer."""
return _GenericDiagnoser('IBRA', 'Incomplete By-Reference Argument Type',
[(gcc_regex, diagnosis),
(clang_regex, diagnosis)],
msg)
def _OverloadedFunctionMatcherDiagnoser(msg):
"""Diagnoses the OFM disease, given the error messages by the compiler."""
gcc_regex = (_GCC_FILE_LINE_RE + r'error: no matching function for '
r'call to \'Truly\(<unresolved overloaded function type>\)')
clang_regex = (_CLANG_FILE_LINE_RE + r'error: no matching function for '
r'call to \'Truly')
diagnosis = """
The argument you gave to Truly() is an overloaded function. Please tell
your compiler which overloaded version you want to use.
For example, if you want to use the version whose signature is
bool Foo(int n);
you should write
Truly(static_cast<bool (*)(int n)>(Foo))"""
return _GenericDiagnoser('OFM', 'Overloaded Function Matcher',
[(gcc_regex, diagnosis),
(clang_regex, diagnosis)],
msg)
def _OverloadedFunctionActionDiagnoser(msg):
"""Diagnoses the OFA disease, given the error messages by the compiler."""
gcc_regex = (_GCC_FILE_LINE_RE + r'error: no matching function for call to '
r'\'Invoke\(<unresolved overloaded function type>')
clang_regex = (_CLANG_FILE_LINE_RE + r'error: no matching '
r'function for call to \'Invoke\'\r?\n'
r'(.*\n)*?'
r'.*\bgmock-generated-actions\.h:\d+:\d+:\s+'
r'note: candidate template ignored:\s+'
r'couldn\'t infer template argument \'FunctionImpl\'')
diagnosis = """
Function you are passing to Invoke is overloaded. Please tell your compiler
which overloaded version you want to use.
For example, if you want to use the version whose signature is
bool MyFunction(int n, double x);
you should write something like
Invoke(static_cast<bool (*)(int n, double x)>(MyFunction))"""
return _GenericDiagnoser('OFA', 'Overloaded Function Action',
[(gcc_regex, diagnosis),
(clang_regex, diagnosis)],
msg)
def _OverloadedMethodActionDiagnoser(msg):
"""Diagnoses the OMA disease, given the error messages by the compiler."""
gcc_regex = (_GCC_FILE_LINE_RE + r'error: no matching function for '
r'call to \'Invoke\(.+, <unresolved overloaded function '
r'type>\)')
clang_regex = (_CLANG_FILE_LINE_RE + r'error: no matching function '
r'for call to \'Invoke\'\r?\n'
r'(.*\n)*?'
r'.*\bgmock-generated-actions\.h:\d+:\d+: '
r'note: candidate function template not viable: '
r'requires .*, but 2 (arguments )?were provided')
diagnosis = """
The second argument you gave to Invoke() is an overloaded method. Please
tell your compiler which overloaded version you want to use.
For example, if you want to use the version whose signature is
class Foo {
...
bool Bar(int n, double x);
};
you should write something like
Invoke(foo, static_cast<bool (Foo::*)(int n, double x)>(&Foo::Bar))"""
return _GenericDiagnoser('OMA', 'Overloaded Method Action',
[(gcc_regex, diagnosis),
(clang_regex, diagnosis)],
msg)
def _MockObjectPointerDiagnoser(msg):
"""Diagnoses the MOP disease, given the error messages by the compiler."""
gcc_regex = (_GCC_FILE_LINE_RE + r'error: request for member '
r'\'gmock_(?P<method>.+)\' in \'(?P<mock_object>.+)\', '
r'which is of non-class type \'(.*::)*(?P<class_name>.+)\*\'')
clang_regex = (_CLANG_FILE_LINE_RE + r'error: member reference type '
r'\'(?P<class_name>.*?) *\' is a pointer; '
r'(did you mean|maybe you meant) to use \'->\'\?')
diagnosis = """
The first argument to ON_CALL() and EXPECT_CALL() must be a mock *object*,
not a *pointer* to it. Please write '*(%(mock_object)s)' instead of
'%(mock_object)s' as your first argument.
For example, given the mock class:
class %(class_name)s : public ... {
...
MOCK_METHOD0(%(method)s, ...);
};
and the following mock instance:
%(class_name)s* mock_ptr = ...
you should use the EXPECT_CALL like this:
EXPECT_CALL(*mock_ptr, %(method)s(...));"""
return _GenericDiagnoser(
'MOP',
'Mock Object Pointer',
[(gcc_regex, diagnosis),
(clang_regex, diagnosis % {'mock_object': 'mock_object',
'method': 'method',
'class_name': '%(class_name)s'})],
msg)
def _NeedToUseSymbolDiagnoser(msg):
"""Diagnoses the NUS disease, given the error messages by the compiler."""
gcc_regex = (_GCC_FILE_LINE_RE + r'error: \'(?P<symbol>.+)\' '
r'(was not declared in this scope|has not been declared)')
clang_regex = (_CLANG_FILE_LINE_RE +
r'error: (use of undeclared identifier|unknown type name|'
r'no template named) \'(?P<symbol>[^\']+)\'')
diagnosis = """
'%(symbol)s' is defined by Google Mock in the testing namespace.
Did you forget to write
using testing::%(symbol)s;
?"""
for m in (list(_FindAllMatches(gcc_regex, msg)) +
list(_FindAllMatches(clang_regex, msg))):
symbol = m.groupdict()['symbol']
if symbol in _COMMON_GMOCK_SYMBOLS:
yield ('NUS', 'Need to Use Symbol', diagnosis % m.groupdict())
def _NeedToUseReturnNullDiagnoser(msg):
"""Diagnoses the NRNULL disease, given the error messages by the compiler."""
gcc_regex = ('instantiated from \'testing::internal::ReturnAction<R>'
'::operator testing::Action<Func>\(\) const.*\n' +
_GCC_FILE_LINE_RE + r'instantiated from here\n'
r'.*error: no matching function for call to \'ImplicitCast_\('
r'(:?long )?int&\)')
clang_regex = (r'\bgmock-actions.h:.* error: no matching function for '
r'call to \'ImplicitCast_\'\r?\n'
r'(.*\n)*?' +
_CLANG_NON_GMOCK_FILE_LINE_RE + r'note: in instantiation '
r'of function template specialization '
r'\'testing::internal::ReturnAction<(int|long)>::operator '
r'Action<(?P<type>.*)\(\)>\' requested here')
diagnosis = """
You are probably calling Return(NULL) and the compiler isn't sure how to turn
NULL into %(type)s. Use ReturnNull() instead.
Note: the line number may be off; please fix all instances of Return(NULL)."""
return _GenericDiagnoser(
'NRNULL', 'Need to use ReturnNull',
[(clang_regex, diagnosis),
(gcc_regex, diagnosis % {'type': 'the right type'})],
msg)
def _TypeInTemplatedBaseDiagnoser(msg):
"""Diagnoses the TTB disease, given the error messages by the compiler."""
# This version works when the type is used as the mock function's return
# type.
gcc_4_3_1_regex_type_in_retval = (
r'In member function \'int .*\n' + _GCC_FILE_LINE_RE +
r'error: a function call cannot appear in a constant-expression')
gcc_4_4_0_regex_type_in_retval = (
r'error: a function call cannot appear in a constant-expression'
+ _GCC_FILE_LINE_RE + r'error: template argument 1 is invalid\n')
# This version works when the type is used as the mock function's sole
# parameter type.
gcc_regex_type_of_sole_param = (
_GCC_FILE_LINE_RE +
r'error: \'(?P<type>.+)\' was not declared in this scope\n'
r'.*error: template argument 1 is invalid\n')
# This version works when the type is used as a parameter of a mock
# function that has multiple parameters.
gcc_regex_type_of_a_param = (
r'error: expected `;\' before \'::\' token\n'
+ _GCC_FILE_LINE_RE +
r'error: \'(?P<type>.+)\' was not declared in this scope\n'
r'.*error: template argument 1 is invalid\n'
r'.*error: \'.+\' was not declared in this scope')
clang_regex_type_of_retval_or_sole_param = (
_CLANG_FILE_LINE_RE +
r'error: use of undeclared identifier \'(?P<type>.*)\'\n'
r'(.*\n)*?'
r'(?P=file):(?P=line):\d+: error: '
r'non-friend class member \'Result\' cannot have a qualified name'
)
clang_regex_type_of_a_param = (
_CLANG_FILE_LINE_RE +
r'error: C\+\+ requires a type specifier for all declarations\n'
r'(.*\n)*?'
r'(?P=file):(?P=line):(?P=column): error: '
r'C\+\+ requires a type specifier for all declarations'
)
clang_regex_unknown_type = (
_CLANG_FILE_LINE_RE +
r'error: unknown type name \'(?P<type>[^\']+)\''
)
diagnosis = """
In a mock class template, types or typedefs defined in the base class
template are *not* automatically visible. This is how C++ works. Before
you can use a type or typedef named %(type)s defined in base class Base<T>, you
need to make it visible. One way to do it is:
typedef typename Base<T>::%(type)s %(type)s;"""
for diag in _GenericDiagnoser(
'TTB', 'Type in Template Base',
[(gcc_4_3_1_regex_type_in_retval, diagnosis % {'type': 'Foo'}),
(gcc_4_4_0_regex_type_in_retval, diagnosis % {'type': 'Foo'}),
(gcc_regex_type_of_sole_param, diagnosis),
(gcc_regex_type_of_a_param, diagnosis),
(clang_regex_type_of_retval_or_sole_param, diagnosis),
(clang_regex_type_of_a_param, diagnosis % {'type': 'Foo'})],
msg):
yield diag
# Avoid overlap with the NUS pattern.
for m in _FindAllMatches(clang_regex_unknown_type, msg):
type_ = m.groupdict()['type']
if type_ not in _COMMON_GMOCK_SYMBOLS:
yield ('TTB', 'Type in Template Base', diagnosis % m.groupdict())
def _WrongMockMethodMacroDiagnoser(msg):
"""Diagnoses the WMM disease, given the error messages by the compiler."""
gcc_regex = (_GCC_FILE_LINE_RE +
r'.*this_method_does_not_take_(?P<wrong_args>\d+)_argument.*\n'
r'.*\n'
r'.*candidates are.*FunctionMocker<[^>]+A(?P<args>\d+)\)>')
clang_regex = (_CLANG_NON_GMOCK_FILE_LINE_RE +
r'error:.*array.*negative.*r?\n'
r'(.*\n)*?'
r'(?P=file):(?P=line):(?P=column): error: too few arguments '
r'to function call, expected (?P<args>\d+), '
r'have (?P<wrong_args>\d+)')
clang11_re = (_CLANG_NON_GMOCK_FILE_LINE_RE +
r'.*this_method_does_not_take_'
r'(?P<wrong_args>\d+)_argument.*')
diagnosis = """
You are using MOCK_METHOD%(wrong_args)s to define a mock method that has
%(args)s arguments. Use MOCK_METHOD%(args)s (or MOCK_CONST_METHOD%(args)s,
MOCK_METHOD%(args)s_T, MOCK_CONST_METHOD%(args)s_T as appropriate) instead."""
return _GenericDiagnoser('WMM', 'Wrong MOCK_METHODn Macro',
[(gcc_regex, diagnosis),
(clang11_re, diagnosis % {'wrong_args': 'm',
'args': 'n'}),
(clang_regex, diagnosis)],
msg)
def _WrongParenPositionDiagnoser(msg):
"""Diagnoses the WPP disease, given the error messages by the compiler."""
gcc_regex = (_GCC_FILE_LINE_RE +
r'error:.*testing::internal::MockSpec<.* has no member named \''
r'(?P<method>\w+)\'')
clang_regex = (_CLANG_NON_GMOCK_FILE_LINE_RE +
r'error: no member named \'(?P<method>\w+)\' in '
r'\'testing::internal::MockSpec<.*>\'')
diagnosis = """
The closing parenthesis of ON_CALL or EXPECT_CALL should be *before*
".%(method)s". For example, you should write:
EXPECT_CALL(my_mock, Foo(_)).%(method)s(...);
instead of:
EXPECT_CALL(my_mock, Foo(_).%(method)s(...));"""
return _GenericDiagnoser('WPP', 'Wrong Parenthesis Position',
[(gcc_regex, diagnosis),
(clang_regex, diagnosis)],
msg)
_DIAGNOSERS = [
_IncompleteByReferenceArgumentDiagnoser,
_MockObjectPointerDiagnoser,
_NeedToReturnNothingDiagnoser,
_NeedToReturnReferenceDiagnoser,
_NeedToReturnSomethingDiagnoser,
_NeedToUseReturnNullDiagnoser,
_NeedToUseSymbolDiagnoser,
_OverloadedFunctionActionDiagnoser,
_OverloadedFunctionMatcherDiagnoser,
_OverloadedMethodActionDiagnoser,
_TypeInTemplatedBaseDiagnoser,
_WrongMockMethodMacroDiagnoser,
_WrongParenPositionDiagnoser,
]
def Diagnose(msg):
"""Generates all possible diagnoses given the compiler error message."""
msg = re.sub(r'\x1b\[[^m]*m', '', msg) # Strips all color formatting.
# Assuming the string is using the UTF-8 encoding, replaces the left and
# the right single quote characters with apostrophes.
msg = re.sub(r'(\xe2\x80\x98|\xe2\x80\x99)', "'", msg)
diagnoses = []
for diagnoser in _DIAGNOSERS:
for diag in diagnoser(msg):
diagnosis = '[%s - %s]\n%s' % diag
if not diagnosis in diagnoses:
diagnoses.append(diagnosis)
return diagnoses
def main():
print ('Google Mock Doctor v%s - '
'diagnoses problems in code using Google Mock.' % _VERSION)
if sys.stdin.isatty():
print ('Please copy and paste the compiler errors here. Press c-D when '
'you are done:')
else:
print ('Waiting for compiler errors on stdin . . .')
msg = sys.stdin.read().strip()
diagnoses = Diagnose(msg)
count = len(diagnoses)
if not count:
print ("""
Your compiler complained:
8<------------------------------------------------------------
%s
------------------------------------------------------------>8
Uh-oh, I'm not smart enough to figure out what the problem is. :-(
However...
If you send your source code and the compiler's error messages to
%s, you can be helped and I can get smarter --
win-win for us!""" % (msg, _EMAIL))
else:
print ('------------------------------------------------------------')
    print ('Your code appears to have the following')
if count > 1:
print ('%s diseases:' % (count,))
else:
print ('disease:')
i = 0
for d in diagnoses:
i += 1
if count > 1:
print ('\n#%s:' % (i,))
print (d)
print ("""
How did I do? If you think I'm wrong or unhelpful, please send your
source code and the compiler's error messages to %s.
Then you can be helped and I can get smarter -- I promise I won't be upset!""" %
_EMAIL)
if __name__ == '__main__':
main()
|
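
Each diagnoser above pairs compiler-specific regexes with a diagnosis template, and Diagnose() fans one message out to all of them. A hedged sketch of calling it directly, assuming the gmock_doctor module above is importable (the error line is fabricated; it trips the NUS diagnoser because 'Return' is a known gmock symbol):

msg = "foo_test.cc:12:5: error: use of undeclared identifier 'Return'"
for d in Diagnose(msg):
    print(d)  # -> [NUS - Need to Use Symbol] ... using testing::Return; ?
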
ooici/marine-integrations
|
refs/heads/master
|
mi/dataset/driver/issmcnsm/flort/driver.py
|
1
|
"""
@package mi.dataset.driver.issmcnsm.flort.driver
@file marine-integrations/mi/dataset/driver/issmcnsm/flort/driver.py
@author Emily Hahn
@brief Driver for the issmcnsm_flort
Release notes:
Initial release
"""
__author__ = 'Emily Hahn'
__license__ = 'Apache 2.0'
import string
from mi.core.log import get_logger ; log = get_logger()
from mi.dataset.dataset_driver import SimpleDataSetDriver
from mi.dataset.parser.issmcnsm_flortd import Issmcnsm_flortdParser, Issmcnsm_flortdParserDataParticle
from mi.dataset.harvester import SingleDirectoryHarvester
class IssmCnsmFLORTDDataSetDriver(SimpleDataSetDriver):
@classmethod
def stream_config(cls):
return [Issmcnsm_flortdParserDataParticle.type()]
def _build_parser(self, parser_state, infile):
"""
Build and return the parser
"""
config = self._parser_config
config.update({
'particle_module': 'mi.dataset.parser.issmcnsm_flortd',
'particle_class': 'Issmcnsm_flortdParserDataParticle'
})
log.debug("My Config: %s", config)
self._parser = Issmcnsm_flortdParser(
config,
parser_state,
infile,
self._save_parser_state,
self._data_callback
)
return self._parser
def _build_harvester(self, driver_state):
"""
Build and return the harvester
"""
self._harvester = SingleDirectoryHarvester(
self._harvester_config,
driver_state,
self._new_file_callback,
self._modified_file_callback,
self._exception_callback
)
return self._harvester
|
jordiclariana/ansible
|
refs/heads/devel
|
lib/ansible/modules/clustering/znode.py
|
13
|
#!/usr/bin/python
# Copyright 2015 WP Engine, Inc. All rights reserved.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = """
---
module: znode
version_added: "2.0"
short_description: Create, delete, retrieve, and update znodes using ZooKeeper
description:
- Create, delete, retrieve, and update znodes using ZooKeeper.
options:
hosts:
description:
- A list of ZooKeeper servers (format '[server]:[port]').
required: true
name:
description:
- The path of the znode.
required: true
value:
description:
- The value assigned to the znode.
default: None
required: false
op:
description:
- An operation to perform. Mutually exclusive with state.
default: None
required: false
state:
description:
- The state to enforce. Mutually exclusive with op.
default: None
required: false
timeout:
description:
- The amount of time to wait for a node to appear.
default: 300
required: false
recursive:
description:
- Recursively delete node and all its children.
default: False
required: false
version_added: "2.1"
requirements:
- kazoo >= 2.1
- python >= 2.6
author: "Trey Perry (@treyperry)"
"""
EXAMPLES = """
# Creating or updating a znode with a given value
- znode:
hosts: 'localhost:2181'
name: /mypath
value: myvalue
state: present
# Getting the value and stat structure for a znode
- znode:
hosts: 'localhost:2181'
name: /mypath
op: get
# Listing a particular znode's children
- znode:
hosts: 'localhost:2181'
name: /zookeeper
op: list
# Waiting 20 seconds for a znode to appear at path /mypath
- znode:
hosts: 'localhost:2181'
name: /mypath
op: wait
timeout: 20
# Deleting a znode at path /mypath
- znode:
hosts: 'localhost:2181'
name: /mypath
state: absent
"""
import time

try:
from kazoo.client import KazooClient
from kazoo.exceptions import NoNodeError, ZookeeperError
from kazoo.handlers.threading import KazooTimeoutError
KAZOO_INSTALLED = True
except ImportError:
KAZOO_INSTALLED = False
def main():
module = AnsibleModule(
argument_spec=dict(
hosts=dict(required=True, type='str'),
name=dict(required=True, type='str'),
value=dict(required=False, default=None, type='str'),
op=dict(required=False, default=None, choices=['get', 'wait', 'list']),
state=dict(choices=['present', 'absent']),
timeout=dict(required=False, default=300, type='int'),
recursive=dict(required=False, default=False, type='bool')
),
supports_check_mode=False
)
if not KAZOO_INSTALLED:
module.fail_json(msg='kazoo >= 2.1 is required to use this module. Use pip to install it.')
check = check_params(module.params)
if not check['success']:
module.fail_json(msg=check['msg'])
zoo = KazooCommandProxy(module)
try:
zoo.start()
except KazooTimeoutError:
module.fail_json(msg='The connection to the ZooKeeper ensemble timed out.')
command_dict = {
'op': {
'get': zoo.get,
'list': zoo.list,
'wait': zoo.wait
},
'state': {
'present': zoo.present,
'absent': zoo.absent
}
}
command_type = 'op' if 'op' in module.params and module.params['op'] is not None else 'state'
method = module.params[command_type]
result, result_dict = command_dict[command_type][method]()
zoo.shutdown()
if result:
module.exit_json(**result_dict)
else:
module.fail_json(**result_dict)
def check_params(params):
if not params['state'] and not params['op']:
return {'success': False, 'msg': 'Please define an operation (op) or a state.'}
if params['state'] and params['op']:
return {'success': False, 'msg': 'Please choose an operation (op) or a state, but not both.'}
return {'success': True}
class KazooCommandProxy():
def __init__(self, module):
self.module = module
self.zk = KazooClient(module.params['hosts'])
def absent(self):
return self._absent(self.module.params['name'])
def exists(self, znode):
return self.zk.exists(znode)
def list(self):
children = self.zk.get_children(self.module.params['name'])
return True, {'count': len(children), 'items': children, 'msg': 'Retrieved znodes in path.',
'znode': self.module.params['name']}
def present(self):
return self._present(self.module.params['name'], self.module.params['value'])
def get(self):
return self._get(self.module.params['name'])
def shutdown(self):
self.zk.stop()
self.zk.close()
def start(self):
self.zk.start()
def wait(self):
return self._wait(self.module.params['name'], self.module.params['timeout'])
def _absent(self, znode):
if self.exists(znode):
self.zk.delete(znode, recursive=self.module.params['recursive'])
return True, {'changed': True, 'msg': 'The znode was deleted.'}
else:
return True, {'changed': False, 'msg': 'The znode does not exist.'}
def _get(self, path):
if self.exists(path):
value, zstat = self.zk.get(path)
stat_dict = {}
for i in dir(zstat):
if not i.startswith('_'):
attr = getattr(zstat, i)
if isinstance(attr, (int, str)):
stat_dict[i] = attr
result = True, {'msg': 'The node was retrieved.', 'znode': path, 'value': value,
'stat': stat_dict}
else:
result = False, {'msg': 'The requested node does not exist.'}
return result
def _present(self, path, value):
if self.exists(path):
(current_value, zstat) = self.zk.get(path)
if value != current_value:
self.zk.set(path, value)
return True, {'changed': True, 'msg': 'Updated the znode value.', 'znode': path,
'value': value}
else:
return True, {'changed': False, 'msg': 'No changes were necessary.', 'znode': path, 'value': value}
else:
self.zk.create(path, value, makepath=True)
return True, {'changed': True, 'msg': 'Created a new znode.', 'znode': path, 'value': value}
def _wait(self, path, timeout, interval=5):
lim = time.time() + timeout
while time.time() < lim:
if self.exists(path):
return True, {'msg': 'The node appeared before the configured timeout.',
'znode': path, 'timeout': timeout}
else:
time.sleep(interval)
return False, {'msg': 'The node did not appear before the operation timed out.', 'timeout': timeout,
'znode': path}
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
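
The _present() method above is the usual kazoo create-or-update idiom: check exists(), then set() or create() with makepath=True. The same flow standalone, against the real kazoo client API (host and path are example values; kazoo expects bytes values on Python 3):

from kazoo.client import KazooClient

zk = KazooClient(hosts='localhost:2181')
zk.start()
if zk.exists('/mypath'):
    zk.set('/mypath', b'myvalue')                  # update in place
else:
    zk.create('/mypath', b'myvalue', makepath=True)  # create parents too
zk.stop()
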
zaxliu/scipy
|
refs/heads/master
|
scipy/misc/tests/test_pilutil.py
|
46
|
from __future__ import division, print_function, absolute_import
import os.path
import tempfile
import shutil
import numpy as np
import warnings
from numpy.testing import (assert_, assert_equal, dec, decorate_methods,
TestCase, run_module_suite, assert_allclose)
from scipy import misc
try:
import PIL.Image
except ImportError:
_have_PIL = False
else:
_have_PIL = True
# Function / method decorator for skipping PIL tests on import failure
_pilskip = dec.skipif(not _have_PIL, 'Need to import PIL for this test')
datapath = os.path.dirname(__file__)
class TestPILUtil(TestCase):
def test_imresize(self):
im = np.random.random((10,20))
for T in np.sctypes['float'] + [float]:
# 1.1 rounds to below 1.1 for float16, 1.101 works
im1 = misc.imresize(im,T(1.101))
assert_equal(im1.shape,(11,22))
def test_imresize2(self):
im = np.random.random((20,30))
im2 = misc.imresize(im, (30,40), interp='bicubic')
assert_equal(im2.shape, (30,40))
def test_imresize3(self):
im = np.random.random((15,30))
im2 = misc.imresize(im, (30,60), interp='nearest')
assert_equal(im2.shape, (30,60))
def test_imresize4(self):
im = np.array([[1, 2],
[3, 4]])
# Check that resizing by target size, float and int are the same
im2 = misc.imresize(im, (4,4), mode='F') # output size
im3 = misc.imresize(im, 2., mode='F') # fraction
im4 = misc.imresize(im, 200, mode='F') # percentage
assert_equal(im2, im3)
assert_equal(im2, im4)
def test_bytescale(self):
x = np.array([0,1,2], np.uint8)
y = np.array([0,1,2])
assert_equal(misc.bytescale(x), x)
assert_equal(misc.bytescale(y), [0,127,255])
def test_bytescale_keywords(self):
x = np.array([40, 60, 120, 200, 300, 500])
res_lowhigh = misc.bytescale(x, low=10, high=143)
assert_equal(res_lowhigh, [10, 16, 33, 56, 85, 143])
res_cmincmax = misc.bytescale(x, cmin=60, cmax=300)
assert_equal(res_cmincmax, [0, 0, 64, 149, 255, 255])
assert_equal(misc.bytescale(np.array([3, 3, 3]), low=4), [4, 4, 4])
def test_imsave(self):
with warnings.catch_warnings(record=True): # PIL ResourceWarning
img = misc.imread(os.path.join(datapath, 'data', 'icon.png'))
tmpdir = tempfile.mkdtemp()
try:
fn1 = os.path.join(tmpdir, 'test.png')
fn2 = os.path.join(tmpdir, 'testimg')
with warnings.catch_warnings(record=True): # PIL ResourceWarning
misc.imsave(fn1, img)
misc.imsave(fn2, img, 'PNG')
with warnings.catch_warnings(record=True): # PIL ResourceWarning
data1 = misc.imread(fn1)
data2 = misc.imread(fn2)
assert_allclose(data1, img)
assert_allclose(data2, img)
finally:
shutil.rmtree(tmpdir)
def tst_fromimage(filename, irange):
fp = open(filename, "rb")
img = misc.fromimage(PIL.Image.open(fp))
fp.close()
imin,imax = irange
assert_(img.min() >= imin)
assert_(img.max() <= imax)
@_pilskip
def test_fromimage():
# Test generator for parametric tests
data = {'icon.png':(0,255),
'icon_mono.png':(0,2),
'icon_mono_flat.png':(0,1)}
for fn, irange in data.items():
yield tst_fromimage, os.path.join(datapath,'data',fn), irange
decorate_methods(TestPILUtil, _pilskip)
if __name__ == "__main__":
run_module_suite()
|
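
bytescale in the tests above linearly rescales an array into the 0-255 uint8 range, optionally clipping to a cmin/cmax window. A quick illustration using the same values as test_bytescale_keywords (note scipy.misc.bytescale only exists in older SciPy releases; it was removed in SciPy 1.2):

import numpy as np
from scipy import misc  # requires SciPy < 1.2

x = np.array([40, 60, 120, 200, 300, 500])
# Values at or below cmin map to 0, at or above cmax to 255.
print(misc.bytescale(x, cmin=60, cmax=300))  # [  0   0  64 149 255 255]
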
AlanZatarain/cortex-vfx
|
refs/heads/master
|
test/IECore/TriangleAlgoTest.py
|
12
|
##########################################################################
#
# Copyright (c) 2008, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import math
from IECore import *
class TriangleAlgoTest( unittest.TestCase ) :
def testContainsPoint( self ) :
r = Rand32()
v0 = V3f( 0, 0, 0 )
v1 = V3f( 1, 0, 0 )
v2 = V3f( 0, 1, 0 )
for i in range( 0, 10000 ) :
p = V3f( r.nextf( -1, 1 ), r.nextf( -1, 1 ), 0 )
if p.x < 0 or p.y < 0 or p.x + p.y > 1 :
self.failIf( triangleContainsPoint( v0, v1, v2, p ) )
else :
self.failUnless( triangleContainsPoint( v0, v1, v2, p ) )
r = Rand32()
for i in range( 0, 10000 ) :
v0 = r.nextV3f()
v1 = r.nextV3f()
v2 = r.nextV3f()
if triangleArea( v0, v1, v2 ) > 0.01 :
u = r.nextf( 0, 1 )
v = r.nextf( 0, 1 )
if u + v < 1 :
w = 1 - ( u + v )
p = u * v0 + v * v1 + w * v2
self.failUnless( triangleContainsPoint( v0, v1, v2, p ) )
def testNormal( self ) :
v0 = V3f( 0, 0, 0 )
v1 = V3f( 1, 0, 0 )
v2 = V3f( 0, 1, 0 )
n = triangleNormal( v0, v1, v2 )
self.assertEqual( n, V3f( 0, 0, 1 ) )
def testContainsPointWithBarycentric( self ) :
r = Rand32()
v0 = V3f( 0, 0, 0 )
v1 = V3f( 1, 0, 0 )
v2 = V3f( 0, 1, 0 )
for i in range( 0, 10000 ) :
b = V3f( r.nextf( -1, 1 ), r.nextf( -1, 1 ), 0 )
b.z = 1 - ( b.x + b.y )
p = trianglePoint( v0, v1, v2, b )
if p.x < 0 or p.y < 0 or p.x + p.y > 1 :
self.failIf( triangleContainsPoint( v0, v1, v2, p ) )
else :
bb = triangleContainsPoint( v0, v1, v2, p )
self.failUnless( bb.equalWithAbsError( b, 0.0001 ) )
if __name__ == "__main__":
unittest.main()
|
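
The containment tests above lean on barycentric coordinates: p lies inside triangle (v0, v1, v2) exactly when p = u*v0 + v*v1 + w*v2 with u + v + w = 1 and all three coefficients non-negative, which is also how the tests generate guaranteed-interior points. A standalone numpy sketch of that check (not the IECore implementation):

import numpy as np

def barycentric(v0, v1, v2, p):
    # Solve p = u*v0 + v*v1 + w*v2 subject to u + v + w = 1 (2D case).
    A = np.array([[v0[0], v1[0], v2[0]],
                  [v0[1], v1[1], v2[1]],
                  [1.0,   1.0,   1.0 ]])
    return np.linalg.solve(A, np.array([p[0], p[1], 1.0]))

u, v, w = barycentric((0, 0), (1, 0), (0, 1), (0.2, 0.3))
print(u, v, w, min(u, v, w) >= 0)  # ~0.5 ~0.2 ~0.3 True (inside)
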
google-code/android-scripting
|
refs/heads/master
|
python/xmpppy/doc/examples/bot.py
|
87
|
#!/usr/bin/python
# -*- coding: koi8-r -*-
# $Id: bot.py,v 1.2 2006/10/06 12:30:42 normanr Exp $
import sys
import xmpp
commands={}
i18n={'ru':{},'en':{}}
########################### user handlers start ##################################
i18n['en']['HELP']="This is an example Jabber bot.\nAvailable commands: %s"
def helpHandler(user,command,args,mess):
lst=commands.keys()
lst.sort()
return "HELP",', '.join(lst)
i18n['en']['EMPTY']="%s"
i18n['en']['HOOK1']='Response 1: %s'
def hook1Handler(user,command,args,mess):
return "HOOK1",'You requested: %s'%args
i18n['en']['HOOK2']='Response 2: %s'
def hook2Handler(user,command,args,mess):
return "HOOK2","hook2 called with %s"%(`(user,command,args,mess)`)
i18n['en']['HOOK3']='Response 3: static string'
def hook3Handler(user,command,args,mess):
return "HOOK3"*int(args)
########################### user handlers stop ###################################
############################ bot logic start #####################################
i18n['en']["UNKNOWN COMMAND"]='Unknown command "%s". Try "help"'
i18n['en']["UNKNOWN USER"]="I do not know you. Register first."
def messageCB(conn,mess):
text=mess.getBody()
user=mess.getFrom()
user.lang='en' # dup
if text.find(' ')+1: command,args=text.split(' ',1)
else: command,args=text,''
cmd=command.lower()
if commands.has_key(cmd): reply=commands[cmd](user,command,args,mess)
else: reply=("UNKNOWN COMMAND",cmd)
if type(reply)==type(()):
key,args=reply
if i18n[user.lang].has_key(key): pat=i18n[user.lang][key]
elif i18n['en'].has_key(key): pat=i18n['en'][key]
else: pat="%s"
if type(pat)==type(''): reply=pat%args
else: reply=pat(**args)
else:
try: reply=i18n[user.lang][reply]
except KeyError:
try: reply=i18n['en'][reply]
except KeyError: pass
if reply: conn.send(xmpp.Message(mess.getFrom(),reply))
for i in globals().keys():
if i[-7:]=='Handler' and i[:-7].lower()==i[:-7]: commands[i[:-7]]=globals()[i]
############################# bot logic stop #####################################
def StepOn(conn):
try:
conn.Process(1)
except KeyboardInterrupt: return 0
return 1
def GoOn(conn):
while StepOn(conn): pass
if len(sys.argv)<3:
print "Usage: bot.py username@server.net password"
else:
jid=xmpp.JID(sys.argv[1])
user,server,password=jid.getNode(),jid.getDomain(),sys.argv[2]
conn=xmpp.Client(server)#,debug=[])
conres=conn.connect()
if not conres:
print "Unable to connect to server %s!"%server
sys.exit(1)
if conres<>'tls':
print "Warning: unable to estabilish secure connection - TLS failed!"
authres=conn.auth(user,password)
if not authres:
print "Unable to authorize on %s - check login/password."%server
sys.exit(1)
    if authres != 'sasl':
        print "Warning: unable to perform SASL auth on %s. Old authentication method used!"%server
conn.RegisterHandler('message',messageCB)
conn.sendInitPresence()
print "Bot started."
GoOn(conn)
|
chamakov/namebench
|
refs/heads/master
|
nb_third_party/dns/rrset.py
|
215
|
# Copyright (C) 2003-2007, 2009, 2010 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS RRsets (an RRset is a named rdataset)"""
import dns.name
import dns.rdata
import dns.rdataset
import dns.rdataclass
import dns.rdatatype
import dns.renderer
class RRset(dns.rdataset.Rdataset):
"""A DNS RRset (named rdataset).
RRset inherits from Rdataset, and RRsets can be treated as
Rdatasets in most cases. There are, however, a few notable
exceptions. RRsets have different to_wire() and to_text() method
arguments, reflecting the fact that RRsets always have an owner
name.
"""
__slots__ = ['name', 'deleting']
def __init__(self, name, rdclass, rdtype, covers=dns.rdatatype.NONE,
deleting=None):
"""Create a new RRset."""
super(RRset, self).__init__(rdclass, rdtype)
self.name = name
self.deleting = deleting
def _clone(self):
obj = super(RRset, self)._clone()
obj.name = self.name
obj.deleting = self.deleting
return obj
def __repr__(self):
if self.covers == 0:
ctext = ''
else:
ctext = '(' + dns.rdatatype.to_text(self.covers) + ')'
        if self.deleting is not None:
            dtext = ' delete=' + dns.rdataclass.to_text(self.deleting)
        else:
            dtext = ''
return '<DNS ' + str(self.name) + ' ' + \
dns.rdataclass.to_text(self.rdclass) + ' ' + \
dns.rdatatype.to_text(self.rdtype) + ctext + dtext + ' RRset>'
def __str__(self):
return self.to_text()
def __eq__(self, other):
"""Two RRsets are equal if they have the same name and the same
rdataset
@rtype: bool"""
if not isinstance(other, RRset):
return False
if self.name != other.name:
return False
return super(RRset, self).__eq__(other)
def match(self, name, rdclass, rdtype, covers, deleting=None):
"""Returns True if this rrset matches the specified class, type,
covers, and deletion state."""
if not super(RRset, self).match(rdclass, rdtype, covers):
return False
if self.name != name or self.deleting != deleting:
return False
return True
def to_text(self, origin=None, relativize=True, **kw):
"""Convert the RRset into DNS master file format.
@see: L{dns.name.Name.choose_relativity} for more information
on how I{origin} and I{relativize} determine the way names
are emitted.
Any additional keyword arguments are passed on to the rdata
to_text() method.
@param origin: The origin for relative names, or None.
@type origin: dns.name.Name object
        @param relativize: True if names should be relativized
@type relativize: bool"""
return super(RRset, self).to_text(self.name, origin, relativize,
self.deleting, **kw)
def to_wire(self, file, compress=None, origin=None, **kw):
"""Convert the RRset to wire format."""
return super(RRset, self).to_wire(self.name, file, compress, origin,
self.deleting, **kw)
def to_rdataset(self):
"""Convert an RRset into an Rdataset.
@rtype: dns.rdataset.Rdataset object
"""
return dns.rdataset.from_rdata_list(self.ttl, list(self))
def from_text_list(name, ttl, rdclass, rdtype, text_rdatas):
"""Create an RRset with the specified name, TTL, class, and type, and with
the specified list of rdatas in text format.
@rtype: dns.rrset.RRset object
"""
if isinstance(name, (str, unicode)):
name = dns.name.from_text(name, None)
if isinstance(rdclass, str):
rdclass = dns.rdataclass.from_text(rdclass)
if isinstance(rdtype, str):
rdtype = dns.rdatatype.from_text(rdtype)
r = RRset(name, rdclass, rdtype)
r.update_ttl(ttl)
for t in text_rdatas:
rd = dns.rdata.from_text(r.rdclass, r.rdtype, t)
r.add(rd)
return r
def from_text(name, ttl, rdclass, rdtype, *text_rdatas):
"""Create an RRset with the specified name, TTL, class, and type and with
the specified rdatas in text format.
@rtype: dns.rrset.RRset object
"""
return from_text_list(name, ttl, rdclass, rdtype, text_rdatas)
def from_rdata_list(name, ttl, rdatas):
"""Create an RRset with the specified name and TTL, and with
the specified list of rdata objects.
@rtype: dns.rrset.RRset object
"""
if isinstance(name, (str, unicode)):
name = dns.name.from_text(name, None)
if len(rdatas) == 0:
raise ValueError("rdata list must not be empty")
r = None
for rd in rdatas:
if r is None:
r = RRset(name, rd.rdclass, rd.rdtype)
r.update_ttl(ttl)
r.add(rd)
return r
def from_rdata(name, ttl, *rdatas):
"""Create an RRset with the specified name and TTL, and with
the specified rdata objects.
@rtype: dns.rrset.RRset object
"""
return from_rdata_list(name, ttl, rdatas)
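# A minimal usage sketch of the factory functions defined in this module
# (assumes the package is importable as dns.rrset):
#
#   import dns.rrset
#   rrs = dns.rrset.from_text('www.example.com.', 300, 'IN', 'A',
#                             '192.0.2.1', '192.0.2.2')
#   print rrs.to_text()
#
# builds an RRset named www.example.com. holding two A rdatas with a
# 300 second TTL.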
|
zdary/intellij-community
|
refs/heads/master
|
python/testData/resolve/ModuleTypeAttributes/b.py
|
9
|
__name__ = "abc"
|
Unode/simpletap
|
refs/heads/master
|
setup.py
|
1
|
#!/usr/bin/env python
import setuptools
exec(compile(open("simpletap/version.py").read(), "simpletap/version.py", "exec"))
long_description = open("README.rst").read()
setuptools.setup(
name='simpletap',
packages=setuptools.find_packages(),
version=__version__,
description='Unittest runner producing Test Anything Protocol (TAP) output',
long_description=long_description,
author='Renato Alves',
maintainer='Renato Alves',
author_email='alves.rjc@gmail.com',
license='MIT',
platforms=["any"],
url="https://github.com/Unode/simpletap",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Scientific/Engineering :: Interface Engine/Protocol Translator",
"Topic :: Software Development :: Testing",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
|
hoskerism/reefx
|
refs/heads/master
|
testing/workerthread_test.py
|
1
|
#!/usr/bin/python
import abc
from workerthread import WorkerThread
import Queue
from workerbase_test import WorkerBaseTestClass, AbstractTestWorkerBase
class WorkerThreadTestClass(WorkerBaseTestClass, WorkerThread):
def __init__(self, inqueue, outqueue):
self.addtestaction("WorkerThreadTestClass.__init__()")
self.gpioOutputQueue = Queue.Queue()
self.sensorQueue = Queue.Queue()
self.testSensorReadings = {}
WorkerBaseTestClass.__init__(self)
WorkerThread.__init__(self, inqueue, outqueue)
def getdeviceoutputresponse(self, responseQueue, timeout):
response = {'CODE':'DEVICE_OUTPUT_RESPONSE',
'VALUE':True}
return response
def readsensorresponse(self, responseQueue, timeout):
request = self.sensorQueue.get(True, timeout)
self.sensorQueue.task_done()
sensor = request['SENSOR']
sensorReadingList = self.testSensorReadings[sensor]
reading = sensorReadingList.pop(0)
# TODO: Exceptions, timeouts etc
# I think exceptions just require value to be 'EXCEPTION'???
response = {'CODE':'SENSOR_RESPONSE',
'VALUE':reading,
'FRIENDLY_VALUE':"friendly {0}".format(reading),
'FRIENDLY_NAME':"friendly {0}".format(sensor)}
return response
def addtestsensor(self, sensor, readings):
self.testSensorReadings[sensor] = []
for reading in readings:
self.testSensorReadings[sensor].append(reading)
class AbstractTestWorkerThread(AbstractTestWorkerBase):
__metaclass__ = abc.ABCMeta
RUNTIME = 0
def setup(self):
self.testobject.setup()
super(AbstractTestWorkerThread, self).setup()
def teardown(self):
self.testobject.teardown('test teardown')
super(AbstractTestWorkerThread, self).teardown()
def assertdevicestatus(self, device, status, message = ""):
if message == "":
message = "Device {0} status error".format(device)
self.assertequals(status, self.testobject.deviceStatuses[device]['VALUE'], message)
def addtestsensor(self, sensor, readings):
self.testobject.addtestsensor(sensor, readings)
def testruntimebase(self):
self.assertequals(self.RUNTIME, self.testobject.RUNTIME, "RUNTIME")
def testdbbase(self):
self.assertequals("aquatest", self.testobject.dbname())
|
bliti/django-nonrel-1.5
|
refs/heads/nonrel-1.5
|
tests/regressiontests/delete_regress/models.py
|
108
|
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.db import models
class Award(models.Model):
name = models.CharField(max_length=25)
object_id = models.PositiveIntegerField()
content_type = models.ForeignKey(ContentType)
content_object = generic.GenericForeignKey()
class AwardNote(models.Model):
award = models.ForeignKey(Award)
note = models.CharField(max_length=100)
class Person(models.Model):
name = models.CharField(max_length=25)
awards = generic.GenericRelation(Award)
class Book(models.Model):
pagecount = models.IntegerField()
class Toy(models.Model):
name = models.CharField(max_length=50)
class Child(models.Model):
name = models.CharField(max_length=50)
toys = models.ManyToManyField(Toy, through='PlayedWith')
class PlayedWith(models.Model):
child = models.ForeignKey(Child)
toy = models.ForeignKey(Toy)
date = models.DateField(db_column='date_col')
class PlayedWithNote(models.Model):
played = models.ForeignKey(PlayedWith)
note = models.TextField()
class Contact(models.Model):
label = models.CharField(max_length=100)
class Email(Contact):
email_address = models.EmailField(max_length=100)
class Researcher(models.Model):
contacts = models.ManyToManyField(Contact, related_name="research_contacts")
class Food(models.Model):
name = models.CharField(max_length=20, unique=True)
class Eaten(models.Model):
food = models.ForeignKey(Food, to_field="name")
meal = models.CharField(max_length=20)
# Models for #15776
class Policy(models.Model):
policy_number = models.CharField(max_length=10)
class Version(models.Model):
policy = models.ForeignKey(Policy)
class Location(models.Model):
version = models.ForeignKey(Version, blank=True, null=True)
class Item(models.Model):
version = models.ForeignKey(Version)
location = models.ForeignKey(Location, blank=True, null=True)
# Models for #16128
class File(models.Model):
pass
class Image(File):
class Meta:
proxy = True
class Photo(Image):
class Meta:
proxy = True
class FooImage(models.Model):
my_image = models.ForeignKey(Image)
class FooFile(models.Model):
my_file = models.ForeignKey(File)
class FooPhoto(models.Model):
my_photo = models.ForeignKey(Photo)
class FooFileProxy(FooFile):
class Meta:
proxy = True
class OrgUnit(models.Model):
name = models.CharField(max_length=64, unique=True)
class Login(models.Model):
description = models.CharField(max_length=32)
orgunit = models.ForeignKey(OrgUnit)
class House(models.Model):
address = models.CharField(max_length=32)
class OrderedPerson(models.Model):
name = models.CharField(max_length=32)
lives_in = models.ForeignKey(House)
class Meta:
ordering = ['name']
|
jn0/fb2utils
|
refs/heads/master
|
unidecode/x61.py
|
252
|
data = (
'Qiao ', # 0x00
'Chou ', # 0x01
'Bei ', # 0x02
'Xuan ', # 0x03
'Wei ', # 0x04
'Ge ', # 0x05
'Qian ', # 0x06
'Wei ', # 0x07
'Yu ', # 0x08
'Yu ', # 0x09
'Bi ', # 0x0a
'Xuan ', # 0x0b
'Huan ', # 0x0c
'Min ', # 0x0d
'Bi ', # 0x0e
'Yi ', # 0x0f
'Mian ', # 0x10
'Yong ', # 0x11
'Kai ', # 0x12
'Dang ', # 0x13
'Yin ', # 0x14
'E ', # 0x15
'Chen ', # 0x16
'Mou ', # 0x17
'Ke ', # 0x18
'Ke ', # 0x19
'Yu ', # 0x1a
'Ai ', # 0x1b
'Qie ', # 0x1c
'Yan ', # 0x1d
'Nuo ', # 0x1e
'Gan ', # 0x1f
'Yun ', # 0x20
'Zong ', # 0x21
'Sai ', # 0x22
'Leng ', # 0x23
'Fen ', # 0x24
'[?] ', # 0x25
'Kui ', # 0x26
'Kui ', # 0x27
'Que ', # 0x28
'Gong ', # 0x29
'Yun ', # 0x2a
'Su ', # 0x2b
'Su ', # 0x2c
'Qi ', # 0x2d
'Yao ', # 0x2e
'Song ', # 0x2f
'Huang ', # 0x30
'Ji ', # 0x31
'Gu ', # 0x32
'Ju ', # 0x33
'Chuang ', # 0x34
'Ni ', # 0x35
'Xie ', # 0x36
'Kai ', # 0x37
'Zheng ', # 0x38
'Yong ', # 0x39
'Cao ', # 0x3a
'Sun ', # 0x3b
'Shen ', # 0x3c
'Bo ', # 0x3d
'Kai ', # 0x3e
'Yuan ', # 0x3f
'Xie ', # 0x40
'Hun ', # 0x41
'Yong ', # 0x42
'Yang ', # 0x43
'Li ', # 0x44
'Sao ', # 0x45
'Tao ', # 0x46
'Yin ', # 0x47
'Ci ', # 0x48
'Xu ', # 0x49
'Qian ', # 0x4a
'Tai ', # 0x4b
'Huang ', # 0x4c
'Yun ', # 0x4d
'Shen ', # 0x4e
'Ming ', # 0x4f
'[?] ', # 0x50
'She ', # 0x51
'Cong ', # 0x52
'Piao ', # 0x53
'Mo ', # 0x54
'Mu ', # 0x55
'Guo ', # 0x56
'Chi ', # 0x57
'Can ', # 0x58
'Can ', # 0x59
'Can ', # 0x5a
'Cui ', # 0x5b
'Min ', # 0x5c
'Te ', # 0x5d
'Zhang ', # 0x5e
'Tong ', # 0x5f
'Ao ', # 0x60
'Shuang ', # 0x61
'Man ', # 0x62
'Guan ', # 0x63
'Que ', # 0x64
'Zao ', # 0x65
'Jiu ', # 0x66
'Hui ', # 0x67
'Kai ', # 0x68
'Lian ', # 0x69
'Ou ', # 0x6a
'Song ', # 0x6b
'Jin ', # 0x6c
'Yin ', # 0x6d
'Lu ', # 0x6e
'Shang ', # 0x6f
'Wei ', # 0x70
'Tuan ', # 0x71
'Man ', # 0x72
'Qian ', # 0x73
'She ', # 0x74
'Yong ', # 0x75
'Qing ', # 0x76
'Kang ', # 0x77
'Di ', # 0x78
'Zhi ', # 0x79
'Lou ', # 0x7a
'Juan ', # 0x7b
'Qi ', # 0x7c
'Qi ', # 0x7d
'Yu ', # 0x7e
'Ping ', # 0x7f
'Liao ', # 0x80
'Cong ', # 0x81
'You ', # 0x82
'Chong ', # 0x83
'Zhi ', # 0x84
'Tong ', # 0x85
'Cheng ', # 0x86
'Qi ', # 0x87
'Qu ', # 0x88
'Peng ', # 0x89
'Bei ', # 0x8a
'Bie ', # 0x8b
'Chun ', # 0x8c
'Jiao ', # 0x8d
'Zeng ', # 0x8e
'Chi ', # 0x8f
'Lian ', # 0x90
'Ping ', # 0x91
'Kui ', # 0x92
'Hui ', # 0x93
'Qiao ', # 0x94
'Cheng ', # 0x95
'Yin ', # 0x96
'Yin ', # 0x97
'Xi ', # 0x98
'Xi ', # 0x99
'Dan ', # 0x9a
'Tan ', # 0x9b
'Duo ', # 0x9c
'Dui ', # 0x9d
'Dui ', # 0x9e
'Su ', # 0x9f
'Jue ', # 0xa0
'Ce ', # 0xa1
'Xiao ', # 0xa2
'Fan ', # 0xa3
'Fen ', # 0xa4
'Lao ', # 0xa5
'Lao ', # 0xa6
'Chong ', # 0xa7
'Han ', # 0xa8
'Qi ', # 0xa9
'Xian ', # 0xaa
'Min ', # 0xab
'Jing ', # 0xac
'Liao ', # 0xad
'Wu ', # 0xae
'Can ', # 0xaf
'Jue ', # 0xb0
'Cu ', # 0xb1
'Xian ', # 0xb2
'Tan ', # 0xb3
'Sheng ', # 0xb4
'Pi ', # 0xb5
'Yi ', # 0xb6
'Chu ', # 0xb7
'Xian ', # 0xb8
'Nao ', # 0xb9
'Dan ', # 0xba
'Tan ', # 0xbb
'Jing ', # 0xbc
'Song ', # 0xbd
'Han ', # 0xbe
'Jiao ', # 0xbf
'Wai ', # 0xc0
'Huan ', # 0xc1
'Dong ', # 0xc2
'Qin ', # 0xc3
'Qin ', # 0xc4
'Qu ', # 0xc5
'Cao ', # 0xc6
'Ken ', # 0xc7
'Xie ', # 0xc8
'Ying ', # 0xc9
'Ao ', # 0xca
'Mao ', # 0xcb
'Yi ', # 0xcc
'Lin ', # 0xcd
'Se ', # 0xce
'Jun ', # 0xcf
'Huai ', # 0xd0
'Men ', # 0xd1
'Lan ', # 0xd2
'Ai ', # 0xd3
'Lin ', # 0xd4
'Yan ', # 0xd5
'Gua ', # 0xd6
'Xia ', # 0xd7
'Chi ', # 0xd8
'Yu ', # 0xd9
'Yin ', # 0xda
'Dai ', # 0xdb
'Meng ', # 0xdc
'Ai ', # 0xdd
'Meng ', # 0xde
'Dui ', # 0xdf
'Qi ', # 0xe0
'Mo ', # 0xe1
'Lan ', # 0xe2
'Men ', # 0xe3
'Chou ', # 0xe4
'Zhi ', # 0xe5
'Nuo ', # 0xe6
'Nuo ', # 0xe7
'Yan ', # 0xe8
'Yang ', # 0xe9
'Bo ', # 0xea
'Zhi ', # 0xeb
'Kuang ', # 0xec
'Kuang ', # 0xed
'You ', # 0xee
'Fu ', # 0xef
'Liu ', # 0xf0
'Mie ', # 0xf1
'Cheng ', # 0xf2
'[?] ', # 0xf3
'Chan ', # 0xf4
'Meng ', # 0xf5
'Lan ', # 0xf6
'Huai ', # 0xf7
'Xuan ', # 0xf8
'Rang ', # 0xf9
'Chan ', # 0xfa
'Ji ', # 0xfb
'Ju ', # 0xfc
'Huan ', # 0xfd
'She ', # 0xfe
'Yi ', # 0xff
)
|
duramato/CouchPotatoServer
|
refs/heads/develop
|
couchpotato/core/media/_base/providers/torrent/thepiratebay.py
|
2
|
import re
import traceback
from bs4 import BeautifulSoup
from couchpotato.core.event import addEvent
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.helpers.variable import tryInt
from couchpotato.core.logger import CPLog
from couchpotato.core.media._base.providers.torrent.base import TorrentMagnetProvider
import six
log = CPLog(__name__)
class Base(TorrentMagnetProvider):
urls = {
'detail': '%s/torrent/%s',
'search': '%s/search/%%s/%%s/7/%%s'
}
cat_backup_id = 200
disable_provider = False
http_time_between_calls = 0
proxy_list = [
'https://thepiratebay.mn',
'https://thepiratebay.gd',
'https://thepiratebay.la',
'https://pirateproxy.sx',
'https://piratebay.host',
'https://thepiratebay.expert',
'https://pirateproxy.wf',
'https://pirateproxy.tf',
'https://urbanproxy.eu',
'https://pirate.guru',
'https://piratebays.co',
'https://pirateproxy.yt',
'https://thepiratebay.uk.net',
'https://tpb.ninja',
'https://thehiddenbay.me',
'https://ukunlocked.com',
'https://thebay.tv',
'https://tpb.freed0m4all.net',
'https://piratebays.eu',
'https://thepirateproxy.co',
'https://thepiratebayz.com',
'https://zaatoka.eu',
'https://piratemirror.net',
'https://theproxypirate.pw',
'https://torrentdr.com',
'https://tpbproxy.co',
'https://arrr.xyz',
'https://www.cleantpbproxy.com',
'http://tpb.dashitz.com',
]
def __init__(self):
super(Base, self).__init__()
addEvent('app.test', self.doTest)
def _search(self, media, quality, results):
page = 0
total_pages = 1
cats = self.getCatId(quality)
base_search_url = self.urls['search'] % self.getDomain()
while page < total_pages:
search_url = base_search_url % self.buildUrl(media, page, cats)
page += 1
data = self.getHTMLData(search_url)
if data:
try:
soup = BeautifulSoup(data)
results_table = soup.find('table', attrs = {'id': 'searchResult'})
if not results_table:
return
try:
total_pages = len(soup.find('div', attrs = {'align': 'center'}).find_all('a'))
except:
pass
entries = results_table.find_all('tr')
for result in entries[1:]:
link = result.find(href = re.compile('torrent\/\d+\/'))
download = result.find(href = re.compile('magnet:'))
try:
size = re.search('Size (?P<size>.+),', six.text_type(result.select('font.detDesc')[0])).group('size')
except:
continue
if link and download:
def extra_score(item):
trusted = (0, 10)[result.find('img', alt = re.compile('Trusted')) is not None]
vip = (0, 20)[result.find('img', alt = re.compile('VIP')) is not None]
confirmed = (0, 30)[result.find('img', alt = re.compile('Helpers')) is not None]
moderated = (0, 50)[result.find('img', alt = re.compile('Moderator')) is not None]
return confirmed + trusted + vip + moderated
results.append({
'id': re.search('/(?P<id>\d+)/', link['href']).group('id'),
'name': six.text_type(link.string),
'url': download['href'],
'detail_url': self.getDomain(link['href']),
'size': self.parseSize(size),
'seeders': tryInt(result.find_all('td')[2].string),
'leechers': tryInt(result.find_all('td')[3].string),
'extra_score': extra_score,
'get_more_info': self.getMoreInfo
})
except:
log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
def isEnabled(self):
return super(Base, self).isEnabled() and self.getDomain()
def correctProxy(self, data):
return 'title="Pirate Search"' in data
def getMoreInfo(self, item):
full_description = self.getCache('tpb.%s' % item['id'], item['detail_url'], cache_timeout = 25920000)
html = BeautifulSoup(full_description)
nfo_pre = html.find('div', attrs = {'class': 'nfo'})
description = ''
try:
description = toUnicode(nfo_pre.text)
except:
pass
item['description'] = description
return item
def doTest(self):
for url in self.proxy_list:
try:
data = self.urlopen(url + '/search/test+search')
if 'value="test+search"' in data:
log.info('Success %s', url)
continue
except:
log.error('%s', traceback.format_exc(0))
config = [{
'name': 'thepiratebay',
'groups': [
{
'tab': 'searcher',
'list': 'torrent_providers',
'name': 'ThePirateBay',
'description': 'The world\'s largest bittorrent tracker. <a href="https://thepiratebay.se/" target="_blank">ThePirateBay</a>',
'wizard': True,
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAAAAAA6mKC9AAAA3UlEQVQY02P4DwT/YADIZvj//7qnozMYODmtAAusZoCDELDAegYGViZhAWZmRoYoqIDupfhNN1M3dTBEggXWMZg9jZRXV77YxhAOFpjDwMAPMoCXmcHsF1SAQZ6bQY2VgUEbKHClcAYzg3mINEO8jSCD478/DPsZmvqWblu1bOmStes3Pp0ezVDF4Gif0Hfx9///74/ObRZ2YNiZ47C8XIRBxFJR0jbSSUud4f9zAQWn8NTuziAt2zy5xIMM/z8LFX0E+fD/x0MRDCeA1v7Z++Y/FDzyvAtyBxIA+h8A8ZKLeT+lJroAAAAASUVORK5CYII=',
'options': [
{
'name': 'enabled',
'type': 'enabler',
'default': False
},
{
'name': 'domain',
'advanced': True,
'label': 'Proxy server',
'description': 'Domain for requests, keep empty to let CouchPotato pick.',
},
{
'name': 'seed_ratio',
'label': 'Seed ratio',
'type': 'float',
'default': 1,
'description': 'Will not be (re)moved until this seed ratio is met.',
},
{
'name': 'seed_time',
'label': 'Seed time',
'type': 'int',
'default': 40,
'description': 'Will not be (re)moved until this seed time (in hours) is met.',
},
{
'name': 'extra_score',
'advanced': True,
'label': 'Extra Score',
'type': 'int',
'default': 0,
'description': 'Starting score for each release found via this provider.',
}
],
}
]
}]
|
pshen/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/univention/udm_user.py
|
69
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
# Copyright (c) 2016, Adfinis SyGroup AG
# Tobias Rueetschi <tobias.ruetschi@adfinis-sygroup.ch>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: udm_user
version_added: "2.2"
author: "Tobias Rueetschi (@2-B)"
short_description: Manage posix users on a univention corporate server
description:
- "This module allows to manage posix users on a univention corporate
server (UCS).
It uses the python API of the UCS to create a new object or edit it."
requirements:
- Python >= 2.6
options:
state:
required: false
default: "present"
choices: [ present, absent ]
description:
- Whether the user is present or not.
username:
required: true
description:
- User name
aliases: ['name']
firstname:
required: false
description:
- First name. Required if C(state=present).
lastname:
required: false
description:
- Last name. Required if C(state=present).
password:
required: false
default: None
description:
- Password. Required if C(state=present).
birthday:
required: false
default: None
description:
- Birthday
city:
required: false
default: None
description:
            - City of the user's business address.
country:
required: false
default: None
description:
            - Country of the user's business address.
department_number:
required: false
default: None
description:
            - Department number of the user's business address.
aliases: [ departmentNumber ]
description:
required: false
default: None
description:
- Description (not gecos)
display_name:
required: false
default: None
description:
- Display name (not gecos)
aliases: [ displayName ]
email:
required: false
default: ['']
description:
- A list of e-mail addresses.
employee_number:
required: false
default: None
description:
- Employee number
aliases: [ employeeNumber ]
employee_type:
required: false
default: None
description:
- Employee type
aliases: [ employeeType ]
gecos:
required: false
default: None
description:
- GECOS
groups:
required: false
default: []
description:
- "POSIX groups, the LDAP DNs of the groups will be found with the
LDAP filter for each group as $GROUP:
C((&(objectClass=posixGroup)(cn=$GROUP)))."
home_share:
required: false
default: None
description:
- "Home NFS share. Must be a LDAP DN, e.g.
C(cn=home,cn=shares,ou=school,dc=example,dc=com)."
aliases: [ homeShare ]
home_share_path:
required: false
default: None
description:
- Path to home NFS share, inside the homeShare.
aliases: [ homeSharePath ]
home_telephone_number:
required: false
default: []
description:
- List of private telephone numbers.
aliases: [ homeTelephoneNumber ]
homedrive:
required: false
default: None
description:
- Windows home drive, e.g. C("H:").
mail_alternative_address:
required: false
default: []
description:
- List of alternative e-mail addresses.
aliases: [ mailAlternativeAddress ]
mail_home_server:
required: false
default: None
description:
- FQDN of mail server
aliases: [ mailHomeServer ]
mail_primary_address:
required: false
default: None
description:
- Primary e-mail address
aliases: [ mailPrimaryAddress ]
mobile_telephone_number:
required: false
default: []
description:
- Mobile phone number
aliases: [ mobileTelephoneNumber ]
organisation:
required: false
default: None
description:
- Organisation
override_pw_history:
required: false
default: False
description:
- Override password history
aliases: [ overridePWHistory ]
override_pw_length:
required: false
default: False
description:
- Override password check
aliases: [ overridePWLength ]
pager_telephonenumber:
required: false
default: []
description:
- List of pager telephone numbers.
aliases: [ pagerTelephonenumber ]
phone:
required: false
default: []
description:
- List of telephone numbers.
postcode:
required: false
default: None
description:
            - Postal code of the user's business address.
primary_group:
required: false
default: cn=Domain Users,cn=groups,$LDAP_BASE_DN
description:
- Primary group. This must be the group LDAP DN.
aliases: [ primaryGroup ]
profilepath:
required: false
default: None
description:
- Windows profile directory
pwd_change_next_login:
required: false
default: None
choices: [ '0', '1' ]
description:
- Change password on next login.
aliases: [ pwdChangeNextLogin ]
room_number:
required: false
default: None
description:
            - Room number of the user's business address.
aliases: [ roomNumber ]
samba_privileges:
required: false
default: []
description:
- "Samba privilege, like allow printer administration, do domain
join."
aliases: [ sambaPrivileges ]
samba_user_workstations:
required: false
default: []
description:
- Allow the authentication only on this Microsoft Windows host.
aliases: [ sambaUserWorkstations ]
sambahome:
required: false
default: None
description:
- Windows home path, e.g. C('\\\\$FQDN\\$USERNAME').
scriptpath:
required: false
default: None
description:
- Windows logon script.
secretary:
required: false
default: []
description:
- A list of superiors as LDAP DNs.
serviceprovider:
required: false
default: ['']
description:
- Enable user for the following service providers.
shell:
required: false
default: '/bin/bash'
description:
- Login shell
street:
required: false
default: None
description:
            - Street of the user's business address.
title:
required: false
default: None
description:
- Title, e.g. C(Prof.).
unixhome:
required: false
default: '/home/$USERNAME'
description:
- Unix home directory
userexpiry:
required: false
default: Today + 1 year
description:
- Account expiry date, e.g. C(1999-12-31).
position:
required: false
default: ''
description:
- "Define the whole position of users object inside the LDAP tree,
e.g. C(cn=employee,cn=users,ou=school,dc=example,dc=com)."
update_password:
required: false
default: always
description:
- "C(always) will update passwords if they differ.
C(on_create) will only set the password for newly created users."
version_added: "2.3"
ou:
required: false
default: ''
description:
- "Organizational Unit inside the LDAP Base DN, e.g. C(school) for
LDAP OU C(ou=school,dc=example,dc=com)."
subpath:
required: false
default: 'cn=users'
description:
- "LDAP subpath inside the organizational unit, e.g.
C(cn=teachers,cn=users) for LDAP container
C(cn=teachers,cn=users,dc=example,dc=com)."
'''
EXAMPLES = '''
# Create a user on a UCS
- udm_user:
name: FooBar
password: secure_password
firstname: Foo
lastname: Bar
# Create a user with the DN
# C(uid=foo,cn=teachers,cn=users,ou=school,dc=school,dc=example,dc=com)
- udm_user:
name: foo
password: secure_password
firstname: Foo
lastname: Bar
ou: school
subpath: 'cn=teachers,cn=users'
# or define the position
- udm_user:
name: foo
password: secure_password
firstname: Foo
lastname: Bar
position: 'cn=teachers,cn=users,ou=school,dc=school,dc=example,dc=com'
'''
RETURN = '''# '''
from datetime import date
import crypt
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.univention_umc import (
umc_module_for_add,
umc_module_for_edit,
ldap_search,
base_dn,
)
from dateutil.relativedelta import relativedelta
def main():
expiry = date.strftime(date.today() + relativedelta(years=1), "%Y-%m-%d")
module = AnsibleModule(
argument_spec = dict(
birthday = dict(default=None,
type='str'),
city = dict(default=None,
type='str'),
country = dict(default=None,
type='str'),
department_number = dict(default=None,
type='str',
aliases=['departmentNumber']),
description = dict(default=None,
type='str'),
display_name = dict(default=None,
type='str',
aliases=['displayName']),
email = dict(default=[''],
type='list'),
employee_number = dict(default=None,
type='str',
aliases=['employeeNumber']),
employee_type = dict(default=None,
type='str',
aliases=['employeeType']),
firstname = dict(default=None,
type='str'),
gecos = dict(default=None,
type='str'),
groups = dict(default=[],
type='list'),
home_share = dict(default=None,
type='str',
aliases=['homeShare']),
home_share_path = dict(default=None,
type='str',
aliases=['homeSharePath']),
home_telephone_number = dict(default=[],
type='list',
aliases=['homeTelephoneNumber']),
homedrive = dict(default=None,
type='str'),
lastname = dict(default=None,
type='str'),
mail_alternative_address= dict(default=[],
type='list',
aliases=['mailAlternativeAddress']),
mail_home_server = dict(default=None,
type='str',
aliases=['mailHomeServer']),
mail_primary_address = dict(default=None,
type='str',
aliases=['mailPrimaryAddress']),
mobile_telephone_number = dict(default=[],
type='list',
aliases=['mobileTelephoneNumber']),
organisation = dict(default=None,
type='str'),
overridePWHistory = dict(default=False,
type='bool',
aliases=['override_pw_history']),
overridePWLength = dict(default=False,
type='bool',
aliases=['override_pw_length']),
pager_telephonenumber = dict(default=[],
type='list',
aliases=['pagerTelephonenumber']),
password = dict(default=None,
type='str',
no_log=True),
phone = dict(default=[],
type='list'),
postcode = dict(default=None,
type='str'),
primary_group = dict(default=None,
type='str',
aliases=['primaryGroup']),
profilepath = dict(default=None,
type='str'),
pwd_change_next_login = dict(default=None,
type='str',
choices=['0', '1'],
aliases=['pwdChangeNextLogin']),
room_number = dict(default=None,
type='str',
aliases=['roomNumber']),
samba_privileges = dict(default=[],
type='list',
aliases=['sambaPrivileges']),
samba_user_workstations = dict(default=[],
type='list',
aliases=['sambaUserWorkstations']),
sambahome = dict(default=None,
type='str'),
scriptpath = dict(default=None,
type='str'),
secretary = dict(default=[],
type='list'),
serviceprovider = dict(default=[''],
type='list'),
shell = dict(default='/bin/bash',
type='str'),
street = dict(default=None,
type='str'),
title = dict(default=None,
type='str'),
unixhome = dict(default=None,
type='str'),
userexpiry = dict(default=expiry,
type='str'),
username = dict(required=True,
aliases=['name'],
type='str'),
position = dict(default='',
type='str'),
update_password = dict(default='always',
choices=['always', 'on_create'],
type='str'),
ou = dict(default='',
type='str'),
subpath = dict(default='cn=users',
type='str'),
state = dict(default='present',
choices=['present', 'absent'],
type='str')
),
supports_check_mode=True,
required_if = ([
('state', 'present', ['firstname', 'lastname', 'password'])
])
)
username = module.params['username']
position = module.params['position']
ou = module.params['ou']
subpath = module.params['subpath']
state = module.params['state']
    changed = False
    diff = None
users = list(ldap_search(
'(&(objectClass=posixAccount)(uid={}))'.format(username),
attr=['uid']
))
if position != '':
container = position
else:
if ou != '':
ou = 'ou={},'.format(ou)
if subpath != '':
subpath = '{},'.format(subpath)
container = '{}{}{}'.format(subpath, ou, base_dn())
user_dn = 'uid={},{}'.format(username, container)
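    # e.g. ou='school' and subpath='cn=teachers,cn=users' (as in the module
    # examples) yield container 'cn=teachers,cn=users,ou=school,<base dn>'
    # and user_dn 'uid=<username>,cn=teachers,cn=users,ou=school,<base dn>'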
exists = bool(len(users))
if state == 'present':
try:
if not exists:
obj = umc_module_for_add('users/user', container)
else:
obj = umc_module_for_edit('users/user', user_dn)
            # 'displayName' only exists in module.params when supplied via
            # its alias, so use .get() to avoid a KeyError here
            if module.params.get('displayName') is None:
module.params['displayName'] = '{} {}'.format(
module.params['firstname'],
module.params['lastname']
)
if module.params['unixhome'] is None:
module.params['unixhome'] = '/home/{}'.format(
module.params['username']
)
for k in obj.keys():
if (k != 'password' and
k != 'groups' and
k != 'overridePWHistory' and
k in module.params and
module.params[k] is not None):
obj[k] = module.params[k]
# handle some special values
obj['e-mail'] = module.params['email']
password = module.params['password']
if obj['password'] is None:
obj['password'] = password
if module.params['update_password'] == 'always':
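                # the stored password looks like '{<scheme>}<hash>' (hence
                # the split on '}'); crypt.crypt() with that hash as salt
                # reproduces it only if the candidate password matches, so a
                # mismatch means the password changed and must be rewritten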
old_password = obj['password'].split('}', 2)[1]
if crypt.crypt(password, old_password) != old_password:
obj['overridePWHistory'] = module.params['overridePWHistory']
obj['overridePWLength'] = module.params['overridePWLength']
obj['password'] = password
diff = obj.diff()
if exists:
for k in obj.keys():
if obj.hasChanged(k):
changed = True
else:
changed = True
if not module.check_mode:
if not exists:
obj.create()
elif changed:
obj.modify()
except:
module.fail_json(
msg="Creating/editing user {} in {} failed".format(
username,
container
)
)
try:
groups = module.params['groups']
if groups:
filter = '(&(objectClass=posixGroup)(|(cn={})))'.format(
')(cn='.join(groups)
)
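            # e.g. groups=['admins', 'staff'] builds the LDAP filter
            # '(&(objectClass=posixGroup)(|(cn=admins)(cn=staff)))'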
group_dns = list(ldap_search(filter, attr=['dn']))
for dn in group_dns:
grp = umc_module_for_edit('groups/group', dn[0])
if user_dn not in grp['users']:
grp['users'].append(user_dn)
if not module.check_mode:
grp.modify()
changed = True
except:
module.fail_json(
msg="Adding groups to user {} failed".format(username)
)
if state == 'absent' and exists:
try:
obj = umc_module_for_edit('users/user', user_dn)
if not module.check_mode:
obj.remove()
changed = True
except:
module.fail_json(
msg="Removing user {} failed".format(username)
)
module.exit_json(
changed=changed,
username=username,
diff=diff,
container=container
)
if __name__ == '__main__':
main()
|
NoxWings/GoogleCodeJam
|
refs/heads/master
|
src/round1A_2016/the_last_word.py
|
1
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#===============================================================================
from __future__ import unicode_literals
#===============================================================================
def read_input(strip=True):
return raw_input().strip() if strip else raw_input()
def read_input_multi(strip=True):
return read_input(strip).split()
def read_int():
return int(read_input())
def read_int_multi():
return [int(s) for s in read_input_multi()]
def print_solution(i, solution):
print('Case #{}: {}'.format(i, solution))
#===============================================================================
def solve():
word = read_input()
last_word = []
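    # Greedy rule: a letter at least as large as the current first letter is
    # prepended, otherwise appended. E.g. (worked by hand) 'BAC' gives
    # 'B' -> 'BA' ('A' < 'B') -> 'CBA' ('C' >= 'B').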
for l in word:
if last_word:
if (ord(l) >= ord(last_word[0])):
last_word.insert(0, l)
else:
last_word.append(l)
else:
last_word.append(l)
return ''.join(last_word)
#===============================================================================
if __name__ == '__main__':
test_cases = read_int()
for t in xrange(test_cases):
solution = solve()
print_solution(t + 1, solution)
|
Rypac/sublime-format
|
refs/heads/main
|
format.py
|
1
|
import sublime
import sublime_plugin
from .plugin import FormatterRegistry
def queue_command(callback, timeout=100):
sublime.set_timeout(callback, timeout)
def log_error(output, error):
print('Format:', output, error)
registry = FormatterRegistry()
def plugin_loaded():
registry.populate()
def plugin_unloaded():
registry.clear()
def format_region(formatter, view, region, edit):
selection = view.substr(region)
ok, output, error = formatter.format(selection, settings=view.settings())
if ok:
view.replace(edit, region, output)
else:
log_error(output, error)
class FormatSelectionCommand(sublime_plugin.TextCommand):
def is_enabled(self):
return registry.by_view(self.view) is not None
def run(self, edit):
formatter = registry.by_view(self.view)
if formatter:
for region in self.view.sel():
if not region.empty():
format_region(formatter, self.view, region, edit)
else:
log_error('No formatter for source file')
class FormatFileCommand(sublime_plugin.TextCommand):
def is_enabled(self):
return registry.by_view(self.view) is not None
def run(self, edit):
formatter = registry.by_view(self.view)
if formatter:
region = sublime.Region(0, self.view.size())
format_region(formatter, self.view, region, edit)
else:
log_error('No formatter for source file')
class FormatListener(sublime_plugin.EventListener):
def on_pre_save(self, view):
formatter = registry.by_view(view)
if formatter and formatter.format_on_save:
view.run_command('format_file')
class ToggleFormatOnSaveCommand(sublime_plugin.ApplicationCommand):
def is_checked(self, name=None):
if name:
formatter = registry.by_name(name)
return formatter and formatter.format_on_save
return all(f.format_on_save for f in registry.all)
def run(self, name=None):
if name:
formatter = registry.by_name(name)
if formatter:
formatter.format_on_save = not formatter.format_on_save
else:
enable = any(not f.format_on_save for f in registry.all)
for formatter in registry.all:
formatter.format_on_save = enable
class ManageFormatOnSaveCommand(sublime_plugin.WindowCommand):
def run(self, which=None):
enabled = which == 'enabled'
items = [[x.name]
for x in registry.by(lambda f: f.format_on_save == enabled)]
def callback(selection):
if selection >= 0 and selection < len(items):
self.window.run_command('toggle_format_on_save',
{'name': items[selection][0]})
queue_command(lambda: self.window.show_quick_panel(items, callback))
|
moylop260/odoo-dev
|
refs/heads/master
|
addons/gamification/__openerp__.py
|
62
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 OpenERP SA (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Gamification',
'version': '1.0',
'author': 'OpenERP SA',
    'category': 'Human Resources',
'depends': ['mail', 'email_template', 'web_kanban_gauge'],
'description': """
Gamification process
====================
The Gamification module provides ways to evaluate and motivate the users of OpenERP.
The users can be evaluated using goals and numerical objectives to reach.
**Goals** are assigned through **challenges** to evaluate and compare members of a team with each other and through time.
For non-numerical achievements, **badges** can be granted to users. From a simple "thank you" to an exceptional achievement, a badge is an easy way to express gratitude to a user for their good work.
Both goals and badges are flexible and can be adapted to a large range of modules and actions. When installed, this module creates easy goals to help new users to discover OpenERP and configure their user profile.
""",
'data': [
'wizard/update_goal.xml',
'wizard/grant_badge.xml',
'views/badge.xml',
'views/challenge.xml',
'views/goal.xml',
'data/cron.xml',
'security/gamification_security.xml',
'security/ir.model.access.csv',
'data/goal_base.xml',
'data/badge.xml',
'views/gamification.xml',
],
'installable': True,
'application': True,
'auto_install': False,
'qweb': ['static/src/xml/gamification.xml'],
}
|
psobot/wub-machine
|
refs/heads/master
|
remixers/electrohouse.py
|
1
|
"""
electrohouse.py <derived from dubstep.py; ex: dubstepize.py, wubmachine.py, wubwub.py, etc...>
Turns a song into an electro house remix.
ElectroHouse inherits from the Remixer class.
Dependencies:
FastModify
Remixer
lame (command line binary)
shntool (command line binary)
soundstretch (command line binary)
by Peter Sobot <hi@petersobot.com>
v1: started Jan. 2011
v2: August 2011
based off of code by Ben Lacker, 2009-02-24.
"""
from remixer import *
from helpers.fastmodify import FastModify
from echonest.modify import Modify
from echonest.action import make_stereo
import numpy
tempo = 128.0
# Audio Division
def half_of(audioData):
return divide(audioData, 2)[0]
def third_of(audioData):
return divide(audioData, 3)[0]
def quarter_of(audioData):
return divide(audioData, 4)[0]
def eighth_of(audioData):
return divide(audioData, 8)[0]
def eighth_triplet(audioData):
return cutnote(audioData, 6)
def quarter_triplet(audioData):
return cutnote(audioData, 3)
def sixteenth_note(audioData):
return cutnote(audioData, 4)
def eighth_note(audioData):
return cutnote(audioData, 2)
def dotted_eighth_note(audioData):
return cutnote(audioData, 0.75)
def quarter_note(audioData):
return cutnote(audioData, 1)
def cutnote(audioData, length):
beatlength = (audioData.sampleRate * 60 / tempo) #in samples
i = beatlength/length
data = audioData.data[0:i]
if len(data) < i:
if audioData.numChannels == 2:
shape = (i - len(data),2)
else:
shape = (i - len(data),)
data = numpy.append(data, numpy.zeros(shape, dtype=numpy.int16), 0)
r = audio.AudioData(
ndarray=data,
numChannels=audioData.numChannels,
sampleRate = audioData.sampleRate
)
return make_stereo(r) if (r.numChannels == 1) else r
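# Rough numbers: at the fixed tempo of 128 BPM and 44100 Hz, one beat is
# 44100 * 60 / 128 ~= 20672 samples, so quarter_note() keeps ~20672 samples,
# eighth_note() half that, and cutnote() zero-pads when the source audio is
# shorter than the requested length.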
def divide(audioData, by):
return [audio.AudioData(
ndarray=audioData.data[i:len(audioData.data)/by],
numChannels=audioData.numChannels,
sampleRate = audioData.sampleRate
) for i in xrange(0, len(audioData.data), len(audioData.data)/by)]
quarter_rest = audio.AudioData(ndarray=numpy.zeros( ((44100 * 60 / tempo), 2), dtype=numpy.int16), numChannels=2, sampleRate=44100)
eighth_rest = audio.AudioData(ndarray=numpy.zeros( ((44100 * 60 / tempo)/2, 2), dtype=numpy.int16), numChannels=2, sampleRate=44100)
dotted_eighth_rest = audio.AudioData(ndarray=numpy.zeros( ((44100 * 60 / tempo)/0.75, 2), dtype=numpy.int16), numChannels=2, sampleRate=44100)
quarter_triplet_rest = audio.AudioData(ndarray=numpy.zeros( ((44100 * 60 / tempo)/3, 2), dtype=numpy.int16), numChannels=2, sampleRate=44100)
sixteenth_rest = audio.AudioData(ndarray=numpy.zeros( ((44100 * 60 / tempo)/4, 2), dtype=numpy.int16), numChannels=2, sampleRate=44100)
rhythm_map = {1: sixteenth_note, 2: eighth_note, 3: dotted_eighth_note, 4: quarter_note}
rest_map = {1: sixteenth_rest, 2: eighth_rest, 3: dotted_eighth_rest, 4: quarter_rest}
class note():
def __init__(self, pitch=None, length=1):
self.pitch = pitch
self.length = length
self.data = rest_map[length]
self.function = rhythm_map[length]
def __repr__(self):
return "%s x 16th note %s" % (self.length, self.pitch if self.pitch is not None else "rest")
def readPattern(filename):
f = open(filename)
f.readline()
# Two spaces for each beat.
# number 1 through 12 means that note (rather, that interval from root)
# dash means continue previous
pattern = []
for s in f:
if "+" in s or '#' in s or s == "\n":
continue
pattern.extend([''.join(x) for x in zip(*[list(s[z::2]) for z in xrange(2)])])
bar = []
for sixteenth in pattern:
if sixteenth == "" or sixteenth == " \n":
continue
elif sixteenth == " ":
bar.append(note())
elif sixteenth == "- ":
last = bar.pop()
bar.append(note(last.pitch, last.length+1))
else:
bar.append(note(int(sixteenth)))
return bar
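# A hypothetical pattern line such as "1 - 3 -   5 " (two characters per
# sixteenth) parses into: interval 1 held for an eighth note ('-' extends
# the previous note), interval 3 held for an eighth, a sixteenth rest
# ("  "), then a sixteenth note at interval 5.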
class ElectroHouse(Remixer):
template = {
'tempo': 128,
'beat': ['beat_%s.wav' % i for i in xrange(0, 4)],
'intro': 'intro_16.wav',
'splash': 'splash.wav',
'build': 'build.wav',
'body' : [
'body/c.wav',
'body/c-sharp.wav',
'body/d.wav',
'body/d-sharp.wav',
'body/e.wav',
'body/f.wav',
'body/f-sharp.wav',
'body/g.wav',
'body/g-sharp.wav',
'body/a.wav',
'body/a-sharp.wav',
'body/b.wav'
],
'mixpoint': 18, # "db factor" of wubs - 0 is softest wubs, infinity is... probably extremely loud
'target': "beats",
'splash_ends': [ 'splash-ends/1.wav',
'splash-ends/2.wav',
'splash-ends/3.wav',
'splash-ends/4.wav'
],
}
st = None
sampleCache = {}
def searchSamples(self, j, key):
"""
Find all samples (beats) of a given key in a given section.
"""
hashkey = "_%s-%s" % (j, key)
if not hashkey in self.sampleCache:
if self.sections:
pool = self.sections[j % len(self.sections)]
elif self.original.analysis.bars:
pool = self.original.analysis.bars
elif self.original.analysis.segments:
pool = self.original.analysis.segments
else:
raise Exception("No samples found for section %s." % j+1)
a = self.getSamples(pool, key)
for tries in xrange(0, 5):
if len(a):
break
key = (key + 7) % 12
a = self.getSamples(pool, key)
else:
for tries in xrange(0, 5):
if len(a):
break
if self.sections:
j = (j + 1) % len(self.sections)
elif self.original.analysis.bars:
j = (j + 1) % len(self.original.analysis.bars)
elif self.original.analysis.segments:
j = (j + 1) % len(self.original.analysis.segments)
key = (key + 2) % 12
a = self.getSamples(pool, key)
self.sampleCache[hashkey] = a
return self.sampleCache[hashkey]
def getSamples(self, section, pitch, target="beats"):
"""
The EchoNest-y workhorse. Finds all beats/bars in a given section, of a given pitch.
"""
hashkey = "__%s.%s" % (str(section), pitch)
if not hashkey in self.sampleCache:
sample_list = audio.AudioQuantumList()
if target == "beats":
try:
sample_list.extend([b for x in section.children() for b in x.children()])
except:
sample_list.extend(section)
elif target == "bars":
sample_list.extend(section.children())
self.sampleCache[hashkey] = sample_list.that(overlap_ends_of(self.original.analysis.segments.that(have_pitch_max(pitch)).that(overlap_starts_of(sample_list))))
return self.sampleCache[hashkey]
def mixfactor(self, segment):
"""
Computes a rough "mixfactor" - the balance between wubs and original audio for a given segment.
Mixfactor returned:
1: full wub
0: full original
Result can be fed into echonest.audio.mix() as the third parameter.
"""
mixfactor = 0
a = (89.0/1.5) + self.template['mixpoint']
b = (188.0/1.5) + self.template['mixpoint']
loud = self.loudness(self.original.analysis.segments, segment)
if not loud:
loud = self.original.analysis.loudness
if loud != -1 * b:
mixfactor = float(float(loud + a)/float(loud + b))
if mixfactor > 0.8:
mixfactor = 0.8
elif mixfactor < 0.3:
mixfactor = 0.3
return mixfactor
def compileIntro(self, section=0, intro=None):
if not intro:
intro = audio.AudioData(self.sample_path + self.template['intro'], sampleRate=44100, numChannels=2, verbose=False)
out = audio.AudioQuantumList()
section_hash_keys = []
for i, item in enumerate(readPattern('samples/electrohouse/intro.txt')):
if item.pitch is None:
out.append(item.data)
else:
samples = self.searchSamples(section, (item.pitch + self.tonic) % 12)
if not samples:
out.append(item.data)
else:
hash_key = str(samples[i%len(samples)])
if not hash_key in self.sampleCache:
self.sampleCache[hash_key] = self.st.shiftTempo(samples[i%len(samples)].render(), self.template['tempo']/self.tempo)
section_hash_keys.append(hash_key)
out.append(
item.function(
self.sampleCache[hash_key]
)
)
shifted = audio.assemble(out, numChannels = 2)
if shifted.numChannels == 1:
shifted = self.mono_to_stereo(shifted)
for hash_key in section_hash_keys:
del self.sampleCache[hash_key]
return self.truncatemix(intro, shifted, 0.3)
def compileSection(self, j, section, backing):
out = audio.AudioQuantumList()
section_hash_keys = []
for i, item in enumerate(readPattern('samples/electrohouse/section.txt')):
if item.pitch is None:
out.append(item.data)
else:
samples = self.searchSamples(j, (item.pitch + self.tonic) % 12)
if not samples:
out.append(item.data)
else:
hash_key = str(samples[i%len(samples)])
if not hash_key in self.sampleCache:
self.sampleCache[hash_key] = self.st.shiftTempo(samples[i%len(samples)].render(), self.template['tempo']/self.tempo)
section_hash_keys.append(hash_key)
out.append(
item.function(
self.sampleCache[hash_key]
)
)
shifted = audio.assemble(out, numChannels = 2)
if shifted.numChannels == 1:
shifted = self.mono_to_stereo(shifted)
for hash_key in section_hash_keys:
del self.sampleCache[hash_key]
return self.truncatemix(backing, shifted, 0.3)
def remix(self):
"""
Wub wub wub wub wub wub wub wub wub wub wub wub wub wub wub wub wub wub.
"""
self.log("Looking up track...", 5)
self.getTag()
self.processArt()
self.log("Listening to %s..." % ('"%s"' % self.tag['title'] if 'title' in self.tag else 'song'), 5)
self.original = audio.LocalAudioFile(self.infile, False)
if not 'title' in self.tag:
self.detectSong(self.original)
self.st = FastModify()
self.log("Choosing key and tempo...", 10)
self.tonic = self.original.analysis.key['value']
self.tempo = self.original.analysis.tempo['value']
if not self.tempo:
self.tempo = 128.0
self.bars = self.original.analysis.bars
self.beats = self.original.analysis.beats
self.sections = self.original.analysis.sections
self.tag['key'] = self.keys[self.tonic] if self.tonic >= 0 and self.tonic < 12 else '?'
if 'title' in self.tag and self.tag['title'] == u'I Wish':
self.tonic += 2
self.tag['key'] = 'D#'
self.tag['tempo'] = self.template['tempo']
self.log("Arranging intro...", 40.0/(len(self.sections) + 1))
intro = audio.AudioData(self.sample_path + self.template['intro'], sampleRate=44100, numChannels=2, verbose=False)
self.partialEncode(self.compileIntro(0, intro))
i = 0 # Required if there are no sections
sections = self.sections[1:] if len(self.sections) % 2 else self.sections
if len(sections) > 2:
backing = audio.AudioData(self.sample_path + self.template['body'][self.tonic], sampleRate=44100, numChannels=2, verbose=False)
for i, section in enumerate(sections):
self.log("Arranging section %s of %s..." % (i+1, len(sections)), 40.0/(len(sections) + 1))
a = self.compileSection(i, section, backing) if i != (len(sections)/2 + 1) else self.compileIntro(i, intro)
self.partialEncode(a)
del a
self.original.unload()
self.log("Adding ending...", 5)
self.partialEncode(
audio.AudioData(
self.sample_path + self.template['splash_ends'][(i + 1) % len(self.template['splash_ends'])],
sampleRate=44100,
numChannels=2,
verbose=False
)
)
self.log("Mixing...", 5)
self.mixwav(self.tempfile)
if self.deleteOriginal:
try:
unlink(self.infile)
except:
pass # File could have been deleted by an eager cleanup script
self.log("Mastering...", 5)
self.lame(self.tempfile, self.outfile)
unlink(self.tempfile)
self.log("Adding artwork...", 20)
self.updateTags(titleSuffix = " (Wub Machine Electro Remix)")
return self.outfile
if __name__ == "__main__":
CMDRemix(ElectroHouse)
|
glovebx/odoo
|
refs/heads/8.0
|
addons/payment_ogone/tests/test_ogone.py
|
430
|
# -*- coding: utf-8 -*-
from lxml import objectify
import time
import urlparse
from openerp.addons.payment.models.payment_acquirer import ValidationError
from openerp.addons.payment.tests.common import PaymentAcquirerCommon
from openerp.addons.payment_ogone.controllers.main import OgoneController
from openerp.tools import mute_logger
class OgonePayment(PaymentAcquirerCommon):
def setUp(self):
super(OgonePayment, self).setUp()
cr, uid = self.cr, self.uid
self.base_url = self.registry('ir.config_parameter').get_param(cr, uid, 'web.base.url')
        # get the ogone account
model, self.ogone_id = self.registry('ir.model.data').get_object_reference(cr, uid, 'payment_ogone', 'payment_acquirer_ogone')
def test_10_ogone_form_render(self):
cr, uid, context = self.cr, self.uid, {}
        # make sure we are running against the test environment
ogone = self.payment_acquirer.browse(self.cr, self.uid, self.ogone_id, None)
self.assertEqual(ogone.environment, 'test', 'test without test environment')
# ----------------------------------------
# Test: button direct rendering + shasign
# ----------------------------------------
form_values = {
'PSPID': 'dummy',
'ORDERID': 'test_ref0',
'AMOUNT': '1',
'CURRENCY': 'EUR',
'LANGUAGE': 'en_US',
'CN': 'Norbert Buyer',
'EMAIL': 'norbert.buyer@example.com',
'OWNERZIP': '1000',
'OWNERADDRESS': 'Huge Street 2/543',
'OWNERCTY': 'Belgium',
'OWNERTOWN': 'Sin City',
'OWNERTELNO': '0032 12 34 56 78',
'SHASIGN': '815f67b8ff70d234ffcf437c13a9fa7f807044cc',
'ACCEPTURL': '%s' % urlparse.urljoin(self.base_url, OgoneController._accept_url),
'DECLINEURL': '%s' % urlparse.urljoin(self.base_url, OgoneController._decline_url),
'EXCEPTIONURL': '%s' % urlparse.urljoin(self.base_url, OgoneController._exception_url),
'CANCELURL': '%s' % urlparse.urljoin(self.base_url, OgoneController._cancel_url),
}
# render the button
res = self.payment_acquirer.render(
cr, uid, self.ogone_id,
'test_ref0', 0.01, self.currency_euro_id,
partner_id=None,
partner_values=self.buyer_values,
context=context)
# check form result
tree = objectify.fromstring(res)
self.assertEqual(tree.get('action'), 'https://secure.ogone.com/ncol/test/orderstandard.asp', 'ogone: wrong form POST url')
for form_input in tree.input:
if form_input.get('name') in ['submit']:
continue
self.assertEqual(
form_input.get('value'),
form_values[form_input.get('name')],
'ogone: wrong value for input %s: received %s instead of %s' % (form_input.get('name'), form_input.get('value'), form_values[form_input.get('name')])
)
# ----------------------------------------
# Test2: button using tx + validation
# ----------------------------------------
# create a new draft tx
tx_id = self.payment_transaction.create(
cr, uid, {
'amount': 0.01,
'acquirer_id': self.ogone_id,
'currency_id': self.currency_euro_id,
'reference': 'test_ref0',
'partner_id': self.buyer_id,
}, context=context
)
# render the button
res = self.payment_acquirer.render(
cr, uid, self.ogone_id,
'should_be_erased', 0.01, self.currency_euro,
tx_id=tx_id,
partner_id=None,
partner_values=self.buyer_values,
context=context)
# check form result
tree = objectify.fromstring(res)
self.assertEqual(tree.get('action'), 'https://secure.ogone.com/ncol/test/orderstandard.asp', 'ogone: wrong form POST url')
for form_input in tree.input:
if form_input.get('name') in ['submit']:
continue
self.assertEqual(
form_input.get('value'),
form_values[form_input.get('name')],
'ogone: wrong value for form input %s: received %s instead of %s' % (form_input.get('name'), form_input.get('value'), form_values[form_input.get('name')])
)
@mute_logger('openerp.addons.payment_ogone.models.ogone', 'ValidationError')
def test_20_ogone_form_management(self):
cr, uid, context = self.cr, self.uid, {}
        # make sure we are running against the test environment
ogone = self.payment_acquirer.browse(self.cr, self.uid, self.ogone_id, None)
self.assertEqual(ogone.environment, 'test', 'test without test environment')
# typical data posted by ogone after client has successfully paid
ogone_post_data = {
'orderID': u'test_ref_2',
'STATUS': u'9',
'CARDNO': u'XXXXXXXXXXXX0002',
'PAYID': u'25381582',
'CN': u'Norbert Buyer',
'NCERROR': u'0',
'TRXDATE': u'11/15/13',
'IP': u'85.201.233.72',
'BRAND': u'VISA',
'ACCEPTANCE': u'test123',
'currency': u'EUR',
'amount': u'1.95',
'SHASIGN': u'7B7B0ED9CBC4A85543A9073374589033A62A05A5',
'ED': u'0315',
'PM': u'CreditCard'
}
# should raise error about unknown tx
with self.assertRaises(ValidationError):
self.payment_transaction.ogone_form_feedback(cr, uid, ogone_post_data, context=context)
# create tx
tx_id = self.payment_transaction.create(
cr, uid, {
'amount': 1.95,
'acquirer_id': self.ogone_id,
'currency_id': self.currency_euro_id,
'reference': 'test_ref_2',
'partner_name': 'Norbert Buyer',
'partner_country_id': self.country_france_id,
}, context=context
)
# validate it
self.payment_transaction.ogone_form_feedback(cr, uid, ogone_post_data, context=context)
# check state
tx = self.payment_transaction.browse(cr, uid, tx_id, context=context)
self.assertEqual(tx.state, 'done', 'ogone: validation did not put tx into done state')
self.assertEqual(tx.ogone_payid, ogone_post_data.get('PAYID'), 'ogone: validation did not update tx payid')
# reset tx
tx.write({'state': 'draft', 'date_validate': False, 'ogone_payid': False})
# now ogone post is ok: try to modify the SHASIGN
ogone_post_data['SHASIGN'] = 'a4c16bae286317b82edb49188d3399249a784691'
with self.assertRaises(ValidationError):
self.payment_transaction.ogone_form_feedback(cr, uid, ogone_post_data, context=context)
# simulate an error
ogone_post_data['STATUS'] = 2
ogone_post_data['SHASIGN'] = 'a4c16bae286317b82edb49188d3399249a784691'
self.payment_transaction.ogone_form_feedback(cr, uid, ogone_post_data, context=context)
# check state
tx = self.payment_transaction.browse(cr, uid, tx_id, context=context)
self.assertEqual(tx.state, 'error', 'ogone: erroneous validation did not put tx into error state')
def test_30_ogone_s2s(self):
test_ref = 'test_ref_%.15f' % time.time()
cr, uid, context = self.cr, self.uid, {}
# be sure not to do stupid thing
ogone = self.payment_acquirer.browse(self.cr, self.uid, self.ogone_id, None)
self.assertEqual(ogone.environment, 'test', 'test without test environment')
# create a new draft tx
tx_id = self.payment_transaction.create(
cr, uid, {
'amount': 0.01,
'acquirer_id': self.ogone_id,
'currency_id': self.currency_euro_id,
'reference': test_ref,
'partner_id': self.buyer_id,
'type': 'server2server',
}, context=context
)
# create an alias
res = self.payment_transaction.ogone_s2s_create_alias(
cr, uid, tx_id, {
'expiry_date_mm': '01',
'expiry_date_yy': '2015',
'holder_name': 'Norbert Poilu',
'number': '4000000000000002',
'brand': 'VISA',
}, context=context)
# check an alias is set, containing at least OPENERP
tx = self.payment_transaction.browse(cr, uid, tx_id, context=context)
self.assertIn('OPENERP', tx.partner_reference, 'ogone: wrong partner reference after creating an alias')
res = self.payment_transaction.ogone_s2s_execute(cr, uid, tx_id, {}, context=context)
# print res
# {
# 'orderID': u'reference',
# 'STATUS': u'9',
# 'CARDNO': u'XXXXXXXXXXXX0002',
# 'PAYID': u'24998692',
# 'CN': u'Norbert Poilu',
# 'NCERROR': u'0',
# 'TRXDATE': u'11/05/13',
# 'IP': u'85.201.233.72',
# 'BRAND': u'VISA',
# 'ACCEPTANCE': u'test123',
# 'currency': u'EUR',
# 'amount': u'1.95',
# 'SHASIGN': u'EFDC56879EF7DE72CCF4B397076B5C9A844CB0FA',
# 'ED': u'0314',
# 'PM': u'CreditCard'
# }
|
poojavade/Genomics_Docker
|
refs/heads/master
|
Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/statsmodels-0.5.0-py2.7-linux-x86_64.egg/statsmodels/examples/ex_pairwise.py
|
3
|
# -*- coding: utf-8 -*-
"""
Created on Sun Mar 24 10:26:39 2013
Author: Josef Perktold
"""
from statsmodels.compatnp.py3k import BytesIO, asbytes
import numpy as np
from numpy.testing import assert_almost_equal, assert_equal
ss = '''\
43.9 1 1
39.0 1 2
46.7 1 3
43.8 1 4
44.2 1 5
47.7 1 6
43.6 1 7
38.9 1 8
43.6 1 9
40.0 1 10
89.8 2 1
87.1 2 2
92.7 2 3
90.6 2 4
87.7 2 5
92.4 2 6
86.1 2 7
88.1 2 8
90.8 2 9
89.1 2 10
68.4 3 1
69.3 3 2
68.5 3 3
66.4 3 4
70.0 3 5
68.1 3 6
70.6 3 7
65.2 3 8
63.8 3 9
69.2 3 10
36.2 4 1
45.2 4 2
40.7 4 3
40.5 4 4
39.3 4 5
40.3 4 6
43.2 4 7
38.7 4 8
40.9 4 9
39.7 4 10'''
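#columns: Rust Brand Replication (matches the recfromtxt names used below)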
#idx Treatment StressReduction
ss2 = '''\
1 mental 2
2 mental 2
3 mental 3
4 mental 4
5 mental 4
6 mental 5
7 mental 3
8 mental 4
9 mental 4
10 mental 4
11 physical 4
12 physical 4
13 physical 3
14 physical 5
15 physical 4
16 physical 1
17 physical 1
18 physical 2
19 physical 3
20 physical 3
21 medical 1
22 medical 2
23 medical 2
24 medical 2
25 medical 3
26 medical 2
27 medical 3
28 medical 1
29 medical 3
30 medical 1'''
ss3 = '''\
1 24.5
1 23.5
1 26.4
1 27.1
1 29.9
2 28.4
2 34.2
2 29.5
2 32.2
2 30.1
3 26.1
3 28.3
3 24.3
3 26.2
3 27.8'''
ss5 = '''\
2 - 3 4.340 0.691 7.989 ***
2 - 1 4.600 0.951 8.249 ***
3 - 2 -4.340 -7.989 -0.691 ***
3 - 1 0.260 -3.389 3.909 -
1 - 2 -4.600 -8.249 -0.951 ***
1 - 3 -0.260 -3.909 3.389 '''
#accommodate recfromtxt for python 3.2, requires bytes
ss = asbytes(ss)
ss2 = asbytes(ss2)
ss3 = asbytes(ss3)
ss5 = asbytes(ss5)
dta = np.recfromtxt(BytesIO(ss), names=("Rust","Brand","Replication"))
dta2 = np.recfromtxt(BytesIO(ss2), names = ("idx", "Treatment", "StressReduction"))
dta3 = np.recfromtxt(BytesIO(ss3), names = ("Brand", "Relief"))
dta5 = np.recfromtxt(BytesIO(ss5), names = ('pair', 'mean', 'lower', 'upper', 'sig'), delimiter='\t')
sas_ = dta5[[1,3,2]]
if __name__ == '__main__':
import statsmodels.stats.multicomp as multi #incomplete refactoring
mc = multi.MultiComparison(dta['Rust'], dta['Brand'])
res = mc.tukeyhsd()
print res[0]
mc2 = multi.MultiComparison(dta2['StressReduction'], dta2['Treatment'])
res2 = mc2.tukeyhsd()
print res2[0]
mc2s = multi.MultiComparison(dta2['StressReduction'][3:29], dta2['Treatment'][3:29])
res2s = mc2s.tukeyhsd()
print res2s[0]
res2s_001 = mc2s.tukeyhsd(alpha=0.01)
#R result
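    #(columns appear to be meandiff, lower, upper, p-value, one row per pair;
    # this reading is inferred from the confint comparison below, not stated
    # in the original)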
tukeyhsd2s = np.array([1.888889,0.8888889,-1,0.2658549,-0.5908785,-2.587133,3.511923,2.368656,0.5871331,0.002837638,0.150456,0.1266072]).reshape(3,4, order='F')
assert_almost_equal(res2s_001[1][4], tukeyhsd2s[:,1:3], decimal=3)
mc3 = multi.MultiComparison(dta3['Relief'], dta3['Brand'])
res3 = mc3.tukeyhsd()
print res3[0]
# for mci in [mc, mc2, mc3]:
# get_thsd(mci)
from scipy import stats
print mc2.allpairtest(stats.ttest_ind, method='b')[0]
'''same as SAS:
>>> np.var(mci.groupstats.groupdemean(), ddof=3)
4.6773333333333351
>>> var_ = np.var(mci.groupstats.groupdemean(), ddof=3)
>>> tukeyhsd(means, nobs, var_, df=None, alpha=0.05, q_crit=qsturng(0.95, 3, 12))[4]
array([[ 0.95263648, 8.24736352],
[-3.38736352, 3.90736352],
[-7.98736352, -0.69263648]])
>>> tukeyhsd(means, nobs, var_, df=None, alpha=0.05, q_crit=3.77278)[4]
array([[ 0.95098508, 8.24901492],
[-3.38901492, 3.90901492],
[-7.98901492, -0.69098508]])
'''
ss5 = '''\
Comparisons significant at the 0.05 level are indicated by ***.
BRAND
Comparison Difference
Between
Means Simultaneous 95% Confidence Limits Sign.
2 - 3 4.340 0.691 7.989 ***
2 - 1 4.600 0.951 8.249 ***
3 - 2 -4.340 -7.989 -0.691 ***
3 - 1 0.260 -3.389 3.909 -
1 - 2 -4.600 -8.249 -0.951 ***
1 - 3 -0.260 -3.909 3.389 '''
ss5 = '''\
2 - 3 4.340 0.691 7.989 ***
2 - 1 4.600 0.951 8.249 ***
3 - 2 -4.340 -7.989 -0.691 ***
3 - 1 0.260 -3.389 3.909 -
1 - 2 -4.600 -8.249 -0.951 ***
1 - 3 -0.260 -3.909 3.389 '''
import StringIO
dta5 = np.recfromtxt(StringIO.StringIO(ss5), names = ('pair', 'mean', 'lower', 'upper', 'sig'), delimiter='\t')
sas_ = dta5[[1,3,2]]
confint1 = res3[1][4]
confint2 = sas_[['lower','upper']].view(float).reshape((3,2))
assert_almost_equal(confint1, confint2, decimal=2)
reject1 = res3[1][1]
reject2 = sas_['sig'] == '***'
assert_equal(reject1, reject2)
meandiff1 = res3[1][2]
meandiff2 = sas_['mean']
assert_almost_equal(meandiff1, meandiff2, decimal=14)
|
gwhitehawk/RSS
|
refs/heads/master
|
rss/core.py
|
1
|
from treq import content, get
import feedparser
from rss import __version__
USER_AGENT = "RSS, Simply Syndicated %s" % (__version__,)
def parse(feed):
return feedparser.parse(feed)
def fetch(feed_url):
    feed = get(feed_url, headers={"User-Agent": [USER_AGENT]})
    return feed.addCallback(content).addCallback(parse)
|
aviciimaxwell/odoo
|
refs/heads/8.0
|
addons/account/account_bank.py
|
258
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.tools.translate import _
from openerp.osv import fields, osv
class bank(osv.osv):
_inherit = "res.partner.bank"
_columns = {
'journal_id': fields.many2one('account.journal', 'Account Journal', help="This journal will be created automatically for this bank account when you save the record"),
'currency_id': fields.related('journal_id', 'currency', type="many2one", relation='res.currency', readonly=True,
string="Currency", help="Currency of the related account journal."),
}
def create(self, cr, uid, data, context=None):
result = super(bank, self).create(cr, uid, data, context=context)
self.post_write(cr, uid, [result], context=context)
return result
def write(self, cr, uid, ids, data, context=None):
result = super(bank, self).write(cr, uid, ids, data, context=context)
self.post_write(cr, uid, ids, context=context)
return result
def _prepare_name(self, bank):
"Return the name to use when creating a bank journal"
return (bank.bank_name or '') + ' ' + (bank.acc_number or '')
def _prepare_name_get(self, cr, uid, bank_dicts, context=None):
"""Add ability to have %(currency_name)s in the format_layout of res.partner.bank.type"""
currency_ids = list(set(data['currency_id'][0] for data in bank_dicts if data.get('currency_id')))
currencies = self.pool.get('res.currency').browse(cr, uid, currency_ids, context=context)
currency_name = dict((currency.id, currency.name) for currency in currencies)
for data in bank_dicts:
data['currency_name'] = data.get('currency_id') and currency_name[data['currency_id'][0]] or ''
return super(bank, self)._prepare_name_get(cr, uid, bank_dicts, context=context)
def post_write(self, cr, uid, ids, context=None):
if isinstance(ids, (int, long)):
ids = [ids]
obj_acc = self.pool.get('account.account')
obj_data = self.pool.get('ir.model.data')
for bank in self.browse(cr, uid, ids, context):
if bank.company_id and not bank.journal_id:
# Find the code and parent of the bank account to create
dig = 6
current_num = 1
ids = obj_acc.search(cr, uid, [('type','=','liquidity'), ('company_id', '=', bank.company_id.id), ('parent_id', '!=', False)], context=context)
# No liquidity account exists, no template available
if not ids: continue
ref_acc_bank = obj_acc.browse(cr, uid, ids[0], context=context).parent_id
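                # Build a dig-digit candidate code from the parent's code,
                # zero-padding and appending an increasing suffix until a
                # code unused by this company is found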
while True:
new_code = str(ref_acc_bank.code.ljust(dig-len(str(current_num)), '0')) + str(current_num)
ids = obj_acc.search(cr, uid, [('code', '=', new_code), ('company_id', '=', bank.company_id.id)])
if not ids:
break
current_num += 1
name = self._prepare_name(bank)
acc = {
'name': name,
'code': new_code,
'type': 'liquidity',
'user_type': ref_acc_bank.user_type.id,
'reconcile': False,
'parent_id': ref_acc_bank.id,
'company_id': bank.company_id.id,
}
acc_bank_id = obj_acc.create(cr,uid,acc,context=context)
jour_obj = self.pool.get('account.journal')
new_code = 1
while True:
code = _('BNK')+str(new_code)
ids = jour_obj.search(cr, uid, [('code','=',code)], context=context)
if not ids:
break
new_code += 1
#create the bank journal
vals_journal = {
'name': name,
'code': code,
'type': 'bank',
'company_id': bank.company_id.id,
'analytic_journal_id': False,
'default_credit_account_id': acc_bank_id,
'default_debit_account_id': acc_bank_id,
}
journal_id = jour_obj.create(cr, uid, vals_journal, context=context)
self.write(cr, uid, [bank.id], {'journal_id': journal_id}, context=context)
return True
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
SrNetoChan/QGIS
|
refs/heads/master
|
scripts/qgis_fixes/fix_operator.py
|
77
|
from lib2to3.fixes.fix_operator import FixOperator
|
thundernet8/WRGameVideos-API
|
refs/heads/master
|
venv/lib/python2.7/site-packages/pip/_vendor/html5lib/sanitizer.py
|
283
|
from __future__ import absolute_import, division, unicode_literals
import re
from xml.sax.saxutils import escape, unescape
from six.moves import urllib_parse as urlparse
from .tokenizer import HTMLTokenizer
from .constants import tokenTypes
content_type_rgx = re.compile(r'''
^
# Match a content type <application>/<type>
(?P<content_type>[-a-zA-Z0-9.]+/[-a-zA-Z0-9.]+)
# Match any character set and encoding
(?:(?:;charset=(?:[-a-zA-Z0-9]+)(?:;(?:base64))?)
|(?:;(?:base64))?(?:;charset=(?:[-a-zA-Z0-9]+))?)
# Assume the rest is data
,.*
$
''',
re.VERBOSE)
class HTMLSanitizerMixin(object):
""" sanitization of XHTML+MathML+SVG and of inline style attributes."""
acceptable_elements = ['a', 'abbr', 'acronym', 'address', 'area',
'article', 'aside', 'audio', 'b', 'big', 'blockquote', 'br', 'button',
'canvas', 'caption', 'center', 'cite', 'code', 'col', 'colgroup',
'command', 'datagrid', 'datalist', 'dd', 'del', 'details', 'dfn',
'dialog', 'dir', 'div', 'dl', 'dt', 'em', 'event-source', 'fieldset',
'figcaption', 'figure', 'footer', 'font', 'form', 'header', 'h1',
'h2', 'h3', 'h4', 'h5', 'h6', 'hr', 'i', 'img', 'input', 'ins',
'keygen', 'kbd', 'label', 'legend', 'li', 'm', 'map', 'menu', 'meter',
'multicol', 'nav', 'nextid', 'ol', 'output', 'optgroup', 'option',
'p', 'pre', 'progress', 'q', 's', 'samp', 'section', 'select',
'small', 'sound', 'source', 'spacer', 'span', 'strike', 'strong',
'sub', 'sup', 'table', 'tbody', 'td', 'textarea', 'time', 'tfoot',
'th', 'thead', 'tr', 'tt', 'u', 'ul', 'var', 'video']
mathml_elements = ['maction', 'math', 'merror', 'mfrac', 'mi',
'mmultiscripts', 'mn', 'mo', 'mover', 'mpadded', 'mphantom',
'mprescripts', 'mroot', 'mrow', 'mspace', 'msqrt', 'mstyle', 'msub',
'msubsup', 'msup', 'mtable', 'mtd', 'mtext', 'mtr', 'munder',
'munderover', 'none']
svg_elements = ['a', 'animate', 'animateColor', 'animateMotion',
'animateTransform', 'clipPath', 'circle', 'defs', 'desc', 'ellipse',
'font-face', 'font-face-name', 'font-face-src', 'g', 'glyph', 'hkern',
'linearGradient', 'line', 'marker', 'metadata', 'missing-glyph',
'mpath', 'path', 'polygon', 'polyline', 'radialGradient', 'rect',
'set', 'stop', 'svg', 'switch', 'text', 'title', 'tspan', 'use']
acceptable_attributes = ['abbr', 'accept', 'accept-charset', 'accesskey',
'action', 'align', 'alt', 'autocomplete', 'autofocus', 'axis',
'background', 'balance', 'bgcolor', 'bgproperties', 'border',
'bordercolor', 'bordercolordark', 'bordercolorlight', 'bottompadding',
'cellpadding', 'cellspacing', 'ch', 'challenge', 'char', 'charoff',
'choff', 'charset', 'checked', 'cite', 'class', 'clear', 'color',
'cols', 'colspan', 'compact', 'contenteditable', 'controls', 'coords',
'data', 'datafld', 'datapagesize', 'datasrc', 'datetime', 'default',
'delay', 'dir', 'disabled', 'draggable', 'dynsrc', 'enctype', 'end',
'face', 'for', 'form', 'frame', 'galleryimg', 'gutter', 'headers',
'height', 'hidefocus', 'hidden', 'high', 'href', 'hreflang', 'hspace',
'icon', 'id', 'inputmode', 'ismap', 'keytype', 'label', 'leftspacing',
'lang', 'list', 'longdesc', 'loop', 'loopcount', 'loopend',
'loopstart', 'low', 'lowsrc', 'max', 'maxlength', 'media', 'method',
'min', 'multiple', 'name', 'nohref', 'noshade', 'nowrap', 'open',
'optimum', 'pattern', 'ping', 'point-size', 'poster', 'pqg', 'preload',
'prompt', 'radiogroup', 'readonly', 'rel', 'repeat-max', 'repeat-min',
'replace', 'required', 'rev', 'rightspacing', 'rows', 'rowspan',
'rules', 'scope', 'selected', 'shape', 'size', 'span', 'src', 'start',
'step', 'style', 'summary', 'suppress', 'tabindex', 'target',
'template', 'title', 'toppadding', 'type', 'unselectable', 'usemap',
'urn', 'valign', 'value', 'variable', 'volume', 'vspace', 'vrml',
'width', 'wrap', 'xml:lang']
mathml_attributes = ['actiontype', 'align', 'columnalign', 'columnalign',
'columnalign', 'columnlines', 'columnspacing', 'columnspan', 'depth',
'display', 'displaystyle', 'equalcolumns', 'equalrows', 'fence',
'fontstyle', 'fontweight', 'frame', 'height', 'linethickness', 'lspace',
'mathbackground', 'mathcolor', 'mathvariant', 'mathvariant', 'maxsize',
'minsize', 'other', 'rowalign', 'rowalign', 'rowalign', 'rowlines',
'rowspacing', 'rowspan', 'rspace', 'scriptlevel', 'selection',
'separator', 'stretchy', 'width', 'width', 'xlink:href', 'xlink:show',
'xlink:type', 'xmlns', 'xmlns:xlink']
svg_attributes = ['accent-height', 'accumulate', 'additive', 'alphabetic',
'arabic-form', 'ascent', 'attributeName', 'attributeType',
'baseProfile', 'bbox', 'begin', 'by', 'calcMode', 'cap-height',
'class', 'clip-path', 'color', 'color-rendering', 'content', 'cx',
'cy', 'd', 'dx', 'dy', 'descent', 'display', 'dur', 'end', 'fill',
'fill-opacity', 'fill-rule', 'font-family', 'font-size',
'font-stretch', 'font-style', 'font-variant', 'font-weight', 'from',
'fx', 'fy', 'g1', 'g2', 'glyph-name', 'gradientUnits', 'hanging',
'height', 'horiz-adv-x', 'horiz-origin-x', 'id', 'ideographic', 'k',
'keyPoints', 'keySplines', 'keyTimes', 'lang', 'marker-end',
'marker-mid', 'marker-start', 'markerHeight', 'markerUnits',
'markerWidth', 'mathematical', 'max', 'min', 'name', 'offset',
'opacity', 'orient', 'origin', 'overline-position',
'overline-thickness', 'panose-1', 'path', 'pathLength', 'points',
'preserveAspectRatio', 'r', 'refX', 'refY', 'repeatCount',
'repeatDur', 'requiredExtensions', 'requiredFeatures', 'restart',
'rotate', 'rx', 'ry', 'slope', 'stemh', 'stemv', 'stop-color',
'stop-opacity', 'strikethrough-position', 'strikethrough-thickness',
'stroke', 'stroke-dasharray', 'stroke-dashoffset', 'stroke-linecap',
'stroke-linejoin', 'stroke-miterlimit', 'stroke-opacity',
'stroke-width', 'systemLanguage', 'target', 'text-anchor', 'to',
'transform', 'type', 'u1', 'u2', 'underline-position',
'underline-thickness', 'unicode', 'unicode-range', 'units-per-em',
'values', 'version', 'viewBox', 'visibility', 'width', 'widths', 'x',
'x-height', 'x1', 'x2', 'xlink:actuate', 'xlink:arcrole',
'xlink:href', 'xlink:role', 'xlink:show', 'xlink:title', 'xlink:type',
'xml:base', 'xml:lang', 'xml:space', 'xmlns', 'xmlns:xlink', 'y',
'y1', 'y2', 'zoomAndPan']
attr_val_is_uri = ['href', 'src', 'cite', 'action', 'longdesc', 'poster', 'background', 'datasrc',
'dynsrc', 'lowsrc', 'ping', 'poster', 'xlink:href', 'xml:base']
svg_attr_val_allows_ref = ['clip-path', 'color-profile', 'cursor', 'fill',
'filter', 'marker', 'marker-start', 'marker-mid', 'marker-end',
'mask', 'stroke']
svg_allow_local_href = ['altGlyph', 'animate', 'animateColor',
'animateMotion', 'animateTransform', 'cursor', 'feImage', 'filter',
'linearGradient', 'pattern', 'radialGradient', 'textpath', 'tref',
'set', 'use']
acceptable_css_properties = ['azimuth', 'background-color',
'border-bottom-color', 'border-collapse', 'border-color',
'border-left-color', 'border-right-color', 'border-top-color', 'clear',
'color', 'cursor', 'direction', 'display', 'elevation', 'float', 'font',
'font-family', 'font-size', 'font-style', 'font-variant', 'font-weight',
'height', 'letter-spacing', 'line-height', 'overflow', 'pause',
'pause-after', 'pause-before', 'pitch', 'pitch-range', 'richness',
'speak', 'speak-header', 'speak-numeral', 'speak-punctuation',
'speech-rate', 'stress', 'text-align', 'text-decoration', 'text-indent',
'unicode-bidi', 'vertical-align', 'voice-family', 'volume',
'white-space', 'width']
acceptable_css_keywords = ['auto', 'aqua', 'black', 'block', 'blue',
'bold', 'both', 'bottom', 'brown', 'center', 'collapse', 'dashed',
'dotted', 'fuchsia', 'gray', 'green', '!important', 'italic', 'left',
'lime', 'maroon', 'medium', 'none', 'navy', 'normal', 'nowrap', 'olive',
'pointer', 'purple', 'red', 'right', 'solid', 'silver', 'teal', 'top',
'transparent', 'underline', 'white', 'yellow']
acceptable_svg_properties = ['fill', 'fill-opacity', 'fill-rule',
'stroke', 'stroke-width', 'stroke-linecap', 'stroke-linejoin',
'stroke-opacity']
acceptable_protocols = ['ed2k', 'ftp', 'http', 'https', 'irc',
'mailto', 'news', 'gopher', 'nntp', 'telnet', 'webcal',
'xmpp', 'callto', 'feed', 'urn', 'aim', 'rsync', 'tag',
'ssh', 'sftp', 'rtsp', 'afs', 'data']
acceptable_content_types = ['image/png', 'image/jpeg', 'image/gif', 'image/webp', 'image/bmp', 'text/plain']
# subclasses may define their own versions of these constants
allowed_elements = acceptable_elements + mathml_elements + svg_elements
allowed_attributes = acceptable_attributes + mathml_attributes + svg_attributes
allowed_css_properties = acceptable_css_properties
allowed_css_keywords = acceptable_css_keywords
allowed_svg_properties = acceptable_svg_properties
allowed_protocols = acceptable_protocols
allowed_content_types = acceptable_content_types
# Sanitize the +html+, escaping all elements not in ALLOWED_ELEMENTS, and
# stripping out all # attributes not in ALLOWED_ATTRIBUTES. Style
# attributes are parsed, and a restricted set, # specified by
# ALLOWED_CSS_PROPERTIES and ALLOWED_CSS_KEYWORDS, are allowed through.
# attributes in ATTR_VAL_IS_URI are scanned, and only URI schemes specified
# in ALLOWED_PROTOCOLS are allowed.
#
    # sanitize_html('<script> do_nasty_stuff() </script>')
    # => &lt;script> do_nasty_stuff() &lt;/script>
# sanitize_html('<a href="javascript: sucker();">Click here for $100</a>')
# => <a>Click here for $100</a>
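    # A further illustrative example (not from the original comments): data
    # URIs are kept only for whitelisted content types, so with the defaults
    #   sanitize_html('<img src="data:image/png;base64,...">')
    # keeps the src attribute, while a 'data:text/html,...' src is removed
    # by allowed_token() below.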
def sanitize_token(self, token):
# accommodate filters which use token_type differently
token_type = token["type"]
if token_type in list(tokenTypes.keys()):
token_type = tokenTypes[token_type]
if token_type in (tokenTypes["StartTag"], tokenTypes["EndTag"],
tokenTypes["EmptyTag"]):
if token["name"] in self.allowed_elements:
return self.allowed_token(token, token_type)
else:
return self.disallowed_token(token, token_type)
elif token_type == tokenTypes["Comment"]:
pass
else:
return token
def allowed_token(self, token, token_type):
if "data" in token:
attrs = dict([(name, val) for name, val in
token["data"][::-1]
if name in self.allowed_attributes])
for attr in self.attr_val_is_uri:
if attr not in attrs:
continue
val_unescaped = re.sub("[`\000-\040\177-\240\s]+", '',
unescape(attrs[attr])).lower()
# remove replacement characters from unescaped characters
val_unescaped = val_unescaped.replace("\ufffd", "")
uri = urlparse.urlparse(val_unescaped)
if uri:
if uri.scheme not in self.allowed_protocols:
del attrs[attr]
                    # 'elif' prevents a second delete when the scheme was
                    # already rejected, and never calls m.group() on None
                    elif uri.scheme == 'data':
                        m = content_type_rgx.match(uri.path)
                        if not m:
                            del attrs[attr]
                        elif m.group('content_type') not in self.allowed_content_types:
                            del attrs[attr]
for attr in self.svg_attr_val_allows_ref:
if attr in attrs:
attrs[attr] = re.sub(r'url\s*\(\s*[^#\s][^)]+?\)',
' ',
unescape(attrs[attr]))
if (token["name"] in self.svg_allow_local_href and
'xlink:href' in attrs and re.search('^\s*[^#\s].*',
attrs['xlink:href'])):
del attrs['xlink:href']
if 'style' in attrs:
attrs['style'] = self.sanitize_css(attrs['style'])
token["data"] = [[name, val] for name, val in list(attrs.items())]
return token
def disallowed_token(self, token, token_type):
if token_type == tokenTypes["EndTag"]:
token["data"] = "</%s>" % token["name"]
elif token["data"]:
attrs = ''.join([' %s="%s"' % (k, escape(v)) for k, v in token["data"]])
token["data"] = "<%s%s>" % (token["name"], attrs)
else:
token["data"] = "<%s>" % token["name"]
if token.get("selfClosing"):
token["data"] = token["data"][:-1] + "/>"
if token["type"] in list(tokenTypes.keys()):
token["type"] = "Characters"
else:
token["type"] = tokenTypes["Characters"]
del token["name"]
return token
def sanitize_css(self, style):
# disallow urls
style = re.compile('url\s*\(\s*[^\s)]+?\s*\)\s*').sub(' ', style)
# gauntlet
if not re.match("""^([:,;#%.\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'|"[\s\w]+"|\([\d,\s]+\))*$""", style):
return ''
if not re.match("^\s*([-\w]+\s*:[^:;]*(;\s*|$))*$", style):
return ''
clean = []
for prop, value in re.findall("([-\w]+)\s*:\s*([^:;]*)", style):
if not value:
continue
if prop.lower() in self.allowed_css_properties:
clean.append(prop + ': ' + value + ';')
elif prop.split('-')[0].lower() in ['background', 'border', 'margin',
'padding']:
for keyword in value.split():
if keyword not in self.acceptable_css_keywords and \
not re.match("^(#[0-9a-f]+|rgb\(\d+%?,\d*%?,?\d*%?\)?|\d{0,2}\.?\d{0,2}(cm|em|ex|in|mm|pc|pt|px|%|,|\))?)$", keyword):
break
else:
clean.append(prop + ': ' + value + ';')
elif prop.lower() in self.allowed_svg_properties:
clean.append(prop + ': ' + value + ';')
return ' '.join(clean)
class HTMLSanitizer(HTMLTokenizer, HTMLSanitizerMixin):
def __init__(self, stream, encoding=None, parseMeta=True, useChardet=True,
lowercaseElementName=False, lowercaseAttrName=False, parser=None):
# Change case matching defaults as we only output lowercase html anyway
# This solution doesn't seem ideal...
HTMLTokenizer.__init__(self, stream, encoding, parseMeta, useChardet,
lowercaseElementName, lowercaseAttrName, parser=parser)
def __iter__(self):
for token in HTMLTokenizer.__iter__(self):
token = self.sanitize_token(token)
if token:
yield token
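# ---------------------------------------------------------------------------
# Illustrative usage (a sketch, not part of the vendored module). With the
# html5lib 0.999-era API the sanitizer is plugged in as the tokenizer class;
# the top-level "html5lib" import below is an assumption about how the
# library is installed:
#
#     import html5lib
#     from html5lib import sanitizer
#
#     parser = html5lib.HTMLParser(tokenizer=sanitizer.HTMLSanitizer)
#     fragment = parser.parseFragment('<a href="javascript:alert(1)">x</a>')
#     # the javascript: href is stripped; the <a> element itself survives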
|
diplomacy/research
|
refs/heads/master
|
diplomacy_research/models/policy/order_based/dataset/tests/test_no_press_value_all_builder.py
|
1
|
# ==============================================================================
# Copyright 2019 - Philip Paquette
#
# NOTICE: Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# ==============================================================================
""" Runs tests for the NoPressValue (All) Dataset Builder """
from diplomacy_research.models.policy.tests.policy_builder_test_setup import PolicyBuilderTestSetup
from diplomacy_research.models.policy.order_based.dataset.no_press_value_all import DatasetBuilder
from diplomacy_research.utils.process import run_in_separate_process
def launch():
""" Launches the tests """
testable_class = PolicyBuilderTestSetup(DatasetBuilder())
testable_class.run_tests()
def test_run():
""" Runs the test """
run_in_separate_process(target=launch, timeout=60)
|
ties/flask-daapserver
|
refs/heads/master
|
daapserver/daap_data.py
|
2
|
__all__ = [
"dmap_data_types", "dmap_names", "dmap_reverse_data_types",
"dmap_code_types"]
dmap_code_types = {
"abal": ("daap.browsealbumlisting", 12),
"abar": ("daap.browseartistlisting", 12),
"abcp": ("daap.browsecomposerlisting", 12),
"abgn": ("daap.browsegenrelisting", 12),
"abpl": ("daap.baseplaylist", 1),
"abro": ("daap.databasebrowse", 12),
"adbs": ("daap.databasesongs", 12),
"aeAI": ("com.apple.itunes.itms-artistid", 5),
"aeCI": ("com.apple.itunes.itms-composerid", 5),
"aeEN": ("com.apple.itunes.episode-num-str", 9),
"aeES": ("com.apple.itunes.episode-sort", 5),
"aeFP": ("com.apple.itunes.req-fplay", 1),
"aeGD": ("com.apple.itunes.gapless-enc-dr", 5),
"aeGE": ("com.apple.itunes.gapless-enc-del", 5),
"aeGH": ("com.apple.itunes.gapless-heur", 5),
"aeGI": ("com.apple.itunes.itms-genreid", 5),
"aeGR": ("com.apple.itunes.gapless-resy", 7),
"aeGU": ("com.apple.itunes.gapless-dur", 7),
"aeHV": ("com.apple.itunes.has-video", 1),
"aeMK": ("com.apple.itunes.mediakind", 1),
"aeMQ": ("com.apple.itunes.unknown-MQ", 1),
"aeNN": ("com.apple.itunes.network-name", 9),
"aeNV": ("com.apple.itunes.norm-volume", 5),
"aePC": ("com.apple.itunes.is-podcast", 1),
"aePI": ("com.apple.itunes.itms-playlistid", 5),
"aePP": ("com.apple.itunes.is-podcast-playlist", 1),
"aePS": ("com.apple.itunes.special-playlist", 1),
"aeSF": ("com.apple.itunes.itms-storefrontid", 5),
"aeSI": ("com.apple.itunes.itms-songid", 5),
"aeSL": ("com.apple.itunes.unknown-SL", 1),
"aeSN": ("com.apple.itunes.series-name", 9),
"aeSP": ("com.apple.itunes.smart-playlist", 1),
"aeSR": ("com.apple.itunes.unknown-SR", 1),
"aeSU": ("com.apple.itunes.season-num", 5),
"aeSV": ("com.apple.itunes.music-sharing-version", 5),
"aeTr": ("com.apple.itunes.unknown-Tr", 1),
"agrp": ("daap.songgrouping", 9),
"aply": ("daap.databaseplaylists", 12),
"aprm": ("daap.playlistrepeatmode", 1),
"apro": ("daap.protocolversion", 11),
"apsm": ("daap.playlistshufflemode", 1),
"apso": ("daap.playlistsongs", 12),
"arif": ("daap.resolveinfo", 12),
"arsv": ("daap.resolve", 12),
"asaa": ("daap.songalbumartist", 9),
"asac": ("daap.songartworkcount", 3),
"asal": ("daap.songalbum", 9),
"asar": ("daap.songartist", 9),
"asbk": ("daap.bookmarkable", 1),
"asbo": ("daap.songbookmark", 5),
"asbr": ("daap.songbitrate", 3),
"asbt": ("daap.songbeatsperminute", 3),
"ascd": ("daap.songcodectype", 5),
"ascm": ("daap.songcomment", 9),
"ascn": ("daap.songcontentdescription", 9),
"asco": ("daap.songcompilation", 1),
"ascp": ("daap.songcomposer", 9),
"ascr": ("daap.songcontentrating", 1),
"ascs": ("daap.songcodecsubtype", 5),
"asct": ("daap.songcategory", 9),
"asda": ("daap.songdateadded", 10),
"asdb": ("daap.songdisabled", 1),
"asdc": ("daap.songdisccount", 3),
"asdk": ("daap.songdatakind", 1),
"asdm": ("daap.songdatemodified", 10),
"asdn": ("daap.songdiscnumber", 3),
"asdp": ("daap.songdatepurchased", 10),
"asdr": ("daap.songdatereleased", 10),
"asdt": ("daap.songdescription", 9),
"ased": ("daap.songextradata", 3),
"aseq": ("daap.songeqpreset", 9),
"asfm": ("daap.songformat", 9),
"asgn": ("daap.songgenre", 9),
"asgp": ("daap.songgapless", 1),
"asgr": ("daap.supportsgroups", 4),
"ashp": ("daap.songhasbeenplayed", 1),
"asky": ("daap.songkeywords", 9),
"aslc": ("daap.songlongcontentdescription", 9),
"asrv": ("daap.songrelativevolume", 2),
"assa": ("daap.sortartist", 9),
"assc": ("daap.sortcomposer", 9),
"assl": ("daap.sortalbumartist", 9),
"assn": ("daap.sortname", 9),
"assp": ("daap.songstoptime", 5),
"assr": ("daap.songsamplerate", 5),
"asss": ("daap.sortseriesname", 9),
"asst": ("daap.songstarttime", 5),
"assu": ("daap.sortalbum", 9),
"assz": ("daap.songsize", 5),
"astc": ("daap.songtrackcount", 3),
"astm": ("daap.songtime", 5),
"astn": ("daap.songtracknumber", 3),
"asul": ("daap.songdataurl", 9),
"asur": ("daap.songuserrating", 1),
"asyr": ("daap.songyear", 3),
"ated": ("daap.supportsextradata", 3),
"avdb": ("daap.serverdatabases", 12),
"mbcl": ("dmap.bag", 12),
"mccr": ("dmap.contentcodesresponse", 12),
"mcna": ("dmap.contentcodesname", 9),
"mcnm": ("dmap.contentcodesnumber", 5),
"mcon": ("dmap.container", 12),
"mctc": ("dmap.containercount", 5),
"mcti": ("dmap.containeritemid", 5),
"mcty": ("dmap.contentcodestype", 3),
"mdcl": ("dmap.dictionary", 12),
"miid": ("dmap.itemid", 5),
"mikd": ("dmap.itemkind", 1),
"mimc": ("dmap.itemcount", 5),
"minm": ("dmap.itemname", 9),
"mlcl": ("dmap.listing", 12),
"mlid": ("dmap.sessionid", 5),
"mlit": ("dmap.listingitem", 12),
"mlog": ("dmap.loginresponse", 12),
"mpco": ("dmap.parentcontainerid", 5),
"mper": ("dmap.persistentid", 7),
"mpro": ("dmap.protocolversion", 11),
"mrco": ("dmap.returnedcount", 5),
"msal": ("dmap.supportsautologout", 1),
"msas": ("dmap.authenticationschemes", 5),
"msau": ("dmap.authenticationmethod", 1),
"msbr": ("dmap.supportsbrowse", 1),
"msdc": ("dmap.databasescount", 5),
"msed": ("dmap.supportsedit", 1),
"msex": ("dmap.supportsextensions", 1),
"msix": ("dmap.supportsindex", 1),
"mslr": ("dmap.loginrequired", 1),
"mspi": ("dmap.supportspersistentids", 1),
"msqy": ("dmap.supportsquery", 1),
"msrs": ("dmap.supportsresolve", 1),
"msrv": ("dmap.serverinforesponse", 12),
"mstc": ("dmap.utctime", 10),
"mstm": ("dmap.timeoutinterval", 5),
"msto": ("dmap.utcoffset", 6),
"msts": ("dmap.statusstring", 9),
"mstt": ("dmap.status", 5),
"msup": ("dmap.supportsupdate", 1),
"mtco": ("dmap.specifiedtotalcount", 5),
"mudl": ("dmap.deletedidlisting", 12),
"mupd": ("dmap.updateresponse", 12),
"musr": ("dmap.serverrevision", 5),
"muty": ("dmap.updatetype", 1),
"ppro": ("dpap.protocolversion", 5),
"pret": ("dpap.unknown", 12),
}
dmap_data_types = {
1: "b", # byte
2: "ub", # unsigned byte
3: "h", # short
4: "uh", # unsigned short
5: "i", # integer
6: "ui", # unsigned integer
7: "l", # long
8: "ul", # unsigned long
9: "s", # string
10: "t", # timestamp
11: "v", # version
12: "c", # container
}
dmap_names = {
dmap_code_types[k][0]: k for k in dmap_code_types
}
dmap_reverse_data_types = {
dmap_data_types[k]: k for k in dmap_data_types
}
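# Illustrative lookups (a sketch, not part of the original module) showing
# how the four tables relate:
#
#     dmap_code_types["minm"]        -> ("dmap.itemname", 9)
#     dmap_names["dmap.itemname"]    -> "minm"
#     dmap_data_types[9]             -> "s"   (string)
#     dmap_reverse_data_types["s"]   -> 9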
|
MalloyPower/parsing-python
|
refs/heads/master
|
front-end/testsuite-python-lib/Python-2.4.3/Lib/encodings/charmap.py
|
12
|
""" Generic Python Character Mapping Codec.
Use this codec directly rather than through the automatic
conversion mechanisms supplied by unicode() and .encode().
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
# Note: Binding these as C functions will result in the class not
# converting them to methods. This is intended.
encode = codecs.charmap_encode
decode = codecs.charmap_decode
class StreamWriter(Codec,codecs.StreamWriter):
def __init__(self,stream,errors='strict',mapping=None):
codecs.StreamWriter.__init__(self,stream,errors)
self.mapping = mapping
def encode(self,input,errors='strict'):
return Codec.encode(input,errors,self.mapping)
class StreamReader(Codec,codecs.StreamReader):
def __init__(self,stream,errors='strict',mapping=None):
codecs.StreamReader.__init__(self,stream,errors)
self.mapping = mapping
def decode(self,input,errors='strict'):
return Codec.decode(input,errors,self.mapping)
### encodings module API
def getregentry():
return (Codec.encode,Codec.decode,StreamReader,StreamWriter)
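### Illustrative use (a sketch, not part of the original module): decoding
### with an explicit mapping table,
###
###     >>> Codec.decode('\x00\x01', 'strict', {0: u'a', 1: u'b'})
###     (u'ab', 2)
###
### i.e. charmap_decode maps each input byte through the given table.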
|
simonwydooghe/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/misc/ovirt.py
|
49
|
#!/usr/bin/python
# Copyright: (c) 2013, Vincent Van der Kussen <vincent at vanderkussen.org>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ovirt
author:
- Vincent Van der Kussen (@vincentvdk)
short_description: oVirt/RHEV platform management
description:
- This module only supports oVirt/RHEV version 3. A newer module M(ovirt_vm) supports oVirt/RHV version 4.
- Allows you to create new instances, either from scratch or an image, in addition to deleting or stopping instances on the oVirt/RHEV platform.
version_added: "1.4"
options:
user:
description:
- The user to authenticate with.
required: true
url:
description:
- The url of the oVirt instance.
required: true
instance_name:
description:
- The name of the instance to use.
required: true
aliases: [ vmname ]
password:
description:
- Password of the user to authenticate with.
required: true
image:
description:
- The template to use for the instance.
resource_type:
description:
- Whether you want to deploy an image or create an instance from scratch.
choices: [ new, template ]
zone:
description:
- Deploy the image to this oVirt cluster.
instance_disksize:
description:
- Size of the instance's disk in GB.
aliases: [ vm_disksize]
instance_cpus:
description:
- The instance's number of CPUs.
default: 1
aliases: [ vmcpus ]
instance_nic:
description:
- The name of the network interface in oVirt/RHEV.
aliases: [ vmnic ]
instance_network:
description:
- The logical network the machine should belong to.
default: rhevm
aliases: [ vmnetwork ]
instance_mem:
description:
- The instance's amount of memory in MB.
aliases: [ vmmem ]
instance_type:
description:
- Define whether the instance is a server, desktop or high_performance.
- I(high_performance) is supported since Ansible 2.5 and oVirt/RHV 4.2.
choices: [ desktop, server, high_performance ]
default: server
aliases: [ vmtype ]
disk_alloc:
description:
- Define whether disk is thin or preallocated.
choices: [ preallocated, thin ]
default: thin
disk_int:
description:
- Interface type of the disk.
choices: [ ide, virtio ]
default: virtio
instance_os:
description:
- Type of Operating System.
aliases: [ vmos ]
instance_cores:
description:
- Define the instance's number of cores.
default: 1
aliases: [ vmcores ]
sdomain:
description:
      - The storage domain on which to create the instance's disk.
region:
description:
- The oVirt/RHEV datacenter where you want to deploy to.
instance_dns:
description:
- Define the instance's Primary DNS server.
aliases: [ dns ]
version_added: "2.1"
instance_domain:
description:
- Define the instance's Domain.
aliases: [ domain ]
version_added: "2.1"
instance_hostname:
description:
- Define the instance's Hostname.
aliases: [ hostname ]
version_added: "2.1"
instance_ip:
description:
- Define the instance's IP.
aliases: [ ip ]
version_added: "2.1"
instance_netmask:
description:
- Define the instance's Netmask.
aliases: [ netmask ]
version_added: "2.1"
instance_rootpw:
description:
- Define the instance's Root password.
aliases: [ rootpw ]
version_added: "2.1"
instance_key:
description:
- Define the instance's Authorized key.
aliases: [ key ]
version_added: "2.1"
state:
description:
- Create, terminate or remove instances.
    choices: [ absent, present, restart, shutdown, started ]
default: present
requirements:
- ovirt-engine-sdk-python
'''
EXAMPLES = '''
- name: Basic example to provision from image
ovirt:
user: admin@internal
url: https://ovirt.example.com
instance_name: ansiblevm04
password: secret
image: centos_64
zone: cluster01
resource_type: template
- name: Full example to create new instance from scratch
ovirt:
instance_name: testansible
resource_type: new
instance_type: server
user: admin@internal
password: secret
url: https://ovirt.example.com
instance_disksize: 10
zone: cluster01
region: datacenter1
instance_cpus: 1
instance_nic: nic1
instance_network: rhevm
instance_mem: 1000
disk_alloc: thin
sdomain: FIBER01
instance_cores: 1
instance_os: rhel_6x64
disk_int: virtio
- name: Stopping an existing instance
ovirt:
instance_name: testansible
    state: shutdown
user: admin@internal
password: secret
url: https://ovirt.example.com
- name: Start an existing instance
ovirt:
instance_name: testansible
state: started
user: admin@internal
password: secret
url: https://ovirt.example.com
- name: Start an instance with cloud init information
ovirt:
instance_name: testansible
state: started
user: admin@internal
password: secret
url: https://ovirt.example.com
hostname: testansible
domain: ansible.local
ip: 192.0.2.100
netmask: 255.255.255.0
gateway: 192.0.2.1
rootpw: bigsecret
'''
import time
try:
from ovirtsdk.api import API
from ovirtsdk.xml import params
HAS_OVIRTSDK = True
except ImportError:
HAS_OVIRTSDK = False
from ansible.module_utils.basic import AnsibleModule
# ------------------------------------------------------------------- #
# create connection with API
#
def conn(url, user, password):
api = API(url=url, username=user, password=password, insecure=True)
try:
value = api.test()
except Exception:
raise Exception("error connecting to the oVirt API")
return api
# ------------------------------------------------------------------- #
# Create VM from scratch
def create_vm(conn, vmtype, vmname, zone, vmdisk_size, vmcpus, vmnic, vmnetwork, vmmem, vmdisk_alloc, sdomain, vmcores, vmos, vmdisk_int):
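    # 'thin' maps to a sparse COW disk image, 'preallocated' to a fully
    # allocated raw image; the two branches below differ only in those
    # disk settings.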
if vmdisk_alloc == 'thin':
# define VM params
vmparams = params.VM(name=vmname, cluster=conn.clusters.get(name=zone), os=params.OperatingSystem(type_=vmos),
template=conn.templates.get(name="Blank"), memory=1024 * 1024 * int(vmmem),
cpu=params.CPU(topology=params.CpuTopology(cores=int(vmcores))), type_=vmtype)
# define disk params
vmdisk = params.Disk(size=1024 * 1024 * 1024 * int(vmdisk_size), wipe_after_delete=True, sparse=True, interface=vmdisk_int, type_="System",
format='cow',
storage_domains=params.StorageDomains(storage_domain=[conn.storagedomains.get(name=sdomain)]))
# define network parameters
network_net = params.Network(name=vmnetwork)
        nic_net1 = params.NIC(name=vmnic, network=network_net, interface='virtio')
elif vmdisk_alloc == 'preallocated':
# define VM params
vmparams = params.VM(name=vmname, cluster=conn.clusters.get(name=zone), os=params.OperatingSystem(type_=vmos),
template=conn.templates.get(name="Blank"), memory=1024 * 1024 * int(vmmem),
cpu=params.CPU(topology=params.CpuTopology(cores=int(vmcores))), type_=vmtype)
# define disk params
vmdisk = params.Disk(size=1024 * 1024 * 1024 * int(vmdisk_size), wipe_after_delete=True, sparse=False, interface=vmdisk_int, type_="System",
format='raw', storage_domains=params.StorageDomains(storage_domain=[conn.storagedomains.get(name=sdomain)]))
# define network parameters
network_net = params.Network(name=vmnetwork)
nic_net1 = params.NIC(name=vmnic, network=network_net, interface='virtio')
try:
conn.vms.add(vmparams)
except Exception:
raise Exception("Error creating VM with specified parameters")
vm = conn.vms.get(name=vmname)
try:
vm.disks.add(vmdisk)
except Exception:
raise Exception("Error attaching disk")
try:
vm.nics.add(nic_net1)
except Exception:
raise Exception("Error adding nic")
# create an instance from a template
def create_vm_template(conn, vmname, image, zone):
vmparams = params.VM(name=vmname, cluster=conn.clusters.get(name=zone), template=conn.templates.get(name=image), disks=params.Disks(clone=True))
try:
conn.vms.add(vmparams)
except Exception:
raise Exception('error adding template %s' % image)
# start instance
def vm_start(conn, vmname, hostname=None, ip=None, netmask=None, gateway=None,
domain=None, dns=None, rootpw=None, key=None):
vm = conn.vms.get(name=vmname)
use_cloud_init = False
nics = None
nic = None
if hostname or ip or netmask or gateway or domain or dns or rootpw or key:
use_cloud_init = True
if ip and netmask and gateway:
ipinfo = params.IP(address=ip, netmask=netmask, gateway=gateway)
nic = params.GuestNicConfiguration(name='eth0', boot_protocol='STATIC', ip=ipinfo, on_boot=True)
            nics = params.GuestNicsConfiguration(nic_configuration=[nic])
initialization = params.Initialization(regenerate_ssh_keys=True, host_name=hostname, domain=domain, user_name='root',
root_password=rootpw, nic_configurations=nics, dns_servers=dns,
authorized_ssh_keys=key)
action = params.Action(use_cloud_init=use_cloud_init, vm=params.VM(initialization=initialization))
vm.start(action=action)
# Stop instance
def vm_stop(conn, vmname):
vm = conn.vms.get(name=vmname)
vm.stop()
# restart instance
def vm_restart(conn, vmname):
vm = conn.vms.get(name=vmname)
vm.stop()
while conn.vms.get(vmname).get_status().get_state() != 'down':
time.sleep(5)
vm.start()
# remove an instance
def vm_remove(conn, vmname):
vm = conn.vms.get(name=vmname)
vm.delete()
# ------------------------------------------------------------------- #
# VM statuses
#
# Get the VMs status
def vm_status(conn, vmname):
status = conn.vms.get(name=vmname).status.state
return status
# Get VM object and return its name if the object exists
def get_vm(conn, vmname):
vm = conn.vms.get(name=vmname)
if vm is None:
name = "empty"
else:
name = vm.get_name()
return name
# ------------------------------------------------------------------- #
# Hypervisor operations
#
# not available yet
# ------------------------------------------------------------------- #
# Main
def main():
module = AnsibleModule(
argument_spec=dict(
state=dict(type='str', default='present', choices=['absent', 'present', 'restart', 'shutdown', 'started']),
user=dict(type='str', required=True),
url=dict(type='str', required=True),
instance_name=dict(type='str', required=True, aliases=['vmname']),
password=dict(type='str', required=True, no_log=True),
image=dict(type='str'),
resource_type=dict(type='str', choices=['new', 'template']),
zone=dict(type='str'),
instance_disksize=dict(type='str', aliases=['vm_disksize']),
instance_cpus=dict(type='str', default=1, aliases=['vmcpus']),
instance_nic=dict(type='str', aliases=['vmnic']),
instance_network=dict(type='str', default='rhevm', aliases=['vmnetwork']),
instance_mem=dict(type='str', aliases=['vmmem']),
instance_type=dict(type='str', default='server', aliases=['vmtype'], choices=['desktop', 'server', 'high_performance']),
disk_alloc=dict(type='str', default='thin', choices=['preallocated', 'thin']),
disk_int=dict(type='str', default='virtio', choices=['ide', 'virtio']),
instance_os=dict(type='str', aliases=['vmos']),
instance_cores=dict(type='str', default=1, aliases=['vmcores']),
instance_hostname=dict(type='str', aliases=['hostname']),
instance_ip=dict(type='str', aliases=['ip']),
instance_netmask=dict(type='str', aliases=['netmask']),
instance_gateway=dict(type='str', aliases=['gateway']),
instance_domain=dict(type='str', aliases=['domain']),
instance_dns=dict(type='str', aliases=['dns']),
instance_rootpw=dict(type='str', aliases=['rootpw']),
instance_key=dict(type='str', aliases=['key']),
sdomain=dict(type='str'),
region=dict(type='str'),
),
)
if not HAS_OVIRTSDK:
module.fail_json(msg='ovirtsdk required for this module')
state = module.params['state']
user = module.params['user']
url = module.params['url']
vmname = module.params['instance_name']
password = module.params['password']
image = module.params['image'] # name of the image to deploy
resource_type = module.params['resource_type'] # template or from scratch
zone = module.params['zone'] # oVirt cluster
vmdisk_size = module.params['instance_disksize'] # disksize
vmcpus = module.params['instance_cpus'] # number of cpu
vmnic = module.params['instance_nic'] # network interface
vmnetwork = module.params['instance_network'] # logical network
vmmem = module.params['instance_mem'] # mem size
vmdisk_alloc = module.params['disk_alloc'] # thin, preallocated
vmdisk_int = module.params['disk_int'] # disk interface virtio or ide
vmos = module.params['instance_os'] # Operating System
vmtype = module.params['instance_type'] # server, desktop or high_performance
vmcores = module.params['instance_cores'] # number of cores
sdomain = module.params['sdomain'] # storage domain to store disk on
region = module.params['region'] # oVirt Datacenter
hostname = module.params['instance_hostname']
ip = module.params['instance_ip']
netmask = module.params['instance_netmask']
gateway = module.params['instance_gateway']
domain = module.params['instance_domain']
dns = module.params['instance_dns']
rootpw = module.params['instance_rootpw']
key = module.params['instance_key']
# initialize connection
try:
c = conn(url + "/api", user, password)
except Exception as e:
module.fail_json(msg='%s' % e)
if state == 'present':
if get_vm(c, vmname) == "empty":
if resource_type == 'template':
try:
create_vm_template(c, vmname, image, zone)
except Exception as e:
module.fail_json(msg='%s' % e)
module.exit_json(changed=True, msg="deployed VM %s from template %s" % (vmname, image))
elif resource_type == 'new':
# FIXME: refactor, use keyword args.
try:
create_vm(c, vmtype, vmname, zone, vmdisk_size, vmcpus, vmnic, vmnetwork, vmmem, vmdisk_alloc, sdomain, vmcores, vmos, vmdisk_int)
except Exception as e:
module.fail_json(msg='%s' % e)
module.exit_json(changed=True, msg="deployed VM %s from scratch" % vmname)
else:
module.exit_json(changed=False, msg="You did not specify a resource type")
else:
module.exit_json(changed=False, msg="VM %s already exists" % vmname)
if state == 'started':
if vm_status(c, vmname) == 'up':
module.exit_json(changed=False, msg="VM %s is already running" % vmname)
else:
# vm_start(c, vmname)
vm_start(c, vmname, hostname, ip, netmask, gateway, domain, dns, rootpw, key)
module.exit_json(changed=True, msg="VM %s started" % vmname)
if state == 'shutdown':
if vm_status(c, vmname) == 'down':
module.exit_json(changed=False, msg="VM %s is already shutdown" % vmname)
else:
vm_stop(c, vmname)
module.exit_json(changed=True, msg="VM %s is shutting down" % vmname)
if state == 'restart':
if vm_status(c, vmname) == 'up':
vm_restart(c, vmname)
module.exit_json(changed=True, msg="VM %s is restarted" % vmname)
else:
module.exit_json(changed=False, msg="VM %s is not running" % vmname)
if state == 'absent':
if get_vm(c, vmname) == "empty":
module.exit_json(changed=False, msg="VM %s does not exist" % vmname)
else:
vm_remove(c, vmname)
module.exit_json(changed=True, msg="VM %s removed" % vmname)
if __name__ == '__main__':
main()
|
brandondrew/bigcouch
|
refs/heads/master
|
couchjs/scons/scons-local-2.0.1/SCons/Platform/posix.py
|
61
|
"""SCons.Platform.posix
Platform-specific initialization for POSIX (Linux, UNIX, etc.) systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Platform/posix.py 5134 2010/08/16 23:02:40 bdeegan"
import errno
import os
import os.path
import subprocess
import sys
import select
import SCons.Util
from SCons.Platform import TempFileMunge
exitvalmap = {
2 : 127,
13 : 126,
}
def escape(arg):
"escape shell special characters"
slash = '\\'
special = '"$()'
arg = arg.replace(slash, slash+slash)
for c in special:
arg = arg.replace(c, slash+c)
return '"' + arg + '"'
def exec_system(l, env):
stat = os.system(' '.join(l))
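    # os.system() returns a raw 16-bit wait status: the low byte holds the
    # killing signal (if any), the high byte the exit code.  Report
    # 0x80 | status when a signal ended the child (the shell's 128+N
    # convention), otherwise shift the real exit code down.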
if stat & 0xff:
return stat | 0x80
return stat >> 8
def exec_spawnvpe(l, env):
stat = os.spawnvpe(os.P_WAIT, l[0], l, env)
# os.spawnvpe() returns the actual exit code, not the encoding
# returned by os.waitpid() or os.system().
return stat
def exec_fork(l, env):
pid = os.fork()
if not pid:
# Child process.
exitval = 127
try:
os.execvpe(l[0], l, env)
except OSError, e:
exitval = exitvalmap.get(e[0], e[0])
sys.stderr.write("scons: %s: %s\n" % (l[0], e[1]))
os._exit(exitval)
else:
# Parent process.
pid, stat = os.waitpid(pid, 0)
if stat & 0xff:
return stat | 0x80
return stat >> 8
def _get_env_command(sh, escape, cmd, args, env):
s = ' '.join(args)
if env:
l = ['env', '-'] + \
[escape(t[0])+'='+escape(t[1]) for t in env.items()] + \
[sh, '-c', escape(s)]
s = ' '.join(l)
return s
def env_spawn(sh, escape, cmd, args, env):
return exec_system([_get_env_command( sh, escape, cmd, args, env)], env)
def spawnvpe_spawn(sh, escape, cmd, args, env):
return exec_spawnvpe([sh, '-c', ' '.join(args)], env)
def fork_spawn(sh, escape, cmd, args, env):
return exec_fork([sh, '-c', ' '.join(args)], env)
def process_cmd_output(cmd_stdout, cmd_stderr, stdout, stderr):
stdout_eof = stderr_eof = 0
while not (stdout_eof and stderr_eof):
try:
(i,o,e) = select.select([cmd_stdout, cmd_stderr], [], [])
if cmd_stdout in i:
str = cmd_stdout.read()
if len(str) == 0:
stdout_eof = 1
elif stdout is not None:
stdout.write(str)
if cmd_stderr in i:
str = cmd_stderr.read()
if len(str) == 0:
#sys.__stderr__.write( "stderr_eof=1\n" )
stderr_eof = 1
else:
#sys.__stderr__.write( "str(stderr) = %s\n" % str )
stderr.write(str)
except select.error, (_errno, _strerror):
if _errno != errno.EINTR:
raise
def exec_popen3(l, env, stdout, stderr):
proc = subprocess.Popen(' '.join(l),
stdout=stdout,
stderr=stderr,
shell=True)
stat = proc.wait()
if stat & 0xff:
return stat | 0x80
return stat >> 8
def exec_piped_fork(l, env, stdout, stderr):
# spawn using fork / exec and providing a pipe for the command's
# stdout / stderr stream
if stdout != stderr:
(rFdOut, wFdOut) = os.pipe()
(rFdErr, wFdErr) = os.pipe()
else:
(rFdOut, wFdOut) = os.pipe()
rFdErr = rFdOut
wFdErr = wFdOut
# do the fork
pid = os.fork()
if not pid:
# Child process
os.close( rFdOut )
if rFdOut != rFdErr:
os.close( rFdErr )
os.dup2( wFdOut, 1 ) # is there some symbolic way to do that ?
os.dup2( wFdErr, 2 )
os.close( wFdOut )
if stdout != stderr:
os.close( wFdErr )
exitval = 127
try:
os.execvpe(l[0], l, env)
except OSError, e:
exitval = exitvalmap.get(e[0], e[0])
stderr.write("scons: %s: %s\n" % (l[0], e[1]))
os._exit(exitval)
else:
# Parent process
pid, stat = os.waitpid(pid, 0)
os.close( wFdOut )
if stdout != stderr:
os.close( wFdErr )
childOut = os.fdopen( rFdOut )
if stdout != stderr:
childErr = os.fdopen( rFdErr )
else:
childErr = childOut
process_cmd_output(childOut, childErr, stdout, stderr)
os.close( rFdOut )
if stdout != stderr:
os.close( rFdErr )
if stat & 0xff:
return stat | 0x80
return stat >> 8
def piped_env_spawn(sh, escape, cmd, args, env, stdout, stderr):
# spawn using Popen3 combined with the env command
# the command name and the command's stdout is written to stdout
# the command's stderr is written to stderr
return exec_popen3([_get_env_command(sh, escape, cmd, args, env)],
env, stdout, stderr)
def piped_fork_spawn(sh, escape, cmd, args, env, stdout, stderr):
# spawn using fork / exec and providing a pipe for the command's
# stdout / stderr stream
return exec_piped_fork([sh, '-c', ' '.join(args)],
env, stdout, stderr)
def generate(env):
# If os.spawnvpe() exists, we use it to spawn commands. Otherwise
# if the env utility exists, we use os.system() to spawn commands,
# finally we fall back on os.fork()/os.exec().
#
    # os.spawnvpe() is preferred because it is the most efficient.  But
    # for Python versions without it, os.system() is preferred because it
# is claimed that it works better with threads (i.e. -j) and is more
# efficient than forking Python.
#
# NB: Other people on the scons-users mailing list have claimed that
# os.fork()/os.exec() works better than os.system(). There may just
# not be a default that works best for all users.
if 'spawnvpe' in os.__dict__:
spawn = spawnvpe_spawn
elif env.Detect('env'):
spawn = env_spawn
else:
spawn = fork_spawn
if env.Detect('env'):
pspawn = piped_env_spawn
else:
pspawn = piped_fork_spawn
if 'ENV' not in env:
env['ENV'] = {}
env['ENV']['PATH'] = '/usr/local/bin:/opt/bin:/bin:/usr/bin'
env['OBJPREFIX'] = ''
env['OBJSUFFIX'] = '.o'
env['SHOBJPREFIX'] = '$OBJPREFIX'
env['SHOBJSUFFIX'] = '$OBJSUFFIX'
env['PROGPREFIX'] = ''
env['PROGSUFFIX'] = ''
env['LIBPREFIX'] = 'lib'
env['LIBSUFFIX'] = '.a'
env['SHLIBPREFIX'] = '$LIBPREFIX'
env['SHLIBSUFFIX'] = '.so'
env['LIBPREFIXES'] = [ '$LIBPREFIX' ]
env['LIBSUFFIXES'] = [ '$LIBSUFFIX', '$SHLIBSUFFIX' ]
env['PSPAWN'] = pspawn
env['SPAWN'] = spawn
env['SHELL'] = 'sh'
env['ESCAPE'] = escape
env['TEMPFILE'] = TempFileMunge
env['TEMPFILEPREFIX'] = '@'
    #Based on LINUX: ARG_MAX=131072 - 3000 for environment expansion
    #Note: specific platforms might raise or lower this value
env['MAXLINELENGTH'] = 128072
# This platform supports RPATH specifications.
env['__RPATH'] = '$_RPATH'
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
tanglei528/nova
|
refs/heads/master
|
nova/virt/xenapi/host.py
|
2
|
# Copyright (c) 2012 Citrix Systems, Inc.
# Copyright 2010 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Management class for host-related functions (start, reboot, etc).
"""
import re
from nova.compute import task_states
from nova.compute import vm_states
from nova import context
from nova import exception
from nova import objects
from nova.objects import instance as instance_obj
from nova.objects import service as service_obj
from nova.openstack.common.gettextutils import _
from nova.openstack.common import jsonutils
from nova.openstack.common import log as logging
from nova.pci import pci_whitelist
from nova.virt.xenapi import pool_states
from nova.virt.xenapi import vm_utils
LOG = logging.getLogger(__name__)
class Host(object):
"""Implements host related operations."""
def __init__(self, session, virtapi):
self._session = session
self._virtapi = virtapi
def host_power_action(self, _host, action):
"""Reboots or shuts down the host."""
args = {"action": jsonutils.dumps(action)}
methods = {"reboot": "host_reboot", "shutdown": "host_shutdown"}
response = call_xenhost(self._session, methods[action], args)
return response.get("power_action", response)
def host_maintenance_mode(self, host, mode):
"""Start/Stop host maintenance window. On start, it triggers
guest VMs evacuation.
"""
if not mode:
return 'off_maintenance'
host_list = [host_ref for host_ref in
self._session.host.get_all()
if host_ref != self._session.host_ref]
migrations_counter = vm_counter = 0
ctxt = context.get_admin_context()
for vm_ref, vm_rec in vm_utils.list_vms(self._session):
for host_ref in host_list:
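                # Try each candidate host in turn; the break below stops at
                # the first successful live migration of this VM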
try:
# Ensure only guest instances are migrated
uuid = vm_rec['other_config'].get('nova_uuid')
if not uuid:
name = vm_rec['name_label']
uuid = _uuid_find(ctxt, host, name)
if not uuid:
LOG.info(_('Instance %(name)s running on %(host)s'
' could not be found in the database:'
                                       ' assuming it is a worker VM and'
                                       ' skipping migration to a new host'),
{'name': name, 'host': host})
continue
instance = instance_obj.Instance.get_by_uuid(ctxt, uuid)
vm_counter = vm_counter + 1
aggregate = objects.AggregateList.get_by_host(
ctxt, host, key=pool_states.POOL_FLAG)
if not aggregate:
                        msg = _('Aggregate for host %(host)s could not be'
                                ' found.') % dict(host=host)
raise exception.NotFound(msg)
dest = _host_find(ctxt, self._session, aggregate[0],
host_ref)
instance.host = dest
instance.task_state = task_states.MIGRATING
instance.save()
self._session.VM.pool_migrate(vm_ref, host_ref,
{"live": "true"})
migrations_counter = migrations_counter + 1
instance.vm_state = vm_states.ACTIVE
instance.save()
break
except self._session.XenAPI.Failure:
LOG.exception(_('Unable to migrate VM %(vm_ref)s '
'from %(host)s'),
{'vm_ref': vm_ref, 'host': host})
instance.host = host
instance.vm_state = vm_states.ACTIVE
instance.save()
if vm_counter == migrations_counter:
return 'on_maintenance'
else:
raise exception.NoValidHost(reason='Unable to find suitable '
'host for VMs evacuation')
def set_host_enabled(self, host, enabled):
"""Sets the specified host's ability to accept new instances."""
# Since capabilities are gone, use service table to disable a node
# in scheduler
cntxt = context.get_admin_context()
service = service_obj.Service.get_by_args(cntxt, host, 'nova-compute')
service.disabled = not enabled
service.disabled_reason = 'set by xenapi host_state'
service.save()
args = {"enabled": jsonutils.dumps(enabled)}
response = call_xenhost(self._session, "set_host_enabled", args)
return response.get("status", response)
def get_host_uptime(self, _host):
"""Returns the result of calling "uptime" on the target host."""
response = call_xenhost(self._session, "host_uptime", {})
return response.get("uptime", response)
class HostState(object):
"""Manages information about the XenServer host this compute
node is running on.
"""
def __init__(self, session):
super(HostState, self).__init__()
self._session = session
self._stats = {}
self._pci_device_filter = pci_whitelist.get_pci_devices_filter()
self.update_status()
def _get_passthrough_devices(self):
"""Get a list pci devices that are available for pci passthtough.
We use a plugin to get the output of the lspci command runs on dom0.
From this list we will extract pci devices that are using the pciback
kernel driver. Then we compare this list to the pci whitelist to get
a new list of pci devices that can be used for pci passthrough.
:returns: a list of pci devices available for pci passthrough.
"""
def _compile_hex(pattern):
"""Return a compiled regular expression pattern into which we have
replaced occurrences of hex by [\da-fA-F].
"""
return re.compile(pattern.replace("hex", r"[\da-fA-F]"))
def _parse_pci_device_string(dev_string):
"""Exctract information from the device string about the slot, the
vendor and the product ID. The string is as follow:
"Slot:\tBDF\nClass:\txxxx\nVendor:\txxxx\nDevice:\txxxx\n..."
Return a dictionary with informations about the device.
"""
slot_regex = _compile_hex(r"Slot:\t"
r"((?:hex{4}:)?" # Domain: (optional)
r"hex{2}:" # Bus:
r"hex{2}\." # Device.
r"hex{1})") # Function
vendor_regex = _compile_hex(r"\nVendor:\t(hex+)")
product_regex = _compile_hex(r"\nDevice:\t(hex+)")
slot_id = slot_regex.findall(dev_string)
vendor_id = vendor_regex.findall(dev_string)
product_id = product_regex.findall(dev_string)
if not slot_id or not vendor_id or not product_id:
raise exception.NovaException(
_("Failed to parse information about"
" a pci device for passthrough"))
type_pci = self._session.call_plugin_serialized(
'xenhost', 'get_pci_type', slot_id[0])
return {'label': '_'.join(['label',
vendor_id[0],
product_id[0]]),
'vendor_id': vendor_id[0],
'product_id': product_id[0],
'address': slot_id[0],
'dev_id': '_'.join(['pci', slot_id[0]]),
'dev_type': type_pci,
'status': 'available'}
# Devices are separated by a blank line. That is why we
# use "\n\n" as separator.
lspci_out = self._session.call_plugin_serialized(
'xenhost', 'get_pci_device_details')
pci_list = lspci_out.split("\n\n")
        # For each device in the list, check whether it uses the pciback
        # kernel driver; if it does, get its information and add it to the
        # list of passthrough_devices. Ignore it if the driver is not
        # pciback.
passthrough_devices = []
for dev_string_info in pci_list:
if "Driver:\tpciback" in dev_string_info:
new_dev = _parse_pci_device_string(dev_string_info)
if self._pci_device_filter.device_assignable(new_dev):
passthrough_devices.append(new_dev)
return passthrough_devices
def get_host_stats(self, refresh=False):
"""Return the current state of the host. If 'refresh' is
True, run the update first.
"""
if refresh or not self._stats:
self.update_status()
return self._stats
def update_status(self):
"""Since under Xenserver, a compute node runs on a given host,
we can get host status information using xenapi.
"""
LOG.debug(_("Updating host stats"))
data = call_xenhost(self._session, "host_data", {})
if data:
sr_ref = vm_utils.scan_default_sr(self._session)
sr_rec = self._session.SR.get_record(sr_ref)
total = int(sr_rec["physical_size"])
used = int(sr_rec["physical_utilisation"])
data["disk_total"] = total
data["disk_used"] = used
data["disk_allocated"] = int(sr_rec["virtual_allocation"])
data["disk_available"] = total - used
data["supported_instances"] = to_supported_instances(
data.get("host_capabilities")
)
host_memory = data.get('host_memory', None)
if host_memory:
data["host_memory_total"] = host_memory.get('total', 0)
data["host_memory_overhead"] = host_memory.get('overhead', 0)
data["host_memory_free"] = host_memory.get('free', 0)
data["host_memory_free_computed"] = host_memory.get(
'free-computed', 0)
del data['host_memory']
if (data['host_hostname'] !=
self._stats.get('host_hostname', data['host_hostname'])):
LOG.error(_('Hostname has changed from %(old)s '
'to %(new)s. A restart is required to take effect.'
) % {'old': self._stats['host_hostname'],
'new': data['host_hostname']})
data['host_hostname'] = self._stats['host_hostname']
data['hypervisor_hostname'] = data['host_hostname']
vcpus_used = 0
for vm_ref, vm_rec in vm_utils.list_vms(self._session):
vcpus_used = vcpus_used + int(vm_rec['VCPUs_max'])
data['vcpus_used'] = vcpus_used
data['pci_passthrough_devices'] = self._get_passthrough_devices()
self._stats = data
def to_supported_instances(host_capabilities):
if not host_capabilities:
return []
result = []
for capability in host_capabilities:
try:
ostype, _version, arch = capability.split("-")
result.append((arch, 'xapi', ostype))
except ValueError:
LOG.warning(
_("Failed to extract instance support from %s"), capability)
return result
def call_xenhost(session, method, arg_dict):
"""There will be several methods that will need this general
handling for interacting with the xenhost plugin, so this abstracts
out that behavior.
"""
# Create a task ID as something that won't match any instance ID
try:
result = session.call_plugin('xenhost', method, args=arg_dict)
if not result:
return ''
return jsonutils.loads(result)
except ValueError:
LOG.exception(_("Unable to get updated status"))
return None
except session.XenAPI.Failure as e:
LOG.error(_("The call to %(method)s returned "
"an error: %(e)s."), {'method': method, 'e': e})
return e.details[1]
def _uuid_find(context, host, name_label):
"""Return instance uuid by name_label."""
for i in instance_obj.InstanceList.get_by_host(context, host):
if i.name == name_label:
return i.uuid
return None
def _host_find(context, session, src_aggregate, host_ref):
"""Return the host from the xenapi host reference.
:param src_aggregate: the aggregate that the compute host being put in
maintenance (source of VMs) belongs to
:param host_ref: the hypervisor host reference (destination of VMs)
:return: the compute host that manages host_ref
"""
# NOTE: this would be a lot simpler if nova-compute stored
# CONF.host in the XenServer host's other-config map.
# TODO(armando-migliaccio): improve according the note above
uuid = session.host.get_uuid(host_ref)
for compute_host, host_uuid in src_aggregate.metadetails.iteritems():
if host_uuid == uuid:
return compute_host
raise exception.NoValidHost(reason='Host %(host_uuid)s could not be found '
'from aggregate metadata: %(metadata)s.' %
{'host_uuid': uuid,
'metadata': src_aggregate.metadetails})
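# Illustrative sketch, not part of the original module: to_supported_instances()
# reorders each "<ostype>-<version>-<arch>" capability string reported by the
# host into an (arch, 'xapi', ostype) tuple, skipping malformed entries.
#   to_supported_instances(['hvm-3.0-x86_64', 'pv-3.0-x86_32'])
#   -> [('x86_64', 'xapi', 'hvm'), ('x86_32', 'xapi', 'pv')]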
|
jacobian/channels-example
|
refs/heads/master
|
chat/tests/test_consumers.py
|
9
|
import json
import pytest
from asgiref.inmemory import ChannelLayer as InMemoryChannelLayer
from channels import Group
from channels.handler import AsgiRequest
from channels.message import Message
from django.contrib.sessions.backends.file import SessionStore as FileSessionStore
from chat.consumers import ws_connect, ws_receive, ws_disconnect
from chat.models import Room
@pytest.fixture
def message_factory(settings, tmpdir):
def factory(name, **content):
channel_layer = InMemoryChannelLayer()
message = Message(content, name, channel_layer)
settings.SESSION_FILE_PATH = str(tmpdir)
message.channel_session = FileSessionStore()
return message
return factory
@pytest.mark.django_db
def test_ws_connect(message_factory):
r = Room.objects.create(label='room1')
message = message_factory('test',
path = b'/chat/room1',
client = ['10.0.0.1', 12345],
reply_channel = u'test-reply',
)
ws_connect(message)
assert 'test-reply' in message.channel_layer._groups['chat-room1']
assert message.channel_session['room'] == 'room1'
@pytest.mark.django_db
def test_ws_receive(message_factory):
r = Room.objects.create(label='room1')
message = message_factory('test',
text = json.dumps({'handle': 'H', 'message': 'M'})
)
# Normally this would happen when the person joins the room, but mock
# it up manually here.
message.channel_session['room'] = 'room1'
Group('chat-room1', channel_layer=message.channel_layer).add(u'test-reply')
ws_receive(message)
_, reply = message.channel_layer.receive_many([u'test-reply'])
reply = json.loads(reply['text'])
assert reply['message'] == 'M'
assert reply['handle'] == 'H'
@pytest.mark.django_db
def test_ws_disconnect(message_factory):
r = Room.objects.create(label='room1')
message = message_factory('test', reply_channel=u'test-reply1')
Group('chat-room1', channel_layer=message.channel_layer).add(u'test-reply1')
Group('chat-room1', channel_layer=message.channel_layer).add(u'test-reply2')
message.channel_session['room'] = 'room1'
ws_disconnect(message)
assert 'test-reply1' not in message.channel_layer._groups['chat-room1']
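# Illustrative note, not part of the original tests: because message_factory
# wires each Message to a fresh asgiref in-memory ChannelLayer, the assertions
# above can inspect internal state such as channel_layer._groups directly
# instead of going through a real channel backend.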
|
Charlotte-Morgan/inasafe
|
refs/heads/develop
|
safe_extras/pydispatch/errors.py
|
10
|
"""Error types for dispatcher mechanism
"""
class DispatcherError(Exception):
"""Base class for all Dispatcher errors"""
class DispatcherKeyError(KeyError, DispatcherError):
"""Error raised when unknown (sender,signal) set specified"""
class DispatcherTypeError(TypeError, DispatcherError):
"""Error raised when inappropriate signal-type specified (None)"""
|
roopali8/keystone
|
refs/heads/master
|
keystone/tests/unit/test_v3_catalog.py
|
1
|
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import uuid
from six.moves import http_client
from testtools import matchers
from keystone import catalog
from keystone.tests import unit
from keystone.tests.unit.ksfixtures import database
from keystone.tests.unit import test_v3
class CatalogTestCase(test_v3.RestfulTestCase):
"""Test service & endpoint CRUD."""
# region crud tests
def test_create_region_with_id(self):
"""Call ``PUT /regions/{region_id}`` w/o an ID in the request body."""
ref = self.new_region_ref()
region_id = ref.pop('id')
r = self.put(
'/regions/%s' % region_id,
body={'region': ref},
expected_status=http_client.CREATED)
self.assertValidRegionResponse(r, ref)
# Double-check that the region ID was kept as-is and not
# populated with a UUID, as is the case with POST /v3/regions
self.assertEqual(region_id, r.json['region']['id'])
def test_create_region_with_matching_ids(self):
"""Call ``PUT /regions/{region_id}`` with an ID in the request body."""
ref = self.new_region_ref()
region_id = ref['id']
r = self.put(
'/regions/%s' % region_id,
body={'region': ref},
expected_status=http_client.CREATED)
self.assertValidRegionResponse(r, ref)
# Double-check that the region ID was kept as-is and not
# populated with a UUID, as is the case with POST /v3/regions
self.assertEqual(region_id, r.json['region']['id'])
def test_create_region_with_duplicate_id(self):
"""Call ``PUT /regions/{region_id}``."""
ref = dict(description="my region")
self.put(
'/regions/myregion',
body={'region': ref}, expected_status=http_client.CREATED)
# Create region again with duplicate id
self.put(
'/regions/myregion',
body={'region': ref}, expected_status=409)
def test_create_region(self):
"""Call ``POST /regions`` with an ID in the request body."""
# the ref will have an ID defined on it
ref = self.new_region_ref()
r = self.post(
'/regions',
body={'region': ref})
self.assertValidRegionResponse(r, ref)
# we should be able to get the region, having defined the ID ourselves
r = self.get(
'/regions/%(region_id)s' % {
'region_id': ref['id']})
self.assertValidRegionResponse(r, ref)
def test_create_region_with_empty_id(self):
"""Call ``POST /regions`` with an empty ID in the request body."""
ref = self.new_region_ref()
ref['id'] = ''
r = self.post('/regions', body={'region': ref})
self.assertValidRegionResponse(r, ref)
self.assertNotEmpty(r.result['region'].get('id'))
def test_create_region_without_id(self):
"""Call ``POST /regions`` without an ID in the request body."""
ref = self.new_region_ref()
# instead of defining the ID ourselves...
del ref['id']
# let the service define the ID
r = self.post('/regions', body={'region': ref})
self.assertValidRegionResponse(r, ref)
def test_create_region_without_description(self):
"""Call ``POST /regions`` without description in the request body."""
ref = self.new_region_ref()
del ref['description']
r = self.post('/regions', body={'region': ref})
# Create the description in the reference to compare to since the
# response should now have a description, even though we didn't send
# it with the original reference.
ref['description'] = ''
self.assertValidRegionResponse(r, ref)
def test_create_regions_with_same_description_string(self):
"""Call ``POST /regions`` with same description in the request bodies.
"""
# NOTE(lbragstad): Make sure we can create two regions that have the
# same description.
ref1 = self.new_region_ref()
ref2 = self.new_region_ref()
region_desc = 'Some Region Description'
ref1['description'] = region_desc
ref2['description'] = region_desc
resp1 = self.post('/regions', body={'region': ref1})
self.assertValidRegionResponse(resp1, ref1)
resp2 = self.post('/regions', body={'region': ref2})
self.assertValidRegionResponse(resp2, ref2)
def test_create_regions_without_descriptions(self):
"""Call ``POST /regions`` with no description in the request bodies.
"""
# NOTE(lbragstad): Make sure we can create two regions that have
# no description in the request body. The description should be
# populated by Catalog Manager.
ref1 = self.new_region_ref()
ref2 = self.new_region_ref()
del ref1['description']
ref2['description'] = None
resp1 = self.post('/regions', body={'region': ref1})
resp2 = self.post('/regions', body={'region': ref2})
# Create the descriptions in the references to compare to since the
# responses should now have descriptions, even though we didn't send
# a description with the original references.
ref1['description'] = ''
ref2['description'] = ''
self.assertValidRegionResponse(resp1, ref1)
self.assertValidRegionResponse(resp2, ref2)
def test_create_region_with_conflicting_ids(self):
"""Call ``PUT /regions/{region_id}`` with conflicting region IDs."""
# the region ref is created with an ID
ref = self.new_region_ref()
# but instead of using that ID, make up a new, conflicting one
self.put(
'/regions/%s' % uuid.uuid4().hex,
body={'region': ref},
expected_status=http_client.BAD_REQUEST)
def test_list_regions(self):
"""Call ``GET /regions``."""
r = self.get('/regions')
self.assertValidRegionListResponse(r, ref=self.region)
def _create_region_with_parent_id(self, parent_id=None):
ref = self.new_region_ref()
ref['parent_region_id'] = parent_id
return self.post(
'/regions',
body={'region': ref})
def test_list_regions_filtered_by_parent_region_id(self):
"""Call ``GET /regions?parent_region_id={parent_region_id}``."""
new_region = self._create_region_with_parent_id()
parent_id = new_region.result['region']['id']
new_region = self._create_region_with_parent_id(parent_id)
new_region = self._create_region_with_parent_id(parent_id)
r = self.get('/regions?parent_region_id=%s' % parent_id)
for region in r.result['regions']:
self.assertEqual(parent_id, region['parent_region_id'])
def test_get_region(self):
"""Call ``GET /regions/{region_id}``."""
r = self.get('/regions/%(region_id)s' % {
'region_id': self.region_id})
self.assertValidRegionResponse(r, self.region)
def test_update_region(self):
"""Call ``PATCH /regions/{region_id}``."""
region = self.new_region_ref()
del region['id']
r = self.patch('/regions/%(region_id)s' % {
'region_id': self.region_id},
body={'region': region})
self.assertValidRegionResponse(r, region)
def test_update_region_without_description_keeps_original(self):
"""Call ``PATCH /regions/{region_id}``."""
region_ref = self.new_region_ref()
resp = self.post('/regions', body={'region': region_ref})
region_updates = {
# update with something that's not the description
'parent_region_id': self.region_id,
}
resp = self.patch('/regions/%s' % region_ref['id'],
body={'region': region_updates})
# NOTE(dstanek): Keystone should keep the original description.
self.assertEqual(region_ref['description'],
resp.result['region']['description'])
def test_update_region_with_null_description(self):
"""Call ``PATCH /regions/{region_id}``."""
region = self.new_region_ref()
del region['id']
region['description'] = None
r = self.patch('/regions/%(region_id)s' % {
'region_id': self.region_id},
body={'region': region})
# NOTE(dstanek): Keystone should turn the provided None value into
# an empty string before storing in the backend.
region['description'] = ''
self.assertValidRegionResponse(r, region)
def test_delete_region(self):
"""Call ``DELETE /regions/{region_id}``."""
ref = self.new_region_ref()
r = self.post(
'/regions',
body={'region': ref})
self.assertValidRegionResponse(r, ref)
self.delete('/regions/%(region_id)s' % {
'region_id': ref['id']})
# service crud tests
def test_create_service(self):
"""Call ``POST /services``."""
ref = self.new_service_ref()
r = self.post(
'/services',
body={'service': ref})
self.assertValidServiceResponse(r, ref)
def test_create_service_no_name(self):
"""Call ``POST /services``."""
ref = self.new_service_ref()
del ref['name']
r = self.post(
'/services',
body={'service': ref})
ref['name'] = ''
self.assertValidServiceResponse(r, ref)
def test_create_service_no_enabled(self):
"""Call ``POST /services``."""
ref = self.new_service_ref()
del ref['enabled']
r = self.post(
'/services',
body={'service': ref})
ref['enabled'] = True
self.assertValidServiceResponse(r, ref)
self.assertIs(True, r.result['service']['enabled'])
def test_create_service_enabled_false(self):
"""Call ``POST /services``."""
ref = self.new_service_ref()
ref['enabled'] = False
r = self.post(
'/services',
body={'service': ref})
self.assertValidServiceResponse(r, ref)
self.assertIs(False, r.result['service']['enabled'])
def test_create_service_enabled_true(self):
"""Call ``POST /services``."""
ref = self.new_service_ref()
ref['enabled'] = True
r = self.post(
'/services',
body={'service': ref})
self.assertValidServiceResponse(r, ref)
self.assertIs(True, r.result['service']['enabled'])
def test_create_service_enabled_str_true(self):
"""Call ``POST /services``."""
ref = self.new_service_ref()
ref['enabled'] = 'True'
self.post('/services', body={'service': ref},
expected_status=http_client.BAD_REQUEST)
def test_create_service_enabled_str_false(self):
"""Call ``POST /services``."""
ref = self.new_service_ref()
ref['enabled'] = 'False'
self.post('/services', body={'service': ref},
expected_status=http_client.BAD_REQUEST)
def test_create_service_enabled_str_random(self):
"""Call ``POST /services``."""
ref = self.new_service_ref()
ref['enabled'] = 'puppies'
self.post('/services', body={'service': ref},
expected_status=http_client.BAD_REQUEST)
def test_list_services(self):
"""Call ``GET /services``."""
r = self.get('/services')
self.assertValidServiceListResponse(r, ref=self.service)
def _create_random_service(self):
ref = self.new_service_ref()
ref['enabled'] = True
response = self.post(
'/services',
body={'service': ref})
return response.json['service']
def test_filter_list_services_by_type(self):
"""Call ``GET /services?type=<some type>``."""
target_ref = self._create_random_service()
# create unrelated services
self._create_random_service()
self._create_random_service()
response = self.get('/services?type=' + target_ref['type'])
self.assertValidServiceListResponse(response, ref=target_ref)
filtered_service_list = response.json['services']
self.assertEqual(1, len(filtered_service_list))
filtered_service = filtered_service_list[0]
self.assertEqual(target_ref['type'], filtered_service['type'])
def test_filter_list_services_by_name(self):
"""Call ``GET /services?name=<some name>``."""
target_ref = self._create_random_service()
# create unrelated services
self._create_random_service()
self._create_random_service()
response = self.get('/services?name=' + target_ref['name'])
self.assertValidServiceListResponse(response, ref=target_ref)
filtered_service_list = response.json['services']
self.assertEqual(1, len(filtered_service_list))
filtered_service = filtered_service_list[0]
self.assertEqual(target_ref['name'], filtered_service['name'])
def test_get_service(self):
"""Call ``GET /services/{service_id}``."""
r = self.get('/services/%(service_id)s' % {
'service_id': self.service_id})
self.assertValidServiceResponse(r, self.service)
def test_update_service(self):
"""Call ``PATCH /services/{service_id}``."""
service = self.new_service_ref()
del service['id']
r = self.patch('/services/%(service_id)s' % {
'service_id': self.service_id},
body={'service': service})
self.assertValidServiceResponse(r, service)
def test_delete_service(self):
"""Call ``DELETE /services/{service_id}``."""
self.delete('/services/%(service_id)s' % {
'service_id': self.service_id})
# endpoint crud tests
def test_list_endpoints(self):
"""Call ``GET /endpoints``."""
r = self.get('/endpoints')
self.assertValidEndpointListResponse(r, ref=self.endpoint)
def _create_random_endpoint(self, interface='public',
parent_region_id=None):
region = self._create_region_with_parent_id(
parent_id=parent_region_id)
service = self._create_random_service()
ref = self.new_endpoint_ref(
service_id=service['id'],
interface=interface,
region_id=region.result['region']['id'])
response = self.post(
'/endpoints',
body={'endpoint': ref})
return response.json['endpoint']
def test_list_endpoints_filtered_by_interface(self):
"""Call ``GET /endpoints?interface={interface}``."""
ref = self._create_random_endpoint(interface='internal')
response = self.get('/endpoints?interface=%s' % ref['interface'])
self.assertValidEndpointListResponse(response, ref=ref)
for endpoint in response.json['endpoints']:
self.assertEqual(ref['interface'], endpoint['interface'])
def test_list_endpoints_filtered_by_service_id(self):
"""Call ``GET /endpoints?service_id={service_id}``."""
ref = self._create_random_endpoint()
response = self.get('/endpoints?service_id=%s' % ref['service_id'])
self.assertValidEndpointListResponse(response, ref=ref)
for endpoint in response.json['endpoints']:
self.assertEqual(ref['service_id'], endpoint['service_id'])
def test_list_endpoints_filtered_by_region_id(self):
"""Call ``GET /endpoints?region_id={region_id}``."""
ref = self._create_random_endpoint()
response = self.get('/endpoints?region_id=%s' % ref['region_id'])
self.assertValidEndpointListResponse(response, ref=ref)
for endpoint in response.json['endpoints']:
self.assertEqual(ref['region_id'], endpoint['region_id'])
def test_list_endpoints_filtered_by_parent_region_id(self):
"""Call ``GET /endpoints?region_id={region_id}``.
Ensure passing the parent_region_id as filter returns an
empty list.
"""
parent_region = self._create_region_with_parent_id()
parent_region_id = parent_region.result['region']['id']
self._create_random_endpoint(parent_region_id=parent_region_id)
response = self.get('/endpoints?region_id=%s' % parent_region_id)
self.assertEqual(0, len(response.json['endpoints']))
def test_list_endpoints_with_multiple_filters(self):
"""Call ``GET /endpoints?interface={interface}...``.
Ensure passing different combinations of interface, region_id and
service_id as filters will return the correct result.
"""
# interface and region_id specified
ref = self._create_random_endpoint(interface='internal')
        response = self.get('/endpoints?interface=%s&region_id=%s' %
(ref['interface'], ref['region_id']))
self.assertValidEndpointListResponse(response, ref=ref)
for endpoint in response.json['endpoints']:
self.assertEqual(ref['interface'], endpoint['interface'])
self.assertEqual(ref['region_id'], endpoint['region_id'])
# interface and service_id specified
ref = self._create_random_endpoint(interface='internal')
response = self.get('/endpoints?interface=%s&service_id=%s' %
(ref['interface'], ref['service_id']))
self.assertValidEndpointListResponse(response, ref=ref)
for endpoint in response.json['endpoints']:
self.assertEqual(ref['interface'], endpoint['interface'])
self.assertEqual(ref['service_id'], endpoint['service_id'])
# region_id and service_id specified
ref = self._create_random_endpoint(interface='internal')
response = self.get('/endpoints?region_id=%s&service_id=%s' %
(ref['region_id'], ref['service_id']))
self.assertValidEndpointListResponse(response, ref=ref)
for endpoint in response.json['endpoints']:
self.assertEqual(ref['region_id'], endpoint['region_id'])
self.assertEqual(ref['service_id'], endpoint['service_id'])
# interface, region_id and service_id specified
ref = self._create_random_endpoint(interface='internal')
        response = self.get(('/endpoints?interface=%s&region_id=%s'
'&service_id=%s') %
(ref['interface'], ref['region_id'],
ref['service_id']))
self.assertValidEndpointListResponse(response, ref=ref)
for endpoint in response.json['endpoints']:
self.assertEqual(ref['interface'], endpoint['interface'])
self.assertEqual(ref['region_id'], endpoint['region_id'])
self.assertEqual(ref['service_id'], endpoint['service_id'])
def test_list_endpoints_with_random_filter_values(self):
"""Call ``GET /endpoints?interface={interface}...``.
Ensure passing random values for: interface, region_id and
service_id will return an empty list.
"""
self._create_random_endpoint(interface='internal')
response = self.get('/endpoints?interface=%s' % uuid.uuid4().hex)
self.assertEqual(0, len(response.json['endpoints']))
response = self.get('/endpoints?region_id=%s' % uuid.uuid4().hex)
self.assertEqual(0, len(response.json['endpoints']))
response = self.get('/endpoints?service_id=%s' % uuid.uuid4().hex)
self.assertEqual(0, len(response.json['endpoints']))
def test_create_endpoint_no_enabled(self):
"""Call ``POST /endpoints``."""
ref = self.new_endpoint_ref(service_id=self.service_id)
r = self.post(
'/endpoints',
body={'endpoint': ref})
ref['enabled'] = True
self.assertValidEndpointResponse(r, ref)
def test_create_endpoint_enabled_true(self):
"""Call ``POST /endpoints`` with enabled: true."""
ref = self.new_endpoint_ref(service_id=self.service_id,
enabled=True)
r = self.post(
'/endpoints',
body={'endpoint': ref})
self.assertValidEndpointResponse(r, ref)
def test_create_endpoint_enabled_false(self):
"""Call ``POST /endpoints`` with enabled: false."""
ref = self.new_endpoint_ref(service_id=self.service_id,
enabled=False)
r = self.post(
'/endpoints',
body={'endpoint': ref})
self.assertValidEndpointResponse(r, ref)
def test_create_endpoint_enabled_str_true(self):
"""Call ``POST /endpoints`` with enabled: 'True'."""
ref = self.new_endpoint_ref(service_id=self.service_id,
enabled='True')
self.post(
'/endpoints',
body={'endpoint': ref},
expected_status=http_client.BAD_REQUEST)
def test_create_endpoint_enabled_str_false(self):
"""Call ``POST /endpoints`` with enabled: 'False'."""
ref = self.new_endpoint_ref(service_id=self.service_id,
enabled='False')
self.post(
'/endpoints',
body={'endpoint': ref},
expected_status=http_client.BAD_REQUEST)
def test_create_endpoint_enabled_str_random(self):
"""Call ``POST /endpoints`` with enabled: 'puppies'."""
ref = self.new_endpoint_ref(service_id=self.service_id,
enabled='puppies')
self.post(
'/endpoints',
body={'endpoint': ref},
expected_status=http_client.BAD_REQUEST)
def test_create_endpoint_with_invalid_region_id(self):
"""Call ``POST /endpoints``."""
ref = self.new_endpoint_ref(service_id=self.service_id)
ref["region_id"] = uuid.uuid4().hex
self.post('/endpoints', body={'endpoint': ref},
expected_status=http_client.BAD_REQUEST)
def test_create_endpoint_with_region(self):
"""EndpointV3 creates the region before creating the endpoint, if
endpoint is provided with 'region' and no 'region_id'
"""
ref = self.new_endpoint_ref(service_id=self.service_id)
ref["region"] = uuid.uuid4().hex
ref.pop('region_id')
self.post('/endpoints', body={'endpoint': ref})
# Make sure the region is created
self.get('/regions/%(region_id)s' % {
'region_id': ref["region"]})
def test_create_endpoint_with_no_region(self):
"""EndpointV3 allows to creates the endpoint without region."""
ref = self.new_endpoint_ref(service_id=self.service_id)
ref.pop('region_id')
self.post('/endpoints', body={'endpoint': ref})
def test_create_endpoint_with_empty_url(self):
"""Call ``POST /endpoints``."""
ref = self.new_endpoint_ref(service_id=self.service_id)
ref["url"] = ''
self.post('/endpoints', body={'endpoint': ref},
expected_status=http_client.BAD_REQUEST)
def test_get_endpoint(self):
"""Call ``GET /endpoints/{endpoint_id}``."""
r = self.get(
'/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id})
self.assertValidEndpointResponse(r, self.endpoint)
def test_update_endpoint(self):
"""Call ``PATCH /endpoints/{endpoint_id}``."""
ref = self.new_endpoint_ref(service_id=self.service_id)
del ref['id']
r = self.patch(
'/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id},
body={'endpoint': ref})
ref['enabled'] = True
self.assertValidEndpointResponse(r, ref)
def test_update_endpoint_enabled_true(self):
"""Call ``PATCH /endpoints/{endpoint_id}`` with enabled: True."""
r = self.patch(
'/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id},
body={'endpoint': {'enabled': True}})
self.assertValidEndpointResponse(r, self.endpoint)
def test_update_endpoint_enabled_false(self):
"""Call ``PATCH /endpoints/{endpoint_id}`` with enabled: False."""
r = self.patch(
'/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id},
body={'endpoint': {'enabled': False}})
exp_endpoint = copy.copy(self.endpoint)
exp_endpoint['enabled'] = False
self.assertValidEndpointResponse(r, exp_endpoint)
def test_update_endpoint_enabled_str_true(self):
"""Call ``PATCH /endpoints/{endpoint_id}`` with enabled: 'True'."""
self.patch(
'/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id},
body={'endpoint': {'enabled': 'True'}},
expected_status=http_client.BAD_REQUEST)
def test_update_endpoint_enabled_str_false(self):
"""Call ``PATCH /endpoints/{endpoint_id}`` with enabled: 'False'."""
self.patch(
'/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id},
body={'endpoint': {'enabled': 'False'}},
expected_status=http_client.BAD_REQUEST)
def test_update_endpoint_enabled_str_random(self):
"""Call ``PATCH /endpoints/{endpoint_id}`` with enabled: 'kitties'."""
self.patch(
'/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id},
body={'endpoint': {'enabled': 'kitties'}},
expected_status=http_client.BAD_REQUEST)
def test_delete_endpoint(self):
"""Call ``DELETE /endpoints/{endpoint_id}``."""
self.delete(
'/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id})
def test_create_endpoint_on_v2(self):
# clear the v3 endpoint so we only have endpoints created on v2
self.delete(
'/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id})
# create a v3 endpoint ref, and then tweak it back to a v2-style ref
ref = self.new_endpoint_ref(service_id=self.service['id'])
del ref['id']
del ref['interface']
ref['publicurl'] = ref.pop('url')
ref['internalurl'] = None
ref['region'] = ref['region_id']
del ref['region_id']
        # don't set adminurl to ensure its absence is handled like internalurl
# create the endpoint on v2 (using a v3 token)
r = self.admin_request(
method='POST',
path='/v2.0/endpoints',
token=self.get_scoped_token(),
body={'endpoint': ref})
endpoint_v2 = r.result['endpoint']
# test the endpoint on v3
r = self.get('/endpoints')
endpoints = self.assertValidEndpointListResponse(r)
self.assertEqual(1, len(endpoints))
endpoint_v3 = endpoints.pop()
# these attributes are identical between both APIs
self.assertEqual(ref['region'], endpoint_v3['region_id'])
self.assertEqual(ref['service_id'], endpoint_v3['service_id'])
self.assertEqual(ref['description'], endpoint_v3['description'])
# a v2 endpoint is not quite the same concept as a v3 endpoint, so they
# receive different identifiers
self.assertNotEqual(endpoint_v2['id'], endpoint_v3['id'])
# v2 has a publicurl; v3 has a url + interface type
self.assertEqual(ref['publicurl'], endpoint_v3['url'])
self.assertEqual('public', endpoint_v3['interface'])
# tests for bug 1152632 -- these attributes were being returned by v3
self.assertNotIn('publicurl', endpoint_v3)
self.assertNotIn('adminurl', endpoint_v3)
self.assertNotIn('internalurl', endpoint_v3)
# test for bug 1152635 -- this attribute was being returned by v3
self.assertNotIn('legacy_endpoint_id', endpoint_v3)
self.assertEqual(endpoint_v2['region'], endpoint_v3['region_id'])
def test_deleting_endpoint_with_space_in_url(self):
# create a v3 endpoint ref
ref = self.new_endpoint_ref(service_id=self.service['id'])
# add a space to all urls (intentional "i d" to test bug)
url_with_space = "http://127.0.0.1:8774 /v1.1/\$(tenant_i d)s"
ref['publicurl'] = url_with_space
ref['internalurl'] = url_with_space
ref['adminurl'] = url_with_space
ref['url'] = url_with_space
# add the endpoint to the database
self.catalog_api.create_endpoint(ref['id'], ref)
# delete the endpoint
self.delete('/endpoints/%s' % ref['id'])
# make sure it's deleted (GET should return 404)
self.get('/endpoints/%s' % ref['id'],
expected_status=http_client.NOT_FOUND)
def test_endpoint_create_with_valid_url(self):
"""Create endpoint with valid url should be tested,too."""
# list one valid url is enough, no need to list too much
valid_url = 'http://127.0.0.1:8774/v1.1/$(tenant_id)s'
ref = self.new_endpoint_ref(self.service_id)
ref['url'] = valid_url
self.post('/endpoints', body={'endpoint': ref})
def test_endpoint_create_with_invalid_url(self):
"""Test the invalid cases: substitutions is not exactly right.
"""
invalid_urls = [
# using a substitution that is not whitelisted - KeyError
'http://127.0.0.1:8774/v1.1/$(nonexistent)s',
# invalid formatting - ValueError
'http://127.0.0.1:8774/v1.1/$(tenant_id)',
'http://127.0.0.1:8774/v1.1/$(tenant_id)t',
'http://127.0.0.1:8774/v1.1/$(tenant_id',
# invalid type specifier - TypeError
# admin_url is a string not an int
'http://127.0.0.1:8774/v1.1/$(admin_url)d',
]
ref = self.new_endpoint_ref(self.service_id)
for invalid_url in invalid_urls:
ref['url'] = invalid_url
self.post('/endpoints',
body={'endpoint': ref},
expected_status=http_client.BAD_REQUEST)
class TestCatalogAPISQL(unit.TestCase):
"""Tests for the catalog Manager against the SQL backend.
"""
def setUp(self):
super(TestCatalogAPISQL, self).setUp()
self.useFixture(database.Database())
self.catalog_api = catalog.Manager()
self.service_id = uuid.uuid4().hex
service = {'id': self.service_id, 'name': uuid.uuid4().hex}
self.catalog_api.create_service(self.service_id, service)
endpoint = self.new_endpoint_ref(service_id=self.service_id)
self.catalog_api.create_endpoint(endpoint['id'], endpoint)
def config_overrides(self):
super(TestCatalogAPISQL, self).config_overrides()
self.config_fixture.config(group='catalog', driver='sql')
def new_endpoint_ref(self, service_id):
return {
'id': uuid.uuid4().hex,
'name': uuid.uuid4().hex,
'description': uuid.uuid4().hex,
'interface': uuid.uuid4().hex[:8],
'service_id': service_id,
'url': uuid.uuid4().hex,
'region': uuid.uuid4().hex,
}
def test_get_catalog_ignores_endpoints_with_invalid_urls(self):
user_id = uuid.uuid4().hex
tenant_id = uuid.uuid4().hex
# the only endpoint in the catalog is the one created in setUp
catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
self.assertEqual(1, len(catalog[0]['endpoints']))
# it's also the only endpoint in the backend
self.assertEqual(1, len(self.catalog_api.list_endpoints()))
# create a new, invalid endpoint - malformed type declaration
ref = self.new_endpoint_ref(self.service_id)
ref['url'] = 'http://keystone/%(tenant_id)'
self.catalog_api.create_endpoint(ref['id'], ref)
# create a new, invalid endpoint - nonexistent key
ref = self.new_endpoint_ref(self.service_id)
ref['url'] = 'http://keystone/%(you_wont_find_me)s'
self.catalog_api.create_endpoint(ref['id'], ref)
# verify that the invalid endpoints don't appear in the catalog
catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
self.assertEqual(1, len(catalog[0]['endpoints']))
# all three appear in the backend
self.assertEqual(3, len(self.catalog_api.list_endpoints()))
# create another valid endpoint - tenant_id will be replaced
ref = self.new_endpoint_ref(self.service_id)
ref['url'] = 'http://keystone/%(tenant_id)s'
self.catalog_api.create_endpoint(ref['id'], ref)
# there are two valid endpoints, positive check
catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
self.assertThat(catalog[0]['endpoints'], matchers.HasLength(2))
# If the URL has no 'tenant_id' to substitute, we will skip the
# endpoint which contains this kind of URL, negative check.
catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id=None)
self.assertThat(catalog[0]['endpoints'], matchers.HasLength(1))
def test_get_catalog_always_returns_service_name(self):
user_id = uuid.uuid4().hex
tenant_id = uuid.uuid4().hex
# create a service, with a name
named_svc = {
'id': uuid.uuid4().hex,
'type': uuid.uuid4().hex,
'name': uuid.uuid4().hex,
}
self.catalog_api.create_service(named_svc['id'], named_svc)
endpoint = self.new_endpoint_ref(service_id=named_svc['id'])
self.catalog_api.create_endpoint(endpoint['id'], endpoint)
# create a service, with no name
unnamed_svc = {
'id': uuid.uuid4().hex,
'type': uuid.uuid4().hex
}
self.catalog_api.create_service(unnamed_svc['id'], unnamed_svc)
endpoint = self.new_endpoint_ref(service_id=unnamed_svc['id'])
self.catalog_api.create_endpoint(endpoint['id'], endpoint)
catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
named_endpoint = [ep for ep in catalog
if ep['type'] == named_svc['type']][0]
self.assertEqual(named_svc['name'], named_endpoint['name'])
unnamed_endpoint = [ep for ep in catalog
if ep['type'] == unnamed_svc['type']][0]
self.assertEqual('', unnamed_endpoint['name'])
# TODO(dstanek): this needs refactoring with the test above, but we are in a
# crunch so that will happen in a future patch.
class TestCatalogAPISQLRegions(unit.TestCase):
"""Tests for the catalog Manager against the SQL backend.
"""
def setUp(self):
super(TestCatalogAPISQLRegions, self).setUp()
self.useFixture(database.Database())
self.catalog_api = catalog.Manager()
def config_overrides(self):
super(TestCatalogAPISQLRegions, self).config_overrides()
self.config_fixture.config(group='catalog', driver='sql')
def new_endpoint_ref(self, service_id):
return {
'id': uuid.uuid4().hex,
'name': uuid.uuid4().hex,
'description': uuid.uuid4().hex,
'interface': uuid.uuid4().hex[:8],
'service_id': service_id,
'url': uuid.uuid4().hex,
'region_id': uuid.uuid4().hex,
}
def test_get_catalog_returns_proper_endpoints_with_no_region(self):
service_id = uuid.uuid4().hex
service = {'id': service_id, 'name': uuid.uuid4().hex}
self.catalog_api.create_service(service_id, service)
endpoint = self.new_endpoint_ref(service_id=service_id)
del endpoint['region_id']
self.catalog_api.create_endpoint(endpoint['id'], endpoint)
user_id = uuid.uuid4().hex
tenant_id = uuid.uuid4().hex
catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
self.assertValidCatalogEndpoint(
catalog[0]['endpoints'][0], ref=endpoint)
def test_get_catalog_returns_proper_endpoints_with_region(self):
service_id = uuid.uuid4().hex
service = {'id': service_id, 'name': uuid.uuid4().hex}
self.catalog_api.create_service(service_id, service)
endpoint = self.new_endpoint_ref(service_id=service_id)
self.catalog_api.create_region({'id': endpoint['region_id']})
self.catalog_api.create_endpoint(endpoint['id'], endpoint)
endpoint = self.catalog_api.get_endpoint(endpoint['id'])
user_id = uuid.uuid4().hex
tenant_id = uuid.uuid4().hex
catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id)
self.assertValidCatalogEndpoint(
catalog[0]['endpoints'][0], ref=endpoint)
def assertValidCatalogEndpoint(self, entity, ref=None):
keys = ['description', 'id', 'interface', 'name', 'region_id', 'url']
for k in keys:
self.assertEqual(ref.get(k), entity[k], k)
self.assertEqual(entity['region_id'], entity['region'])
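# Illustrative note, not part of the original suite: catalog endpoint URLs are
# rendered with Python %-style substitution against a whitelist of keys, which
# is why '$(tenant_id)s' expands while '$(tenant_id)' (missing the trailing
# type specifier) fails with ValueError and '$(nonexistent)s' with KeyError;
# the invalid-URL tests above exercise these failure modes.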
|
bijandhakal/pattern
|
refs/heads/master
|
pattern/web/cache/__init__.py
|
21
|
#### PATTERN | CACHE ###############################################################################
# Copyright (c) 2010 University of Antwerp, Belgium
# Author: Tom De Smedt <tom@organisms.be>
# License: BSD (see LICENSE.txt for details).
# http://www.clips.ua.ac.be/pages/pattern
try:
import hashlib; md5=hashlib.md5
except:
import md5; md5=md5.new
#### UNICODE #######################################################################################
def decode_string(v, encoding="utf-8"):
""" Returns the given value as a Unicode string (if possible).
"""
if isinstance(encoding, basestring):
encoding = ((encoding,),) + (("windows-1252",), ("utf-8", "ignore"))
if isinstance(v, str):
for e in encoding:
try: return v.decode(*e)
except:
pass
return v
return unicode(v)
def encode_string(v, encoding="utf-8"):
""" Returns the given value as a Python byte string (if possible).
"""
if isinstance(encoding, basestring):
encoding = ((encoding,),) + (("windows-1252",), ("utf-8", "ignore"))
if isinstance(v, unicode):
for e in encoding:
try: return v.encode(*e)
except:
pass
return v
return str(v)
decode_utf8 = decode_string
encode_utf8 = encode_string
#### CACHE #########################################################################################
# Caching is implemented in URL.download(), which is used by all other downloaders.
import os
import glob
import tempfile
import codecs
import datetime
try:
MODULE = os.path.dirname(os.path.realpath(__file__))
except:
MODULE = ""
TMP = os.path.join(tempfile.gettempdir(), "pattern_web")
def date_now():
return datetime.datetime.today()
def date_modified(path):
return datetime.datetime.fromtimestamp(os.stat(path)[8])
class Cache(object):
def __init__(self, path=os.path.join(MODULE, "tmp")):
""" Cache with data stored as files with hashed filenames.
        Content retrieved from URLs and search engines is stored in the cache for performance.
The path where the cache is stored can be given. This way you can manage persistent
sets of downloaded data. If path=TMP, cached items are stored in a temporary folder.
"""
self.path = path
def _get_path(self):
return self._path
def _set_path(self, path):
if not os.path.isdir(path):
os.makedirs(path)
self._path = path
path = property(_get_path, _set_path)
def _hash(self, k):
k = encode_utf8(k) # MD5 works on Python byte strings.
return os.path.join(self.path, md5(k).hexdigest())
def __len__(self):
return len(glob.glob(os.path.join(self.path, "*")))
def __contains__(self, k):
return os.path.exists(self._hash(k))
def __getitem__(self, k):
return self.get(k)
def __setitem__(self, k, v):
f = open(self._hash(k), "wb")
f.write(codecs.BOM_UTF8)
f.write(encode_utf8(v))
f.close()
def __delitem__(self, k):
try: os.unlink(self._hash(k))
except OSError:
pass
def get(self, k, unicode=True):
""" Returns the data stored with the given id.
With unicode=True, returns a Unicode string.
"""
if k in self:
f = open(self._hash(k), "rb"); v=f.read().lstrip(codecs.BOM_UTF8)
f.close()
if unicode is True:
return decode_utf8(v)
else:
return v
raise KeyError(k)
def age(self, k):
""" Returns the age of the cached item, in days.
"""
p = self._hash(k)
return os.path.exists(p) and (date_now() - date_modified(p)).days or 0
def clear(self, age=None):
""" Clears all items from the cache (whose age is the given amount of days or older).
"""
n = date_now()
for p in glob.glob(os.path.join(self.path, "*")):
if age is None or (n - date_modified(p)).days >= age:
os.unlink(p)
cache = Cache()
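# Minimal usage sketch, not part of the original module; the key and value are
# hypothetical. Items live as MD5-named, BOM-prefixed UTF-8 files under path.
if __name__ == "__main__":
    c = Cache(path=TMP)                 # store in the temporary folder
    c["example-key"] = u"cached value"  # written via __setitem__
    assert c["example-key"] == u"cached value" and "example-key" in c
    assert c.age("example-key") == 0    # freshly written, zero days old
    del c["example-key"]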
|
berkeley-stat159/project-delta
|
refs/heads/master
|
code/utils/plot_tool.py
|
1
|
"""
This script contains tools that will be used to plot findings from statistical
analyses. Future Python scripts can take advantage of these utilities by
including the command
sys.path.append("code/utils")
from plot_tool import *
"""
from __future__ import division, print_function, absolute_import
import matplotlib.pyplot as plt
import nibabel as nib
import numpy as np
def plot_volume(data, volume=None, backdrop=0):
"""
    Plots all horizontal slices of an fMRI volume.
Parameters
----------
data : np.ndarray
3- or 4-D array containing data imported from a .nii file
volume : int, optional
The index (with respect to time) of the volume of interest
backdrop : float, optional
        Value that determines the color of the backdrop: typical choices are
        0 or np.nan
    Returns
-------
canvas : 2-D array
Canvas depicting BOLD signal intensities of a given brain volume,
organized left-to-right and top-to-bottom respectively in grid format
"""
# Check assertions
assert type(data) == np.ndarray, "data must be of type np.ndarray"
if data.ndim == 4:
assert volume != None and volume <= data.shape[3], "volume out of range"
data = data[..., volume]
elif data.ndim != 3:
raise AssertionError("incorrect number of dimensions")
# Extract data to be used for plotting
length, width, depth = data.shape
# Canvas is a grid: compute the number of slices to plot per side
side_length = int(np.ceil(np.sqrt(depth)))
canvas = np.empty((length * side_length, width * side_length))
canvas.fill(backdrop)
# Plot slices iteratively: depth_i is the ith slice with respect to depth
depth_i = 0
for row in range(side_length):
column = 0
while column < side_length and depth_i < depth:
canvas[length * row:length * (row + 1),
width * column:width * (column + 1)] = data[..., depth_i]
column += 1
depth_i += 1
return canvas
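# Illustrative sketch, not part of the original module: tile a tiny synthetic
# volume; the 2 x 3 x 4 shape (four 2x3 slices) is a hypothetical example.
if __name__ == "__main__":
    demo = np.arange(24).reshape(2, 3, 4)
    canvas = plot_volume(demo, backdrop=np.nan)  # four slices on a 2x2 grid
    plt.imshow(canvas, interpolation="nearest")
    plt.show()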
|
makerplane/pyEfis
|
refs/heads/master
|
pyefis/gui.py
|
2
|
# Copyright (c) 2016 Phil Birkelbach
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
from PyQt5.QtGui import *
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
import importlib
import logging
import sys
from pyefis import hooks
from pyefis import hmi
screens = []
# This class is just a structure to hold information about a single
# screen that will be loaded.
class Screen(QObject):
screenShow = pyqtSignal()
screenHide = pyqtSignal()
def __init__(self, name, module, config):
super(Screen, self).__init__()
self.name = name
self.module = importlib.import_module(module)
self.config = config
# This would hold the instantiated Screen object from the module.
self.object = None
self.default = False
def show(self):
self.object.show()
self.screenShow.emit()
def hide(self):
self.object.hide()
self.screenHide.emit()
class Main(QMainWindow):
keyPress = pyqtSignal(QEvent)
keyRelease = pyqtSignal(QEvent)
windowShow = pyqtSignal(QEvent)
windowClose = pyqtSignal(QEvent)
#change_asd_mode = pyqtSignal(QEvent)
def __init__(self, config, parent=None):
super(Main, self).__init__(parent)
self.screenWidth = int(config["main"]["screenWidth"])
self.screenHeight = int(config["main"]["screenHeight"])
self.screenColor = config["main"]["screenColor"]
self.setObjectName("EFIS")
self.resize(self.screenWidth, self.screenHeight)
w = QWidget(self)
w.setGeometry(0, 0, self.screenWidth, self.screenHeight)
p = w.palette()
if self.screenColor:
p.setColor(w.backgroundRole(), QColor(self.screenColor))
w.setPalette(p)
w.setAutoFillBackground(True)
for idx, scr in enumerate(screens):
scr.object = scr.module.Screen(self)
log.debug("Loading Screen {0}".format(scr.name))
# TODO Figure out how to have different size screens
scr.object.resize(self.width(), self.height())
scr.object.move(0,0)
if scr.default:
scr.show()
self.running_screen = idx
else:
scr.hide()
def showScreen(self, scr):
found = None
if type(scr) == int:
if scr >= 0 and scr < len(screens):
found = scr
else:
for i, s in enumerate(screens):
if s.name == scr:
found = i
break
if found is not None:
if found != self.running_screen: # Make sure it's different.
screens[found].show()
screens[self.running_screen].hide()
self.running_screen = found
else:
raise KeyError("Screen {0} Not Found".format(scr))
def showNextScreen(self, s=""):
if self.running_screen == len(screens)-1:
self.showScreen(0)
else:
self.showScreen(self.running_screen + 1)
def showPrevScreen(self, s=""):
if self.running_screen == 0:
self.showScreen(len(screens)-1)
else:
self.showScreen(self.running_screen-1)
# We send signals for these events so everybody can play.
def showEvent(self, event):
self.windowShow.emit(event)
def closeEvent(self, event):
log.debug("Window Close event received")
self.windowClose.emit(event)
def keyPressEvent(self, event):
self.keyPress.emit(event)
def keyReleaseEvent(self, event):
self.keyRelease.emit(event)
# def change_asd_mode_event (self, event):
# self.change_asd_mode.emit(event)
def get_config_item(self, child, key):
for s in screens:
if s.object == child:
return s.config.get(key)
else:
return None
def setDefaultScreen(s):
found = False
if type(s) == int:
for i, scr in enumerate(screens):
if i == s:
found = True
scr.default = True
log.debug("setting screen {0} to default".format(s))
else:
scr.default = False
else:
for scr in screens:
if scr.name == s:
found = True
scr.default = True
log.debug("setting screen {0} to default".format(s))
else:
scr.default = False
return found
def initialize(config):
global mainWindow
global log
log = logging.getLogger(__name__)
log.info("Initializing Graphics")
# Load the Screens
for each in config['screens']:
module = config['screens'][each]["module"]
try:
name = each
screens.append(Screen(name, module, config['screens'][each]))
log.debug("Creating Screen {0}".format(name))
except Exception as e:
log.critical("Unable to load module - " + module + ": " + str(e))
raise
try:
d = config["main"]["defaultScreen"]
except KeyError:
d = 0
setDefaultScreen(d)
mainWindow = Main(config)
hmi.actions.showNextScreen.connect(mainWindow.showNextScreen)
hmi.actions.showPrevScreen.connect(mainWindow.showPrevScreen)
hmi.actions.showScreen.connect(mainWindow.showScreen)
if 'menu' in config:
menu = hmi.menu.Menu(mainWindow, config["menu"])
menu.start()
if 'FMS' in config:
sys.path.insert(0, config["FMS"]["module_dir"])
ui = importlib.import_module ("qtui")
uiwidget = ui.FMSUI(config["FMS"]["flight_plan_dir"], mainWindow)
ui_width = 1000
if 'ui_width' in config['FMS']:
ui_width = config['FMS']['ui_width']
uiwidget.resize (ui_width, 65)
uiwidget.move (30, 32)
menu.register_target ("FMS", uiwidget)
screen = bool(config["main"]["screenFullSize"])
if screen:
log.debug("Setting Screen to Full Size")
mainWindow.showFullScreen()
else:
mainWindow.width = int(config["main"]["screenWidth"])
mainWindow.height = int(config["main"]["screenHeight"])
mainWindow.show()
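# Hedged usage sketch, not part of the original module; every key and module
# path below is hypothetical, but mirrors the lookups performed in initialize():
#   config = {"main": {"screenWidth": "800", "screenHeight": "600",
#                      "screenColor": "black", "screenFullSize": "",
#                      "defaultScreen": 0},
#             "screens": {"pfd": {"module": "pyefis.screens.pfd"}}}
#   initialize(config)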
|
archf/ansible
|
refs/heads/devel
|
lib/ansible/module_utils/facts/hardware/dragonfly.py
|
232
|
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.module_utils.facts.hardware.base import HardwareCollector
from ansible.module_utils.facts.hardware.freebsd import FreeBSDHardware
class DragonFlyHardwareCollector(HardwareCollector):
# Note: This uses the freebsd fact class, there is no dragonfly hardware fact class
_fact_class = FreeBSDHardware
_platform = 'DragonFly'
|
samthor/intellij-community
|
refs/heads/master
|
python/testData/completion/isInstanceTuple.py
|
83
|
class Foo:
def test(self): pass
class Foo2:
def test(self): pass
def x(p):
if isinstance(p, (Foo, Foo2)):
p.te<caret>
|
ahmadRagheb/goldenHR
|
refs/heads/master
|
erpnext/patches/v7_0/update_refdoc_in_landed_cost_voucher.py
|
54
|
from __future__ import unicode_literals
import frappe
def execute():
if "purchase_receipt" not in frappe.db.get_table_columns("Landed Cost Purchase Receipt"):
return
frappe.reload_doctype("Landed Cost Purchase Receipt")
frappe.db.sql("""
update `tabLanded Cost Purchase Receipt`
set receipt_document_type = 'Purchase Receipt', receipt_document = purchase_receipt
where (receipt_document is null or receipt_document = '')
and (purchase_receipt is not null and purchase_receipt != '')
""")
|
abhishekgahlot/inbox
|
refs/heads/master
|
tests/imap/conftest.py
|
2
|
""" Fixtures don't go here; see util/base.py and friends. """
# fixtures that are available by default
from tests.util.base import config, db, log, absolute_path
def pytest_generate_tests(metafunc):
if 'db' in metafunc.fixturenames:
dumpfile = absolute_path(config()['BASE_DUMP'])
savedb = False
metafunc.parametrize('db', [(dumpfile, savedb)], indirect=True)
def pytest_report_header():
return "\nIMPORTANT: Remember to run py.test -s test_filename.py, "\
"OAuth may need to be re-run!\n"
|
fangeugene/trajopt
|
refs/heads/master
|
src/sensorsim/test_sensorsim.py
|
8
|
import openravepy, sensorsimpy, trajoptpy
env = openravepy.Environment()
env.StopSimulation()
env.Load("robots/pr2-beta-static.zae")
viewer = trajoptpy.GetViewer(env)
viewer.Idle()
sensor = sensorsimpy.CreateFakeKinect(env)
sensor.SetPose([0,0,2], [0,0,1,0])
sensor.SetIntrinsics(525)
sensor.Update()
d = sensor.GetDepthImage()
|
vic3t3chn0/kernel_ubuntu_togari
|
refs/heads/vic3t3chn0_ubuntu_patch
|
scripts/tracing/draw_functrace.py
|
14679
|
#!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <fweisbec@gmail.com>
Licensed under the terms of the GNU GPL License version 2
This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c
The resulting trace is processed into a tree to produce a more human-readable
view of the call stack by drawing a textual but hierarchical tree of
calls. Only the functions' names and the call time are provided.
Usage:
Be sure that you have CONFIG_FUNCTION_TRACER
# mount -t debugfs nodev /sys/kernel/debug
# echo function > /sys/kernel/debug/tracing/current_tracer
$ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
Wait some time, but not too long: the script is a bit slow.
Break the pipe (Ctrl + Z)
$ scripts/draw_functrace.py < raw_trace_func > draw_functrace
Then you have your drawn trace in draw_functrace
"""
import sys, re
class CallTree:
""" This class provides a tree representation of the functions
call stack. If a function has no parent in the kernel (interrupt,
syscall, kernel thread...) then it is attached to a virtual parent
called ROOT.
"""
ROOT = None
def __init__(self, func, time = None, parent = None):
self._func = func
self._time = time
if parent is None:
self._parent = CallTree.ROOT
else:
self._parent = parent
self._children = []
def calls(self, func, calltime):
""" If a function calls another one, call this method to insert it
into the tree at the appropriate place.
@return: A reference to the newly created child node.
"""
child = CallTree(func, calltime, self)
self._children.append(child)
return child
def getParent(self, func):
""" Retrieve the last parent of the current node that
has the name given by func. If this function is not
on a parent, then create it as a new child of root
@return: A reference to the parent.
"""
tree = self
while tree != CallTree.ROOT and tree._func != func:
tree = tree._parent
if tree == CallTree.ROOT:
child = CallTree.ROOT.calls(func, None)
return child
return tree
def __repr__(self):
return self.__toString("", True)
def __toString(self, branch, lastChild):
if self._time is not None:
s = "%s----%s (%s)\n" % (branch, self._func, self._time)
else:
s = "%s----%s\n" % (branch, self._func)
i = 0
if lastChild:
branch = branch[:-1] + " "
while i < len(self._children):
if i != len(self._children) - 1:
s += "%s" % self._children[i].__toString(branch +\
" |", False)
else:
s += "%s" % self._children[i].__toString(branch +\
" |", True)
i += 1
return s
class BrokenLineException(Exception):
"""If the last line is not complete because of the pipe breakage,
we want to stop the processing and ignore this line.
"""
pass
class CommentLineException(Exception):
""" If the line is a comment (as in the beginning of the trace file),
just ignore it.
"""
pass
def parseLine(line):
line = line.strip()
if line.startswith("#"):
raise CommentLineException
m = re.match("[^]]+?\\] +([0-9.]+): (\\w+) <-(\\w+)", line)
if m is None:
raise BrokenLineException
return (m.group(1), m.group(2), m.group(3))
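# A minimal sketch of what parseLine() accepts, using a hypothetical trace
# line (the exact layout depends on the kernel version):
#   " <idle>-0     [001]  1073.123456: do_IRQ <-ret_from_intr"
# would be returned as ("1073.123456", "do_IRQ", "ret_from_intr").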
def main():
CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
tree = CallTree.ROOT
for line in sys.stdin:
try:
calltime, callee, caller = parseLine(line)
except BrokenLineException:
break
except CommentLineException:
continue
tree = tree.getParent(caller)
tree = tree.calls(callee, calltime)
print CallTree.ROOT
if __name__ == "__main__":
main()
|
mSenyor/sl4a
|
refs/heads/master
|
python/src/Tools/bgen/bgen/bgenGeneratorGroup.py
|
50
|
from bgenOutput import *
class GeneratorGroup:
def __init__(self, prefix):
self.prefix = prefix
self.generators = []
def add(self, g, dupcheck=0):
if dupcheck:
if g in self.generators:
print 'DUP', g.name
return
g.setprefix(self.prefix)
self.generators.append(g)
def generate(self):
for g in self.generators:
g.generate()
Output()
Output("static PyMethodDef %s_methods[] = {", self.prefix)
IndentLevel()
for g in self.generators:
g.reference()
Output("{NULL, NULL, 0}")
DedentLevel()
Output("};")
def _test():
void = None
from bgenGenerator import FunctionGenerator
group = GeneratorGroup("spam")
eggs = FunctionGenerator(void, "eggs")
group.add(eggs)
print "/* START */"
group.generate()
if __name__ == "__main__":
_test()
|
smartmob-project/smartmob-agent
|
refs/heads/master
|
tests/test_middleware.py
|
1
|
# -*- coding: utf-8 -*-
import asyncio
import aiohttp
import aiohttp.web
import logging
import pytest
import testfixtures
from aiohttp import web
from smartmob_agent import (
access_log_middleware,
echo_request_id,
inject_request_id,
)
from unittest import mock
class HTTPServer:
"""Run an aiohttp application as an asynchronous context manager."""
def __init__(self, app, host='0.0.0.0', port=80, loop=None):
self._app = app
self._loop = loop or asyncio.get_event_loop()
self._handler = app.make_handler()
self._server = None
self._host = host
self._port = port
async def __aenter__(self):
assert not self._server
self._server = await self._loop.create_server(
self._handler, self._host, self._port,
)
async def __aexit__(self, *args):
assert self._server
self._server.close()
await self._server.wait_closed()
await self._app.shutdown()
await self._handler.finish_connections(1.0)
await self._app.cleanup()
self._server = None
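# A minimal usage sketch for HTTPServer, assuming an aiohttp app built as in
# the tests below (the port is a hypothetical example value):
#
#   async with HTTPServer(app, '127.0.0.1', 8080):
#       ...  # issue requests against the running server here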
@pytest.mark.asyncio
async def test_access_log_success_200(event_loop, unused_tcp_port):
event_log = mock.MagicMock()
clock = mock.MagicMock()
clock.side_effect = [0.0, 1.0]
app = aiohttp.web.Application(
loop=event_loop,
middlewares=[
inject_request_id,
access_log_middleware,
],
)
app.on_response_prepare.append(echo_request_id)
app['smartmob.event_log'] = event_log
app['smartmob.clock'] = clock
async def index(request):
return aiohttp.web.Response(body=b'...')
app.router.add_route('GET', '/', index)
# Given the server is running.
async with HTTPServer(app, '127.0.0.1', unused_tcp_port):
# When I access the index.
index_url = 'http://127.0.0.1:%d' % (unused_tcp_port,)
async with aiohttp.ClientSession(loop=event_loop) as client:
async with client.get(index_url) as rep:
assert rep.status == 200
request_id = rep.headers['x-request-id']
assert request_id
body = await rep.read()
assert body == b'...'
# Then the request is logged in the access log.
event_log.info.assert_called_once_with(
'http.access',
path='/',
outcome=200,
duration=1.0,
request=request_id,
**{'@timestamp': mock.ANY}
)
@pytest.mark.parametrize('status', [
201,
204,
302,
])
@pytest.mark.asyncio
async def test_access_log_success_other(status, event_loop, unused_tcp_port):
event_log = mock.MagicMock()
clock = mock.MagicMock()
clock.side_effect = [0.0, 1.0]
app = aiohttp.web.Application(
loop=event_loop,
middlewares=[
inject_request_id,
access_log_middleware,
],
)
app.on_response_prepare.append(echo_request_id)
app['smartmob.event_log'] = event_log
app['smartmob.clock'] = clock
async def index(request):
return aiohttp.web.Response(status=status, body=b'')
app.router.add_route('GET', '/', index)
# Given the server is running.
async with HTTPServer(app, '127.0.0.1', unused_tcp_port):
# When I access the index.
index_url = 'http://127.0.0.1:%d' % (unused_tcp_port,)
async with aiohttp.ClientSession(loop=event_loop) as client:
async with client.get(index_url, allow_redirects=False) as rep:
assert rep.status == status
request_id = rep.headers['x-request-id']
assert request_id
body = await rep.read()
assert body == b''
# Then the request is logged in the access log.
event_log.info.assert_called_once_with(
'http.access',
path='/',
outcome=status,
duration=1.0,
request=request_id,
**{'@timestamp': mock.ANY}
)
@pytest.mark.parametrize('exc_class,expected_status', [
(web.HTTPBadRequest, 400),
(web.HTTPNotFound, 404),
(web.HTTPConflict, 409),
])
@pytest.mark.asyncio
async def test_access_log_failure_http_exception(exc_class, expected_status,
event_loop, unused_tcp_port):
event_log = mock.MagicMock()
clock = mock.MagicMock()
clock.side_effect = [0.0, 1.0]
app = aiohttp.web.Application(
loop=event_loop,
middlewares=[
inject_request_id,
access_log_middleware,
],
)
app.on_response_prepare.append(echo_request_id)
app['smartmob.event_log'] = event_log
app['smartmob.clock'] = clock
async def index(request):
raise exc_class(body=b'...')
app.router.add_route('GET', '/', index)
# Given the server is running.
async with HTTPServer(app, '127.0.0.1', unused_tcp_port):
# When I access the index.
index_url = 'http://127.0.0.1:%d' % (unused_tcp_port,)
async with aiohttp.ClientSession(loop=event_loop) as client:
async with client.get(index_url) as rep:
assert rep.status == expected_status
request_id = rep.headers['x-request-id']
assert request_id
body = await rep.read()
assert body == b'...'
# Then the request is logged in the access log.
event_log.info.assert_called_once_with(
'http.access',
path='/',
outcome=expected_status,
duration=1.0,
request=request_id,
**{'@timestamp': mock.ANY}
)
@pytest.mark.parametrize('exc_class', [
ValueError,
OSError,
KeyError,
])
@pytest.mark.asyncio
async def test_access_log_failure_other_exception(exc_class, event_loop,
unused_tcp_port):
event_log = mock.MagicMock()
clock = mock.MagicMock()
clock.side_effect = [0.0, 1.0]
app = aiohttp.web.Application(
loop=event_loop,
middlewares=[
inject_request_id,
access_log_middleware,
],
)
app.on_response_prepare.append(echo_request_id)
app['smartmob.event_log'] = event_log
app['smartmob.clock'] = clock
async def index(request):
raise exc_class()
app.router.add_route('GET', '/', index)
# Given the server is running.
async with HTTPServer(app, '127.0.0.1', unused_tcp_port):
# When I access the index.
with testfixtures.LogCapture(level=logging.WARNING) as capture:
index_url = 'http://127.0.0.1:%d' % (unused_tcp_port,)
async with aiohttp.ClientSession(loop=event_loop) as client:
async with client.get(index_url) as rep:
assert rep.status == 500
# request_id = rep.headers['x-request-id']
# assert request_id
body = await rep.read()
assert body # HTML content.
capture.check(('aiohttp.web', 'ERROR', mock.ANY))
# Then the request is logged in the access log.
event_log.info.assert_called_once_with(
'http.access',
path='/',
outcome=500,
duration=1.0,
request=mock.ANY, # aiohttp doesn't seem to execute our signal...
**{'@timestamp': mock.ANY}
)
@pytest.mark.asyncio
async def test_access_log_custom_request_id(event_loop, unused_tcp_port):
event_log = mock.MagicMock()
clock = mock.MagicMock()
clock.side_effect = [0.0, 1.0]
request_id = 'My very own request ID!'
app = aiohttp.web.Application(
loop=event_loop,
middlewares=[
inject_request_id,
access_log_middleware,
],
)
app.on_response_prepare.append(echo_request_id)
app['smartmob.event_log'] = event_log
app['smartmob.clock'] = clock
async def index(request):
return aiohttp.web.Response(body=b'...')
app.router.add_route('GET', '/', index)
# Given the server is running.
async with HTTPServer(app, '127.0.0.1', unused_tcp_port):
# When I access the index.
index_url = 'http://127.0.0.1:%d' % (unused_tcp_port,)
async with aiohttp.ClientSession(loop=event_loop) as client:
head = {
'X-Request-Id': request_id
}
async with client.get(index_url, headers=head) as rep:
assert rep.status == 200
assert rep.headers['x-request-id'] == request_id
body = await rep.read()
assert body == b'...'
# Then the request is logged in the access log.
event_log.info.assert_called_once_with(
'http.access',
path='/',
outcome=200,
duration=1.0,
request=request_id,
**{'@timestamp': mock.ANY}
)
@pytest.mark.asyncio
async def test_access_log_no_signal(event_loop, unused_tcp_port):
"""Middleware doesn't cause side-effects when misused."""
event_log = mock.MagicMock()
clock = mock.MagicMock()
clock.side_effect = [0.0, 1.0]
app = aiohttp.web.Application(
loop=event_loop,
middlewares=[
inject_request_id,
access_log_middleware,
],
)
# Suppose someone forgot to register the signal handler.
# app.on_response_prepare.append(echo_request_id)
app['smartmob.event_log'] = event_log
app['smartmob.clock'] = clock
async def index(request):
return aiohttp.web.Response(body=b'...')
app.router.add_route('GET', '/', index)
# Given the server is running.
async with HTTPServer(app, '127.0.0.1', unused_tcp_port):
# When I access the index.
index_url = 'http://127.0.0.1:%d' % (unused_tcp_port,)
async with aiohttp.ClientSession(loop=event_loop) as client:
async with client.get(index_url) as rep:
assert rep.status == 200
assert 'x-request-id' not in rep.headers
body = await rep.read()
assert body == b'...'
# Then the request is logged in the access log.
event_log.info.assert_called_once_with(
'http.access',
path='/',
outcome=200,
duration=1.0,
request=mock.ANY, # Not echoed in response, so value doesn't matter.
**{'@timestamp': mock.ANY}
)
|
qstokkink/py-ipv8
|
refs/heads/master
|
ipv8/test/attestation/wallet/bonehexact/test_attestation.py
|
1
|
from binascii import unhexlify
import asynctest
from .....attestation.wallet.bonehexact.attestation import (attest, binary_relativity, binary_relativity_certainty,
binary_relativity_match, create_challenge,
create_challenge_response, create_empty_relativity_map,
decode, generate_modular_additive_inverse,
process_challenge_response)
from .....attestation.wallet.primitives.structs import BonehPrivateKey
class TestAttestation(asynctest.TestCase):
private_key = BonehPrivateKey.unserialize(unhexlify('01011d01011401011101011c01011c01010f010103'))
def test_generate_minverse_group(self):
"""
Check if additive inverse group generation modulo (p + 1) is correct.
"""
p = 12253454
n = 20
group = generate_modular_additive_inverse(p, n)
self.assertEqual(20, len(group))
self.assertEqual(0, sum(group) % (p + 1))
def test_attest(self):
"""
Check if Attestations can be created correctly.
"""
attestations = [
attest(self.private_key, 0, 2),
attest(self.private_key, 1, 2),
attest(self.private_key, 2, 2),
attest(self.private_key, 3, 2)
]
self.assertListEqual([0, 1, 1, 2],
[decode(self.private_key, [0, 1, 2, 3], a.bitpairs[0].compress()) for a in attestations])
def test_empty_relativity_map(self):
"""
Check if a new relativity map is empty.
"""
relativity_map = create_empty_relativity_map()
self.assertSetEqual({0, 1, 2, 3}, set(relativity_map.keys()))
self.assertEqual(0, sum(relativity_map.values()))
def test_binary_relativity(self):
"""
Check if the binary relativity of several numbers is calculated correctly.
"""
values = [
({0: 0, 1: 1, 2: 1, 3: 0}, 7, 4), # 0111
({0: 0, 1: 1, 2: 2, 3: 0}, 55, 6), # 110111
({0: 1, 1: 1, 2: 1, 3: 0}, 199, 6) # 11000111
]
for expected, value, bitspace in values:
self.assertDictEqual(expected, binary_relativity(value, bitspace))
def test_binary_relativity_match(self):
"""
Check if matching percentages between maps are relatively correct.
"""
a = {0: 0, 1: 1, 2: 1, 3: 0}
b = {0: 0, 1: 1, 2: 2, 3: 0}
c = {0: 1, 1: 1, 2: 1, 3: 0}
self.assertLess(0, binary_relativity_match(b, a))
self.assertEqual(0, binary_relativity_match(a, b))
self.assertLess(0, binary_relativity_match(c, a))
self.assertEqual(0, binary_relativity_match(a, c))
self.assertEqual(0, binary_relativity_match(b, c))
self.assertEqual(0, binary_relativity_match(c, b))
def test_binary_relativity_certainty(self):
"""
Check if matching certainties between maps are relatively correct.
"""
a = {0: 0, 1: 1, 2: 1, 3: 0}
b = {0: 0, 1: 1, 2: 2, 3: 0}
# (1 * 1 * .5 * 1)*(1 - .25) = .5 * .75 = .375
self.assertEqual(0.375, binary_relativity_certainty(b, a))
def test_create_challenge(self):
"""
Check if challenges can be created and are properly responded to.
"""
pk = self.private_key.public_key()
challenges = [
create_challenge(pk, attest(pk, 0, 2).bitpairs[0]),
create_challenge(pk, attest(pk, 1, 2).bitpairs[0]),
create_challenge(pk, attest(pk, 2, 2).bitpairs[0]),
create_challenge(pk, attest(pk, 3, 2).bitpairs[0])
]
self.assertListEqual([0, 1, 1, 2], [create_challenge_response(self.private_key, c) for c in challenges])
def test_process_challenge_response(self):
"""
Check if a map is properly updated when a challenge response comes in.
"""
a = {0: 0, 1: 1, 2: 1, 3: 0}
b = {0: 0, 1: 1, 2: 2, 3: 0}
process_challenge_response(a, 2)
self.assertDictEqual(a, b)
|
Laurawly/tvm-1
|
refs/heads/master
|
tests/python/relay/test_pass_simplify_inference.py
|
4
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from tvm.ir import IRModule, structural_equal
from tvm import relay as rly
from tvm.relay.transform import SimplifyInference, InferType
def test_simplify_batchnorm(dtype="float32"):
def simple_bn(x, gamma, beta, moving_mean, moving_var, axis=1, epsilon=1e-5, shape=None):
# expect = (x - moving_mean) / sqrt(moving_var + eps) * gamma + beta
scale = rly.multiply(
rly.const(1, dtype) / rly.sqrt(moving_var + rly.const(epsilon, dtype)), gamma
)
shift = rly.add(rly.multiply(rly.negative(moving_mean), scale), beta)
num_newaxis = len(shape) - (axis + 1)
if num_newaxis:
scale = rly.expand_dims(scale, axis=1, num_newaxis=num_newaxis)
shift = rly.expand_dims(shift, axis=1, num_newaxis=num_newaxis)
return x * scale + shift
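# Note: x * scale + shift expands to (x - moving_mean) * scale + beta,
# i.e. exactly the expected expression in the comment above.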
def check(dim, axis, nstep):
eps = 0.01
ttype1 = rly.TensorType(tuple(10 for i in range(dim)), dtype)
ttype2 = rly.TensorType((10,), dtype)
x = rly.var("x", ttype1)
beta = rly.var("beta", ttype2)
gamma = rly.var("gamma", ttype2)
moving_var = rly.var("moving_var", ttype2)
moving_mean = rly.var("moving_mean", ttype2)
y1, y2 = x, x
for _ in range(nstep):
y1, _, _ = rly.nn.batch_norm(
y1 + rly.const(1, dtype),
gamma,
beta,
moving_mean,
moving_var,
epsilon=eps,
axis=axis,
)
y1 = rly.nn.dropout(y1)
y2 = simple_bn(
y2 + rly.const(1, dtype),
gamma,
beta,
moving_mean,
moving_var,
epsilon=eps,
axis=axis,
shape=ttype1.shape,
)
mod = IRModule.from_expr(y1)
simplify = SimplifyInference()
mod = InferType()(mod)
mod = simplify(mod)
y1 = mod["main"].body
assert structural_equal(y1, y2, map_free_vars=True)
check(2, 1, 1)
check(4, 1, 1)
check(4, 0, 3)
if __name__ == "__main__":
test_simplify_batchnorm(dtype="float32")
test_simplify_batchnorm(dtype="float16")
|
boberfly/gaffer
|
refs/heads/master
|
python/GafferCortexUITest/CompoundParameterValueWidgetTest.py
|
11
|
##########################################################################
#
# Copyright (c) 2012, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import weakref
import GafferUI
import GafferUITest
import GafferCortex
import GafferCortexTest
import GafferCortexUI
class CompoundParameterValueWidgetTest( GafferUITest.TestCase ) :
def testLifetime( self ) :
n = GafferCortex.OpHolder()
opSpec = GafferCortexTest.ParameterisedHolderTest.classSpecification( "files/sequenceRenumber", "IECORE_OP_PATHS" )[:-1]
n.setOp( *opSpec )
ui = GafferCortexUI.CompoundParameterValueWidget( n.parameterHandler() )
w = weakref.ref( ui )
del ui
self.assertEqual( w(), None )
if __name__ == "__main__":
unittest.main()
|
citrix-openstack/build-python-troveclient
|
refs/heads/ctx-nova-network-smoke-latest
|
troveclient/client.py
|
2
|
# Copyright (c) 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import httplib2
import logging
import os
import time
import urlparse
import sys
try:
import json
except ImportError:
import simplejson as json
# Python 2.5 compat fix
if not hasattr(urlparse, 'parse_qsl'):
import cgi
urlparse.parse_qsl = cgi.parse_qsl
from troveclient import auth
from troveclient import exceptions
_logger = logging.getLogger(__name__)
RDC_PP = os.environ.get("RDC_PP", "False") == "True"
expected_errors = (400, 401, 403, 404, 408, 409, 413, 422, 500, 501)
def log_to_streamhandler(stream=None):
stream = stream or sys.stderr
ch = logging.StreamHandler(stream)
_logger.setLevel(logging.DEBUG)
_logger.addHandler(ch)
if 'REDDWARFCLIENT_DEBUG' in os.environ and os.environ['REDDWARFCLIENT_DEBUG']:
log_to_streamhandler()
class TroveHTTPClient(httplib2.Http):
USER_AGENT = 'python-troveclient'
def __init__(self, user, password, tenant, auth_url, service_name,
service_url=None,
auth_strategy=None, insecure=False,
timeout=None, proxy_tenant_id=None,
proxy_token=None, region_name=None,
endpoint_type='publicURL', service_type=None,
timings=False):
super(TroveHTTPClient, self).__init__(timeout=timeout)
self.username = user
self.password = password
self.tenant = tenant
if auth_url:
self.auth_url = auth_url.rstrip('/')
else:
self.auth_url = None
self.region_name = region_name
self.endpoint_type = endpoint_type
self.service_url = service_url
self.service_type = service_type
self.service_name = service_name
self.timings = timings
self.times = [] # [("item", starttime, endtime), ...]
self.auth_token = None
self.proxy_token = proxy_token
self.proxy_tenant_id = proxy_tenant_id
# httplib2 overrides
self.force_exception_to_status_code = True
self.disable_ssl_certificate_validation = insecure
auth_cls = auth.get_authenticator_cls(auth_strategy)
self.authenticator = auth_cls(self, auth_strategy,
self.auth_url, self.username,
self.password, self.tenant,
region=region_name,
service_type=service_type,
service_name=service_name,
service_url=service_url)
def get_timings(self):
return self.times
def http_log(self, args, kwargs, resp, body):
if not RDC_PP:
self.simple_log(args, kwargs, resp, body)
else:
self.pretty_log(args, kwargs, resp, body)
def simple_log(self, args, kwargs, resp, body):
if not _logger.isEnabledFor(logging.DEBUG):
return
string_parts = ['curl -i']
for element in args:
if element in ('GET', 'POST'):
string_parts.append(' -X %s' % element)
else:
string_parts.append(' %s' % element)
for element in kwargs['headers']:
header = ' -H "%s: %s"' % (element, kwargs['headers'][element])
string_parts.append(header)
_logger.debug("REQ: %s\n" % "".join(string_parts))
if 'body' in kwargs:
_logger.debug("REQ BODY: %s\n" % (kwargs['body']))
_logger.debug("RESP:%s %s\n", resp, body)
def pretty_log(self, args, kwargs, resp, body):
if not _logger.isEnabledFor(logging.DEBUG):
return
string_parts = ['curl -i']
for element in args:
if element in ('GET', 'POST'):
string_parts.append(' -X %s' % element)
else:
string_parts.append(' %s' % element)
for element in kwargs['headers']:
header = ' -H "%s: %s"' % (element, kwargs['headers'][element])
string_parts.append(header)
curl_cmd = "".join(string_parts)
_logger.debug("REQUEST:")
if 'body' in kwargs:
_logger.debug("%s -d '%s'" % (curl_cmd, kwargs['body']))
try:
req_body = json.dumps(json.loads(kwargs['body']),
sort_keys=True, indent=4)
except:
req_body = kwargs['body']
_logger.debug("BODY: %s\n" % (req_body))
else:
_logger.debug(curl_cmd)
try:
resp_body = json.dumps(json.loads(body), sort_keys=True, indent=4)
except:
resp_body = body
_logger.debug("RESPONSE HEADERS: %s" % resp)
_logger.debug("RESPONSE BODY : %s" % resp_body)
def request(self, *args, **kwargs):
kwargs.setdefault('headers', kwargs.get('headers', {}))
kwargs['headers']['User-Agent'] = self.USER_AGENT
self.morph_request(kwargs)
resp, body = super(TroveHTTPClient, self).request(*args, **kwargs)
# Save this in case anyone wants it.
self.last_response = (resp, body)
self.http_log(args, kwargs, resp, body)
if body:
try:
body = self.morph_response_body(body)
except exceptions.ResponseFormatError:
# Acceptable only if the response status is an error code.
# Otherwise it's the API or the client misbehaving.
self.raise_error_from_status(resp, None)
raise # Not accepted!
else:
body = None
if resp.status in expected_errors:
raise exceptions.from_response(resp, body)
return resp, body
def raise_error_from_status(self, resp, body):
if resp.status in expected_errors:
raise exceptions.from_response(resp, body)
def morph_request(self, kwargs):
kwargs['headers']['Accept'] = 'application/json'
kwargs['headers']['Content-Type'] = 'application/json'
if 'body' in kwargs:
kwargs['body'] = json.dumps(kwargs['body'])
def morph_response_body(self, body_string):
try:
return json.loads(body_string)
except ValueError:
raise exceptions.ResponseFormatError()
def _time_request(self, url, method, **kwargs):
start_time = time.time()
resp, body = self.request(url, method, **kwargs)
self.times.append(("%s %s" % (method, url),
start_time, time.time()))
return resp, body
def _cs_request(self, url, method, **kwargs):
def request():
kwargs.setdefault('headers', {})['X-Auth-Token'] = self.auth_token
if self.tenant:
kwargs['headers']['X-Auth-Project-Id'] = self.tenant
resp, body = self._time_request(self.service_url + url, method,
**kwargs)
return resp, body
if not self.auth_token or not self.service_url:
self.authenticate()
# Perform the request once. If we get a 401 back then it
# might be because the auth token expired, so try to
# re-authenticate and try again. If it still fails, bail.
try:
return request()
except exceptions.Unauthorized, ex:
self.authenticate()
return request()
def get(self, url, **kwargs):
return self._cs_request(url, 'GET', **kwargs)
def post(self, url, **kwargs):
return self._cs_request(url, 'POST', **kwargs)
def put(self, url, **kwargs):
return self._cs_request(url, 'PUT', **kwargs)
def delete(self, url, **kwargs):
return self._cs_request(url, 'DELETE', **kwargs)
def authenticate(self):
"""Auths the client and gets a token. May optionally set a service url.
The client will get auth errors until the authentication step
occurs. Additionally, if a service_url was not explicitly given in
the client's __init__ method, one will be obtained from the auth
service.
"""
catalog = self.authenticator.authenticate()
if self.service_url:
possible_service_url = None
else:
if self.endpoint_type == "publicURL":
possible_service_url = catalog.get_public_url()
elif self.endpoint_type == "adminURL":
possible_service_url = catalog.get_management_url()
self.authenticate_with_token(catalog.get_token(), possible_service_url)
def authenticate_with_token(self, token, service_url=None):
self.auth_token = token
if not self.service_url:
if not service_url:
raise exceptions.ServiceUrlNotGiven()
else:
self.service_url = service_url
class Dbaas(object):
"""
Top-level object to access the Rackspace Database as a Service API.
Create an instance with your creds::
>>> red = Dbaas(USERNAME, API_KEY, TENANT, AUTH_URL, SERVICE_NAME, \
SERVICE_URL)
Then call methods on its managers::
>>> red.instances.list()
...
>>> red.flavors.list()
...
&c.
"""
def __init__(self, username, api_key, tenant=None, auth_url=None,
service_type='database', service_name='trove',
service_url=None, insecure=False, auth_strategy='keystone',
region_name=None, client_cls=TroveHTTPClient):
from troveclient.versions import Versions
from troveclient.databases import Databases
from troveclient.flavors import Flavors
from troveclient.instances import Instances
from troveclient.limits import Limits
from troveclient.users import Users
from troveclient.root import Root
from troveclient.hosts import Hosts
from troveclient.quota import Quotas
from troveclient.backups import Backups
from troveclient.security_groups import SecurityGroups
from troveclient.security_groups import SecurityGroupRules
from troveclient.storage import StorageInfo
from troveclient.management import Management
from troveclient.management import MgmtFlavors
from troveclient.accounts import Accounts
from troveclient.diagnostics import DiagnosticsInterrogator
from troveclient.diagnostics import HwInfoInterrogator
self.client = client_cls(username, api_key, tenant, auth_url,
service_type=service_type,
service_name=service_name,
service_url=service_url,
insecure=insecure,
auth_strategy=auth_strategy,
region_name=region_name)
self.versions = Versions(self)
self.databases = Databases(self)
self.flavors = Flavors(self)
self.instances = Instances(self)
self.limits = Limits(self)
self.users = Users(self)
self.root = Root(self)
self.hosts = Hosts(self)
self.quota = Quotas(self)
self.backups = Backups(self)
self.security_groups = SecurityGroups(self)
self.security_group_rules = SecurityGroupRules(self)
self.storage = StorageInfo(self)
self.management = Management(self)
self.mgmt_flavor = MgmtFlavors(self)
self.accounts = Accounts(self)
self.diagnostics = DiagnosticsInterrogator(self)
self.hwinfo = HwInfoInterrogator(self)
class Mgmt(object):
def __init__(self, dbaas):
self.instances = dbaas.management
self.hosts = dbaas.hosts
self.accounts = dbaas.accounts
self.storage = dbaas.storage
self.mgmt = Mgmt(self)
def set_management_url(self, url):
self.client.management_url = url
def get_timings(self):
return self.client.get_timings()
def authenticate(self):
"""
Authenticate against the server.
This is called to perform an authentication to retrieve a token.
Returns on success; raises :exc:`exceptions.Unauthorized` if the
credentials are wrong.
"""
self.client.authenticate()
|
zenefits/sentry
|
refs/heads/master
|
tests/sentry/rules/conditions/test_event_attribute.py
|
6
|
from __future__ import absolute_import
from sentry.testutils.cases import RuleTestCase
from sentry.rules.conditions.event_attribute import (
EventAttributeCondition, MatchType
)
class EventAttributeConditionTest(RuleTestCase):
rule_cls = EventAttributeCondition
def get_event(self):
event = self.create_event(
message='hello world',
platform='php',
data={
'type': 'error',
'sentry.interfaces.Http': {
'method': 'GET',
'url': 'http://example.com',
},
'sentry.interfaces.User': {
'id': '1',
'ip_address': '127.0.0.1',
'email': 'foo@example.com',
'username': 'foo',
},
'sentry.interfaces.Exception': {
'values': [
{
'type': 'SyntaxError',
'value': 'hello world',
'stacktrace': {
'frames': [
{
'filename': 'example.php',
'module': 'example',
'context_line': 'echo "hello";',
}
]
}
},
],
},
'tags': [('environment', 'production')],
'extra': {
'foo': {
'bar': 'baz',
},
'biz': ['baz'],
'bar': 'foo',
}
},
)
return event
def test_render_label(self):
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': u'\xc3',
'value': u'\xc4',
})
assert rule.render_label() == u'An event\'s \xc3 value equals \xc4'
def test_equals(self):
event = self.get_event()
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'platform',
'value': 'php',
})
self.assertPasses(rule, event)
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'platform',
'value': 'python',
})
self.assertDoesNotPass(rule, event)
def test_does_not_equal(self):
event = self.get_event()
rule = self.get_rule({
'match': MatchType.NOT_EQUAL,
'attribute': 'platform',
'value': 'php',
})
self.assertDoesNotPass(rule, event)
rule = self.get_rule({
'match': MatchType.NOT_EQUAL,
'attribute': 'platform',
'value': 'python',
})
self.assertPasses(rule, event)
def test_starts_with(self):
event = self.get_event()
rule = self.get_rule({
'match': MatchType.STARTS_WITH,
'attribute': 'platform',
'value': 'ph',
})
self.assertPasses(rule, event)
rule = self.get_rule({
'match': MatchType.STARTS_WITH,
'attribute': 'platform',
'value': 'py',
})
self.assertDoesNotPass(rule, event)
def test_ends_with(self):
event = self.get_event()
rule = self.get_rule({
'match': MatchType.ENDS_WITH,
'attribute': 'platform',
'value': 'hp',
})
self.assertPasses(rule, event)
rule = self.get_rule({
'match': MatchType.ENDS_WITH,
'attribute': 'platform',
'value': 'thon',
})
self.assertDoesNotPass(rule, event)
def test_contains(self):
event = self.get_event()
rule = self.get_rule({
'match': MatchType.CONTAINS,
'attribute': 'platform',
'value': 'p',
})
self.assertPasses(rule, event)
rule = self.get_rule({
'match': MatchType.CONTAINS,
'attribute': 'platform',
'value': 'z',
})
self.assertDoesNotPass(rule, event)
def test_does_not_contain(self):
event = self.get_event()
rule = self.get_rule({
'match': MatchType.NOT_CONTAINS,
'attribute': 'platform',
'value': 'p',
})
self.assertDoesNotPass(rule, event)
rule = self.get_rule({
'match': MatchType.NOT_CONTAINS,
'attribute': 'platform',
'value': 'z',
})
self.assertPasses(rule, event)
def test_message(self):
event = self.get_event()
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'message',
'value': 'hello world',
})
self.assertPasses(rule, event)
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'message',
'value': 'php',
})
self.assertDoesNotPass(rule, event)
def test_environment(self):
event = self.get_event()
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'environment',
'value': 'production',
})
self.assertPasses(rule, event)
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'environment',
'value': 'staging',
})
self.assertDoesNotPass(rule, event)
def test_http_method(self):
event = self.get_event()
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'http.method',
'value': 'get',
})
self.assertPasses(rule, event)
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'http.method',
'value': 'post',
})
self.assertDoesNotPass(rule, event)
def test_http_url(self):
event = self.get_event()
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'http.url',
'value': 'http://example.com',
})
self.assertPasses(rule, event)
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'http.url',
'value': 'http://foo.com',
})
self.assertDoesNotPass(rule, event)
def test_user_id(self):
event = self.get_event()
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'user.id',
'value': '1',
})
self.assertPasses(rule, event)
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'user.id',
'value': '2',
})
self.assertDoesNotPass(rule, event)
def test_user_ip_address(self):
event = self.get_event()
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'user.ip_address',
'value': '127.0.0.1',
})
self.assertPasses(rule, event)
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'user.ip_address',
'value': '2',
})
self.assertDoesNotPass(rule, event)
def test_user_email(self):
event = self.get_event()
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'user.email',
'value': 'foo@example.com',
})
self.assertPasses(rule, event)
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'user.email',
'value': '2',
})
self.assertDoesNotPass(rule, event)
def test_user_username(self):
event = self.get_event()
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'user.username',
'value': 'foo',
})
self.assertPasses(rule, event)
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'user.username',
'value': '2',
})
self.assertDoesNotPass(rule, event)
def test_exception_type(self):
event = self.get_event()
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'exception.type',
'value': 'SyntaxError',
})
self.assertPasses(rule, event)
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'exception.type',
'value': 'TypeError',
})
self.assertDoesNotPass(rule, event)
def test_exception_value(self):
event = self.get_event()
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'exception.value',
'value': 'hello world',
})
self.assertPasses(rule, event)
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'exception.value',
'value': 'foo bar',
})
self.assertDoesNotPass(rule, event)
def test_stacktrace_filename(self):
event = self.get_event()
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'stacktrace.filename',
'value': 'example.php',
})
self.assertPasses(rule, event)
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'stacktrace.filename',
'value': 'foo.php',
})
self.assertDoesNotPass(rule, event)
def test_stacktrace_module(self):
event = self.get_event()
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'stacktrace.module',
'value': 'example',
})
self.assertPasses(rule, event)
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'stacktrace.module',
'value': 'foo',
})
self.assertDoesNotPass(rule, event)
def test_stacktrace_code(self):
event = self.get_event()
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'stacktrace.code',
'value': 'echo "hello";',
})
self.assertPasses(rule, event)
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'stacktrace.code',
'value': 'foo',
})
self.assertDoesNotPass(rule, event)
def test_extra_simple_value(self):
event = self.get_event()
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'extra.bar',
'value': 'foo',
})
self.assertPasses(rule, event)
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'extra.bar',
'value': 'bar',
})
self.assertDoesNotPass(rule, event)
def test_extra_nested_value(self):
event = self.get_event()
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'extra.foo.bar',
'value': 'baz',
})
self.assertPasses(rule, event)
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'extra.foo.bar',
'value': 'bar',
})
self.assertDoesNotPass(rule, event)
def test_extra_nested_list(self):
event = self.get_event()
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'extra.biz',
'value': 'baz',
})
self.assertPasses(rule, event)
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'extra.biz',
'value': 'bar',
})
self.assertDoesNotPass(rule, event)
def test_event_type(self):
event = self.get_event()
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'type',
'value': 'error',
})
self.assertPasses(rule, event)
rule = self.get_rule({
'match': MatchType.EQUAL,
'attribute': 'type',
'value': 'csp',
})
self.assertDoesNotPass(rule, event)
|
mkolar/pyblish-kredenc
|
refs/heads/master
|
pyblish_kredenc/plugins/archive/select_all.py
|
2
|
import pyblish.api
@pyblish.api.log
class SelectAll(pyblish.api.Selector):
"""
"""
hosts = ['maya', 'modo']
version = (0, 1, 0)
def process_context(self, context):
"""
"""
instance = context.create_instance(name='all')
instance.set_data('family', value='all')
|
AMechler/AliPhysics
|
refs/heads/master
|
PWGJE/EMCALJetTasks/Tracks/analysis/base/FileHandler.py
|
41
|
#**************************************************************************
#* Copyright(c) 1998-2014, ALICE Experiment at CERN, All rights reserved. *
#* *
#* Author: The ALICE Off-line Project. *
#* Contributors are mentioned in the code where appropriate. *
#* *
#* Permission to use, copy, modify and distribute this software and its *
#* documentation strictly for non-commercial purposes is hereby granted *
#* without fee, provided that the above copyright notice appears in all *
#* copies and that both the copyright notice and this permission notice *
#* appear in the supporting documentation. The authors make no claims *
#* about the suitability of this software for any purpose. It is *
#* provided "as is" without express or implied warranty. *
#**************************************************************************
from ROOT import TFile,TIter,TObject,gDirectory,gROOT
from PWGJE.EMCALJetTasks.Tracks.analysis.base.DataSet import DataSet
from PWGJE.EMCALJetTasks.Tracks.analysis.base.FileResults import ResultData
from PWGJE.EMCALJetTasks.Tracks.analysis.base.struct.ParticleTHnSparse import ParticleTHnSparse
from PWGJE.EMCALJetTasks.Tracks.analysis.base.struct.DataContainerFactory import DataContainerFactory
class FileReader(object):
class FileReaderException(Exception):
"""
Exception class handling root files which are
either not found or not readable.
"""
def __init__(self, filename):
"""
Constructor, assigning the name of the file which
failed to be read.
"""
self.filename = filename
def __str__(self):
"""
Create string representation of the error message
"""
return "Could not open file %s" %(self.filename)
def __init__(self, filename, isMC = False):
"""
Initialise file reader with name of the file to read
"""
self.__filename = filename
self.__directory = None
self.__isMC = isMC
self.__isReadWeights = False
self.__weightlist = None
self.__datafactory = DataContainerFactory("new")
self._histlist = "histosPtEMCalTriggerHistograms"
self._trackCutsTag = ""
def SetHistList(self, histlist):
self._histlist = histlist
def SetTrackCuts(self, tc):
self._trackCutsTag = tc
def SetReadWeights(self):
"""
Read also histograms for the weight calculation
"""
self.__isReadWeights = True
def GetDataFormat(self):
return "old" if self._histlist == "histosPtEMCalTriggerHistograms" else "new"
def GetWeightHistograms(self):
"""
Access to weight histograms
"""
return self.__weightlist
def SetDirectory(self, dirname):
"""
Set directory inside the rootfile
"""
self.__directory = dirname
def ReadFile(self):
"""
Read rootfile and create a ResultData structure with all the histograms sorted according
to trigger classes. Raises a FileReaderException if the file can't be opened, doesn't contain
the directory or list, or has an empty histogram list
"""
filecontent = self.__ReadHistList()
if self.__isReadWeights:
self.__weightlist = filecontent["weights"]
hlist = filecontent["spectra"]
if not hlist.GetEntries():
raise self.FileReaderException("Empty list of histograms in file %s" %(self.__filename))
result = ResultData("result")
# build list of histograms and extract trigger names
histnames = []
for oID in range(0, hlist.GetEntries()):
histnames.append(hlist.At(oID).GetName())
triggers = []
for hname in histnames:
if not "hEventHist" in hname:
continue
triggers.append(hname.replace("hEventHist",""))
print "Found the following triggers:"
print "================================="
isFirst = True
trgstring = ""
for trg in triggers:
if isFirst:
trgstring += trg
isFirst = False
else:
trgstring += ", %s" %(trg)
print trgstring
self.__datafactory.SetDataFormat(self.GetDataFormat())
# Handle MC-truth data
if self.__isMC:
result.SetMCTruth(self.__datafactory.CreateParticleContainer(hlist.FindObject("hMCtrueParticles")))
# Add the result hists to the result container
for trigger in triggers:
eventhist = hlist.FindObject("hEventHist%s" %(trigger))
trackhist = hlist.FindObject("hTrackHist%s" %(trigger))
#eventhist.Sumw2()
#trackhist.Sumw2()
triggerdata = DataSet()
triggerdata.AddEventHistForJets(eventhist)
triggerdata.AddTrackContainer("tracksAll", self.__datafactory.CreateTrackContainer(eventhist, trackhist))
tracksWithClusters = hlist.FindObject("hTrackInAcceptanceHist%s" %(trigger))
if tracksWithClusters:
triggerdata.AddTrackContainer("tracksWithClusters", self.__datafactory.CreateTrackContainer(eventhist, tracksWithClusters))
tracksMCKine = hlist.FindObject("hMCTrackHist%s" %(trigger))
if tracksMCKine:
triggerdata.AddTrackContainer("tracksMCKineAll", self.__datafactory.CreateTrackContainer(eventhist, tracksMCKine))
tracksMCKineWithClusters = hlist.FindObject("hMCTrackInAcceptanceHist%s" %(trigger))
if tracksMCKineWithClusters:
triggerdata.AddTrackContainer("tracksMCKineWithClusters", self.__datafactory.CreateTrackContainer(eventhist, tracksMCKineWithClusters))
clusterhists = ["hClusterCalibHist","hClusterUncalibHist"]
for clust in clusterhists:
clhist = hlist.FindObject("%s%s" %(clust, trigger))
if clhist:
tag = clust.replace("hCluster","").replace("Hist","")
#clhist.Sumw2()
triggerdata.AddClusterContainer(tag, self.__datafactory.CreateClusterContainer(eventhist, clhist))
self.ProcessJets(trigger, triggerdata, hlist)
result.SetData(trigger, triggerdata)
return result
def ProcessJets(self, triggerclass, dataset, histlist):
"""
Fill jet hists to the histogram container
1. find all histograms for the given trigger class that contain the trigger class name and Jet
2. Group them according to jet pt and histogram type
"""
histiter = TIter(histlist)
histfound = histiter.Next()
histlist = []
while histfound:
histname = str(histfound.GetName())
if triggerclass in histname and "TrackJetHist" in histname:
histlist.append(histfound)
histfound = histiter.Next()
for jethist in histlist:
histname = str(jethist.GetName())
jetpt = self.__GetJetPt(histname)
dataset.AddJetSpectrum(jethist,jetpt, True if "hMC" in histname else False)
def __GetJetPt(self, histname):
start = histname.index("jetPt") + 5
ptstring = histname[start:start + 3]
return int(ptstring)
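# Sketch of the naming convention __GetJetPt relies on: for a hypothetical
# histogram name such as "hTrackJetHistjetPt100EMCJHigh", the three
# characters after "jetPt" ("100") are parsed as the jet pt cut, giving 100.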
def __ReadHistList(self):
"""
Read the list of histograms from a given rootfile
optionally the list can be within a directory (i.e. when analysing lego train output)
"""
result = {"spectra":None, "weights":None}
inputfile = TFile.Open(self.__filename)
if not inputfile or inputfile.IsZombie():
raise self.FileReaderException(self.__filename)
mydirectory = None
path = self.__filename
if self.__directory:
path += "#%s" %(self.__directory)
if not inputfile.cd(self.__directory):
inputfile.Close()
raise self.FileReaderException(path)
else:
mydirectory = gDirectory
else:
mydirectory = inputfile
path += "#"
path += "#"
rlist = mydirectory.Get("results") # old structure
if not rlist:
rlist = mydirectory.Get("TriggerTracksResults%s" %(self._trackCutsTag))
if self.__isReadWeights:
result["weights"] = {"crosssection":rlist.FindObject("fHistXsection"), "trials":rlist.FindObject("fHistTrials")}
hlist = rlist.FindObject(self._histlist)
inputfile.Close()
if not hlist:
raise self.FileReaderException("%s/%s" %(path, self._histlist))
result["spectra"] = hlist
return result
class LegoTrainFileReader(FileReader):
"""
File reader adapted to the file format in the lego train
"""
def __init__(self, filename, trackCuts = "standard", isMC = False, isNew = True):
"""
Initialise file reader with filename and set the directory according to the definition in
the lego train
"""
FileReader.__init__(self, filename, isMC)
self.SetDirectory("PtEMCalTriggerTask%s" %(trackCuts))
self.SetTrackCuts(trackCuts)
if isNew:
self.SetHistList("histosptemcaltriggertask%s" %(trackCuts))
class ResultStructureReader(object):
def __init__(self, filename):
self.__filename = filename
def ReadFile(self):
return ResultData.BuildFromRootFile(self.__filename, "read")
def TestFileReader(filename):
"""
Test procedure for the lego train file reader
"""
testreader = LegoTrainFileReader(filename)
return testreader.ReadFile()
|
tarzasai/Flexget
|
refs/heads/develop
|
flexget/plugins/input/tail.py
|
3
|
from __future__ import unicode_literals, division, absolute_import
from builtins import * # pylint: disable=unused-import, redefined-builtin
import io
import os
import re
import logging
from sqlalchemy import Column, Integer, Unicode
from flexget import options, plugin
from flexget.db_schema import versioned_base
from flexget.entry import Entry
from flexget.event import event
from flexget.manager import Session
log = logging.getLogger('tail')
Base = versioned_base('tail', 0)
class TailPosition(Base):
__tablename__ = 'tail'
id = Column(Integer, primary_key=True)
task = Column(Unicode)
filename = Column(Unicode)
position = Column(Integer)
class InputTail(object):
"""
Parse any text for entries using regular expression.
::
file: <file>
entry:
<field>: <regexp to match value>
format:
<field>: <python string formatting>
Note: each entry must have at least two fields, title and url
You may wish to specify encoding used by file so file can be properly
decoded. List of encodings
at http://docs.python.org/library/codecs.html#standard-encodings.
Example::
tail:
file: ~/irclogs/some/log
entry:
title: 'TITLE: (.*) URL:'
url: 'URL: (.*)'
encoding: utf8
"""
schema = {
'type': 'object',
'properties': {
'file': {'type': 'string', 'format': 'file'},
'encoding': {'type': 'string'},
'entry': {
'type': 'object',
'properties': {
'url': {'type': 'string', 'format': 'regex'},
'title': {'type': 'string', 'format': 'regex'}
},
'required': ['url', 'title']
},
'format': {
'type': 'object',
'additionalProperties': {'type': 'string'}
}
},
'required': ['file', 'entry'],
'additionalProperties': False
}
def format_entry(self, entry, d):
for k, v in d.items():
entry[k] = v % entry
def on_task_input(self, task, config):
# Let details plugin know that it is ok if this task doesn't produce any entries
task.no_entries_ok = True
filename = os.path.expanduser(config['file'])
encoding = config.get('encoding', 'utf-8')
with Session() as session:
db_pos = (session.query(TailPosition).
filter(TailPosition.task == task.name).filter(TailPosition.filename == filename).first())
if db_pos:
last_pos = db_pos.position
else:
last_pos = 0
with io.open(filename, 'r', encoding=encoding, errors='replace') as file:
if task.options.tail_reset == filename or task.options.tail_reset == task.name:
if last_pos == 0:
log.info('Task %s tail position is already zero' % task.name)
else:
log.info('Task %s tail position (%s) reset to zero' % (task.name, last_pos))
last_pos = 0
if os.path.getsize(filename) < last_pos:
log.info('File size is smaller than in previous execution, resetting to beginning of the file')
last_pos = 0
file.seek(last_pos)
log.debug('continuing from last position %s' % last_pos)
entry_config = config.get('entry')
format_config = config.get('format', {})
# keep track what fields have been found
used = {}
entries = []
entry = Entry()
# now parse text
for line in file:
if not line:
break
for field, regexp in entry_config.items():
# log.debug('search field: %s regexp: %s' % (field, regexp))
match = re.search(regexp, line)
if match:
# check if used field detected, in such case start with new entry
if field in used:
if entry.isvalid():
log.info('Found field %s again before entry was completed. \
Adding current incomplete, but valid entry and moving to next.' % field)
self.format_entry(entry, format_config)
entries.append(entry)
else:
log.info(
'Invalid data, entry field %s is already found once. Ignoring entry.' % field)
# start new entry
entry = Entry()
used = {}
# add field to entry
entry[field] = match.group(1)
used[field] = True
log.debug('found field: %s value: %s' % (field, entry[field]))
# if all fields have been found
if len(used) == len(entry_config):
# check that entry has at least title and url
if not entry.isvalid():
log.info('Invalid data, constructed entry is missing mandatory fields (title or url)')
else:
self.format_entry(entry, format_config)
entries.append(entry)
log.debug('Added entry %s' % entry)
# start new entry
entry = Entry()
used = {}
last_pos = file.tell()
if db_pos:
db_pos.position = last_pos
else:
session.add(TailPosition(task=task.name, filename=filename, position=last_pos))
return entries
@event('plugin.register')
def register_plugin():
plugin.register(InputTail, 'tail', api_ver=2)
@event('options.register')
def register_parser_arguments():
options.get_parser('execute').add_argument('--tail-reset', action='store', dest='tail_reset', default=False,
metavar='FILE|TASK', help='reset tail position for a file')
|
ivanalejandro0/eip-fsm-test
|
refs/heads/master
|
eip/tests.py
|
1
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import random
from pprint import pprint
from .machine import EIPMachine
from .manager import EIPManager
def debug():
"""
This shows us some details about the internals of the xworkflows object.
"""
em = EIPMachine()
print dir(em)
print
print dir(em.state)
print
pprint(list(em.state.transitions()))
print
def simple_test():
"""A simple test of the transitions/states"""
em = EIPMachine()
print em.state.title
em.start()
print em.state.title
em.fw_ok()
print em.state.title
em.eip_error()
print em.state.title
em.eip_failed()
print em.state.title
def test_eip_manager():
eip_manager = EIPManager()
if eip_manager.start():
print "EIP is started"
else:
print "EIP has failed starting - reached state:", eip_manager.status()
def test_random_paths():
"""
This runs a random sequence of valid transitions for each state until it
reaches a final state (error/on) or no more transitions can be done.
"""
em = EIPMachine()
# go randomly through available transitions
retries = 0
max_retries = 3
while retries <= max_retries:
try:
# get available transitions from here
ts = list(em.state.transitions())
state = em.state.name
print "State:", state
# Stop in case of final state reached
if state == 'error':
retries += 1
if retries <= max_retries:
print "Retrying!"
if state == 'on':
print "-"*50
print "Final state reached. ON"
break
if len(ts) == 0:
print "-"*50
print "ERROR: no available transitions. State:", state
break
# pick one transition randomly
random.shuffle(ts)
transition = ts[0].name
# do the transition
t = em.__getattribute__(transition)
t()
except Exception as e:
print "Exception caught:", repr(e)
if state == 'error':
print "-"*50
print "Final state reached. Error"
if __name__ == '__main__':
# test_random_paths()
test_eip_manager()
|
shsfre09/valijson
|
refs/heads/master
|
thirdparty/gtest-1.7.0/test/gtest_list_tests_unittest.py
|
1898
|
#!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for Google Test's --gtest_list_tests flag.
A user can ask Google Test to list all tests by specifying the
--gtest_list_tests flag. This script tests such functionality
by invoking gtest_list_tests_unittest_ (a program written with
Google Test) with the command line flags.
"""
__author__ = 'phanna@google.com (Patrick Hanna)'
import gtest_test_utils
import re
# Constants.
# The command line flag for enabling/disabling listing all tests.
LIST_TESTS_FLAG = 'gtest_list_tests'
# Path to the gtest_list_tests_unittest_ program.
EXE_PATH = gtest_test_utils.GetTestExecutablePath('gtest_list_tests_unittest_')
# The expected output when running gtest_list_tests_unittest_ with
# --gtest_list_tests
EXPECTED_OUTPUT_NO_FILTER_RE = re.compile(r"""FooDeathTest\.
Test1
Foo\.
Bar1
Bar2
DISABLED_Bar3
Abc\.
Xyz
Def
FooBar\.
Baz
FooTest\.
Test1
DISABLED_Test2
Test3
TypedTest/0\. # TypeParam = (VeryLo{245}|class VeryLo{239})\.\.\.
TestA
TestB
TypedTest/1\. # TypeParam = int\s*\*
TestA
TestB
TypedTest/2\. # TypeParam = .*MyArray<bool,\s*42>
TestA
TestB
My/TypeParamTest/0\. # TypeParam = (VeryLo{245}|class VeryLo{239})\.\.\.
TestA
TestB
My/TypeParamTest/1\. # TypeParam = int\s*\*
TestA
TestB
My/TypeParamTest/2\. # TypeParam = .*MyArray<bool,\s*42>
TestA
TestB
MyInstantiation/ValueParamTest\.
TestA/0 # GetParam\(\) = one line
TestA/1 # GetParam\(\) = two\\nlines
TestA/2 # GetParam\(\) = a very\\nlo{241}\.\.\.
TestB/0 # GetParam\(\) = one line
TestB/1 # GetParam\(\) = two\\nlines
TestB/2 # GetParam\(\) = a very\\nlo{241}\.\.\.
""")
# The expected output when running gtest_list_tests_unittest_ with
# --gtest_list_tests and --gtest_filter=Foo*.
EXPECTED_OUTPUT_FILTER_FOO_RE = re.compile(r"""FooDeathTest\.
Test1
Foo\.
Bar1
Bar2
DISABLED_Bar3
FooBar\.
Baz
FooTest\.
Test1
DISABLED_Test2
Test3
""")
# Utilities.
def Run(args):
"""Runs gtest_list_tests_unittest_ and returns the list of tests printed."""
return gtest_test_utils.Subprocess([EXE_PATH] + args,
capture_stderr=False).output
# The unit test.
class GTestListTestsUnitTest(gtest_test_utils.TestCase):
"""Tests using the --gtest_list_tests flag to list all tests."""
def RunAndVerify(self, flag_value, expected_output_re, other_flag):
"""Runs gtest_list_tests_unittest_ and verifies that it prints
the correct tests.
Args:
flag_value: value of the --gtest_list_tests flag;
None if the flag should not be present.
expected_output_re: regular expression that matches the expected
output after running the command;
other_flag: an additional flag to be passed to the command
along with --gtest_list_tests;
None if the flag should not be present.
"""
if flag_value is None:
flag = ''
flag_expression = 'not set'
elif flag_value == '0':
flag = '--%s=0' % LIST_TESTS_FLAG
flag_expression = '0'
else:
flag = '--%s' % LIST_TESTS_FLAG
flag_expression = '1'
args = [flag]
if other_flag is not None:
args += [other_flag]
output = Run(args)
if expected_output_re:
self.assert_(
expected_output_re.match(output),
('when %s is %s, the output of "%s" is "%s",\n'
'which does not match regex "%s"' %
(LIST_TESTS_FLAG, flag_expression, ' '.join(args), output,
expected_output_re.pattern)))
else:
self.assert_(
not EXPECTED_OUTPUT_NO_FILTER_RE.match(output),
('when %s is %s, the output of "%s" is "%s"'%
(LIST_TESTS_FLAG, flag_expression, ' '.join(args), output)))
def testDefaultBehavior(self):
"""Tests the behavior of the default mode."""
self.RunAndVerify(flag_value=None,
expected_output_re=None,
other_flag=None)
def testFlag(self):
"""Tests using the --gtest_list_tests flag."""
self.RunAndVerify(flag_value='0',
expected_output_re=None,
other_flag=None)
self.RunAndVerify(flag_value='1',
expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE,
other_flag=None)
def testOverrideNonFilterFlags(self):
"""Tests that --gtest_list_tests overrides the non-filter flags."""
self.RunAndVerify(flag_value='1',
expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE,
other_flag='--gtest_break_on_failure')
def testWithFilterFlags(self):
"""Tests that --gtest_list_tests takes into account the
--gtest_filter flag."""
self.RunAndVerify(flag_value='1',
expected_output_re=EXPECTED_OUTPUT_FILTER_FOO_RE,
other_flag='--gtest_filter=Foo*')
if __name__ == '__main__':
gtest_test_utils.Main()
|
scottferg/web-console
|
refs/heads/master
|
django/contrib/gis/gdal/geomtype.py
|
12
|
from django.contrib.gis.gdal.error import OGRException
#### OGRGeomType ####
class OGRGeomType(object):
"Encapulates OGR Geometry Types."
wkb25bit = -2147483648
# Dictionary of acceptable OGRwkbGeometryTypes and their string names.
_types = {0 : 'Unknown',
1 : 'Point',
2 : 'LineString',
3 : 'Polygon',
4 : 'MultiPoint',
5 : 'MultiLineString',
6 : 'MultiPolygon',
7 : 'GeometryCollection',
100 : 'None',
101 : 'LinearRing',
1 + wkb25bit: 'Point25D',
2 + wkb25bit: 'LineString25D',
3 + wkb25bit: 'Polygon25D',
4 + wkb25bit: 'MultiPoint25D',
5 + wkb25bit : 'MultiLineString25D',
6 + wkb25bit : 'MultiPolygon25D',
7 + wkb25bit : 'GeometryCollection25D',
}
# Reverse type dictionary, keyed by lower-case of the name.
_str_types = dict([(v.lower(), k) for k, v in _types.items()])
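# e.g. _str_types['point'] == 1 and _str_types['multipolygon25d'] == 6 + wkb25bit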
def __init__(self, type_input):
"Figures out the correct OGR Type based upon the input."
if isinstance(type_input, OGRGeomType):
num = type_input.num
elif isinstance(type_input, basestring):
type_input = type_input.lower()
if type_input == 'geometry': type_input='unknown'
num = self._str_types.get(type_input, None)
if num is None:
raise OGRException('Invalid OGR String Type "%s"' % type_input)
elif isinstance(type_input, int):
if not type_input in self._types:
raise OGRException('Invalid OGR Integer Type: %d' % type_input)
num = type_input
else:
raise TypeError('Invalid OGR input type given.')
# Setting the OGR geometry type number.
self.num = num
def __str__(self):
"Returns the value of the name property."
return self.name
def __eq__(self, other):
"""
Does an equivalence test on the OGR type with the given
other OGRGeomType, the short-hand string, or the integer.
"""
if isinstance(other, OGRGeomType):
return self.num == other.num
elif isinstance(other, basestring):
return self.name.lower() == other.lower()
elif isinstance(other, int):
return self.num == other
else:
return False
def __ne__(self, other):
return not (self == other)
@property
def name(self):
"Returns a short-hand string form of the OGR Geometry type."
return self._types[self.num]
@property
def django(self):
"Returns the Django GeometryField for this OGR Type."
s = self.name.replace('25D', '')
if s in ('LinearRing', 'None'):
return None
elif s == 'Unknown':
s = 'Geometry'
return s + 'Field'
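# Illustrative usage (a sketch derived from the mappings above, not part
# of the original module):
#
#   >>> OGRGeomType('Point') == 1
#   True
#   >>> OGRGeomType(2).name
#   'LineString'
#   >>> OGRGeomType('polygon').django
#   'PolygonField'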
|
agaffney/ansible
|
refs/heads/devel
|
test/integration/targets/plugin_loader/override/filter_plugins/core.py
|
147
|
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
def do_flag(myval):
return 'flagged'
class FilterModule(object):
''' Ansible core jinja2 filters '''
def filters(self):
return {
# jinja2 overrides
'flag': do_flag,
'flatten': do_flag,
}
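# Illustrative playbook usage (an assumption about how this override is
# exercised, not part of the original plugin): since both names map to
# do_flag, each expression below renders as "flagged":
#
#   - debug: msg="{{ 'anything' | flag }}"
#   - debug: msg="{{ [1, [2]] | flatten }}"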
|
aristanetworks/arista-ovs-nova
|
refs/heads/master
|
nova/tests/api/openstack/compute/test_servers.py
|
1
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010-2011 OpenStack LLC.
# Copyright 2011 Piston Cloud Computing, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import datetime
import urlparse
import uuid
import iso8601
from lxml import etree
import webob
from nova.api.openstack import compute
from nova.api.openstack.compute import ips
from nova.api.openstack.compute import servers
from nova.api.openstack.compute import views
from nova.api.openstack import extensions
from nova.api.openstack import xmlutil
from nova.compute import api as compute_api
from nova.compute import instance_types
from nova.compute import task_states
from nova.compute import vm_states
from nova import context
from nova import db
from nova.db.sqlalchemy import models
from nova import exception
from nova.network import manager
from nova.network.quantumv2 import api as quantum_api
from nova.openstack.common import cfg
from nova.openstack.common import jsonutils
from nova.openstack.common import policy as common_policy
from nova.openstack.common import rpc
from nova import policy
from nova import test
from nova.tests.api.openstack import fakes
from nova.tests import fake_network
from nova.tests.image import fake
from nova.tests import matchers
CONF = cfg.CONF
CONF.import_opt('password_length', 'nova.config')
CONF.import_opt('scheduler_topic', 'nova.config')
FAKE_UUID = fakes.FAKE_UUID
NS = "{http://docs.openstack.org/compute/api/v1.1}"
ATOMNS = "{http://www.w3.org/2005/Atom}"
XPATH_NS = {
'atom': 'http://www.w3.org/2005/Atom',
'ns': 'http://docs.openstack.org/compute/api/v1.1'
}
INSTANCE_IDS = {FAKE_UUID: 1}
def fake_gen_uuid():
return FAKE_UUID
def return_servers_by_reservation(context, reservation_id=""):
return [fakes.stub_instance(i + 1,
reservation_id=reservation_id) for i in xrange(5)]
def return_servers_by_reservation_empty(context, reservation_id=""):
return []
def return_security_group(context, instance_id, security_group_id):
pass
def instance_update(context, instance_uuid, values):
inst = fakes.stub_instance(INSTANCE_IDS.get(instance_uuid),
name=values.get('display_name'))
return (inst, inst)
def fake_compute_api(cls, req, id):
return True
class MockSetAdminPassword(object):
def __init__(self):
self.instance_id = None
self.password = None
def __call__(self, context, instance_id, password):
self.instance_id = instance_id
self.password = password
class Base64ValidationTest(test.TestCase):
def setUp(self):
super(Base64ValidationTest, self).setUp()
self.ext_mgr = extensions.ExtensionManager()
self.ext_mgr.extensions = {}
self.controller = servers.Controller(self.ext_mgr)
def test_decode_base64(self):
value = "A random string"
result = self.controller._decode_base64(base64.b64encode(value))
self.assertEqual(result, value)
def test_decode_base64_binary(self):
value = "\x00\x12\x75\x99"
result = self.controller._decode_base64(base64.b64encode(value))
self.assertEqual(result, value)
def test_decode_base64_whitespace(self):
value = "A random string"
encoded = base64.b64encode(value)
white = "\n \n%s\t%s\n" % (encoded[:2], encoded[2:])
result = self.controller._decode_base64(white)
self.assertEqual(result, value)
def test_decode_base64_invalid(self):
invalid = "A random string"
result = self.controller._decode_base64(invalid)
self.assertEqual(result, None)
def test_decode_base64_illegal_bytes(self):
value = "A random string"
encoded = base64.b64encode(value)
white = ">\x01%s*%s()" % (encoded[:2], encoded[2:])
result = self.controller._decode_base64(white)
self.assertEqual(result, None)
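# Contract inferred from the assertions above (not stated in the original
# suite): _decode_base64 tolerates embedded whitespace, but returns None
# when the payload has an invalid length or contains non-base64 bytes.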
class QuantumV2Subclass(quantum_api.API):
"""Used to ensure that API handles subclasses properly."""
pass
class ServersControllerTest(test.TestCase):
def setUp(self):
super(ServersControllerTest, self).setUp()
self.flags(verbose=True, use_ipv6=False)
fakes.stub_out_rate_limiting(self.stubs)
fakes.stub_out_key_pair_funcs(self.stubs)
fake.stub_out_image_service(self.stubs)
return_server = fakes.fake_instance_get()
return_servers = fakes.fake_instance_get_all_by_filters()
self.stubs.Set(db, 'instance_get_all_by_filters',
return_servers)
self.stubs.Set(db, 'instance_get_by_uuid',
return_server)
self.stubs.Set(db, 'instance_get_all_by_project',
return_servers)
self.stubs.Set(db, 'instance_add_security_group',
return_security_group)
self.stubs.Set(db, 'instance_update_and_get_original',
instance_update)
self.ext_mgr = extensions.ExtensionManager()
self.ext_mgr.extensions = {}
self.controller = servers.Controller(self.ext_mgr)
self.ips_controller = ips.Controller()
policy.reset()
policy.init()
fake_network.stub_out_nw_api_get_instance_nw_info(self.stubs,
spectacular=True)
def test_can_check_loaded_extensions(self):
self.ext_mgr.extensions = {'os-fake': None}
self.assertTrue(self.controller.ext_mgr.is_loaded('os-fake'))
self.assertFalse(self.controller.ext_mgr.is_loaded('os-not-loaded'))
def test_requested_networks_prefix(self):
uuid = 'br-00000000-0000-0000-0000-000000000000'
requested_networks = [{'uuid': uuid}]
res = self.controller._get_requested_networks(requested_networks)
self.assertTrue((uuid, None) in res)
def test_requested_networks_quantumv2_enabled_with_port(self):
self.flags(network_api_class='nova.network.quantumv2.api.API')
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'port': port}]
res = self.controller._get_requested_networks(requested_networks)
self.assertEquals(res, [(None, None, port)])
def test_requested_networks_quantumv2_enabled_with_network(self):
self.flags(network_api_class='nova.network.quantumv2.api.API')
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
requested_networks = [{'uuid': network}]
res = self.controller._get_requested_networks(requested_networks)
self.assertEquals(res, [(network, None, None)])
def test_requested_networks_quantumv2_enabled_with_network_and_port(self):
self.flags(network_api_class='nova.network.quantumv2.api.API')
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'uuid': network, 'port': port}]
res = self.controller._get_requested_networks(requested_networks)
self.assertEquals(res, [(None, None, port)])
def test_requested_networks_quantumv2_disabled_with_port(self):
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'port': port}]
self.assertRaises(
webob.exc.HTTPBadRequest,
self.controller._get_requested_networks,
requested_networks)
def test_requested_networks_api_enabled_with_v2_subclass(self):
self.flags(network_api_class='nova.network.quantumv2.api.API')
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'uuid': network, 'port': port}]
res = self.controller._get_requested_networks(requested_networks)
self.assertEquals(res, [(None, None, port)])
def test_requested_networks_quantumv2_subclass_with_port(self):
cls = 'nova.tests.api.openstack.compute.test_servers.QuantumV2Subclass'
self.flags(network_api_class=cls)
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'port': port}]
res = self.controller._get_requested_networks(requested_networks)
self.assertEquals(res, [(None, None, port)])
def test_get_server_by_uuid(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
res_dict = self.controller.show(req, FAKE_UUID)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
def test_unique_host_id(self):
"""Create two servers with the same host and different
project_ids and check that the hostIds are unique."""
def return_instance_with_host(self, *args):
project_id = str(uuid.uuid4())
return fakes.stub_instance(id=1, uuid=FAKE_UUID,
project_id=project_id,
host='fake_host')
self.stubs.Set(db, 'instance_get_by_uuid',
return_instance_with_host)
self.stubs.Set(db, 'instance_get',
return_instance_with_host)
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
server1 = self.controller.show(req, FAKE_UUID)
server2 = self.controller.show(req, FAKE_UUID)
self.assertNotEqual(server1['server']['hostId'],
server2['server']['hostId'])
def test_get_server_by_id(self):
self.flags(use_ipv6=True)
image_bookmark = "http://localhost/fake/images/10"
flavor_bookmark = "http://localhost/fake/flavors/1"
uuid = FAKE_UUID
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % uuid)
res_dict = self.controller.show(req, uuid)
expected_server = {
"server": {
"id": uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": 0,
"name": "server1",
"status": "BUILD",
"accessIPv4": "",
"accessIPv6": "",
"hostId": '',
"image": {
"id": "10",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100'},
{'version': 6, 'addr': '2001:db8:0:1::1'}
]
},
"metadata": {
"seq": "1",
},
"links": [
{
"rel": "self",
"href": "http://localhost/v2/fake/servers/%s" % uuid,
},
{
"rel": "bookmark",
"href": "http://localhost/fake/servers/%s" % uuid,
},
],
}
}
self.assertThat(res_dict, matchers.DictMatches(expected_server))
def test_get_server_with_active_status_by_id(self):
image_bookmark = "http://localhost/fake/images/10"
flavor_bookmark = "http://localhost/fake/flavors/1"
new_return_server = fakes.fake_instance_get(
vm_state=vm_states.ACTIVE, progress=100)
self.stubs.Set(db, 'instance_get_by_uuid', new_return_server)
uuid = FAKE_UUID
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % uuid)
res_dict = self.controller.show(req, uuid)
expected_server = {
"server": {
"id": uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": 100,
"name": "server1",
"status": "ACTIVE",
"accessIPv4": "",
"accessIPv6": "",
"hostId": '',
"image": {
"id": "10",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100'},
{'version': 6, 'addr': '2001:db8:0:1::1'}
]
},
"metadata": {
"seq": "1",
},
"links": [
{
"rel": "self",
"href": "http://localhost/v2/fake/servers/%s" % uuid,
},
{
"rel": "bookmark",
"href": "http://localhost/fake/servers/%s" % uuid,
},
],
}
}
self.assertThat(res_dict, matchers.DictMatches(expected_server))
def test_get_server_with_id_image_ref_by_id(self):
image_ref = "10"
image_bookmark = "http://localhost/fake/images/10"
flavor_id = "1"
flavor_bookmark = "http://localhost/fake/flavors/1"
new_return_server = fakes.fake_instance_get(
vm_state=vm_states.ACTIVE, image_ref=image_ref,
flavor_id=flavor_id, progress=100)
self.stubs.Set(db, 'instance_get_by_uuid', new_return_server)
uuid = FAKE_UUID
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % uuid)
res_dict = self.controller.show(req, uuid)
expected_server = {
"server": {
"id": uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": 100,
"name": "server1",
"status": "ACTIVE",
"accessIPv4": "",
"accessIPv6": "",
"hostId": '',
"image": {
"id": "10",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100'},
{'version': 6, 'addr': '2001:db8:0:1::1'}
]
},
"metadata": {
"seq": "1",
},
"links": [
{
"rel": "self",
"href": "http://localhost/v2/fake/servers/%s" % uuid,
},
{
"rel": "bookmark",
"href": "http://localhost/fake/servers/%s" % uuid,
},
],
}
}
self.assertThat(res_dict, matchers.DictMatches(expected_server))
def test_get_server_addresses_from_cache(self):
pub0 = ('172.19.0.1', '172.19.0.2',)
pub1 = ('1.2.3.4',)
pub2 = ('b33f::fdee:ddff:fecc:bbaa',)
priv0 = ('192.168.0.3', '192.168.0.4',)
def _ip(ip):
return {'address': ip, 'type': 'fixed'}
nw_cache = [
{'address': 'aa:aa:aa:aa:aa:aa',
'id': 1,
'network': {'bridge': 'br0',
'id': 1,
'label': 'public',
'subnets': [{'cidr': '172.19.0.0/24',
'ips': [_ip(ip) for ip in pub0]},
{'cidr': '1.2.3.0/16',
'ips': [_ip(ip) for ip in pub1]},
{'cidr': 'b33f::/64',
'ips': [_ip(ip) for ip in pub2]}]}},
{'address': 'bb:bb:bb:bb:bb:bb',
'id': 2,
'network': {'bridge': 'br1',
'id': 2,
'label': 'private',
'subnets': [{'cidr': '192.168.0.0/24',
'ips': [_ip(ip) for ip in priv0]}]}}]
return_server = fakes.fake_instance_get(nw_cache=nw_cache)
self.stubs.Set(db, 'instance_get_by_uuid', return_server)
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s/ips' % FAKE_UUID)
res_dict = self.ips_controller.index(req, FAKE_UUID)
expected = {
'addresses': {
'private': [
{'version': 4, 'addr': '192.168.0.3'},
{'version': 4, 'addr': '192.168.0.4'},
],
'public': [
{'version': 4, 'addr': '172.19.0.1'},
{'version': 4, 'addr': '172.19.0.2'},
{'version': 4, 'addr': '1.2.3.4'},
{'version': 6, 'addr': 'b33f::fdee:ddff:fecc:bbaa'},
],
},
}
self.assertThat(res_dict, matchers.DictMatches(expected))
def test_get_server_addresses_nonexistent_network(self):
url = '/v2/fake/servers/%s/ips/network_0' % FAKE_UUID
req = fakes.HTTPRequest.blank(url)
self.assertRaises(webob.exc.HTTPNotFound, self.ips_controller.show,
req, FAKE_UUID, 'network_0')
def test_get_server_addresses_nonexistent_server(self):
def fake_instance_get(*args, **kwargs):
raise exception.InstanceNotFound()
self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get)
server_id = str(uuid.uuid4())
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s/ips' % server_id)
self.assertRaises(webob.exc.HTTPNotFound,
self.ips_controller.index, req, server_id)
def test_get_server_list_with_reservation_id(self):
self.stubs.Set(db, 'instance_get_all_by_reservation',
return_servers_by_reservation)
req = fakes.HTTPRequest.blank('/v2/fake/servers?reservation_id=foo')
res_dict = self.controller.index(req)
i = 0
for s in res_dict['servers']:
self.assertEqual(s.get('name'), 'server%d' % (i + 1))
i += 1
def test_get_server_list_with_reservation_id_empty(self):
self.stubs.Set(db, 'instance_get_all_by_reservation',
return_servers_by_reservation_empty)
req = fakes.HTTPRequest.blank('/v2/fake/servers/detail?'
'reservation_id=foo')
res_dict = self.controller.detail(req)
i = 0
for s in res_dict['servers']:
self.assertEqual(s.get('name'), 'server%d' % (i + 1))
i += 1
def test_get_server_list_with_reservation_id_details(self):
self.stubs.Set(db, 'instance_get_all_by_reservation',
return_servers_by_reservation)
req = fakes.HTTPRequest.blank('/v2/fake/servers/detail?'
'reservation_id=foo')
res_dict = self.controller.detail(req)
i = 0
for s in res_dict['servers']:
self.assertEqual(s.get('name'), 'server%d' % (i + 1))
i += 1
def test_get_server_list(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers')
res_dict = self.controller.index(req)
self.assertEqual(len(res_dict['servers']), 5)
for i, s in enumerate(res_dict['servers']):
self.assertEqual(s['id'], fakes.get_fake_uuid(i))
self.assertEqual(s['name'], 'server%d' % (i + 1))
self.assertEqual(s.get('image', None), None)
expected_links = [
{
"rel": "self",
"href": "http://localhost/v2/fake/servers/%s" % s['id'],
},
{
"rel": "bookmark",
"href": "http://localhost/fake/servers/%s" % s['id'],
},
]
self.assertEqual(s['links'], expected_links)
def test_get_servers_with_limit(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers?limit=3')
res_dict = self.controller.index(req)
servers = res_dict['servers']
self.assertEqual([s['id'] for s in servers],
[fakes.get_fake_uuid(i) for i in xrange(len(servers))])
servers_links = res_dict['servers_links']
self.assertEqual(servers_links[0]['rel'], 'next')
href_parts = urlparse.urlparse(servers_links[0]['href'])
self.assertEqual('/v2/fake/servers', href_parts.path)
params = urlparse.parse_qs(href_parts.query)
expected_params = {'limit': ['3'],
'marker': [fakes.get_fake_uuid(2)]}
self.assertThat(params, matchers.DictMatches(expected_params))
def test_get_servers_with_limit_bad_value(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers?limit=aaa')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index, req)
def test_get_server_details_with_limit(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers/detail?limit=3')
res = self.controller.detail(req)
servers = res['servers']
self.assertEqual([s['id'] for s in servers],
[fakes.get_fake_uuid(i) for i in xrange(len(servers))])
servers_links = res['servers_links']
self.assertEqual(servers_links[0]['rel'], 'next')
href_parts = urlparse.urlparse(servers_links[0]['href'])
self.assertEqual('/v2/fake/servers', href_parts.path)
params = urlparse.parse_qs(href_parts.query)
expected = {'limit': ['3'], 'marker': [fakes.get_fake_uuid(2)]}
self.assertThat(params, matchers.DictMatches(expected))
def test_get_server_details_with_limit_bad_value(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers/detail?limit=aaa')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.detail, req)
def test_get_server_details_with_limit_and_other_params(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers/detail'
'?limit=3&blah=2:t')
res = self.controller.detail(req)
servers = res['servers']
self.assertEqual([s['id'] for s in servers],
[fakes.get_fake_uuid(i) for i in xrange(len(servers))])
servers_links = res['servers_links']
self.assertEqual(servers_links[0]['rel'], 'next')
href_parts = urlparse.urlparse(servers_links[0]['href'])
self.assertEqual('/v2/fake/servers', href_parts.path)
params = urlparse.parse_qs(href_parts.query)
expected = {'limit': ['3'], 'blah': ['2:t'],
'marker': [fakes.get_fake_uuid(2)]}
self.assertThat(params, matchers.DictMatches(expected))
def test_get_servers_with_too_big_limit(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers?limit=30')
res_dict = self.controller.index(req)
self.assertTrue('servers_links' not in res_dict)
def test_get_servers_with_bad_limit(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers?limit=asdf')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index, req)
def test_get_servers_with_marker(self):
url = '/v2/fake/servers?marker=%s' % fakes.get_fake_uuid(2)
req = fakes.HTTPRequest.blank(url)
servers = self.controller.index(req)['servers']
self.assertEqual([s['name'] for s in servers], ["server4", "server5"])
def test_get_servers_with_limit_and_marker(self):
url = '/v2/fake/servers?limit=2&marker=%s' % fakes.get_fake_uuid(1)
req = fakes.HTTPRequest.blank(url)
servers = self.controller.index(req)['servers']
self.assertEqual([s['name'] for s in servers], ['server3', 'server4'])
def test_get_servers_with_bad_marker(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers?limit=2&marker=asdf')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index, req)
def test_get_servers_with_bad_option(self):
server_uuid = str(uuid.uuid4())
def fake_get_all(compute_self, context, search_opts=None,
sort_key=None, sort_dir='desc',
limit=None, marker=None):
return [fakes.stub_instance(100, uuid=server_uuid)]
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
req = fakes.HTTPRequest.blank('/v2/fake/servers?unknownoption=whee')
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
self.assertEqual(servers[0]['id'], server_uuid)
def test_get_servers_allows_image(self):
server_uuid = str(uuid.uuid4())
def fake_get_all(compute_self, context, search_opts=None,
sort_key=None, sort_dir='desc',
limit=None, marker=None):
self.assertNotEqual(search_opts, None)
self.assertTrue('image' in search_opts)
self.assertEqual(search_opts['image'], '12345')
return [fakes.stub_instance(100, uuid=server_uuid)]
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
req = fakes.HTTPRequest.blank('/v2/fake/servers?image=12345')
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
self.assertEqual(servers[0]['id'], server_uuid)
def test_tenant_id_filter_converts_to_project_id_for_admin(self):
def fake_get_all(context, filters=None, sort_key=None,
sort_dir='desc', limit=None, marker=None):
self.assertNotEqual(filters, None)
self.assertEqual(filters['project_id'], 'fake')
self.assertFalse(filters.get('tenant_id'))
return [fakes.stub_instance(100)]
self.stubs.Set(db, 'instance_get_all_by_filters',
fake_get_all)
req = fakes.HTTPRequest.blank('/v2/fake/servers?tenant_id=fake',
use_admin_context=True)
res = self.controller.index(req)
self.assertTrue('servers' in res)
def test_admin_restricted_tenant(self):
def fake_get_all(context, filters=None, sort_key=None,
sort_dir='desc', limit=None, marker=None):
self.assertNotEqual(filters, None)
self.assertEqual(filters['project_id'], 'fake')
return [fakes.stub_instance(100)]
self.stubs.Set(db, 'instance_get_all_by_filters',
fake_get_all)
req = fakes.HTTPRequest.blank('/v2/fake/servers',
use_admin_context=True)
res = self.controller.index(req)
self.assertTrue('servers' in res)
def test_all_tenants_pass_policy(self):
def fake_get_all(context, filters=None, sort_key=None,
sort_dir='desc', limit=None, marker=None):
self.assertNotEqual(filters, None)
self.assertTrue('project_id' not in filters)
return [fakes.stub_instance(100)]
self.stubs.Set(db, 'instance_get_all_by_filters',
fake_get_all)
rules = {
"compute:get_all_tenants":
common_policy.parse_rule("project_id:fake"),
"compute:get_all":
common_policy.parse_rule("project_id:fake"),
}
common_policy.set_rules(common_policy.Rules(rules))
req = fakes.HTTPRequest.blank('/v2/fake/servers?all_tenants=1')
res = self.controller.index(req)
self.assertTrue('servers' in res)
def test_all_tenants_fail_policy(self):
def fake_get_all(context, filters=None, sort_key=None,
sort_dir='desc', limit=None, marker=None):
self.assertNotEqual(filters, None)
return [fakes.stub_instance(100)]
rules = {
"compute:get_all_tenants":
common_policy.parse_rule("project_id:non_fake"),
"compute:get_all":
common_policy.parse_rule("project_id:fake"),
}
common_policy.set_rules(common_policy.Rules(rules))
self.stubs.Set(db, 'instance_get_all_by_filters',
fake_get_all)
req = fakes.HTTPRequest.blank('/v2/fake/servers?all_tenants=1')
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.index, req)
def test_get_servers_allows_flavor(self):
server_uuid = str(uuid.uuid4())
def fake_get_all(compute_self, context, search_opts=None,
sort_key=None, sort_dir='desc',
limit=None, marker=None):
self.assertNotEqual(search_opts, None)
self.assertTrue('flavor' in search_opts)
# flavor is an integer ID, but arrives as a string query parameter
self.assertEqual(search_opts['flavor'], '12345')
return [fakes.stub_instance(100, uuid=server_uuid)]
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
req = fakes.HTTPRequest.blank('/v2/fake/servers?flavor=12345')
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
self.assertEqual(servers[0]['id'], server_uuid)
def test_get_servers_allows_status(self):
server_uuid = str(uuid.uuid4())
def fake_get_all(compute_self, context, search_opts=None,
sort_key=None, sort_dir='desc',
limit=None, marker=None):
self.assertNotEqual(search_opts, None)
self.assertTrue('vm_state' in search_opts)
self.assertEqual(search_opts['vm_state'], vm_states.ACTIVE)
return [fakes.stub_instance(100, uuid=server_uuid)]
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
req = fakes.HTTPRequest.blank('/v2/fake/servers?status=active')
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
self.assertEqual(servers[0]['id'], server_uuid)
def test_get_servers_invalid_status(self):
"""Test getting servers by invalid status"""
req = fakes.HTTPRequest.blank('/v2/fake/servers?status=baloney',
use_admin_context=False)
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 0)
def test_get_servers_deleted_status_as_user(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers?status=deleted',
use_admin_context=False)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.detail, req)
def test_get_servers_deleted_status_as_admin(self):
server_uuid = str(uuid.uuid4())
def fake_get_all(compute_self, context, search_opts=None,
sort_key=None, sort_dir='desc',
limit=None, marker=None):
self.assertTrue('vm_state' in search_opts)
self.assertEqual(search_opts['vm_state'], 'deleted')
return [fakes.stub_instance(100, uuid=server_uuid)]
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
req = fakes.HTTPRequest.blank('/v2/fake/servers?status=deleted',
use_admin_context=True)
servers = self.controller.detail(req)['servers']
self.assertEqual(len(servers), 1)
self.assertEqual(servers[0]['id'], server_uuid)
def test_get_servers_allows_name(self):
server_uuid = str(uuid.uuid4())
def fake_get_all(compute_self, context, search_opts=None,
sort_key=None, sort_dir='desc',
limit=None, marker=None):
self.assertNotEqual(search_opts, None)
self.assertTrue('name' in search_opts)
self.assertEqual(search_opts['name'], 'whee.*')
return [fakes.stub_instance(100, uuid=server_uuid)]
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
req = fakes.HTTPRequest.blank('/v2/fake/servers?name=whee.*')
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
self.assertEqual(servers[0]['id'], server_uuid)
def test_get_servers_allows_changes_since(self):
server_uuid = str(uuid.uuid4())
def fake_get_all(compute_self, context, search_opts=None,
sort_key=None, sort_dir='desc',
limit=None, marker=None):
self.assertNotEqual(search_opts, None)
self.assertTrue('changes-since' in search_opts)
changes_since = datetime.datetime(2011, 1, 24, 17, 8, 1,
tzinfo=iso8601.iso8601.UTC)
self.assertEqual(search_opts['changes-since'], changes_since)
self.assertTrue('deleted' not in search_opts)
return [fakes.stub_instance(100, uuid=server_uuid)]
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
params = 'changes-since=2011-01-24T17:08:01Z'
req = fakes.HTTPRequest.blank('/v2/fake/servers?%s' % params)
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
self.assertEqual(servers[0]['id'], server_uuid)
def test_get_servers_allows_changes_since_bad_value(self):
params = 'changes-since=asdf'
req = fakes.HTTPRequest.blank('/v2/fake/servers?%s' % params)
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.index, req)
def test_get_servers_admin_filters_as_user(self):
"""Test getting servers by admin-only or unknown options when
context is not admin. Make sure the admin and unknown options
are stripped before they get to compute_api.get_all()
"""
server_uuid = str(uuid.uuid4())
def fake_get_all(compute_self, context, search_opts=None,
sort_key=None, sort_dir='desc',
limit=None, marker=None):
self.assertNotEqual(search_opts, None)
# Allowed by user
self.assertTrue('name' in search_opts)
# OSAPI converts status to vm_state
self.assertTrue('vm_state' in search_opts)
# Allowed only by admins with admin API on
self.assertFalse('ip' in search_opts)
self.assertFalse('unknown_option' in search_opts)
return [fakes.stub_instance(100, uuid=server_uuid)]
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
query_str = "name=foo&ip=10.*&status=active&unknown_option=meow"
req = fakes.HTTPRequest.blank('/v2/fake/servers?%s' % query_str)
res = self.controller.index(req)
servers = res['servers']
self.assertEqual(len(servers), 1)
self.assertEqual(servers[0]['id'], server_uuid)
def test_get_servers_admin_options_as_admin(self):
"""Test getting servers by admin-only or unknown options when
context is admin. All options should be passed
"""
server_uuid = str(uuid.uuid4())
def fake_get_all(compute_self, context, search_opts=None,
sort_key=None, sort_dir='desc',
limit=None, marker=None):
self.assertNotEqual(search_opts, None)
# Allowed by user
self.assertTrue('name' in search_opts)
# OSAPI converts status to vm_state
self.assertTrue('vm_state' in search_opts)
# Allowed only by admins with admin API on
self.assertTrue('ip' in search_opts)
self.assertTrue('unknown_option' in search_opts)
return [fakes.stub_instance(100, uuid=server_uuid)]
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
query_str = "name=foo&ip=10.*&status=active&unknown_option=meow"
req = fakes.HTTPRequest.blank('/v2/fake/servers?%s' % query_str,
use_admin_context=True)
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
self.assertEqual(servers[0]['id'], server_uuid)
def test_get_servers_admin_allows_ip(self):
"""Test getting servers by ip with admin_api enabled and
admin context
"""
server_uuid = str(uuid.uuid4())
def fake_get_all(compute_self, context, search_opts=None,
sort_key=None, sort_dir='desc',
limit=None, marker=None):
self.assertNotEqual(search_opts, None)
self.assertTrue('ip' in search_opts)
self.assertEqual(search_opts['ip'], '10\..*')
return [fakes.stub_instance(100, uuid=server_uuid)]
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
req = fakes.HTTPRequest.blank('/v2/fake/servers?ip=10\..*',
use_admin_context=True)
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
self.assertEqual(servers[0]['id'], server_uuid)
def test_get_servers_admin_allows_ip6(self):
"""Test getting servers by ip6 with admin_api enabled and
admin context
"""
server_uuid = str(uuid.uuid4())
def fake_get_all(compute_self, context, search_opts=None,
sort_key=None, sort_dir='desc',
limit=None, marker=None):
self.assertNotEqual(search_opts, None)
self.assertTrue('ip6' in search_opts)
self.assertEqual(search_opts['ip6'], 'ffff.*')
return [fakes.stub_instance(100, uuid=server_uuid)]
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
req = fakes.HTTPRequest.blank('/v2/fake/servers?ip6=ffff.*',
use_admin_context=True)
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
self.assertEqual(servers[0]['id'], server_uuid)
def test_update_server_all_attributes(self):
self.stubs.Set(db, 'instance_get',
fakes.fake_instance_get(name='server_test',
access_ipv4='0.0.0.0',
access_ipv6='beef::0123'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'server': {
'name': 'server_test',
'accessIPv4': '0.0.0.0',
'accessIPv6': 'beef::0123',
}}
req.body = jsonutils.dumps(body)
res_dict = self.controller.update(req, FAKE_UUID, body)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
self.assertEqual(res_dict['server']['name'], 'server_test')
self.assertEqual(res_dict['server']['accessIPv4'], '0.0.0.0')
self.assertEqual(res_dict['server']['accessIPv6'], 'beef::0123')
def test_update_server_invalid_xml_raises_lookup(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/xml'
# deliberately malformed XML (bogus 'TF-8' encoding) which raises LookupError
req.body = """<?xml version="1.0" encoding="TF-8"?>
<metadata
xmlns="http://docs.openstack.org/compute/api/v1.1"
key="Label"></meta>"""
res = req.get_response(fakes.wsgi_app())
self.assertEqual(res.status_int, 400)
def test_update_server_invalid_xml_raises_expat(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/xml'
# deliberately malformed XML (mismatched closing tag) which raises ExpatError
req.body = """<?xml version="1.0" encoding="UTF-8"?>
<metadata
xmlns="http://docs.openstack.org/compute/api/v1.1"
key="Label"></meta>"""
res = req.get_response(fakes.wsgi_app())
self.assertEqual(res.status_int, 400)
def test_update_server_name(self):
self.stubs.Set(db, 'instance_get',
fakes.fake_instance_get(name='server_test'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'server': {'name': 'server_test'}}
req.body = jsonutils.dumps(body)
res_dict = self.controller.update(req, FAKE_UUID, body)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
self.assertEqual(res_dict['server']['name'], 'server_test')
def test_update_server_name_too_long(self):
self.stubs.Set(db, 'instance_get',
fakes.fake_instance_get(name='server_test'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'server': {'name': 'x' * 256}}
req.body = jsonutils.dumps(body)
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
req, FAKE_UUID, body)
def test_update_server_access_ipv4(self):
self.stubs.Set(db, 'instance_get',
fakes.fake_instance_get(access_ipv4='0.0.0.0'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'server': {'accessIPv4': '0.0.0.0'}}
req.body = jsonutils.dumps(body)
res_dict = self.controller.update(req, FAKE_UUID, body)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
self.assertEqual(res_dict['server']['accessIPv4'], '0.0.0.0')
def test_update_server_access_ipv4_bad_format(self):
self.stubs.Set(db, 'instance_get',
fakes.fake_instance_get(access_ipv4='0.0.0.0'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'server': {'accessIPv4': 'bad_format'}}
req.body = jsonutils.dumps(body)
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
req, FAKE_UUID, body)
def test_update_server_access_ipv4_none(self):
self.stubs.Set(db, 'instance_get',
fakes.fake_instance_get(access_ipv4='0.0.0.0'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'server': {'accessIPv4': None}}
req.body = jsonutils.dumps(body)
res_dict = self.controller.update(req, FAKE_UUID, body)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
self.assertEqual(res_dict['server']['accessIPv4'], '')
def test_update_server_access_ipv4_blank(self):
self.stubs.Set(db, 'instance_get',
fakes.fake_instance_get(access_ipv4='0.0.0.0'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'server': {'accessIPv4': ''}}
req.body = jsonutils.dumps(body)
res_dict = self.controller.update(req, FAKE_UUID, body)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
self.assertEqual(res_dict['server']['accessIPv4'], '')
def test_update_server_access_ipv6(self):
self.stubs.Set(db, 'instance_get',
fakes.fake_instance_get(access_ipv6='beef::0123'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'server': {'accessIPv6': 'beef::0123'}}
req.body = jsonutils.dumps(body)
res_dict = self.controller.update(req, FAKE_UUID, body)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
self.assertEqual(res_dict['server']['accessIPv6'], 'beef::0123')
def test_update_server_access_ipv6_bad_format(self):
self.stubs.Set(db, 'instance_get',
fakes.fake_instance_get(access_ipv6='beef::0123'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'server': {'accessIPv6': 'bad_format'}}
req.body = jsonutils.dumps(body)
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
req, FAKE_UUID, body)
def test_update_server_access_ipv6_none(self):
self.stubs.Set(db, 'instance_get',
fakes.fake_instance_get(access_ipv6='beef::0123'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'server': {'accessIPv6': None}}
req.body = jsonutils.dumps(body)
res_dict = self.controller.update(req, FAKE_UUID, body)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
self.assertEqual(res_dict['server']['accessIPv6'], '')
def test_update_server_access_ipv6_blank(self):
self.stubs.Set(db, 'instance_get',
fakes.fake_instance_get(access_ipv6='beef::0123'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'server': {'accessIPv6': ''}}
req.body = jsonutils.dumps(body)
res_dict = self.controller.update(req, FAKE_UUID, body)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
self.assertEqual(res_dict['server']['accessIPv6'], '')
def test_update_server_adminPass_ignored(self):
inst_dict = dict(name='server_test', adminPass='bacon')
body = dict(server=inst_dict)
def server_update(context, id, params):
filtered_dict = {
'display_name': 'server_test',
}
self.assertEqual(params, filtered_dict)
filtered_dict['uuid'] = id
return filtered_dict
self.stubs.Set(db, 'instance_update', server_update)
# FIXME (comstud)
# self.stubs.Set(db, 'instance_get',
# return_server_with_attributes(name='server_test'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = "application/json"
req.body = jsonutils.dumps(body)
res_dict = self.controller.update(req, FAKE_UUID, body)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
self.assertEqual(res_dict['server']['name'], 'server_test')
def test_update_server_not_found(self):
def fake_get(*args, **kwargs):
raise exception.InstanceNotFound()
self.stubs.Set(compute_api.API, 'get', fake_get)
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'server': {'name': 'server_test'}}
req.body = jsonutils.dumps(body)
self.assertRaises(webob.exc.HTTPNotFound, self.controller.update,
req, FAKE_UUID, body)
def test_update_server_not_found_on_update(self):
def fake_update(*args, **kwargs):
raise exception.InstanceNotFound()
self.stubs.Set(compute_api.API, 'update', fake_update)
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/json'
body = {'server': {'name': 'server_test'}}
req.body = jsonutils.dumps(body)
self.assertRaises(webob.exc.HTTPNotFound, self.controller.update,
req, FAKE_UUID, body)
def test_rebuild_instance_with_access_ipv4_bad_format(self):
self.stubs.Set(db, 'instance_get_by_uuid',
fakes.fake_instance_get(vm_state=vm_states.ACTIVE))
# proper local hrefs must start with 'http://localhost/v2/'
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/fake/images/%s' % image_uuid
access_ipv4 = 'bad_format'
access_ipv6 = 'fead::1234'
body = {
'rebuild': {
'name': 'new_name',
'imageRef': image_href,
'accessIPv4': access_ipv4,
'accessIPv6': access_ipv6,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'personality': [
{
"path": "/etc/banner.txt",
"contents": "MQ==",
},
],
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers/a/action')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_rebuild, req, FAKE_UUID, body)
def test_rebuild_instance_with_blank_metadata_key(self):
self.stubs.Set(db, 'instance_get_by_uuid',
fakes.fake_instance_get(vm_state=vm_states.ACTIVE))
# proper local hrefs must start with 'http://localhost/v2/'
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/fake/images/%s' % image_uuid
access_ipv4 = '0.0.0.0'
access_ipv6 = 'fead::1234'
body = {
'rebuild': {
'name': 'new_name',
'imageRef': image_href,
'accessIPv4': access_ipv4,
'accessIPv6': access_ipv6,
'metadata': {
'': 'world',
'open': 'stack',
},
'personality': [
{
"path": "/etc/banner.txt",
"contents": "MQ==",
},
],
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers/a/action')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_rebuild, req, FAKE_UUID, body)
def test_rebuild_instance_with_metadata_key_too_long(self):
self.stubs.Set(db, 'instance_get_by_uuid',
fakes.fake_instance_get(vm_state=vm_states.ACTIVE))
# proper local hrefs must start with 'http://localhost/v2/'
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/fake/images/%s' % image_uuid
access_ipv4 = '0.0.0.0'
access_ipv6 = 'fead::1234'
body = {
'rebuild': {
'name': 'new_name',
'imageRef': image_href,
'accessIPv4': access_ipv4,
'accessIPv6': access_ipv6,
'metadata': {
('a' * 260): 'world',
'open': 'stack',
},
'personality': [
{
"path": "/etc/banner.txt",
"contents": "MQ==",
},
],
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers/a/action')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPRequestEntityTooLarge,
self.controller._action_rebuild, req, FAKE_UUID, body)
def test_rebuild_instance_with_metadata_value_too_long(self):
self.stubs.Set(db, 'instance_get_by_uuid',
fakes.fake_instance_get(vm_state=vm_states.ACTIVE))
# proper local hrefs must start with 'http://localhost/v2/'
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/fake/images/%s' % image_uuid
access_ipv4 = '0.0.0.0'
access_ipv6 = 'fead::1234'
body = {
'rebuild': {
'name': 'new_name',
'imageRef': image_href,
'accessIPv4': access_ipv4,
'accessIPv6': access_ipv6,
'metadata': {
'key1': ('a' * 260),
'open': 'stack',
},
'personality': [
{
"path": "/etc/banner.txt",
"contents": "MQ==",
},
],
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers/a/action')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPRequestEntityTooLarge,
self.controller._action_rebuild, req, FAKE_UUID, body)
def test_rebuild_instance_fails_when_min_ram_too_small(self):
# make min_ram larger than our instance ram size
def fake_get_image(self, context, image_href):
return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
name='public image', is_public=True,
status='active', properties={'key1': 'value1'},
min_ram="4096", min_disk="10")
self.stubs.Set(compute_api.API, '_get_image',
fake_get_image)
self.stubs.Set(db, 'instance_get_by_uuid',
fakes.fake_instance_get(vm_state=vm_states.ACTIVE))
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/fake/images/%s' % image_uuid
body = {
'rebuild': {
'name': 'new_name',
'imageRef': image_href,
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers/a/action')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_rebuild, req, FAKE_UUID, body)
def test_rebuild_instance_fails_when_min_disk_too_small(self):
# make min_disk larger than our instance disk size
def fake_get_image(self, context, image_href):
return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
name='public image', is_public=True,
status='active', properties={'key1': 'value1'},
min_ram="128", min_disk="100000")
self.stubs.Set(compute_api.API, '_get_image',
fake_get_image)
self.stubs.Set(db, 'instance_get_by_uuid',
fakes.fake_instance_get(vm_state=vm_states.ACTIVE))
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/fake/images/%s' % image_uuid
body = {
'rebuild': {
'name': 'new_name',
'imageRef': image_href,
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers/a/action')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_rebuild, req, FAKE_UUID, body)
def test_rebuild_instance_with_access_ipv6_bad_format(self):
self.stubs.Set(db, 'instance_get_by_uuid',
fakes.fake_instance_get(vm_state=vm_states.ACTIVE))
# proper local hrefs must start with 'http://localhost/v2/'
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/fake/images/%s' % image_uuid
access_ipv4 = '1.2.3.4'
access_ipv6 = 'bad_format'
body = {
'rebuild': {
'name': 'new_name',
'imageRef': image_href,
'accessIPv4': access_ipv4,
'accessIPv6': access_ipv6,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'personality': [
{
"path": "/etc/banner.txt",
"contents": "MQ==",
},
],
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers/a/action')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller._action_rebuild, req, FAKE_UUID, body)
def test_get_all_server_details(self):
expected_flavor = {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": 'http://localhost/fake/flavors/1',
},
],
}
expected_image = {
"id": "10",
"links": [
{
"rel": "bookmark",
"href": 'http://localhost/fake/images/10',
},
],
}
req = fakes.HTTPRequest.blank('/v2/fake/servers/detail')
res_dict = self.controller.detail(req)
for i, s in enumerate(res_dict['servers']):
self.assertEqual(s['id'], fakes.get_fake_uuid(i))
self.assertEqual(s['hostId'], '')
self.assertEqual(s['name'], 'server%d' % (i + 1))
self.assertEqual(s['image'], expected_image)
self.assertEqual(s['flavor'], expected_flavor)
self.assertEqual(s['status'], 'BUILD')
self.assertEqual(s['metadata']['seq'], str(i + 1))
def test_get_all_server_details_with_host(self):
'''
We want to make sure that if two instances are on the same host, then
they return the same hostId. If two instances are on different hosts,
they should return different hostId's. In this test, there are 5
instances - 2 on one host and 3 on another.
'''
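# Read together with test_unique_host_id above, these assertions imply
# that hostId is computed from both the host and the project_id: equal
# inputs hash to the same value, differing inputs to different values.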
def return_servers_with_host(context, *args, **kwargs):
return [fakes.stub_instance(i + 1, 'fake', 'fake', host=i % 2,
uuid=fakes.get_fake_uuid(i))
for i in xrange(5)]
self.stubs.Set(db, 'instance_get_all_by_filters',
return_servers_with_host)
req = fakes.HTTPRequest.blank('/v2/fake/servers/detail')
res_dict = self.controller.detail(req)
server_list = res_dict['servers']
host_ids = [server_list[0]['hostId'], server_list[1]['hostId']]
self.assertTrue(host_ids[0] and host_ids[1])
self.assertNotEqual(host_ids[0], host_ids[1])
for i, s in enumerate(server_list):
self.assertEqual(s['id'], fakes.get_fake_uuid(i))
self.assertEqual(s['hostId'], host_ids[i % 2])
self.assertEqual(s['name'], 'server%d' % (i + 1))
def test_delete_server_instance(self):
fakes.stub_out_instance_quota(self.stubs, 0, 10)
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'DELETE'
self.server_delete_called = False
self.stubs.Set(db, 'instance_get_by_uuid',
fakes.fake_instance_get(vm_state=vm_states.ACTIVE))
def instance_destroy_mock(*args, **kwargs):
self.server_delete_called = True
self.stubs.Set(db, 'instance_destroy', instance_destroy_mock)
self.controller.delete(req, FAKE_UUID)
self.assertEqual(self.server_delete_called, True)
def test_delete_server_instance_while_building(self):
fakes.stub_out_instance_quota(self.stubs, 0, 10)
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'DELETE'
self.server_delete_called = False
def instance_destroy_mock(*args, **kwargs):
self.server_delete_called = True
self.stubs.Set(db, 'instance_destroy', instance_destroy_mock)
self.controller.delete(req, FAKE_UUID)
self.assertEqual(self.server_delete_called, True)
def test_delete_server_instance_while_resize(self):
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'DELETE'
self.server_delete_called = False
self.stubs.Set(db, 'instance_get_by_uuid',
fakes.fake_instance_get(vm_state=vm_states.ACTIVE,
task_state=task_states.RESIZE_PREP))
def instance_destroy_mock(*args, **kwargs):
self.server_delete_called = True
self.stubs.Set(db, 'instance_destroy', instance_destroy_mock)
self.controller.delete(req, FAKE_UUID)
# Delete should be allowed in any case, even during resizing,
# because it may get stuck.
self.assertEqual(self.server_delete_called, True)
class ServerStatusTest(test.TestCase):
def setUp(self):
super(ServerStatusTest, self).setUp()
fakes.stub_out_nw_api(self.stubs)
self.ext_mgr = extensions.ExtensionManager()
self.ext_mgr.extensions = {}
self.controller = servers.Controller(self.ext_mgr)
def _get_with_state(self, vm_state, task_state=None):
self.stubs.Set(db, 'instance_get_by_uuid',
fakes.fake_instance_get(vm_state=vm_state,
task_state=task_state))
request = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
return self.controller.show(request, FAKE_UUID)
def test_active(self):
response = self._get_with_state(vm_states.ACTIVE)
self.assertEqual(response['server']['status'], 'ACTIVE')
def test_reboot(self):
response = self._get_with_state(vm_states.ACTIVE,
task_states.REBOOTING)
self.assertEqual(response['server']['status'], 'REBOOT')
def test_reboot_hard(self):
response = self._get_with_state(vm_states.ACTIVE,
task_states.REBOOTING_HARD)
self.assertEqual(response['server']['status'], 'HARD_REBOOT')
def test_rebuild(self):
response = self._get_with_state(vm_states.ACTIVE,
task_states.REBUILDING)
self.assertEqual(response['server']['status'], 'REBUILD')
def test_rebuild_error(self):
response = self._get_with_state(vm_states.ERROR)
self.assertEqual(response['server']['status'], 'ERROR')
def test_resize(self):
response = self._get_with_state(vm_states.ACTIVE,
task_states.RESIZE_PREP)
self.assertEqual(response['server']['status'], 'RESIZE')
def test_verify_resize(self):
response = self._get_with_state(vm_states.RESIZED, None)
self.assertEqual(response['server']['status'], 'VERIFY_RESIZE')
def test_revert_resize(self):
response = self._get_with_state(vm_states.RESIZED,
task_states.RESIZE_REVERTING)
self.assertEqual(response['server']['status'], 'REVERT_RESIZE')
def test_password_update(self):
response = self._get_with_state(vm_states.ACTIVE,
task_states.UPDATING_PASSWORD)
self.assertEqual(response['server']['status'], 'PASSWORD')
def test_stopped(self):
response = self._get_with_state(vm_states.STOPPED)
self.assertEqual(response['server']['status'], 'SHUTOFF')
class ServersControllerCreateTest(test.TestCase):
def setUp(self):
"""Shared implementation for tests below that create instance"""
super(ServersControllerCreateTest, self).setUp()
self.flags(verbose=True,
enable_instance_password=True)
self.instance_cache_num = 0
self.instance_cache_by_id = {}
self.instance_cache_by_uuid = {}
self.ext_mgr = extensions.ExtensionManager()
self.ext_mgr.extensions = {}
self.controller = servers.Controller(self.ext_mgr)
def instance_create(context, inst):
inst_type = instance_types.get_instance_type_by_flavor_id(3)
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
def_image_ref = 'http://localhost/images/%s' % image_uuid
self.instance_cache_num += 1
instance = {
'id': self.instance_cache_num,
'display_name': inst['display_name'] or 'test',
'uuid': FAKE_UUID,
'instance_type': dict(inst_type),
'access_ip_v4': '1.2.3.4',
'access_ip_v6': 'fead::1234',
'image_ref': inst.get('image_ref', def_image_ref),
'user_id': 'fake',
'project_id': 'fake',
'reservation_id': inst['reservation_id'],
"created_at": datetime.datetime(2010, 10, 10, 12, 0, 0),
"updated_at": datetime.datetime(2010, 11, 11, 11, 0, 0),
"config_drive": None,
"progress": 0,
"fixed_ips": [],
"task_state": "",
"vm_state": "",
}
self.instance_cache_by_id[instance['id']] = instance
self.instance_cache_by_uuid[instance['uuid']] = instance
return instance
def instance_get(context, instance_id):
"""Stub for compute/api create() pulling in instance after
scheduling
"""
return self.instance_cache_by_id[instance_id]
def rpc_call_wrapper(context, topic, msg, timeout=None):
"""Stub out the scheduler creating the instance entry"""
if (topic == CONF.scheduler_topic and
msg['method'] == 'run_instance'):
request_spec = msg['args']['request_spec']
num_instances = request_spec.get('num_instances', 1)
instances = []
for x in xrange(num_instances):
instances.append(instance_create(context,
request_spec['instance_properties']))
return instances
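
        # db.instance_update_and_get_original() returns an
        # (old_ref, new_ref) pair, so the fake hands back the cached
        # instance twice.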
def server_update(context, instance_uuid, params):
inst = self.instance_cache_by_uuid[instance_uuid]
inst.update(params)
return (inst, inst)
def fake_method(*args, **kwargs):
pass
def project_get_networks(context, user_id):
return dict(id='1', host='localhost')
def queue_get_for(context, *args):
return 'network_topic'
fakes.stub_out_rate_limiting(self.stubs)
fakes.stub_out_key_pair_funcs(self.stubs)
        fakes.stub_out_image_service(self.stubs)
fakes.stub_out_nw_api(self.stubs)
self.stubs.Set(uuid, 'uuid4', fake_gen_uuid)
self.stubs.Set(db, 'instance_add_security_group',
return_security_group)
self.stubs.Set(db, 'project_get_networks',
project_get_networks)
self.stubs.Set(db, 'instance_create', instance_create)
self.stubs.Set(db, 'instance_system_metadata_update',
fake_method)
self.stubs.Set(db, 'instance_get', instance_get)
self.stubs.Set(rpc, 'cast', fake_method)
self.stubs.Set(rpc, 'call', rpc_call_wrapper)
self.stubs.Set(db, 'instance_update_and_get_original',
server_update)
self.stubs.Set(rpc, 'queue_get_for', queue_get_for)
self.stubs.Set(manager.VlanManager, 'allocate_fixed_ip',
fake_method)
def _check_admin_pass_len(self, server_dict):
""" utility function - check server_dict for adminPass
length.
"""
self.assertEqual(CONF.password_length,
len(server_dict["adminPass"]))
def _check_admin_pass_missing(self, server_dict):
""" utility function - check server_dict for absence
of adminPass
"""
self.assertTrue("adminPass" not in server_dict)
def _test_create_instance(self):
image_uuid = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
body = dict(server=dict(
name='server_test', imageRef=image_uuid, flavorRef=2,
metadata={'hello': 'world', 'open': 'stack'},
personality={}))
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
server = self.controller.create(req, body).obj['server']
self._check_admin_pass_len(server)
self.assertEqual(FAKE_UUID, server['id'])
def test_create_server_bad_image_href(self):
image_href = 1
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': 1,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
}
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
req,
body)
def test_create_instance_invalid_negative_min(self):
self.ext_mgr.extensions = {'os-multiple-create': 'fake'}
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': -1,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
}
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
req,
body)
def test_create_instance_invalid_negative_max(self):
self.ext_mgr.extensions = {'os-multiple-create': 'fake'}
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'max_count': -1,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
}
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
req,
body)
def test_create_instance_invalid_alpha_min(self):
self.ext_mgr.extensions = {'os-multiple-create': 'fake'}
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': 'abcd',
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
}
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
req,
body)
def test_create_instance_invalid_alpha_max(self):
self.ext_mgr.extensions = {'os-multiple-create': 'fake'}
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'max_count': 'abcd',
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
}
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
req,
body)
def test_create_multiple_instances(self):
"""Test creating multiple instances but not asking for
reservation_id
"""
self.ext_mgr.extensions = {'os-multiple-create': 'fake'}
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': 2,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {'hello': 'world',
'open': 'stack'},
'personality': []
}
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, body).obj
self.assertEqual(FAKE_UUID, res["server"]["id"])
self._check_admin_pass_len(res["server"])
def test_create_multiple_instances_pass_disabled(self):
"""Test creating multiple instances but not asking for
reservation_id
"""
self.ext_mgr.extensions = {'os-multiple-create': 'fake'}
self.flags(enable_instance_password=False)
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': 2,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {'hello': 'world',
'open': 'stack'},
'personality': []
}
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, body).obj
self.assertEqual(FAKE_UUID, res["server"]["id"])
self._check_admin_pass_missing(res["server"])
def test_create_multiple_instances_resv_id_return(self):
"""Test creating multiple instances with asking for
reservation_id
"""
self.ext_mgr.extensions = {'os-multiple-create': 'fake'}
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': 2,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {'hello': 'world',
'open': 'stack'},
'personality': [],
'return_reservation_id': True
}
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, body)
reservation_id = res.get('reservation_id')
        self.assertNotEqual(reservation_id, "")
        self.assertIsNotNone(reservation_id)
        self.assertTrue(len(reservation_id) > 1)
def test_create_instance_image_ref_is_bookmark(self):
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/fake/images/%s' % image_uuid
flavor_ref = 'http://localhost/fake/flavors/3'
body = {
'server': {
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, body).obj
server = res['server']
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_image_ref_is_invalid(self):
image_uuid = 'this_is_not_a_valid_uuid'
image_href = 'http://localhost/fake/images/%s' % image_uuid
flavor_ref = 'http://localhost/fake/flavors/3'
body = {
'server': {
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, body)
def test_create_instance_no_key_pair(self):
fakes.stub_out_key_pair_funcs(self.stubs, have_key_pair=False)
self._test_create_instance()
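
    # _test_create_extra posts a minimal create request with the given
    # extra params merged into the server dict; callers stub
    # compute_api.API.create beforehand to assert how those params are
    # (or are not) passed through to the compute layer.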
def _test_create_extra(self, params, no_image=False):
image_uuid = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
server = dict(name='server_test', imageRef=image_uuid, flavorRef=2)
if no_image:
server.pop('imageRef', None)
server.update(params)
body = dict(server=server)
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
server = self.controller.create(req, body).obj['server']
def test_create_instance_with_security_group_enabled(self):
self.ext_mgr.extensions = {'os-security-groups': 'fake'}
group = 'foo'
params = {'security_groups': [{'name': group}]}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['security_group'], [group])
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_security_group_disabled(self):
group = 'foo'
params = {'security_groups': [{'name': group}]}
old_create = compute_api.API.create
def create(*args, **kwargs):
# NOTE(vish): if the security groups extension is not
# enabled, then security groups passed in
# are ignored.
self.assertEqual(kwargs['security_group'], ['default'])
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_disk_config_enabled(self):
self.ext_mgr.extensions = {'OS-DCF': 'fake'}
        # NOTE(vish): the extension converts OS-DCF:disk_config into
        #             auto_disk_config, so we are testing with
        #             the internal value
params = {'auto_disk_config': True}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['auto_disk_config'], True)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_disk_config_disabled(self):
params = {'auto_disk_config': True}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['auto_disk_config'], False)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_scheduler_hints_enabled(self):
self.ext_mgr.extensions = {'OS-SCH-HNT': 'fake'}
hints = {'a': 'b'}
params = {'scheduler_hints': hints}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['scheduler_hints'], hints)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_scheduler_hints_disabled(self):
hints = {'a': 'b'}
params = {'scheduler_hints': hints}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['scheduler_hints'], {})
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_volumes_enabled(self):
self.ext_mgr.extensions = {'os-volumes': 'fake'}
bdm = [{'device_name': 'foo'}]
params = {'block_device_mapping': bdm}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], bdm)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_volumes_enabled_no_image(self):
"""
Test that the create will fail if there is no image
and no bdms supplied in the request
"""
self.ext_mgr.extensions = {'os-volumes': 'fake'}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertNotIn('imageRef', kwargs)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, {}, no_image=True)
def test_create_instance_with_volumes_enabled_and_bdms_no_image(self):
"""
Test that the create works if there is no image supplied but
os-volumes extension is enabled and bdms are supplied
"""
self.ext_mgr.extensions = {'os-volumes': 'fake'}
bdm = [{'device_name': 'foo'}]
params = {'block_device_mapping': bdm}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], bdm)
self.assertNotIn('imageRef', kwargs)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params, no_image=True)
def test_create_instance_with_volumes_disabled(self):
bdm = [{'device_name': 'foo'}]
params = {'block_device_mapping': bdm}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], None)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_bdm_delete_on_termination(self):
self.ext_mgr.extensions = {'os-volumes': 'fake'}
bdm = [{'device_name': 'foo1', 'delete_on_termination': 1},
{'device_name': 'foo2', 'delete_on_termination': True},
{'device_name': 'foo3', 'delete_on_termination': 'invalid'},
{'device_name': 'foo4', 'delete_on_termination': 0},
{'device_name': 'foo5', 'delete_on_termination': False}]
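        # delete_on_termination values are coerced to booleans on the way
        # through: truthy inputs become True, while anything unrecognized
        # (e.g. 'invalid') is treated as False.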
        expected_bdm = [
{'device_name': 'foo1', 'delete_on_termination': True},
{'device_name': 'foo2', 'delete_on_termination': True},
{'device_name': 'foo3', 'delete_on_termination': False},
{'device_name': 'foo4', 'delete_on_termination': False},
{'device_name': 'foo5', 'delete_on_termination': False}]
params = {'block_device_mapping': bdm}
old_create = compute_api.API.create
def create(*args, **kwargs):
            self.assertEqual(kwargs['block_device_mapping'], expected_bdm)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_user_data_enabled(self):
self.ext_mgr.extensions = {'os-user-data': 'fake'}
user_data = 'fake'
params = {'user_data': user_data}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['user_data'], user_data)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_user_data_disabled(self):
user_data = 'fake'
params = {'user_data': user_data}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['user_data'], None)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_keypairs_enabled(self):
self.ext_mgr.extensions = {'os-keypairs': 'fake'}
key_name = 'green'
params = {'key_name': key_name}
old_create = compute_api.API.create
# NOTE(sdague): key pair goes back to the database,
# so we need to stub it out for tests
def key_pair_get(context, user_id, name):
return {'public_key': 'FAKE_KEY',
'fingerprint': 'FAKE_FINGERPRINT',
'name': name}
def create(*args, **kwargs):
self.assertEqual(kwargs['key_name'], key_name)
return old_create(*args, **kwargs)
self.stubs.Set(db, 'key_pair_get', key_pair_get)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_keypairs_disabled(self):
key_name = 'green'
params = {'key_name': key_name}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['key_name'], None)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_availability_zone_enabled(self):
self.ext_mgr.extensions = {'os-availability-zone': 'fake'}
availability_zone = 'fake'
params = {'availability_zone': availability_zone}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['availability_zone'], availability_zone)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_availability_zone_disabled(self):
availability_zone = 'fake'
params = {'availability_zone': availability_zone}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['availability_zone'], None)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_multiple_create_enabled(self):
self.ext_mgr.extensions = {'os-multiple-create': 'fake'}
min_count = 2
max_count = 3
params = {
'min_count': min_count,
'max_count': max_count,
}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['min_count'], 2)
self.assertEqual(kwargs['max_count'], 3)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_multiple_create_disabled(self):
min_count = 2
max_count = 3
params = {
'min_count': min_count,
'max_count': max_count,
}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['min_count'], 1)
self.assertEqual(kwargs['max_count'], 1)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_networks_enabled(self):
self.ext_mgr.extensions = {'os-networks': 'fake'}
net_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
requested_networks = [{'uuid': net_uuid}]
params = {'networks': requested_networks}
old_create = compute_api.API.create
def create(*args, **kwargs):
result = [('76fa36fc-c930-4bf3-8c8a-ea2a2420deb6', None)]
self.assertEqual(kwargs['requested_networks'], result)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_networks_disabled(self):
self.ext_mgr.extensions = {}
net_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
requested_networks = [{'uuid': net_uuid}]
params = {'networks': requested_networks}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['requested_networks'], None)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_access_ip(self):
# proper local hrefs must start with 'http://localhost/v2/'
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/fake/images/%s' % image_uuid
flavor_ref = 'http://localhost/fake/flavors/3'
access_ipv4 = '1.2.3.4'
access_ipv6 = 'fead::1234'
body = {
'server': {
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'accessIPv4': access_ipv4,
'accessIPv6': access_ipv6,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'personality': [
{
"path": "/etc/banner.txt",
"contents": "MQ==",
},
],
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, body).obj
server = res['server']
self._check_admin_pass_len(server)
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_with_access_ip_pass_disabled(self):
# test with admin passwords disabled See lp bug 921814
self.flags(enable_instance_password=False)
# proper local hrefs must start with 'http://localhost/v2/'
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/fake/images/%s' % image_uuid
flavor_ref = 'http://localhost/fake/flavors/3'
access_ipv4 = '1.2.3.4'
access_ipv6 = 'fead::1234'
body = {
'server': {
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'accessIPv4': access_ipv4,
'accessIPv6': access_ipv6,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'personality': [
{
"path": "/etc/banner.txt",
"contents": "MQ==",
},
],
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, body).obj
server = res['server']
self._check_admin_pass_missing(server)
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_bad_format_access_ip_v4(self):
# proper local hrefs must start with 'http://localhost/v2/'
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/fake/images/%s' % image_uuid
flavor_ref = 'http://localhost/fake/flavors/3'
access_ipv4 = 'bad_format'
access_ipv6 = 'fead::1234'
body = {
'server': {
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'accessIPv4': access_ipv4,
'accessIPv6': access_ipv6,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'personality': [
{
"path": "/etc/banner.txt",
"contents": "MQ==",
},
],
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, body)
def test_create_instance_bad_format_access_ip_v6(self):
# proper local hrefs must start with 'http://localhost/v2/'
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/fake/images/%s' % image_uuid
flavor_ref = 'http://localhost/fake/flavors/3'
access_ipv4 = '1.2.3.4'
access_ipv6 = 'bad_format'
body = {
'server': {
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'accessIPv4': access_ipv4,
'accessIPv6': access_ipv6,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'personality': [
{
"path": "/etc/banner.txt",
"contents": "MQ==",
},
],
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, body)
def test_create_instance_name_too_long(self):
# proper local hrefs must start with 'http://localhost/v2/'
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/images/%s' % image_uuid
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'name': 'X' * 256,
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'personality': [
{
"path": "/etc/banner.txt",
"contents": "MQ==",
},
],
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, body)
def test_create_instance(self):
# proper local hrefs must start with 'http://localhost/v2/'
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/images/%s' % image_uuid
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'personality': [
{
"path": "/etc/banner.txt",
"contents": "MQ==",
},
],
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, body).obj
server = res['server']
self._check_admin_pass_len(server)
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_pass_disabled(self):
self.flags(enable_instance_password=False)
# proper local hrefs must start with 'http://localhost/v2/'
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/images/%s' % image_uuid
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'personality': [
{
"path": "/etc/banner.txt",
"contents": "MQ==",
},
],
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, body).obj
server = res['server']
self._check_admin_pass_missing(server)
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_too_much_metadata(self):
self.flags(quota_metadata_items=1)
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/images/%s' % image_uuid
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {
'hello': 'world',
'open': 'stack',
'vote': 'fiddletown',
},
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPRequestEntityTooLarge,
self.controller.create, req, body)
def test_create_instance_metadata_key_too_long(self):
self.flags(quota_metadata_items=1)
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/images/%s' % image_uuid
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {
('a' * 260): '12345',
},
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPRequestEntityTooLarge,
self.controller.create, req, body)
def test_create_instance_metadata_value_too_long(self):
self.flags(quota_metadata_items=1)
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/images/%s' % image_uuid
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {
'key1': ('a' * 260),
},
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPRequestEntityTooLarge,
self.controller.create, req, body)
def test_create_instance_metadata_key_blank(self):
self.flags(quota_metadata_items=1)
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/images/%s' % image_uuid
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {
'': '12345',
},
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, body)
def test_create_instance_invalid_key_name(self):
image_href = 'http://localhost/v2/images/2'
flavor_ref = 'http://localhost/flavors/3'
body = dict(server=dict(
name='server_test', imageRef=image_href, flavorRef=flavor_ref,
key_name='nonexistentkey'))
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, body)
def test_create_instance_valid_key_name(self):
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/flavors/3'
body = dict(server=dict(
name='server_test', imageRef=image_href, flavorRef=flavor_ref,
key_name='key'))
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, body).obj
self.assertEqual(FAKE_UUID, res["server"]["id"])
self._check_admin_pass_len(res["server"])
def test_create_instance_invalid_flavor_href(self):
image_href = 'http://localhost/v2/images/2'
flavor_ref = 'http://localhost/v2/flavors/asdf'
body = dict(server=dict(
name='server_test', imageRef=image_href, flavorRef=flavor_ref,
metadata={'hello': 'world', 'open': 'stack'},
personality={}))
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, body)
def test_create_instance_invalid_flavor_id_int(self):
image_href = 'http://localhost/v2/fake/images/2'
flavor_ref = -1
body = dict(server=dict(
name='server_test', imageRef=image_href, flavorRef=flavor_ref,
metadata={'hello': 'world', 'open': 'stack'},
personality={}))
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, body)
def test_create_instance_bad_flavor_href(self):
image_href = 'http://localhost/v2/images/2'
flavor_ref = 'http://localhost/v2/flavors/17'
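        # flavor id 17 presumably isn't defined in the stubbed flavor set,
        # so the flavor lookup fails and create() maps it to a 400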
body = dict(server=dict(
name='server_test', imageRef=image_href, flavorRef=flavor_ref,
metadata={'hello': 'world', 'open': 'stack'},
personality={}))
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, body)
def test_create_instance_with_config_drive(self):
self.ext_mgr.extensions = {'os-config-drive': 'fake'}
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/v2/fake/flavors/3'
body = {
'server': {
'name': 'config_drive_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'personality': {},
'config_drive': True,
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, body).obj
server = res['server']
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_with_config_drive_as_id(self):
self.ext_mgr.extensions = {'os-config-drive': 'fake'}
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/v2/fake/flavors/3'
body = {
'server': {
'name': 'config_drive_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'personality': {},
'config_drive': image_href,
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, body).obj
server = res['server']
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_with_bad_config_drive(self):
self.ext_mgr.extensions = {'os-config-drive': 'fake'}
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/v2/fake/flavors/3'
body = {
'server': {
'name': 'config_drive_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'personality': {},
'config_drive': 'asdf',
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, body)
def test_create_instance_without_config_drive(self):
self.ext_mgr.extensions = {'os-config-drive': 'fake'}
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/v2/fake/flavors/3'
body = {
'server': {
'name': 'config_drive_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'personality': {},
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, body).obj
server = res['server']
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_with_config_drive_disabled(self):
config_drive = [{'config_drive': 'foo'}]
params = {'config_drive': config_drive}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['config_drive'], None)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_bad_href(self):
image_href = 'asdf'
flavor_ref = 'http://localhost/v2/flavors/3'
body = dict(server=dict(
name='server_test', imageRef=image_href, flavorRef=flavor_ref,
metadata={'hello': 'world', 'open': 'stack'},
personality={}))
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, body)
def test_create_instance_local_href(self):
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/v2/flavors/3'
body = {
'server': {
'name': 'server_test',
'imageRef': image_uuid,
'flavorRef': flavor_ref,
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
res = self.controller.create(req, body).obj
server = res['server']
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_admin_pass(self):
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
body = {
'server': {
'name': 'server_test',
'imageRef': image_uuid,
'flavorRef': 3,
'adminPass': 'testpass',
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers['content-type'] = "application/json"
res = self.controller.create(req, body).obj
server = res['server']
self.assertEqual(server['adminPass'], body['server']['adminPass'])
def test_create_instance_admin_pass_pass_disabled(self):
self.flags(enable_instance_password=False)
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
body = {
'server': {
'name': 'server_test',
'imageRef': image_uuid,
'flavorRef': 3,
'adminPass': 'testpass',
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers['content-type'] = "application/json"
res = self.controller.create(req, body).obj
server = res['server']
        self.assertIn('adminPass', body['server'])
        self.assertNotIn('adminPass', server)
def test_create_instance_admin_pass_empty(self):
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
body = {
'server': {
'name': 'server_test',
'imageRef': image_uuid,
'flavorRef': 3,
'adminPass': '',
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers['content-type'] = "application/json"
# The fact that the action doesn't raise is enough validation
self.controller.create(req, body)
def test_create_instance_invalid_personality(self):
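        # Simulate the compute API raising UnicodeDecodeError on
        # undecodable personality contents; the controller should turn
        # this into a 400 rather than a 500.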
def fake_create(*args, **kwargs):
codec = 'utf8'
content = 'b25zLiINCg0KLVJpY2hhcmQgQ$$%QQmFjaA=='
start_position = 19
end_position = 20
msg = 'invalid start byte'
raise UnicodeDecodeError(codec, content, start_position,
end_position, msg)
self.stubs.Set(compute_api.API,
'create',
fake_create)
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/v2/flavors/3'
body = {
'server': {
'name': 'server_test',
'imageRef': image_uuid,
'flavorRef': flavor_ref,
'personality': [
{
"path": "/etc/banner.txt",
"contents": "b25zLiINCg0KLVJpY2hhcmQgQ$$%QQmFjaA==",
},
],
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, body)
def test_create_location(self):
        selfhref = 'http://localhost/v2/fake/servers/%s' % FAKE_UUID
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
image_href = 'http://localhost/v2/images/%s' % image_uuid
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {
'hello': 'world',
'open': 'stack',
},
'personality': [
{
"path": "/etc/banner.txt",
"contents": "MQ==",
},
],
},
}
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers['content-type'] = 'application/json'
robj = self.controller.create(req, body)
self.assertEqual(robj['Location'], selfhref)
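
    # Quota helper: stub the quota checks so that only `allowed` of
    # `quota` units of `resource` remain, then verify that create()
    # raises 413 with the expected explanation.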
def _do_test_create_instance_above_quota(self, resource, allowed, quota,
expected_msg):
fakes.stub_out_instance_quota(self.stubs, allowed, quota, resource)
image_uuid = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
body = dict(server=dict(
name='server_test', imageRef=image_uuid, flavorRef=3,
metadata={'hello': 'world', 'open': 'stack'},
personality={}))
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
        try:
            self.controller.create(req, body)
            self.fail('expected quota to be exceeded')
        except webob.exc.HTTPRequestEntityTooLarge as e:
            self.assertEquals(e.explanation, expected_msg)
def test_create_instance_above_quota_instances(self):
msg = _('Quota exceeded for instances: Requested 1, but'
' already used 10 of 10 instances')
self._do_test_create_instance_above_quota('instances', 0, 10, msg)
def test_create_instance_above_quota_ram(self):
msg = _('Quota exceeded for ram: Requested 4096, but'
' already used 8192 of 10240 ram')
self._do_test_create_instance_above_quota('ram', 2048, 10 * 1024, msg)
def test_create_instance_above_quota_cores(self):
msg = _('Quota exceeded for cores: Requested 2, but'
' already used 9 of 10 cores')
self._do_test_create_instance_above_quota('cores', 1, 10, msg)
class TestServerCreateRequestXMLDeserializer(test.TestCase):
def setUp(self):
super(TestServerCreateRequestXMLDeserializer, self).setUp()
self.deserializer = servers.CreateDeserializer()
def test_minimal_request(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test"
imageRef="1"
flavorRef="2"/>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "2",
},
}
self.assertEquals(request['body'], expected)
def test_request_with_alternate_namespace_prefix(self):
serial_request = """
<ns2:server xmlns:ns2="http://docs.openstack.org/compute/api/v2"
name="new-server-test"
imageRef="1"
flavorRef="2">
<ns2:metadata><ns2:meta key="hello">world</ns2:meta></ns2:metadata>
</ns2:server>
"""
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "2",
'metadata': {"hello": "world"},
},
}
self.assertEquals(request['body'], expected)
def test_request_with_scheduler_hints_and_alternate_namespace_prefix(self):
serial_request = """
<ns2:server xmlns:ns2="http://docs.openstack.org/compute/api/v2"
name="new-server-test"
imageRef="1"
flavorRef="2">
<ns2:metadata><ns2:meta key="hello">world</ns2:meta></ns2:metadata>
<os:scheduler_hints
xmlns:os="http://docs.openstack.org/compute/ext/scheduler-hints/api/v2">
<hypervisor>xen</hypervisor>
<near>eb999657-dd6b-464e-8713-95c532ac3b18</near>
</os:scheduler_hints>
</ns2:server>
"""
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
'OS-SCH-HNT:scheduler_hints': {
'hypervisor': ['xen'],
'near': ['eb999657-dd6b-464e-8713-95c532ac3b18']
},
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "2",
"metadata": {
"hello": "world"
}
}
}
self.assertEquals(request['body'], expected)
def test_access_ipv4(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test"
imageRef="1"
flavorRef="2"
accessIPv4="1.2.3.4"/>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "2",
"accessIPv4": "1.2.3.4",
},
}
self.assertEquals(request['body'], expected)
def test_access_ipv6(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test"
imageRef="1"
flavorRef="2"
accessIPv6="fead::1234"/>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "2",
"accessIPv6": "fead::1234",
},
}
self.assertEquals(request['body'], expected)
def test_access_ip(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test"
imageRef="1"
flavorRef="2"
accessIPv4="1.2.3.4"
accessIPv6="fead::1234"/>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "2",
"accessIPv4": "1.2.3.4",
"accessIPv6": "fead::1234",
},
}
self.assertEquals(request['body'], expected)
def test_admin_pass(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test"
imageRef="1"
flavorRef="2"
adminPass="1234"/>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "2",
"adminPass": "1234",
},
}
self.assertEquals(request['body'], expected)
def test_image_link(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test"
imageRef="http://localhost:8774/v2/images/2"
flavorRef="3"/>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
"name": "new-server-test",
"imageRef": "http://localhost:8774/v2/images/2",
"flavorRef": "3",
},
}
self.assertEquals(request['body'], expected)
def test_flavor_link(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test"
imageRef="1"
flavorRef="http://localhost:8774/v2/flavors/3"/>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "http://localhost:8774/v2/flavors/3",
},
}
self.assertEquals(request['body'], expected)
def test_empty_metadata_personality(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test"
imageRef="1"
flavorRef="2">
<metadata/>
<personality/>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "2",
"metadata": {},
"personality": [],
},
}
self.assertEquals(request['body'], expected)
def test_multiple_metadata_items(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test"
imageRef="1"
flavorRef="2">
<metadata>
<meta key="one">two</meta>
<meta key="open">snack</meta>
</metadata>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "2",
"metadata": {"one": "two", "open": "snack"},
},
}
self.assertEquals(request['body'], expected)
def test_multiple_personality_files(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test"
imageRef="1"
flavorRef="2">
<personality>
<file path="/etc/banner.txt">MQ==</file>
<file path="/etc/hosts">Mg==</file>
</personality>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "2",
"personality": [
{"path": "/etc/banner.txt", "contents": "MQ=="},
{"path": "/etc/hosts", "contents": "Mg=="},
],
},
}
self.assertThat(request['body'], matchers.DictMatches(expected))
def test_spec_request(self):
image_bookmark_link = ("http://servers.api.openstack.org/1234/"
"images/52415800-8b69-11e0-9b19-734f6f006e54")
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
imageRef="%s"
flavorRef="52415800-8b69-11e0-9b19-734f1195ff37"
name="new-server-test">
<metadata>
<meta key="My Server Name">Apache1</meta>
</metadata>
<personality>
<file path="/etc/banner.txt">Mg==</file>
</personality>
</server>""" % (image_bookmark_link)
request = self.deserializer.deserialize(serial_request)
expected = {
"server": {
"name": "new-server-test",
"imageRef": ("http://servers.api.openstack.org/1234/"
"images/52415800-8b69-11e0-9b19-734f6f006e54"),
"flavorRef": "52415800-8b69-11e0-9b19-734f1195ff37",
"metadata": {"My Server Name": "Apache1"},
"personality": [
{
"path": "/etc/banner.txt",
"contents": "Mg==",
},
],
},
}
self.assertEquals(request['body'], expected)
def test_request_with_empty_networks(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test" imageRef="1" flavorRef="1">
<networks/>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "1",
"networks": [],
}}
self.assertEquals(request['body'], expected)
def test_request_with_one_network(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test" imageRef="1" flavorRef="1">
<networks>
<network uuid="1" fixed_ip="10.0.1.12"/>
</networks>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "1",
"networks": [{"uuid": "1", "fixed_ip": "10.0.1.12"}],
}}
self.assertEquals(request['body'], expected)
def test_request_with_two_networks(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test" imageRef="1" flavorRef="1">
<networks>
<network uuid="1" fixed_ip="10.0.1.12"/>
<network uuid="2" fixed_ip="10.0.2.12"/>
</networks>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "1",
"networks": [{"uuid": "1", "fixed_ip": "10.0.1.12"},
{"uuid": "2", "fixed_ip": "10.0.2.12"}],
}}
self.assertEquals(request['body'], expected)
def test_request_with_second_network_node_ignored(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test" imageRef="1" flavorRef="1">
<networks>
<network uuid="1" fixed_ip="10.0.1.12"/>
</networks>
<networks>
<network uuid="2" fixed_ip="10.0.2.12"/>
</networks>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "1",
"networks": [{"uuid": "1", "fixed_ip": "10.0.1.12"}],
}}
self.assertEquals(request['body'], expected)
def test_request_with_one_network_missing_id(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test" imageRef="1" flavorRef="1">
<networks>
<network fixed_ip="10.0.1.12"/>
</networks>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "1",
"networks": [{"fixed_ip": "10.0.1.12"}],
}}
self.assertEquals(request['body'], expected)
def test_request_with_one_network_missing_fixed_ip(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test" imageRef="1" flavorRef="1">
<networks>
<network uuid="1"/>
</networks>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "1",
"networks": [{"uuid": "1"}],
}}
self.assertEquals(request['body'], expected)
def test_request_with_one_network_empty_id(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test" imageRef="1" flavorRef="1">
<networks>
<network uuid="" fixed_ip="10.0.1.12"/>
</networks>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "1",
"networks": [{"uuid": "", "fixed_ip": "10.0.1.12"}],
}}
self.assertEquals(request['body'], expected)
def test_request_with_one_network_empty_fixed_ip(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test" imageRef="1" flavorRef="1">
<networks>
<network uuid="1" fixed_ip=""/>
</networks>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "1",
"networks": [{"uuid": "1", "fixed_ip": ""}],
}}
self.assertEquals(request['body'], expected)
def test_request_with_networks_duplicate_ids(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test" imageRef="1" flavorRef="1">
<networks>
<network uuid="1" fixed_ip="10.0.1.12"/>
<network uuid="1" fixed_ip="10.0.2.12"/>
</networks>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "1",
"networks": [{"uuid": "1", "fixed_ip": "10.0.1.12"},
{"uuid": "1", "fixed_ip": "10.0.2.12"}],
}}
self.assertEquals(request['body'], expected)
def test_request_with_availability_zone(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test" imageRef="1" flavorRef="1"
availability_zone="some_zone:some_host">
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "1",
"availability_zone": "some_zone:some_host",
}}
self.assertEquals(request['body'], expected)
def test_request_with_multiple_create_args(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test" imageRef="1" flavorRef="1"
min_count="1" max_count="3" return_reservation_id="True">
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "1",
"min_count": "1",
"max_count": "3",
"return_reservation_id": True,
}}
self.assertEquals(request['body'], expected)
def test_request_with_disk_config(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
xmlns:OS-DCF="http://docs.openstack.org/compute/ext/disk_config/api/v1.1"
name="new-server-test" imageRef="1" flavorRef="1"
OS-DCF:diskConfig="True">
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "1",
"OS-DCF:diskConfig": True,
}}
self.assertEquals(request['body'], expected)
def test_request_with_scheduler_hints(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
xmlns:OS-SCH-HNT=
"http://docs.openstack.org/compute/ext/scheduler-hints/api/v2"
name="new-server-test" imageRef="1" flavorRef="1">
<OS-SCH-HNT:scheduler_hints>
<different_host>
7329b667-50c7-46a6-b913-cb2a09dfeee0
</different_host>
<different_host>
f31efb24-34d2-43e1-8b44-316052956a39
</different_host>
</OS-SCH-HNT:scheduler_hints>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "1",
"OS-SCH-HNT:scheduler_hints": {
"different_host": [
"7329b667-50c7-46a6-b913-cb2a09dfeee0",
"f31efb24-34d2-43e1-8b44-316052956a39",
]
}
}}
self.assertEquals(request['body'], expected)
def test_request_with_block_device_mapping(self):
serial_request = """
<server xmlns="http://docs.openstack.org/compute/api/v2"
name="new-server-test" imageRef="1" flavorRef="1">
<block_device_mapping>
<mapping volume_id="7329b667-50c7-46a6-b913-cb2a09dfeee0"
device_name="/dev/vda" virtual_name="root"
delete_on_termination="False" />
<mapping snapshot_id="f31efb24-34d2-43e1-8b44-316052956a39"
device_name="/dev/vdb" virtual_name="ephemeral0"
delete_on_termination="False" />
<mapping device_name="/dev/vdc" no_device="True" />
</block_device_mapping>
</server>"""
request = self.deserializer.deserialize(serial_request)
expected = {"server": {
"name": "new-server-test",
"imageRef": "1",
"flavorRef": "1",
"block_device_mapping": [
{
"volume_id": "7329b667-50c7-46a6-b913-cb2a09dfeee0",
"device_name": "/dev/vda",
"virtual_name": "root",
"delete_on_termination": False,
},
{
"snapshot_id": "f31efb24-34d2-43e1-8b44-316052956a39",
"device_name": "/dev/vdb",
"virtual_name": "ephemeral0",
"delete_on_termination": False,
},
{
"device_name": "/dev/vdc",
"no_device": True,
},
]
}}
self.assertEquals(request['body'], expected)
class TestAddressesXMLSerialization(test.TestCase):
index_serializer = ips.AddressesTemplate()
show_serializer = ips.NetworkTemplate()
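
    # These serializers render the fixtures to XML; the tests parse the
    # output back with etree and compare attributes element by element.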
def test_xml_declaration(self):
fixture = {
'network_2': [
{'addr': '192.168.0.1', 'version': 4},
{'addr': 'fe80::beef', 'version': 6},
],
}
output = self.show_serializer.serialize(fixture)
has_dec = output.startswith("<?xml version='1.0' encoding='UTF-8'?>")
self.assertTrue(has_dec)
def test_show(self):
fixture = {
'network_2': [
{'addr': '192.168.0.1', 'version': 4},
{'addr': 'fe80::beef', 'version': 6},
],
}
output = self.show_serializer.serialize(fixture)
root = etree.XML(output)
network = fixture['network_2']
self.assertEqual(str(root.get('id')), 'network_2')
ip_elems = root.findall('{0}ip'.format(NS))
for z, ip_elem in enumerate(ip_elems):
ip = network[z]
self.assertEqual(str(ip_elem.get('version')),
str(ip['version']))
self.assertEqual(str(ip_elem.get('addr')),
str(ip['addr']))
def test_index(self):
fixture = {
'addresses': {
'network_1': [
{'addr': '192.168.0.3', 'version': 4},
{'addr': '192.168.0.5', 'version': 4},
],
'network_2': [
{'addr': '192.168.0.1', 'version': 4},
{'addr': 'fe80::beef', 'version': 6},
],
},
}
output = self.index_serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'addresses')
addresses_dict = fixture['addresses']
network_elems = root.findall('{0}network'.format(NS))
self.assertEqual(len(network_elems), 2)
for i, network_elem in enumerate(network_elems):
network = addresses_dict.items()[i]
self.assertEqual(str(network_elem.get('id')), str(network[0]))
ip_elems = network_elem.findall('{0}ip'.format(NS))
for z, ip_elem in enumerate(ip_elems):
ip = network[1][z]
self.assertEqual(str(ip_elem.get('version')),
str(ip['version']))
self.assertEqual(str(ip_elem.get('addr')),
str(ip['addr']))
class ServersViewBuilderTest(test.TestCase):
def setUp(self):
super(ServersViewBuilderTest, self).setUp()
self.flags(use_ipv6=True)
self.instance = fakes.stub_instance(
id=1,
image_ref="5",
uuid="deadbeef-feed-edee-beef-d0ea7beefedd",
display_name="test_server",
include_fake_metadata=False)
privates = ['172.19.0.1']
publics = ['192.168.0.3']
public6s = ['b33f::fdee:ddff:fecc:bbaa']
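
        # Fake network info: a 'public' network with one IPv4 and one
        # IPv6 address, plus a 'private' network with a single IPv4
        # address.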
def nw_info(*args, **kwargs):
return [(None, {'label': 'public',
'ips': [dict(ip=ip) for ip in publics],
'ip6s': [dict(ip=ip) for ip in public6s]}),
(None, {'label': 'private',
'ips': [dict(ip=ip) for ip in privates]})]
def floaters(*args, **kwargs):
return []
fakes.stub_out_nw_api_get_instance_nw_info(self.stubs, nw_info)
fakes.stub_out_nw_api_get_floating_ips_by_fixed_address(self.stubs,
floaters)
self.uuid = self.instance['uuid']
self.view_builder = views.servers.ViewBuilder()
self.request = fakes.HTTPRequest.blank("/v2")
def test_get_flavor_valid_instance_type(self):
flavor_bookmark = "http://localhost/fake/flavors/1"
expected = {"id": "1",
"links": [{"rel": "bookmark",
"href": flavor_bookmark}]}
result = self.view_builder._get_flavor(self.request, self.instance)
self.assertEqual(result, expected)
def test_get_flavor_deleted_instance_type(self):
self.instance['instance_type'] = {}
result = self.view_builder._get_flavor(self.request, self.instance)
self.assertEqual(result, {})
def test_build_server(self):
self_link = "http://localhost/v2/fake/servers/%s" % self.uuid
bookmark_link = "http://localhost/fake/servers/%s" % self.uuid
expected_server = {
"server": {
"id": self.uuid,
"name": "test_server",
"links": [
{
"rel": "self",
"href": self_link,
},
{
"rel": "bookmark",
"href": bookmark_link,
},
],
}
}
output = self.view_builder.basic(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_with_project_id(self):
expected_server = {
"server": {
"id": self.uuid,
"name": "test_server",
"links": [
{
"rel": "self",
"href": "http://localhost/v2/fake/servers/%s" %
self.uuid,
},
{
"rel": "bookmark",
"href": "http://localhost/fake/servers/%s" % self.uuid,
},
],
}
}
output = self.view_builder.basic(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_detail(self):
image_bookmark = "http://localhost/fake/images/5"
flavor_bookmark = "http://localhost/fake/flavors/1"
self_link = "http://localhost/v2/fake/servers/%s" % self.uuid
bookmark_link = "http://localhost/fake/servers/%s" % self.uuid
expected_server = {
"server": {
"id": self.uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": 0,
"name": "test_server",
"status": "BUILD",
"accessIPv4": "",
"accessIPv6": "",
"hostId": '',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100'},
{'version': 6, 'addr': '2001:db8:0:1::1'}
]
},
"metadata": {},
"links": [
{
"rel": "self",
"href": self_link,
},
{
"rel": "bookmark",
"href": bookmark_link,
},
],
}
}
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_no_image(self):
self.instance["image_ref"] = ""
output = self.view_builder.show(self.request, self.instance)
self.assertEqual(output['server']['image'], "")
def test_build_server_detail_with_fault(self):
self.instance['vm_state'] = vm_states.ERROR
self.instance['fault'] = {
'code': 404,
'instance_uuid': self.uuid,
'message': "HTTPNotFound",
'details': "Stock details for test",
'created_at': datetime.datetime(2010, 10, 10, 12, 0, 0),
}
image_bookmark = "http://localhost/fake/images/5"
flavor_bookmark = "http://localhost/fake/flavors/1"
self_link = "http://localhost/v2/fake/servers/%s" % self.uuid
bookmark_link = "http://localhost/fake/servers/%s" % self.uuid
expected_server = {
"server": {
"id": self.uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"name": "test_server",
"status": "ERROR",
"accessIPv4": "",
"accessIPv6": "",
"hostId": '',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100'},
{'version': 6, 'addr': '2001:db8:0:1::1'}
]
},
"metadata": {},
"links": [
{
"rel": "self",
"href": self_link,
},
{
"rel": "bookmark",
"href": bookmark_link,
},
],
"fault": {
"code": 404,
"created": "2010-10-10T12:00:00Z",
"message": "HTTPNotFound",
"details": "Stock details for test",
},
}
}
self.request.context = context.RequestContext('fake', 'fake')
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_detail_with_fault_no_details_not_admin(self):
self.instance['vm_state'] = vm_states.ERROR
self.instance['fault'] = {
'code': 500,
'instance_uuid': self.uuid,
'message': "Error",
'details': 'Stock details for test',
'created_at': datetime.datetime(2010, 10, 10, 12, 0, 0),
}
expected_fault = {"code": 500,
"created": "2010-10-10T12:00:00Z",
"message": "Error"}
self.request.context = context.RequestContext('fake', 'fake')
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output['server']['fault'],
matchers.DictMatches(expected_fault))
def test_build_server_detail_with_fault_admin(self):
self.instance['vm_state'] = vm_states.ERROR
self.instance['fault'] = {
'code': 500,
'instance_uuid': self.uuid,
'message': "Error",
'details': 'Stock details for test',
'created_at': datetime.datetime(2010, 10, 10, 12, 0, 0),
}
expected_fault = {"code": 500,
"created": "2010-10-10T12:00:00Z",
"message": "Error",
'details': 'Stock details for test'}
self.request.context = context.get_admin_context()
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output['server']['fault'],
matchers.DictMatches(expected_fault))
def test_build_server_detail_with_fault_no_details_admin(self):
self.instance['vm_state'] = vm_states.ERROR
self.instance['fault'] = {
'code': 500,
'instance_uuid': self.uuid,
'message': "Error",
'details': '',
'created_at': datetime.datetime(2010, 10, 10, 12, 0, 0),
}
expected_fault = {"code": 500,
"created": "2010-10-10T12:00:00Z",
"message": "Error"}
self.request.context = context.get_admin_context()
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output['server']['fault'],
matchers.DictMatches(expected_fault))
def test_build_server_detail_with_fault_but_active(self):
self.instance['vm_state'] = vm_states.ACTIVE
self.instance['progress'] = 100
self.instance['fault'] = {
'code': 404,
'instance_uuid': self.uuid,
'message': "HTTPNotFound",
'details': "Stock details for test",
'created_at': datetime.datetime(2010, 10, 10, 12, 0, 0),
}
image_bookmark = "http://localhost/fake/images/5"
flavor_bookmark = "http://localhost/fake/flavors/1"
self_link = "http://localhost/v2/fake/servers/%s" % self.uuid
bookmark_link = "http://localhost/fake/servers/%s" % self.uuid
output = self.view_builder.show(self.request, self.instance)
self.assertFalse('fault' in output['server'])
def test_build_server_detail_active_status(self):
        #set the vm_state of the instance to active
self.instance['vm_state'] = vm_states.ACTIVE
self.instance['progress'] = 100
image_bookmark = "http://localhost/fake/images/5"
flavor_bookmark = "http://localhost/fake/flavors/1"
self_link = "http://localhost/v2/fake/servers/%s" % self.uuid
bookmark_link = "http://localhost/fake/servers/%s" % self.uuid
expected_server = {
"server": {
"id": self.uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": 100,
"name": "test_server",
"status": "ACTIVE",
"accessIPv4": "",
"accessIPv6": "",
"hostId": '',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100'},
{'version': 6, 'addr': '2001:db8:0:1::1'}
]
},
"metadata": {},
"links": [
{
"rel": "self",
"href": self_link,
},
{
"rel": "bookmark",
"href": bookmark_link,
},
],
}
}
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_detail_with_accessipv4(self):
self.instance['access_ip_v4'] = '1.2.3.4'
image_bookmark = "http://localhost/fake/images/5"
flavor_bookmark = "http://localhost/fake/flavors/1"
self_link = "http://localhost/v2/fake/servers/%s" % self.uuid
bookmark_link = "http://localhost/fake/servers/%s" % self.uuid
expected_server = {
"server": {
"id": self.uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": 0,
"name": "test_server",
"status": "BUILD",
"hostId": '',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100'},
{'version': 6, 'addr': '2001:db8:0:1::1'}
]
},
"metadata": {},
"accessIPv4": "1.2.3.4",
"accessIPv6": "",
"links": [
{
"rel": "self",
"href": self_link,
},
{
"rel": "bookmark",
"href": bookmark_link,
},
],
}
}
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_detail_with_accessipv6(self):
self.instance['access_ip_v6'] = 'fead::1234'
image_bookmark = "http://localhost/fake/images/5"
flavor_bookmark = "http://localhost/fake/flavors/1"
self_link = "http://localhost/v2/fake/servers/%s" % self.uuid
bookmark_link = "http://localhost/fake/servers/%s" % self.uuid
expected_server = {
"server": {
"id": self.uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": 0,
"name": "test_server",
"status": "BUILD",
"hostId": '',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100'},
{'version': 6, 'addr': '2001:db8:0:1::1'}
]
},
"metadata": {},
"accessIPv4": "",
"accessIPv6": "fead::1234",
"links": [
{
"rel": "self",
"href": self_link,
},
{
"rel": "bookmark",
"href": bookmark_link,
},
],
}
}
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_detail_with_metadata(self):
metadata = []
metadata.append(models.InstanceMetadata(key="Open", value="Stack"))
self.instance['metadata'] = metadata
image_bookmark = "http://localhost/fake/images/5"
flavor_bookmark = "http://localhost/fake/flavors/1"
self_link = "http://localhost/v2/fake/servers/%s" % self.uuid
bookmark_link = "http://localhost/fake/servers/%s" % self.uuid
expected_server = {
"server": {
"id": self.uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": 0,
"name": "test_server",
"status": "BUILD",
"accessIPv4": "",
"accessIPv6": "",
"hostId": '',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100'},
{'version': 6, 'addr': '2001:db8:0:1::1'}
]
},
"metadata": {"Open": "Stack"},
"links": [
{
"rel": "self",
"href": self_link,
},
{
"rel": "bookmark",
"href": bookmark_link,
},
],
}
}
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
class ServerXMLSerializationTest(test.TestCase):
TIMESTAMP = "2010-10-11T10:30:22Z"
SERVER_HREF = 'http://localhost/v2/servers/%s' % FAKE_UUID
SERVER_NEXT = 'http://localhost/v2/servers?limit=%s&marker=%s'
SERVER_BOOKMARK = 'http://localhost/servers/%s' % FAKE_UUID
IMAGE_BOOKMARK = 'http://localhost/images/5'
FLAVOR_BOOKMARK = 'http://localhost/flavors/1'
def test_xml_declaration(self):
serializer = servers.ServerTemplate()
fixture = {
"server": {
'id': FAKE_UUID,
'user_id': 'fake_user_id',
'tenant_id': 'fake_tenant_id',
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
"progress": 0,
"name": "test_server",
"status": "BUILD",
"hostId": 'e4d909c290d0fb1ca068ffaddf22cbd0',
"accessIPv4": "1.2.3.4",
"accessIPv6": "fead::1234",
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": self.IMAGE_BOOKMARK,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": self.FLAVOR_BOOKMARK,
},
],
},
"addresses": {
"network_one": [
{
"version": 4,
"addr": "67.23.10.138",
},
{
"version": 6,
"addr": "::babe:67.23.10.138",
},
],
"network_two": [
{
"version": 4,
"addr": "67.23.10.139",
},
{
"version": 6,
"addr": "::babe:67.23.10.139",
},
],
},
"metadata": {
"Open": "Stack",
"Number": "1",
},
'links': [
{
'href': self.SERVER_HREF,
'rel': 'self',
},
{
'href': self.SERVER_BOOKMARK,
'rel': 'bookmark',
},
],
}
}
output = serializer.serialize(fixture)
print output
has_dec = output.startswith("<?xml version='1.0' encoding='UTF-8'?>")
self.assertTrue(has_dec)
def test_show(self):
serializer = servers.ServerTemplate()
fixture = {
"server": {
"id": FAKE_UUID,
"user_id": "fake",
"tenant_id": "fake",
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
"progress": 0,
"name": "test_server",
"status": "BUILD",
"hostId": 'e4d909c290d0fb1ca068ffaddf22cbd0',
"accessIPv4": "1.2.3.4",
"accessIPv6": "fead::1234",
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": self.IMAGE_BOOKMARK,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": self.FLAVOR_BOOKMARK,
},
],
},
"addresses": {
"network_one": [
{
"version": 4,
"addr": "67.23.10.138",
},
{
"version": 6,
"addr": "::babe:67.23.10.138",
},
],
"network_two": [
{
"version": 4,
"addr": "67.23.10.139",
},
{
"version": 6,
"addr": "::babe:67.23.10.139",
},
],
},
"metadata": {
"Open": "Stack",
"Number": "1",
},
'links': [
{
'href': self.SERVER_HREF,
'rel': 'self',
},
{
'href': self.SERVER_BOOKMARK,
'rel': 'bookmark',
},
],
}
}
output = serializer.serialize(fixture)
print output
root = etree.XML(output)
xmlutil.validate_schema(root, 'server')
server_dict = fixture['server']
for key in ['name', 'id', 'created', 'accessIPv4',
'updated', 'progress', 'status', 'hostId',
'accessIPv6']:
self.assertEqual(root.get(key), str(server_dict[key]))
link_nodes = root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(server_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
metadata_root = root.find('{0}metadata'.format(NS))
metadata_elems = metadata_root.findall('{0}meta'.format(NS))
self.assertEqual(len(metadata_elems), 2)
for i, metadata_elem in enumerate(metadata_elems):
(meta_key, meta_value) = server_dict['metadata'].items()[i]
self.assertEqual(str(metadata_elem.get('key')), str(meta_key))
self.assertEqual(str(metadata_elem.text).strip(), str(meta_value))
image_root = root.find('{0}image'.format(NS))
self.assertEqual(image_root.get('id'), server_dict['image']['id'])
link_nodes = image_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 1)
for i, link in enumerate(server_dict['image']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
flavor_root = root.find('{0}flavor'.format(NS))
self.assertEqual(flavor_root.get('id'), server_dict['flavor']['id'])
link_nodes = flavor_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 1)
for i, link in enumerate(server_dict['flavor']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
addresses_root = root.find('{0}addresses'.format(NS))
addresses_dict = server_dict['addresses']
network_elems = addresses_root.findall('{0}network'.format(NS))
self.assertEqual(len(network_elems), 2)
for i, network_elem in enumerate(network_elems):
network = addresses_dict.items()[i]
self.assertEqual(str(network_elem.get('id')), str(network[0]))
ip_elems = network_elem.findall('{0}ip'.format(NS))
for z, ip_elem in enumerate(ip_elems):
ip = network[1][z]
self.assertEqual(str(ip_elem.get('version')),
str(ip['version']))
self.assertEqual(str(ip_elem.get('addr')),
str(ip['addr']))
def test_create(self):
serializer = servers.FullServerTemplate()
fixture = {
"server": {
"id": FAKE_UUID,
"user_id": "fake",
"tenant_id": "fake",
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
"progress": 0,
"name": "test_server",
"status": "BUILD",
"accessIPv4": "1.2.3.4",
"accessIPv6": "fead::1234",
"hostId": "e4d909c290d0fb1ca068ffaddf22cbd0",
"adminPass": "test_password",
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": self.IMAGE_BOOKMARK,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": self.FLAVOR_BOOKMARK,
},
],
},
"addresses": {
"network_one": [
{
"version": 4,
"addr": "67.23.10.138",
},
{
"version": 6,
"addr": "::babe:67.23.10.138",
},
],
"network_two": [
{
"version": 4,
"addr": "67.23.10.139",
},
{
"version": 6,
"addr": "::babe:67.23.10.139",
},
],
},
"metadata": {
"Open": "Stack",
"Number": "1",
},
'links': [
{
'href': self.SERVER_HREF,
'rel': 'self',
},
{
'href': self.SERVER_BOOKMARK,
'rel': 'bookmark',
},
],
}
}
output = serializer.serialize(fixture)
print output
root = etree.XML(output)
xmlutil.validate_schema(root, 'server')
server_dict = fixture['server']
for key in ['name', 'id', 'created', 'accessIPv4',
'updated', 'progress', 'status', 'hostId',
'accessIPv6', 'adminPass']:
self.assertEqual(root.get(key), str(server_dict[key]))
link_nodes = root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(server_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
metadata_root = root.find('{0}metadata'.format(NS))
metadata_elems = metadata_root.findall('{0}meta'.format(NS))
self.assertEqual(len(metadata_elems), 2)
for i, metadata_elem in enumerate(metadata_elems):
(meta_key, meta_value) = server_dict['metadata'].items()[i]
self.assertEqual(str(metadata_elem.get('key')), str(meta_key))
self.assertEqual(str(metadata_elem.text).strip(), str(meta_value))
image_root = root.find('{0}image'.format(NS))
self.assertEqual(image_root.get('id'), server_dict['image']['id'])
link_nodes = image_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 1)
for i, link in enumerate(server_dict['image']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
flavor_root = root.find('{0}flavor'.format(NS))
self.assertEqual(flavor_root.get('id'), server_dict['flavor']['id'])
link_nodes = flavor_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 1)
for i, link in enumerate(server_dict['flavor']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
addresses_root = root.find('{0}addresses'.format(NS))
addresses_dict = server_dict['addresses']
network_elems = addresses_root.findall('{0}network'.format(NS))
self.assertEqual(len(network_elems), 2)
for i, network_elem in enumerate(network_elems):
network = addresses_dict.items()[i]
self.assertEqual(str(network_elem.get('id')), str(network[0]))
ip_elems = network_elem.findall('{0}ip'.format(NS))
for z, ip_elem in enumerate(ip_elems):
ip = network[1][z]
self.assertEqual(str(ip_elem.get('version')),
str(ip['version']))
self.assertEqual(str(ip_elem.get('addr')),
str(ip['addr']))
def test_index(self):
serializer = servers.MinimalServersTemplate()
uuid1 = fakes.get_fake_uuid(1)
uuid2 = fakes.get_fake_uuid(2)
expected_server_href = 'http://localhost/v2/servers/%s' % uuid1
expected_server_bookmark = 'http://localhost/servers/%s' % uuid1
expected_server_href_2 = 'http://localhost/v2/servers/%s' % uuid2
expected_server_bookmark_2 = 'http://localhost/servers/%s' % uuid2
fixture = {"servers": [
{
"id": fakes.get_fake_uuid(1),
"name": "test_server",
'links': [
{
'href': expected_server_href,
'rel': 'self',
},
{
'href': expected_server_bookmark,
'rel': 'bookmark',
},
],
},
{
"id": fakes.get_fake_uuid(2),
"name": "test_server_2",
'links': [
{
'href': expected_server_href_2,
'rel': 'self',
},
{
'href': expected_server_bookmark_2,
'rel': 'bookmark',
},
],
},
]}
output = serializer.serialize(fixture)
print output
root = etree.XML(output)
xmlutil.validate_schema(root, 'servers_index')
server_elems = root.findall('{0}server'.format(NS))
self.assertEqual(len(server_elems), 2)
for i, server_elem in enumerate(server_elems):
server_dict = fixture['servers'][i]
for key in ['name', 'id']:
self.assertEqual(server_elem.get(key), str(server_dict[key]))
link_nodes = server_elem.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(server_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
def test_index_with_servers_links(self):
serializer = servers.MinimalServersTemplate()
uuid1 = fakes.get_fake_uuid(1)
uuid2 = fakes.get_fake_uuid(2)
expected_server_href = 'http://localhost/v2/servers/%s' % uuid1
expected_server_next = self.SERVER_NEXT % (2, 2)
expected_server_bookmark = 'http://localhost/servers/%s' % uuid1
expected_server_href_2 = 'http://localhost/v2/servers/%s' % uuid2
expected_server_bookmark_2 = 'http://localhost/servers/%s' % uuid2
fixture = {"servers": [
{
"id": fakes.get_fake_uuid(1),
"name": "test_server",
'links': [
{
'href': expected_server_href,
'rel': 'self',
},
{
'href': expected_server_bookmark,
'rel': 'bookmark',
},
],
},
{
"id": fakes.get_fake_uuid(2),
"name": "test_server_2",
'links': [
{
'href': expected_server_href_2,
'rel': 'self',
},
{
'href': expected_server_bookmark_2,
'rel': 'bookmark',
},
],
},
],
"servers_links": [
{
'rel': 'next',
'href': expected_server_next,
},
]}
output = serializer.serialize(fixture)
print output
root = etree.XML(output)
xmlutil.validate_schema(root, 'servers_index')
server_elems = root.findall('{0}server'.format(NS))
self.assertEqual(len(server_elems), 2)
for i, server_elem in enumerate(server_elems):
server_dict = fixture['servers'][i]
for key in ['name', 'id']:
self.assertEqual(server_elem.get(key), str(server_dict[key]))
link_nodes = server_elem.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(server_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
# Check servers_links
servers_links = root.findall('{0}link'.format(ATOMNS))
for i, link in enumerate(fixture['servers_links']):
for key, value in link.items():
self.assertEqual(servers_links[i].get(key), value)
def test_detail(self):
serializer = servers.ServersTemplate()
uuid1 = fakes.get_fake_uuid(1)
expected_server_href = 'http://localhost/v2/servers/%s' % uuid1
expected_server_bookmark = 'http://localhost/servers/%s' % uuid1
expected_image_bookmark = self.IMAGE_BOOKMARK
expected_flavor_bookmark = self.FLAVOR_BOOKMARK
uuid2 = fakes.get_fake_uuid(2)
expected_server_href_2 = 'http://localhost/v2/servers/%s' % uuid2
expected_server_bookmark_2 = 'http://localhost/servers/%s' % uuid2
fixture = {"servers": [
{
"id": fakes.get_fake_uuid(1),
"user_id": "fake",
"tenant_id": "fake",
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
"progress": 0,
"name": "test_server",
"status": "BUILD",
"accessIPv4": "1.2.3.4",
"accessIPv6": "fead::1234",
"hostId": 'e4d909c290d0fb1ca068ffaddf22cbd0',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": expected_image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": expected_flavor_bookmark,
},
],
},
"addresses": {
"network_one": [
{
"version": 4,
"addr": "67.23.10.138",
},
{
"version": 6,
"addr": "::babe:67.23.10.138",
},
],
},
"metadata": {
"Number": "1",
},
"links": [
{
"href": expected_server_href,
"rel": "self",
},
{
"href": expected_server_bookmark,
"rel": "bookmark",
},
],
},
{
"id": fakes.get_fake_uuid(2),
"user_id": 'fake',
"tenant_id": 'fake',
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
"progress": 100,
"name": "test_server_2",
"status": "ACTIVE",
"accessIPv4": "1.2.3.4",
"accessIPv6": "fead::1234",
"hostId": 'e4d909c290d0fb1ca068ffaddf22cbd0',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": expected_image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": expected_flavor_bookmark,
},
],
},
"addresses": {
"network_one": [
{
"version": 4,
"addr": "67.23.10.138",
},
{
"version": 6,
"addr": "::babe:67.23.10.138",
},
],
},
"metadata": {
"Number": "2",
},
"links": [
{
"href": expected_server_href_2,
"rel": "self",
},
{
"href": expected_server_bookmark_2,
"rel": "bookmark",
},
],
},
]}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'servers')
server_elems = root.findall('{0}server'.format(NS))
self.assertEqual(len(server_elems), 2)
for i, server_elem in enumerate(server_elems):
server_dict = fixture['servers'][i]
for key in ['name', 'id', 'created', 'accessIPv4',
'updated', 'progress', 'status', 'hostId',
'accessIPv6']:
self.assertEqual(server_elem.get(key), str(server_dict[key]))
link_nodes = server_elem.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(server_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
metadata_root = server_elem.find('{0}metadata'.format(NS))
metadata_elems = metadata_root.findall('{0}meta'.format(NS))
for i, metadata_elem in enumerate(metadata_elems):
(meta_key, meta_value) = server_dict['metadata'].items()[i]
self.assertEqual(str(metadata_elem.get('key')), str(meta_key))
self.assertEqual(str(metadata_elem.text).strip(),
str(meta_value))
image_root = server_elem.find('{0}image'.format(NS))
self.assertEqual(image_root.get('id'), server_dict['image']['id'])
link_nodes = image_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 1)
for i, link in enumerate(server_dict['image']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
flavor_root = server_elem.find('{0}flavor'.format(NS))
self.assertEqual(flavor_root.get('id'),
server_dict['flavor']['id'])
link_nodes = flavor_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 1)
for i, link in enumerate(server_dict['flavor']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
addresses_root = server_elem.find('{0}addresses'.format(NS))
addresses_dict = server_dict['addresses']
network_elems = addresses_root.findall('{0}network'.format(NS))
for i, network_elem in enumerate(network_elems):
network = addresses_dict.items()[i]
self.assertEqual(str(network_elem.get('id')), str(network[0]))
ip_elems = network_elem.findall('{0}ip'.format(NS))
for z, ip_elem in enumerate(ip_elems):
ip = network[1][z]
self.assertEqual(str(ip_elem.get('version')),
str(ip['version']))
self.assertEqual(str(ip_elem.get('addr')),
str(ip['addr']))
def test_update(self):
serializer = servers.ServerTemplate()
fixture = {
"server": {
"id": FAKE_UUID,
"user_id": "fake",
"tenant_id": "fake",
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
"progress": 0,
"name": "test_server",
"status": "BUILD",
"hostId": 'e4d909c290d0fb1ca068ffaddf22cbd0',
"accessIPv4": "1.2.3.4",
"accessIPv6": "fead::1234",
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": self.IMAGE_BOOKMARK,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": self.FLAVOR_BOOKMARK,
},
],
},
"addresses": {
"network_one": [
{
"version": 4,
"addr": "67.23.10.138",
},
{
"version": 6,
"addr": "::babe:67.23.10.138",
},
],
"network_two": [
{
"version": 4,
"addr": "67.23.10.139",
},
{
"version": 6,
"addr": "::babe:67.23.10.139",
},
],
},
"metadata": {
"Open": "Stack",
"Number": "1",
},
'links': [
{
'href': self.SERVER_HREF,
'rel': 'self',
},
{
'href': self.SERVER_BOOKMARK,
'rel': 'bookmark',
},
],
"fault": {
"code": 500,
"created": self.TIMESTAMP,
"message": "Error Message",
"details": "Fault details",
}
}
}
output = serializer.serialize(fixture)
print output
root = etree.XML(output)
xmlutil.validate_schema(root, 'server')
server_dict = fixture['server']
for key in ['name', 'id', 'created', 'accessIPv4',
'updated', 'progress', 'status', 'hostId',
'accessIPv6']:
self.assertEqual(root.get(key), str(server_dict[key]))
link_nodes = root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(server_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
metadata_root = root.find('{0}metadata'.format(NS))
metadata_elems = metadata_root.findall('{0}meta'.format(NS))
self.assertEqual(len(metadata_elems), 2)
for i, metadata_elem in enumerate(metadata_elems):
(meta_key, meta_value) = server_dict['metadata'].items()[i]
self.assertEqual(str(metadata_elem.get('key')), str(meta_key))
self.assertEqual(str(metadata_elem.text).strip(), str(meta_value))
image_root = root.find('{0}image'.format(NS))
self.assertEqual(image_root.get('id'), server_dict['image']['id'])
link_nodes = image_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 1)
for i, link in enumerate(server_dict['image']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
flavor_root = root.find('{0}flavor'.format(NS))
self.assertEqual(flavor_root.get('id'), server_dict['flavor']['id'])
link_nodes = flavor_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 1)
for i, link in enumerate(server_dict['flavor']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
addresses_root = root.find('{0}addresses'.format(NS))
addresses_dict = server_dict['addresses']
network_elems = addresses_root.findall('{0}network'.format(NS))
self.assertEqual(len(network_elems), 2)
for i, network_elem in enumerate(network_elems):
network = addresses_dict.items()[i]
self.assertEqual(str(network_elem.get('id')), str(network[0]))
ip_elems = network_elem.findall('{0}ip'.format(NS))
for z, ip_elem in enumerate(ip_elems):
ip = network[1][z]
self.assertEqual(str(ip_elem.get('version')),
str(ip['version']))
self.assertEqual(str(ip_elem.get('addr')),
str(ip['addr']))
fault_root = root.find('{0}fault'.format(NS))
fault_dict = server_dict['fault']
self.assertEqual(fault_root.get("code"), str(fault_dict["code"]))
self.assertEqual(fault_root.get("created"), fault_dict["created"])
msg_elem = fault_root.find('{0}message'.format(NS))
self.assertEqual(msg_elem.text, fault_dict["message"])
det_elem = fault_root.find('{0}details'.format(NS))
self.assertEqual(det_elem.text, fault_dict["details"])
def test_action(self):
serializer = servers.FullServerTemplate()
fixture = {
"server": {
"id": FAKE_UUID,
"user_id": "fake",
"tenant_id": "fake",
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
"progress": 0,
"name": "test_server",
"status": "BUILD",
"accessIPv4": "1.2.3.4",
"accessIPv6": "fead::1234",
"hostId": "e4d909c290d0fb1ca068ffaddf22cbd0",
"adminPass": "test_password",
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": self.IMAGE_BOOKMARK,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": self.FLAVOR_BOOKMARK,
},
],
},
"addresses": {
"network_one": [
{
"version": 4,
"addr": "67.23.10.138",
},
{
"version": 6,
"addr": "::babe:67.23.10.138",
},
],
"network_two": [
{
"version": 4,
"addr": "67.23.10.139",
},
{
"version": 6,
"addr": "::babe:67.23.10.139",
},
],
},
"metadata": {
"Open": "Stack",
"Number": "1",
},
'links': [
{
'href': self.SERVER_HREF,
'rel': 'self',
},
{
'href': self.SERVER_BOOKMARK,
'rel': 'bookmark',
},
],
}
}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'server')
server_dict = fixture['server']
for key in ['name', 'id', 'created', 'accessIPv4',
'updated', 'progress', 'status', 'hostId',
'accessIPv6', 'adminPass']:
self.assertEqual(root.get(key), str(server_dict[key]))
link_nodes = root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(server_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
metadata_root = root.find('{0}metadata'.format(NS))
metadata_elems = metadata_root.findall('{0}meta'.format(NS))
self.assertEqual(len(metadata_elems), 2)
for i, metadata_elem in enumerate(metadata_elems):
(meta_key, meta_value) = server_dict['metadata'].items()[i]
self.assertEqual(str(metadata_elem.get('key')), str(meta_key))
self.assertEqual(str(metadata_elem.text).strip(), str(meta_value))
image_root = root.find('{0}image'.format(NS))
self.assertEqual(image_root.get('id'), server_dict['image']['id'])
link_nodes = image_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 1)
for i, link in enumerate(server_dict['image']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
flavor_root = root.find('{0}flavor'.format(NS))
self.assertEqual(flavor_root.get('id'), server_dict['flavor']['id'])
link_nodes = flavor_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 1)
for i, link in enumerate(server_dict['flavor']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
addresses_root = root.find('{0}addresses'.format(NS))
addresses_dict = server_dict['addresses']
network_elems = addresses_root.findall('{0}network'.format(NS))
self.assertEqual(len(network_elems), 2)
for i, network_elem in enumerate(network_elems):
network = addresses_dict.items()[i]
self.assertEqual(str(network_elem.get('id')), str(network[0]))
ip_elems = network_elem.findall('{0}ip'.format(NS))
for z, ip_elem in enumerate(ip_elems):
ip = network[1][z]
self.assertEqual(str(ip_elem.get('version')),
str(ip['version']))
self.assertEqual(str(ip_elem.get('addr')),
str(ip['addr']))
class ServersAllExtensionsTestCase(test.TestCase):
"""
Servers tests using default API router with all extensions enabled.
The intent here is to catch cases where extensions end up throwing
an exception because of a malformed request before the core API
gets a chance to validate the request and return a 422 response.
For example, ServerDiskConfigController extends servers.Controller:
@wsgi.extends
def create(self, req, body):
if 'server' in body:
self._set_disk_config(body['server'])
resp_obj = (yield)
self._show(req, resp_obj)
we want to ensure that the extension isn't barfing on an invalid
body.
"""
def setUp(self):
super(ServersAllExtensionsTestCase, self).setUp()
self.app = compute.APIRouter()
def test_create_missing_server(self):
"""Test create with malformed body"""
def fake_create(*args, **kwargs):
raise test.TestingException("Should not reach the compute API.")
self.stubs.Set(compute_api.API, 'create', fake_create)
req = fakes.HTTPRequest.blank('/fake/servers')
req.method = 'POST'
req.content_type = 'application/json'
body = {'foo': {'a': 'b'}}
req.body = jsonutils.dumps(body)
res = req.get_response(self.app)
self.assertEqual(422, res.status_int)
def test_update_missing_server(self):
"""Test create with malformed body"""
def fake_update(*args, **kwargs):
raise test.TestingException("Should not reach the compute API.")
        self.stubs.Set(compute_api.API, 'update', fake_update)
req = fakes.HTTPRequest.blank('/fake/servers/1')
req.method = 'PUT'
req.content_type = 'application/json'
body = {'foo': {'a': 'b'}}
req.body = jsonutils.dumps(body)
res = req.get_response(self.app)
self.assertEqual(422, res.status_int)
class ServersUnprocessableEntityTestCase(test.TestCase):
"""
    Tests of the places from which we return 422 Unprocessable Entity
"""
def setUp(self):
super(ServersUnprocessableEntityTestCase, self).setUp()
self.ext_mgr = extensions.ExtensionManager()
self.ext_mgr.extensions = {}
self.controller = servers.Controller(self.ext_mgr)
def _unprocessable_server_create(self, body):
req = fakes.HTTPRequest.blank('/v2/fake/servers')
req.method = 'POST'
self.assertRaises(webob.exc.HTTPUnprocessableEntity,
self.controller.create, req, body)
def test_create_server_no_body(self):
self._unprocessable_server_create(body=None)
def test_create_server_missing_server(self):
body = {'foo': {'a': 'b'}}
self._unprocessable_server_create(body=body)
def test_create_server_malformed_entity(self):
body = {'server': 'string'}
self._unprocessable_server_create(body=body)
def _unprocessable_server_update(self, body):
req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
self.assertRaises(webob.exc.HTTPUnprocessableEntity,
self.controller.update, req, FAKE_UUID, body)
def test_update_server_no_body(self):
self._unprocessable_server_update(body=None)
def test_update_server_missing_server(self):
body = {'foo': {'a': 'b'}}
self._unprocessable_server_update(body=body)
def test_create_update_malformed_entity(self):
body = {'server': 'string'}
self._unprocessable_server_update(body=body)
|
RafaelCosman/pybrain
|
refs/heads/master
|
pybrain/rl/environments/shipsteer/viewer.py
|
25
|
from __future__ import print_function
__author__ = 'Frank Sehnke, sehnke@in.tum.de'
#@PydevCodeAnalysisIgnore
#########################################################################
# OpenGL viewer for the FlexCube Environment
#
# The FlexCube Environment is a Mass-Spring-System composed of 8 mass points.
# These resemble a cube with flexible edges.
#
# This viewer uses a UDP connection found in tools/networking/udpconnection.py
#
# The viewer receives the position matrix of the 8 mass points and the center of gravity.
# With this information it renders a GLUT-based 3D visualization of the FlexCube.
#
# Options:
# - serverIP: The IP of the server to which the viewer should connect
# - ownIP: The IP of the computer running the viewer
# - port: The starting port (2 adjacent ports will be used)
#
# Saving the images is possible by setting self.savePics=True.
# Changing the point and angle of view is possible by using the mouse
# while button 1 or 2 is pressed.
#
# Requirements: OpenGL
#
#########################################################################
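# Example invocation (illustrative; the IPs and port below are placeholders)
# matching the __main__ block at the bottom of this file, whose positional
# arguments map to serverIP, ownIP and port:
#   python viewer.py 192.168.0.10 192.168.0.2 21580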
from OpenGL.GLUT import *
from OpenGL.GL import *
from OpenGL.GLE import *
from OpenGL.GLU import *
import sys
from time import sleep
from scipy import ones, array, cos, sin
from pybrain.tools.networking.udpconnection import UDPClient
class FlexCubeRenderer(object):
    #Options: ServerIP(default:localhost), OwnIP(default:localhost), Port(default:21580)
def __init__(self, servIP="127.0.0.1", ownIP="127.0.0.1", port="21580"):
self.oldScreenValues = None
self.view = 0
self.worldRadius = 400
        # Start of mouse pointer
self.lastx = 0
self.lasty = 15
self.lastz = 300
self.zDis = 1
# Start of cube
self.cube = [0.0, 0.0, 0.0]
self.bmpCount = 0
self.actCount = 0
self.calcPhysics = 0
self.newPic = 1
self.picCount = 0
self.sensors = [0.0, 0.0, 0.0]
self.centerOfGrav = array([0.0, 5.0, 0.0])
self.savePics = False
self.drawCounter = 0
self.fps = 50
self.dt = 1.0 / float(self.fps)
self.step = 0
self.client = UDPClient(servIP, ownIP, port)
# If self.savePics=True this method saves the produced images
def saveTo(self, filename, format="JPEG"):
import Image # get PIL's functionality...
width, height = 800, 600
glPixelStorei(GL_PACK_ALIGNMENT, 1)
data = glReadPixels(0, 0, width, height, GL_RGB, GL_UNSIGNED_BYTE)
image = Image.fromstring("RGB", (width, height), data)
image = image.transpose(Image.FLIP_TOP_BOTTOM)
image.save(filename, format)
        print('Saved image to', filename)
return image
# the render method containing the Glut mainloop
def _render(self):
# Call init: Parameter(Window Position -> x, y, height, width)
self.init_GL(self, 300, 300, 800, 600)
self.quad = gluNewQuadric()
glutMainLoop()
# The Glut idle function
def drawIdleScene(self):
        #receive sensor data from the server and update the center of gravity
try: self.sensors = self.client.listen(self.sensors)
except: pass
if self.sensors == ["r", "r", "r"]: self.centerOfGrav = array([0.0, 5.0, 0.0])
else:
self.step += 1
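            # derive a planar direction vector from the heading sensor
            # (sensors[0], scaled by the pi approximation 3.1428) and advance
            # the center of gravity along it, scaled by sensors[2]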
a = self.sensors[0] / 360.0 * 3.1428
dir = array([cos(a), 0.0, -sin(a)])
self.centerOfGrav += self.sensors[2] * dir * 0.02
self.drawScene()
if self.savePics:
self.saveTo("./screenshots/image_jump" + repr(10000 + self.picCount) + ".jpg")
self.picCount += 1
else: sleep(self.dt)
def drawScene(self):
        '''This method draws the complete scene.'''
# clear the buffer
if self.zDis < 10: self.zDis += 0.25
if self.lastz > 100: self.lastz -= self.zDis
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
glLoadIdentity()
# Point of view
glRotatef(self.lastx, 0.0, 1.0, 0.0)
glRotatef(self.lasty, 1.0, 0.0, 0.0)
#glRotatef(15, 0.0, 0.0, 1.0)
# direction of view is aimed to the center of gravity of the cube
glTranslatef(-self.centerOfGrav[0], -self.centerOfGrav[1] - 50.0, -self.centerOfGrav[2] - self.lastz)
#Objects
        #Scale bar ("Massstab"): long ticks every 10 units, medium every 5, short otherwise
for lk in range(41):
if float(lk - 20) / 10.0 == (lk - 20) / 10:
glColor3f(0.75, 0.75, 0.75)
glPushMatrix()
glRotatef(90, 1, 0, 0)
glTranslate(self.worldRadius / 40.0 * float(lk) - self.worldRadius / 2.0, -40.0, -30)
quad = gluNewQuadric()
gluCylinder(quad, 2, 2, 60, 4, 1)
glPopMatrix()
else:
if float(lk - 20) / 5.0 == (lk - 20) / 5:
glColor3f(0.75, 0.75, 0.75)
glPushMatrix()
glRotatef(90, 1, 0, 0)
glTranslate(self.worldRadius / 40.0 * float(lk) - self.worldRadius / 2.0, -40.0, -15.0)
quad = gluNewQuadric()
gluCylinder(quad, 1, 1, 30, 4, 1)
glPopMatrix()
else:
glColor3f(0.75, 0.75, 0.75)
glPushMatrix()
glRotatef(90, 1, 0, 0)
glTranslate(self.worldRadius / 40.0 * float(lk) - self.worldRadius / 2.0, -40.0, -7.5)
quad = gluNewQuadric()
gluCylinder(quad, 0.5, 0.5, 15, 4, 1)
glPopMatrix()
# Floor
tile = self.worldRadius / 40.0
glEnable (GL_BLEND)
glBlendFunc (GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
glColor3f(0.8, 0.8, 0.5)
glPushMatrix()
glTranslatef(0.0, -3.0, 0.0)
glBegin(GL_QUADS)
glNormal(0.0, 1.0, 0.0)
glVertex3f(-self.worldRadius, 0.0, -self.worldRadius)
glVertex3f(-self.worldRadius, 0.0, self.worldRadius)
glVertex3f(self.worldRadius, 0.0, self.worldRadius)
glVertex3f(self.worldRadius, 0.0, -self.worldRadius)
glEnd()
glPopMatrix()
#Water
for xF in range(40):
for yF in range(40):
if float(xF + yF) / 2.0 == (xF + yF) / 2: glColor4f(0.7, 0.7, 1.0, 0.5)
else: glColor4f(0.9, 0.9, 1.0, 0.5)
glPushMatrix()
glTranslatef(0.0, -0.03, 0.0)
glBegin(GL_QUADS)
glNormal(0.5 + sin(float(xF) + float(self.step) / 4.0) * 0.5, 0.5 + cos(float(xF) + float(self.step) / 4.0) * 0.5, 0.0)
for i in range(2):
for k in range(2):
glVertex3f((i + xF - 20) * tile, sin(float(xF + i) + float(self.step) / 4.0) * 3.0, ((k ^ i) + yF - 20) * tile)
glEnd()
glPopMatrix()
self.ship()
# swap the buffer
glutSwapBuffers()
def ship(self):
glColor3f(0.4, 0.1, 0.2)
glPushMatrix()
glTranslate(self.centerOfGrav[0] + 14, self.centerOfGrav[1], self.centerOfGrav[2])
glRotatef(180 - self.sensors[0], 0.0, 1.0, 0.0)
self.cuboid(0, 0, 0, 20, 5, 5)
#bow of ship
glBegin(GL_TRIANGLES)
glNormal3fv(self.calcNormal(self.points2Vector([-5, 6, 2.5], [0, 5, 5]), self.points2Vector([-5, 6, 2.5], [0, 5, 0])))
glVertex3f(-5, 6, 2.5), glVertex3f(0, 5, 0), glVertex3f(0, 5, 5)
glNormal3fv(self.calcNormal(self.points2Vector([-5, 6, 2.5], [0, 0, 5]), self.points2Vector([-5, 6, 2.5], [0, 5, 5])))
glVertex3f(-5, 6, 2.5), glVertex3f(0, 0, 5), glVertex3f(0, 5, 5)
glNormal3fv(self.calcNormal(self.points2Vector([-5, 6, 2.5], [0, 0, 0]), self.points2Vector([-5, 6, 2.5], [0, 0, 5])))
glVertex3f(-5, 6, 2.5), glVertex3f(0, 0, 5), glVertex3f(0, 0, 0)
glNormal3fv(self.calcNormal(self.points2Vector([-5, 6, 2.5], [0, 5, 0]), self.points2Vector([-5, 6, 2.5], [0, 0, 0])))
glVertex3f(-5, 6, 2.5), glVertex3f(0, 0, 0), glVertex3f(0, 5, 0)
glEnd()
# stern
glPushMatrix()
glRotatef(-90, 1.0, 0.0, 0.0)
glTranslatef(15, -2.5, 0)
gluCylinder(self.quad, 2.5, 2.5, 5, 10, 1)
glTranslatef(0, 0, 5)
gluDisk(self.quad, 0, 2.5, 10, 1)
glPopMatrix()
# deck
if abs(self.sensors[0]) < 5.0: reward = (self.sensors[2] + 10.0) / 50.0
else: reward = 0.2
glColor3f(1.0 - reward, reward, 0)
self.cuboid(5, 5, 1, 10, 8, 4)
glPushMatrix()
glRotatef(-90, 1.0, 0.0, 0.0)
glTranslatef(13, -2.5, 5)
glColor3f(1, 1, 1)
gluCylinder(self.quad, 1, 0.8, 5, 20, 1)
glPopMatrix()
glPopMatrix()
def cuboid(self, x0, y0, z0, x1, y1, z1):
glBegin(GL_QUADS)
glNormal(0, 0, 1)
glVertex3f(x0, y0, z1); glVertex3f(x0, y1, z1); glVertex3f(x1, y1, z1); glVertex3f(x1, y0, z1) #front
glNormal(-1, 0, 0)
glVertex3f(x0, y0, z0); glVertex3f(x0, y0, z1); glVertex3f(x0, y1, z1); glVertex3f(x0, y1, z0) # left
glNormal(0, -1, 0)
glVertex3f(x0, y0, z0); glVertex3f(x0, y0, z1); glVertex3f(x1, y0, z1); glVertex3f(x1, y0, z0) # bottom
glNormal(0, 0, -1)
glVertex3f(x0, y0, z0); glVertex3f(x1, y0, z0); glVertex3f(x1, y1, z0); glVertex3f(x0, y1, z0) # back
glNormal(0, 1, 0)
glVertex3f(x0, y1, z0); glVertex3f(x1, y1, z0); glVertex3f(x1, y1, z1); glVertex3f(x0, y1, z1) # top
glNormal(1, 0, 0)
glVertex3f(x1, y0, z0); glVertex3f(x1, y0, z1); glVertex3f(x1, y1, z1); glVertex3f(x1, y1, z0) # right
glEnd()
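    # calcNormal returns the cross product of two edge vectors, i.e. an
    # unnormalized face normal; GL_NORMALIZE (enabled in init_GL) rescales it
    # to unit length. points2Vector builds such edge vectors from two points.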
def calcNormal(self, xVector, yVector):
result = [0, 0, 0]
result[0] = xVector[1] * yVector[2] - yVector[1] * xVector[2]
result[1] = -xVector[0] * yVector[2] + yVector[0] * xVector[2]
result[2] = xVector[0] * yVector[1] - yVector[0] * xVector[1]
return [result[0], result[1], result[2]]
def points2Vector(self, startPoint, endPoint):
result = [0, 0, 0]
result[0] = endPoint[0] - startPoint[0]
result[1] = endPoint[1] - startPoint[1]
result[2] = endPoint[2] - startPoint[2]
return [result[0], result[1], result[2]]
def resizeScene(self, width, height):
'''Needed if window size changes.'''
if height == 0: # Prevent A Divide By Zero If The Window Is Too Small
height = 1
glViewport(0, 0, width, height) # Reset The Current Viewport And Perspective Transformation
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
gluPerspective(45.0, float(width) / float(height), 0.1, 700.0)
glMatrixMode(GL_MODELVIEW)
def activeMouse(self, x, y):
#Returns mouse coordinates while any mouse button is pressed.
# store the mouse coordinate
if self.mouseButton == GLUT_LEFT_BUTTON:
self.lastx = x - self.xOffset
self.lasty = y - self.yOffset
if self.mouseButton == GLUT_RIGHT_BUTTON:
self.lastz = y - self.zOffset
# redisplay
glutPostRedisplay()
def passiveMouse(self, x, y):
'''Returns mouse coordinates while no mouse button is pressed.'''
pass
def completeMouse(self, button, state, x, y):
#Returns mouse coordinates and which button was pressed resp. released.
self.mouseButton = button
if state == GLUT_DOWN:
self.xOffset = x - self.lastx
self.yOffset = y - self.lasty
self.zOffset = y - self.lastz
# redisplay
glutPostRedisplay()
    #Initialize an OpenGL window at position x, y with the given height and width.
def init_GL(self, pyWorld, x, y, height, width):
# initialize GLUT
glutInit([])
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH)
glutInitWindowSize(height, width)
glutInitWindowPosition(x, y)
glutCreateWindow("The Curious Cube")
glClearDepth(1.0)
glEnable(GL_DEPTH_TEST)
glClearColor(0.0, 0.0, 0.0, 0.0)
glShadeModel(GL_SMOOTH)
glMatrixMode(GL_MODELVIEW)
        # initialize lighting
glLightfv(GL_LIGHT0, GL_DIFFUSE, [1, 1, 1, 1.0])
glLightModelfv(GL_LIGHT_MODEL_AMBIENT, [1.0, 1.0, 1.0, 1.0])
glEnable(GL_LIGHTING)
glEnable(GL_LIGHT0)
        # let glColor* calls set the diffuse material property
glColorMaterial(GL_FRONT, GL_DIFFUSE)
glEnable(GL_COLOR_MATERIAL)
        # Automatic vector normalization
glEnable(GL_NORMALIZE)
### Instantiate the virtual world ###
glutDisplayFunc(pyWorld.drawScene)
glutMotionFunc(pyWorld.activeMouse)
glutMouseFunc(pyWorld.completeMouse)
glutReshapeFunc(pyWorld.resizeScene)
glutIdleFunc(pyWorld.drawIdleScene)
if __name__ == '__main__':
s = sys.argv[1:]
r = FlexCubeRenderer(*s)
r._render()
|
prefetchnta/questlab
|
refs/heads/master
|
bin/x64bin/python/37/Lib/xml/parsers/__init__.py
|
88
|
"""Python interfaces to XML parsers.
This package contains one module:
expat -- Python wrapper for James Clark's Expat parser, with namespace
support.
"""
|
yongtang/tensorflow
|
refs/heads/master
|
tensorflow/python/estimator/model_fn.py
|
40
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""model_fn python module.
Importing from tensorflow.python.estimator is unsupported
and will soon break!
"""
# pylint: disable=unused-import,g-bad-import-order,g-import-not-at-top,wildcard-import
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_estimator.python.estimator import model_fn
# Include attrs that start with single underscore.
_HAS_DYNAMIC_ATTRIBUTES = True
model_fn.__all__ = [s for s in dir(model_fn) if not s.startswith('__')]
from tensorflow_estimator.python.estimator.model_fn import *
|
perezg/infoxchange
|
refs/heads/master
|
BASE/lib/python2.7/site-packages/setuptools/__init__.py
|
5
|
"""Extensions to the 'distutils' for large or complex distributions"""
from setuptools.extension import Extension, Library
from setuptools.dist import Distribution, Feature, _get_unpatched
import distutils.core, setuptools.command
from setuptools.depends import Require
from distutils.core import Command as _Command
from distutils.util import convert_path
import os
import sys
__version__ = '0.9.7'
__all__ = [
'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require',
'find_packages'
]
bootstrap_install_from = None
# If we run 2to3 on .py files, should we also convert docstrings?
# Default: yes; assume that we can detect doctests reliably
run_2to3_on_doctests = True
# Standard package names for fixer packages
lib2to3_fixer_packages = ['lib2to3.fixes']
def find_packages(where='.', exclude=()):
"""Return a list all Python packages found within directory 'where'
'where' should be supplied as a "cross-platform" (i.e. URL-style) path; it
will be converted to the appropriate local path syntax. 'exclude' is a
sequence of package names to exclude; '*' can be used as a wildcard in the
names, such that 'foo.*' will exclude all subpackages of 'foo' (but not
'foo' itself).
"""
out = []
stack=[(convert_path(where), '')]
while stack:
where,prefix = stack.pop(0)
for name in os.listdir(where):
fn = os.path.join(where,name)
if ('.' not in name and os.path.isdir(fn) and
os.path.isfile(os.path.join(fn,'__init__.py'))
):
out.append(prefix+name); stack.append((fn,prefix+name+'.'))
for pat in list(exclude)+['ez_setup']:
from fnmatch import fnmatchcase
out = [item for item in out if not fnmatchcase(item,pat)]
return out
setup = distutils.core.setup
_Command = _get_unpatched(_Command)
class Command(_Command):
__doc__ = _Command.__doc__
command_consumes_arguments = False
def __init__(self, dist, **kw):
# Add support for keyword arguments
_Command.__init__(self,dist)
for k,v in kw.items():
setattr(self,k,v)
def reinitialize_command(self, command, reinit_subcommands=0, **kw):
cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
for k,v in kw.items():
setattr(cmd,k,v) # update command with keywords
return cmd
import distutils.core
distutils.core.Command = Command # we can't patch distutils.cmd, alas
def findall(dir = os.curdir):
"""Find all files under 'dir' and return the list of full filenames
(relative to 'dir').
"""
all_files = []
for base, dirs, files in os.walk(dir):
if base==os.curdir or base.startswith(os.curdir+os.sep):
base = base[2:]
if base:
files = [os.path.join(base, f) for f in files]
all_files.extend(filter(os.path.isfile, files))
return all_files
import distutils.filelist
distutils.filelist.findall = findall # fix findall bug in distutils.
# sys.dont_write_bytecode was introduced in Python 2.6.
if ((hasattr(sys, "dont_write_bytecode") and sys.dont_write_bytecode) or
(not hasattr(sys, "dont_write_bytecode") and os.environ.get("PYTHONDONTWRITEBYTECODE"))):
_dont_write_bytecode = True
else:
_dont_write_bytecode = False
|
AnderEnder/ansible-modules-extras
|
refs/heads/devel
|
system/filesystem.py
|
35
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Alexander Bulimov <lazywolf0@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
author: "Alexander Bulimov (@abulimov)"
module: filesystem
short_description: Makes a file system on a block device
description:
  - This module creates a file system.
version_added: "1.2"
options:
fstype:
description:
- File System type to be created.
- reiserfs support was added in 2.2.
required: true
dev:
description:
- Target block device.
required: true
force:
choices: [ "yes", "no" ]
default: "no"
description:
      - If yes, allows creating a new filesystem on a device that already has one.
required: false
resizefs:
choices: [ "yes", "no" ]
default: "no"
description:
      - If yes and the block device and filesystem sizes differ, grow the filesystem to fill the space. Note: XFS will only grow if mounted.
required: false
version_added: "2.0"
opts:
description:
- List of options to be passed to mkfs command.
notes:
- uses mkfs command
'''
EXAMPLES = '''
# Create an ext2 filesystem on /dev/sdb1.
- filesystem: fstype=ext2 dev=/dev/sdb1
# Create an ext4 filesystem on /dev/sdb1 and check disk blocks.
- filesystem: fstype=ext4 dev=/dev/sdb1 opts="-cc"
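# Grow an existing ext4 filesystem to fill /dev/sdb1 (illustrative; uses the
# resizefs option documented above -- note that XFS will only grow if mounted).
- filesystem: fstype=ext4 dev=/dev/sdb1 resizefs=yes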
'''
def _get_dev_size(dev, module):
""" Return size in bytes of device. Returns int """
blockdev_cmd = module.get_bin_path("blockdev", required=True)
rc, devsize_in_bytes, err = module.run_command("%s %s %s" % (blockdev_cmd, "--getsize64", dev))
return int(devsize_in_bytes)
def _get_fs_size(fssize_cmd, dev, module):
""" Return size in bytes of filesystem on device. Returns int """
cmd = module.get_bin_path(fssize_cmd, required=True)
if 'tune2fs' == fssize_cmd:
# Get Block count and Block size
rc, size, err = module.run_command("%s %s %s" % (cmd, '-l', dev))
if rc == 0:
for line in size.splitlines():
if 'Block count:' in line:
block_count = int(line.split(':')[1].strip())
elif 'Block size:' in line:
block_size = int(line.split(':')[1].strip())
break
else:
module.fail_json(msg="Failed to get block count and block size of %s with %s" % (dev, cmd), rc=rc, err=err )
elif 'xfs_info' == fssize_cmd:
# Get Block count and Block size
rc, size, err = module.run_command("%s %s" % (cmd, dev))
if rc == 0:
for line in size.splitlines():
#if 'data' in line:
if 'data ' in line:
block_size = int(line.split('=')[2].split()[0])
block_count = int(line.split('=')[3].split(',')[0])
break
else:
module.fail_json(msg="Failed to get block count and block size of %s with %s" % (dev, cmd), rc=rc, err=err )
elif 'btrfs' == fssize_cmd:
#ToDo
# There is no way to get the blocksize and blockcount for btrfs filesystems
block_size = 1
block_count = 1
return block_size*block_count
def main():
module = AnsibleModule(
argument_spec = dict(
fstype=dict(required=True, aliases=['type']),
dev=dict(required=True, aliases=['device']),
opts=dict(),
force=dict(type='bool', default='no'),
resizefs=dict(type='bool', default='no'),
),
supports_check_mode=True,
)
# There is no "single command" to manipulate filesystems, so we map them all out and their options
fs_cmd_map = {
'ext2' : {
'mkfs' : 'mkfs.ext2',
'grow' : 'resize2fs',
'grow_flag' : None,
'force_flag' : '-F',
'fsinfo': 'tune2fs',
},
'ext3' : {
'mkfs' : 'mkfs.ext3',
'grow' : 'resize2fs',
'grow_flag' : None,
'force_flag' : '-F',
'fsinfo': 'tune2fs',
},
'ext4' : {
'mkfs' : 'mkfs.ext4',
'grow' : 'resize2fs',
'grow_flag' : None,
'force_flag' : '-F',
'fsinfo': 'tune2fs',
},
'reiserfs' : {
'mkfs' : 'mkfs.reiserfs',
'grow' : 'resize_reiserfs',
'grow_flag' : None,
'force_flag' : '-f',
'fsinfo': 'reiserfstune',
},
'ext4dev' : {
'mkfs' : 'mkfs.ext4',
'grow' : 'resize2fs',
'grow_flag' : None,
'force_flag' : '-F',
'fsinfo': 'tune2fs',
},
'xfs' : {
'mkfs' : 'mkfs.xfs',
'grow' : 'xfs_growfs',
'grow_flag' : None,
'force_flag' : '-f',
'fsinfo': 'xfs_info',
},
'btrfs' : {
'mkfs' : 'mkfs.btrfs',
'grow' : 'btrfs',
'grow_flag' : 'filesystem resize',
'force_flag' : '-f',
'fsinfo': 'btrfs',
}
}
dev = module.params['dev']
fstype = module.params['fstype']
opts = module.params['opts']
force = module.boolean(module.params['force'])
resizefs = module.boolean(module.params['resizefs'])
changed = False
try:
_ = fs_cmd_map[fstype]
except KeyError:
module.exit_json(changed=False, msg="WARNING: module does not support this filesystem yet. %s" % fstype)
mkfscmd = fs_cmd_map[fstype]['mkfs']
force_flag = fs_cmd_map[fstype]['force_flag']
growcmd = fs_cmd_map[fstype]['grow']
fssize_cmd = fs_cmd_map[fstype]['fsinfo']
if not os.path.exists(dev):
module.fail_json(msg="Device %s not found."%dev)
cmd = module.get_bin_path('blkid', required=True)
rc,raw_fs,err = module.run_command("%s -c /dev/null -o value -s TYPE %s" % (cmd, dev))
fs = raw_fs.strip()
if fs == fstype and resizefs == False and not force:
module.exit_json(changed=False)
elif fs == fstype and resizefs == True:
# Get dev and fs size and compare
devsize_in_bytes = _get_dev_size(dev, module)
fssize_in_bytes = _get_fs_size(fssize_cmd, dev, module)
if fssize_in_bytes < devsize_in_bytes:
fs_smaller = True
else:
fs_smaller = False
if module.check_mode and fs_smaller:
module.exit_json(changed=True, msg="Resizing filesystem %s on device %s" % (fstype,dev))
elif module.check_mode and not fs_smaller:
module.exit_json(changed=False, msg="%s filesystem is using the whole device %s" % (fstype, dev))
elif fs_smaller:
cmd = module.get_bin_path(growcmd, required=True)
rc,out,err = module.run_command("%s %s" % (cmd, dev))
            # Sadly there is no easy way to determine if this has changed.
            # For now, just say "true" and move on. In the future, you would
            # have to parse the output to determine this. Thankfully, these
            # are safe operations if no change is made.
if rc == 0:
module.exit_json(changed=True, msg=out)
else:
module.fail_json(msg="Resizing filesystem %s on device '%s' failed"%(fstype,dev), rc=rc, err=err)
else:
module.exit_json(changed=False, msg="%s filesystem is using the whole device %s" % (fstype, dev))
elif fs and not force:
module.fail_json(msg="'%s' is already used as %s, use force=yes to overwrite"%(dev,fs), rc=rc, err=err)
### create fs
if module.check_mode:
changed = True
else:
mkfs = module.get_bin_path(mkfscmd, required=True)
cmd = None
if opts is None:
cmd = "%s %s '%s'" % (mkfs, force_flag, dev)
else:
cmd = "%s %s %s '%s'" % (mkfs, force_flag, opts, dev)
        rc, _, err = module.run_command(cmd)
        if rc == 0:
            changed = True
        else:
            module.fail_json(msg="Creating filesystem %s on device '%s' failed" % (fstype, dev), rc=rc, err=err)
module.exit_json(changed=changed)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
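# Illustrative playbook usage of this module (task values are assumed):
#   - filesystem: fstype=ext4 dev=/dev/sdb1 opts="-m 1" resizefs=yes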
|
leighpauls/k2cro4
|
refs/heads/master
|
remoting/tools/verify_resources.py
|
14
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Verifies that GRD resource files define all the strings used by a given
set of source files. For file formats where it is not possible to infer which
strings represent message identifiers, localized strings should be explicitly
annotated with the string "i18n-content", for example:
LocalizeString(/*i18n-content*/"PRODUCT_NAME");
This script also recognises localized strings in HTML and manifest.json files:
HTML: <span i18n-content="PRODUCT_NAME"></span>
or ...i18n-value-name-1="BUTTON_NAME"...
manifest.json: __MSG_PRODUCT_NAME__
Note that these forms must be exact; extra spaces are not permitted, though
either single or double quotes are recognized.
In addition, the script checks that all the messages are still in use; if
this is not the case then a warning is issued, but the script still succeeds.
"""
import json
import os
import optparse
import re
import sys
import xml.dom.minidom as minidom
WARNING_MESSAGE = """
To remove this warning, either remove the unused tags from
resource files, add the files that use the tags listed above to
remoting.gyp, or annotate existing uses of those tags with the
prefix /*i18n-content*/
"""
def LoadTagsFromGrd(filename):
xml = minidom.parse(filename)
tags = []
msgs_and_structs = xml.getElementsByTagName("message")
msgs_and_structs.extend(xml.getElementsByTagName("structure"))
for res in msgs_and_structs:
name = res.getAttribute("name")
if not name or not name.startswith("IDR_"):
raise Exception("Tag name doesn't start with IDR_: %s" % name)
tags.append(name[4:])
return tags
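# Illustrative GRD entry (shape assumed) from which LoadTagsFromGrd would
# extract the tag 'PRODUCT_NAME':
#   <message name="IDR_PRODUCT_NAME" desc="...">Chromoting</message>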
def ExtractTagFromLine(file_type, line):
"""Extract a tag from a line of HTML, C++, JS or JSON."""
if file_type == "html":
# HTML-style (tags)
m = re.search('i18n-content=[\'"]([^\'"]*)[\'"]', line)
if m: return m.group(1)
# HTML-style (substitutions)
m = re.search('i18n-value-name-[1-9]=[\'"]([^\'"]*)[\'"]', line)
if m: return m.group(1)
elif file_type == 'js':
# Javascript style
m = re.search('/\*i18n-content\*/[\'"]([^\`"]*)[\'"]', line)
if m: return m.group(1)
elif file_type == 'cc':
# C++ style
m = re.search('IDR_([A-Z0-9_]*)', line)
if m: return m.group(1)
m = re.search('/\*i18n-content\*/["]([^\`"]*)["]', line)
if m: return m.group(1)
elif file_type == 'json':
# Manifest style
m = re.search('__MSG_(.*)__', line)
if m: return m.group(1)
return None
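# Illustrative calls with hypothetical inputs:
#   ExtractTagFromLine('html', '<span i18n-content="PRODUCT_NAME">')   # -> 'PRODUCT_NAME'
#   ExtractTagFromLine('json', '"name": "__MSG_PRODUCT_NAME__"')       # -> 'PRODUCT_NAME'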
def VerifyFile(filename, messages, used_tags):
"""
  Parse |filename|, looking for tags and reporting any that are not included
  in |messages|. Return True if all tags are present and correct, or False if
  any are missing. If no tags are found, print a warning message and return
  True.
"""
base_name, extension = os.path.splitext(filename)
extension = extension[1:]
if extension not in ['js', 'cc', 'html', 'json']:
raise Exception("Unknown file type: %s" % extension)
result = True
matches = False
f = open(filename, 'r')
lines = f.readlines()
for i in xrange(0, len(lines)):
tag = ExtractTagFromLine(extension, lines[i])
if tag:
tag = tag.upper()
used_tags.add(tag)
matches = True
if not tag in messages:
result = False
print '%s/%s:%d: error: Undefined tag: %s' % \
(os.getcwd(), filename, i + 1, tag)
if not matches:
print '%s/%s:0: warning: No tags found' % (os.getcwd(), filename)
f.close()
return result
def main():
parser = optparse.OptionParser(
usage='Usage: %prog [options...] [source_file...]')
parser.add_option('-t', '--touch', dest='touch',
help='File to touch when finished.')
parser.add_option('-r', '--grd', dest='grd', action='append',
help='grd file')
options, args = parser.parse_args()
if not options.touch:
print '-t is not specified.'
return 1
  if not options.grd or not args:
    print 'At least one GRD file and one source file must be specified.'
return 1
resources = []
for f in options.grd:
resources.extend(LoadTagsFromGrd(f))
used_tags = set([])
exit_code = 0
for f in args:
if not VerifyFile(f, resources, used_tags):
exit_code = 1
warnings = False
for tag in resources:
if tag not in used_tags:
print ('%s/%s:0: warning: %s is defined but not used') % \
(os.getcwd(), sys.argv[2], tag)
warnings = True
if warnings:
print WARNING_MESSAGE
if exit_code == 0:
f = open(options.touch, 'a')
f.close()
os.utime(options.touch, None)
return exit_code
if __name__ == '__main__':
sys.exit(main())
|
Nick-Hall/gramps
|
refs/heads/master
|
gramps/gui/plug/quick/_quickreports.py
|
5
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2007 B. Malengier
# Copyright (C) 2008 Brian G. Matherly
# Copyright (C) 2011 Tim G L Lyons
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
This module provides the functions to build the quick report context menu's
"""
#------------------------------------------------------------------------
#
# python modules
#
#------------------------------------------------------------------------
from gramps.gen.const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
from io import StringIO
#------------------------------------------------------------------------
#
# Set up logging
#
#------------------------------------------------------------------------
import logging
from collections import abc
log = logging.getLogger(".quickreports")
#-------------------------------------------------------------------------
#
# GNOME modules
#
#-------------------------------------------------------------------------
from gi.repository import Gtk
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from ...pluginmanager import GuiPluginManager
from ...uimanager import valid_action_name
from gramps.gen.plug import (CATEGORY_QR_PERSON, CATEGORY_QR_FAMILY, CATEGORY_QR_MEDIA,
CATEGORY_QR_EVENT, CATEGORY_QR_SOURCE, CATEGORY_QR_MISC,
CATEGORY_QR_PLACE, CATEGORY_QR_REPOSITORY,
CATEGORY_QR_NOTE, CATEGORY_QR_CITATION,
CATEGORY_QR_SOURCE_OR_CITATION)
from ._textbufdoc import TextBufDoc
from gramps.gen.simple import make_basic_stylesheet
MENUITEM = ('<item>\n'
'<attribute name="action">{prefix}.{action}</attribute>\n'
'<attribute name="label">'
'{label}</attribute>\n'
'</item>\n')
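# For example (values assumed), MENUITEM.format(prefix='win', action='qr-0',
# label='Age on Date') yields one <item> element of the GTK menu XML.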
def flatten(L):
"""
Flattens a possibly nested list. Removes None results, too.
"""
retval = []
if isinstance(L, (list, tuple)):
for item in L:
fitem = flatten(item)
if fitem is not None:
retval.extend(fitem)
elif L is not None:
retval.append(L)
return retval
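# e.g., flatten([1, [2, None], (3,)]) returns [1, 2, 3]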
def create_web_connect_menu(dbstate, uistate, nav_group, handle, prefix):
"""
    This function queries the registered web connects. It collects the
    connects of the requested category, which must be one of nav_group.
    It constructs the ui string of the menu and its actions. The action
    callback function is constructed using the dbstate and the handle as
    input. A tuple is returned, containing the ui string of the menu and
    its associated actions.
"""
top = ("<placeholder id='WebConnect'><submenu>\n"
'<attribute name="label" translatable="yes">'
'Web Connection</attribute>\n')
ofile = StringIO()
ofile.write(top)
#select the web connects to show
pmgr = GuiPluginManager.get_instance()
plugins = pmgr.process_plugin_data('WebConnect')
try:
connections = [plug(nav_group) if isinstance(plug, abc.Callable) else
plug for plug in plugins]
except BaseException:
import traceback
traceback.print_exc()
connections = []
connections = flatten(connections)
connections.sort(key=lambda plug: plug.name)
actions = []
for indx, connect in enumerate(connections):
# action would be better with "connect.key", but it seems to be
# non-ASCII sometimes. So we use an action number instead.
action = "web-con-%d" % indx
ofile.write(MENUITEM.format(prefix=prefix, action=action,
label=connect.name))
callback = connect(dbstate, uistate, nav_group, handle)
actions.append((action, make_web_connect_callback(callback)))
ofile.write('</submenu></placeholder>\n')
return (ofile.getvalue(), actions)
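# Illustrative call (argument values assumed):
#   ui_xml, actions = create_web_connect_menu(dbstate, uistate,
#                                             'Person', person_handle, 'win')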
def make_web_connect_callback(func):
return lambda x, y: func(x)
def create_quickreport_menu(category, dbstate, uistate, handle, prefix, track=[]):
""" This functions querries the registered quick reports with
quick_report_list of _PluginMgr.py
It collects the reports of the requested category, which must be one of
CATEGORY_QR_PERSON, CATEGORY_QR_FAMILY,
CATEGORY_QR_EVENT, CATEGORY_QR_SOURCE, CATEGORY_QR_MEDIA,
CATEGORY_QR_PLACE, CATEGORY_QR_REPOSITORY,
CATEGORY_QR_CITATION, CATEGORY_QR_SOURCE_OR_CITATION
It constructs the ui string of the quick report menu, and it's actions
The action callback function is constructed, using the dbstate and the
handle as input method.
A tuple is returned, containing the ui string of the quick report menu,
and its associated actions
"""
top = ("<submenu>\n"
'<attribute name="label" translatable="yes">'
'Quick View</attribute>\n')
actions = []
ofile = StringIO()
ofile.write(top)
#select the reports to show
showlst = []
pmgr = GuiPluginManager.get_instance()
for pdata in pmgr.get_reg_quick_reports():
        if pdata.supported and pdata.category == category:
showlst.append(pdata)
showlst.sort(key=lambda x: x.name)
for pdata in showlst:
new_key = valid_action_name("qr-%s" % pdata.id)
ofile.write(MENUITEM.format(prefix=prefix, action=new_key,
label=pdata.name))
actions.append((new_key, make_quick_report_callback(
pdata, category, dbstate, uistate, handle, track=track)))
ofile.write('</submenu>\n')
return (ofile.getvalue(), actions)
def make_quick_report_callback(pdata, category, dbstate, uistate, handle,
track=[]):
return lambda x, y: run_report(dbstate, uistate, category, handle, pdata,
track=track)
def get_quick_report_list(qv_category=None):
"""
Returns a list of PluginData of quick views of category qv_category
CATEGORY_QR_PERSON, CATEGORY_QR_FAMILY, CATEGORY_QR_EVENT,
CATEGORY_QR_SOURCE, CATEGORY_QR_MISC, CATEGORY_QR_PLACE,
CATEGORY_QR_REPOSITORY, CATEGORY_QR_MEDIA,
CATEGORY_QR_CITATION, CATEGORY_QR_SOURCE_OR_CITATION or None for all
"""
names = []
pmgr = GuiPluginManager.get_instance()
for pdata in pmgr.get_reg_quick_reports():
if qv_category == pdata.category or qv_category is None:
names.append(pdata) # (see below for item struct)
return names
def run_quick_report_by_name(dbstate, uistate, report_name, handle,
container=None, track=[], **kwargs):
"""
Run a QuickView by name.
**kwargs provides a way of passing special quick views additional
arguments.
"""
report = None
pmgr = GuiPluginManager.get_instance()
for pdata in pmgr.get_reg_quick_reports():
if pdata.id == report_name:
report = pdata
break
if report:
return run_report(dbstate, uistate, report.category,
handle, report, container=container,
track=track, **kwargs)
else:
raise AttributeError("No such quick report '%s'" % report_name)
def run_quick_report_by_name_direct(report_name, database, document, handle):
"""
Useful for running one quick report from another
"""
report = None
pmgr = GuiPluginManager.get_instance()
for pdata in pmgr.get_reg_quick_reports():
if pdata.id == report_name:
report = pdata
break
if report:
# FIXME: allow auto lookup of obj like below?
d = TextBufDoc(make_basic_stylesheet(), None)
d.dbstate = document.dbstate
d.uistate = document.uistate
d.open("")
mod = pmgr.load_plugin(report)
if mod:
reportfunc = getattr(mod, report.runfunc)
retval = reportfunc(database, d, handle)
d.close()
return retval
else:
raise ImportError("Quick report id = '%s' could not be loaded"
% report_name)
else:
raise AttributeError("No such quick report id = '%s'" % report_name)
def run_report(dbstate, uistate, category, handle, pdata, container=None,
track=[], **kwargs):
"""
Run a Quick Report.
Optionally container can be passed, rather than putting the report
in a new window.
**kwargs are only used for special quick views that allow additional
arguments, and that are run by run_quick_report_by_name().
"""
pmgr = GuiPluginManager.get_instance()
mod = pmgr.load_plugin(pdata)
if not mod:
print("QuickView Error: plugin does not load")
return
func = getattr(mod, pdata.runfunc)
if handle:
d = TextBufDoc(make_basic_stylesheet(), None, track=track)
d.dbstate = dbstate
d.uistate = uistate
if isinstance(handle, str): # a handle
            if category == CATEGORY_QR_PERSON:
                obj = dbstate.db.get_person_from_handle(handle)
            elif category == CATEGORY_QR_FAMILY:
                obj = dbstate.db.get_family_from_handle(handle)
            elif category == CATEGORY_QR_EVENT:
                obj = dbstate.db.get_event_from_handle(handle)
            elif category == CATEGORY_QR_SOURCE:
                obj = dbstate.db.get_source_from_handle(handle)
            elif category == CATEGORY_QR_CITATION:
                obj = dbstate.db.get_citation_from_handle(handle)
            elif category == CATEGORY_QR_SOURCE_OR_CITATION:
                if dbstate.db.has_source_handle(handle):
                    obj = dbstate.db.get_source_from_handle(handle)
                elif dbstate.db.has_citation_handle(handle):
                    obj = dbstate.db.get_citation_from_handle(handle)
                else:
                    raise ValueError("selection must be either source or citation")
            elif category == CATEGORY_QR_PLACE:
                obj = dbstate.db.get_place_from_handle(handle)
            elif category == CATEGORY_QR_MEDIA:
                obj = dbstate.db.get_media_from_handle(handle)
            elif category == CATEGORY_QR_REPOSITORY:
                obj = dbstate.db.get_repository_from_handle(handle)
            elif category == CATEGORY_QR_NOTE:
                obj = dbstate.db.get_note_from_handle(handle)
elif category == CATEGORY_QR_MISC:
obj = handle
else:
obj = None
else: # allow caller to send object directly
obj = handle
if obj:
if container:
result = d.open("", container=container)
func(dbstate.db, d, obj, **kwargs)
return result
else:
d.open("")
retval = func(dbstate.db, d, obj, **kwargs)
d.close()
return retval
else:
print("QuickView Error: failed to run report: no obj")
else:
print("QuickView Error: handle is not set")
|
lbeltrame/letsencrypt
|
refs/heads/master
|
letsencrypt-compatibility-test/letsencrypt_compatibility_test/configurators/apache/__init__.py
|
47
|
"""Let's Encrypt compatibility test Apache configurators"""
|
SergiosKar/Deep-Learning-models
|
refs/heads/master
|
Q-table.py
|
1
|
import numpy as np
import gym
lr = 0.8
g = 0.9
episodes = 2000
env = gym.make('FrozenLake-v0')
# initialize the Q-table with zeros
Q = np.zeros([env.observation_space.n,env.action_space.n])
rList = []
for i in range(episodes):
s = env.reset()
reward = 0
goal_flag = False
for j in range(200):
        # greedy action plus decaying random exploration noise
        a = np.argmax(Q[s,:] + np.random.randn(1,env.action_space.n)*(1./(i+1)))
        s_new, r, goal_flag, _ = env.step(a)  # next state, reward, done flag
maxQ=np.max(Q[s_new,:])
        # Bellman update
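        # i.e. Q(s,a) <- Q(s,a) + lr * (r + g * max_a' Q(s_new, a') - Q(s,a))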
Q[s,a] += lr*(r + g*maxQ - Q[s,a])
reward += r
s = s_new
        if goal_flag:
break
rList.append(reward)
print ("Score:" + str(sum(rList)/episodes))
print (" Q-Table ")
print (Q)
|
semonte/intellij-community
|
refs/heads/master
|
python/testData/inspections/PyUnboundLocalVariableInspection/ControlFlowInTryExceptFinally.py
|
83
|
def foo1():
a = 1
try:
for i in range(10):
pass
except Exception:
pass
finally:
b = a #pass
def foo2():
a = 1
try:
for i in range(10):
pass
except Exception:
c = a #pass
finally:
b = a #pass
|
Triv90/Heat
|
refs/heads/stable/grizzly
|
heat/common/custom_backend_auth.py
|
6
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2012, Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Middleware for authenticating against custom backends.
"""
import logging
from heat.openstack.common import local
from heat.rpc import client as rpc_client
import webob.exc
LOG = logging.getLogger(__name__)
class AuthProtocol(object):
def __init__(self, app, conf):
self.conf = conf
self.app = app
def __call__(self, env, start_response):
"""
Handle incoming request.
        Authenticate and send downstream on success. Reject the request if
        we can't authenticate.
"""
LOG.debug('Authenticating user token')
context = local.store.context
engine = rpc_client.EngineClient()
authenticated = engine.authenticated_to_backend(context)
if authenticated:
return self.app(env, start_response)
else:
return self._reject_request(env, start_response)
def _reject_request(self, env, start_response):
"""
Redirect client to auth server.
:param env: wsgi request environment
:param start_response: wsgi response callback
        :returns: HTTPUnauthorized http response
"""
resp = webob.exc.HTTPUnauthorized("Backend authentication failed", [])
return resp(env, start_response)
def filter_factory(global_conf, **local_conf):
conf = global_conf.copy()
conf.update(local_conf)
def auth_filter(app):
return AuthProtocol(app, conf)
return auth_filter
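# Illustrative paste.deploy wiring (section name assumed):
#   [filter:custombackendauth]
#   paste.filter_factory = heat.common.custom_backend_auth:filter_factory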
|
Microsoft/PTVS
|
refs/heads/master
|
Python/Product/Miniconda/Miniconda3-x64/Lib/site-packages/win32com/client/CLSIDToClass.py
|
29
|
"""Manages a dictionary of CLSID strings to Python classes.
Primary use of this module is to allow modules generated by
makepy.py to share classes. @makepy@ automatically generates code
which interacts with this module. You should never need to reference
this module directly.
This module only provides support for modules which have previously
been imported. The gencache module provides some support for loading modules
on demand - once loaded that way, this module supports them too.
As an example, the MSACCESS.TLB type library makes reference to the
CLSID of the Database object, as defined in DAO3032.DLL. This
allows code using the MSAccess wrapper to natively use Databases.
This obviously applies to all cooperating objects, not just DAO and
Access.
"""
mapCLSIDToClass = {}
def RegisterCLSID( clsid, pythonClass ):
"""Register a class that wraps a CLSID
This function allows a CLSID to be globally associated with a class.
    Certain modules will automatically convert an IDispatch object to an
instance of the associated class.
"""
mapCLSIDToClass[str(clsid)] = pythonClass
def RegisterCLSIDsFromDict( dict ):
"""Register a dictionary of CLSID's and classes.
    This function performs the same task as @RegisterCLSID@, but for
an entire dictionary of associations.
Typically called by makepy generated modules at import time.
"""
mapCLSIDToClass.update(dict)
def GetClass(clsid):
"""Given a CLSID, return the globally associated class.
clsid -- a string CLSID representation to check.
"""
return mapCLSIDToClass[clsid]
def HasClass(clsid):
"""Determines if the CLSID has an associated class.
clsid -- the string CLSID to check
"""
return clsid in mapCLSIDToClass
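# Illustrative use (the CLSID value here is hypothetical):
#   if HasClass("{00000000-0000-0000-C000-000000000046}"):
#       klass = GetClass("{00000000-0000-0000-C000-000000000046}")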
|
majorika/crawlers
|
refs/heads/master
|
election_commission/main.py
|
3
|
#!/usr/bin/python2.7
# -*- encoding=utf-8 -*-
from argparse import ArgumentParser, RawTextHelpFormatter
import codecs
import gevent
from gevent import monkey
import json
from types import UnicodeType
from crawlers import Crawler
from crawlers.local.static import get_election_type_name
from utils import check_dir
def print_json(filename, data):
with open(filename, 'w') as f:
json.dump(data, f, encoding="UTF-8", indent=2)
def print_csv(filename, data):
def transform(txt):
if isinstance(txt, int):
txt = str(txt)
if isinstance(txt, list):
txt = '||'.join(txt)
txt = txt.replace(',', '|')
if isinstance(txt, UnicodeType):
txt = txt.encode('utf8')
return txt
attrs = ['assembly_no', 'district', 'cand_no', 'party', 'name_kr',
'name_cn', 'sex', 'birthyear', 'birthmonth', 'birthday',
'address', 'job', 'education', 'experience', 'recommend_priority',
'votenum', 'voterate', 'elected']
with open(filename, 'w') as f:
f.write(codecs.BOM_UTF8)
f.write(','.join(attrs))
f.write('\n')
for cand in data:
values = (cand[attr] if attr in cand else '' for attr in attrs)
values = (transform(value) for value in values)
f.write(','.join(values))
f.write('\n')
def crawl(target, _type, nth, printer, filename, level=None):
crawler = Crawler(target, _type, nth, level)
cand_list = crawler.crawl()
printer(filename, cand_list)
def create_parser():
parser = ArgumentParser(formatter_class=RawTextHelpFormatter)
parser.add_argument('target', choices=['assembly', 'local', 'president'],\
help="name of target election")
parser.add_argument('type', choices=['candidates', 'elected', 'precandidates'],
help="type of person")
parser.add_argument('start', help="starting election id", type=float)
parser.add_argument('end', help="ending election id", type=float,\
nargs='?', default=None)
    parser.add_argument('-t', '--test', dest='test', action='store_true',
                        help="write output as csv instead of json")
parser.add_argument('-d', dest='directory', help="specify data directory")
# TODO: change to subparser
parser.add_argument('-l', choices=['pg', 'pm', 'pp', 'mg', 'mm', 'mp', 'eg', 'em'],
dest="level",
help="specify level for local elections.\n"
"- 1st char: {p:province, m:municipality, e:education},\n"
"- 2nd char: {g: governor, m: member}")
return parser
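# Illustrative invocations (election ids and paths are assumed):
#   python main.py assembly elected 17 19 -d data
#   python main.py local candidates 5 -l pg -t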
def main(args):
printer = print_csv if args.test else print_json
filetype = 'csv' if args.test else 'json'
datadir = args.directory if args.directory else '.'
check_dir(datadir)
if args.target=='local':
if args.end:
jobs = []
args.level = get_election_type_name(args.level)
for n in xrange(args.start, args.end+1):
filename = '%s/%s-%s-%s-%d.%s'\
% (datadir, args.target, args.level, args.type, n, filetype)
job = gevent.spawn(crawl, target=args.target, level=args.level,\
_type=args.type, nth=n, filename=filename, printer=printer)
jobs.append(job)
gevent.joinall(jobs)
else:
n = args.start
args.level = get_election_type_name(args.level)
filename = '%s/%s-%s-%s-%.01f.%s' %\
(datadir, args.target, args.level, args.type, n, filetype)
crawl(target=args.target, level=args.level, _type=args.type, nth=n,\
filename=filename, printer=printer)
else:
if args.end:
jobs = []
for n in xrange(args.start, args.end+1):
filename = '%s/%s-%s-%d.%s'\
% (datadir, args.target, args.type, n, filetype)
job = gevent.spawn(crawl, target=args.target, _type=args.type, nth=n,\
filename=filename, printer=printer)
jobs.append(job)
gevent.joinall(jobs)
else:
n = args.start
filename = '%s/%s-%s-%.01f.%s' %\
(datadir, args.target, args.type, n, filetype)
crawl(target=args.target, _type=args.type, nth=n,\
filename=filename, printer=printer)
print 'Data written to %s' % filename
if __name__ == '__main__':
monkey.patch_all()
parser = create_parser()
args = parser.parse_args()
main(args)
|
endlessm/chromium-browser
|
refs/heads/master
|
third_party/catapult/tracing/tracing_build/__init__.py
|
8
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
import tracing_project
tracing_project.UpdateSysPathIfNeeded()
|
matthiaskramm/corepy
|
refs/heads/master
|
corepy/arch/x86_64/isa/x86_64_isa.py
|
1
|
# Copyright (c) 2006-2009 The Trustees of Indiana University.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# - Neither the Indiana University nor the names of its contributors may be used
# to endorse or promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from corepy.spre.spe import Instruction, DispatchInstruction
from x86_64_fields import *
from x86_64_insts import *
__annoy__ = True
__doc__="""
x86_64 Instruction Set Architecture (ISA).
To use, import this module and call the Instructions as Python
functions to generate a properly coded version. For example, to
create an add instruction:
import corepy.arch.x86_64.isa as isa
import corepy.arch.x86_64.types.registers as regs
inst = isa.add(regs.eax, regs.ebx) # add ebx to eax
Operands are in the same order as presented in the architecture manuals.
For a complete reference and details for all instructions, please
refer to:
'Intel 64 and IA-32 Architectures Software Developer's Manual' or
'AMD64 Architecture Programmer's Manual'.
URL (valid as of Sept 21, 2007):
http://www.intel.com/products/processor/manuals/index.htm
http://developer.amd.com/devguides.jsp
"""
# ------------------------------
# x86 Registers
# ------------------------------
# reg num: named register
gp8_map = {0: al_t, 1: cl_t}
gp16_map = {0: ax_t, 2: dx_t}
gp32_map = {0: eax_t}
gp64_map = {0: rax_t}
fp_map = {0: st0_t}
# ------------------------------
# x86 Instructions
# ------------------------------
# Currently 16bit versions of instructions have separate operand
# functions, and the size-override prefix is in the opcode, so protected
# (32bit default) mode is assumed here.
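# For example, isa.add(regs.ax, 1000) (register names assumed from
# corepy.arch.x86_64.types.registers) dispatches to the ax_imm16 form,
# whose opcode list [0x66, 0x05] already carries the size-override prefix.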
class adc(DispatchInstruction):
dispatch = (
(reg64_simm8, {'opcode':[0x83], 'modrm':0x10}),
(mem64_simm8, {'opcode':[0x83], 'modrm':0x10}),
(rax_imm32, {'opcode':[0x15], 'modrm':None}),
(reg64_imm32, {'opcode':[0x81], 'modrm':0x10}),
(mem64_imm32, {'opcode':[0x81], 'modrm':0x10}),
(reg64_reg64, {'opcode':[0x11], 'modrm':None}),
(mem64_reg64, {'opcode':[0x11], 'modrm':None}),
(reg64_mem64, {'opcode':[0x13], 'modrm':None}),
(reg32_simm8, {'opcode':[0x83], 'modrm':0x10}),
(mem32_simm8, {'opcode':[0x83], 'modrm':0x10}),
(eax_imm32, {'opcode':[0x15], 'modrm':None}),
(reg32_imm32, {'opcode':[0x81], 'modrm':0x10}),
(mem32_imm32, {'opcode':[0x81], 'modrm':0x10}),
(reg32_reg32, {'opcode':[0x11], 'modrm':None}),
(mem32_reg32, {'opcode':[0x11], 'modrm':None}),
(reg32_mem32, {'opcode':[0x13], 'modrm':None}),
(reg16_simm8, {'opcode':[0x83], 'modrm':0x10}),
(mem16_simm8, {'opcode':[0x83], 'modrm':0x10}),
(ax_imm16, {'opcode':[0x66, 0x15], 'modrm':None}),
(reg16_imm16, {'opcode':[0x81], 'modrm':0x10}),
(mem16_imm16, {'opcode':[0x81], 'modrm':0x10}),
(reg16_reg16, {'opcode':[0x11], 'modrm':None}),
(mem16_reg16, {'opcode':[0x11], 'modrm':None}),
(reg16_mem16, {'opcode':[0x13], 'modrm':None}),
(al_imm8, {'opcode':[0x14], 'modrm':None}),
(reg8_imm8, {'opcode':[0x80], 'modrm':0x10}),
(mem8_imm8, {'opcode':[0x80], 'modrm':0x10}),
(reg8_reg8, {'opcode':[0x10], 'modrm':None}),
(mem8_reg8, {'opcode':[0x10], 'modrm':None}),
(reg8_mem8, {'opcode':[0x12], 'modrm':None}))
class add(DispatchInstruction):
dispatch = (
(reg64_simm8, {'opcode':[0x83], 'modrm':0x00}),
(mem64_simm8, {'opcode':[0x83], 'modrm':0x00}),
(rax_imm32, {'opcode':[0x05], 'modrm':None}),
(reg64_imm32, {'opcode':[0x81], 'modrm':0x00}),
(mem64_imm32, {'opcode':[0x81], 'modrm':0x00}),
(reg64_reg64, {'opcode':[0x01], 'modrm':None}),
(mem64_reg64, {'opcode':[0x01], 'modrm':None}),
(reg64_mem64, {'opcode':[0x03], 'modrm':None}),
(reg32_simm8, {'opcode':[0x83], 'modrm':0x00}),
(mem32_simm8, {'opcode':[0x83], 'modrm':0x00}),
(eax_imm32, {'opcode':[0x05], 'modrm':None}),
(reg32_imm32, {'opcode':[0x81], 'modrm':0x00}),
(mem32_imm32, {'opcode':[0x81], 'modrm':0x00}),
(reg32_reg32, {'opcode':[0x01], 'modrm':None}),
(mem32_reg32, {'opcode':[0x01], 'modrm':None}),
(reg32_mem32, {'opcode':[0x03], 'modrm':None}),
(reg16_simm8, {'opcode':[0x83], 'modrm':0x00}),
(mem16_simm8, {'opcode':[0x83], 'modrm':0x00}),
(ax_imm16, {'opcode':[0x66, 0x05], 'modrm':None}),
(reg16_imm16, {'opcode':[0x81], 'modrm':0x00}),
(mem16_imm16, {'opcode':[0x81], 'modrm':0x00}),
(reg16_reg16, {'opcode':[0x01], 'modrm':None}),
(mem16_reg16, {'opcode':[0x01], 'modrm':None}),
(reg16_mem16, {'opcode':[0x03], 'modrm':None}),
(al_imm8, {'opcode':[0x04], 'modrm':None}),
(reg8_imm8, {'opcode':[0x80], 'modrm':0x00}),
(mem8_imm8, {'opcode':[0x80], 'modrm':0x00}),
(reg8_reg8, {'opcode':[0x00], 'modrm':None}),
(mem8_reg8, {'opcode':[0x00], 'modrm':None}),
(reg8_mem8, {'opcode':[0x02], 'modrm':None}))
class and_(DispatchInstruction):
dispatch = (
(reg64_simm8, {'opcode':[0x83], 'modrm':0x20}),
(mem64_simm8, {'opcode':[0x83], 'modrm':0x20}),
(rax_imm32, {'opcode':[0x25], 'modrm':None}),
(reg64_imm32, {'opcode':[0x81], 'modrm':0x20}),
(mem64_imm32, {'opcode':[0x81], 'modrm':0x20}),
(reg64_reg64, {'opcode':[0x21], 'modrm':None}),
(mem64_reg64, {'opcode':[0x21], 'modrm':None}),
(reg64_mem64, {'opcode':[0x23], 'modrm':None}),
(reg32_simm8, {'opcode':[0x83], 'modrm':0x20}),
(mem32_simm8, {'opcode':[0x83], 'modrm':0x20}),
(eax_imm32, {'opcode':[0x25], 'modrm':None}),
(reg32_imm32, {'opcode':[0x81], 'modrm':0x20}),
(mem32_imm32, {'opcode':[0x81], 'modrm':0x20}),
(reg32_reg32, {'opcode':[0x21], 'modrm':None}),
(mem32_reg32, {'opcode':[0x21], 'modrm':None}),
(reg32_mem32, {'opcode':[0x23], 'modrm':None}),
(reg16_simm8, {'opcode':[0x83], 'modrm':0x20}),
(mem16_simm8, {'opcode':[0x83], 'modrm':0x20}),
(ax_imm16, {'opcode':[0x66, 0x25], 'modrm':None}),
(reg16_imm16, {'opcode':[0x81], 'modrm':0x20}),
(mem16_imm16, {'opcode':[0x81], 'modrm':0x20}),
(reg16_reg16, {'opcode':[0x21], 'modrm':None}),
(mem16_reg16, {'opcode':[0x21], 'modrm':None}),
(reg16_mem16, {'opcode':[0x23], 'modrm':None}),
(al_imm8, {'opcode':[0x24], 'modrm':None}),
(reg8_imm8, {'opcode':[0x80], 'modrm':0x20}),
(mem8_imm8, {'opcode':[0x80], 'modrm':0x20}),
(reg8_reg8, {'opcode':[0x20], 'modrm':None}),
(mem8_reg8, {'opcode':[0x20], 'modrm':None}),
(reg8_mem8, {'opcode':[0x22], 'modrm':None}))
class bsf(DispatchInstruction):
dispatch = (
(reg64_reg64, {'opcode':[0x0F, 0xBC], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0xBC], 'modrm':None}),
(reg32_reg32, {'opcode':[0x0F, 0xBC], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0xBC], 'modrm':None}),
(reg16_reg16, {'opcode':[0x0F, 0xBC], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0xBC], 'modrm':None}))
class bsr(DispatchInstruction):
dispatch = (
(reg64_reg64, {'opcode':[0x0F, 0xBD], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0xBD], 'modrm':None}),
(reg32_reg32, {'opcode':[0x0F, 0xBD], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0xBD], 'modrm':None}),
(reg16_reg16, {'opcode':[0x0F, 0xBD], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0xBD], 'modrm':None}))
class bswap(DispatchInstruction):
dispatch = (
(reg64, {'opcode':[0x0F, 0xC8], 'modrm':None}),
(reg32, {'opcode':[0x0F, 0xC8], 'modrm':None}))
class bt(DispatchInstruction):
dispatch = (
(reg64_imm8, {'opcode':[0x0F, 0xBA], 'modrm':0x20}),
(mem64_imm8, {'opcode':[0x0F, 0xBA], 'modrm':0x20}),
(reg64_reg64, {'opcode':[0x0F, 0xA3], 'modrm':None}),
(mem64_reg64, {'opcode':[0x0F, 0xA3], 'modrm':None}),
(reg32_imm8, {'opcode':[0x0F, 0xBA], 'modrm':0x20}),
(mem32_imm8, {'opcode':[0x0F, 0xBA], 'modrm':0x20}),
(reg32_reg32, {'opcode':[0x0F, 0xA3], 'modrm':None}),
(mem32_reg32, {'opcode':[0x0F, 0xA3], 'modrm':None}),
(reg16_imm8, {'opcode':[0x0F, 0xBA], 'modrm':0x20}),
(mem16_imm8, {'opcode':[0x0F, 0xBA], 'modrm':0x20}),
(reg16_reg16, {'opcode':[0x0F, 0xA3], 'modrm':None}),
(mem16_reg16, {'opcode':[0x0F, 0xA3], 'modrm':None}))
class btc(DispatchInstruction):
dispatch = (
(reg64_imm8, {'opcode':[0x0F, 0xBA], 'modrm':0x38}),
(mem64_imm8, {'opcode':[0x0F, 0xBA], 'modrm':0x38}),
(reg64_reg64, {'opcode':[0x0F, 0xBB], 'modrm':None}),
(mem64_reg64, {'opcode':[0x0F, 0xBB], 'modrm':None}),
(reg32_imm8, {'opcode':[0x0F, 0xBA], 'modrm':0x38}),
(mem32_imm8, {'opcode':[0x0F, 0xBA], 'modrm':0x38}),
(reg32_reg32, {'opcode':[0x0F, 0xBB], 'modrm':None}),
(mem32_reg32, {'opcode':[0x0F, 0xBB], 'modrm':None}),
(reg16_imm8, {'opcode':[0x0F, 0xBA], 'modrm':0x38}),
(mem16_imm8, {'opcode':[0x0F, 0xBA], 'modrm':0x38}),
(reg16_reg16, {'opcode':[0x0F, 0xBB], 'modrm':None}),
(mem16_reg16, {'opcode':[0x0F, 0xBB], 'modrm':None}))
class btr(DispatchInstruction):
dispatch = (
(reg64_imm8, {'opcode':[0x0F, 0xBA], 'modrm':0x30}),
(mem64_imm8, {'opcode':[0x0F, 0xBA], 'modrm':0x30}),
(reg64_reg64, {'opcode':[0x0F, 0xB3], 'modrm':None}),
(mem64_reg64, {'opcode':[0x0F, 0xB3], 'modrm':None}),
(reg32_imm8, {'opcode':[0x0F, 0xBA], 'modrm':0x30}),
(mem32_imm8, {'opcode':[0x0F, 0xBA], 'modrm':0x30}),
(reg32_reg32, {'opcode':[0x0F, 0xB3], 'modrm':None}),
(mem32_reg32, {'opcode':[0x0F, 0xB3], 'modrm':None}),
(reg16_imm8, {'opcode':[0x0F, 0xBA], 'modrm':0x30}),
(mem16_imm8, {'opcode':[0x0F, 0xBA], 'modrm':0x30}),
(reg16_reg16, {'opcode':[0x0F, 0xB3], 'modrm':None}),
(mem16_reg16, {'opcode':[0x0F, 0xB3], 'modrm':None}))
class bts(DispatchInstruction):
dispatch = (
(reg64_imm8, {'opcode':[0x0F, 0xBA], 'modrm':0x28}),
(mem64_imm8, {'opcode':[0x0F, 0xBA], 'modrm':0x28}),
(reg64_reg64, {'opcode':[0x0F, 0xAB], 'modrm':None}),
(mem64_reg64, {'opcode':[0x0F, 0xAB], 'modrm':None}),
(reg32_imm8, {'opcode':[0x0F, 0xBA], 'modrm':0x28}),
(mem32_imm8, {'opcode':[0x0F, 0xBA], 'modrm':0x28}),
(reg32_reg32, {'opcode':[0x0F, 0xAB], 'modrm':None}),
(mem32_reg32, {'opcode':[0x0F, 0xAB], 'modrm':None}),
(reg16_imm8, {'opcode':[0x0F, 0xBA], 'modrm':0x28}),
(mem16_imm8, {'opcode':[0x0F, 0xBA], 'modrm':0x28}),
(reg16_reg16, {'opcode':[0x0F, 0xAB], 'modrm':None}),
(mem16_reg16, {'opcode':[0x0F, 0xAB], 'modrm':None}))
class call(DispatchInstruction):
dispatch = (
(lbl32off, {'opcode':[0xE8], 'modrm':None}),
(rel32off, {'opcode':[0xE8], 'modrm':None}),
(reg64, {'opcode':[0xFF], 'modrm':0x10}),
(mem64_32, {'opcode':[0xFF], 'modrm':0x10}),
(reg16, {'opcode':[0xFF], 'modrm':0x10}),
(mem16, {'opcode':[0x66, 0xFF], 'modrm':0x10}))
class cbw(Instruction):
machine_inst = no_op
params = {'opcode':[0x66, 0x98], 'modrm':None}
class cdq(Instruction):
machine_inst = no_op
params = {'opcode':[0x99], 'modrm':None}
class cdqe(Instruction):
machine_inst = no_op
params = {'opcode':[0x48, 0x98], 'modrm':None}
class clc(Instruction):
machine_inst = no_op
params = {'opcode':[0xF8], 'modrm':None}
class cld(Instruction):
machine_inst = no_op
params = {'opcode':[0xFC], 'modrm':None}
class clflush(Instruction):
machine_inst = mem8
params = {'opcode':[0x0F, 0xAE], 'modrm':0x38}
class cli(Instruction):
machine_inst = no_op
params = {'opcode':[0xFA], 'modrm':None}
class clts(Instruction):
machine_inst = no_op
params = {'opcode':[0x0F, 0x06], 'modrm':None}
class cmc(Instruction):
machine_inst = no_op
params = {'opcode':[0xF5], 'modrm':None}
class cmovo(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x40], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x40], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x40], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x40], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x40], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x40], 'modrm':None}))
class cmovno(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x41], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x41], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x41], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x41], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x41], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x41], 'modrm':None}))
class cmovb(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x42], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x42], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x42], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x42], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x42], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x42], 'modrm':None}))
class cmovc(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x42], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x42], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x42], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x42], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x42], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x42], 'modrm':None}))
class cmovnae(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x42], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x42], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x42], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x42], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x42], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x42], 'modrm':None}))
class cmovnb(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x43], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x43], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x43], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x43], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x43], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x43], 'modrm':None}))
class cmovnc(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x43], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x43], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x43], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x43], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x43], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x43], 'modrm':None}))
class cmovae(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x43], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x43], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x43], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x43], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x43], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x43], 'modrm':None}))
class cmovz(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x44], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x44], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x44], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x44], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x44], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x44], 'modrm':None}))
class cmove(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x44], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x44], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x44], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x44], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x44], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x44], 'modrm':None}))
class cmovnz(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x45], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x45], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x45], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x45], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x45], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x45], 'modrm':None}))
class cmovne(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x45], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x45], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x45], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x45], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x45], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x45], 'modrm':None}))
class cmovbe(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x46], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x46], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x46], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x46], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x46], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x46], 'modrm':None}))
class cmovna(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x46], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x46], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x46], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x46], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x46], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x46], 'modrm':None}))
class cmovnbe(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x47], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x47], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x47], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x47], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x47], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x47], 'modrm':None}))
class cmova(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x47], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x47], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x47], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x47], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x47], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x47], 'modrm':None}))
class cmovs(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x48], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x48], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x48], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x48], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x48], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x48], 'modrm':None}))
class cmovns(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x49], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x49], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x49], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x49], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x49], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x49], 'modrm':None}))
class cmovp(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x4A], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x4A], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x4A], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x4A], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x4A], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x4A], 'modrm':None}))
class cmovpe(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x4A], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x4A], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x4A], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x4A], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x4A], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x4A], 'modrm':None}))
class cmovnp(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x4B], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x4B], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x4B], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x4B], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x4B], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x4B], 'modrm':None}))
class cmovpo(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x4B], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x4B], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x4B], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x4B], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x4B], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x4B], 'modrm':None}))
class cmovl(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x4C], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x4C], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x4C], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x4C], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x4C], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x4C], 'modrm':None}))
class cmovnge(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x4C], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x4C], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x4C], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x4C], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x4C], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x4C], 'modrm':None}))
class cmovnl(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x4D], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x4D], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x4D], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x4D], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x4D], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x4D], 'modrm':None}))
class cmovge(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x4D], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x4D], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x4D], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x4D], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x4D], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x4D], 'modrm':None}))
class cmovle(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x4E], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x4E], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x4E], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x4E], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x4E], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x4E], 'modrm':None}))
class cmovng(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x4E], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x4E], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x4E], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x4E], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x4E], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x4E], 'modrm':None}))
class cmovnle(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x4F], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x4F], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x4F], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x4F], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x4F], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x4F], 'modrm':None}))
class cmovg(DispatchInstruction):
dispatch = (
(reg64_reg64_rev, {'opcode':[0x0F, 0x4F], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0x4F], 'modrm':None}),
(reg32_reg32_rev, {'opcode':[0x0F, 0x4F], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0x4F], 'modrm':None}),
(reg16_reg16_rev, {'opcode':[0x0F, 0x4F], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0x4F], 'modrm':None}))
class cmp(DispatchInstruction):
dispatch = (
(reg64_simm8, {'opcode':[0x83], 'modrm':0x38}),
(mem64_simm8, {'opcode':[0x83], 'modrm':0x38}),
(rax_imm32, {'opcode':[0x3D], 'modrm':None}),
(reg64_imm32, {'opcode':[0x81], 'modrm':0x38}),
(mem64_imm32, {'opcode':[0x81], 'modrm':0x38}),
(reg64_reg64, {'opcode':[0x39], 'modrm':None}),
(mem64_reg64, {'opcode':[0x39], 'modrm':None}),
(reg64_mem64, {'opcode':[0x3B], 'modrm':None}),
(reg32_simm8, {'opcode':[0x83], 'modrm':0x38}),
(mem32_simm8, {'opcode':[0x83], 'modrm':0x38}),
(eax_imm32, {'opcode':[0x3D], 'modrm':None}),
(reg32_imm32, {'opcode':[0x81], 'modrm':0x38}),
(mem32_imm32, {'opcode':[0x81], 'modrm':0x38}),
(reg32_reg32, {'opcode':[0x39], 'modrm':None}),
(mem32_reg32, {'opcode':[0x39], 'modrm':None}),
(reg32_mem32, {'opcode':[0x3B], 'modrm':None}),
(reg16_simm8, {'opcode':[0x83], 'modrm':0x38}),
(mem16_simm8, {'opcode':[0x83], 'modrm':0x38}),
(ax_imm16, {'opcode':[0x66, 0x3D], 'modrm':None}),
(reg16_imm16, {'opcode':[0x81], 'modrm':0x38}),
(mem16_imm16, {'opcode':[0x81], 'modrm':0x38}),
(reg16_reg16, {'opcode':[0x39], 'modrm':None}),
(mem16_reg16, {'opcode':[0x39], 'modrm':None}),
(reg16_mem16, {'opcode':[0x3B], 'modrm':None}),
(al_imm8, {'opcode':[0x3C], 'modrm':None}),
(reg8_imm8, {'opcode':[0x80], 'modrm':0x38}),
(mem8_imm8, {'opcode':[0x80], 'modrm':0x38}),
(reg8_reg8, {'opcode':[0x38], 'modrm':None}),
(mem8_reg8, {'opcode':[0x38], 'modrm':None}),
(reg8_mem8, {'opcode':[0x3A], 'modrm':None}))
class cmpsb(Instruction):
machine_inst = no_op
params = {'opcode':[0xA6], 'modrm':None}
class cmpsd(Instruction):
machine_inst = no_op
params = {'opcode':[0xA7], 'modrm':None}
class cmpsw(Instruction):
machine_inst = no_op
params = {'opcode':[0x66, 0xA7], 'modrm':None}
class cmpsq(Instruction):
machine_inst = no_op
params = {'opcode':[0x48, 0xA7], 'modrm':None}
class cmpxchg(DispatchInstruction):
dispatch = (
(reg64_reg64, {'opcode':[0x0F, 0xB1], 'modrm':None}),
(mem64_reg64, {'opcode':[0x0F, 0xB1], 'modrm':None}),
(reg32_reg32, {'opcode':[0x0F, 0xB1], 'modrm':None}),
(mem32_reg32, {'opcode':[0x0F, 0xB1], 'modrm':None}),
(reg16_reg16, {'opcode':[0x0F, 0xB1], 'modrm':None}),
(mem16_reg16, {'opcode':[0x0F, 0xB1], 'modrm':None}),
(reg8_reg8, {'opcode':[0x0F, 0xB0], 'modrm':None}),
(mem8_reg8, {'opcode':[0x0F, 0xB0], 'modrm':None}))
class cmpxchg8b(Instruction):
machine_inst = mem64_32
params = {'opcode':[0x0F, 0xC7], 'modrm':0x08}
class cmpxchg16b(Instruction):
machine_inst = mem128
params = {'opcode':[0x0F, 0xC7], 'modrm':0x08}
class cpuid(Instruction):
machine_inst = no_op
params = {'opcode':[0x0F, 0xA2], 'modrm':None}
class cqo(Instruction):
machine_inst = no_op
params = {'opcode':[0x48, 0x99], 'modrm':None}
class crc32(DispatchInstruction):
dispatch = (
(reg64_reg64, {'opcode':[0x0F, 0x38, 0xF1], 'modrm':None, 'prefix':[0xF2]}),
(reg64_mem64, {'opcode':[0x0F, 0x38, 0xF1], 'modrm':None, 'prefix':[0xF2]}),
(reg64_reg8, {'opcode':[0x0F, 0x38, 0xF0], 'modrm':None, 'prefix':[0xF2]}),
(reg64_mem8, {'opcode':[0x0F, 0x38, 0xF0], 'modrm':None, 'prefix':[0xF2]}),
(reg32_reg32, {'opcode':[0x0F, 0x38, 0xF1], 'modrm':None, 'prefix':[0xF2]}),
(reg32_mem32, {'opcode':[0x0F, 0x38, 0xF1], 'modrm':None, 'prefix':[0xF2]}),
(reg32_reg16, {'opcode':[0x0F, 0x38, 0xF1], 'modrm':None, 'prefix':[0x66, 0xF2]}),
(reg32_mem16, {'opcode':[0x0F, 0x38, 0xF1], 'modrm':None, 'prefix':[0x66, 0xF2]}),
(reg32_reg8, {'opcode':[0x0F, 0x38, 0xF0], 'modrm':None, 'prefix':[0xF2]}),
(reg32_mem8, {'opcode':[0x0F, 0x38, 0xF0], 'modrm':None, 'prefix':[0xF2]}))
class cwd(Instruction):
machine_inst = no_op
params = {'opcode':[0x66, 0x99], 'modrm':None}
class cwde(Instruction):
machine_inst = no_op
params = {'opcode':[0x98], 'modrm':None}
class dec(DispatchInstruction):
dispatch = (
(reg64, {'opcode':[0xFF], 'modrm':0x08}),
(mem64, {'opcode':[0xFF], 'modrm':0x08}),
(reg32, {'opcode':[0xFF], 'modrm':0x08}),
(mem32, {'opcode':[0xFF], 'modrm':0x08}),
(reg16, {'opcode':[0xFF], 'modrm':0x08}),
(mem16, {'opcode':[0x66, 0xFF], 'modrm':0x08}),
(reg8, {'opcode':[0xFE], 'modrm':0x08}),
(mem8, {'opcode':[0xFE], 'modrm':0x08}))
class div(DispatchInstruction):
dispatch = (
(reg64, {'opcode':[0xF7], 'modrm':0x30}),
(mem64, {'opcode':[0xF7], 'modrm':0x30}),
(reg32, {'opcode':[0xF7], 'modrm':0x30}),
(mem32, {'opcode':[0xF7], 'modrm':0x30}),
(reg16, {'opcode':[0xF7], 'modrm':0x30}),
(mem16, {'opcode':[0x66, 0xF7], 'modrm':0x30}),
(reg8, {'opcode':[0xF6], 'modrm':0x30}),
(mem8, {'opcode':[0xF6], 'modrm':0x30}))
class enter(Instruction):
machine_inst = imm16_imm8
params = {'opcode':[0xC8], 'modrm':None}
class hlt(Instruction):
machine_inst = no_op
params = {'opcode':[0xF4], 'modrm':None}
class idiv(DispatchInstruction):
dispatch = (
(reg32, {'opcode':[0xF7], 'modrm':0x38}),
(mem32, {'opcode':[0xF7], 'modrm':0x38}),
(reg16, {'opcode':[0xF7], 'modrm':0x38}),
(mem16, {'opcode':[0x66, 0xF7], 'modrm':0x38}),
(reg8, {'opcode':[0xF6], 'modrm':0x38}),
(mem8, {'opcode':[0xF6], 'modrm':0x38}))
class imul(DispatchInstruction):
dispatch = (
(reg64, {'opcode':[0xF7], 'modrm':0x28}),
(mem64, {'opcode':[0xF7], 'modrm':0x28}),
(reg64_reg64_rev, {'opcode':[0x0F, 0xAF], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0F, 0xAF], 'modrm':None}),
(reg64_reg64_simm8_rev,{'opcode':[0x6B], 'modrm':None}),
(reg64_mem64_simm8, {'opcode':[0x6B], 'modrm':None}),
(reg64_reg64_imm32, {'opcode':[0x69], 'modrm':None}),
(reg64_mem64_imm32, {'opcode':[0x69], 'modrm':None}),
(reg32, {'opcode':[0xF7], 'modrm':0x28}),
(mem32, {'opcode':[0xF7], 'modrm':0x28}),
(reg32_reg32_rev, {'opcode':[0x0F, 0xAF], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0F, 0xAF], 'modrm':None}),
(reg32_reg32_simm8_rev,{'opcode':[0x6B], 'modrm':None}),
(reg32_mem32_simm8, {'opcode':[0x6B], 'modrm':None}),
(reg32_reg32_imm32, {'opcode':[0x69], 'modrm':None}),
(reg32_mem32_imm32, {'opcode':[0x69], 'modrm':None}),
(reg16, {'opcode':[0xF7], 'modrm':0x28}),
(mem16, {'opcode':[0x66, 0xF7], 'modrm':0x28}),
(reg16_reg16, {'opcode':[0x0F, 0xAF], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0F, 0xAF], 'modrm':None}),
(reg16_reg16_simm8_rev,{'opcode':[0x6B], 'modrm':None}),
(reg16_mem16_simm8, {'opcode':[0x6B], 'modrm':None}),
(reg16_reg16_imm16, {'opcode':[0x69], 'modrm':None}),
(reg16_mem16_imm16, {'opcode':[0x69], 'modrm':None}),
(reg8, {'opcode':[0xF6], 'modrm':0x28}),
(mem8, {'opcode':[0xF6], 'modrm':0x28}))
class in_(DispatchInstruction):
dispatch = (
(eax_dx, {'opcode':[0xED], 'modrm':None}),
(ax_dx, {'opcode':[0x66, 0xED], 'modrm':None}),
(al_dx, {'opcode':[0xEC], 'modrm':None}),
(eax_imm8, {'opcode':[0xE5], 'modrm':None}),
(ax_imm8, {'opcode':[0x66, 0xE5], 'modrm':None}),
(al_imm8, {'opcode':[0xE4], 'modrm':None}))
class inc(DispatchInstruction):
dispatch = (
(reg64, {'opcode':[0xFF], 'modrm':0x00}),
(mem64, {'opcode':[0xFF], 'modrm':0x00}),
(reg32, {'opcode':[0xFF], 'modrm':0x00}),
(mem32, {'opcode':[0xFF], 'modrm':0x00}),
(reg16, {'opcode':[0xFF], 'modrm':0x00}),
(mem16, {'opcode':[0x66, 0xFF], 'modrm':0x00}),
(reg8, {'opcode':[0xFE], 'modrm':0x00}),
(mem8, {'opcode':[0xFE], 'modrm':0x00}))
class insb(Instruction):
machine_inst = no_op
params = {'opcode':[0x6C], 'modrm':None}
class insd(Instruction):
machine_inst = no_op
params = {'opcode':[0x6D], 'modrm':None}
class insw(Instruction):
machine_inst = no_op
params = {'opcode':[0x66, 0x6D], 'modrm':None}
class int_(Instruction):
machine_inst = imm8
params = {'opcode':[0xCD], 'modrm':None}
class int_3(Instruction):
"""NOTE - this is a special form of 'int 3' used for debugging; see the
architecture manuals for more information."""
machine_inst = no_op
params = {'opcode':[0xCC], 'modrm':None}
class invd(Instruction):
machine_inst = no_op
params = {'opcode':[0x0F, 0x08], 'modrm':None}
class invlpg(Instruction):
machine_inst = mem8
params = {'opcode':[0x0F, 0x01], 'modrm':0x38}
class iret(Instruction):
machine_inst = no_op
params = {'opcode':[0xCF], 'modrm':None}
class iretd(Instruction):
machine_inst = no_op
params = {'opcode':[0xCF], 'modrm':None}
class iretq(Instruction):
machine_inst = no_op
params = {'opcode':[0x48, 0xCF], 'modrm':None}
class ja(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x77], [0x0F, 0x87]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x77], [0x0F, 0x87]], 'modrm':None}))
#(rel8off, {'opcode':[0x77], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x87], 'modrm':None}))
class jae(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x73], [0x0F, 0x83]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x73], [0x0F, 0x83]], 'modrm':None}))
#(rel8off, {'opcode':[0x73], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x83], 'modrm':None}))
class jb(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x72], [0x0F, 0x82]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x72], [0x0F, 0x82]], 'modrm':None}))
#(rel8off, {'opcode':[0x72], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x82], 'modrm':None}))
class jbe(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x76], [0x0F, 0x86]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x76], [0x0F, 0x86]], 'modrm':None}))
#(rel8off, {'opcode':[0x76], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x86], 'modrm':None}))
class jc(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x72], [0x0F, 0x82]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x72], [0x0F, 0x82]], 'modrm':None}))
#(rel8off, {'opcode':[0x72], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x82], 'modrm':None}))
class je(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x74], [0x0F, 0x84]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x74], [0x0F, 0x84]], 'modrm':None}))
#(rel8off, {'opcode':[0x74], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x84], 'modrm':None}))
class jecxz(DispatchInstruction):
dispatch = (
(lbl8off, {'opcode':[0x67, 0xE3], 'modrm':None}),
(rel8off, {'opcode':[0x67, 0xE3], 'modrm':None}))
class jg(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x7F], [0x0F, 0x8F]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x7F], [0x0F, 0x8F]], 'modrm':None}))
#(rel8off, {'opcode':[0x7F], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x8F], 'modrm':None}))
class jge(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x7D], [0x0F, 0x8D]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x7D], [0x0F, 0x8D]], 'modrm':None}))
#(rel8off, {'opcode':[0x7D], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x8D], 'modrm':None}))
class jl(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x7C], [0x0F, 0x8C]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x7C], [0x0F, 0x8C]], 'modrm':None}))
#(rel8off, {'opcode':[0x7C], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x8C], 'modrm':None}))
class jle(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x7E], [0x0F, 0x8E]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x7E], [0x0F, 0x8E]], 'modrm':None}))
#(rel8off, {'opcode':[0x7E], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x8E], 'modrm':None}))
class jmp(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0xEB], [0xE9]], 'modrm':None}),
(rel32_8off, {'opcode':[[0xEB], [0xE9]], 'modrm':None}),
#(rel8off, {'opcode':[0xEB], 'modrm':None}),
#(rel32off, {'opcode':[0xE9], 'modrm':None}),
(reg64, {'opcode':[0xFF], 'modrm':0x20}),
(mem64_32, {'opcode':[0xFF], 'modrm':0x20}),
(reg16, {'opcode':[0xFF], 'modrm':0x20}),
(mem16, {'opcode':[0x66, 0xFF], 'modrm':0x20}))
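# NOTE - the nested opcode lists above ([[short], [near]]) carry both
# displacement forms; an assembler can use the one-byte-offset 0xEB form
# when the target fits in a signed byte and fall back to 0xE9 otherwise.
# A minimal sketch of that choice, assuming disp is measured from the end
# of the instruction:
def _pick_jmp_bytes(opcodes, disp):
    short, near = opcodes
    if -128 <= disp <= 127:
        return short + [disp & 0xFF]                            # e.g. [0xEB, rel8]
    return near + [(disp >> (8 * i)) & 0xFF for i in range(4)]  # rel32, little-endian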
class jna(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x76], [0x0F, 0x86]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x76], [0x0F, 0x86]], 'modrm':None}))
#(rel8off, {'opcode':[0x76], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x86], 'modrm':None}))
class jnae(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x72], [0x0F, 0x82]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x72], [0x0F, 0x82]], 'modrm':None}))
#(rel8off, {'opcode':[0x72], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x82], 'modrm':None}))
class jnb(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x73], [0x0F, 0x83]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x73], [0x0F, 0x83]], 'modrm':None}))
#(rel8off, {'opcode':[0x73], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x83], 'modrm':None}))
class jnbe(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x77], [0x0F, 0x87]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x77], [0x0F, 0x87]], 'modrm':None}))
#(rel8off, {'opcode':[0x77], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x87], 'modrm':None}))
class jnc(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x73], [0x0F, 0x83]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x73], [0x0F, 0x83]], 'modrm':None}))
#(rel8off, {'opcode':[0x73], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x83], 'modrm':None}))
class jne(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x75], [0x0F, 0x85]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x75], [0x0F, 0x85]], 'modrm':None}))
#(rel8off, {'opcode':[0x75], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x85], 'modrm':None}))
class jng(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x7E], [0x0F, 0x8E]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x7E], [0x0F, 0x8E]], 'modrm':None}))
#(rel8off, {'opcode':[0x7E], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x8E], 'modrm':None}))
class jnge(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x7C], [0x0F, 0x8C]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x7C], [0x0F, 0x8C]], 'modrm':None}))
#(rel8off, {'opcode':[0x7C], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x8C], 'modrm':None}))
class jnl(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x7D], [0x0F, 0x8D]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x7D], [0x0F, 0x8D]], 'modrm':None}))
#(rel8off, {'opcode':[0x7D], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x8D], 'modrm':None}))
class jnle(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x7F], [0x0F, 0x8F]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x7F], [0x0F, 0x8F]], 'modrm':None}))
#(rel8off, {'opcode':[0x7F], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x8F], 'modrm':None}))
class jno(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x71], [0x0F, 0x81]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x71], [0x0F, 0x81]], 'modrm':None}))
#(rel8off, {'opcode':[0x71], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x81], 'modrm':None}))
class jnp(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x7B], [0x0F, 0x8B]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x7B], [0x0F, 0x8B]], 'modrm':None}))
#(rel8off, {'opcode':[0x7B], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x8B], 'modrm':None}))
class jns(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x79], [0x0F, 0x89]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x79], [0x0F, 0x89]], 'modrm':None}))
#(rel8off, {'opcode':[0x79], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x89], 'modrm':None}))
class jnz(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x75], [0x0F, 0x85]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x75], [0x0F, 0x85]], 'modrm':None}))
#(rel8off, {'opcode':[0x75], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x85], 'modrm':None}))
class jo(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x70], [0x0F, 0x80]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x70], [0x0F, 0x80]], 'modrm':None}))
#(rel8off, {'opcode':[0x70], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x80], 'modrm':None}))
class jp(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x7A], [0x0F, 0x8A]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x7A], [0x0F, 0x8A]], 'modrm':None}))
#(rel8off, {'opcode':[0x7A], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x8A], 'modrm':None}))
class jpe(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x7A], [0x0F, 0x8A]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x7A], [0x0F, 0x8A]], 'modrm':None}))
#(rel8off, {'opcode':[0x7A], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x8A], 'modrm':None}))
class jpo(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x7B], [0x0F, 0x8B]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x7B], [0x0F, 0x8B]], 'modrm':None}))
#(rel8off, {'opcode':[0x7B], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x8B], 'modrm':None}))
class jrcxz(DispatchInstruction):
dispatch = (
(lbl8off, {'opcode':[0xE3], 'modrm':None}),
(rel8off, {'opcode':[0xE3], 'modrm':None}))
class js(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x78], [0x0F, 0x88]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x78], [0x0F, 0x88]], 'modrm':None}))
#(rel8off, {'opcode':[0x78], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x88], 'modrm':None}))
class jz(DispatchInstruction):
dispatch = (
(lbl32_8off, {'opcode':[[0x74], [0x0F, 0x84]], 'modrm':None}),
(rel32_8off, {'opcode':[[0x74], [0x0F, 0x84]], 'modrm':None}))
#(rel8off, {'opcode':[0x74], 'modrm':None}),
#(rel32off, {'opcode':[0x0F, 0x84], 'modrm':None}))
class lahf(Instruction):
machine_inst = no_op
params = {'opcode':[0x9F], 'modrm':None}
class lea(DispatchInstruction):
dispatch = (
(reg64_mem, {'opcode':[0x8D], 'modrm':0x00}),
(reg32_mem, {'opcode':[0x8D], 'modrm':0x00}),
(reg16_mem, {'opcode':[0x8D], 'modrm':0x00}))
class leave(Instruction):
machine_inst = no_op
params = {'opcode':[0xC9], 'modrm':None}
class lfence(Instruction):
machine_inst = no_op
params = {'opcode':[0x0F, 0xAE, 0xE8], 'modrm':None}
class lodsb(Instruction):
machine_inst = no_op
params = {'opcode':[0xAC], 'modrm':None}
class lodsd(Instruction):
machine_inst = no_op
params = {'opcode':[0xAD], 'modrm':None}
class lodsq(Instruction):
machine_inst = no_op
params = {'opcode':[0x48, 0xAD], 'modrm':None}
class lodsw(Instruction):
machine_inst = no_op
params = {'opcode':[0x66, 0xAD], 'modrm':None}
class loop(DispatchInstruction):
dispatch = (
(lbl8off, {'opcode':[0xE2], 'modrm':None}),
(rel8off, {'opcode':[0xE2], 'modrm':None}))
class loope(DispatchInstruction):
dispatch = (
(lbl8off, {'opcode':[0xE1], 'modrm':None}),
(rel8off, {'opcode':[0xE1], 'modrm':None}))
class loopne(DispatchInstruction):
dispatch = (
(lbl8off, {'opcode':[0xE0], 'modrm':None}),
(rel8off, {'opcode':[0xE0], 'modrm':None}))
class loopnz(DispatchInstruction):
dispatch = (
(lbl8off, {'opcode':[0xE0], 'modrm':None}),
(rel8off, {'opcode':[0xE0], 'modrm':None}))
class loopz(DispatchInstruction):
dispatch = (
(lbl8off, {'opcode':[0xE1], 'modrm':None}),
(rel8off, {'opcode':[0xE1], 'modrm':None}))
class lzcnt(DispatchInstruction):
dispatch = (
(reg64_reg64, {'opcode':[0x0F, 0xBD], 'modrm':None, 'prefix':[0xF3]}),
(reg64_mem64, {'opcode':[0x0F, 0xBD], 'modrm':None, 'prefix':[0xF3]}),
(reg32_reg32, {'opcode':[0x0F, 0xBD], 'modrm':None, 'prefix':[0xF3]}),
(reg32_mem32, {'opcode':[0x0F, 0xBD], 'modrm':None, 'prefix':[0xF3]}),
(reg16_reg16, {'opcode':[0x0F, 0xBD], 'modrm':None, 'prefix':[0xF3]}),
(reg16_mem16, {'opcode':[0x0F, 0xBD], 'modrm':None, 'prefix':[0xF3]}))
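# NOTE - the 0xF3 'prefix' above is mandatory: without it, 0x0F 0xBD decodes
# as the older bsr instruction, and on CPUs lacking LZCNT the prefixed form
# silently executes as bsr anyway.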
class mfence(Instruction):
machine_inst = no_op
params = {'opcode':[0x0F, 0xAE, 0xF0], 'modrm':None}
class mov(DispatchInstruction):
dispatch = (
# TODO - implement moffset* operands!
(reg64_imm32, {'opcode':[0xC7], 'modrm':0x00}),
(mem64_imm32, {'opcode':[0xC7], 'modrm':0x00}),
(reg64_imm64, {'opcode':[0xB8], 'modrm':None}),
(reg64_reg64, {'opcode':[0x89], 'modrm':None}),
(mem64_reg64, {'opcode':[0x89], 'modrm':None}),
(reg64_mem64, {'opcode':[0x8B], 'modrm':None}),
(reg32_imm32, {'opcode':[0xB8], 'modrm':None}),
(mem32_imm32, {'opcode':[0xC7], 'modrm':0x00}),
(reg32_reg32, {'opcode':[0x89], 'modrm':None}),
(mem32_reg32, {'opcode':[0x89], 'modrm':None}),
(reg32_mem32, {'opcode':[0x8B], 'modrm':None}),
(reg16_imm16, {'opcode':[0xB8], 'modrm':None}),
(mem16_imm16, {'opcode':[0xC7], 'modrm':0x00}),
(reg16_reg16, {'opcode':[0x89], 'modrm':None}),
(mem16_reg16, {'opcode':[0x89], 'modrm':None}),
(reg16_mem16, {'opcode':[0x8B], 'modrm':None}),
(reg8_imm8, {'opcode':[0xB0], 'modrm':None}),
(mem8_imm8, {'opcode':[0xC6], 'modrm':0x00}),
(reg8_reg8, {'opcode':[0x88], 'modrm':None}),
(mem8_reg8, {'opcode':[0x88], 'modrm':None}),
(reg8_mem8, {'opcode':[0x8A], 'modrm':None}))
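# NOTE - dispatch order matters for 'mov reg64, imm': the 0xC7 /0 form
# sign-extends a 32-bit immediate, while 0xB8+r carries a full 64-bit
# immediate, so listing reg64_imm32 first yields the shorter encoding
# whenever the value fits. A minimal sketch of the fit test:
def _fits_simm32(value):
    # True when 'value' is representable as a sign-extended 32-bit immediate.
    return -(1 << 31) <= value < (1 << 31)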
class movnti(DispatchInstruction):
dispatch = (
(mem64_reg64, {'opcode':[0x0F, 0xC3], 'modrm':None}),
(mem32_reg32, {'opcode':[0x0F, 0xC3], 'modrm':None}))
# NOTE - movnti is an SSE2 instruction.
class movsb(Instruction):
machine_inst = no_op
params = {'opcode':[0xA4], 'modrm':None}
class movsd(Instruction):
machine_inst = no_op
params = {'opcode':[0xA5], 'modrm':None}
class movsq(Instruction):
machine_inst = no_op
params = {'opcode':[0x48, 0xA5], 'modrm':None}
class movsw(Instruction):
machine_inst = no_op
params = {'opcode':[0x66, 0xA5], 'modrm':None}
class movsx(DispatchInstruction):
dispatch = (
(reg64_reg8, {'opcode':[0x0F, 0xBE], 'modrm':None, 'prefix':[]}),
(reg64_mem8, {'opcode':[0x0F, 0xBE], 'modrm':None, 'prefix':[]}),
(reg64_reg16, {'opcode':[0x0F, 0xBF], 'modrm':None}),
(reg64_mem16, {'opcode':[0x0F, 0xBF], 'modrm':None}),
(reg32_reg8, {'opcode':[0x0F, 0xBE], 'modrm':None, 'prefix':[]}),
(reg32_mem8, {'opcode':[0x0F, 0xBE], 'modrm':None, 'prefix':[]}),
(reg32_reg16, {'opcode':[0x0F, 0xBF], 'modrm':None, 'prefix':[]}),
(reg32_mem16, {'opcode':[0x0F, 0xBF], 'modrm':None, 'prefix':[]}),
(reg16_reg8, {'opcode':[0x0F, 0xBE], 'modrm':None}),
(reg16_mem8, {'opcode':[0x0F, 0xBE], 'modrm':None}))
class movsxd(DispatchInstruction):
dispatch = (
(reg64_reg32, {'opcode':[0x63], 'modrm':None}),
(reg64_mem32, {'opcode':[0x63], 'modrm':None}))
class movzx(DispatchInstruction):
dispatch = (
(reg64_reg8, {'opcode':[0x0F, 0xB6], 'modrm':None, 'prefix':[]}),
(reg64_mem8, {'opcode':[0x0F, 0xB6], 'modrm':None, 'prefix':[]}),
(reg64_reg16, {'opcode':[0x0F, 0xB7], 'modrm':None}),
(reg64_mem16, {'opcode':[0x0F, 0xB7], 'modrm':None}),
(reg32_reg8, {'opcode':[0x0F, 0xB6], 'modrm':None, 'prefix':[]}),
(reg32_mem8, {'opcode':[0x0F, 0xB6], 'modrm':None, 'prefix':[]}),
(reg32_reg16, {'opcode':[0x0F, 0xB7], 'modrm':None, 'prefix':[]}),
(reg32_mem16, {'opcode':[0x0F, 0xB7], 'modrm':None, 'prefix':[]}),
(reg16_reg8, {'opcode':[0x0F, 0xB6], 'modrm':None}),
(reg16_mem8, {'opcode':[0x0F, 0xB6], 'modrm':None}))
class mul(DispatchInstruction):
dispatch = (
(reg64, {'opcode':[0xF7], 'modrm':0x20}),
(mem64, {'opcode':[0xF7], 'modrm':0x20}),
(reg32, {'opcode':[0xF7], 'modrm':0x20}),
(mem32, {'opcode':[0xF7], 'modrm':0x20}),
(reg16, {'opcode':[0xF7], 'modrm':0x20}),
(mem16, {'opcode':[0x66, 0xF7], 'modrm':0x20}),
(reg8, {'opcode':[0xF6], 'modrm':0x20}),
(mem8, {'opcode':[0xF6], 'modrm':0x20}))
class neg(DispatchInstruction):
dispatch = (
(reg64, {'opcode':[0xF7], 'modrm':0x18}),
(mem64, {'opcode':[0xF7], 'modrm':0x18}),
(reg32, {'opcode':[0xF7], 'modrm':0x18}),
(mem32, {'opcode':[0xF7], 'modrm':0x18}),
(reg16, {'opcode':[0xF7], 'modrm':0x18}),
(mem16, {'opcode':[0x66, 0xF7], 'modrm':0x18}),
(reg8, {'opcode':[0xF6], 'modrm':0x18}),
(mem8, {'opcode':[0xF6], 'modrm':0x18}))
# TODO - the REX prefix isn't needed for the reg64/mem64 versions; what to do?
# Could add an extra 'rex' param indicating whether REX is needed.
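# For reference, a REX prefix packs four flag bits above a fixed 0100
# nibble; REX.W is what widens these forms to 64-bit operands. A minimal
# sketch:
def _rex(w, r, x, b):
    # 0b0100WRXB: W = 64-bit operand size; R, X, B extend the ModRM reg,
    # SIB index, and reg/base fields respectively.
    return 0x40 | (w << 3) | (r << 2) | (x << 1) | b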
class nop(Instruction):
machine_inst = no_op
params = {'opcode':[0x90], 'modrm':None}
class not_(DispatchInstruction):
dispatch = (
(reg64, {'opcode':[0xF7], 'modrm':0x10}),
(mem64, {'opcode':[0xF7], 'modrm':0x10}),
(reg32, {'opcode':[0xF7], 'modrm':0x10}),
(mem32, {'opcode':[0xF7], 'modrm':0x10}),
(reg16, {'opcode':[0xF7], 'modrm':0x10}),
(mem16, {'opcode':[0x66, 0xF7], 'modrm':0x10}),
(reg8, {'opcode':[0xF6], 'modrm':0x10}),
(mem8, {'opcode':[0xF6], 'modrm':0x10}))
class or_(DispatchInstruction):
dispatch = (
(reg64_simm8, {'opcode':[0x83], 'modrm':0x08}),
(mem64_simm8, {'opcode':[0x83], 'modrm':0x08}),
(rax_imm32, {'opcode':[0x0D], 'modrm':None}),
(reg64_imm32, {'opcode':[0x81], 'modrm':0x08}),
(mem64_imm32, {'opcode':[0x81], 'modrm':0x08}),
(mem64_reg64, {'opcode':[0x09], 'modrm':None}),
(reg64_reg64, {'opcode':[0x09], 'modrm':None}),
(reg64_mem64, {'opcode':[0x0B], 'modrm':None}),
(reg32_simm8, {'opcode':[0x83], 'modrm':0x08}),
(mem32_simm8, {'opcode':[0x83], 'modrm':0x08}),
(eax_imm32, {'opcode':[0x0D], 'modrm':None}),
(reg32_imm32, {'opcode':[0x81], 'modrm':0x08}),
(mem32_imm32, {'opcode':[0x81], 'modrm':0x08}),
(mem32_reg32, {'opcode':[0x09], 'modrm':None}),
(reg32_reg32, {'opcode':[0x09], 'modrm':None}),
(reg32_mem32, {'opcode':[0x0B], 'modrm':None}),
(reg16_simm8, {'opcode':[0x83], 'modrm':0x08}),
(mem16_simm8, {'opcode':[0x83], 'modrm':0x08}),
(ax_imm16, {'opcode':[0x66, 0x0D], 'modrm':None}),
(reg16_imm16, {'opcode':[0x81], 'modrm':0x08}),
(mem16_imm16, {'opcode':[0x81], 'modrm':0x08}),
(mem16_reg16, {'opcode':[0x09], 'modrm':None}),
(reg16_reg16, {'opcode':[0x09], 'modrm':None}),
(reg16_mem16, {'opcode':[0x0B], 'modrm':None}),
(al_imm8, {'opcode':[0x0C], 'modrm':None}),
(reg8_imm8, {'opcode':[0x80], 'modrm':0x08}),
(mem8_imm8, {'opcode':[0x80], 'modrm':0x08}),
(reg8_reg8, {'opcode':[0x08], 'modrm':None}),
(mem8_reg8, {'opcode':[0x08], 'modrm':None}),
(reg8_mem8, {'opcode':[0x0A], 'modrm':None}))
class out(DispatchInstruction):
dispatch = (
(dx_eax, {'opcode':[0xEF], 'modrm':None}),
(dx_ax, {'opcode':[0x66, 0xEF], 'modrm':None}),
(dx_al, {'opcode':[0xEE], 'modrm':None}),
(imm8_eax, {'opcode':[0xE7], 'modrm':None}),
(imm8_ax, {'opcode':[0x66, 0xE7], 'modrm':None}),
(imm8_al, {'opcode':[0xE6], 'modrm':None}))
class outsb(Instruction):
machine_inst = no_op
params = {'opcode':[0x6E], 'modrm':None}
class outsd(Instruction):
machine_inst = no_op
params = {'opcode':[0x6F], 'modrm':None}
class outsw(Instruction):
machine_inst = no_op
params = {'opcode':[0x66, 0x6F], 'modrm':None}
class pause(Instruction):
machine_inst = no_op
params = {'opcode':[0xF3, 0x90], 'modrm':None}
class pop(DispatchInstruction):
dispatch = (
(reg64, {'opcode':[0x58], 'modrm':None}),
(mem64, {'opcode':[0x8F], 'modrm':0x00}),
(reg16, {'opcode':[0x58], 'modrm':None}),
(mem16, {'opcode':[0x66, 0x8F], 'modrm':0x00}))
class popcnt(DispatchInstruction):
dispatch = (
(reg64_reg64, {'opcode':[0x0F, 0xB8], 'modrm':None, 'prefix':[0xF3]}),
(reg64_mem64, {'opcode':[0x0F, 0xB8], 'modrm':None, 'prefix':[0xF3]}),
(reg32_reg32, {'opcode':[0x0F, 0xB8], 'modrm':None, 'prefix':[0xF3]}),
(reg32_mem32, {'opcode':[0x0F, 0xB8], 'modrm':None, 'prefix':[0xF3]}),
(reg16_reg16, {'opcode':[0x0F, 0xB8], 'modrm':None, 'prefix':[0xF3]}),
(reg16_mem16, {'opcode':[0x0F, 0xB8], 'modrm':None, 'prefix':[0xF3]}))
class popf(Instruction):
machine_inst = no_op
params = {'opcode':[0x66, 0x9D], 'modrm':None}
class popfq(Instruction):
machine_inst = no_op
params = {'opcode':[0x9D], 'modrm':None}
class prefetch(Instruction):
machine_inst = mem8
params = {'opcode':[0x0F, 0x0D], 'modrm':0x00}
class prefetchnta(Instruction):
machine_inst = mem8
params = {'opcode':[0x0F, 0x18], 'modrm':0x00}
class prefetcht0(Instruction):
machine_inst = mem8
params = {'opcode':[0x0F, 0x18], 'modrm':0x08}
class prefetcht1(Instruction):
machine_inst = mem8
params = {'opcode':[0x0F, 0x18], 'modrm':0x10}
class prefetcht2(Instruction):
machine_inst = mem8
params = {'opcode':[0x0F, 0x18], 'modrm':0x18}
class prefetchw(Instruction):
machine_inst = mem8
params = {'opcode':[0x0F, 0x0D], 'modrm':0x08}
class push(DispatchInstruction):
dispatch = (
(reg64, {'opcode':[0x50], 'modrm':None}),
(mem64, {'opcode':[0xFF], 'modrm':0x30}),
# TODO - add keyword arg to override operand size?
#(imm8, {'opcode':[0x6A], 'modrm':None}),
#(imm16, {'opcode':[0x66, 0x68], 'modrm':None}),
(imm32, {'opcode':[0x68], 'modrm':None}),
(reg16, {'opcode':[0x50], 'modrm':None}),
(mem16, {'opcode':[0x66, 0xFF], 'modrm':0x30}))
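# NOTE - in 64-bit mode the 0x68 imm32 form pushes its immediate
# sign-extended to 64 bits, which is why no imm64 push form is needed above.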
class pushf(Instruction):
machine_inst = no_op
params = {'opcode':[0x66, 0x9C], 'modrm':None}
class pushfq(Instruction):
machine_inst = no_op
params = {'opcode':[0x9C], 'modrm':None}
class rcl(DispatchInstruction):
dispatch = (
(reg64_1, {'opcode':[0xD1], 'modrm':0x10}),
(mem64_1, {'opcode':[0xD1], 'modrm':0x10}),
(reg64_cl, {'opcode':[0xD3], 'modrm':0x10}),
(mem64_cl, {'opcode':[0xD3], 'modrm':0x10}),
(reg64_simm8, {'opcode':[0xC1], 'modrm':0x10}),
(mem64_simm8, {'opcode':[0xC1], 'modrm':0x10}),
(reg32_1, {'opcode':[0xD1], 'modrm':0x10}),
(mem32_1, {'opcode':[0xD1], 'modrm':0x10}),
(reg32_cl, {'opcode':[0xD3], 'modrm':0x10}),
(mem32_cl, {'opcode':[0xD3], 'modrm':0x10}),
(reg32_simm8, {'opcode':[0xC1], 'modrm':0x10}),
(mem32_simm8, {'opcode':[0xC1], 'modrm':0x10}),
(reg16_1, {'opcode':[0xD1], 'modrm':0x10}),
(mem16_1, {'opcode':[0xD1], 'modrm':0x10}),
(reg16_cl, {'opcode':[0xD3], 'modrm':0x10}),
(mem16_cl, {'opcode':[0xD3], 'modrm':0x10}),
(reg16_simm8, {'opcode':[0xC1], 'modrm':0x10}),
(mem16_simm8, {'opcode':[0xC1], 'modrm':0x10}),
(reg8_1, {'opcode':[0xD0], 'modrm':0x10}),
(mem8_1, {'opcode':[0xD0], 'modrm':0x10}),
(reg8_cl, {'opcode':[0xD2], 'modrm':0x10}),
(mem8_cl, {'opcode':[0xD2], 'modrm':0x10}),
(reg8_imm8, {'opcode':[0xC0], 'modrm':0x10}),
(mem8_imm8, {'opcode':[0xC0], 'modrm':0x10}))
class rcr(DispatchInstruction):
dispatch = (
(reg64_1, {'opcode':[0xD1], 'modrm':0x18}),
(mem64_1, {'opcode':[0xD1], 'modrm':0x18}),
(reg64_cl, {'opcode':[0xD3], 'modrm':0x18}),
(mem64_cl, {'opcode':[0xD3], 'modrm':0x18}),
(reg64_simm8, {'opcode':[0xC1], 'modrm':0x18}),
(mem64_simm8, {'opcode':[0xC1], 'modrm':0x18}),
(reg32_1, {'opcode':[0xD1], 'modrm':0x18}),
(mem32_1, {'opcode':[0xD1], 'modrm':0x18}),
(reg32_cl, {'opcode':[0xD3], 'modrm':0x18}),
(mem32_cl, {'opcode':[0xD3], 'modrm':0x18}),
(reg32_simm8, {'opcode':[0xC1], 'modrm':0x18}),
(mem32_simm8, {'opcode':[0xC1], 'modrm':0x18}),
(reg16_1, {'opcode':[0xD1], 'modrm':0x18}),
(mem16_1, {'opcode':[0xD1], 'modrm':0x18}),
(reg16_cl, {'opcode':[0xD3], 'modrm':0x18}),
(mem16_cl, {'opcode':[0xD3], 'modrm':0x18}),
(reg16_simm8, {'opcode':[0xC1], 'modrm':0x18}),
(mem16_simm8, {'opcode':[0xC1], 'modrm':0x18}),
(reg8_1, {'opcode':[0xD0], 'modrm':0x18}),
(mem8_1, {'opcode':[0xD0], 'modrm':0x18}),
(reg8_cl, {'opcode':[0xD2], 'modrm':0x18}),
(mem8_cl, {'opcode':[0xD2], 'modrm':0x18}),
(reg8_imm8, {'opcode':[0xC0], 'modrm':0x18}),
(mem8_imm8, {'opcode':[0xC0], 'modrm':0x18}))
class rdtsc(Instruction):
machine_inst = no_op
params = {'opcode':[0x0F, 0x31], 'modrm':None}
class rdtscp(Instruction):
machine_inst = no_op
params = {'opcode':[0x0F, 0x01, 0xF9], 'modrm':None}
class ret(DispatchInstruction):
dispatch = (
(no_op, {'opcode':[0xC3], 'modrm':None}),
(imm16, {'opcode':[0xC2], 'modrm':None}))
class rol(DispatchInstruction):
dispatch = (
(reg64_1, {'opcode':[0xD1], 'modrm':0x00}),
(mem64_1, {'opcode':[0xD1], 'modrm':0x00}),
(reg64_cl, {'opcode':[0xD3], 'modrm':0x00}),
(mem64_cl, {'opcode':[0xD3], 'modrm':0x00}),
(reg64_imm8, {'opcode':[0xC1], 'modrm':0x00}),
(mem64_imm8, {'opcode':[0xC1], 'modrm':0x00}),
(reg32_1, {'opcode':[0xD1], 'modrm':0x00}),
(mem32_1, {'opcode':[0xD1], 'modrm':0x00}),
(reg32_cl, {'opcode':[0xD3], 'modrm':0x00}),
(mem32_cl, {'opcode':[0xD3], 'modrm':0x00}),
(reg32_imm8, {'opcode':[0xC1], 'modrm':0x00}),
(mem32_imm8, {'opcode':[0xC1], 'modrm':0x00}),
(reg16_1, {'opcode':[0xD1], 'modrm':0x00}),
(mem16_1, {'opcode':[0xD1], 'modrm':0x00}),
(reg16_cl, {'opcode':[0xD3], 'modrm':0x00}),
(mem16_cl, {'opcode':[0xD3], 'modrm':0x00}),
(reg16_imm8, {'opcode':[0xC1], 'modrm':0x00}),
(mem16_imm8, {'opcode':[0xC1], 'modrm':0x00}),
(reg8_1, {'opcode':[0xD0], 'modrm':0x00}),
(mem8_1, {'opcode':[0xD0], 'modrm':0x00}),
(reg8_cl, {'opcode':[0xD2], 'modrm':0x00}),
(mem8_cl, {'opcode':[0xD2], 'modrm':0x00}),
(reg8_imm8, {'opcode':[0xC0], 'modrm':0x00}),
(mem8_imm8, {'opcode':[0xC0], 'modrm':0x00}))
class ror(DispatchInstruction):
dispatch = (
(reg64_1, {'opcode':[0xD1], 'modrm':0x08}),
(mem64_1, {'opcode':[0xD1], 'modrm':0x08}),
(reg64_cl, {'opcode':[0xD3], 'modrm':0x08}),
(mem64_cl, {'opcode':[0xD3], 'modrm':0x08}),
(reg64_imm8, {'opcode':[0xC1], 'modrm':0x08}),
(mem64_imm8, {'opcode':[0xC1], 'modrm':0x08}),
(reg32_1, {'opcode':[0xD1], 'modrm':0x08}),
(mem32_1, {'opcode':[0xD1], 'modrm':0x08}),
(reg32_cl, {'opcode':[0xD3], 'modrm':0x08}),
(mem32_cl, {'opcode':[0xD3], 'modrm':0x08}),
(reg32_imm8, {'opcode':[0xC1], 'modrm':0x08}),
(mem32_imm8, {'opcode':[0xC1], 'modrm':0x08}),
(reg16_1, {'opcode':[0xD1], 'modrm':0x08}),
(mem16_1, {'opcode':[0xD1], 'modrm':0x08}),
(reg16_cl, {'opcode':[0xD3], 'modrm':0x08}),
(mem16_cl, {'opcode':[0xD3], 'modrm':0x08}),
(reg16_imm8, {'opcode':[0xC1], 'modrm':0x08}),
(mem16_imm8, {'opcode':[0xC1], 'modrm':0x08}),
(reg8_1, {'opcode':[0xD0], 'modrm':0x08}),
(mem8_1, {'opcode':[0xD0], 'modrm':0x08}),
(reg8_cl, {'opcode':[0xD2], 'modrm':0x08}),
(mem8_cl, {'opcode':[0xD2], 'modrm':0x08}),
(reg8_imm8, {'opcode':[0xC0], 'modrm':0x08}),
(mem8_imm8, {'opcode':[0xC0], 'modrm':0x08}))
class sahf(Instruction):
machine_inst = no_op
params = {'opcode':[0x9E], 'modrm':None}
class sal(DispatchInstruction):
dispatch = (
(reg64_1, {'opcode':[0xD1], 'modrm':0x20}),
(mem64_1, {'opcode':[0xD1], 'modrm':0x20}),
(reg64_cl, {'opcode':[0xD3], 'modrm':0x20}),
(mem64_cl, {'opcode':[0xD3], 'modrm':0x20}),
(reg64_simm8, {'opcode':[0xC1], 'modrm':0x20}),
(mem64_simm8, {'opcode':[0xC1], 'modrm':0x20}),
(reg32_1, {'opcode':[0xD1], 'modrm':0x20}),
(mem32_1, {'opcode':[0xD1], 'modrm':0x20}),
(reg32_cl, {'opcode':[0xD3], 'modrm':0x20}),
(mem32_cl, {'opcode':[0xD3], 'modrm':0x20}),
(reg32_simm8, {'opcode':[0xC1], 'modrm':0x20}),
(mem32_simm8, {'opcode':[0xC1], 'modrm':0x20}),
(reg16_1, {'opcode':[0xD1], 'modrm':0x20}),
(mem16_1, {'opcode':[0xD1], 'modrm':0x20}),
(reg16_cl, {'opcode':[0xD3], 'modrm':0x20}),
(mem16_cl, {'opcode':[0xD3], 'modrm':0x20}),
(reg16_simm8, {'opcode':[0xC1], 'modrm':0x20}),
(mem16_simm8, {'opcode':[0xC1], 'modrm':0x20}),
(reg8_1, {'opcode':[0xD0], 'modrm':0x20}),
(mem8_1, {'opcode':[0xD0], 'modrm':0x20}),
(reg8_cl, {'opcode':[0xD2], 'modrm':0x20}),
(mem8_cl, {'opcode':[0xD2], 'modrm':0x20}),
(reg8_imm8, {'opcode':[0xC0], 'modrm':0x20}),
(mem8_imm8, {'opcode':[0xC0], 'modrm':0x20}))
class sar(DispatchInstruction):
dispatch = (
(reg64_1, {'opcode':[0xD1], 'modrm':0x38}),
(mem64_1, {'opcode':[0xD1], 'modrm':0x38}),
(reg64_cl, {'opcode':[0xD3], 'modrm':0x38}),
(mem64_cl, {'opcode':[0xD3], 'modrm':0x38}),
(reg64_simm8, {'opcode':[0xC1], 'modrm':0x38}),
(mem64_simm8, {'opcode':[0xC1], 'modrm':0x38}),
(reg32_1, {'opcode':[0xD1], 'modrm':0x38}),
(mem32_1, {'opcode':[0xD1], 'modrm':0x38}),
(reg32_cl, {'opcode':[0xD3], 'modrm':0x38}),
(mem32_cl, {'opcode':[0xD3], 'modrm':0x38}),
(reg32_simm8, {'opcode':[0xC1], 'modrm':0x38}),
(mem32_simm8, {'opcode':[0xC1], 'modrm':0x38}),
(reg16_1, {'opcode':[0xD1], 'modrm':0x38}),
(mem16_1, {'opcode':[0xD1], 'modrm':0x38}),
(reg16_cl, {'opcode':[0xD3], 'modrm':0x38}),
(mem16_cl, {'opcode':[0xD3], 'modrm':0x38}),
(reg16_simm8, {'opcode':[0xC1], 'modrm':0x38}),
(mem16_simm8, {'opcode':[0xC1], 'modrm':0x38}),
(reg8_1, {'opcode':[0xD0], 'modrm':0x38}),
(mem8_1, {'opcode':[0xD0], 'modrm':0x38}),
(reg8_cl, {'opcode':[0xD2], 'modrm':0x38}),
(mem8_cl, {'opcode':[0xD2], 'modrm':0x38}),
(reg8_imm8, {'opcode':[0xC0], 'modrm':0x38}),
(mem8_imm8, {'opcode':[0xC0], 'modrm':0x38}))
class sbb(DispatchInstruction):
dispatch = (
(reg64_simm8, {'opcode':[0x83], 'modrm':0x18}),
(mem64_simm8, {'opcode':[0x83], 'modrm':0x18}),
(rax_imm32, {'opcode':[0x1D], 'modrm':None}),
(reg64_imm32, {'opcode':[0x81], 'modrm':0x18}),
(mem64_imm32, {'opcode':[0x81], 'modrm':0x18}),
(reg64_reg64, {'opcode':[0x19], 'modrm':None}),
(mem64_reg64, {'opcode':[0x19], 'modrm':None}),
(reg64_mem64, {'opcode':[0x1B], 'modrm':None}),
(reg32_simm8, {'opcode':[0x83], 'modrm':0x18}),
(mem32_simm8, {'opcode':[0x83], 'modrm':0x18}),
(eax_imm32, {'opcode':[0x1D], 'modrm':None}),
(reg32_imm32, {'opcode':[0x81], 'modrm':0x18}),
(mem32_imm32, {'opcode':[0x81], 'modrm':0x18}),
(reg32_reg32, {'opcode':[0x19], 'modrm':None}),
(mem32_reg32, {'opcode':[0x19], 'modrm':None}),
(reg32_mem32, {'opcode':[0x1B], 'modrm':None}),
(reg16_simm8, {'opcode':[0x83], 'modrm':0x18}),
(mem16_simm8, {'opcode':[0x83], 'modrm':0x18}),
(ax_imm16, {'opcode':[0x66, 0x1D], 'modrm':None}),
(reg16_imm16, {'opcode':[0x81], 'modrm':0x18}),
(mem16_imm16, {'opcode':[0x81], 'modrm':0x18}),
(reg16_reg16, {'opcode':[0x19], 'modrm':None}),
(mem16_reg16, {'opcode':[0x19], 'modrm':None}),
(reg16_mem16, {'opcode':[0x1B], 'modrm':None}),
(al_imm8, {'opcode':[0x1C], 'modrm':None}),
(reg8_imm8, {'opcode':[0x80], 'modrm':0x18}),
(mem8_imm8, {'opcode':[0x80], 'modrm':0x18}),
(reg8_reg8, {'opcode':[0x18], 'modrm':None}),
(mem8_reg8, {'opcode':[0x18], 'modrm':None}),
(reg8_mem8, {'opcode':[0x1A], 'modrm':None}))
class scasb(Instruction):
machine_inst = no_op
params = {'opcode':[0xAE], 'modrm':None}
class scasd(Instruction):
machine_inst = no_op
params = {'opcode':[0xAF], 'modrm':None}
class scasq(Instruction):
machine_inst = no_op
params = {'opcode':[0x48, 0xAF], 'modrm':None}
class scasw(Instruction):
machine_inst = no_op
params = {'opcode':[0x66, 0xAF], 'modrm':None}
class seta(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x97], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x97], 'modrm':0x00}))
class setae(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x93], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x93], 'modrm':0x00}))
class setb(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x92], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x92], 'modrm':0x00}))
class setbe(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x96], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x96], 'modrm':0x00}))
class setc(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x92], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x92], 'modrm':0x00}))
class sete(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x94], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x94], 'modrm':0x00}))
class setg(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x9F], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x9F], 'modrm':0x00}))
class setge(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x9D], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x9D], 'modrm':0x00}))
class setl(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x9C], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x9C], 'modrm':0x00}))
class setle(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x9E], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x9E], 'modrm':0x00}))
class setna(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x96], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x96], 'modrm':0x00}))
class setnae(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x92], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x92], 'modrm':0x00}))
class setnb(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x93], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x93], 'modrm':0x00}))
class setnbe(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x97], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x97], 'modrm':0x00}))
class setnc(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x93], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x93], 'modrm':0x00}))
class setne(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x95], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x95], 'modrm':0x00}))
class setng(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x9E], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x9E], 'modrm':0x00}))
class setnge(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x9C], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x9C], 'modrm':0x00}))
class setnl(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x9D], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x9D], 'modrm':0x00}))
class setnle(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x9F], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x9F], 'modrm':0x00}))
class setno(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x91], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x91], 'modrm':0x00}))
class setnp(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x9B], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x9B], 'modrm':0x00}))
class setns(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x99], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x99], 'modrm':0x00}))
class setnz(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x95], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x95], 'modrm':0x00}))
class seto(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x90], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x90], 'modrm':0x00}))
class setp(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x9A], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x9A], 'modrm':0x00}))
class setpe(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x9A], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x9A], 'modrm':0x00}))
class setpo(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x9B], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x9B], 'modrm':0x00}))
class sets(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x98], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x98], 'modrm':0x00}))
class setz(DispatchInstruction):
dispatch = (
(reg8, {'opcode':[0x0F, 0x94], 'modrm':0x00}),
(mem8, {'opcode':[0x0F, 0x94], 'modrm':0x00}))
class sfence(Instruction):
machine_inst = no_op
params = {'opcode':[0x0F, 0xAE, 0xF8], 'modrm':None}
class shl(DispatchInstruction):
dispatch = (
(reg64_1, {'opcode':[0xD1], 'modrm':0x20}),
(mem64_1, {'opcode':[0xD1], 'modrm':0x20}),
(reg64_cl, {'opcode':[0xD3], 'modrm':0x20}),
(mem64_cl, {'opcode':[0xD3], 'modrm':0x20}),
(reg64_imm8, {'opcode':[0xC1], 'modrm':0x20}),
(mem64_imm8, {'opcode':[0xC1], 'modrm':0x20}),
(reg32_1, {'opcode':[0xD1], 'modrm':0x20}),
(mem32_1, {'opcode':[0xD1], 'modrm':0x20}),
(reg32_cl, {'opcode':[0xD3], 'modrm':0x20}),
(mem32_cl, {'opcode':[0xD3], 'modrm':0x20}),
(reg32_imm8, {'opcode':[0xC1], 'modrm':0x20}),
(mem32_imm8, {'opcode':[0xC1], 'modrm':0x20}),
(reg16_1, {'opcode':[0xD1], 'modrm':0x20}),
(mem16_1, {'opcode':[0xD1], 'modrm':0x20}),
(reg16_cl, {'opcode':[0xD3], 'modrm':0x20}),
(mem16_cl, {'opcode':[0xD3], 'modrm':0x20}),
(reg16_imm8, {'opcode':[0xC1], 'modrm':0x20}),
(mem16_imm8, {'opcode':[0xC1], 'modrm':0x20}),
(reg8_1, {'opcode':[0xD0], 'modrm':0x20}),
(mem8_1, {'opcode':[0xD0], 'modrm':0x20}),
(reg8_cl, {'opcode':[0xD2], 'modrm':0x20}),
(mem8_cl, {'opcode':[0xD2], 'modrm':0x20}),
(reg8_imm8, {'opcode':[0xC0], 'modrm':0x20}),
(mem8_imm8, {'opcode':[0xC0], 'modrm':0x20}))
class shld(DispatchInstruction):
dispatch = (
(reg64_reg64_imm8, {'opcode':[0x0F, 0xA4], 'modrm':None}),
(mem64_reg64_imm8, {'opcode':[0x0F, 0xA4], 'modrm':None}),
(reg64_reg64_cl, {'opcode':[0x0F, 0xA5], 'modrm':None}),
(mem64_reg64_cl, {'opcode':[0x0F, 0xA5], 'modrm':None}),
(reg32_reg32_imm8, {'opcode':[0x0F, 0xA4], 'modrm':None}),
(mem32_reg32_imm8, {'opcode':[0x0F, 0xA4], 'modrm':None}),
(reg32_reg32_cl, {'opcode':[0x0F, 0xA5], 'modrm':None}),
(mem32_reg32_cl, {'opcode':[0x0F, 0xA5], 'modrm':None}),
(reg16_reg16_imm8, {'opcode':[0x0F, 0xA4], 'modrm':None}),
(mem16_reg16_imm8, {'opcode':[0x0F, 0xA4], 'modrm':None}),
(reg16_reg16_cl, {'opcode':[0x0F, 0xA5], 'modrm':None}),
(mem16_reg16_cl, {'opcode':[0x0F, 0xA5], 'modrm':None}))
class shr(DispatchInstruction):
dispatch = (
(reg64_1, {'opcode':[0xD1], 'modrm':0x28}),
(mem64_1, {'opcode':[0xD1], 'modrm':0x28}),
(reg64_cl, {'opcode':[0xD3], 'modrm':0x28}),
(mem64_cl, {'opcode':[0xD3], 'modrm':0x28}),
(reg64_imm8, {'opcode':[0xC1], 'modrm':0x28}),
(mem64_imm8, {'opcode':[0xC1], 'modrm':0x28}),
(reg32_1, {'opcode':[0xD1], 'modrm':0x28}),
(mem32_1, {'opcode':[0xD1], 'modrm':0x28}),
(reg32_cl, {'opcode':[0xD3], 'modrm':0x28}),
(mem32_cl, {'opcode':[0xD3], 'modrm':0x28}),
(reg32_imm8, {'opcode':[0xC1], 'modrm':0x28}),
(mem32_imm8, {'opcode':[0xC1], 'modrm':0x28}),
(reg16_1, {'opcode':[0xD1], 'modrm':0x28}),
(mem16_1, {'opcode':[0xD1], 'modrm':0x28}),
(reg16_cl, {'opcode':[0xD3], 'modrm':0x28}),
(mem16_cl, {'opcode':[0xD3], 'modrm':0x28}),
(reg16_imm8, {'opcode':[0xC1], 'modrm':0x28}),
(mem16_imm8, {'opcode':[0xC1], 'modrm':0x28}),
(reg8_1, {'opcode':[0xD0], 'modrm':0x28}),
(mem8_1, {'opcode':[0xD0], 'modrm':0x28}),
(reg8_cl, {'opcode':[0xD2], 'modrm':0x28}),
(mem8_cl, {'opcode':[0xD2], 'modrm':0x28}),
(reg8_imm8, {'opcode':[0xC0], 'modrm':0x28}),
(mem8_imm8, {'opcode':[0xC0], 'modrm':0x28}))
class shrd(DispatchInstruction):
dispatch = (
(reg64_reg64_imm8, {'opcode':[0x0F, 0xAC], 'modrm':None}),
(mem64_reg64_imm8, {'opcode':[0x0F, 0xAC], 'modrm':None}),
(reg64_reg64_cl, {'opcode':[0x0F, 0xAD], 'modrm':None}),
(mem64_reg64_cl, {'opcode':[0x0F, 0xAD], 'modrm':None}),
(reg32_reg32_imm8, {'opcode':[0x0F, 0xAC], 'modrm':None}),
(mem32_reg32_imm8, {'opcode':[0x0F, 0xAC], 'modrm':None}),
(reg32_reg32_cl, {'opcode':[0x0F, 0xAD], 'modrm':None}),
(mem32_reg32_cl, {'opcode':[0x0F, 0xAD], 'modrm':None}),
(reg16_reg16_imm8, {'opcode':[0x0F, 0xAC], 'modrm':None}),
(mem16_reg16_imm8, {'opcode':[0x0F, 0xAC], 'modrm':None}),
(reg16_reg16_cl, {'opcode':[0x0F, 0xAD], 'modrm':None}),
(mem16_reg16_cl, {'opcode':[0x0F, 0xAD], 'modrm':None}))
class stc(Instruction):
machine_inst = no_op
params = {'opcode':[0xF9], 'modrm':None}
class std(Instruction):
machine_inst = no_op
params = {'opcode':[0xFD], 'modrm':None}
class stosb(Instruction):
machine_inst = no_op
params = {'opcode':[0xAA], 'modrm':None}
class stosd(Instruction):
machine_inst = no_op
params = {'opcode':[0xAB], 'modrm':None}
class stosq(Instruction):
machine_inst = no_op
params = {'opcode':[0x48, 0xAB], 'modrm':None}
class stosw(Instruction):
machine_inst = no_op
params = {'opcode':[0x66, 0xAB], 'modrm':None}
class sub(DispatchInstruction):
dispatch = (
(reg64_simm8, {'opcode':[0x83], 'modrm':0x28}),
(mem64_simm8, {'opcode':[0x83], 'modrm':0x28}),
(rax_imm32, {'opcode':[0x2D], 'modrm':None}),
(reg64_imm32, {'opcode':[0x81], 'modrm':0x28}),
(mem64_imm32, {'opcode':[0x81], 'modrm':0x28}),
(reg64_reg64, {'opcode':[0x29], 'modrm':None}),
(mem64_reg64, {'opcode':[0x29], 'modrm':None}),
(reg64_mem64, {'opcode':[0x2B], 'modrm':None}),
(reg32_simm8, {'opcode':[0x83], 'modrm':0x28}),
(mem32_simm8, {'opcode':[0x83], 'modrm':0x28}),
(eax_imm32, {'opcode':[0x2D], 'modrm':None}),
(reg32_imm32, {'opcode':[0x81], 'modrm':0x28}),
(mem32_imm32, {'opcode':[0x81], 'modrm':0x28}),
(reg32_reg32, {'opcode':[0x29], 'modrm':None}),
(mem32_reg32, {'opcode':[0x29], 'modrm':None}),
(reg32_mem32, {'opcode':[0x2B], 'modrm':None}),
(reg16_simm8, {'opcode':[0x83], 'modrm':0x28}),
(mem16_simm8, {'opcode':[0x83], 'modrm':0x28}),
(ax_imm16, {'opcode':[0x66, 0x2D], 'modrm':None}),
(reg16_imm16, {'opcode':[0x81], 'modrm':0x28}),
(mem16_imm16, {'opcode':[0x81], 'modrm':0x28}),
(reg16_reg16, {'opcode':[0x29], 'modrm':None}),
(mem16_reg16, {'opcode':[0x29], 'modrm':None}),
(reg16_mem16, {'opcode':[0x2B], 'modrm':None}),
(al_imm8, {'opcode':[0x2C], 'modrm':None}),
(reg8_imm8, {'opcode':[0x80], 'modrm':0x28}),
(mem8_imm8, {'opcode':[0x80], 'modrm':0x28}),
(reg8_reg8, {'opcode':[0x28], 'modrm':None}),
(mem8_reg8, {'opcode':[0x28], 'modrm':None}),
(reg8_mem8, {'opcode':[0x2A], 'modrm':None}))
class test(DispatchInstruction):
dispatch = (
(rax_imm32, {'opcode':[0xA9], 'modrm':None}),
(reg64_imm32, {'opcode':[0xF7], 'modrm':0x00}),
(mem64_imm32, {'opcode':[0xF7], 'modrm':0x00}),
(reg64_reg64, {'opcode':[0x85], 'modrm':None}),
(mem64_reg64, {'opcode':[0x85], 'modrm':None}),
(eax_imm32, {'opcode':[0xA9], 'modrm':None}),
(reg32_imm32, {'opcode':[0xF7], 'modrm':0x00}),
(mem32_imm32, {'opcode':[0xF7], 'modrm':0x00}),
(reg32_reg32, {'opcode':[0x85], 'modrm':None}),
(mem32_reg32, {'opcode':[0x85], 'modrm':None}),
(ax_imm16, {'opcode':[0x66, 0xA9], 'modrm':None}),
(reg16_imm16, {'opcode':[0xF7], 'modrm':0x00}),
(mem16_imm16, {'opcode':[0xF7], 'modrm':0x00}),
(reg16_reg16, {'opcode':[0x85], 'modrm':None}),
(mem16_reg16, {'opcode':[0x85], 'modrm':None}),
(al_imm8, {'opcode':[0xA8], 'modrm':None}),
(reg8_imm8, {'opcode':[0xF6], 'modrm':0x00}),
(mem8_imm8, {'opcode':[0xF6], 'modrm':0x00}),
(reg8_reg8, {'opcode':[0x84], 'modrm':None}),
(mem8_reg8, {'opcode':[0x84], 'modrm':None}))
class ud2(Instruction):
machine_inst = no_op
params = {'opcode':[0x0F, 0x0B], 'modrm':None}
class xadd(DispatchInstruction):
dispatch = (
(reg64_reg64, {'opcode':[0x0F, 0xC1], 'modrm':None}),
(mem64_reg64, {'opcode':[0x0F, 0xC1], 'modrm':None}),
(reg32_reg32, {'opcode':[0x0F, 0xC1], 'modrm':None}),
(mem32_reg32, {'opcode':[0x0F, 0xC1], 'modrm':None}),
(reg16_reg16, {'opcode':[0x0F, 0xC1], 'modrm':None}),
(mem16_reg16, {'opcode':[0x0F, 0xC1], 'modrm':None}),
(reg8_reg8, {'opcode':[0x0F, 0xC0], 'modrm':None}),
(mem8_reg8, {'opcode':[0x0F, 0xC0], 'modrm':None}))
class xchg(DispatchInstruction):
dispatch = (
(rax_reg64, {'opcode':[0x90], 'modrm':None}),
(reg64_rax, {'opcode':[0x90], 'modrm':None}),
(reg64_reg64, {'opcode':[0x87], 'modrm':None}),
(mem64_reg64, {'opcode':[0x87], 'modrm':None}),
(reg64_mem64, {'opcode':[0x87], 'modrm':None}),
(eax_reg32, {'opcode':[0x90], 'modrm':None}),
(reg32_eax, {'opcode':[0x90], 'modrm':None}),
(reg32_reg32, {'opcode':[0x87], 'modrm':None}),
(mem32_reg32, {'opcode':[0x87], 'modrm':None}),
(reg32_mem32, {'opcode':[0x87], 'modrm':None}),
(reg16_ax, {'opcode':[0x90], 'modrm':None}),
(ax_reg16, {'opcode':[0x90], 'modrm':None}),
(reg16_reg16, {'opcode':[0x87], 'modrm':None}),
(mem16_reg16, {'opcode':[0x87], 'modrm':None}),
(reg16_mem16, {'opcode':[0x87], 'modrm':None}),
(reg8_reg8, {'opcode':[0x86], 'modrm':None}),
(mem8_reg8, {'opcode':[0x86], 'modrm':None}),
(reg8_mem8, {'opcode':[0x86], 'modrm':None}))
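# NOTE - opcode 0x90 is also the canonical nop, so the eax/reg32 0x90 forms
# above cannot encode a true 'xchg eax, eax' in 64-bit mode: plain 0x90 is
# executed as nop (no 32-bit zero-extension); the 0x87 ModRM form covers
# that corner case.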
class xlatb(Instruction):
machine_inst = no_op
params = {'opcode':[0xD7], 'modrm':None}
class xor(DispatchInstruction):
dispatch = (
(reg64_simm8, {'opcode':[0x83], 'modrm':0x30}),
(mem64_simm8, {'opcode':[0x83], 'modrm':0x30}),
(rax_imm32, {'opcode':[0x35], 'modrm':None}),
(reg64_imm32, {'opcode':[0x81], 'modrm':0x30}),
(mem64_imm32, {'opcode':[0x81], 'modrm':0x30}),
(reg64_reg64, {'opcode':[0x31], 'modrm':None}),
(mem64_reg64, {'opcode':[0x31], 'modrm':None}),
(reg64_mem64, {'opcode':[0x33], 'modrm':None}),
(reg32_simm8, {'opcode':[0x83], 'modrm':0x30}),
(mem32_simm8, {'opcode':[0x83], 'modrm':0x30}),
(eax_imm32, {'opcode':[0x35], 'modrm':None}),
(reg32_imm32, {'opcode':[0x81], 'modrm':0x30}),
(mem32_imm32, {'opcode':[0x81], 'modrm':0x30}),
(reg32_reg32, {'opcode':[0x31], 'modrm':None}),
(mem32_reg32, {'opcode':[0x31], 'modrm':None}),
(reg32_mem32, {'opcode':[0x33], 'modrm':None}),
(reg16_simm8, {'opcode':[0x83], 'modrm':0x30}),
(mem16_simm8, {'opcode':[0x83], 'modrm':0x30}),
(ax_imm16, {'opcode':[0x66, 0x35], 'modrm':None}),
(reg16_imm16, {'opcode':[0x81], 'modrm':0x30}),
(mem16_imm16, {'opcode':[0x81], 'modrm':0x30}),
(reg16_reg16, {'opcode':[0x31], 'modrm':None}),
(mem16_reg16, {'opcode':[0x31], 'modrm':None}),
(reg16_mem16, {'opcode':[0x33], 'modrm':None}),
(al_imm8, {'opcode':[0x34], 'modrm':None}),
(reg8_imm8, {'opcode':[0x80], 'modrm':0x30}),
(mem8_imm8, {'opcode':[0x80], 'modrm':0x30}),
(reg8_reg8, {'opcode':[0x30], 'modrm':None}),
(mem8_reg8, {'opcode':[0x30], 'modrm':None}),
(reg8_mem8, {'opcode':[0x32], 'modrm':None}))
# X87_ISA = (
class f2xm1(Instruction):
machine_inst = no_op
params = {'opcode':[0xD9, 0xF0], 'modrm':None}
class fabs(Instruction):
machine_inst = no_op
params = {'opcode':[0xD9, 0xE1], 'modrm':None}
class fadd(DispatchInstruction):
dispatch = (
(st0_sti, {'opcode':[0xD8, 0xC0], 'modrm':None}),
(sti_st0, {'opcode':[0xDC, 0xC0], 'modrm':None}),
(mem32, {'opcode':[0xD8], 'modrm':0x00}),
(mem64, {'opcode':[0xDC], 'modrm':0x00}))
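# NOTE - in the register forms above, the index i of st(i) is folded into
# the low three bits of the second opcode byte (0xD8 0xC0+i, 0xDC 0xC0+i,
# ...). A minimal sketch of that encoding step:
def _x87_sti(base_opcode, i):
    # base_opcode like [0xD8, 0xC0]; i is the x87 stack index 0..7.
    return base_opcode[:-1] + [base_opcode[-1] | (i & 0x7)]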
class faddp(DispatchInstruction):
dispatch = (
(no_op, {'opcode':[0xDE, 0xC1], 'modrm':None}),
(sti_st0, {'opcode':[0xDE, 0xC0], 'modrm':None}))
class fiadd(DispatchInstruction):
dispatch = (
(mem32, {'opcode':[0xDA], 'modrm':0x00}),
(mem16, {'opcode':[0xDE], 'modrm':0x00}))
class fchs(Instruction):
machine_inst = no_op
params = {'opcode':[0xD9, 0xE0], 'modrm':None}
class fcmovb(Instruction):
machine_inst = st0_sti
params = {'opcode':[0xDA, 0xC0], 'modrm':None}
class fcmovbe(Instruction):
machine_inst = st0_sti
params = {'opcode':[0xDA, 0xD0], 'modrm':None}
class fcmove(Instruction):
machine_inst = st0_sti
params = {'opcode':[0xDA, 0xC8], 'modrm':None}
class fcmovnb(Instruction):
machine_inst = st0_sti
params = {'opcode':[0xDB, 0xC0], 'modrm':None}
class fcmovnbe(Instruction):
machine_inst = st0_sti
params = {'opcode':[0xDB, 0xD0], 'modrm':None}
class fcmovne(Instruction):
machine_inst = st0_sti
params = {'opcode':[0xDB, 0xC8], 'modrm':None}
class fcmovnu(Instruction):
machine_inst = st0_sti
params = {'opcode':[0xDB, 0xD8], 'modrm':None}
class fcmovu(Instruction):
machine_inst = st0_sti
params = {'opcode':[0xDA, 0xD8], 'modrm':None}
class fcom(DispatchInstruction):
dispatch = (
(no_op, {'opcode':[0xD8, 0xD1], 'modrm':None}),
(sti, {'opcode':[0xD8, 0xD0], 'modrm':None}),
(mem32, {'opcode':[0xD8], 'modrm':0x10}),
(mem64, {'opcode':[0xDC], 'modrm':0x10}))
class fcomp(DispatchInstruction):
dispatch = (
(no_op, {'opcode':[0xD8, 0xD9], 'modrm':None}),
(sti, {'opcode':[0xD8, 0xD8], 'modrm':None}),
(mem32, {'opcode':[0xD8], 'modrm':0x18}),
(mem64, {'opcode':[0xDC], 'modrm':0x18}))
class fcompp(Instruction):
machine_inst = no_op
params = {'opcode':[0xDE, 0xD9], 'modrm':None}
class fcomi(Instruction):
machine_inst = st0_sti
params = {'opcode':[0xDB, 0xF0], 'modrm':None}
class fcomip(Instruction):
machine_inst = st0_sti
params = {'opcode':[0xDF, 0xF0], 'modrm':None}
class fcos(Instruction):
machine_inst = no_op
params = {'opcode':[0xD9, 0xFF], 'modrm':None}
class fdecstp(Instruction):
machine_inst = no_op
params = {'opcode':[0xD9, 0xF6], 'modrm':None}
class fdiv(DispatchInstruction):
dispatch = (
(st0_sti, {'opcode':[0xD8, 0xF0], 'modrm':None}),
(sti_st0, {'opcode':[0xDC, 0xF8], 'modrm':None}),
(mem32, {'opcode':[0xD8], 'modrm':0x30}),
(mem64, {'opcode':[0xDC], 'modrm':0x30}))
class fdivp(DispatchInstruction):
dispatch = (
(no_op, {'opcode':[0xDE, 0xF9], 'modrm':None}),
(sti_st0, {'opcode':[0xDE, 0xF8], 'modrm':None}))
class fidiv(DispatchInstruction):
dispatch = (
(mem32, {'opcode':[0xDA], 'modrm':0x30}),
(mem16, {'opcode':[0xDE], 'modrm':0x30}))
class fdivr(DispatchInstruction):
dispatch = (
(st0_sti, {'opcode':[0xD8, 0xF8], 'modrm':None}),
(sti_st0, {'opcode':[0xDC, 0xF0], 'modrm':None}),
(mem32, {'opcode':[0xD8], 'modrm':0x38}),
(mem64, {'opcode':[0xDC], 'modrm':0x38}))
class fdivrp(DispatchInstruction):
dispatch = (
(no_op, {'opcode':[0xDE, 0xF1], 'modrm':None}),
(sti_st0, {'opcode':[0xDE, 0xF0], 'modrm':None}))
class fidivr(DispatchInstruction):
dispatch = (
(mem32, {'opcode':[0xDA], 'modrm':0x38}),
(mem16, {'opcode':[0xDE], 'modrm':0x38}))
class ffree(Instruction):
machine_inst = sti
params = {'opcode':[0xDD, 0xC0], 'modrm':None}
class ficom(DispatchInstruction):
dispatch = (
(mem32, {'opcode':[0xDA], 'modrm':0x10}),
(mem16, {'opcode':[0xDE], 'modrm':0x10}))
class ficomp(DispatchInstruction):
dispatch = (
(mem32, {'opcode':[0xDA], 'modrm':0x18}),
(mem16, {'opcode':[0xDE], 'modrm':0x18}))
class fild(DispatchInstruction):
dispatch = (
(mem64, {'opcode':[0xDF], 'modrm':0x28}),
(mem32, {'opcode':[0xDB], 'modrm':0x00}),
(mem16, {'opcode':[0xDF], 'modrm':0x00}))
class fincstp(Instruction):
machine_inst = no_op
params = {'opcode':[0xD9, 0xF7], 'modrm':None}
class finit(Instruction):
machine_inst = no_op
params = {'opcode':[0x9B, 0xDB, 0xE3], 'modrm':None}
class fninit(Instruction):
machine_inst = no_op
params = {'opcode':[0xDB, 0xE3], 'modrm':None}
class fist(DispatchInstruction):
dispatch = (
(mem32, {'opcode':[0xDB], 'modrm':0x10}),
(mem16, {'opcode':[0xDF], 'modrm':0x10}))
class fistp(DispatchInstruction):
dispatch = (
(mem64, {'opcode':[0xDF], 'modrm':0x38}),
(mem32, {'opcode':[0xDB], 'modrm':0x18}),
(mem16, {'opcode':[0xDF], 'modrm':0x18}))
class fisttp(DispatchInstruction):
dispatch = (
(mem64, {'opcode':[0xDD], 'modrm':0x08}),
(mem32, {'opcode':[0xDB], 'modrm':0x08}),
(mem16, {'opcode':[0xDF], 'modrm':0x08}))
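# NOTE - fisttp (truncating integer store) requires SSE3, unlike the rest of
# the x87 group here.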
class fld(DispatchInstruction):
dispatch = (
(sti, {'opcode':[0xD9, 0xC0], 'modrm':None}),
(mem80, {'opcode':[0xDB], 'modrm':0x28}),
(mem64, {'opcode':[0xDD], 'modrm':0x00}),
(mem32, {'opcode':[0xD9], 'modrm':0x00}))
class fld1(Instruction):
machine_inst = no_op
params = {'opcode':[0xD9, 0xE8], 'modrm':None}
class fldcw(Instruction):
machine_inst = mem16
params = {'opcode':[0xD9], 'modrm':0x28}
class fldenv(Instruction):
machine_inst = mem228
params = {'opcode':[0xD9], 'modrm':0x20}
class fldl2e(Instruction):
machine_inst = no_op
params = {'opcode':[0xD9, 0xEA], 'modrm':None}
class fldl2t(Instruction):
machine_inst = no_op
params = {'opcode':[0xD9, 0xE9], 'modrm':None}
class fldlg2(Instruction):
machine_inst = no_op
params = {'opcode':[0xD9, 0xEC], 'modrm':None}
class fldln2(Instruction):
machine_inst = no_op
params = {'opcode':[0xD9, 0xED], 'modrm':None}
class fldpi(Instruction):
machine_inst = no_op
params = {'opcode':[0xD9, 0xEB], 'modrm':None}
class fldz(Instruction):
machine_inst = no_op
params = {'opcode':[0xD9, 0xEE], 'modrm':None}
class fmul(DispatchInstruction):
dispatch = (
(st0_sti, {'opcode':[0xD8, 0xC8], 'modrm':None}),
(sti_st0, {'opcode':[0xDC, 0xC8], 'modrm':None}),
(mem32, {'opcode':[0xD8], 'modrm':0x08}),
(mem64, {'opcode':[0xDC], 'modrm':0x08}))
class fmulp(DispatchInstruction):
dispatch = (
(no_op, {'opcode':[0xDE, 0xC9], 'modrm':None}),
(sti_st0, {'opcode':[0xDE, 0xC8], 'modrm':None}))
class fimul(DispatchInstruction):
dispatch = (
(mem32, {'opcode':[0xDA], 'modrm':0x08}),
(mem16, {'opcode':[0xDE], 'modrm':0x08}))
class fnop(Instruction):
machine_inst = no_op
params = {'opcode':[0xD9, 0xD0], 'modrm':None}
class fpatan(Instruction):
machine_inst = no_op
params = {'opcode':[0xD9, 0xF3], 'modrm':None}
class fprem(Instruction):
machine_inst = no_op
params = {'opcode':[0xD9, 0xF8], 'modrm':None}
class fprem1(Instruction):
machine_inst = no_op
params = {'opcode':[0xD9, 0xF5], 'modrm':None}
class fptan(Instruction):
machine_inst = no_op
params = {'opcode':[0xD9, 0xF2], 'modrm':None}
class frndint(Instruction):
machine_inst = no_op
params = {'opcode':[0xD9, 0xFC], 'modrm':None}
class frstor(Instruction):
machine_inst = mem752
params = {'opcode':[0xDD], 'modrm':0x20}
class fsave(Instruction):
machine_inst = mem752
params = {'opcode':[0x9B, 0xDD], 'modrm':0x30}
class fnsave(Instruction):
machine_inst = mem752
params = {'opcode':[0xDD], 'modrm':0x30}
class fscale(Instruction):
machine_inst = no_op
params = {'opcode':[0xD9, 0xFD], 'modrm':None}
class fsin(Instruction):
machine_inst = no_op
params = {'opcode':[0xD9, 0xFE], 'modrm':None}
class fsincos(Instruction):
machine_inst = no_op
params = {'opcode':[0xD9, 0xFB], 'modrm':None}
class fsqrt(Instruction):
machine_inst = no_op
params = {'opcode':[0xD9, 0xFA], 'modrm':None}
class fst(DispatchInstruction):
dispatch = (
(sti, {'opcode':[0xDD, 0xD0], 'modrm':None}),
(mem64, {'opcode':[0xDD], 'modrm':0x10}),
(mem32, {'opcode':[0xD9], 'modrm':0x10}))
class fstp(DispatchInstruction):
dispatch = (
(sti, {'opcode':[0xDD, 0xD8], 'modrm':None}),
(mem80, {'opcode':[0xDB], 'modrm':0x38}),
(mem64, {'opcode':[0xDD], 'modrm':0x18}),
(mem32, {'opcode':[0xD9], 'modrm':0x18}))
class fstcw(Instruction):
machine_inst = mem16
params = {'opcode':[0x9B, 0xD9], 'modrm':0x38}
class fnstcw(Instruction):
machine_inst = mem16
params = {'opcode':[0xD9], 'modrm':0x38}
class fstenv(Instruction):
machine_inst = mem228
params = {'opcode':[0x9B, 0xD9], 'modrm':0x30}
class fnstenv(Instruction):
machine_inst = mem228
params = {'opcode':[0xD9], 'modrm':0x30}
class fstsw(DispatchInstruction):
dispatch = (
(ax, {'opcode':[0x9B, 0xDF, 0xE0], 'modrm':None}),
(mem16, {'opcode':[0x9B, 0xDD], 'modrm':0x38}))
class fnstsw(DispatchInstruction):
dispatch = (
(ax, {'opcode':[0xDF, 0xE0], 'modrm':None}),
(mem16, {'opcode':[0xDD], 'modrm':0x38}))
class fsub(DispatchInstruction):
dispatch = (
(st0_sti, {'opcode':[0xD8, 0xE0], 'modrm':None}),
(sti_st0, {'opcode':[0xDC, 0xE8], 'modrm':None}),
(mem32, {'opcode':[0xD8], 'modrm':0x20}),
(mem64, {'opcode':[0xDC], 'modrm':0x20}))
class fsubp(DispatchInstruction):
dispatch = (
(no_op, {'opcode':[0xDE, 0xE9], 'modrm':None}),
(sti_st0, {'opcode':[0xDE, 0xE8], 'modrm':None}))
class fisub(DispatchInstruction):
dispatch = (
(mem32, {'opcode':[0xDA], 'modrm':0x20}),
(mem16, {'opcode':[0xDE], 'modrm':0x20}))
class fsubr(DispatchInstruction):
dispatch = (
(st0_sti, {'opcode':[0xD8, 0xE8], 'modrm':None}),
(sti_st0, {'opcode':[0xDC, 0xE0], 'modrm':None}),
(mem32, {'opcode':[0xD8], 'modrm':0x28}),
(mem64, {'opcode':[0xDC], 'modrm':0x28}))
class fsubrp(DispatchInstruction):
dispatch = (
(no_op, {'opcode':[0xDE, 0xE1], 'modrm':None}),
(sti_st0, {'opcode':[0xDE, 0xE0], 'modrm':None}))
class fisubr(DispatchInstruction):
dispatch = (
(mem32, {'opcode':[0xDA], 'modrm':0x28}),
(mem16, {'opcode':[0xDE], 'modrm':0x28}))
class ftst(Instruction):
machine_inst = no_op
params = {'opcode':[0xD9, 0xE4], 'modrm':None}
class fucom(DispatchInstruction):
dispatch = (
(no_op, {'opcode':[0xDD, 0xE1], 'modrm':None}),
(sti, {'opcode':[0xDD, 0xE0], 'modrm':None}))
class fucomp(DispatchInstruction):
dispatch = (
(no_op, {'opcode':[0xDD, 0xE9], 'modrm':None}),
(sti, {'opcode':[0xDD, 0xE8], 'modrm':None}))
class fucompp(Instruction):
machine_inst = no_op
params = {'opcode':[0xDA, 0xE9], 'modrm':None}
class fucomi(Instruction):
machine_inst = st0_sti
params = {'opcode':[0xDB, 0xE8], 'modrm':None}
class fucomip(Instruction):
machine_inst = st0_sti
params = {'opcode':[0xDF, 0xE8], 'modrm':None}
class fwait(Instruction):
machine_inst = no_op
params = {'opcode':[0x9B], 'modrm':None}
class fxam(Instruction):
machine_inst = no_op
params = {'opcode':[0xD9, 0xE5], 'modrm':None}
class fxch(DispatchInstruction):
dispatch = (
(no_op, {'opcode':[0xD9, 0xC9], 'modrm':None}),
(sti, {'opcode':[0xD9, 0xC8], 'modrm':None}))
class fxrstor(Instruction):
machine_inst = mem4096
params = {'opcode':[0x0F, 0xAE], 'modrm':0x08}
# SSE? - fxrstor is gated on the FXSR CPUID feature rather than SSE proper.
class fxsave(Instruction):
machine_inst = mem4096
params = {'opcode':[0x0F, 0xAE], 'modrm':0x00}
# SSE? - fxsave, like fxrstor, is gated on the FXSR CPUID feature.
class fxtract(Instruction):
machine_inst = no_op
params = {'opcode':[0xD9, 0xF4], 'modrm':None}
class fyl2x(Instruction):
machine_inst = no_op
params = {'opcode':[0xD9, 0xF1], 'modrm':None}
class fyl2xp1(Instruction):
machine_inst = no_op
params = {'opcode':[0xD9, 0xF9], 'modrm':None}
# SSE_ISA = (
class addpd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x58], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x58], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class addps(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x58], 'modrm':None, 'prefix':[]}),
(xmm_mem128, {'opcode':[0x0F, 0x58], 'modrm':None, 'prefix':[]}))
arch_ext = 1
class addsd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x58], 'modrm':None, 'prefix':[0xF2]}),
(xmm_mem64, {'opcode':[0x0F, 0x58], 'modrm':None, 'prefix':[0xF2]}))
arch_ext = 2
class addss(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x58], 'modrm':None, 'prefix':[0xF3]}),
(xmm_mem32, {'opcode':[0x0F, 0x58], 'modrm':None, 'prefix':[0xF3]}))
arch_ext = 1
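# NOTE - the four add* forms above share the 0x0F 0x58 opcode; the mandatory
# prefix alone selects the data type, a pattern repeated throughout this
# section:
#   (none) -> ps packed single     0x66 -> pd packed double
#   0xF3   -> ss scalar single     0xF2 -> sd scalar double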
class addsubpd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xD0], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xD0], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 3
class addsubps(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xD0], 'modrm':None, 'prefix':[0xF2]}),
(xmm_mem128, {'opcode':[0x0F, 0xD0], 'modrm':None, 'prefix':[0xF2]}))
arch_ext = 3
class andnpd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x55], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x55], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class andnps(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x55], 'modrm':None, 'prefix':[]}),
(xmm_mem128, {'opcode':[0x0F, 0x55], 'modrm':None, 'prefix':[]}))
arch_ext = 1
class andpd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x54], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x54], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class andps(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x54], 'modrm':None, 'prefix':[]}),
(xmm_mem128, {'opcode':[0x0F, 0x54], 'modrm':None, 'prefix':[]}))
arch_ext = 1
class blendpd(DispatchInstruction):
dispatch = (
(xmm_xmm_imm8, {'opcode':[0x0F, 0x3A, 0x0D], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128_imm8, {'opcode':[0x0F, 0x3A, 0x0D], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class blendps(DispatchInstruction):
dispatch = (
(xmm_xmm_imm8, {'opcode':[0x0F, 0x3A, 0x0C], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128_imm8, {'opcode':[0x0F, 0x3A, 0x0C], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class blendvpd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x15], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x15], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class blendvps(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x14], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x14], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class cmpeqpd(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':0, 'prefix':[0x66]}),
(xmm_mem128_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':0, 'prefix':[0x66]}))
arch_ext = 2
class cmpeqps(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':0, 'prefix':[]}),
(xmm_mem128_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':0, 'prefix':[]}))
arch_ext = 1
class cmpeqsd(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':0, 'prefix':[0xF2]}),
(xmm_mem64_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':0, 'prefix':[0xF2]}))
arch_ext = 2
class cmpeqss(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':0, 'prefix':[0xF3]}),
(xmm_mem32_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':0, 'prefix':[0xF3]}))
arch_ext = 1
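# NOTE - the 'imm' field in these cmp* forms is the fixed predicate byte
# appended after the operands of cmpps/cmppd/cmpss/cmpsd:
#   0 = eq    1 = lt    2 = le    3 = unord
#   4 = neq   5 = nlt   6 = nle   7 = ord
# so cmpeq*/cmplt*/cmple*/cmpneq*/cmpnlt*/cmpnle* are all the same
# instruction with a baked-in immediate.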
class cmplepd(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':2, 'prefix':[0x66]}),
(xmm_mem128_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':2, 'prefix':[0x66]}))
arch_ext = 2
class cmpleps(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':2, 'prefix':[]}),
(xmm_mem128_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':2, 'prefix':[]}))
arch_ext = 1
class cmplesd(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':2, 'prefix':[0xF2]}),
(xmm_mem64_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':2, 'prefix':[0xF2]}))
arch_ext = 2
class cmpless(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':2, 'prefix':[0xF3]}),
(xmm_mem32_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':2, 'prefix':[0xF3]}))
arch_ext = 1
class cmpltpd(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':1, 'prefix':[0x66]}),
(xmm_mem128_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':1, 'prefix':[0x66]}))
arch_ext = 2
class cmpltps(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':1, 'prefix':[]}),
(xmm_mem128_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':1, 'prefix':[]}))
arch_ext = 1
class cmpltsd(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':1, 'prefix':[0xF2]}),
(xmm_mem64_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':1, 'prefix':[0xF2]}))
arch_ext = 2
class cmpltss(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':1, 'prefix':[0xF3]}),
(xmm_mem32_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':1, 'prefix':[0xF3]}))
arch_ext = 1
class cmpneqpd(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':4, 'prefix':[0x66]}),
(xmm_mem128_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':4, 'prefix':[0x66]}))
arch_ext = 2
class cmpneqps(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':4, 'prefix':[]}),
(xmm_mem128_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':4, 'prefix':[]}))
arch_ext = 1
class cmpneqsd(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':4, 'prefix':[0xF2]}),
(xmm_mem64_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':4, 'prefix':[0xF2]}))
arch_ext = 2
class cmpneqss(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':4, 'prefix':[0xF3]}),
(xmm_mem32_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':4, 'prefix':[0xF3]}))
arch_ext = 1
class cmpnlepd(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':6, 'prefix':[0x66]}),
(xmm_mem128_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':6, 'prefix':[0x66]}))
arch_ext = 2
class cmpnleps(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':6, 'prefix':[]}),
(xmm_mem128_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':6, 'prefix':[]}))
arch_ext = 1
class cmpnlesd(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':6, 'prefix':[0xF2]}),
(xmm_mem64_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':6, 'prefix':[0xF2]}))
arch_ext = 2
class cmpnless(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':6, 'prefix':[0xF3]}),
(xmm_mem32_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':6, 'prefix':[0xF3]}))
arch_ext = 1
class cmpnltpd(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':5, 'prefix':[0x66]}),
(xmm_mem128_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':5, 'prefix':[0x66]}))
arch_ext = 2
class cmpnltps(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':5, 'prefix':[]}),
(xmm_mem128_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':5, 'prefix':[]}))
arch_ext = 1
class cmpnltsd(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':5, 'prefix':[0xF2]}),
(xmm_mem64_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':5, 'prefix':[0xF2]}))
arch_ext = 2
class cmpnltss(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':5, 'prefix':[0xF3]}),
(xmm_mem32_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':5, 'prefix':[0xF3]}))
arch_ext = 1
class cmpordpd(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':7, 'prefix':[0x66]}),
(xmm_mem128_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':7, 'prefix':[0x66]}))
arch_ext = 2
class cmpordps(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':7, 'prefix':[]}),
(xmm_mem128_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':7, 'prefix':[]}))
arch_ext = 1
class cmpordsd(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':7, 'prefix':[0xF2]}),
(xmm_mem64_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':7, 'prefix':[0xF2]}))
arch_ext = 2
class cmpordss(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':7, 'prefix':[0xF3]}),
(xmm_mem32_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':7, 'prefix':[0xF3]}))
arch_ext = 1
class cmpunordpd(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':3, 'prefix':[0x66]}),
(xmm_mem128_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':3, 'prefix':[0x66]}))
arch_ext = 2
class cmpunordps(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':3, 'prefix':[]}),
(xmm_mem128_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':3, 'prefix':[]}))
arch_ext = 1
class cmpunordsd(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':3, 'prefix':[0xF2]}),
(xmm_mem64_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':3, 'prefix':[0xF2]}))
arch_ext = 2
class cmpunordss(DispatchInstruction):
dispatch = (
(xmm_xmm_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':3, 'prefix':[0xF3]}),
(xmm_mem32_imm, {'opcode':[0x0F, 0xC2], 'modrm':None, 'imm':3, 'prefix':[0xF3]}))
arch_ext = 1
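# The cmp* classes above hard-code the SSE comparison predicate through the
# 'imm' field: 0 = EQ, 1 = LT, 2 = LE, 3 = UNORD, 4 = NEQ, 5 = NLT, 6 = NLE,
# 7 = ORD.  The generic cmppd/cmpps/cmpsd/cmpss forms below emit the same
# 0F C2 opcode but leave the predicate to the caller as an explicit imm8.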
class cmppd(DispatchInstruction):
dispatch = (
(xmm_xmm_imm8, {'opcode':[0x0F, 0xC2], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128_imm8, {'opcode':[0x0F, 0xC2], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class cmpps(DispatchInstruction):
dispatch = (
(xmm_xmm_imm8, {'opcode':[0x0F, 0xC2], 'modrm':None, 'prefix':[]}),
(xmm_mem128_imm8, {'opcode':[0x0F, 0xC2], 'modrm':None, 'prefix':[]}))
arch_ext = 1
class cmpsd(DispatchInstruction):
dispatch = (
(xmm_xmm_imm8, {'opcode':[0x0F, 0xC2], 'modrm':None, 'prefix':[0xF2]}),
(xmm_mem64_imm8, {'opcode':[0x0F, 0xC2], 'modrm':None, 'prefix':[0xF2]}))
arch_ext = 2
class cmpss(DispatchInstruction):
dispatch = (
(xmm_xmm_imm8, {'opcode':[0x0F, 0xC2], 'modrm':None, 'prefix':[0xF3]}),
(xmm_mem32_imm8, {'opcode':[0x0F, 0xC2], 'modrm':None, 'prefix':[0xF3]}))
arch_ext = 1
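# Hypothetical usage (assuming xmm register operand objects defined earlier
# in this module): cmpps(xmm0, xmm1, 1) should assemble to the same bytes as
# cmpltps(xmm0, xmm1), since both emit 0F C2 /r with imm8 = 1.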
class comisd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x2F], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem64, {'opcode':[0x0F, 0x2F], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class comiss(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x2F], 'modrm':None, 'prefix':[]}),
    (xmm_mem32, {'opcode':[0x0F, 0x2F], 'modrm':None, 'prefix':[]}))
arch_ext = 1
class cvtdq2pd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xE6], 'modrm':None, 'prefix':[0xF3]}),
(xmm_mem64, {'opcode':[0x0F, 0xE6], 'modrm':None, 'prefix':[0xF3]}))
arch_ext = 2
class cvtdq2ps(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x5B], 'modrm':None, 'prefix':[]}),
    (xmm_mem128, {'opcode':[0x0F, 0x5B], 'modrm':None, 'prefix':[]}))
arch_ext = 2
class cvtpd2dq(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xE6], 'modrm':None, 'prefix':[0xF2]}),
(xmm_mem128, {'opcode':[0x0F, 0xE6], 'modrm':None, 'prefix':[0xF2]}))
arch_ext = 2
class cvtpd2pi(DispatchInstruction):
dispatch = (
(mmx_xmm, {'opcode':[0x0F, 0x2D], 'modrm':None, 'prefix':[0x66]}),
(mmx_mem128, {'opcode':[0x0F, 0x2D], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class cvtpd2ps(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x5A], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x5A], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class cvtpi2pd(DispatchInstruction):
dispatch = (
(xmm_mmx, {'opcode':[0x0F, 0x2A], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem64, {'opcode':[0x0F, 0x2A], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class cvtpi2ps(DispatchInstruction):
dispatch = (
(xmm_mmx, {'opcode':[0x0F, 0x2A], 'modrm':None, 'prefix':[]}),
(xmm_mem64, {'opcode':[0x0F, 0x2A], 'modrm':None, 'prefix':[]}))
  arch_ext = 1
class cvtps2dq(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x5B], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x5B], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class cvtps2pd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x5A], 'modrm':None, 'prefix':[]}),
(xmm_mem64, {'opcode':[0x0F, 0x5A], 'modrm':None, 'prefix':[]}))
arch_ext = 2
class cvtps2pi(DispatchInstruction):
dispatch = (
(mmx_xmm, {'opcode':[0x0F, 0x2D], 'modrm':None, 'prefix':[]}),
    (mmx_mem64, {'opcode':[0x0F, 0x2D], 'modrm':None, 'prefix':[]}))
  arch_ext = 1
class cvtsd2si(DispatchInstruction):
dispatch = (
# TODO - reg64 version defined by intel manuals but not AMD
#(reg64_xmm, {'opcode':[0x0F, 0x2D], 'modrm':None, 'prefix':[0xF2]}),
#(reg64_mem64, {'opcode':[0x0F, 0x2D], 'modrm':None, 'prefix':[0xF2]}),
(reg32_xmm, {'opcode':[0x0F, 0x2D], 'modrm':None, 'prefix':[0xF2]}),
(reg32_mem64, {'opcode':[0x0F, 0x2D], 'modrm':None, 'prefix':[0xF2]}))
arch_ext = 2
class cvtsd2ss(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x5A], 'modrm':None, 'prefix':[0xF2]}),
(xmm_mem64, {'opcode':[0x0F, 0x5A], 'modrm':None, 'prefix':[0xF2]}))
arch_ext = 2
class cvtsi2sd(DispatchInstruction):
  dispatch = (
    # TODO - reg64 version defined by intel manuals but not AMD
    #(xmm_reg64, {'opcode':[0x0F, 0x2A], 'modrm':None, 'prefix':[0xF2]}),
    #(xmm_mem64, {'opcode':[0x0F, 0x2A], 'modrm':None, 'prefix':[0xF2]}),
    (xmm_reg32, {'opcode':[0x0F, 0x2A], 'modrm':None, 'prefix':[0xF2]}),
    (xmm_mem32, {'opcode':[0x0F, 0x2A], 'modrm':None, 'prefix':[0xF2]}))
  arch_ext = 2
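# NOTE: cvtsi2ss (F3 0F 2A) appears to be absent from this table; the class
# below is a sketch following the conventions of the surrounding entries,
# with the reg64 form commented out to match the TODO convention above.
class cvtsi2ss(DispatchInstruction):
  dispatch = (
    #(xmm_reg64, {'opcode':[0x0F, 0x2A], 'modrm':None, 'prefix':[0xF3]}),
    #(xmm_mem64, {'opcode':[0x0F, 0x2A], 'modrm':None, 'prefix':[0xF3]}),
    (xmm_reg32, {'opcode':[0x0F, 0x2A], 'modrm':None, 'prefix':[0xF3]}),
    (xmm_mem32, {'opcode':[0x0F, 0x2A], 'modrm':None, 'prefix':[0xF3]}))
  arch_ext = 1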
class cvtss2sd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x5A], 'modrm':None, 'prefix':[0xF3]}),
(xmm_mem32, {'opcode':[0x0F, 0x5A], 'modrm':None, 'prefix':[0xF3]}))
arch_ext = 2
class cvtss2si(DispatchInstruction):
dispatch = (
# TODO - reg64 version defined by intel manuals but not AMD
#(reg64_xmm, {'opcode':[0x0F, 0x2D], 'modrm':None, 'prefix':[0xF3]}),
#(reg64_mem32, {'opcode':[0x0F, 0x2D], 'modrm':None, 'prefix':[0xF3]}),
(reg32_xmm, {'opcode':[0x0F, 0x2D], 'modrm':None, 'prefix':[0xF3]}),
(reg32_mem32, {'opcode':[0x0F, 0x2D], 'modrm':None, 'prefix':[0xF3]}))
arch_ext = 1
class cvttpd2dq(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xE6], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xE6], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class cvttpd2pi(DispatchInstruction):
dispatch = (
(mmx_xmm, {'opcode':[0x0F, 0x2C], 'modrm':None, 'prefix':[0x66]}),
(mmx_mem128, {'opcode':[0x0F, 0x2C], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class cvttps2dq(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x5B], 'modrm':None, 'prefix':[0xF3]}),
(xmm_mem128, {'opcode':[0x0F, 0x5B], 'modrm':None, 'prefix':[0xF3]}))
arch_ext = 2
class cvttps2pi(DispatchInstruction):
dispatch = (
(mmx_xmm, {'opcode':[0x0F, 0x2C], 'modrm':None, 'prefix':[]}),
    (mmx_mem64, {'opcode':[0x0F, 0x2C], 'modrm':None, 'prefix':[]}))
  arch_ext = 1
class cvttsd2si(DispatchInstruction):
dispatch = (
# TODO - reg64 version defined by intel manuals but not AMD
#(reg64_xmm, {'opcode':[0x0F, 0x2C], 'modrm':None, 'prefix':[0xF2]}),
#(reg64_mem64, {'opcode':[0x0F, 0x2C], 'modrm':None, 'prefix':[0xF2]}),
(reg32_xmm, {'opcode':[0x0F, 0x2C], 'modrm':None, 'prefix':[0xF2]}),
(reg32_mem64, {'opcode':[0x0F, 0x2C], 'modrm':None, 'prefix':[0xF2]}))
arch_ext = 2
class cvttss2si(DispatchInstruction):
dispatch = (
# TODO - reg64 version defined by intel manuals but not AMD
#(reg64_xmm, {'opcode':[0x0F, 0x2C], 'modrm':None, 'prefix':[0xF3]}),
#(reg64_mem32, {'opcode':[0x0F, 0x2C], 'modrm':None, 'prefix':[0xF3]}),
(reg32_xmm, {'opcode':[0x0F, 0x2C], 'modrm':None, 'prefix':[0xF3]}),
(reg32_mem32, {'opcode':[0x0F, 0x2C], 'modrm':None, 'prefix':[0xF3]}))
arch_ext = 1
class divpd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x5E], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x5E], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class divps(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x5E], 'modrm':None, 'prefix':[]}),
(xmm_mem128, {'opcode':[0x0F, 0x5E], 'modrm':None, 'prefix':[]}))
arch_ext = 1
class divsd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x5E], 'modrm':None, 'prefix':[0xF2]}),
(xmm_mem64, {'opcode':[0x0F, 0x5E], 'modrm':None, 'prefix':[0xF2]}))
arch_ext = 2
class divss(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x5E], 'modrm':None, 'prefix':[0xF3]}),
(xmm_mem32, {'opcode':[0x0F, 0x5E], 'modrm':None, 'prefix':[0xF3]}))
arch_ext = 1
class dppd(DispatchInstruction):
dispatch = (
(xmm_xmm_imm8, {'opcode':[0x0F, 0x3A, 0x41], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128_imm8, {'opcode':[0x0F, 0x3A, 0x41], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class dpps(DispatchInstruction):
dispatch = (
(xmm_xmm_imm8, {'opcode':[0x0F, 0x3A, 0x40], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128_imm8, {'opcode':[0x0F, 0x3A, 0x40], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class emms(Instruction):
machine_inst = no_op
params = {'opcode':[0x0F, 0x77],'modrm':None}
arch_ext = 0
class extractps(DispatchInstruction):
dispatch = (
(reg64_xmm_imm8_rev, {'opcode':[0x0F, 0x3A, 0x17], 'modrm':None, 'prefix':[0x66]}),
(reg32_xmm_imm8_rev, {'opcode':[0x0F, 0x3A, 0x17], 'modrm':None, 'prefix':[0x66]}),
(mem32_xmm_imm8, {'opcode':[0x0F, 0x3A, 0x17], 'modrm':None, 'prefix':[0x66]}))
  # TODO - ugh, this makes the printer not emit 'dword' for the mem32 case
#arch_ext = 4
class extrq(DispatchInstruction):
dispatch = (
(xmm_imm8_imm8, {'opcode':[0x0F, 0x78], 'modrm':0x00, 'prefix':[0x66]}),
(xmm_xmm, {'opcode':[0x0F, 0x79], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
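# NOTE (inferred): a non-None 'modrm' value supplies the ModRM reg-field
# opcode extension, pre-shifted into bits 5:3 -- e.g. 0x00 = /0, 0x10 = /2,
# 0x18 = /3, 0x20 = /4, 0x30 = /6, 0x38 = /7.  'modrm':None means the reg
# field encodes a register operand instead.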
class haddpd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x7C], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x7C], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 3
class haddps(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x7C], 'modrm':None, 'prefix':[0xF2]}),
(xmm_mem128, {'opcode':[0x0F, 0x7C], 'modrm':None, 'prefix':[0xF2]}))
arch_ext = 3
class hsubpd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x7D], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x7D], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 3
class hsubps(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x7D], 'modrm':None, 'prefix':[0xF2]}),
(xmm_mem128, {'opcode':[0x0F, 0x7D], 'modrm':None, 'prefix':[0xF2]}))
arch_ext = 3
class insertps(DispatchInstruction):
dispatch = (
(xmm_xmm_imm8, {'opcode':[0x0F, 0x3A, 0x21], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem32_imm8, {'opcode':[0x0F, 0x3A, 0x21], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class insertq(DispatchInstruction):
dispatch = (
(xmm_xmm_imm8_imm8, {'opcode':[0x0F, 0x78], 'modrm':None, 'prefix':[0xF2]}),
(xmm_xmm, {'opcode':[0x0F, 0x79], 'modrm':None, 'prefix':[0xF2]}))
arch_ext = 4
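# extrq/insertq above are the AMD SSE4a bit-field extract/insert
# instructions; the two pairs share opcodes 0F 78 (immediate-operand form)
# and 0F 79 (register-only form) and are distinguished only by the mandatory
# prefix: 66 for extrq, F2 for insertq.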
class lddqu(Instruction):
machine_inst = xmm_mem128
params = {'opcode':[0x0F, 0xF0],'modrm':None, 'prefix':[0xF2]}
arch_ext = 3
class ldmxcsr(Instruction):
machine_inst = mem32
params = {'opcode':[0x0F, 0xAE],'modrm':0x10}
arch_ext = 1
class maskmovdqu(Instruction):
machine_inst = xmm_xmm
params = {'opcode':[0x0F, 0xF7],'modrm':None, 'prefix':[0x66]}
arch_ext = 2
class maskmovq(Instruction):
machine_inst = mmx_mmx
params = {'opcode':[0x0F, 0xF7],'modrm':None}
arch_ext = 1
class maxpd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x5F], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x5F], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class maxps(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x5F], 'modrm':None, 'prefix':[]}),
(xmm_mem128, {'opcode':[0x0F, 0x5F], 'modrm':None, 'prefix':[]}))
arch_ext = 1
class maxsd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x5F], 'modrm':None, 'prefix':[0xF2]}),
(xmm_mem64, {'opcode':[0x0F, 0x5F], 'modrm':None, 'prefix':[0xF2]}))
arch_ext = 2
class maxss(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x5F], 'modrm':None, 'prefix':[0xF3]}),
(xmm_mem32, {'opcode':[0x0F, 0x5F], 'modrm':None, 'prefix':[0xF3]}))
arch_ext = 1
class minpd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x5D], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x5D], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class minps(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x5D], 'modrm':None, 'prefix':[]}),
(xmm_mem128, {'opcode':[0x0F, 0x5D], 'modrm':None, 'prefix':[]}))
arch_ext = 1
class minsd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x5D], 'modrm':None, 'prefix':[0xF2]}),
(xmm_mem64, {'opcode':[0x0F, 0x5D], 'modrm':None, 'prefix':[0xF2]}))
arch_ext = 2
class minss(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x5D], 'modrm':None, 'prefix':[0xF3]}),
(xmm_mem32, {'opcode':[0x0F, 0x5D], 'modrm':None, 'prefix':[0xF3]}))
arch_ext = 1
class movapd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x28], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x28], 'modrm':None, 'prefix':[0x66]}),
(mem128_xmm, {'opcode':[0x0F, 0x29], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class movaps(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x28], 'modrm':None, 'prefix':[]}),
(xmm_mem128, {'opcode':[0x0F, 0x28], 'modrm':None, 'prefix':[]}),
(mem128_xmm, {'opcode':[0x0F, 0x29], 'modrm':None, 'prefix':[]}))
arch_ext = 1
class movd(DispatchInstruction):
dispatch = (
# TODO - these are valid according to AMD64, but not according to Intel 64
#(xmm_reg64, {'opcode':[0x0F, 0x6E], 'modrm':None, 'prefix':[0x66]}),
#(xmm_mem64, {'opcode':[0x0F, 0x6E], 'modrm':None, 'prefix':[0x66]}),
#(mem64_xmm, {'opcode':[0x0F, 0x7E], 'modrm':None, 'prefix':[0x66]}),
#(reg64_xmm_rev, {'opcode':[0x0F, 0x7E], 'modrm':None, 'prefix':[0x66]}),
#(mmx_reg64, {'opcode':[0x0F, 0x6E], 'modrm':None}),
#(mmx_mem64, {'opcode':[0x0F, 0x6E], 'modrm':None}),
#(mem64_mmx, {'opcode':[0x0F, 0x7E], 'modrm':None}),
#(reg64_mmx_rev, {'opcode':[0x0F, 0x7E], 'modrm':None}),
(xmm_reg32, {'opcode':[0x0F, 0x6E], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem32, {'opcode':[0x0F, 0x6E], 'modrm':None, 'prefix':[0x66]}),
(mem32_xmm, {'opcode':[0x0F, 0x7E], 'modrm':None, 'prefix':[0x66]}),
(reg32_xmm_rev, {'opcode':[0x0F, 0x7E], 'modrm':None, 'prefix':[0x66]}),
(mmx_reg32, {'opcode':[0x0F, 0x6E], 'modrm':None}),
(mmx_mem32, {'opcode':[0x0F, 0x6E], 'modrm':None}),
(mem32_mmx, {'opcode':[0x0F, 0x7E], 'modrm':None}),
(reg32_mmx_rev, {'opcode':[0x0F, 0x7E], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class movddup(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x12], 'modrm':None, 'prefix':[0xF2]}),
(xmm_mem64, {'opcode':[0x0F, 0x12], 'modrm':None, 'prefix':[0xF2]}))
arch_ext = 3
class movdq2q(Instruction):
machine_inst = mmx_xmm
params = {'opcode':[0x0F, 0xD6],'modrm':None, 'prefix':[0xF2]}
arch_ext = 2
class movdqa(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x6F], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x6F], 'modrm':None, 'prefix':[0x66]}),
(mem128_xmm, {'opcode':[0x0F, 0x7F], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class movdqu(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x6F], 'modrm':None, 'prefix':[0xF3]}),
(xmm_mem128, {'opcode':[0x0F, 0x6F], 'modrm':None, 'prefix':[0xF3]}),
(mem128_xmm, {'opcode':[0x0F, 0x7F], 'modrm':None, 'prefix':[0xF3]}))
arch_ext = 2
class movhlps(Instruction):
machine_inst = xmm_xmm
params = {'opcode':[0x0F, 0x12],'modrm':None, 'prefix':[]}
arch_ext = 1
class movhpd(DispatchInstruction):
dispatch = (
(xmm_mem64, {'opcode':[0x0F, 0x16], 'modrm':None, 'prefix':[0x66]}),
(mem64_xmm, {'opcode':[0x0F, 0x17], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class movhps(DispatchInstruction):
dispatch = (
(xmm_mem64, {'opcode':[0x0F, 0x16], 'modrm':None, 'prefix':[]}),
(mem64_xmm, {'opcode':[0x0F, 0x17], 'modrm':None, 'prefix':[]}))
arch_ext = 1
class movlhps(Instruction):
machine_inst = xmm_xmm
params = {'opcode':[0x0F, 0x16], 'modrm':None, 'prefix':[]}
arch_ext = 1
class movlpd(DispatchInstruction):
dispatch = (
(xmm_mem64, {'opcode':[0x0F, 0x12], 'modrm':None, 'prefix':[0x66]}),
(mem64_xmm, {'opcode':[0x0F, 0x13], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class movlps(DispatchInstruction):
dispatch = (
(xmm_mem64, {'opcode':[0x0F, 0x12], 'modrm':None, 'prefix':[]}),
(mem64_xmm, {'opcode':[0x0F, 0x13], 'modrm':None, 'prefix':[]}))
arch_ext = 1
class movmskpd(Instruction):
machine_inst = reg32_xmm
params = {'opcode':[0x0F, 0x50], 'modrm':None, 'prefix':[0x66]}
arch_ext = 2
class movmskps(Instruction):
machine_inst = reg32_xmm
params = {'opcode':[0x0F, 0x50], 'modrm':None, 'prefix':[]}
  arch_ext = 1
class movntdq(Instruction):
machine_inst = mem128_xmm
params = {'opcode':[0x0F, 0xE7], 'modrm':None, 'prefix':[0x66]}
arch_ext = 2
class movntdqa(Instruction):
machine_inst = xmm_mem128
params = {'opcode':[0x0F, 0x38, 0x2A], 'modrm':None, 'prefix':[0x66]}
arch_ext = 4
class movntpd(Instruction):
machine_inst = mem128_xmm
params = {'opcode':[0x0F, 0x2B], 'modrm':None, 'prefix':[0x66]}
arch_ext = 2
class movntps(Instruction):
machine_inst = mem128_xmm
params = {'opcode':[0x0F, 0x2B], 'modrm':None, 'prefix':[]}
  arch_ext = 1
class movntq(Instruction):
machine_inst = mem64_mmx
params = {'opcode':[0x0F, 0xE7], 'modrm':None, 'prefix':[]}
arch_ext = 1
class movntsd(Instruction):
machine_inst = mem64_xmm
params = {'opcode':[0x0F, 0x2B], 'modrm':None, 'prefix':[0xF2]}
arch_ext = 4
class movntss(Instruction):
machine_inst = mem32_xmm
params = {'opcode':[0x0F, 0x2B], 'modrm':None, 'prefix':[0xF3]}
arch_ext = 4
class movq(DispatchInstruction):
dispatch = (
# TODO - first 4 are defined by Intel 64 but not AMD64
(xmm_reg64, {'opcode':[0x0F, 0x6E], 'modrm':None, 'prefix':[0x66]}),
(reg64_xmm_rev, {'opcode':[0x0F, 0x7E], 'modrm':None, 'prefix':[0x66]}),
(mmx_reg64, {'opcode':[0x0F, 0x6E], 'modrm':None}),
(reg64_mmx_rev, {'opcode':[0x0F, 0x7E], 'modrm':None}),
(xmm_xmm, {'opcode':[0x0F, 0x7E], 'modrm':None, 'prefix':[0xF3]}),
(xmm_mem64, {'opcode':[0x0F, 0x7E], 'modrm':None, 'prefix':[0xF3]}),
(mem64_xmm, {'opcode':[0x0F, 0xD6], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x6F], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x6F], 'modrm':None}),
(mem64_mmx, {'opcode':[0x0F, 0x7F], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class movq2dq(Instruction):
machine_inst = xmm_mmx
params = {'opcode':[0x0F, 0xD6], 'modrm':None, 'prefix':[0xF3]}
arch_ext = 2
class movsd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x10], 'modrm':None, 'prefix':[0xF2]}),
(xmm_mem64, {'opcode':[0x0F, 0x10], 'modrm':None, 'prefix':[0xF2]}),
(mem64_xmm, {'opcode':[0x0F, 0x11], 'modrm':None, 'prefix':[0xF2]}))
arch_ext = 2
class movshdup(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x16], 'modrm':None, 'prefix':[0xF3]}),
(xmm_mem128, {'opcode':[0x0F, 0x16], 'modrm':None, 'prefix':[0xF3]}))
arch_ext = 3
class movsldup(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x12], 'modrm':None, 'prefix':[0xF3]}),
(xmm_mem128, {'opcode':[0x0F, 0x12], 'modrm':None, 'prefix':[0xF3]}))
arch_ext = 3
class movss(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x10], 'modrm':None, 'prefix':[0xF3]}),
(xmm_mem32, {'opcode':[0x0F, 0x10], 'modrm':None, 'prefix':[0xF3]}),
(mem32_xmm, {'opcode':[0x0F, 0x11], 'modrm':None, 'prefix':[0xF3]}))
arch_ext = 1
class movupd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x10], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x10], 'modrm':None, 'prefix':[0x66]}),
(mem128_xmm, {'opcode':[0x0F, 0x11], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class movups(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x10], 'modrm':None, 'prefix':[]}),
(xmm_mem128, {'opcode':[0x0F, 0x10], 'modrm':None, 'prefix':[]}),
(mem128_xmm, {'opcode':[0x0F, 0x11], 'modrm':None, 'prefix':[]}))
arch_ext = 1
class mpsadbw(DispatchInstruction):
dispatch = (
(xmm_xmm_imm8, {'opcode':[0x0F, 0x3A, 0x42], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128_imm8, {'opcode':[0x0F, 0x3A, 0x42], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class mulpd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x59], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x59], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class mulps(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x59], 'modrm':None, 'prefix':[]}),
(xmm_mem128, {'opcode':[0x0F, 0x59], 'modrm':None, 'prefix':[]}))
arch_ext = 1
class mulsd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x59], 'modrm':None, 'prefix':[0xF2]}),
(xmm_mem64, {'opcode':[0x0F, 0x59], 'modrm':None, 'prefix':[0xF2]}))
arch_ext = 2
class mulss(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x59], 'modrm':None, 'prefix':[0xF3]}),
(xmm_mem32, {'opcode':[0x0F, 0x59], 'modrm':None, 'prefix':[0xF3]}))
arch_ext = 1
class orpd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x56], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x56], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class orps(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x56], 'modrm':None, 'prefix':[]}),
(xmm_mem128, {'opcode':[0x0F, 0x56], 'modrm':None, 'prefix':[]}))
arch_ext = 1
class pabsb(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x1C], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x1C], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x38, 0x1C], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x38, 0x1C], 'modrm':None}))
arch_ext = 3
class pabsd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x1E], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x1E], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x38, 0x1E], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x38, 0x1E], 'modrm':None}))
arch_ext = 3
class pabsw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x1D], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x1D], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x38, 0x1D], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x38, 0x1D], 'modrm':None}))
arch_ext = 3
class packssdw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x6B], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x6B], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x6B], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x6B], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class packsswb(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x63], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x63], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x63], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x63], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class packusdw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x2B], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x2B], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class packuswb(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x67], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x67], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x67], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x67], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class paddb(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xFC], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xFC], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xFC], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xFC], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class paddd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xFE], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xFE], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xFE], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xFE], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class paddq(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xD4], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xD4], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xD4], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xD4], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class paddsb(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xEC], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xEC], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xEC], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xEC], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class paddsw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xED], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xED], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xED], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xED], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class paddusb(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xDC], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xDC], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xDC], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xDC], 'modrm':None}))
arch_ext = 0
class paddusw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xDD], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xDD], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xDD], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xDD], 'modrm':None}))
arch_ext = 0
class paddw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xFD], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xFD], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xFD], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xFD], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class palignr(DispatchInstruction):
dispatch = (
(xmm_xmm_imm8, {'opcode':[0x0F, 0x3A, 0x0F], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128_imm8,{'opcode':[0x0F, 0x3A, 0x0F], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx_imm8, {'opcode':[0x0F, 0x3A, 0x0F], 'modrm':None}),
(mmx_mem64_imm8, {'opcode':[0x0F, 0x3A, 0x0F], 'modrm':None}))
arch_ext = 3
class pand(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xDB], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xDB], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xDB], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xDB], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class pandn(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xDF], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xDF], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xDF], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xDF], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class pavgb(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xE0], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xE0], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xE0], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xE0], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class pavgw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xE3], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xE3], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xE3], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xE3], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class pblendvb(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x10], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x10], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pblendw(DispatchInstruction):
dispatch = (
(xmm_xmm_imm8, {'opcode':[0x0F, 0x3A, 0x0E], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128_imm8, {'opcode':[0x0F, 0x3A, 0x0E], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pcmpeqb(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x74], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x74], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x74], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x74], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class pcmpeqd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x76], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x76], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x76], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x76], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class pcmpeqq(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x29], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x29], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pcmpeqw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x75], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x75], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x75], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x75], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class pcmpestri(DispatchInstruction):
dispatch = (
(xmm_xmm_imm8, {'opcode':[0x0F, 0x3A, 0x61], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128_imm8, {'opcode':[0x0F, 0x3A, 0x61], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pcmpestrm(DispatchInstruction):
dispatch = (
(xmm_xmm_imm8, {'opcode':[0x0F, 0x3A, 0x60], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128_imm8, {'opcode':[0x0F, 0x3A, 0x60], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pcmpgtb(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x64], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x64], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x64], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x64], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class pcmpgtd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x66], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x66], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x66], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x66], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class pcmpgtw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x65], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x65], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x65], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x65], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class pcmpgtq(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x37], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x37], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pcmpistri(DispatchInstruction):
dispatch = (
(xmm_xmm_imm8, {'opcode':[0x0F, 0x3A, 0x63], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128_imm8, {'opcode':[0x0F, 0x3A, 0x63], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pcmpistrm(DispatchInstruction):
dispatch = (
(xmm_xmm_imm8, {'opcode':[0x0F, 0x3A, 0x62], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128_imm8, {'opcode':[0x0F, 0x3A, 0x62], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pextrb(DispatchInstruction):
dispatch = (
(reg64_xmm_imm8_rev, {'opcode':[0x0F, 0x3A, 0x14], 'modrm':None, 'prefix':[0x66]}),
(reg32_xmm_imm8_rev, {'opcode':[0x0F, 0x3A, 0x14], 'modrm':None, 'prefix':[0x66]}),
(mem8_xmm_imm8, {'opcode':[0x0F, 0x3A, 0x14], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pextrd(DispatchInstruction):
dispatch = (
(reg32_xmm_imm8_rev, {'opcode':[0x0F, 0x3A, 0x16], 'modrm':None, 'prefix':[0x66]}),
(mem32_xmm_imm8, {'opcode':[0x0F, 0x3A, 0x16], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pextrq(DispatchInstruction):
dispatch = (
(reg64_xmm_imm8_rev, {'opcode':[0x0F, 0x3A, 0x16], 'modrm':None, 'prefix':[0x66]}),
(mem64_xmm_imm8, {'opcode':[0x0F, 0x3A, 0x16], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
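# pextrd and pextrq share opcode 66 0F 3A 16; presumably the reg32 vs reg64
# operand type drives REX.W emission elsewhere in this module.  The same
# pattern recurs for pinsrd/pinsrq (66 0F 3A 22) below.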
class pextrw(DispatchInstruction):
dispatch = (
(reg64_xmm_imm8, {'opcode':[0x0F, 0xC5], 'modrm':None, 'prefix':[0x66]}),
(reg32_xmm_imm8, {'opcode':[0x0F, 0xC5], 'modrm':None, 'prefix':[0x66]}),
(mem16_xmm_imm8, {'opcode':[0x0F, 0x3A, 0x15], 'modrm':None, 'prefix':[0x66]}),
(reg64_mmx_imm8, {'opcode':[0x0F, 0xC5], 'modrm':None}),
(reg32_mmx_imm8, {'opcode':[0x0F, 0xC5], 'modrm':None}))
arch_ext = 1
class phaddsw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x03], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x03], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x38, 0x03], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x38, 0x03], 'modrm':None}))
arch_ext = 3
class phaddw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x01], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x01], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x38, 0x01], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x38, 0x01], 'modrm':None}))
arch_ext = 3
class phaddd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x02], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x02], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x38, 0x02], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x38, 0x02], 'modrm':None}))
arch_ext = 3
class phminposuw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x41], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x41], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class phsubsw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x07], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x07], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x38, 0x07], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x38, 0x07], 'modrm':None}))
arch_ext = 3
class phsubw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x05], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x05], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x38, 0x05], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x38, 0x05], 'modrm':None}))
arch_ext = 3
class phsubd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x06], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x06], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x38, 0x06], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x38, 0x06], 'modrm':None}))
arch_ext = 3
class pinsrb(DispatchInstruction):
dispatch = (
(xmm_reg32_imm8, {'opcode':[0x0F, 0x3A, 0x20], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem8_imm8, {'opcode':[0x0F, 0x3A, 0x20], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pinsrd(DispatchInstruction):
dispatch = (
(xmm_reg32_imm8, {'opcode':[0x0F, 0x3A, 0x22], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem32_imm8, {'opcode':[0x0F, 0x3A, 0x22], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pinsrq(DispatchInstruction):
dispatch = (
(xmm_reg64_imm8, {'opcode':[0x0F, 0x3A, 0x22], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem64_imm8, {'opcode':[0x0F, 0x3A, 0x22], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pinsrw(DispatchInstruction):
dispatch = (
(xmm_reg32_imm8, {'opcode':[0x0F, 0xC4], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem16_imm8, {'opcode':[0x0F, 0xC4], 'modrm':None, 'prefix':[0x66]}),
(mmx_reg32_imm8, {'opcode':[0x0F, 0xC4], 'modrm':None}),
(mmx_mem16_imm8, {'opcode':[0x0F, 0xC4], 'modrm':None}))
arch_ext = 1
class pmaddubsw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x04], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x04], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x38, 0x04], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x38, 0x04], 'modrm':None}))
arch_ext = 3
class pmaddwd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xF5], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xF5], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xF5], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xF5], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class pmaxsb(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x3C], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x3C], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pmaxsd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x3D], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x3D], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pmaxsw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xEE], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xEE], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xEE], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xEE], 'modrm':None}))
arch_ext = 1
class pmaxub(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xDE], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xDE], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xDE], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xDE], 'modrm':None}))
arch_ext = 1
class pmaxud(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x3F], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x3F], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pmaxuw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x3E], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x3E], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pminsb(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x38], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x38], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pminsd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x39], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x39], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pminsw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xEA], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xEA], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xEA], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xEA], 'modrm':None}))
arch_ext = 1
class pminub(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xDA], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xDA], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xDA], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xDA], 'modrm':None}))
arch_ext = 1
class pminud(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x3B], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x3B], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pminuw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x3A], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x3A], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pmovmskb(DispatchInstruction):
dispatch = (
# TODO - undocumented reg64 forms?
#(reg64_xmm, {'opcode':[0x0F, 0xD7], 'modrm':None, 'prefix':[0x66]}),
#(reg64_mmx, {'opcode':[0x0F, 0xD7], 'modrm':None}),
(reg32_xmm, {'opcode':[0x0F, 0xD7], 'modrm':None, 'prefix':[0x66]}),
(reg32_mmx, {'opcode':[0x0F, 0xD7], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 1
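# The pmovsx*/pmovzx* families below sign- or zero-extend packed elements;
# the memory-operand width matches the number of source bits consumed:
# bw 8x8->16 (m64), bd 4x8->32 (m32), bq 2x8->64 (m16), wd 4x16->32 (m64),
# wq 2x16->64 (m32), dq 2x32->64 (m64).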
class pmovsxbw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x20], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem64, {'opcode':[0x0F, 0x38, 0x20], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pmovsxbd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x21], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem32, {'opcode':[0x0F, 0x38, 0x21], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pmovsxbq(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x22], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem16, {'opcode':[0x0F, 0x38, 0x22], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pmovsxwd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x23], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem64, {'opcode':[0x0F, 0x38, 0x23], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pmovsxwq(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x24], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem32, {'opcode':[0x0F, 0x38, 0x24], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pmovsxdq(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x25], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem64, {'opcode':[0x0F, 0x38, 0x25], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pmovzxbw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x30], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem64, {'opcode':[0x0F, 0x38, 0x30], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pmovzxbd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x31], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem32, {'opcode':[0x0F, 0x38, 0x31], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pmovzxbq(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x32], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem16, {'opcode':[0x0F, 0x38, 0x32], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pmovzxwd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x33], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem64, {'opcode':[0x0F, 0x38, 0x33], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pmovzxwq(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x34], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem32, {'opcode':[0x0F, 0x38, 0x34], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pmovzxdq(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x35], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem64, {'opcode':[0x0F, 0x38, 0x35], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pmuldq(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x28], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x28], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pmulhrsw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x0B], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x0B], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x38, 0x0B], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x38, 0x0B], 'modrm':None}))
arch_ext = 3
class pmulhuw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xE4], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xE4], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xE4], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xE4], 'modrm':None}))
arch_ext = 1
class pmulhw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xE5], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xE5], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xE5], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xE5], 'modrm':None}))
arch_ext = 1
class pmulld(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x40], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x40], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class pmullw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xD5], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xD5], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xD5], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xD5], 'modrm':None}))
arch_ext = 2 # and 0
class pmuludq(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xF4], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xF4], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xF4], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xF4], 'modrm':None}))
arch_ext = 2
class por(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xEB], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xEB], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xEB], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xEB], 'modrm':None}))
arch_ext = 2 # and 0
class psadbw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xF6], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xF6], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xF6], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xF6], 'modrm':None}))
arch_ext = 1
class pshufb(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x00], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x00], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x38, 0x00], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x38, 0x00], 'modrm':None}))
arch_ext = 3
class pshufd(DispatchInstruction):
dispatch = (
(xmm_xmm_imm8, {'opcode':[0x0F, 0x70], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128_imm8, {'opcode':[0x0F, 0x70], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class pshufhw(DispatchInstruction):
dispatch = (
(xmm_xmm_imm8, {'opcode':[0x0F, 0x70], 'modrm':None, 'prefix':[0xF3]}),
(xmm_mem128_imm8, {'opcode':[0x0F, 0x70], 'modrm':None, 'prefix':[0xF3]}))
arch_ext = 2
class pshuflw(DispatchInstruction):
dispatch = (
(xmm_xmm_imm8, {'opcode':[0x0F, 0x70], 'modrm':None, 'prefix':[0xF2]}),
(xmm_mem128_imm8, {'opcode':[0x0F, 0x70], 'modrm':None, 'prefix':[0xF2]}))
arch_ext = 2
class pshufw(DispatchInstruction):
dispatch = (
(mmx_mmx_imm8, {'opcode':[0x0F, 0x70], 'modrm':None}),
(mmx_mem64_imm8, {'opcode':[0x0F, 0x70], 'modrm':None}))
arch_ext = 1
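# All four shuffle forms above share opcode 0F 70 and are distinguished only
# by the mandatory prefix: 66 = pshufd, F3 = pshufhw, F2 = pshuflw, and no
# prefix = the MMX pshufw.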
class psignb(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x08], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x08], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x38, 0x08], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x38, 0x08], 'modrm':None}))
arch_ext = 3
class psignd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x0A], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x0A], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x38, 0x0A], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x38, 0x0A], 'modrm':None}))
arch_ext = 3
class psignw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x09], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x09], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x38, 0x09], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x38, 0x09], 'modrm':None}))
arch_ext = 3
class pslld(DispatchInstruction):
dispatch = (
(xmm_imm8, {'opcode':[0x0F, 0x72], 'modrm':0x30, 'prefix':[0x66]}),
(xmm_xmm, {'opcode':[0x0F, 0xF2], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xF2], 'modrm':None, 'prefix':[0x66]}),
(mmx_imm8, {'opcode':[0x0F, 0x72], 'modrm':0x30}),
(mmx_mmx, {'opcode':[0x0F, 0xF2], 'modrm':None}),
    (mmx_mem64, {'opcode':[0x0F, 0xF2], 'modrm':None}))
arch_ext = 2 # and 0 and 1
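# Hypothetical usage (assuming the operand objects defined earlier in this
# module, with dispatch entries presumably tried in order by operand type):
# pslld(xmm0, 4) selects the xmm_imm8 form (66 0F 72 /6 ib), while
# pslld(xmm0, xmm1) selects the xmm_xmm form (66 0F F2 /r).  The remaining
# shift classes below follow the same three-form layout.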
class pslldq(Instruction):
machine_inst = xmm_imm8
params = {'opcode':[0x0F, 0x73], 'modrm':0x38, 'prefix':[0x66]}
arch_ext = 1
class psllq(DispatchInstruction):
dispatch = (
(xmm_imm8, {'opcode':[0x0F, 0x73], 'modrm':0x30, 'prefix':[0x66]}),
(xmm_xmm, {'opcode':[0x0F, 0xF3], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xF3], 'modrm':None, 'prefix':[0x66]}),
(mmx_imm8, {'opcode':[0x0F, 0x73], 'modrm':0x30}),
(mmx_mmx, {'opcode':[0x0F, 0xF3], 'modrm':None}),
    (mmx_mem64, {'opcode':[0x0F, 0xF3], 'modrm':None}))
arch_ext = 2 # and 0 and 1
class psllw(DispatchInstruction):
dispatch = (
(xmm_imm8, {'opcode':[0x0F, 0x71], 'modrm':0x30, 'prefix':[0x66]}),
(xmm_xmm, {'opcode':[0x0F, 0xF1], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xF1], 'modrm':None, 'prefix':[0x66]}),
(mmx_imm8, {'opcode':[0x0F, 0x71], 'modrm':0x30}),
(mmx_mmx, {'opcode':[0x0F, 0xF1], 'modrm':None}),
    (mmx_mem64, {'opcode':[0x0F, 0xF1], 'modrm':None}))
arch_ext = 2 # and 0 and 1
class psrad(DispatchInstruction):
dispatch = (
(xmm_imm8, {'opcode':[0x0F, 0x72], 'modrm':0x20, 'prefix':[0x66]}),
(xmm_xmm, {'opcode':[0x0F, 0xE2], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xE2], 'modrm':None, 'prefix':[0x66]}),
(mmx_imm8, {'opcode':[0x0F, 0x72], 'modrm':0x20}),
(mmx_mmx, {'opcode':[0x0F, 0xE2], 'modrm':None}),
    (mmx_mem64, {'opcode':[0x0F, 0xE2], 'modrm':None}))
arch_ext = 2 # and 0 and 1
class psraw(DispatchInstruction):
dispatch = (
(xmm_imm8, {'opcode':[0x0F, 0x71], 'modrm':0x20, 'prefix':[0x66]}),
(xmm_xmm, {'opcode':[0x0F, 0xE1], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xE1], 'modrm':None, 'prefix':[0x66]}),
(mmx_imm8, {'opcode':[0x0F, 0x71], 'modrm':0x20}),
(mmx_mmx, {'opcode':[0x0F, 0xE1], 'modrm':None}),
    (mmx_mem64, {'opcode':[0x0F, 0xE1], 'modrm':None}))
arch_ext = 2 # and 0 and 1
class psrld(DispatchInstruction):
dispatch = (
(xmm_imm8, {'opcode':[0x0F, 0x72], 'modrm':0x10, 'prefix':[0x66]}),
(xmm_xmm, {'opcode':[0x0F, 0xD2], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xD2], 'modrm':None, 'prefix':[0x66]}),
(mmx_imm8, {'opcode':[0x0F, 0x72], 'modrm':0x10}),
(mmx_mmx, {'opcode':[0x0F, 0xD2], 'modrm':None}),
    (mmx_mem64, {'opcode':[0x0F, 0xD2], 'modrm':None}))
arch_ext = 2 # and 0 and 1
class psrldq(Instruction):
machine_inst = xmm_imm8
params = {'opcode':[0x0F, 0x73], 'modrm':0x18, 'prefix':[0x66]}
arch_ext = 1
class psrlq(DispatchInstruction):
dispatch = (
(xmm_imm8, {'opcode':[0x0F, 0x73], 'modrm':0x10, 'prefix':[0x66]}),
(xmm_xmm, {'opcode':[0x0F, 0xD3], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xD3], 'modrm':None, 'prefix':[0x66]}),
(mmx_imm8, {'opcode':[0x0F, 0x73], 'modrm':0x10}),
(mmx_mmx, {'opcode':[0x0F, 0xD3], 'modrm':None}),
    (mmx_mem64, {'opcode':[0x0F, 0xD3], 'modrm':None}))
arch_ext = 2 # and 0 and 1
class psrlw(DispatchInstruction):
dispatch = (
(xmm_imm8, {'opcode':[0x0F, 0x71], 'modrm':0x10, 'prefix':[0x66]}),
(xmm_xmm, {'opcode':[0x0F, 0xD1], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xD1], 'modrm':None, 'prefix':[0x66]}),
(mmx_imm8, {'opcode':[0x0F, 0x71], 'modrm':0x10}),
(mmx_mmx, {'opcode':[0x0F, 0xD1], 'modrm':None}),
    (mmx_mem64, {'opcode':[0x0F, 0xD1], 'modrm':None}))
arch_ext = 2 # and 0 and 1
class psubb(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xF8], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xF8], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xF8], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xF8], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class psubd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xFA], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xFA], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xFA], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xFA], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class psubq(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xFB], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xFB], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xFB], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xFB], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class psubsb(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xE8], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xE8], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xE8], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xE8], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class psubsw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xE9], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xE9], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xE9], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xE9], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class psubusb(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xD8], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xD8], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xD8], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xD8], 'modrm':None}))
arch_ext = 0
class psubusw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xD9], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xD9], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xD9], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xD9], 'modrm':None}))
arch_ext = 0
class psubw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xF9], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xF9], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xF9], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xF9], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class ptest(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x38, 0x17], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x38, 0x17], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class punpckhbw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x68], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x68], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x68], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x68], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class punpckhdq(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x6A], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x6A], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x6A], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x6A], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class punpckhqdq(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x6D], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x6D], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class punpckhwd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x69], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x69], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x69], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x69], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class punpcklbw(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x60], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x60], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x60], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x60], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class punpckldq(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x62], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x62], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x62], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x62], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class punpcklqdq(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x6C], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x6C], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class punpcklwd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x61], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x61], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0x61], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0x61], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class pxor(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0xEF], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0xEF], 'modrm':None, 'prefix':[0x66]}),
(mmx_mmx, {'opcode':[0x0F, 0xEF], 'modrm':None}),
(mmx_mem64, {'opcode':[0x0F, 0xEF], 'modrm':None}))
arch_ext = 2 # TODO - err, some are 2, some are 0
class rcpps(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x53], 'modrm':None, 'prefix':[]}),
(xmm_mem128, {'opcode':[0x0F, 0x53], 'modrm':None, 'prefix':[]}))
arch_ext = 1
class rcpss(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x53], 'modrm':None, 'prefix':[0xF3]}),
(xmm_mem32, {'opcode':[0x0F, 0x53], 'modrm':None, 'prefix':[0xF3]}))
  arch_ext = 1 # RCPSS is an SSE instruction, like RCPPS
class roundpd(DispatchInstruction):
dispatch = (
(xmm_xmm_imm8, {'opcode':[0x0F, 0x3A, 0x09], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128_imm8, {'opcode':[0x0F, 0x3A, 0x09], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class roundps(DispatchInstruction):
dispatch = (
(xmm_xmm_imm8, {'opcode':[0x0F, 0x3A, 0x08], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128_imm8, {'opcode':[0x0F, 0x3A, 0x08], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class roundsd(DispatchInstruction):
dispatch = (
(xmm_xmm_imm8, {'opcode':[0x0F, 0x3A, 0x0B], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem64_imm8, {'opcode':[0x0F, 0x3A, 0x0B], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 4
class roundss(DispatchInstruction):
dispatch = (
(xmm_xmm_imm8, {'opcode':[0x0F, 0x3A, 0x0A], 'modrm':None, 'prefix':[0x66]}),
    (xmm_mem32_imm8,  {'opcode':[0x0F, 0x3A, 0x0A], 'modrm':None, 'prefix':[0x66]})) # ROUNDSS reads an m32 source
arch_ext = 4
class rsqrtps(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x52], 'modrm':None, 'prefix':[]}),
(xmm_mem128, {'opcode':[0x0F, 0x52], 'modrm':None, 'prefix':[]}))
arch_ext = 1
class rsqrtss(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x52], 'modrm':None, 'prefix':[0xF3]}),
(xmm_mem32, {'opcode':[0x0F, 0x52], 'modrm':None, 'prefix':[0xF3]}))
  arch_ext = 1 # RSQRTSS is an SSE instruction, like RSQRTPS
class shufpd(DispatchInstruction):
dispatch = (
(xmm_xmm_imm8, {'opcode':[0x0F, 0xC6], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128_imm8, {'opcode':[0x0F, 0xC6], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class shufps(DispatchInstruction):
dispatch = (
(xmm_xmm_imm8, {'opcode':[0x0F, 0xC6], 'modrm':None, 'prefix':[]}),
(xmm_mem128_imm8, {'opcode':[0x0F, 0xC6], 'modrm':None, 'prefix':[]}))
arch_ext = 1
class sqrtpd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x51], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x51], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class sqrtps(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x51], 'modrm':None, 'prefix':[]}),
(xmm_mem128, {'opcode':[0x0F, 0x51], 'modrm':None, 'prefix':[]}))
arch_ext = 1
class sqrtsd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x51], 'modrm':None, 'prefix':[0xF2]}),
(xmm_mem64, {'opcode':[0x0F, 0x51], 'modrm':None, 'prefix':[0xF2]}))
arch_ext = 2
class sqrtss(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x51], 'modrm':None, 'prefix':[0xF3]}),
    (xmm_mem32,  {'opcode':[0x0F, 0x51], 'modrm':None, 'prefix':[0xF3]}))
arch_ext = 1
class stmxcsr(Instruction):
machine_inst = mem32
params = {'opcode':[0x0F, 0xAE], 'modrm':0x18}
arch_ext = 1
class subpd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x5C], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x5C], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class subps(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x5C], 'modrm':None, 'prefix':[]}),
(xmm_mem128, {'opcode':[0x0F, 0x5C], 'modrm':None, 'prefix':[]}))
arch_ext = 1
class subsd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x5C], 'modrm':None, 'prefix':[0xF2]}),
(xmm_mem64, {'opcode':[0x0F, 0x5C], 'modrm':None, 'prefix':[0xF2]}))
arch_ext = 2
class subss(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x5C], 'modrm':None, 'prefix':[0xF3]}),
(xmm_mem32, {'opcode':[0x0F, 0x5C], 'modrm':None, 'prefix':[0xF3]}))
arch_ext = 1
class ucomisd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x2E], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem64, {'opcode':[0x0F, 0x2E], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class ucomiss(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x2E], 'modrm':None, 'prefix':[]}),
(xmm_mem32, {'opcode':[0x0F, 0x2E], 'modrm':None, 'prefix':[]}))
arch_ext = 1
class unpckhpd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x15], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x15], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 1
class unpckhps(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x15], 'modrm':None, 'prefix':[]}),
(xmm_mem128, {'opcode':[0x0F, 0x15], 'modrm':None, 'prefix':[]}))
arch_ext = 1
class unpcklpd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x14], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x14], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 1
class unpcklps(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x14], 'modrm':None, 'prefix':[]}),
(xmm_mem128, {'opcode':[0x0F, 0x14], 'modrm':None, 'prefix':[]}))
arch_ext = 1
class xorpd(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x57], 'modrm':None, 'prefix':[0x66]}),
(xmm_mem128, {'opcode':[0x0F, 0x57], 'modrm':None, 'prefix':[0x66]}))
arch_ext = 2
class xorps(DispatchInstruction):
dispatch = (
(xmm_xmm, {'opcode':[0x0F, 0x57], 'modrm':None, 'prefix':[]}),
(xmm_mem128, {'opcode':[0x0F, 0x57], 'modrm':None, 'prefix':[]}))
arch_ext = 1
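# Every class above follows one pattern: a ``dispatch`` table maps an operand
# signature (xmm_xmm, xmm_mem128, mmx_imm8, ...) to the encoding parameters
# for that form.  The helper below sketches how such a lookup could work in
# isolation; it is an illustrative assumption, not the DispatchInstruction
# machinery these classes actually use.
def _select_encoding(dispatch, signature):
  """Return the params dict of the first dispatch entry whose machine
  instruction form is ``signature`` (illustrative helper only)."""
  for form, params in dispatch:
    if form is signature:
      return params
  raise TypeError("no encoding for operand form %r" % (signature,))
# Example (hypothetical): _select_encoding(pxor.dispatch, xmm_mem128) would
# return {'opcode': [0x0F, 0xEF], 'modrm': None, 'prefix': [0x66]}.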
|
liberalcoin/liberalcoin
|
refs/heads/master
|
share/qt/extract_strings_qt.py
|
2945
|
#!/usr/bin/python
'''
Extract _("...") strings for translation and convert to Qt4 stringdefs so that
they can be picked up by Qt linguist.
'''
from subprocess import Popen, PIPE
import glob
import operator
OUT_CPP="src/qt/bitcoinstrings.cpp"
EMPTY=['""']
def parse_po(text):
"""
Parse 'po' format produced by xgettext.
Return a list of (msgid,msgstr) tuples.
"""
messages = []
msgid = []
msgstr = []
in_msgid = False
in_msgstr = False
for line in text.split('\n'):
line = line.rstrip('\r')
if line.startswith('msgid '):
if in_msgstr:
messages.append((msgid, msgstr))
in_msgstr = False
# message start
in_msgid = True
msgid = [line[6:]]
elif line.startswith('msgstr '):
in_msgid = False
in_msgstr = True
msgstr = [line[7:]]
elif line.startswith('"'):
if in_msgid:
msgid.append(line)
if in_msgstr:
msgstr.append(line)
if in_msgstr:
messages.append((msgid, msgstr))
return messages
files = glob.glob('src/*.cpp') + glob.glob('src/*.h')
# xgettext -n --keyword=_ $FILES
child = Popen(['xgettext','--output=-','-n','--keyword=_'] + files, stdout=PIPE)
(out, err) = child.communicate()
messages = parse_po(out)
f = open(OUT_CPP, 'w')
f.write("""#include <QtGlobal>
// Automatically generated by extract_strings.py
#ifdef __GNUC__
#define UNUSED __attribute__((unused))
#else
#define UNUSED
#endif
""")
f.write('static const char UNUSED *bitcoin_strings[] = {\n')
messages.sort(key=operator.itemgetter(0))
for (msgid, msgstr) in messages:
if msgid != EMPTY:
f.write('QT_TRANSLATE_NOOP("bitcoin-core", %s),\n' % ('\n'.join(msgid)))
f.write('};')
f.close()
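# Illustration of what parse_po() above returns for xgettext-style input
# (this demo is not part of the original script):
#   parse_po('msgid "hello"\nmsgstr ""\n')  ->  [(['"hello"'], ['""'])]
# Each message is a (msgid, msgstr) pair of raw quoted-string lines, which
# is why the writer above joins the msgid lines verbatim into the
# QT_TRANSLATE_NOOP() call.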
|
crazy-canux/xplugin_nagios
|
refs/heads/master
|
plugin/plugins/db2/src/check_db2_database_log.py
|
1
|
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
# Copyright (C) Canux CHENG <canuxcheng@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import db2
plugin = db2.CheckDatabaseLog(version=db2.VERSION,
description="check DB2 Database log usage").run()
|
yanatan16/pycodejam
|
refs/heads/master
|
setup.py
|
1
|
from setuptools import setup, find_packages
import sys
setup(
name = "pycodejam",
version = "1.2.0",
packages = find_packages('src', exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
package_dir = { '': 'src' },
test_suite = 'codejam.tests',
# metadata for upload to PyPI
author = "Jon Eisen",
author_email = "jon.m.eisen@gmail.com",
description = "This module provides helpers to run and parse CodeJam problems",
url = "http://github.com/yanatan16/pycodejam",
license = "MIT",
keywords = "google code jam codejam competition problem",
zip_safe = True
)
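# Typical invocations for this setup script (standard setuptools commands;
# the exact workflow is an assumption, not documented by the package):
#   python setup.py test     # runs the declared test_suite, codejam.tests
#   python setup.py sdist    # builds a source distribution from src/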
|
sertac/django
|
refs/heads/master
|
tests/custom_migration_operations/operations.py
|
518
|
from django.db.migrations.operations.base import Operation
class TestOperation(Operation):
def __init__(self):
pass
def deconstruct(self):
return (
self.__class__.__name__,
[],
{}
)
@property
def reversible(self):
return True
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
pass
def state_backwards(self, app_label, state):
pass
def database_backwards(self, app_label, schema_editor, from_state, to_state):
pass
class CreateModel(TestOperation):
pass
class ArgsOperation(TestOperation):
def __init__(self, arg1, arg2):
self.arg1, self.arg2 = arg1, arg2
def deconstruct(self):
return (
self.__class__.__name__,
[self.arg1, self.arg2],
{}
)
class KwargsOperation(TestOperation):
def __init__(self, kwarg1=None, kwarg2=None):
self.kwarg1, self.kwarg2 = kwarg1, kwarg2
def deconstruct(self):
kwargs = {}
if self.kwarg1 is not None:
kwargs['kwarg1'] = self.kwarg1
if self.kwarg2 is not None:
kwargs['kwarg2'] = self.kwarg2
return (
self.__class__.__name__,
[],
kwargs
)
class ArgsKwargsOperation(TestOperation):
def __init__(self, arg1, arg2, kwarg1=None, kwarg2=None):
self.arg1, self.arg2 = arg1, arg2
self.kwarg1, self.kwarg2 = kwarg1, kwarg2
def deconstruct(self):
kwargs = {}
if self.kwarg1 is not None:
kwargs['kwarg1'] = self.kwarg1
if self.kwarg2 is not None:
kwargs['kwarg2'] = self.kwarg2
return (
self.__class__.__name__,
[self.arg1, self.arg2],
kwargs,
)
class ExpandArgsOperation(TestOperation):
serialization_expand_args = ['arg']
def __init__(self, arg):
self.arg = arg
def deconstruct(self):
return (
self.__class__.__name__,
[self.arg],
{}
)
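# deconstruct() returns (name, args, kwargs) so the migration writer can
# re-emit the operation as source.  A small illustrative round trip, not
# part of Django's fixture:
def _reconstruct(op_cls, deconstructed):
    """Rebuild an operation from its deconstruct() output (sketch only)."""
    name, args, kwargs = deconstructed
    assert name == op_cls.__name__
    return op_cls(*args, **kwargs)
# e.g. _reconstruct(ArgsKwargsOperation,
#                   ArgsKwargsOperation(1, 2, kwarg1=3).deconstruct())
# builds an operation equivalent to ArgsKwargsOperation(1, 2, kwarg1=3).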
|
TileHalo/servo
|
refs/heads/master
|
tests/wpt/harness/wptrunner/manifestupdate.py
|
38
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import urlparse
from collections import namedtuple, defaultdict
from wptmanifest.node import (DataNode, ConditionalNode, BinaryExpressionNode,
BinaryOperatorNode, VariableNode, StringNode, NumberNode,
UnaryExpressionNode, UnaryOperatorNode, KeyValueNode)
from wptmanifest.backends import conditional
from wptmanifest.backends.conditional import ManifestItem
import expected
"""Manifest structure used to update the expected results of a test
Each manifest file is represented by an ExpectedManifest that has one
or more TestNode children, one per test in the manifest. Each
TestNode has zero or more SubtestNode children, one for each known
subtest of the test.
In these representations, conditional expressions in the manifest are
not evaluated upfront but stored as Python functions to be evaluated
at runtime.
When a result for a test is to be updated, set_result on the
[Sub]TestNode is called to store the new result, alongside the
existing conditional that result's run info matched, if any. Once all
new results are known, coalesce_expected is called to compute the new
set of results and conditionals. The AST of the underlying parsed
manifest is updated with the changes, and the result is serialised to
a file.
"""
Result = namedtuple("Result", ["run_info", "status"])
def data_cls_getter(output_node, visited_node):
# visited_node is intentionally unused
if output_node is None:
return ExpectedManifest
elif isinstance(output_node, ExpectedManifest):
return TestNode
elif isinstance(output_node, TestNode):
return SubtestNode
else:
raise ValueError
class ExpectedManifest(ManifestItem):
def __init__(self, node, test_path=None, url_base=None):
"""Object representing all the tests in a particular manifest
:param node: AST Node associated with this object. If this is None,
a new AST is created to associate with this manifest.
:param test_path: Path of the test file associated with this manifest.
:param url_base: Base url for serving the tests in this manifest
"""
if node is None:
node = DataNode(None)
ManifestItem.__init__(self, node)
self.child_map = {}
self.test_path = test_path
self.url_base = url_base
assert self.url_base is not None
self.modified = False
def append(self, child):
ManifestItem.append(self, child)
if child.id in self.child_map:
print "Warning: Duplicate heading %s" % child.id
self.child_map[child.id] = child
def _remove_child(self, child):
del self.child_map[child.id]
ManifestItem._remove_child(self, child)
def get_test(self, test_id):
"""Return a TestNode by test id, or None if no test matches
:param test_id: The id of the test to look up"""
return self.child_map[test_id]
def has_test(self, test_id):
"""Boolean indicating whether the current test has a known child test
with id test id
:param test_id: The id of the test to look up"""
return test_id in self.child_map
@property
def url(self):
return urlparse.urljoin(self.url_base,
"/".join(self.test_path.split(os.path.sep)))
class TestNode(ManifestItem):
def __init__(self, node):
"""Tree node associated with a particular test in a manifest
:param node: AST node associated with the test"""
ManifestItem.__init__(self, node)
self.updated_expected = []
self.new_expected = []
self.subtests = {}
self.default_status = None
self._from_file = True
@classmethod
def create(cls, test_type, test_id):
"""Create a TestNode corresponding to a given test
:param test_type: The type of the test
:param test_id: The id of the test"""
url = test_id
name = url.split("/")[-1]
node = DataNode(name)
self = cls(node)
self.set("type", test_type)
self._from_file = False
return self
@property
def is_empty(self):
required_keys = set(["type"])
if set(self._data.keys()) != required_keys:
return False
return all(child.is_empty for child in self.children)
@property
def test_type(self):
"""The type of the test represented by this TestNode"""
return self.get("type", None)
@property
def id(self):
"""The id of the test represented by this TestNode"""
return urlparse.urljoin(self.parent.url, self.name)
def disabled(self, run_info):
"""Boolean indicating whether this test is disabled when run in an
environment with the given run_info
:param run_info: Dictionary of run_info parameters"""
return self.get("disabled", run_info) is not None
def set_result(self, run_info, result):
"""Set the result of the test in a particular run
:param run_info: Dictionary of run_info parameters corresponding
to this run
:param result: Status of the test in this run"""
if self.default_status is not None:
assert self.default_status == result.default_expected
else:
self.default_status = result.default_expected
# Add this result to the list of results satisfying
# any condition in the list of updated results it matches
for (cond, values) in self.updated_expected:
if cond(run_info):
values.append(Result(run_info, result.status))
if result.status != cond.value:
self.root.modified = True
break
else:
# We didn't find a previous value for this
self.new_expected.append(Result(run_info, result.status))
self.root.modified = True
def coalesce_expected(self):
"""Update the underlying manifest AST for this test based on all the
added results.
This will update existing conditionals if they got the same result in
all matching runs in the updated results, will delete existing conditionals
that get more than one different result in the updated run, and add new
conditionals for anything that doesn't match an existing conditional.
Conditionals not matched by any added result are not changed."""
final_conditionals = []
try:
unconditional_status = self.get("expected")
except KeyError:
unconditional_status = self.default_status
for conditional_value, results in self.updated_expected:
if not results:
# The conditional didn't match anything in these runs so leave it alone
final_conditionals.append(conditional_value)
elif all(results[0].status == result.status for result in results):
# All the new values for this conditional matched, so update the node
result = results[0]
if (result.status == unconditional_status and
conditional_value.condition_node is not None):
self.remove_value("expected", conditional_value)
else:
conditional_value.value = result.status
final_conditionals.append(conditional_value)
elif conditional_value.condition_node is not None:
# Blow away the existing condition and rebuild from scratch
                # This isn't guaranteed to work if a later conditional also
                # matches these values, but we can hope, verify that we get
                # the results we expect, and if not let a human sort it out
self.remove_value("expected", conditional_value)
self.new_expected.extend(results)
elif conditional_value.condition_node is None:
self.new_expected.extend(result for result in results
if result.status != unconditional_status)
# It is an invariant that nothing in new_expected matches an existing
# condition except for the default condition
if self.new_expected:
if all(self.new_expected[0].status == result.status
for result in self.new_expected) and not self.updated_expected:
status = self.new_expected[0].status
if status != self.default_status:
self.set("expected", status, condition=None)
final_conditionals.append(self._data["expected"][-1])
else:
for conditional_node, status in group_conditionals(self.new_expected):
if status != unconditional_status:
self.set("expected", status, condition=conditional_node.children[0])
final_conditionals.append(self._data["expected"][-1])
if ("expected" in self._data and
len(self._data["expected"]) > 0 and
self._data["expected"][-1].condition_node is None and
self._data["expected"][-1].value == self.default_status):
self.remove_value("expected", self._data["expected"][-1])
if ("expected" in self._data and
len(self._data["expected"]) == 0):
for child in self.node.children:
if (isinstance(child, KeyValueNode) and
child.data == "expected"):
child.remove()
break
def _add_key_value(self, node, values):
ManifestItem._add_key_value(self, node, values)
if node.data == "expected":
self.updated_expected = []
for value in values:
self.updated_expected.append((value, []))
def clear_expected(self):
"""Clear all the expected data for this test and all of its subtests"""
self.updated_expected = []
if "expected" in self._data:
for child in self.node.children:
if (isinstance(child, KeyValueNode) and
child.data == "expected"):
child.remove()
del self._data["expected"]
break
for subtest in self.subtests.itervalues():
subtest.clear_expected()
def append(self, node):
child = ManifestItem.append(self, node)
self.subtests[child.name] = child
def get_subtest(self, name):
"""Return a SubtestNode corresponding to a particular subtest of
the current test, creating a new one if no subtest with that name
already exists.
:param name: Name of the subtest"""
if name in self.subtests:
return self.subtests[name]
else:
subtest = SubtestNode.create(name)
self.append(subtest)
return subtest
class SubtestNode(TestNode):
def __init__(self, node):
assert isinstance(node, DataNode)
TestNode.__init__(self, node)
@classmethod
def create(cls, name):
node = DataNode(name)
self = cls(node)
return self
@property
def is_empty(self):
if self._data:
return False
return True
def group_conditionals(values):
"""Given a list of Result objects, return a list of
(conditional_node, status) pairs representing the conditional
expressions that are required to match each status
:param values: List of Results"""
by_property = defaultdict(set)
for run_info, status in values:
for prop_name, prop_value in run_info.iteritems():
by_property[(prop_name, prop_value)].add(status)
# If we have more than one value, remove any properties that are common
# for all the values
if len(values) > 1:
for key, statuses in by_property.copy().iteritems():
if len(statuses) == len(values):
del by_property[key]
properties = set(item[0] for item in by_property.iterkeys())
prop_order = ["debug", "e10s", "os", "version", "processor", "bits"]
include_props = []
for prop in prop_order:
if prop in properties:
include_props.append(prop)
conditions = {}
for run_info, status in values:
prop_set = tuple((prop, run_info[prop]) for prop in include_props)
if prop_set in conditions:
continue
expr = make_expr(prop_set, status)
conditions[prop_set] = (expr, status)
return conditions.values()
def make_expr(prop_set, status):
"""Create an AST that returns the value ``status`` given all the
properties in prop_set match."""
root = ConditionalNode()
assert len(prop_set) > 0
no_value_props = set(["debug", "e10s"])
expressions = []
for prop, value in prop_set:
number_types = (int, float, long)
value_cls = (NumberNode
if type(value) in number_types
else StringNode)
if prop not in no_value_props:
expressions.append(
BinaryExpressionNode(
BinaryOperatorNode("=="),
VariableNode(prop),
value_cls(unicode(value))
))
else:
if value:
expressions.append(VariableNode(prop))
else:
expressions.append(
UnaryExpressionNode(
UnaryOperatorNode("not"),
VariableNode(prop)
))
if len(expressions) > 1:
prev = expressions[-1]
for curr in reversed(expressions[:-1]):
node = BinaryExpressionNode(
BinaryOperatorNode("and"),
curr,
prev)
prev = node
else:
node = expressions[0]
root.append(node)
root.append(StringNode(status))
return root
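# For example, make_expr((("debug", True), ("os", "linux")), "FAIL") builds
# an AST equivalent to the manifest conditional
#     if debug and os == "linux": FAIL
# (the exact serialised text depends on the wptmanifest serialiser).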
def get_manifest(metadata_root, test_path, url_base):
"""Get the ExpectedManifest for a particular test path, or None if there is no
metadata stored for that test path.
:param metadata_root: Absolute path to the root of the metadata directory
:param test_path: Path to the test(s) relative to the test root
:param url_base: Base url for serving the tests in this manifest
"""
manifest_path = expected.expected_path(metadata_root, test_path)
try:
with open(manifest_path) as f:
return compile(f, test_path, url_base)
except IOError:
return None
def compile(manifest_file, test_path, url_base):
return conditional.compile(manifest_file,
data_cls_getter=data_cls_getter,
test_path=test_path,
url_base=url_base)
|
sylarcp/anita
|
refs/heads/master
|
venv/lib/python2.7/site-packages/sqlalchemy/dialects/oracle/base.py
|
21
|
# oracle/base.py
# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: oracle
:name: Oracle
Oracle versions 8 through current (11g at the time of this writing) are
supported.
Connect Arguments
-----------------
The dialect supports several :func:`~sqlalchemy.create_engine()` arguments
which affect the behavior of the dialect regardless of driver in use.
* ``use_ansi`` - Use ANSI JOIN constructs (see the section on Oracle 8).
Defaults to ``True``. If ``False``, Oracle-8 compatible constructs are used
for joins.
* ``optimize_limits`` - defaults to ``False``. see the section on
LIMIT/OFFSET.
* ``use_binds_for_limits`` - defaults to ``True``. see the section on
LIMIT/OFFSET.
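A combined example (argument values shown here are the defaults and are
illustrative only)::
    engine = create_engine("oracle://scott:tiger@dsn",
                           use_ansi=True,
                           optimize_limits=False,
                           use_binds_for_limits=True)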
Auto Increment Behavior
-----------------------
SQLAlchemy Table objects which include integer primary keys are usually
assumed to have "autoincrementing" behavior, meaning they can generate their
own primary key values upon INSERT. Since Oracle has no "autoincrement"
feature, SQLAlchemy relies upon sequences to produce these values. With the
Oracle dialect, *a sequence must always be explicitly specified to enable
autoincrement*. This is divergent with the majority of documentation
examples which assume the usage of an autoincrement-capable database. To
specify sequences, use the sqlalchemy.schema.Sequence object which is passed
to a Column construct::
t = Table('mytable', metadata,
Column('id', Integer, Sequence('id_seq'), primary_key=True),
Column(...), ...
)
This step is also required when using table reflection, i.e. autoload=True::
t = Table('mytable', metadata,
Column('id', Integer, Sequence('id_seq'), primary_key=True),
autoload=True
)
Identifier Casing
-----------------
In Oracle, the data dictionary represents all case insensitive identifier
names using UPPERCASE text. SQLAlchemy on the other hand considers an
all-lower case identifier name to be case insensitive. The Oracle dialect
converts all case insensitive identifiers to and from those two formats during
schema level communication, such as reflection of tables and indexes. Using
an UPPERCASE name on the SQLAlchemy side indicates a case sensitive
identifier, and SQLAlchemy will quote the name - this will cause mismatches
against data dictionary data received from Oracle, so unless identifier names
have been truly created as case sensitive (i.e. using quoted names), all
lowercase names should be used on the SQLAlchemy side.
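For example (illustrative)::
    # all-lower case name: case insensitive, matches MY_TABLE in Oracle
    t1 = Table('my_table', metadata, autoload=True, autoload_with=engine)
    # UPPERCASE name: treated as case sensitive and quoted by SQLAlchemy
    t2 = Table('MY_TABLE', metadata, autoload=True, autoload_with=engine)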
LIMIT/OFFSET Support
--------------------
Oracle has no support for the LIMIT or OFFSET keywords. SQLAlchemy uses
a wrapped subquery approach in conjunction with ROWNUM. The exact methodology
is taken from
http://www.oracle.com/technology/oramag/oracle/06-sep/o56asktom.html .
There are two options which affect its behavior:
* the "FIRST ROWS()" optimization keyword is not used by default. To enable
the usage of this optimization directive, specify ``optimize_limits=True``
to :func:`.create_engine`.
* the values passed for the limit/offset are sent as bound parameters. Some
users have observed that Oracle produces a poor query plan when the values
are sent as binds and not rendered literally. To render the limit/offset
values literally within the SQL statement, specify
``use_binds_for_limits=False`` to :func:`.create_engine`.
Some users have reported better performance with the entirely different
approach of a window query, i.e. ROW_NUMBER() OVER (ORDER BY), to
provide LIMIT/OFFSET (note that the majority of users don't observe this).
To suit this case the method used for LIMIT/OFFSET can be replaced entirely.
See the recipe at
http://www.sqlalchemy.org/trac/wiki/UsageRecipes/WindowFunctionsByDefault
which installs a select compiler that overrides the generation of limit/offset
with a window function.
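The generated SQL follows this general shape (a sketch; see also the
inline comments in ``OracleCompiler.visit_select`` below)::
    SELECT ... FROM (
        SELECT ..., ROWNUM AS ora_rn FROM (
            SELECT DISTINCT ... WHERE ... ORDER BY ...
        ) WHERE ROWNUM <= :limit + :offset
    ) WHERE ora_rn > :offset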
.. _oracle_returning:
RETURNING Support
-----------------
The Oracle database supports a limited form of RETURNING, in order to retrieve
result sets of matched rows from INSERT, UPDATE and DELETE statements.
Oracle's RETURNING..INTO syntax only supports one row being returned, as it
relies upon OUT parameters in order to function. In addition, supported
DBAPIs have further limitations (see :ref:`cx_oracle_returning`).
SQLAlchemy's "implicit returning" feature, which employs RETURNING within an
INSERT and sometimes an UPDATE statement in order to fetch newly generated
primary key values and other SQL defaults and expressions, is normally enabled
on the Oracle backend. By default, "implicit returning" typically only
fetches the value of a single ``nextval(some_seq)`` expression embedded into
an INSERT in order to increment a sequence within an INSERT statement and get
the value back at the same time. To disable this feature across the board,
specify ``implicit_returning=False`` to :func:`.create_engine`::
engine = create_engine("oracle://scott:tiger@dsn",
implicit_returning=False)
Implicit returning can also be disabled on a table-by-table basis as a table
option::
# Core Table
my_table = Table("my_table", metadata, ..., implicit_returning=False)
# declarative
class MyClass(Base):
__tablename__ = 'my_table'
__table_args__ = {"implicit_returning": False}
.. seealso::
:ref:`cx_oracle_returning` - additional cx_oracle-specific restrictions on
implicit returning.
ON UPDATE CASCADE
-----------------
Oracle doesn't have native ON UPDATE CASCADE functionality. A trigger based
solution is available at
http://asktom.oracle.com/tkyte/update_cascade/index.html .
When using the SQLAlchemy ORM, the ORM has limited ability to manually issue
cascading updates - specify ForeignKey objects using the
"deferrable=True, initially='deferred'" keyword arguments,
and specify "passive_updates=False" on each relationship().
Oracle 8 Compatibility
----------------------
When Oracle 8 is detected, the dialect internally configures itself to the
following behaviors:
* the use_ansi flag is set to False. This has the effect of converting all
JOIN phrases into the WHERE clause, and in the case of LEFT OUTER JOIN
makes use of Oracle's (+) operator.
* the NVARCHAR2 and NCLOB datatypes are no longer generated as DDL when
the :class:`~sqlalchemy.types.Unicode` is used - VARCHAR2 and CLOB are
  issued instead. This is because these types don't seem to work correctly on
Oracle 8 even though they are available. The
:class:`~sqlalchemy.types.NVARCHAR` and
:class:`~sqlalchemy.dialects.oracle.NCLOB` types will always generate
NVARCHAR2 and NCLOB.
* the "native unicode" mode is disabled when using cx_oracle, i.e. SQLAlchemy
encodes all Python unicode objects to "string" before passing in as bind
parameters.
Synonym/DBLINK Reflection
-------------------------
When using reflection with Table objects, the dialect can optionally search
for tables indicated by synonyms, either in local or remote schemas or
accessed over DBLINK, by passing the flag ``oracle_resolve_synonyms=True`` as
a keyword argument to the :class:`.Table` construct::
some_table = Table('some_table', autoload=True,
autoload_with=some_engine,
oracle_resolve_synonyms=True)
When this flag is set, the given name (such as ``some_table`` above) will
be searched not just in the ``ALL_TABLES`` view, but also within the
``ALL_SYNONYMS`` view to see if this name is actually a synonym to another
name. If the synonym is located and refers to a DBLINK, the oracle dialect
knows how to locate the table's information using DBLINK syntax (e.g.
``@dblink``).
``oracle_resolve_synonyms`` is accepted wherever reflection arguments are
accepted, including methods such as :meth:`.MetaData.reflect` and
:meth:`.Inspector.get_columns`.
If synonyms are not in use, this flag should be left disabled.
DateTime Compatibility
----------------------
Oracle has no datatype known as ``DATETIME``; it instead has only ``DATE``,
which can actually store a date and time value. For this reason, the Oracle
dialect provides a type :class:`.oracle.DATE` which is a subclass of
:class:`.DateTime`. This type has no special behavior, and is only
present as a "marker" for this type; additionally, when a database column
is reflected and the type is reported as ``DATE``, the time-supporting
:class:`.oracle.DATE` type is used.
.. versionchanged:: 0.9.4 Added :class:`.oracle.DATE` to subclass
:class:`.DateTime`. This is a change as previous versions
would reflect a ``DATE`` column as :class:`.types.DATE`, which subclasses
:class:`.Date`. The only significance here is for schemes that are
examining the type of column for use in special Python translations or
for migrating schemas to other database backends.
"""
import re
from sqlalchemy import util, sql
from sqlalchemy.engine import default, base, reflection
from sqlalchemy.sql import compiler, visitors, expression
from sqlalchemy.sql import (operators as sql_operators,
functions as sql_functions)
from sqlalchemy import types as sqltypes, schema as sa_schema
from sqlalchemy.types import VARCHAR, NVARCHAR, CHAR, \
BLOB, CLOB, TIMESTAMP, FLOAT
RESERVED_WORDS = \
set('SHARE RAW DROP BETWEEN FROM DESC OPTION PRIOR LONG THEN '
'DEFAULT ALTER IS INTO MINUS INTEGER NUMBER GRANT IDENTIFIED '
'ALL TO ORDER ON FLOAT DATE HAVING CLUSTER NOWAIT RESOURCE '
'ANY TABLE INDEX FOR UPDATE WHERE CHECK SMALLINT WITH DELETE '
'BY ASC REVOKE LIKE SIZE RENAME NOCOMPRESS NULL GROUP VALUES '
'AS IN VIEW EXCLUSIVE COMPRESS SYNONYM SELECT INSERT EXISTS '
'NOT TRIGGER ELSE CREATE INTERSECT PCTFREE DISTINCT USER '
'CONNECT SET MODE OF UNIQUE VARCHAR2 VARCHAR LOCK OR CHAR '
'DECIMAL UNION PUBLIC AND START UID COMMENT CURRENT LEVEL'.split())
NO_ARG_FNS = set('UID CURRENT_DATE SYSDATE USER '
'CURRENT_TIME CURRENT_TIMESTAMP'.split())
class RAW(sqltypes._Binary):
__visit_name__ = 'RAW'
OracleRaw = RAW
class NCLOB(sqltypes.Text):
__visit_name__ = 'NCLOB'
class VARCHAR2(VARCHAR):
__visit_name__ = 'VARCHAR2'
NVARCHAR2 = NVARCHAR
class NUMBER(sqltypes.Numeric, sqltypes.Integer):
__visit_name__ = 'NUMBER'
def __init__(self, precision=None, scale=None, asdecimal=None):
if asdecimal is None:
asdecimal = bool(scale and scale > 0)
super(NUMBER, self).__init__(
precision=precision, scale=scale, asdecimal=asdecimal)
def adapt(self, impltype):
ret = super(NUMBER, self).adapt(impltype)
# leave a hint for the DBAPI handler
ret._is_oracle_number = True
return ret
@property
def _type_affinity(self):
if bool(self.scale and self.scale > 0):
return sqltypes.Numeric
else:
return sqltypes.Integer
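# For illustration (not part of the original module): NUMBER(10, 2) has a
# Numeric type affinity and returns Decimal values by default, while
# NUMBER(10) and NUMBER(10, 0) report an Integer affinity with
# asdecimal=False.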
class DOUBLE_PRECISION(sqltypes.Numeric):
__visit_name__ = 'DOUBLE_PRECISION'
def __init__(self, precision=None, scale=None, asdecimal=None):
if asdecimal is None:
asdecimal = False
super(DOUBLE_PRECISION, self).__init__(
precision=precision, scale=scale, asdecimal=asdecimal)
class BFILE(sqltypes.LargeBinary):
__visit_name__ = 'BFILE'
class LONG(sqltypes.Text):
__visit_name__ = 'LONG'
class DATE(sqltypes.DateTime):
"""Provide the oracle DATE type.
This type has no special Python behavior, except that it subclasses
:class:`.types.DateTime`; this is to suit the fact that the Oracle
``DATE`` type supports a time value.
.. versionadded:: 0.9.4
"""
__visit_name__ = 'DATE'
def _compare_type_affinity(self, other):
return other._type_affinity in (sqltypes.DateTime, sqltypes.Date)
class INTERVAL(sqltypes.TypeEngine):
__visit_name__ = 'INTERVAL'
def __init__(self,
day_precision=None,
second_precision=None):
"""Construct an INTERVAL.
Note that only DAY TO SECOND intervals are currently supported.
This is due to a lack of support for YEAR TO MONTH intervals
within available DBAPIs (cx_oracle and zxjdbc).
:param day_precision: the day precision value. this is the number of
digits to store for the day field. Defaults to "2"
:param second_precision: the second precision value. this is the
number of digits to store for the fractional seconds field.
Defaults to "6".
"""
self.day_precision = day_precision
self.second_precision = second_precision
@classmethod
def _adapt_from_generic_interval(cls, interval):
return INTERVAL(day_precision=interval.day_precision,
second_precision=interval.second_precision)
@property
def _type_affinity(self):
return sqltypes.Interval
class ROWID(sqltypes.TypeEngine):
"""Oracle ROWID type.
When used in a cast() or similar, generates ROWID.
"""
__visit_name__ = 'ROWID'
class _OracleBoolean(sqltypes.Boolean):
def get_dbapi_type(self, dbapi):
return dbapi.NUMBER
colspecs = {
sqltypes.Boolean: _OracleBoolean,
sqltypes.Interval: INTERVAL,
sqltypes.DateTime: DATE
}
ischema_names = {
'VARCHAR2': VARCHAR,
'NVARCHAR2': NVARCHAR,
'CHAR': CHAR,
'DATE': DATE,
'NUMBER': NUMBER,
'BLOB': BLOB,
'BFILE': BFILE,
'CLOB': CLOB,
'NCLOB': NCLOB,
'TIMESTAMP': TIMESTAMP,
'TIMESTAMP WITH TIME ZONE': TIMESTAMP,
'INTERVAL DAY TO SECOND': INTERVAL,
'RAW': RAW,
'FLOAT': FLOAT,
'DOUBLE PRECISION': DOUBLE_PRECISION,
'LONG': LONG,
}
class OracleTypeCompiler(compiler.GenericTypeCompiler):
# Note:
# Oracle DATE == DATETIME
# Oracle does not allow milliseconds in DATE
# Oracle does not support TIME columns
def visit_datetime(self, type_):
return self.visit_DATE(type_)
def visit_float(self, type_):
return self.visit_FLOAT(type_)
def visit_unicode(self, type_):
if self.dialect._supports_nchar:
return self.visit_NVARCHAR2(type_)
else:
return self.visit_VARCHAR2(type_)
def visit_INTERVAL(self, type_):
return "INTERVAL DAY%s TO SECOND%s" % (
type_.day_precision is not None and
"(%d)" % type_.day_precision or
"",
type_.second_precision is not None and
"(%d)" % type_.second_precision or
"",
)
def visit_LONG(self, type_):
return "LONG"
def visit_TIMESTAMP(self, type_):
if type_.timezone:
return "TIMESTAMP WITH TIME ZONE"
else:
return "TIMESTAMP"
def visit_DOUBLE_PRECISION(self, type_):
return self._generate_numeric(type_, "DOUBLE PRECISION")
def visit_NUMBER(self, type_, **kw):
return self._generate_numeric(type_, "NUMBER", **kw)
def _generate_numeric(self, type_, name, precision=None, scale=None):
if precision is None:
precision = type_.precision
if scale is None:
scale = getattr(type_, 'scale', None)
if precision is None:
return name
elif scale is None:
n = "%(name)s(%(precision)s)"
return n % {'name': name, 'precision': precision}
else:
n = "%(name)s(%(precision)s, %(scale)s)"
return n % {'name': name, 'precision': precision, 'scale': scale}
def visit_string(self, type_):
return self.visit_VARCHAR2(type_)
def visit_VARCHAR2(self, type_):
return self._visit_varchar(type_, '', '2')
def visit_NVARCHAR2(self, type_):
return self._visit_varchar(type_, 'N', '2')
visit_NVARCHAR = visit_NVARCHAR2
def visit_VARCHAR(self, type_):
return self._visit_varchar(type_, '', '')
def _visit_varchar(self, type_, n, num):
if not type_.length:
return "%(n)sVARCHAR%(two)s" % {'two': num, 'n': n}
elif not n and self.dialect._supports_char_length:
varchar = "VARCHAR%(two)s(%(length)s CHAR)"
return varchar % {'length': type_.length, 'two': num}
else:
varchar = "%(n)sVARCHAR%(two)s(%(length)s)"
return varchar % {'length': type_.length, 'two': num, 'n': n}
def visit_text(self, type_):
return self.visit_CLOB(type_)
def visit_unicode_text(self, type_):
if self.dialect._supports_nchar:
return self.visit_NCLOB(type_)
else:
return self.visit_CLOB(type_)
def visit_large_binary(self, type_):
return self.visit_BLOB(type_)
def visit_big_integer(self, type_):
return self.visit_NUMBER(type_, precision=19)
def visit_boolean(self, type_):
return self.visit_SMALLINT(type_)
def visit_RAW(self, type_):
if type_.length:
return "RAW(%(length)s)" % {'length': type_.length}
else:
return "RAW"
def visit_ROWID(self, type_):
return "ROWID"
class OracleCompiler(compiler.SQLCompiler):
"""Oracle compiler modifies the lexical structure of Select
statements to work under non-ANSI configured Oracle databases, if
the use_ansi flag is False.
"""
compound_keywords = util.update_copy(
compiler.SQLCompiler.compound_keywords,
{
expression.CompoundSelect.EXCEPT: 'MINUS'
}
)
def __init__(self, *args, **kwargs):
self.__wheres = {}
self._quoted_bind_names = {}
super(OracleCompiler, self).__init__(*args, **kwargs)
def visit_mod_binary(self, binary, operator, **kw):
return "mod(%s, %s)" % (self.process(binary.left, **kw),
self.process(binary.right, **kw))
def visit_now_func(self, fn, **kw):
return "CURRENT_TIMESTAMP"
def visit_char_length_func(self, fn, **kw):
return "LENGTH" + self.function_argspec(fn, **kw)
def visit_match_op_binary(self, binary, operator, **kw):
return "CONTAINS (%s, %s)" % (self.process(binary.left),
self.process(binary.right))
def visit_true(self, expr, **kw):
return '1'
def visit_false(self, expr, **kw):
return '0'
def get_select_hint_text(self, byfroms):
return " ".join(
"/*+ %s */" % text for table, text in byfroms.items()
)
def function_argspec(self, fn, **kw):
if len(fn.clauses) > 0 or fn.name.upper() not in NO_ARG_FNS:
return compiler.SQLCompiler.function_argspec(self, fn, **kw)
else:
return ""
def default_from(self):
"""Called when a ``SELECT`` statement has no froms,
and no ``FROM`` clause is to be appended.
        The Oracle compiler tacks "FROM DUAL" onto the statement.
"""
return " FROM DUAL"
def visit_join(self, join, **kwargs):
if self.dialect.use_ansi:
return compiler.SQLCompiler.visit_join(self, join, **kwargs)
else:
kwargs['asfrom'] = True
if isinstance(join.right, expression.FromGrouping):
right = join.right.element
else:
right = join.right
return self.process(join.left, **kwargs) + \
", " + self.process(right, **kwargs)
def _get_nonansi_join_whereclause(self, froms):
clauses = []
def visit_join(join):
if join.isouter:
def visit_binary(binary):
if binary.operator == sql_operators.eq:
if join.right.is_derived_from(binary.left.table):
binary.left = _OuterJoinColumn(binary.left)
elif join.right.is_derived_from(binary.right.table):
binary.right = _OuterJoinColumn(binary.right)
clauses.append(visitors.cloned_traverse(
join.onclause, {}, {'binary': visit_binary}))
else:
clauses.append(join.onclause)
for j in join.left, join.right:
if isinstance(j, expression.Join):
visit_join(j)
elif isinstance(j, expression.FromGrouping):
visit_join(j.element)
for f in froms:
if isinstance(f, expression.Join):
visit_join(f)
if not clauses:
return None
else:
return sql.and_(*clauses)
def visit_outer_join_column(self, vc):
return self.process(vc.column) + "(+)"
def visit_sequence(self, seq):
return (self.dialect.identifier_preparer.format_sequence(seq) +
".nextval")
def visit_alias(self, alias, asfrom=False, ashint=False, **kwargs):
"""Oracle doesn't like ``FROM table AS alias``. Is the AS standard
SQL??
"""
if asfrom or ashint:
alias_name = isinstance(alias.name, expression._truncated_label) and \
self._truncated_identifier("alias", alias.name) or alias.name
if ashint:
return alias_name
elif asfrom:
return self.process(alias.original, asfrom=asfrom, **kwargs) + \
" " + self.preparer.format_alias(alias, alias_name)
else:
return self.process(alias.original, **kwargs)
def returning_clause(self, stmt, returning_cols):
columns = []
binds = []
for i, column in enumerate(
expression._select_iterables(returning_cols)):
if column.type._has_column_expression:
col_expr = column.type.column_expression(column)
else:
col_expr = column
outparam = sql.outparam("ret_%d" % i, type_=column.type)
self.binds[outparam.key] = outparam
binds.append(
self.bindparam_string(self._truncate_bindparam(outparam)))
columns.append(
self.process(col_expr, within_columns_clause=False))
self.result_map[outparam.key] = (
outparam.key,
(column, getattr(column, 'name', None),
getattr(column, 'key', None)),
column.type
)
return 'RETURNING ' + ', '.join(columns) + " INTO " + ", ".join(binds)
def _TODO_visit_compound_select(self, select):
"""Need to determine how to get ``LIMIT``/``OFFSET`` into a
``UNION`` for Oracle.
"""
pass
def visit_select(self, select, **kwargs):
"""Look for ``LIMIT`` and OFFSET in a select statement, and if
so tries to wrap it in a subquery with ``rownum`` criterion.
"""
if not getattr(select, '_oracle_visit', None):
if not self.dialect.use_ansi:
froms = self._display_froms_for_select(
select, kwargs.get('asfrom', False))
whereclause = self._get_nonansi_join_whereclause(froms)
if whereclause is not None:
select = select.where(whereclause)
select._oracle_visit = True
if select._limit is not None or select._offset is not None:
                # See http://www.oracle.com/technology/oramag/oracle/06-sep/o56asktom.html
#
# Generalized form of an Oracle pagination query:
# select ... from (
# select /*+ FIRST_ROWS(N) */ ...., rownum as ora_rn from
# ( select distinct ... where ... order by ...
# ) where ROWNUM <= :limit+:offset
# ) where ora_rn > :offset
# Outer select and "ROWNUM as ora_rn" can be dropped if
# limit=0
# TODO: use annotations instead of clone + attr set ?
select = select._generate()
select._oracle_visit = True
# Wrap the middle select and add the hint
limitselect = sql.select([c for c in select.c])
if select._limit and self.dialect.optimize_limits:
limitselect = limitselect.prefix_with(
"/*+ FIRST_ROWS(%d) */" %
select._limit)
limitselect._oracle_visit = True
limitselect._is_wrapper = True
# If needed, add the limiting clause
if select._limit is not None:
max_row = select._limit
if select._offset is not None:
max_row += select._offset
if not self.dialect.use_binds_for_limits:
max_row = sql.literal_column("%d" % max_row)
limitselect.append_whereclause(
sql.literal_column("ROWNUM") <= max_row)
# If needed, add the ora_rn, and wrap again with offset.
if select._offset is None:
limitselect._for_update_arg = select._for_update_arg
select = limitselect
else:
limitselect = limitselect.column(
sql.literal_column("ROWNUM").label("ora_rn"))
limitselect._oracle_visit = True
limitselect._is_wrapper = True
offsetselect = sql.select(
[c for c in limitselect.c if c.key != 'ora_rn'])
offsetselect._oracle_visit = True
offsetselect._is_wrapper = True
offset_value = select._offset
if not self.dialect.use_binds_for_limits:
offset_value = sql.literal_column("%d" % offset_value)
offsetselect.append_whereclause(
sql.literal_column("ora_rn") > offset_value)
offsetselect._for_update_arg = select._for_update_arg
select = offsetselect
kwargs['iswrapper'] = getattr(select, '_is_wrapper', False)
return compiler.SQLCompiler.visit_select(self, select, **kwargs)
def limit_clause(self, select):
return ""
def for_update_clause(self, select):
if self.is_subquery():
return ""
tmp = ' FOR UPDATE'
if select._for_update_arg.of:
tmp += ' OF ' + ', '.join(
self.process(elem) for elem in
select._for_update_arg.of
)
if select._for_update_arg.nowait:
tmp += " NOWAIT"
return tmp
class OracleDDLCompiler(compiler.DDLCompiler):
def define_constraint_cascades(self, constraint):
text = ""
if constraint.ondelete is not None:
text += " ON DELETE %s" % constraint.ondelete
# oracle has no ON UPDATE CASCADE -
# its only available via triggers
# http://asktom.oracle.com/tkyte/update_cascade/index.html
if constraint.onupdate is not None:
util.warn(
"Oracle does not contain native UPDATE CASCADE "
"functionality - onupdates will not be rendered for foreign "
"keys. Consider using deferrable=True, initially='deferred' "
"or triggers.")
return text
def visit_create_index(self, create, **kw):
return super(OracleDDLCompiler, self).\
visit_create_index(create, include_schema=True)
class OracleIdentifierPreparer(compiler.IdentifierPreparer):
reserved_words = set([x.lower() for x in RESERVED_WORDS])
illegal_initial_characters = set(
(str(dig) for dig in range(0, 10))).union(["_", "$"])
def _bindparam_requires_quotes(self, value):
"""Return True if the given identifier requires quoting."""
lc_value = value.lower()
return (lc_value in self.reserved_words
or value[0] in self.illegal_initial_characters
or not self.legal_characters.match(util.text_type(value))
)
def format_savepoint(self, savepoint):
name = re.sub(r'^_+', '', savepoint.ident)
return super(
OracleIdentifierPreparer, self).format_savepoint(savepoint, name)
class OracleExecutionContext(default.DefaultExecutionContext):
def fire_sequence(self, seq, type_):
return self._execute_scalar(
"SELECT " +
self.dialect.identifier_preparer.format_sequence(seq) +
".nextval FROM DUAL", type_)
class OracleDialect(default.DefaultDialect):
name = 'oracle'
supports_alter = True
supports_unicode_statements = False
supports_unicode_binds = False
max_identifier_length = 30
supports_sane_rowcount = True
supports_sane_multi_rowcount = False
supports_sequences = True
sequences_optional = False
postfetch_lastrowid = False
default_paramstyle = 'named'
colspecs = colspecs
ischema_names = ischema_names
requires_name_normalize = True
supports_default_values = False
supports_empty_insert = False
statement_compiler = OracleCompiler
ddl_compiler = OracleDDLCompiler
type_compiler = OracleTypeCompiler
preparer = OracleIdentifierPreparer
execution_ctx_cls = OracleExecutionContext
reflection_options = ('oracle_resolve_synonyms', )
construct_arguments = [
(sa_schema.Table, {"resolve_synonyms": False})
]
def __init__(self,
use_ansi=True,
optimize_limits=False,
use_binds_for_limits=True,
**kwargs):
default.DefaultDialect.__init__(self, **kwargs)
self.use_ansi = use_ansi
self.optimize_limits = optimize_limits
self.use_binds_for_limits = use_binds_for_limits
def initialize(self, connection):
super(OracleDialect, self).initialize(connection)
self.implicit_returning = self.__dict__.get(
'implicit_returning',
self.server_version_info > (10, )
)
if self._is_oracle_8:
self.colspecs = self.colspecs.copy()
self.colspecs.pop(sqltypes.Interval)
self.use_ansi = False
@property
def _is_oracle_8(self):
return self.server_version_info and \
self.server_version_info < (9, )
@property
def _supports_char_length(self):
return not self._is_oracle_8
@property
def _supports_nchar(self):
return not self._is_oracle_8
def do_release_savepoint(self, connection, name):
# Oracle does not support RELEASE SAVEPOINT
pass
def has_table(self, connection, table_name, schema=None):
if not schema:
schema = self.default_schema_name
cursor = connection.execute(
sql.text("SELECT table_name FROM all_tables "
"WHERE table_name = :name AND owner = :schema_name"),
name=self.denormalize_name(table_name),
schema_name=self.denormalize_name(schema))
return cursor.first() is not None
def has_sequence(self, connection, sequence_name, schema=None):
if not schema:
schema = self.default_schema_name
cursor = connection.execute(
sql.text("SELECT sequence_name FROM all_sequences "
"WHERE sequence_name = :name AND "
"sequence_owner = :schema_name"),
name=self.denormalize_name(sequence_name),
schema_name=self.denormalize_name(schema))
return cursor.first() is not None
def normalize_name(self, name):
if name is None:
return None
if util.py2k:
if isinstance(name, str):
name = name.decode(self.encoding)
if name.upper() == name and not \
self.identifier_preparer._requires_quotes(name.lower()):
return name.lower()
else:
return name
def denormalize_name(self, name):
if name is None:
return None
elif name.lower() == name and not \
self.identifier_preparer._requires_quotes(name.lower()):
name = name.upper()
if util.py2k:
if not self.supports_unicode_binds:
name = name.encode(self.encoding)
else:
name = unicode(name)
return name
def _get_default_schema_name(self, connection):
return self.normalize_name(
connection.execute('SELECT USER FROM DUAL').scalar())
def _resolve_synonym(self, connection, desired_owner=None,
desired_synonym=None, desired_table=None):
"""search for a local synonym matching the given desired owner/name.
if desired_owner is None, attempts to locate a distinct owner.
returns the actual name, owner, dblink name, and synonym name if
found.
"""
q = "SELECT owner, table_owner, table_name, db_link, "\
"synonym_name FROM all_synonyms WHERE "
clauses = []
params = {}
if desired_synonym:
clauses.append("synonym_name = :synonym_name")
params['synonym_name'] = desired_synonym
if desired_owner:
clauses.append("owner = :desired_owner")
params['desired_owner'] = desired_owner
if desired_table:
clauses.append("table_name = :tname")
params['tname'] = desired_table
q += " AND ".join(clauses)
result = connection.execute(sql.text(q), **params)
if desired_owner:
row = result.first()
if row:
return (row['table_name'], row['table_owner'],
row['db_link'], row['synonym_name'])
else:
return None, None, None, None
else:
rows = result.fetchall()
if len(rows) > 1:
raise AssertionError(
"There are multiple tables visible to the schema, you "
"must specify owner")
elif len(rows) == 1:
row = rows[0]
return (row['table_name'], row['table_owner'],
row['db_link'], row['synonym_name'])
else:
return None, None, None, None
@reflection.cache
def _prepare_reflection_args(self, connection, table_name, schema=None,
resolve_synonyms=False, dblink='', **kw):
if resolve_synonyms:
actual_name, owner, dblink, synonym = self._resolve_synonym(
connection,
desired_owner=self.denormalize_name(schema),
desired_synonym=self.denormalize_name(table_name)
)
else:
actual_name, owner, dblink, synonym = None, None, None, None
if not actual_name:
actual_name = self.denormalize_name(table_name)
if dblink:
# using user_db_links here since all_db_links appears
# to have more restricted permissions.
# http://docs.oracle.com/cd/B28359_01/server.111/b28310/ds_admin005.htm
# will need to hear from more users if we are doing
# the right thing here. See [ticket:2619]
owner = connection.scalar(
sql.text("SELECT username FROM user_db_links "
"WHERE db_link=:link"), link=dblink)
dblink = "@" + dblink
elif not owner:
owner = self.denormalize_name(schema or self.default_schema_name)
return (actual_name, owner, dblink or '', synonym)
@reflection.cache
def get_schema_names(self, connection, **kw):
s = "SELECT username FROM all_users ORDER BY username"
cursor = connection.execute(s)
return [self.normalize_name(row[0]) for row in cursor]
@reflection.cache
def get_table_names(self, connection, schema=None, **kw):
schema = self.denormalize_name(schema or self.default_schema_name)
# note that table_names() isn't loading DBLINKed or synonym'ed tables
s = sql.text(
"SELECT table_name FROM all_tables "
"WHERE nvl(tablespace_name, 'no tablespace') NOT IN "
"('SYSTEM', 'SYSAUX') "
"AND OWNER = :owner "
"AND IOT_NAME IS NULL")
cursor = connection.execute(s, owner=schema)
return [self.normalize_name(row[0]) for row in cursor]
@reflection.cache
def get_view_names(self, connection, schema=None, **kw):
schema = self.denormalize_name(schema or self.default_schema_name)
s = sql.text("SELECT view_name FROM all_views WHERE owner = :owner")
cursor = connection.execute(s, owner=schema)
return [self.normalize_name(row[0]) for row in cursor]
@reflection.cache
def get_columns(self, connection, table_name, schema=None, **kw):
"""
kw arguments can be:
oracle_resolve_synonyms
dblink
"""
resolve_synonyms = kw.get('oracle_resolve_synonyms', False)
dblink = kw.get('dblink', '')
info_cache = kw.get('info_cache')
(table_name, schema, dblink, synonym) = \
self._prepare_reflection_args(connection, table_name, schema,
resolve_synonyms, dblink,
info_cache=info_cache)
columns = []
if self._supports_char_length:
char_length_col = 'char_length'
else:
char_length_col = 'data_length'
params = {"table_name": table_name}
text = "SELECT column_name, data_type, %(char_length_col)s, "\
"data_precision, data_scale, "\
"nullable, data_default FROM ALL_TAB_COLUMNS%(dblink)s "\
"WHERE table_name = :table_name"
if schema is not None:
params['owner'] = schema
text += " AND owner = :owner "
text += " ORDER BY column_id"
text = text % {'dblink': dblink, 'char_length_col': char_length_col}
c = connection.execute(sql.text(text), **params)
for row in c:
(colname, orig_colname, coltype, length, precision, scale, nullable, default) = \
(self.normalize_name(row[0]), row[0], row[1], row[2], row[3], row[4], row[5] == 'Y', row[6])
if coltype == 'NUMBER':
coltype = NUMBER(precision, scale)
elif coltype in ('VARCHAR2', 'NVARCHAR2', 'CHAR'):
coltype = self.ischema_names.get(coltype)(length)
elif 'WITH TIME ZONE' in coltype:
coltype = TIMESTAMP(timezone=True)
else:
coltype = re.sub(r'\(\d+\)', '', coltype)
try:
coltype = self.ischema_names[coltype]
except KeyError:
util.warn("Did not recognize type '%s' of column '%s'" %
(coltype, colname))
coltype = sqltypes.NULLTYPE
cdict = {
'name': colname,
'type': coltype,
'nullable': nullable,
'default': default,
'autoincrement': default is None
}
if orig_colname.lower() == orig_colname:
cdict['quote'] = True
columns.append(cdict)
return columns
@reflection.cache
def get_indexes(self, connection, table_name, schema=None,
resolve_synonyms=False, dblink='', **kw):
info_cache = kw.get('info_cache')
(table_name, schema, dblink, synonym) = \
self._prepare_reflection_args(connection, table_name, schema,
resolve_synonyms, dblink,
info_cache=info_cache)
indexes = []
params = {'table_name': table_name}
text = \
"SELECT a.index_name, a.column_name, b.uniqueness "\
"\nFROM ALL_IND_COLUMNS%(dblink)s a, "\
"\nALL_INDEXES%(dblink)s b "\
"\nWHERE "\
"\na.index_name = b.index_name "\
"\nAND a.table_owner = b.table_owner "\
"\nAND a.table_name = b.table_name "\
"\nAND a.table_name = :table_name "
if schema is not None:
params['schema'] = schema
text += "AND a.table_owner = :schema "
text += "ORDER BY a.index_name, a.column_position"
text = text % {'dblink': dblink}
q = sql.text(text)
rp = connection.execute(q, **params)
last_index_name = None
pk_constraint = self.get_pk_constraint(
connection, table_name, schema, resolve_synonyms=resolve_synonyms,
dblink=dblink, info_cache=kw.get('info_cache'))
pkeys = pk_constraint['constrained_columns']
uniqueness = dict(NONUNIQUE=False, UNIQUE=True)
oracle_sys_col = re.compile(r'SYS_NC\d+\$', re.IGNORECASE)
def upper_name_set(names):
return set([i.upper() for i in names])
pk_names = upper_name_set(pkeys)
def remove_if_primary_key(index):
# don't include the primary key index
if index is not None and \
upper_name_set(index['column_names']) == pk_names:
indexes.pop()
index = None
for rset in rp:
if rset.index_name != last_index_name:
remove_if_primary_key(index)
index = dict(name=self.normalize_name(rset.index_name),
column_names=[])
indexes.append(index)
index['unique'] = uniqueness.get(rset.uniqueness, False)
# filter out Oracle SYS_NC names. could also do an outer join
# to the all_tab_columns table and check for real col names there.
if not oracle_sys_col.match(rset.column_name):
index['column_names'].append(
self.normalize_name(rset.column_name))
last_index_name = rset.index_name
remove_if_primary_key(index)
return indexes
@reflection.cache
def _get_constraint_data(self, connection, table_name, schema=None,
dblink='', **kw):
params = {'table_name': table_name}
text = \
"SELECT"\
"\nac.constraint_name,"\
"\nac.constraint_type,"\
"\nloc.column_name AS local_column,"\
"\nrem.table_name AS remote_table,"\
"\nrem.column_name AS remote_column,"\
"\nrem.owner AS remote_owner,"\
"\nloc.position as loc_pos,"\
"\nrem.position as rem_pos"\
"\nFROM all_constraints%(dblink)s ac,"\
"\nall_cons_columns%(dblink)s loc,"\
"\nall_cons_columns%(dblink)s rem"\
"\nWHERE ac.table_name = :table_name"\
"\nAND ac.constraint_type IN ('R','P')"
if schema is not None:
params['owner'] = schema
text += "\nAND ac.owner = :owner"
text += \
"\nAND ac.owner = loc.owner"\
"\nAND ac.constraint_name = loc.constraint_name"\
"\nAND ac.r_owner = rem.owner(+)"\
"\nAND ac.r_constraint_name = rem.constraint_name(+)"\
"\nAND (rem.position IS NULL or loc.position=rem.position)"\
"\nORDER BY ac.constraint_name, loc.position"
text = text % {'dblink': dblink}
rp = connection.execute(sql.text(text), **params)
constraint_data = rp.fetchall()
return constraint_data
@reflection.cache
def get_pk_constraint(self, connection, table_name, schema=None, **kw):
resolve_synonyms = kw.get('oracle_resolve_synonyms', False)
dblink = kw.get('dblink', '')
info_cache = kw.get('info_cache')
(table_name, schema, dblink, synonym) = \
self._prepare_reflection_args(connection, table_name, schema,
resolve_synonyms, dblink,
info_cache=info_cache)
pkeys = []
constraint_name = None
constraint_data = self._get_constraint_data(
connection, table_name, schema, dblink,
info_cache=kw.get('info_cache'))
for row in constraint_data:
(cons_name, cons_type, local_column, remote_table, remote_column, remote_owner) = \
row[0:2] + tuple([self.normalize_name(x) for x in row[2:6]])
if cons_type == 'P':
if constraint_name is None:
constraint_name = self.normalize_name(cons_name)
pkeys.append(local_column)
return {'constrained_columns': pkeys, 'name': constraint_name}
@reflection.cache
def get_foreign_keys(self, connection, table_name, schema=None, **kw):
"""
kw arguments can be:
oracle_resolve_synonyms
dblink
"""
requested_schema = schema # to check later on
resolve_synonyms = kw.get('oracle_resolve_synonyms', False)
dblink = kw.get('dblink', '')
info_cache = kw.get('info_cache')
(table_name, schema, dblink, synonym) = \
self._prepare_reflection_args(connection, table_name, schema,
resolve_synonyms, dblink,
info_cache=info_cache)
constraint_data = self._get_constraint_data(
connection, table_name, schema, dblink,
info_cache=kw.get('info_cache'))
def fkey_rec():
return {
'name': None,
'constrained_columns': [],
'referred_schema': None,
'referred_table': None,
'referred_columns': []
}
fkeys = util.defaultdict(fkey_rec)
for row in constraint_data:
(cons_name, cons_type, local_column, remote_table, remote_column, remote_owner) = \
row[0:2] + tuple([self.normalize_name(x) for x in row[2:6]])
if cons_type == 'R':
if remote_table is None:
# ticket 363
util.warn(
("Got 'None' querying 'table_name' from "
"all_cons_columns%(dblink)s - does the user have "
"proper rights to the table?") % {'dblink': dblink})
continue
rec = fkeys[cons_name]
rec['name'] = cons_name
local_cols, remote_cols = rec[
'constrained_columns'], rec['referred_columns']
if not rec['referred_table']:
if resolve_synonyms:
ref_remote_name, ref_remote_owner, ref_dblink, ref_synonym = \
self._resolve_synonym(
connection,
desired_owner=self.denormalize_name(
remote_owner),
desired_table=self.denormalize_name(
remote_table)
)
if ref_synonym:
remote_table = self.normalize_name(ref_synonym)
remote_owner = self.normalize_name(
ref_remote_owner)
rec['referred_table'] = remote_table
if requested_schema is not None or \
self.denormalize_name(remote_owner) != schema:
rec['referred_schema'] = remote_owner
local_cols.append(local_column)
remote_cols.append(remote_column)
return list(fkeys.values())
@reflection.cache
def get_view_definition(self, connection, view_name, schema=None,
resolve_synonyms=False, dblink='', **kw):
info_cache = kw.get('info_cache')
(view_name, schema, dblink, synonym) = \
self._prepare_reflection_args(connection, view_name, schema,
resolve_synonyms, dblink,
info_cache=info_cache)
params = {'view_name': view_name}
text = "SELECT text FROM all_views WHERE view_name=:view_name"
if schema is not None:
text += " AND owner = :schema"
params['schema'] = schema
rp = connection.execute(sql.text(text), **params).scalar()
if rp:
if util.py2k:
rp = rp.decode(self.encoding)
return rp
else:
return None
class _OuterJoinColumn(sql.ClauseElement):
__visit_name__ = 'outer_join_column'
def __init__(self, column):
self.column = column
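# Illustrative sketch (not part of SQLAlchemy): normalize_name and
# denormalize_name above encode Oracle's case convention -- the data
# dictionary stores unquoted identifiers in UPPERCASE, while SQLAlchemy
# treats all-lowercase names as case-insensitive. A minimal standalone
# model of that round trip, ignoring quoting and encoding concerns:
def _normalize(name):
    # Oracle reports "MY_TABLE"; SQLAlchemy wants "my_table".
    return name.lower() if name == name.upper() else name

def _denormalize(name):
    # SQLAlchemy passes "my_table"; Oracle catalogs expect "MY_TABLE".
    return name.upper() if name == name.lower() else name

assert _normalize("MY_TABLE") == "my_table"
assert _denormalize(_normalize("MY_TABLE")) == "MY_TABLE"
assert _normalize("MixedCase") == "MixedCase"  # mixed case passes through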
|
lazytech-org/RIOT
|
refs/heads/master
|
tests/od/tests/02-run.py
|
25
|
#!/usr/bin/env python3
# Copyright (C) 2017 Hamburg University of Applied Sciences
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
import sys
from testrunner import run
def testfunc(child):
child.expect_exact("od_hex_dump(short_str, sizeof(short_str), OD_WIDTH_DEFAULT)")
child.expect_exact("00000000 41 42 00 AB.")
child.expect_exact("od_hex_dump(long_str, sizeof(long_str), OD_WIDTH_DEFAULT)")
child.expect_exact("00000000 FF 2C 61 FF 2E 62 63 64 65 66 67 68 69 6A 6B 6C .,a..bcdefghijkl")
child.expect_exact("00000010 6D 6E 6F 70 00 mnop.")
child.expect_exact("od_hex_dump(long_str, sizeof(long_str), 4)")
child.expect_exact("00000000 FF 2C 61 FF .,a.")
child.expect_exact("00000004 2E 62 63 64 .bcd")
child.expect_exact("00000008 65 66 67 68 efgh")
child.expect_exact("0000000C 69 6A 6B 6C ijkl")
child.expect_exact("00000010 6D 6E 6F 70 mnop")
child.expect_exact("00000014 00 .")
child.expect_exact("od_hex_dump(long_str, sizeof(long_str), 3)")
child.expect_exact("00000000 FF 2C 61 .,a")
child.expect_exact("00000003 FF 2E 62 ..b")
child.expect_exact("00000006 63 64 65 cde")
child.expect_exact("00000009 66 67 68 fgh")
child.expect_exact("0000000C 69 6A 6B ijk")
child.expect_exact("0000000F 6C 6D 6E lmn")
child.expect_exact("00000012 6F 70 00 op.")
child.expect_exact("od_hex_dump(long_str, sizeof(long_str), 8)")
child.expect_exact("00000000 FF 2C 61 FF 2E 62 63 64 .,a..bcd")
child.expect_exact("00000008 65 66 67 68 69 6A 6B 6C efghijkl")
child.expect_exact("00000010 6D 6E 6F 70 00 mnop.")
print("All tests successful")
if __name__ == "__main__":
sys.exit(run(testfunc, timeout=1))
|
hgl888/chromium-crosswalk
|
refs/heads/master
|
build/android/pylib/device/device_list.py
|
114
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A module to keep track of devices across builds."""
import os
LAST_DEVICES_FILENAME = '.last_devices'
LAST_MISSING_DEVICES_FILENAME = '.last_missing'
def GetPersistentDeviceList(file_name):
"""Returns a list of devices.
Args:
file_name: the file name containing a list of devices.
Returns: List of device serial numbers that were on the bot.
"""
with open(file_name) as f:
return f.read().splitlines()
def WritePersistentDeviceList(file_name, device_list):
path = os.path.dirname(file_name)
if not os.path.exists(path):
os.makedirs(path)
with open(file_name, 'w') as f:
f.write('\n'.join(set(device_list)))
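# Minimal round-trip check (illustrative only; the temporary file is a
# stand-in, not part of the original module): write a device list, read it
# back, and confirm that duplicates were collapsed by the set() above.
if __name__ == '__main__':
  import tempfile
  tmp = tempfile.NamedTemporaryFile(delete=False)
  tmp.close()
  WritePersistentDeviceList(tmp.name, ['serial1', 'serial2', 'serial1'])
  assert sorted(GetPersistentDeviceList(tmp.name)) == ['serial1', 'serial2']
  os.remove(tmp.name)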
|
dantebarba/docker-media-server
|
refs/heads/master
|
plex/Sub-Zero.bundle/Contents/Libraries/Shared/ftfy/__init__.py
|
2
|
# -*- coding: utf-8 -*-
"""
ftfy: fixes text for you
This is a module for making text less broken. See the `fix_text` function
for more information.
"""
from __future__ import unicode_literals
import unicodedata
import ftfy.bad_codecs
from ftfy import fixes
from ftfy.formatting import display_ljust
from ftfy.compatibility import is_printable
__version__ = '4.4.3'
# See the docstring for ftfy.bad_codecs to see what we're doing here.
ftfy.bad_codecs.ok()
def fix_text(text,
fix_entities='auto',
remove_terminal_escapes=True,
fix_encoding=True,
fix_latin_ligatures=True,
fix_character_width=True,
uncurl_quotes=True,
fix_line_breaks=True,
fix_surrogates=True,
remove_control_chars=True,
remove_bom=True,
normalization='NFC',
max_decode_length=10**6):
r"""
Given Unicode text as input, fix inconsistencies and glitches in it,
such as mojibake.
Let's start with some examples:
>>> print(fix_text('ünicode'))
ünicode
>>> print(fix_text('Broken text&hellip; it&#x2019;s flubberific!',
... normalization='NFKC'))
Broken text... it's flubberific!
>>> print(fix_text('HTML entities &lt;3'))
HTML entities <3
>>> print(fix_text('<em>HTML entities &lt;3</em>'))
<em>HTML entities &lt;3</em>
>>> print(fix_text("¯\\_(ã\x83\x84)_/¯"))
¯\_(ツ)_/¯
>>> # This example string starts with a byte-order mark, even if
>>> # you can't see it on the Web.
>>> print(fix_text('\ufeffParty like\nit’s 1999!'))
Party like
it's 1999!
>>> print(fix_text('ＬＯＵＤ　ＮＯＩＳＥＳ'))
LOUD NOISES
>>> len(fix_text('ﬁ' * 100000))
200000
>>> len(fix_text(''))
0
Based on the options you provide, ftfy applies these steps in order:
- If `remove_terminal_escapes` is True, remove sequences of bytes that are
instructions for Unix terminals, such as the codes that make text appear
in different colors.
- If `fix_encoding` is True, look for common mistakes that come from
encoding or decoding Unicode text incorrectly, and fix them if they are
reasonably fixable. See `fixes.fix_encoding` for details.
- If `fix_entities` is True, replace HTML entities with their equivalent
characters. If it's "auto" (the default), then consider replacing HTML
entities, but don't do so in text where you have seen a pair of actual
angle brackets (that's probably actually HTML and you shouldn't mess
with the entities).
- If `uncurl_quotes` is True, replace various curly quotation marks with
plain-ASCII straight quotes.
- If `fix_latin_ligatures` is True, then ligatures made of Latin letters,
such as `fi`, will be separated into individual letters. These ligatures
are usually not meaningful outside of font rendering, and often represent
copy-and-paste errors.
- If `fix_character_width` is True, half-width and full-width characters
will be replaced by their standard-width form.
- If `fix_line_breaks` is true, convert all line breaks to Unix style
(CRLF and CR line breaks become LF line breaks).
- If `fix_surrogates` is true, ensure that there are no UTF-16 surrogates
in the resulting string, by converting them to the correct characters
when they're appropriately paired, or replacing them with \ufffd
otherwise.
- If `remove_control_chars` is true, remove control characters that
are not suitable for use in text. This includes most of the ASCII control
characters, plus some Unicode controls such as the byte order mark
(U+FEFF). Useful control characters, such as Tab, Line Feed, and
bidirectional marks, are left as they are.
- If `remove_bom` is True, remove the Byte-Order Mark at the start of the
string if it exists. (This is largely redundant, because it's a special
case of `remove_control_characters`. This option will become deprecated
in a later version.)
- If `normalization` is not None, apply the specified form of Unicode
normalization, which can be one of 'NFC', 'NFKC', 'NFD', and 'NFKD'.
- The default normalization, NFC, combines characters and diacritics that
are written using separate code points, such as converting "e" plus an
acute accent modifier into "é", or converting "ka" (か) plus a dakuten
into the single character "ga" (が). Unicode can be converted to NFC
form without any change in its meaning.
- If you ask for NFKC normalization, it will apply additional
normalizations that can change the meanings of characters. For example,
ellipsis characters will be replaced with three periods, all ligatures
will be replaced with the individual characters that make them up,
and characters that differ in font style will be converted to the same
character.
- If anything was changed, repeat all the steps, so that the function is
idempotent. "&amp;" will become "&", for example, not "&".
`fix_text` will work one line at a time, with the possibility that some
lines are in different encodings, allowing it to fix text that has been
concatenated together from different sources.
When it encounters lines longer than `max_decode_length` (1 million
codepoints by default), it will not run the `fix_encoding` step, to avoid
unbounded slowdowns.
If you're certain that any decoding errors in the text would have affected
the entire text in the same way, and you don't mind operations that scale
with the length of the text, you can use `fix_text_segment` directly to
fix the whole string in one batch.
"""
if isinstance(text, bytes):
raise UnicodeError(fixes.BYTES_ERROR_TEXT)
out = []
pos = 0
while pos < len(text):
textbreak = text.find('\n', pos) + 1
fix_encoding_this_time = fix_encoding
if textbreak == 0:
textbreak = len(text)
if (textbreak - pos) > max_decode_length:
fix_encoding_this_time = False
substring = text[pos:textbreak]
if fix_entities == 'auto' and '<' in substring and '>' in substring:
# we see angle brackets together; this could be HTML
fix_entities = False
out.append(
fix_text_segment(
substring,
fix_entities=fix_entities,
remove_terminal_escapes=remove_terminal_escapes,
fix_encoding=fix_encoding_this_time,
uncurl_quotes=uncurl_quotes,
fix_latin_ligatures=fix_latin_ligatures,
fix_character_width=fix_character_width,
fix_line_breaks=fix_line_breaks,
fix_surrogates=fix_surrogates,
remove_control_chars=remove_control_chars,
remove_bom=remove_bom,
normalization=normalization
)
)
pos = textbreak
return ''.join(out)
# Some alternate names for the main functions
ftfy = fix_text
fix_encoding = fixes.fix_encoding
fix_text_encoding = fixes.fix_text_encoding # deprecated
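# Usage sketch (illustrative; mirrors the doctests above): each keyword flag
# disables one fixing step. For example, to repair mojibake but leave curly
# quotes alone:
# fixed = fix_text(some_text, uncurl_quotes=False)
# and to skip Unicode normalization entirely:
# fixed = fix_text(some_text, normalization=None)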
def fix_file(input_file,
encoding=None,
fix_entities='auto',
remove_terminal_escapes=True,
fix_encoding=True,
fix_latin_ligatures=True,
fix_character_width=True,
uncurl_quotes=True,
fix_line_breaks=True,
fix_surrogates=True,
remove_control_chars=True,
remove_bom=True,
normalization='NFC'):
"""
Fix text that is found in a file.
If the file is being read as Unicode text, use that. If it's being read as
bytes, then we hope an encoding was supplied. If not, unfortunately, we
have to guess what encoding it is. We'll try a few common encodings, but we
make no promises. See the `guess_bytes` function for how this is done.
The output is a stream of fixed lines of text.
"""
entities = fix_entities
for line in input_file:
if isinstance(line, bytes):
if encoding is None:
line, encoding = guess_bytes(line)
else:
line = line.decode(encoding)
if fix_entities == 'auto' and '<' in line and '>' in line:
entities = False
yield fix_text_segment(
line,
fix_entities=entities,
remove_terminal_escapes=remove_terminal_escapes,
fix_encoding=fix_encoding,
fix_latin_ligatures=fix_latin_ligatures,
fix_character_width=fix_character_width,
uncurl_quotes=uncurl_quotes,
fix_line_breaks=fix_line_breaks,
fix_surrogates=fix_surrogates,
remove_control_chars=remove_control_chars,
remove_bom=remove_bom,
normalization=normalization
)
def fix_text_segment(text,
fix_entities='auto',
remove_terminal_escapes=True,
fix_encoding=True,
fix_latin_ligatures=True,
fix_character_width=True,
uncurl_quotes=True,
fix_line_breaks=True,
fix_surrogates=True,
remove_control_chars=True,
remove_bom=True,
normalization='NFC'):
"""
Apply fixes to text in a single chunk. This could be a line of text
within a larger run of `fix_text`, or it could be a larger amount
of text that you are certain is in a consistent encoding.
See `fix_text` for a description of the parameters.
"""
if isinstance(text, bytes):
raise UnicodeError(fixes.BYTES_ERROR_TEXT)
if fix_entities == 'auto' and '<' in text and '>' in text:
fix_entities = False
while True:
origtext = text
if remove_terminal_escapes:
text = fixes.remove_terminal_escapes(text)
if fix_encoding:
text = fixes.fix_encoding(text)
if fix_entities:
text = fixes.unescape_html(text)
if fix_latin_ligatures:
text = fixes.fix_latin_ligatures(text)
if fix_character_width:
text = fixes.fix_character_width(text)
if uncurl_quotes:
text = fixes.uncurl_quotes(text)
if fix_line_breaks:
text = fixes.fix_line_breaks(text)
if fix_surrogates:
text = fixes.fix_surrogates(text)
if remove_control_chars:
text = fixes.remove_control_chars(text)
if remove_bom and not remove_control_chars:
# Skip this step if we've already done `remove_control_chars`,
# because it would be redundant.
text = fixes.remove_bom(text)
if normalization is not None:
text = unicodedata.normalize(normalization, text)
if text == origtext:
return text
def guess_bytes(bstring):
"""
NOTE: Using `guess_bytes` is not the recommended way of using ftfy. ftfy
is not designed to be an encoding detector.
In the unfortunate situation that you have some bytes in an unknown
encoding, ftfy can guess a reasonable strategy for decoding them, by trying
a few common encodings that can be distinguished from each other.
Unlike the rest of ftfy, this may not be accurate, and it may *create*
Unicode problems instead of solving them!
It doesn't try East Asian encodings at all, and if you have East Asian text
that you don't know how to decode, you are somewhat out of luck. East
Asian encodings require some serious statistics to distinguish from each
other, so we can't support them without decreasing the accuracy of ftfy.
If you don't know which encoding you have at all, I recommend
trying the 'chardet' module, and being appropriately skeptical about its
results.
The encodings we try here are:
- UTF-16 with a byte order mark, because a UTF-16 byte order mark looks
like nothing else
- UTF-8, because it's the global standard, which has been used by a
majority of the Web since 2008
- "utf-8-variants", because it's what people actually implement when they
think they're doing UTF-8
- MacRoman, because Microsoft Office thinks it's still a thing, and it
can be distinguished by its line breaks. (If there are no line breaks in
the string, though, you're out of luck.)
- "sloppy-windows-1252", the Latin-1-like encoding that is the most common
single-byte encoding
"""
if isinstance(bstring, type('')):
raise UnicodeError(
"This string was already decoded as Unicode. You should pass "
"bytes to guess_bytes, not Unicode."
)
if bstring.startswith(b'\xfe\xff') or bstring.startswith(b'\xff\xfe'):
return bstring.decode('utf-16'), 'utf-16'
byteset = set(bytes(bstring))
byte_ed, byte_c0, byte_CR, byte_LF = b'\xed\xc0\r\n'
try:
if byte_ed in byteset or byte_c0 in byteset:
# Byte 0xed can be used to encode a range of codepoints that
# are UTF-16 surrogates. UTF-8 does not use UTF-16 surrogates,
# so when we see 0xed, it's very likely we're being asked to
# decode CESU-8, the variant that encodes UTF-16 surrogates
# instead of the original characters themselves.
#
# This will occasionally trigger on standard UTF-8, as there
# are some Korean characters that also use byte 0xed, but that's
# not harmful.
#
# Byte 0xc0 is impossible because, numerically, it would only
# encode characters lower than U+0040. Those already have
# single-byte representations, and UTF-8 requires using the
# shortest possible representation. However, Java hides the null
# codepoint, U+0000, in a non-standard longer representation -- it
# encodes it as 0xc0 0x80 instead of 0x00, guaranteeing that 0x00
# will never appear in the encoded bytes.
#
# The 'utf-8-variants' decoder can handle both of these cases, as
# well as standard UTF-8, at the cost of a bit of speed.
return bstring.decode('utf-8-variants'), 'utf-8-variants'
else:
return bstring.decode('utf-8'), 'utf-8'
except UnicodeDecodeError:
pass
if byte_CR in bstring and byte_LF not in bstring:
return bstring.decode('macroman'), 'macroman'
else:
return bstring.decode('sloppy-windows-1252'), 'sloppy-windows-1252'
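# Illustrative check (not part of the original module): guess_bytes returns
# a (text, encoding) pair, so callers can report which strategy was used.
# Valid UTF-8 input contains neither 0xed nor 0xc0 here, so the plain
# 'utf-8' branch succeeds:
# guess_bytes(b'\xe2\x9c\x94 check')  ->  ('\u2714 check', 'utf-8')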
def explain_unicode(text):
"""
A utility method that's useful for debugging mysterious Unicode.
It breaks down a string, showing you for each codepoint its number in
hexadecimal, its glyph, its category in the Unicode standard, and its name
in the Unicode standard.
>>> explain_unicode('(╯°□°)╯︵ ┻━┻')
U+0028 ( [Ps] LEFT PARENTHESIS
U+256F ╯ [So] BOX DRAWINGS LIGHT ARC UP AND LEFT
U+00B0 ° [So] DEGREE SIGN
U+25A1 □ [So] WHITE SQUARE
U+00B0 ° [So] DEGREE SIGN
U+0029 ) [Pe] RIGHT PARENTHESIS
U+256F ╯ [So] BOX DRAWINGS LIGHT ARC UP AND LEFT
U+FE35 ︵ [Ps] PRESENTATION FORM FOR VERTICAL LEFT PARENTHESIS
U+0020 [Zs] SPACE
U+253B ┻ [So] BOX DRAWINGS HEAVY UP AND HORIZONTAL
U+2501 ━ [So] BOX DRAWINGS HEAVY HORIZONTAL
U+253B ┻ [So] BOX DRAWINGS HEAVY UP AND HORIZONTAL
"""
for char in text:
if is_printable(char):
display = char
else:
display = char.encode('unicode-escape').decode('ascii')
print('U+{code:04X} {display} [{category}] {name}'.format(
display=display_ljust(display, 7),
code=ord(char),
category=unicodedata.category(char),
name=unicodedata.name(char, '<unknown>')
))
|
Versent/ansible
|
refs/heads/devel
|
lib/ansible/module_utils/ec2.py
|
67
|
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Michael DeHaan <michael.dehaan@gmail.com>, 2012-2013
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
try:
from distutils.version import LooseVersion
HAS_LOOSE_VERSION = True
except ImportError:
HAS_LOOSE_VERSION = False
def boto3_conn(module, conn_type=None, resource=None, region=None, endpoint=None, **params):
if conn_type not in ['both', 'resource', 'client']:
module.fail_json(msg='There is an issue in the code of the module. You must specify either both, resource or client to the conn_type parameter in the boto3_conn function call')
resource = boto3.session.Session().resource(resource, region_name=region, endpoint_url=endpoint, **params)
client = resource.meta.client
if conn_type == 'resource':
return resource
elif conn_type == 'client':
return client
else:
return client, resource
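# Example call (hypothetical argument values, shown for illustration):
# s3 = boto3_conn(module, conn_type='resource', resource='s3',
#                 region=region, endpoint=ec2_url, **boto_params)
# conn_type='both' returns the pair (client, resource).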
def aws_common_argument_spec():
return dict(
ec2_url=dict(),
aws_secret_key=dict(aliases=['ec2_secret_key', 'secret_key'], no_log=True),
aws_access_key=dict(aliases=['ec2_access_key', 'access_key']),
validate_certs=dict(default=True, type='bool'),
security_token=dict(aliases=['access_token'], no_log=True),
profile=dict(),
)
def ec2_argument_spec():
spec = aws_common_argument_spec()
spec.update(
dict(
region=dict(aliases=['aws_region', 'ec2_region']),
)
)
return spec
def boto_supports_profile_name():
return hasattr(boto.ec2.EC2Connection, 'profile_name')
def get_aws_connection_info(module, boto3=False):
# Check module args for credentials, then check environment vars
# access_key
ec2_url = module.params.get('ec2_url')
access_key = module.params.get('aws_access_key')
secret_key = module.params.get('aws_secret_key')
security_token = module.params.get('security_token')
region = module.params.get('region')
profile_name = module.params.get('profile')
validate_certs = module.params.get('validate_certs')
if not ec2_url:
if 'AWS_URL' in os.environ:
ec2_url = os.environ['AWS_URL']
elif 'EC2_URL' in os.environ:
ec2_url = os.environ['EC2_URL']
if not access_key:
if 'AWS_ACCESS_KEY_ID' in os.environ:
access_key = os.environ['AWS_ACCESS_KEY_ID']
elif 'AWS_ACCESS_KEY' in os.environ:
access_key = os.environ['AWS_ACCESS_KEY']
elif 'EC2_ACCESS_KEY' in os.environ:
access_key = os.environ['EC2_ACCESS_KEY']
else:
# in case access_key came in as empty string
access_key = None
if not secret_key:
if 'AWS_SECRET_ACCESS_KEY' in os.environ:
secret_key = os.environ['AWS_SECRET_ACCESS_KEY']
elif 'AWS_SECRET_KEY' in os.environ:
secret_key = os.environ['AWS_SECRET_KEY']
elif 'EC2_SECRET_KEY' in os.environ:
secret_key = os.environ['EC2_SECRET_KEY']
else:
# in case secret_key came in as empty string
secret_key = None
if not region:
if 'AWS_REGION' in os.environ:
region = os.environ['AWS_REGION']
elif 'EC2_REGION' in os.environ:
region = os.environ['EC2_REGION']
else:
# boto.config.get returns None if config not found
region = boto.config.get('Boto', 'aws_region')
if not region:
region = boto.config.get('Boto', 'ec2_region')
if not security_token:
if 'AWS_SECURITY_TOKEN' in os.environ:
security_token = os.environ['AWS_SECURITY_TOKEN']
elif 'EC2_SECURITY_TOKEN' in os.environ:
security_token = os.environ['EC2_SECURITY_TOKEN']
else:
# in case security_token came in as empty string
security_token = None
if boto3:
boto_params = dict(aws_access_key_id=access_key,
aws_secret_access_key=secret_key,
aws_session_token=security_token)
if validate_certs:
boto_params['verify'] = validate_certs
if profile_name:
boto_params['profile_name'] = profile_name
else:
boto_params = dict(aws_access_key_id=access_key,
aws_secret_access_key=secret_key,
security_token=security_token)
# profile_name only works as a key in boto >= 2.24
# so only set profile_name if passed as an argument
if profile_name:
if not boto_supports_profile_name():
module.fail_json("boto does not support profile_name before 2.24")
boto_params['profile_name'] = profile_name
if validate_certs and HAS_LOOSE_VERSION and LooseVersion(boto.Version) >= LooseVersion("2.6.0"):
boto_params['validate_certs'] = validate_certs
return region, ec2_url, boto_params
def get_ec2_creds(module):
''' for compatibility mode with old modules that don't/can't yet
use ec2_connect method '''
region, ec2_url, boto_params = get_aws_connection_info(module)
return ec2_url, boto_params['aws_access_key_id'], boto_params['aws_secret_access_key'], region
def boto_fix_security_token_in_profile(conn, profile_name):
''' monkey patch for boto issue boto/boto#2100 '''
profile = 'profile ' + profile_name
if boto.config.has_option(profile, 'aws_security_token'):
conn.provider.set_security_token(boto.config.get(profile, 'aws_security_token'))
return conn
def connect_to_aws(aws_module, region, **params):
conn = aws_module.connect_to_region(region, **params)
if not conn:
if region not in [aws_module_region.name for aws_module_region in aws_module.regions()]:
raise StandardError("Region %s does not seem to be available for aws module %s. If the region definitely exists, you may need to upgrade boto or extend with endpoints_path" % (region, aws_module.__name__))
else:
raise StandardError("Unknown problem connecting to region %s for aws module %s." % (region, aws_module.__name__))
if params.get('profile_name'):
conn = boto_fix_security_token_in_profile(conn, params['profile_name'])
return conn
def ec2_connect(module):
""" Return an ec2 connection"""
region, ec2_url, boto_params = get_aws_connection_info(module)
# If we have a region specified, connect to its endpoint.
if region:
try:
ec2 = connect_to_aws(boto.ec2, region, **boto_params)
except (boto.exception.NoAuthHandlerFound, StandardError), e:
module.fail_json(msg=str(e))
# Otherwise, no region so we fallback to the old connection method
elif ec2_url:
try:
ec2 = boto.connect_ec2_endpoint(ec2_url, **boto_params)
except (boto.exception.NoAuthHandlerFound, StandardError), e:
module.fail_json(msg=str(e))
else:
module.fail_json(msg="Either region or ec2_url must be specified")
return ec2
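# Typical wiring in a module that embeds this snippet (AnsibleModule and the
# instance_id option are illustrative, not part of this file):
# argument_spec = ec2_argument_spec()
# argument_spec.update(dict(instance_id=dict(required=True)))
# module = AnsibleModule(argument_spec=argument_spec)
# ec2 = ec2_connect(module)  # region/credentials resolved from params or env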
|
tlakshman26/cinder-bug-fix-volume-conversion-full
|
refs/heads/master
|
cinder/tests/unit/volume/drivers/netapp/dataontap/client/test_client_base.py
|
9
|
# Copyright (c) 2014 Alex Meade. All rights reserved.
# Copyright (c) 2015 Tom Barron. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from lxml import etree
import mock
import six
from cinder import test
import cinder.tests.unit.volume.drivers.netapp.dataontap.fakes as fake
from cinder.volume.drivers.netapp.dataontap.client import api as netapp_api
from cinder.volume.drivers.netapp.dataontap.client import client_base
CONNECTION_INFO = {'hostname': 'hostname',
'transport_type': 'https',
'port': 443,
'username': 'admin',
'password': 'passw0rd'}
class NetAppBaseClientTestCase(test.TestCase):
def setUp(self):
super(NetAppBaseClientTestCase, self).setUp()
self.client = client_base.Client(**CONNECTION_INFO)
self.client.connection = mock.MagicMock()
self.connection = self.client.connection
self.fake_volume = six.text_type(uuid.uuid4())
self.fake_lun = six.text_type(uuid.uuid4())
self.fake_size = '1024'
self.fake_metadata = {'OsType': 'linux', 'SpaceReserved': 'true'}
def tearDown(self):
super(NetAppBaseClientTestCase, self).tearDown()
def test_get_ontapi_version(self):
version_response = netapp_api.NaElement(
etree.XML("""<results status="passed">
<major-version>1</major-version>
<minor-version>19</minor-version>
</results>"""))
self.connection.invoke_successfully.return_value = version_response
major, minor = self.client.get_ontapi_version(cached=False)
self.assertEqual('1', major)
self.assertEqual('19', minor)
def test_get_ontapi_version_cached(self):
self.connection.get_api_version.return_value = (1, 20)
major, minor = self.client.get_ontapi_version()
self.assertEqual(1, self.connection.get_api_version.call_count)
self.assertEqual(1, major)
self.assertEqual(20, minor)
def test_check_is_naelement(self):
element = netapp_api.NaElement('name')
self.assertIsNone(self.client.check_is_naelement(element))
self.assertRaises(ValueError, self.client.check_is_naelement, None)
def test_create_lun(self):
expected_path = '/vol/%s/%s' % (self.fake_volume, self.fake_lun)
with mock.patch.object(netapp_api.NaElement,
'create_node_with_children',
) as mock_create_node:
self.client.create_lun(self.fake_volume,
self.fake_lun,
self.fake_size,
self.fake_metadata)
mock_create_node.assert_called_once_with(
'lun-create-by-size',
**{'path': expected_path,
'size': self.fake_size,
'ostype': self.fake_metadata['OsType'],
'space-reservation-enabled':
self.fake_metadata['SpaceReserved']})
self.connection.invoke_successfully.assert_called_once_with(
mock.ANY, True)
def test_create_lun_with_qos_policy_group_name(self):
expected_path = '/vol/%s/%s' % (self.fake_volume, self.fake_lun)
expected_qos_group_name = 'qos_1'
mock_request = mock.Mock()
with mock.patch.object(netapp_api.NaElement,
'create_node_with_children',
return_value=mock_request
) as mock_create_node:
self.client.create_lun(
self.fake_volume,
self.fake_lun,
self.fake_size,
self.fake_metadata,
qos_policy_group_name=expected_qos_group_name)
mock_create_node.assert_called_once_with(
'lun-create-by-size',
**{'path': expected_path, 'size': self.fake_size,
'ostype': self.fake_metadata['OsType'],
'space-reservation-enabled':
self.fake_metadata['SpaceReserved']})
mock_request.add_new_child.assert_called_once_with(
'qos-policy-group', expected_qos_group_name)
self.connection.invoke_successfully.assert_called_once_with(
mock.ANY, True)
def test_create_lun_raises_on_failure(self):
self.connection.invoke_successfully = mock.Mock(
side_effect=netapp_api.NaApiError)
self.assertRaises(netapp_api.NaApiError,
self.client.create_lun,
self.fake_volume,
self.fake_lun,
self.fake_size,
self.fake_metadata)
def test_destroy_lun(self):
path = '/vol/%s/%s' % (self.fake_volume, self.fake_lun)
with mock.patch.object(netapp_api.NaElement,
'create_node_with_children',
) as mock_create_node:
self.client.destroy_lun(path)
mock_create_node.assert_called_once_with(
'lun-destroy',
**{'path': path})
self.connection.invoke_successfully.assert_called_once_with(
mock.ANY, True)
def test_destroy_lun_force(self):
path = '/vol/%s/%s' % (self.fake_volume, self.fake_lun)
mock_request = mock.Mock()
with mock.patch.object(netapp_api.NaElement,
'create_node_with_children',
return_value=mock_request
) as mock_create_node:
self.client.destroy_lun(path)
mock_create_node.assert_called_once_with('lun-destroy',
**{'path': path})
mock_request.add_new_child.assert_called_once_with('force', 'true')
self.connection.invoke_successfully.assert_called_once_with(
mock.ANY, True)
def test_map_lun(self):
path = '/vol/%s/%s' % (self.fake_volume, self.fake_lun)
igroup = 'igroup'
expected_lun_id = 'my_lun'
mock_response = mock.Mock()
self.connection.invoke_successfully.return_value = mock_response
mock_response.get_child_content.return_value = expected_lun_id
with mock.patch.object(netapp_api.NaElement,
'create_node_with_children',
) as mock_create_node:
actual_lun_id = self.client.map_lun(path, igroup)
mock_create_node.assert_called_once_with(
'lun-map',
**{'path': path, 'initiator-group': igroup})
self.connection.invoke_successfully.assert_called_once_with(
mock.ANY, True)
self.assertEqual(expected_lun_id, actual_lun_id)
def test_map_lun_with_lun_id(self):
path = '/vol/%s/%s' % (self.fake_volume, self.fake_lun)
igroup = 'igroup'
expected_lun_id = 'my_lun'
mock_response = mock.Mock()
self.connection.invoke_successfully.return_value = mock_response
mock_response.get_child_content.return_value = expected_lun_id
with mock.patch.object(netapp_api.NaElement,
'create_node_with_children',
) as mock_create_node:
actual_lun_id = self.client.map_lun(path, igroup,
lun_id=expected_lun_id)
mock_create_node.assert_called_once_with(
'lun-map',
**{'path': path, 'initiator-group': igroup})
self.connection.invoke_successfully.assert_called_once_with(
mock.ANY, True)
self.assertEqual(expected_lun_id, actual_lun_id)
def test_map_lun_with_api_error(self):
path = '/vol/%s/%s' % (self.fake_volume, self.fake_lun)
igroup = 'igroup'
self.connection.invoke_successfully.side_effect =\
netapp_api.NaApiError()
with mock.patch.object(netapp_api.NaElement,
'create_node_with_children',
) as mock_create_node:
self.assertRaises(netapp_api.NaApiError, self.client.map_lun,
path, igroup)
mock_create_node.assert_called_once_with(
'lun-map',
**{'path': path, 'initiator-group': igroup})
self.connection.invoke_successfully.assert_called_once_with(
mock.ANY, True)
def test_unmap_lun(self):
path = '/vol/%s/%s' % (self.fake_volume, self.fake_lun)
igroup = 'igroup'
mock_response = mock.Mock()
self.connection.invoke_successfully.return_value = mock_response
with mock.patch.object(netapp_api.NaElement,
'create_node_with_children',
) as mock_create_node:
self.client.unmap_lun(path, igroup)
mock_create_node.assert_called_once_with(
'lun-unmap',
**{'path': path, 'initiator-group': igroup})
self.connection.invoke_successfully.assert_called_once_with(
mock.ANY, True)
def test_unmap_lun_with_api_error(self):
path = '/vol/%s/%s' % (self.fake_volume, self.fake_lun)
igroup = 'igroup'
self.connection.invoke_successfully.side_effect =\
netapp_api.NaApiError()
with mock.patch.object(netapp_api.NaElement,
'create_node_with_children',
) as mock_create_node:
self.assertRaises(netapp_api.NaApiError, self.client.unmap_lun,
path, igroup)
mock_create_node.assert_called_once_with(
'lun-unmap',
**{'path': path, 'initiator-group': igroup})
def test_unmap_lun_already_unmapped(self):
path = '/vol/%s/%s' % (self.fake_volume, self.fake_lun)
igroup = 'igroup'
EINVALIDINPUTERROR = '13115'
self.connection.invoke_successfully.side_effect =\
netapp_api.NaApiError(code=EINVALIDINPUTERROR)
with mock.patch.object(netapp_api.NaElement,
'create_node_with_children',
) as mock_create_node:
self.client.unmap_lun(path, igroup)
mock_create_node.assert_called_once_with(
'lun-unmap',
**{'path': path, 'initiator-group': igroup})
self.connection.invoke_successfully.assert_called_once_with(
mock.ANY, True)
def test_unmap_lun_lun_not_mapped_in_group(self):
path = '/vol/%s/%s' % (self.fake_volume, self.fake_lun)
igroup = 'igroup'
EVDISK_ERROR_NO_SUCH_LUNMAP = '9016'
self.connection.invoke_successfully.side_effect =\
netapp_api.NaApiError(code=EVDISK_ERROR_NO_SUCH_LUNMAP)
with mock.patch.object(netapp_api.NaElement,
'create_node_with_children',
) as mock_create_node:
self.client.unmap_lun(path, igroup)
mock_create_node.assert_called_once_with(
'lun-unmap',
**{'path': path, 'initiator-group': igroup})
self.connection.invoke_successfully.assert_called_once_with(
mock.ANY, True)
def test_create_igroup(self):
igroup = 'igroup'
with mock.patch.object(netapp_api.NaElement,
'create_node_with_children',
) as mock_create_node:
self.client.create_igroup(igroup)
mock_create_node.assert_called_once_with(
'igroup-create',
**{'initiator-group-name': igroup,
'initiator-group-type': 'iscsi',
'os-type': 'default'})
self.connection.invoke_successfully.assert_called_once_with(
mock.ANY, True)
def test_add_igroup_initiator(self):
igroup = 'igroup'
initiator = 'initiator'
with mock.patch.object(netapp_api.NaElement,
'create_node_with_children',
) as mock_create_node:
self.client.add_igroup_initiator(igroup, initiator)
mock_create_node.assert_called_once_with(
'igroup-add',
**{'initiator-group-name': igroup,
'initiator': initiator})
self.connection.invoke_successfully.assert_called_once_with(
mock.ANY, True)
def test_do_direct_resize(self):
path = '/vol/%s/%s' % (self.fake_volume, self.fake_lun)
new_size = 1024
mock_request = mock.Mock()
with mock.patch.object(netapp_api.NaElement,
'create_node_with_children',
return_value=mock_request
) as mock_create_node:
self.client.do_direct_resize(path, new_size)
mock_create_node.assert_called_once_with(
'lun-resize',
**{'path': path,
'size': new_size})
mock_request.add_new_child.assert_called_once_with(
'force', 'true')
self.connection.invoke_successfully.assert_called_once_with(
mock.ANY, True)
def test_do_direct_resize_not_forced(self):
path = '/vol/%s/%s' % (self.fake_volume, self.fake_lun)
new_size = 1024
mock_request = mock.Mock()
with mock.patch.object(netapp_api.NaElement,
'create_node_with_children',
return_value=mock_request
) as mock_create_node:
self.client.do_direct_resize(path, new_size, force=False)
mock_create_node.assert_called_once_with(
'lun-resize',
**{'path': path,
'size': new_size})
self.assertFalse(mock_request.add_new_child.called)
self.connection.invoke_successfully.assert_called_once_with(
mock.ANY, True)
def test_get_lun_geometry(self):
expected_keys = set(['size', 'bytes_per_sector', 'sectors_per_track',
'tracks_per_cylinder', 'cylinders', 'max_resize'])
path = '/vol/%s/%s' % (self.fake_volume, self.fake_lun)
mock_response = mock.Mock()
self.connection.invoke_successfully.return_value = mock_response
geometry = self.client.get_lun_geometry(path)
self.assertEqual(expected_keys, set(geometry.keys()))
def test_get_lun_geometry_with_api_error(self):
path = '/vol/%s/%s' % (self.fake_volume, self.fake_lun)
self.connection.invoke_successfully.side_effect =\
netapp_api.NaApiError()
geometry = self.client.get_lun_geometry(path)
self.assertEqual({}, geometry)
def test_get_volume_options(self):
fake_response = netapp_api.NaElement('volume')
fake_response.add_node_with_children('options', test='blah')
self.connection.invoke_successfully.return_value = fake_response
options = self.client.get_volume_options('volume')
self.assertEqual(1, len(options))
def test_get_volume_options_with_no_options(self):
fake_response = netapp_api.NaElement('options')
self.connection.invoke_successfully.return_value = fake_response
options = self.client.get_volume_options('volume')
self.assertEqual([], options)
def test_move_lun(self):
path = '/vol/%s/%s' % (self.fake_volume, self.fake_lun)
new_path = '/vol/%s/%s' % (self.fake_volume, self.fake_lun)
fake_response = netapp_api.NaElement('options')
self.connection.invoke_successfully.return_value = fake_response
self.client.move_lun(path, new_path)
self.connection.invoke_successfully.assert_called_once_with(
mock.ANY, True)
def test_get_igroup_by_initiators(self):
self.assertRaises(NotImplementedError,
self.client.get_igroup_by_initiators,
fake.FC_FORMATTED_INITIATORS)
def test_get_fc_target_wwpns(self):
self.assertRaises(NotImplementedError,
self.client.get_fc_target_wwpns)
def test_has_luns_mapped_to_initiator(self):
initiator = fake.FC_FORMATTED_INITIATORS[0]
version_response = netapp_api.NaElement(
etree.XML("""
<results status="passed">
<lun-maps>
<lun-map-info>
<path>/vol/cinder1/volume-9be956b3-9854-4a5c-a7f5-13a16da52c9c</path>
<initiator-group>openstack-4b57a80b-ebca-4d27-bd63-48ac5408d08b
</initiator-group>
<lun-id>0</lun-id>
</lun-map-info>
<lun-map-info>
<path>/vol/cinder1/volume-ac90433c-a560-41b3-9357-7f3f80071eb5</path>
<initiator-group>openstack-4b57a80b-ebca-4d27-bd63-48ac5408d08b
</initiator-group>
<lun-id>1</lun-id>
</lun-map-info>
</lun-maps>
</results>"""))
self.connection.invoke_successfully.return_value = version_response
self.assertTrue(self.client._has_luns_mapped_to_initiator(initiator))
def test_has_luns_mapped_to_initiator_not_mapped(self):
initiator = fake.FC_FORMATTED_INITIATORS[0]
version_response = netapp_api.NaElement(
etree.XML("""
<results status="passed">
<lun-maps />
</results>"""))
self.connection.invoke_successfully.return_value = version_response
self.assertFalse(self.client._has_luns_mapped_to_initiator(initiator))
@mock.patch.object(client_base.Client, '_has_luns_mapped_to_initiator')
def test_has_luns_mapped_to_initiators(self,
mock_has_luns_mapped_to_initiator):
initiators = fake.FC_FORMATTED_INITIATORS
mock_has_luns_mapped_to_initiator.return_value = True
self.assertTrue(self.client.has_luns_mapped_to_initiators(initiators))
@mock.patch.object(client_base.Client, '_has_luns_mapped_to_initiator')
def test_has_luns_mapped_to_initiators_not_mapped(
self, mock_has_luns_mapped_to_initiator):
initiators = fake.FC_FORMATTED_INITIATORS
mock_has_luns_mapped_to_initiator.return_value = False
self.assertFalse(self.client.has_luns_mapped_to_initiators(initiators))
|
anaran/kuma
|
refs/heads/master
|
vendor/packages/translate/storage/test_directory.py
|
25
|
#!/usr/bin/env python
"""Tests for the directory module"""
import os
from translate.storage import directory
class TestDirectory(object):
"""a test class to run tests on a test Pootle Server"""
def setup_method(self, method):
"""sets up a test directory"""
print("setup_method called on", self.__class__.__name__)
self.testdir = "%s_testdir" % (self.__class__.__name__)
self.cleardir(self.testdir)
os.mkdir(self.testdir)
def teardown_method(self, method):
"""removes the attributes set up by setup_method"""
self.cleardir(self.testdir)
def cleardir(self, dirname):
"""removes the given directory"""
if os.path.exists(dirname):
for dirpath, subdirs, filenames in os.walk(dirname, topdown=False):
for name in filenames:
os.remove(os.path.join(dirpath, name))
for name in subdirs:
os.rmdir(os.path.join(dirpath, name))
if os.path.exists(dirname):
os.rmdir(dirname)
assert not os.path.exists(dirname)
def touchfiles(self, dir, filenames, content=None):
for filename in filenames:
f = open(os.path.join(dir, filename), "w")
if content:
f.write(content)
f.close()
def mkdir(self, dir):
"""Makes a directory inside self.testdir."""
os.mkdir(os.path.join(self.testdir, dir))
def test_created(self):
"""test that the directory actually exists"""
print(self.testdir)
assert os.path.isdir(self.testdir)
def test_basic(self):
"""Tests basic functionality."""
files = ["a.po", "b.po", "c.po"]
files.sort()
self.touchfiles(self.testdir, files)
d = directory.Directory(self.testdir)
filenames = [name for dir, name in d.getfiles()]
filenames.sort()
assert filenames == files
def test_structure(self):
"""Tests a small directory structure."""
files = ["a.po", "b.po", "c.po"]
self.touchfiles(self.testdir, files)
self.mkdir("bla")
self.touchfiles(os.path.join(self.testdir, "bla"), files)
d = directory.Directory(self.testdir)
filenames = [name for dirname, name in d.getfiles()]
filenames.sort()
files = files * 2
files.sort()
assert filenames == files
def test_getunits(self):
"""Tests basic functionality."""
files = ["a.po", "b.po", "c.po"]
posource = '''msgid "bla"\nmsgstr "blabla"\n'''
self.touchfiles(self.testdir, files, posource)
d = directory.Directory(self.testdir)
for unit in d.getunits():
assert unit.target == "blabla"
assert len(d.getunits()) == 3
|
agentr13/python-phonenumbers
|
refs/heads/dev
|
python/phonenumbers/shortdata/region_DK.py
|
8
|
"""Auto-generated file, do not edit by hand. DK metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_DK = PhoneMetadata(id='DK', country_code=None, international_prefix=None,
general_desc=PhoneNumberDesc(national_number_pattern='1\\d{2}', possible_number_pattern='\\d{3}'),
toll_free=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
premium_rate=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
emergency=PhoneNumberDesc(national_number_pattern='112', possible_number_pattern='\\d{3}', example_number='112'),
short_code=PhoneNumberDesc(national_number_pattern='112', possible_number_pattern='\\d{3}', example_number='112'),
standard_rate=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
carrier_specific=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
short_data=True)
|
wxgeo/geophar
|
refs/heads/master
|
wxgeometrie/sympy/parsing/tests/test_latex.py
|
2
|
import os
import glob
import tempfile
import shutil
import difflib
from sympy.parsing.latex._build_latex_antlr import (
build_parser,
check_antlr_version,
dir_latex_antlr
)
from sympy.utilities.pytest import raises, skip, XFAIL
from sympy.external import import_module
from sympy import (
Symbol, Mul, Add, Eq, Abs, sin, asin, cos, Pow,
csc, sec, Limit, oo, Derivative, Integral, factorial,
sqrt, root, StrictLessThan, LessThan, StrictGreaterThan,
GreaterThan, Sum, Product, E, log, tan
)
from sympy.abc import x, y, z, a, b, c, f, t, k, n
antlr4 = import_module("antlr4")
# disable tests if antlr4-python*-runtime is not present
if not antlr4:
disabled = True
theta = Symbol('theta')
# shorthand definitions
def _Add(a, b):
return Add(a, b, evaluate=False)
def _Mul(a, b):
return Mul(a, b, evaluate=False)
def _Pow(a, b):
return Pow(a, b, evaluate=False)
def _Abs(a):
return Abs(a, evaluate=False)
def _factorial(a):
return factorial(a, evaluate=False)
def _log(a, b):
return log(a, b, evaluate=False)
# These LaTeX strings should parse to the corresponding SymPy expression
GOOD_PAIRS = [
("0", 0),
("1", 1),
("-3.14", _Mul(-1, 3.14)),
("(-7.13)(1.5)", _Mul(_Mul(-1, 7.13), 1.5)),
("x", x),
("2x", 2*x),
("x^2", x**2),
("x^{3 + 1}", x**_Add(3, 1)),
("-c", -c),
("a \\cdot b", a * b),
("a / b", a / b),
("a \\div b", a / b),
("a + b", a + b),
("a + b - a", _Add(a+b, -a)),
("a^2 + b^2 = c^2", Eq(a**2 + b**2, c**2)),
("\\sin \\theta", sin(theta)),
("\\sin(\\theta)", sin(theta)),
("\\sin^{-1} a", asin(a)),
("\\sin a \\cos b", _Mul(sin(a), cos(b))),
("\\sin \\cos \\theta", sin(cos(theta))),
("\\sin(\\cos \\theta)", sin(cos(theta))),
("\\frac{a}{b}", a / b),
("\\frac{a + b}{c}", _Mul(a + b, _Pow(c, -1))),
("\\frac{7}{3}", _Mul(7, _Pow(3, -1))),
("(\\csc x)(\\sec y)", csc(x)*sec(y)),
("\\lim_{x \\to 3} a", Limit(a, x, 3)),
("\\lim_{x \\rightarrow 3} a", Limit(a, x, 3)),
("\\lim_{x \\Rightarrow 3} a", Limit(a, x, 3)),
("\\lim_{x \\longrightarrow 3} a", Limit(a, x, 3)),
("\\lim_{x \\Longrightarrow 3} a", Limit(a, x, 3)),
("\\lim_{x \\to 3^{+}} a", Limit(a, x, 3, dir='+')),
("\\lim_{x \\to 3^{-}} a", Limit(a, x, 3, dir='-')),
("\\infty", oo),
("\\lim_{x \\to \\infty} \\frac{1}{x}",
Limit(_Mul(1, _Pow(x, -1)), x, oo)),
("\\frac{d}{dx} x", Derivative(x, x)),
("\\frac{d}{dt} x", Derivative(x, t)),
("f(x)", f(x)),
("f(x, y)", f(x, y)),
("f(x, y, z)", f(x, y, z)),
("\\frac{d f(x)}{dx}", Derivative(f(x), x)),
("\\frac{d\\theta(x)}{dx}", Derivative(theta(x), x)),
("|x|", _Abs(x)),
("||x||", _Abs(Abs(x))),
("|x||y|", _Abs(x)*_Abs(y)),
("||x||y||", _Abs(_Abs(x)*_Abs(y))),
("\\pi^{|xy|}", Symbol('pi')**_Abs(x*y)),
("\\int x dx", Integral(x, x)),
("\\int x d\\theta", Integral(x, theta)),
("\\int (x^2 - y)dx", Integral(x**2 - y, x)),
("\\int x + a dx", Integral(_Add(x, a), x)),
("\\int da", Integral(1, a)),
("\\int_0^7 dx", Integral(1, (x, 0, 7))),
("\\int_a^b x dx", Integral(x, (x, a, b))),
("\\int^b_a x dx", Integral(x, (x, a, b))),
("\\int_{a}^b x dx", Integral(x, (x, a, b))),
("\\int^{b}_a x dx", Integral(x, (x, a, b))),
("\\int_{a}^{b} x dx", Integral(x, (x, a, b))),
("\\int^{b}_{a} x dx", Integral(x, (x, a, b))),
("\\int_{f(a)}^{f(b)} f(z) dz", Integral(f(z), (z, f(a), f(b)))),
("\\int (x+a)", Integral(_Add(x, a), x)),
("\\int a + b + c dx", Integral(_Add(_Add(a, b), c), x)),
("\\int \\frac{dz}{z}", Integral(Pow(z, -1), z)),
("\\int \\frac{3 dz}{z}", Integral(3*Pow(z, -1), z)),
("\\int \\frac{1}{x} dx", Integral(Pow(x, -1), x)),
("\\int \\frac{1}{a} + \\frac{1}{b} dx",
Integral(_Add(_Pow(a, -1), Pow(b, -1)), x)),
("\\int \\frac{3 \\cdot d\\theta}{\\theta}",
Integral(3*_Pow(theta, -1), theta)),
("\\int \\frac{1}{x} + 1 dx", Integral(_Add(_Pow(x, -1), 1), x)),
("x_0", Symbol('x_{0}')),
("x_{1}", Symbol('x_{1}')),
("x_a", Symbol('x_{a}')),
("x_{b}", Symbol('x_{b}')),
("h_\\theta", Symbol('h_{theta}')),
("h_{\\theta}", Symbol('h_{theta}')),
("h_{\\theta}(x_0, x_1)",
Symbol('h_{theta}')(Symbol('x_{0}'), Symbol('x_{1}'))),
("x!", _factorial(x)),
("100!", _factorial(100)),
("\\theta!", _factorial(theta)),
("(x + 1)!", _factorial(_Add(x, 1))),
("(x!)!", _factorial(_factorial(x))),
("x!!!", _factorial(_factorial(_factorial(x)))),
("5!7!", _Mul(_factorial(5), _factorial(7))),
("\\sqrt{x}", sqrt(x)),
("\\sqrt{x + b}", sqrt(_Add(x, b))),
("\\sqrt[3]{\\sin x}", root(sin(x), 3)),
("\\sqrt[y]{\\sin x}", root(sin(x), y)),
("\\sqrt[\\theta]{\\sin x}", root(sin(x), theta)),
("x < y", StrictLessThan(x, y)),
("x \\leq y", LessThan(x, y)),
("x > y", StrictGreaterThan(x, y)),
("x \\geq y", GreaterThan(x, y)),
("\\mathit{x}", Symbol('x')),
("\\mathit{test}", Symbol('test')),
("\\mathit{TEST}", Symbol('TEST')),
("\\mathit{HELLO world}", Symbol('HELLO world')),
("\\sum_{k = 1}^{3} c", Sum(c, (k, 1, 3))),
("\\sum_{k = 1}^3 c", Sum(c, (k, 1, 3))),
("\\sum^{3}_{k = 1} c", Sum(c, (k, 1, 3))),
("\\sum^3_{k = 1} c", Sum(c, (k, 1, 3))),
("\\sum_{k = 1}^{10} k^2", Sum(k**2, (k, 1, 10))),
("\\sum_{n = 0}^{\\infty} \\frac{1}{n!}",
Sum(_Pow(_factorial(n), -1), (n, 0, oo))),
("\\prod_{a = b}^{c} x", Product(x, (a, b, c))),
("\\prod_{a = b}^c x", Product(x, (a, b, c))),
("\\prod^{c}_{a = b} x", Product(x, (a, b, c))),
("\\prod^c_{a = b} x", Product(x, (a, b, c))),
("\\ln x", _log(x, E)),
("\\ln xy", _log(x*y, E)),
("\\log x", _log(x, 10)),
("\\log xy", _log(x*y, 10)),
("\\log_{2} x", _log(x, 2)),
("\\log_{a} x", _log(x, a)),
("\\log_{11} x", _log(x, 11)),
("\\log_{a^2} x", _log(x, _Pow(a, 2))),
("[x]", x),
("[a + b]", _Add(a, b)),
("\\frac{d}{dx} [ \\tan x ]", Derivative(tan(x), x))
]
def test_parseable():
from sympy.parsing.latex import parse_latex
for latex_str, sympy_expr in GOOD_PAIRS:
assert parse_latex(latex_str) == sympy_expr
# At the time of migration from latex2sympy, these should work but don't
FAILING_PAIRS = [
("\\log_2 x", _log(x, 2)),
("\\log_a x", _log(x, a)),
]
def test_failing_parseable():
from sympy.parsing.latex import parse_latex
for latex_str, sympy_expr in FAILING_PAIRS:
with raises(Exception):
assert parse_latex(latex_str) == sympy_expr
# These bad LaTeX strings should raise a LaTeXParsingError when parsed
BAD_STRINGS = [
"(",
")",
"\\frac{d}{dx}",
"(\\frac{d}{dx})"
"\\sqrt{}",
"\\sqrt",
"{",
"}",
"\\mathit{x + y}",
"\\mathit{21}",
"\\frac{2}{}",
"\\frac{}{2}",
"\\int",
"!",
"!0",
"_",
"^",
"|",
"||x|",
"()",
"((((((((((((((((()))))))))))))))))",
"-",
"\\frac{d}{dx} + \\frac{d}{dt}",
"f(x,,y)",
"f(x,y,",
"\\sin^x",
"\\cos^2",
"@",
"#",
"$",
"%",
"&",
"*",
"\\",
"~",
"\\frac{(2 + x}{1 - x)}"
]
def test_not_parseable():
from sympy.parsing.latex import parse_latex, LaTeXParsingError
for latex_str in BAD_STRINGS:
with raises(LaTeXParsingError):
parse_latex(latex_str)
# At the time of migration from latex2sympy, these should fail but don't
FAILING_BAD_STRINGS = [
"\\cos 1 \\cos",
"f(,",
"f()",
"a \\div \\div b",
"a \\cdot \\cdot b",
"a // b",
"a +",
"1.1.1",
"1 +",
"a / b /",
]
@XFAIL
def test_failing_not_parseable():
from sympy.parsing.latex import parse_latex, LaTeXParsingError
for latex_str in FAILING_BAD_STRINGS:
with raises(LaTeXParsingError):
parse_latex(latex_str)
def test_antlr_generation():
""" Does rebuilding the parser create the same content as
what is checked in?
"""
if not check_antlr_version():
return skip('antlr4 not available, skipping')
tmpdir = tempfile.mkdtemp()
try:
build_parser(tmpdir)
for filename in sorted(glob.glob(os.path.join(tmpdir, "*.*"))):
base = os.path.basename(filename)
with open(filename) as generated:
with open(os.path.join(dir_latex_antlr, base)) as checked_in:
diff = difflib.context_diff(
checked_in.readlines(),
generated.readlines()
)
assert list(diff) == [], "{} not the same".format(base)
finally:
shutil.rmtree(tmpdir)
|
Tranzystorek/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/tools/pytest/doc/en/example/multipython.py
|
171
|
"""
Module containing parametrized tests that exercise cross-python
serialization via the pickle module.
"""
import py
import pytest
import _pytest._code
pythonlist = ['python2.6', 'python2.7', 'python3.3']
@pytest.fixture(params=pythonlist)
def python1(request, tmpdir):
picklefile = tmpdir.join("data.pickle")
return Python(request.param, picklefile)
@pytest.fixture(params=pythonlist)
def python2(request, python1):
return Python(request.param, python1.picklefile)
class Python:
def __init__(self, version, picklefile):
self.pythonpath = py.path.local.sysfind(version)
if not self.pythonpath:
pytest.skip("%r not found" %(version,))
self.picklefile = picklefile
def dumps(self, obj):
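# Write a small helper script that pickles ``obj`` with protocol 2,
# then run it under the target interpreter so the pickle is really
# produced by that Python version.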
dumpfile = self.picklefile.dirpath("dump.py")
dumpfile.write(_pytest._code.Source("""
import pickle
f = open(%r, 'wb')
pickle.dump(%r, f, protocol=2)
f.close()
""" % (str(self.picklefile), obj)))
py.process.cmdexec("%s %s" %(self.pythonpath, dumpfile))
def load_and_is_true(self, expression):
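# Write a helper script that unpickles the object as ``obj`` and
# evaluates ``expression``; a falsy result exits with status 1, which
# cmdexec reports as a failure.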
loadfile = self.picklefile.dirpath("load.py")
loadfile.write(_pytest._code.Source("""
import pickle
f = open(%r, 'rb')
obj = pickle.load(f)
f.close()
res = eval(%r)
if not res:
raise SystemExit(1)
""" % (str(self.picklefile), expression)))
print (loadfile)
py.process.cmdexec("%s %s" %(self.pythonpath, loadfile))
@pytest.mark.parametrize("obj", [42, {}, {1:3},])
def test_basic_objects(python1, python2, obj):
python1.dumps(obj)
python2.load_and_is_true("obj == %s" % obj)
|
obi-two/Rebelion
|
refs/heads/master
|
data/scripts/templates/object/static/creature/shared_yavin4_skreeg.py
|
2
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Static()
result.template = "object/static/creature/shared_yavin4_skreeg.iff"
result.attribute_template_id = -1
result.stfName("obj_n","unknown_object")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
fabian4/ceilometer
|
refs/heads/master
|
ceilometer/storage/impl_db2.py
|
7
|
# Copyright 2012 New Dream Network, LLC (DreamHost)
# Copyright 2013 eNovance
# Copyright 2013 IBM Corp
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""DB2 storage backend
"""
from __future__ import division
import copy
import datetime
import itertools
import sys
import bson.code
import bson.objectid
from oslo_config import cfg
from oslo_log import log
from oslo_utils import timeutils
import pymongo
import six
import ceilometer
from ceilometer import storage
from ceilometer.storage import base
from ceilometer.storage import models
from ceilometer.storage.mongo import utils as pymongo_utils
from ceilometer.storage import pymongo_base
from ceilometer import utils
LOG = log.getLogger(__name__)
AVAILABLE_CAPABILITIES = {
'resources': {'query': {'simple': True,
'metadata': True}},
'statistics': {'groupby': True,
'query': {'simple': True,
'metadata': True},
'aggregation': {'standard': True}}
}
class Connection(pymongo_base.Connection):
"""The db2 storage for Ceilometer
Collections::
- meter
- the raw incoming data
- resource
- the metadata for resources
- { _id: uuid of resource,
metadata: metadata dictionaries
user_id: uuid
project_id: uuid
meter: [ array of {counter_name: string, counter_type: string,
counter_unit: string} ]
}
"""
CAPABILITIES = utils.update_nested(pymongo_base.Connection.CAPABILITIES,
AVAILABLE_CAPABILITIES)
CONNECTION_POOL = pymongo_utils.ConnectionPool()
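# GROUP and PROJECT are $group/$project stage documents for MongoDB's
# aggregate(): GROUP accumulates per-counter statistics and PROJECT
# reshapes the output, deriving the average from sum and count.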
GROUP = {'_id': '$counter_name',
'unit': {'$min': '$counter_unit'},
'min': {'$min': '$counter_volume'},
'max': {'$max': '$counter_volume'},
'sum': {'$sum': '$counter_volume'},
'count': {'$sum': 1},
'duration_start': {'$min': '$timestamp'},
'duration_end': {'$max': '$timestamp'},
}
PROJECT = {'_id': 0, 'unit': 1,
'min': 1, 'max': 1, 'sum': 1, 'count': 1,
'avg': {'$divide': ['$sum', '$count']},
'duration_start': 1,
'duration_end': 1,
}
SORT_OPERATION_MAP = {'desc': pymongo.DESCENDING, 'asc': pymongo.ASCENDING}
SECONDS_IN_A_DAY = 86400
def __init__(self, url):
# Even though we are connecting to DB2 we go through pymongo, so the
# 'db2:' scheme (used only to distinguish this driver from the
# mongodb driver) must be replaced before pymongo parses the URL,
# otherwise it raises an exception on the unknown scheme.
url = url.replace('db2:', 'mongodb:', 1)
self.conn = self.CONNECTION_POOL.connect(url)
# aggregate() requires MongoDB 2.2.  Since the tests run against
# MongoDB rather than DB2, the check below keeps them from attempting
# aggregation on an older MongoDB.  A real DB2 backend does not
# report 'versionArray' in server_info, so no exception is raised
# there.
server_info = self.conn.server_info()
if server_info.get('sysInfo'):
self._using_mongodb = True
else:
self._using_mongodb = False
if self._using_mongodb and server_info.get('versionArray') < [2, 2]:
raise storage.StorageBadVersion("Need at least MongoDB 2.2")
connection_options = pymongo.uri_parser.parse_uri(url)
self.db = getattr(self.conn, connection_options['database'])
if connection_options.get('username'):
self.db.authenticate(connection_options['username'],
connection_options['password'])
self.upgrade()
@classmethod
def _build_sort_instructions(cls, sort_keys=None, sort_dir='desc'):
"""Returns a sort_instruction.
Sort instructions are used in the query to determine what attributes
to sort on and what direction to use.
:param q: The query dict passed in.
:param sort_keys: array of attributes by which results be sorted.
:param sort_dir: direction in which results be sorted (asc, desc).
:return: sort parameters
"""
sort_keys = sort_keys or []
sort_instructions = []
_sort_dir = cls.SORT_OPERATION_MAP.get(
sort_dir, cls.SORT_OPERATION_MAP['desc'])
for _sort_key in sort_keys:
_instruction = (_sort_key, _sort_dir)
sort_instructions.append(_instruction)
return sort_instructions
def _generate_random_str(self, str_len):
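# Build a string of ``str_len`` characters (when ``str_len`` is at
# least the length of an ObjectId) by repeating a fresh ObjectId
# string and padding the remainder with 'x'.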
init_str = str(bson.objectid.ObjectId())
objectid_len = len(init_str)
if str_len >= objectid_len:
init_str = (init_str * int(str_len/objectid_len) +
'x' * int(str_len % objectid_len))
return init_str
def upgrade(self, version=None):
# create collection if not present
if 'resource' not in self.db.conn.collection_names():
self.db.conn.create_collection('resource')
if 'meter' not in self.db.conn.collection_names():
self.db.conn.create_collection('meter')
# Establish indexes
#
# We need variations for user_id vs. project_id because of the
# way the indexes are stored in b-trees. The user_id and
# project_id values are usually mutually exclusive in the
# queries, so the database won't take advantage of an index
# including both.
if self.db.resource.index_information() == {}:
# Insert a long resource id up front to work around a DB2 NoSQL
# limitation: compute-node resources use ids of the form
# '<hostname>_<nodename>', and DB2 sizes the column as VARCHAR(70)
# when the first resource id it sees is shorter than 70 characters,
# but as VARCHAR(n) when that id has n (n > 70) characters.  Users
# can tune 'db2nosql_resource_id_maxlen' (default 512) for their
# environment.
resource_id = self._generate_random_str(
cfg.CONF.database.db2nosql_resource_id_maxlen)
self.db.resource.insert_one({'_id': resource_id,
'no_key': resource_id})
meter_id = str(bson.objectid.ObjectId())
timestamp = timeutils.utcnow()
self.db.meter.insert_one({'_id': meter_id,
'no_key': meter_id,
'timestamp': timestamp})
self.db.resource.create_index([
('user_id', pymongo.ASCENDING),
('project_id', pymongo.ASCENDING),
('source', pymongo.ASCENDING)], name='resource_idx')
self.db.meter.create_index([
('resource_id', pymongo.ASCENDING),
('user_id', pymongo.ASCENDING),
('project_id', pymongo.ASCENDING),
('counter_name', pymongo.ASCENDING),
('timestamp', pymongo.ASCENDING),
('source', pymongo.ASCENDING)], name='meter_idx')
self.db.meter.create_index([('timestamp',
pymongo.DESCENDING)],
name='timestamp_idx')
self.db.resource.remove({'_id': resource_id})
self.db.meter.remove({'_id': meter_id})
def clear(self):
# db2 does not support drop_database, remove all collections
for col in ['resource', 'meter']:
self.db[col].drop()
# drop_database is a no-op on DB2 since it is not implemented there.
# Calling it still matters: the test runs use MongoDB on Jenkins, and
# this removes the empty databases they create.
self.conn.drop_database(self.db.name)
self.conn.close()
def record_metering_data(self, data):
"""Write the data to the backend storage system.
:param data: a dictionary such as returned by
ceilometer.meter.meter_message_from_counter
"""
# Record the updated resource metadata
data = copy.deepcopy(data)
data['resource_metadata'] = pymongo_utils.improve_keys(
data.pop('resource_metadata'))
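# Upsert the resource document: refresh its ownership, source and
# metadata, and add this meter's definition to the ``meter`` array if
# it is not already present.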
self.db.resource.update_one(
{'_id': data['resource_id']},
{'$set': {'project_id': data['project_id'],
'user_id': data['user_id'] or 'null',
'metadata': data['resource_metadata'],
'source': data['source'],
},
'$addToSet': {'meter': {'counter_name': data['counter_name'],
'counter_type': data['counter_type'],
'counter_unit': data['counter_unit'],
},
},
},
upsert=True,
)
# Record the raw data for the meter. Use a copy so we do not
# modify a data structure owned by our caller (the driver adds
# a new key '_id').
record = copy.copy(data)
record['recorded_at'] = timeutils.utcnow()
# Make sure the record has an _id field, which DB2 won't add
# automatically.
if record.get('_id') is None:
record['_id'] = str(bson.objectid.ObjectId())
self.db.meter.insert_one(record)
def get_resources(self, user=None, project=None, source=None,
start_timestamp=None, start_timestamp_op=None,
end_timestamp=None, end_timestamp_op=None,
metaquery=None, resource=None, limit=None):
"""Return an iterable of models.Resource instances
:param user: Optional ID for user that owns the resource.
:param project: Optional ID for project that owns the resource.
:param source: Optional source filter.
:param start_timestamp: Optional modified timestamp start range.
:param start_timestamp_op: Optional start time operator, like gt, ge.
:param end_timestamp: Optional modified timestamp end range.
:param end_timestamp_op: Optional end time operator, like lt, le.
:param metaquery: Optional dict with metadata to match on.
:param resource: Optional resource filter.
:param limit: Maximum number of results to return.
"""
if limit == 0:
return
metaquery = pymongo_utils.improve_keys(metaquery, metaquery=True) or {}
q = {}
if user is not None:
q['user_id'] = user
if project is not None:
q['project_id'] = project
if source is not None:
q['source'] = source
if resource is not None:
q['resource_id'] = resource
# Add resource_ prefix so it matches the field in the db
q.update(dict(('resource_' + k, v)
for (k, v) in six.iteritems(metaquery)))
if start_timestamp or end_timestamp:
# Look for resources matching the above criteria and with
# samples in the time range we care about, then change the
# resource query to return just those resources by id.
ts_range = pymongo_utils.make_timestamp_range(start_timestamp,
end_timestamp,
start_timestamp_op,
end_timestamp_op)
if ts_range:
q['timestamp'] = ts_range
sort_keys = base._handle_sort_key('resource', 'timestamp')
sort_keys.insert(0, 'resource_id')
sort_instructions = self._build_sort_instructions(sort_keys=sort_keys,
sort_dir='desc')
resource_key = lambda x: x['resource_id']  # avoid shadowing the 'resource' argument
if limit is not None:
meters = self.db.meter.find(q, sort=sort_instructions,
limit=limit)
else:
meters = self.db.meter.find(q, sort=sort_instructions)
for resource_id, r_meters in itertools.groupby(meters, key=resource_key):
# Because we have to know first/last timestamp, and we need a full
# list of references to the resource's meters, we need a tuple
# here.
r_meters = tuple(r_meters)
latest_meter = r_meters[0]
last_ts = latest_meter['timestamp']
first_ts = r_meters[-1]['timestamp']
yield models.Resource(resource_id=latest_meter['resource_id'],
project_id=latest_meter['project_id'],
first_sample_timestamp=first_ts,
last_sample_timestamp=last_ts,
source=latest_meter['source'],
user_id=latest_meter['user_id'],
metadata=pymongo_utils.unquote_keys(
latest_meter['resource_metadata']))
def get_meter_statistics(self, sample_filter, period=None, groupby=None,
aggregate=None):
"""Return an iterable of models.Statistics instance.
Items are containing meter statistics described by the query
parameters. The filter must have a meter value set.
"""
if (groupby and
set(groupby) - set(['user_id', 'project_id',
'resource_id', 'source'])):
raise ceilometer.NotImplementedError(
"Unable to group by these fields")
if aggregate:
raise ceilometer.NotImplementedError(
'Selectable aggregates not implemented')
q = pymongo_utils.make_query_from_filter(sample_filter)
if period:
if sample_filter.start_timestamp:
period_start = sample_filter.start_timestamp
else:
period_start = self.db.meter.find(
limit=1, sort=[('timestamp',
pymongo.ASCENDING)])[0]['timestamp']
if groupby:
sort_keys = ['counter_name'] + groupby + ['timestamp']
else:
sort_keys = ['counter_name', 'timestamp']
sort_instructions = self._build_sort_instructions(sort_keys=sort_keys,
sort_dir='asc')
meters = self.db.meter.find(q, sort=sort_instructions)
def _group_key(meter):
# Build the key for the itertools.groupby call below: non-timestamp
# sort keys are copied through, and timestamps are bucketed into
# period indexes when a period is requested.
key = {}
for y in sort_keys:
if y == 'timestamp' and period:
key[y] = (timeutils.delta_seconds(period_start,
meter[y]) // period)
elif y != 'timestamp':
key[y] = meter[y]
return key
def _to_offset(periods):
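# Convert a period index into whole-day and leftover-second offsets
# suitable for datetime.timedelta, relative to period_start.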
return {'days': (periods * period) // self.SECONDS_IN_A_DAY,
'seconds': (periods * period) % self.SECONDS_IN_A_DAY}
for key, grouped_meters in itertools.groupby(meters, key=_group_key):
stat = models.Statistics(unit=None,
min=sys.maxsize, max=-sys.maxsize,
avg=0, sum=0, count=0,
period=0, period_start=0, period_end=0,
duration=0, duration_start=0,
duration_end=0, groupby=None)
for meter in grouped_meters:
stat.unit = meter.get('counter_unit', '')
m_volume = meter.get('counter_volume')
if stat.min > m_volume:
stat.min = m_volume
if stat.max < m_volume:
stat.max = m_volume
stat.sum += m_volume
stat.count += 1
if stat.duration_start == 0:
stat.duration_start = meter['timestamp']
stat.duration_end = meter['timestamp']
if groupby and not stat.groupby:
stat.groupby = {}
for group_key in groupby:
stat.groupby[group_key] = meter[group_key]
stat.duration = timeutils.delta_seconds(stat.duration_start,
stat.duration_end)
stat.avg = stat.sum / stat.count
if period:
stat.period = period
periods = key.get('timestamp')
stat.period_start = (period_start +
datetime.
timedelta(**(_to_offset(periods))))
stat.period_end = (period_start +
datetime.
timedelta(**(_to_offset(periods + 1))))
else:
stat.period_start = stat.duration_start
stat.period_end = stat.duration_end
yield stat
|
bravominski/PennApps2015-HeartMates
|
refs/heads/master
|
venv/lib/python2.7/site-packages/pip/_vendor/html5lib/__init__.py
|
196
|
"""
HTML parsing library based on the WHATWG "HTML5"
specification. The parser is designed to be compatible with existing
HTML found in the wild and implements well-defined error recovery that
is largely compatible with modern desktop web browsers.
Example usage:
import html5lib
f = open("my_document.html")
tree = html5lib.parse(f)
"""
from __future__ import absolute_import, division, unicode_literals
from .html5parser import HTMLParser, parse, parseFragment
from .treebuilders import getTreeBuilder
from .treewalkers import getTreeWalker
from .serializer import serialize
__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder",
"getTreeWalker", "serialize"]
__version__ = "1.0b3"
|
Danisan/odoo-1
|
refs/heads/8.0
|
addons/l10n_hn/__init__.py
|
411
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2009-2010 Salvatore J. Trimarchi <salvatore@trimarchi.co.cc>
# (http://salvatoreweb.co.cc)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
jitendra29/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/workers/baseurl/beta/worker.py
|
241
|
def main(request, response):
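# Respond with a 302 pointing at the sibling 'gamma' worker script;
# the postMessage body is only observed if a client fails to follow
# the redirect.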
return (302, "Moved"), [("Location", "../gamma/worker.js")], "postMessage('executed redirecting script');"
|