| repo_name | ref | path | copies | content |
|---|---|---|---|---|
| Jiangshangmin/mpld3 | refs/heads/master | mpld3/utils.py | 16 |
"""
mpld3 Utilities
===============
Utility routines for the mpld3 package
"""
import os
import re
import shutil
import warnings
from functools import wraps
from . import urls
# Make sure that DeprecationWarning gets printed
warnings.simplefilter("always", DeprecationWarning)
def html_id_ok(objid, html5=False):
"""Check whether objid is valid as an HTML id attribute.
If html5 == True, then use the more liberal html5 rules.
"""
    if html5:
        return not re.search(r'\s', objid)
    else:
        return bool(re.match(r"^[a-zA-Z][a-zA-Z0-9\-\.\:\_]*$", objid))
def get_id(obj, suffix="", prefix="el", warn_on_invalid=True):
"""Get a unique id for the object"""
if not suffix:
suffix = ""
if not prefix:
prefix = ""
objid = prefix + str(os.getpid()) + str(id(obj)) + suffix
if warn_on_invalid and not html_id_ok(objid):
        warnings.warn('"{0}" is not a valid html ID. This may cause '
                      'problems'.format(objid))
return objid
def deprecated(func, old_name, new_name):
"""Decorator to mark functions as deprecated."""
@wraps(func)
def new_func(*args, **kwargs):
warnings.warn(("{0} is deprecated and will be removed. "
"Use {1} instead".format(old_name, new_name)),
category=DeprecationWarning)
return func(*args, **kwargs)
new_func.__doc__ = ("*%s is deprecated: use %s instead*\n\n "
% (old_name, new_name)) + new_func.__doc__
return new_func
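# Usage sketch (hypothetical names): alias an old entry point so calls still
# work but emit a DeprecationWarning pointing at its replacement, e.g.
#   show_d3 = deprecated(show, "mpld3.show_d3", "mpld3.show")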
def write_ipynb_local_js(location=None, d3_src=None, mpld3_src=None):
"""
Write the mpld3 and d3 javascript libraries to the given file location.
This utility is used by the IPython notebook tools to enable easy use
of mpld3 with no web connection.
Parameters
----------
    location : string (optional)
the directory in which the d3 and mpld3 javascript libraries will be
written. If not specified, the IPython nbextensions directory will be
used. If IPython doesn't support nbextensions (< 2.0),
the current working directory will be used.
d3_src : string (optional)
the source location of the d3 library. If not specified, the standard
path in mpld3.urls.D3_LOCAL will be used.
mpld3_src : string (optional)
the source location of the mpld3 library. If not specified, the
standard path in mpld3.urls.MPLD3_LOCAL will be used.
Returns
-------
d3_url, mpld3_url : string
The URLs to be used for loading these js files.
"""
if location is None:
try:
from IPython.html import install_nbextension
except ImportError:
location = os.getcwd()
nbextension = False
else:
nbextension = True
else:
nbextension = False
if d3_src is None:
d3_src = urls.D3_LOCAL
if mpld3_src is None:
mpld3_src = urls.MPLD3_LOCAL
d3js = os.path.basename(d3_src)
mpld3js = os.path.basename(mpld3_src)
if not os.path.exists(d3_src):
raise ValueError("d3 src not found at '{0}'".format(d3_src))
if not os.path.exists(mpld3_src):
raise ValueError("mpld3 src not found at '{0}'".format(mpld3_src))
if nbextension:
# IPython 2.0+.
# This will not work if a url prefix is added
prefix = '/nbextensions/'
try:
install_nbextension([d3_src, mpld3_src])
except IOError:
# files may be read only. We'll try deleting them and re-installing
from IPython.utils.path import get_ipython_dir
nbext = os.path.join(get_ipython_dir(), "nbextensions")
for src in [d3_src, mpld3_src]:
dest = os.path.join(nbext, os.path.basename(src))
if os.path.exists(dest):
os.remove(dest)
install_nbextension([d3_src, mpld3_src])
else:
# IPython < 2.0 or explicit path.
# This won't work if users have changed the kernel directory.
prefix = '/files/'
d3_dest = os.path.join(location, d3js)
mpld3_dest = os.path.join(location, mpld3js)
for src, dest in [(d3_src, d3_dest), (mpld3_src, mpld3_dest)]:
try:
shutil.copyfile(src, dest)
except IOError:
# file may be read only. We'll try deleting it first
if os.path.exists(dest):
os.remove(dest)
shutil.copyfile(src, dest)
return prefix + d3js, prefix + mpld3js
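if __name__ == "__main__":
    # Minimal smoke test of the id helpers above (illustrative sketch, not
    # part of the public API).
    print(html_id_ok("el42"))        # True: letter-initial, safe characters
    print(html_id_ok("not valid"))   # False: whitespace is never allowed
    objid = get_id(object(), suffix="axes")
    print(objid, html_id_ok(objid))  # e.g. el1234140234259345296axes True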
|
| ning/collector | refs/heads/master | src/utils/py/opsAlert/constants.py | 305 |
#
# Autogenerated by Thrift
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
from thrift.Thrift import *
from ttypes import *
|
| kstaniek/csm | refs/heads/master | csmserver/horizon/package_lib.py | 1 |
# =============================================================================
#
# Copyright (c) 2013, Cisco Systems
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
# =============================================================================
import os
import re
PIE = "pie"
ACTIVE = "active"
INACTIVE = "inactive"
COMMITTED = "committed"
ACTIVE_STR = "Active Packages:"
INACTIVE_STR = "Inactive Packages:"
pkg_name = "asr9k-mgbl-px.pie-4.2.3"
nn = "disk0:asr9k-mini-px-4.2.3"
class PackageClass(object):
def __init__(self, org_package=""):
# Platform or domain
self.platform = None
# Package name
self.pkg = None
# Architecture
self.arch = None
# Release version
self.version = None
self.subversion = None
# Package format
self.format = None
# Patch/maintenance version
self.patch_ver = None
# Requires or depends on
self.requires = None
# Supersedes or overrides
self.supersedes = None
# Partition where package exists
self.partition = None
self.org_package = org_package
def __str__(self):
        # FIXME: This is a hack. It needs to recreate the string from the attributes
return self.org_package.replace(".SIT_IMAGE", "", 1)
__repr__ = __str__
class NewPackage():
def __init__(self, pkg_lst_file=None):
self.inputfile = pkg_lst_file
self.pkg_named_list = get_pkgs(pkg_lst_file)
self.pkg_list = []
if self.pkg_named_list:
self._update_pkgs()
def _update_pkgs(self):
for pkg_name in self.pkg_named_list:
# Validate the package name
pkg = self.validate_offbox_xrpie_pkg(pkg_name)
if pkg:
self.pkg_list.append(pkg)
def validate_offbox_xrpie_pkg(self, pkg):
# asr9k-px-4.3.2.CSCuj61599.pie
# asr9k-mpls-px.pie-4.3.2
# asr9k-asr9000v-nV-px.pie-5.2.2
# asr9k-mcast-px.pie-5.2.2
# asr9k-asr901-nV-px.pie-5.2.2
# asr9k-mgbl-px.pie-5.2.2
# asr9k-asr903-nV-px.pie-5.2.2
#self.error("package 1",pkg)
pkg_expr_2pkg = re.compile(
r'(?P<PLATFORM>\w+)-(?P<PKGNAME>\w+)-(?P<SUBPKGNAME>\w+)-(?P<ARCH>p\w+)\.(?P<PKGFORMAT>\w+)-(?P<VERSION>\d+\.\d+\.\d+)')
pkg_expr_2pkg_eng1 = re.compile(
r'(?P<PLATFORM>\w+)-(?P<PKGNAME>\w+)-(?P<SUBPKGNAME>\w+)-(?P<ARCH>p\w+)\.(?P<PKGFORMAT>\w+)-(?P<VERSION>\d+\.\d+\.\d+\..*)\..*')
pkg_expr_2pkg_inac = re.compile(
r'(?P<PLATFORM>\w+)-(?P<PKGNAME>\w+)-(?P<SUBPKGNAME>\w+)-(?P<ARCH>p\w+)(?P<PKGFORMAT>-)(?P<VERSION>\d+\.\d+\.\d+)')
pkg_expr_2pkg_inac_eng = re.compile(
r'(?P<PLATFORM>\w+)-(?P<PKGNAME>\w+)-(?P<SUBPKGNAME>\w+)-(?P<ARCH>p\w+)(?P<PKGFORMAT>-)(?P<VERSION>\d+\.\d+\.\d+\.\d+\w+)')
pkg_expr_2pkg_inac_noarch = re.compile(
r'(?P<PLATFORM>\w+)-(?P<PKGNAME>\w+)-(?P<SUBPKGNAME>\w+)(?P<PKGFORMAT>-)(?P<VERSION>\d+\.\d+\.\d+)')
pkg_expr_2pkg_inac_noarch_eng = re.compile(
r'(?P<PLATFORM>\w+)-(?P<PKGNAME>\w+)-(?P<SUBPKGNAME>\w+)(?P<PKGFORMAT>-)(?P<VERSION>\d+\.\d+\.\d+\.\d+\w+)')
pkg_expr = re.compile(
r'(?P<PLATFORM>\w+)-(?P<PKGNAME>\w+)-(?P<ARCH>p\w+)\.(?P<PKGFORMAT>\w+)-(?P<VERSION>\d+\.\d+\.\d+)')
pkg_expr_eng = re.compile(
r'(?P<PLATFORM>\w+)-(?P<PKGNAME>\w+)-(?P<ARCH>p\w+)\.(?P<PKGFORMAT>\w+)-(?P<VERSION>\d+\.\d+\.\d+\.\d+)')
pkg_expr_inact = re.compile(
r'(?P<PLATFORM>\w+)-(?P<PKGNAME>\w+)-(?P<ARCH>p\w+)(?P<PKGFORMAT>-)(?P<VERSION>\d+\.\d+\.\d+)')
        pkg_expr_inact_eng_noarc = re.compile(
            r'(?P<PLATFORM>\w+)-(?P<PKGNAME>\w+)-(?P<SUBPKGNAME>\w+)(?P<PKGFORMAT>-)(?P<VERSION>\d+\.\d+\.\d+\.\d+\w+)')
smu_expr_eng_int = re.compile(
r'(?P<PLATFORM>\w+)-(?P<ARCH>p\w+)-(?P<VERSION>\d+\.\d+\.\d+\.\d+.)\.(?P<PKGNAME>CSC\w+)(?P<PKGFORMAT>-)(?P<SMUVERSION>\d+\.\d+\.\d+.*)')
smu_expr_eng_int1 = re.compile(
r'(?P<PLATFORM>\w+)-(?P<ARCH>p\w+)-(?P<VERSION>\d+\.\d+\.\d+)\.(?P<PKGNAME>CSC\w+)(?P<PKGFORMAT>-)(?P<SMUVERSION>.*)')
smu_expr = re.compile(
r'(?P<PLATFORM>\w+)-(?P<ARCH>\w+)-(?P<VERSION>\d+\.\d+\.\d+)\.(?P<PKGNAME>\w+)\.(?P<PKGFORMAT>\w+)')
smu_expr2 = re.compile(
r'(?P<PLATFORM>\w+)-(?P<ARCH>\w+)-(?P<VERSION>\d+\.\d+\.\d+)\.(?P<PKGNAME>\w+)-(?P<SMUVERSION>\d+\.\d+\.\d+)\.(?P<PKGFORMAT>\w+)')
smu_expr3 = re.compile(
r'(?P<PLATFORM>\w+)-(?P<ARCH>\w+)-(?P<VERSION>\d+\.\d+\.\d+)\.(?P<PKGNAME>\w+)-(?P<PKGFORMAT>\d+\.\d+\.\d+)')
pkg_expr_2pkg_int = re.compile(
r'(?P<PLATFORM>\w+)-(?P<PKGNAME>\w+)-(?P<SUBPKGNAME>\w+)-(?P<ARCH>p\w+)\.(?P<PKGFORMAT>\w+)-(?P<VERSION>\d+\.\d+\.\d+\.\d+[a-zA-Z])')
pkg_expr_int = re.compile(
r'(?P<PLATFORM>\w+)-(?P<PKGNAME>\w+)-(?P<ARCH>p\w+)\.(?P<PKGFORMAT>\w+)-(?P<VERSION>\d+\.\d+\.\d+\.\d+[a-zA-Z])')
smu_expr_int = re.compile(
r'(?P<PLATFORM>\w+)-(?P<ARCH>\w+)-(?P<VERSION>\d+\.\d+\.\d+\.\w*)\.(?P<PKGNAME>\w+)\.(?P<PKGFORMAT>\w+)')
smu_expr2_int = re.compile(
r'(?P<PLATFORM>\w+)-(?P<ARCH>\w+)-(?P<VERSION>\d+\.\d+\.\d+\.\w*)\.(?P<PKGNAME>\w+)-(?P<SMUVERSION>\d+\.\d+\.\d+\.\w*)\.(?P<PKGFORMAT>\w+)')
pkg_expr_2pkg_eng = re.compile(
r'(?P<PLATFORM>\w+)-(?P<PKGNAME>\w+)-(?P<ARCH>p\w+)(?P<PKGFORMAT>-)(?P<VERSION>\d+\.\d+\.\d+\.\w+)')
pkg_expr_2pkg_eng_test = re.compile(
r'(?P<PLATFORM>\w+)-(?P<PKGNAME>\w+-\w+)-(?P<ARCH>p\w+)(?P<PKGFORMAT>-)(?P<VERSION>\d+\.\d+\.\d+\.\w+)')
pkg_expr_2pkg_sp = re.compile(
r'(?P<PLATFORM>\w+)-(?P<PKGNAME>\w+-\w+)(?P<PKGFORMAT>-)(?P<VERSION>\d+\.\d+\.\d+\.\w+-\d+\.\d+\.\d+)')
pkg_expr_2pkg_sp1 = re.compile(
r'(?P<PLATFORM>\w+)(?P<PKGNAME>-)(?P<ARCH>p\w+)(?P<PKGFORMAT>-)(?P<VERSION>\d+\.\d+\.\d+\.\w+-\d+\.\d+\.\d+)')
pkg_arch="1"
smu_ver="0"
pkgobj = PackageClass(pkg)
p = pkg_expr_2pkg_eng1.search(pkg)
if not p:
p = pkg_expr_2pkg.search(pkg)
if not p:
p = pkg_expr_2pkg_eng_test.search(pkg)
if not p:
p = pkg_expr_2pkg_sp.search(pkg)
if not p:
p = pkg_expr_2pkg_eng.search(pkg)
if not p:
p = pkg_expr_2pkg_int.search(pkg)
if not p:
p = pkg_expr_int.search(pkg)
if not p:
p = smu_expr2_int.search(pkg)
if not p:
p = pkg_expr_2pkg_inac.search(pkg)
if not p:
p = smu_expr_int.search(pkg)
if not p:
p = pkg_expr.search(pkg)
if not p:
p = smu_expr_eng_int.search(pkg)
smu_ver="1"
if not p:
p = smu_expr_eng_int1.search(pkg)
smu_ver="1"
if not p:
p = smu_expr.search(pkg)
smu_ver=0
if not p:
p = smu_expr3.search(pkg)
smu_ver=0
if not p:
p = smu_expr2.search(pkg)
smu_ver=0
if not p:
p = pkg_expr_inact.search(pkg)
smu_ver=0
if not p:
p = pkg_expr_inact_eng_noarc.search(pkg)
pkg_arch="0"
smu_ver=0
if not p:
p=pkg_expr_2pkg_inac_noarch.search(pkg)
pkg_arch="0"
smu_ver=0
if p:
if p.group("PKGFORMAT") == PIE or p.group("PKGFORMAT")== "-" or p.group("PKGFORMAT") == "1.0.0" or p.group("PKGFORMAT") == ".":
pkgobj.platform = p.group("PLATFORM")
if "SUBPKGNAME" in p.groupdict().keys():
if p.group("PKGNAME")[:8] == 'asr9000v':
packagename = p.group(
"PKGNAME")[3:] + "-" + p.group("SUBPKGNAME")
else:
packagename = p.group(
"PKGNAME") + "-" + p.group("SUBPKGNAME")
else:
packagename = p.group("PKGNAME")
pkgobj.pkg = packagename
if pkg_arch=="0":
pkgobj.arch=""
else:
if p.group("PKGFORMAT") == PIE and packagename == "services-infra":
pkgobj.arch=""
else:
pkgobj.arch = p.group("ARCH")
if p.group("PKGFORMAT") == ".":
pkgobj.format = p.group("PKGFORMAT")+p.group("PKGSUBFORMAT")
else:
pkgobj.format = p.group("PKGFORMAT")
if smu_ver=="1":
pkgobj.format = p.group("SMUVERSION")
pkgobj.version = p.group("VERSION")
return pkgobj
def validate_xrrpm_pkg(self, pkg):
pass
class OnboxPackage():
def __init__(self, pkg_lst_file=None, pkg_state=None):
self.inputfile = None
self.pkg_list = []
self.pkg_state = pkg_state
if pkg_lst_file:
self.inputfile = pkg_lst_file
self.update_pkgs()
def update_pkgs(self):
if os.path.exists(self.inputfile):
data = get_pkgs(self.inputfile)
else:
data = self.inputfile.split("\n")
start_pkg = False
if data:
for line in data:
if line.find(self.pkg_state) < 0 and not start_pkg:
continue
elif not start_pkg:
start_pkg = True
pkg_name = line.strip()
pkg = self.validate_xrpie_pkg(pkg_name)
if not pkg:
pkg = self.validate_xrrpm_pkg(pkg_name)
if pkg:
self.pkg_list.append(pkg)
def validate_xrpie_pkg(self, pkg):
# disk0:asr9k-mini-px-4.3.2
# asr9k-px-4.2.3.CSCue60194-1.0.0
# disk0:asr9k-px-5.3.1.06I.CSCub11122-1.0.0
#self.error("package",pkg)
pkg_expr_2pkg = re.compile(
r'(?P<DISK>\w+):(?P<PLATFORM>\w+)-(?P<PKGNAME>\w+)-(?P<SUBPKGNAME>\w+)-(?P<ARCH>p\w+)-(?P<VERSION>\d+\.\d+\.\d+.*)')
pkg_expr_2pkg_eng = re.compile(
r'(?P<DISK>\w+):(?P<PLATFORM>\w+)-(?P<PKGNAME>\w+)-(?P<SUBPKGNAME>\w+)-(?P<ARCH>p\w+)-(?P<VERSION>\d+\.\d+\.\d+\.\d+\w+)')
pkg_expr_2pkg_inac = re.compile(
r'(?P<DISK>\w+):(?P<PLATFORM>\w+)-(?P<PKGNAME>\w+)-(?P<SUBPKGNAME>\w+)-(?P<ARCH>p\w+)-(?P<VERSION>\d+\.\d+\.\d+)')
pkg_expr = re.compile(
r'(?P<DISK>\w+):(?P<PLATFORM>\w+)-(?P<PKGNAME>\w+)-(?P<ARCH>p\w+)-(?P<VERSION>\d+\.\d+\.\d+)')
pkg_expr_eng = re.compile(
r'(?P<DISK>\w+):(?P<PLATFORM>\w+)-(?P<PKGNAME>\w+)-(?P<ARCH>p\w+)-(?P<VERSION>\d+\.\d+\.\d+\.\d+\w+)')
smu_expr = re.compile(
r'(?P<DISK>\w+):(?P<PLATFORM>\w+)-(?P<ARCH>p\w+)-(?P<VERSION>\d+\.\d+\.\d+)\.(?P<PKGNAME>\w+)-(?P<SUBVERSION>\d+\.\d+\.\d+)')
pkg_expr_int = re.compile(
r'(?P<DISK>\w+):(?P<PLATFORM>\w+)-(?P<PKGNAME>\w+)-(?P<ARCH>p\w+)-(?P<VERSION>\d+\.\d+\.\d+\.\d+[a-zA-Z])')
smu_expr_int = re.compile(
r'(?P<DISK>\w+):(?P<PLATFORM>\w+)-(?P<ARCH>p\w+)-(?P<VERSION>\d+\.\d+\.\d+\.\w*)\.(?P<PKGNAME>\w+)-(?P<SUBVERSION>\d+\.\d+\.\d+.\w*)')
smu_expr_internal = re.compile(
r'(?P<DISK>\w+):(?P<PLATFORM>\w+)-(?P<ARCH>p\w+)-(?P<VERSION>\d+\.\d+\.\d+\.\w*)\.(?P<PKGNAME>\w+)-(?P<SUBVERSION>\d+\.\d+\.\d+)')
pkg_expr_noarch = re.compile(
r'(?P<DISK>\w+):(?P<PLATFORM>\w+)-(?P<PKGNAME>\w+)-(?P<SUBPKGNAME>\w+)-(?P<VERSION>\d+\.\d+\.\d+)')
pkg_expr_noarch_eng = re.compile(
r'(?P<DISK>\w+):(?P<PLATFORM>\w+)-(?P<PKGNAME>\w+)-(?P<SUBPKGNAME>\w+)-(?P<VERSION>\d+\.\d+\.\d+\.\d+\w+)')
pkgobj = PackageClass()
p = pkg_expr_2pkg_eng.search(pkg)
if not p:
p = pkg_expr_2pkg.search(pkg)
if not p:
p = pkg_expr_int.search(pkg)
if not p:
p = smu_expr_int.search(pkg)
if not p:
p = pkg_expr_eng.search(pkg)
if not p:
p = pkg_expr.search(pkg)
if not p:
p = smu_expr.search(pkg)
if not p:
p = smu_expr_internal.search(pkg)
if not p:
p = pkg_expr_noarch_eng.search(pkg)
if not p:
p = pkg_expr_noarch.search(pkg)
if p:
pkgobj.platform = p.group("PLATFORM")
if "SUBPKGNAME" in p.groupdict().keys():
packagename = p.group("PKGNAME") + "-" + p.group("SUBPKGNAME")
else:
packagename = p.group("PKGNAME")
pkgobj.pkg = packagename
pkgobj.partition = p.group("DISK")
try:
pkgobj.arch = p.group("ARCH")
            except IndexError:
pkgobj.arch = "px"
pkgobj.version = p.group("VERSION")
if "SUBVERSION" in p.groupdict().keys():
pkgobj.subversion = p.group("SUBVERSION")
return pkgobj
def validate_xrrpm_pkg(self, pkg):
pass
# FIXME: This needs to be implemented as sets
# Packages in list1 but not in list 2
def missing_pkgs(list1, list2):
missing_lst = []
for pk1 in list1:
missing = True
for pk2 in list2:
if pk1.pkg == pk2.pkg and pk1.version == pk2.version:
missing = False
if missing:
missing_lst.append(pk1)
return missing_lst
# Packages in list2 but not in list 1
def extra_pkgs(list1, list2):
extra_lst = []
for pk2 in list2:
extra = True
for pk1 in list1:
if pk1.pkg == pk2.pkg and pk1.version == pk2.version:
extra = False
if extra:
extra_lst.append(pk2)
return extra_lst
def package_intersection(new_packages, device_packages):
"""
Produces an intersection of new packages and device packages.
"""
SMU_RE = r'CSC\D\D\d\d\d'
FP_RE = r'fp\d+'
SP_RE = r'sp\d+'
packages = []
for pk1 in new_packages:
for pk2 in device_packages:
if pk1.pkg == pk2.pkg and pk1.version == pk2.version:
if re.match(SMU_RE, pk2.pkg) or re.match(FP_RE, pk2.pkg) or \
re.match(SP_RE, pk2.pkg):
                    # It's a SMU; the format is
# disk0:asr9k-px-4.3.2.CSCuj61599-1.0.0
pkg = "%s:%s-%s-%s.%s-%s" % (
pk2.partition, pk2.platform, pk2.arch,
pk2.version, pk2.pkg, pk2.subversion
)
else:
if pk1.arch == "":
pkg = "%s:%s-%s-%s" % (
pk2.partition, pk2.platform, pk2.pkg,
pk2.version
)
else:
pkg = "%s:%s-%s-%s-%s" % (
pk2.partition, pk2.platform, pk2.pkg, pk2.arch,
pk2.version
)
packages.append(pkg)
return packages
def parse_xr_show_platform(output):
inventory = {}
lines = output.split('\n')
for line in lines:
line = line.strip()
if len(line) > 0 and line[0].isdigit():
node = line[:15].strip()
entry = {
'type': line[16:41].strip(),
'state': line[42:58].strip(),
'config_state': line[59:].strip()
}
inventory[node] = entry
return inventory
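# Illustration (sample line assumed; the parser slices fixed-width columns):
#   "0/RSP0/CPU0     A9K-RSP440-SE(Active)    IOS XR RUN      PWR,NSHUT,MON"
# would yield roughly:
#   {'0/RSP0/CPU0': {'type': 'A9K-RSP440-SE(Active)',
#                    'state': 'IOS XR RUN', 'config_state': 'PWR,NSHUT,MON'}}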
def validate_xr_node_state(inventory, device):
valid_state = [
'IOS XR RUN',
'PRESENT',
'UNPOWERED',
'READY',
'UNPOWERED',
'FAILED',
'OK',
'ADMIN DOWN',
'DISABLED'
]
for key, value in inventory.items():
if 'CPU' in key:
if value['state'] not in valid_state:
break
else:
device.store_property('inventory', inventory)
return True
return False
protocols = ['tftp', 'ftp:', 'sftp']
def get_pkgs(pkg_lst):
if isinstance(pkg_lst, list):
return pkg_lst
elif isinstance(pkg_lst, str):
fd = open(pkg_lst, "r")
pkg_names = fd.readlines()
fd.close()
pkg_list = [x for x in [p.split("#")[0].strip() for p in pkg_names if p] if x[:4] not in protocols]
if pkg_list:
pkg_list = [p for p in pkg_list if p]
return pkg_list
def get_repo(pkg_lst_file):
fd = open(pkg_lst_file, "r")
pkg_names = fd.readlines()
fd.close()
repo = [x for x in [p.split("#")[0].strip()
for p in pkg_names if p] if x[:4] in protocols]
if repo:
repo = [p for p in repo if p]
return repo[-1]
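if __name__ == "__main__":
    # Minimal sketch: run one off-box pie name through the validator above.
    parsed = NewPackage().validate_offbox_xrpie_pkg("asr9k-mgbl-px.pie-5.2.2")
    if parsed:
        # Expected: asr9k mgbl px pie 5.2.2
        print(parsed.platform, parsed.pkg, parsed.arch, parsed.format, parsed.version)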
|
| vmindru/ansible-modules-core | refs/heads/devel | cloud/openstack/_quantum_subnet.py | 41 |
#!/usr/bin/python
#coding: utf-8 -*-
# (c) 2013, Benno Joy <benno@ansible.com>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
try:
try:
from neutronclient.neutron import client
except ImportError:
from quantumclient.quantum import client
from keystoneclient.v2_0 import client as ksclient
HAVE_DEPS = True
except ImportError:
HAVE_DEPS = False
DOCUMENTATION = '''
---
module: quantum_subnet
deprecated: Deprecated in 2.0. Use os_subnet instead
version_added: "1.2"
short_description: Add/remove subnet from a network
description:
- Add/remove subnet from a network
options:
login_username:
description:
- login username to authenticate to keystone
required: true
default: admin
login_password:
description:
- Password of login user
required: true
default: True
login_tenant_name:
description:
- The tenant name of the login user
required: true
default: True
auth_url:
description:
- The keystone URL for authentication
required: false
default: 'http://127.0.0.1:35357/v2.0/'
region_name:
description:
- Name of the region
required: false
default: None
state:
description:
- Indicate desired state of the resource
choices: ['present', 'absent']
default: present
network_name:
description:
- Name of the network to which the subnet should be attached
required: true
default: None
name:
description:
- The name of the subnet that should be created
required: true
default: None
cidr:
description:
- The CIDR representation of the subnet that should be assigned to the subnet
required: true
default: None
tenant_name:
description:
- The name of the tenant for whom the subnet should be created
required: false
default: None
ip_version:
description:
- The IP version of the subnet 4 or 6
required: false
default: 4
enable_dhcp:
description:
- Whether DHCP should be enabled for this subnet.
required: false
default: true
gateway_ip:
description:
- The ip that would be assigned to the gateway for this subnet
required: false
default: None
dns_nameservers:
description:
- DNS nameservers for this subnet, comma-separated
required: false
default: None
version_added: "1.4"
allocation_pool_start:
description:
- From the subnet pool the starting address from which the IP should be allocated
required: false
default: None
allocation_pool_end:
description:
- From the subnet pool the last IP that should be assigned to the virtual machines
required: false
default: None
requirements:
- "python >= 2.6"
- "python-neutronclient or python-quantumclient"
- "python-keystoneclient"
'''
EXAMPLES = '''
# Create a subnet for a tenant with the specified subnet
- quantum_subnet: state=present login_username=admin login_password=admin
login_tenant_name=admin tenant_name=tenant1
                  network_name=network1 name=net1subnet cidr=192.168.0.0/24
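# Remove the same subnet again (illustrative; assumes the same credentials)
- quantum_subnet: state=absent login_username=admin login_password=admin
                  login_tenant_name=admin tenant_name=tenant1
                  network_name=network1 name=net1subnet cidr=192.168.0.0/24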
'''
_os_keystone = None
_os_tenant_id = None
_os_network_id = None
def _get_ksclient(module, kwargs):
try:
kclient = ksclient.Client(username=kwargs.get('login_username'),
password=kwargs.get('login_password'),
tenant_name=kwargs.get('login_tenant_name'),
auth_url=kwargs.get('auth_url'))
except Exception as e:
module.fail_json(msg = "Error authenticating to the keystone: %s" %e.message)
global _os_keystone
_os_keystone = kclient
return kclient
def _get_endpoint(module, ksclient):
try:
endpoint = ksclient.service_catalog.url_for(service_type='network', endpoint_type='publicURL')
except Exception as e:
module.fail_json(msg = "Error getting network endpoint: %s" % e.message)
return endpoint
def _get_neutron_client(module, kwargs):
_ksclient = _get_ksclient(module, kwargs)
token = _ksclient.auth_token
endpoint = _get_endpoint(module, _ksclient)
kwargs = {
'token': token,
'endpoint_url': endpoint
}
try:
neutron = client.Client('2.0', **kwargs)
except Exception as e:
module.fail_json(msg = " Error in connecting to neutron: %s" % e.message)
return neutron
def _set_tenant_id(module):
global _os_tenant_id
if not module.params['tenant_name']:
_os_tenant_id = _os_keystone.tenant_id
else:
tenant_name = module.params['tenant_name']
for tenant in _os_keystone.tenants.list():
if tenant.name == tenant_name:
_os_tenant_id = tenant.id
break
if not _os_tenant_id:
module.fail_json(msg = "The tenant id cannot be found, please check the parameters")
def _get_net_id(neutron, module):
kwargs = {
'tenant_id': _os_tenant_id,
'name': module.params['network_name'],
}
try:
networks = neutron.list_networks(**kwargs)
except Exception as e:
module.fail_json("Error in listing neutron networks: %s" % e.message)
if not networks['networks']:
return None
return networks['networks'][0]['id']
def _get_subnet_id(module, neutron):
global _os_network_id
subnet_id = None
_os_network_id = _get_net_id(neutron, module)
if not _os_network_id:
module.fail_json(msg = "network id of network not found.")
else:
kwargs = {
'tenant_id': _os_tenant_id,
'name': module.params['name'],
}
try:
subnets = neutron.list_subnets(**kwargs)
except Exception as e:
module.fail_json( msg = " Error in getting the subnet list:%s " % e.message)
if not subnets['subnets']:
return None
return subnets['subnets'][0]['id']
def _create_subnet(module, neutron):
neutron.format = 'json'
subnet = {
'name': module.params['name'],
'ip_version': module.params['ip_version'],
'enable_dhcp': module.params['enable_dhcp'],
'tenant_id': _os_tenant_id,
'gateway_ip': module.params['gateway_ip'],
'dns_nameservers': module.params['dns_nameservers'],
'network_id': _os_network_id,
'cidr': module.params['cidr'],
}
if module.params['allocation_pool_start'] and module.params['allocation_pool_end']:
allocation_pools = [
{
'start' : module.params['allocation_pool_start'],
'end' : module.params['allocation_pool_end']
}
]
subnet.update({'allocation_pools': allocation_pools})
if not module.params['gateway_ip']:
subnet.pop('gateway_ip')
if module.params['dns_nameservers']:
subnet['dns_nameservers'] = module.params['dns_nameservers'].split(',')
else:
subnet.pop('dns_nameservers')
try:
new_subnet = neutron.create_subnet(dict(subnet=subnet))
except Exception as e:
module.fail_json(msg = "Failure in creating subnet: %s" % e.message)
return new_subnet['subnet']['id']
def _delete_subnet(module, neutron, subnet_id):
try:
neutron.delete_subnet(subnet_id)
except Exception as e:
module.fail_json( msg = "Error in deleting subnet: %s" % e.message)
return True
def main():
argument_spec = openstack_argument_spec()
argument_spec.update(dict(
name = dict(required=True),
network_name = dict(required=True),
cidr = dict(required=True),
tenant_name = dict(default=None),
state = dict(default='present', choices=['absent', 'present']),
ip_version = dict(default='4', choices=['4', '6']),
enable_dhcp = dict(default='true', type='bool'),
gateway_ip = dict(default=None),
dns_nameservers = dict(default=None),
allocation_pool_start = dict(default=None),
allocation_pool_end = dict(default=None),
))
module = AnsibleModule(argument_spec=argument_spec)
if not HAVE_DEPS:
module.fail_json(msg='python-keystoneclient and either python-neutronclient or python-quantumclient are required')
neutron = _get_neutron_client(module, module.params)
_set_tenant_id(module)
if module.params['state'] == 'present':
subnet_id = _get_subnet_id(module, neutron)
if not subnet_id:
subnet_id = _create_subnet(module, neutron)
module.exit_json(changed = True, result = "Created" , id = subnet_id)
else:
module.exit_json(changed = False, result = "success" , id = subnet_id)
else:
subnet_id = _get_subnet_id(module, neutron)
if not subnet_id:
module.exit_json(changed = False, result = "success")
else:
_delete_subnet(module, neutron, subnet_id)
module.exit_json(changed = True, result = "deleted")
# this is magic, see lib/ansible/module_common.py
from ansible.module_utils.basic import *
from ansible.module_utils.openstack import *
if __name__ == '__main__':
main()
|
| leansoft/edx-platform | refs/heads/master | lms/djangoapps/bulk_email/migrations/0001_initial.py | 182 |
# -*- coding: utf-8 -*-
# standard South migration imports; the frozen defaults below reference datetime
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'CourseEmail'
db.create_table('bulk_email_courseemail', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('sender', self.gf('django.db.models.fields.related.ForeignKey')(default=1, to=orm['auth.User'], null=True, blank=True)),
('hash', self.gf('django.db.models.fields.CharField')(max_length=128, db_index=True)),
('subject', self.gf('django.db.models.fields.CharField')(max_length=128, blank=True)),
('html_message', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('modified', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('course_id', self.gf('django.db.models.fields.CharField')(max_length=255, db_index=True)),
('to', self.gf('django.db.models.fields.CharField')(default='myself', max_length=64)),
))
db.send_create_signal('bulk_email', ['CourseEmail'])
# Adding model 'Optout'
db.create_table('bulk_email_optout', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('email', self.gf('django.db.models.fields.CharField')(max_length=255, db_index=True)),
('course_id', self.gf('django.db.models.fields.CharField')(max_length=255, db_index=True)),
))
db.send_create_signal('bulk_email', ['Optout'])
# Adding unique constraint on 'Optout', fields ['email', 'course_id']
db.create_unique('bulk_email_optout', ['email', 'course_id'])
def backwards(self, orm):
# Removing unique constraint on 'Optout', fields ['email', 'course_id']
db.delete_unique('bulk_email_optout', ['email', 'course_id'])
# Deleting model 'CourseEmail'
db.delete_table('bulk_email_courseemail')
# Deleting model 'Optout'
db.delete_table('bulk_email_optout')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'bulk_email.courseemail': {
'Meta': {'object_name': 'CourseEmail'},
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'html_message': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'sender': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'subject': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'to': ('django.db.models.fields.CharField', [], {'default': "'myself'", 'max_length': '64'})
},
'bulk_email.optout': {
'Meta': {'unique_together': "(('email', 'course_id'),)", 'object_name': 'Optout'},
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'email': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['bulk_email']
|
| DirtyUnicorns/android_external_chromium_org | refs/heads/lollipop | tools/telemetry/telemetry/value/list_of_scalar_values_unittest.py | 29 |
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import unittest
from telemetry import value
from telemetry.page import page_set
from telemetry.value import list_of_scalar_values
from telemetry.value import none_values
class TestBase(unittest.TestCase):
def setUp(self):
self.page_set = page_set.PageSet(file_path=os.path.dirname(__file__))
self.page_set.AddPageWithDefaultRunNavigate("http://www.bar.com/")
self.page_set.AddPageWithDefaultRunNavigate("http://www.baz.com/")
self.page_set.AddPageWithDefaultRunNavigate("http://www.foo.com/")
@property
def pages(self):
return self.page_set.pages
class ValueTest(TestBase):
def testListSamePageMergingWithSamePageConcatenatePolicy(self):
page0 = self.pages[0]
v0 = list_of_scalar_values.ListOfScalarValues(
page0, 'x', 'unit',
[1,2], same_page_merge_policy=value.CONCATENATE)
v1 = list_of_scalar_values.ListOfScalarValues(
page0, 'x', 'unit',
[3,4], same_page_merge_policy=value.CONCATENATE)
self.assertTrue(v1.IsMergableWith(v0))
vM = (list_of_scalar_values.ListOfScalarValues.
MergeLikeValuesFromSamePage([v0, v1]))
self.assertEquals(page0, vM.page)
self.assertEquals('x', vM.name)
self.assertEquals('unit', vM.units)
self.assertEquals(value.CONCATENATE, vM.same_page_merge_policy)
self.assertEquals(True, vM.important)
self.assertEquals([1, 2, 3, 4], vM.values)
def testListSamePageMergingWithPickFirstPolicy(self):
page0 = self.pages[0]
v0 = list_of_scalar_values.ListOfScalarValues(
page0, 'x', 'unit',
[1,2], same_page_merge_policy=value.PICK_FIRST)
v1 = list_of_scalar_values.ListOfScalarValues(
page0, 'x', 'unit',
[3,4], same_page_merge_policy=value.PICK_FIRST)
self.assertTrue(v1.IsMergableWith(v0))
vM = (list_of_scalar_values.ListOfScalarValues.
MergeLikeValuesFromSamePage([v0, v1]))
self.assertEquals(page0, vM.page)
self.assertEquals('x', vM.name)
self.assertEquals('unit', vM.units)
self.assertEquals(value.PICK_FIRST, vM.same_page_merge_policy)
self.assertEquals(True, vM.important)
self.assertEquals([1, 2], vM.values)
def testListDifferentPageMerging(self):
page0 = self.pages[0]
page1 = self.pages[1]
v0 = list_of_scalar_values.ListOfScalarValues(
page0, 'x', 'unit',
[1, 2], same_page_merge_policy=value.CONCATENATE)
v1 = list_of_scalar_values.ListOfScalarValues(
page1, 'x', 'unit',
[3, 4], same_page_merge_policy=value.CONCATENATE)
self.assertTrue(v1.IsMergableWith(v0))
vM = (list_of_scalar_values.ListOfScalarValues.
MergeLikeValuesFromDifferentPages([v0, v1]))
self.assertEquals(None, vM.page)
self.assertEquals('x', vM.name)
self.assertEquals('unit', vM.units)
self.assertEquals(value.CONCATENATE, vM.same_page_merge_policy)
self.assertEquals(True, vM.important)
self.assertEquals([1, 2, 3, 4], vM.values)
def testListWithNoneValueMerging(self):
page0 = self.pages[0]
v0 = list_of_scalar_values.ListOfScalarValues(
page0, 'x', 'unit',
[1, 2], same_page_merge_policy=value.CONCATENATE)
v1 = list_of_scalar_values.ListOfScalarValues(
page0, 'x', 'unit',
None, same_page_merge_policy=value.CONCATENATE, none_value_reason='n')
self.assertTrue(v1.IsMergableWith(v0))
vM = (list_of_scalar_values.ListOfScalarValues.
MergeLikeValuesFromSamePage([v0, v1]))
self.assertEquals(None, vM.values)
self.assertEquals(none_values.MERGE_FAILURE_REASON,
vM.none_value_reason)
def testListWithNoneValueMustHaveNoneReason(self):
page0 = self.pages[0]
self.assertRaises(none_values.NoneValueMissingReason,
lambda: list_of_scalar_values.ListOfScalarValues(
page0, 'x', 'unit', None))
def testListWithNoneReasonMustHaveNoneValue(self):
page0 = self.pages[0]
self.assertRaises(none_values.ValueMustHaveNoneValue,
lambda: list_of_scalar_values.ListOfScalarValues(
page0, 'x', 'unit', [1, 2],
none_value_reason='n'))
def testAsDict(self):
v = list_of_scalar_values.ListOfScalarValues(
None, 'x', 'unit', [1, 2],
same_page_merge_policy=value.PICK_FIRST, important=False)
d = v.AsDictWithoutBaseClassEntries()
self.assertEquals(d, {
'values': [1, 2]
})
def testNoneValueAsDict(self):
v = list_of_scalar_values.ListOfScalarValues(
None, 'x', 'unit', None, same_page_merge_policy=value.PICK_FIRST,
important=False, none_value_reason='n')
d = v.AsDictWithoutBaseClassEntries()
self.assertEquals(d, {
'values': None,
'none_value_reason': 'n'
})
def testFromDictInts(self):
d = {
'type': 'list_of_scalar_values',
'name': 'x',
'units': 'unit',
'values': [1, 2]
}
v = value.Value.FromDict(d, {})
self.assertTrue(isinstance(v, list_of_scalar_values.ListOfScalarValues))
self.assertEquals(v.values, [1, 2])
def testFromDictFloats(self):
d = {
'type': 'list_of_scalar_values',
'name': 'x',
'units': 'unit',
'values': [1.3, 2.7]
}
v = value.Value.FromDict(d, {})
self.assertTrue(isinstance(v, list_of_scalar_values.ListOfScalarValues))
self.assertEquals(v.values, [1.3, 2.7])
def testFromDictNoneValue(self):
d = {
'type': 'list_of_scalar_values',
'name': 'x',
'units': 'unit',
'values': None,
'none_value_reason': 'n'
}
v = value.Value.FromDict(d, {})
self.assertTrue(isinstance(v, list_of_scalar_values.ListOfScalarValues))
self.assertEquals(v.values, None)
self.assertEquals(v.none_value_reason, 'n')
|
| openwns/wrowser | refs/heads/master | openwns/wrowser/Time.py | 1 |
###############################################################################
# This file is part of openWNS (open Wireless Network Simulator)
# _____________________________________________________________________________
#
# Copyright (C) 2004-2007
# Chair of Communication Networks (ComNets)
# Kopernikusstr. 16, D-52074 Aachen, Germany
# phone: ++49-241-80-27910,
# fax: ++49-241-80-22242
# email: info@openwns.org
# www: http://www.openwns.org
# _____________________________________________________________________________
#
# openWNS is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License version 2 as published by the
# Free Software Foundation;
#
# openWNS is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import datetime
import time
class Delta:
def __init__(self, timedelta):
assert(isinstance(timedelta, datetime.timedelta))
self.timedelta = timedelta
def __str__(self):
return str(self.timedelta)
    def __getYears(self):
        return self.timedelta.days // 365
    def __getDays(self):
        return self.timedelta.days % 365
    def __getHours(self):
        # timedelta stores a days/seconds/microseconds triple; derive the rest
        return self.timedelta.seconds // 3600
    def __getMinutes(self):
        return self.timedelta.seconds // 60 - self.hours * 60
    def __getSeconds(self):
        return self.timedelta.seconds % 60
    def __getMicroSeconds(self):
        return self.timedelta.microseconds
def asString(self):
s = str(self.seconds) + "s"
for value, unit in [(self.minutes, "m"),
(self.hours, "h"),
(self.days, "d"),
(self.years, "y")]:
if value > 0:
s = str(value) + unit + " " + s
return s
years = property(__getYears)
days = property(__getDays)
hours = property(__getHours)
minutes = property(__getMinutes)
seconds = property(__getSeconds)
microseconds = property(__getMicroSeconds)
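if __name__ == "__main__":
    # Minimal sketch: pretty-print 1 day, 2 hours, 3 minutes, 4 seconds.
    delta = Delta(datetime.timedelta(days=1, hours=2, minutes=3, seconds=4))
    print(delta.asString())  # 1d 2h 3m 4s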
|
| jamespcole/home-assistant | refs/heads/master | homeassistant/components/input_text/__init__.py | 10 |
"""Support to enter a value into a text box."""
import logging
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.const import (
ATTR_ENTITY_ID, ATTR_UNIT_OF_MEASUREMENT, CONF_ICON, CONF_NAME, CONF_MODE)
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.restore_state import RestoreEntity
_LOGGER = logging.getLogger(__name__)
DOMAIN = 'input_text'
ENTITY_ID_FORMAT = DOMAIN + '.{}'
CONF_INITIAL = 'initial'
CONF_MIN = 'min'
CONF_MAX = 'max'
MODE_TEXT = 'text'
MODE_PASSWORD = 'password'
ATTR_VALUE = 'value'
ATTR_MIN = 'min'
ATTR_MAX = 'max'
ATTR_PATTERN = 'pattern'
ATTR_MODE = 'mode'
SERVICE_SET_VALUE = 'set_value'
SERVICE_SET_VALUE_SCHEMA = vol.Schema({
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Required(ATTR_VALUE): cv.string,
})
def _cv_input_text(cfg):
"""Configure validation helper for input box (voluptuous)."""
minimum = cfg.get(CONF_MIN)
maximum = cfg.get(CONF_MAX)
    if minimum > maximum:
        raise vol.Invalid('Max len ({}) is not greater than min len ({})'
                          .format(maximum, minimum))
state = cfg.get(CONF_INITIAL)
if state is not None and (len(state) < minimum or len(state) > maximum):
raise vol.Invalid('Initial value {} length not in range {}-{}'
.format(state, minimum, maximum))
return cfg
CONFIG_SCHEMA = vol.Schema({
DOMAIN: cv.schema_with_slug_keys(
vol.All({
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_MIN, default=0): vol.Coerce(int),
vol.Optional(CONF_MAX, default=100): vol.Coerce(int),
vol.Optional(CONF_INITIAL, ''): cv.string,
vol.Optional(CONF_ICON): cv.icon,
vol.Optional(ATTR_UNIT_OF_MEASUREMENT): cv.string,
vol.Optional(ATTR_PATTERN): cv.string,
vol.Optional(CONF_MODE, default=MODE_TEXT):
vol.In([MODE_TEXT, MODE_PASSWORD]),
}, _cv_input_text)
)
}, required=True, extra=vol.ALLOW_EXTRA)
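# Illustrative configuration.yaml entry accepted by the schema above
# (the slug key and all values are assumed):
#
# input_text:
#   greeting:
#     name: Greeting text
#     initial: hello
#     min: 1
#     max: 30
#     mode: text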
async def async_setup(hass, config):
"""Set up an input text box."""
component = EntityComponent(_LOGGER, DOMAIN, hass)
entities = []
for object_id, cfg in config[DOMAIN].items():
name = cfg.get(CONF_NAME)
minimum = cfg.get(CONF_MIN)
maximum = cfg.get(CONF_MAX)
initial = cfg.get(CONF_INITIAL)
icon = cfg.get(CONF_ICON)
unit = cfg.get(ATTR_UNIT_OF_MEASUREMENT)
pattern = cfg.get(ATTR_PATTERN)
mode = cfg.get(CONF_MODE)
entities.append(InputText(
object_id, name, initial, minimum, maximum, icon, unit,
pattern, mode))
if not entities:
return False
component.async_register_entity_service(
SERVICE_SET_VALUE, SERVICE_SET_VALUE_SCHEMA,
'async_set_value'
)
await component.async_add_entities(entities)
return True
class InputText(RestoreEntity):
"""Represent a text box."""
def __init__(self, object_id, name, initial, minimum, maximum, icon,
unit, pattern, mode):
"""Initialize a text input."""
self.entity_id = ENTITY_ID_FORMAT.format(object_id)
self._name = name
self._current_value = initial
self._minimum = minimum
self._maximum = maximum
self._icon = icon
self._unit = unit
self._pattern = pattern
self._mode = mode
@property
def should_poll(self):
"""If entity should be polled."""
return False
@property
def name(self):
"""Return the name of the text input entity."""
return self._name
@property
def icon(self):
"""Return the icon to be used for this entity."""
return self._icon
@property
def state(self):
"""Return the state of the component."""
return self._current_value
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit
@property
def state_attributes(self):
"""Return the state attributes."""
return {
ATTR_MIN: self._minimum,
ATTR_MAX: self._maximum,
ATTR_PATTERN: self._pattern,
ATTR_MODE: self._mode,
}
async def async_added_to_hass(self):
"""Run when entity about to be added to hass."""
await super().async_added_to_hass()
if self._current_value is not None:
return
state = await self.async_get_last_state()
value = state and state.state
# Check against None because value can be 0
if value is not None and self._minimum <= len(value) <= self._maximum:
self._current_value = value
async def async_set_value(self, value):
"""Select new value."""
if len(value) < self._minimum or len(value) > self._maximum:
_LOGGER.warning("Invalid value: %s (length range %s - %s)",
value, self._minimum, self._maximum)
return
self._current_value = value
await self.async_update_ha_state()
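# Illustrative service call routed to async_set_value (entity id assumed):
#
#   service: input_text.set_value
#   data:
#     entity_id: input_text.greeting
#     value: "hello world"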
|
| nkgilley/home-assistant | refs/heads/dev | homeassistant/components/zha/core/channels/__init__.py | 7 |
"""Channels module for Zigbee Home Automation."""
import asyncio
import logging
from typing import Any, Dict, List, Optional, Tuple, Union
import zigpy.zcl.clusters.closures
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_send
from . import ( # noqa: F401 # pylint: disable=unused-import
base,
closures,
general,
homeautomation,
hvac,
lighting,
lightlink,
manufacturerspecific,
measurement,
protocol,
security,
smartenergy,
)
from .. import (
const,
device as zha_core_device,
discovery as zha_disc,
registries as zha_regs,
typing as zha_typing,
)
_LOGGER = logging.getLogger(__name__)
ChannelsDict = Dict[str, zha_typing.ChannelType]
class Channels:
"""All discovered channels of a device."""
def __init__(self, zha_device: zha_typing.ZhaDeviceType) -> None:
"""Initialize instance."""
self._pools: List[zha_typing.ChannelPoolType] = []
self._power_config = None
self._identify = None
self._semaphore = asyncio.Semaphore(3)
self._unique_id = str(zha_device.ieee)
self._zdo_channel = base.ZDOChannel(zha_device.device.endpoints[0], zha_device)
self._zha_device = zha_device
@property
def pools(self) -> List["ChannelPool"]:
"""Return channel pools list."""
return self._pools
@property
def power_configuration_ch(self) -> zha_typing.ChannelType:
"""Return power configuration channel."""
return self._power_config
@power_configuration_ch.setter
def power_configuration_ch(self, channel: zha_typing.ChannelType) -> None:
"""Power configuration channel setter."""
if self._power_config is None:
self._power_config = channel
@property
def identify_ch(self) -> zha_typing.ChannelType:
"""Return power configuration channel."""
return self._identify
@identify_ch.setter
def identify_ch(self, channel: zha_typing.ChannelType) -> None:
"""Power configuration channel setter."""
if self._identify is None:
self._identify = channel
@property
def semaphore(self) -> asyncio.Semaphore:
"""Return semaphore for concurrent tasks."""
return self._semaphore
@property
def zdo_channel(self) -> zha_typing.ZDOChannelType:
"""Return ZDO channel."""
return self._zdo_channel
@property
def zha_device(self) -> zha_typing.ZhaDeviceType:
"""Return parent zha device."""
return self._zha_device
@property
def unique_id(self):
"""Return the unique id for this channel."""
return self._unique_id
@property
def zigbee_signature(self) -> Dict[int, Dict[str, Any]]:
"""Get the zigbee signatures for the pools in channels."""
return {
signature[0]: signature[1]
for signature in [pool.zigbee_signature for pool in self.pools]
}
@classmethod
def new(cls, zha_device: zha_typing.ZhaDeviceType) -> "Channels":
"""Create new instance."""
channels = cls(zha_device)
for ep_id in sorted(zha_device.device.endpoints):
channels.add_pool(ep_id)
return channels
def add_pool(self, ep_id: int) -> None:
"""Add channels for a specific endpoint."""
if ep_id == 0:
return
self._pools.append(ChannelPool.new(self, ep_id))
async def async_initialize(self, from_cache: bool = False) -> None:
"""Initialize claimed channels."""
await self.zdo_channel.async_initialize(from_cache)
self.zdo_channel.debug("'async_initialize' stage succeeded")
await asyncio.gather(
*(pool.async_initialize(from_cache) for pool in self.pools)
)
async def async_configure(self) -> None:
"""Configure claimed channels."""
await self.zdo_channel.async_configure()
self.zdo_channel.debug("'async_configure' stage succeeded")
await asyncio.gather(*(pool.async_configure() for pool in self.pools))
@callback
def async_new_entity(
self,
component: str,
entity_class: zha_typing.CALLABLE_T,
unique_id: str,
channels: List[zha_typing.ChannelType],
):
"""Signal new entity addition."""
if self.zha_device.status == zha_core_device.DeviceStatus.INITIALIZED:
return
self.zha_device.hass.data[const.DATA_ZHA][component].append(
(entity_class, (unique_id, self.zha_device, channels))
)
@callback
def async_send_signal(self, signal: str, *args: Any) -> None:
"""Send a signal through hass dispatcher."""
async_dispatcher_send(self.zha_device.hass, signal, *args)
@callback
def zha_send_event(self, event_data: Dict[str, Union[str, int]]) -> None:
"""Relay events to hass."""
self.zha_device.hass.bus.async_fire(
"zha_event",
{
const.ATTR_DEVICE_IEEE: str(self.zha_device.ieee),
const.ATTR_UNIQUE_ID: self.unique_id,
**event_data,
},
)
class ChannelPool:
"""All channels of an endpoint."""
def __init__(self, channels: Channels, ep_id: int):
"""Initialize instance."""
self._all_channels: ChannelsDict = {}
self._channels: Channels = channels
self._claimed_channels: ChannelsDict = {}
self._id: int = ep_id
self._client_channels: Dict[str, zha_typing.ClientChannelType] = {}
self._unique_id: str = f"{channels.unique_id}-{ep_id}"
@property
def all_channels(self) -> ChannelsDict:
"""All server channels of an endpoint."""
return self._all_channels
@property
def claimed_channels(self) -> ChannelsDict:
"""Channels in use."""
return self._claimed_channels
@property
def client_channels(self) -> Dict[str, zha_typing.ClientChannelType]:
"""Return a dict of client channels."""
return self._client_channels
@property
def endpoint(self) -> zha_typing.ZigpyEndpointType:
"""Return endpoint of zigpy device."""
return self._channels.zha_device.device.endpoints[self.id]
@property
def id(self) -> int:
"""Return endpoint id."""
return self._id
@property
def nwk(self) -> int:
"""Device NWK for logging."""
return self._channels.zha_device.nwk
@property
def is_mains_powered(self) -> bool:
"""Device is_mains_powered."""
return self._channels.zha_device.is_mains_powered
@property
def manufacturer(self) -> Optional[str]:
"""Return device manufacturer."""
return self._channels.zha_device.manufacturer
@property
def manufacturer_code(self) -> Optional[int]:
"""Return device manufacturer."""
return self._channels.zha_device.manufacturer_code
@property
def hass(self):
"""Return hass."""
return self._channels.zha_device.hass
@property
def model(self) -> Optional[str]:
"""Return device model."""
return self._channels.zha_device.model
@property
def skip_configuration(self) -> bool:
"""Return True if device does not require channel configuration."""
return self._channels.zha_device.skip_configuration
@property
def unique_id(self):
"""Return the unique id for this channel."""
return self._unique_id
@property
def zigbee_signature(self) -> Tuple[int, Dict[str, Any]]:
"""Get the zigbee signature for the endpoint this pool represents."""
return (
self.endpoint.endpoint_id,
{
const.ATTR_PROFILE_ID: self.endpoint.profile_id,
const.ATTR_DEVICE_TYPE: f"0x{self.endpoint.device_type:04x}"
if self.endpoint.device_type is not None
else "",
const.ATTR_IN_CLUSTERS: [
f"0x{cluster_id:04x}"
for cluster_id in sorted(self.endpoint.in_clusters)
],
const.ATTR_OUT_CLUSTERS: [
f"0x{cluster_id:04x}"
for cluster_id in sorted(self.endpoint.out_clusters)
],
},
)
@classmethod
def new(cls, channels: Channels, ep_id: int) -> "ChannelPool":
"""Create new channels for an endpoint."""
pool = cls(channels, ep_id)
pool.add_all_channels()
pool.add_client_channels()
zha_disc.PROBE.discover_entities(pool)
return pool
@callback
def add_all_channels(self) -> None:
"""Create and add channels for all input clusters."""
for cluster_id, cluster in self.endpoint.in_clusters.items():
channel_class = zha_regs.ZIGBEE_CHANNEL_REGISTRY.get(
cluster_id, base.ZigbeeChannel
)
# really ugly hack to deal with xiaomi using the door lock cluster
# incorrectly.
if (
hasattr(cluster, "ep_attribute")
and cluster_id == zigpy.zcl.clusters.closures.DoorLock.cluster_id
and cluster.ep_attribute == "multistate_input"
):
channel_class = general.MultistateInput
# end of ugly hack
channel = channel_class(cluster, self)
if channel.name == const.CHANNEL_POWER_CONFIGURATION:
if (
self._channels.power_configuration_ch
or self._channels.zha_device.is_mains_powered
):
                    # one power configuration channel per device
continue
self._channels.power_configuration_ch = channel
elif channel.name == const.CHANNEL_IDENTIFY:
self._channels.identify_ch = channel
self.all_channels[channel.id] = channel
@callback
def add_client_channels(self) -> None:
"""Create client channels for all output clusters if in the registry."""
for cluster_id, channel_class in zha_regs.CLIENT_CHANNELS_REGISTRY.items():
cluster = self.endpoint.out_clusters.get(cluster_id)
if cluster is not None:
channel = channel_class(cluster, self)
self.client_channels[channel.id] = channel
async def async_initialize(self, from_cache: bool = False) -> None:
"""Initialize claimed channels."""
await self._execute_channel_tasks("async_initialize", from_cache)
async def async_configure(self) -> None:
"""Configure claimed channels."""
await self._execute_channel_tasks("async_configure")
async def _execute_channel_tasks(self, func_name: str, *args: Any) -> None:
"""Add a throttled channel task and swallow exceptions."""
async def _throttle(coro):
async with self._channels.semaphore:
return await coro
channels = [*self.claimed_channels.values(), *self.client_channels.values()]
tasks = [_throttle(getattr(ch, func_name)(*args)) for ch in channels]
results = await asyncio.gather(*tasks, return_exceptions=True)
for channel, outcome in zip(channels, results):
if isinstance(outcome, Exception):
channel.warning("'%s' stage failed: %s", func_name, str(outcome))
continue
channel.debug("'%s' stage succeeded", func_name)
@callback
def async_new_entity(
self,
component: str,
entity_class: zha_typing.CALLABLE_T,
unique_id: str,
channels: List[zha_typing.ChannelType],
):
"""Signal new entity addition."""
self._channels.async_new_entity(component, entity_class, unique_id, channels)
@callback
def async_send_signal(self, signal: str, *args: Any) -> None:
"""Send a signal through hass dispatcher."""
self._channels.async_send_signal(signal, *args)
@callback
def claim_channels(self, channels: List[zha_typing.ChannelType]) -> None:
"""Claim a channel."""
self.claimed_channels.update({ch.id: ch for ch in channels})
@callback
def unclaimed_channels(self) -> List[zha_typing.ChannelType]:
"""Return a list of available (unclaimed) channels."""
claimed = set(self.claimed_channels)
available = set(self.all_channels)
return [self.all_channels[chan_id] for chan_id in (available - claimed)]
@callback
def zha_send_event(self, event_data: Dict[str, Union[str, int]]) -> None:
"""Relay events to hass."""
self._channels.zha_send_event(
{
const.ATTR_UNIQUE_ID: self.unique_id,
const.ATTR_ENDPOINT_ID: self.id,
**event_data,
}
)
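# Typical construction path, sketched (zha_device is assumed to exist):
#   channels = Channels.new(zha_device)        # one ChannelPool per endpoint != 0
#   await channels.async_initialize(from_cache=True)
#   await channels.async_configure()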
|
| brburns/netdisco | refs/heads/master | example_service.py | 5 |
"""
Example use of DiscoveryService.
Will scan every 10 seconds and print out new found entries.
Will quit after 2 minutes.
"""
from __future__ import print_function
import logging
from datetime import datetime
import time
from netdisco.service import DiscoveryService
logging.basicConfig(level=logging.INFO)
# Scan every 10 seconds
nd = DiscoveryService(10)
def new_service_listener(discoverable, service):
""" Print out a new service found message. """
print("{} - Found new service: {} {}".format(
datetime.now(), discoverable, service))
nd.add_listener(new_service_listener)
nd.start()
time.sleep(120)
nd.stop()
|
| petemounce/ansible | refs/heads/devel | lib/ansible/plugins/inventory/yaml.py | 4 |
# Copyright 2017 RedHat, inc
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#############################################
'''
DOCUMENTATION:
inventory: yaml
version_added: "2.4"
    short_description: Uses a specific YAML file as an inventory source.
description:
- YAML based inventory, starts with the 'all' group and has hosts/vars/children entries.
- Host entries can have sub-entries defined, which will be treated as variables.
- Vars entries are normal group vars.
- Children are 'child groups', which can also have their own vars/hosts/children and so on.
- File MUST have a valid extension: yaml, yml, json.
notes:
- It takes the place of the previously hardcoded YAML inventory.
- To function it requires being whitelisted in configuration.
options:
_yaml_extensions:
description: list of 'valid' extensions for files containing YAML
EXAMPLES:
all: # keys must be unique, i.e. only one 'hosts' per group
hosts:
test1:
test2:
var1: value1
vars:
group_var1: value2
children: # key order does not matter, indentation does
other_group:
children:
group_x:
hosts:
test5
vars:
g2_var2: value3
hosts:
test4:
ansible_host: 127.0.0.1
last_group:
hosts:
test1 # same host as above, additional group membership
vars:
last_var: MYVALUE
'''
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
import os
from ansible import constants as C
from ansible.errors import AnsibleParserError
from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_bytes, to_text
from ansible.parsing.utils.addresses import parse_address
from ansible.plugins.inventory import BaseFileInventoryPlugin, detect_range, expand_hostname_range
class InventoryModule(BaseFileInventoryPlugin):
NAME = 'yaml'
def __init__(self):
super(InventoryModule, self).__init__()
self.patterns = {}
self._compile_patterns()
def verify_file(self, path):
valid = False
b_path = to_bytes(path)
if super(InventoryModule, self).verify_file(b_path):
file_name, ext = os.path.splitext(b_path)
if ext and ext in C.YAML_FILENAME_EXTENSIONS:
valid = True
return valid
def parse(self, inventory, loader, path, cache=True):
''' parses the inventory file '''
super(InventoryModule, self).parse(inventory, loader, path)
try:
data = self.loader.load_from_file(path)
except Exception as e:
raise AnsibleParserError(e)
if not data:
return False
# We expect top level keys to correspond to groups, iterate over them
        # to get host, vars and subgroups (which we iterate over recursively)
if isinstance(data, dict):
for group_name in data:
self._parse_group(group_name, data[group_name])
else:
raise AnsibleParserError("Invalid data from file, expected dictionary and got:\n\n%s" % data)
def _parse_group(self, group, group_data):
if self.patterns['groupname'].match(group):
self.inventory.add_group(group)
if isinstance(group_data, dict):
            # make sure they are dicts
for section in ['vars', 'children', 'hosts']:
if section in group_data and isinstance(group_data[section], string_types):
group_data[section] = {group_data[section]: None}
if 'vars' in group_data:
for var in group_data['vars']:
self.inventory.set_variable(group, var, group_data['vars'][var])
if 'children' in group_data:
for subgroup in group_data['children']:
self._parse_group(subgroup, group_data['children'][subgroup])
self.inventory.add_child(group, subgroup)
if 'hosts' in group_data:
for host_pattern in group_data['hosts']:
hosts, port = self._parse_host(host_pattern)
self.populate_host_vars(hosts, group_data['hosts'][host_pattern], group, port)
else:
self.display.warning("Skipping '%s' as this is not a valid group name" % group)
def _parse_host(self, host_pattern):
'''
Each host key can be a pattern, try to process it and add variables as needed
'''
(hostnames, port) = self._expand_hostpattern(host_pattern)
return hostnames, port
def _expand_hostpattern(self, hostpattern):
'''
Takes a single host pattern and returns a list of hostnames and an
optional port number that applies to all of them.
'''
# Can the given hostpattern be parsed as a host with an optional port
# specification?
try:
(pattern, port) = parse_address(hostpattern, allow_ranges=True)
except:
# not a recognizable host pattern
pattern = hostpattern
port = None
# Once we have separated the pattern, we expand it into list of one or
# more hostnames, depending on whether it contains any [x:y] ranges.
if detect_range(pattern):
hostnames = expand_hostname_range(pattern)
else:
hostnames = [pattern]
return (hostnames, port)
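    # Illustrative example (hypothetical hosts): a pattern such as
    # 'web[01:03].example.com:2222' expands to
    # (['web01.example.com', 'web02.example.com', 'web03.example.com'], 2222),
    # while a plain hostname yields ([hostname], None).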
def _compile_patterns(self):
'''
Compiles the regular expressions required to parse the inventory and stores them in self.patterns.
'''
        self.patterns['groupname'] = re.compile(r'''^[A-Za-z_][A-Za-z0-9_]*$''')
|
NathanW2/QGIS
|
refs/heads/master
|
python/plugins/processing/algs/qgis/VectorLayerHistogram.py
|
2
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
VectorLayerHistogram.py
---------------------
Date : January 2013
Copyright : (C) 2013 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'January 2013'
__copyright__ = '(C) 2013, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import plotly as plt
import plotly.graph_objs as go
from qgis.core import (QgsProcessingParameterFeatureSource,
QgsProcessingParameterField,
QgsProcessingParameterNumber,
QgsProcessingParameterFileDestination)
from processing.algs.qgis.QgisAlgorithm import QgisAlgorithm
from processing.tools import vector
class VectorLayerHistogram(QgisAlgorithm):
INPUT = 'INPUT'
OUTPUT = 'OUTPUT'
FIELD = 'FIELD'
BINS = 'BINS'
def group(self):
return self.tr('Graphics')
def groupId(self):
return 'graphics'
def __init__(self):
super().__init__()
def initAlgorithm(self, config=None):
self.addParameter(QgsProcessingParameterFeatureSource(self.INPUT,
self.tr('Input layer')))
self.addParameter(QgsProcessingParameterField(self.FIELD,
self.tr('Attribute'), parentLayerParameterName=self.INPUT,
type=QgsProcessingParameterField.Numeric))
self.addParameter(QgsProcessingParameterNumber(self.BINS,
self.tr('number of bins'), minValue=2, defaultValue=10))
self.addParameter(QgsProcessingParameterFileDestination(self.OUTPUT, self.tr('Histogram'), self.tr('HTML files (*.html)')))
def name(self):
return 'vectorlayerhistogram'
def displayName(self):
return self.tr('Vector layer histogram')
def processAlgorithm(self, parameters, context, feedback):
source = self.parameterAsSource(parameters, self.INPUT, context)
fieldname = self.parameterAsString(parameters, self.FIELD, context)
bins = self.parameterAsInt(parameters, self.BINS, context)
output = self.parameterAsFileOutput(parameters, self.OUTPUT, context)
values = vector.values(source, fieldname)
data = [go.Histogram(x=values[fieldname],
nbinsx=bins)]
plt.offline.plot(data, filename=output, auto_open=False)
return {self.OUTPUT: output}
|
xuxiao19910803/edx
|
refs/heads/master
|
lms/djangoapps/instructor/tests/test_legacy_xss.py
|
46
|
"""
Tests of various instructor dashboard features that include lists of students
"""
from django.conf import settings
from django.test.client import RequestFactory
from markupsafe import escape
from nose.plugins.attrib import attr
from student.tests.factories import UserFactory, CourseEnrollmentFactory
from edxmako.tests import mako_middleware_process_request
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
from instructor.views import legacy
# pylint: disable=missing-docstring
@attr('shard_1')
class TestXss(ModuleStoreTestCase):
def setUp(self):
super(TestXss, self).setUp()
self._request_factory = RequestFactory()
self._course = CourseFactory.create()
self._evil_student = UserFactory.create(
email="robot+evil@edx.org",
username="evil-robot",
profile__name='<span id="evil">Evil Robot</span>',
)
self._instructor = UserFactory.create(
email="robot+instructor@edx.org",
username="instructor",
is_staff=True
)
CourseEnrollmentFactory.create(
user=self._evil_student,
course_id=self._course.id
)
def _test_action(self, action):
"""
Test for XSS vulnerability in the given action
Build a request with the given action, call the instructor dashboard
view, and check that HTML code in a user's name is properly escaped.
"""
req = self._request_factory.post(
"dummy_url",
data={"action": action}
)
req.user = self._instructor
req.session = {}
mako_middleware_process_request(req)
resp = legacy.instructor_dashboard(req, self._course.id.to_deprecated_string())
respUnicode = resp.content.decode(settings.DEFAULT_CHARSET)
self.assertNotIn(self._evil_student.profile.name, respUnicode)
self.assertIn(escape(self._evil_student.profile.name), respUnicode)
def test_list_enrolled(self):
self._test_action("List enrolled students")
def test_dump_list_of_enrolled(self):
self._test_action("Dump list of enrolled students")
|
deathmetalland/IkaLog
|
refs/heads/youtube_sample
|
tools/IkaWatcher.py
|
3
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# IkaLog
# ======
# Copyright (C) 2016 Takeshi HASEGAWA
# Copyright (C) 2016 Hiroyuki KOMATSU
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ----
#
# IkaWatcher is a watchdog for the specified directory. When a new video file
# appears in the directory, IkaLog.py is triggered with that file.
#
# This is an experimental implementation, only tested with AVT C875 on Mac.
#
# Usage: ./tools/IkaWatcher.py --video_dir=~/Videos --video_ext=.avi
#
# ----
#
# This command additionally requires watchdog library.
# https://pypi.python.org/pypi/watchdog
import argparse
import os.path
import queue
import subprocess
import threading
import time
from watchdog.events import FileSystemEventHandler
from watchdog.observers import Observer
def get_args():
parser = argparse.ArgumentParser()
parser.add_argument('--video_dir', '-d', dest='video_dir',
required=True, type=str)
parser.add_argument('--video_ext', '-e', dest='video_ext',
default='.mp4', type=str)
return vars(parser.parse_args())
def get_time(msec):
return time.strftime("%Y%m%d_%H%M%S", time.localtime(msec))
def print_file_info(path):
print('atime: %s' % get_time(os.path.getatime(path)))
print('mtime: %s' % get_time(os.path.getmtime(path)))
print('ctime: %s' % get_time(os.path.getctime(path)))
print('size: %s' % '{:,d}'.format(os.path.getsize(path)))
print('')
def ikalog_with_queue(video_queue):
ika_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
video_path = ''
while True:
# Wait for video_path.
        while video_path == '':
video_path = video_queue.get()
# Termination check.
if video_path is None:
return
# Wait for the next file until a timeout.
try:
queued_data = video_queue.get(timeout=180)
except queue.Empty:
queued_data = ''
# If not a new file, keep listening.
# Note: video_path is not '' or None here.
if queued_data == video_path:
continue
command = [os.path.join(ika_path, 'IkaLog.py'), '-f', video_path]
subprocess.call(command)
video_path = queued_data
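# Queue protocol sketch (illustrative summary of the worker above):
# put(path) on each file modification (a repeated path postpones processing),
# put('') or a 180 s timeout flushes the last queued path to IkaLog.py,
# and put(None) stops the worker thread.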
class WatchdogHandler(FileSystemEventHandler):
def __init__(self, video_queue, video_ext):
super(WatchdogHandler, self).__init__()
self._video_queue = video_queue
self._video_ext = video_ext
def on_created(self, event):
path = event.src_path
if not path.endswith(self._video_ext):
return
print('%s: on_created(%s)' % (get_time(time.time()), path))
print_file_info(path)
self._video_queue.put('')
def on_modified(self, event):
path = event.src_path
if not path.endswith(self._video_ext):
return
print('%s: on_modified(%s)' % (get_time(time.time()), path))
print_file_info(path)
self._video_queue.put(path)
def on_deleted(self, event):
path = event.src_path
if not path.endswith(self._video_ext):
return
print('%s: on_deleted(%s)' % (get_time(time.time()), path))
def main():
video_queue = queue.Queue()
args = get_args()
video_dir = args['video_dir']
video_ext = args['video_ext']
    watchdog_dir = os.path.expanduser(video_dir)
    watchdog_handler = WatchdogHandler(video_queue, video_ext)
observer = Observer()
observer.schedule(watchdog_handler, watchdog_dir, recursive=False)
observer.start()
ika_thread = threading.Thread(
target=ikalog_with_queue, name='ikalog', args=(video_queue,))
ika_thread.start()
print('==== Started IkaWatcher ====')
print('Automatically run IkaLog when the following files are created.')
print('Target video files: %s' % os.path.join(video_dir, '*%s' % video_ext))
try:
while True:
input('') # Wait a key input.
key = input('Start processing? ([Y] or N): ')
if key in ['', 'Y']:
video_queue.put('') # '' triggers processing a queued value.
except KeyboardInterrupt:
print('==== KeyboardInterrupt ====')
observer.stop()
video_queue.put(None) # None in the queue stops ika_thread.
ika_thread.join()
if __name__ == '__main__':
main()
|
cuteio/cute-python-client
|
refs/heads/master
|
cute.py
|
1
|
def cute():
    print('This is cute egg')
|
madhurrajn/samashthi
|
refs/heads/master
|
lib/gevent/greenlet.py
|
9
|
# Copyright (c) 2009-2012 Denis Bilenko. See LICENSE for details.
import sys
from gevent.hub import GreenletExit
from gevent.hub import InvalidSwitchError
from gevent.hub import PY3
from gevent.hub import PYPY
from gevent.hub import Waiter
from gevent.hub import get_hub
from gevent.hub import getcurrent
from gevent.hub import greenlet
from gevent.hub import iwait
from gevent.hub import reraise
from gevent.hub import wait
from gevent.timeout import Timeout
from gevent._tblib import dump_traceback
from gevent._tblib import load_traceback
from collections import deque
__all__ = ['Greenlet',
'joinall',
'killall']
if PYPY:
import _continuation
_continulet = _continuation.continulet
class SpawnedLink(object):
"""A wrapper around link that calls it in another greenlet.
Can be called only from main loop.
"""
__slots__ = ['callback']
def __init__(self, callback):
if not callable(callback):
raise TypeError("Expected callable: %r" % (callback, ))
self.callback = callback
def __call__(self, source):
g = greenlet(self.callback, get_hub())
g.switch(source)
def __hash__(self):
return hash(self.callback)
def __eq__(self, other):
return self.callback == getattr(other, 'callback', other)
def __str__(self):
return str(self.callback)
def __repr__(self):
return repr(self.callback)
def __getattr__(self, item):
assert item != 'callback'
return getattr(self.callback, item)
class SuccessSpawnedLink(SpawnedLink):
"""A wrapper around link that calls it in another greenlet only if source succeed.
Can be called only from main loop.
"""
__slots__ = []
def __call__(self, source):
if source.successful():
return SpawnedLink.__call__(self, source)
class FailureSpawnedLink(SpawnedLink):
"""A wrapper around link that calls it in another greenlet only if source failed.
Can be called only from main loop.
"""
__slots__ = []
def __call__(self, source):
if not source.successful():
return SpawnedLink.__call__(self, source)
class _lazy(object):
def __init__(self, func):
self.data = (func, func.__name__)
def __get__(self, inst, class_):
if inst is None:
return self
func, name = self.data
value = func(inst)
inst.__dict__[name] = value
return value
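# A note on _lazy (minimal sketch; class `C` is hypothetical): it is a
# cache-on-first-access non-data descriptor. Because the computed value is
# stored in the instance __dict__ under the function's name, later attribute
# lookups bypass the descriptor entirely:
#
#   class C(object):
#       @_lazy
#       def links(self):
#           return deque()   # runs once per instance, then cached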
class Greenlet(greenlet):
"""A light-weight cooperatively-scheduled execution unit.
"""
value = None
_exc_info = ()
_notifier = None
#: An event, such as a timer or a callback that fires. It is established in
#: start() and start_later() as those two objects, respectively.
#: Once this becomes non-None, the Greenlet cannot be started again. Conversely,
#: kill() and throw() check for non-None to determine if this object has ever been
#: scheduled for starting. A placeholder _dummy_event is assigned by them to prevent
#: the greenlet from being started in the future, if necessary.
_start_event = None
args = ()
_kwargs = None
def __init__(self, run=None, *args, **kwargs):
"""
Greenlet constructor.
:param args: The arguments passed to the ``run`` function.
:param kwargs: The keyword arguments passed to the ``run`` function.
:keyword run: The callable object to run. If not given, this object's
`_run` method will be invoked (typically defined by subclasses).
.. versionchanged:: 1.1b1
The ``run`` argument to the constructor is now verified to be a callable
object. Previously, passing a non-callable object would fail after the greenlet
was spawned.
"""
# greenlet.greenlet(run=None, parent=None)
# Calling it with both positional arguments instead of a keyword
# argument (parent=get_hub()) speeds up creation of this object ~30%:
# python -m timeit -s 'import gevent' 'gevent.Greenlet()'
# Python 3.5: 2.70usec with keywords vs 1.94usec with positional
# Python 3.4: 2.32usec with keywords vs 1.74usec with positional
# Python 3.3: 2.55usec with keywords vs 1.92usec with positional
# Python 2.7: 1.73usec with keywords vs 1.40usec with positional
greenlet.__init__(self, None, get_hub())
if run is not None:
self._run = run
# If they didn't pass a callable at all, then they must
# already have one. Note that subclassing to override the run() method
# itself has never been documented or supported.
if not callable(self._run):
raise TypeError("The run argument or self._run must be callable")
if args:
self.args = args
if kwargs:
self._kwargs = kwargs
@property
def kwargs(self):
return self._kwargs or {}
@_lazy
def _links(self):
return deque()
def _has_links(self):
return '_links' in self.__dict__ and self._links
def _raise_exception(self):
reraise(*self.exc_info)
@property
def loop(self):
# needed by killall
return self.parent.loop
def __bool__(self):
return self._start_event is not None and self._exc_info is Greenlet._exc_info
__nonzero__ = __bool__
### Lifecycle
if PYPY:
        # oops - pypy's .dead relies on __nonzero__ which we overrode above
@property
def dead(self):
if self._greenlet__main:
return False
if self.__start_cancelled_by_kill or self.__started_but_aborted:
return True
return self._greenlet__started and not _continulet.is_pending(self)
else:
@property
def dead(self):
return self.__start_cancelled_by_kill or self.__started_but_aborted or greenlet.dead.__get__(self)
@property
def __never_started_or_killed(self):
return self._start_event is None
@property
def __start_pending(self):
return (self._start_event is not None
and (self._start_event.pending or getattr(self._start_event, 'active', False)))
@property
def __start_cancelled_by_kill(self):
return self._start_event is _cancelled_start_event
@property
def __start_completed(self):
return self._start_event is _start_completed_event
@property
def __started_but_aborted(self):
return (not self.__never_started_or_killed # we have been started or killed
and not self.__start_cancelled_by_kill # we weren't killed, so we must have been started
and not self.__start_completed # the start never completed
and not self.__start_pending) # and we're not pending, so we must have been aborted
def __cancel_start(self):
if self._start_event is None:
# prevent self from ever being started in the future
self._start_event = _cancelled_start_event
# cancel any pending start event
# NOTE: If this was a real pending start event, this will leave a
# "dangling" callback/timer object in the hub.loop.callbacks list;
# depending on where we are in the event loop, it may even be in a local
# variable copy of that list (in _run_callbacks). This isn't a problem,
# except for the leak-tests.
self._start_event.stop()
def __handle_death_before_start(self, *args):
# args is (t, v, tb) or simply t or v
if self._exc_info is Greenlet._exc_info and self.dead:
# the greenlet was never switched to before and it will never be, _report_error was not called
# the result was not set and the links weren't notified. let's do it here.
# checking that self.dead is true is essential, because throw() does not necessarily kill the greenlet
# (if the exception raised by throw() is caught somewhere inside the greenlet).
if len(args) == 1:
arg = args[0]
#if isinstance(arg, type):
if type(arg) is type(Exception):
args = (arg, arg(), None)
else:
args = (type(arg), arg, None)
elif not args:
args = (GreenletExit, GreenletExit(), None)
self._report_error(args)
@property
def started(self):
# DEPRECATED
return bool(self)
def ready(self):
"""
Return a true value if and only if the greenlet has finished
execution.
.. versionchanged:: 1.1
This function is only guaranteed to return true or false *values*, not
necessarily the literal constants ``True`` or ``False``.
"""
return self.dead or self._exc_info
def successful(self):
"""
Return a true value if and only if the greenlet has finished execution
successfully, that is, without raising an error.
.. tip:: A greenlet that has been killed with the default
:class:`GreenletExit` exception is considered successful.
That is, ``GreenletExit`` is not considered an error.
.. note:: This function is only guaranteed to return true or false *values*,
not necessarily the literal constants ``True`` or ``False``.
"""
return self._exc_info and self._exc_info[1] is None
def __repr__(self):
classname = self.__class__.__name__
result = '<%s at %s' % (classname, hex(id(self)))
formatted = self._formatinfo()
if formatted:
result += ': ' + formatted
return result + '>'
def _formatinfo(self):
try:
return self._formatted_info
except AttributeError:
pass
try:
result = getfuncname(self.__dict__['_run'])
except Exception:
pass
else:
args = []
if self.args:
args = [repr(x)[:50] for x in self.args]
if self._kwargs:
args.extend(['%s=%s' % (key, repr(value)[:50]) for (key, value) in self._kwargs.items()])
if args:
result += '(' + ', '.join(args) + ')'
# it is important to save the result here, because once the greenlet exits '_run' attribute will be removed
self._formatted_info = result
return result
return ''
@property
def exception(self):
"""Holds the exception instance raised by the function if the greenlet has finished with an error.
Otherwise ``None``.
"""
return self._exc_info[1] if self._exc_info else None
@property
def exc_info(self):
"""Holds the exc_info three-tuple raised by the function if the greenlet finished with an error.
Otherwise a false value."""
e = self._exc_info
if e:
return (e[0], e[1], load_traceback(e[2]))
def throw(self, *args):
"""Immediatelly switch into the greenlet and raise an exception in it.
Should only be called from the HUB, otherwise the current greenlet is left unscheduled forever.
To raise an exception in a safe manner from any greenlet, use :meth:`kill`.
If a greenlet was started but never switched to yet, then also
a) cancel the event that will start it
b) fire the notifications as if an exception was raised in a greenlet
"""
self.__cancel_start()
try:
if not self.dead:
# Prevent switching into a greenlet *at all* if we had never
# started it. Usually this is the same thing that happens by throwing,
# but if this is done from the hub with nothing else running, prevents a
# LoopExit.
greenlet.throw(self, *args)
finally:
self.__handle_death_before_start(*args)
def start(self):
"""Schedule the greenlet to run in this loop iteration"""
if self._start_event is None:
self._start_event = self.parent.loop.run_callback(self.switch)
def start_later(self, seconds):
"""Schedule the greenlet to run in the future loop iteration *seconds* later"""
if self._start_event is None:
self._start_event = self.parent.loop.timer(seconds)
self._start_event.start(self.switch)
@classmethod
def spawn(cls, *args, **kwargs):
"""
Create a new :class:`Greenlet` object and schedule it to run ``function(*args, **kwargs)``.
This can be used as ``gevent.spawn`` or ``Greenlet.spawn``.
The arguments are passed to :meth:`Greenlet.__init__`.
.. versionchanged:: 1.1b1
If a *function* is given that is not callable, immediately raise a :exc:`TypeError`
instead of spawning a greenlet that will raise an uncaught TypeError.
"""
g = cls(*args, **kwargs)
g.start()
return g
@classmethod
def spawn_later(cls, seconds, *args, **kwargs):
"""
Create and return a new Greenlet object scheduled to run ``function(*args, **kwargs)``
in the future loop iteration *seconds* later. This can be used as ``Greenlet.spawn_later``
or ``gevent.spawn_later``.
The arguments are passed to :meth:`Greenlet.__init__`.
.. versionchanged:: 1.1b1
If an argument that's meant to be a function (the first argument in *args*, or the ``run`` keyword )
is given to this classmethod (and not a classmethod of a subclass),
it is verified to be callable. Previously, the spawned greenlet would have failed
when it started running.
"""
if cls is Greenlet and not args and 'run' not in kwargs:
raise TypeError("")
g = cls(*args, **kwargs)
g.start_later(seconds)
return g
def kill(self, exception=GreenletExit, block=True, timeout=None):
"""
Raise the ``exception`` in the greenlet.
If ``block`` is ``True`` (the default), wait until the greenlet dies or the optional timeout expires.
If block is ``False``, the current greenlet is not unscheduled.
The function always returns ``None`` and never raises an error.
.. note::
Depending on what this greenlet is executing and the state
of the event loop, the exception may or may not be raised
immediately when this greenlet resumes execution. It may
be raised on a subsequent green call, or, if this greenlet
exits before making such a call, it may not be raised at
all. As of 1.1, an example where the exception is raised
later is if this greenlet had called :func:`sleep(0)
<gevent.sleep>`; an example where the exception is raised
immediately is if this greenlet had called
:func:`sleep(0.1) <gevent.sleep>`.
See also :func:`gevent.kill`.
:keyword type exception: The type of exception to raise in the greenlet. The default
is :class:`GreenletExit`, which indicates a :meth:`successful` completion
of the greenlet.
.. versionchanged:: 0.13.0
*block* is now ``True`` by default.
.. versionchanged:: 1.1a2
If this greenlet had never been switched to, killing it will prevent it from ever being switched to.
"""
self.__cancel_start()
if self.dead:
self.__handle_death_before_start(exception)
else:
waiter = Waiter() if block else None
self.parent.loop.run_callback(_kill, self, exception, waiter)
if block:
waiter.get()
self.join(timeout)
# it should be OK to use kill() in finally or kill a greenlet from more than one place;
# thus it should not raise when the greenlet is already killed (= not started)
def get(self, block=True, timeout=None):
"""Return the result the greenlet has returned or re-raise the exception it has raised.
If block is ``False``, raise :class:`gevent.Timeout` if the greenlet is still alive.
If block is ``True``, unschedule the current greenlet until the result is available
or the timeout expires. In the latter case, :class:`gevent.Timeout` is raised.
"""
if self.ready():
if self.successful():
return self.value
self._raise_exception()
if not block:
raise Timeout()
switch = getcurrent().switch
self.rawlink(switch)
try:
t = Timeout._start_new_or_dummy(timeout)
try:
result = self.parent.switch()
if result is not self:
raise InvalidSwitchError('Invalid switch into Greenlet.get(): %r' % (result, ))
finally:
t.cancel()
except:
# unlinking in 'except' instead of finally is an optimization:
# if switch occurred normally then link was already removed in _notify_links
# and there's no need to touch the links set.
# Note, however, that if "Invalid switch" assert was removed and invalid switch
# did happen, the link would remain, causing another invalid switch later in this greenlet.
self.unlink(switch)
raise
if self.ready():
if self.successful():
return self.value
self._raise_exception()
def join(self, timeout=None):
"""Wait until the greenlet finishes or *timeout* expires.
Return ``None`` regardless.
"""
if self.ready():
return
switch = getcurrent().switch
self.rawlink(switch)
try:
t = Timeout._start_new_or_dummy(timeout)
try:
result = self.parent.switch()
if result is not self:
raise InvalidSwitchError('Invalid switch into Greenlet.join(): %r' % (result, ))
finally:
t.cancel()
except Timeout as ex:
self.unlink(switch)
if ex is not t:
raise
except:
self.unlink(switch)
raise
def _report_result(self, result):
self._exc_info = (None, None, None)
self.value = result
if self._has_links() and not self._notifier:
self._notifier = self.parent.loop.run_callback(self._notify_links)
def _report_error(self, exc_info):
if isinstance(exc_info[1], GreenletExit):
self._report_result(exc_info[1])
return
self._exc_info = exc_info[0], exc_info[1], dump_traceback(exc_info[2])
if self._has_links() and not self._notifier:
self._notifier = self.parent.loop.run_callback(self._notify_links)
try:
self.parent.handle_error(self, *exc_info)
finally:
del exc_info
def run(self):
try:
self.__cancel_start()
self._start_event = _start_completed_event
try:
result = self._run(*self.args, **self.kwargs)
except:
self._report_error(sys.exc_info())
return
self._report_result(result)
finally:
self.__dict__.pop('_run', None)
self.__dict__.pop('args', None)
self.__dict__.pop('kwargs', None)
def _run(self):
"""Subclasses may override this method to take any number of arguments and keyword arguments.
.. versionadded:: 1.1a3
Previously, if no callable object was passed to the constructor, the spawned greenlet would
later fail with an AttributeError.
"""
return
def rawlink(self, callback):
"""Register a callable to be executed when the greenlet finishes execution.
The *callback* will be called with this instance as an argument.
.. caution:: The callable will be called in the HUB greenlet.
"""
if not callable(callback):
raise TypeError('Expected callable: %r' % (callback, ))
self._links.append(callback)
if self.ready() and self._links and not self._notifier:
self._notifier = self.parent.loop.run_callback(self._notify_links)
def link(self, callback, SpawnedLink=SpawnedLink):
"""Link greenlet's completion to a callable.
The *callback* will be called with this instance as an argument
once this greenlet's dead. A callable is called in its own greenlet.
"""
self.rawlink(SpawnedLink(callback))
def unlink(self, callback):
"""Remove the callback set by :meth:`link` or :meth:`rawlink`"""
try:
self._links.remove(callback)
except ValueError:
pass
def link_value(self, callback, SpawnedLink=SuccessSpawnedLink):
"""Like :meth:`link` but *callback* is only notified when the greenlet has completed successfully."""
self.link(callback, SpawnedLink=SpawnedLink)
def link_exception(self, callback, SpawnedLink=FailureSpawnedLink):
"""Like :meth:`link` but *callback* is only notified when the greenlet dies because of an unhandled exception."""
self.link(callback, SpawnedLink=SpawnedLink)
def _notify_links(self):
while self._links:
link = self._links.popleft()
try:
link(self)
except:
self.parent.handle_error((link, self), *sys.exc_info())
class _dummy_event(object):
pending = False
active = False
def stop(self):
pass
_cancelled_start_event = _dummy_event()
_start_completed_event = _dummy_event()
del _dummy_event
def _kill(greenlet, exception, waiter):
try:
greenlet.throw(exception)
except:
# XXX do we need this here?
greenlet.parent.handle_error(greenlet, *sys.exc_info())
if waiter is not None:
waiter.switch()
def joinall(greenlets, timeout=None, raise_error=False, count=None):
"""
Wait for the ``greenlets`` to finish.
:param greenlets: A sequence (supporting :func:`len`) of greenlets to wait for.
:keyword float timeout: If given, the maximum number of seconds to wait.
:return: A sequence of the greenlets that finished before the timeout (if any)
expired.
"""
if not raise_error:
return wait(greenlets, timeout=timeout, count=count)
done = []
for obj in iwait(greenlets, timeout=timeout, count=count):
if getattr(obj, 'exception', None) is not None:
if hasattr(obj, '_raise_exception'):
obj._raise_exception()
else:
raise obj.exception
done.append(obj)
return done
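# A minimal usage sketch (``fetch`` and ``urls`` are hypothetical):
#
#   jobs = [Greenlet.spawn(fetch, url) for url in urls]
#   joinall(jobs, timeout=5, raise_error=True)
#
# waits up to 5 seconds for the jobs and re-raises the first error raised
# by any of them.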
def _killall3(greenlets, exception, waiter):
diehards = []
for g in greenlets:
if not g.dead:
try:
g.throw(exception)
except:
g.parent.handle_error(g, *sys.exc_info())
if not g.dead:
diehards.append(g)
waiter.switch(diehards)
def _killall(greenlets, exception):
for g in greenlets:
if not g.dead:
try:
g.throw(exception)
except:
g.parent.handle_error(g, *sys.exc_info())
def killall(greenlets, exception=GreenletExit, block=True, timeout=None):
"""
    Forcibly terminate all the ``greenlets`` by causing them to raise ``exception``.
:param greenlets: A **bounded** iterable of the non-None greenlets to terminate.
*All* the items in this iterable must be greenlets that belong to the same thread.
:keyword exception: The exception to raise in the greenlets. By default this is
:class:`GreenletExit`.
:keyword bool block: If True (the default) then this function only returns when all the
greenlets are dead; the current greenlet is unscheduled during that process.
If greenlets ignore the initial exception raised in them,
then they will be joined (with :func:`gevent.joinall`) and allowed to die naturally.
If False, this function returns immediately and greenlets will raise
the exception asynchronously.
:keyword float timeout: A time in seconds to wait for greenlets to die. If given, it is
only honored when ``block`` is True.
:raise Timeout: If blocking and a timeout is given that elapses before
all the greenlets are dead.
.. versionchanged:: 1.1a2
*greenlets* can be any iterable of greenlets, like an iterator or a set.
Previously it had to be a list or tuple.
"""
# support non-indexable containers like iterators or set objects
greenlets = list(greenlets)
if not greenlets:
return
loop = greenlets[0].loop
if block:
waiter = Waiter()
loop.run_callback(_killall3, greenlets, exception, waiter)
t = Timeout._start_new_or_dummy(timeout)
try:
alive = waiter.get()
if alive:
joinall(alive, raise_error=False)
finally:
t.cancel()
else:
loop.run_callback(_killall, greenlets, exception)
if PY3:
_meth_self = "__self__"
else:
_meth_self = "im_self"
def getfuncname(func):
if not hasattr(func, _meth_self):
try:
funcname = func.__name__
except AttributeError:
pass
else:
if funcname != '<lambda>':
return funcname
return repr(func)
|
lmmsoft/LeetCode
|
refs/heads/master
|
LeetCode-Algorithm/0852. Peak Index in a Mountain Array/852.py
|
1
|
from typing import List
class Solution:
def peakIndexInMountainArray1(self, A: List[int]) -> int:
return A.index(max(A))
def peakIndexInMountainArray2(self, A: List[int]) -> int:
l, r = 0, len(A) - 1
while l < r:
mid = (l + r) // 2
            if A[mid] > A[mid + 1]:  # mid rounds down, so A[mid + 1] stays in bounds
r = mid
else:
                l = mid + 1  # A[mid] < A[mid + 1], so A[mid] cannot be the peak; moving l past mid is safe (l = mid would loop forever)
return l
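    # Worked example for the binary search above, A = [0, 2, 1, 0]:
    #   l=0, r=3 -> mid=1, A[1]=2 > A[2]=1 -> r=1
    #   l=0, r=1 -> mid=0, A[0]=0 < A[1]=2 -> l=1
    #   l == r == 1, which is the peak index.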
# Golden-section search
def peakIndexInMountainArray(self, A: List[int]) -> int:
def get_left(l, r):
            return l + int(round((r - l) * 0.382))
def get_right(l, r):
            return l + int(round((r - l) * 0.618))
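        # 0.382 and 0.618 approximate 1/phi**2 and 1/phi (phi being the
        # golden ratio), which is what lets one probe be reused per iteration.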
l, r = 0, len(A) - 1
x, y = get_left(l, r), get_right(l, r)
while x < y:
if A[x] < A[y]:
# in [x, r]
l = x
x = y
y = get_right(l, r)
else:
# in [l,y]
r = y
y = x
x = get_left(l, r)
# return A.index(max(A[l:r + 1]), l )
return A.index(max(A[l:r + 1]))
if __name__ == '__main__':
assert Solution().peakIndexInMountainArray([0, 1, 0]) == 1
assert Solution().peakIndexInMountainArray([0, 2, 1, 0]) == 1
assert Solution().peakIndexInMountainArray([3, 4, 5, 1]) == 2
|
xen0l/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/edgeos/edgeos_facts.py
|
68
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2018 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: edgeos_facts
version_added: "2.5"
author:
- Nathaniel Case (@qalthos)
- Sam Doran (@samdoran)
short_description: Collect facts from remote devices running EdgeOS
description:
- Collects a base set of device facts from a remote device that
is running EdgeOS. This module prepends all of the
base network fact keys with U(ansible_net_<fact>). The facts
module will always collect a base set of facts from the device
and can enable or disable collection of additional facts.
notes:
- Tested against EdgeOS 1.9.7
options:
gather_subset:
description:
- When supplied, this argument will restrict the facts collected
to a given subset. Possible values for this argument include
all, default, config, and neighbors. Can specify a list of
values to include a larger subset. Values can also be used
        with an initial C(!) to specify that a specific subset should
not be collected.
required: false
default: "!config"
"""
EXAMPLES = """
- name: collect all facts from the device
edgeos_facts:
gather_subset: all
- name: collect only the config and default facts
edgeos_facts:
gather_subset: config
- name: collect everything except the config
edgeos_facts:
gather_subset: "!config"
"""
RETURN = """
ansible_net_config:
description: The running-config from the device
returned: when config is configured
type: str
ansible_net_commits:
description: The set of available configuration revisions
returned: when present
type: list
ansible_net_hostname:
description: The configured system hostname
returned: always
type: str
ansible_net_model:
description: The device model string
returned: always
type: str
ansible_net_serialnum:
description: The serial number of the device
returned: always
type: str
ansible_net_version:
description: The version of the software running
returned: always
type: str
ansible_net_neighbors:
description: The set of LLDP neighbors
returned: when interface is configured
type: list
ansible_net_gather_subset:
description: The list of subsets gathered by the module
returned: always
type: list
"""
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import iteritems
from ansible.module_utils.network.edgeos.edgeos import run_commands
class FactsBase(object):
COMMANDS = frozenset()
def __init__(self, module):
self.module = module
self.facts = dict()
self.responses = None
def populate(self):
self.responses = run_commands(self.module, list(self.COMMANDS))
class Default(FactsBase):
COMMANDS = [
'show version',
'show host name',
]
def populate(self):
super(Default, self).populate()
data = self.responses[0]
self.facts['version'] = self.parse_version(data)
self.facts['serialnum'] = self.parse_serialnum(data)
self.facts['model'] = self.parse_model(data)
self.facts['hostname'] = self.responses[1]
def parse_version(self, data):
match = re.search(r'Version:\s*v(\S+)', data)
if match:
return match.group(1)
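    # Illustrative input (hypothetical): a 'show version' line such as
    # 'Version:      v1.9.7+hotfix.4' yields '1.9.7+hotfix.4'.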
def parse_model(self, data):
match = re.search(r'HW model:\s*([A-Za-z0-9- ]+)', data)
if match:
return match.group(1)
def parse_serialnum(self, data):
match = re.search(r'HW S/N:\s+(\S+)', data)
if match:
return match.group(1)
class Config(FactsBase):
COMMANDS = [
'show configuration commands',
'show system commit',
]
def populate(self):
super(Config, self).populate()
self.facts['config'] = self.responses
commits = self.responses[1]
entries = list()
entry = None
for line in commits.split('\n'):
match = re.match(r'(\d+)\s+(.+)by(.+)via(.+)', line)
if match:
if entry:
entries.append(entry)
entry = dict(revision=match.group(1),
datetime=match.group(2),
by=str(match.group(3)).strip(),
via=str(match.group(4)).strip(),
comment=None)
elif entry:
entry['comment'] = line.strip()
self.facts['commits'] = entries
class Neighbors(FactsBase):
COMMANDS = [
'show lldp neighbors',
'show lldp neighbors detail',
]
def populate(self):
super(Neighbors, self).populate()
all_neighbors = self.responses[0]
if 'LLDP not configured' not in all_neighbors:
neighbors = self.parse(
self.responses[1]
)
self.facts['neighbors'] = self.parse_neighbors(neighbors)
def parse(self, data):
parsed = list()
values = None
for line in data.split('\n'):
if not line:
continue
elif line[0] == ' ':
values += '\n%s' % line
elif line.startswith('Interface'):
if values:
parsed.append(values)
values = line
if values:
parsed.append(values)
return parsed
def parse_neighbors(self, data):
facts = dict()
for item in data:
interface = self.parse_interface(item)
host = self.parse_host(item)
port = self.parse_port(item)
if interface not in facts:
facts[interface] = list()
facts[interface].append(dict(host=host, port=port))
return facts
def parse_interface(self, data):
match = re.search(r'^Interface:\s+(\S+),', data)
return match.group(1)
def parse_host(self, data):
match = re.search(r'SysName:\s+(.+)$', data, re.M)
if match:
return match.group(1)
def parse_port(self, data):
match = re.search(r'PortDescr:\s+(.+)$', data, re.M)
if match:
return match.group(1)
FACT_SUBSETS = dict(
default=Default,
neighbors=Neighbors,
config=Config
)
VALID_SUBSETS = frozenset(FACT_SUBSETS.keys())
def main():
spec = dict(
gather_subset=dict(default=['!config'], type='list')
)
module = AnsibleModule(argument_spec=spec,
supports_check_mode=True)
warnings = list()
gather_subset = module.params['gather_subset']
runable_subsets = set()
exclude_subsets = set()
for subset in gather_subset:
if subset == 'all':
runable_subsets.update(VALID_SUBSETS)
continue
if subset.startswith('!'):
subset = subset[1:]
if subset == 'all':
exclude_subsets.update(VALID_SUBSETS)
continue
exclude = True
else:
exclude = False
if subset not in VALID_SUBSETS:
module.fail_json(msg='Subset must be one of [%s], got %s' %
(', '.join(VALID_SUBSETS), subset))
if exclude:
exclude_subsets.add(subset)
else:
runable_subsets.add(subset)
if not runable_subsets:
runable_subsets.update(VALID_SUBSETS)
runable_subsets.difference_update(exclude_subsets)
runable_subsets.add('default')
facts = dict()
facts['gather_subset'] = list(runable_subsets)
instances = list()
for key in runable_subsets:
instances.append(FACT_SUBSETS[key](module))
for inst in instances:
inst.populate()
facts.update(inst.facts)
ansible_facts = dict()
for key, value in iteritems(facts):
key = 'ansible_net_%s' % key
ansible_facts[key] = value
module.exit_json(ansible_facts=ansible_facts, warnings=warnings)
if __name__ == '__main__':
main()
|
TNick/pylearn2
|
refs/heads/master
|
pylearn2/datasets/tests/test_wiskott.py
|
45
|
"""module to test datasets.wiskott"""
from pylearn2.datasets.wiskott import Wiskott
import unittest
from pylearn2.testing.skip import skip_if_no_data
from pylearn2.utils import isfinite
import numpy as np
def test_wiskott():
"""loads wiskott dataset"""
skip_if_no_data()
data = Wiskott()
assert isfinite(data.X)
|
AlanZatarain/visvis.dev
|
refs/heads/master
|
core/light.py
|
5
|
# -*- coding: utf-8 -*-
# Copyright (C) 2012, Almar Klein
#
# Visvis is distributed under the terms of the (new) BSD License.
# The full license can be found in 'license.txt'.
""" Module light
Defines a light source to light polygonal surfaces. Each axes has up
to eight lights associated with it.
"""
import OpenGL.GL as gl
from visvis.core import misc
from visvis.core.misc import PropWithDraw, DrawAfter
def _testColor(value, canBeScalar=True):
""" _testColor(value, canBeScalar=True)
Tests a color whether it is a sequence of 3 or 4 values.
    It returns a 4 element tuple or raises an error if the supplied
data is incorrect.
"""
# Deal with named colors
if isinstance(value, basestring):
value = misc.getColor(value)
# Value can be a scalar
if canBeScalar and isinstance(value, (int, float)):
if value <= 0:
value = 0.0
if value >= 1:
value = 1.0
return value
# Otherwise it must be a sequence of 3 or 4 elements
elif not hasattr(value, '__len__'):
raise ValueError("Given value can not represent a color.")
elif len(value) == 4:
return (value[0], value[1], value[2], value[3])
elif len(value) == 3:
return (value[0], value[1], value[2], 1.0)
else:
raise ValueError("Given value can not represent a color.")
def _getColor(color, ref):
""" _getColor(color, reference)
Get the real color as a 4 element tuple, using the reference
color if the given color is a scalar.
"""
if isinstance(color, float):
return (color*ref[0], color*ref[1], color*ref[2], ref[3])
else:
return color
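# For example (illustrative): _getColor(0.5, (1.0, 0.0, 0.0, 1.0)) scales the
# reference color to (0.5, 0.0, 0.0, 1.0), while a tuple is returned as-is.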
# todo: implement spot light and attenuation
class Light(object):
""" Light(axes, index)
A Light object represents a light source in the scene. It
determines how lit objects (such as Mesh objects) are visualized.
Each axes has 8 light sources, of which the 0th is turned on
    by default. The 0th light source provides the ambient light in the
scene (the ambient component is 0 by default for the other light
sources). Obtain the lights using the axes.light0 and axes.lights
properties.
The 0th light source is a directional camera light by default; it
shines in the direction in which you look. The other lights are
oriented at the origin by default.
"""
def __init__(self, axes, index):
# Store axes and index of the light (OpenGl can handle up to 8 lights)
self._axes = axes.GetWeakref()
self._index = index
self._on = False
# The three light properties
self._color = (1, 1, 1, 1)
self._ambient = 0.0
self._diffuse = 1.0
self._specular = 1.0
        # The main light has an ambient component by default
if index == 0:
self._ambient = 0.2
# Position or direction
if index == 0:
self._position = (0,0,1,0)
self._camLight = True
else:
self._position = (0,0,0,1)
self._camLight = False
def Draw(self):
# Draw axes
axes = self._axes()
if axes:
axes.Draw()
@PropWithDraw
def color():
""" Get/Set the reference color of the light. If the ambient,
diffuse or specular properties specify a scalar, that scalar
represents the fraction of *this* color.
"""
def fget(self):
return self._color
def fset(self, value):
self._color = _testColor(value, True)
return locals()
@PropWithDraw
def ambient():
""" Get/Set the ambient color of the light. This is the color
that is everywhere, coming from all directions, independent of
the light position.
The value can be a 3- or 4-element tuple, a character in
"rgbycmkw", or a scalar between 0 and 1 that indicates the
fraction of the reference color.
"""
def fget(self):
return self._ambient
def fset(self, value):
self._ambient = _testColor(value)
return locals()
@PropWithDraw
def diffuse():
""" Get/Set the diffuse color of the light. This component is the
light that comes from one direction, so it's brighter if it comes
squarely down on a surface than if it barely glances off the
surface. It depends on the light position how a material is lit.
"""
def fget(self):
return self._diffuse
def fset(self, value):
self._diffuse = _testColor(value)
return locals()
@PropWithDraw
def specular():
""" Get/Set the specular color of the light. This component
represents the light that comes from the light source and bounces
off a surface in a particular direction. This is what makes
materials appear shiny.
The value can be a 3- or 4-element tuple, a character in
"rgbycmkw", or a scalar between 0 and 1 that indicates the
fraction of the reference color.
"""
def fget(self):
return self._specular
def fset(self, value):
self._specular = _testColor(value)
return locals()
@PropWithDraw
def position():
""" Get/Set the position of the light. Can be represented as a
3 or 4 element tuple. If the fourth element is a 1, the light
        has a position; if it is a 0, it represents a direction (in other words, the
light is a directional light, like the sun).
"""
def fget(self):
return self._position
def fset(self, value):
if len(value) == 3:
self._position = value[0], value[1], value[2], 1
elif len(value) == 4:
self._position = value[0], value[1], value[2], value[3]
else:
tmp = "Light position should be a 3 or 4 element sequence."
raise ValueError(tmp)
return locals()
@PropWithDraw
def isDirectional():
""" Get/Set whether the light is a directional light. A directional
light has no real position (it can be thought of as infinitely far
away), but shines in a particular direction. The sun is a good
example of a directional light.
"""
def fget(self):
return self._position[3] == 0
def fset(self, value):
# Get fourth element
if value:
fourth = 0
else:
fourth = 1
# Set position
tmp = self._position
self._position = tmp[0], tmp[1], tmp[2], fourth
return locals()
@PropWithDraw
def isCamLight():
""" Get/Set whether the light is a camera light. A camera light
moves along with the camera, like the lamp on a miner's hat.
"""
def fget(self):
return self._camLight
def fset(self, value):
self._camLight = bool(value)
return locals()
@DrawAfter
def On(self, on=True):
""" On(on=True)
Turn the light on.
"""
self._on = bool(on)
@DrawAfter
def Off(self):
""" Off()
Turn the light off.
"""
self._on = False
@property
def isOn(self):
""" Get whether the light is on.
"""
return self._on
def _Apply(self):
""" _Apply()
Apply the light position and other properties.
"""
thisLight = gl.GL_LIGHT0 + self._index
if self._on:
# Enable and set position
gl.glEnable(thisLight)
gl.glLightfv(thisLight, gl.GL_POSITION, self._position)
# Set colors
amb, dif, spe = gl.GL_AMBIENT, gl.GL_DIFFUSE, gl.GL_SPECULAR
gl.glLightfv(thisLight, amb, _getColor(self._ambient, self._color))
gl.glLightfv(thisLight, dif, _getColor(self._diffuse, self._color))
gl.glLightfv(thisLight, spe, _getColor(self._specular, self._color))
else:
            # a null position means that the light is off
gl.glLightfv(thisLight, gl.GL_POSITION, (0.0, 0.0, 0.0, 0.0))
gl.glDisable(thisLight)
|
gauribhoite/personfinder
|
refs/heads/master
|
env/google_appengine/backends_conversion.py
|
69
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Convenience wrapper for starting an appengine tool."""
import os
import sys
import time
sys_path = sys.path
try:
sys.path = [os.path.dirname(__file__)] + sys.path
import wrapper_util
finally:
sys.path = sys_path
wrapper_util.reject_old_python_versions((2, 5))
if sys.version_info < (2, 6):
sys.stderr.write(
'WARNING: In an upcoming release the SDK will no longer support Python'
' 2.5. Users should upgrade to Python 2.6 or higher.\n')
time.sleep(1)
def get_dir_path(sibling):
"""Get a path to the directory of this script.
By default, the canonical path (symlinks resolved) will be returned. In some
environments the canonical directory is not sufficient because different
parts of the SDK are referenced by symlinks, including this very module's
file. In this case, the non-canonical path to this file's directory will be
returned (i.e., the directory where the symlink lives, not the directory
where it points).
Args:
sibling: Relative path to a sibling of this module file. Choose a sibling
that is potentially symlinked into the parent directory.
Returns:
A directory name.
Raises:
ValueError: If no proper path could be determined.
"""
return wrapper_util.get_dir_path(__file__, sibling)
DIR_PATH = get_dir_path(os.path.join('lib', 'ipaddr'))
_PATHS = wrapper_util.Paths(DIR_PATH)
SCRIPT_DIR = _PATHS.default_script_dir
GOOGLE_SQL_DIR = _PATHS.google_sql_dir
EXTRA_PATHS = _PATHS.v1_extra_paths
API_SERVER_EXTRA_PATHS = _PATHS.api_server_extra_paths
ENDPOINTSCFG_EXTRA_PATHS = _PATHS.endpointscfg_extra_paths
OAUTH_CLIENT_EXTRA_PATHS = _PATHS.oauth_client_extra_paths
GOOGLE_SQL_EXTRA_PATHS = _PATHS.google_sql_extra_paths
def fix_sys_path(extra_extra_paths=()):
"""Fix the sys.path to include our extra paths."""
sys.path = EXTRA_PATHS + list(extra_extra_paths) + sys.path
def run_file(file_path, globals_):
"""Execute the given script with the passed-in globals.
Args:
file_path: the path to the wrapper for the given script. This will usually
be a copy of this file.
globals_: the global bindings to be used while executing the wrapped script.
"""
script_name = os.path.basename(file_path)
sys.path = (_PATHS.script_paths(script_name) +
_PATHS.scrub_path(script_name, sys.path))
if 'google' in sys.modules:
del sys.modules['google']
execfile(_PATHS.script_file(script_name), globals_)
if __name__ == '__main__':
run_file(__file__, globals())
|
stefanv/scipy3
|
refs/heads/master
|
scipy/sparse/extract.py
|
3
|
"""Functions to extract parts of sparse matrices
"""
__docformat__ = "restructuredtext en"
__all__ = ['find', 'tril', 'triu']
from coo import coo_matrix
def find(A):
"""Return the indices and values of the nonzero elements of a matrix
Parameters
----------
A : dense or sparse matrix
Matrix whose nonzero elements are desired.
Returns
-------
(I,J,V) : tuple of arrays
I,J, and V contain the row indices, column indices, and values
of the nonzero matrix entries.
    Examples
    --------
>>> from scipy.sparse import csr_matrix
>>> A = csr_matrix([[7.0, 8.0, 0],[0, 0, 9.0]])
>>> find(A)
(array([0, 0, 1], dtype=int32), array([0, 1, 2], dtype=int32), array([ 7., 8., 9.]))
"""
A = coo_matrix(A).tocsr() #sums duplicates
A.eliminate_zeros() #removes explicit zeros
A = A.tocoo(copy=False) #(cheaply) convert to COO
return A.row,A.col,A.data
def tril(A, k=0, format=None):
"""Return the lower triangular portion of a matrix in sparse format
Returns the elements on or below the k-th diagonal of the matrix A.
- k = 0 corresponds to the main diagonal
- k > 0 is above the main diagonal
- k < 0 is below the main diagonal
Parameters
----------
A : dense or sparse matrix
        Matrix whose lower triangular portion is desired.
    k : integer, optional
The top-most diagonal of the lower triangle.
format : string
Sparse format of the result, e.g. format="csr", etc.
Returns
-------
L : sparse matrix
Lower triangular portion of A in sparse format.
See Also
--------
triu : upper triangle in sparse format
Examples
--------
>>> from scipy.sparse import csr_matrix
>>> A = csr_matrix( [[1,2,0,0,3],[4,5,0,6,7],[0,0,8,9,0]], dtype='int32' )
>>> A.todense()
matrix([[1, 2, 0, 0, 3],
[4, 5, 0, 6, 7],
[0, 0, 8, 9, 0]])
>>> tril(A).todense()
matrix([[1, 0, 0, 0, 0],
[4, 5, 0, 0, 0],
[0, 0, 8, 0, 0]])
>>> tril(A).nnz
4
>>> tril(A, k=1).todense()
matrix([[1, 2, 0, 0, 0],
[4, 5, 0, 0, 0],
[0, 0, 8, 9, 0]])
>>> tril(A, k=-1).todense()
matrix([[0, 0, 0, 0, 0],
[4, 0, 0, 0, 0],
[0, 0, 0, 0, 0]])
>>> tril(A, format='csc')
<3x5 sparse matrix of type '<type 'numpy.int32'>'
with 4 stored elements in Compressed Sparse Column format>
"""
# convert to COOrdinate format where things are easy
A = coo_matrix(A, copy=False)
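    # keep entry (i, j) iff j <= i + k, i.e. it lies on or below diagonal k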
mask = A.row + k >= A.col
row = A.row[mask]
col = A.col[mask]
data = A.data[mask]
return coo_matrix( (data,(row,col)), shape=A.shape ).asformat(format)
def triu(A, k=0, format=None):
"""Return the upper triangular portion of a matrix in sparse format
Returns the elements on or above the k-th diagonal of the matrix A.
- k = 0 corresponds to the main diagonal
- k > 0 is above the main diagonal
- k < 0 is below the main diagonal
Parameters
----------
A : dense or sparse matrix
        Matrix whose upper triangular portion is desired.
    k : integer, optional
The bottom-most diagonal of the upper triangle.
format : string
Sparse format of the result, e.g. format="csr", etc.
Returns
-------
    U : sparse matrix
Upper triangular portion of A in sparse format.
See Also
--------
tril : lower triangle in sparse format
Examples
--------
>>> from scipy.sparse import csr_matrix
>>> A = csr_matrix( [[1,2,0,0,3],[4,5,0,6,7],[0,0,8,9,0]], dtype='int32' )
>>> A.todense()
matrix([[1, 2, 0, 0, 3],
[4, 5, 0, 6, 7],
[0, 0, 8, 9, 0]])
>>> triu(A).todense()
matrix([[1, 2, 0, 0, 3],
[0, 5, 0, 6, 7],
[0, 0, 8, 9, 0]])
>>> triu(A).nnz
8
>>> triu(A, k=1).todense()
matrix([[0, 2, 0, 0, 3],
[0, 0, 0, 6, 7],
[0, 0, 0, 9, 0]])
>>> triu(A, k=-1).todense()
matrix([[1, 2, 0, 0, 3],
[4, 5, 0, 6, 7],
[0, 0, 8, 9, 0]])
>>> triu(A, format='csc')
<3x5 sparse matrix of type '<type 'numpy.int32'>'
with 8 stored elements in Compressed Sparse Column format>
"""
# convert to COOrdinate format where things are easy
A = coo_matrix(A, copy=False)
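    # keep entry (i, j) iff j >= i + k, i.e. it lies on or above diagonal k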
mask = A.row + k <= A.col
row = A.row[mask]
col = A.col[mask]
data = A.data[mask]
return coo_matrix( (data,(row,col)), shape=A.shape ).asformat(format)
|
LaoZhongGu/kbengine
|
refs/heads/master
|
kbe/res/scripts/common/Lib/test/test_with.py
|
83
|
#!/usr/bin/env python3
"""Unit tests for the with statement specified in PEP 343."""
__author__ = "Mike Bland"
__email__ = "mbland at acm dot org"
import sys
import unittest
from collections import deque
from contextlib import _GeneratorContextManager, contextmanager
from test.support import run_unittest
class MockContextManager(_GeneratorContextManager):
def __init__(self, func, *args, **kwds):
super().__init__(func, *args, **kwds)
self.enter_called = False
self.exit_called = False
self.exit_args = None
def __enter__(self):
self.enter_called = True
return _GeneratorContextManager.__enter__(self)
def __exit__(self, type, value, traceback):
self.exit_called = True
self.exit_args = (type, value, traceback)
return _GeneratorContextManager.__exit__(self, type,
value, traceback)
def mock_contextmanager(func):
def helper(*args, **kwds):
return MockContextManager(func, *args, **kwds)
return helper
class MockResource(object):
def __init__(self):
self.yielded = False
self.stopped = False
@mock_contextmanager
def mock_contextmanager_generator():
mock = MockResource()
try:
mock.yielded = True
yield mock
finally:
mock.stopped = True
class Nested(object):
def __init__(self, *managers):
self.managers = managers
self.entered = None
def __enter__(self):
if self.entered is not None:
raise RuntimeError("Context is not reentrant")
self.entered = deque()
vars = []
try:
for mgr in self.managers:
vars.append(mgr.__enter__())
self.entered.appendleft(mgr)
except:
if not self.__exit__(*sys.exc_info()):
raise
return vars
def __exit__(self, *exc_info):
# Behave like nested with statements
# first in, last out
# New exceptions override old ones
ex = exc_info
for mgr in self.entered:
try:
if mgr.__exit__(*ex):
ex = (None, None, None)
except:
ex = sys.exc_info()
self.entered = None
if ex is not exc_info:
raise ex[0](ex[1]).with_traceback(ex[2])
class MockNested(Nested):
def __init__(self, *managers):
Nested.__init__(self, *managers)
self.enter_called = False
self.exit_called = False
self.exit_args = None
def __enter__(self):
self.enter_called = True
return Nested.__enter__(self)
def __exit__(self, *exc_info):
self.exit_called = True
self.exit_args = exc_info
return Nested.__exit__(self, *exc_info)
class FailureTestCase(unittest.TestCase):
def testNameError(self):
def fooNotDeclared():
with foo: pass
self.assertRaises(NameError, fooNotDeclared)
def testEnterAttributeError(self):
class LacksEnter(object):
def __exit__(self, type, value, traceback):
pass
def fooLacksEnter():
foo = LacksEnter()
with foo: pass
self.assertRaises(AttributeError, fooLacksEnter)
def testExitAttributeError(self):
class LacksExit(object):
def __enter__(self):
pass
def fooLacksExit():
foo = LacksExit()
with foo: pass
self.assertRaises(AttributeError, fooLacksExit)
def assertRaisesSyntaxError(self, codestr):
def shouldRaiseSyntaxError(s):
compile(s, '', 'single')
self.assertRaises(SyntaxError, shouldRaiseSyntaxError, codestr)
def testAssignmentToNoneError(self):
self.assertRaisesSyntaxError('with mock as None:\n pass')
self.assertRaisesSyntaxError(
'with mock as (None):\n'
' pass')
def testAssignmentToEmptyTupleError(self):
self.assertRaisesSyntaxError(
'with mock as ():\n'
' pass')
def testAssignmentToTupleOnlyContainingNoneError(self):
self.assertRaisesSyntaxError('with mock as None,:\n pass')
self.assertRaisesSyntaxError(
'with mock as (None,):\n'
' pass')
def testAssignmentToTupleContainingNoneError(self):
self.assertRaisesSyntaxError(
'with mock as (foo, None, bar):\n'
' pass')
def testEnterThrows(self):
class EnterThrows(object):
def __enter__(self):
raise RuntimeError("Enter threw")
def __exit__(self, *args):
pass
def shouldThrow():
ct = EnterThrows()
self.foo = None
with ct as self.foo:
pass
self.assertRaises(RuntimeError, shouldThrow)
self.assertEqual(self.foo, None)
def testExitThrows(self):
class ExitThrows(object):
def __enter__(self):
return
def __exit__(self, *args):
raise RuntimeError(42)
def shouldThrow():
with ExitThrows():
pass
self.assertRaises(RuntimeError, shouldThrow)
class ContextmanagerAssertionMixin(object):
def setUp(self):
self.TEST_EXCEPTION = RuntimeError("test exception")
def assertInWithManagerInvariants(self, mock_manager):
self.assertTrue(mock_manager.enter_called)
self.assertFalse(mock_manager.exit_called)
self.assertEqual(mock_manager.exit_args, None)
def assertAfterWithManagerInvariants(self, mock_manager, exit_args):
self.assertTrue(mock_manager.enter_called)
self.assertTrue(mock_manager.exit_called)
self.assertEqual(mock_manager.exit_args, exit_args)
def assertAfterWithManagerInvariantsNoError(self, mock_manager):
self.assertAfterWithManagerInvariants(mock_manager,
(None, None, None))
def assertInWithGeneratorInvariants(self, mock_generator):
self.assertTrue(mock_generator.yielded)
self.assertFalse(mock_generator.stopped)
def assertAfterWithGeneratorInvariantsNoError(self, mock_generator):
self.assertTrue(mock_generator.yielded)
self.assertTrue(mock_generator.stopped)
def raiseTestException(self):
raise self.TEST_EXCEPTION
def assertAfterWithManagerInvariantsWithError(self, mock_manager,
exc_type=None):
self.assertTrue(mock_manager.enter_called)
self.assertTrue(mock_manager.exit_called)
if exc_type is None:
self.assertEqual(mock_manager.exit_args[1], self.TEST_EXCEPTION)
exc_type = type(self.TEST_EXCEPTION)
self.assertEqual(mock_manager.exit_args[0], exc_type)
# Test the __exit__ arguments. Issue #7853
self.assertIsInstance(mock_manager.exit_args[1], exc_type)
self.assertIsNot(mock_manager.exit_args[2], None)
def assertAfterWithGeneratorInvariantsWithError(self, mock_generator):
self.assertTrue(mock_generator.yielded)
self.assertTrue(mock_generator.stopped)
class NonexceptionalTestCase(unittest.TestCase, ContextmanagerAssertionMixin):
def testInlineGeneratorSyntax(self):
with mock_contextmanager_generator():
pass
def testUnboundGenerator(self):
mock = mock_contextmanager_generator()
with mock:
pass
self.assertAfterWithManagerInvariantsNoError(mock)
def testInlineGeneratorBoundSyntax(self):
with mock_contextmanager_generator() as foo:
self.assertInWithGeneratorInvariants(foo)
# FIXME: In the future, we'll try to keep the bound names from leaking
self.assertAfterWithGeneratorInvariantsNoError(foo)
def testInlineGeneratorBoundToExistingVariable(self):
foo = None
with mock_contextmanager_generator() as foo:
self.assertInWithGeneratorInvariants(foo)
self.assertAfterWithGeneratorInvariantsNoError(foo)
def testInlineGeneratorBoundToDottedVariable(self):
with mock_contextmanager_generator() as self.foo:
self.assertInWithGeneratorInvariants(self.foo)
self.assertAfterWithGeneratorInvariantsNoError(self.foo)
def testBoundGenerator(self):
mock = mock_contextmanager_generator()
with mock as foo:
self.assertInWithGeneratorInvariants(foo)
self.assertInWithManagerInvariants(mock)
self.assertAfterWithGeneratorInvariantsNoError(foo)
self.assertAfterWithManagerInvariantsNoError(mock)
def testNestedSingleStatements(self):
mock_a = mock_contextmanager_generator()
with mock_a as foo:
mock_b = mock_contextmanager_generator()
with mock_b as bar:
self.assertInWithManagerInvariants(mock_a)
self.assertInWithManagerInvariants(mock_b)
self.assertInWithGeneratorInvariants(foo)
self.assertInWithGeneratorInvariants(bar)
self.assertAfterWithManagerInvariantsNoError(mock_b)
self.assertAfterWithGeneratorInvariantsNoError(bar)
self.assertInWithManagerInvariants(mock_a)
self.assertInWithGeneratorInvariants(foo)
self.assertAfterWithManagerInvariantsNoError(mock_a)
self.assertAfterWithGeneratorInvariantsNoError(foo)
class NestedNonexceptionalTestCase(unittest.TestCase,
ContextmanagerAssertionMixin):
def testSingleArgInlineGeneratorSyntax(self):
with Nested(mock_contextmanager_generator()):
pass
def testSingleArgBoundToNonTuple(self):
m = mock_contextmanager_generator()
# This will bind all the arguments to nested() into a single list
# assigned to foo.
with Nested(m) as foo:
self.assertInWithManagerInvariants(m)
self.assertAfterWithManagerInvariantsNoError(m)
def testSingleArgBoundToSingleElementParenthesizedList(self):
m = mock_contextmanager_generator()
# This will bind all the arguments to nested() into a single list
# assigned to foo.
with Nested(m) as (foo):
self.assertInWithManagerInvariants(m)
self.assertAfterWithManagerInvariantsNoError(m)
def testSingleArgBoundToMultipleElementTupleError(self):
def shouldThrowValueError():
with Nested(mock_contextmanager_generator()) as (foo, bar):
pass
self.assertRaises(ValueError, shouldThrowValueError)
def testSingleArgUnbound(self):
mock_contextmanager = mock_contextmanager_generator()
mock_nested = MockNested(mock_contextmanager)
with mock_nested:
self.assertInWithManagerInvariants(mock_contextmanager)
self.assertInWithManagerInvariants(mock_nested)
self.assertAfterWithManagerInvariantsNoError(mock_contextmanager)
self.assertAfterWithManagerInvariantsNoError(mock_nested)
def testMultipleArgUnbound(self):
m = mock_contextmanager_generator()
n = mock_contextmanager_generator()
o = mock_contextmanager_generator()
mock_nested = MockNested(m, n, o)
with mock_nested:
self.assertInWithManagerInvariants(m)
self.assertInWithManagerInvariants(n)
self.assertInWithManagerInvariants(o)
self.assertInWithManagerInvariants(mock_nested)
self.assertAfterWithManagerInvariantsNoError(m)
self.assertAfterWithManagerInvariantsNoError(n)
self.assertAfterWithManagerInvariantsNoError(o)
self.assertAfterWithManagerInvariantsNoError(mock_nested)
def testMultipleArgBound(self):
mock_nested = MockNested(mock_contextmanager_generator(),
mock_contextmanager_generator(), mock_contextmanager_generator())
with mock_nested as (m, n, o):
self.assertInWithGeneratorInvariants(m)
self.assertInWithGeneratorInvariants(n)
self.assertInWithGeneratorInvariants(o)
self.assertInWithManagerInvariants(mock_nested)
self.assertAfterWithGeneratorInvariantsNoError(m)
self.assertAfterWithGeneratorInvariantsNoError(n)
self.assertAfterWithGeneratorInvariantsNoError(o)
self.assertAfterWithManagerInvariantsNoError(mock_nested)
class ExceptionalTestCase(ContextmanagerAssertionMixin, unittest.TestCase):
def testSingleResource(self):
cm = mock_contextmanager_generator()
def shouldThrow():
with cm as self.resource:
self.assertInWithManagerInvariants(cm)
self.assertInWithGeneratorInvariants(self.resource)
self.raiseTestException()
self.assertRaises(RuntimeError, shouldThrow)
self.assertAfterWithManagerInvariantsWithError(cm)
self.assertAfterWithGeneratorInvariantsWithError(self.resource)
def testExceptionNormalized(self):
cm = mock_contextmanager_generator()
def shouldThrow():
with cm as self.resource:
# Note this relies on the fact that 1 // 0 produces an exception
# that is not normalized immediately.
1 // 0
self.assertRaises(ZeroDivisionError, shouldThrow)
self.assertAfterWithManagerInvariantsWithError(cm, ZeroDivisionError)
def testNestedSingleStatements(self):
mock_a = mock_contextmanager_generator()
mock_b = mock_contextmanager_generator()
def shouldThrow():
with mock_a as self.foo:
with mock_b as self.bar:
self.assertInWithManagerInvariants(mock_a)
self.assertInWithManagerInvariants(mock_b)
self.assertInWithGeneratorInvariants(self.foo)
self.assertInWithGeneratorInvariants(self.bar)
self.raiseTestException()
self.assertRaises(RuntimeError, shouldThrow)
self.assertAfterWithManagerInvariantsWithError(mock_a)
self.assertAfterWithManagerInvariantsWithError(mock_b)
self.assertAfterWithGeneratorInvariantsWithError(self.foo)
self.assertAfterWithGeneratorInvariantsWithError(self.bar)
def testMultipleResourcesInSingleStatement(self):
cm_a = mock_contextmanager_generator()
cm_b = mock_contextmanager_generator()
mock_nested = MockNested(cm_a, cm_b)
def shouldThrow():
with mock_nested as (self.resource_a, self.resource_b):
self.assertInWithManagerInvariants(cm_a)
self.assertInWithManagerInvariants(cm_b)
self.assertInWithManagerInvariants(mock_nested)
self.assertInWithGeneratorInvariants(self.resource_a)
self.assertInWithGeneratorInvariants(self.resource_b)
self.raiseTestException()
self.assertRaises(RuntimeError, shouldThrow)
self.assertAfterWithManagerInvariantsWithError(cm_a)
self.assertAfterWithManagerInvariantsWithError(cm_b)
self.assertAfterWithManagerInvariantsWithError(mock_nested)
self.assertAfterWithGeneratorInvariantsWithError(self.resource_a)
self.assertAfterWithGeneratorInvariantsWithError(self.resource_b)
def testNestedExceptionBeforeInnerStatement(self):
mock_a = mock_contextmanager_generator()
mock_b = mock_contextmanager_generator()
self.bar = None
def shouldThrow():
with mock_a as self.foo:
self.assertInWithManagerInvariants(mock_a)
self.assertInWithGeneratorInvariants(self.foo)
self.raiseTestException()
with mock_b as self.bar:
pass
self.assertRaises(RuntimeError, shouldThrow)
self.assertAfterWithManagerInvariantsWithError(mock_a)
self.assertAfterWithGeneratorInvariantsWithError(self.foo)
# The inner statement stuff should never have been touched
self.assertEqual(self.bar, None)
self.assertFalse(mock_b.enter_called)
self.assertFalse(mock_b.exit_called)
self.assertEqual(mock_b.exit_args, None)
def testNestedExceptionAfterInnerStatement(self):
mock_a = mock_contextmanager_generator()
mock_b = mock_contextmanager_generator()
def shouldThrow():
with mock_a as self.foo:
with mock_b as self.bar:
self.assertInWithManagerInvariants(mock_a)
self.assertInWithManagerInvariants(mock_b)
self.assertInWithGeneratorInvariants(self.foo)
self.assertInWithGeneratorInvariants(self.bar)
self.raiseTestException()
self.assertRaises(RuntimeError, shouldThrow)
self.assertAfterWithManagerInvariantsWithError(mock_a)
self.assertAfterWithManagerInvariantsNoError(mock_b)
self.assertAfterWithGeneratorInvariantsWithError(self.foo)
self.assertAfterWithGeneratorInvariantsNoError(self.bar)
def testRaisedStopIteration1(self):
# From bug 1462485
@contextmanager
def cm():
yield
def shouldThrow():
with cm():
raise StopIteration("from with")
self.assertRaises(StopIteration, shouldThrow)
def testRaisedStopIteration2(self):
# From bug 1462485
class cm(object):
def __enter__(self):
pass
def __exit__(self, type, value, traceback):
pass
def shouldThrow():
with cm():
raise StopIteration("from with")
self.assertRaises(StopIteration, shouldThrow)
def testRaisedStopIteration3(self):
# Another variant where the exception hasn't been instantiated
# From bug 1705170
@contextmanager
def cm():
yield
def shouldThrow():
with cm():
raise next(iter([]))
self.assertRaises(StopIteration, shouldThrow)
def testRaisedGeneratorExit1(self):
# From bug 1462485
@contextmanager
def cm():
yield
def shouldThrow():
with cm():
raise GeneratorExit("from with")
self.assertRaises(GeneratorExit, shouldThrow)
def testRaisedGeneratorExit2(self):
# From bug 1462485
class cm (object):
def __enter__(self):
pass
def __exit__(self, type, value, traceback):
pass
def shouldThrow():
with cm():
raise GeneratorExit("from with")
self.assertRaises(GeneratorExit, shouldThrow)
def testErrorsInBool(self):
# issue4589: __exit__ return code may raise an exception
# when looking at its truth value.
class cm(object):
def __init__(self, bool_conversion):
class Bool:
def __bool__(self):
return bool_conversion()
self.exit_result = Bool()
def __enter__(self):
return 3
def __exit__(self, a, b, c):
return self.exit_result
def trueAsBool():
with cm(lambda: True):
self.fail("Should NOT see this")
trueAsBool()
def falseAsBool():
with cm(lambda: False):
self.fail("Should raise")
self.assertRaises(AssertionError, falseAsBool)
def failAsBool():
with cm(lambda: 1//0):
self.fail("Should NOT see this")
self.assertRaises(ZeroDivisionError, failAsBool)
class NonLocalFlowControlTestCase(unittest.TestCase):
def testWithBreak(self):
counter = 0
while True:
counter += 1
with mock_contextmanager_generator():
counter += 10
break
counter += 100 # Not reached
self.assertEqual(counter, 11)
def testWithContinue(self):
counter = 0
while True:
counter += 1
if counter > 2:
break
with mock_contextmanager_generator():
counter += 10
continue
counter += 100 # Not reached
self.assertEqual(counter, 12)
def testWithReturn(self):
def foo():
counter = 0
while True:
counter += 1
with mock_contextmanager_generator():
counter += 10
return counter
counter += 100 # Not reached
self.assertEqual(foo(), 11)
def testWithYield(self):
def gen():
with mock_contextmanager_generator():
yield 12
yield 13
x = list(gen())
self.assertEqual(x, [12, 13])
def testWithRaise(self):
counter = 0
try:
counter += 1
with mock_contextmanager_generator():
counter += 10
raise RuntimeError
counter += 100 # Not reached
except RuntimeError:
self.assertEqual(counter, 11)
else:
self.fail("Didn't raise RuntimeError")
class AssignmentTargetTestCase(unittest.TestCase):
def testSingleComplexTarget(self):
targets = {1: [0, 1, 2]}
with mock_contextmanager_generator() as targets[1][0]:
self.assertEqual(list(targets.keys()), [1])
self.assertEqual(targets[1][0].__class__, MockResource)
with mock_contextmanager_generator() as list(targets.values())[0][1]:
self.assertEqual(list(targets.keys()), [1])
self.assertEqual(targets[1][1].__class__, MockResource)
with mock_contextmanager_generator() as targets[2]:
keys = list(targets.keys())
keys.sort()
self.assertEqual(keys, [1, 2])
class C: pass
blah = C()
with mock_contextmanager_generator() as blah.foo:
self.assertEqual(hasattr(blah, "foo"), True)
def testMultipleComplexTargets(self):
class C:
def __enter__(self): return 1, 2, 3
def __exit__(self, t, v, tb): pass
targets = {1: [0, 1, 2]}
with C() as (targets[1][0], targets[1][1], targets[1][2]):
self.assertEqual(targets, {1: [1, 2, 3]})
with C() as (list(targets.values())[0][2], list(targets.values())[0][1], list(targets.values())[0][0]):
self.assertEqual(targets, {1: [3, 2, 1]})
with C() as (targets[1], targets[2], targets[3]):
self.assertEqual(targets, {1: 1, 2: 2, 3: 3})
class B: pass
blah = B()
with C() as (blah.one, blah.two, blah.three):
self.assertEqual(blah.one, 1)
self.assertEqual(blah.two, 2)
self.assertEqual(blah.three, 3)
class ExitSwallowsExceptionTestCase(unittest.TestCase):
def testExitTrueSwallowsException(self):
class AfricanSwallow:
def __enter__(self): pass
def __exit__(self, t, v, tb): return True
try:
with AfricanSwallow():
1/0
except ZeroDivisionError:
self.fail("ZeroDivisionError should have been swallowed")
def testExitFalseDoesntSwallowException(self):
class EuropeanSwallow:
def __enter__(self): pass
def __exit__(self, t, v, tb): return False
try:
with EuropeanSwallow():
1/0
except ZeroDivisionError:
pass
else:
self.fail("ZeroDivisionError should have been raised")
class NestedWith(unittest.TestCase):
class Dummy(object):
def __init__(self, value=None, gobble=False):
if value is None:
value = self
self.value = value
self.gobble = gobble
self.enter_called = False
self.exit_called = False
def __enter__(self):
self.enter_called = True
return self.value
def __exit__(self, *exc_info):
self.exit_called = True
self.exc_info = exc_info
if self.gobble:
return True
class InitRaises(object):
def __init__(self): raise RuntimeError()
class EnterRaises(object):
def __enter__(self): raise RuntimeError()
def __exit__(self, *exc_info): pass
class ExitRaises(object):
def __enter__(self): pass
def __exit__(self, *exc_info): raise RuntimeError()
def testNoExceptions(self):
with self.Dummy() as a, self.Dummy() as b:
self.assertTrue(a.enter_called)
self.assertTrue(b.enter_called)
self.assertTrue(a.exit_called)
self.assertTrue(b.exit_called)
def testExceptionInExprList(self):
try:
with self.Dummy() as a, self.InitRaises():
pass
except:
pass
self.assertTrue(a.enter_called)
self.assertTrue(a.exit_called)
def testExceptionInEnter(self):
try:
with self.Dummy() as a, self.EnterRaises():
self.fail('body of bad with executed')
except RuntimeError:
pass
else:
self.fail('RuntimeError not reraised')
self.assertTrue(a.enter_called)
self.assertTrue(a.exit_called)
def testExceptionInExit(self):
body_executed = False
with self.Dummy(gobble=True) as a, self.ExitRaises():
body_executed = True
self.assertTrue(a.enter_called)
self.assertTrue(a.exit_called)
self.assertTrue(body_executed)
self.assertNotEqual(a.exc_info[0], None)
def testEnterReturnsTuple(self):
with self.Dummy(value=(1,2)) as (a1, a2), \
self.Dummy(value=(10, 20)) as (b1, b2):
self.assertEqual(1, a1)
self.assertEqual(2, a2)
self.assertEqual(10, b1)
self.assertEqual(20, b2)
def test_main():
run_unittest(FailureTestCase, NonexceptionalTestCase,
NestedNonexceptionalTestCase, ExceptionalTestCase,
NonLocalFlowControlTestCase,
AssignmentTargetTestCase,
ExitSwallowsExceptionTestCase,
NestedWith)
if __name__ == '__main__':
test_main()
|
zhengyongbo/phantomjs
|
refs/heads/master
|
src/breakpad/src/tools/gyp/pylib/gyp/common.py
|
137
|
#!/usr/bin/python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import errno
import filecmp
import os.path
import re
import tempfile
import sys
def ExceptionAppend(e, msg):
"""Append a message to the given exception's message."""
if not e.args:
e.args = (msg,)
elif len(e.args) == 1:
e.args = (str(e.args[0]) + ' ' + msg,)
else:
e.args = (str(e.args[0]) + ' ' + msg,) + e.args[1:]
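# Illustrative example (hypothetical exception): given
#   e = Exception('bad value')
# ExceptionAppend(e, 'in foo.gyp') leaves e.args == ('bad value in foo.gyp',).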
def ParseQualifiedTarget(target):
# Splits a qualified target into a build file, target name and toolset.
# NOTE: rsplit is used to disambiguate the Windows drive letter separator.
target_split = target.rsplit(':', 1)
if len(target_split) == 2:
[build_file, target] = target_split
else:
build_file = None
target_split = target.rsplit('#', 1)
if len(target_split) == 2:
[target, toolset] = target_split
else:
toolset = None
return [build_file, target, toolset]
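# Illustrative example (hypothetical target string):
#   ParseQualifiedTarget('chrome/chrome.gyp:base#host')
#   returns ['chrome/chrome.gyp', 'base', 'host']; missing pieces come back
#   as None, e.g. ParseQualifiedTarget('base') returns [None, 'base', None].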
def ResolveTarget(build_file, target, toolset):
# This function resolves a target into a canonical form:
# - a fully defined build file, either absolute or relative to the current
# directory
# - a target name
# - a toolset
#
# build_file is the file relative to which 'target' is defined.
# target is the qualified target.
# toolset is the default toolset for that target.
[parsed_build_file, target, parsed_toolset] = ParseQualifiedTarget(target)
if parsed_build_file:
if build_file:
# If a relative path, parsed_build_file is relative to the directory
# containing build_file. If build_file is not in the current directory,
# parsed_build_file is not a usable path as-is. Resolve it by
# interpreting it as relative to build_file. If parsed_build_file is
# absolute, it is usable as a path regardless of the current directory,
# and os.path.join will return it as-is.
build_file = os.path.normpath(os.path.join(os.path.dirname(build_file),
parsed_build_file))
else:
build_file = parsed_build_file
if parsed_toolset:
toolset = parsed_toolset
return [build_file, target, toolset]
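# Illustrative example (hypothetical paths):
#   ResolveTarget('a/b.gyp', '../c.gyp:tgt', 'host')
#   returns ['c.gyp', 'tgt', 'host'] -- the parsed build file is interpreted
#   relative to a/ and normalized, and the default toolset is kept.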
def BuildFile(fully_qualified_target):
# Extracts the build file from the fully qualified target.
return ParseQualifiedTarget(fully_qualified_target)[0]
def QualifiedTarget(build_file, target, toolset):
# "Qualified" means the file that a target was defined in and the target
# name, separated by a colon, suffixed by a # and the toolset name:
# /path/to/file.gyp:target_name#toolset
fully_qualified = build_file + ':' + target
if toolset:
fully_qualified = fully_qualified + '#' + toolset
return fully_qualified
def RelativePath(path, relative_to):
# Assuming both |path| and |relative_to| are relative to the current
# directory, returns a relative path that identifies path relative to
# relative_to.
# Convert to absolute (and therefore normalized paths).
path = os.path.abspath(path)
relative_to = os.path.abspath(relative_to)
# Split the paths into components.
path_split = path.split(os.path.sep)
relative_to_split = relative_to.split(os.path.sep)
# Determine how much of the prefix the two paths share.
prefix_len = len(os.path.commonprefix([path_split, relative_to_split]))
# Put enough ".." components to back up out of relative_to to the common
# prefix, and then append the part of path_split after the common prefix.
relative_split = [os.path.pardir] * (len(relative_to_split) - prefix_len) + \
path_split[prefix_len:]
if len(relative_split) == 0:
# The paths were the same.
return ''
# Turn it back into a string and we're done.
return os.path.join(*relative_split)
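# Illustrative example (hypothetical layout): with both paths relative to the
# current directory, RelativePath('out/Debug', 'src') yields '../out/Debug'.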
def FixIfRelativePath(path, relative_to):
# Like RelativePath but returns |path| unchanged if it is absolute.
if os.path.isabs(path):
return path
return RelativePath(path, relative_to)
def UnrelativePath(path, relative_to):
# Assuming that |relative_to| is relative to the current directory, and |path|
# is a path relative to the dirname of |relative_to|, returns a path that
# identifies |path| relative to the current directory.
rel_dir = os.path.dirname(relative_to)
return os.path.normpath(os.path.join(rel_dir, path))
# re objects used by EncodePOSIXShellArgument. See IEEE 1003.1 XCU.2.2 at
# http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_02
# and the documentation for various shells.
# _quote is a pattern that should match any argument that needs to be quoted
# with double-quotes by EncodePOSIXShellArgument. It matches the following
# characters appearing anywhere in an argument:
# \t, \n, space parameter separators
# # comments
# $ expansions (quoted to always expand within one argument)
# % called out by IEEE 1003.1 XCU.2.2
# & job control
# ' quoting
# (, ) subshell execution
# *, ?, [ pathname expansion
# ; command delimiter
# <, >, | redirection
# = assignment
# {, } brace expansion (bash)
# ~ tilde expansion
# It also matches the empty string, because "" (or '') is the only way to
# represent an empty string literal argument to a POSIX shell.
#
# This does not match the characters in _escape, because those need to be
# backslash-escaped regardless of whether they appear in a double-quoted
# string.
_quote = re.compile('[\t\n #$%&\'()*;<=>?[{|}~]|^$')
# _escape is a pattern that should match any character that needs to be
# escaped with a backslash, whether or not the argument matched the _quote
# pattern. _escape is used with re.sub to backslash anything in _escape's
# first match group, hence the (parentheses) in the regular expression.
#
# _escape matches the following characters appearing anywhere in an argument:
# " to prevent POSIX shells from interpreting this character for quoting
# \ to prevent POSIX shells from interpreting this character for escaping
# ` to prevent POSIX shells from interpreting this character for command
# substitution
# Missing from this list is $, because the desired behavior of
# EncodePOSIXShellArgument is to permit parameter (variable) expansion.
#
# Also missing from this list is !, which bash will interpret as the history
# expansion character when history is enabled. bash does not enable history
# by default in non-interactive shells, so this is not thought to be a problem.
# ! was omitted from this list because bash interprets "\!" as a literal string
# including the backslash character (avoiding history expansion but retaining
# the backslash), which would not be correct for argument encoding. Handling
# this case properly would also be problematic because bash allows the history
# character to be changed with the histchars shell variable. Fortunately,
# as history is not enabled in non-interactive shells and
# EncodePOSIXShellArgument is only expected to encode for non-interactive
# shells, there is no room for error here by ignoring !.
_escape = re.compile(r'(["\\`])')
def EncodePOSIXShellArgument(argument):
"""Encodes |argument| suitably for consumption by POSIX shells.
argument may be quoted and escaped as necessary to ensure that POSIX shells
treat the returned value as a literal representing the argument passed to
this function. Parameter (variable) expansions beginning with $ are allowed
to remain intact without escaping the $, to allow the argument to contain
references to variables to be expanded by the shell.
"""
if not isinstance(argument, str):
argument = str(argument)
if _quote.search(argument):
quote = '"'
else:
quote = ''
encoded = quote + re.sub(_escape, r'\\\1', argument) + quote
return encoded
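# Illustrative examples (hypothetical arguments):
#   EncodePOSIXShellArgument('hello world')  -> "hello world"
#   EncodePOSIXShellArgument('a"b')          -> a\"b
#   EncodePOSIXShellArgument('$HOME/dir')    -> "$HOME/dir"  ($ left intact)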
def EncodePOSIXShellList(list):
"""Encodes |list| suitably for consumption by POSIX shells.
Returns EncodePOSIXShellArgument for each item in list, and joins them
together using the space character as an argument separator.
"""
encoded_arguments = []
for argument in list:
encoded_arguments.append(EncodePOSIXShellArgument(argument))
return ' '.join(encoded_arguments)
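# Illustrative example: EncodePOSIXShellList(['ls', '-l', 'My Dir'])
# returns: ls -l "My Dir"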
def DeepDependencyTargets(target_dicts, roots):
"""Returns the recursive list of target dependencies.
"""
dependencies = set()
for r in roots:
spec = target_dicts[r]
r_deps = list(set((spec.get('dependencies', []) +
spec.get('dependencies_original', []))))
for d in r_deps:
if d not in roots:
dependencies.add(d)
for d in DeepDependencyTargets(target_dicts, r_deps):
if d not in roots:
dependencies.add(d)
return list(dependencies)
def BuildFileTargets(target_list, build_file):
"""From a target_list, returns the subset from the specified build_file.
"""
return [p for p in target_list if BuildFile(p) == build_file]
def AllTargets(target_list, target_dicts, build_file):
"""Returns all targets (direct and dependencies) for the specified build_file.
"""
bftargets = BuildFileTargets(target_list, build_file)
deptargets = DeepDependencyTargets(target_dicts, bftargets)
return bftargets + deptargets
def WriteOnDiff(filename):
"""Write to a file only if the new contents differ.
Arguments:
filename: name of the file to potentially write to.
Returns:
A file like object which will write to temporary file and only overwrite
the target if it differs (on close).
"""
class Writer:
"""Wrapper around file which only covers the target if it differs."""
def __init__(self):
# Pick temporary file.
tmp_fd, self.tmp_path = tempfile.mkstemp(
suffix='.tmp',
prefix=os.path.split(filename)[1] + '.gyp.',
dir=os.path.split(filename)[0])
try:
self.tmp_file = os.fdopen(tmp_fd, 'wb')
except Exception:
# Don't leave turds behind.
os.unlink(self.tmp_path)
raise
def __getattr__(self, attrname):
# Delegate everything else to self.tmp_file
return getattr(self.tmp_file, attrname)
def close(self):
try:
# Close tmp file.
self.tmp_file.close()
# Determine if different.
same = False
try:
same = filecmp.cmp(self.tmp_path, filename, False)
except OSError, e:
if e.errno != errno.ENOENT:
raise
if same:
# The new file is identical to the old one, just get rid of the new
# one.
os.unlink(self.tmp_path)
else:
# The new file is different from the old one, or there is no old one.
# Rename the new file to the permanent name.
#
# tempfile.mkstemp uses an overly restrictive mode, resulting in a
# file that can only be read by the owner, regardless of the umask.
# There's no reason to not respect the umask here, which means that
# an extra hoop is required to fetch it and reset the new file's mode.
#
# No way to get the umask without setting a new one? Set a safe one
# and then set it back to the old value.
umask = os.umask(077)
os.umask(umask)
os.chmod(self.tmp_path, 0666 & ~umask)
if sys.platform == 'win32' and os.path.exists(filename):
# NOTE: on windows (but not cygwin) rename will not replace an
# existing file, so it must be preceded with a remove. Sadly there
# is no way to make the switch atomic.
os.remove(filename)
os.rename(self.tmp_path, filename)
except Exception:
# Don't leave turds behind.
os.unlink(self.tmp_path)
raise
return Writer()
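# Illustrative use: treat the returned Writer like a regular file object;
# the target is only replaced on close() if the contents actually changed.
#   out = WriteOnDiff('project.gyp')
#   out.write('...generated contents...')
#   out.close()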
# From Alex Martelli,
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560
# ASPN: Python Cookbook: Remove duplicates from a sequence
# First comment, dated 2001/10/13.
# (Also in the printed Python Cookbook.)
def uniquer(seq, idfun=None):
if idfun is None:
def idfun(x): return x
seen = {}
result = []
for item in seq:
marker = idfun(item)
if marker in seen: continue
seen[marker] = 1
result.append(item)
return result
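# Illustrative example: uniquer([1, 2, 1, 3]) returns [1, 2, 3], preserving
# first-seen order; pass idfun to deduplicate on a derived key instead.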
|
bratatidas9/Impala-1
|
refs/heads/cdh5-trunk
|
tests/query_test/test_delimited_text.py
|
13
|
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
# Targeted Impala tests for different tuple delimiters, field delimiters,
# and escape characters.
#
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
from tests.common.test_dimensions import create_single_exec_option_dimension
from tests.common.test_dimensions import create_uncompressed_text_dimension
class TestDelimitedText(ImpalaTestSuite):
"""
Tests delimited text files with different tuple delimiters, field delimiters
and escape characters.
"""
TEST_DB_NAME = "delim_text_test_db"
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestDelimitedText, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(create_single_exec_option_dimension())
# Only run on delimited text with no compression.
cls.TestMatrix.add_dimension(create_uncompressed_text_dimension(cls.get_workload()))
def setup_method(self, method):
# cleanup and create a fresh test database
self.__cleanup()
self.execute_query("create database %s" % self.TEST_DB_NAME)
def teardown_method(self, method):
self.__cleanup()
def __cleanup(self):
self.client.execute('use default')
self.client.execute('drop table if exists %s.cbn' % self.TEST_DB_NAME)
self.client.execute('drop table if exists %s.dhp' % self.TEST_DB_NAME)
self.client.execute('drop table if exists %s.tecn' % self.TEST_DB_NAME)
self.client.execute('drop database if exists %s' % self.TEST_DB_NAME)
def test_delimited_text(self, vector):
self.run_test_case('QueryTest/delimited-text', vector)
@pytest.mark.execute_serially
def test_delimited_text_latin_chars(self, vector):
"""Verifies Impala is able to properly handle delimited text that contains
extended ASCII/latin characters. Marked to execute serially because of the
shared setup/cleanup."""
self.run_test_case('QueryTest/delimited-latin-text', vector, encoding="latin-1")
|
gabriel-laet/graphql-py
|
refs/heads/master
|
graphql/core/execution/middlewares/sync.py
|
2
|
from graphql.core.defer import Deferred
from graphql.core.error import GraphQLError
class SynchronousExecutionMiddleware(object):
def run_resolve_fn(self, resolver, original_resolver):
result = resolver()
if isinstance(result, Deferred):
raise GraphQLError('You cannot return a Deferred from a resolver when using SynchronousExecutionMiddleware')
return result
def execution_result(self, executor):
result = executor()
return result.result
|
yamada-h/ryu
|
refs/heads/master
|
ryu/tests/unit/cmd/dummy_openflow_app.py
|
56
|
# Copyright (C) 2013,2014 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2013,2014 YAMAMOTO Takashi <yamamoto at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ryu.base import app_manager
from ryu.ofproto import ofproto_v1_3
class DummyOpenFlowApp(app_manager.RyuApp):
OFP_VERSIONS = [ofproto_v1_3.OFP_VERSION]
|
gardner/youtube-dl
|
refs/heads/master
|
youtube_dl/extractor/moevideo.py
|
112
|
# coding: utf-8
from __future__ import unicode_literals
import json
import re
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse,
compat_urllib_request,
)
from ..utils import (
ExtractorError,
int_or_none,
)
class MoeVideoIE(InfoExtractor):
IE_DESC = 'LetitBit video services: moevideo.net, playreplay.net and videochart.net'
_VALID_URL = r'''(?x)
https?://(?P<host>(?:www\.)?
(?:(?:moevideo|playreplay|videochart)\.net))/
(?:video|framevideo)/(?P<id>[0-9]+\.[0-9A-Za-z]+)'''
_API_URL = 'http://api.letitbit.net/'
_API_KEY = 'tVL0gjqo5'
_TESTS = [
{
'url': 'http://moevideo.net/video/00297.0036103fe3d513ef27915216fd29',
'md5': '129f5ae1f6585d0e9bb4f38e774ffb3a',
'info_dict': {
'id': '00297.0036103fe3d513ef27915216fd29',
'ext': 'flv',
'title': 'Sink cut out machine',
'description': 'md5:f29ff97b663aefa760bf7ca63c8ca8a8',
'thumbnail': 're:^https?://.*\.jpg$',
'width': 540,
'height': 360,
'duration': 179,
'filesize': 17822500,
}
},
{
'url': 'http://playreplay.net/video/77107.7f325710a627383d40540d8e991a',
'md5': '74f0a014d5b661f0f0e2361300d1620e',
'info_dict': {
'id': '77107.7f325710a627383d40540d8e991a',
'ext': 'flv',
'title': 'Operacion Condor.',
'description': 'md5:7e68cb2fcda66833d5081c542491a9a3',
'thumbnail': 're:^https?://.*\.jpg$',
'width': 480,
'height': 296,
'duration': 6027,
'filesize': 588257923,
},
'skip': 'Video has been removed',
},
]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
webpage = self._download_webpage(
'http://%s/video/%s' % (mobj.group('host'), video_id),
video_id, 'Downloading webpage')
title = self._og_search_title(webpage)
thumbnail = self._og_search_thumbnail(webpage)
description = self._og_search_description(webpage)
r = [
self._API_KEY,
[
'preview/flv_link',
{
'uid': video_id,
},
],
]
r_json = json.dumps(r)
post = compat_urllib_parse.urlencode({'r': r_json})
req = compat_urllib_request.Request(self._API_URL, post)
req.add_header('Content-type', 'application/x-www-form-urlencoded')
response = self._download_json(req, video_id)
if response['status'] != 'OK':
raise ExtractorError(
'%s returned error: %s' % (self.IE_NAME, response['data']),
expected=True
)
item = response['data'][0]
video_url = item['link']
duration = int_or_none(item['length'])
width = int_or_none(item['width'])
height = int_or_none(item['height'])
filesize = int_or_none(item['convert_size'])
formats = [{
'format_id': 'sd',
'http_headers': {'Range': 'bytes=0-'}, # Required to download
'url': video_url,
'width': width,
'height': height,
'filesize': filesize,
}]
return {
'id': video_id,
'title': title,
'thumbnail': thumbnail,
'description': description,
'duration': duration,
'formats': formats,
}
|
amjith/python-prompt-toolkit
|
refs/heads/master
|
examples/auto-suggestion.py
|
3
|
#!/usr/bin/env python
"""
Simple example of a CLI that demonstrates fish-style auto suggestion.
When you type some input, it will match the input against the history. If one
entry of the history starts with the given input, then it will show the
remaining part as a suggestion. Pressing the right arrow will insert this
suggestion.
"""
from __future__ import unicode_literals, print_function
from prompt_toolkit import prompt
from prompt_toolkit.history import InMemoryHistory
from prompt_toolkit.interface import AbortAction
from prompt_toolkit.auto_suggest import AutoSuggestFromHistory
def main():
# Create some history first. (Easy for testing.)
history = InMemoryHistory()
history.append('import os')
history.append('print("hello")')
history.append('print("world")')
history.append('import path')
# Print help.
print('This CLI has fish-style auto-suggestion enabled.')
print('Type for instance "pri", then you\'ll see a suggestion.')
print('Press the right arrow to insert the suggestion.')
print('Press Control-C to retry. Control-D to exit.')
print()
text = prompt('Say something: ', history=history,
auto_suggest=AutoSuggestFromHistory(),
enable_history_search=True,
on_abort=AbortAction.RETRY)
print('You said: %s' % text)
if __name__ == '__main__':
main()
|
krintoxi/NoobSec-Toolkit
|
refs/heads/master
|
NoobSecToolkit /tools/sqli/waf/cloudflare.py
|
10
|
#!/usr/bin/env python
"""
Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import re
from lib.core.enums import HTTP_HEADER
from lib.core.settings import WAF_ATTACK_VECTORS
__product__ = "CloudFlare Web Application Firewall (CloudFlare)"
def detect(get_page):
retval = False
for vector in WAF_ATTACK_VECTORS:
page, headers, code = get_page(get=vector)
retval = re.search(r"cloudflare-nginx", headers.get(HTTP_HEADER.SERVER, ""), re.I) is not None
retval |= re.search(r"\A__cfduid=", headers.get(HTTP_HEADER.SET_COOKIE, ""), re.I) is not None
if retval:
break
return retval
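# Illustrative check (hypothetical get_page stub, not part of sqlmap):
#   def get_page(get=None):
#       return "<html/>", {"Server": "cloudflare-nginx"}, 200
#   detect(get_page)  # -> True, via the Server banner match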
|
brianrock/brianrock-ringo
|
refs/heads/master
|
third_party/simplejson/tests/test_encode_for_html.py
|
132
|
import unittest
import simplejson.decoder
import simplejson.encoder
class TestEncodeForHTML(unittest.TestCase):
def setUp(self):
self.decoder = simplejson.decoder.JSONDecoder()
self.encoder = simplejson.encoder.JSONEncoderForHTML()
def test_basic_encode(self):
self.assertEqual(r'"\u0026"', self.encoder.encode('&'))
self.assertEqual(r'"\u003c"', self.encoder.encode('<'))
self.assertEqual(r'"\u003e"', self.encoder.encode('>'))
def test_basic_roundtrip(self):
for char in '&<>':
self.assertEqual(
char, self.decoder.decode(
self.encoder.encode(char)))
def test_prevent_script_breakout(self):
bad_string = '</script><script>alert("gotcha")</script>'
self.assertEqual(
r'"\u003c/script\u003e\u003cscript\u003e'
r'alert(\"gotcha\")\u003c/script\u003e"',
self.encoder.encode(bad_string))
self.assertEqual(
bad_string, self.decoder.decode(
self.encoder.encode(bad_string)))
|
sigmavirus24/rpc-openstack
|
refs/heads/master
|
maas/plugins/horizon_check.py
|
1
|
#!/usr/bin/env python
# Copyright 2014, Rackspace US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import re
import ipaddr
from lxml import html
from maas_common import get_auth_details
from maas_common import metric
from maas_common import metric_bool
from maas_common import print_output
from maas_common import status_err
from maas_common import status_ok
import requests
from requests import exceptions as exc
def check(args):
# disable warning for insecure cert on horizon
if requests.__build__ >= 0x020400:
requests.packages.urllib3.disable_warnings()
splash_status_code = 0
splash_milliseconds = 0.0
login_status_code = 0
login_milliseconds = 0.0
is_up = True
auth_details = get_auth_details()
OS_USERNAME = auth_details['OS_USERNAME']
OS_PASSWORD = auth_details['OS_PASSWORD']
OS_USER_DOMAIN_NAME = auth_details['OS_USER_DOMAIN_NAME']
HORIZON_URL = 'https://{ip}'.format(ip=args.ip)
HORIZON_PORT = '443'
s = requests.Session()
try:
r = s.get('%s:%s' % (HORIZON_URL, HORIZON_PORT),
verify=False,
timeout=10)
except (exc.ConnectionError,
exc.HTTPError,
exc.Timeout) as e:
is_up = False
else:
if not (r.ok and
re.search(args.site_name_regexp, r.content, re.IGNORECASE)):
status_err('could not load login page')
splash_status_code = r.status_code
splash_milliseconds = r.elapsed.total_seconds() * 1000
parsed_html = html.fromstring(r.content)
csrf_token = parsed_html.xpath(
'//input[@name="csrfmiddlewaretoken"]/@value')[0]
region = parsed_html.xpath(
'//input[@name="region"]/@value')[0]
domain = parsed_html.xpath('//input[@name="domain"]')
s.headers.update(
{'Content-type': 'application/x-www-form-urlencoded',
'Referer': HORIZON_URL})
payload = {'username': OS_USERNAME,
'password': OS_PASSWORD,
'csrfmiddlewaretoken': csrf_token,
'region': region}
if domain:
payload['domain'] = OS_USER_DOMAIN_NAME
try:
l = s.post(
('%s:%s/auth/login/') % (HORIZON_URL, HORIZON_PORT),
data=payload,
verify=False)
except (exc.ConnectionError,
exc.HTTPError,
exc.Timeout) as e:
status_err('While logging in: %s' % e)
if not (l.ok and re.search('overview', l.content, re.IGNORECASE)):
status_err('could not log in')
login_status_code = l.status_code
login_milliseconds = l.elapsed.total_seconds() * 1000
status_ok()
metric_bool('horizon_local_status', is_up)
if is_up:
metric('splash_status_code', 'uint32', splash_status_code, 'http_code')
metric('splash_milliseconds', 'double', splash_milliseconds, 'ms')
metric('login_status_code', 'uint32', login_status_code, 'http_code')
metric('login_milliseconds', 'double', login_milliseconds, 'ms')
def main(args):
check(args)
if __name__ == "__main__":
with print_output():
parser = argparse.ArgumentParser(description='Check horizon dashboard')
parser.add_argument('ip',
type=ipaddr.IPv4Address,
help='horizon dashboard IP address')
parser.add_argument('site_name_regexp',
type=str,
default='openstack dashboard',
help='Horizon Site Name')
args = parser.parse_args()
main(args)
|
edxnercel/edx-platform
|
refs/heads/master
|
cms/djangoapps/contentstore/management/commands/utils.py
|
112
|
"""
Common methods for cms commands to use
"""
from django.contrib.auth.models import User
def user_from_str(identifier):
"""
Return a user identified by the given string. The string could be an email
address, or a stringified integer corresponding to the ID of the user in
the database. If no user could be found, a User.DoesNotExist exception
will be raised.
"""
try:
user_id = int(identifier)
except ValueError:
return User.objects.get(email=identifier)
return User.objects.get(id=user_id)
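# Illustrative examples (hypothetical users):
#   user_from_str("42")                -> User.objects.get(id=42)
#   user_from_str("staff@example.com") -> User.objects.get(email="staff@example.com")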
|
ibinti/intellij-community
|
refs/heads/master
|
python/testData/completion/instanceFromFunctionAssignedToCallAttr.py
|
39
|
class Foo(object):
bar = True
class FooMaker(object):
def foo(self):
return Foo()
__call__ = foo
fm = FooMaker()
f3 = fm()
f3.b<caret>
|
xzYue/odoo
|
refs/heads/8.0
|
addons/l10n_si/__init__.py
|
439
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright: (C) 2012 - Mentis d.o.o., Dravograd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import account_wizard
|
171121130/SWI
|
refs/heads/master
|
venv/Lib/site-packages/openpyxl/chart/label.py
|
3
|
from openpyxl.descriptors.serialisable import Serialisable
from openpyxl.descriptors import (
Typed,
String,
Integer,
Bool,
Set,
Float,
Sequence,
Alias
)
from openpyxl.descriptors.excel import ExtensionList
from openpyxl.descriptors.nested import (
NestedNoneSet,
NestedBool,
NestedString,
NestedInteger,
)
from .shapes import GraphicalProperties
from .text import RichText
class _DataLabelBase(Serialisable):
numFmt = NestedString(allow_none=True, attribute="formatCode")
spPr = Typed(expected_type=GraphicalProperties, allow_none=True)
graphicalProperties = Alias('spPr')
txPr = Typed(expected_type=RichText, allow_none=True)
textProperties = Alias('txPr')
dLblPos = NestedNoneSet(values=['bestFit', 'b', 'ctr', 'inBase', 'inEnd',
'l', 'outEnd', 'r', 't'])
position = Alias('dLblPos')
showLegendKey = NestedBool(allow_none=True)
showVal = NestedBool(allow_none=True)
showCatName = NestedBool(allow_none=True)
showSerName = NestedBool(allow_none=True)
showPercent = NestedBool(allow_none=True)
showBubbleSize = NestedBool(allow_none=True)
showLeaderLines = NestedBool(allow_none=True)
separator = NestedString(allow_none=True)
extLst = Typed(expected_type=ExtensionList, allow_none=True)
__elements__ = ("numFmt", "spPr", "txPr", "dLblPos", "showLegendKey",
"showVal", "showCatName", "showSerName", "showPercent", "showBubbleSize",
"showLeaderLines", "separator")
def __init__(self,
numFmt=None,
spPr=None,
txPr=None,
dLblPos=None,
showLegendKey=None,
showVal=None,
showCatName=None,
showSerName=None,
showPercent=None,
showBubbleSize=None,
showLeaderLines=None,
separator=None,
extLst=None,
):
self.numFmt = numFmt
self.spPr = spPr
self.txPr = txPr
self.dLblPos = dLblPos
self.showLegendKey = showLegendKey
self.showVal = showVal
self.showCatName = showCatName
self.showSerName = showSerName
self.showPercent = showPercent
self.showBubbleSize = showBubbleSize
self.showLeaderLines = showLeaderLines
self.separator = separator
class DataLabel(_DataLabelBase):
tagname = "dLbl"
idx = NestedInteger()
numFmt = _DataLabelBase.numFmt
spPr = _DataLabelBase.spPr
txPr = _DataLabelBase.txPr
dLblPos = _DataLabelBase.dLblPos
showLegendKey = _DataLabelBase.showLegendKey
showVal = _DataLabelBase.showVal
showCatName = _DataLabelBase.showCatName
showSerName = _DataLabelBase.showSerName
showPercent = _DataLabelBase.showPercent
showBubbleSize = _DataLabelBase.showBubbleSize
showLeaderLines = _DataLabelBase.showLeaderLines
separator = _DataLabelBase.separator
extLst = _DataLabelBase.extLst
__elements__ = ("idx",) + _DataLabelBase.__elements__
def __init__(self, idx=0, **kw ):
self.idx = idx
super(DataLabel, self).__init__(**kw)
class DataLabelList(_DataLabelBase):
tagname = "dLbls"
dLbl = Sequence(expected_type=DataLabel, allow_none=True)
delete = NestedBool(allow_none=True)
numFmt = _DataLabelBase.numFmt
spPr = _DataLabelBase.spPr
txPr = _DataLabelBase.txPr
dLblPos = _DataLabelBase.dLblPos
showLegendKey = _DataLabelBase.showLegendKey
showVal = _DataLabelBase.showVal
showCatName = _DataLabelBase.showCatName
showSerName = _DataLabelBase.showSerName
showPercent = _DataLabelBase.showPercent
showBubbleSize = _DataLabelBase.showBubbleSize
showLeaderLines = _DataLabelBase.showLeaderLines
separator = _DataLabelBase.separator
extLst = _DataLabelBase.extLst
__elements__ = ("delete", "dLbl",) + _DataLabelBase.__elements__
def __init__(self, dLbl=(), delete=None, **kw):
self.dLbl = dLbl
self.delete = delete
super(DataLabelList, self).__init__(**kw)
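# Illustrative use (a minimal sketch; the chart attribute name is assumed
# from the openpyxl chart API):
#   labels = DataLabelList(showVal=True)
#   chart.dataLabels = labels  # show each point's value next to it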
|
kittiu/sale-workflow
|
refs/heads/10.0
|
sale_procurement_group_by_line/model/__init__.py
|
1
|
# -*- coding: utf-8 -*-
# Copyright 2013-2014 Camptocamp SA - Guewen Baconnier
# © 2016 Eficent Business and IT Consulting Services S.L.
# © 2016 Serpent Consulting Services Pvt. Ltd.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from . import sale
|
mxjl620/scikit-learn
|
refs/heads/master
|
sklearn/manifold/tests/test_mds.py
|
324
|
import numpy as np
from numpy.testing import assert_array_almost_equal
from nose.tools import assert_raises
from sklearn.manifold import mds
def test_smacof():
# test metric smacof using the data of "Modern Multidimensional Scaling",
# Borg & Groenen, p 154
sim = np.array([[0, 5, 3, 4],
[5, 0, 2, 2],
[3, 2, 0, 1],
[4, 2, 1, 0]])
Z = np.array([[-.266, -.539],
[.451, .252],
[.016, -.238],
[-.200, .524]])
X, _ = mds.smacof(sim, init=Z, n_components=2, max_iter=1, n_init=1)
X_true = np.array([[-1.415, -2.471],
[1.633, 1.107],
[.249, -.067],
[-.468, 1.431]])
assert_array_almost_equal(X, X_true, decimal=3)
def test_smacof_error():
# Not symmetric similarity matrix:
sim = np.array([[0, 5, 9, 4],
[5, 0, 2, 2],
[3, 2, 0, 1],
[4, 2, 1, 0]])
assert_raises(ValueError, mds.smacof, sim)
# Not squared similarity matrix:
sim = np.array([[0, 5, 9, 4],
[5, 0, 2, 2],
[4, 2, 1, 0]])
assert_raises(ValueError, mds.smacof, sim)
# init not None and not correct format:
sim = np.array([[0, 5, 3, 4],
[5, 0, 2, 2],
[3, 2, 0, 1],
[4, 2, 1, 0]])
Z = np.array([[-.266, -.539],
[.016, -.238],
[-.200, .524]])
assert_raises(ValueError, mds.smacof, sim, init=Z, n_init=1)
def test_MDS():
sim = np.array([[0, 5, 3, 4],
[5, 0, 2, 2],
[3, 2, 0, 1],
[4, 2, 1, 0]])
mds_clf = mds.MDS(metric=False, n_jobs=3, dissimilarity="precomputed")
mds_clf.fit(sim)
|
labcodes/django
|
refs/heads/master
|
tests/template_tests/filter_tests/test_yesno.py
|
430
|
from django.template.defaultfilters import yesno
from django.test import SimpleTestCase
class FunctionTests(SimpleTestCase):
def test_true(self):
self.assertEqual(yesno(True), 'yes')
def test_false(self):
self.assertEqual(yesno(False), 'no')
def test_none(self):
self.assertEqual(yesno(None), 'maybe')
def test_true_arguments(self):
self.assertEqual(yesno(True, 'certainly,get out of town,perhaps'), 'certainly')
def test_false_arguments(self):
self.assertEqual(yesno(False, 'certainly,get out of town,perhaps'), 'get out of town')
def test_none_two_arguments(self):
self.assertEqual(yesno(None, 'certainly,get out of town'), 'get out of town')
def test_none_three_arguments(self):
self.assertEqual(yesno(None, 'certainly,get out of town,perhaps'), 'perhaps')
|
lancezlin/pyjs
|
refs/heads/master
|
pyjswidgets/pyjamas/Canvas/ImageLoaderhulahop.py
|
7
|
"""
* Copyright 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http:#www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
"""
from __pyjamas__ import JS
from pyjamas import DOM
"""*
* Static internal collection of ImageLoader instances.
* ImageLoader is not instantiable externally.
"""
imageLoaders = []
"""*
* Provides a mechanism for deferred execution of a callback
* method once all specified Images are loaded.
"""
class ImageLoader:
def __init__(self):
self.images = []
self.callBack = None
self.loadedImages = 0
self.totalImages = 0
"""*
* Stores the ImageElement reference so that when all the images report
* an onload, we can return the array of all the ImageElements.
* @param img
"""
def addHandle(self, img):
self.totalImages += 1
self.images.append(img)
"""*
* Invokes the onImagesLoaded method in the CallBack if all the
* images are loaded AND we have a CallBack specified.
*
* Called from the JSNI onload event handler.
"""
def dispatchIfComplete(self):
if self.callBack is not None and self.isAllLoaded():
self.callBack.onImagesLoaded(self.images)
# remove the image loader
imageLoaders.remove(self)
"""*
* Sets the callback object for the ImageLoader.
* Once this is set, we may invoke the callback once all images that
* need to be loaded report in from their onload event handlers.
*
* @param cb
"""
def finalize(self, cb):
self.callBack = cb
def incrementLoadedImages(self):
self.loadedImages += 1
def isAllLoaded(self):
return (self.loadedImages == self.totalImages)
def _onload(self, form, event, something):
if not self.__formAction:
return
self._listener.onFrameLoad()
def _onsubmit(self, form, event, something):
print form, event, something
try:
event = get_main_frame().gobject_wrap(event) # webkit HACK!
form = get_main_frame().gobject_wrap(form) # webkit HACK!
except:
pass
if self.iframe:
self.__formAction = form.action
return self._listener.onFormSubmit()
# FormPanelImpl.hookEvents
def hookEvents(self, iframe, form, listener):
# TODO: might have to fix this, use DOM.set_listener()
self._listener = listener
if iframe:
wf = mf = get_main_frame()
self._onload_listener = mf.addEventListener(iframe, "load",
self._onload)
self._onsubmit_listener = mf.addEventListener(form, "onsubmit",
self._onsubmit)
"""*
* Returns a handle to an img object. Ties back to the ImageLoader instance
"""
def prepareImage(self, url):
img = Image()
JS("""
// if( callback specified )
// do nothing
var __this = this;
@{{img}}['onload'] = function() {
if(!@{{img}}['__isLoaded']) {
// __isLoaded should be set for the first time here.
// if for some reason img fires a second onload event
// we do not want to execute the following again (hence the guard)
@{{img}}['__isLoaded'] = true;
__this['incrementLoadedImages']();
@{{img}}['onload'] = null;
// we call this function each time onload fires
// It will see if we are ready to invoke the callback
__this['dispatchIfComplete']();
} else {
// we invoke the callback since we are already loaded
__this['dispatchIfComplete']();
}
}
return @{{img}};
""")
def init():
global imageLoadArray
imageLoadArray = {}
|
abdullah2891/remo
|
refs/heads/master
|
vendor-local/lib/python/unidecode/x095.py
|
252
|
data = (
'Xiao ', # 0x00
'Suo ', # 0x01
'Li ', # 0x02
'Zheng ', # 0x03
'Chu ', # 0x04
'Guo ', # 0x05
'Gao ', # 0x06
'Tie ', # 0x07
'Xiu ', # 0x08
'Cuo ', # 0x09
'Lue ', # 0x0a
'Feng ', # 0x0b
'Xin ', # 0x0c
'Liu ', # 0x0d
'Kai ', # 0x0e
'Jian ', # 0x0f
'Rui ', # 0x10
'Ti ', # 0x11
'Lang ', # 0x12
'Qian ', # 0x13
'Ju ', # 0x14
'A ', # 0x15
'Qiang ', # 0x16
'Duo ', # 0x17
'Tian ', # 0x18
'Cuo ', # 0x19
'Mao ', # 0x1a
'Ben ', # 0x1b
'Qi ', # 0x1c
'De ', # 0x1d
'Kua ', # 0x1e
'Kun ', # 0x1f
'Chang ', # 0x20
'Xi ', # 0x21
'Gu ', # 0x22
'Luo ', # 0x23
'Chui ', # 0x24
'Zhui ', # 0x25
'Jin ', # 0x26
'Zhi ', # 0x27
'Xian ', # 0x28
'Juan ', # 0x29
'Huo ', # 0x2a
'Pou ', # 0x2b
'Tan ', # 0x2c
'Ding ', # 0x2d
'Jian ', # 0x2e
'Ju ', # 0x2f
'Meng ', # 0x30
'Zi ', # 0x31
'Qie ', # 0x32
'Ying ', # 0x33
'Kai ', # 0x34
'Qiang ', # 0x35
'Song ', # 0x36
'E ', # 0x37
'Cha ', # 0x38
'Qiao ', # 0x39
'Zhong ', # 0x3a
'Duan ', # 0x3b
'Sou ', # 0x3c
'Huang ', # 0x3d
'Huan ', # 0x3e
'Ai ', # 0x3f
'Du ', # 0x40
'Mei ', # 0x41
'Lou ', # 0x42
'Zi ', # 0x43
'Fei ', # 0x44
'Mei ', # 0x45
'Mo ', # 0x46
'Zhen ', # 0x47
'Bo ', # 0x48
'Ge ', # 0x49
'Nie ', # 0x4a
'Tang ', # 0x4b
'Juan ', # 0x4c
'Nie ', # 0x4d
'Na ', # 0x4e
'Liu ', # 0x4f
'Hao ', # 0x50
'Bang ', # 0x51
'Yi ', # 0x52
'Jia ', # 0x53
'Bin ', # 0x54
'Rong ', # 0x55
'Biao ', # 0x56
'Tang ', # 0x57
'Man ', # 0x58
'Luo ', # 0x59
'Beng ', # 0x5a
'Yong ', # 0x5b
'Jing ', # 0x5c
'Di ', # 0x5d
'Zu ', # 0x5e
'Xuan ', # 0x5f
'Liu ', # 0x60
'Tan ', # 0x61
'Jue ', # 0x62
'Liao ', # 0x63
'Pu ', # 0x64
'Lu ', # 0x65
'Dui ', # 0x66
'Lan ', # 0x67
'Pu ', # 0x68
'Cuan ', # 0x69
'Qiang ', # 0x6a
'Deng ', # 0x6b
'Huo ', # 0x6c
'Lei ', # 0x6d
'Huan ', # 0x6e
'Zhuo ', # 0x6f
'Lian ', # 0x70
'Yi ', # 0x71
'Cha ', # 0x72
'Biao ', # 0x73
'La ', # 0x74
'Chan ', # 0x75
'Xiang ', # 0x76
'Chang ', # 0x77
'Chang ', # 0x78
'Jiu ', # 0x79
'Ao ', # 0x7a
'Die ', # 0x7b
'Qu ', # 0x7c
'Liao ', # 0x7d
'Mi ', # 0x7e
'Chang ', # 0x7f
'Men ', # 0x80
'Ma ', # 0x81
'Shuan ', # 0x82
'Shan ', # 0x83
'Huo ', # 0x84
'Men ', # 0x85
'Yan ', # 0x86
'Bi ', # 0x87
'Han ', # 0x88
'Bi ', # 0x89
'San ', # 0x8a
'Kai ', # 0x8b
'Kang ', # 0x8c
'Beng ', # 0x8d
'Hong ', # 0x8e
'Run ', # 0x8f
'San ', # 0x90
'Xian ', # 0x91
'Xian ', # 0x92
'Jian ', # 0x93
'Min ', # 0x94
'Xia ', # 0x95
'Yuru ', # 0x96
'Dou ', # 0x97
'Zha ', # 0x98
'Nao ', # 0x99
'Jian ', # 0x9a
'Peng ', # 0x9b
'Xia ', # 0x9c
'Ling ', # 0x9d
'Bian ', # 0x9e
'Bi ', # 0x9f
'Run ', # 0xa0
'He ', # 0xa1
'Guan ', # 0xa2
'Ge ', # 0xa3
'Ge ', # 0xa4
'Fa ', # 0xa5
'Chu ', # 0xa6
'Hong ', # 0xa7
'Gui ', # 0xa8
'Min ', # 0xa9
'Se ', # 0xaa
'Kun ', # 0xab
'Lang ', # 0xac
'Lu ', # 0xad
'Ting ', # 0xae
'Sha ', # 0xaf
'Ju ', # 0xb0
'Yue ', # 0xb1
'Yue ', # 0xb2
'Chan ', # 0xb3
'Qu ', # 0xb4
'Lin ', # 0xb5
'Chang ', # 0xb6
'Shai ', # 0xb7
'Kun ', # 0xb8
'Yan ', # 0xb9
'Min ', # 0xba
'Yan ', # 0xbb
'E ', # 0xbc
'Hun ', # 0xbd
'Yu ', # 0xbe
'Wen ', # 0xbf
'Xiang ', # 0xc0
'Bao ', # 0xc1
'Xiang ', # 0xc2
'Qu ', # 0xc3
'Yao ', # 0xc4
'Wen ', # 0xc5
'Ban ', # 0xc6
'An ', # 0xc7
'Wei ', # 0xc8
'Yin ', # 0xc9
'Kuo ', # 0xca
'Que ', # 0xcb
'Lan ', # 0xcc
'Du ', # 0xcd
'[?] ', # 0xce
'Phwung ', # 0xcf
'Tian ', # 0xd0
'Nie ', # 0xd1
'Ta ', # 0xd2
'Kai ', # 0xd3
'He ', # 0xd4
'Que ', # 0xd5
'Chuang ', # 0xd6
'Guan ', # 0xd7
'Dou ', # 0xd8
'Qi ', # 0xd9
'Kui ', # 0xda
'Tang ', # 0xdb
'Guan ', # 0xdc
'Piao ', # 0xdd
'Kan ', # 0xde
'Xi ', # 0xdf
'Hui ', # 0xe0
'Chan ', # 0xe1
'Pi ', # 0xe2
'Dang ', # 0xe3
'Huan ', # 0xe4
'Ta ', # 0xe5
'Wen ', # 0xe6
'[?] ', # 0xe7
'Men ', # 0xe8
'Shuan ', # 0xe9
'Shan ', # 0xea
'Yan ', # 0xeb
'Han ', # 0xec
'Bi ', # 0xed
'Wen ', # 0xee
'Chuang ', # 0xef
'Run ', # 0xf0
'Wei ', # 0xf1
'Xian ', # 0xf2
'Hong ', # 0xf3
'Jian ', # 0xf4
'Min ', # 0xf5
'Kang ', # 0xf6
'Men ', # 0xf7
'Zha ', # 0xf8
'Nao ', # 0xf9
'Gui ', # 0xfa
'Wen ', # 0xfb
'Ta ', # 0xfc
'Min ', # 0xfd
'Lu ', # 0xfe
'Kai ', # 0xff
)
|
alexconlin/three.js
|
refs/heads/master
|
utils/exporters/blender/addons/io_three/exporter/material.py
|
70
|
from .. import constants, logger
from . import base_classes, utilities, api
class Material(base_classes.BaseNode):
"""Class that wraps material nodes"""
def __init__(self, node, parent):
logger.debug("Material().__init__(%s)", node)
base_classes.BaseNode.__init__(self, node, parent,
constants.MATERIAL)
self._common_attributes()
if self[constants.TYPE] == constants.THREE_PHONG:
self._phong_attributes()
textures = self.parent.options.get(constants.MAPS)
if textures:
self._update_maps()
def _common_attributes(self):
"""Parse the common material attributes"""
logger.debug('Material()._common_attributes()')
dispatch = {
constants.PHONG: constants.THREE_PHONG,
constants.LAMBERT: constants.THREE_LAMBERT,
constants.BASIC: constants.THREE_BASIC
}
shader_type = api.material.type(self.node)
self[constants.TYPE] = dispatch[shader_type]
diffuse = api.material.diffuse_color(self.node)
self[constants.COLOR] = utilities.rgb2int(diffuse)
if self[constants.TYPE] != constants.THREE_BASIC:
ambient = api.material.ambient_color(self.node)
self[constants.AMBIENT] = utilities.rgb2int(ambient)
emissive = api.material.emissive_color(self.node)
self[constants.EMISSIVE] = utilities.rgb2int(emissive)
vertex_color = api.material.use_vertex_colors(self.node)
self[constants.VERTEX_COLORS] = vertex_color
self[constants.BLENDING] = api.material.blending(self.node)
if api.material.transparent(self.node):
self[constants.TRANSPARENT] = True
if api.material.double_sided(self.node):
self[constants.SIDE] = constants.SIDE_DOUBLE
self[constants.DEPTH_TEST] = api.material.depth_test(self.node)
self[constants.DEPTH_WRITE] = api.material.depth_write(self.node)
def _phong_attributes(self):
"""Parse phong specific attributes"""
logger.debug("Material()._phong_attributes()")
specular = api.material.specular_color(self.node)
self[constants.SPECULAR] = utilities.rgb2int(specular)
self[constants.SHININESS] = api.material.specular_coef(self.node)
def _update_maps(self):
"""Parses maps/textures and updates the textures array
with any new nodes found.
"""
logger.debug("Material()._update_maps()")
mapping = (
(api.material.diffuse_map, constants.MAP),
(api.material.specular_map, constants.SPECULAR_MAP),
(api.material.light_map, constants.LIGHT_MAP)
)
for func, key in mapping:
map_node = func(self.node)
if map_node:
logger.info('Found map node %s for %s', map_node, key)
tex_inst = self.scene.texture(map_node.name)
self[key] = tex_inst[constants.UUID]
if self[constants.TYPE] == constants.THREE_PHONG:
mapping = (
(api.material.bump_map, constants.BUMP_MAP,
constants.BUMP_SCALE, api.material.bump_scale),
(api.material.normal_map, constants.NORMAL_MAP,
constants.NORMAL_SCALE, api.material.normal_scale)
)
for func, map_key, scale_key, scale_func in mapping:
map_node = func(self.node)
if not map_node:
continue
logger.info("Found map node %s for %s", map_node, map_key)
tex_inst = self.scene.texture(map_node.name)
self[map_key] = tex_inst[constants.UUID]
self[scale_key] = scale_func(self.node)
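# For reference, a minimal sketch of the rgb-to-int packing performed by
# utilities.rgb2int above (an assumption: standard 0xRRGGBB packing of float
# channels in [0, 1]; the real helper lives elsewhere in the exporter):
#
#     def rgb2int(rgb):
#         r, g, b = [int(round(c * 255)) for c in rgb[:3]]
#         return (r << 16) | (g << 8) | b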
|
wrouesnel/ansible-modules-core
|
refs/heads/devel
|
cloud/rackspace/rax_clb_nodes.py
|
157
|
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# This is a DOCUMENTATION stub specific to this module, it extends
# a documentation fragment located in ansible.utils.module_docs_fragments
DOCUMENTATION = '''
---
module: rax_clb_nodes
short_description: add, modify and remove nodes from a Rackspace Cloud Load Balancer
description:
- Adds, modifies and removes nodes from a Rackspace Cloud Load Balancer
version_added: "1.4"
options:
address:
required: false
description:
- IP address or domain name of the node
condition:
required: false
choices:
- enabled
- disabled
- draining
description:
- Condition for the node, which determines its role within the load
balancer
load_balancer_id:
required: true
type: integer
description:
- Load balancer id
node_id:
required: false
type: integer
description:
- Node id
port:
required: false
type: integer
description:
- Port number of the load balanced service on the node
state:
required: false
default: "present"
choices:
- present
- absent
description:
- Indicate desired state of the node
type:
required: false
choices:
- primary
- secondary
description:
- Type of node
wait:
required: false
default: "no"
choices:
- "yes"
- "no"
description:
- Wait for the load balancer to become active before returning
wait_timeout:
required: false
type: integer
default: 30
description:
- How long to wait before giving up and returning an error
weight:
required: false
description:
- Weight of node
author: "Lukasz Kawczynski (@neuroid)"
extends_documentation_fragment: rackspace
'''
EXAMPLES = '''
# Add a new node to the load balancer
- local_action:
module: rax_clb_nodes
load_balancer_id: 71
address: 10.2.2.3
port: 80
condition: enabled
type: primary
wait: yes
credentials: /path/to/credentials
# Drain connections from a node
- local_action:
module: rax_clb_nodes
load_balancer_id: 71
node_id: 410
condition: draining
wait: yes
credentials: /path/to/credentials
# Remove a node from the load balancer
- local_action:
module: rax_clb_nodes
load_balancer_id: 71
node_id: 410
state: absent
wait: yes
credentials: /path/to/credentials
'''
try:
import pyrax
HAS_PYRAX = True
except ImportError:
HAS_PYRAX = False
def _activate_virtualenv(path):
path = os.path.expanduser(path)
activate_this = os.path.join(path, 'bin', 'activate_this.py')
execfile(activate_this, dict(__file__=activate_this))
def _get_node(lb, node_id=None, address=None, port=None):
"""Return a matching node"""
for node in getattr(lb, 'nodes', []):
match_list = []
if node_id is not None:
match_list.append(getattr(node, 'id', None) == node_id)
if address is not None:
match_list.append(getattr(node, 'address', None) == address)
if port is not None:
match_list.append(getattr(node, 'port', None) == port)
if match_list and all(match_list):
return node
return None
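# Matching semantics of _get_node, for illustration (values hypothetical):
#   _get_node(lb, node_id=410)                 -> node whose id == 410
#   _get_node(lb, address='10.2.2.3', port=80) -> node matching *both* fields
#   _get_node(lb)                              -> None (match_list stays empty)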
def main():
argument_spec = rax_argument_spec()
argument_spec.update(
dict(
address=dict(),
condition=dict(choices=['enabled', 'disabled', 'draining']),
load_balancer_id=dict(required=True, type='int'),
node_id=dict(type='int'),
port=dict(type='int'),
state=dict(default='present', choices=['present', 'absent']),
type=dict(choices=['primary', 'secondary']),
virtualenv=dict(),
wait=dict(default=False, type='bool'),
wait_timeout=dict(default=30, type='int'),
weight=dict(type='int'),
)
)
module = AnsibleModule(
argument_spec=argument_spec,
required_together=rax_required_together(),
)
if not HAS_PYRAX:
module.fail_json(msg='pyrax is required for this module')
address = module.params['address']
condition = (module.params['condition'] and
module.params['condition'].upper())
load_balancer_id = module.params['load_balancer_id']
node_id = module.params['node_id']
port = module.params['port']
state = module.params['state']
typ = module.params['type'] and module.params['type'].upper()
virtualenv = module.params['virtualenv']
wait = module.params['wait']
wait_timeout = module.params['wait_timeout'] or 1
weight = module.params['weight']
if virtualenv:
try:
_activate_virtualenv(virtualenv)
except IOError, e:
module.fail_json(msg='Failed to activate virtualenv %s (%s)' % (
virtualenv, e))
setup_rax_module(module, pyrax)
if not pyrax.cloud_loadbalancers:
module.fail_json(msg='Failed to instantiate client. This '
'typically indicates an invalid region or an '
'incorrectly capitalized region name.')
try:
lb = pyrax.cloud_loadbalancers.get(load_balancer_id)
except pyrax.exc.PyraxException, e:
module.fail_json(msg='%s' % e.message)
node = _get_node(lb, node_id, address, port)
result = rax_clb_node_to_dict(node)
if state == 'absent':
if not node: # Removing a non-existent node
module.exit_json(changed=False, state=state)
try:
lb.delete_node(node)
result = {}
except pyrax.exc.NotFound:
module.exit_json(changed=False, state=state)
except pyrax.exc.PyraxException, e:
module.fail_json(msg='%s' % e.message)
else: # present
if not node:
if node_id: # Updating a non-existent node
msg = 'Node %d not found' % node_id
if lb.nodes:
msg += (' (available nodes: %s)' %
', '.join([str(x.id) for x in lb.nodes]))
module.fail_json(msg=msg)
else: # Creating a new node
try:
node = pyrax.cloudloadbalancers.Node(
address=address, port=port, condition=condition,
weight=weight, type=typ)
resp, body = lb.add_nodes([node])
result.update(body['nodes'][0])
except pyrax.exc.PyraxException, e:
module.fail_json(msg='%s' % e.message)
else: # Updating an existing node
mutable = {
'condition': condition,
'type': typ,
'weight': weight,
}
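            # In Python 2, dict.items() returns a list copy, so popping
            # entries from `mutable` while iterating below is safe.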
for name, value in mutable.items():
if value is None or value == getattr(node, name):
mutable.pop(name)
if not mutable:
module.exit_json(changed=False, state=state, node=result)
try:
# The diff has to be set explicitly to update node's weight and
# type; this should probably be fixed in pyrax
lb.update_node(node, diff=mutable)
result.update(mutable)
except pyrax.exc.PyraxException, e:
module.fail_json(msg='%s' % e.message)
if wait:
pyrax.utils.wait_until(lb, "status", "ACTIVE", interval=1,
attempts=wait_timeout)
if lb.status != 'ACTIVE':
module.fail_json(
msg='Load balancer not active after %ds (current status: %s)' %
(wait_timeout, lb.status.lower()))
kwargs = {'node': result} if result else {}
module.exit_json(changed=True, state=state, **kwargs)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.rax import *
# invoke the module
main()
|
NOAA-PMEL/PyFerret
|
refs/heads/master
|
pyfermod/regrid/regrid2dtests.py
|
1
|
'''
Unit tests for CurvRectRegridder
@author: Karl Smith
'''
from __future__ import print_function
import unittest
import numpy
import ESMP
from esmpcontrol import ESMPControl
from regrid2d import CurvRectRegridder
class CurvRectRegridderTests(unittest.TestCase):
'''
Unit tests for the CurvRectRegridder class
'''
# flag to indicate when to call ESMPControl().stopESMP()
last_test = False
def setUp(self):
'''
Create some repeatedly used test data.
'''
# Use tuples for the arrays to make sure the NumPy
# arrays created in the class methods are always used;
# not arrays that happened to be passed as input.
# Also verifies passed data is not modified.
        # Rectilinear coordinates, data, and flags
crn_lons = numpy.linspace(-110, -90, 11)
crn_lats = numpy.linspace(0, 32, 9)
ctr_lons = 0.5 * (crn_lons[:-1] + crn_lons[1:])
ctr_lats = 0.5 * (crn_lats[:-1] + crn_lats[1:])
ctr_lats_mat, ctr_lons_mat = numpy.meshgrid(ctr_lats, ctr_lons)
data = -2.0 * numpy.sin(numpy.deg2rad(ctr_lons_mat)) \
* numpy.cos(numpy.deg2rad(ctr_lats_mat))
ctr_flags = numpy.zeros(data.shape, dtype=numpy.int32)
ctr_flags[:2, :2] = 1
crn_flags = numpy.zeros((crn_lons.shape[0], crn_lats.shape[0]), dtype=numpy.int32)
crn_flags[:2, :2] = 1
# Turn rectilinear arrays into tuples
self.rect_corner_lons = tuple(crn_lons)
self.rect_corner_lats = tuple(crn_lats)
self.rect_center_lons = tuple(ctr_lons)
self.rect_center_lats = tuple(ctr_lats)
self.rect_center_ignr = tuple([tuple(subarr) for subarr in ctr_flags.tolist()])
self.rect_corner_ignr = tuple([tuple(subarr) for subarr in crn_flags.tolist()])
self.rect_data = tuple([tuple(subarr) for subarr in data.tolist()])
        # Curvilinear coordinates - one step further out on all sides of the region
crn_lons = numpy.linspace(-112, -88, 13)
crn_lats = numpy.linspace(-4, 36, 11)
crn_lats_mat, crn_lons_mat = numpy.meshgrid(crn_lats, crn_lons)
ctr_lons = 0.5 * (crn_lons[:-1] + crn_lons[1:])
ctr_lats = 0.5 * (crn_lats[:-1] + crn_lats[1:])
ctr_lats_mat, ctr_lons_mat = numpy.meshgrid(ctr_lats, ctr_lons)
# Pull coordinates in some towards the center
crn_lons = crn_lons_mat * numpy.cos(numpy.deg2rad(crn_lats_mat - 16.0) / 2.0)
crn_lats = crn_lats_mat * numpy.cos(numpy.deg2rad(crn_lons_mat + 100.0) / 2.0)
ctr_lons = ctr_lons_mat * numpy.cos(numpy.deg2rad(ctr_lats_mat - 16.0) / 2.0)
ctr_lats = ctr_lats_mat * numpy.cos(numpy.deg2rad(ctr_lons_mat + 100.0) / 2.0)
# Curvilinear data and flags
data = -2.0 * numpy.sin(numpy.deg2rad(ctr_lons)) \
* numpy.cos(numpy.deg2rad(ctr_lats))
ctr_flags = numpy.zeros(data.shape, dtype=numpy.int32)
ctr_flags[:3, :3] = 1
crn_flags = numpy.zeros(crn_lons.shape, dtype=numpy.int32)
crn_flags[:3, :3] = 1
# Turn curvilinear arrays into tuples
self.curv_corner_lons = tuple([tuple(subarr) for subarr in crn_lons.tolist()])
self.curv_corner_lats = tuple([tuple(subarr) for subarr in crn_lats.tolist()])
self.curv_center_lons = tuple([tuple(subarr) for subarr in ctr_lons.tolist()])
self.curv_center_lats = tuple([tuple(subarr) for subarr in ctr_lats.tolist()])
self.curv_center_ignr = tuple([tuple(subarr) for subarr in ctr_flags.tolist()])
self.curv_corner_ignr = tuple([tuple(subarr) for subarr in crn_flags.tolist()])
self.curv_data = tuple([tuple(subarr) for subarr in data.tolist()])
# undef_val must be a numpy array
self.undef_val = numpy.array([1.0E10], dtype=numpy.float64)
if not ESMPControl().startCheckESMP():
self.fail("startCheckESMP did not succeed - test called after last_test set to True")
def test01CurvRectRegridderInit(self):
'''
Test of the CurvRectRegridder.__init__ method.
'''
regridder = CurvRectRegridder()
        self.assertIsNotNone(regridder, "CurvRectRegridder() returned None")
regridder.finalize()
def test02CreateCurvGrid(self):
'''
Tests the CurvRectRegridder.createCurvGrid method.
Since nothing is returned from this method, just
checks for unexpected/expected Errors being raised.
'''
regridder = CurvRectRegridder()
# Test with all corner and center data
regridder.createCurvGrid(self.curv_center_lons, self.curv_center_lats,
self.curv_center_ignr, self.curv_corner_lons,
self.curv_corner_lats, self.curv_corner_ignr)
# Test without flags
regridder.createCurvGrid(self.curv_center_lons, self.curv_center_lats,
None, self.curv_corner_lons, self.curv_corner_lats)
# Test without corners
regridder.createCurvGrid(self.curv_center_lons, self.curv_center_lats,
self.curv_center_ignr)
# Test without corners or flags
regridder.createCurvGrid(self.curv_center_lons, self.curv_center_lats)
# TODO: Test invalid cases
# Done with this regridder
regridder.finalize()
def test03AssignCurvField(self):
'''
        Tests the CurvRectRegridder.assignCurvField method.
Since nothing is returned from this method, just
checks for unexpected/expected Errors being raised.
'''
regridder = CurvRectRegridder()
# Test with all corner and center data
regridder.createCurvGrid(self.curv_center_lons, self.curv_center_lats,
self.curv_center_ignr, self.curv_corner_lons,
self.curv_corner_lats, self.curv_corner_ignr)
regridder.assignCurvField()
regridder.assignCurvField(self.curv_data)
# Test without flags
regridder.createCurvGrid(self.curv_center_lons, self.curv_center_lats,
None, self.curv_corner_lons, self.curv_corner_lats)
regridder.assignCurvField(self.curv_data)
regridder.assignCurvField()
# Test without corners
regridder.createCurvGrid(self.curv_center_lons, self.curv_center_lats,
self.curv_center_ignr)
regridder.assignCurvField(self.curv_data)
regridder.assignCurvField()
# Test without corners or flags
regridder.createCurvGrid(self.curv_center_lons, self.curv_center_lats)
regridder.assignCurvField()
regridder.assignCurvField(self.curv_data)
# TODO: Test invalid cases
# Done with this regridder
regridder.finalize()
def test04CreateRectGrid(self):
'''
Tests the CurvRectRegridder.createRectGrid method.
Since nothing is returned from this method, just
checks for unexpected/expected Errors being raised.
'''
regridder = CurvRectRegridder()
# Test with all corner and center data
regridder.createRectGrid(self.rect_center_lons, self.rect_center_lats,
self.rect_center_ignr, self.rect_corner_lons,
self.rect_corner_lats, self.rect_corner_ignr)
# Test without flags
regridder.createRectGrid(self.rect_center_lons, self.rect_center_lats,
None, self.rect_corner_lons, self.rect_corner_lats)
# Test without corners
regridder.createRectGrid(self.rect_center_lons, self.rect_center_lats,
self.rect_center_ignr)
# Test without corners or flags
regridder.createRectGrid(self.rect_center_lons, self.rect_center_lats)
# TODO: Test invalid cases
# Done with this regridder
regridder.finalize()
def test05AssignRectField(self):
'''
        Tests the CurvRectRegridder.assignRectField method.
Since nothing is returned from this method, just
checks for unexpected/expected Errors being raised.
'''
regridder = CurvRectRegridder()
# Test with all corner and center data
regridder.createRectGrid(self.rect_center_lons, self.rect_center_lats,
self.rect_center_ignr, self.rect_corner_lons,
self.rect_corner_lats, self.rect_corner_ignr)
regridder.assignRectField(self.rect_data)
regridder.assignRectField()
# Test without flags
regridder.createRectGrid(self.rect_center_lons, self.rect_center_lats,
None, self.rect_corner_lons, self.rect_corner_lats)
regridder.assignRectField()
regridder.assignRectField(self.rect_data)
# Test without corners
regridder.createRectGrid(self.rect_center_lons, self.rect_center_lats,
self.rect_center_ignr)
regridder.assignRectField()
regridder.assignRectField(self.rect_data)
# Test without corners or flags
regridder.createRectGrid(self.rect_center_lons, self.rect_center_lats)
regridder.assignRectField(self.rect_data)
regridder.assignRectField()
# TODO: Test invalid cases
# Done with this regridder
regridder.finalize()
def test06RegridCurvToRectConserve(self):
'''
Tests the CurvRectRegridder.regridCurvToRect method using conservative regridding
'''
regridder = CurvRectRegridder()
# Test with all corner and center data, using conservative regridding
regridder.createCurvGrid(self.curv_center_lons, self.curv_center_lats,
self.curv_center_ignr, self.curv_corner_lons,
self.curv_corner_lats, self.curv_corner_ignr)
regridder.assignCurvField(self.curv_data)
regridder.createRectGrid(self.rect_center_lons, self.rect_center_lats,
self.rect_center_ignr, self.rect_corner_lons,
self.rect_corner_lats, self.rect_corner_ignr)
regridder.assignRectField()
regrid_data = regridder.regridCurvToRect(self.undef_val,
ESMP.ESMP_REGRIDMETHOD_CONSERVE)
expect_data = numpy.array(self.rect_data, dtype=numpy.float64)
undef_flags = numpy.array(self.rect_center_ignr, dtype=numpy.bool)
expect_data[undef_flags] = self.undef_val
mismatch_found = False
# Couple "good" points next to ignored data area are a bit wonky
expect_data[2, 0] = self.undef_val
regrid_data[2, 0] = self.undef_val
expect_data[2, 1] = self.undef_val
regrid_data[2, 1] = self.undef_val
for i in range(expect_data.shape[0]):
for j in range(expect_data.shape[1]):
if numpy.abs(expect_data[i, j] - regrid_data[i, j]) > 0.0007:
mismatch_found = True
print("expect = %#6.4f, found = %#6.4f for lon = %5.1f, " \
"lat = %5.1f" % (expect_data[i, j], regrid_data[i, j],
self.rect_center_lons[i], self.rect_center_lats[j]))
if mismatch_found:
self.fail("data mismatch found")
def test07RegridCurvToRectBilinear(self):
'''
Tests the CurvRectRegridder.regridCurvToRect method using bilinear regridding
'''
regridder = CurvRectRegridder()
# Test with only center data and no flags, using bilinear regridding
regridder.createCurvGrid(self.curv_center_lons, self.curv_center_lats)
regridder.assignCurvField(self.curv_data)
regridder.createRectGrid(self.rect_center_lons, self.rect_center_lats)
regridder.assignRectField()
regrid_data = regridder.regridCurvToRect(self.undef_val,
ESMP.ESMP_REGRIDMETHOD_BILINEAR)
expect_data = numpy.array(self.rect_data, dtype=numpy.float64)
mismatch_found = False
# one point falls outside the curvilinear centerpoints grid?
expect_data[5, 0] = self.undef_val
for i in range(expect_data.shape[0]):
for j in range(expect_data.shape[1]):
if numpy.abs(expect_data[i, j] - regrid_data[i, j]) > 0.0003:
mismatch_found = True
print("expect = %#6.4f, found = %#6.4f for lon = %5.1f, " \
"lat = %5.1f" % (expect_data[i, j], regrid_data[i, j],
self.rect_center_lons[i], self.rect_center_lats[j]))
if mismatch_found:
self.fail("data mismatch found")
def test08RegridCurvToRectPatch(self):
'''
Tests the CurvRectRegridder.regridCurvToRect method using patch regridding
'''
regridder = CurvRectRegridder()
# Test with only center data, and flags only on rectilinear centers,
# using patch regridding
regridder.createCurvGrid(self.curv_center_lons, self.curv_center_lats)
regridder.assignCurvField(self.curv_data)
regridder.createRectGrid(self.rect_center_lons, self.rect_center_lats,
self.rect_center_ignr)
regridder.assignRectField()
regrid_data = regridder.regridCurvToRect(self.undef_val,
ESMP.ESMP_REGRIDMETHOD_PATCH)
expect_data = numpy.array(self.rect_data, dtype=numpy.float64)
undef_flags = numpy.array(self.rect_center_ignr, dtype=numpy.bool)
expect_data[undef_flags] = self.undef_val
# one point falls outside the curvilinear centerpoints grid?
expect_data[5, 0] = self.undef_val
mismatch_found = False
for i in range(expect_data.shape[0]):
for j in range(expect_data.shape[1]):
if numpy.abs(expect_data[i, j] - regrid_data[i, j]) > 0.0011:
mismatch_found = True
print("expect = %#6.4f, found = %#6.4f for lon = %5.1f, " \
"lat = %5.1f" % (expect_data[i, j], regrid_data[i, j],
self.rect_center_lons[i], self.rect_center_lats[j]))
if mismatch_found:
self.fail("data mismatch found")
def test09RegridRectToCurvConserve(self):
'''
Tests the CurvRectRegridder.regridRectToCurv method using conservative regridding
'''
regridder = CurvRectRegridder()
# Test with all corner and center data, using conservative regridding
regridder.createCurvGrid(self.curv_center_lons, self.curv_center_lats,
self.curv_center_ignr, self.curv_corner_lons,
self.curv_corner_lats, self.curv_corner_ignr)
regridder.assignCurvField()
regridder.createRectGrid(self.rect_center_lons, self.rect_center_lats,
self.rect_center_ignr, self.rect_corner_lons,
self.rect_corner_lats, self.rect_corner_ignr)
regridder.assignRectField(self.rect_data)
regrid_data = regridder.regridRectToCurv(self.undef_val,
ESMP.ESMP_REGRIDMETHOD_CONSERVE)
expect_data = numpy.array(self.curv_data, dtype=numpy.float64)
undef_flags = numpy.array(self.curv_center_ignr, dtype=numpy.bool)
expect_data[undef_flags] = self.undef_val
# Couple "good" points next to ignored area are a bit wonky
expect_data[1, 3] = self.undef_val
regrid_data[1, 3] = self.undef_val
expect_data[2, 3] = self.undef_val
regrid_data[2, 3] = self.undef_val
mismatch_found = False
# Ignore outermost edges of curvilinear grid since
# they aren't really well covered by the rectilinear grid
# Also ignore the second east-most edge;
# also not well covered and errors are larger
for i in range(1, expect_data.shape[0] - 2):
for j in range(1, expect_data.shape[1] - 1):
if numpy.abs(expect_data[i, j] - regrid_data[i, j]) > 0.0004:
mismatch_found = True
print("expect = %#6.4f, found = %#6.4f for lon = %7.3f, " \
"lat = %7.3f" % (expect_data[i, j], regrid_data[i, j],
self.curv_center_lons[i][j], self.curv_center_lats[i][j]))
if mismatch_found:
self.fail("data mismatch found")
def test10RegridRectToCurvBilinear(self):
'''
Tests the CurvRectRegridder.regridRectToCurv method using bilinear regridding
'''
regridder = CurvRectRegridder()
# Test with only center data and no flags, using bilinear regridding
regridder.createCurvGrid(self.curv_center_lons, self.curv_center_lats)
regridder.assignCurvField()
regridder.createRectGrid(self.rect_center_lons, self.rect_center_lats)
regridder.assignRectField(self.rect_data)
regrid_data = regridder.regridRectToCurv(self.undef_val,
ESMP.ESMP_REGRIDMETHOD_BILINEAR)
expect_data = numpy.array(self.curv_data, dtype=numpy.float64)
mismatch_found = False
# Ignore outermost edges of curvilinear grid since
# they aren't really well covered by the rectilinear grid
# Also ignore the second east-most edge and second south-most edge;
# also not covered
for i in range(1, expect_data.shape[0] - 2):
for j in range(2, expect_data.shape[1] - 1):
if numpy.abs(expect_data[i, j] - regrid_data[i, j]) > 0.0003:
mismatch_found = True
print("expect = %#6.4f, found = %#6.4f for lon = %7.3f, " \
"lat = %7.3f" % (expect_data[i, j], regrid_data[i, j],
self.curv_center_lons[i][j], self.curv_center_lats[i][j]))
if mismatch_found:
self.fail("data mismatch found")
def test11RegridRectToCurvPatch(self):
'''
Tests the CurvRectRegridder.regridRectToCurv method using patch regridding
'''
# Mark as the last test so ESMPControl().stopESMP will be called
self.last_test = True
regridder = CurvRectRegridder()
# Test with only center data, and flags only on curvilinear centers,
# using patch regridding
regridder.createCurvGrid(self.curv_center_lons, self.curv_center_lats,
self.curv_center_ignr)
regridder.assignCurvField()
regridder.createRectGrid(self.rect_center_lons, self.rect_center_lats)
regridder.assignRectField(self.rect_data)
regrid_data = regridder.regridRectToCurv(self.undef_val,
ESMP.ESMP_REGRIDMETHOD_PATCH)
expect_data = numpy.array(self.curv_data, dtype=numpy.float64)
undef_flags = numpy.array(self.curv_center_ignr, dtype=numpy.bool)
expect_data[undef_flags] = self.undef_val
mismatch_found = False
# Ignore outermost edges of curvilinear grid since
# they aren't really well covered by the rectilinear grid
# Also ignore the second east-most edge and second south-most edge;
# also not covered
for i in range(1, expect_data.shape[0] - 2):
for j in range(2, expect_data.shape[1] - 1):
if numpy.abs(expect_data[i, j] - regrid_data[i, j]) > 0.0011:
mismatch_found = True
print("expect = %#6.4f, found = %#6.4f for lon = %7.3f, " \
"lat = %7.3f" % (expect_data[i, j], regrid_data[i, j],
self.curv_center_lons[i][j], self.curv_center_lats[i][j]))
if mismatch_found:
self.fail("data mismatch found")
def tearDown(self):
'''
Finalize ESMP if it has been initialized and if this is the last test
'''
if self.last_test:
ESMPControl().stopESMP(True)
if __name__ == "__main__":
'''
Run the unit tests in this module.
'''
unittest.main()
|
linglaiyao1314/SFrame
|
refs/heads/master
|
oss_src/unity/python/sframe/meta/asttools/tests/test_sourcegen.py
|
15
|
'''
Created on Aug 3, 2011
@author: sean
'''
from __future__ import print_function
import unittest
import ast
from ...asttools.visitors.pysourcegen import SourceGen
from ...asttools.tests import AllTypesTested
from ...testing import py2only, py3only
tested = AllTypesTested()
def simple_expr(expr):
def test_sourcegen_expr(self):
self.assertSame(expr)
return test_sourcegen_expr
def bin_op(op):
def test_bin_op(self):
self.assertSame('(a %s b)' % (op,))
return test_bin_op
def unary_op(op):
def test_bin_op(self):
self.assertSame('(%sb)' % (op,))
return test_bin_op
def aug_assign(op):
def test_bin_op(self):
self.assertSame('a %s= b' % (op,))
return test_bin_op
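# For example, aug_assign('+') yields a test asserting that "a += b" survives
# the ast.parse -> SourceGen -> dumps round trip unchanged.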
class Test(unittest.TestCase):
def assertSame(self, source):
module = ast.parse(source)
tested.update(module)
gen = SourceGen()
gen.visit(module)
generated_source = gen.dumps()
self.assertMultiLineEqual(source, generated_source.strip('\n'))
class TestSimple(Test):
def assertSame(self, source):
module = ast.parse(source)
tested.update(module)
gen = SourceGen()
gen.visit(module)
generated_source = gen.dumps()
self.assertEqual(source, generated_source.strip('\n'))
test_expr = simple_expr('a')
test_del = simple_expr('del a')
test_assign = simple_expr('a = 1')
test_assign_multi = simple_expr('a = b = 1')
test_attr = simple_expr('a.b')
test_assattr = simple_expr('a.b = 1')
test_index = simple_expr('a[b]')
test_index2 = simple_expr('a[b, c]')
test_slice0 = simple_expr('a[:]')
test_slice1 = simple_expr('a[1:]')
test_slice2 = simple_expr('a[1:2]')
test_slice3 = simple_expr('a[1:2:3]')
test_slice4 = simple_expr('a[1::3]')
test_slice5 = simple_expr('a[::3]')
test_slice6 = simple_expr('a[:3]')
test_slice7 = simple_expr('a[...]')
test_raise = simple_expr('raise Foo')
test_raise1 = py2only(simple_expr('raise Foo, bar'))
test_raise2 = py2only(simple_expr('raise Foo, bar, baz'))
test_raise_from = py3only(simple_expr('raise Foo() from bar'))
test_call0 = simple_expr('foo()')
test_call1 = simple_expr('a = foo()')
test_call2 = simple_expr('foo(x)')
test_call3 = simple_expr('foo(x, y)')
test_call4 = simple_expr('foo(x=y)')
test_call5 = simple_expr('foo(z, x=y)')
test_call6 = simple_expr('foo(*z)')
test_call7 = simple_expr('foo(**z)')
test_call8 = simple_expr('foo(a, b=c, *d, **z)')
test_pass = simple_expr('pass')
test_import = simple_expr('import a')
test_import_as = simple_expr('import a as b')
test_from_import = simple_expr('from c import a')
test_from_import_as = simple_expr('from c import a as b')
test_dict0 = simple_expr('{}')
test_dict1 = simple_expr('{a:b}')
test_dict2 = simple_expr('{a:b, c:d}')
test_list0 = simple_expr('[]')
test_list1 = simple_expr('[a]')
test_list2 = simple_expr('[a, b]')
test_set1 = simple_expr('{a}')
test_set2 = simple_expr('{a, b}')
test_exec0 = py2only(simple_expr('exec a in None, None'))
test_exec1 = py2only(simple_expr('exec a in b, None'))
test_exec2 = py2only(simple_expr('exec a in b, c'))
test_assert1 = simple_expr('assert False')
test_assert2 = simple_expr('assert False, msg')
test_global1 = simple_expr('global a')
test_global2 = simple_expr('global a, b')
test_str = simple_expr("x = 'a'")
test_ifexpr = simple_expr("a = b if c else d")
test_lambda = simple_expr("a = lambda a: a")
test_list_comp = simple_expr("[a for b in c]")
test_list_comp_if = simple_expr("[a for b in c if d]")
test_list_comp_if2 = simple_expr("[a for b in c if d if e]")
test_list_comp2 = simple_expr("[a for b in c for d in e]")
test_list_comp3 = simple_expr("[a for b in c for d in e if k for f in g]")
test_set_comp = simple_expr("{a for b in c}")
test_dict_comp = simple_expr("{a:d for b in c}")
test_iadd = aug_assign('+')
test_isub = aug_assign('-')
test_imult = aug_assign('*')
test_ipow = aug_assign('**')
test_idiv = aug_assign('/')
test_ifdiv = aug_assign('//')
test_add = bin_op('+')
test_sub = bin_op('-')
test_mult = bin_op('*')
test_pow = bin_op('**')
test_div = bin_op('/')
test_floordiv = bin_op('//')
test_mod = bin_op('%')
test_eq = bin_op('==')
test_neq = bin_op('!=')
test_lt = bin_op('<')
test_gt = bin_op('>')
test_lte = bin_op('<=')
test_gte = bin_op('>=')
test_lshift = bin_op('<<')
test_rshift = bin_op('>>')
    test_and = bin_op('and')
    test_or = bin_op('or')
test_in = bin_op('in')
test_not_in = bin_op('not in')
test_is = bin_op('is')
test_is_not = bin_op('is not')
test_bitand = bin_op('&')
test_bitor = bin_op('|')
test_bitxor = bin_op('^')
test_usub = unary_op('-')
test_uadd = unary_op('+')
test_unot = unary_op('not ')
test_uinvert = unary_op('~')
class ControlFlow(Test):
def test_if(self):
source = '''if a:
b'''
self.assertSame(source)
def test_if_else(self):
source = '''if a:
b
else:
c'''
self.assertSame(source)
def test_elif_else(self):
source = '''if a:
b
elif d:
e
else:
c'''
self.assertSame(source)
def test_while(self):
source = '''while a:
b'''
self.assertSame(source)
def test_break(self):
source = '''while a:
break'''
self.assertSame(source)
def test_continue(self):
source = '''while a:
continue'''
self.assertSame(source)
def test_with0(self):
source = '''with a:
b'''
self.assertSame(source)
def test_with1(self):
source = '''with a as b:
c'''
self.assertSame(source)
def test_function_def(self):
source = '''def foo():
pass'''
self.assertSame(source)
def test_return(self):
source = '''def foo():
return 1.1'''
self.assertSame(source)
def test_yield(self):
source = '''def foo():
yield 1.1'''
self.assertSame(source)
def test_function_args1(self):
source = '''def foo(a):
pass'''
self.assertSame(source)
def test_function_args2(self):
source = '''def foo(a, b):
pass'''
self.assertSame(source)
def test_function_args3(self):
source = '''def foo(b=c):
pass'''
self.assertSame(source)
def test_function_args4(self):
source = '''def foo(b=c, d=e):
pass'''
self.assertSame(source)
def test_function_args5(self):
source = '''def foo(*a):
pass'''
self.assertSame(source)
def test_try_except(self):
source = '''try:
a
except:
b'''
self.assertSame(source)
def test_try_except1(self):
source = '''try:
a
except Exception:
b'''
self.assertSame(source)
def test_try_except2(self):
source = '''try:
a
except Exception as error:
b'''
self.assertSame(source)
def test_try_except3(self):
source = '''try:
a
except Exception as error:
pass
except:
b'''
self.assertSame(source)
def test_try_except_else(self):
source = '''try:
a
except Exception as error:
pass
except:
b
else:
c'''
self.assertSame(source)
def test_try_except_finally(self):
source = '''try:
a
except Exception as error:
pass
except:
b
finally:
c'''
self.assertSame(source)
def test_for(self):
source = '''for i in j:
pass'''
self.assertSame(source)
def test_for_else(self):
source = '''for i in j:
l
else:
k'''
self.assertSame(source)
def test_class_def(self):
source = '''class A():
pass'''
self.assertSame(source)
def test_class_def1(self):
source = '''class A(object):
pass'''
self.assertSame(source)
def test_class_def2(self):
source = '''class A(object, foo):
pass'''
self.assertSame(source)
def test_class_def3(self):
source = '''class A(object, foo):
a = 1
def bar():
pass'''
self.assertSame(source)
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.test_expr']
unittest.main(exit=False)
print(tested.tested())
|
kylebegovich/ProjectEuler
|
refs/heads/master
|
Python/Solved/Page2/Problem57.py
|
1
|
def next_numerator(prev_numerator, prev_denominator):
return prev_numerator + (2*prev_denominator)
def next_denominator(next_numerator, prev_denominator):
    # Yes: the *next* (already-updated) numerator is passed here on purpose,
    # not the previous one
return next_numerator - prev_denominator
if __name__ == '__main__':
curr_numerator = 3
curr_denominator = 2
count = 0
for i in range(1000):
if len(str(curr_numerator)) > len(str(curr_denominator)):
count += 1
curr_numerator = next_numerator(curr_numerator, curr_denominator)
curr_denominator = next_denominator(curr_numerator, curr_denominator)
print(count)
# SOLVED : 153
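# The helpers above implement the sqrt(2) convergent recurrence
# n' = n + 2d and d' = n' - d = n + d, giving 3/2 -> 7/5 -> 17/12 -> 41/29 ...
# The count tallies the expansions whose numerator has more digits than the
# denominator.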
|
grnet/synnefo
|
refs/heads/develop
|
snf-cyclades-app/synnefo/volume/urls.py
|
1
|
# Copyright (C) 2010-2016 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf import settings
from django.conf.urls import patterns, include
from django.http import HttpResponseNotAllowed
from snf_django.lib import api
from synnefo.volume import views, util
from synnefo.volume.versions import versions_list
from snf_django.lib.api import faults, utils
def volume_demux(request):
if request.method == 'GET':
return views.list_volumes(request)
elif request.method == 'POST':
return views.create_volume(request)
else:
return HttpResponseNotAllowed(['GET', 'POST'])
def volume_item_demux(request, volume_id):
if request.method == "GET":
return views.get_volume(request, volume_id)
elif request.method == "PUT":
return views.update_volume(request, volume_id)
elif request.method == "DELETE":
return views.delete_volume(request, volume_id)
else:
return HttpResponseNotAllowed(["GET", "PUT", "DELETE"])
def volume_metadata_demux(request, volume_id):
if request.method == 'GET':
return views.list_volume_metadata(request, volume_id)
elif request.method == 'POST':
return views.update_volume_metadata(request, volume_id, reset=False)
elif request.method == 'PUT':
return views.update_volume_metadata(request, volume_id, reset=True)
else:
return HttpResponseNotAllowed(['GET', 'POST', 'PUT'])
def volume_metadata_item_demux(request, volume_id, key):
if request.method == 'DELETE':
return views.delete_volume_metadata_item(request, volume_id, key)
else:
return HttpResponseNotAllowed(['DELETE'])
VOLUME_ACTIONS = {
"reassign": views.reassign_volume,
}
def volume_action_demux(request, volume_id):
req = utils.get_json_body(request)
    if not isinstance(req, dict) or len(req) != 1:
raise faults.BadRequest("Malformed request")
action = req.keys()[0]
if not isinstance(action, basestring):
raise faults.BadRequest("Malformed Request. Invalid action.")
try:
action_func = VOLUME_ACTIONS[action]
except KeyError:
raise faults.BadRequest("Action %s not supported" % action)
action_args = utils.get_attribute(req, action, required=True,
attr_type=dict)
return action_func(request, volume_id, action_args)
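# Example request body handled above; "reassign" is the only registered
# action, and its argument dict is passed through unchanged (the "project"
# key below is illustrative, not taken from this module):
#
#     POST /volumes/42/action
#     {"reassign": {"project": "..."}}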
def snapshot_demux(request):
if request.method == 'GET':
return views.list_snapshots(request)
elif request.method == 'POST':
return views.create_snapshot(request)
else:
return HttpResponseNotAllowed(['GET', 'POST'])
def snapshot_item_demux(request, snapshot_id):
if request.method == "GET":
return views.get_snapshot(request, snapshot_id)
elif request.method == "PUT":
return views.update_snapshot(request, snapshot_id)
elif request.method == "DELETE":
return views.delete_snapshot(request, snapshot_id)
else:
return HttpResponseNotAllowed(["GET", "PUT", "DELETE"])
def snapshot_metadata_demux(request, snapshot_id):
if request.method == 'GET':
return views.list_snapshot_metadata(request, snapshot_id)
elif request.method == 'POST':
return views.update_snapshot_metadata(request, snapshot_id,
reset=False)
elif request.method == 'PUT':
return views.update_snapshot_metadata(request, snapshot_id, reset=True)
else:
return HttpResponseNotAllowed(['GET', 'POST', 'PUT'])
def snapshot_metadata_item_demux(request, snapshot_id, key):
if request.method == 'DELETE':
return views.delete_snapshot_metadata_item(request, snapshot_id, key)
else:
return HttpResponseNotAllowed(['DELETE'])
volume_v2_patterns = patterns(
'',
(r'^volumes/?(?:.json)?$', volume_demux),
(r'^volumes/detail(?:.json)?$', views.list_volumes, {'detail': True}),
(r'^volumes/(\d+)(?:.json)?$', volume_item_demux),
(r'^volumes/(\d+)/metadata/?(?:.json)?$', volume_metadata_demux),
(r'^volumes/(\d+)/metadata/(.+)(?:.json)?$', volume_metadata_item_demux),
(r'^volumes/(\d+)/action(?:.json|.xml)?$', volume_action_demux),
(r'^types/?(?:.json)?$', views.list_volume_types),
(r'^types/(\d+)(?:.json)?$', views.get_volume_type),
)
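# Illustrative routing for the patterns above:
#   GET  /volumes/42          -> volume_item_demux
#   POST /volumes/42/action   -> volume_action_demux
#   GET  /types/1.json        -> views.get_volume_type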
if settings.CYCLADES_SNAPSHOTS_ENABLED:
volume_v2_patterns += patterns(
'',
(r'^snapshots/?(?:.json)?$', snapshot_demux),
(r'^snapshots/detail$', views.list_snapshots, {'detail': True}),
(r'^snapshots/([\w-]+)(?:.json)?$', snapshot_item_demux),
(r'^snapshots/([\w-]+)/metadata/?(?:.json)?$',
snapshot_metadata_demux),
(r'^snapshots/([\w-]+)/metadata/(.+)(?:.json)?$',
snapshot_metadata_item_demux),
)
urlpatterns = patterns(
'',
(r'^(?:.json)?$', versions_list),
(r'^v2.0/', include(volume_v2_patterns)),
(r'^v2/', include(volume_v2_patterns)),
(r'^.*', api.api_endpoint_not_found),
)
|
GraemeFulton/job-search
|
refs/heads/master
|
docutils-0.12/test/test_parsers/test_rst/test_option_lists.py
|
18
|
#! /usr/bin/env python
# $Id: test_option_lists.py 7128 2011-09-17 18:00:35Z grubert $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.
"""
Tests for states.py.
"""
from __init__ import DocutilsTestSupport
def suite():
s = DocutilsTestSupport.ParserTestSuite()
s.generateTests(totest)
return s
totest = {}
totest['option_lists'] = [
["""\
Short options:
-a option -a
-b file option -b
-c name option -c
""",
"""\
<document source="test data">
<paragraph>
Short options:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
-a
<description>
<paragraph>
option -a
<option_list_item>
<option_group>
<option>
<option_string>
-b
<option_argument delimiter=" ">
file
<description>
<paragraph>
option -b
<option_list_item>
<option_group>
<option>
<option_string>
-c
<option_argument delimiter=" ">
name
<description>
<paragraph>
option -c
"""],
["""\
Long options:
--aaaa option --aaaa
--bbbb=file option --bbbb
--cccc name option --cccc
--d-e-f-g option --d-e-f-g
--h_i_j_k option --h_i_j_k
""",
"""\
<document source="test data">
<paragraph>
Long options:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
--aaaa
<description>
<paragraph>
option --aaaa
<option_list_item>
<option_group>
<option>
<option_string>
--bbbb
<option_argument delimiter="=">
file
<description>
<paragraph>
option --bbbb
<option_list_item>
<option_group>
<option>
<option_string>
--cccc
<option_argument delimiter=" ">
name
<description>
<paragraph>
option --cccc
<option_list_item>
<option_group>
<option>
<option_string>
--d-e-f-g
<description>
<paragraph>
option --d-e-f-g
<option_list_item>
<option_group>
<option>
<option_string>
--h_i_j_k
<description>
<paragraph>
option --h_i_j_k
"""],
["""\
Old GNU-style options:
+a option +a
+b file option +b
+c name option +c
""",
"""\
<document source="test data">
<paragraph>
Old GNU-style options:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
+a
<description>
<paragraph>
option +a
<option_list_item>
<option_group>
<option>
<option_string>
+b
<option_argument delimiter=" ">
file
<description>
<paragraph>
option +b
<option_list_item>
<option_group>
<option>
<option_string>
+c
<option_argument delimiter=" ">
name
<description>
<paragraph>
option +c
"""],
["""\
VMS/DOS-style options:
/A option /A
/B file option /B
/CCC option /CCC
/DDD string option /DDD
/EEE=int option /EEE
""",
"""\
<document source="test data">
<paragraph>
VMS/DOS-style options:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
/A
<description>
<paragraph>
option /A
<option_list_item>
<option_group>
<option>
<option_string>
/B
<option_argument delimiter=" ">
file
<description>
<paragraph>
option /B
<option_list_item>
<option_group>
<option>
<option_string>
/CCC
<description>
<paragraph>
option /CCC
<option_list_item>
<option_group>
<option>
<option_string>
/DDD
<option_argument delimiter=" ">
string
<description>
<paragraph>
option /DDD
<option_list_item>
<option_group>
<option>
<option_string>
/EEE
<option_argument delimiter="=">
int
<description>
<paragraph>
option /EEE
"""],
["""\
Mixed short, long, and VMS/DOS options:
-a option -a
--bbbb=file option -bbbb
/C option /C
--dddd name option --dddd
-e string option -e
/F file option /F
""",
"""\
<document source="test data">
<paragraph>
Mixed short, long, and VMS/DOS options:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
-a
<description>
<paragraph>
option -a
<option_list_item>
<option_group>
<option>
<option_string>
--bbbb
<option_argument delimiter="=">
file
<description>
<paragraph>
option -bbbb
<option_list_item>
<option_group>
<option>
<option_string>
/C
<description>
<paragraph>
option /C
<option_list_item>
<option_group>
<option>
<option_string>
--dddd
<option_argument delimiter=" ">
name
<description>
<paragraph>
option --dddd
<option_list_item>
<option_group>
<option>
<option_string>
-e
<option_argument delimiter=" ">
string
<description>
<paragraph>
option -e
<option_list_item>
<option_group>
<option>
<option_string>
/F
<option_argument delimiter=" ">
file
<description>
<paragraph>
option /F
"""],
["""\
Aliased options:
-a, --aaaa, /A option -a, --aaaa, /A
-b file, --bbbb=file, /B file option -b, --bbbb, /B
""",
"""\
<document source="test data">
<paragraph>
Aliased options:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
-a
<option>
<option_string>
--aaaa
<option>
<option_string>
/A
<description>
<paragraph>
option -a, --aaaa, /A
<option_list_item>
<option_group>
<option>
<option_string>
-b
<option_argument delimiter=" ">
file
<option>
<option_string>
--bbbb
<option_argument delimiter="=">
file
<option>
<option_string>
/B
<option_argument delimiter=" ">
file
<description>
<paragraph>
option -b, --bbbb, /B
"""],
["""\
Multiple lines in descriptions, aligned:
-a option -a, line 1
line 2
-b file option -b, line 1
line 2
""",
"""\
<document source="test data">
<paragraph>
Multiple lines in descriptions, aligned:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
-a
<description>
<paragraph>
option -a, line 1
line 2
<option_list_item>
<option_group>
<option>
<option_string>
-b
<option_argument delimiter=" ">
file
<description>
<paragraph>
option -b, line 1
line 2
"""],
["""\
Multiple lines in descriptions, not aligned:
-a option -a, line 1
line 2
-b file option -b, line 1
line 2
""",
"""\
<document source="test data">
<paragraph>
Multiple lines in descriptions, not aligned:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
-a
<description>
<paragraph>
option -a, line 1
line 2
<option_list_item>
<option_group>
<option>
<option_string>
-b
<option_argument delimiter=" ">
file
<description>
<paragraph>
option -b, line 1
line 2
"""],
["""\
Descriptions begin on next line:
-a
option -a, line 1
line 2
-b file
option -b, line 1
line 2
""",
"""\
<document source="test data">
<paragraph>
Descriptions begin on next line:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
-a
<description>
<paragraph>
option -a, line 1
line 2
<option_list_item>
<option_group>
<option>
<option_string>
-b
<option_argument delimiter=" ">
file
<description>
<paragraph>
option -b, line 1
line 2
"""],
["""\
Multiple body elements in descriptions:
-a option -a, para 1
para 2
-b file
option -b, para 1
para 2
""",
"""\
<document source="test data">
<paragraph>
Multiple body elements in descriptions:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
-a
<description>
<paragraph>
option -a, para 1
<paragraph>
para 2
<option_list_item>
<option_group>
<option>
<option_string>
-b
<option_argument delimiter=" ">
file
<description>
<paragraph>
option -b, para 1
<paragraph>
para 2
"""],
["""\
--option
empty item above, no blank line
""",
"""\
<document source="test data">
<paragraph>
--option
empty item above, no blank line
"""],
["""\
An option list using equals:
--long1=arg1 Description 1
--long2=arg2 Description 2
An option list using spaces:
--long1 arg1 Description 1
--long2 arg2 Description 2
An option list using mixed delimiters:
--long1=arg1 Description 1
--long2 arg2 Description 2
An option list using mixed delimiters in one line:
--long1=arg1, --long2 arg2 Description
""",
"""\
<document source="test data">
<paragraph>
An option list using equals:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
--long1
<option_argument delimiter="=">
arg1
<description>
<paragraph>
Description 1
<option_list_item>
<option_group>
<option>
<option_string>
--long2
<option_argument delimiter="=">
arg2
<description>
<paragraph>
Description 2
<paragraph>
An option list using spaces:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
--long1
<option_argument delimiter=" ">
arg1
<description>
<paragraph>
Description 1
<option_list_item>
<option_group>
<option>
<option_string>
--long2
<option_argument delimiter=" ">
arg2
<description>
<paragraph>
Description 2
<paragraph>
An option list using mixed delimiters:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
--long1
<option_argument delimiter="=">
arg1
<description>
<paragraph>
Description 1
<option_list_item>
<option_group>
<option>
<option_string>
--long2
<option_argument delimiter=" ">
arg2
<description>
<paragraph>
Description 2
<paragraph>
An option list using mixed delimiters in one line:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
--long1
<option_argument delimiter="=">
arg1
<option>
<option_string>
--long2
<option_argument delimiter=" ">
arg2
<description>
<paragraph>
Description
"""],
["""\
Some edge cases:
--option=arg arg too many arguments
--option=arg,arg not supported (yet?)
--option=arg=arg too many arguments
--option arg arg too many arguments
-a letter arg2 too many arguments
/A letter arg2 too many arguments
--option= argument missing
--=argument option missing
-- everything missing
- this should be a bullet list item
These next ones should be simple paragraphs:
-1
--option
--1
-1 and this one too.
""",
"""\
<document source="test data">
<paragraph>
Some edge cases:
<paragraph>
--option=arg arg too many arguments
<paragraph>
--option=arg,arg not supported (yet?)
<paragraph>
--option=arg=arg too many arguments
<paragraph>
--option arg arg too many arguments
<paragraph>
-a letter arg2 too many arguments
<paragraph>
/A letter arg2 too many arguments
<paragraph>
--option= argument missing
<paragraph>
--=argument option missing
<paragraph>
-- everything missing
<bullet_list bullet="-">
<list_item>
<paragraph>
this should be a bullet list item
<paragraph>
These next ones should be simple paragraphs:
<paragraph>
-1
<paragraph>
--option
<paragraph>
--1
<paragraph>
-1 and this one too.
"""],
["""\
Complex optargs:
--source-url=<URL> Use the supplied <URL> verbatim.
--output-encoding=<name[:handler]>, -o<name[:handler]>
Specify the text encoding for output.
--af=<filter1[=parameter1:parameter2:...],filter2,...>
Setup a chain of audio filters.
Option argument containing delimiter ``=``.
-f <[path]filename> Send output to file.
-d <src dest> Use diff from <src> to <dest>.
--bogus=<x y z> Bogus 3D coordinates.
""",
"""\
<document source="test data">
<paragraph>
Complex optargs:
<option_list>
<option_list_item>
<option_group>
<option>
<option_string>
--source-url
<option_argument delimiter="=">
<URL>
<description>
<paragraph>
Use the supplied <URL> verbatim.
<option_list_item>
<option_group>
<option>
<option_string>
--output-encoding
<option_argument delimiter="=">
<name[:handler]>
<option>
<option_string>
-o
<option_argument delimiter="">
<name[:handler]>
<description>
<paragraph>
Specify the text encoding for output.
<option_list_item>
<option_group>
<option>
<option_string>
--af
<option_argument delimiter="=">
<filter1[=parameter1:parameter2:...],filter2,...>
<description>
<paragraph>
Setup a chain of audio filters.
Option argument containing delimiter \n\
<literal>
=
.
<option_list_item>
<option_group>
<option>
<option_string>
-f
<option_argument delimiter=" ">
<[path]filename>
<description>
<paragraph>
Send output to file.
<option_list_item>
<option_group>
<option>
<option_string>
-d
<option_argument delimiter=" ">
<src dest>
<description>
<paragraph>
Use diff from <src> to <dest>.
<option_list_item>
<option_group>
<option>
<option_string>
--bogus
<option_argument delimiter="=">
<x y z>
<description>
<paragraph>
Bogus 3D coordinates.
"""],
]
if __name__ == '__main__':
import unittest
unittest.main(defaultTest='suite')
|
jmcarp/django
|
refs/heads/master
|
tests/gis_tests/geogapp/tests.py
|
253
|
"""
Tests for geography support in PostGIS
"""
from __future__ import unicode_literals
import os
from unittest import skipUnless
from django.contrib.gis.db.models.functions import Area, Distance
from django.contrib.gis.gdal import HAS_GDAL
from django.contrib.gis.measure import D
from django.test import TestCase, ignore_warnings, skipUnlessDBFeature
from django.utils._os import upath
from django.utils.deprecation import RemovedInDjango20Warning
from ..utils import oracle, postgis
from .models import City, County, Zipcode
@skipUnlessDBFeature("gis_enabled")
class GeographyTest(TestCase):
fixtures = ['initial']
def test01_fixture_load(self):
"Ensure geography features loaded properly."
self.assertEqual(8, City.objects.count())
@skipUnlessDBFeature("supports_distances_lookups", "supports_distance_geodetic")
def test02_distance_lookup(self):
"Testing GeoQuerySet distance lookup support on non-point geography fields."
z = Zipcode.objects.get(code='77002')
cities1 = list(City.objects
.filter(point__distance_lte=(z.poly, D(mi=500)))
.order_by('name')
.values_list('name', flat=True))
cities2 = list(City.objects
.filter(point__dwithin=(z.poly, D(mi=500)))
.order_by('name')
.values_list('name', flat=True))
for cities in [cities1, cities2]:
self.assertEqual(['Dallas', 'Houston', 'Oklahoma City'], cities)
@skipUnlessDBFeature("has_distance_method", "supports_distance_geodetic")
@ignore_warnings(category=RemovedInDjango20Warning)
def test03_distance_method(self):
"Testing GeoQuerySet.distance() support on non-point geography fields."
        # `GeoQuerySet.distance` works on geography fields, not just geometry fields.
htown = City.objects.get(name='Houston')
Zipcode.objects.distance(htown.point)
@skipUnless(postgis, "This is a PostGIS-specific test")
def test04_invalid_operators_functions(self):
"Ensuring exceptions are raised for operators & functions invalid on geography fields."
# Only a subset of the geometry functions & operator are available
# to PostGIS geography types. For more information, visit:
# http://postgis.refractions.net/documentation/manual-1.5/ch08.html#PostGIS_GeographyFunctions
z = Zipcode.objects.get(code='77002')
# ST_Within not available.
self.assertRaises(ValueError, City.objects.filter(point__within=z.poly).count)
# `@` operator not available.
self.assertRaises(ValueError, City.objects.filter(point__contained=z.poly).count)
# Regression test for #14060, `~=` was never really implemented for PostGIS.
htown = City.objects.get(name='Houston')
self.assertRaises(ValueError, City.objects.get, point__exact=htown.point)
@skipUnless(HAS_GDAL, "GDAL is required.")
def test05_geography_layermapping(self):
"Testing LayerMapping support on models with geography fields."
# There is a similar test in `layermap` that uses the same data set,
# but the County model here is a bit different.
from django.contrib.gis.utils import LayerMapping
# Getting the shapefile and mapping dictionary.
shp_path = os.path.realpath(os.path.join(os.path.dirname(upath(__file__)), '..', 'data'))
co_shp = os.path.join(shp_path, 'counties', 'counties.shp')
co_mapping = {'name': 'Name',
'state': 'State',
'mpoly': 'MULTIPOLYGON',
}
# Reference county names, number of polygons, and state names.
names = ['Bexar', 'Galveston', 'Harris', 'Honolulu', 'Pueblo']
num_polys = [1, 2, 1, 19, 1] # Number of polygons for each.
st_names = ['Texas', 'Texas', 'Texas', 'Hawaii', 'Colorado']
lm = LayerMapping(County, co_shp, co_mapping, source_srs=4269, unique='name')
lm.save(silent=True, strict=True)
for c, name, num_poly, state in zip(County.objects.order_by('name'), names, num_polys, st_names):
self.assertEqual(4326, c.mpoly.srid)
self.assertEqual(num_poly, len(c.mpoly))
self.assertEqual(name, c.name)
self.assertEqual(state, c.state)
@skipUnlessDBFeature("has_area_method", "supports_distance_geodetic")
@ignore_warnings(category=RemovedInDjango20Warning)
def test06_geography_area(self):
"Testing that Area calculations work on geography columns."
# SELECT ST_Area(poly) FROM geogapp_zipcode WHERE code='77002';
ref_area = 5439100.95415646 if oracle else 5439084.70637573
tol = 5
z = Zipcode.objects.area().get(code='77002')
self.assertAlmostEqual(z.area.sq_m, ref_area, tol)
@skipUnlessDBFeature("gis_enabled")
class GeographyFunctionTests(TestCase):
fixtures = ['initial']
@skipUnlessDBFeature("has_Distance_function", "supports_distance_geodetic")
def test_distance_function(self):
"""
Testing Distance() support on non-point geography fields.
"""
ref_dists = [0, 4891.20, 8071.64, 9123.95]
htown = City.objects.get(name='Houston')
qs = Zipcode.objects.annotate(distance=Distance('poly', htown.point))
for z, ref in zip(qs, ref_dists):
self.assertAlmostEqual(z.distance.m, ref, 2)
@skipUnlessDBFeature("has_Area_function", "supports_distance_geodetic")
def test_geography_area(self):
"""
Testing that Area calculations work on geography columns.
"""
# SELECT ST_Area(poly) FROM geogapp_zipcode WHERE code='77002';
ref_area = 5439100.95415646 if oracle else 5439084.70637573
tol = 5
z = Zipcode.objects.annotate(area=Area('poly')).get(code='77002')
self.assertAlmostEqual(z.area.sq_m, ref_area, tol)
|
damdam-s/OpenUpgrade
|
refs/heads/8.0
|
addons/account_payment/wizard/account_payment_populate_statement.py
|
274
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from lxml import etree
from openerp.osv import fields, osv
class account_payment_populate_statement(osv.osv_memory):
_name = "account.payment.populate.statement"
_description = "Account Payment Populate Statement"
_columns = {
'lines': fields.many2many('payment.line', 'payment_line_rel_', 'payment_id', 'line_id', 'Payment Lines')
}
def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
line_obj = self.pool.get('payment.line')
res = super(account_payment_populate_statement, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=False)
line_ids = line_obj.search(cr, uid, [
('move_line_id.reconcile_id', '=', False),
('bank_statement_line_id', '=', False),
('move_line_id.state','=','valid')])
line_ids.extend(line_obj.search(cr, uid, [
('move_line_id.reconcile_id', '=', False),
('order_id.mode', '=', False),
('move_line_id.state','=','valid')]))
        domain = '[("id", "in", ' + str(line_ids) + ')]'
doc = etree.XML(res['arch'])
nodes = doc.xpath("//field[@name='lines']")
for node in nodes:
node.set('domain', domain)
res['arch'] = etree.tostring(doc)
return res
def populate_statement(self, cr, uid, ids, context=None):
line_obj = self.pool.get('payment.line')
statement_obj = self.pool.get('account.bank.statement')
statement_line_obj = self.pool.get('account.bank.statement.line')
currency_obj = self.pool.get('res.currency')
if context is None:
context = {}
data = self.read(cr, uid, ids, context=context)[0]
line_ids = data['lines']
if not line_ids:
return {'type': 'ir.actions.act_window_close'}
statement = statement_obj.browse(cr, uid, context['active_id'], context=context)
for line in line_obj.browse(cr, uid, line_ids, context=context):
ctx = context.copy()
            ctx['date'] = line.ml_maturity_date  # was value_date before, but that field no longer exists
amount = currency_obj.compute(cr, uid, line.currency.id,
statement.currency.id, line.amount_currency, context=ctx)
st_line_vals = self._prepare_statement_line_vals(cr, uid, line, amount, statement, context=context)
st_line_id = statement_line_obj.create(cr, uid, st_line_vals, context=context)
line_obj.write(cr, uid, [line.id], {'bank_statement_line_id': st_line_id})
return {'type': 'ir.actions.act_window_close'}
def _prepare_statement_line_vals(self, cr, uid, payment_line, amount,
statement, context=None):
return {
'name': payment_line.order_id.reference or '?',
            'amount': -amount,
'partner_id': payment_line.partner_id.id,
'statement_id': statement.id,
'ref': payment_line.communication,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
rsunder10/PopularityBased-SearchEngine
|
refs/heads/master
|
lib/python3.4/site-packages/django/contrib/admindocs/middleware.py
|
477
|
from django import http
from django.conf import settings
class XViewMiddleware(object):
"""
Adds an X-View header to internal HEAD requests -- used by the documentation system.
"""
def process_view(self, request, view_func, view_args, view_kwargs):
"""
If the request method is HEAD and either the IP is internal or the
user is a logged-in staff member, quickly return with an x-header
indicating the view function. This is used by the documentation module
to lookup the view function for an arbitrary page.
"""
assert hasattr(request, 'user'), (
"The XView middleware requires authentication middleware to be "
"installed. Edit your MIDDLEWARE_CLASSES setting to insert "
"'django.contrib.auth.middleware.AuthenticationMiddleware'.")
if request.method == 'HEAD' and (request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS or
(request.user.is_active and request.user.is_staff)):
response = http.HttpResponse()
response['X-View'] = "%s.%s" % (view_func.__module__, view_func.__name__)
return response
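# A minimal sketch of wiring this middleware into a settings module. The
# dotted paths are the real ones for django.contrib.admindocs and
# django.contrib.auth; the INTERNAL_IPS value is an assumed example:
#
#     MIDDLEWARE_CLASSES = [
#         'django.contrib.auth.middleware.AuthenticationMiddleware',
#         'django.contrib.admindocs.middleware.XViewMiddleware',
#     ]
#     INTERNAL_IPS = ['127.0.0.1']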
|
martijnengler/mycli
|
refs/heads/master
|
mycli/key_bindings.py
|
14
|
import logging
from prompt_toolkit.keys import Keys
from prompt_toolkit.key_binding.manager import KeyBindingManager
from prompt_toolkit.filters import Condition
_logger = logging.getLogger(__name__)
def mycli_bindings(get_key_bindings, set_key_bindings):
"""
Custom key bindings for mycli.
"""
assert callable(get_key_bindings)
assert callable(set_key_bindings)
key_binding_manager = KeyBindingManager(
enable_open_in_editor=True,
enable_system_bindings=True,
enable_vi_mode=Condition(lambda cli: get_key_bindings() == 'vi'))
@key_binding_manager.registry.add_binding(Keys.F2)
def _(event):
"""
Enable/Disable SmartCompletion Mode.
"""
_logger.debug('Detected F2 key.')
buf = event.cli.current_buffer
buf.completer.smart_completion = not buf.completer.smart_completion
@key_binding_manager.registry.add_binding(Keys.F3)
def _(event):
"""
Enable/Disable Multiline Mode.
"""
_logger.debug('Detected F3 key.')
buf = event.cli.current_buffer
buf.always_multiline = not buf.always_multiline
@key_binding_manager.registry.add_binding(Keys.F4)
def _(event):
"""
Toggle between Vi and Emacs mode.
"""
_logger.debug('Detected F4 key.')
if get_key_bindings() == 'vi':
set_key_bindings('emacs')
else:
set_key_bindings('vi')
@key_binding_manager.registry.add_binding(Keys.Tab)
def _(event):
"""
Force autocompletion at cursor.
"""
_logger.debug('Detected <Tab> key.')
b = event.cli.current_buffer
if b.complete_state:
b.complete_next()
else:
event.cli.start_completion(select_first=True)
@key_binding_manager.registry.add_binding(Keys.ControlSpace)
def _(event):
"""
Initialize autocompletion at cursor.
If the autocompletion menu is not showing, display it with the
appropriate completions for the context.
If the menu is showing, select the next completion.
"""
_logger.debug('Detected <C-Space> key.')
b = event.cli.current_buffer
if b.complete_state:
b.complete_next()
else:
event.cli.start_completion(select_first=False)
return key_binding_manager
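# A minimal wiring sketch (the getter/setter below are assumed stand-ins
# for mycli's real configuration plumbing):
#
#     _mode = ['emacs']
#     manager = mycli_bindings(lambda: _mode[0],
#                              lambda m: _mode.__setitem__(0, m))
#     # manager.registry can then be handed to the prompt_toolkit CLI.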
|
lxn2/mxnet
|
refs/heads/master
|
example/recommenders/matrix_fact.py
|
15
|
import math
import mxnet as mx
import numpy as np
import mxnet.notebook.callback
import logging
logging.basicConfig(level=logging.DEBUG)
def RMSE(label, pred):
ret = 0.0
n = 0.0
pred = pred.flatten()
for i in range(len(label)):
ret += (label[i] - pred[i]) * (label[i] - pred[i])
n += 1.0
return math.sqrt(ret / n)
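# A numpy-vectorized sketch of the same metric; assumes `label` is an
# ndarray like `pred`. The explicit loop above is kept for readability,
# and train() below still uses RMSE.
def rmse_vectorized(label, pred):
    pred = pred.flatten()
    return math.sqrt(np.mean((label - pred) ** 2))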
def train(network, data_pair, num_epoch, learning_rate, optimizer='sgd', opt_args=None, ctx=[mx.gpu(0)]):
np.random.seed(123) # Fix random seed for consistent demos
mx.random.seed(123) # Fix random seed for consistent demos
if not opt_args:
opt_args = {}
    if optimizer == 'sgd' and not opt_args:
opt_args['momentum'] = 0.9
model = mx.model.FeedForward(
ctx = ctx,
symbol = network,
num_epoch = num_epoch,
optimizer = optimizer,
learning_rate = learning_rate,
wd = 1e-4,
**opt_args
)
    train, test = data_pair
lc = mxnet.notebook.callback.LiveLearningCurve('RMSE', 1)
model.fit(X = train,
eval_data = test,
eval_metric = RMSE,
**mxnet.notebook.callback.args_wrapper(lc)
)
return lc
|
daveferrara1/linkchecker
|
refs/heads/master
|
third_party/miniboa-r42/hello_demo.py
|
13
|
#!/usr/bin/env python
#------------------------------------------------------------------------------
# hello_demo.py
# Copyright 2009 Jim Storch
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain a
# copy of the License at http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#------------------------------------------------------------------------------
"""
As simple as it gets.
Launch the Telnet server on the default port and greet visitors using the
placeholder 'on_connect()' function. Does nothing else.
"""
from miniboa import TelnetServer
server = TelnetServer()
print "\n\nStarting server on port %d. CTRL-C to interrupt.\n" % server.port
while True:
server.poll()
|
jmargeta/scikit-learn
|
refs/heads/master
|
examples/hashing_vs_dict_vectorizer.py
|
5
|
"""
===================================================================
Comparison of hashing-based and dictionary-based text vectorization
===================================================================
Compares FeatureHasher and DictVectorizer by using both to vectorize
text documents.
The example demonstrates syntax and speed only; it doesn't actually do
anything useful with the extracted vectors. See the example scripts
{document_classification_20newsgroups,clustering}.py for actual learning
on text documents.
A discrepancy between the number of terms reported for DictVectorizer and
for FeatureHasher is to be expected due to hash collisions.
"""
# Author: Lars Buitinck <L.J.Buitinck@uva.nl>
# License: 3-clause BSD
from __future__ import print_function
from collections import defaultdict
import re
import sys
from time import time
import numpy as np
from sklearn.datasets import fetch_20newsgroups
from sklearn.feature_extraction import DictVectorizer, FeatureHasher
def n_nonzero_columns(X):
"""Returns the number of non-zero columns in a CSR matrix X."""
return len(np.unique(X.nonzero()[1]))
def tokens(doc):
"""Extract tokens from doc.
This uses a simple regex to break strings into tokens. For a more
principled approach, see CountVectorizer or TfidfVectorizer.
"""
return (tok.lower() for tok in re.findall(r"\w+", doc))
def token_freqs(doc):
"""Extract a dict mapping tokens from doc to their frequencies."""
freq = defaultdict(int)
for tok in tokens(doc):
freq[tok] += 1
return freq
categories = [
'alt.atheism',
'comp.graphics',
'comp.sys.ibm.pc.hardware',
'misc.forsale',
'rec.autos',
'sci.space',
'talk.religion.misc',
]
# Uncomment the following line to use a larger set (11k+ documents)
#categories = None
print(__doc__)
print("Usage: %s [n_features_for_hashing]" % sys.argv[0])
print(" The default number of features is 2**18.")
print()
try:
n_features = int(sys.argv[1])
except IndexError:
n_features = 2 ** 18
except ValueError:
print("not a valid number of features: %r" % sys.argv[1])
sys.exit(1)
print("Loading 20 newsgroups training data")
raw_data = fetch_20newsgroups(subset='train', categories=categories).data
data_size_mb = sum(len(s.encode('utf-8')) for s in raw_data) / 1e6
print("%d documents - %0.3fMB" % (len(raw_data), data_size_mb))
print()
print("DictVectorizer")
t0 = time()
vectorizer = DictVectorizer()
vectorizer.fit_transform(token_freqs(d) for d in raw_data)
duration = time() - t0
print("done in %fs at %0.3fMB/s" % (duration, data_size_mb / duration))
print("Found %d unique terms" % len(vectorizer.get_feature_names()))
print()
print("FeatureHasher on frequency dicts")
t0 = time()
hasher = FeatureHasher(n_features=n_features)
X = hasher.transform(token_freqs(d) for d in raw_data)
duration = time() - t0
print("done in %fs at %0.3fMB/s" % (duration, data_size_mb / duration))
print("Found %d unique terms" % n_nonzero_columns(X))
print()
print("FeatureHasher on raw tokens")
t0 = time()
hasher = FeatureHasher(n_features=n_features, input_type="string")
X = hasher.transform(tokens(d) for d in raw_data)
duration = time() - t0
print("done in %fs at %0.3fMB/s" % (duration, data_size_mb / duration))
print("Found %d unique terms" % n_nonzero_columns(X))
|
ixc/glamkit-feincmstools
|
refs/heads/master
|
feincmstools/templatetags/feincmstools_tags.py
|
1
|
import os
from django import template
from feincms.templatetags.feincms_tags import feincms_render_content
register = template.Library()
@register.filter
def is_parent_of(page1, page2):
"""
Determines whether a given page is the parent of another page
Example:
{% if page|is_parent_of:feincms_page %} ... {% endif %}
"""
if page1 is None:
return False
return (page1.tree_id == page2.tree_id and
page1.lft < page2.lft and
page1.rght > page2.rght)
@register.filter
def is_equal_or_parent_of(page1, page2):
return (page1.tree_id == page2.tree_id and
page1.lft <= page2.lft and
page1.rght >= page2.rght)
@register.filter
def is_sibling_of(page1, page2):
"""
Determines whether a given page is a sibling of another page
{% if page|is_sibling_of:feincms_page %} ... {% endif %}
"""
if page1 is None or page2 is None:
return False
return (page1.parent_id == page2.parent_id)
@register.filter
def get_extension(filename):
""" Return the extension from a file name """
return os.path.splitext(filename)[1][1:]
@register.assignment_tag(takes_context=True)
def feincms_render_content_as(context, content, request=None):
return feincms_render_content(context, content, request)
|
kickstandproject/sarlacc
|
refs/heads/master
|
sarlacc/tests/asterisk/agi/test_database_deltree.py
|
1
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2013 PolyBeacon, Inc.
#
# Author: Paul Belanger <paul.belanger@polybeacon.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import cStringIO
import mock
from sarlacc.tests.asterisk.agi import test
class TestCase(test.TestCase):
@mock.patch('sys.stdin', cStringIO.StringIO("200 result=0"))
def test_database_deltree_failure(self):
with mock.patch(
'sys.stdout', new_callable=cStringIO.StringIO) as mock_stdout:
res = self.agi.database_deltree(family='Foo', keytree='Bar')
self.assertEqual(
mock_stdout.getvalue(), 'DATABASE DELTREE Foo Bar\n'
)
self.assertFalse(res)
@mock.patch('sys.stdin', cStringIO.StringIO("200 result=1"))
def test_database_deltree_success(self):
with mock.patch(
'sys.stdout', new_callable=cStringIO.StringIO) as mock_stdout:
res = self.agi.database_deltree(family='SIP', keytree='1001')
self.assertEqual(
mock_stdout.getvalue(), 'DATABASE DELTREE SIP 1001\n'
)
self.assertTrue(res)
|
masschallenge/django-accelerator
|
refs/heads/development
|
accelerator/tests/contexts/mentor_program_group_context.py
|
1
|
from accelerator.tests.contexts.mentor_user_context import MentorUserContext
from accelerator.tests.factories.named_group_factory import NamedGroupFactory
class MentorProgramGroupContext(object):
def __init__(self):
mentor_context1 = MentorUserContext()
mentor_context2 = MentorUserContext()
self.mentors = [mentor_context1.user, mentor_context2.user]
self.programs = [mentor_context1.program, mentor_context2.program]
self.program_group = NamedGroupFactory()
_set_mentor_program_group(self.programs[0], self.program_group)
_set_mentor_program_group(self.programs[1], self.program_group)
def _set_mentor_program_group(program, program_group):
program.mentor_program_group = program_group
program.save()
|
KidFashion/boo
|
refs/heads/master
|
examples/misc/arrayperformance.py
|
21
|
from time import time
def test():
items = 2000000
array = tuple(range(items))
collect = []
start = time()
for i in xrange(items):
collect.append(array[i])
elapsed = time() - start
print elapsed*1000, " elapsed."
test()
test()
test()
|
inkcoin/inkcoin-project
|
refs/heads/master
|
share/qt/make_spinner.py
|
4415
|
#!/usr/bin/env python
# W.J. van der Laan, 2011
# Make spinning .mng animation from a .png
# Requires imagemagick 6.7+
from __future__ import division
from os import path
from PIL import Image
from subprocess import Popen
SRC='img/reload_scaled.png'
DST='../../src/qt/res/movies/update_spinner.mng'
TMPDIR='/tmp'
TMPNAME='tmp-%03i.png'
NUMFRAMES=35
FRAMERATE=10.0
CONVERT='convert'
CLOCKWISE=True
DSIZE=(16,16)
im_src = Image.open(SRC)
if CLOCKWISE:
im_src = im_src.transpose(Image.FLIP_LEFT_RIGHT)
def frame_to_filename(frame):
return path.join(TMPDIR, TMPNAME % frame)
frame_files = []
for frame in xrange(NUMFRAMES):
rotation = (frame + 0.5) / NUMFRAMES * 360.0
if CLOCKWISE:
rotation = -rotation
im_new = im_src.rotate(rotation, Image.BICUBIC)
im_new.thumbnail(DSIZE, Image.ANTIALIAS)
outfile = frame_to_filename(frame)
im_new.save(outfile, 'png')
frame_files.append(outfile)
p = Popen([CONVERT, "-delay", str(FRAMERATE), "-dispose", "2"] + frame_files + [DST])
p.communicate()
|
raschuetz/foundations-homework
|
refs/heads/master
|
07/data-analysis/bin/activate_this.py
|
1076
|
"""By using execfile(this_file, dict(__file__=this_file)) you will
activate this virtualenv environment.
This can be used when you must use an existing Python interpreter, not
the virtualenv bin/python
"""
try:
__file__
except NameError:
raise AssertionError(
"You must run this like execfile('path/to/activate_this.py', dict(__file__='path/to/activate_this.py'))")
import sys
import os
old_os_path = os.environ.get('PATH', '')
os.environ['PATH'] = os.path.dirname(os.path.abspath(__file__)) + os.pathsep + old_os_path
base = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if sys.platform == 'win32':
site_packages = os.path.join(base, 'Lib', 'site-packages')
else:
site_packages = os.path.join(base, 'lib', 'python%s' % sys.version[:3], 'site-packages')
prev_sys_path = list(sys.path)
import site
site.addsitedir(site_packages)
sys.real_prefix = sys.prefix
sys.prefix = base
# Move the added items to the front of the path:
new_sys_path = []
for item in list(sys.path):
if item not in prev_sys_path:
new_sys_path.append(item)
sys.path.remove(item)
sys.path[:0] = new_sys_path
|
danielkza/dnf
|
refs/heads/master
|
doc/__init__.py
|
22
|
# __init__.py
# DNF documentation package.
#
# Copyright (C) 2012-2013 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
|
perezg/infoxchange
|
refs/heads/master
|
BASE/lib/python2.7/site-packages/tastypie/utils/formatting.py
|
47
|
from __future__ import unicode_literals
import email
import datetime
import time
from django.utils import dateformat
from tastypie.utils.timezone import make_aware, make_naive, aware_datetime
# Try to use dateutil for maximum date-parsing niceness. Fall back to
# hard-coded RFC2822 parsing if that's not possible.
try:
from dateutil.parser import parse as mk_datetime
except ImportError:
def mk_datetime(string):
return make_aware(datetime.datetime.fromtimestamp(time.mktime(email.utils.parsedate(string))))
def format_datetime(dt):
"""
RFC 2822 datetime formatter
"""
return dateformat.format(make_naive(dt), 'r')
def format_date(d):
"""
RFC 2822 date formatter
"""
# workaround because Django's dateformat utility requires a datetime
# object (not just date)
dt = aware_datetime(d.year, d.month, d.day, 0, 0, 0)
return dateformat.format(dt, 'j M Y')
def format_time(t):
"""
RFC 2822 time formatter
"""
# again, workaround dateformat input requirement
dt = aware_datetime(2000, 1, 1, t.hour, t.minute, t.second)
return dateformat.format(dt, 'H:i:s O')
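# A minimal usage sketch (assumes configured Django settings; the datetime
# value is an example and the printed offset depends on TIME_ZONE):
#
#     dt = aware_datetime(2013, 1, 1, 12, 0, 0)
#     format_datetime(dt)       # -> e.g. 'Tue, 01 Jan 2013 12:00:00 +0000'
#     format_date(dt.date())    # -> '1 Jan 2013'
#     format_time(dt.time())    # -> e.g. '12:00:00 +0000'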
|
ah-anssi/SecuML
|
refs/heads/master
|
SecuML/experiments/experiment_db_tools.py
|
1
|
# SecuML
# Copyright (C) 2016-2017 ANSSI
#
# SecuML is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# SecuML is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with SecuML. If not, see <http://www.gnu.org/licenses/>.
from sqlalchemy.orm.exc import NoResultFound
from SecuML.experiments import db_tables
from SecuML.experiments.db_tables import ExperimentsAlchemy
def getExperimentRow(session, experiment_id):
query = session.query(ExperimentsAlchemy)
query = query.filter(ExperimentsAlchemy.id == experiment_id)
return query.first()
def getExperimentDetails(session, name, kind):
query = session.query(ExperimentsAlchemy)
query = query.filter(ExperimentsAlchemy.name == name)
query = query.filter(ExperimentsAlchemy.kind == kind)
try:
exp = query.one()
return exp.id, exp.oldest_parent
except NoResultFound:
return None
def checkValidationExperiment(session, dataset_id, experiment_name):
query = session.query(ExperimentsAlchemy)
query = query.filter(ExperimentsAlchemy.kind == 'Validation')
query = query.filter(ExperimentsAlchemy.dataset_id == dataset_id)
query = query.filter(ExperimentsAlchemy.name == experiment_name)
try:
exp = query.one()
return exp
except NoResultFound:
return None
def getExperimentId(session, experiment_name):
query = session.query(ExperimentsAlchemy)
query = query.filter(ExperimentsAlchemy.name == experiment_name)
return query.first().id
def getProjectDataset(session, experiment_id):
query = session.query(ExperimentsAlchemy)
query = query.filter(ExperimentsAlchemy.id == experiment_id)
exp_obj = query.one()
dataset_obj = exp_obj.dataset
project_obj = dataset_obj.project
dataset = dataset_obj.dataset
project = project_obj.project
return project, dataset
def getExperimentName(session, experiment_id):
query = session.query(ExperimentsAlchemy)
query = query.filter(ExperimentsAlchemy.id == experiment_id)
return query.first().name
def getExperimentLabelId(session, experiment_id):
query = session.query(ExperimentsAlchemy)
query = query.filter(ExperimentsAlchemy.id == experiment_id)
return query.first().annotations_id
def addExperiment(session, kind, name, dataset_id, parent):
exp = ExperimentsAlchemy(kind=kind, name=name, dataset_id=dataset_id,
parent=parent)
session.add(exp)
session.commit()
experiment_id = exp.id
if parent is None:
oldest_parent = experiment_id
else:
query = session.query(ExperimentsAlchemy)
query = query.filter(ExperimentsAlchemy.id == parent)
parent = query.one()
oldest_parent = parent.oldest_parent
exp.oldest_parent = oldest_parent
session.commit()
return experiment_id, oldest_parent
def removeExperiment(session, experiment_id):
query = session.query(ExperimentsAlchemy)
query = query.filter(ExperimentsAlchemy.id == experiment_id)
experiment = query.one()
session.delete(experiment)
session.commit()
def getChildren(session, experiment_id):
query = session.query(ExperimentsAlchemy)
query = query.filter(ExperimentsAlchemy.parent == experiment_id)
children = [r.id for r in query.all()]
return children
def getDescriptiveStatsExp(session, experiment):
features_filename = experiment.features_filename.split('.')[0]
exp_name = features_filename
query = session.query(ExperimentsAlchemy)
query = query.filter(ExperimentsAlchemy.dataset_id ==
experiment.dataset_id)
query = query.filter(ExperimentsAlchemy.kind == 'DescriptiveStatistics')
query = query.filter(ExperimentsAlchemy.name == exp_name)
res = query.first()
if res is not None:
return res.id
else:
return None
def getExperiments(session, project, dataset, exp_kind=None):
project_id = db_tables.checkProject(session, project)
dataset_id = db_tables.checkDataset(session, project_id, dataset)
query = session.query(ExperimentsAlchemy)
query = query.filter(ExperimentsAlchemy.dataset_id == dataset_id)
if exp_kind is not None:
query = query.filter(ExperimentsAlchemy.kind == exp_kind)
query = query.filter(ExperimentsAlchemy.parent == None)
experiments = {}
for exp in query.all():
if exp.name not in list(experiments.keys()):
experiments[exp.name] = []
experiments[exp.name].append(exp.id)
return experiments
def getExperimentKinds(session, dataset_id):
query = session.query(ExperimentsAlchemy)
    query = query.filter(ExperimentsAlchemy.dataset_id == dataset_id)
query = query.distinct(ExperimentsAlchemy.kind)
kinds = [r.kind for r in query.all()]
return kinds
def getAllExperiments(session, project, dataset, exp_kind=None):
project_id = db_tables.checkProject(session, project)
dataset_id = db_tables.checkDataset(session, project_id, dataset)
all_kinds = getExperimentKinds(session, dataset_id)
experiments = {}
for kind in all_kinds:
experiments[kind] = []
query = session.query(ExperimentsAlchemy)
query = query.filter(ExperimentsAlchemy.dataset_id == dataset_id)
query = query.filter(ExperimentsAlchemy.kind == kind)
query = query.filter(ExperimentsAlchemy.parent == None)
for exp in query.all():
e = {'name': exp.name, 'id': exp.id}
experiments[kind].append(e)
if len(experiments[kind]) == 0:
del experiments[kind]
return experiments
def getCurrentIteration(session, experiment_id):
query = session.query(ExperimentsAlchemy)
query = query.filter(ExperimentsAlchemy.id == experiment_id)
experiment = query.one()
return experiment.current_iter
def updateExperimentName(session, experiment_id, experiment_name):
query = session.query(ExperimentsAlchemy)
query = query.filter(ExperimentsAlchemy.id == experiment_id)
exp = query.one()
exp.name = experiment_name
session.commit()
|
luzheqi1987/nova-annotation
|
refs/heads/master
|
nova/cmd/cert.py
|
37
|
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Starter script for Nova Cert."""
import sys
from oslo.config import cfg
from nova import config
from nova.openstack.common import log as logging
from nova.openstack.common.report import guru_meditation_report as gmr
from nova import service
from nova import utils
from nova import version
CONF = cfg.CONF
CONF.import_opt('cert_topic', 'nova.cert.rpcapi')
def main():
config.parse_args(sys.argv)
logging.setup("nova")
utils.monkey_patch()
gmr.TextGuruMeditation.setup_autorun(version)
server = service.Service.create(binary='nova-cert', topic=CONF.cert_topic)
service.serve(server)
service.wait()
|
ddrmanxbxfr/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/tools/pywebsocket/src/mod_pywebsocket/handshake/_base.py
|
652
|
# Copyright 2012, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Common functions and exceptions used by WebSocket opening handshake
processors.
"""
from mod_pywebsocket import common
from mod_pywebsocket import http_header_util
class AbortedByUserException(Exception):
"""Exception for aborting a connection intentionally.
If this exception is raised in do_extra_handshake handler, the connection
will be abandoned. No other WebSocket or HTTP(S) handler will be invoked.
If this exception is raised in transfer_data_handler, the connection will
be closed without closing handshake. No other WebSocket or HTTP(S) handler
will be invoked.
"""
pass
class HandshakeException(Exception):
"""This exception will be raised when an error occurred while processing
WebSocket initial handshake.
"""
def __init__(self, name, status=None):
super(HandshakeException, self).__init__(name)
self.status = status
class VersionException(Exception):
"""This exception will be raised when a version of client request does not
match with version the server supports.
"""
def __init__(self, name, supported_versions=''):
"""Construct an instance.
Args:
supported_version: a str object to show supported hybi versions.
(e.g. '8, 13')
"""
super(VersionException, self).__init__(name)
self.supported_versions = supported_versions
def get_default_port(is_secure):
if is_secure:
return common.DEFAULT_WEB_SOCKET_SECURE_PORT
else:
return common.DEFAULT_WEB_SOCKET_PORT
def validate_subprotocol(subprotocol):
"""Validate a value in the Sec-WebSocket-Protocol field.
See the Section 4.1., 4.2.2., and 4.3. of RFC 6455.
"""
if not subprotocol:
raise HandshakeException('Invalid subprotocol name: empty')
# Parameter should be encoded HTTP token.
state = http_header_util.ParsingState(subprotocol)
token = http_header_util.consume_token(state)
rest = http_header_util.peek(state)
# If |rest| is not None, |subprotocol| is not one token or invalid. If
# |rest| is None, |token| must not be None because |subprotocol| is
# concatenation of |token| and |rest| and is not None.
if rest is not None:
raise HandshakeException('Invalid non-token string in subprotocol '
'name: %r' % rest)
def parse_host_header(request):
fields = request.headers_in[common.HOST_HEADER].split(':', 1)
if len(fields) == 1:
return fields[0], get_default_port(request.is_https())
try:
return fields[0], int(fields[1])
except ValueError, e:
raise HandshakeException('Invalid port number format: %r' % e)
def format_header(name, value):
return '%s: %s\r\n' % (name, value)
def get_mandatory_header(request, key):
value = request.headers_in.get(key)
if value is None:
raise HandshakeException('Header %s is not defined' % key)
return value
def validate_mandatory_header(request, key, expected_value, fail_status=None):
value = get_mandatory_header(request, key)
if value.lower() != expected_value.lower():
raise HandshakeException(
'Expected %r for header %s but found %r (case-insensitive)' %
(expected_value, key, value), status=fail_status)
def check_request_line(request):
# 5.1 1. The three character UTF-8 string "GET".
# 5.1 2. A UTF-8-encoded U+0020 SPACE character (0x20 byte).
if request.method != 'GET':
raise HandshakeException('Method is not GET: %r' % request.method)
if request.protocol != 'HTTP/1.1':
raise HandshakeException('Version is not HTTP/1.1: %r' %
request.protocol)
def parse_token_list(data):
"""Parses a header value which follows 1#token and returns parsed elements
as a list of strings.
Leading LWSes must be trimmed.
"""
state = http_header_util.ParsingState(data)
token_list = []
while True:
token = http_header_util.consume_token(state)
if token is not None:
token_list.append(token)
http_header_util.consume_lwses(state)
if http_header_util.peek(state) is None:
break
if not http_header_util.consume_string(state, ','):
raise HandshakeException(
'Expected a comma but found %r' % http_header_util.peek(state))
http_header_util.consume_lwses(state)
if len(token_list) == 0:
raise HandshakeException('No valid token found')
return token_list
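# Illustrative behavior of parse_token_list() (values are assumed
# examples, not part of the handshake flow):
#
#     parse_token_list('chat, superchat')  # -> ['chat', 'superchat']
#     parse_token_list('')                 # raises HandshakeException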
# vi:sts=4 sw=4 et
|
bop/bauhaus
|
refs/heads/master
|
lib/python2.7/site-packages/django/test/simple.py
|
108
|
"""
This module is pending deprecation as of Django 1.6 and will be removed in
version 1.8.
"""
import json
import re
import unittest as real_unittest
import warnings
from django.db.models import get_app, get_apps
from django.test import _doctest as doctest
from django.test import runner
from django.test.utils import compare_xml, strip_quotes
from django.utils import unittest
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
__all__ = ('DjangoTestSuiteRunner',)
warnings.warn(
"The django.test.simple module and DjangoTestSuiteRunner are deprecated; "
"use django.test.runner.DiscoverRunner instead.",
PendingDeprecationWarning)
# The module name for tests outside models.py
TEST_MODULE = 'tests'
normalize_long_ints = lambda s: re.sub(r'(?<![\w])(\d+)L(?![\w])', '\\1', s)
normalize_decimals = lambda s: re.sub(r"Decimal\('(\d+(\.\d*)?)'\)",
lambda m: "Decimal(\"%s\")" % m.groups()[0], s)
class OutputChecker(doctest.OutputChecker):
def check_output(self, want, got, optionflags):
"""
The entry method for doctest output checking. Defers to a sequence of
child checkers
"""
checks = (self.check_output_default,
self.check_output_numeric,
self.check_output_xml,
self.check_output_json)
for check in checks:
if check(want, got, optionflags):
return True
return False
def check_output_default(self, want, got, optionflags):
"""
The default comparator provided by doctest - not perfect, but good for
most purposes
"""
return doctest.OutputChecker.check_output(self, want, got, optionflags)
def check_output_numeric(self, want, got, optionflags):
"""Doctest does an exact string comparison of output, which means that
some numerically equivalent values aren't equal. This check normalizes
* long integers (22L) so that they equal normal integers. (22)
* Decimals so that they are comparable, regardless of the change
made to __repr__ in Python 2.6.
"""
return doctest.OutputChecker.check_output(self,
normalize_decimals(normalize_long_ints(want)),
normalize_decimals(normalize_long_ints(got)),
optionflags)
def check_output_xml(self, want, got, optionsflags):
try:
return compare_xml(want, got)
except Exception:
return False
def check_output_json(self, want, got, optionsflags):
"""
Tries to compare want and got as if they were JSON-encoded data
"""
want, got = strip_quotes(want, got)
try:
want_json = json.loads(want)
got_json = json.loads(got)
except Exception:
return False
return want_json == got_json
class DocTestRunner(doctest.DocTestRunner):
def __init__(self, *args, **kwargs):
doctest.DocTestRunner.__init__(self, *args, **kwargs)
self.optionflags = doctest.ELLIPSIS
doctestOutputChecker = OutputChecker()
def get_tests(app_module):
parts = app_module.__name__.split('.')
prefix, last = parts[:-1], parts[-1]
try:
test_module = import_module('.'.join(prefix + [TEST_MODULE]))
except ImportError:
# Couldn't import tests.py. Was it due to a missing file, or
# due to an import error in a tests.py that actually exists?
# app_module either points to a models.py file, or models/__init__.py
# Tests are therefore either in same directory, or one level up
if last == 'models':
app_root = import_module('.'.join(prefix))
else:
app_root = app_module
if not module_has_submodule(app_root, TEST_MODULE):
test_module = None
else:
# The module exists, so there must be an import error in the test
# module itself.
raise
return test_module
def make_doctest(module):
return doctest.DocTestSuite(module,
checker=doctestOutputChecker,
runner=DocTestRunner,
)
def build_suite(app_module):
"""
Create a complete Django test suite for the provided application module.
"""
suite = unittest.TestSuite()
# Load unit and doctests in the models.py module. If module has
# a suite() method, use it. Otherwise build the test suite ourselves.
if hasattr(app_module, 'suite'):
suite.addTest(app_module.suite())
else:
suite.addTest(unittest.defaultTestLoader.loadTestsFromModule(
app_module))
try:
suite.addTest(make_doctest(app_module))
except ValueError:
# No doc tests in models.py
pass
# Check to see if a separate 'tests' module exists parallel to the
# models module
test_module = get_tests(app_module)
if test_module:
# Load unit and doctests in the tests.py module. If module has
# a suite() method, use it. Otherwise build the test suite ourselves.
if hasattr(test_module, 'suite'):
suite.addTest(test_module.suite())
else:
suite.addTest(unittest.defaultTestLoader.loadTestsFromModule(
test_module))
try:
suite.addTest(make_doctest(test_module))
except ValueError:
# No doc tests in tests.py
pass
return suite
def build_test(label):
"""
Construct a test case with the specified label. Label should be of the
form model.TestClass or model.TestClass.test_method. Returns an
instantiated test or test suite corresponding to the label provided.
"""
parts = label.split('.')
if len(parts) < 2 or len(parts) > 3:
raise ValueError("Test label '%s' should be of the form app.TestCase "
"or app.TestCase.test_method" % label)
#
# First, look for TestCase instances with a name that matches
#
app_module = get_app(parts[0])
test_module = get_tests(app_module)
TestClass = getattr(app_module, parts[1], None)
# Couldn't find the test class in models.py; look in tests.py
if TestClass is None:
if test_module:
TestClass = getattr(test_module, parts[1], None)
try:
if issubclass(TestClass, (unittest.TestCase, real_unittest.TestCase)):
if len(parts) == 2: # label is app.TestClass
try:
return unittest.TestLoader().loadTestsFromTestCase(
TestClass)
except TypeError:
raise ValueError(
"Test label '%s' does not refer to a test class"
% label)
else: # label is app.TestClass.test_method
return TestClass(parts[2])
except TypeError:
# TestClass isn't a TestClass - it must be a method or normal class
pass
#
# If there isn't a TestCase, look for a doctest that matches
#
tests = []
for module in app_module, test_module:
try:
doctests = make_doctest(module)
# Now iterate over the suite, looking for doctests whose name
# matches the pattern that was given
for test in doctests:
if test._dt_test.name in (
'%s.%s' % (module.__name__, '.'.join(parts[1:])),
'%s.__test__.%s' % (
module.__name__, '.'.join(parts[1:]))):
tests.append(test)
except ValueError:
# No doctests found.
pass
# If no tests were found, then we were given a bad test label.
if not tests:
raise ValueError("Test label '%s' does not refer to a test" % label)
# Construct a suite out of the tests that matched.
return unittest.TestSuite(tests)
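# Illustrative labels accepted by build_test() (app and test names are
# assumed examples):
#
#     build_test('animals.AnimalTest')           # all tests in a TestCase
#     build_test('animals.AnimalTest.test_run')  # a single test method
#     build_test('animals')                      # raises ValueError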
class DjangoTestSuiteRunner(runner.DiscoverRunner):
def build_suite(self, test_labels, extra_tests=None, **kwargs):
suite = unittest.TestSuite()
if test_labels:
for label in test_labels:
if '.' in label:
suite.addTest(build_test(label))
else:
app = get_app(label)
suite.addTest(build_suite(app))
else:
for app in get_apps():
suite.addTest(build_suite(app))
if extra_tests:
for test in extra_tests:
suite.addTest(test)
return runner.reorder_suite(suite, (unittest.TestCase,))
|
nickdex/cosmos
|
refs/heads/master
|
code/dynamic_programming/src/coin_change/coin_change.py
|
3
|
#################
## dynamic programming | coin change | Python
## Part of Cosmos by OpenGenus Foundation
#################
def coin_change(coins, amount):
# init the dp table
tab = [0 for i in range(amount + 1)]
tab[0] = 1 # base case
for j in range(len(coins)):
for i in range(1, amount + 1):
if coins[j] <= i:
                # if coins[j] <= i, add the number of ways to form
                # the remaining amount (i - coins[j]) using coins[j]
tab[i] += tab[i - coins[j]]
# final result at tab[amount]
return tab[amount]
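# An equivalent top-down sketch of the same recurrence, memoized on
# (amount, number of usable coin kinds). main() below still calls the
# tabulated coin_change().
def coin_change_memo(coins, amount, n=None, memo=None):
    if n is None:
        n = len(coins)
    if memo is None:
        memo = {}
    if amount == 0:
        return 1  # one way: take no coins
    if amount < 0 or n == 0:
        return 0  # cannot form a negative amount, or any amount with no coins
    if (amount, n) not in memo:
        # either skip coin n-1 entirely, or use it (at least) once more
        memo[(amount, n)] = (coin_change_memo(coins, amount, n - 1, memo) +
                             coin_change_memo(coins, amount - coins[n - 1], n, memo))
    return memo[(amount, n)]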
def main():
coins = [1, 2, 3, 8] # coin denominations
amount = 3 # amount of money
print("No. of ways to change - {}".format(coin_change(coins, amount)))
return
if __name__ == "__main__":
main()
|
dr-prodigy/python-holidays
|
refs/heads/master
|
holidays/countries/georgia.py
|
1
|
# -*- coding: utf-8 -*-
# python-holidays
# ---------------
# A fast, efficient Python library for generating country, province and state
# specific sets of holidays on the fly. It aims to make determining whether a
# specific date is a holiday as fast and flexible as possible.
#
# Author: ryanss <ryanssdev@icloud.com> (c) 2014-2017
# dr-prodigy <maurizio.montel@gmail.com> (c) 2017-2021
# Website: https://github.com/dr-prodigy/python-holidays
# License: MIT (see LICENSE file)
from datetime import date
from dateutil.easter import easter, EASTER_ORTHODOX
from dateutil.relativedelta import relativedelta as rd
from holidays.constants import JAN, MAR, APR, MAY, AUG, OCT, NOV
from holidays.holiday_base import HolidayBase
class Georgia(HolidayBase):
"""
https://en.wikipedia.org/wiki/Public_holidays_in_Georgia_(country)
"""
def __init__(self, **kwargs):
self.country = "GE"
HolidayBase.__init__(self, **kwargs)
def _populate(self, year):
# New Year's Day
name = "ახალი წელი"
self[date(year, JAN, 1)] = name
        # Second day of the New Year (Bedoba)
name = "ბედობა"
self[date(year, JAN, 2)] = name
# Christmas Day (Orthodox)
name = "ქრისტეშობა"
self[date(year, JAN, 7)] = name
# Baptism Day of our Lord Jesus Christ
name = "ნათლისღება"
self[date(year, JAN, 19)] = name
# Mother's Day
name = "დედის დღე"
self[date(year, MAR, 3)] = name
# Women's Day
name = "ქალთა საერთაშორისო დღე"
self[date(year, MAR, 8)] = name
# Orthodox Good Friday
name = "წითელი პარასკევი"
self[easter(year, method=EASTER_ORTHODOX) - rd(days=2)] = name
# Orthodox Holy Saturday
name = "დიდი შაბათი"
self[easter(year, method=EASTER_ORTHODOX) - rd(days=1)] = name
# Orthodox Easter Sunday
name = "აღდგომა"
self[easter(year, method=EASTER_ORTHODOX)] = name
# Orthodox Easter Monday
name = "შავი ორშაბათი"
self[easter(year, method=EASTER_ORTHODOX) + rd(days=1)] = name
# National Unity Day
name = "ეროვნული ერთიანობის დღე"
self[date(year, APR, 9)] = name
# Day of Victory
name = "ფაშიზმზე გამარჯვების დღე"
self[date(year, MAY, 9)] = name
# Saint Andrew the First-Called Day
name = "წმინდა ანდრია პირველწოდებულის დღე"
self[date(year, MAY, 12)] = name
# Independence Day
name = "დამოუკიდებლობის დღე"
self[date(year, MAY, 26)] = name
# Saint Mary's Day
name = "მარიამობა"
self[date(year, AUG, 28)] = name
# Day of Svetitskhoveli Cathedral
name = "სვეტიცხოვლობა"
self[date(year, OCT, 14)] = name
# Saint George's Day
name = "გიორგობა"
self[date(year, NOV, 23)] = name
class GE(Georgia):
pass
class GEO(Georgia):
pass
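if __name__ == "__main__":
    # A minimal usage sketch (assumes the enclosing `holidays` package is
    # importable); prints the Georgian holidays for an example year.
    for day, holiday_name in sorted(Georgia(years=2021).items()):
        print(day, holiday_name)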
|
jonmash/ardupilot
|
refs/heads/master
|
mk/PX4/Tools/gencpp/src/gencpp/__init__.py
|
214
|
# Software License Agreement (BSD License)
#
# Copyright (c) 2011, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import genmsg.msgs
try:
from cStringIO import StringIO #Python 2.x
except ImportError:
from io import StringIO #Python 3.x
MSG_TYPE_TO_CPP = {'byte': 'int8_t',
'char': 'uint8_t',
'bool': 'uint8_t',
'uint8': 'uint8_t',
'int8': 'int8_t',
'uint16': 'uint16_t',
'int16': 'int16_t',
'uint32': 'uint32_t',
'int32': 'int32_t',
'uint64': 'uint64_t',
'int64': 'int64_t',
'float32': 'float',
'float64': 'double',
'string': 'std::basic_string<char, std::char_traits<char>, typename ContainerAllocator::template rebind<char>::other > ',
'time': 'ros::Time',
'duration': 'ros::Duration'}
#used
def msg_type_to_cpp(type):
"""
Converts a message type (e.g. uint32, std_msgs/String, etc.) into the C++ declaration
for that type (e.g. uint32_t, std_msgs::String_<ContainerAllocator>)
@param type: The message type
@type type: str
@return: The C++ declaration
@rtype: str
"""
(base_type, is_array, array_len) = genmsg.msgs.parse_type(type)
cpp_type = None
if (genmsg.msgs.is_builtin(base_type)):
cpp_type = MSG_TYPE_TO_CPP[base_type]
elif (len(base_type.split('/')) == 1):
if (genmsg.msgs.is_header_type(base_type)):
cpp_type = ' ::std_msgs::Header_<ContainerAllocator> '
else:
cpp_type = '%s_<ContainerAllocator> '%(base_type)
else:
pkg = base_type.split('/')[0]
msg = base_type.split('/')[1]
cpp_type = ' ::%s::%s_<ContainerAllocator> '%(pkg, msg)
if (is_array):
if (array_len is None):
return 'std::vector<%s, typename ContainerAllocator::template rebind<%s>::other > '%(cpp_type, cpp_type)
else:
return 'boost::array<%s, %s> '%(cpp_type, array_len)
else:
return cpp_type
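# Illustrative outputs of msg_type_to_cpp() (inputs are assumed examples):
#
#     msg_type_to_cpp('uint32')           # -> 'uint32_t'
#     msg_type_to_cpp('std_msgs/String')  # -> ' ::std_msgs::String_<ContainerAllocator> '
#     msg_type_to_cpp('uint8[16]')        # -> 'boost::array<uint8_t, 16> '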
def _escape_string(s):
s = s.replace('\\', '\\\\')
s = s.replace('"', '\\"')
return s
def escape_message_definition(definition):
lines = definition.splitlines()
if not lines:
lines.append('')
s = StringIO()
for line in lines:
line = _escape_string(line)
s.write('%s\\n\\\n'%(line))
val = s.getvalue()
s.close()
return val
#used2
def cpp_message_declarations(name_prefix, msg):
"""
Returns the different possible C++ declarations for a message given the message itself.
@param name_prefix: The C++ prefix to be prepended to the name, e.g. "std_msgs::"
@type name_prefix: str
@param msg: The message type
@type msg: str
    @return: A tuple of 3 different names. cpp_message_declarations("std_msgs::", "String") returns the tuple
    ("std_msgs::String_", "std_msgs::String_<ContainerAllocator>", "std_msgs::String")
    @rtype: (str, str, str)
"""
pkg, basetype = genmsg.names.package_resource_name(msg)
cpp_name = ' ::%s%s'%(name_prefix, msg)
if (pkg):
cpp_name = ' ::%s::%s'%(pkg, basetype)
return ('%s_'%(cpp_name), '%s_<ContainerAllocator> '%(cpp_name), '%s'%(cpp_name))
#todo
def is_fixed_length(spec, msg_context, includepath):
"""
Returns whether or not the message is fixed-length
@param spec: The message spec
@type spec: genmsg.msgs.MsgSpec
    @param msg_context: The message context used for resolving dependent types
    @type msg_context: genmsg.msg_loader.MsgContext
    @param includepath: Dictionary mapping package names to include paths
    @type includepath: dict
"""
types = []
for field in spec.parsed_fields():
if (field.is_array and field.array_len is None):
return False
if (field.base_type == 'string'):
return False
if (not field.is_builtin):
types.append(field.base_type)
types = set(types)
for t in types:
t = genmsg.msgs.resolve_type(t, spec.package)
assert isinstance(includepath, dict)
new_spec = genmsg.msg_loader.load_msg_by_type(msg_context, t, includepath)
if (not is_fixed_length(new_spec, msg_context, includepath)):
return False
return True
#used2
def default_value(type):
"""
Returns the value to initialize a message member with. 0 for integer types, 0.0 for floating point, false for bool,
empty string for everything else
@param type: The type
@type type: str
"""
if type in ['byte', 'int8', 'int16', 'int32', 'int64',
'char', 'uint8', 'uint16', 'uint32', 'uint64']:
return '0'
elif type in ['float32', 'float64']:
return '0.0'
elif type == 'bool':
return 'false'
return ""
#used2
def takes_allocator(type):
"""
Returns whether or not a type can take an allocator in its constructor. False for all builtin types except string.
True for all others.
@param type: The type
@type: str
"""
    return type not in ['byte', 'int8', 'int16', 'int32', 'int64',
                        'char', 'uint8', 'uint16', 'uint32', 'uint64',
                        'float32', 'float64', 'bool', 'time', 'duration']
def escape_string(str):
str = str.replace('\\', '\\\\')
str = str.replace('"', '\\"')
return str
#used
def generate_fixed_length_assigns(spec, container_gets_allocator, cpp_name_prefix):
"""
Initialize any fixed-length arrays
@param s: The stream to write to
@type s: stream
@param spec: The message spec
@type spec: genmsg.msgs.MsgSpec
@param container_gets_allocator: Whether or not a container type (whether it's another message, a vector, array or string)
should have the allocator passed to its constructor. Assumes the allocator is named _alloc.
@type container_gets_allocator: bool
@param cpp_name_prefix: The C++ prefix to use when referring to the message, e.g. "std_msgs::"
@type cpp_name_prefix: str
"""
# Assign all fixed-length arrays their default values
for field in spec.parsed_fields():
if (not field.is_array or field.array_len is None):
continue
val = default_value(field.base_type)
if (container_gets_allocator and takes_allocator(field.base_type)):
# String is a special case, as it is the only builtin type that takes an allocator
if (field.base_type == "string"):
string_cpp = msg_type_to_cpp("string")
yield ' %s.assign(%s(_alloc));\n'%(field.name, string_cpp)
else:
(cpp_msg_unqualified, cpp_msg_with_alloc, _) = cpp_message_declarations(cpp_name_prefix, field.base_type)
yield ' %s.assign(%s(_alloc));\n'%(field.name, cpp_msg_with_alloc)
elif (len(val) > 0):
yield ' %s.assign(%s);\n'%(field.name, val)
#used
def generate_initializer_list(spec, container_gets_allocator):
"""
Writes the initializer list for a constructor
@param s: The stream to write to
@type s: stream
@param spec: The message spec
@type spec: genmsg.msgs.MsgSpec
@param container_gets_allocator: Whether or not a container type (whether it's another message, a vector, array or string)
should have the allocator passed to its constructor. Assumes the allocator is named _alloc.
@type container_gets_allocator: bool
"""
op = ':'
for field in spec.parsed_fields():
val = default_value(field.base_type)
use_alloc = takes_allocator(field.base_type)
if (field.is_array):
if (field.array_len is None and container_gets_allocator):
yield ' %s %s(_alloc)'%(op, field.name)
else:
yield ' %s %s()'%(op, field.name)
else:
if (container_gets_allocator and use_alloc):
yield ' %s %s(_alloc)'%(op, field.name)
else:
yield ' %s %s(%s)'%(op, field.name, val)
op = ','
|
timelapseplus/VIEW
|
refs/heads/master
|
node_modules/nodeimu/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py
|
1812
|
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility functions shared amongst the Windows generators."""
import copy
import os
# A dictionary mapping supported target types to extensions.
TARGET_TYPE_EXT = {
'executable': 'exe',
'loadable_module': 'dll',
'shared_library': 'dll',
'static_library': 'lib',
}
def _GetLargePdbShimCcPath():
"""Returns the path of the large_pdb_shim.cc file."""
this_dir = os.path.abspath(os.path.dirname(__file__))
src_dir = os.path.abspath(os.path.join(this_dir, '..', '..'))
win_data_dir = os.path.join(src_dir, 'data', 'win')
large_pdb_shim_cc = os.path.join(win_data_dir, 'large-pdb-shim.cc')
return large_pdb_shim_cc
def _DeepCopySomeKeys(in_dict, keys):
"""Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|.
Arguments:
in_dict: The dictionary to copy.
keys: The keys to be copied. If a key is in this list and doesn't exist in
|in_dict| this is not an error.
Returns:
The partially deep-copied dictionary.
"""
d = {}
for key in keys:
if key not in in_dict:
continue
d[key] = copy.deepcopy(in_dict[key])
return d
def _SuffixName(name, suffix):
"""Add a suffix to the end of a target.
Arguments:
name: name of the target (foo#target)
suffix: the suffix to be added
Returns:
Target name with suffix added (foo_suffix#target)
"""
parts = name.rsplit('#', 1)
parts[0] = '%s_%s' % (parts[0], suffix)
return '#'.join(parts)
def _ShardName(name, number):
"""Add a shard number to the end of a target.
Arguments:
name: name of the target (foo#target)
number: shard number
Returns:
Target name with shard added (foo_1#target)
"""
return _SuffixName(name, str(number))
def ShardTargets(target_list, target_dicts):
"""Shard some targets apart to work around the linkers limits.
Arguments:
target_list: List of target pairs: 'base/base.gyp:base'.
target_dicts: Dict of target properties keyed on target pair.
Returns:
Tuple of the new sharded versions of the inputs.
"""
# Gather the targets to shard, and how many pieces.
targets_to_shard = {}
for t in target_dicts:
shards = int(target_dicts[t].get('msvs_shard', 0))
if shards:
targets_to_shard[t] = shards
# Shard target_list.
new_target_list = []
for t in target_list:
if t in targets_to_shard:
for i in range(targets_to_shard[t]):
new_target_list.append(_ShardName(t, i))
else:
new_target_list.append(t)
# Shard target_dict.
new_target_dicts = {}
for t in target_dicts:
if t in targets_to_shard:
for i in range(targets_to_shard[t]):
name = _ShardName(t, i)
new_target_dicts[name] = copy.copy(target_dicts[t])
new_target_dicts[name]['target_name'] = _ShardName(
new_target_dicts[name]['target_name'], i)
sources = new_target_dicts[name].get('sources', [])
new_sources = []
for pos in range(i, len(sources), targets_to_shard[t]):
new_sources.append(sources[pos])
new_target_dicts[name]['sources'] = new_sources
else:
new_target_dicts[t] = target_dicts[t]
# Shard dependencies.
for t in new_target_dicts:
for deptype in ('dependencies', 'dependencies_original'):
dependencies = copy.copy(new_target_dicts[t].get(deptype, []))
new_dependencies = []
for d in dependencies:
if d in targets_to_shard:
for i in range(targets_to_shard[d]):
new_dependencies.append(_ShardName(d, i))
else:
new_dependencies.append(d)
new_target_dicts[t][deptype] = new_dependencies
return (new_target_list, new_target_dicts)
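# Minimal sketch (illustration only, not generated by gyp) of the round-robin
# source split performed above for a target declaring 'msvs_shard': 2:
#   sources = ['a.cc', 'b.cc', 'c.cc', 'd.cc']
#   shard 0 receives sources[0::2]  ->  ['a.cc', 'c.cc']
#   shard 1 receives sources[1::2]  ->  ['b.cc', 'd.cc']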
def _GetPdbPath(target_dict, config_name, vars):
"""Returns the path to the PDB file that will be generated by a given
configuration.
The lookup proceeds as follows:
- Look for an explicit path in the VCLinkerTool configuration block.
- Look for an 'msvs_large_pdb_path' variable.
- Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is
specified.
- Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'.
Arguments:
target_dict: The target dictionary to be searched.
config_name: The name of the configuration of interest.
vars: A dictionary of common GYP variables with generator-specific values.
Returns:
The path of the corresponding PDB file.
"""
config = target_dict['configurations'][config_name]
msvs = config.setdefault('msvs_settings', {})
linker = msvs.get('VCLinkerTool', {})
pdb_path = linker.get('ProgramDatabaseFile')
if pdb_path:
return pdb_path
variables = target_dict.get('variables', {})
pdb_path = variables.get('msvs_large_pdb_path', None)
if pdb_path:
return pdb_path
pdb_base = target_dict.get('product_name', target_dict['target_name'])
pdb_base = '%s.%s.pdb' % (pdb_base, TARGET_TYPE_EXT[target_dict['type']])
pdb_path = vars['PRODUCT_DIR'] + '/' + pdb_base
return pdb_path
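# Worked example (assumed values, not from the original file): with no
# explicit VCLinkerTool path and no 'msvs_large_pdb_path' variable, a target
# named 'foo' of type 'executable' and vars = {'PRODUCT_DIR': 'out/Release'}
# falls through to the default:
#   _GetPdbPath(...)  ->  'out/Release/foo.exe.pdb'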
def InsertLargePdbShims(target_list, target_dicts, vars):
"""Insert a shim target that forces the linker to use 4KB pagesize PDBs.
This is a workaround for targets with PDBs greater than 1GB in size, the
limit for the 1KB pagesize PDBs created by the linker by default.
Arguments:
target_list: List of target pairs: 'base/base.gyp:base'.
target_dicts: Dict of target properties keyed on target pair.
vars: A dictionary of common GYP variables with generator-specific values.
Returns:
Tuple of the shimmed version of the inputs.
"""
# Determine which targets need shimming.
targets_to_shim = []
for t in target_dicts:
target_dict = target_dicts[t]
# We only want to shim targets that have msvs_large_pdb enabled.
if not int(target_dict.get('msvs_large_pdb', 0)):
continue
# This is intended for executable, shared_library and loadable_module
# targets where every configuration is set up to produce a PDB output.
# If any of these conditions is not true then the shim logic will fail
# below.
targets_to_shim.append(t)
large_pdb_shim_cc = _GetLargePdbShimCcPath()
for t in targets_to_shim:
target_dict = target_dicts[t]
target_name = target_dict.get('target_name')
base_dict = _DeepCopySomeKeys(target_dict,
['configurations', 'default_configuration', 'toolset'])
# This is the dict for copying the source file (part of the GYP tree)
# to the intermediate directory of the project. This is necessary because
# we can't always build a relative path to the shim source file (on Windows
# GYP and the project may be on different drives), and Ninja hates absolute
# paths (it ends up generating the .obj and .obj.d alongside the source
  # file, polluting GYP's tree).
copy_suffix = 'large_pdb_copy'
copy_target_name = target_name + '_' + copy_suffix
full_copy_target_name = _SuffixName(t, copy_suffix)
shim_cc_basename = os.path.basename(large_pdb_shim_cc)
shim_cc_dir = vars['SHARED_INTERMEDIATE_DIR'] + '/' + copy_target_name
shim_cc_path = shim_cc_dir + '/' + shim_cc_basename
copy_dict = copy.deepcopy(base_dict)
copy_dict['target_name'] = copy_target_name
copy_dict['type'] = 'none'
copy_dict['sources'] = [ large_pdb_shim_cc ]
copy_dict['copies'] = [{
'destination': shim_cc_dir,
'files': [ large_pdb_shim_cc ]
}]
# This is the dict for the PDB generating shim target. It depends on the
# copy target.
shim_suffix = 'large_pdb_shim'
shim_target_name = target_name + '_' + shim_suffix
full_shim_target_name = _SuffixName(t, shim_suffix)
shim_dict = copy.deepcopy(base_dict)
shim_dict['target_name'] = shim_target_name
shim_dict['type'] = 'static_library'
shim_dict['sources'] = [ shim_cc_path ]
shim_dict['dependencies'] = [ full_copy_target_name ]
# Set up the shim to output its PDB to the same location as the final linker
# target.
for config_name, config in shim_dict.get('configurations').iteritems():
pdb_path = _GetPdbPath(target_dict, config_name, vars)
# A few keys that we don't want to propagate.
for key in ['msvs_precompiled_header', 'msvs_precompiled_source', 'test']:
config.pop(key, None)
msvs = config.setdefault('msvs_settings', {})
# Update the compiler directives in the shim target.
compiler = msvs.setdefault('VCCLCompilerTool', {})
compiler['DebugInformationFormat'] = '3'
compiler['ProgramDataBaseFileName'] = pdb_path
# Set the explicit PDB path in the appropriate configuration of the
# original target.
config = target_dict['configurations'][config_name]
msvs = config.setdefault('msvs_settings', {})
linker = msvs.setdefault('VCLinkerTool', {})
linker['GenerateDebugInformation'] = 'true'
linker['ProgramDatabaseFile'] = pdb_path
# Add the new targets. They must go to the beginning of the list so that
# the dependency generation works as expected in ninja.
target_list.insert(0, full_copy_target_name)
target_list.insert(0, full_shim_target_name)
target_dicts[full_copy_target_name] = copy_dict
target_dicts[full_shim_target_name] = shim_dict
# Update the original target to depend on the shim target.
target_dict.setdefault('dependencies', []).append(full_shim_target_name)
return (target_list, target_dicts)
|
c0defreak/python-for-android
|
refs/heads/master
|
python3-alpha/python3-src/Lib/turtledemo/__main__.py
|
55
|
#!/usr/bin/env python3
import sys
import os
from tkinter import *
from idlelib.Percolator import Percolator
from idlelib.ColorDelegator import ColorDelegator
from idlelib.textView import view_file # TextViewer
from imp import reload
import turtle
import time
demo_dir = os.path.dirname(os.path.abspath(__file__))
STARTUP = 1
READY = 2
RUNNING = 3
DONE = 4
EVENTDRIVEN = 5
menufont = ("Arial", 12, NORMAL)
btnfont = ("Arial", 12, 'bold')
txtfont = ('Lucida Console', 8, 'normal')
def getExampleEntries():
return [entry[:-3] for entry in os.listdir(demo_dir) if
entry.endswith(".py") and entry[0] != '_']
def showDemoHelp():
view_file(demo.root, "Help on turtleDemo",
os.path.join(demo_dir, "demohelp.txt"))
def showAboutDemo():
view_file(demo.root, "About turtleDemo",
os.path.join(demo_dir, "about_turtledemo.txt"))
def showAboutTurtle():
view_file(demo.root, "About the new turtle module.",
os.path.join(demo_dir, "about_turtle.txt"))
class DemoWindow(object):
def __init__(self, filename=None): #, root=None):
self.root = root = turtle._root = Tk()
root.wm_protocol("WM_DELETE_WINDOW", self._destroy)
#################
self.mBar = Frame(root, relief=RAISED, borderwidth=2)
self.mBar.pack(fill=X)
self.ExamplesBtn = self.makeLoadDemoMenu()
self.OptionsBtn = self.makeHelpMenu()
self.mBar.tk_menuBar(self.ExamplesBtn, self.OptionsBtn) #, QuitBtn)
root.title('Python turtle-graphics examples')
#################
self.left_frame = left_frame = Frame(root)
self.text_frame = text_frame = Frame(left_frame)
        self.vbar = vbar = Scrollbar(text_frame, name='vbar')
self.text = text = Text(text_frame,
name='text', padx=5, wrap='none',
width=45)
vbar['command'] = text.yview
vbar.pack(side=LEFT, fill=Y)
#####################
        self.hbar = hbar = Scrollbar(text_frame, name='hbar', orient=HORIZONTAL)
hbar['command'] = text.xview
hbar.pack(side=BOTTOM, fill=X)
#####################
text['yscrollcommand'] = vbar.set
text.config(font=txtfont)
text.config(xscrollcommand=hbar.set)
text.pack(side=LEFT, fill=Y, expand=1)
#####################
self.output_lbl = Label(left_frame, height= 1,text=" --- ", bg = "#ddf",
font = ("Arial", 16, 'normal'))
self.output_lbl.pack(side=BOTTOM, expand=0, fill=X)
#####################
text_frame.pack(side=LEFT, fill=BOTH, expand=0)
left_frame.pack(side=LEFT, fill=BOTH, expand=0)
self.graph_frame = g_frame = Frame(root)
turtle._Screen._root = g_frame
turtle._Screen._canvas = turtle.ScrolledCanvas(g_frame, 800, 600, 1000, 800)
#xturtle.Screen._canvas.pack(expand=1, fill="both")
self.screen = _s_ = turtle.Screen()
#####
turtle.TurtleScreen.__init__(_s_, _s_._canvas)
#####
self.scanvas = _s_._canvas
#xturtle.RawTurtle.canvases = [self.scanvas]
turtle.RawTurtle.screens = [_s_]
self.scanvas.pack(side=TOP, fill=BOTH, expand=1)
self.btn_frame = btn_frame = Frame(g_frame, height=100)
self.start_btn = Button(btn_frame, text=" START ", font=btnfont, fg = "white",
disabledforeground = "#fed", command=self.startDemo)
self.start_btn.pack(side=LEFT, fill=X, expand=1)
self.stop_btn = Button(btn_frame, text=" STOP ", font=btnfont, fg = "white",
disabledforeground = "#fed", command = self.stopIt)
self.stop_btn.pack(side=LEFT, fill=X, expand=1)
self.clear_btn = Button(btn_frame, text=" CLEAR ", font=btnfont, fg = "white",
disabledforeground = "#fed", command = self.clearCanvas)
self.clear_btn.pack(side=LEFT, fill=X, expand=1)
self.btn_frame.pack(side=TOP, fill=BOTH, expand=0)
self.graph_frame.pack(side=TOP, fill=BOTH, expand=1)
Percolator(text).insertfilter(ColorDelegator())
self.dirty = False
self.exitflag = False
if filename:
self.loadfile(filename)
self.configGUI(NORMAL, DISABLED, DISABLED, DISABLED,
"Choose example from menu", "black")
self.state = STARTUP
def _destroy(self):
self.root.destroy()
sys.exit()
def configGUI(self, menu, start, stop, clear, txt="", color="blue"):
self.ExamplesBtn.config(state=menu)
self.start_btn.config(state=start)
if start == NORMAL:
self.start_btn.config(bg="#d00")
else:
self.start_btn.config(bg="#fca")
self.stop_btn.config(state=stop)
if stop == NORMAL:
self.stop_btn.config(bg="#d00")
else:
self.stop_btn.config(bg="#fca")
        self.clear_btn.config(state=clear)
if clear == NORMAL:
self.clear_btn.config(bg="#d00")
else:
self.clear_btn.config(bg="#fca")
self.output_lbl.config(text=txt, fg=color)
def makeLoadDemoMenu(self):
CmdBtn = Menubutton(self.mBar, text='Examples', underline=0, font=menufont)
CmdBtn.pack(side=LEFT, padx="2m")
CmdBtn.menu = Menu(CmdBtn)
for entry in getExampleEntries():
def loadexample(x):
def emit():
self.loadfile(x)
return emit
CmdBtn.menu.add_command(label=entry, underline=0,
font=menufont, command=loadexample(entry))
CmdBtn['menu'] = CmdBtn.menu
return CmdBtn
def makeHelpMenu(self):
CmdBtn = Menubutton(self.mBar, text='Help', underline=0, font=menufont)
CmdBtn.pack(side=LEFT, padx='2m')
CmdBtn.menu = Menu(CmdBtn)
CmdBtn.menu.add_command(label='About turtle.py', font=menufont,
command=showAboutTurtle)
CmdBtn.menu.add_command(label='turtleDemo - Help', font=menufont,
command=showDemoHelp)
CmdBtn.menu.add_command(label='About turtleDemo', font=menufont,
command=showAboutDemo)
CmdBtn['menu'] = CmdBtn.menu
return CmdBtn
def refreshCanvas(self):
if not self.dirty: return
self.screen.clear()
#self.screen.mode("standard")
self.dirty=False
def loadfile(self, filename):
self.refreshCanvas()
modname = 'turtledemo.' + filename
__import__(modname)
self.module = sys.modules[modname]
with open(self.module.__file__, 'r') as f:
chars = f.read()
self.text.delete("1.0", "end")
self.text.insert("1.0", chars)
self.root.title(filename + " - a Python turtle graphics example")
reload(self.module)
self.configGUI(NORMAL, NORMAL, DISABLED, DISABLED,
"Press start button", "red")
self.state = READY
def startDemo(self):
self.refreshCanvas()
self.dirty = True
turtle.TurtleScreen._RUNNING = True
self.configGUI(DISABLED, DISABLED, NORMAL, DISABLED,
"demo running...", "black")
self.screen.clear()
self.screen.mode("standard")
self.state = RUNNING
try:
result = self.module.main()
if result == "EVENTLOOP":
self.state = EVENTDRIVEN
else:
self.state = DONE
except turtle.Terminator:
self.state = DONE
result = "stopped!"
if self.state == DONE:
self.configGUI(NORMAL, NORMAL, DISABLED, NORMAL,
result)
elif self.state == EVENTDRIVEN:
self.exitflag = True
self.configGUI(DISABLED, DISABLED, NORMAL, DISABLED,
"use mouse/keys or STOP", "red")
def clearCanvas(self):
self.refreshCanvas()
self.screen._delete("all")
self.scanvas.config(cursor="")
self.configGUI(NORMAL, NORMAL, DISABLED, DISABLED)
def stopIt(self):
if self.exitflag:
self.clearCanvas()
self.exitflag = False
self.configGUI(NORMAL, NORMAL, DISABLED, DISABLED,
"STOPPED!", "red")
turtle.TurtleScreen._RUNNING = False
#print "stopIT: exitflag = True"
else:
turtle.TurtleScreen._RUNNING = False
#print "stopIt: exitflag = False"
if __name__ == '__main__':
demo = DemoWindow()
RUN = True
while RUN:
try:
#print("ENTERING mainloop")
demo.root.mainloop()
except AttributeError:
#print("AttributeError!- WAIT A MOMENT!")
time.sleep(0.3)
print("GOING ON ..")
            demo.clearCanvas()
except TypeError:
demo.screen._delete("all")
#print("CRASH!!!- WAIT A MOMENT!")
time.sleep(0.3)
#print("GOING ON ..")
demo.clearCanvas()
except:
print("BYE!")
RUN = False
|
Chilledheart/chromium
|
refs/heads/master
|
tools/telemetry/third_party/gsutilz/third_party/boto/tests/integration/gs/util.py
|
101
|
# Copyright (c) 2012, Google, Inc.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import time
from boto.provider import Provider
_HAS_GOOGLE_CREDENTIALS = None
def has_google_credentials():
global _HAS_GOOGLE_CREDENTIALS
if _HAS_GOOGLE_CREDENTIALS is None:
provider = Provider('google')
if (provider.get_access_key() is None or
provider.get_secret_key() is None):
_HAS_GOOGLE_CREDENTIALS = False
else:
_HAS_GOOGLE_CREDENTIALS = True
return _HAS_GOOGLE_CREDENTIALS
def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None):
"""Retry calling the decorated function using an exponential backoff.
Taken from:
https://github.com/saltycrane/retry-decorator
Licensed under BSD:
https://github.com/saltycrane/retry-decorator/blob/master/LICENSE
    :param ExceptionToCheck: the exception to check; may be a tuple of
        exceptions to check
:type ExceptionToCheck: Exception or tuple
:param tries: number of times to try (not retry) before giving up
:type tries: int
:param delay: initial delay between retries in seconds
:type delay: int
:param backoff: backoff multiplier e.g. value of 2 will double the delay
each retry
:type backoff: int
:param logger: logger to use. If None, print
:type logger: logging.Logger instance
"""
def deco_retry(f):
def f_retry(*args, **kwargs):
mtries, mdelay = tries, delay
            while mtries > 1:
                try:
                    # a successful call returns immediately; the loop only
                    # repeats when ExceptionToCheck is raised
                    return f(*args, **kwargs)
except ExceptionToCheck, e:
msg = "%s, Retrying in %d seconds..." % (str(e), mdelay)
if logger:
logger.warning(msg)
else:
print msg
time.sleep(mdelay)
mtries -= 1
mdelay *= backoff
            # one final attempt once the retries are exhausted
            return f(*args, **kwargs)
return f_retry # true decorator
return deco_retry
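# Hedged usage sketch (not part of the original module); 'GSResponseError'
# stands in for whatever transient exception a caller wants to retry on:
#
#   @retry(GSResponseError, tries=4, delay=3, backoff=2)
#   def flaky_request():
#       ...  # a GS call that may fail transiently
#
# With tries=4, delay=3, backoff=2 the waits between attempts are 3, 6 and
# 12 seconds before the final try.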
|
Boggart/-tg-station
|
refs/heads/master
|
bot/C_srtd.py
|
36
|
import random
def srtd(data,debug,sender):
try:
arg1,arg2 = data.split("d")
except ValueError, err:
if str(err) == "need more than 1 value to unpack":
return("Too small amount of arguments")
else:
return("Too many arguments")
else:
if debug:
print sender+":!rtd "+arg1+"d"+arg2
die = []
arg1 = arg1.replace(" ","")
arg2 = arg2.replace(" ","")
try:
i_arg1 = int(arg1)
i_arg2 = int(arg2)
if abs(i_arg1) == 0 or abs(i_arg2) == 0:
raise RuntimeError
except ValueError:
return("You lied! That's not a number!")
except RuntimeError:
return("Too many zeroes!")
else:
if abs(i_arg1) > 500:
return("Too many rolls, I can only do five hundred at max.")
else:
for i in xrange(0,abs(i_arg1)):
if i_arg2 < 0:
dice = random.randint(i_arg2,0)
else:
dice = random.randint(1,i_arg2)
die.append(dice)
return(str(reduce(lambda x,y: x+y, die)))
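# Illustrative call (not in the original bot code); the sender and debug
# values are made up for the example:
#   srtd("3d6", False, "nick")  ->  the sum of three 1-6 rolls, e.g. "11"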
|
galaxy001/libtorrent
|
refs/heads/master
|
python_BTL_BitTorrent-5.3-GPL/BTL/greenlet_coro.py
|
5
|
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# greenlet-based coroutine convenience functions.
#
# author: David Harrison and Greg Hazel
from BTL.reactor_magic import reactor
from BTL import defer
from BTL import coro
from BTL.greenlet_yielddefer import like_yield, coroutine, GreenletWithDeferred
import greenlet
from twisted.python.failure import Failure
# Analogous to time.sleep, but it returns a deferred whose callback
# is called after 'secs' seconds.
wait = coro.wait
def init_yield(clss, *args, **kwargs):
"""Instantiate an object of type clss and then call its asynchronous initializer
    (__dfinit__). __dfinit__ returns a Deferred; when the deferred's callback
    is called, execution resumes in init_yield and the fully initialized object
    is returned."""
kwargs['__magic_init_yield'] = True
obj = clss(*args, **kwargs) # synchronous initialization.
like_yield(obj.__dfinit__()) # asynchronous initialization.
# How it works: async_init returns a deferred. like_yield
# installs callbacks in the deferred to the greenlet's switch.
# When the deferred completes, it calls the greenlet's switch
# causing execution to resume here.
return obj
default_yield_timeout = 10 # in seconds.
def timeout_yield(orig_df, timeout=None):
    """like_yield with a timeout. The passed timeout is in seconds; if it
    is None then default_yield_timeout is used. If the deferred eventually
    completes (i.e., gets called) after having already timed out, the
    result is discarded.
    The default is None rather than default_yield_timeout so that the
    default can still be changed after importing timeout_yield, by
    rebinding default_yield_timeout.
    WARNING: It is left to the caller to free up any state that might
    be held by the hung deferred.
    """
assert isinstance(orig_df, defer.Deferred)
df = defer.Deferred()
if timeout is None:
timeout = default_yield_timeout
t = reactor.callLater(timeout, defer.timeout, df)
def good(r):
if t.active():
df.callback(r)
def bad(r):
if t.active():
df.errback(r)
orig_df.addCallbacks(good, bad)
try:
r = like_yield(df)
finally:
if t.active():
t.cancel()
return r
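# Hedged usage sketch (illustration only): inside a @coroutine function,
# bound the wait on a deferred; 'some_async_call' is a hypothetical function
# returning a Deferred.
#   result = timeout_yield(some_async_call(), timeout=5)
# If the deferred has not fired within 5 seconds, defer.timeout errbacks the
# wrapper and like_yield raises the resulting failure.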
# Use this as a decorator to __init__ on any class in order to require the
# class be initialized using init_yield. This guarantees that the
# asynchronous initializer __dfinit__ gets called. Ex:
#
# class Foo(object):
# @use_init_yield
# def __init__( self, a, b, c):
# ...
# def __dfinit__( self ):
# ...
#
# Now to instantiate an object of type Foo, we use init_yield:
#
# foo = init_yield(Foo,a,b,c)
# If we try to instantiate Foo directly, we get an exception:
#
# foo = Foo(a,b,c) # causes an AssertionException.
use_init_yield = coro.use_init_yield
|
neurotechuoft/MindType
|
refs/heads/master
|
Code/V1/src/deprecated/pyqtgraph/graphicsItems/CurvePoint.py
|
21
|
from ..Qt import QtGui, QtCore
from . import ArrowItem
import numpy as np
from ..Point import Point
import weakref
from .GraphicsObject import GraphicsObject
__all__ = ['CurvePoint', 'CurveArrow']
class CurvePoint(GraphicsObject):
"""A GraphicsItem that sets its location to a point on a PlotCurveItem.
Also rotates to be tangent to the curve.
The position along the curve is a Qt property, and thus can be easily animated.
    Note: This class does not display anything; see CurveArrow for an applied example.
"""
def __init__(self, curve, index=0, pos=None, rotate=True):
"""Position can be set either as an index referring to the sample number or
the position 0.0 - 1.0
If *rotate* is True, then the item rotates to match the tangent of the curve.
"""
GraphicsObject.__init__(self)
#QObjectWorkaround.__init__(self)
self._rotate = rotate
self.curve = weakref.ref(curve)
self.setParentItem(curve)
self.setProperty('position', 0.0)
self.setProperty('index', 0)
if hasattr(self, 'ItemHasNoContents'):
self.setFlags(self.flags() | self.ItemHasNoContents)
if pos is not None:
self.setPos(pos)
else:
self.setIndex(index)
def setPos(self, pos):
self.setProperty('position', float(pos))## cannot use numpy types here, MUST be python float.
def setIndex(self, index):
self.setProperty('index', int(index)) ## cannot use numpy types here, MUST be python int.
def event(self, ev):
if not isinstance(ev, QtCore.QDynamicPropertyChangeEvent) or self.curve() is None:
return False
if ev.propertyName() == 'index':
index = self.property('index')
if 'QVariant' in repr(index):
index = index.toInt()[0]
elif ev.propertyName() == 'position':
index = None
else:
return False
(x, y) = self.curve().getData()
if index is None:
#print ev.propertyName(), self.property('position').toDouble()[0], self.property('position').typeName()
pos = self.property('position')
if 'QVariant' in repr(pos): ## need to support 2 APIs :(
pos = pos.toDouble()[0]
index = (len(x)-1) * np.clip(pos, 0.0, 1.0)
if index != int(index): ## interpolate floating-point values
i1 = int(index)
i2 = np.clip(i1+1, 0, len(x)-1)
s2 = index-i1
s1 = 1.0-s2
newPos = (x[i1]*s1+x[i2]*s2, y[i1]*s1+y[i2]*s2)
else:
index = int(index)
i1 = np.clip(index-1, 0, len(x)-1)
i2 = np.clip(index+1, 0, len(x)-1)
newPos = (x[index], y[index])
p1 = self.parentItem().mapToScene(QtCore.QPointF(x[i1], y[i1]))
p2 = self.parentItem().mapToScene(QtCore.QPointF(x[i2], y[i2]))
ang = np.arctan2(p2.y()-p1.y(), p2.x()-p1.x()) ## returns radians
self.resetTransform()
if self._rotate:
self.rotate(180+ ang * 180 / np.pi) ## takes degrees
QtGui.QGraphicsItem.setPos(self, *newPos)
return True
def boundingRect(self):
return QtCore.QRectF()
def paint(self, *args):
pass
def makeAnimation(self, prop='position', start=0.0, end=1.0, duration=10000, loop=1):
# In Python 3, a bytes object needs to be used as a property name in
# QPropertyAnimation. PyQt stopped automatically encoding a str when a
# QByteArray was expected in v5.5 (see qbytearray.sip).
if not isinstance(prop, bytes):
prop = prop.encode('latin-1')
anim = QtCore.QPropertyAnimation(self, prop)
anim.setDuration(duration)
anim.setStartValue(start)
anim.setEndValue(end)
anim.setLoopCount(loop)
return anim
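    # Illustrative usage (added for clarity, not part of the original module);
    # 'curve' is assumed to be an existing PlotCurveItem:
    #   cp = CurvePoint(curve)
    #   anim = cp.makeAnimation(start=0.0, end=1.0, duration=5000)
    #   anim.start()  # sweeps the point along the full curve over 5 seconds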
class CurveArrow(CurvePoint):
"""Provides an arrow that points to any specific sample on a PlotCurveItem.
Provides properties that can be animated."""
def __init__(self, curve, index=0, pos=None, **opts):
CurvePoint.__init__(self, curve, index=index, pos=pos)
if opts.get('pxMode', True):
opts['pxMode'] = False
self.setFlags(self.flags() | self.ItemIgnoresTransformations)
opts['angle'] = 0
self.arrow = ArrowItem.ArrowItem(**opts)
self.arrow.setParentItem(self)
def setStyle(self, **opts):
return self.arrow.setStyle(**opts)
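    # Sketch (illustration only): a CurveArrow reuses the CurvePoint animation
    # machinery; 'curve' is an assumed PlotCurveItem.
    #   arrow = CurveArrow(curve)
    #   arrow.makeAnimation(duration=3000).start()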
|
Azure/azure-sdk-for-python
|
refs/heads/sync-eng/common-js-nightly-docs-2-1768-ForTestPipeline
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_12_01/operations/_network_watchers_operations.py
|
1
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class NetworkWatchersOperations(object):
"""NetworkWatchersOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def create_or_update(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.NetworkWatcher"
**kwargs # type: Any
):
# type: (...) -> "_models.NetworkWatcher"
"""Creates or updates a network watcher in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param parameters: Parameters that define the network watcher resource.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.NetworkWatcher
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkWatcher, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_12_01.models.NetworkWatcher
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkWatcher"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_or_update.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'NetworkWatcher')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('NetworkWatcher', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('NetworkWatcher', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}'} # type: ignore
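    # Hedged usage sketch (not generated code): callers normally reach this
    # operation group through NetworkManagementClient; the credential, resource
    # group and watcher names below are illustrative assumptions.
    #   from azure.identity import DefaultAzureCredential
    #   from azure.mgmt.network import NetworkManagementClient
    #   client = NetworkManagementClient(DefaultAzureCredential(), subscription_id)
    #   watcher = client.network_watchers.create_or_update(
    #       "my-rg", "my-watcher", {"location": "westus"})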
def get(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.NetworkWatcher"
"""Gets the specified network watcher by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkWatcher, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_12_01.models.NetworkWatcher
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkWatcher"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkWatcher', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}'} # type: ignore
def _delete_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified network watcher resource.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}'} # type: ignore
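    # Sketch (illustration only): the LROPoller returned by begin_delete can be
    # blocked on directly; the names are examples.
    #   poller = client.network_watchers.begin_delete("my-rg", "my-watcher")
    #   poller.result()  # waits for the long-running deletion to finish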
def update_tags(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> "_models.NetworkWatcher"
"""Updates a network watcher tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param parameters: Parameters supplied to update network watcher tags.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkWatcher, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_12_01.models.NetworkWatcher
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkWatcher"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_tags.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkWatcher', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}'} # type: ignore
def list(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.NetworkWatcherListResult"]
"""Gets all network watchers by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkWatcherListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_12_01.models.NetworkWatcherListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkWatcherListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkWatcherListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers'} # type: ignore
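    # Sketch (illustration only): the ItemPaged returned by list fetches
    # follow-up pages lazily, so callers can iterate it like a plain list.
    #   for watcher in client.network_watchers.list("my-rg"):
    #       print(watcher.name)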
def list_all(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.NetworkWatcherListResult"]
"""Gets all network watchers by subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkWatcherListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_12_01.models.NetworkWatcherListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkWatcherListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkWatcherListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/networkWatchers'} # type: ignore
def get_topology(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.TopologyParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.Topology"
"""Gets the current network topology by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param parameters: Parameters that define the representation of topology.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.TopologyParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Topology, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_12_01.models.Topology
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.Topology"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.get_topology.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TopologyParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('Topology', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_topology.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/topology'} # type: ignore
def _verify_ip_flow_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.VerificationIPFlowParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.VerificationIPFlowResult"
cls = kwargs.pop('cls', None) # type: ClsType["_models.VerificationIPFlowResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._verify_ip_flow_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VerificationIPFlowParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VerificationIPFlowResult', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('VerificationIPFlowResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_verify_ip_flow_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/ipFlowVerify'} # type: ignore
def begin_verify_ip_flow(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.VerificationIPFlowParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.VerificationIPFlowResult"]
"""Verify IP flow from the specified VM to a location given the currently configured NSG rules.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param parameters: Parameters that define the IP flow to be verified.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.VerificationIPFlowParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VerificationIPFlowResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_12_01.models.VerificationIPFlowResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VerificationIPFlowResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._verify_ip_flow_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VerificationIPFlowResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_verify_ip_flow.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/ipFlowVerify'} # type: ignore
def _get_next_hop_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.NextHopParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.NextHopResult"
cls = kwargs.pop('cls', None) # type: ClsType["_models.NextHopResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._get_next_hop_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'NextHopParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('NextHopResult', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('NextHopResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_next_hop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/nextHop'} # type: ignore
def begin_get_next_hop(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.NextHopParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.NextHopResult"]
"""Gets the next hop from the specified VM.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param parameters: Parameters that define the source and destination endpoint.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.NextHopParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either NextHopResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_12_01.models.NextHopResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.NextHopResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_next_hop_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('NextHopResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_next_hop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/nextHop'} # type: ignore
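    # Usage sketch (assumptions flagged): with an authenticated
    # NetworkManagementClient ``client`` for api-version 2019-12-01, the
    # returned LROPoller is driven to completion with result(). ``vm_id`` and
    # the IP addresses below are placeholders.
    #
    #   from azure.mgmt.network.v2019_12_01 import models
    #
    #   params = models.NextHopParameters(
    #       target_resource_id=vm_id,
    #       source_ip_address="10.0.0.4",
    #       destination_ip_address="10.0.0.5",
    #   )
    #   poller = client.network_watchers.begin_get_next_hop(
    #       "myResourceGroup", "myWatcher", params)
    #   result = poller.result()  # blocks until the service reports Succeeded
    #   print(result.next_hop_type, result.next_hop_ip_address)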
def _get_vm_security_rules_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.SecurityGroupViewParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.SecurityGroupViewResult"
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecurityGroupViewResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._get_vm_security_rules_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'SecurityGroupViewParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('SecurityGroupViewResult', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('SecurityGroupViewResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_vm_security_rules_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/securityGroupView'} # type: ignore
def begin_get_vm_security_rules(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.SecurityGroupViewParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.SecurityGroupViewResult"]
"""Gets the configured and effective security group rules on the specified VM.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param parameters: Parameters that define the VM to check security groups for.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.SecurityGroupViewParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either SecurityGroupViewResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_12_01.models.SecurityGroupViewResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecurityGroupViewResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_vm_security_rules_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('SecurityGroupViewResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_vm_security_rules.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/securityGroupView'} # type: ignore
def _get_troubleshooting_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.TroubleshootingParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.TroubleshootingResult"
cls = kwargs.pop('cls', None) # type: ClsType["_models.TroubleshootingResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._get_troubleshooting_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TroubleshootingParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('TroubleshootingResult', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('TroubleshootingResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_troubleshooting_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/troubleshoot'} # type: ignore
def begin_get_troubleshooting(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.TroubleshootingParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.TroubleshootingResult"]
"""Initiate troubleshooting on a specified resource.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher resource.
:type network_watcher_name: str
:param parameters: Parameters that define the resource to troubleshoot.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.TroubleshootingParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either TroubleshootingResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_12_01.models.TroubleshootingResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.TroubleshootingResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_troubleshooting_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('TroubleshootingResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_troubleshooting.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/troubleshoot'} # type: ignore
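    # Continuation-token sketch (hypothetical driver code): an in-flight
    # troubleshoot operation can be detached and resumed later through the
    # cont_token branch above. ``client`` and ``ts_params`` are assumptions.
    #
    #   poller = client.network_watchers.begin_get_troubleshooting(
    #       "myResourceGroup", "myWatcher", ts_params)
    #   token = poller.continuation_token()  # persist this opaque string
    #   # ...later, possibly in another process...
    #   resumed = client.network_watchers.begin_get_troubleshooting(
    #       "myResourceGroup", "myWatcher", ts_params,
    #       continuation_token=token)
    #   result = resumed.result()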
def _get_troubleshooting_result_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.QueryTroubleshootingParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.TroubleshootingResult"
cls = kwargs.pop('cls', None) # type: ClsType["_models.TroubleshootingResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._get_troubleshooting_result_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'QueryTroubleshootingParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('TroubleshootingResult', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('TroubleshootingResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_troubleshooting_result_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/queryTroubleshootResult'} # type: ignore
def begin_get_troubleshooting_result(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.QueryTroubleshootingParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.TroubleshootingResult"]
"""Get the last completed troubleshooting result on a specified resource.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher resource.
:type network_watcher_name: str
:param parameters: Parameters that define the resource to query the troubleshooting result.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.QueryTroubleshootingParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either TroubleshootingResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_12_01.models.TroubleshootingResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.TroubleshootingResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_troubleshooting_result_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('TroubleshootingResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_troubleshooting_result.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/queryTroubleshootResult'} # type: ignore
def _set_flow_log_configuration_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.FlowLogInformation"
**kwargs # type: Any
):
# type: (...) -> "_models.FlowLogInformation"
cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowLogInformation"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._set_flow_log_configuration_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'FlowLogInformation')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('FlowLogInformation', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('FlowLogInformation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_set_flow_log_configuration_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/configureFlowLog'} # type: ignore
def begin_set_flow_log_configuration(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.FlowLogInformation"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.FlowLogInformation"]
"""Configures flow log and traffic analytics (optional) on a specified resource.
:param resource_group_name: The name of the network watcher resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher resource.
:type network_watcher_name: str
:param parameters: Parameters that define the configuration of flow log.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.FlowLogInformation
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either FlowLogInformation or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_12_01.models.FlowLogInformation]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowLogInformation"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._set_flow_log_configuration_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('FlowLogInformation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_set_flow_log_configuration.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/configureFlowLog'} # type: ignore
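    # Parameter-building sketch (names are placeholders): FlowLogInformation
    # targets an NSG and a storage account; only the required fields of the
    # model are shown here.
    #
    #   flow_log = models.FlowLogInformation(
    #       target_resource_id=nsg_id,      # NSG to log (hypothetical ID)
    #       storage_id=storage_account_id,  # destination storage account
    #       enabled=True,
    #   )
    #   poller = client.network_watchers.begin_set_flow_log_configuration(
    #       "myResourceGroup", "myWatcher", flow_log)
    #   info = poller.result()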
def _get_flow_log_status_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.FlowLogStatusParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.FlowLogInformation"
cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowLogInformation"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._get_flow_log_status_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'FlowLogStatusParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('FlowLogInformation', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('FlowLogInformation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_flow_log_status_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/queryFlowLogStatus'} # type: ignore
def begin_get_flow_log_status(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.FlowLogStatusParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.FlowLogInformation"]
"""Queries status of flow log and traffic analytics (optional) on a specified resource.
:param resource_group_name: The name of the network watcher resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher resource.
:type network_watcher_name: str
:param parameters: Parameters that define a resource to query flow log and traffic analytics
(optional) status.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.FlowLogStatusParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either FlowLogInformation or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_12_01.models.FlowLogInformation]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowLogInformation"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_flow_log_status_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('FlowLogInformation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_flow_log_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/queryFlowLogStatus'} # type: ignore
def _check_connectivity_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.ConnectivityParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.ConnectivityInformation"
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectivityInformation"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._check_connectivity_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'ConnectivityParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ConnectivityInformation', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('ConnectivityInformation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_check_connectivity_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectivityCheck'} # type: ignore
def begin_check_connectivity(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.ConnectivityParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.ConnectivityInformation"]
"""Verifies the possibility of establishing a direct TCP connection from a virtual machine to a
        given endpoint including another VM or an arbitrary remote server.

:param resource_group_name: The name of the network watcher resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher resource.
:type network_watcher_name: str
:param parameters: Parameters that determine how the connectivity check will be performed.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.ConnectivityParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ConnectivityInformation or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_12_01.models.ConnectivityInformation]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectivityInformation"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._check_connectivity_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ConnectivityInformation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_check_connectivity.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectivityCheck'} # type: ignore
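    # Usage sketch (illustrative): checking connectivity from a VM to an
    # arbitrary endpoint; ``vm_id`` is a placeholder resource ID.
    #
    #   conn_params = models.ConnectivityParameters(
    #       source=models.ConnectivitySource(resource_id=vm_id),
    #       destination=models.ConnectivityDestination(
    #           address="www.example.com", port=443),
    #   )
    #   poller = client.network_watchers.begin_check_connectivity(
    #       "myResourceGroup", "myWatcher", conn_params)
    #   print(poller.result().connection_status)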
def _get_azure_reachability_report_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.AzureReachabilityReportParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.AzureReachabilityReport"
cls = kwargs.pop('cls', None) # type: ClsType["_models.AzureReachabilityReport"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._get_azure_reachability_report_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'AzureReachabilityReportParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('AzureReachabilityReport', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('AzureReachabilityReport', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_azure_reachability_report_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/azureReachabilityReport'} # type: ignore
def begin_get_azure_reachability_report(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.AzureReachabilityReportParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.AzureReachabilityReport"]
"""NOTE: This feature is currently in preview and still being tested for stability. Gets the
relative latency score for internet service providers from a specified location to Azure
        regions.

:param resource_group_name: The name of the network watcher resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher resource.
:type network_watcher_name: str
:param parameters: Parameters that determine Azure reachability report configuration.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.AzureReachabilityReportParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either AzureReachabilityReport or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_12_01.models.AzureReachabilityReport]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.AzureReachabilityReport"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_azure_reachability_report_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('AzureReachabilityReport', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_azure_reachability_report.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/azureReachabilityReport'} # type: ignore
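    # Parameter sketch (values are placeholders): the reachability report is
    # scoped by a provider location and a UTC time window.
    #
    #   from datetime import datetime
    #
    #   report_params = models.AzureReachabilityReportParameters(
    #       provider_location=models.AzureReachabilityReportLocation(
    #           country="United States"),
    #       start_time=datetime(2019, 12, 1),
    #       end_time=datetime(2019, 12, 7),
    #   )
    #   report = client.network_watchers.begin_get_azure_reachability_report(
    #       "myResourceGroup", "myWatcher", report_params).result()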
def _list_available_providers_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.AvailableProvidersListParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.AvailableProvidersList"
cls = kwargs.pop('cls', None) # type: ClsType["_models.AvailableProvidersList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._list_available_providers_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'AvailableProvidersListParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('AvailableProvidersList', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('AvailableProvidersList', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_list_available_providers_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/availableProvidersList'} # type: ignore
def begin_list_available_providers(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.AvailableProvidersListParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.AvailableProvidersList"]
"""NOTE: This feature is currently in preview and still being tested for stability. Lists all
        available internet service providers for a specified Azure region.

:param resource_group_name: The name of the network watcher resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher resource.
:type network_watcher_name: str
:param parameters: Parameters that scope the list of available providers.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.AvailableProvidersListParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either AvailableProvidersList or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_12_01.models.AvailableProvidersList]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.AvailableProvidersList"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._list_available_providers_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('AvailableProvidersList', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_available_providers.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/availableProvidersList'} # type: ignore
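    # Parameter sketch (values are placeholders): the provider list can be
    # narrowed by Azure locations and by country/state/city; all fields of
    # the parameters model are optional.
    #
    #   providers_params = models.AvailableProvidersListParameters(
    #       azure_locations=["West US"],
    #       country="United States",
    #   )
    #   providers = client.network_watchers.begin_list_available_providers(
    #       "myResourceGroup", "myWatcher", providers_params).result()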
def _get_network_configuration_diagnostic_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.NetworkConfigurationDiagnosticParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.NetworkConfigurationDiagnosticResponse"
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkConfigurationDiagnosticResponse"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._get_network_configuration_diagnostic_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'NetworkConfigurationDiagnosticParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('NetworkConfigurationDiagnosticResponse', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('NetworkConfigurationDiagnosticResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_network_configuration_diagnostic_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/networkConfigurationDiagnostic'} # type: ignore
def begin_get_network_configuration_diagnostic(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
parameters, # type: "_models.NetworkConfigurationDiagnosticParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.NetworkConfigurationDiagnosticResponse"]
"""Gets Network Configuration Diagnostic data to help customers understand and debug network
behavior. It provides detailed information on what security rules were applied to a specified
        traffic flow and the result of evaluating these rules. Customers must provide details of a flow,
        such as source, destination, and protocol. The API returns whether traffic was allowed or denied,
        the rules evaluated for the specified flow, and the evaluation results.

:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param parameters: Parameters to get network configuration diagnostic.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.NetworkConfigurationDiagnosticParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either NetworkConfigurationDiagnosticResponse or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_12_01.models.NetworkConfigurationDiagnosticResponse]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkConfigurationDiagnosticResponse"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_network_configuration_diagnostic_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('NetworkConfigurationDiagnosticResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_network_configuration_diagnostic.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/networkConfigurationDiagnostic'} # type: ignore
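# Illustrative caller-side sketch (not part of the generated client; the
# client object below is hypothetical, only the begin_* call and
# LROPoller.result() mirror the method defined above):
#   poller = network_client.network_watchers.begin_get_network_configuration_diagnostic(
#       resource_group_name='my-rg',
#       network_watcher_name='my-watcher',
#       parameters=diagnostic_params,
#   )
#   result = poller.result()  # blocks, polling until the LRO reaches a terminal state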
|
naojsoft/qplan
|
refs/heads/master
|
qplan/q2ope.py
|
1
|
#!/usr/bin/env python
#
# q2ope.py -- Queue integration with legacy OPE
#
# Eric Jeschke (eric@naoj.org)
#
"""
Usage:
    q2ope.py [options]
"""
from __future__ import print_function
# stdlib imports
import sys, os
from io import StringIO
# 3rd party imports
from ginga.misc import log
# Local imports
from . import entity
# NOTE: main() below uses SPCAM.Converter, but the original file never
# imports SPCAM; its location is assumed here (a sibling module).
from . import SPCAM
from .Model import QueueModel
version = '20150105.0'
class QueueLoader(object):
def __init__(self, model, logger, options):
self.model = model
self.logger = logger
self.input_dir = options.input_dir.strip()
self.schedule_qf = None
self.programs_qf = None
self.ob_qf_dict = None
self.oblist_info = []
def initialize_model(self):
try:
# read schedule
schedule_file = os.path.join(self.input_dir, "schedule.csv")
if not os.path.exists(schedule_file):
self.logger.error("File not readable: %s" % (schedule_file))
return
self.logger.info("reading schedule from %s" % (schedule_file))
self.schedule_qf = entity.ScheduleFile(schedule_file, self.logger)
self.model.set_schedule_qf(self.schedule_qf)
# read proposals
proposal_file = os.path.join(self.input_dir, "programs.csv")
if not os.path.exists(proposal_file):
self.logger.error("File not readable: %s" % (proposal_file))
return
self.logger.info("reading proposals from %s" % (proposal_file))
self.programs_qf = entity.ProgramsFile(proposal_file, self.logger)
self.model.set_programs_qf(self.programs_qf)
# read observing blocks
self.ob_qf_dict = {}
self.oblist_info = []
propnames = list(self.programs_qf.programs_info.keys())
propnames.sort()
for propname in propnames:
obfile = os.path.join(self.input_dir, propname+".csv")
if not os.path.exists(obfile):
self.logger.error("File not readable: %s" % (obfile))
continue
self.logger.info("loading observing blocks from file %s" % obfile)
self.ob_qf_dict[propname] = entity.OBListFile(obfile, self.logger,
propname,
self.programs_qf.programs_info)
self.model.set_ob_qf_dict(self.ob_qf_dict)
except Exception as e:
self.logger.error("Error initializing: %s" % (str(e)))
def update_model(self):
try:
self.model.set_schedule_info(self.schedule_qf.schedule_info)
self.model.set_programs_info(self.programs_qf.programs_info)
# TODO: this maybe should be done in the Model
self.oblist_info = []
propnames = list(self.programs_qf.programs_info.keys())
propnames.sort()
#for propname in self.programs_qf.programs_info:
for propname in propnames:
self.oblist_info.extend(self.ob_qf_dict[propname].obs_info)
# TODO: only needed if we ADD or REMOVE programs
self.model.set_oblist_info(self.oblist_info)
except Exception as e:
self.logger.error("Error storing into model: %s" % (str(e)))
self.logger.info("model initialized")
class BaseConverter(object):
def __init__(self, logger):
self.logger = logger
def _mk_out(self, out_f):
def out(*args):
print(*args, file=out_f)
return out
def ra_to_funky(self, ra):
return float(ra.replace(':', ''))
def dec_to_funky(self, dec):
return float(dec.replace(':', ''))
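# Example of the "funky" coordinate encoding above (illustrative values,
# not taken from the source): the sexagesimal separators are dropped and
# the remainder is parsed as a float.
#   ra_to_funky("12:34:56.7")   -> 123456.7
#   dec_to_funky("-01:23:45.6") -> -12345.6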
def main(options, args):
# Create top level logger.
logger = log.make_logger('ob2ope', options=options)
# create queue model, loader and OPE converter
model = QueueModel(logger=logger)
loader = QueueLoader(model, logger, options)
converter = SPCAM.Converter(logger)
# load the data
loader.initialize_model()
loader.update_model()
# buffer for OPE output
out_f = StringIO()
# write preamble
converter.write_ope_header(out_f)
# convert each OB
oblist = loader.oblist_info
for ob in oblist:
converter.ob_to_ope(ob, out_f)
# here's the OPE file
print(out_f.getvalue())
if __name__ == "__main__":
# Parse command line options with nifty new optparse module
from optparse import OptionParser
usage = "usage: %prog [options] cmd [args]"
optprs = OptionParser(usage=usage, version=('%%prog %s' % version))
optprs.add_option("--debug", dest="debug", default=False, action="store_true",
help="Enter the pdb debugger on main()")
optprs.add_option("-i", "--input", dest="input_dir", default="input",
metavar="DIRECTORY",
help="Read input files from DIRECTORY")
optprs.add_option("-o", "--output", dest="output_dir", default="output",
metavar="DIRECTORY",
help="Write output files to DIRECTORY")
optprs.add_option("--profile", dest="profile", action="store_true",
default=False,
help="Run the profiler on main()")
log.addlogopts(optprs)
(options, args) = optprs.parse_args(sys.argv[1:])
# Are we debugging this?
if options.debug:
import pdb
pdb.run('main(options, args)')
# Are we profiling this?
elif options.profile:
import profile
print("%s profile:" % sys.argv[0])
profile.run('main(options, args)')
else:
main(options, args)
# END
|
wrouesnel/ansible
|
refs/heads/devel
|
test/units/modules/network/vyos/test_vyos_user.py
|
57
|
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.tests.mock import patch
from ansible.modules.network.vyos import vyos_user
from units.modules.utils import set_module_args
from .vyos_module import TestVyosModule, load_fixture
class TestVyosUserModule(TestVyosModule):
module = vyos_user
def setUp(self):
super(TestVyosUserModule, self).setUp()
self.mock_get_config = patch('ansible.modules.network.vyos.vyos_user.get_config')
self.get_config = self.mock_get_config.start()
self.mock_load_config = patch('ansible.modules.network.vyos.vyos_user.load_config')
self.load_config = self.mock_load_config.start()
def tearDown(self):
super(TestVyosUserModule, self).tearDown()
self.mock_get_config.stop()
self.mock_load_config.stop()
def load_fixtures(self, commands=None, transport='cli'):
self.get_config.return_value = load_fixture('vyos_user_config.cfg')
self.load_config.return_value = dict(diff=None, session='session')
def test_vyos_user_password(self):
set_module_args(dict(name='ansible', configured_password='test'))
result = self.execute_module(changed=True)
self.assertEqual(result['commands'], ['set system login user ansible authentication plaintext-password test'])
def test_vyos_user_delete(self):
set_module_args(dict(name='ansible', state='absent'))
result = self.execute_module(changed=True)
self.assertEqual(result['commands'], ['delete system login user ansible'])
def test_vyos_user_level(self):
set_module_args(dict(name='ansible', level='operator'))
result = self.execute_module(changed=True)
self.assertEqual(result['commands'], ['set system login user ansible level operator'])
def test_vyos_user_level_invalid(self):
set_module_args(dict(name='ansible', level='sysadmin'))
self.execute_module(failed=True)
def test_vyos_user_purge(self):
set_module_args(dict(purge=True))
result = self.execute_module(changed=True)
self.assertEqual(sorted(result['commands']), sorted(['delete system login user ansible',
'delete system login user admin']))
def test_vyos_user_update_password_changed(self):
set_module_args(dict(name='test', configured_password='test', update_password='on_create'))
result = self.execute_module(changed=True)
self.assertEqual(result['commands'], ['set system login user test authentication plaintext-password test'])
def test_vyos_user_update_password_on_create_ok(self):
set_module_args(dict(name='ansible', configured_password='test', update_password='on_create'))
self.execute_module()
def test_vyos_user_update_password_always(self):
set_module_args(dict(name='ansible', configured_password='test', update_password='always'))
result = self.execute_module(changed=True)
self.assertEqual(result['commands'], ['set system login user ansible authentication plaintext-password test'])
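# Note on the pattern above: each test feeds arguments through
# set_module_args(), execute_module() runs vyos_user against the mocked
# get_config (the vyos_user_config.cfg fixture, which defines the users
# 'ansible' and 'admin' exercised by the purge test), and the generated
# CLI commands are asserted directly.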
|
SanketDG/contributr
|
refs/heads/master
|
contributr/contributr/wsgi.py
|
3
|
"""
WSGI config for contributr project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Default to the local settings unless the environment already chose one;
# this must run before get_wsgi_application() imports the settings.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "contributr.settings.local")
if os.environ.get("DJANGO_SETTINGS_MODULE") == "contributr.settings.production":
    # Using whitenoise to serve static files in production
    from whitenoise.django import DjangoWhiteNoise
    application = DjangoWhiteNoise(get_wsgi_application())
else:
    application = get_wsgi_application()
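# Illustrative deployment sketch (hypothetical command; the module path
# follows the project layout above):
#   DJANGO_SETTINGS_MODULE=contributr.settings.production \
#       gunicorn contributr.wsgi:application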
|
rezoo/chainer
|
refs/heads/master
|
chainer/functions/math/fix.py
|
4
|
import chainer
from chainer.backends import cuda
from chainer import utils
def fix(x):
"""Elementwise fix function.
.. math::
       y_i = \\operatorname{fix}(x_i) = \\operatorname{sign}(x_i) \\lfloor |x_i| \\rfloor
Args:
x (~chainer.Variable): Input variable.
Returns:
~chainer.Variable: Output variable.
"""
if isinstance(x, chainer.variable.Variable):
x = x.data
xp = cuda.get_array_module(x)
return chainer.as_variable(utils.force_array(xp.fix(x), x.dtype))
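# Usage sketch (illustrative): fix() truncates toward zero, elementwise.
#   import numpy as np
#   y = fix(np.array([1.7, -1.7, 0.5]))
#   y.data  # -> array([ 1., -1.,  0.])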
|
gogobebe2/thefuck
|
refs/heads/master
|
tests/conftest.py
|
14
|
import pytest
@pytest.fixture
def no_memoize(monkeypatch):
monkeypatch.setattr('thefuck.utils.memoize.disabled', True)
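# Usage sketch (hypothetical test function): listing the fixture as a
# parameter disables thefuck's memoization for that test, so any
# @memoize-cached helper is re-evaluated on every call.
#   def test_something(no_memoize):
#       ...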
|
daltonsena/eyed3
|
refs/heads/master
|
src/test/id3/test_tag.py
|
1
|
# -*- coding: utf-8 -*-
################################################################################
# Copyright (C) 2011-2012 Travis Shirk <travis@pobox.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
################################################################################
import sys
import os
if sys.version_info[:2] == (2, 6):
import unittest2 as unittest
else:
import unittest
from nose.tools import *
import eyed3
from eyed3.core import Date
from eyed3.id3 import Tag, ID3_DEFAULT_VERSION, ID3_V2_3, ID3_V2_4
from eyed3.id3 import frames
from eyed3.compat import unicode
from ..compat import *
from .. import ExternalDataTestCase, DATA_D
def testTagImport():
import eyed3.id3, eyed3.id3.tag
assert_equal(eyed3.id3.Tag, eyed3.id3.tag.Tag)
def testTagConstructor():
t = Tag()
assert_is_none(t.file_info)
assert_is_not_none(t.header)
assert_is_not_none(t.extended_header)
assert_is_not_none(t.frame_set)
assert_equal(len(t.frame_set), 0)
def testFileInfoConstructor():
from eyed3.id3.tag import FileInfo
# Both bytes and unicode input file names must be accepted and the former
# must be converted to unicode.
for name in [__file__, unicode(__file__)]:
fi = FileInfo(name)
assert_is(type(fi.name), unicode)
        assert_equal(fi.name, unicode(name))
assert_equal(fi.tag_size, 0)
# FIXME Passing invalid unicode
def testTagMainProps():
tag = Tag()
# No version yet
assert_equal(tag.version, ID3_DEFAULT_VERSION)
assert_false(tag.isV1())
assert_true(tag.isV2())
assert_equal(tag.artist, None)
tag.artist = u"Autolux"
assert_equal(tag.artist, u"Autolux")
assert_equal(len(tag.frame_set), 1)
tag.artist = u""
assert_equal(len(tag.frame_set), 0)
tag.artist = u"Autolux"
assert_equal(tag.album, None)
tag.album = u"Future Perfect"
assert_equal(tag.album, u"Future Perfect")
assert_equal(tag.album_artist, None)
tag.album_artist = u"Various Artists"
assert_equal(tag.album_artist, u"Various Artists")
assert_equal(tag.title, None)
tag.title = u"Robots in the Garden"
assert_equal(tag.title, u"Robots in the Garden")
assert_equal(tag.track_num, (None, None))
tag.track_num = 7
assert_equal(tag.track_num, (7, None))
tag.track_num = (7, None)
assert_equal(tag.track_num, (7, None))
tag.track_num = (7, 15)
assert_equal(tag.frame_set[frames.TRACKNUM_FID][0].text, "07/15")
assert_equal(tag.track_num, (7, 15))
tag.track_num = (7, 150)
assert_equal(tag.frame_set[frames.TRACKNUM_FID][0].text, "007/150")
assert_equal(tag.track_num, (7, 150))
tag.track_num = (1, 7)
assert_equal(tag.frame_set[frames.TRACKNUM_FID][0].text, "01/07")
assert_equal(tag.track_num, (1, 7))
tag.track_num = None
assert_equal(tag.track_num, (None, None))
tag.track_num = None, None
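# Summary of the behavior exercised above: the track tuple renders as
# "NN/TT", and the width of the total-count field drives the zero padding,
# e.g. (7, 15) -> "07/15" while (7, 150) -> "007/150".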
def testTagDates():
tag = Tag()
tag.release_date = 2004
assert_equal(tag.release_date, Date(2004))
tag.release_date = None
assert_equal(tag.release_date, None)
tag = Tag()
for date in [Date(2002), Date(2002, 11, 26), Date(2002, 11, 26),
Date(2002, 11, 26, 4), Date(2002, 11, 26, 4, 20),
Date(2002, 11, 26, 4, 20), Date(2002, 11, 26, 4, 20, 10)]:
tag.encoding_date = date
assert_equal(tag.encoding_date, date)
tag.encoding_date = str(date)
assert_equal(tag.encoding_date, date)
tag.release_date = date
assert_equal(tag.release_date, date)
tag.release_date = str(date)
assert_equal(tag.release_date, date)
tag.original_release_date = date
assert_equal(tag.original_release_date, date)
tag.original_release_date = str(date)
assert_equal(tag.original_release_date, date)
tag.recording_date = date
assert_equal(tag.recording_date, date)
tag.recording_date = str(date)
assert_equal(tag.recording_date, date)
tag.tagging_date = date
assert_equal(tag.tagging_date, date)
tag.tagging_date = str(date)
assert_equal(tag.tagging_date, date)
try:
tag._setDate(2.4)
except TypeError:
pass # expected
else:
assert_false("Invalid date type, expected TypeError")
def testTagComments():
tag = Tag()
for c in tag.comments:
assert_false("Expected not to be here")
# Adds
assert_raises(TypeError, tag.comments.set, "bold")
assert_raises(TypeError, tag.comments.set, u"bold", "search")
tag.comments.set(u"Always Try", u"")
assert_equal(len(tag.comments), 1)
c = tag.comments[0]
assert_equal(c.description, u"")
assert_equal(c.text, u"Always Try")
assert_equal(c.lang, "eng")
tag.comments.set(u"Speak Out", u"Bold")
assert_equal(len(tag.comments), 2)
c = tag.comments[1]
assert_equal(c.description, u"Bold")
assert_equal(c.text, u"Speak Out")
assert_equal(c.lang, "eng")
tag.comments.set(u"K Town Mosh Crew", u"Crippled Youth", "sxe")
assert_equal(len(tag.comments), 3)
c = tag.comments[2]
assert_equal(c.description, u"Crippled Youth")
assert_equal(c.text, u"K Town Mosh Crew")
assert_equal(c.lang, "sxe")
# Lang is different, new frame
tag.comments.set(u"K Town Mosh Crew", u"Crippled Youth", "eng")
assert_equal(len(tag.comments), 4)
c = tag.comments[3]
assert_equal(c.description, u"Crippled Youth")
assert_equal(c.text, u"K Town Mosh Crew")
assert_equal(c.lang, "eng")
# Gets
assert_is_none(tag.comments.get(u"", "fre"))
assert_is_none(tag.comments.get(u"Crippled Youth", "esp"))
c = tag.comments.get(u"")
assert_true(c)
assert_equal(c.description, u"")
assert_equal(c.text, u"Always Try")
assert_equal(c.lang, "eng")
assert_is_not_none(tag.comments.get(u"Bold"))
assert_is_not_none(tag.comments.get(u"Bold", "eng"))
assert_is_not_none(tag.comments.get(u"Crippled Youth", "eng"))
assert_is_not_none(tag.comments.get(u"Crippled Youth", "sxe"))
assert_equal(len(tag.comments), 4)
# Iterate
count = 0
for c in tag.comments:
count += 1
assert_equal(count, 4)
# Index access
assert_true(tag.comments[0])
assert_true(tag.comments[1])
assert_true(tag.comments[2])
assert_true(tag.comments[3])
try:
c = tag.comments[4]
except IndexError:
pass # expected
else:
assert_false("Expected IndexError, but got success")
# Removal
assert_raises(TypeError, tag.comments.remove, "not unicode")
assert_is_none(tag.comments.remove(u"foobazz"))
c = tag.comments.get(u"Bold")
assert_is_not_none(c)
c2 = tag.comments.remove(u"Bold")
assert_equal(c, c2)
assert_equal(len(tag.comments), 3)
c = tag.comments.get(u"Crippled Youth", "eng")
assert_is_not_none(c)
c2 = tag.comments.remove(u"Crippled Youth", "eng")
assert_equal(c, c2)
assert_equal(len(tag.comments), 2)
assert_is_none(tag.comments.remove(u"Crippled Youth", "eng"))
assert_equal(len(tag.comments), 2)
assert_equal(tag.comments.get(u""), tag.comments.remove(u""))
assert_equal(len(tag.comments), 1)
assert_equal(tag.comments.get(u"Crippled Youth", "sxe"),
tag.comments.remove(u"Crippled Youth", "sxe"))
assert_equal(len(tag.comments), 0)
# Index Error when there are no comments
try:
c = tag.comments[0]
except IndexError:
pass # expected
else:
assert_false("Expected IndexError, but got success")
# Replacing frames thru add and frame object preservation
tag = Tag()
c1 = tag.comments.set(u"Snoop", u"Dog", "rap")
assert_equal(tag.comments.get(u"Dog", "rap").text, u"Snoop")
c1.text = u"Lollipop"
assert_equal(tag.comments.get(u"Dog", "rap").text, u"Lollipop")
# now thru add
c2 = tag.comments.set(u"Doggy", u"Dog", "rap")
assert_equal(id(c1), id(c2))
assert_equal(tag.comments.get(u"Dog", "rap").text, u"Doggy")
def testTagBPM():
tag = Tag()
assert_is_none(tag.bpm)
tag.bpm = 150
assert_equal(tag.bpm, 150)
assert_true(tag.frame_set["TBPM"])
tag.bpm = 180
assert_equal(tag.bpm, 180)
assert_true(tag.frame_set["TBPM"])
assert_equal(len(tag.frame_set["TBPM"]), 1)
tag.bpm = 190.5
assert_true(type(tag.bpm) is int)
assert_equal(tag.bpm, 191)
assert_equal(len(tag.frame_set["TBPM"]), 1)
tag.bpm = "200"
assert_true(type(tag.bpm) is int)
assert_equal(tag.bpm, 200)
assert_equal(len(tag.frame_set["TBPM"]), 1)
def testTagPlayCount():
tag = Tag()
assert_is_none(tag.play_count)
tag.play_count = 0
assert_equal(tag.play_count, 0)
tag.play_count = 1
assert_equal(tag.play_count, 1)
tag.play_count += 1
assert_equal(tag.play_count, 2)
tag.play_count -= 1
assert_equal(tag.play_count, 1)
tag.play_count *= 5
assert_equal(tag.play_count, 5)
tag.play_count = None
assert_equal(tag.play_count, None)
try:
tag.play_count = -1
except ValueError:
pass # expected
else:
assert_false("Invalid play count, expected ValueError")
def testTagPublisher():
t = Tag()
assert_is_none(t.publisher)
try:
t.publisher = "not unicode"
except TypeError:
pass #expected
else:
assert_false("Expected TypeError when setting non-unicode publisher")
t.publisher = u"Dischord"
assert_equal(t.publisher, u"Dischord")
t.publisher = u"Infinity Cat"
assert_equal(t.publisher, u"Infinity Cat")
t.publisher = None
assert_equal(t.publisher, None)
def testTagCdId():
tag = Tag()
assert_equal(tag.cd_id, None)
tag.cd_id = b"\x01\x02"
assert_equal(tag.cd_id, b"\x01\x02")
tag.cd_id = b"\xff" * 804
assert_equal(tag.cd_id, b"\xff" * 804)
try:
tag.cd_id = b"\x00" * 805
except ValueError:
pass # expected
else:
assert_false("CD id is too long, expected ValueError")
def testTagImages():
from eyed3.id3.frames import ImageFrame
tag = Tag()
# No images
assert_equal(len(tag.images), 0)
for i in tag.images:
assert_false("Expected no images")
try:
img = tag.images[0]
except IndexError:
pass #expected
else:
assert_false("Expected IndexError for no images")
assert_is_none(tag.images.get(u""))
# Image types must be within range
for i in range(ImageFrame.MIN_TYPE, ImageFrame.MAX_TYPE):
tag.images.set(i, b"\xff", "img")
for i in (ImageFrame.MIN_TYPE - 1, ImageFrame.MAX_TYPE + 1):
try:
tag.images.set(i, b"\xff", "img")
except ValueError:
pass # expected
else:
assert_false("Expected ValueError for invalid picture type")
tag = Tag()
tag.images.set(ImageFrame.FRONT_COVER, b"\xab\xcd", "img/gif")
assert_equal(len(tag.images), 1)
assert_equal(tag.images[0].description, u"")
assert_equal(tag.images[0].picture_type, ImageFrame.FRONT_COVER)
assert_equal(tag.images[0].image_data, b"\xab\xcd")
assert_equal(tag.images[0].mime_type, "img/gif")
assert_equal(tag.images[0].image_url, None)
assert_equal(tag.images.get(u"").description, u"")
assert_equal(tag.images.get(u"").picture_type, ImageFrame.FRONT_COVER)
assert_equal(tag.images.get(u"").image_data, b"\xab\xcd")
assert_equal(tag.images.get(u"").mime_type, "img/gif")
assert_equal(tag.images.get(u"").image_url, None)
tag.images.set(ImageFrame.FRONT_COVER, b"\xdc\xba", "img/gif", u"Different")
assert_equal(len(tag.images), 2)
assert_equal(tag.images[1].description, u"Different")
assert_equal(tag.images[1].picture_type, ImageFrame.FRONT_COVER)
assert_equal(tag.images[1].image_data, b"\xdc\xba")
assert_equal(tag.images[1].mime_type, "img/gif")
assert_equal(tag.images[1].image_url, None)
assert_equal(tag.images.get(u"Different").description, u"Different")
assert_equal(tag.images.get(u"Different").picture_type,
ImageFrame.FRONT_COVER)
assert_equal(tag.images.get(u"Different").image_data, b"\xdc\xba")
assert_equal(tag.images.get(u"Different").mime_type, "img/gif")
assert_equal(tag.images.get(u"Different").image_url, None)
# This is an update (same description)
tag.images.set(ImageFrame.BACK_COVER, b"\xff\xef", "img/jpg", u"Different")
assert_equal(len(tag.images), 2)
assert_equal(tag.images[1].description, u"Different")
assert_equal(tag.images[1].picture_type, ImageFrame.BACK_COVER)
assert_equal(tag.images[1].image_data, b"\xff\xef")
assert_equal(tag.images[1].mime_type, "img/jpg")
assert_equal(tag.images[1].image_url, None)
assert_equal(tag.images.get(u"Different").description, u"Different")
assert_equal(tag.images.get(u"Different").picture_type,
ImageFrame.BACK_COVER)
assert_equal(tag.images.get(u"Different").image_data, b"\xff\xef")
assert_equal(tag.images.get(u"Different").mime_type, "img/jpg")
assert_equal(tag.images.get(u"Different").image_url, None)
count = 0
for img in tag.images:
count += 1
assert_equal(count, 2)
# Remove
img = tag.images.remove(u"")
assert_equal(img.description, u"")
assert_equal(img.picture_type, ImageFrame.FRONT_COVER)
assert_equal(img.image_data, b"\xab\xcd")
assert_equal(img.mime_type, "img/gif")
assert_equal(img.image_url, None)
assert_equal(len(tag.images), 1)
img = tag.images.remove(u"Different")
assert_equal(img.description, u"Different")
assert_equal(img.picture_type, ImageFrame.BACK_COVER)
assert_equal(img.image_data, b"\xff\xef")
assert_equal(img.mime_type, "img/jpg")
assert_equal(img.image_url, None)
assert_equal(len(tag.images), 0)
assert_is_none(tag.images.remove(u"Lundqvist"))
# Unicode enforcement
assert_raises(TypeError, tag.images.get, "not Unicode")
assert_raises(TypeError, tag.images.set, ImageFrame.ICON, "\xff", "img",
"not Unicode")
assert_raises(TypeError, tag.images.remove, "not Unicode")
# Image URL
tag = Tag()
tag.images.set(ImageFrame.BACK_COVER, None, None, u"A URL",
img_url="http://www.tumblr.com/tagged/ty-segall")
img = tag.images.get(u"A URL")
assert_is_not_none(img)
assert_equal(img.image_data, None)
assert_equal(img.image_url, "http://www.tumblr.com/tagged/ty-segall")
assert_equal(img.mime_type, "-->")
def testTagLyrics():
tag = Tag()
for c in tag.lyrics:
assert_false("Expected not to be here")
# Adds
assert_raises(TypeError, tag.lyrics.set, "bold")
assert_raises(TypeError, tag.lyrics.set, u"bold", "search")
tag.lyrics.set(u"Always Try", u"")
assert_equal(len(tag.lyrics), 1)
c = tag.lyrics[0]
assert_equal(c.description, u"")
assert_equal(c.text, u"Always Try")
assert_equal(c.lang, "eng")
tag.lyrics.set(u"Speak Out", u"Bold")
assert_equal(len(tag.lyrics), 2)
c = tag.lyrics[1]
assert_equal(c.description, u"Bold")
assert_equal(c.text, u"Speak Out")
assert_equal(c.lang, "eng")
tag.lyrics.set(u"K Town Mosh Crew", u"Crippled Youth", "sxe")
assert_equal(len(tag.lyrics), 3)
c = tag.lyrics[2]
assert_equal(c.description, u"Crippled Youth")
assert_equal(c.text, u"K Town Mosh Crew")
assert_equal(c.lang, "sxe")
# Lang is different, new frame
tag.lyrics.set(u"K Town Mosh Crew", u"Crippled Youth", "eng")
assert_equal(len(tag.lyrics), 4)
c = tag.lyrics[3]
assert_equal(c.description, u"Crippled Youth")
assert_equal(c.text, u"K Town Mosh Crew")
assert_equal(c.lang, "eng")
# Gets
assert_is_none(tag.lyrics.get(u"", "fre"))
assert_is_none(tag.lyrics.get(u"Crippled Youth", "esp"))
c = tag.lyrics.get(u"")
assert_true(c)
assert_equal(c.description, u"")
assert_equal(c.text, u"Always Try")
assert_equal(c.lang, "eng")
assert_is_not_none(tag.lyrics.get(u"Bold"))
assert_is_not_none(tag.lyrics.get(u"Bold", "eng"))
assert_is_not_none(tag.lyrics.get(u"Crippled Youth", "eng"))
assert_is_not_none(tag.lyrics.get(u"Crippled Youth", "sxe"))
assert_equal(len(tag.lyrics), 4)
# Iterate
count = 0
for c in tag.lyrics:
count += 1
assert_equal(count, 4)
# Index access
assert_true(tag.lyrics[0])
assert_true(tag.lyrics[1])
assert_true(tag.lyrics[2])
assert_true(tag.lyrics[3])
try:
c = tag.lyrics[4]
except IndexError:
pass # expected
else:
assert_false("Expected IndexError, but got success")
# Removal
assert_raises(TypeError, tag.lyrics.remove, "not unicode")
assert_is_none(tag.lyrics.remove(u"foobazz"))
c = tag.lyrics.get(u"Bold")
assert_is_not_none(c)
c2 = tag.lyrics.remove(u"Bold")
assert_equal(c, c2)
assert_equal(len(tag.lyrics), 3)
c = tag.lyrics.get(u"Crippled Youth", "eng")
assert_is_not_none(c)
c2 = tag.lyrics.remove(u"Crippled Youth", "eng")
assert_equal(c, c2)
assert_equal(len(tag.lyrics), 2)
assert_is_none(tag.lyrics.remove(u"Crippled Youth", "eng"))
assert_equal(len(tag.lyrics), 2)
assert_equal(tag.lyrics.get(u""), tag.lyrics.remove(u""))
assert_equal(len(tag.lyrics), 1)
assert_equal(tag.lyrics.get(u"Crippled Youth", "sxe"),
tag.lyrics.remove(u"Crippled Youth", "sxe"))
assert_equal(len(tag.lyrics), 0)
# Index Error when there are no lyrics
try:
c = tag.lyrics[0]
except IndexError:
pass # expected
else:
assert_false("Expected IndexError, but got success")
def testTagObjects():
tag = Tag()
# No objects
assert_equal(len(tag.objects), 0)
for i in tag.objects:
assert_false("Expected no objects")
try:
img = tag.objects[0]
except IndexError:
pass #expected
else:
assert_false("Expected IndexError for no objects")
assert_is_none(tag.objects.get(u""))
tag = Tag()
tag.objects.set(b"\xab\xcd", "img/gif")
assert_equal(len(tag.objects), 1)
assert_equal(tag.objects[0].description, u"")
assert_equal(tag.objects[0].filename, u"")
assert_equal(tag.objects[0].object_data, b"\xab\xcd")
assert_equal(tag.objects[0].mime_type, "img/gif")
assert_equal(tag.objects.get(u"").description, u"")
assert_equal(tag.objects.get(u"").filename, u"")
assert_equal(tag.objects.get(u"").object_data, b"\xab\xcd")
assert_equal(tag.objects.get(u"").mime_type, "img/gif")
tag.objects.set(b"\xdc\xba", "img/gif", u"Different")
assert_equal(len(tag.objects), 2)
assert_equal(tag.objects[1].description, u"Different")
assert_equal(tag.objects[1].filename, u"")
assert_equal(tag.objects[1].object_data, b"\xdc\xba")
assert_equal(tag.objects[1].mime_type, "img/gif")
assert_equal(tag.objects.get(u"Different").description, u"Different")
assert_equal(tag.objects.get(u"Different").filename, u"")
assert_equal(tag.objects.get(u"Different").object_data, b"\xdc\xba")
assert_equal(tag.objects.get(u"Different").mime_type, "img/gif")
# This is an update (same description)
tag.objects.set(b"\xff\xef", "img/jpg", u"Different",
u"example_filename.XXX")
assert_equal(len(tag.objects), 2)
assert_equal(tag.objects[1].description, u"Different")
assert_equal(tag.objects[1].filename, u"example_filename.XXX")
assert_equal(tag.objects[1].object_data, b"\xff\xef")
assert_equal(tag.objects[1].mime_type, "img/jpg")
assert_equal(tag.objects.get(u"Different").description, u"Different")
assert_equal(tag.objects.get(u"Different").filename,
u"example_filename.XXX")
assert_equal(tag.objects.get(u"Different").object_data, b"\xff\xef")
assert_equal(tag.objects.get(u"Different").mime_type, "img/jpg")
count = 0
for obj in tag.objects:
count += 1
assert_equal(count, 2)
# Remove
obj = tag.objects.remove(u"")
assert_equal(obj.description, u"")
assert_equal(obj.filename, u"")
assert_equal(obj.object_data, b"\xab\xcd")
assert_equal(obj.mime_type, "img/gif")
assert_equal(len(tag.objects), 1)
obj = tag.objects.remove(u"Different")
assert_equal(obj.description, u"Different")
assert_equal(obj.filename, u"example_filename.XXX")
assert_equal(obj.object_data, b"\xff\xef")
assert_equal(obj.mime_type, "img/jpg")
assert_equal(len(tag.objects), 0)
assert_is_none(tag.objects.remove(u"Dubinsky"))
# Unicode enforcement
assert_raises(TypeError, tag.objects.get, "not Unicode")
assert_raises(TypeError, tag.objects.set, "\xff", "img", "not Unicode")
assert_raises(TypeError, tag.objects.set, "\xff", "img", u"Unicode",
"not unicode")
assert_raises(TypeError, tag.objects.remove, "not Unicode")
def testTagPrivates():
tag = Tag()
# No private frames
assert_equal(len(tag.privates), 0)
for i in tag.privates:
assert_false("Expected no privates")
try:
img = tag.privates[0]
except IndexError:
pass #expected
else:
assert_false("Expected IndexError for no privates")
assert_is_none(tag.privates.get(u""))
tag = Tag()
tag.privates.set(b"\xab\xcd", "owner1")
assert_equal(len(tag.privates), 1)
assert_equal(tag.privates[0].owner_id, "owner1")
assert_equal(tag.privates[0].owner_data, b"\xab\xcd")
assert_equal(tag.privates.get("owner1").owner_id, "owner1")
assert_equal(tag.privates.get("owner1").owner_data, b"\xab\xcd")
tag.privates.set(b"\xba\xdc", "owner2")
assert_equal(len(tag.privates), 2)
assert_equal(tag.privates[1].owner_id, "owner2")
assert_equal(tag.privates[1].owner_data, b"\xba\xdc")
assert_equal(tag.privates.get("owner2").owner_id, "owner2")
assert_equal(tag.privates.get("owner2").owner_data, b"\xba\xdc")
# This is an update (same description)
tag.privates.set(b"\x00\x00\x00", "owner1")
assert_equal(len(tag.privates), 2)
assert_equal(tag.privates[0].owner_id, "owner1")
assert_equal(tag.privates[0].owner_data, b"\x00\x00\x00")
assert_equal(tag.privates.get("owner1").owner_id, "owner1")
assert_equal(tag.privates.get("owner1").owner_data, b"\x00\x00\x00")
count = 0
for f in tag.privates:
count += 1
assert_equal(count, 2)
# Remove
priv = tag.privates.remove("owner1")
assert_equal(priv.owner_id, "owner1")
assert_equal(priv.owner_data, b"\x00\x00\x00")
assert_equal(len(tag.privates), 1)
priv = tag.privates.remove("owner2")
assert_equal(priv.owner_id, "owner2")
assert_equal(priv.owner_data, b"\xba\xdc")
assert_equal(len(tag.privates), 0)
assert_is_none(tag.objects.remove(u"Callahan"))
def testTagDiscNum():
tag = Tag()
assert_equal(tag.disc_num, (None, None))
tag.disc_num = 7
assert_equal(tag.disc_num, (7, None))
tag.disc_num = (7, None)
assert_equal(tag.disc_num, (7, None))
tag.disc_num = (7, 15)
assert_equal(tag.frame_set[frames.DISCNUM_FID][0].text, "07/15")
assert_equal(tag.disc_num, (7, 15))
tag.disc_num = (7, 150)
assert_equal(tag.frame_set[frames.DISCNUM_FID][0].text, "007/150")
assert_equal(tag.disc_num, (7, 150))
tag.disc_num = (1, 7)
assert_equal(tag.frame_set[frames.DISCNUM_FID][0].text, "01/07")
assert_equal(tag.disc_num, (1, 7))
tag.disc_num = None
assert_equal(tag.disc_num, (None, None))
tag.disc_num = None, None
def testTagGenre():
from eyed3.id3 import Genre
tag = Tag()
assert_is_none(tag.genre)
try:
tag.genre = "Not Unicode"
except TypeError:
pass # expected
else:
assert_false("Non unicode genre, expected TypeError")
gobj = Genre(u"Hardcore")
tag.genre = u"Hardcore"
assert_equal(tag.genre.name, u"Hardcore")
assert_equal(tag.genre, gobj)
tag.genre = 130
assert_equal(tag.genre.id, 130)
assert_equal(tag.genre.name, u"Terror")
tag.genre = 0
assert_equal(tag.genre.id, 0)
assert_equal(tag.genre.name, u"Blues")
tag.genre = None
assert_is_none(tag.genre)
assert_is_none(tag.frame_set["TCON"])
def testTagUserTextFrames():
tag = Tag()
assert_equal(len(tag.user_text_frames), 0)
utf1 = tag.user_text_frames.set(u"Custom content")
assert_equal(tag.user_text_frames.get(u"").text, u"Custom content")
utf2 = tag.user_text_frames.set(u"Content custom", u"Desc1")
assert_equal(tag.user_text_frames.get(u"Desc1").text, u"Content custom")
assert_equal(len(tag.user_text_frames), 2)
utf3 = tag.user_text_frames.set(u"New content", u"")
assert_equal(tag.user_text_frames.get(u"").text, u"New content")
assert_equal(len(tag.user_text_frames), 2)
assert_equal(id(utf1), id(utf3))
assert_equal(tag.user_text_frames[0], utf1)
assert_equal(tag.user_text_frames[1], utf2)
assert_equal(tag.user_text_frames.get(u""), utf1)
assert_equal(tag.user_text_frames.get(u"Desc1"), utf2)
tag.user_text_frames.remove(u"")
assert_equal(len(tag.user_text_frames), 1)
tag.user_text_frames.remove(u"Desc1")
assert_equal(len(tag.user_text_frames), 0)
tag.user_text_frames.set(u"Foobazz", u"Desc2")
assert_equal(len(tag.user_text_frames), 1)
def testTagUrls():
tag = Tag()
url = "http://example.com/"
url2 = "http://sample.com/"
tag.commercial_url = url
assert_equal(tag.commercial_url, url)
tag.commercial_url = url2
assert_equal(tag.commercial_url, url2)
tag.commercial_url = None
assert_is_none(tag.commercial_url)
tag.copyright_url = url
assert_equal(tag.copyright_url, url)
tag.copyright_url = url2
assert_equal(tag.copyright_url, url2)
tag.copyright_url = None
assert_is_none(tag.copyright_url)
tag.audio_file_url = url
assert_equal(tag.audio_file_url, url)
tag.audio_file_url = url2
assert_equal(tag.audio_file_url, url2)
tag.audio_file_url = None
assert_is_none(tag.audio_file_url)
tag.audio_source_url = url
assert_equal(tag.audio_source_url, url)
tag.audio_source_url = url2
assert_equal(tag.audio_source_url, url2)
tag.audio_source_url = None
assert_is_none(tag.audio_source_url)
tag.artist_url = url
assert_equal(tag.artist_url, url)
tag.artist_url = url2
assert_equal(tag.artist_url, url2)
tag.artist_url = None
assert_is_none(tag.artist_url)
tag.internet_radio_url = url
assert_equal(tag.internet_radio_url, url)
tag.internet_radio_url = url2
assert_equal(tag.internet_radio_url, url2)
tag.internet_radio_url = None
assert_is_none(tag.internet_radio_url)
tag.payment_url = url
assert_equal(tag.payment_url, url)
tag.payment_url = url2
assert_equal(tag.payment_url, url2)
tag.payment_url = None
assert_is_none(tag.payment_url)
tag.publisher_url = url
assert_equal(tag.publisher_url, url)
tag.publisher_url = url2
assert_equal(tag.publisher_url, url2)
tag.publisher_url = None
assert_is_none(tag.publisher_url)
# Frame ID enforcement
assert_raises(ValueError, tag._setUrlFrame, "WDDD", "url")
assert_raises(ValueError, tag._getUrlFrame, "WDDD")
def testTagUniqIds():
tag = Tag()
assert_equal(len(tag.unique_file_ids), 0)
tag.unique_file_ids.set("http://music.com/12354", "test")
tag.unique_file_ids.set("1234", "http://eyed3.nicfit.net")
assert_equal(tag.unique_file_ids.get("test").uniq_id,
"http://music.com/12354")
assert_equal(tag.unique_file_ids.get("http://eyed3.nicfit.net").uniq_id,
"1234")
assert_equal(len(tag.unique_file_ids), 2)
tag.unique_file_ids.remove("test")
assert_equal(len(tag.unique_file_ids), 1)
tag.unique_file_ids.set("4321", "http://eyed3.nicfit.net")
assert_equal(len(tag.unique_file_ids), 1)
assert_equal(tag.unique_file_ids.get("http://eyed3.nicfit.net").uniq_id,
"4321")
def testTagUserUrls():
tag = Tag()
assert_equal(len(tag.user_url_frames), 0)
uuf1 = tag.user_url_frames.set("http://yo.yo.com/")
assert_equal(tag.user_url_frames.get(u"").url, "http://yo.yo.com/")
utf2 = tag.user_url_frames.set("http://run.dmc.org", u"URL")
assert_equal(tag.user_url_frames.get(u"URL").url, u"http://run.dmc.org")
assert_equal(len(tag.user_url_frames), 2)
utf3 = tag.user_url_frames.set("http://my.adidas.com", u"")
assert_equal(tag.user_url_frames.get(u"").url, "http://my.adidas.com")
assert_equal(len(tag.user_url_frames), 2)
assert_equal(id(uuf1), id(utf3))
assert_equal(tag.user_url_frames[0], uuf1)
assert_equal(tag.user_url_frames[1], utf2)
assert_equal(tag.user_url_frames.get(u""), uuf1)
assert_equal(tag.user_url_frames.get(u"URL"), utf2)
tag.user_url_frames.remove(u"")
assert_equal(len(tag.user_url_frames), 1)
tag.user_url_frames.remove(u"URL")
assert_equal(len(tag.user_url_frames), 0)
tag.user_url_frames.set("Foobazz", u"Desc2")
assert_equal(len(tag.user_url_frames), 1)
def testSortOrderConversions():
test_file = "/tmp/soconvert.id3"
tag = Tag()
# 2.3 frames to 2.4
for fid in ["XSOA", "XSOP", "XSOT"]:
frame = frames.TextFrame(fid)
frame.text = unicode(fid)
tag.frame_set[fid] = frame
try:
tag.save(test_file) # v2.4 is the default
tag = eyed3.load(test_file).tag
assert_equal(tag.version, ID3_V2_4)
assert_equal(len(tag.frame_set), 3)
del tag.frame_set["TSOA"]
del tag.frame_set["TSOP"]
del tag.frame_set["TSOT"]
assert_equal(len(tag.frame_set), 0)
finally:
os.remove(test_file)
tag = Tag()
# 2.4 frames to 2.3
for fid in ["TSOA", "TSOP", "TSOT"]:
frame = frames.TextFrame(fid)
frame.text = unicode(fid)
tag.frame_set[fid] = frame
try:
tag.save(test_file, version=eyed3.id3.ID3_V2_3)
tag = eyed3.load(test_file).tag
assert_equal(tag.version, ID3_V2_3)
assert_equal(len(tag.frame_set), 3)
del tag.frame_set["XSOA"]
del tag.frame_set["XSOP"]
del tag.frame_set["XSOT"]
assert_equal(len(tag.frame_set), 0)
finally:
os.remove(test_file)
def test_XDOR_TDOR_Conversions():
test_file = "/tmp/xdortdrc.id3"
tag = Tag()
# 2.3 frames to 2.4
frame = frames.DateFrame("XDOR", "1990-06-24")
tag.frame_set["XDOR"] = frame
try:
tag.save(test_file) # v2.4 is the default
tag = eyed3.load(test_file).tag
assert_equal(tag.version, ID3_V2_4)
assert_equal(len(tag.frame_set), 1)
del tag.frame_set["TDOR"]
assert_equal(len(tag.frame_set), 0)
finally:
os.remove(test_file)
tag = Tag()
# 2.4 frames to 2.3
frame = frames.DateFrame("TDRC", "2012-10-21")
tag.frame_set[frame.id] = frame
try:
tag.save(test_file, version=eyed3.id3.ID3_V2_3)
tag = eyed3.load(test_file).tag
assert_equal(tag.version, ID3_V2_3)
assert_equal(len(tag.frame_set), 2)
del tag.frame_set["TYER"]
del tag.frame_set["TDAT"]
assert_equal(len(tag.frame_set), 0)
finally:
os.remove(test_file)
@unittest.skipIf(not os.path.exists(DATA_D), "test requires data files")
def testChapterExampleTag():
tag = eyed3.load(os.path.join(DATA_D, "id3_chapters_example.mp3")).tag
assert_equal(len(tag.table_of_contents), 1)
toc = list(tag.table_of_contents)[0]
assert_equal(id(toc), id(tag.table_of_contents.get(toc.element_id)))
assert_equal(toc.element_id, "toc1")
assert_is_none(toc.description)
assert_true(toc.toplevel)
assert_true(toc.ordered)
assert_equal(toc.child_ids, ['ch1', 'ch2', 'ch3'])
assert_equal(tag.chapters.get("ch1").title, "start")
assert_equal(tag.chapters.get("ch1").subtitle, None)
assert_equal(tag.chapters.get("ch1").user_url, None)
assert_equal(tag.chapters.get("ch1").times, (0, 5000))
assert_equal(tag.chapters.get("ch1").offsets, (None, None))
assert_equal(len(tag.chapters.get("ch1").sub_frames), 1)
assert_equal(tag.chapters.get("ch2").title, "5 seconds")
assert_equal(tag.chapters.get("ch2").subtitle, None)
assert_equal(tag.chapters.get("ch2").user_url, None)
assert_equal(tag.chapters.get("ch2").times, (5000, 10000))
assert_equal(tag.chapters.get("ch2").offsets, (None, None))
assert_equal(len(tag.chapters.get("ch2").sub_frames), 1)
assert_equal(tag.chapters.get("ch3").title, "10 seconds")
assert_equal(tag.chapters.get("ch3").subtitle, None)
assert_equal(tag.chapters.get("ch3").user_url, None)
assert_equal(tag.chapters.get("ch3").times, (10000, 15000))
assert_equal(tag.chapters.get("ch3").offsets, (None, None))
assert_equal(len(tag.chapters.get("ch3").sub_frames), 1)
def testTableOfContents():
test_file = "/tmp/toc.id3"
t = Tag()
assert_equal(len(t.table_of_contents), 0)
toc_main = t.table_of_contents.set("main", toplevel=True,
child_ids=["c1", "c2", "c3", "c4"],
description=u"Table of Conents")
assert_is_not_none(toc_main)
assert_equal(len(t.table_of_contents), 1)
toc_dc = t.table_of_contents.set("director-cut", toplevel=False,
ordered=False,
child_ids=["d3", "d1", "d2"])
assert_is_not_none(toc_dc)
assert_equal(len(t.table_of_contents), 2)
toc_dummy = t.table_of_contents.set("test")
assert_equal(len(t.table_of_contents), 3)
t.table_of_contents.remove(toc_dummy.element_id)
assert_equal(len(t.table_of_contents), 2)
t.save(test_file)
try:
t2 = eyed3.load(test_file).tag
finally:
os.remove(test_file)
assert_equal(len(t.table_of_contents), 2)
assert_equal(t2.table_of_contents.get("main").toplevel, True)
assert_equal(t2.table_of_contents.get("main").ordered, True)
assert_equal(t2.table_of_contents.get("main").description,
toc_main.description)
assert_equal(t2.table_of_contents.get("main").child_ids, toc_main.child_ids)
assert_equal(t2.table_of_contents.get("director-cut").toplevel,
toc_dc.toplevel)
assert_equal(t2.table_of_contents.get("director-cut").ordered, False)
assert_equal(t2.table_of_contents.get("director-cut").description,
toc_dc.description)
assert_equal(t2.table_of_contents.get("director-cut").child_ids,
toc_dc.child_ids)
def testChapters():
test_file = "/tmp/chapters.id3"
t = Tag()
ch1 = t.chapters.set("c1", (0, 200))
ch2 = t.chapters.set("c2", (200, 300))
ch3 = t.chapters.set("c3", (300, 375))
ch4 = t.chapters.set("c4", (375, 600))
assert_equal(len(t.chapters), 4)
for i, c in enumerate(iter(t.chapters), 1):
if i != 2:
c.title = u"Chapter %d" % i
c.subtitle = u"Subtitle %d" % i
c.user_url = "http://example.com/%d" % i
t.save(test_file)
try:
t2 = eyed3.load(test_file).tag
finally:
os.remove(test_file)
assert_equal(len(t2.chapters), 4)
for i in range(1, 5):
c = t2.chapters.get("c%d" % i)
if i == 2:
assert_is_none(c.title)
assert_is_none(c.subtitle)
assert_is_none(c.user_url)
else:
assert_equal(c.title, u"Chapter %d" % i)
assert_equal(c.subtitle, u"Subtitle %d" % i)
assert_equal(c.user_url, u"http://example.com/%d" % i)
def testReadOnly():
assert_false(Tag.read_only)
t = Tag()
assert_false(t.read_only)
t.read_only = True
assert_raises(RuntimeError, t.save)
assert_raises(RuntimeError, t._saveV1Tag, None)
assert_raises(RuntimeError, t._saveV2Tag, None, None, None)
|
gamahead/nupic
|
refs/heads/master
|
nupic/regions/ImageSensorExplorers/ExhaustiveSweep.py
|
17
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import math
from nupic.regions.ImageSensorExplorers.BaseExplorer import BaseExplorer
class ExhaustiveSweep(BaseExplorer):
"""
This explorer performs an exhaustive raster scan through the input space.
By default, it iterates through images, filters, and sweep positions, with
sweep positions as the inner loop.
"""
def __init__(self, sweepDirections=["right", "down"], shiftDuringSweep=1,
shiftBetweenSweeps=1, sweepOffObject=False, order=None, *args, **kwargs):
"""
sweepDirections -- Directions for sweeping (a list containing one or
more of 'left', 'right', 'up', and 'down').
shiftDuringSweep -- Number of pixels to jump with each step (during a
sweep).
shiftBetweenSweeps -- Number of pixels to jump in between sweeps
(for example, when moving down a line after sweeping across).
    sweepOffObject -- Whether the sensor may visit positions that include
      only part of the object, as specified by the bounding box. If False,
      it only moves to positions that include as much of the object as
      possible. If True, it sweeps until the object has moved entirely off
      the sensor. If set to a floating point number between 0 and 1, it
      sweeps until that fraction of the object has moved off the sensor.
order -- Order in which to iterate (outer to inner). Default progresses
through switching images, filters, and sweeping, where switching images
is the outer loop and sweeping is the inner loop. Should be a list
containing 'image', 'sweep', and 0, 1, ... numFilters-1.
"""
BaseExplorer.__init__(self, *args, **kwargs)
for direction in sweepDirections:
if direction not in ('left', 'right', 'up', 'down'):
raise RuntimeError("Unknown sweep direction: '%s'" % direction)
if type(shiftDuringSweep) is not int:
raise RuntimeError("'shiftDuringSweep' must be an integer")
if type(shiftBetweenSweeps) is not int:
raise RuntimeError("'shiftBetweenSweeps' must be an integer")
if float(sweepOffObject) < 0 or float(sweepOffObject) > 1.0:
raise RuntimeError("'sweepOffObject' should be a boolean, or floating point"
" number between 0 and 1")
if order is not None:
if 'image' not in order or 'sweep' not in order:
raise RuntimeError("'order' must contain both 'image' and 'sweep'")
if len([x for x in order if type(x) == str]) > 2:
raise RuntimeError("'order' must contain no other strings besides "
"'image' and 'sweep'")
self.customOrder = True
else:
self.customOrder = False
self.sweepDirections = sweepDirections
self.shiftDuringSweep = shiftDuringSweep
self.shiftBetweenSweeps = shiftBetweenSweeps
self.sweepOffObject = sweepOffObject
self.order = order
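  # Example (illustrative): with two filters, order=['image', 0, 1, 'sweep']
  # reproduces the default iteration -- sweep positions change fastest, then
  # filter 1, then filter 0, and the image index changes slowest.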
def first(self):
"""
Set up the position.
BaseExplorer picks image 0, offset (0,0), etc., but explorers that wish
to set a different first position should extend this method. Such explorers
may wish to call BaseExplorer.first(center=False), which initializes the
position tuple but does not call centerImage() (which could cause
unnecessary filtering to occur).
"""
BaseExplorer.first(self)
self.directionIndex = 0
if self.numImages:
self._firstSweepPosition()
def next(self, seeking=False):
"""
Go to the next position (next iteration).
seeking -- Boolean that indicates whether the explorer is calling next()
from seek(). If True, the explorer should avoid unnecessary computation
that would not affect the seek command. The last call to next() from
seek() will be with seeking=False.
"""
BaseExplorer.next(self)
# If filters were changed, order may be invalid
if self.order is None or \
len([x for x in self.order if type(x) == int]) != self.numFilters:
# If user did not set a custom order, just create new one automatically
if not self.customOrder:
self.order = ["image"]
self.order.extend(range(self.numFilters))
self.order += ["sweep"]
# Otherwise, user needs to recreate the explorer with a new order
else:
raise RuntimeError("'order' is invalid. Must recreate explorer with "
"valid order after changing filters.")
if self.position['reset'] and self.blankWithReset:
# Last iteration was a blank, so don't increment the position
self.position['reset'] = False
else:
      self.position['reset'] = False
      nextImage = False  # guard against UnboundLocalError if 'sweep' is never reached
      for x in reversed(self.order):
if x == 'image': # Iterate the image
self.position['image'] += 1
if self.position['image'] == self.numImages:
self.position['image'] = 0
self.position['reset'] = True
else:
break
elif x == 'sweep': # Iterate the sweep position
nextImage = self._nextSweepPosition()
if not nextImage:
break
else: # Iterate the filter with index x
self.position['filters'][x] += 1
if self.position['filters'][x] == self.numFilterOutputs[x]:
self.position['filters'][x] = 0
self.position['reset'] = True
else:
break
if nextImage:
self._firstSweepPosition()
def getNumIterations(self, image):
"""
Get the number of iterations required to completely explore the input space.
Explorers that do not wish to support this method should not override it.
image -- If None, returns the sum of the iterations for all the loaded
images. Otherwise, image should be an integer specifying the image for
which to calculate iterations.
ImageSensor takes care of the input validation.
"""
if image is None:
filteredImages = []
for i in xrange(self.numImages):
filteredImages.extend(self.getAllFilteredVersionsOfImage(i))
else:
filteredImages = self.getAllFilteredVersionsOfImage(image)
return sum([self._getNumIterationsForImage(x[0]) for x in filteredImages])
def _firstSweepPosition(self):
"""
Go to the first sweep position for the current image and sweep direction.
"""
sbbox = self._getSweepBoundingBox(self.getFilteredImages()[0])
direction = self.sweepDirections[self.directionIndex]
if direction in ('right', 'down'):
self.position['offset'][0] = sbbox[0]
self.position['offset'][1] = sbbox[1]
elif direction == 'left':
self.position['offset'][0] = sbbox[2] - 1
self.position['offset'][1] = sbbox[1]
elif direction == 'up':
self.position['offset'][0] = sbbox[0]
self.position['offset'][1] = sbbox[3] - 1
def _nextSweepPosition(self):
"""
Increment the sweep position.
Return True (nextImage) if we exhausted all sweeps.
"""
sbbox = self._getSweepBoundingBox(self.getFilteredImages()[0])
direction = self.sweepDirections[self.directionIndex]
nextDirection = False
if direction == 'right':
self.position['offset'][0] += self.shiftDuringSweep
if self.position['offset'][0] >= sbbox[2]:
self.position['reset'] = True
self.position['offset'][0] = sbbox[0]
self.position['offset'][1] += self.shiftBetweenSweeps
if self.position['offset'][1] >= sbbox[3]:
nextDirection = True
elif direction == 'left':
self.position['offset'][0] -= self.shiftDuringSweep
if self.position['offset'][0] < sbbox[0]:
self.position['reset'] = True
self.position['offset'][0] = sbbox[2] - 1
self.position['offset'][1] += self.shiftBetweenSweeps
if self.position['offset'][1] >= sbbox[3]:
nextDirection = True
elif direction == 'down':
self.position['offset'][1] += self.shiftDuringSweep
if self.position['offset'][1] >= sbbox[3]:
self.position['reset'] = True
self.position['offset'][1] = sbbox[1]
self.position['offset'][0] += self.shiftBetweenSweeps
if self.position['offset'][0] >= sbbox[2]:
nextDirection = True
elif direction == 'up':
self.position['offset'][1] -= self.shiftDuringSweep
if self.position['offset'][1] < sbbox[1]:
self.position['reset'] = True
self.position['offset'][1] = sbbox[3] - 1
self.position['offset'][0] += self.shiftBetweenSweeps
if self.position['offset'][0] >= sbbox[2]:
nextDirection = True
if nextDirection:
self.directionIndex += 1
if self.directionIndex == len(self.sweepDirections):
self.directionIndex = 0
return True # Go to next image
self._firstSweepPosition()
return False
def _getSweepBoundingBox(self, image):
"""
Calculate a 'sweep bounding box' from the image's bounding box.
If 'sbbox' is the bounding box returned from this method, valid sweep
positions [x,y] are bounded by sbbox[0] <= x < sbbox[2] and
sbbox[1] <= y < sbbox[3].
"""
bbox = image.split()[1].getbbox()
# If alpha channel is completely empty, we will end up
# with a bbox of 'None'. Nothing much we can do
if bbox is None:
bbox = (0, 0, 1, 1)
#bbox = (0, 0, image.size[0], image.size[1])
print 'WARNING: empty alpha channel'
if float(self.sweepOffObject) == 1.0:
startX = bbox[0] - self.enabledWidth + 1
startY = bbox[1] - self.enabledHeight + 1
endX = bbox[2]
endY = bbox[3]
else:
# Shrink the bbox based on the amount of the object we want to sweep off
width = bbox[2] - bbox[0]
height = bbox[3] - bbox[1]
bbox = [int(round(bbox[0] + width*self.sweepOffObject)),
int(round(bbox[1] + height*self.sweepOffObject)),
int(round(bbox[2] - width*self.sweepOffObject)),
int(round(bbox[3] - height*self.sweepOffObject))]
startX = min(bbox[0], bbox[2] - self.enabledWidth)
startY = min(bbox[1], bbox[3] - self.enabledHeight)
endX = max(bbox[0], bbox[2] - self.enabledWidth) + 1
endY = max(bbox[1], bbox[3] - self.enabledHeight) + 1
return (startX, startY, endX, endY)
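  # Worked example (illustrative numbers): for an object bbox of
  # (10, 20, 110, 100), a 32x32 enabled region and sweepOffObject=False,
  # the bbox is not shrunk, so
  #   startX = min(10, 110 - 32) = 10,  endX = max(10, 78) + 1 = 79
  #   startY = min(20, 100 - 32) = 20,  endY = max(20, 68) + 1 = 69
  # i.e. sweep positions 10 <= x < 79 and 20 <= y < 69 keep the sensor
  # entirely on the object.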
def _getNumIterationsForImage(self, image):
"""
Return the number of iterations for the image, given the current parameters.
"""
sbbox = self._getSweepBoundingBox(image)
stepsX = sbbox[2] - sbbox[0]
stepsY = sbbox[3] - sbbox[1]
numIterations = 0
for direction in self.sweepDirections:
if direction in ('left', 'right'):
across = int(math.ceil(stepsX / float(self.shiftDuringSweep)))
down = int(math.ceil(stepsY / float(self.shiftBetweenSweeps)))
if self.blankWithReset:
across += 1
elif direction in ('up', 'down'):
across = int(math.ceil(stepsX / float(self.shiftBetweenSweeps)))
down = int(math.ceil(stepsY / float(self.shiftDuringSweep)))
if self.blankWithReset:
down += 1
numIterations += across*down
return numIterations
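  # Example (illustrative): with stepsX=69, stepsY=49, shiftDuringSweep=2,
  # shiftBetweenSweeps=10 and a single 'right' direction,
  #   across = ceil(69 / 2.0) = 35,  down = ceil(49 / 10.0) = 5
  # so the image contributes 35 * 5 = 175 iterations (with blankWithReset,
  # one blank is added per row: 36 * 5 = 180).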
|
ppn029012/One-man-band
|
refs/heads/master
|
hackcamp/taps.py
|
2
|
import numpy as np
#import pyaudio
import struct
import math
# import pylab as pl
import time
import pygame as pg
import pygame.mixer as pm
import alsaaudio, time, audioop
pm.init()
string=['a','b','c','d']
channela=pm.Sound('beat.wav')
channelb=pm.Sound('hihat.wav')
channelc=pm.Sound(string[0]+'.wav')
channeld=pm.Sound(string[1]+'.wav')
channele=pm.Sound(string[2]+'.wav')
channelf=pm.Sound(string[3]+'.wav')
channelg=pm.Sound(string[0]+'b'+'.wav')
channelh=pm.Sound(string[1]+'b'+'.wav')
INITIAL_TAP_THRESHOLD = 0.510
inp = alsaaudio.PCM(alsaaudio.PCM_CAPTURE,alsaaudio.PCM_NONBLOCK)
inp.setchannels(1)
inp.setformat(alsaaudio.PCM_FORMAT_S16_LE)
SHORT_NORMALIZE = (1.0/32768.0)
inp.setrate(44100)
INPUT_BLOCK_TIME = 0.05
#INPUT_FRAMES_PER_BLOCK = int(RATE*INPUT_BLOCK_TIME)
inp.setperiodsize(160)
# inp.setperiodsize(int(44100*INPUT_BLOCK_TIME))
# if we get this many noisy blocks in a row, increase the threshold
OVERSENSITIVE = 15.0/INPUT_BLOCK_TIME
# if we get this many quiet blocks in a row, decrease the threshold
UNDERSENSITIVE = 120.0/INPUT_BLOCK_TIME
# if the noise was longer than this many blocks, it's not a 'tap'
MAX_TAP_BLOCKS = 0.15/INPUT_BLOCK_TIME
def get_rms( shorts ):
# RMS amplitude is defined as the square root of the
# mean over time of the square of the amplitude.
    # 'shorts' here is already a sequence of signed 16-bit samples
    # (the caller struct-unpacks the raw block), so the mean is taken
    # over len(shorts) samples.
    # iterate over the block.
    sum_squares = 0.0
    count = len(shorts)
for sample in shorts:
# sample is a signed short in +/- 32768.
# normalize it to 1.0
n = sample * SHORT_NORMALIZE
sum_squares += n*n
return math.sqrt( sum_squares / count )
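# Worked example (illustrative): two normalized samples 0.5 and -0.5 give
# sum_squares = 0.25 + 0.25 = 0.5, so RMS = sqrt(0.5 / 2) = 0.5.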
class TapTester(object):
def __init__(self):
#self.pa = pyaudio.PyAudio()
#self.stream = self.open_mic_stream()
self.tap_threshold = INITIAL_TAP_THRESHOLD
self.noisycount = MAX_TAP_BLOCKS+1
self.quietcount = 0
self.errorcount = 0
def stop(self):
self.stream.close()
def tapDetected(self):
print "Tap!"
def listen(self):
try:
# block = self.stream.read(INPUT_FRAMES_PER_BLOCK)
l,data = inp.read()
print "DATA:"
print data
except IOError, e:
# dammit.
self.errorcount += 1
#print( "(%d) Error recording: %s"%(self.errorcount,e) )
self.noisycount = 1
return
# amplitude = get_rms( data )
amplitude = 10
if l:
count = len(data)/2
format = "%dh"%(count)
shorts = struct.unpack( format, data )
print shorts
amp = np.fft.fft(shorts)
N = len(amp)
            amp = 20*np.log(np.abs(amp))  # magnitude spectrum; raw FFT output is complex
amp = amp[1:N/2]
amp_max = max(amp[1:])
thre = 0.8*amp_max
ploc = [i for i in range(1,len(amp)-1) if amp[i]>amp[i-1] and amp[i]>amp[i+1] and amp[i]>thre]
if len(ploc)>=4:
ploc_freq = [i*44100/1024 for i in ploc]
if ploc_freq[0] <= 258:
#ceg
string[0]='c3'
string[1]='e3'
string[2]='g3'
string[3]='c4'
channela.set_volume(0.2)
elif ploc_freq[0] <= 301:
#e
string[0]='e3'
string[1]='ga3'
string[2]='b4'
string[3]='e4'
channela.set_volume(0.3)
elif ploc_freq[0] <= 344:
#fg
string[0]='fg3'
string[1]='ab4'
string[2]='cd4'
string[3]='fg4'
channela.set_volume(0.6)
elif ploc_freq[0] >= 387:
#ga
string[0]='ga3'
string[1]='c4'
string[2]='de4'
string[3]='ga4'
channela.set_volume(1.0)
pm.music.load(string[0]+'b'+'.wav')
pm.music.play()
channela.play()
channelb.play()
channelc.play()
channeld.play()
channelh.play()
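# Illustrative mapping for the thresholds above (using the 44100/1024 bin
# width assumed in ploc_freq): bin i sits at i*44100/1024 Hz, so the chord
# boundaries 258/301/344/387 Hz correspond to spectral bins 6, 7, 8 and 9.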
if __name__ == "__main__":
# pl.ion()
# fig = pl.figure()
pm.init()
tt = TapTester()
for i in range(1000):
tt.listen()
|
jcftang/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/nxos/nxos_ospf_vrf.py
|
8
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: nxos_ospf_vrf
version_added: "2.2"
short_description: Manages a VRF for an OSPF router.
description:
- Manages a VRF for an OSPF router.
author: Gabriele Gerbino (@GGabriele)
extends_documentation_fragment: nxos
notes:
    - Value I(default) restores the parameter's default value, if any.
      Otherwise it removes the existing parameter configuration.
options:
vrf:
description:
- Name of the resource instance. Valid value is a string.
The name 'default' is a valid VRF representing the global OSPF.
required: false
default: default
ospf:
description:
- Name of the OSPF instance.
required: true
default: null
router_id:
description:
- Router Identifier (ID) of the OSPF router VRF instance.
required: false
default: null
default_metric:
description:
- Specify the default Metric value. Valid values are an integer
or the keyword 'default'.
required: false
default: null
log_adjacency:
description:
- Controls the level of log messages generated whenever a
neighbor changes state. Valid values are 'log', 'detail',
and 'default'.
required: false
choices: ['log','detail','default']
default: null
timer_throttle_lsa_start:
description:
- Specify the start interval for rate-limiting Link-State
Advertisement (LSA) generation. Valid values are an integer,
in milliseconds, or the keyword 'default'.
required: false
default: null
timer_throttle_lsa_hold:
description:
- Specify the hold interval for rate-limiting Link-State
Advertisement (LSA) generation. Valid values are an integer,
in milliseconds, or the keyword 'default'.
required: false
default: null
timer_throttle_lsa_max:
description:
- Specify the max interval for rate-limiting Link-State
Advertisement (LSA) generation. Valid values are an integer,
in milliseconds, or the keyword 'default'.
required: false
default: null
timer_throttle_spf_start:
description:
- Specify initial Shortest Path First (SPF) schedule delay.
Valid values are an integer, in milliseconds, or
the keyword 'default'.
required: false
default: null
timer_throttle_spf_hold:
description:
- Specify minimum hold time between Shortest Path First (SPF)
calculations. Valid values are an integer, in milliseconds,
or the keyword 'default'.
required: false
default: null
timer_throttle_spf_max:
description:
- Specify the maximum wait time between Shortest Path First (SPF)
calculations. Valid values are an integer, in milliseconds,
or the keyword 'default'.
required: false
default: null
auto_cost:
description:
- Specifies the reference bandwidth used to assign OSPF cost.
Valid values are an integer, in Mbps, or the keyword 'default'.
required: false
default: null
'''
EXAMPLES = '''
- nxos_ospf_vrf:
ospf: 1
timer_throttle_spf_start: 50
timer_throttle_spf_hold: 1000
timer_throttle_spf_max: 2000
timer_throttle_lsa_start: 60
timer_throttle_lsa_hold: 1100
timer_throttle_lsa_max: 3000
vrf: test
state: present
username: "{{ un }}"
password: "{{ pwd }}"
host: "{{ inventory_hostname }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: verbose mode
type: dict
sample: {"ospf": "1", "timer_throttle_lsa_hold": "1100",
"timer_throttle_lsa_max": "3000", "timer_throttle_lsa_start": "60",
"timer_throttle_spf_hold": "1000",
"timer_throttle_spf_max": "2000", "timer_throttle_spf_start": "50",
"vrf": "test"}
existing:
description: k/v pairs of existing configuration
returned: verbose mode
type: dict
sample: {"auto_cost": "40000", "default_metric": "", "log_adjacency": "",
"ospf": "1", "router_id": "", "timer_throttle_lsa_hold": "5000",
"timer_throttle_lsa_max": "5000", "timer_throttle_lsa_start": "0",
"timer_throttle_spf_hold": "1000",
"timer_throttle_spf_max": "5000",
"timer_throttle_spf_start": "200", "vrf": "test"}
end_state:
description: k/v pairs of configuration after module execution
returned: verbose mode
type: dict
sample: {"auto_cost": "40000", "default_metric": "", "log_adjacency": "",
"ospf": "1", "router_id": "", "timer_throttle_lsa_hold": "1100",
"timer_throttle_lsa_max": "3000", "timer_throttle_lsa_start": "60",
"timer_throttle_spf_hold": "1000",
"timer_throttle_spf_max": "2000", "timer_throttle_spf_start": "50",
"vrf": "test"}
updates:
description: commands sent to the device
returned: always
type: list
sample: ["router ospf 1", "vrf test", "timers throttle lsa 60 1100 3000",
"timers throttle spf 50 1000 2000"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
# COMMON CODE FOR MIGRATION
import re
import ansible.module_utils.nxos
from ansible.module_utils.basic import get_exception
from ansible.module_utils.netcfg import NetworkConfig, ConfigLine
from ansible.module_utils.network import NetworkModule
from ansible.module_utils.shell import ShellError
def to_list(val):
if isinstance(val, (list, tuple)):
return list(val)
elif val is not None:
return [val]
else:
return list()
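# For reference, to_list normalizes arbitrary argument shapes:
#     to_list('line')     -> ['line']
#     to_list(('a', 'b')) -> ['a', 'b']
#     to_list(None)       -> []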
class CustomNetworkConfig(NetworkConfig):
def expand_section(self, configobj, S=None):
if S is None:
S = list()
S.append(configobj)
for child in configobj.children:
if child in S:
continue
self.expand_section(child, S)
return S
def get_object(self, path):
for item in self.items:
if item.text == path[-1]:
parents = [p.text for p in item.parents]
if parents == path[:-1]:
return item
def to_block(self, section):
return '\n'.join([item.raw for item in section])
def get_section(self, path):
try:
section = self.get_section_objects(path)
return self.to_block(section)
except ValueError:
return list()
def get_section_objects(self, path):
if not isinstance(path, list):
path = [path]
obj = self.get_object(path)
if not obj:
raise ValueError('path does not exist in config')
return self.expand_section(obj)
def add(self, lines, parents=None):
"""Adds one or lines of configuration
"""
ancestors = list()
offset = 0
obj = None
## global config command
if not parents:
for line in to_list(lines):
item = ConfigLine(line)
item.raw = line
if item not in self.items:
self.items.append(item)
else:
for index, p in enumerate(parents):
try:
i = index + 1
obj = self.get_section_objects(parents[:i])[0]
ancestors.append(obj)
except ValueError:
# add parent to config
offset = index * self.indent
obj = ConfigLine(p)
obj.raw = p.rjust(len(p) + offset)
if ancestors:
obj.parents = list(ancestors)
ancestors[-1].children.append(obj)
self.items.append(obj)
ancestors.append(obj)
# add child objects
for line in to_list(lines):
# check if child already exists
for child in ancestors[-1].children:
if child.text == line:
break
else:
offset = len(parents) * self.indent
item = ConfigLine(line)
item.raw = line.rjust(len(line) + offset)
item.parents = ancestors
ancestors[-1].children.append(item)
self.items.append(item)
def get_network_module(**kwargs):
try:
return get_module(**kwargs)
except NameError:
return NetworkModule(**kwargs)
def get_config(module, include_defaults=False):
config = module.params['config']
if not config:
try:
config = module.get_config()
except AttributeError:
defaults = module.params['include_defaults']
config = module.config.get_config(include_defaults=defaults)
return CustomNetworkConfig(indent=2, contents=config)
def load_config(module, candidate):
config = get_config(module)
commands = candidate.difference(config)
commands = [str(c).strip() for c in commands]
save_config = module.params['save']
result = dict(changed=False)
if commands:
if not module.check_mode:
try:
module.configure(commands)
except AttributeError:
module.config(commands)
if save_config:
try:
module.config.save_config()
except AttributeError:
module.execute(['copy running-config startup-config'])
result['changed'] = True
result['updates'] = commands
return result
# END OF COMMON CODE
PARAM_TO_COMMAND_KEYMAP = {
'router_id': 'router-id',
'default_metric': 'default-metric',
'log_adjacency': 'log-adjacency-changes',
'timer_throttle_lsa_start': 'timers throttle lsa',
'timer_throttle_lsa_max': 'timers throttle lsa',
'timer_throttle_lsa_hold': 'timers throttle lsa',
'timer_throttle_spf_max': 'timers throttle spf',
'timer_throttle_spf_start': 'timers throttle spf',
'timer_throttle_spf_hold': 'timers throttle spf',
'auto_cost': 'auto-cost reference-bandwidth'
}
PARAM_TO_DEFAULT_KEYMAP = {
'timer_throttle_lsa_start': '0',
'timer_throttle_lsa_max': '5000',
'timer_throttle_lsa_hold': '5000',
'timer_throttle_spf_start': '200',
'timer_throttle_spf_max': '5000',
'timer_throttle_spf_hold': '1000',
'auto_cost': '40000'
}
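# When a parameter is passed as the literal string 'default', main()
# swaps in the device default above (e.g. timer_throttle_lsa_hold:
# default -> '5000'); parameters with no entry in this map keep the
# value 'default', which state_present turns into a 'no ...' command
# removing the existing configuration.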
def invoke(name, *args, **kwargs):
func = globals().get(name)
if func:
return func(*args, **kwargs)
def get_value(arg, config, module):
REGEX = re.compile(r'(?:{0}\s)(?P<value>.*)$'.format(PARAM_TO_COMMAND_KEYMAP[arg]), re.M)
value = ''
if PARAM_TO_COMMAND_KEYMAP[arg] in config:
if arg == 'log_adjacency':
if 'log-adjacency-changes detail' in config:
value = 'detail'
else:
value = 'log'
else:
value_list = REGEX.search(config).group('value').split()
if 'hold' in arg:
value = value_list[1]
elif 'max' in arg:
value = value_list[2]
elif 'auto' in arg:
if 'Gbps' in value_list:
value = str(int(value_list[0]) * 1000)
else:
value = value_list[0]
else:
value = value_list[0]
return value
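# Worked example for get_value: given the config line
# 'timers throttle lsa 60 1100 3000', the regex captures
# '60 1100 3000', so timer_throttle_lsa_start -> '60' (value_list[0]),
# timer_throttle_lsa_hold -> '1100' (value_list[1]) and
# timer_throttle_lsa_max -> '3000' (value_list[2]).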
def get_existing(module, args):
existing = {}
netcfg = get_config(module)
parents = ['router ospf {0}'.format(module.params['ospf'])]
if module.params['vrf'] != 'default':
parents.append('vrf {0}'.format(module.params['vrf']))
config = netcfg.get_section(parents)
if config:
if module.params['vrf'] == 'default':
splitted_config = config.splitlines()
vrf_index = False
for index in range(0, len(splitted_config) - 1):
if 'vrf' in splitted_config[index].strip():
vrf_index = index
break
if vrf_index:
config = '\n'.join(splitted_config[0:vrf_index])
for arg in args:
if arg not in ['ospf', 'vrf']:
existing[arg] = get_value(arg, config, module)
existing['vrf'] = module.params['vrf']
existing['ospf'] = module.params['ospf']
return existing
def apply_key_map(key_map, table):
    new_dict = {}
    for key, value in table.items():
        new_key = key_map.get(key)
        if new_key:
            new_dict[new_key] = value
    return new_dict
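# Example: apply_key_map(PARAM_TO_COMMAND_KEYMAP,
#                        {'router_id': '1.1.1.1', 'vrf': 'test'})
# returns {'router-id': '1.1.1.1'}; keys absent from the map are
# dropped, which is how module arguments become CLI keywords.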
def state_present(module, existing, proposed, candidate):
commands = list()
proposed_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, proposed)
existing_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, existing)
for key, value in proposed_commands.items():
if value is True:
commands.append(key)
elif value is False:
commands.append('no {0}'.format(key))
elif value == 'default':
if existing_commands.get(key):
existing_value = existing_commands.get(key)
commands.append('no {0} {1}'.format(key, existing_value))
else:
if key == 'timers throttle lsa':
command = '{0} {1} {2} {3}'.format(
key,
proposed['timer_throttle_lsa_start'],
proposed['timer_throttle_lsa_hold'],
proposed['timer_throttle_lsa_max'])
elif key == 'timers throttle spf':
command = '{0} {1} {2} {3}'.format(
key,
proposed['timer_throttle_spf_start'],
proposed['timer_throttle_spf_hold'],
proposed['timer_throttle_spf_max'])
elif key == 'log-adjacency-changes':
if value == 'log':
command = key
elif value == 'detail':
command = '{0} {1}'.format(key, value)
elif key == 'auto-cost reference-bandwidth':
if len(value) < 5:
command = '{0} {1} Mbps'.format(key, value)
else:
value = str(int(value) / 1000)
command = '{0} {1} Gbps'.format(key, value)
else:
command = '{0} {1}'.format(key, value.lower())
if command not in commands:
commands.append(command)
if commands:
parents = ['router ospf {0}'.format(module.params['ospf'])]
if module.params['vrf'] != 'default':
parents.append('vrf {0}'.format(module.params['vrf']))
candidate.add(commands, parents=parents)
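# Sketch of the assembly above, using the values from the EXAMPLES
# block: the three timer_throttle_lsa_* parameters (60/1100/3000)
# collapse into the single command 'timers throttle lsa 60 1100 3000'
# under parents ['router ospf 1', 'vrf test'], matching the 'updates'
# sample in RETURN.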
def state_absent(module, existing, proposed, candidate):
commands = []
parents = ['router ospf {0}'.format(module.params['ospf'])]
if module.params['vrf'] == 'default':
existing_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, existing)
for key, value in existing_commands.items():
if value:
if key == 'timers throttle lsa':
command = 'no {0} {1} {2} {3}'.format(
key,
existing['timer_throttle_lsa_start'],
existing['timer_throttle_lsa_hold'],
existing['timer_throttle_lsa_max'])
elif key == 'timers throttle spf':
command = 'no {0} {1} {2} {3}'.format(
key,
existing['timer_throttle_spf_start'],
existing['timer_throttle_spf_hold'],
existing['timer_throttle_spf_max'])
else:
existing_value = existing_commands.get(key)
command = 'no {0} {1}'.format(key, existing_value)
if command not in commands:
commands.append(command)
else:
commands = ['no vrf {0}'.format(module.params['vrf'])]
candidate.add(commands, parents=parents)
def main():
argument_spec = dict(
vrf=dict(required=False, type='str', default='default'),
ospf=dict(required=True, type='str'),
router_id=dict(required=False, type='str'),
default_metric=dict(required=False, type='str'),
log_adjacency=dict(required=False, type='str',
choices=['log', 'detail', 'default']),
timer_throttle_lsa_start=dict(required=False, type='str'),
timer_throttle_lsa_hold=dict(required=False, type='str'),
timer_throttle_lsa_max=dict(required=False, type='str'),
timer_throttle_spf_start=dict(required=False, type='str'),
timer_throttle_spf_hold=dict(required=False, type='str'),
timer_throttle_spf_max=dict(required=False, type='str'),
auto_cost=dict(required=False, type='str'),
state=dict(choices=['present', 'absent'], default='present',
required=False),
include_defaults=dict(default=True),
config=dict(),
save=dict(type='bool', default=False)
)
module = get_network_module(argument_spec=argument_spec,
supports_check_mode=True)
state = module.params['state']
args = [
'vrf',
'ospf',
'router_id',
'default_metric',
'log_adjacency',
'timer_throttle_lsa_start',
'timer_throttle_lsa_hold',
'timer_throttle_lsa_max',
'timer_throttle_spf_start',
'timer_throttle_spf_hold',
'timer_throttle_spf_max',
'auto_cost'
]
existing = invoke('get_existing', module, args)
end_state = existing
proposed_args = dict((k, v) for k, v in module.params.items()
if v is not None and k in args)
proposed = {}
for key, value in proposed_args.items():
if key != 'interface':
if str(value).lower() == 'true':
value = True
elif str(value).lower() == 'false':
value = False
elif str(value).lower() == 'default':
value = PARAM_TO_DEFAULT_KEYMAP.get(key)
if value is None:
value = 'default'
if existing.get(key) or (not existing.get(key) and value):
proposed[key] = value
result = {}
if state == 'present' or (state == 'absent' and existing):
candidate = CustomNetworkConfig(indent=3)
invoke('state_%s' % state, module, existing, proposed, candidate)
try:
response = load_config(module, candidate)
result.update(response)
except ShellError:
exc = get_exception()
module.fail_json(msg=str(exc))
else:
result['updates'] = []
result['connected'] = module.connected
if module._verbosity > 0:
end_state = invoke('get_existing', module, args)
result['end_state'] = end_state
result['existing'] = existing
result['proposed'] = proposed_args
module.exit_json(**result)
if __name__ == '__main__':
main()
|
slingcoin/sling-market
|
refs/heads/master
|
test/functional/importprunedfunds.py
|
34
|
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the importprunedfunds and removeprunedfunds RPCs."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class ImportPrunedFundsTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 2
def run_test(self):
self.log.info("Mining blocks...")
self.nodes[0].generate(101)
self.sync_all()
# address
address1 = self.nodes[0].getnewaddress()
# pubkey
address2 = self.nodes[0].getnewaddress()
address2_pubkey = self.nodes[0].validateaddress(address2)['pubkey'] # Using pubkey
# privkey
address3 = self.nodes[0].getnewaddress()
address3_privkey = self.nodes[0].dumpprivkey(address3) # Using privkey
#Check only one address
address_info = self.nodes[0].validateaddress(address1)
assert_equal(address_info['ismine'], True)
self.sync_all()
#Node 1 sync test
assert_equal(self.nodes[1].getblockcount(),101)
#Address Test - before import
address_info = self.nodes[1].validateaddress(address1)
assert_equal(address_info['iswatchonly'], False)
assert_equal(address_info['ismine'], False)
address_info = self.nodes[1].validateaddress(address2)
assert_equal(address_info['iswatchonly'], False)
assert_equal(address_info['ismine'], False)
address_info = self.nodes[1].validateaddress(address3)
assert_equal(address_info['iswatchonly'], False)
assert_equal(address_info['ismine'], False)
#Send funds to self
txnid1 = self.nodes[0].sendtoaddress(address1, 0.1)
self.nodes[0].generate(1)
rawtxn1 = self.nodes[0].gettransaction(txnid1)['hex']
proof1 = self.nodes[0].gettxoutproof([txnid1])
txnid2 = self.nodes[0].sendtoaddress(address2, 0.05)
self.nodes[0].generate(1)
rawtxn2 = self.nodes[0].gettransaction(txnid2)['hex']
proof2 = self.nodes[0].gettxoutproof([txnid2])
txnid3 = self.nodes[0].sendtoaddress(address3, 0.025)
self.nodes[0].generate(1)
rawtxn3 = self.nodes[0].gettransaction(txnid3)['hex']
proof3 = self.nodes[0].gettxoutproof([txnid3])
self.sync_all()
#Import with no affiliated address
assert_raises_jsonrpc(-5, "No addresses", self.nodes[1].importprunedfunds, rawtxn1, proof1)
balance1 = self.nodes[1].getbalance("", 0, True)
assert_equal(balance1, Decimal(0))
#Import with affiliated address with no rescan
self.nodes[1].importaddress(address2, "add2", False)
result2 = self.nodes[1].importprunedfunds(rawtxn2, proof2)
balance2 = self.nodes[1].getbalance("add2", 0, True)
assert_equal(balance2, Decimal('0.05'))
#Import with private key with no rescan
self.nodes[1].importprivkey(address3_privkey, "add3", False)
result3 = self.nodes[1].importprunedfunds(rawtxn3, proof3)
balance3 = self.nodes[1].getbalance("add3", 0, False)
assert_equal(balance3, Decimal('0.025'))
balance3 = self.nodes[1].getbalance("*", 0, True)
assert_equal(balance3, Decimal('0.075'))
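        # Recap of the accounting above: rawtxn2 was imported for a
        # watch-only address, so its 0.05 only shows up when getbalance
        # includes watch-only funds, while rawtxn3 was imported against
        # a real private key and its 0.025 counts as spendable even
        # with include_watchonly=False.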
#Addresses Test - after import
address_info = self.nodes[1].validateaddress(address1)
assert_equal(address_info['iswatchonly'], False)
assert_equal(address_info['ismine'], False)
address_info = self.nodes[1].validateaddress(address2)
assert_equal(address_info['iswatchonly'], True)
assert_equal(address_info['ismine'], False)
address_info = self.nodes[1].validateaddress(address3)
assert_equal(address_info['iswatchonly'], False)
assert_equal(address_info['ismine'], True)
#Remove transactions
assert_raises_jsonrpc(-8, "Transaction does not exist in wallet.", self.nodes[1].removeprunedfunds, txnid1)
balance1 = self.nodes[1].getbalance("*", 0, True)
assert_equal(balance1, Decimal('0.075'))
self.nodes[1].removeprunedfunds(txnid2)
balance2 = self.nodes[1].getbalance("*", 0, True)
assert_equal(balance2, Decimal('0.025'))
self.nodes[1].removeprunedfunds(txnid3)
balance3 = self.nodes[1].getbalance("*", 0, True)
assert_equal(balance3, Decimal('0.0'))
if __name__ == '__main__':
ImportPrunedFundsTest().main()
|
JFriel/honours_project
|
refs/heads/master
|
networkx/build/lib/networkx/algorithms/centrality/betweenness_subset.py
|
10
|
"""
Betweenness centrality measures for subsets of nodes.
"""
# Copyright (C) 2004-2015 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
__author__ = """Aric Hagberg (hagberg@lanl.gov)"""
__all__ = ['betweenness_centrality_subset',
'edge_betweenness_centrality_subset',
'betweenness_centrality_source']
import networkx as nx
from networkx.algorithms.centrality.betweenness import\
_single_source_dijkstra_path_basic as dijkstra
from networkx.algorithms.centrality.betweenness import\
_single_source_shortest_path_basic as shortest_path
def betweenness_centrality_subset(G,sources,targets,
normalized=False,
weight=None):
r"""Compute betweenness centrality for a subset of nodes.
.. math::
c_B(v) =\sum_{s\in S, t \in T} \frac{\sigma(s, t|v)}{\sigma(s, t)}
where `S` is the set of sources, `T` is the set of targets,
`\sigma(s, t)` is the number of shortest `(s, t)`-paths,
and `\sigma(s, t|v)` is the number of those paths
passing through some node `v` other than `s, t`.
If `s = t`, `\sigma(s, t) = 1`,
and if `v \in {s, t}`, `\sigma(s, t|v) = 0` [2]_.
Parameters
----------
G : graph
sources: list of nodes
Nodes to use as sources for shortest paths in betweenness
targets: list of nodes
Nodes to use as targets for shortest paths in betweenness
normalized : bool, optional
If True the betweenness values are normalized by `2/((n-1)(n-2))`
for graphs, and `1/((n-1)(n-2))` for directed graphs where `n`
is the number of nodes in G.
weight : None or string, optional
If None, all edge weights are considered equal.
Otherwise holds the name of the edge attribute used as weight.
Returns
-------
nodes : dictionary
Dictionary of nodes with betweenness centrality as the value.
See Also
--------
edge_betweenness_centrality
load_centrality
Notes
-----
The basic algorithm is from [1]_.
For weighted graphs the edge weights must be greater than zero.
Zero edge weights can produce an infinite number of equal length
paths between pairs of nodes.
The normalization might seem a little strange but it is the same
as in betweenness_centrality() and is designed to make
betweenness_centrality(G) be the same as
betweenness_centrality_subset(G,sources=G.nodes(),targets=G.nodes()).
References
----------
.. [1] Ulrik Brandes, A Faster Algorithm for Betweenness Centrality.
Journal of Mathematical Sociology 25(2):163-177, 2001.
http://www.inf.uni-konstanz.de/algo/publications/b-fabc-01.pdf
.. [2] Ulrik Brandes: On Variants of Shortest-Path Betweenness
Centrality and their Generic Computation.
Social Networks 30(2):136-145, 2008.
http://www.inf.uni-konstanz.de/algo/publications/b-vspbc-08.pdf
"""
b=dict.fromkeys(G,0.0) # b[v]=0 for v in G
for s in sources:
# single source shortest paths
if weight is None: # use BFS
S,P,sigma=shortest_path(G,s)
else: # use Dijkstra's algorithm
S,P,sigma=dijkstra(G,s,weight)
b=_accumulate_subset(b,S,P,sigma,s,targets)
b=_rescale(b,len(G),normalized=normalized,directed=G.is_directed())
return b
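# A minimal sketch of the subset computation on a path graph, with the
# default normalized=False (undirected results are rescaled by 1/2):
#
#     >>> G = nx.path_graph(4)    # 0-1-2-3
#     >>> betweenness_centrality_subset(G, sources=[0], targets=[3])
#     {0: 0.0, 1: 0.5, 2: 0.5, 3: 0.0}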
def edge_betweenness_centrality_subset(G,sources,targets,
normalized=False,
weight=None):
r"""Compute betweenness centrality for edges for a subset of nodes.
.. math::
c_B(v) =\sum_{s\in S,t \in T} \frac{\sigma(s, t|e)}{\sigma(s, t)}
where `S` is the set of sources, `T` is the set of targets,
`\sigma(s, t)` is the number of shortest `(s, t)`-paths,
and `\sigma(s, t|e)` is the number of those paths
passing through edge `e` [2]_.
Parameters
----------
G : graph
A networkx graph
sources: list of nodes
Nodes to use as sources for shortest paths in betweenness
targets: list of nodes
Nodes to use as targets for shortest paths in betweenness
normalized : bool, optional
If True the betweenness values are normalized by `2/(n(n-1))`
for graphs, and `1/(n(n-1))` for directed graphs where `n`
is the number of nodes in G.
weight : None or string, optional
If None, all edge weights are considered equal.
Otherwise holds the name of the edge attribute used as weight.
Returns
-------
edges : dictionary
Dictionary of edges with Betweenness centrality as the value.
See Also
--------
betweenness_centrality
edge_load
Notes
-----
The basic algorithm is from [1]_.
For weighted graphs the edge weights must be greater than zero.
Zero edge weights can produce an infinite number of equal length
paths between pairs of nodes.
The normalization might seem a little strange but it is the same
as in edge_betweenness_centrality() and is designed to make
edge_betweenness_centrality(G) be the same as
edge_betweenness_centrality_subset(G,sources=G.nodes(),targets=G.nodes()).
References
----------
.. [1] Ulrik Brandes, A Faster Algorithm for Betweenness Centrality.
Journal of Mathematical Sociology 25(2):163-177, 2001.
http://www.inf.uni-konstanz.de/algo/publications/b-fabc-01.pdf
.. [2] Ulrik Brandes: On Variants of Shortest-Path Betweenness
Centrality and their Generic Computation.
Social Networks 30(2):136-145, 2008.
http://www.inf.uni-konstanz.de/algo/publications/b-vspbc-08.pdf
"""
b=dict.fromkeys(G,0.0) # b[v]=0 for v in G
b.update(dict.fromkeys(G.edges(),0.0)) # b[e] for e in G.edges()
for s in sources:
# single source shortest paths
if weight is None: # use BFS
S,P,sigma=shortest_path(G,s)
else: # use Dijkstra's algorithm
S,P,sigma=dijkstra(G,s,weight)
b=_accumulate_edges_subset(b,S,P,sigma,s,targets)
for n in G: # remove nodes to only return edges
del b[n]
b=_rescale_e(b,len(G),normalized=normalized,directed=G.is_directed())
return b
# obsolete name
def betweenness_centrality_source(G,normalized=True,weight=None,sources=None):
if sources is None:
sources=G.nodes()
targets=G.nodes()
return betweenness_centrality_subset(G,sources,targets,normalized,weight)
def _accumulate_subset(betweenness,S,P,sigma,s,targets):
delta=dict.fromkeys(S,0)
target_set=set(targets)
while S:
w=S.pop()
for v in P[w]:
if w in target_set:
delta[v]+=(sigma[v]/sigma[w])*(1.0+delta[w])
else:
delta[v]+=delta[w]/len(P[w])
if w != s:
betweenness[w]+=delta[w]
return betweenness
def _accumulate_edges_subset(betweenness,S,P,sigma,s,targets):
delta=dict.fromkeys(S,0)
target_set=set(targets)
while S:
w=S.pop()
for v in P[w]:
if w in target_set:
c=(sigma[v]/sigma[w])*(1.0+delta[w])
else:
c=delta[w]/len(P[w])
if (v,w) not in betweenness:
betweenness[(w,v)]+=c
else:
betweenness[(v,w)]+=c
delta[v]+=c
if w != s:
betweenness[w]+=delta[w]
return betweenness
def _rescale(betweenness,n,normalized,directed=False):
if normalized is True:
if n <=2:
scale=None # no normalization b=0 for all nodes
else:
scale=1.0/((n-1)*(n-2))
else: # rescale by 2 for undirected graphs
if not directed:
scale=1.0/2.0
else:
scale=None
if scale is not None:
for v in betweenness:
betweenness[v] *= scale
return betweenness
def _rescale_e(betweenness,n,normalized,directed=False):
if normalized is True:
if n <=1:
scale=None # no normalization b=0 for all nodes
else:
scale=1.0/(n*(n-1))
else: # rescale by 2 for undirected graphs
if not directed:
scale=1.0/2.0
else:
scale=None
if scale is not None:
for v in betweenness:
betweenness[v] *= scale
return betweenness
|
nicecapj/crossplatfromMmorpgServer
|
refs/heads/master
|
ThirdParty/googletest/googlemock/scripts/generator/cpp/ast.py
|
384
|
#!/usr/bin/env python
#
# Copyright 2007 Neal Norwitz
# Portions Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generate an Abstract Syntax Tree (AST) for C++."""
__author__ = 'nnorwitz@google.com (Neal Norwitz)'
# TODO:
# * Tokens should never be exported, need to convert to Nodes
# (return types, parameters, etc.)
# * Handle static class data for templatized classes
# * Handle casts (both C++ and C-style)
# * Handle conditions and loops (if/else, switch, for, while/do)
#
# TODO much, much later:
# * Handle #define
# * exceptions
try:
# Python 3.x
import builtins
except ImportError:
# Python 2.x
import __builtin__ as builtins
import sys
import traceback
from cpp import keywords
from cpp import tokenize
from cpp import utils
if not hasattr(builtins, 'reversed'):
# Support Python 2.3 and earlier.
def reversed(seq):
for i in range(len(seq)-1, -1, -1):
yield seq[i]
if not hasattr(builtins, 'next'):
# Support Python 2.5 and earlier.
def next(obj):
return obj.next()
VISIBILITY_PUBLIC, VISIBILITY_PROTECTED, VISIBILITY_PRIVATE = range(3)
FUNCTION_NONE = 0x00
FUNCTION_CONST = 0x01
FUNCTION_VIRTUAL = 0x02
FUNCTION_PURE_VIRTUAL = 0x04
FUNCTION_CTOR = 0x08
FUNCTION_DTOR = 0x10
FUNCTION_ATTRIBUTE = 0x20
FUNCTION_UNKNOWN_ANNOTATION = 0x40
FUNCTION_THROW = 0x80
FUNCTION_OVERRIDE = 0x100
"""
These are currently unused. Should really handle these properly at some point.
TYPE_MODIFIER_INLINE = 0x010000
TYPE_MODIFIER_EXTERN = 0x020000
TYPE_MODIFIER_STATIC = 0x040000
TYPE_MODIFIER_CONST = 0x080000
TYPE_MODIFIER_REGISTER = 0x100000
TYPE_MODIFIER_VOLATILE = 0x200000
TYPE_MODIFIER_MUTABLE = 0x400000
TYPE_MODIFIER_MAP = {
'inline': TYPE_MODIFIER_INLINE,
'extern': TYPE_MODIFIER_EXTERN,
'static': TYPE_MODIFIER_STATIC,
'const': TYPE_MODIFIER_CONST,
'register': TYPE_MODIFIER_REGISTER,
'volatile': TYPE_MODIFIER_VOLATILE,
'mutable': TYPE_MODIFIER_MUTABLE,
}
"""
_INTERNAL_TOKEN = 'internal'
_NAMESPACE_POP = 'ns-pop'
# TODO(nnorwitz): use this as a singleton for templated_types, etc
# where we don't want to create a new empty dict each time. It is also const.
class _NullDict(object):
    __contains__ = lambda self, item: False
    keys = values = items = iterkeys = itervalues = iteritems = lambda self: ()
# TODO(nnorwitz): move AST nodes into a separate module.
class Node(object):
"""Base AST node."""
def __init__(self, start, end):
self.start = start
self.end = end
def IsDeclaration(self):
"""Returns bool if this node is a declaration."""
return False
def IsDefinition(self):
"""Returns bool if this node is a definition."""
return False
def IsExportable(self):
"""Returns bool if this node exportable from a header file."""
return False
def Requires(self, node):
"""Does this AST node require the definition of the node passed in?"""
return False
def XXX__str__(self):
return self._StringHelper(self.__class__.__name__, '')
def _StringHelper(self, name, suffix):
if not utils.DEBUG:
return '%s(%s)' % (name, suffix)
return '%s(%d, %d, %s)' % (name, self.start, self.end, suffix)
def __repr__(self):
return str(self)
class Define(Node):
def __init__(self, start, end, name, definition):
Node.__init__(self, start, end)
self.name = name
self.definition = definition
def __str__(self):
value = '%s %s' % (self.name, self.definition)
return self._StringHelper(self.__class__.__name__, value)
class Include(Node):
def __init__(self, start, end, filename, system):
Node.__init__(self, start, end)
self.filename = filename
self.system = system
def __str__(self):
fmt = '"%s"'
if self.system:
fmt = '<%s>'
return self._StringHelper(self.__class__.__name__, fmt % self.filename)
class Goto(Node):
def __init__(self, start, end, label):
Node.__init__(self, start, end)
self.label = label
def __str__(self):
return self._StringHelper(self.__class__.__name__, str(self.label))
class Expr(Node):
def __init__(self, start, end, expr):
Node.__init__(self, start, end)
self.expr = expr
def Requires(self, node):
# TODO(nnorwitz): impl.
return False
def __str__(self):
return self._StringHelper(self.__class__.__name__, str(self.expr))
class Return(Expr):
pass
class Delete(Expr):
pass
class Friend(Expr):
def __init__(self, start, end, expr, namespace):
Expr.__init__(self, start, end, expr)
self.namespace = namespace[:]
class Using(Node):
def __init__(self, start, end, names):
Node.__init__(self, start, end)
self.names = names
def __str__(self):
return self._StringHelper(self.__class__.__name__, str(self.names))
class Parameter(Node):
def __init__(self, start, end, name, parameter_type, default):
Node.__init__(self, start, end)
self.name = name
self.type = parameter_type
self.default = default
def Requires(self, node):
# TODO(nnorwitz): handle namespaces, etc.
return self.type.name == node.name
def __str__(self):
name = str(self.type)
suffix = '%s %s' % (name, self.name)
if self.default:
suffix += ' = ' + ''.join([d.name for d in self.default])
return self._StringHelper(self.__class__.__name__, suffix)
class _GenericDeclaration(Node):
def __init__(self, start, end, name, namespace):
Node.__init__(self, start, end)
self.name = name
self.namespace = namespace[:]
def FullName(self):
prefix = ''
if self.namespace and self.namespace[-1]:
prefix = '::'.join(self.namespace) + '::'
return prefix + self.name
def _TypeStringHelper(self, suffix):
if self.namespace:
names = [n or '<anonymous>' for n in self.namespace]
suffix += ' in ' + '::'.join(names)
return self._StringHelper(self.__class__.__name__, suffix)
# TODO(nnorwitz): merge with Parameter in some way?
class VariableDeclaration(_GenericDeclaration):
def __init__(self, start, end, name, var_type, initial_value, namespace):
_GenericDeclaration.__init__(self, start, end, name, namespace)
self.type = var_type
self.initial_value = initial_value
def Requires(self, node):
# TODO(nnorwitz): handle namespaces, etc.
return self.type.name == node.name
def ToString(self):
"""Return a string that tries to reconstitute the variable decl."""
suffix = '%s %s' % (self.type, self.name)
if self.initial_value:
suffix += ' = ' + self.initial_value
return suffix
def __str__(self):
return self._StringHelper(self.__class__.__name__, self.ToString())
class Typedef(_GenericDeclaration):
def __init__(self, start, end, name, alias, namespace):
_GenericDeclaration.__init__(self, start, end, name, namespace)
self.alias = alias
def IsDefinition(self):
return True
def IsExportable(self):
return True
def Requires(self, node):
# TODO(nnorwitz): handle namespaces, etc.
name = node.name
for token in self.alias:
if token is not None and name == token.name:
return True
return False
def __str__(self):
suffix = '%s, %s' % (self.name, self.alias)
return self._TypeStringHelper(suffix)
class _NestedType(_GenericDeclaration):
def __init__(self, start, end, name, fields, namespace):
_GenericDeclaration.__init__(self, start, end, name, namespace)
self.fields = fields
def IsDefinition(self):
return True
def IsExportable(self):
return True
def __str__(self):
suffix = '%s, {%s}' % (self.name, self.fields)
return self._TypeStringHelper(suffix)
class Union(_NestedType):
pass
class Enum(_NestedType):
pass
class Class(_GenericDeclaration):
def __init__(self, start, end, name, bases, templated_types, body, namespace):
_GenericDeclaration.__init__(self, start, end, name, namespace)
self.bases = bases
self.body = body
self.templated_types = templated_types
def IsDeclaration(self):
return self.bases is None and self.body is None
def IsDefinition(self):
return not self.IsDeclaration()
def IsExportable(self):
return not self.IsDeclaration()
def Requires(self, node):
# TODO(nnorwitz): handle namespaces, etc.
if self.bases:
for token_list in self.bases:
            # TODO(nnorwitz): bases are tokens, do name comparison.
for token in token_list:
if token.name == node.name:
return True
# TODO(nnorwitz): search in body too.
return False
def __str__(self):
name = self.name
if self.templated_types:
name += '<%s>' % self.templated_types
suffix = '%s, %s, %s' % (name, self.bases, self.body)
return self._TypeStringHelper(suffix)
class Struct(Class):
pass
class Function(_GenericDeclaration):
def __init__(self, start, end, name, return_type, parameters,
modifiers, templated_types, body, namespace):
_GenericDeclaration.__init__(self, start, end, name, namespace)
converter = TypeConverter(namespace)
self.return_type = converter.CreateReturnType(return_type)
self.parameters = converter.ToParameters(parameters)
self.modifiers = modifiers
self.body = body
self.templated_types = templated_types
def IsDeclaration(self):
return self.body is None
def IsDefinition(self):
return self.body is not None
def IsExportable(self):
if self.return_type and 'static' in self.return_type.modifiers:
return False
return None not in self.namespace
def Requires(self, node):
if self.parameters:
            # TODO(nnorwitz): parameters are tokens, do name comparison.
for p in self.parameters:
if p.name == node.name:
return True
# TODO(nnorwitz): search in body too.
return False
def __str__(self):
# TODO(nnorwitz): add templated_types.
suffix = ('%s %s(%s), 0x%02x, %s' %
(self.return_type, self.name, self.parameters,
self.modifiers, self.body))
return self._TypeStringHelper(suffix)
class Method(Function):
def __init__(self, start, end, name, in_class, return_type, parameters,
modifiers, templated_types, body, namespace):
Function.__init__(self, start, end, name, return_type, parameters,
modifiers, templated_types, body, namespace)
# TODO(nnorwitz): in_class could also be a namespace which can
# mess up finding functions properly.
self.in_class = in_class
class Type(_GenericDeclaration):
"""Type used for any variable (eg class, primitive, struct, etc)."""
def __init__(self, start, end, name, templated_types, modifiers,
reference, pointer, array):
"""
Args:
name: str name of main type
templated_types: [Class (Type?)] template type info between <>
modifiers: [str] type modifiers (keywords) eg, const, mutable, etc.
reference, pointer, array: bools
"""
_GenericDeclaration.__init__(self, start, end, name, [])
self.templated_types = templated_types
if not name and modifiers:
self.name = modifiers.pop()
self.modifiers = modifiers
self.reference = reference
self.pointer = pointer
self.array = array
def __str__(self):
prefix = ''
if self.modifiers:
prefix = ' '.join(self.modifiers) + ' '
name = str(self.name)
if self.templated_types:
name += '<%s>' % self.templated_types
suffix = prefix + name
if self.reference:
suffix += '&'
if self.pointer:
suffix += '*'
if self.array:
suffix += '[]'
return self._TypeStringHelper(suffix)
# By definition, Is* are always False. A Type can only exist in
# some sort of variable declaration, parameter, or return value.
def IsDeclaration(self):
return False
def IsDefinition(self):
return False
def IsExportable(self):
return False
class TypeConverter(object):
def __init__(self, namespace_stack):
self.namespace_stack = namespace_stack
def _GetTemplateEnd(self, tokens, start):
count = 1
end = start
while 1:
token = tokens[end]
end += 1
if token.name == '<':
count += 1
elif token.name == '>':
count -= 1
if count == 0:
break
return tokens[start:end-1], end
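    # For example, given tokens named ['x', ',', 'y', '>', 'w'] and
    # start=0, _GetTemplateEnd returns (tokens[0:3], 4): everything up
    # to (but not including) the matching '>', plus the index just past
    # it. Nested '<...>' pairs bump the count so only the matching
    # closer ends the scan.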
def ToType(self, tokens):
"""Convert [Token,...] to [Class(...), ] useful for base classes.
For example, code like class Foo : public Bar<x, y> { ... };
the "Bar<x, y>" portion gets converted to an AST.
Returns:
[Class(...), ...]
"""
result = []
name_tokens = []
reference = pointer = array = False
def AddType(templated_types):
# Partition tokens into name and modifier tokens.
names = []
modifiers = []
for t in name_tokens:
if keywords.IsKeyword(t.name):
modifiers.append(t.name)
else:
names.append(t.name)
name = ''.join(names)
if name_tokens:
result.append(Type(name_tokens[0].start, name_tokens[-1].end,
name, templated_types, modifiers,
reference, pointer, array))
del name_tokens[:]
i = 0
end = len(tokens)
while i < end:
token = tokens[i]
if token.name == '<':
new_tokens, new_end = self._GetTemplateEnd(tokens, i+1)
AddType(self.ToType(new_tokens))
# If there is a comma after the template, we need to consume
# that here otherwise it becomes part of the name.
i = new_end
reference = pointer = array = False
elif token.name == ',':
AddType([])
reference = pointer = array = False
elif token.name == '*':
pointer = True
elif token.name == '&':
reference = True
elif token.name == '[':
pointer = True
elif token.name == ']':
pass
else:
name_tokens.append(token)
i += 1
if name_tokens:
# No '<' in the tokens, just a simple name and no template.
AddType([])
return result
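    # Sketch of the conversion for the docstring example 'Bar<x, y>':
    # AddType fires once with the templated types from the recursive
    # ToType call, yielding a single Type named 'Bar' whose
    # templated_types hold the Types for 'x' and 'y'.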
def DeclarationToParts(self, parts, needs_name_removed):
name = None
default = []
if needs_name_removed:
# Handle default (initial) values properly.
for i, t in enumerate(parts):
if t.name == '=':
default = parts[i+1:]
name = parts[i-1].name
if name == ']' and parts[i-2].name == '[':
name = parts[i-3].name
i -= 1
parts = parts[:i-1]
break
else:
if parts[-1].token_type == tokenize.NAME:
name = parts.pop().name
else:
# TODO(nnorwitz): this is a hack that happens for code like
# Register(Foo<T>); where it thinks this is a function call
# but it's actually a declaration.
name = '???'
modifiers = []
type_name = []
other_tokens = []
templated_types = []
i = 0
end = len(parts)
while i < end:
p = parts[i]
if keywords.IsKeyword(p.name):
modifiers.append(p.name)
elif p.name == '<':
templated_tokens, new_end = self._GetTemplateEnd(parts, i+1)
templated_types = self.ToType(templated_tokens)
i = new_end - 1
# Don't add a spurious :: to data members being initialized.
next_index = i + 1
if next_index < end and parts[next_index].name == '::':
i += 1
elif p.name in ('[', ']', '='):
# These are handled elsewhere.
other_tokens.append(p)
elif p.name not in ('*', '&', '>'):
# Ensure that names have a space between them.
if (type_name and type_name[-1].token_type == tokenize.NAME and
p.token_type == tokenize.NAME):
type_name.append(tokenize.Token(tokenize.SYNTAX, ' ', 0, 0))
type_name.append(p)
else:
other_tokens.append(p)
i += 1
type_name = ''.join([t.name for t in type_name])
return name, type_name, templated_types, modifiers, default, other_tokens
def ToParameters(self, tokens):
if not tokens:
return []
result = []
name = type_name = ''
type_modifiers = []
pointer = reference = array = False
first_token = None
default = []
def AddParameter(end):
if default:
del default[0] # Remove flag.
parts = self.DeclarationToParts(type_modifiers, True)
(name, type_name, templated_types, modifiers,
unused_default, unused_other_tokens) = parts
parameter_type = Type(first_token.start, first_token.end,
type_name, templated_types, modifiers,
reference, pointer, array)
p = Parameter(first_token.start, end, name,
parameter_type, default)
result.append(p)
template_count = 0
for s in tokens:
if not first_token:
first_token = s
if s.name == '<':
template_count += 1
elif s.name == '>':
template_count -= 1
if template_count > 0:
type_modifiers.append(s)
continue
if s.name == ',':
AddParameter(s.start)
name = type_name = ''
type_modifiers = []
pointer = reference = array = False
first_token = None
default = []
elif s.name == '*':
pointer = True
elif s.name == '&':
reference = True
elif s.name == '[':
array = True
elif s.name == ']':
pass # Just don't add to type_modifiers.
elif s.name == '=':
# Got a default value. Add any value (None) as a flag.
default.append(None)
elif default:
default.append(s)
else:
type_modifiers.append(s)
AddParameter(tokens[-1].end)
return result
def CreateReturnType(self, return_type_seq):
if not return_type_seq:
return None
start = return_type_seq[0].start
end = return_type_seq[-1].end
_, name, templated_types, modifiers, default, other_tokens = \
self.DeclarationToParts(return_type_seq, False)
names = [n.name for n in other_tokens]
reference = '&' in names
pointer = '*' in names
array = '[' in names
return Type(start, end, name, templated_types, modifiers,
reference, pointer, array)
def GetTemplateIndices(self, names):
# names is a list of strings.
start = names.index('<')
end = len(names) - 1
while end > 0:
if names[end] == '>':
break
end -= 1
return start, end+1
class AstBuilder(object):
def __init__(self, token_stream, filename, in_class='', visibility=None,
namespace_stack=[]):
self.tokens = token_stream
self.filename = filename
# TODO(nnorwitz): use a better data structure (deque) for the queue.
# Switching directions of the "queue" improved perf by about 25%.
# Using a deque should be even better since we access from both sides.
self.token_queue = []
self.namespace_stack = namespace_stack[:]
self.in_class = in_class
if in_class is None:
self.in_class_name_only = None
else:
self.in_class_name_only = in_class.split('::')[-1]
self.visibility = visibility
self.in_function = False
self.current_token = None
# Keep the state whether we are currently handling a typedef or not.
self._handling_typedef = False
self.converter = TypeConverter(self.namespace_stack)
def HandleError(self, msg, token):
printable_queue = list(reversed(self.token_queue[-20:]))
sys.stderr.write('Got %s in %s @ %s %s\n' %
(msg, self.filename, token, printable_queue))
def Generate(self):
while 1:
token = self._GetNextToken()
if not token:
break
# Get the next token.
self.current_token = token
# Dispatch on the next token type.
if token.token_type == _INTERNAL_TOKEN:
if token.name == _NAMESPACE_POP:
self.namespace_stack.pop()
continue
try:
result = self._GenerateOne(token)
if result is not None:
yield result
except:
self.HandleError('exception', token)
raise
def _CreateVariable(self, pos_token, name, type_name, type_modifiers,
ref_pointer_name_seq, templated_types, value=None):
reference = '&' in ref_pointer_name_seq
pointer = '*' in ref_pointer_name_seq
array = '[' in ref_pointer_name_seq
var_type = Type(pos_token.start, pos_token.end, type_name,
templated_types, type_modifiers,
reference, pointer, array)
return VariableDeclaration(pos_token.start, pos_token.end,
name, var_type, value, self.namespace_stack)
def _GenerateOne(self, token):
if token.token_type == tokenize.NAME:
if (keywords.IsKeyword(token.name) and
not keywords.IsBuiltinType(token.name)):
method = getattr(self, 'handle_' + token.name)
return method()
elif token.name == self.in_class_name_only:
# The token name is the same as the class, must be a ctor if
# there is a paren. Otherwise, it's the return type.
# Peek ahead to get the next token to figure out which.
next = self._GetNextToken()
self._AddBackToken(next)
if next.token_type == tokenize.SYNTAX and next.name == '(':
return self._GetMethod([token], FUNCTION_CTOR, None, True)
# Fall through--handle like any other method.
# Handle data or function declaration/definition.
syntax = tokenize.SYNTAX
temp_tokens, last_token = \
self._GetVarTokensUpTo(syntax, '(', ';', '{', '[')
temp_tokens.insert(0, token)
if last_token.name == '(':
# If there is an assignment before the paren,
# this is an expression, not a method.
expr = bool([e for e in temp_tokens if e.name == '='])
if expr:
new_temp = self._GetTokensUpTo(tokenize.SYNTAX, ';')
temp_tokens.append(last_token)
temp_tokens.extend(new_temp)
last_token = tokenize.Token(tokenize.SYNTAX, ';', 0, 0)
if last_token.name == '[':
# Handle array, this isn't a method, unless it's an operator.
# TODO(nnorwitz): keep the size somewhere.
# unused_size = self._GetTokensUpTo(tokenize.SYNTAX, ']')
temp_tokens.append(last_token)
if temp_tokens[-2].name == 'operator':
temp_tokens.append(self._GetNextToken())
else:
temp_tokens2, last_token = \
self._GetVarTokensUpTo(tokenize.SYNTAX, ';')
temp_tokens.extend(temp_tokens2)
if last_token.name == ';':
# Handle data, this isn't a method.
parts = self.converter.DeclarationToParts(temp_tokens, True)
(name, type_name, templated_types, modifiers, default,
unused_other_tokens) = parts
t0 = temp_tokens[0]
names = [t.name for t in temp_tokens]
if templated_types:
start, end = self.converter.GetTemplateIndices(names)
names = names[:start] + names[end:]
default = ''.join([t.name for t in default])
return self._CreateVariable(t0, name, type_name, modifiers,
names, templated_types, default)
if last_token.name == '{':
self._AddBackTokens(temp_tokens[1:])
self._AddBackToken(last_token)
method_name = temp_tokens[0].name
method = getattr(self, 'handle_' + method_name, None)
if not method:
# Must be declaring a variable.
# TODO(nnorwitz): handle the declaration.
return None
return method()
return self._GetMethod(temp_tokens, 0, None, False)
elif token.token_type == tokenize.SYNTAX:
if token.name == '~' and self.in_class:
# Must be a dtor (probably not in method body).
token = self._GetNextToken()
# self.in_class can contain A::Name, but the dtor will only
# be Name. Make sure to compare against the right value.
if (token.token_type == tokenize.NAME and
token.name == self.in_class_name_only):
return self._GetMethod([token], FUNCTION_DTOR, None, True)
# TODO(nnorwitz): handle a lot more syntax.
elif token.token_type == tokenize.PREPROCESSOR:
# TODO(nnorwitz): handle more preprocessor directives.
# token starts with a #, so remove it and strip whitespace.
name = token.name[1:].lstrip()
if name.startswith('include'):
# Remove "include".
name = name[7:].strip()
assert name
# Handle #include \<newline> "header-on-second-line.h".
if name.startswith('\\'):
name = name[1:].strip()
assert name[0] in '<"', token
assert name[-1] in '>"', token
system = name[0] == '<'
filename = name[1:-1]
return Include(token.start, token.end, filename, system)
if name.startswith('define'):
# Remove "define".
name = name[6:].strip()
assert name
value = ''
for i, c in enumerate(name):
if c.isspace():
value = name[i:].lstrip()
name = name[:i]
break
return Define(token.start, token.end, name, value)
if name.startswith('if') and name[2:3].isspace():
condition = name[3:].strip()
if condition.startswith('0') or condition.startswith('(0)'):
self._SkipIf0Blocks()
return None
def _GetTokensUpTo(self, expected_token_type, expected_token):
return self._GetVarTokensUpTo(expected_token_type, expected_token)[0]
def _GetVarTokensUpTo(self, expected_token_type, *expected_tokens):
last_token = self._GetNextToken()
tokens = []
while (last_token.token_type != expected_token_type or
last_token.name not in expected_tokens):
tokens.append(last_token)
last_token = self._GetNextToken()
return tokens, last_token
    # TODO(nnorwitz): remove _IgnoreUpTo(); it shouldn't be necessary.
def _IgnoreUpTo(self, token_type, token):
unused_tokens = self._GetTokensUpTo(token_type, token)
def _SkipIf0Blocks(self):
count = 1
while 1:
token = self._GetNextToken()
if token.token_type != tokenize.PREPROCESSOR:
continue
name = token.name[1:].lstrip()
if name.startswith('endif'):
count -= 1
if count == 0:
break
elif name.startswith('if'):
count += 1
def _GetMatchingChar(self, open_paren, close_paren, GetNextToken=None):
if GetNextToken is None:
GetNextToken = self._GetNextToken
# Assumes the current token is open_paren and we will consume
# and return up to the close_paren.
count = 1
token = GetNextToken()
while 1:
if token.token_type == tokenize.SYNTAX:
if token.name == open_paren:
count += 1
elif token.name == close_paren:
count -= 1
if count == 0:
break
yield token
token = GetNextToken()
yield token
def _GetParameters(self):
return self._GetMatchingChar('(', ')')
def GetScope(self):
return self._GetMatchingChar('{', '}')
def _GetNextToken(self):
if self.token_queue:
return self.token_queue.pop()
return next(self.tokens)
def _AddBackToken(self, token):
if token.whence == tokenize.WHENCE_STREAM:
token.whence = tokenize.WHENCE_QUEUE
self.token_queue.insert(0, token)
else:
assert token.whence == tokenize.WHENCE_QUEUE, token
self.token_queue.append(token)
def _AddBackTokens(self, tokens):
if tokens:
if tokens[-1].whence == tokenize.WHENCE_STREAM:
for token in tokens:
token.whence = tokenize.WHENCE_QUEUE
self.token_queue[:0] = reversed(tokens)
else:
assert tokens[-1].whence == tokenize.WHENCE_QUEUE, tokens
self.token_queue.extend(reversed(tokens))
def GetName(self, seq=None):
"""Returns ([tokens], next_token_info)."""
GetNextToken = self._GetNextToken
if seq is not None:
it = iter(seq)
GetNextToken = lambda: next(it)
next_token = GetNextToken()
tokens = []
last_token_was_name = False
while (next_token.token_type == tokenize.NAME or
(next_token.token_type == tokenize.SYNTAX and
next_token.name in ('::', '<'))):
# Two NAMEs in a row means the identifier should terminate.
# It's probably some sort of variable declaration.
if last_token_was_name and next_token.token_type == tokenize.NAME:
break
last_token_was_name = next_token.token_type == tokenize.NAME
tokens.append(next_token)
# Handle templated names.
if next_token.name == '<':
tokens.extend(self._GetMatchingChar('<', '>', GetNextToken))
last_token_was_name = True
next_token = GetNextToken()
return tokens, next_token
def GetMethod(self, modifiers, templated_types):
return_type_and_name = self._GetTokensUpTo(tokenize.SYNTAX, '(')
assert len(return_type_and_name) >= 1
return self._GetMethod(return_type_and_name, modifiers, templated_types,
False)
def _GetMethod(self, return_type_and_name, modifiers, templated_types,
get_paren):
template_portion = None
if get_paren:
token = self._GetNextToken()
assert token.token_type == tokenize.SYNTAX, token
if token.name == '<':
# Handle templatized dtors.
template_portion = [token]
template_portion.extend(self._GetMatchingChar('<', '>'))
token = self._GetNextToken()
assert token.token_type == tokenize.SYNTAX, token
assert token.name == '(', token
name = return_type_and_name.pop()
# Handle templatized ctors.
if name.name == '>':
index = 1
while return_type_and_name[index].name != '<':
index += 1
template_portion = return_type_and_name[index:] + [name]
del return_type_and_name[index:]
name = return_type_and_name.pop()
elif name.name == ']':
rt = return_type_and_name
assert rt[-1].name == '[', return_type_and_name
assert rt[-2].name == 'operator', return_type_and_name
name_seq = return_type_and_name[-2:]
del return_type_and_name[-2:]
name = tokenize.Token(tokenize.NAME, 'operator[]',
name_seq[0].start, name.end)
# Get the open paren so _GetParameters() below works.
unused_open_paren = self._GetNextToken()
# TODO(nnorwitz): store template_portion.
return_type = return_type_and_name
indices = name
if return_type:
indices = return_type[0]
# Force ctor for templatized ctors.
if name.name == self.in_class and not modifiers:
modifiers |= FUNCTION_CTOR
parameters = list(self._GetParameters())
del parameters[-1] # Remove trailing ')'.
# Handling operator() is especially weird.
if name.name == 'operator' and not parameters:
token = self._GetNextToken()
assert token.name == '(', token
parameters = list(self._GetParameters())
del parameters[-1] # Remove trailing ')'.
token = self._GetNextToken()
while token.token_type == tokenize.NAME:
modifier_token = token
token = self._GetNextToken()
if modifier_token.name == 'const':
modifiers |= FUNCTION_CONST
elif modifier_token.name == '__attribute__':
# TODO(nnorwitz): handle more __attribute__ details.
modifiers |= FUNCTION_ATTRIBUTE
assert token.name == '(', token
# Consume everything between the (parens).
unused_tokens = list(self._GetMatchingChar('(', ')'))
token = self._GetNextToken()
elif modifier_token.name == 'throw':
modifiers |= FUNCTION_THROW
assert token.name == '(', token
# Consume everything between the (parens).
unused_tokens = list(self._GetMatchingChar('(', ')'))
token = self._GetNextToken()
elif modifier_token.name == 'override':
modifiers |= FUNCTION_OVERRIDE
elif modifier_token.name == modifier_token.name.upper():
# HACK(nnorwitz): assume that all upper-case names
# are some macro we aren't expanding.
modifiers |= FUNCTION_UNKNOWN_ANNOTATION
else:
self.HandleError('unexpected token', modifier_token)
assert token.token_type == tokenize.SYNTAX, token
# Handle ctor initializers.
if token.name == ':':
# TODO(nnorwitz): anything else to handle for initializer list?
while token.name != ';' and token.name != '{':
token = self._GetNextToken()
# Handle pointer to functions that are really data but look
# like method declarations.
if token.name == '(':
if parameters[0].name == '*':
# name contains the return type.
name = parameters.pop()
# parameters contains the name of the data.
modifiers = [p.name for p in parameters]
# Already at the ( to open the parameter list.
function_parameters = list(self._GetMatchingChar('(', ')'))
del function_parameters[-1] # Remove trailing ')'.
# TODO(nnorwitz): store the function_parameters.
token = self._GetNextToken()
assert token.token_type == tokenize.SYNTAX, token
assert token.name == ';', token
return self._CreateVariable(indices, name.name, indices.name,
modifiers, '', None)
# At this point, we got something like:
# return_type (type::*name_)(params);
# This is a data member called name_ that is a function pointer.
# With this code: void (sq_type::*field_)(string&);
# We get: name=void return_type=[] parameters=sq_type ... field_
# TODO(nnorwitz): is return_type always empty?
# TODO(nnorwitz): this isn't even close to being correct.
# Just put in something so we don't crash and can move on.
real_name = parameters[-1]
modifiers = [p.name for p in self._GetParameters()]
del modifiers[-1] # Remove trailing ')'.
return self._CreateVariable(indices, real_name.name, indices.name,
modifiers, '', None)
if token.name == '{':
body = list(self.GetScope())
del body[-1] # Remove trailing '}'.
else:
body = None
if token.name == '=':
token = self._GetNextToken()
if token.name == 'default' or token.name == 'delete':
# Ignore explicitly defaulted and deleted special members
# in C++11.
token = self._GetNextToken()
else:
# Handle pure-virtual declarations.
assert token.token_type == tokenize.CONSTANT, token
assert token.name == '0', token
modifiers |= FUNCTION_PURE_VIRTUAL
token = self._GetNextToken()
if token.name == '[':
# TODO(nnorwitz): store tokens and improve parsing.
# template <typename T, size_t N> char (&ASH(T (&seq)[N]))[N];
tokens = list(self._GetMatchingChar('[', ']'))
token = self._GetNextToken()
assert token.name == ';', (token, return_type_and_name, parameters)
# Looks like we got a method, not a function.
if len(return_type) > 2 and return_type[-1].name == '::':
return_type, in_class = \
self._GetReturnTypeAndClassName(return_type)
return Method(indices.start, indices.end, name.name, in_class,
return_type, parameters, modifiers, templated_types,
body, self.namespace_stack)
return Function(indices.start, indices.end, name.name, return_type,
parameters, modifiers, templated_types, body,
self.namespace_stack)
def _GetReturnTypeAndClassName(self, token_seq):
# Splitting the return type from the class name in a method
# can be tricky. For example, Return::Type::Is::Hard::To::Find().
# Where is the return type and where is the class name?
# The heuristic used is to pull the last name as the class name.
# This includes all the templated type info.
        # TODO(nnorwitz): if there is only one name like in the
# example above, punt and assume the last bit is the class name.
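        # Rough sketch (illustrative, not from the original comments): for a
        # declaration like 'MyType MyClass::Foo()', the sequence passed in
        # spells 'MyType MyClass ::'; the names parsed below come out as
        # ['MyType', 'MyClass'], so 'MyType' becomes the return type and
        # 'MyClass' the class name.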
        # Ignore a :: prefix, if it exists, so we can find the first real name.
i = 0
if token_seq[0].name == '::':
i = 1
        # Ignore a :: suffix, if it exists.
end = len(token_seq) - 1
if token_seq[end-1].name == '::':
end -= 1
# Make a copy of the sequence so we can append a sentinel
        # value. This is required because GetName needs some
        # terminating condition beyond the last name.
seq_copy = token_seq[i:end]
seq_copy.append(tokenize.Token(tokenize.SYNTAX, '', 0, 0))
names = []
while i < end:
# Iterate through the sequence parsing out each name.
new_name, next = self.GetName(seq_copy[i:])
assert new_name, 'Got empty new_name, next=%s' % next
# We got a pointer or ref. Add it to the name.
if next and next.token_type == tokenize.SYNTAX:
new_name.append(next)
names.append(new_name)
i += len(new_name)
# Now that we have the names, it's time to undo what we did.
# Remove the sentinel value.
names[-1].pop()
# Flatten the token sequence for the return type.
return_type = [e for seq in names[:-1] for e in seq]
# The class name is the last name.
class_name = names[-1]
return return_type, class_name
def handle_bool(self):
pass
def handle_char(self):
pass
def handle_int(self):
pass
def handle_long(self):
pass
def handle_short(self):
pass
def handle_double(self):
pass
def handle_float(self):
pass
def handle_void(self):
pass
def handle_wchar_t(self):
pass
def handle_unsigned(self):
pass
def handle_signed(self):
pass
def _GetNestedType(self, ctor):
name = None
name_tokens, token = self.GetName()
if name_tokens:
name = ''.join([t.name for t in name_tokens])
# Handle forward declarations.
if token.token_type == tokenize.SYNTAX and token.name == ';':
return ctor(token.start, token.end, name, None,
self.namespace_stack)
if token.token_type == tokenize.NAME and self._handling_typedef:
self._AddBackToken(token)
return ctor(token.start, token.end, name, None,
self.namespace_stack)
# Must be the type declaration.
fields = list(self._GetMatchingChar('{', '}'))
del fields[-1] # Remove trailing '}'.
if token.token_type == tokenize.SYNTAX and token.name == '{':
next = self._GetNextToken()
new_type = ctor(token.start, token.end, name, fields,
self.namespace_stack)
# A name means this is an anonymous type and the name
# is the variable declaration.
if next.token_type != tokenize.NAME:
return new_type
name = new_type
token = next
# Must be variable declaration using the type prefixed with keyword.
assert token.token_type == tokenize.NAME, token
return self._CreateVariable(token, token.name, name, [], '', None)
def handle_struct(self):
# Special case the handling typedef/aliasing of structs here.
# It would be a pain to handle in the class code.
name_tokens, var_token = self.GetName()
if name_tokens:
next_token = self._GetNextToken()
is_syntax = (var_token.token_type == tokenize.SYNTAX and
var_token.name[0] in '*&')
is_variable = (var_token.token_type == tokenize.NAME and
next_token.name == ';')
variable = var_token
if is_syntax and not is_variable:
variable = next_token
temp = self._GetNextToken()
if temp.token_type == tokenize.SYNTAX and temp.name == '(':
# Handle methods declared to return a struct.
t0 = name_tokens[0]
struct = tokenize.Token(tokenize.NAME, 'struct',
t0.start-7, t0.start-2)
type_and_name = [struct]
type_and_name.extend(name_tokens)
type_and_name.extend((var_token, next_token))
return self._GetMethod(type_and_name, 0, None, False)
assert temp.name == ';', (temp, name_tokens, var_token)
if is_syntax or (is_variable and not self._handling_typedef):
modifiers = ['struct']
type_name = ''.join([t.name for t in name_tokens])
position = name_tokens[0]
return self._CreateVariable(position, variable.name, type_name,
modifiers, var_token.name, None)
name_tokens.extend((var_token, next_token))
self._AddBackTokens(name_tokens)
else:
self._AddBackToken(var_token)
return self._GetClass(Struct, VISIBILITY_PUBLIC, None)
def handle_union(self):
return self._GetNestedType(Union)
def handle_enum(self):
return self._GetNestedType(Enum)
def handle_auto(self):
# TODO(nnorwitz): warn about using auto? Probably not since it
# will be reclaimed and useful for C++0x.
pass
def handle_register(self):
pass
def handle_const(self):
pass
def handle_inline(self):
pass
def handle_extern(self):
pass
def handle_static(self):
pass
def handle_virtual(self):
# What follows must be a method.
token = token2 = self._GetNextToken()
if token.name == 'inline':
# HACK(nnorwitz): handle inline dtors by ignoring 'inline'.
token2 = self._GetNextToken()
if token2.token_type == tokenize.SYNTAX and token2.name == '~':
return self.GetMethod(FUNCTION_VIRTUAL + FUNCTION_DTOR, None)
assert token.token_type == tokenize.NAME or token.name == '::', token
return_type_and_name = self._GetTokensUpTo(tokenize.SYNTAX, '(') # )
return_type_and_name.insert(0, token)
if token2 is not token:
return_type_and_name.insert(1, token2)
return self._GetMethod(return_type_and_name, FUNCTION_VIRTUAL,
None, False)
def handle_volatile(self):
pass
def handle_mutable(self):
pass
def handle_public(self):
assert self.in_class
self.visibility = VISIBILITY_PUBLIC
def handle_protected(self):
assert self.in_class
self.visibility = VISIBILITY_PROTECTED
def handle_private(self):
assert self.in_class
self.visibility = VISIBILITY_PRIVATE
def handle_friend(self):
tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
assert tokens
t0 = tokens[0]
return Friend(t0.start, t0.end, tokens, self.namespace_stack)
def handle_static_cast(self):
pass
def handle_const_cast(self):
pass
def handle_dynamic_cast(self):
pass
def handle_reinterpret_cast(self):
pass
def handle_new(self):
pass
def handle_delete(self):
tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
assert tokens
return Delete(tokens[0].start, tokens[0].end, tokens)
def handle_typedef(self):
token = self._GetNextToken()
if (token.token_type == tokenize.NAME and
keywords.IsKeyword(token.name)):
# Token must be struct/enum/union/class.
method = getattr(self, 'handle_' + token.name)
self._handling_typedef = True
tokens = [method()]
self._handling_typedef = False
else:
tokens = [token]
# Get the remainder of the typedef up to the semi-colon.
tokens.extend(self._GetTokensUpTo(tokenize.SYNTAX, ';'))
# TODO(nnorwitz): clean all this up.
assert tokens
name = tokens.pop()
indices = name
if tokens:
indices = tokens[0]
if not indices:
indices = token
if name.name == ')':
# HACK(nnorwitz): Handle pointers to functions "properly".
if (len(tokens) >= 4 and
tokens[1].name == '(' and tokens[2].name == '*'):
tokens.append(name)
name = tokens[3]
elif name.name == ']':
# HACK(nnorwitz): Handle arrays properly.
if len(tokens) >= 2:
tokens.append(name)
name = tokens[1]
new_type = tokens
if tokens and isinstance(tokens[0], tokenize.Token):
new_type = self.converter.ToType(tokens)[0]
return Typedef(indices.start, indices.end, name.name,
new_type, self.namespace_stack)
def handle_typeid(self):
pass # Not needed yet.
def handle_typename(self):
pass # Not needed yet.
def _GetTemplatedTypes(self):
result = {}
tokens = list(self._GetMatchingChar('<', '>'))
len_tokens = len(tokens) - 1 # Ignore trailing '>'.
i = 0
while i < len_tokens:
key = tokens[i].name
i += 1
if keywords.IsKeyword(key) or key == ',':
continue
type_name = default = None
if i < len_tokens:
i += 1
if tokens[i-1].name == '=':
assert i < len_tokens, '%s %s' % (i, tokens)
default, unused_next_token = self.GetName(tokens[i:])
i += len(default)
else:
if tokens[i-1].name != ',':
# We got something like: Type variable.
# Re-adjust the key (variable) and type_name (Type).
key = tokens[i-1].name
type_name = tokens[i-2]
result[key] = (type_name, default)
return result
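    # Illustrative sketch (hypothetical input, not from the original source):
    # for 'template <typename T, typename U = int>' the dict built above is
    # roughly {'T': (None, None), 'U': (None, [<token 'int'>])}, i.e. it maps
    # each parameter name to a (type, default) pair.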
def handle_template(self):
token = self._GetNextToken()
assert token.token_type == tokenize.SYNTAX, token
assert token.name == '<', token
templated_types = self._GetTemplatedTypes()
# TODO(nnorwitz): for now, just ignore the template params.
token = self._GetNextToken()
if token.token_type == tokenize.NAME:
if token.name == 'class':
return self._GetClass(Class, VISIBILITY_PRIVATE, templated_types)
elif token.name == 'struct':
return self._GetClass(Struct, VISIBILITY_PUBLIC, templated_types)
elif token.name == 'friend':
return self.handle_friend()
self._AddBackToken(token)
tokens, last = self._GetVarTokensUpTo(tokenize.SYNTAX, '(', ';')
tokens.append(last)
self._AddBackTokens(tokens)
if last.name == '(':
return self.GetMethod(FUNCTION_NONE, templated_types)
# Must be a variable definition.
return None
def handle_true(self):
pass # Nothing to do.
def handle_false(self):
pass # Nothing to do.
def handle_asm(self):
pass # Not needed yet.
def handle_class(self):
return self._GetClass(Class, VISIBILITY_PRIVATE, None)
def _GetBases(self):
# Get base classes.
bases = []
while 1:
token = self._GetNextToken()
assert token.token_type == tokenize.NAME, token
# TODO(nnorwitz): store kind of inheritance...maybe.
if token.name not in ('public', 'protected', 'private'):
# If inheritance type is not specified, it is private.
# Just put the token back so we can form a name.
# TODO(nnorwitz): it would be good to warn about this.
self._AddBackToken(token)
else:
# Check for virtual inheritance.
token = self._GetNextToken()
if token.name != 'virtual':
self._AddBackToken(token)
else:
# TODO(nnorwitz): store that we got virtual for this base.
pass
base, next_token = self.GetName()
bases_ast = self.converter.ToType(base)
assert len(bases_ast) == 1, bases_ast
bases.append(bases_ast[0])
assert next_token.token_type == tokenize.SYNTAX, next_token
if next_token.name == '{':
token = next_token
break
# Support multiple inheritance.
assert next_token.name == ',', next_token
return bases, token
def _GetClass(self, class_type, visibility, templated_types):
class_name = None
class_token = self._GetNextToken()
if class_token.token_type != tokenize.NAME:
assert class_token.token_type == tokenize.SYNTAX, class_token
token = class_token
else:
# Skip any macro (e.g. storage class specifiers) after the
# 'class' keyword.
next_token = self._GetNextToken()
if next_token.token_type == tokenize.NAME:
self._AddBackToken(next_token)
else:
self._AddBackTokens([class_token, next_token])
name_tokens, token = self.GetName()
class_name = ''.join([t.name for t in name_tokens])
bases = None
if token.token_type == tokenize.SYNTAX:
if token.name == ';':
# Forward declaration.
return class_type(class_token.start, class_token.end,
class_name, None, templated_types, None,
self.namespace_stack)
if token.name in '*&':
# Inline forward declaration. Could be method or data.
name_token = self._GetNextToken()
next_token = self._GetNextToken()
if next_token.name == ';':
# Handle data
modifiers = ['class']
return self._CreateVariable(class_token, name_token.name,
class_name,
modifiers, token.name, None)
else:
# Assume this is a method.
tokens = (class_token, token, name_token, next_token)
self._AddBackTokens(tokens)
return self.GetMethod(FUNCTION_NONE, None)
if token.name == ':':
bases, token = self._GetBases()
body = None
if token.token_type == tokenize.SYNTAX and token.name == '{':
assert token.token_type == tokenize.SYNTAX, token
assert token.name == '{', token
ast = AstBuilder(self.GetScope(), self.filename, class_name,
visibility, self.namespace_stack)
body = list(ast.Generate())
if not self._handling_typedef:
token = self._GetNextToken()
if token.token_type != tokenize.NAME:
assert token.token_type == tokenize.SYNTAX, token
assert token.name == ';', token
else:
new_class = class_type(class_token.start, class_token.end,
class_name, bases, None,
body, self.namespace_stack)
modifiers = []
return self._CreateVariable(class_token,
token.name, new_class,
modifiers, token.name, None)
else:
if not self._handling_typedef:
self.HandleError('non-typedef token', token)
self._AddBackToken(token)
return class_type(class_token.start, class_token.end, class_name,
bases, templated_types, body, self.namespace_stack)
def handle_namespace(self):
token = self._GetNextToken()
# Support anonymous namespaces.
name = None
if token.token_type == tokenize.NAME:
name = token.name
token = self._GetNextToken()
self.namespace_stack.append(name)
assert token.token_type == tokenize.SYNTAX, token
# Create an internal token that denotes when the namespace is complete.
internal_token = tokenize.Token(_INTERNAL_TOKEN, _NAMESPACE_POP,
None, None)
internal_token.whence = token.whence
if token.name == '=':
# TODO(nnorwitz): handle aliasing namespaces.
name, next_token = self.GetName()
assert next_token.name == ';', next_token
self._AddBackToken(internal_token)
else:
assert token.name == '{', token
tokens = list(self.GetScope())
# Replace the trailing } with the internal namespace pop token.
tokens[-1] = internal_token
# Handle namespace with nothing in it.
self._AddBackTokens(tokens)
return None
def handle_using(self):
tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
assert tokens
return Using(tokens[0].start, tokens[0].end, tokens)
def handle_explicit(self):
assert self.in_class
# Nothing much to do.
# TODO(nnorwitz): maybe verify the method name == class name.
# This must be a ctor.
return self.GetMethod(FUNCTION_CTOR, None)
def handle_this(self):
pass # Nothing to do.
def handle_operator(self):
# Pull off the next token(s?) and make that part of the method name.
pass
def handle_sizeof(self):
pass
def handle_case(self):
pass
def handle_switch(self):
pass
def handle_default(self):
token = self._GetNextToken()
assert token.token_type == tokenize.SYNTAX
assert token.name == ':'
def handle_if(self):
pass
def handle_else(self):
pass
def handle_return(self):
tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
if not tokens:
return Return(self.current_token.start, self.current_token.end, None)
return Return(tokens[0].start, tokens[0].end, tokens)
def handle_goto(self):
tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
assert len(tokens) == 1, str(tokens)
return Goto(tokens[0].start, tokens[0].end, tokens[0].name)
def handle_try(self):
pass # Not needed yet.
def handle_catch(self):
pass # Not needed yet.
def handle_throw(self):
pass # Not needed yet.
def handle_while(self):
pass
def handle_do(self):
pass
def handle_for(self):
pass
def handle_break(self):
self._IgnoreUpTo(tokenize.SYNTAX, ';')
def handle_continue(self):
self._IgnoreUpTo(tokenize.SYNTAX, ';')
def BuilderFromSource(source, filename):
"""Utility method that returns an AstBuilder from source code.
Args:
source: 'C++ source code'
filename: 'file1'
Returns:
AstBuilder
"""
return AstBuilder(tokenize.GetTokens(source), filename)
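# Hypothetical usage sketch (not part of the original module): parse a small
# C++ snippet and walk the resulting AST nodes.
#
#   builder = BuilderFromSource('class Foo { int Bar(); };', '<string>')
#   for node in builder.Generate():
#       print(node.name)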
def PrintIdentifiers(filename, should_print):
"""Prints all identifiers for a C++ source file.
Args:
filename: 'file1'
should_print: predicate with signature: bool Function(token)
"""
source = utils.ReadFile(filename, False)
if source is None:
sys.stderr.write('Unable to find: %s\n' % filename)
return
#print('Processing %s' % actual_filename)
builder = BuilderFromSource(source, filename)
try:
for node in builder.Generate():
if should_print(node):
print(node.name)
except KeyboardInterrupt:
return
except:
pass
def PrintAllIdentifiers(filenames, should_print):
"""Prints all identifiers for each C++ source file in filenames.
Args:
filenames: ['file1', 'file2', ...]
should_print: predicate with signature: bool Function(token)
"""
for path in filenames:
        PrintIdentifiers(path, should_print)
def main(argv):
for filename in argv[1:]:
source = utils.ReadFile(filename)
if source is None:
continue
print('Processing %s' % filename)
builder = BuilderFromSource(source, filename)
try:
entire_ast = filter(None, builder.Generate())
except KeyboardInterrupt:
return
except:
# Already printed a warning, print the traceback and continue.
traceback.print_exc()
else:
if utils.DEBUG:
for ast in entire_ast:
print(ast)
if __name__ == '__main__':
main(sys.argv)
|
funkyHat/cloudify-gcp-plugin
|
refs/heads/master
|
cloudify_gcp/compute/tests/test_health_check.py
|
1
|
# -*- coding: utf-8 -*-
########
# Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from mock import patch, MagicMock
from cloudify.exceptions import NonRecoverableError
from .. import health_check
from ...tests import TestGCP
@patch('cloudify_gcp.utils.assure_resource_id_correct', return_value=True)
@patch('cloudify_gcp.gcp.ServiceAccountCredentials.from_json_keyfile_dict')
@patch('cloudify_gcp.utils.get_gcp_resource_name', return_value='valid_name')
@patch('cloudify_gcp.gcp.build')
class TestHealthCheck(TestGCP):
def test_create_http(self, mock_build, *args):
health_check.create(
'name',
'http',
{},
)
mock_build.assert_called_once()
mock_build().httpHealthChecks().insert.assert_called_with(
body={
'description': 'Cloudify generated httpHealthCheck',
'name': 'name'},
project='not really a project'
)
def test_create_https(self, mock_build, *args):
health_check.create(
'name',
'https',
{},
)
mock_build.assert_called_once()
mock_build().httpsHealthChecks().insert.assert_called_with(
body={
'description': 'Cloudify generated httpsHealthCheck',
'name': 'name'},
project='not really a project'
)
@patch('cloudify_gcp.utils.response_to_operation')
def test_delete(self, mock_response, mock_build, *args):
self.ctxmock.instance.runtime_properties.update({
'name': 'delete_name',
'kind': 'compute#httpHealthCheck',
})
operation = MagicMock()
operation.has_finished.return_value = True
mock_response.return_value = operation
health_check.delete()
mock_build.assert_called_once()
mock_build().httpHealthChecks().delete.assert_called_with(
httpHealthCheck='delete_name',
project='not really a project',
)
class TestHealthCheckHelpers(unittest.TestCase):
def test_health_check_of_type_raises(self):
with self.assertRaises(NonRecoverableError) as e:
health_check.health_check_of_type('carrots')
self.assertIn('Unexpected type', e.exception.message)
|
roadmapper/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/amazon/iam_saml_federation.py
|
18
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: iam_saml_federation
version_added: "2.10"
short_description: Maintain IAM SAML federation configuration.
requirements:
- boto3
description:
- Provides a mechanism to manage AWS IAM SAML Identity Federation providers (create/update/delete metadata).
options:
name:
description:
- The name of the provider to create.
required: true
type: str
saml_metadata_document:
description:
- The XML document generated by an identity provider (IdP) that supports SAML 2.0.
type: str
state:
description:
- Whether to create or delete identity provider. If 'present' is specified it will attempt to update the identity provider matching the name field.
default: present
choices: [ "present", "absent" ]
type: str
extends_documentation_fragment:
- aws
- ec2
author:
- Tony (@axc450)
- Aidan Rowe (@aidan-)
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# It is assumed that their matching environment variables are set.
# Creates a new iam saml identity provider if not present
- name: saml provider
iam_saml_federation:
name: example1
# the > below opens an indented block, so no escaping/quoting is needed when in the indentation level under this key
saml_metadata_document: >
<?xml version="1.0"?>...
<md:EntityDescriptor
# Creates a new iam saml identity provider if not present
- name: saml provider
iam_saml_federation:
name: example2
saml_metadata_document: "{{ item }}"
  with_file: /path/to/idp/metadata.xml
# Removes iam saml identity provider
- name: remove saml provider
iam_saml_federation:
name: example3
state: absent
'''
RETURN = '''
saml_provider:
description: Details of the SAML Identity Provider that was created/modified.
type: complex
returned: present
contains:
arn:
description: The ARN of the identity provider.
type: str
returned: present
sample: "arn:aws:iam::123456789012:saml-provider/my_saml_provider"
metadata_document:
description: The XML metadata document that includes information about an identity provider.
type: str
returned: present
create_date:
description: The date and time when the SAML provider was created in ISO 8601 date-time format.
type: str
returned: present
sample: "2017-02-08T04:36:28+00:00"
expire_date:
description: The expiration date and time for the SAML provider in ISO 8601 date-time format.
type: str
returned: present
sample: "2017-02-08T04:36:28+00:00"
'''
try:
import botocore.exceptions
except ImportError:
pass
from ansible.module_utils.aws.core import AnsibleAWSModule
from ansible.module_utils.ec2 import AWSRetry
class SAMLProviderManager:
"""Handles SAML Identity Provider configuration"""
def __init__(self, module):
self.module = module
try:
self.conn = module.client('iam')
except botocore.exceptions.ClientError as e:
self.module.fail_json_aws(e, msg="Unknown boto error")
# use retry decorator for boto3 calls
@AWSRetry.backoff(tries=3, delay=5)
def _list_saml_providers(self):
return self.conn.list_saml_providers()
@AWSRetry.backoff(tries=3, delay=5)
def _get_saml_provider(self, arn):
return self.conn.get_saml_provider(SAMLProviderArn=arn)
@AWSRetry.backoff(tries=3, delay=5)
def _update_saml_provider(self, arn, metadata):
return self.conn.update_saml_provider(SAMLProviderArn=arn, SAMLMetadataDocument=metadata)
@AWSRetry.backoff(tries=3, delay=5)
def _create_saml_provider(self, metadata, name):
return self.conn.create_saml_provider(SAMLMetadataDocument=metadata, Name=name)
@AWSRetry.backoff(tries=3, delay=5)
def _delete_saml_provider(self, arn):
return self.conn.delete_saml_provider(SAMLProviderArn=arn)
def _get_provider_arn(self, name):
providers = self._list_saml_providers()
for p in providers['SAMLProviderList']:
provider_name = p['Arn'].split('/', 1)[1]
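            # e.g. (sample ARN shape from the RETURN docs above):
            # 'arn:aws:iam::123456789012:saml-provider/my_saml_provider'
            # yields the provider name 'my_saml_provider'.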
if name == provider_name:
return p['Arn']
return None
def create_or_update_saml_provider(self, name, metadata):
if not metadata:
self.module.fail_json(msg="saml_metadata_document must be defined for present state")
res = {'changed': False}
try:
arn = self._get_provider_arn(name)
except (botocore.exceptions.ValidationError, botocore.exceptions.ClientError) as e:
self.module.fail_json_aws(e, msg="Could not get the ARN of the identity provider '{0}'".format(name))
if arn: # see if metadata needs updating
try:
resp = self._get_saml_provider(arn)
except (botocore.exceptions.ValidationError, botocore.exceptions.ClientError) as e:
self.module.fail_json_aws(e, msg="Could not retrieve the identity provider '{0}'".format(name))
if metadata.strip() != resp['SAMLMetadataDocument'].strip():
# provider needs updating
res['changed'] = True
if not self.module.check_mode:
try:
resp = self._update_saml_provider(arn, metadata)
res['saml_provider'] = self._build_res(resp['SAMLProviderArn'])
except botocore.exceptions.ClientError as e:
self.module.fail_json_aws(e, msg="Could not update the identity provider '{0}'".format(name))
else: # create
res['changed'] = True
if not self.module.check_mode:
try:
resp = self._create_saml_provider(metadata, name)
res['saml_provider'] = self._build_res(resp['SAMLProviderArn'])
except botocore.exceptions.ClientError as e:
self.module.fail_json_aws(e, msg="Could not create the identity provider '{0}'".format(name))
self.module.exit_json(**res)
def delete_saml_provider(self, name):
res = {'changed': False}
try:
arn = self._get_provider_arn(name)
except (botocore.exceptions.ValidationError, botocore.exceptions.ClientError) as e:
self.module.fail_json_aws(e, msg="Could not get the ARN of the identity provider '{0}'".format(name))
if arn: # delete
res['changed'] = True
if not self.module.check_mode:
try:
self._delete_saml_provider(arn)
except botocore.exceptions.ClientError as e:
self.module.fail_json_aws(e, msg="Could not delete the identity provider '{0}'".format(name))
self.module.exit_json(**res)
def _build_res(self, arn):
saml_provider = self._get_saml_provider(arn)
return {
"arn": arn,
"metadata_document": saml_provider["SAMLMetadataDocument"],
"create_date": saml_provider["CreateDate"].isoformat(),
"expire_date": saml_provider["ValidUntil"].isoformat()
}
def main():
argument_spec = dict(
name=dict(required=True),
saml_metadata_document=dict(default=None, required=False),
state=dict(default='present', required=False, choices=['present', 'absent']),
)
module = AnsibleAWSModule(
argument_spec=argument_spec,
supports_check_mode=True,
required_if=[('state', 'present', ['saml_metadata_document'])]
)
name = module.params['name']
state = module.params.get('state')
saml_metadata_document = module.params.get('saml_metadata_document')
sp_man = SAMLProviderManager(module)
if state == 'present':
sp_man.create_or_update_saml_provider(name, saml_metadata_document)
elif state == 'absent':
sp_man.delete_saml_provider(name)
if __name__ == '__main__':
main()
|
ryfeus/lambda-packs
|
refs/heads/master
|
Keras_tensorflow_nightly/source2.7/tensorflow/tools/api/generator/api/keras/datasets/imdb/__init__.py
|
1
|
"""Imports for Python API.
This file is MACHINE GENERATED! Do not edit.
Generated by: tensorflow/tools/api/generator/create_python_api.py script.
"""
from tensorflow.python.keras._impl.keras.datasets.imdb import get_word_index
from tensorflow.python.keras._impl.keras.datasets.imdb import load_data
|
shakamunyi/neutron-vrrp
|
refs/heads/master
|
neutron/plugins/nec/db/router.py
|
15
|
# Copyright 2013 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sqlalchemy as sa
from sqlalchemy import orm
from sqlalchemy.orm import exc as sa_exc
from neutron.db import l3_db
from neutron.db import models_v2
from neutron.openstack.common import log as logging
LOG = logging.getLogger(__name__)
class RouterProvider(models_v2.model_base.BASEV2):
"""Represents a binding of router_id to provider."""
provider = sa.Column(sa.String(255))
router_id = sa.Column(sa.String(36),
sa.ForeignKey('routers.id', ondelete="CASCADE"),
primary_key=True)
router = orm.relationship(l3_db.Router, uselist=False,
backref=orm.backref('provider', uselist=False,
lazy='joined',
cascade='delete'))
def _get_router_providers_query(query, provider=None, router_ids=None):
if provider:
query = query.filter_by(provider=provider)
if router_ids:
column = RouterProvider.router_id
query = query.filter(column.in_(router_ids))
return query
def get_router_providers(session, provider=None, router_ids=None):
"""Retrieve a list of a pair of router ID and its provider."""
query = session.query(RouterProvider)
query = _get_router_providers_query(query, provider, router_ids)
return [{'provider': router.provider, 'router_id': router.router_id}
for router in query]
def get_routers_by_provider(session, provider, router_ids=None):
"""Retrieve a list of router IDs with the given provider."""
query = session.query(RouterProvider.router_id)
query = _get_router_providers_query(query, provider, router_ids)
return [router[0] for router in query]
def get_router_count_by_provider(session, provider, tenant_id=None):
"""Return the number of routers with the given provider."""
query = session.query(RouterProvider).filter_by(provider=provider)
if tenant_id:
query = (query.join('router').
filter(l3_db.Router.tenant_id == tenant_id))
return query.count()
def get_provider_by_router(session, router_id):
"""Retrieve a provider of the given router."""
try:
binding = (session.query(RouterProvider).
filter_by(router_id=router_id).
one())
except sa_exc.NoResultFound:
return None
return binding.provider
def add_router_provider_binding(session, provider, router_id):
"""Add a router provider association."""
LOG.debug(_("Add provider binding "
"(router=%(router_id)s, provider=%(provider)s)"),
{'router_id': router_id, 'provider': provider})
binding = RouterProvider(provider=provider, router_id=router_id)
session.add(binding)
return binding
|
Tvlistings/tuxtrax
|
refs/heads/master
|
runserver.py
|
2
|
import os
os.environ["DEBUG"] = "true"
import penguicontrax
penguicontrax.init()
app = penguicontrax.app
apppath = os.path.abspath(os.path.dirname(__file__))
extra_dirs = [apppath + '/penguicontrax/templates/js']
extra_files = extra_dirs[:]
for extra_dir in extra_dirs:
for dirname, dirs, files in os.walk(extra_dir):
for filename in files:
filename = os.path.join(dirname, filename)
if os.path.isfile(filename):
extra_files.append(filename)
if __name__ == '__main__':
app.run(host='0.0.0.0', debug=True, extra_files=extra_files)
|
marianotepper/dask
|
refs/heads/master
|
dask/dataframe/tests/test_io.py
|
3
|
import gzip
import pandas as pd
import numpy as np
import pandas.util.testing as tm
import os
import dask
from pframe import pframe
from operator import getitem
from toolz import valmap
import dask.dataframe as dd
from dask.dataframe.io import (read_csv, file_size, categories_and_quantiles,
dataframe_from_ctable, from_array, from_bcolz, infer_header)
from dask.compatibility import StringIO
from dask.utils import filetext, tmpfile
########
# CSVS #
########
text = """
name,amount
Alice,100
Bob,-200
Charlie,300
Dennis,400
Edith,-500
Frank,600
""".strip()
def test_read_csv():
with filetext(text) as fn:
f = read_csv(fn, chunkbytes=30)
assert list(f.columns) == ['name', 'amount']
assert f.npartitions > 1
result = f.compute(get=dask.get).sort('name')
assert (result.values == pd.read_csv(fn).sort('name').values).all()
def test_read_gzip_csv():
with filetext(text.encode(), open=gzip.open) as fn:
f = read_csv(fn, chunkbytes=30, compression='gzip')
assert list(f.columns) == ['name', 'amount']
assert f.npartitions > 1
result = f.compute(get=dask.get).sort('name')
assert (result.values == pd.read_csv(fn, compression='gzip').sort('name').values).all()
def test_file_size():
counts = (len(text), len(text) + text.count('\n'))
with filetext(text) as fn:
assert file_size(fn) in counts
with filetext(text.encode(), open=gzip.open) as fn:
assert file_size(fn, 'gzip') in counts
def test_categories_and_quantiles():
with filetext(text) as fn:
cats, quant = categories_and_quantiles(fn, (), {})
assert list(cats['name']) == ['Alice', 'Bob', 'Charlie', 'Dennis', 'Edith', 'Frank']
cats, quant = categories_and_quantiles(fn, (), {}, index='amount',
chunkbytes=30)
assert len(quant) == 2
assert (-600 < quant).all() and (600 > quant).all()
def test_read_multiple_csv():
try:
with open('_foo.1.csv', 'w') as f:
f.write(text)
with open('_foo.2.csv', 'w') as f:
f.write(text)
df = read_csv('_foo.*.csv')
assert (len(read_csv('_foo.*.csv').compute()) ==
len(read_csv('_foo.1.csv').compute()) * 2)
finally:
os.remove('_foo.1.csv')
os.remove('_foo.2.csv')
def test_read_csv_categorize():
with filetext(text) as fn:
f = read_csv(fn, chunkbytes=30, categorize=True)
assert list(f.dtypes) == ['category', 'i8']
expected = pd.read_csv(fn)
expected['name'] = expected.name.astype('category')
assert (f.dtypes == expected.dtypes).all()
assert len(f.compute().name.cat.categories) == 6
def test_consistent_dtypes():
text = """
name,amount
Alice,100.5
Bob,-200.5
Charlie,300
Dennis,400
Edith,-500
Frank,600
""".strip()
with filetext(text) as fn:
df = read_csv(fn, chunkbytes=30)
assert isinstance(df.amount.sum().compute(), float)
def test_infer_header():
with filetext('name,val\nAlice,100\nNA,200') as fn:
assert infer_header(fn) == True
with filetext('Alice,100\nNA,200') as fn:
assert infer_header(fn) == False
def eq(a, b):
if hasattr(a, 'dask'):
a = a.compute(get=dask.get)
if hasattr(b, 'dask'):
b = b.compute(get=dask.get)
if isinstance(a, pd.DataFrame):
a = a.sort_index()
b = b.sort_index()
tm.assert_frame_equal(a, b)
return True
if isinstance(a, pd.Series):
tm.assert_series_equal(a, b)
return True
assert np.allclose(a, b)
return True
datetime_csv_file = """
name,amount,when
Alice,100,2014-01-01
Bob,200,2014-01-01
Charlie,300,2014-01-01
Dan,400,2014-01-01
""".strip()
def test_read_csv_categorize_with_parse_dates():
with filetext(datetime_csv_file) as fn:
f = read_csv(fn, chunkbytes=30, categorize=True, parse_dates=['when'])
assert list(f.dtypes) == ['category', 'i8', 'M8[ns]']
def test_read_csv_categorize_and_index():
with filetext(text) as fn:
f = read_csv(fn, chunkbytes=20, index='amount')
assert f.index.compute().name == 'amount'
expected = pd.read_csv(fn).set_index('amount')
expected['name'] = expected.name.astype('category')
assert eq(f, expected)
def test_usecols():
with filetext(datetime_csv_file) as fn:
df = read_csv(fn, chunkbytes=30, usecols=['when', 'amount'])
expected = pd.read_csv(fn, usecols=['when', 'amount'])
assert (df.compute().values == expected.values).all()
####################
# Arrays and BColz #
####################
def test_from_array():
x = np.array([(i, i*10) for i in range(10)],
dtype=[('a', 'i4'), ('b', 'i4')])
d = dd.from_array(x, chunksize=4)
assert list(d.columns) == ['a', 'b']
assert d.divisions == (4, 8)
assert (d.compute().to_records(index=False) == x).all()
def test_from_bcolz():
try:
import bcolz
except ImportError:
return
t = bcolz.ctable([[1, 2, 3], [1., 2., 3.], ['a', 'b', 'a']],
names=['x', 'y', 'a'])
d = dd.from_bcolz(t, chunksize=2)
assert d.npartitions == 2
assert str(d.dtypes['a']) == 'category'
assert list(d.x.compute(get=dask.get)) == [1, 2, 3]
assert list(d.a.compute(get=dask.get)) == ['a', 'b', 'a']
d = dd.from_bcolz(t, chunksize=2, index='x')
assert list(d.index.compute()) == [1, 2, 3]
def test_from_bcolz_filename():
try:
import bcolz
except ImportError:
return
with tmpfile('.bcolz') as fn:
t = bcolz.ctable([[1, 2, 3], [1., 2., 3.], ['a', 'b', 'a']],
names=['x', 'y', 'a'],
rootdir=fn)
t.flush()
d = dd.from_bcolz(fn, chunksize=2)
assert list(d.x.compute()) == [1, 2, 3]
#####################
# Play with PFrames #
#####################
dsk = {('x', 0): pd.DataFrame({'a': [1, 2, 3], 'b': [4, 5, 6]},
index=[0, 1, 3]),
('x', 1): pd.DataFrame({'a': [4, 5, 6], 'b': [3, 2, 1]},
index=[5, 6, 8]),
('x', 2): pd.DataFrame({'a': [7, 8, 9], 'b': [0, 0, 0]},
index=[9, 9, 9])}
dfs = list(dsk.values())
pf = pframe(like=dfs[0], divisions=[5])
for df in dfs:
pf.append(df)
def test_from_pframe():
d = dd.from_pframe(pf)
assert list(d.columns) == list(dfs[0].columns)
assert list(d.divisions) == list(pf.divisions)
def test_column_store_from_pframe():
d = dd.from_pframe(pf)
assert eq(d[['a']].head(), pd.DataFrame({'a': [1, 2, 3]}, index=[0, 1, 3]))
assert eq(d.a.head(), pd.Series([1, 2, 3], index=[0, 1, 3], name='a'))
def test_skipinitialspace():
text = """
name, amount
Alice,100
Bob,-200
Charlie,300
Dennis,400
Edith,-500
Frank,600
""".strip()
with filetext(text) as fn:
df = dd.read_csv(fn, skipinitialspace=True, chunkbytes=20)
assert 'amount' in df.columns
assert df.amount.max().compute() == 600
def test_consistent_dtypes_across_files():
text1 = """
name,amount
Alice,100
Bob,-200
Charlie,300
""".strip()
text2 = """
name,amount
1,400
2,-500
Frank,600
""".strip()
try:
with open('_foo.1.csv', 'w') as f:
f.write(text1)
with open('_foo.2.csv', 'w') as f:
f.write(text2)
df = dd.read_csv('_foo.*.csv', chunkbytes=25)
assert df.amount.max().compute() == 600
finally:
os.remove('_foo.1.csv')
os.remove('_foo.2.csv')
|
guileschool/BEAGLEBONE-tutorials
|
refs/heads/master
|
BBB-firmware/u-boot-v2018.05-rc2/tools/dtoc/dtb_platdata.py
|
1
|
#!/usr/bin/python
#
# Copyright (C) 2017 Google, Inc
# Written by Simon Glass <sjg@chromium.org>
#
# SPDX-License-Identifier: GPL-2.0+
#
"""Device tree to platform data class
This supports converting device tree data to C structures definitions and
static data.
"""
import collections
import copy
import sys
import fdt
import fdt_util
# When we see these properties we ignore them - i.e. do not create a structure member
PROP_IGNORE_LIST = [
'#address-cells',
'#gpio-cells',
'#size-cells',
'compatible',
'linux,phandle',
"status",
'phandle',
'u-boot,dm-pre-reloc',
'u-boot,dm-tpl',
'u-boot,dm-spl',
]
# C type declarations for the types we support
TYPE_NAMES = {
fdt.TYPE_INT: 'fdt32_t',
fdt.TYPE_BYTE: 'unsigned char',
fdt.TYPE_STRING: 'const char *',
fdt.TYPE_BOOL: 'bool',
fdt.TYPE_INT64: 'fdt64_t',
}
STRUCT_PREFIX = 'dtd_'
VAL_PREFIX = 'dtv_'
# This holds information about a property which includes phandles.
#
# max_args: integer: Maximum number of arguments that any phandle uses (int).
# args: Number of args for each phandle in the property. The total number of
# phandles is len(args). This is a list of integers.
PhandleInfo = collections.namedtuple('PhandleInfo', ['max_args', 'args'])
def conv_name_to_c(name):
"""Convert a device-tree name to a C identifier
This uses multiple replace() calls instead of re.sub() since it is faster
(400ms for 1m calls versus 1000ms for the 're' version).
Args:
name: Name to convert
Return:
String containing the C version of this name
"""
new = name.replace('@', '_at_')
new = new.replace('-', '_')
new = new.replace(',', '_')
new = new.replace('.', '_')
return new
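# Quick illustration (hypothetical node name): conv_name_to_c('serial@65,1')
# returns 'serial_at_65_1' -- '@' becomes '_at_' while '-', ',' and '.' all
# become '_'.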
def tab_to(num_tabs, line):
"""Append tabs to a line of text to reach a tab stop.
Args:
num_tabs: Tab stop to obtain (0 = column 0, 1 = column 8, etc.)
line: Line of text to append to
Returns:
        line with the correct number of tabs appended. If the line already
extends past that tab stop then a single space is appended.
"""
if len(line) >= num_tabs * 8:
return line + ' '
return line + '\t' * (num_tabs - len(line) // 8)
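# Worked example (illustrative): tab_to(2, 'int') pads with two tabs to reach
# column 16, giving 'int\t\t', while a line already past that tab stop just
# gets a single trailing space.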
def get_value(ftype, value):
"""Get a value as a C expression
For integers this returns a byte-swapped (little-endian) hex string
For bytes this returns a hex string, e.g. 0x12
For strings this returns a literal string enclosed in quotes
For booleans this return 'true'
Args:
type: Data type (fdt_util)
value: Data value, as a string of bytes
"""
if ftype == fdt.TYPE_INT:
return '%#x' % fdt_util.fdt32_to_cpu(value)
elif ftype == fdt.TYPE_BYTE:
return '%#x' % ord(value[0])
elif ftype == fdt.TYPE_STRING:
return '"%s"' % value
elif ftype == fdt.TYPE_BOOL:
return 'true'
elif ftype == fdt.TYPE_INT64:
return '%#x' % value
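# Sketch of the mapping (illustrative values): an fdt.TYPE_INT cell holding 32
# becomes '0x20', a TYPE_STRING 'okay' becomes '"okay"', and a TYPE_BOOL value
# always renders as 'true'.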
def get_compat_name(node):
"""Get a node's first compatible string as a C identifier
Args:
node: Node object to check
Return:
Tuple:
C identifier for the first compatible string
List of C identifiers for all the other compatible strings
(possibly empty)
"""
compat = node.props['compatible'].value
aliases = []
if isinstance(compat, list):
compat, aliases = compat[0], compat[1:]
return conv_name_to_c(compat), [conv_name_to_c(a) for a in aliases]
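# e.g. (hypothetical node): compatible = ["foo,bar-baz", "foo,qux"] yields
# ('foo_bar_baz', ['foo_qux']).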
class DtbPlatdata(object):
"""Provide a means to convert device tree binary data to platform data
The output of this process is C structures which can be used in space-
    constrained environments where the ~3KB code overhead of device tree
code is not affordable.
Properties:
_fdt: Fdt object, referencing the device tree
_dtb_fname: Filename of the input device tree binary file
_valid_nodes: A list of Node object with compatible strings
_include_disabled: true to include nodes marked status = "disabled"
_outfile: The current output file (sys.stdout or a real file)
_lines: Stashed list of output lines for outputting in the future
"""
def __init__(self, dtb_fname, include_disabled):
self._fdt = None
self._dtb_fname = dtb_fname
self._valid_nodes = None
self._include_disabled = include_disabled
self._outfile = None
self._lines = []
self._aliases = {}
def setup_output(self, fname):
"""Set up the output destination
Once this is done, future calls to self.out() will output to this
file.
Args:
fname: Filename to send output to, or '-' for stdout
"""
if fname == '-':
self._outfile = sys.stdout
else:
self._outfile = open(fname, 'w')
def out(self, line):
"""Output a string to the output file
Args:
line: String to output
"""
self._outfile.write(line)
def buf(self, line):
"""Buffer up a string to send later
Args:
line: String to add to our 'buffer' list
"""
self._lines.append(line)
def get_buf(self):
"""Get the contents of the output buffer, and clear it
Returns:
The output buffer, which is then cleared for future use
"""
lines = self._lines
self._lines = []
return lines
def out_header(self):
"""Output a message indicating that this is an auto-generated file"""
self.out('''/*
* DO NOT MODIFY
*
* This file was generated by dtoc from a .dtb (device tree binary) file.
*/
''')
def get_phandle_argc(self, prop, node_name):
"""Check if a node contains phandles
We have no reliable way of detecting whether a node uses a phandle
or not. As an interim measure, use a list of known property names.
Args:
prop: Prop object to check
Return:
            Number of argument cells if this is a phandle, else None
"""
if prop.name in ['clocks']:
val = prop.value
if not isinstance(val, list):
val = [val]
i = 0
max_args = 0
args = []
while i < len(val):
phandle = fdt_util.fdt32_to_cpu(val[i])
target = self._fdt.phandle_to_node.get(phandle)
if not target:
raise ValueError("Cannot parse '%s' in node '%s'" %
(prop.name, node_name))
prop_name = '#clock-cells'
cells = target.props.get(prop_name)
if not cells:
raise ValueError("Node '%s' has no '%s' property" %
(target.name, prop_name))
num_args = fdt_util.fdt32_to_cpu(cells.value)
max_args = max(max_args, num_args)
args.append(num_args)
i += 1 + num_args
return PhandleInfo(max_args, args)
return None
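    # Worked example (hypothetical device tree): for a property
    # 'clocks = <&clk_a 1 &clk_b 2 3>;' where clk_a has '#clock-cells = <1>'
    # and clk_b has '#clock-cells = <2>', get_phandle_argc() returns
    # PhandleInfo(max_args=2, args=[1, 2]).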
def scan_dtb(self):
"""Scan the device tree to obtain a tree of nodes and properties
Once this is done, self._fdt.GetRoot() can be called to obtain the
device tree root node, and progress from there.
"""
self._fdt = fdt.FdtScan(self._dtb_fname)
def scan_node(self, root):
"""Scan a node and subnodes to build a tree of node and phandle info
This adds each node to self._valid_nodes.
Args:
root: Root node for scan
"""
for node in root.subnodes:
if 'compatible' in node.props:
status = node.props.get('status')
if (not self._include_disabled and not status or
status.value != 'disabled'):
self._valid_nodes.append(node)
# recurse to handle any subnodes
self.scan_node(node)
def scan_tree(self):
"""Scan the device tree for useful information
This fills in the following properties:
            _valid_nodes: A list of nodes we wish to consider including in the
platform data
"""
self._valid_nodes = []
return self.scan_node(self._fdt.GetRoot())
@staticmethod
def get_num_cells(node):
"""Get the number of cells in addresses and sizes for this node
Args:
node: Node to check
Returns:
Tuple:
Number of address cells for this node
Number of size cells for this node
"""
parent = node.parent
na, ns = 2, 2
if parent:
na_prop = parent.props.get('#address-cells')
ns_prop = parent.props.get('#size-cells')
if na_prop:
na = fdt_util.fdt32_to_cpu(na_prop.value)
if ns_prop:
ns = fdt_util.fdt32_to_cpu(ns_prop.value)
return na, ns
def scan_reg_sizes(self):
"""Scan for 64-bit 'reg' properties and update the values
This finds 'reg' properties with 64-bit data and converts the value to
an array of 64-values. This allows it to be output in a way that the
C code can read.
"""
for node in self._valid_nodes:
reg = node.props.get('reg')
if not reg:
continue
na, ns = self.get_num_cells(node)
total = na + ns
if reg.type != fdt.TYPE_INT:
raise ValueError("Node '%s' reg property is not an int")
if len(reg.value) % total:
raise ValueError("Node '%s' reg property has %d cells "
'which is not a multiple of na + ns = %d + %d)' %
(node.name, len(reg.value), na, ns))
reg.na = na
reg.ns = ns
if na != 1 or ns != 1:
reg.type = fdt.TYPE_INT64
i = 0
new_value = []
val = reg.value
if not isinstance(val, list):
val = [val]
while i < len(val):
addr = fdt_util.fdt_cells_to_cpu(val[i:], reg.na)
i += na
size = fdt_util.fdt_cells_to_cpu(val[i:], reg.ns)
i += ns
new_value += [addr, size]
reg.value = new_value
def scan_structs(self):
"""Scan the device tree building up the C structures we will use.
Build a dict keyed by C struct name containing a dict of Prop
object for each struct field (keyed by property name). Where the
same struct appears multiple times, try to use the 'widest'
property, i.e. the one with a type which can express all others.
Once the widest property is determined, all other properties are
updated to match that width.
"""
structs = {}
for node in self._valid_nodes:
node_name, _ = get_compat_name(node)
fields = {}
# Get a list of all the valid properties in this node.
for name, prop in node.props.items():
if name not in PROP_IGNORE_LIST and name[0] != '#':
fields[name] = copy.deepcopy(prop)
# If we've seen this node_name before, update the existing struct.
if node_name in structs:
struct = structs[node_name]
for name, prop in fields.items():
oldprop = struct.get(name)
if oldprop:
oldprop.Widen(prop)
else:
struct[name] = prop
# Otherwise store this as a new struct.
else:
structs[node_name] = fields
upto = 0
for node in self._valid_nodes:
node_name, _ = get_compat_name(node)
struct = structs[node_name]
for name, prop in node.props.items():
if name not in PROP_IGNORE_LIST and name[0] != '#':
prop.Widen(struct[name])
upto += 1
struct_name, aliases = get_compat_name(node)
for alias in aliases:
self._aliases[alias] = struct_name
return structs
def scan_phandles(self):
"""Figure out what phandles each node uses
        We need to be careful when outputting nodes that use phandles since
they must come after the declaration of the phandles in the C file.
Otherwise we get a compiler error since the phandle struct is not yet
declared.
This function adds to each node a list of phandle nodes that the node
depends on. This allows us to output things in the right order.
"""
for node in self._valid_nodes:
node.phandles = set()
for pname, prop in node.props.items():
if pname in PROP_IGNORE_LIST or pname[0] == '#':
continue
info = self.get_phandle_argc(prop, node.name)
if info:
if not isinstance(prop.value, list):
prop.value = [prop.value]
# Process the list as pairs of (phandle, id)
pos = 0
for args in info.args:
phandle_cell = prop.value[pos]
phandle = fdt_util.fdt32_to_cpu(phandle_cell)
target_node = self._fdt.phandle_to_node[phandle]
node.phandles.add(target_node)
pos += 1 + args
def generate_structs(self, structs):
"""Generate struct defintions for the platform data
This writes out the body of a header file consisting of structure
definitions for node in self._valid_nodes. See the documentation in
README.of-plat for more information.
"""
self.out_header()
self.out('#include <stdbool.h>\n')
self.out('#include <linux/libfdt.h>\n')
# Output the struct definition
for name in sorted(structs):
self.out('struct %s%s {\n' % (STRUCT_PREFIX, name))
for pname in sorted(structs[name]):
prop = structs[name][pname]
                info = self.get_phandle_argc(prop, name)
if info:
# For phandles, include a reference to the target
struct_name = 'struct phandle_%d_arg' % info.max_args
self.out('\t%s%s[%d]' % (tab_to(2, struct_name),
conv_name_to_c(prop.name),
len(info.args)))
else:
ptype = TYPE_NAMES[prop.type]
self.out('\t%s%s' % (tab_to(2, ptype),
conv_name_to_c(prop.name)))
if isinstance(prop.value, list):
self.out('[%d]' % len(prop.value))
self.out(';\n')
self.out('};\n')
for alias, struct_name in self._aliases.iteritems():
self.out('#define %s%s %s%s\n'% (STRUCT_PREFIX, alias,
STRUCT_PREFIX, struct_name))
def output_node(self, node):
"""Output the C code for a node
Args:
node: node to output
"""
struct_name, _ = get_compat_name(node)
var_name = conv_name_to_c(node.name)
self.buf('static struct %s%s %s%s = {\n' %
(STRUCT_PREFIX, struct_name, VAL_PREFIX, var_name))
for pname, prop in node.props.items():
if pname in PROP_IGNORE_LIST or pname[0] == '#':
continue
member_name = conv_name_to_c(prop.name)
self.buf('\t%s= ' % tab_to(3, '.' + member_name))
# Special handling for lists
if isinstance(prop.value, list):
self.buf('{')
vals = []
# For phandles, output a reference to the platform data
# of the target node.
info = self.get_phandle_argc(prop, node.name)
if info:
# Process the list as pairs of (phandle, id)
pos = 0
for args in info.args:
phandle_cell = prop.value[pos]
phandle = fdt_util.fdt32_to_cpu(phandle_cell)
target_node = self._fdt.phandle_to_node[phandle]
name = conv_name_to_c(target_node.name)
arg_values = []
for i in range(args):
arg_values.append(str(fdt_util.fdt32_to_cpu(prop.value[pos + 1 + i])))
pos += 1 + args
vals.append('\t{&%s%s, {%s}}' % (VAL_PREFIX, name,
', '.join(arg_values)))
for val in vals:
self.buf('\n\t\t%s,' % val)
else:
for val in prop.value:
vals.append(get_value(prop.type, val))
# Put 8 values per line to avoid very long lines.
for i in xrange(0, len(vals), 8):
if i:
self.buf(',\n\t\t')
self.buf(', '.join(vals[i:i + 8]))
self.buf('}')
else:
self.buf(get_value(prop.type, prop.value))
self.buf(',\n')
self.buf('};\n')
# Add a device declaration
self.buf('U_BOOT_DEVICE(%s) = {\n' % var_name)
self.buf('\t.name\t\t= "%s",\n' % struct_name)
self.buf('\t.platdata\t= &%s%s,\n' % (VAL_PREFIX, var_name))
self.buf('\t.platdata_size\t= sizeof(%s%s),\n' % (VAL_PREFIX, var_name))
self.buf('};\n')
self.buf('\n')
self.out(''.join(self.get_buf()))
def generate_tables(self):
"""Generate device defintions for the platform data
This writes out C platform data initialisation data and
U_BOOT_DEVICE() declarations for each valid node. Where a node has
multiple compatible strings, a #define is used to make them equivalent.
See the documentation in doc/driver-model/of-plat.txt for more
information.
"""
self.out_header()
self.out('#include <common.h>\n')
self.out('#include <dm.h>\n')
self.out('#include <dt-structs.h>\n')
self.out('\n')
nodes_to_output = list(self._valid_nodes)
        # Keep outputting nodes until none are left
while nodes_to_output:
node = nodes_to_output[0]
# Output all the node's dependencies first
for req_node in node.phandles:
if req_node in nodes_to_output:
self.output_node(req_node)
nodes_to_output.remove(req_node)
self.output_node(node)
nodes_to_output.remove(node)
def run_steps(args, dtb_file, include_disabled, output):
"""Run all the steps of the dtoc tool
Args:
        args: List of non-option arguments provided to the program
dtb_file: Filename of dtb file to process
include_disabled: True to include disabled nodes
output: Name of output file
"""
if not args:
raise ValueError('Please specify a command: struct, platdata')
plat = DtbPlatdata(dtb_file, include_disabled)
plat.scan_dtb()
plat.scan_tree()
plat.scan_reg_sizes()
plat.setup_output(output)
structs = plat.scan_structs()
plat.scan_phandles()
for cmd in args[0].split(','):
if cmd == 'struct':
plat.generate_structs(structs)
elif cmd == 'platdata':
plat.generate_tables()
else:
raise ValueError("Unknown command '%s': (use: struct, platdata)" %
cmd)
|
alexcuellar/odoo
|
refs/heads/8.0
|
addons/l10n_es/migrations/8.0.5.1/pre-migration.py
|
52
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2015 Serv. Tecnol. Avanz. (<http://www.serviciosbaeza.com>)
# Pedro M. Baeza <pedro.baeza@serviciosbaeza.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
__name__ = u"Renombrar impuestos y posiciones fiscales"
def rename_fiscal_positions(cr):
fp_mapping = {
'Retención IRPF 19% Arrendamientos': 'Retención 19% arrendamientos',
'Retención IRPF 20% Arrendamientos': 'Retención 20% arrendamientos',
'Retención IRPF 21% Arrendamientos': 'Retención 21% arrendamientos',
}
for fp_old, fp_new in fp_mapping.iteritems():
cr.execute(
"""
UPDATE account_fiscal_position
SET name=%s
WHERE name=%s
""", (fp_new, fp_old))
def rename_taxes(cr):
tax_mapping = {
'S_IRPF19A': 'S_RAC19A',
'S_IRPF20A': 'S_RAC20A',
'S_IRPF21A': 'S_RAC21A',
'P_IRPF19A': 'P_RAC19A',
'P_IRPF20A': 'P_RAC20A',
'P_IRPF21A': 'P_RAC21A',
}
for tax_old, tax_new in tax_mapping.iteritems():
cr.execute(
"""
UPDATE account_tax
SET description=%s
WHERE description=%s""", (tax_new, tax_old))
def migrate(cr, version):
if not version:
return
rename_fiscal_positions(cr)
rename_taxes(cr)
|
theotherphp/relay
|
refs/heads/master
|
relay_rfid.py
|
1
|
"""
Read and write RFID tags for the Relay app
"""
import argparse
import logging
from signal import signal, SIGTERM, SIGINT
import sys
import time
import mercury
from relay_config import Config
from relay_websocket import RelayWebsocket
logging.basicConfig(
filename='rfid.log',
level=logging.INFO,
format='%(asctime)s %(levelname)s %(message)s'
)
cfg = Config()
ws = None
dedup_cache = {}
DEDUP_THRESHOLD = 5.0
def post(epc_obj):
epc = repr(epc_obj).strip('\'') # ugh
hex_numbers = [epc[i:i+2] for i in range(0, len(epc), 2)]
chars = [chr(int(ch, 16)) for ch in hex_numbers]
tag = ''.join(chars)
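    # e.g. (illustrative tag value): an EPC of '74616731' decodes pairwise to
    # chr(0x74) chr(0x61) chr(0x67) chr(0x31) == 'tag1'.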
now = time.time()
if now - dedup_cache.get(tag, 0.0) > DEDUP_THRESHOLD:
dedup_cache[tag] = now
if ws:
ws.send(tag)
else:
logging.debug('duplicate read %s' % tag)
def sig_handler(sig, frame):
logging.info('caught signal %d' % sig)
sys.exit(0)
if __name__ == '__main__':
logging.info('starting')
reader = None
    signal(SIGTERM, sig_handler)
    signal(SIGINT, sig_handler)
parser = argparse.ArgumentParser(description='Relay RFID reader/writer')
parser.add_argument('--write-range', default='', help='batch write tags')
ns = parser.parse_args()
try:
reader = mercury.Reader('tmr:///dev/ttyUSB0')
reader.set_region('NA2')
pwr = 500 if ns.write_range else 2600 # hundredths of dBm
reader.set_read_plan([1], 'GEN2', read_power=pwr)
if ns.write_range:
rng = ns.write_range.split('-')
for t in range(int(rng[0]), int(rng[1]) + 1):
time.sleep(5)
reader.write(str(t).zfill(4))
logging.info('wrote %d' % t)
else:
reader.start_reading(post, on_time=250, off_time=250)
ws = RelayWebsocket()
except Exception as e:
logging.error(str(e))
try:
if not ns.write_range:
while True:
time.sleep(60)
finally:
logging.info('exiting')
if reader:
reader.stop_reading()
if ws:
ws.close()
|
Volune/npm
|
refs/heads/master
|
node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
|
1407
|
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This module helps emulate Visual Studio 2008 behavior on top of other
build systems, primarily ninja.
"""
import os
import re
import subprocess
import sys
from gyp.common import OrderedSet
import gyp.MSVSUtil
import gyp.MSVSVersion
windows_quoter_regex = re.compile(r'(\\*)"')
def QuoteForRspFile(arg):
"""Quote a command line argument so that it appears as one argument when
processed via cmd.exe and parsed by CommandLineToArgvW (as is typical for
Windows programs)."""
# See http://goo.gl/cuFbX and http://goo.gl/dhPnp including the comment
# threads. This is actually the quoting rules for CommandLineToArgvW, not
# for the shell, because the shell doesn't do anything in Windows. This
# works more or less because most programs (including the compiler, etc.)
# use that function to handle command line arguments.
# For a literal quote, CommandLineToArgvW requires 2n+1 backslashes
# preceding it, and results in n backslashes + the quote. So we substitute
# in 2* what we match, +1 more, plus the quote.
arg = windows_quoter_regex.sub(lambda mo: 2 * mo.group(1) + '\\"', arg)
  # %'s also need to be doubled, otherwise they're interpreted as batch-file
  # positional arguments. Doubling also lets the % survive later escaping
  # layers as a single literal %, so that something that merely looks like an
  # environment variable (e.g. %PATH%) isn't expanded by the shell.
  arg = arg.replace('%', '%%')
# These commands are used in rsp files, so no escaping for the shell (via ^)
# is necessary.
# Finally, wrap the whole thing in quotes so that the above quote rule
# applies and whitespace isn't a word break.
return '"' + arg + '"'
def EncodeRspFileList(args):
"""Process a list of arguments using QuoteCmdExeArgument."""
# Note that the first argument is assumed to be the command. Don't add
# quotes around it because then built-ins like 'echo', etc. won't work.
# Take care to normpath only the path in the case of 'call ../x.bat' because
# otherwise the whole thing is incorrectly interpreted as a path and not
# normalized correctly.
if not args: return ''
if args[0].startswith('call '):
call, program = args[0].split(' ', 1)
program = call + ' ' + os.path.normpath(program)
else:
program = os.path.normpath(args[0])
return program + ' ' + ' '.join(QuoteForRspFile(arg) for arg in args[1:])
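# e.g. (assumed inputs) EncodeRspFileList(['cl.exe', '/Fo out', 'a.cc']) returns
# 'cl.exe "/Fo out" "a.cc"': the command itself stays unquoted so shell
# built-ins keep working, while the remaining args go through QuoteForRspFile.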
def _GenericRetrieve(root, default, path):
"""Given a list of dictionary keys |path| and a tree of dicts |root|, find
value at path, or return |default| if any of the path doesn't exist."""
if not root:
return default
if not path:
return root
return _GenericRetrieve(root.get(path[0]), default, path[1:])
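# e.g. _GenericRetrieve({'a': {'b': 1}}, 0, ['a', 'b']) == 1, whereas
# _GenericRetrieve({'a': {}}, 0, ['a', 'b']) == 0 because the empty dict found
# at 'a' is falsy and therefore treated as missing.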
def _AddPrefix(element, prefix):
"""Add |prefix| to |element| or each subelement if element is iterable."""
if element is None:
return element
# Note, not Iterable because we don't want to handle strings like that.
if isinstance(element, list) or isinstance(element, tuple):
return [prefix + e for e in element]
else:
return prefix + element
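# e.g. _AddPrefix(['foo', 'bar'], '/I') -> ['/Ifoo', '/Ibar'];
#      _AddPrefix('foo', '/I') -> '/Ifoo'.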
def _DoRemapping(element, map):
"""If |element| then remap it through |map|. If |element| is iterable then
each item will be remapped. Any elements not found will be removed."""
if map is not None and element is not None:
if not callable(map):
      map = map.get  # If |map| isn't callable, assume it's a dict and remap via its .get.
if isinstance(element, list) or isinstance(element, tuple):
element = filter(None, [map(elem) for elem in element])
else:
element = map(element)
return element
def _AppendOrReturn(append, element):
"""If |append| is None, simply return |element|. If |append| is not None,
then add |element| to it, adding each item in |element| if it's a list or
tuple."""
if append is not None and element is not None:
if isinstance(element, list) or isinstance(element, tuple):
append.extend(element)
else:
append.append(element)
else:
return element
def _FindDirectXInstallation():
"""Try to find an installation location for the DirectX SDK. Check for the
standard environment variable, and if that doesn't exist, try to find
via the registry. May return None if not found in either location."""
# Return previously calculated value, if there is one
if hasattr(_FindDirectXInstallation, 'dxsdk_dir'):
return _FindDirectXInstallation.dxsdk_dir
dxsdk_dir = os.environ.get('DXSDK_DIR')
if not dxsdk_dir:
# Setup params to pass to and attempt to launch reg.exe.
cmd = ['reg.exe', 'query', r'HKLM\Software\Microsoft\DirectX', '/s']
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
for line in p.communicate()[0].splitlines():
if 'InstallPath' in line:
dxsdk_dir = line.split(' ')[3] + "\\"
# Cache return value
_FindDirectXInstallation.dxsdk_dir = dxsdk_dir
return dxsdk_dir
def GetGlobalVSMacroEnv(vs_version):
"""Get a dict of variables mapping internal VS macro names to their gyp
equivalents. Returns all variables that are independent of the target."""
env = {}
# '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when
# Visual Studio is actually installed.
if vs_version.Path():
env['$(VSInstallDir)'] = vs_version.Path()
env['$(VCInstallDir)'] = os.path.join(vs_version.Path(), 'VC') + '\\'
# Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be
# set. This happens when the SDK is sync'd via src-internal, rather than
# by typical end-user installation of the SDK. If it's not set, we don't
# want to leave the unexpanded variable in the path, so simply strip it.
dxsdk_dir = _FindDirectXInstallation()
env['$(DXSDK_DIR)'] = dxsdk_dir if dxsdk_dir else ''
# Try to find an installation location for the Windows DDK by checking
# the WDK_DIR environment variable, may be None.
env['$(WDK_DIR)'] = os.environ.get('WDK_DIR', '')
return env
def ExtractSharedMSVSSystemIncludes(configs, generator_flags):
"""Finds msvs_system_include_dirs that are common to all targets, removes
them from all targets, and returns an OrderedSet containing them."""
all_system_includes = OrderedSet(
configs[0].get('msvs_system_include_dirs', []))
for config in configs[1:]:
system_includes = config.get('msvs_system_include_dirs', [])
all_system_includes = all_system_includes & OrderedSet(system_includes)
if not all_system_includes:
return None
# Expand macros in all_system_includes.
env = GetGlobalVSMacroEnv(GetVSVersion(generator_flags))
expanded_system_includes = OrderedSet([ExpandMacros(include, env)
for include in all_system_includes])
if any(['$' in include for include in expanded_system_includes]):
# Some path relies on target-specific variables, bail.
return None
# Remove system includes shared by all targets from the targets.
for config in configs:
includes = config.get('msvs_system_include_dirs', [])
if includes: # Don't insert a msvs_system_include_dirs key if not needed.
# This must check the unexpanded includes list:
new_includes = [i for i in includes if i not in all_system_includes]
config['msvs_system_include_dirs'] = new_includes
return expanded_system_includes
class MsvsSettings(object):
"""A class that understands the gyp 'msvs_...' values (especially the
  msvs_settings field). They largely correspond to the VS2008 IDE DOM. This
class helps map those settings to command line options."""
def __init__(self, spec, generator_flags):
self.spec = spec
self.vs_version = GetVSVersion(generator_flags)
supported_fields = [
('msvs_configuration_attributes', dict),
('msvs_settings', dict),
('msvs_system_include_dirs', list),
('msvs_disabled_warnings', list),
('msvs_precompiled_header', str),
('msvs_precompiled_source', str),
('msvs_configuration_platform', str),
('msvs_target_platform', str),
]
configs = spec['configurations']
for field, default in supported_fields:
setattr(self, field, {})
for configname, config in configs.iteritems():
getattr(self, field)[configname] = config.get(field, default())
self.msvs_cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])
unsupported_fields = [
'msvs_prebuild',
'msvs_postbuild',
]
unsupported = []
for field in unsupported_fields:
for config in configs.values():
if field in config:
unsupported += ["%s not supported (target %s)." %
(field, spec['target_name'])]
if unsupported:
raise Exception('\n'.join(unsupported))
def GetExtension(self):
"""Returns the extension for the target, with no leading dot.
Uses 'product_extension' if specified, otherwise uses MSVS defaults based on
the target type.
"""
ext = self.spec.get('product_extension', None)
if ext:
return ext
return gyp.MSVSUtil.TARGET_TYPE_EXT.get(self.spec['type'], '')
def GetVSMacroEnv(self, base_to_build=None, config=None):
"""Get a dict of variables mapping internal VS macro names to their gyp
equivalents."""
target_platform = 'Win32' if self.GetArch(config) == 'x86' else 'x64'
target_name = self.spec.get('product_prefix', '') + \
self.spec.get('product_name', self.spec['target_name'])
target_dir = base_to_build + '\\' if base_to_build else ''
target_ext = '.' + self.GetExtension()
target_file_name = target_name + target_ext
replacements = {
'$(InputName)': '${root}',
'$(InputPath)': '${source}',
'$(IntDir)': '$!INTERMEDIATE_DIR',
'$(OutDir)\\': target_dir,
'$(PlatformName)': target_platform,
'$(ProjectDir)\\': '',
'$(ProjectName)': self.spec['target_name'],
'$(TargetDir)\\': target_dir,
'$(TargetExt)': target_ext,
'$(TargetFileName)': target_file_name,
'$(TargetName)': target_name,
'$(TargetPath)': os.path.join(target_dir, target_file_name),
}
replacements.update(GetGlobalVSMacroEnv(self.vs_version))
return replacements
def ConvertVSMacros(self, s, base_to_build=None, config=None):
"""Convert from VS macro names to something equivalent."""
env = self.GetVSMacroEnv(base_to_build, config=config)
return ExpandMacros(s, env)
def AdjustLibraries(self, libraries):
"""Strip -l from library if it's specified with that."""
libs = [lib[2:] if lib.startswith('-l') else lib for lib in libraries]
return [lib + '.lib' if not lib.endswith('.lib') else lib for lib in libs]
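  # e.g. (illustrative) AdjustLibraries(['-lfoo', 'bar.lib']) -> ['foo.lib', 'bar.lib'].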
def _GetAndMunge(self, field, path, default, prefix, append, map):
"""Retrieve a value from |field| at |path| or return |default|. If
|append| is specified, and the item is found, it will be appended to that
object instead of returned. If |map| is specified, results will be
remapped through |map| before being returned or appended."""
result = _GenericRetrieve(field, default, path)
result = _DoRemapping(result, map)
result = _AddPrefix(result, prefix)
return _AppendOrReturn(append, result)
class _GetWrapper(object):
def __init__(self, parent, field, base_path, append=None):
self.parent = parent
self.field = field
self.base_path = [base_path]
self.append = append
def __call__(self, name, map=None, prefix='', default=None):
return self.parent._GetAndMunge(self.field, self.base_path + [name],
default=default, prefix=prefix, append=self.append, map=map)
def GetArch(self, config):
"""Get architecture based on msvs_configuration_platform and
msvs_target_platform. Returns either 'x86' or 'x64'."""
configuration_platform = self.msvs_configuration_platform.get(config, '')
platform = self.msvs_target_platform.get(config, '')
if not platform: # If no specific override, use the configuration's.
platform = configuration_platform
# Map from platform to architecture.
return {'Win32': 'x86', 'x64': 'x64'}.get(platform, 'x86')
def _TargetConfig(self, config):
"""Returns the target-specific configuration."""
# There's two levels of architecture/platform specification in VS. The
# first level is globally for the configuration (this is what we consider
# "the" config at the gyp level, which will be something like 'Debug' or
# 'Release_x64'), and a second target-specific configuration, which is an
# override for the global one. |config| is remapped here to take into
# account the local target-specific overrides to the global configuration.
arch = self.GetArch(config)
if arch == 'x64' and not config.endswith('_x64'):
config += '_x64'
if arch == 'x86' and config.endswith('_x64'):
config = config.rsplit('_', 1)[0]
return config
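  # e.g. with an x64 arch, _TargetConfig('Debug') -> 'Debug_x64'; with an x86
  # arch, _TargetConfig('Debug_x64') -> 'Debug' (hypothetical config names).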
def _Setting(self, path, config,
default=None, prefix='', append=None, map=None):
"""_GetAndMunge for msvs_settings."""
return self._GetAndMunge(
self.msvs_settings[config], path, default, prefix, append, map)
def _ConfigAttrib(self, path, config,
default=None, prefix='', append=None, map=None):
"""_GetAndMunge for msvs_configuration_attributes."""
return self._GetAndMunge(
self.msvs_configuration_attributes[config],
path, default, prefix, append, map)
def AdjustIncludeDirs(self, include_dirs, config):
"""Updates include_dirs to expand VS specific paths, and adds the system
include dirs used for platform SDK and similar."""
config = self._TargetConfig(config)
includes = include_dirs + self.msvs_system_include_dirs[config]
includes.extend(self._Setting(
('VCCLCompilerTool', 'AdditionalIncludeDirectories'), config, default=[]))
return [self.ConvertVSMacros(p, config=config) for p in includes]
def AdjustMidlIncludeDirs(self, midl_include_dirs, config):
"""Updates midl_include_dirs to expand VS specific paths, and adds the
system include dirs used for platform SDK and similar."""
config = self._TargetConfig(config)
includes = midl_include_dirs + self.msvs_system_include_dirs[config]
includes.extend(self._Setting(
('VCMIDLTool', 'AdditionalIncludeDirectories'), config, default=[]))
return [self.ConvertVSMacros(p, config=config) for p in includes]
def GetComputedDefines(self, config):
"""Returns the set of defines that are injected to the defines list based
on other VS settings."""
config = self._TargetConfig(config)
defines = []
if self._ConfigAttrib(['CharacterSet'], config) == '1':
defines.extend(('_UNICODE', 'UNICODE'))
if self._ConfigAttrib(['CharacterSet'], config) == '2':
defines.append('_MBCS')
defines.extend(self._Setting(
('VCCLCompilerTool', 'PreprocessorDefinitions'), config, default=[]))
return defines
def GetCompilerPdbName(self, config, expand_special):
"""Get the pdb file name that should be used for compiler invocations, or
None if there's no explicit name specified."""
config = self._TargetConfig(config)
pdbname = self._Setting(
('VCCLCompilerTool', 'ProgramDataBaseFileName'), config)
if pdbname:
pdbname = expand_special(self.ConvertVSMacros(pdbname))
return pdbname
def GetMapFileName(self, config, expand_special):
"""Gets the explicitly overriden map file name for a target or returns None
if it's not set."""
config = self._TargetConfig(config)
map_file = self._Setting(('VCLinkerTool', 'MapFileName'), config)
if map_file:
map_file = expand_special(self.ConvertVSMacros(map_file, config=config))
return map_file
def GetOutputName(self, config, expand_special):
"""Gets the explicitly overridden output name for a target or returns None
if it's not overridden."""
config = self._TargetConfig(config)
type = self.spec['type']
root = 'VCLibrarianTool' if type == 'static_library' else 'VCLinkerTool'
# TODO(scottmg): Handle OutputDirectory without OutputFile.
output_file = self._Setting((root, 'OutputFile'), config)
if output_file:
output_file = expand_special(self.ConvertVSMacros(
output_file, config=config))
return output_file
def GetPDBName(self, config, expand_special, default):
"""Gets the explicitly overridden pdb name for a target or returns
default if it's not overridden, or if no pdb will be generated."""
config = self._TargetConfig(config)
output_file = self._Setting(('VCLinkerTool', 'ProgramDatabaseFile'), config)
generate_debug_info = self._Setting(
('VCLinkerTool', 'GenerateDebugInformation'), config)
if generate_debug_info == 'true':
if output_file:
return expand_special(self.ConvertVSMacros(output_file, config=config))
else:
return default
else:
return None
def GetNoImportLibrary(self, config):
"""If NoImportLibrary: true, ninja will not expect the output to include
an import library."""
config = self._TargetConfig(config)
noimplib = self._Setting(('NoImportLibrary',), config)
return noimplib == 'true'
def GetAsmflags(self, config):
"""Returns the flags that need to be added to ml invocations."""
config = self._TargetConfig(config)
asmflags = []
safeseh = self._Setting(('MASM', 'UseSafeExceptionHandlers'), config)
if safeseh == 'true':
asmflags.append('/safeseh')
return asmflags
def GetCflags(self, config):
"""Returns the flags that need to be added to .c and .cc compilations."""
config = self._TargetConfig(config)
cflags = []
cflags.extend(['/wd' + w for w in self.msvs_disabled_warnings[config]])
cl = self._GetWrapper(self, self.msvs_settings[config],
'VCCLCompilerTool', append=cflags)
cl('Optimization',
map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O', default='2')
cl('InlineFunctionExpansion', prefix='/Ob')
cl('DisableSpecificWarnings', prefix='/wd')
cl('StringPooling', map={'true': '/GF'})
cl('EnableFiberSafeOptimizations', map={'true': '/GT'})
cl('OmitFramePointers', map={'false': '-', 'true': ''}, prefix='/Oy')
cl('EnableIntrinsicFunctions', map={'false': '-', 'true': ''}, prefix='/Oi')
cl('FavorSizeOrSpeed', map={'1': 't', '2': 's'}, prefix='/O')
cl('FloatingPointModel',
map={'0': 'precise', '1': 'strict', '2': 'fast'}, prefix='/fp:',
default='0')
cl('CompileAsManaged', map={'false': '', 'true': '/clr'})
cl('WholeProgramOptimization', map={'true': '/GL'})
cl('WarningLevel', prefix='/W')
cl('WarnAsError', map={'true': '/WX'})
cl('CallingConvention',
map={'0': 'd', '1': 'r', '2': 'z', '3': 'v'}, prefix='/G')
cl('DebugInformationFormat',
map={'1': '7', '3': 'i', '4': 'I'}, prefix='/Z')
cl('RuntimeTypeInfo', map={'true': '/GR', 'false': '/GR-'})
cl('EnableFunctionLevelLinking', map={'true': '/Gy', 'false': '/Gy-'})
cl('MinimalRebuild', map={'true': '/Gm'})
cl('BufferSecurityCheck', map={'true': '/GS', 'false': '/GS-'})
cl('BasicRuntimeChecks', map={'1': 's', '2': 'u', '3': '1'}, prefix='/RTC')
cl('RuntimeLibrary',
map={'0': 'T', '1': 'Td', '2': 'D', '3': 'Dd'}, prefix='/M')
cl('ExceptionHandling', map={'1': 'sc','2': 'a'}, prefix='/EH')
cl('DefaultCharIsUnsigned', map={'true': '/J'})
cl('TreatWChar_tAsBuiltInType',
map={'false': '-', 'true': ''}, prefix='/Zc:wchar_t')
cl('EnablePREfast', map={'true': '/analyze'})
cl('AdditionalOptions', prefix='')
cl('EnableEnhancedInstructionSet',
map={'1': 'SSE', '2': 'SSE2', '3': 'AVX', '4': 'IA32', '5': 'AVX2'},
prefix='/arch:')
cflags.extend(['/FI' + f for f in self._Setting(
('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])])
if self.vs_version.short_name in ('2013', '2013e', '2015'):
# New flag required in 2013 to maintain previous PDB behavior.
cflags.append('/FS')
# ninja handles parallelism by itself, don't have the compiler do it too.
cflags = filter(lambda x: not x.startswith('/MP'), cflags)
return cflags
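  # e.g. (illustrative) an msvs_settings VCCLCompilerTool.WarningLevel of '4'
  # becomes '/W4', and Optimization '0' becomes '/Od' via the maps above.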
def _GetPchFlags(self, config, extension):
"""Get the flags to be added to the cflags for precompiled header support.
"""
config = self._TargetConfig(config)
# The PCH is only built once by a particular source file. Usage of PCH must
# only be for the same language (i.e. C vs. C++), so only include the pch
# flags when the language matches.
if self.msvs_precompiled_header[config]:
source_ext = os.path.splitext(self.msvs_precompiled_source[config])[1]
if _LanguageMatchesForPch(source_ext, extension):
pch = os.path.split(self.msvs_precompiled_header[config])[1]
return ['/Yu' + pch, '/FI' + pch, '/Fp${pchprefix}.' + pch + '.pch']
return []
def GetCflagsC(self, config):
"""Returns the flags that need to be added to .c compilations."""
config = self._TargetConfig(config)
return self._GetPchFlags(config, '.c')
def GetCflagsCC(self, config):
"""Returns the flags that need to be added to .cc compilations."""
config = self._TargetConfig(config)
return ['/TP'] + self._GetPchFlags(config, '.cc')
def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path):
"""Get and normalize the list of paths in AdditionalLibraryDirectories
setting."""
config = self._TargetConfig(config)
libpaths = self._Setting((root, 'AdditionalLibraryDirectories'),
config, default=[])
libpaths = [os.path.normpath(
gyp_to_build_path(self.ConvertVSMacros(p, config=config)))
for p in libpaths]
return ['/LIBPATH:"' + p + '"' for p in libpaths]
def GetLibFlags(self, config, gyp_to_build_path):
"""Returns the flags that need to be added to lib commands."""
config = self._TargetConfig(config)
libflags = []
lib = self._GetWrapper(self, self.msvs_settings[config],
'VCLibrarianTool', append=libflags)
libflags.extend(self._GetAdditionalLibraryDirectories(
'VCLibrarianTool', config, gyp_to_build_path))
lib('LinkTimeCodeGeneration', map={'true': '/LTCG'})
lib('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'},
prefix='/MACHINE:')
lib('AdditionalOptions')
return libflags
def GetDefFile(self, gyp_to_build_path):
"""Returns the .def file from sources, if any. Otherwise returns None."""
spec = self.spec
if spec['type'] in ('shared_library', 'loadable_module', 'executable'):
def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
if len(def_files) == 1:
return gyp_to_build_path(def_files[0])
elif len(def_files) > 1:
raise Exception("Multiple .def files")
return None
def _GetDefFileAsLdflags(self, ldflags, gyp_to_build_path):
""".def files get implicitly converted to a ModuleDefinitionFile for the
linker in the VS generator. Emulate that behaviour here."""
def_file = self.GetDefFile(gyp_to_build_path)
if def_file:
ldflags.append('/DEF:"%s"' % def_file)
def GetPGDName(self, config, expand_special):
"""Gets the explicitly overridden pgd name for a target or returns None
if it's not overridden."""
config = self._TargetConfig(config)
output_file = self._Setting(
('VCLinkerTool', 'ProfileGuidedDatabase'), config)
if output_file:
output_file = expand_special(self.ConvertVSMacros(
output_file, config=config))
return output_file
def GetLdflags(self, config, gyp_to_build_path, expand_special,
manifest_base_name, output_name, is_executable, build_dir):
"""Returns the flags that need to be added to link commands, and the
manifest files."""
config = self._TargetConfig(config)
ldflags = []
ld = self._GetWrapper(self, self.msvs_settings[config],
'VCLinkerTool', append=ldflags)
self._GetDefFileAsLdflags(ldflags, gyp_to_build_path)
ld('GenerateDebugInformation', map={'true': '/DEBUG'})
ld('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'},
prefix='/MACHINE:')
ldflags.extend(self._GetAdditionalLibraryDirectories(
'VCLinkerTool', config, gyp_to_build_path))
ld('DelayLoadDLLs', prefix='/DELAYLOAD:')
ld('TreatLinkerWarningAsErrors', prefix='/WX',
map={'true': '', 'false': ':NO'})
out = self.GetOutputName(config, expand_special)
if out:
ldflags.append('/OUT:' + out)
pdb = self.GetPDBName(config, expand_special, output_name + '.pdb')
if pdb:
ldflags.append('/PDB:' + pdb)
pgd = self.GetPGDName(config, expand_special)
if pgd:
ldflags.append('/PGD:' + pgd)
map_file = self.GetMapFileName(config, expand_special)
ld('GenerateMapFile', map={'true': '/MAP:' + map_file if map_file
else '/MAP'})
ld('MapExports', map={'true': '/MAPINFO:EXPORTS'})
ld('AdditionalOptions', prefix='')
minimum_required_version = self._Setting(
('VCLinkerTool', 'MinimumRequiredVersion'), config, default='')
if minimum_required_version:
minimum_required_version = ',' + minimum_required_version
ld('SubSystem',
map={'1': 'CONSOLE%s' % minimum_required_version,
'2': 'WINDOWS%s' % minimum_required_version},
prefix='/SUBSYSTEM:')
stack_reserve_size = self._Setting(
('VCLinkerTool', 'StackReserveSize'), config, default='')
if stack_reserve_size:
stack_commit_size = self._Setting(
('VCLinkerTool', 'StackCommitSize'), config, default='')
if stack_commit_size:
stack_commit_size = ',' + stack_commit_size
ldflags.append('/STACK:%s%s' % (stack_reserve_size, stack_commit_size))
ld('TerminalServerAware', map={'1': ':NO', '2': ''}, prefix='/TSAWARE')
ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL')
ld('BaseAddress', prefix='/BASE:')
ld('FixedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/FIXED')
ld('RandomizedBaseAddress',
map={'1': ':NO', '2': ''}, prefix='/DYNAMICBASE')
ld('DataExecutionPrevention',
map={'1': ':NO', '2': ''}, prefix='/NXCOMPAT')
ld('OptimizeReferences', map={'1': 'NOREF', '2': 'REF'}, prefix='/OPT:')
ld('ForceSymbolReferences', prefix='/INCLUDE:')
ld('EnableCOMDATFolding', map={'1': 'NOICF', '2': 'ICF'}, prefix='/OPT:')
ld('LinkTimeCodeGeneration',
map={'1': '', '2': ':PGINSTRUMENT', '3': ':PGOPTIMIZE',
'4': ':PGUPDATE'},
prefix='/LTCG')
ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:')
ld('ResourceOnlyDLL', map={'true': '/NOENTRY'})
ld('EntryPointSymbol', prefix='/ENTRY:')
ld('Profile', map={'true': '/PROFILE'})
ld('LargeAddressAware',
map={'1': ':NO', '2': ''}, prefix='/LARGEADDRESSAWARE')
# TODO(scottmg): This should sort of be somewhere else (not really a flag).
ld('AdditionalDependencies', prefix='')
if self.GetArch(config) == 'x86':
safeseh_default = 'true'
else:
safeseh_default = None
ld('ImageHasSafeExceptionHandlers',
map={'false': ':NO', 'true': ''}, prefix='/SAFESEH',
default=safeseh_default)
# If the base address is not specifically controlled, DYNAMICBASE should
# be on by default.
base_flags = filter(lambda x: 'DYNAMICBASE' in x or x == '/FIXED',
ldflags)
if not base_flags:
ldflags.append('/DYNAMICBASE')
# If the NXCOMPAT flag has not been specified, default to on. Despite the
# documentation that says this only defaults to on when the subsystem is
# Vista or greater (which applies to the linker), the IDE defaults it on
# unless it's explicitly off.
if not filter(lambda x: 'NXCOMPAT' in x, ldflags):
ldflags.append('/NXCOMPAT')
have_def_file = filter(lambda x: x.startswith('/DEF:'), ldflags)
manifest_flags, intermediate_manifest, manifest_files = \
self._GetLdManifestFlags(config, manifest_base_name, gyp_to_build_path,
is_executable and not have_def_file, build_dir)
ldflags.extend(manifest_flags)
return ldflags, intermediate_manifest, manifest_files
def _GetLdManifestFlags(self, config, name, gyp_to_build_path,
allow_isolation, build_dir):
"""Returns a 3-tuple:
- the set of flags that need to be added to the link to generate
a default manifest
- the intermediate manifest that the linker will generate that should be
used to assert it doesn't add anything to the merged one.
- the list of all the manifest files to be merged by the manifest tool and
included into the link."""
generate_manifest = self._Setting(('VCLinkerTool', 'GenerateManifest'),
config,
default='true')
if generate_manifest != 'true':
# This means not only that the linker should not generate the intermediate
# manifest but also that the manifest tool should do nothing even when
# additional manifests are specified.
return ['/MANIFEST:NO'], [], []
output_name = name + '.intermediate.manifest'
flags = [
'/MANIFEST',
'/ManifestFile:' + output_name,
]
# Instead of using the MANIFESTUAC flags, we generate a .manifest to
# include into the list of manifests. This allows us to avoid the need to
# do two passes during linking. The /MANIFEST flag and /ManifestFile are
# still used, and the intermediate manifest is used to assert that the
# final manifest we get from merging all the additional manifest files
# (plus the one we generate here) isn't modified by merging the
# intermediate into it.
# Always NO, because we generate a manifest file that has what we want.
flags.append('/MANIFESTUAC:NO')
config = self._TargetConfig(config)
enable_uac = self._Setting(('VCLinkerTool', 'EnableUAC'), config,
default='true')
manifest_files = []
generated_manifest_outer = \
"<?xml version='1.0' encoding='UTF-8' standalone='yes'?>" \
"<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>%s" \
"</assembly>"
if enable_uac == 'true':
execution_level = self._Setting(('VCLinkerTool', 'UACExecutionLevel'),
config, default='0')
execution_level_map = {
'0': 'asInvoker',
'1': 'highestAvailable',
'2': 'requireAdministrator'
}
ui_access = self._Setting(('VCLinkerTool', 'UACUIAccess'), config,
default='false')
inner = '''
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level='%s' uiAccess='%s' />
</requestedPrivileges>
</security>
</trustInfo>''' % (execution_level_map[execution_level], ui_access)
else:
inner = ''
generated_manifest_contents = generated_manifest_outer % inner
generated_name = name + '.generated.manifest'
# Need to join with the build_dir here as we're writing it during
# generation time, but we return the un-joined version because the build
# will occur in that directory. We only write the file if the contents
# have changed so that simply regenerating the project files doesn't
# cause a relink.
build_dir_generated_name = os.path.join(build_dir, generated_name)
gyp.common.EnsureDirExists(build_dir_generated_name)
f = gyp.common.WriteOnDiff(build_dir_generated_name)
f.write(generated_manifest_contents)
f.close()
manifest_files = [generated_name]
if allow_isolation:
flags.append('/ALLOWISOLATION')
manifest_files += self._GetAdditionalManifestFiles(config,
gyp_to_build_path)
return flags, output_name, manifest_files
def _GetAdditionalManifestFiles(self, config, gyp_to_build_path):
"""Gets additional manifest files that are added to the default one
generated by the linker."""
files = self._Setting(('VCManifestTool', 'AdditionalManifestFiles'), config,
default=[])
if isinstance(files, str):
files = files.split(';')
return [os.path.normpath(
gyp_to_build_path(self.ConvertVSMacros(f, config=config)))
for f in files]
def IsUseLibraryDependencyInputs(self, config):
"""Returns whether the target should be linked via Use Library Dependency
Inputs (using component .objs of a given .lib)."""
config = self._TargetConfig(config)
uldi = self._Setting(('VCLinkerTool', 'UseLibraryDependencyInputs'), config)
return uldi == 'true'
def IsEmbedManifest(self, config):
"""Returns whether manifest should be linked into binary."""
config = self._TargetConfig(config)
embed = self._Setting(('VCManifestTool', 'EmbedManifest'), config,
default='true')
return embed == 'true'
def IsLinkIncremental(self, config):
"""Returns whether the target should be linked incrementally."""
config = self._TargetConfig(config)
link_inc = self._Setting(('VCLinkerTool', 'LinkIncremental'), config)
return link_inc != '1'
def GetRcflags(self, config, gyp_to_ninja_path):
"""Returns the flags that need to be added to invocations of the resource
compiler."""
config = self._TargetConfig(config)
rcflags = []
rc = self._GetWrapper(self, self.msvs_settings[config],
'VCResourceCompilerTool', append=rcflags)
rc('AdditionalIncludeDirectories', map=gyp_to_ninja_path, prefix='/I')
rcflags.append('/I' + gyp_to_ninja_path('.'))
rc('PreprocessorDefinitions', prefix='/d')
# /l arg must be in hex without leading '0x'
rc('Culture', prefix='/l', map=lambda x: hex(int(x))[2:])
return rcflags
def BuildCygwinBashCommandLine(self, args, path_to_base):
"""Build a command line that runs args via cygwin bash. We assume that all
incoming paths are in Windows normpath'd form, so they need to be
converted to posix style for the part of the command line that's passed to
bash. We also have to do some Visual Studio macro emulation here because
various rules use magic VS names for things. Also note that rules that
contain ninja variables cannot be fixed here (for example ${source}), so
the outer generator needs to make sure that the paths that are written out
are in posix style, if the command line will be used here."""
cygwin_dir = os.path.normpath(
os.path.join(path_to_base, self.msvs_cygwin_dirs[0]))
cd = ('cd %s' % path_to_base).replace('\\', '/')
args = [a.replace('\\', '/').replace('"', '\\"') for a in args]
args = ["'%s'" % a.replace("'", "'\\''") for a in args]
bash_cmd = ' '.join(args)
cmd = (
'call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir +
'bash -c "%s ; %s"' % (cd, bash_cmd))
return cmd
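  # The command produced has the shape (paths assumed):
  #   call "<cygwin_dir>\setup_env.bat" && set CYGWIN=nontsec && \
  #     bash -c "cd <posix path> ; 'arg1' 'arg2'"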
def IsRuleRunUnderCygwin(self, rule):
"""Determine if an action should be run under cygwin. If the variable is
unset, or set to 1 we use cygwin."""
return int(rule.get('msvs_cygwin_shell',
self.spec.get('msvs_cygwin_shell', 1))) != 0
def _HasExplicitRuleForExtension(self, spec, extension):
"""Determine if there's an explicit rule for a particular extension."""
for rule in spec.get('rules', []):
if rule['extension'] == extension:
return True
return False
def _HasExplicitIdlActions(self, spec):
"""Determine if an action should not run midl for .idl files."""
return any([action.get('explicit_idl_action', 0)
for action in spec.get('actions', [])])
def HasExplicitIdlRulesOrActions(self, spec):
"""Determine if there's an explicit rule or action for idl files. When
there isn't we need to generate implicit rules to build MIDL .idl files."""
return (self._HasExplicitRuleForExtension(spec, 'idl') or
self._HasExplicitIdlActions(spec))
def HasExplicitAsmRules(self, spec):
"""Determine if there's an explicit rule for asm files. When there isn't we
need to generate implicit rules to assemble .asm files."""
return self._HasExplicitRuleForExtension(spec, 'asm')
def GetIdlBuildData(self, source, config):
"""Determine the implicit outputs for an idl file. Returns output
directory, outputs, and variables and flags that are required."""
config = self._TargetConfig(config)
midl_get = self._GetWrapper(self, self.msvs_settings[config], 'VCMIDLTool')
def midl(name, default=None):
return self.ConvertVSMacros(midl_get(name, default=default),
config=config)
tlb = midl('TypeLibraryName', default='${root}.tlb')
header = midl('HeaderFileName', default='${root}.h')
dlldata = midl('DLLDataFileName', default='dlldata.c')
iid = midl('InterfaceIdentifierFileName', default='${root}_i.c')
proxy = midl('ProxyFileName', default='${root}_p.c')
# Note that .tlb is not included in the outputs as it is not always
# generated depending on the content of the input idl file.
outdir = midl('OutputDirectory', default='')
output = [header, dlldata, iid, proxy]
variables = [('tlb', tlb),
('h', header),
('dlldata', dlldata),
('iid', iid),
('proxy', proxy)]
# TODO(scottmg): Are there configuration settings to set these flags?
target_platform = 'win32' if self.GetArch(config) == 'x86' else 'x64'
flags = ['/char', 'signed', '/env', target_platform, '/Oicf']
return outdir, output, variables, flags
def _LanguageMatchesForPch(source_ext, pch_source_ext):
c_exts = ('.c',)
cc_exts = ('.cc', '.cxx', '.cpp')
return ((source_ext in c_exts and pch_source_ext in c_exts) or
(source_ext in cc_exts and pch_source_ext in cc_exts))
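# e.g. _LanguageMatchesForPch('.cc', '.cpp') is True (both C++), while
# _LanguageMatchesForPch('.c', '.cc') is False (C vs. C++).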
class PrecompiledHeader(object):
"""Helper to generate dependencies and build rules to handle generation of
precompiled headers. Interface matches the GCH handler in xcode_emulation.py.
"""
def __init__(
self, settings, config, gyp_to_build_path, gyp_to_unique_output, obj_ext):
self.settings = settings
self.config = config
pch_source = self.settings.msvs_precompiled_source[self.config]
self.pch_source = gyp_to_build_path(pch_source)
filename, _ = os.path.splitext(pch_source)
self.output_obj = gyp_to_unique_output(filename + obj_ext).lower()
def _PchHeader(self):
"""Get the header that will appear in an #include line for all source
files."""
return os.path.split(self.settings.msvs_precompiled_header[self.config])[1]
def GetObjDependencies(self, sources, objs, arch):
"""Given a list of sources files and the corresponding object files,
returns a list of the pch files that should be depended upon. The
additional wrapping in the return value is for interface compatibility
with make.py on Mac, and xcode_emulation.py."""
assert arch is None
if not self._PchHeader():
return []
pch_ext = os.path.splitext(self.pch_source)[1]
for source in sources:
if _LanguageMatchesForPch(os.path.splitext(source)[1], pch_ext):
return [(None, None, self.output_obj)]
return []
def GetPchBuildCommands(self, arch):
"""Not used on Windows as there are no additional build steps required
(instead, existing steps are modified in GetFlagsModifications below)."""
return []
def GetFlagsModifications(self, input, output, implicit, command,
cflags_c, cflags_cc, expand_special):
"""Get the modified cflags and implicit dependencies that should be used
for the pch compilation step."""
if input == self.pch_source:
pch_output = ['/Yc' + self._PchHeader()]
if command == 'cxx':
return ([('cflags_cc', map(expand_special, cflags_cc + pch_output))],
self.output_obj, [])
elif command == 'cc':
return ([('cflags_c', map(expand_special, cflags_c + pch_output))],
self.output_obj, [])
return [], output, implicit
vs_version = None
def GetVSVersion(generator_flags):
global vs_version
if not vs_version:
vs_version = gyp.MSVSVersion.SelectVisualStudioVersion(
generator_flags.get('msvs_version', 'auto'),
allow_fallback=False)
return vs_version
def _GetVsvarsSetupArgs(generator_flags, arch):
vs = GetVSVersion(generator_flags)
return vs.SetupScript()
def ExpandMacros(string, expansions):
"""Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv
for the canonical way to retrieve a suitable dict."""
if '$' in string:
for old, new in expansions.iteritems():
assert '$(' not in new, new
string = string.replace(old, new)
return string
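# e.g. (hypothetical mapping) ExpandMacros('$(OutDir)\\foo.dll',
# {'$(OutDir)\\': 'out\\Release\\'}) returns 'out\\Release\\foo.dll'.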
def _ExtractImportantEnvironment(output_of_set):
"""Extracts environment variables required for the toolchain to run from
a textual dump output by the cmd.exe 'set' command."""
envvars_to_save = (
'goma_.*', # TODO(scottmg): This is ugly, but needed for goma.
'include',
'lib',
'libpath',
'path',
'pathext',
'systemroot',
'temp',
'tmp',
)
env = {}
for line in output_of_set.splitlines():
for envvar in envvars_to_save:
if re.match(envvar + '=', line.lower()):
var, setting = line.split('=', 1)
if envvar == 'path':
# Our own rules (for running gyp-win-tool) and other actions in
# Chromium rely on python being in the path. Add the path to this
# python here so that if it's not in the path when ninja is run
# later, python will still be found.
setting = os.path.dirname(sys.executable) + os.pathsep + setting
env[var.upper()] = setting
break
for required in ('SYSTEMROOT', 'TEMP', 'TMP'):
if required not in env:
raise Exception('Environment variable "%s" '
'required to be set to valid path' % required)
return env
def _FormatAsEnvironmentBlock(envvar_dict):
"""Format as an 'environment block' directly suitable for CreateProcess.
Briefly this is a list of key=value\0, terminated by an additional \0. See
CreateProcess documentation for more details."""
block = ''
nul = '\0'
for key, value in envvar_dict.iteritems():
block += key + '=' + value + nul
block += nul
return block
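# e.g. (hypothetical) _FormatAsEnvironmentBlock({'PATH': r'C:\bin'}) returns
# 'PATH=C:\\bin\x00\x00': each entry is NUL-terminated, plus a final NUL.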
def _ExtractCLPath(output_of_where):
"""Gets the path to cl.exe based on the output of calling the environment
setup batch file, followed by the equivalent of `where`."""
# Take the first line, as that's the first found in the PATH.
for line in output_of_where.strip().splitlines():
if line.startswith('LOC:'):
return line[len('LOC:'):].strip()
def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags,
system_includes, open_out):
"""It's not sufficient to have the absolute path to the compiler, linker,
etc. on Windows, as those tools rely on .dlls being in the PATH. We also
need to support both x86 and x64 compilers within the same build (to support
msvs_target_platform hackery). Different architectures require a different
compiler binary, and different supporting environment variables (INCLUDE,
LIB, LIBPATH). So, we extract the environment here, wrap all invocations
of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which
sets up the environment, and then we do not prefix the compiler with
an absolute path, instead preferring something like "cl.exe" in the rule
which will then run whichever the environment setup has put in the path.
When the following procedure to generate environment files does not
meet your requirement (e.g. for custom toolchains), you can pass
"-G ninja_use_custom_environment_files" to the gyp to suppress file
generation and use custom environment files prepared by yourself."""
archs = ('x86', 'x64')
if generator_flags.get('ninja_use_custom_environment_files', 0):
cl_paths = {}
for arch in archs:
cl_paths[arch] = 'cl.exe'
return cl_paths
vs = GetVSVersion(generator_flags)
cl_paths = {}
for arch in archs:
# Extract environment variables for subprocesses.
args = vs.SetupScript(arch)
args.extend(('&&', 'set'))
popen = subprocess.Popen(
args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
variables, _ = popen.communicate()
env = _ExtractImportantEnvironment(variables)
# Inject system includes from gyp files into INCLUDE.
if system_includes:
system_includes = system_includes | OrderedSet(
env.get('INCLUDE', '').split(';'))
env['INCLUDE'] = ';'.join(system_includes)
env_block = _FormatAsEnvironmentBlock(env)
f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb')
f.write(env_block)
f.close()
# Find cl.exe location for this architecture.
args = vs.SetupScript(arch)
args.extend(('&&',
'for', '%i', 'in', '(cl.exe)', 'do', '@echo', 'LOC:%~$PATH:i'))
popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE)
output, _ = popen.communicate()
cl_paths[arch] = _ExtractCLPath(output)
return cl_paths
def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja):
"""Emulate behavior of msvs_error_on_missing_sources present in the msvs
generator: Check that all regular source files, i.e. not created at run time,
exist on disk. Missing files cause needless recompilation when building via
VS, and we want this check to match for people/bots that build using ninja,
so they're not surprised when the VS build fails."""
if int(generator_flags.get('msvs_error_on_missing_sources', 0)):
no_specials = filter(lambda x: '$' not in x, sources)
relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials]
missing = filter(lambda x: not os.path.exists(x), relative)
if missing:
# They'll look like out\Release\..\..\stuff\things.cc, so normalize the
# path for a slightly less crazy looking output.
cleaned_up = [os.path.normpath(x) for x in missing]
raise Exception('Missing input files:\n%s' % '\n'.join(cleaned_up))
# Sets some values in default_variables, which are required for many
# generators, run on Windows.
def CalculateCommonVariables(default_variables, params):
generator_flags = params.get('generator_flags', {})
# Set a variable so conditions can be based on msvs_version.
msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags)
default_variables['MSVS_VERSION'] = msvs_version.ShortName()
# To determine processor word size on Windows, in addition to checking
# PROCESSOR_ARCHITECTURE (which reflects the word size of the current
# process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
# contains the actual word size of the system when running thru WOW64).
if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or
'64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')):
default_variables['MSVS_OS_BITS'] = 64
else:
default_variables['MSVS_OS_BITS'] = 32
|
tchernomax/ansible
|
refs/heads/devel
|
test/units/modules/network/ios/test_ios_vlan.py
|
12
|
# (c) 2018 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from ansible.compat.tests.mock import patch
from ansible.modules.network.ios import ios_vlan
from ansible.modules.network.ios.ios_vlan import parse_vlan_brief
from units.modules.utils import set_module_args
from .ios_module import TestIosModule, load_fixture
class TestIosVlanModule(TestIosModule):
module = ios_vlan
def setUp(self):
super(TestIosVlanModule, self).setUp()
self.mock_run_commands = patch('ansible.modules.network.ios.ios_vlan.run_commands')
self.run_commands = self.mock_run_commands.start()
self.mock_load_config = patch('ansible.modules.network.ios.ios_vlan.load_config')
self.load_config = self.mock_load_config.start()
def tearDown(self):
super(TestIosVlanModule, self).tearDown()
self.mock_run_commands.stop()
self.mock_load_config.stop()
def load_fixtures(self, commands=None, transport='cli'):
self.run_commands.return_value = [load_fixture('ios_vlan_config.cfg')]
self.load_config.return_value = {'diff': None, 'session': 'session'}
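    # Note: run_commands/load_config are patched in setUp, so every test below
    # runs against the static 'ios_vlan_config.cfg' fixture rather than a device.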
def test_ios_vlan_create(self):
set_module_args({'vlan_id': '3', 'name': 'test', 'state': 'present'})
result = self.execute_module(changed=True)
expected_commands = [
'vlan 3',
'name test',
]
self.assertEqual(result['commands'], expected_commands)
    def test_ios_vlan_id_starts_with_9(self):
set_module_args({'vlan_id': '9', 'name': 'vlan9', 'state': 'present'})
result = self.execute_module(changed=False)
expected_commands = []
self.assertEqual(result['commands'], expected_commands)
def test_ios_vlan_rename(self):
set_module_args({'vlan_id': '2', 'name': 'test', 'state': 'present'})
result = self.execute_module(changed=True)
expected_commands = [
'vlan 2',
'name test',
]
self.assertEqual(result['commands'], expected_commands)
def test_ios_vlan_with_interfaces(self):
set_module_args({'vlan_id': '2', 'name': 'vlan2', 'state': 'present', 'interfaces': ['GigabitEthernet1/0/8', 'GigabitEthernet1/0/7']})
result = self.execute_module(changed=True)
expected_commands = [
'vlan 2',
'interface GigabitEthernet1/0/8',
'switchport mode access',
'switchport access vlan 2',
'vlan 2',
'interface GigabitEthernet1/0/6',
'switchport mode access',
'no switchport access vlan 2',
]
self.assertEqual(result['commands'], expected_commands)
def test_ios_vlan_with_interfaces_and_newvlan(self):
set_module_args({'vlan_id': '3', 'name': 'vlan3', 'state': 'present', 'interfaces': ['GigabitEthernet1/0/8', 'GigabitEthernet1/0/7']})
result = self.execute_module(changed=True)
expected_commands = [
'vlan 3',
'name vlan3',
'interface GigabitEthernet1/0/8',
'switchport mode access',
'switchport access vlan 3',
'interface GigabitEthernet1/0/7',
'switchport mode access',
'switchport access vlan 3',
]
self.assertEqual(result['commands'], expected_commands)
def test_parse_vlan_brief(self):
result = parse_vlan_brief(load_fixture('ios_vlan_config.cfg'))
obj = [
{
'name': 'default',
'interfaces': [
'GigabitEthernet1/0/4',
'GigabitEthernet1/0/5',
'GigabitEthernet1/0/52',
'GigabitEthernet1/0/54',
],
'state': 'active',
'vlan_id': '1',
},
{
'name': 'vlan2',
'interfaces': [
'GigabitEthernet1/0/6',
'GigabitEthernet1/0/7',
],
'state': 'active',
'vlan_id': '2',
},
{
'name': 'vlan9',
'interfaces': [
'GigabitEthernet1/0/6',
],
'state': 'active',
'vlan_id': '9',
},
{
'name': 'fddi-default',
'interfaces': [],
'state': 'act/unsup',
'vlan_id': '1002',
},
{
'name': 'fddo-default',
'interfaces': [],
'state': 'act/unsup',
'vlan_id': '1003',
},
]
self.assertEqual(result, obj)
|