repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
lkorigin/laniakea | src/laniakea/repository.py | 1 | 16631 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2016-2019 Matthias Klumpp <matthias@tenstral.net>
#
# Licensed under the GNU Lesser General Public License Version 3
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the license, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
import os
from apt_pkg import TagFile, TagSection, sha256sum, version_compare
from laniakea.utils import is_remote_url, download_file, split_strip
from laniakea.utils.gpg import SignedFile
from laniakea.localconfig import LocalConfig
from laniakea.db import ArchiveFile, SourcePackage, BinaryPackage, PackageInfo, DebType, \
packagepriority_from_string, debtype_from_string, ArchiveSuite, ArchiveComponent, ArchiveArchitecture, \
ArchiveRepository
from laniakea.logging import log
def parse_checksums_list(data, base_dir=None):
    '''Parse a checksum list (e.g. a Checksums-Sha256 field) into ArchiveFile entries.

    Each valid line has the form "<sha256> <size> <filename>"; any line that
    does not split into exactly three fields is silently skipped.

    :param data: Raw multi-line checksum list string (may be empty/None).
    :param base_dir: If given, filenames are joined onto this directory.
    :return: List of ArchiveFile objects.
    '''
    result = []
    if not data:
        return result

    for raw_line in data.split('\n'):
        fields = split_strip(raw_line, ' ')
        if len(fields) != 3:
            # not a "<checksum> <size> <name>" triple - ignore
            continue

        checksum, size_str, name = fields
        afile = ArchiveFile()
        afile.sha256sum = checksum
        afile.size = int(size_str)
        afile.fname = os.path.join(base_dir, name) if base_dir else name
        result.append(afile)

    return result
def parse_package_list_str(pkg_list_raw, default_version=None):
    '''
    Parse a "Package-List" field and return its information in
    PackageInfo data structures.
    See https://www.debian.org/doc/debian-policy/ch-controlfields.html#package-list

    :param pkg_list_raw: Raw string value of the Package-List field.
    :param default_version: Version assigned to each entry (the field itself
                            carries no version information).
    :return: List of PackageInfo objects, one per valid line.
    '''
    res = []
    for line in pkg_list_raw.split('\n'):
        parts = split_strip(line, ' ')
        if len(parts) < 4:
            # every entry needs at least name, type, section and priority
            continue

        pi = PackageInfo()
        pi.name = parts[0]
        pi.version = default_version
        pi.deb_type = debtype_from_string(parts[1])
        pi.section = parts[2]
        pi.priority = packagepriority_from_string(parts[3])

        # Everything after the fourth token is an optional key=value item.
        # FIX: the line was already split on spaces, so the extra items are
        # the remaining list elements (parts[4:]). The previous code split
        # only parts[4] again, which silently dropped every optional field
        # after the first one.
        for v in parts[4:]:
            if v.startswith('arch='):
                # handle architectures
                pi.architectures = v[5:].split(',')
        res.append(pi)
    return res
def version_revision(version: str, full_for_native: bool = True) -> str:
    '''
    Get the Debian revision string from a version number.

    :param full_for_native: Return the full version if we have a native package.
    '''
    _base, sep, revision = version.rpartition('-')
    if not sep:
        # native package: there is no revision separator at all
        return version if full_for_native else ''
    return revision
class Repository:
    '''
    Allows reading data from a Debian repository.
    '''

    class InReleaseData:
        '''Checksum information parsed from a suite's InRelease file.'''

        def __init__(self):
            # FIX: this used to be a shared mutable class attribute, so all
            # InReleaseData instances would have aliased the same list.
            self.files = []

    def __init__(self, location, repo_name=None, trusted_keyrings=None, entity=None):
        '''
        :param location: Local root path or remote URL of the repository.
        :param repo_name: Repository name, used e.g. for its local cache directory.
        :param trusted_keyrings: GPG keyrings trusted to sign InRelease data.
        :param entity: Optional ArchiveRepository entity for this repository;
                       created from repo_name when omitted.
        '''
        lconf = LocalConfig()

        if not repo_name:
            repo_name = 'unknown'

        if is_remote_url(location):
            self._root_dir = os.path.join(lconf.cache_dir, 'repo_cache', repo_name)
            os.makedirs(self._root_dir, exist_ok=True)
            self._repo_url = location
        else:
            self._root_dir = location
            self._repo_url = None

        # FIX: this parameter previously used a mutable default argument ([]),
        # which is shared between all calls; use None as sentinel instead.
        self._keyrings = trusted_keyrings if trusted_keyrings is not None else []
        self._trusted = False
        self._name = repo_name

        if entity:
            self._repo_entity = entity
        else:
            self._repo_entity = ArchiveRepository(self._name)

        self._inrelease = {}  # dict of str->InReleaseData

    @property
    def base_dir(self) -> str:
        '''
        The on-disk location of this repository.
        '''
        return self._root_dir

    @property
    def location(self) -> str:
        '''
        A location string identifier of where this repository resides.
        '''
        if not self._repo_url:
            return self._root_dir
        return self._repo_url

    def set_trusted(self, trusted):
        '''Mark this repository as explicitly trusted.

        A trusted repository with no keyrings configured skips InRelease
        signature validation (see _read_repo_information).
        '''
        self._trusted = trusted
        if self._trusted:
            log.debug('Explicitly marked repository "{}" as trusted.'.format(self.location))

    def _fetch_repo_file_internal(self, location, check=False):
        '''
        Download a file and retrieve a filename.

        This function does not validate the result, this step
        has to be done by the caller.

        :param location: Path of the file relative to the repository root.
        :param check: Forwarded to download_file for remote repositories.
        :return: Local filesystem path of the file, or None if not found.
        '''
        if self._repo_url:
            source_url = os.path.join(self._repo_url, location)
            target_fname = os.path.join(self._root_dir, location)

            os.makedirs(os.path.dirname(target_fname), exist_ok=True)
            download_file(source_url, target_fname, check=check)
            return target_fname
        else:
            fname = os.path.join(self._root_dir, location)
            if os.path.isfile(fname):
                return fname

        # There was an error, we couldn't find or download the file
        log.error('Could not find repository file "{}"'.format(location))
        return None

    def get_file(self, afile, check=True) -> str:
        '''
        Get a file from the repository.

        :param afile: ArchiveFile describing the file to fetch.
        :param check: If True, validate the file's SHA-256 checksum.
        Returns: An absolute path to the repository file.
        '''
        assert type(afile) is ArchiveFile

        # FIX: check=True used to be hard-coded here, ignoring the caller's
        # choice when fetching the file.
        fname = self._fetch_repo_file_internal(afile.fname, check=check)
        if check:
            with open(fname, 'rb') as f:
                sha256h = sha256sum(f)
                if sha256h != afile.sha256sum:
                    raise Exception('Checksum validation of "{}" failed ({} != {}).'.format(fname, sha256h, afile.sha256sum))

        return fname

    def _read_repo_information(self, suite_name, check=True):
        '''Fetch, validate and cache the parsed InRelease data for :suite_name.

        :param check: If True, raise when the InRelease file cannot be found;
                      otherwise return an empty InReleaseData.
        '''
        if suite_name in self._inrelease:
            return self._inrelease[suite_name]

        irfname = self._fetch_repo_file_internal(os.path.join('dists', suite_name, 'InRelease'))
        if not irfname:
            if check:
                raise Exception('Unable to find InRelease data for repository "{}"'.format(self.location))
            return Repository.InReleaseData()

        with open(irfname, 'rb') as irf:
            contents = irf.read()

        require_signature = True
        if self._trusted and not self._keyrings:
            # no keyrings, but the repository was explicitly trusted - no need to validate
            # the stuff.
            # TODO: Maybe we should change the code to simply *always* validate everything?
            require_signature = False
        sf = SignedFile(contents, self._keyrings, require_signature=require_signature)
        contents = sf.contents

        section = TagSection(contents)
        ird = Repository.InReleaseData()

        files_raw = section['SHA256']
        ird.files = parse_checksums_list(files_raw)

        self._inrelease[suite_name] = ird
        return ird

    def index_file(self, suite, fname, check=True):
        '''
        Retrieve a package list (index) file from the repository.
        The file will be downloaded if necessary:

        :param suite: ArchiveSuite (or suite name string) to look in.
        :param fname: Index file path relative to the suite's dists directory.
        :param check: If True, fail when the file can't be validated.
        Returns: A file path to the index file.
        '''
        if type(suite) is ArchiveSuite:
            suite_name = suite.name
        else:
            suite_name = suite

        # FIX: forward the check flag. Previously _read_repo_information was
        # always called with its default (check=True), so even a check=False
        # request raised when the InRelease file was missing.
        ird = self._read_repo_information(suite_name, check)
        index_fname = self._fetch_repo_file_internal(os.path.join('dists', suite_name, fname))
        if not index_fname:
            return None

        # validate the file
        with open(index_fname, 'rb') as f:
            index_sha256sum = sha256sum(f)
            valid = False
            for af in ird.files:
                if af.fname == fname:
                    if index_sha256sum != af.sha256sum:
                        raise Exception('Checksum validation of "{}" failed ({} != {})'.format(fname, index_sha256sum, af.sha256sum))
                    valid = True
            if not valid and check:
                raise Exception('Unable to validate "{}": File not mentioned in InRelease.'.format(fname))

        return index_fname

    def source_packages(self, suite, component):
        ''' Return a list of all source packages in the given suite and component. '''
        assert type(suite) is ArchiveSuite
        assert type(component) is ArchiveComponent

        index_fname = self.index_file(suite.name, os.path.join(component.name, 'source', 'Sources.xz'))
        if not index_fname:
            return []

        pkgs = []
        with TagFile(index_fname) as tf:
            for e in tf:
                pkgname = e['Package']
                pkgversion = e['Version']
                if not pkgname or not pkgversion:
                    # (a dead, unreachable "break" used to follow this raise)
                    raise Exception('Found invalid block (no Package and Version fields) in Sources file "{}".'.format(index_fname))

                pkg = SourcePackage()
                pkg.repo = self._repo_entity
                pkg.name = pkgname
                pkg.component = component
                if suite not in pkg.suites:
                    pkg.suites.append(suite)
                pkg.version = pkgversion

                pkg.architectures = split_strip(e['Architecture'], ' ')
                pkg.standards_version = e.get('Standards-Version', '0~notset')
                pkg.format_version = e['Format']

                pkg.vcs_browser = e.get('Vcs-Browser')
                pkg.homepage = e.get('Homepage')
                pkg.maintainer = e['Maintainer']
                # FIXME: Careful! Splitting just by comma isn't enough! We need to parse this properly.
                pkg.uploaders = split_strip(e.get('Uploaders', ''), ',')
                pkg.build_depends = split_strip(e.get('Build-Depends', ''), ',')
                pkg.directory = e['Directory']
                pkg.files = parse_checksums_list(e.get('Checksums-Sha256'), pkg.directory)

                binaries = []
                raw_pkg_list = e.get('Package-List', None)
                if not raw_pkg_list:
                    # no Package-List field - synthesize entries from "Binary"
                    for bpname in e.get('Binary', '').split(','):
                        if not bpname:
                            continue
                        bpname = bpname.strip()
                        pi = PackageInfo()
                        pi.deb_type = DebType.DEB
                        pi.name = bpname
                        # FIX: was "pi.ver", inconsistent with the "version"
                        # attribute set by parse_package_list_str above.
                        pi.version = pkg.version
                        binaries.append(pi)
                else:
                    binaries = parse_package_list_str(raw_pkg_list, pkg.version)
                pkg.binaries = binaries

                # do some issue-reporting
                if not pkg.files and pkg.format_version != '1.0':
                    log.warning('Source package {}/{} seems to have no files (in {}).'.format(pkg.name, pkg.version, self.location))

                # add package to results set
                pkg.update_uuid()
                pkgs.append(pkg)

        return pkgs

    def _read_binary_packages_from_tf(self, tf, tf_fname, suite, component, arch, deb_type):
        '''Parse binary package entries from the already-opened TagFile :tf.

        :param deb_type: nominal package type; note the effective type is
                         derived from the filename extension below.
        '''
        requested_arch_is_all = arch.name == 'all'

        pkgs = []
        for e in tf:
            pkgname = e['Package']
            pkgversion = e['Version']
            if not pkgname or not pkgversion:
                # (a dead, unreachable "break" used to follow this raise)
                raise Exception('Found invalid block (no Package and Version fields) in Packages file "{}".'.format(tf_fname))

            arch_name = e['Architecture']

            # we deal with arch:all packages separately
            if not requested_arch_is_all and arch_name == 'all':
                continue

            # sanity check (typo "unexpeced" fixed in the message)
            if arch_name != arch.name:
                log.warning('Found package "{}::{}/{}" with unexpected architecture "{}" (expected "{}")'.format(self._name, pkgname, pkgversion, arch_name, arch.name))

            pkg = BinaryPackage()
            pkg.repo = self._repo_entity
            pkg.name = pkgname
            pkg.component = component
            pkg.version = pkgversion
            if suite not in pkg.suites:
                pkg.suites.append(suite)

            pkg.architecture = arch
            pkg.maintainer = e['Maintainer']

            # The Source field is either absent (source name == package name),
            # "name (version)", or just "name".
            source_id = e.get('Source')
            if not source_id:
                pkg.source_name = pkg.name
                pkg.source_version = pkg.version
            elif '(' in source_id:
                pkg.source_name = source_id[0:source_id.index('(') - 1].strip()
                pkg.source_version = source_id[source_id.index('(') + 1:source_id.index(')')].strip()
            else:
                pkg.source_name = source_id
                pkg.source_version = pkg.version

            pkg.size_installed = int(e.get('Installed-Size', '0'))

            pkg.depends = split_strip(e.get('Depends', ''), ',')
            pkg.pre_depends = split_strip(e.get('Pre-Depends', ''), ',')

            pkg.homepage = e.get('Homepage')
            pkg.section = e['Section']

            pkg.description = e['Description']
            pkg.description_md5 = e.get('Description-md5')

            pkg.priority = packagepriority_from_string(e['Priority'])

            pkg.bin_file = ArchiveFile()
            pkg.bin_file.fname = e['Filename']
            pkg.bin_file.size = int(e.get('Size', '0'))
            pkg.bin_file.sha256sum = e['SHA256']

            pkg.deb_type = DebType.DEB
            if pkg.bin_file.fname.endswith('.udeb'):
                pkg.deb_type = DebType.UDEB

            # do some issue-reporting
            if not pkg.bin_file.fname:
                log.warning('Binary package "{}/{}/{}" seems to have no files.'.format(pkg.name, pkg.version, arch.name))

            # update UUID and add package to results set
            pkg.update_uuid()
            pkgs.append(pkg)

        return pkgs

    def binary_packages(self, suite, component, arch):
        '''
        Get a list of binary package information for the given repository suite,
        component and architecture.
        '''
        assert type(suite) is ArchiveSuite
        assert type(component) is ArchiveComponent
        assert type(arch) is ArchiveArchitecture

        index_fname = self.index_file(suite.name, os.path.join(component.name, 'binary-{}'.format(arch.name), 'Packages.xz'))
        if not index_fname:
            return []

        with TagFile(index_fname) as tf:
            return self._read_binary_packages_from_tf(tf,
                                                      index_fname,
                                                      suite,
                                                      component,
                                                      arch,
                                                      DebType.DEB)

    def installer_packages(self, suite, component, arch):
        '''
        Get a list of binary installer packages for the given repository suite, component
        and architecture.
        These binary packages are typically udebs used by the debian-installer, and should not
        be installed on an user's system.
        '''
        assert type(suite) is ArchiveSuite
        assert type(component) is ArchiveComponent
        assert type(arch) is ArchiveArchitecture

        index_fname = self.index_file(suite.name, os.path.join(component.name, 'debian-installer', 'binary-{}'.format(arch.name), 'Packages.xz'))
        if not index_fname:
            return []

        with TagFile(index_fname) as tf:
            return self._read_binary_packages_from_tf(tf,
                                                      index_fname,
                                                      suite,
                                                      component,
                                                      arch,
                                                      DebType.UDEB)
def make_newest_packages_dict(pkgs):
    '''
    Create a dictionary of name->pkg containing only
    the packages with the highest version number from :pkgs
    '''
    newest = {}
    for candidate in pkgs:
        current = newest.get(candidate.name)
        # keep the candidate if it is the first of its name, or if it
        # compares greater (by Debian version ordering) than what we hold
        if not current or version_compare(candidate.version, current.version) > 0:
            newest[candidate.name] = candidate
    return newest
| gpl-3.0 |
p0psicles/SickRage | sickbeard/indexers/indexer_exceptions.py | 9 | 1303 | #!/usr/bin/env python2.7
# encoding:utf-8
# author:echel0n
# project:indexer_api
# repository:http://github.com/echel0n/Sick-Beard
# license:unlicense (http://unlicense.org/)
"""Custom exceptions used or raised by indexer_api"""
from tvdb_api.tvdb_exceptions import (tvdb_exception, tvdb_error, tvdb_userabort, tvdb_shownotfound, tvdb_showincomplete,
tvdb_seasonnotfound, tvdb_episodenotfound, tvdb_attributenotfound)
__author__ = "echel0n"
__version__ = "1.0"
indexerExcepts = ["indexer_exception", "indexer_error", "indexer_userabort", "indexer_shownotfound", "indexer_showincomplete",
"indexer_seasonnotfound", "indexer_episodenotfound", "indexer_attributenotfound"]
tvdbExcepts = ["tvdb_exception", "tvdb_error", "tvdb_userabort", "tvdb_shownotfound", "tvdb_showincomplete",
"tvdb_seasonnotfound", "tvdb_episodenotfound", "tvdb_attributenotfound"]
# link API exceptions to our exception handler
indexer_exception = tvdb_exception
indexer_error = tvdb_error
indexer_userabort = tvdb_userabort
indexer_attributenotfound = tvdb_attributenotfound
indexer_episodenotfound = tvdb_episodenotfound
indexer_seasonnotfound = tvdb_seasonnotfound
indexer_shownotfound = tvdb_shownotfound
indexer_showincomplete = tvdb_showincomplete
| gpl-3.0 |
sinkuri256/python-for-android | python3-alpha/python3-src/Lib/distutils/tests/test_cygwinccompiler.py | 147 | 5671 | """Tests for distutils.cygwinccompiler."""
import unittest
import sys
import os
from io import BytesIO
import subprocess
from test.support import run_unittest
from distutils import cygwinccompiler
from distutils.cygwinccompiler import (CygwinCCompiler, check_config_h,
CONFIG_H_OK, CONFIG_H_NOTOK,
CONFIG_H_UNCERTAIN, get_versions,
get_msvcr)
from distutils.tests import support
class FakePopen(object):
    '''Stand-in for subprocess.Popen serving canned stdout data.

    A test case class is attached via ``test_class``; commands whose
    executable name appears in that class's ``_exes`` mapping get the mapped
    bytes back as stdout, anything else falls through to a real pipe.
    '''
    test_class = None

    def __init__(self, cmd, shell, stdout):
        self.cmd = cmd.split()[0]
        known = self.test_class._exes
        if self.cmd not in known:
            self.stdout = os.popen(cmd, 'r')
        else:
            # issue #6438 in Python 3.x, Popen returns bytes
            self.stdout = BytesIO(known[self.cmd])
class CygwinCCompilerTestCase(support.TempdirManager,
                              unittest.TestCase):
    '''Tests for distutils.cygwinccompiler helper functions.

    setUp swaps several distutils hooks (the config header lookup,
    executable discovery and Popen) for controllable fakes; tearDown
    restores all of them.
    '''

    def setUp(self):
        super(CygwinCCompilerTestCase, self).setUp()
        self.version = sys.version
        self.python_h = os.path.join(self.mkdtemp(), 'python.h')
        from distutils import sysconfig
        self.old_get_config_h_filename = sysconfig.get_config_h_filename
        sysconfig.get_config_h_filename = self._get_config_h_filename
        self.old_find_executable = cygwinccompiler.find_executable
        cygwinccompiler.find_executable = self._find_executable
        self._exes = {}
        self.old_popen = cygwinccompiler.Popen
        FakePopen.test_class = self
        cygwinccompiler.Popen = FakePopen

    def tearDown(self):
        sys.version = self.version
        from distutils import sysconfig
        sysconfig.get_config_h_filename = self.old_get_config_h_filename
        cygwinccompiler.find_executable = self.old_find_executable
        cygwinccompiler.Popen = self.old_popen
        super(CygwinCCompilerTestCase, self).tearDown()

    def _get_config_h_filename(self):
        # Replacement for sysconfig.get_config_h_filename.
        return self.python_h

    def _find_executable(self, name):
        # Replacement for find_executable: only the faked binaries exist.
        if name in self._exes:
            return name
        return None

    def test_check_config_h(self):
        # check_config_h looks for "GCC" in sys.version first
        # and returns CONFIG_H_OK when it is found
        sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) \n[GCC '
                       '4.0.1 (Apple Computer, Inc. build 5370)]')
        self.assertEqual(check_config_h()[0], CONFIG_H_OK)

        # otherwise it tries to find "__GNUC__" in pyconfig.h
        sys.version = 'something without the *CC word'

        # a missing file means CONFIG_H_UNCERTAIN
        self.assertEqual(check_config_h()[0], CONFIG_H_UNCERTAIN)

        # a file without __GNUC__ means CONFIG_H_NOTOK
        self.write_file(self.python_h, 'xxx')
        self.assertEqual(check_config_h()[0], CONFIG_H_NOTOK)

        # and a file containing __GNUC__ means CONFIG_H_OK
        self.write_file(self.python_h, 'xxx __GNUC__ xxx')
        self.assertEqual(check_config_h()[0], CONFIG_H_OK)

    def test_get_versions(self):
        # get_versions calls distutils.spawn.find_executable on
        # 'gcc', 'ld' and 'dllwrap'
        self.assertEqual(get_versions(), (None, None, None))

        # fake a 'gcc' reporting version 3.4.5
        self._exes['gcc'] = b'gcc (GCC) 3.4.5 (mingw special)\nFSF'
        versions = get_versions()
        self.assertEqual(str(versions[0]), '3.4.5')

        # and output that does not match the version
        # regular expression (\d+\.\d+(\.\d+)*)
        self._exes['gcc'] = b'very strange output'
        versions = get_versions()
        self.assertEqual(versions[0], None)

        # same pair of checks for ld
        self._exes['ld'] = b'GNU ld version 2.17.50 20060824'
        versions = get_versions()
        self.assertEqual(str(versions[1]), '2.17.50')
        self._exes['ld'] = b'@(#)PROGRAM:ld PROJECT:ld64-77'
        versions = get_versions()
        self.assertEqual(versions[1], None)

        # and for dllwrap
        self._exes['dllwrap'] = b'GNU dllwrap 2.17.50 20060824\nFSF'
        versions = get_versions()
        self.assertEqual(str(versions[2]), '2.17.50')
        self._exes['dllwrap'] = b'Cheese Wrap'
        versions = get_versions()
        self.assertEqual(versions[2], None)

    def test_get_msvcr(self):
        # no MSC marker in sys.version -> None
        sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) '
                       '\n[GCC 4.0.1 (Apple Computer, Inc. build 5370)]')
        self.assertEqual(get_msvcr(), None)

        # MSVC 7.0
        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
                       '[MSC v.1300 32 bits (Intel)]')
        self.assertEqual(get_msvcr(), ['msvcr70'])

        # MSVC 7.1
        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
                       '[MSC v.1310 32 bits (Intel)]')
        self.assertEqual(get_msvcr(), ['msvcr71'])

        # VS2005 / MSVC 8.0
        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
                       '[MSC v.1400 32 bits (Intel)]')
        self.assertEqual(get_msvcr(), ['msvcr80'])

        # VS2008 / MSVC 9.0
        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
                       '[MSC v.1500 32 bits (Intel)]')
        self.assertEqual(get_msvcr(), ['msvcr90'])

        # unknown compiler version -> ValueError
        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
                       '[MSC v.1999 32 bits (Intel)]')
        self.assertRaises(ValueError, get_msvcr)
def test_suite():
    '''Collect this module's test cases into a suite for run_unittest.'''
    # Equivalent to unittest.makeSuite(CygwinCCompilerTestCase).
    return unittest.TestLoader().loadTestsFromTestCase(CygwinCCompilerTestCase)


if __name__ == '__main__':
    run_unittest(test_suite())
| apache-2.0 |
Ballz0fSteel/Umeko | lib/youtube_dl/extractor/americastestkitchen.py | 13 | 3098 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import (
clean_html,
int_or_none,
try_get,
unified_strdate,
)
class AmericasTestKitchenIE(InfoExtractor):
    '''Extractor for americastestkitchen.com episode and video pages.

    The pages embed a Kaltura player; actual media extraction is delegated
    to the Kaltura extractor through a "kaltura:" URL.
    '''
    _VALID_URL = r'https?://(?:www\.)?americastestkitchen\.com/(?:episode|videos)/(?P<id>\d+)'
    _TESTS = [{
        'url': 'https://www.americastestkitchen.com/episode/548-summer-dinner-party',
        'md5': 'b861c3e365ac38ad319cfd509c30577f',
        'info_dict': {
            'id': '1_5g5zua6e',
            'title': 'Summer Dinner Party',
            'ext': 'mp4',
            'description': 'md5:858d986e73a4826979b6a5d9f8f6a1ec',
            'thumbnail': r're:^https?://.*\.jpg',
            'timestamp': 1497285541,
            'upload_date': '20170612',
            'uploader_id': 'roger.metcalf@americastestkitchen.com',
            'release_date': '20170617',
            'series': "America's Test Kitchen",
            'season_number': 17,
            'episode': 'Summer Dinner Party',
            'episode_number': 24,
        },
        'params': {
            'skip_download': True,
        },
    }, {
        'url': 'https://www.americastestkitchen.com/videos/3420-pan-seared-salmon',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)

        # The player iframe URL carries the Kaltura partner id.
        partner_id = self._search_regex(
            r'src=["\'](?:https?:)?//(?:[^/]+\.)kaltura\.com/(?:[^/]+/)*(?:p|partner_id)/(\d+)',
            webpage, 'kaltura partner id')

        # All metadata lives in the page's serialized initial state blob.
        initial_state = self._parse_json(
            self._search_regex(
                r'window\.__INITIAL_STATE__\s*=\s*({.+?})\s*;\s*</script>',
                webpage, 'initial context'),
            video_id)

        # Episode pages and plain video pages nest the data differently.
        content = try_get(
            initial_state,
            (lambda x: x['episodeDetail']['content']['data'],
             lambda x: x['videoDetail']['content']['data']), dict)
        full_video = content.get('full_video', {})

        external_id = content.get('external_id') or full_video['external_id']

        return {
            '_type': 'url_transparent',
            'url': 'kaltura:%s:%s' % (partner_id, external_id),
            'ie_key': 'Kaltura',
            'title': content.get('title') or full_video.get('title'),
            'description': clean_html(full_video.get('episode_description') or content.get(
                'description') or full_video.get('description')),
            'thumbnail': try_get(full_video, lambda x: x['photo']['image_url']),
            'release_date': unified_strdate(content.get('aired_at')),
            'series': "America's Test Kitchen",
            'season_number': int_or_none(full_video.get('season_number')),
            'episode': full_video.get('title'),
            'episode_number': int_or_none(full_video.get('episode_number')),
        }
| gpl-3.0 |
shivaenigma/bitcointools | bitcointools/fixwallet.py | 4 | 3386 | #!/usr/bin/env python
#
# Recover from a semi-corrupt wallet
#
from bsddb.db import *
import logging
import sys
from wallet import rewrite_wallet, trim_wallet
from util import determine_db_dir, create_env
def main():
    """Command-line entry point: repair or filter a (semi-corrupt) wallet.dat.

    Parses options, opens the BDB environment and either trims the wallet,
    rewrites it while filtering entries, or tweaks a transaction's spent flag.
    """
    import optparse
    parser = optparse.OptionParser(usage="%prog [options]")
    parser.add_option("--datadir", dest="datadir", default=None,
                      help="Look for files here (defaults to bitcoin default)")
    parser.add_option("--out", dest="outfile", default="walletNEW.dat",
                      help="Name of output file (default: walletNEW.dat)")
    parser.add_option("--clean", action="store_true", dest="clean", default=False,
                      help="Clean out old, spent change addresses and transactions")
    parser.add_option("--skipkey", dest="skipkey",
                      help="Skip entries with keys that contain given string")
    parser.add_option("--tweakspent", dest="tweakspent",
                      help="Tweak transaction to mark unspent")
    parser.add_option("--noaccounts", action="store_true", dest="noaccounts", default=False,
                      help="Drops all accounts from the old wallet")
    parser.add_option("--nosettings", action="store_true", dest="nosettings", default=False,
                      help="Drops all settings from the old wallet")
    parser.add_option("--notxes", action="store_true", dest="notxes", default=False,
                      help="Drops transactions from the old wallet, open Bitcoin with -rescan after this")
    parser.add_option("--noaddresses", action="store_true", dest="nopubkeys", default=False,
                      help="Drops addresses from the old wallet, this will clear your address book leaving only one address\
WARNING: Make sure to refill your keypool after using this (by simply unlocking the wallet)")
    (options, args) = parser.parse_args()

    db_dir = determine_db_dir() if options.datadir is None else options.datadir

    # Record-type names to drop from the rewritten wallet.
    skip_types = []
    if options.nosettings:
        skip_types.extend(["version", "setting", "defaultkey"])
    if options.noaccounts:
        skip_types.extend(["acc", "acentry"])
    if options.notxes:
        skip_types.extend(["tx", "bestblock"])
    if options.nopubkeys:
        skip_types.extend(["name", "pool"])

    try:
        db_env = create_env(db_dir)
    except DBNoSuchFileError:
        logging.error("Couldn't open " + db_dir)
        sys.exit(1)

    if options.clean:
        trim_wallet(db_env, options.outfile)
    elif options.skipkey:
        # Drop every entry whose key contains the given substring.
        def skip_matching_keys(entry_type, data):
            return options.skipkey not in data['__key__']
        rewrite_wallet(db_env, options.outfile, skip_matching_keys)
    elif options.tweakspent:
        # Transaction ids are stored little-endian; reverse the hex bytes.
        txid = options.tweakspent.decode('hex_codec')[::-1]
        def mark_unspent(entry_type, data):
            if txid in data['__key__']:
                data['__value__'] = data['__value__'][:-1] + '\0'
            return True
        rewrite_wallet(db_env, options.outfile, mark_unspent)
    elif skip_types:
        # Drop every entry whose record type was selected above.
        def skip_selected_types(entry_type, data):
            return entry_type not in skip_types
        rewrite_wallet(db_env, options.outfile, skip_selected_types)
    else:
        rewrite_wallet(db_env, options.outfile)

    db_env.close()

if __name__ == '__main__':
    main()
| mit |
pletoss/poclbm | guiminer.py | 1 | 97479 | """GUIMiner - graphical frontend to Bitcoin miners.
Currently supports:
- m0mchil's "poclbm"
- puddinpop's "rpcminer"
- jedi95's "Phoenix"
- ufasoft's "bitcoin-miner"
Copyright 2011 Chris MacLeod
This program is released under the GNU GPL. See LICENSE.txt for details.
"""
import sys, os, subprocess, errno, re, threading, logging, time, httplib, urllib
import wx
import json
import collections
try:
import win32api, win32con, win32process
except ImportError:
pass
from wx.lib.agw import flatnotebook as fnb
from wx.lib.agw import hyperlink
from wx.lib.newevent import NewEvent
__version__ = '2011-11-22'
def get_module_path():
    """Return the folder containing this script (or its .exe)."""
    # When frozen (py2exe etc.), __file__ is meaningless; use the exe path.
    if hasattr(sys, 'frozen'):
        module_name = sys.executable
    else:
        module_name = __file__
    return os.path.dirname(os.path.abspath(module_name))
# True when running with the --mock command-line flag.
USE_MOCK = '--mock' in sys.argv
# Set up localization; requires the app to be created
app = wx.PySimpleApp(0)
wx.InitAllImageHandlers()
# Shorthand for wx's translation lookup, used on user-visible strings below.
_ = wx.GetTranslation
# Display name -> wx language constant for each supported translation.
LANGUAGES = {
"Chinese Simplified": wx.LANGUAGE_CHINESE_SIMPLIFIED,
"English": wx.LANGUAGE_ENGLISH,
"French": wx.LANGUAGE_FRENCH,
"German": wx.LANGUAGE_GERMAN,
"Hungarian": wx.LANGUAGE_HUNGARIAN,
"Italian": wx.LANGUAGE_ITALIAN,
"Spanish": wx.LANGUAGE_SPANISH,
"Russian": wx.LANGUAGE_RUSSIAN,
"Dutch": wx.LANGUAGE_DUTCH,
}
# Reverse mapping: wx language constant -> display name.
LANGUAGES_REVERSE = dict((v, k) for (k, v) in LANGUAGES.items())
# Bitcoin address offered for donations in the about dialog.
DONATION_ADDRESS = "1MDDh2h4cAZDafgc94mr9q95dhRYcJbNQo"
# Active wx.Locale (or None) and the wx language id currently in use;
# both are managed by update_language()/load_language() below.
locale = None
language = None
def update_language(new_language):
    """Switch the GUI translation catalog to new_language.

    Updates the module-level locale/language globals; if wx rejects the
    requested language, locale is reset to None.
    """
    global locale, language
    language = new_language

    if locale:
        del locale
    locale = wx.Locale(language)

    if not locale.IsOk():
        locale = None
    else:
        locale.AddCatalogLookupPathPrefix(os.path.join(get_module_path(), "locale"))
        locale.AddCatalog("guiminer")
def load_language():
    """Read default_language.ini and activate the saved language (or English)."""
    config_path = os.path.join(get_module_path(), 'default_language.ini')
    settings = {}
    if os.path.exists(config_path):
        with open(config_path) as fp:
            settings.update(json.load(fp))
    chosen = settings.get('language', "English")
    update_language(LANGUAGES.get(chosen, wx.LANGUAGE_ENGLISH))
def save_language():
    """Persist the currently active language choice to default_language.ini."""
    config_path = os.path.join(get_module_path(), 'default_language.ini')
    with open(config_path, 'w') as fp:
        json.dump(dict(language=LANGUAGES_REVERSE.get(language)), fp)
# Activate the saved language now, before the translated strings below
# are evaluated via _().
load_language()
# About-dialog text; %(version)s and %(address)s are filled in at display time.
ABOUT_TEXT = _(
"""GUIMiner
Version: %(version)s
GUI by Chris 'Kiv' MacLeod
Original poclbm miner by m0mchil
Original rpcminer by puddinpop
Get the source code or file issues at GitHub:
https://github.com/Kiv/poclbm
If you enjoyed this software, support its development
by donating to:
%(address)s
Even a single Bitcoin is appreciated and helps motivate
further work on this software.
""")
# Translatable strings that are used repeatedly
STR_NOT_STARTED = _("Not started")
STR_STARTING = _("Starting...")
STR_STOPPED = _("Stopped")
STR_PAUSED = _("Paused")
STR_START_MINING = _("Start mining!")
STR_STOP_MINING = _("Stop mining")
STR_REFRESH_BALANCE = _("Refresh balance")
STR_CONNECTION_ERROR = _("Connection error")
STR_USERNAME = _("Username:")
STR_PASSWORD = _("Password:")
STR_QUIT = _("Quit this program")
STR_ABOUT = _("Show about dialog")
# Alternate backends that we know how to call
SUPPORTED_BACKENDS = [
"rpcminer-4way.exe",
"rpcminer-cpu.exe",
"rpcminer-cuda.exe",
"rpcminer-opencl.exe",
"phoenix.py",
"phoenix.exe",
"bitcoin-miner.exe"
]
# HTTP User-Agent header sent on balance requests.
USER_AGENT = "guiminer/" + __version__
# Time constants
SAMPLE_TIME_SECS = 3600
REFRESH_RATE_MILLIS = 2000
# Layout constants
LBL_STYLE = wx.ALIGN_RIGHT | wx.ALIGN_CENTER_VERTICAL
BTN_STYLE = wx.ALIGN_CENTER_HORIZONTAL | wx.ALL
# Events sent from the worker threads
(UpdateHashRateEvent, EVT_UPDATE_HASHRATE) = NewEvent()
(UpdateAcceptedEvent, EVT_UPDATE_ACCEPTED) = NewEvent()
(UpdateSoloCheckEvent, EVT_UPDATE_SOLOCHECK) = NewEvent()
(UpdateStatusEvent, EVT_UPDATE_STATUS) = NewEvent()
# Utility functions
def merge_whitespace(s):
    """Combine multiple whitespace characters found in s into one."""
    # Collapse runs of spaces or runs of tabs each to a single space,
    # then trim the ends.
    collapsed = re.sub(r"( +)|\t+", " ", s)
    return collapsed.strip()
def get_opencl_devices():
    """Return a list of available OpenCL devices.

    Raises ImportError if OpenCL is not found.
    Raises IOError if no OpenCL devices are found.
    """
    import pyopencl
    found = []
    for p_idx, platform in enumerate(pyopencl.get_platforms()): #@UndefinedVariable
        for d_idx, device in enumerate(platform.get_devices()):
            # Label format: "[platform-device] truncated device name"
            found.append('[%d-%d] %s' %
                         (p_idx, d_idx, merge_whitespace(device.name)[:25]))
    if len(found) == 0:
        raise IOError
    return found
def get_icon_bundle():
    """Return the Bitcoin program icon bundle."""
    icon_path = os.path.join(get_module_path(), "logo.ico")
    return wx.IconBundleFromFile(icon_path, wx.BITMAP_TYPE_ICO)
def get_taskbar_icon():
    """Return the taskbar icon.

    This works around Window's annoying behavior of ignoring the 16x16 image
    and using nearest neighbour downsampling on the 32x32 image instead."""
    bundle = get_icon_bundle()
    return bundle.GetIcon((16, 16))
def mkdir_p(path):
    """If the directory 'path' doesn't exist, create it. Same as mkdir -p."""
    try:
        os.makedirs(path)
    except OSError as exc:
        # An already-existing path is fine; anything else is a real error.
        if exc.errno == errno.EEXIST:
            return
        raise
def add_tooltip(widget, text):
    """Add a tooltip to widget with the specified text."""
    widget.SetToolTip(wx.ToolTip(text))
def format_khash(rate):
    """Format rate for display. A rate of 0 means just connected."""
    if rate > 10 ** 6:
        return _("%.3f Ghash/s") % (rate / 1000000.)
    elif rate > 10 ** 3:
        return _("%.1f Mhash/s") % (rate / 1000.)
    elif rate == 0:
        return _("Connecting...")
    return _("%d khash/s") % rate
def format_balance(amount):
    """Format a quantity of Bitcoins in BTC."""
    value = float(amount)
    return "%.3f BTC" % value
def init_logger():
    """Set up and return the logging object and custom formatter."""
    log = logging.getLogger("poclbm-gui")
    log.setLevel(logging.DEBUG)
    # Truncate the log file on every start ('w' mode).
    log_path = os.path.join(get_module_path(), 'guiminer.log')
    handler = logging.FileHandler(log_path, 'w')
    fmt = logging.Formatter("%(asctime)s: %(message)s",
                            "%Y-%m-%d %H:%M:%S")
    handler.setFormatter(fmt)
    log.addHandler(handler)
    return log, fmt

logger, formatter = init_logger()
def http_request(hostname, *args, **kwargs):
    """Do a HTTP request and return the response data."""
    if kwargs.get('use_https'):
        conn = httplib.HTTPSConnection(hostname)
    else:
        conn = httplib.HTTPConnection(hostname)
    try:
        logger.debug(_("Requesting balance: %(request)s"), dict(request=args))
        conn.request(*args)
        response = conn.getresponse()
        data = response.read()
        logger.debug(_("Server replied: %(status)s, %(data)s"),
                     dict(status=str(response.status), data=data))
        return response, data
    finally:
        conn.close()
def get_process_affinity(pid):
    """Return the affinity mask for the specified process."""
    handle = win32api.OpenProcess(win32con.PROCESS_QUERY_INFORMATION, 0, pid)
    # GetProcessAffinityMask returns (process mask, system mask).
    process_mask, _system_mask = win32process.GetProcessAffinityMask(handle)
    return process_mask
def set_process_affinity(pid, mask):
    """Set the affinity for process to mask."""
    access = win32con.PROCESS_QUERY_INFORMATION | win32con.PROCESS_SET_INFORMATION
    proc = win32api.OpenProcess(access, 0, pid)
    win32process.SetProcessAffinityMask(proc, mask)
def find_nth(haystack, needle, n):
    """Return the index of the nth occurrence of needle in haystack (-1 if absent)."""
    pos = haystack.find(needle)
    remaining = n
    while remaining > 1 and pos >= 0:
        pos = haystack.find(needle, pos + len(needle))
        remaining -= 1
    return pos
class ConsolePanel(wx.Panel):
    """Panel that displays logging events.

    Uses with a StreamHandler to log events to a TextCtrl. Thread-safe.
    """

    def __init__(self, parent, n_max_lines):
        wx.Panel.__init__(self, parent, -1)
        self.parent = parent
        self.n_max_lines = n_max_lines

        sizer = wx.BoxSizer(wx.VERTICAL)
        self.text = wx.TextCtrl(self, -1, "",
                                style=wx.TE_MULTILINE | wx.TE_READONLY | wx.HSCROLL)
        sizer.Add(self.text, 1, wx.EXPAND)
        self.SetSizer(sizer)

        # Route logging records from the module logger into this panel;
        # the handler writes to us (see write()).
        self.handler = logging.StreamHandler(self)
        self.handler.setFormatter(
            logging.Formatter("%(asctime)s: %(message)s", "%Y-%m-%d %H:%M:%S"))
        logger.addHandler(self.handler)

    def on_focus(self):
        """On focus, clear the status bar."""
        self.parent.statusbar.SetStatusText("", 0)
        self.parent.statusbar.SetStatusText("", 1)

    def on_close(self):
        """On closing, stop handling logging events."""
        logger.removeHandler(self.handler)

    def append_text(self, text):
        # Append, then trim the oldest lines beyond the configured maximum.
        self.text.AppendText(text)
        excess = self.text.GetNumberOfLines() - self.n_max_lines
        if excess > 0:
            contents = self.text.GetValue()
            cut_at = find_nth(contents, '\n', excess)
            self.text.ChangeValue(contents[cut_at + 1:])

    def write(self, text):
        """Forward logging events to our TextCtrl."""
        wx.CallAfter(self.append_text, text)
class SummaryPanel(wx.Panel):
    """Panel that displays a summary of all miners."""
    def __init__(self, parent):
        wx.Panel.__init__(self, parent, -1)
        self.parent = parent
        # Periodically refresh the summary rows while this tab is visible.
        self.timer = wx.Timer(self)
        self.timer.Start(REFRESH_RATE_MILLIS)
        self.Bind(wx.EVT_TIMER, self.on_timer)
        flags = wx.ALIGN_CENTER_HORIZONTAL | wx.ALL
        border = 5
        # Header widgets packed as (widget, proportion, flags, border)
        # tuples, ready for FlexGridSizer.AddMany below.
        self.column_headers = [
            (wx.StaticText(self, -1, _("Miner")), 0, flags, border),
            (wx.StaticText(self, -1, _("Speed")), 0, flags, border),
            (wx.StaticText(self, -1, _("Accepted")), 0, flags, border),
            (wx.StaticText(self, -1, _("Stale")), 0, flags, border),
            (wx.StaticText(self, -1, _("Start/Stop")), 0, flags, border),
            (wx.StaticText(self, -1, _("Autostart")), 0, flags, border),
        ]
        # Underline the header row to set it apart from the data rows.
        font = wx.SystemSettings_GetFont(wx.SYS_DEFAULT_GUI_FONT)
        font.SetUnderlined(True)
        for st in self.column_headers:
            st[0].SetFont(font)
        self.grid = wx.FlexGridSizer(0, len(self.column_headers), 2, 2)
        self.grid.AddMany(self.column_headers)
        self.add_miners_to_grid()
        # Let the first four (text) columns absorb extra horizontal space.
        self.grid.AddGrowableCol(0)
        self.grid.AddGrowableCol(1)
        self.grid.AddGrowableCol(2)
        self.grid.AddGrowableCol(3)
        self.SetSizer(self.grid)
    def add_miners_to_grid(self):
        """Add a summary row for each miner to the summary grid."""
        # Remove any existing widgets except the column headers.
        # Iterate in reverse so earlier indices stay valid while removing.
        for i in reversed(range(len(self.column_headers), len(self.grid.GetChildren()))):
            self.grid.Hide(i)
            self.grid.Remove(i)
        for p in self.parent.profile_panels:
            p.clear_summary_widgets()
            self.grid.AddMany(p.get_summary_widgets(self))
        self.grid.Layout()
    def on_close(self):
        # Stop the refresh timer so no callbacks fire after teardown.
        self.timer.Stop()
    def on_timer(self, event=None):
        """Whenever the timer goes off, refresh the summary data."""
        # Skip the work entirely when the summary tab is not selected.
        if self.parent.nb.GetSelection() != self.parent.nb.GetPageIndex(self):
            return
        for p in self.parent.profile_panels:
            p.update_summary()
        self.parent.statusbar.SetStatusText("", 0) # TODO: show something
        # Show the combined hash rate of all running miners, if any.
        total_rate = sum(p.last_rate for p in self.parent.profile_panels
                         if p.is_mining)
        if any(p.is_mining for p in self.parent.profile_panels):
            self.parent.statusbar.SetStatusText(format_khash(total_rate), 1)
        else:
            self.parent.statusbar.SetStatusText("", 1)
    def on_focus(self):
        """On focus, show the statusbar text."""
        self.on_timer()
class GUIMinerTaskBarIcon(wx.TaskBarIcon):
    """Taskbar icon for the GUI.

    Shows status messages on hover and opens on click.
    """
    # Ids for the items of the right-click popup menu.
    TBMENU_RESTORE = wx.NewId()
    TBMENU_PAUSE = wx.NewId()
    TBMENU_CLOSE = wx.NewId()
    TBMENU_CHANGE = wx.NewId()  # NOTE(review): not used in this class
    TBMENU_REMOVE = wx.NewId()  # NOTE(review): not used in this class
    def __init__(self, frame):
        wx.TaskBarIcon.__init__(self)
        self.frame = frame
        self.icon = get_taskbar_icon()
        # Refresh the hover tooltip text periodically.
        self.timer = wx.Timer(self)
        self.timer.Start(REFRESH_RATE_MILLIS)
        self.is_paused = False
        self.SetIcon(self.icon, "GUIMiner")
        self.imgidx = 1
        self.Bind(wx.EVT_TASKBAR_LEFT_DCLICK, self.on_taskbar_activate)
        self.Bind(wx.EVT_MENU, self.on_taskbar_activate, id=self.TBMENU_RESTORE)
        self.Bind(wx.EVT_MENU, self.on_taskbar_close, id=self.TBMENU_CLOSE)
        self.Bind(wx.EVT_MENU, self.on_pause, id=self.TBMENU_PAUSE)
        self.Bind(wx.EVT_TIMER, self.on_timer)
    def CreatePopupMenu(self):
        """Override from wx.TaskBarIcon. Creates the right-click menu."""
        menu = wx.Menu()
        menu.AppendCheckItem(self.TBMENU_PAUSE, _("Pause all"))
        menu.Check(self.TBMENU_PAUSE, self.is_paused)
        menu.Append(self.TBMENU_RESTORE, _("Restore"))
        menu.Append(self.TBMENU_CLOSE, _("Close"))
        return menu
    def on_taskbar_activate(self, evt):
        # Restore and raise the main window on double-click.
        if self.frame.IsIconized():
            self.frame.Iconize(False)
        if not self.frame.IsShown():
            self.frame.Show(True)
        self.frame.Raise()
    def on_taskbar_close(self, evt):
        # Close from outside the GUI thread's current event; force skips
        # any veto from close handlers.
        wx.CallAfter(self.frame.Close, force=True)
    def on_timer(self, event):
        """Refresh the taskbar icon's status message."""
        objs = self.frame.profile_panels
        if objs:
            # One line per miner: "name: rate".
            text = '\n'.join(p.get_taskbar_text() for p in objs)
            self.SetIcon(self.icon, text)
    def on_pause(self, event):
        """Pause or resume the currently running miners."""
        self.is_paused = event.Checked()
        for miner in self.frame.profile_panels:
            if self.is_paused:
                miner.pause()
            else:
                miner.resume()
class MinerListenerThread(threading.Thread):
    """Thread that reads a miner subprocess's stdout and posts wx events.

    Subclasses override LINES with patterns for their backend's output.
    """
    # (regex, event factory) pairs tried in order against each output line.
    # A factory returning None means "recognized this line, ignore it".
    LINES = [
        (r"Target =|average rate|Sending to server|found hash|connected to|Setting server",
            lambda _: None), # Just ignore lines like these
        (r"accepted|\"result\":\s*true",
            lambda _: UpdateAcceptedEvent(accepted=True)),
        (r"invalid|stale", lambda _:
            UpdateAcceptedEvent(accepted=False)),
        (r"(\d+)\s*khash/s", lambda match:
            UpdateHashRateEvent(rate=int(match.group(1)))),
        (r"(\d+\.\d+)\s*MH/s", lambda match:
            UpdateHashRateEvent(rate=float(match.group(1)) * 1000)),
        (r"(\d+\.\d+)\s*Mhash/s", lambda match:
            UpdateHashRateEvent(rate=float(match.group(1)) * 1000)),
        (r"(\d+)\s*Mhash/s", lambda match:
            UpdateHashRateEvent(rate=int(match.group(1)) * 1000)),
        (r"checking (\d+)", lambda _:
            UpdateSoloCheckEvent()),
    ]
    def __init__(self, parent, miner):
        # parent: wx window the events are posted to.
        # miner: subprocess.Popen whose stdout we read.
        threading.Thread.__init__(self)
        self.shutdown_event = threading.Event()
        self.parent = parent
        self.parent_name = parent.name
        self.miner = miner
    def run(self):
        """Read miner output until asked to shut down, posting a wx event
        for each recognized message and piping unknown lines through as
        status updates."""
        logger.info(_('Listener for "%s" started') % self.parent_name)
        while not self.shutdown_event.is_set():
            # Blocking read; empty lines (including EOF) are skipped.
            line = self.miner.stdout.readline().strip()
            #logger.debug("Line: %s", line)
            if not line: continue
            for s, event_func in self.LINES: # Use self to allow subclassing
                match = re.search(s, line, flags=re.I)
                if match is not None:
                    event = event_func(match)
                    if event is not None:
                        wx.PostEvent(self.parent, event)
                    # First matching pattern wins.
                    break
            else:
                # Possible error or new message, just pipe it through
                event = UpdateStatusEvent(text=line)
                logger.info(_('Listener for "%(name)s": %(line)s'),
                            dict(name=self.parent_name, line=line))
                wx.PostEvent(self.parent, event)
        logger.info(_('Listener for "%s" shutting down'), self.parent_name)
class PhoenixListenerThread(MinerListenerThread):
    """Listener whose patterns match the phoenix miner's output format."""
    LINES = [
        (r"Result: .* accepted",
            lambda _: UpdateAcceptedEvent(accepted=True)),
        (r"Result: .* rejected", lambda _:
            UpdateAcceptedEvent(accepted=False)),
        # Phoenix prints rates as e.g. "123.45 Khash/sec"; the two groups
        # capture the integer and fractional parts separately.
        (r"(\d+)\.?(\d*) Khash/sec", lambda match:
            UpdateHashRateEvent(rate=float(match.group(1) + '.' + match.group(2)))),
        (r"(\d+)\.?(\d*) Mhash/sec", lambda match:
            UpdateHashRateEvent(rate=float(match.group(1) + '.' + match.group(2)) * 1000)),
        (r"Currently on block",
            lambda _: None), # Just ignore lines like these
    ]
class CgListenerThread(MinerListenerThread):
    """Listener whose patterns match cgminer's output format."""
    LINES = [
        (r"Accepted .* GPU \d+ thread \d+",
            lambda _: UpdateAcceptedEvent(accepted=True)),
        (r"Rejected .* GPU \d+ thread \d+",
            lambda _: UpdateAcceptedEvent(accepted=False)),
        # Rate lines look like "(5s):123.4 ... Mh/s"; groups capture the
        # integer and fractional parts of the Mh/s figure.
        (r"\(\d+s\):(\d+)\.?(\d*) .* Mh/s", lambda match:
            UpdateHashRateEvent(rate=float(match.group(1) + '.' + match.group(2)) * 1000)),
        (r"^GPU\s*\d+",
            lambda _: None), # Just ignore lines like these
    ]
class MinerTab(wx.Panel):
"""A tab in the GUI representing a miner instance.
Each MinerTab has these responsibilities:
- Persist its data to and from the config file
- Launch a backend subprocess and monitor its progress
by creating a MinerListenerThread.
- Post updates to the GUI's statusbar & summary panel; the format depends
whether the backend is working solo or in a pool.
"""
    def __init__(self, parent, id, devices, servers, defaults, statusbar, data):
        """Create the miner tab.

        parent: notebook containing this tab.
        id: wx id for the panel.
        devices: list of OpenCL device description strings.
        servers: list of server config dicts (keys include 'name', 'host').
        defaults: dict of default profile settings.
        statusbar: the frame's statusbar, shared between tabs.
        data: saved profile data dict; see set_data()/get_data().
        """
        wx.Panel.__init__(self, parent, id)
        self.parent = parent
        self.servers = servers
        self.defaults = defaults
        self.statusbar = statusbar
        # Mining state flags.
        self.is_mining = False
        self.is_paused = False
        self.is_possible_error = False
        self.miner = None # subprocess.Popen instance when mining
        self.miner_listener = None # MinerListenerThread when mining
        # Share statistics; the deques hold recent event timestamps only.
        self.solo_blocks_found = 0
        self.accepted_shares = 0 # shares for pool, diff1 hashes for solo
        self.accepted_times = collections.deque()
        self.invalid_shares = 0
        self.invalid_times = collections.deque()
        self.last_rate = 0 # units of khash/s
        self.autostart = False
        # NUMBER_OF_PROCESSORS is a Windows environment variable; defaults
        # to 1 elsewhere.
        self.num_processors = int(os.getenv('NUMBER_OF_PROCESSORS', 1))
        self.affinity_mask = 0
        self.server_lbl = wx.StaticText(self, -1, _("Server:"))
        self.summary_panel = None # SummaryPanel instance if summary open
        self.server = wx.ComboBox(self, -1,
                                  choices=[s['name'] for s in servers],
                                  style=wx.CB_READONLY)
        self.website_lbl = wx.StaticText(self, -1, _("Website:"))
        self.website = hyperlink.HyperLinkCtrl(self, -1, "")
        self.external_lbl = wx.StaticText(self, -1, _("Ext. Path:"))
        self.txt_external = wx.TextCtrl(self, -1, "")
        self.host_lbl = wx.StaticText(self, -1, _("Host:"))
        self.txt_host = wx.TextCtrl(self, -1, "")
        self.port_lbl = wx.StaticText(self, -1, _("Port:"))
        self.txt_port = wx.TextCtrl(self, -1, "")
        self.user_lbl = wx.StaticText(self, -1, STR_USERNAME)
        self.txt_username = wx.TextCtrl(self, -1, "")
        self.pass_lbl = wx.StaticText(self, -1, STR_PASSWORD)
        self.txt_pass = wx.TextCtrl(self, -1, "", style=wx.TE_PASSWORD)
        self.device_lbl = wx.StaticText(self, -1, _("Device:"))
        self.device_listbox = wx.ComboBox(self, -1, choices=devices or [_("No OpenCL devices")], style=wx.CB_READONLY)
        self.flags_lbl = wx.StaticText(self, -1, _("Extra flags:"))
        self.txt_flags = wx.TextCtrl(self, -1, "")
        self.extra_info = wx.StaticText(self, -1, "")
        self.affinity_lbl = wx.StaticText(self, -1, _("CPU Affinity:"))
        # One checkbox per logical CPU, driving the affinity mask.
        self.affinity_chks = [wx.CheckBox(self, label='%d ' % i)
                              for i in range(self.num_processors)]
        self.balance_lbl = wx.StaticText(self, -1, _("Balance:"))
        self.balance_amt = wx.StaticText(self, -1, "0")
        self.balance_refresh = wx.Button(self, -1, STR_REFRESH_BALANCE)
        self.balance_refresh_timer = wx.Timer()
        self.withdraw = wx.Button(self, -1, _("Withdraw"))
        self.balance_cooldown_seconds = 0
        self.balance_auth_token = ""
        # Widget groups used for bulk show/hide in change_server().
        self.labels = [self.server_lbl, self.website_lbl,
                       self.host_lbl, self.port_lbl,
                       self.user_lbl, self.pass_lbl,
                       self.device_lbl, self.flags_lbl,
                       self.balance_lbl]
        self.txts = [self.txt_host, self.txt_port,
                     self.txt_username, self.txt_pass,
                     self.txt_flags]
        self.all_widgets = [self.server, self.website,
                            self.device_listbox,
                            self.balance_amt,
                            self.balance_refresh,
                            self.withdraw] + self.labels + self.txts + self.affinity_chks
        self.hidden_widgets = [self.extra_info,
                               self.txt_external,
                               self.external_lbl]
        self.start = wx.Button(self, -1, STR_START_MINING)
        self.device_listbox.SetSelection(0)
        self.server.SetStringSelection(self.defaults.get('default_server'))
        # Load the saved profile data, then wire up event handlers.
        self.set_data(data)
        for txt in self.txts:
            txt.Bind(wx.EVT_KEY_UP, self.check_if_modified)
        self.device_listbox.Bind(wx.EVT_COMBOBOX, self.check_if_modified)
        self.start.Bind(wx.EVT_BUTTON, self.toggle_mining)
        self.server.Bind(wx.EVT_COMBOBOX, self.on_select_server)
        self.balance_refresh_timer.Bind(wx.EVT_TIMER, self.on_balance_cooldown_tick)
        self.balance_refresh.Bind(wx.EVT_BUTTON, self.on_balance_refresh)
        self.withdraw.Bind(wx.EVT_BUTTON, self.on_withdraw)
        for chk in self.affinity_chks:
            chk.Bind(wx.EVT_CHECKBOX, self.on_affinity_check)
        # Custom events posted by the MinerListenerThread.
        self.Bind(EVT_UPDATE_HASHRATE, lambda event: self.update_khash(event.rate))
        self.Bind(EVT_UPDATE_ACCEPTED, lambda event: self.update_shares(event.accepted))
        self.Bind(EVT_UPDATE_STATUS, lambda event: self.update_status(event.text))
        self.Bind(EVT_UPDATE_SOLOCHECK, lambda event: self.update_solo())
        self.update_statusbar()
        self.clear_summary_widgets()
@property
def last_update_time(self):
"""Return the local time of the last accepted share."""
if self.accepted_times:
return time.localtime(self.accepted_times[-1])
return None
@property
def server_config(self):
hostname = self.txt_host.GetValue()
return self.get_server_by_field(hostname, 'host')
@property
def is_solo(self):
"""Return True if this miner is configured for solo mining."""
return self.server.GetStringSelection() == "solo"
@property
def is_modified(self):
"""Return True if this miner has unsaved changes pending."""
return self.last_data != self.get_data()
    @property
    def external_path(self):
        """Return the path to an external miner, or "" if none is present.

        An empty value means the bundled poclbm backend is used; see
        is_external_miner and start_mining.
        """
        return self.txt_external.GetValue()
@property
def is_external_miner(self):
"""Return True if this miner has an external path configured."""
return self.txt_external.GetValue() != ""
@property
def host_with_http_prefix(self):
"""Return the host address, with http:// prepended if needed."""
host = self.txt_host.GetValue()
if not host.startswith("http://"):
host = "http://" + host
return host
@property
def host_without_http_prefix(self):
"""Return the host address, with http:// stripped off if needed."""
host = self.txt_host.GetValue()
if host.startswith("http://"):
return host[len('http://'):]
return host
@property
def device_index(self):
"""Return the index of the currently selected OpenCL device."""
s = self.device_listbox.GetStringSelection()
match = re.search(r'\[(\d+)-(\d+)\]', s)
try: return int(match.group(2))
except: return 0
@property
def platform_index(self):
"""Return the index of the currently selected OpenCL platform."""
s = self.device_listbox.GetStringSelection()
match = re.search(r'\[(\d+)-(\d+)\]', s)
try: return int(match.group(1))
except: return 0
@property
def is_device_visible(self):
"""Return True if we are using a backend with device selection."""
NO_DEVICE_SELECTION = ['rpcminer', 'bitcoin-miner']
return not any(d in self.external_path for d in NO_DEVICE_SELECTION)
    def on_affinity_check(self, event):
        """Set the affinity mask to the selected value."""
        # Rebuild the mask from scratch: bit i is set when CPU i is checked.
        self.affinity_mask = 0
        for i in range(self.num_processors):
            is_checked = self.affinity_chks[i].GetValue()
            self.affinity_mask += (is_checked << i)
        if self.is_mining:
            # Apply immediately to the running miner process; best effort
            # because set_process_affinity needs the win32 API.
            try:
                set_process_affinity(self.miner.pid, self.affinity_mask)
            except:
                pass # TODO: test on Linux
def pause(self):
"""Pause the miner if we are mining, otherwise do nothing."""
if self.is_mining:
self.stop_mining()
self.is_paused = True
def resume(self):
"""Resume the miner if we are paused, otherwise do nothing."""
if self.is_paused:
self.start_mining()
self.is_paused = False
def get_data(self):
"""Return a dict of our profile data."""
return dict(name=self.name,
hostname=self.txt_host.GetValue(),
port=self.txt_port.GetValue(),
username=self.txt_username.GetValue(),
password=self.txt_pass.GetValue(),
device=self.device_listbox.GetSelection(),
flags=self.txt_flags.GetValue(),
autostart=self.autostart,
affinity_mask=self.affinity_mask,
balance_auth_token=self.balance_auth_token,
external_path=self.external_path)
    def set_data(self, data):
        """Set our profile data to the information in data. See get_data()."""
        # Keep a snapshot so is_modified can detect unsaved edits.
        self.last_data = data
        default_server_config = self.get_server_by_field(
            self.defaults['default_server'], 'name')
        self.name = (data.get('name') or _('Default'))
        # Backwards compatibility: hostname key used to be called server.
        # We only save out hostname now but accept server from old INI files.
        hostname = (data.get('hostname') or
                    data.get('server') or
                    default_server_config['host'])
        self.txt_host.SetValue(hostname)
        self.server.SetStringSelection(self.server_config.get('name', "Other"))
        self.txt_username.SetValue(
            data.get('username') or
            self.defaults.get('default_username', ''))
        self.txt_pass.SetValue(
            data.get('password') or
            self.defaults.get('default_password', ''))
        # 8332 is the conventional bitcoind RPC port default.
        self.txt_port.SetValue(str(
            data.get('port') or
            self.server_config.get('port', 8332)))
        self.txt_flags.SetValue(data.get('flags', ''))
        self.autostart = data.get('autostart', False)
        self.affinity_mask = data.get('affinity_mask', 1)
        # Reflect the saved affinity mask in the per-CPU checkboxes.
        for i in range(self.num_processors):
            self.affinity_chks[i].SetValue((self.affinity_mask >> i) & 1)
        self.txt_external.SetValue(data.get('external_path', ''))
        # Handle case where they removed devices since last run.
        device_index = data.get('device', None)
        if device_index is not None and device_index < self.device_listbox.GetCount():
            self.device_listbox.SetSelection(device_index)
        self.change_server(self.server_config)
        self.balance_auth_token = data.get('balance_auth_token', '')
def clear_summary_widgets(self):
"""Release all our summary widgets."""
self.summary_name = None
self.summary_status = None
self.summary_shares_accepted = None
self.summary_shares_stale = None
self.summary_start = None
self.summary_autostart = None
def get_start_stop_state(self):
"""Return appropriate text for the start/stop button."""
return _("Stop") if self.is_mining else _("Start")
def get_start_label(self):
return STR_STOP_MINING if self.is_mining else STR_START_MINING
    def update_summary(self):
        """Update our summary fields if possible."""
        # The summary panel may not be open; nothing to update then.
        if not self.summary_panel:
            return
        self.summary_name.SetLabel(self.name)
        if self.is_paused:
            text = STR_PAUSED
        elif not self.is_mining:
            text = STR_STOPPED
        elif self.is_possible_error:
            text = _("Connection problems")
        else:
            text = format_khash(self.last_rate)
        self.summary_status.SetLabel(text)
        # Totals, with the recent-window counts in parentheses.
        self.summary_shares_accepted.SetLabel("%d (%d)" %
            (self.accepted_shares, len(self.accepted_times)))
        if self.is_solo:
            # Stale/invalid shares are meaningless when solo mining.
            self.summary_shares_invalid.SetLabel("-")
        else:
            self.summary_shares_invalid.SetLabel("%d (%d)" %
                (self.invalid_shares, len(self.invalid_times)))
        self.summary_start.SetLabel(self.get_start_stop_state())
        self.summary_autostart.SetValue(self.autostart)
        self.summary_panel.grid.Layout()
    def get_summary_widgets(self, summary_panel):
        """Return a list of summary widgets suitable for sizer.AddMany.

        The widgets are created as children of summary_panel and cached on
        self so update_summary() can refresh them later.
        """
        self.summary_panel = summary_panel
        self.summary_name = wx.StaticText(summary_panel, -1, self.name)
        # Clicking the miner's name jumps to its tab.
        self.summary_name.Bind(wx.EVT_LEFT_UP, self.show_this_panel)
        self.summary_status = wx.StaticText(summary_panel, -1, STR_STOPPED)
        self.summary_shares_accepted = wx.StaticText(summary_panel, -1, "0")
        self.summary_shares_invalid = wx.StaticText(summary_panel, -1, "0")
        self.summary_start = wx.Button(summary_panel, -1, self.get_start_stop_state(), style=wx.BU_EXACTFIT)
        self.summary_start.Bind(wx.EVT_BUTTON, self.toggle_mining)
        self.summary_autostart = wx.CheckBox(summary_panel, -1)
        self.summary_autostart.Bind(wx.EVT_CHECKBOX, self.toggle_autostart)
        self.summary_autostart.SetValue(self.autostart)
        # (widget, proportion, sizer flags[, border]) tuples for AddMany.
        return [
            (self.summary_name, 0, wx.ALIGN_CENTER_HORIZONTAL),
            (self.summary_status, 0, wx.ALIGN_CENTER_HORIZONTAL, 0),
            (self.summary_shares_accepted, 0, wx.ALIGN_CENTER_HORIZONTAL, 0),
            (self.summary_shares_invalid, 0, wx.ALIGN_CENTER_HORIZONTAL, 0),
            (self.summary_start, 0, wx.ALIGN_CENTER, 0),
            (self.summary_autostart, 0, wx.ALIGN_CENTER, 0)
        ]
def show_this_panel(self, event):
"""Set focus to this panel."""
self.parent.SetSelection(self.parent.GetPageIndex(self))
    def toggle_autostart(self, event):
        # Triggered by the autostart checkbox (tab or summary row).
        self.autostart = event.IsChecked()
def toggle_mining(self, event):
"""Stop or start the miner."""
if self.is_mining:
self.stop_mining()
else:
self.start_mining()
self.update_summary()
#############################
# Begin backend specific code
    def configure_subprocess_poclbm(self):
        """Set up the command line for poclbm.

        Returns a (cmd, cwd) pair for subprocess.Popen.
        """
        folder = get_module_path()
        if USE_MOCK:
            executable = "python mockBitcoinMiner.py"
        else:
            # sys.frozen is set when running from a py2exe/py2app bundle.
            if hasattr(sys, 'frozen'):
                executable = "poclbm.app/Contents/MacOS/poclbm"
            else:
                executable = "python poclbm.py"
        # poclbm takes the credentials as user:pass@host:port.
        cmd = "%s %s:%s@%s:%s --device=%d --platform=%d --verbose %s" % (
            executable,
            self.txt_username.GetValue(),
            self.txt_pass.GetValue(),
            self.txt_host.GetValue(),
            self.txt_port.GetValue(),
            self.device_index,
            self.platform_index,
            self.txt_flags.GetValue()
        )
        return cmd, folder
    def configure_subprocess_rpcminer(self):
        """Set up the command line for rpcminer.

        The hostname must start with http:// for these miners.
        Returns a (cmd, cwd) pair for subprocess.Popen.
        """
        cmd = "%s -user=%s -password=%s -url=%s:%s %s" % (
            self.external_path,
            self.txt_username.GetValue(),
            self.txt_pass.GetValue(),
            self.host_with_http_prefix,
            self.txt_port.GetValue(),
            self.txt_flags.GetValue()
        )
        return cmd, os.path.dirname(self.external_path)
    def configure_subprocess_ufasoft(self):
        """Set up the command line for ufasoft's SSE2 miner.

        The hostname must start with http:// for these miners.
        Returns a (cmd, cwd) pair for subprocess.Popen.
        """
        cmd = "%s -u %s -p %s -o %s:%s %s" % (
            self.external_path,
            self.txt_username.GetValue(),
            self.txt_pass.GetValue(),
            self.host_with_http_prefix,
            self.txt_port.GetValue(),
            self.txt_flags.GetValue())
        return cmd, os.path.dirname(self.external_path)
    def configure_subprocess_phoenix(self):
        """Set up the command line for phoenix miner.

        Returns a (cmd, cwd) pair for subprocess.Popen.
        """
        path = self.external_path
        # A .py path means running from source; invoke via the interpreter.
        if path.endswith('.py'):
            path = "python " + path
        cmd = "%s -u http://%s:%s@%s:%s PLATFORM=%d DEVICE=%d %s" % (
            path,
            self.txt_username.GetValue(),
            self.txt_pass.GetValue(),
            self.host_without_http_prefix,
            self.txt_port.GetValue(),
            self.platform_index,
            self.device_index,
            self.txt_flags.GetValue())
        return cmd, os.path.dirname(self.external_path)
    def configure_subprocess_cgminer(self):
        """Set up the command line for cgminer.

        Returns a (cmd, cwd) pair for subprocess.Popen.
        """
        path = self.external_path
        # A .py path means running from source; invoke via the interpreter.
        if path.endswith('.py'):
            path = "python " + path
        # Command line arguments for cgminer here:
        # -u <username>
        # -p <password>
        # -o <http://server.ip:port>
        # -d <device appear in pyopencl>
        # -l <log message period in second>
        # -T <disable curses interface and output to console (stdout)>
        cmd = "%s -u %s -p %s -o http://%s:%s -d %s -l 1 -T %s" % (
            path,
            self.txt_username.GetValue(),
            self.txt_pass.GetValue(),
            self.host_without_http_prefix,
            self.txt_port.GetValue(),
            self.device_index,
            self.txt_flags.GetValue())
        return cmd, os.path.dirname(self.external_path)
# End backend specific code
###########################
    def start_mining(self):
        """Launch a miner subprocess and attach a MinerListenerThread."""
        self.is_paused = False
        # Avoid showing a console window when frozen
        try: import win32process
        except ImportError: flags = 0
        else: flags = win32process.CREATE_NO_WINDOW
        # Determine what command line arguments to use
        # Pick the backend config function and listener class by looking at
        # the external path; no path means the bundled poclbm.
        listener_cls = MinerListenerThread
        if not self.is_external_miner:
            conf_func = self.configure_subprocess_poclbm
        elif "rpcminer" in self.external_path:
            conf_func = self.configure_subprocess_rpcminer
        elif "bitcoin-miner" in self.external_path:
            conf_func = self.configure_subprocess_ufasoft
        elif "phoenix" in self.external_path:
            conf_func = self.configure_subprocess_phoenix
            listener_cls = PhoenixListenerThread
        elif "cgminer" in self.external_path:
            conf_func = self.configure_subprocess_cgminer
            listener_cls = CgListenerThread
        else:
            raise ValueError # TODO: handle unrecognized miner
        cmd, cwd = conf_func()
        # for ufasoft:
        #   redirect stderr to stdout
        #   use universal_newlines to catch the \r output on Mhash/s lines
        try:
            logger.debug(_('Running command: ') + cmd)
            # for cgminer:
            # We need only the STDOUT for meaningful messages.
            if conf_func == self.configure_subprocess_cgminer:
                self.miner = subprocess.Popen(cmd, cwd=cwd,
                                              stdout=subprocess.PIPE,
                                              stderr=None,
                                              universal_newlines=True,
                                              creationflags=flags,
                                              shell=(sys.platform != 'win32'))
            else:
                self.miner = subprocess.Popen(cmd, cwd=cwd,
                                              stdout=subprocess.PIPE,
                                              stderr=subprocess.STDOUT,
                                              universal_newlines=True,
                                              creationflags=flags,
                                              shell=(sys.platform != 'win32'))
        except OSError:
            raise #TODO: the folder or exe could not exist
        # Daemon thread so a stuck listener can't block process exit.
        self.miner_listener = listener_cls(self, self.miner)
        self.miner_listener.daemon = True
        self.miner_listener.start()
        self.is_mining = True
        self.set_status(STR_STARTING, 1)
        self.start.SetLabel(self.get_start_label())
        # Best effort: apply the configured CPU affinity to the new process.
        try:
            set_process_affinity(self.miner.pid, self.affinity_mask)
        except:
            pass # TODO: test on Linux
def on_close(self):
"""Prepare to close gracefully."""
self.stop_mining()
self.balance_refresh_timer.Stop()
    def stop_mining(self):
        """Terminate the poclbm process if able and its associated listener."""
        if self.miner is not None:
            if self.miner.returncode is None:
                # It didn't return yet so it's still running.
                try:
                    self.miner.terminate()
                except OSError:
                    pass # TODO: Guess it wasn't still running?
            self.miner = None
        if self.miner_listener is not None:
            # Signal the listener thread to exit its read loop.
            self.miner_listener.shutdown_event.set()
            self.miner_listener = None
        self.is_mining = False
        self.is_paused = False
        self.set_status(STR_STOPPED, 1)
        self.start.SetLabel(self.get_start_label())
def update_khash(self, rate):
"""Update our rate according to a report from the listener thread.
If we are receiving rate messages then it means poclbm is no longer
reporting errors.
"""
self.last_rate = rate
self.set_status(format_khash(rate), 1)
if self.is_possible_error:
self.update_statusbar()
self.is_possible_error = False
    def update_statusbar(self):
        """Show the shares or equivalent on the statusbar."""
        if self.is_solo:
            text = _("Difficulty 1 hashes: %(nhashes)d %(update_time)s") % \
                dict(nhashes=self.accepted_shares,
                     update_time=self.format_last_update_time())
            if self.solo_blocks_found > 0:
                # Prepend the solo block count when we've found any.
                block_text = _("Blocks: %d, ") % self.solo_blocks_found
                text = block_text + text
        else:
            text = _("Shares: %d accepted") % self.accepted_shares
            if self.invalid_shares > 0:
                text += _(", %d stale/invalid") % self.invalid_shares
            text += " %s" % self.format_last_update_time()
        self.set_status(text, 0)
def update_last_time(self, accepted):
"""Set the last update time to now (in local time)."""
now = time.time()
if accepted:
self.accepted_times.append(now)
while now - self.accepted_times[0] > SAMPLE_TIME_SECS:
self.accepted_times.popleft()
else:
self.invalid_times.append(now)
while now - self.invalid_times[0] > SAMPLE_TIME_SECS:
self.invalid_times.popleft()
def format_last_update_time(self):
"""Format last update time for display."""
time_fmt = '%I:%M:%S%p'
if self.last_update_time is None:
return ""
return _("- last at %s") % time.strftime(time_fmt, self.last_update_time)
def update_shares(self, accepted):
"""Update our shares with a report from the listener thread."""
if self.is_solo and accepted:
self.solo_blocks_found += 1
elif accepted:
self.accepted_shares += 1
else:
self.invalid_shares += 1
self.update_last_time(accepted)
self.update_statusbar()
def update_status(self, msg):
"""Update our status with a report from the listener thread.
If we receive a message from poclbm we don't know how to interpret,
it's probably some kind of error state - in this case the best
thing to do is just show it to the user on the status bar.
"""
self.set_status(msg)
self.is_possible_error = True
def set_status(self, msg, index=0):
"""Set the current statusbar text, but only if we have focus."""
if self.parent.GetSelection() == self.parent.GetPageIndex(self):
self.statusbar.SetStatusText(msg, index)
def on_focus(self):
"""When we receive focus, update our status.
This ensures that when switching tabs, the statusbar always
shows the current tab's status.
"""
self.update_statusbar()
if self.is_mining:
self.update_khash(self.last_rate)
else:
self.set_status(STR_STOPPED, 1)
def get_taskbar_text(self):
"""Return text for the hover state of the taskbar."""
rate = format_khash(self.last_rate) if self.is_mining else STR_STOPPED
return "%s: %s" % (self.name, rate)
def update_solo(self):
"""Update our easy hashes with a report from the listener thread."""
self.accepted_shares += 1
self.update_last_time(True)
self.update_statusbar()
def on_select_server(self, event):
"""Update our info in response to a new server choice."""
new_server_name = self.server.GetValue()
new_server = self.get_server_by_field(new_server_name, 'name')
self.change_server(new_server)
def get_server_by_field(self, target_val, field):
"""Return the first server dict with the specified val, or {}."""
for s in self.servers:
if s.get(field) == target_val:
return s
return {}
def set_widgets_visible(self, widgets, show=False):
"""Show or hide each widget in widgets according to the show flag."""
for w in widgets:
if show:
w.Show()
else:
w.Hide()
    def set_tooltips(self):
        """Attach explanatory tooltips to the main profile widgets."""
        add_tooltip(self.server, _("Server to connect to. Different servers have different fees and features.\nCheck their websites for full information."))
        add_tooltip(self.website, _("Website of the currently selected server. Click to visit."))
        add_tooltip(self.device_listbox, _("Available OpenCL devices on your system."))
        add_tooltip(self.txt_host, _("Host address, without http:// prefix."))
        add_tooltip(self.txt_port, _("Server port. This is usually 8332."))
        add_tooltip(self.txt_username, _("The miner's username.\nMay be different than your account username.\nExample: Kiv.GPU"))
        add_tooltip(self.txt_pass, _("The miner's password.\nMay be different than your account password."))
        add_tooltip(self.txt_flags, _("""Extra flags to pass to the miner.
For poclbm use -v -w 128 for dedicated mining, append -f 60 for desktop usage.
For cgminer use -I 8 or -I 9. Without any params for desktop usage."""))
        for chk in self.affinity_chks:
            add_tooltip(chk, _("CPU cores used for mining.\nUnchecking some cores can reduce high CPU usage in some systems."))
def reset_statistics(self):
"""Reset our share statistics to zero."""
self.solo_blocks_found = 0
self.accepted_shares = 0
self.accepted_times.clear()
self.invalid_shares = 0
self.invalid_times.clear()
self.update_statusbar()
def change_server(self, new_server):
"""Change the server to new_server, updating fields as needed."""
self.reset_statistics()
# Set defaults before we do server specific code
self.set_tooltips()
self.set_widgets_visible(self.all_widgets, True)
self.withdraw.Disable()
url = new_server.get('url', 'n/a')
self.website.SetLabel(url)
self.website.SetURL(url)
# Invalidate any previous auth token since it won't be valid for the
# new server.
self.balance_auth_token = ""
if 'host' in new_server:
self.txt_host.SetValue(new_server['host'])
if 'port' in new_server:
self.txt_port.SetValue(str(new_server['port']))
# Call server specific code.
host = new_server.get('host', "").lower()
if host == "api2.bitcoin.cz" or host == "mtred.com": self.layout_slush()
elif host == "bitpenny.dyndns.biz": self.layout_bitpenny()
elif host == "pit.deepbit.net": self.layout_deepbit()
elif host == "btcmine.com": self.layout_btcmine()
elif host == "rr.btcmp.com": self.layout_btcmp()
elif "btcguild.com" in host: self.layout_btcguild()
elif host == "bitcoin-server.de": self.layout_bitcoinserver
elif host == "pit.x8s.de": self.layout_x8s()
else: self.layout_default()
self.Layout()
self.update_tab_name()
def on_balance_cooldown_tick(self, event=None):
"""Each second, decrement the cooldown for refreshing balance."""
self.balance_cooldown_seconds -= 1
self.balance_refresh.SetLabel("%d..." % self.balance_cooldown_seconds)
if self.balance_cooldown_seconds <= 0:
self.balance_refresh_timer.Stop()
self.balance_refresh.Enable()
self.balance_refresh.SetLabel(STR_REFRESH_BALANCE)
def require_auth_token(self):
"""Prompt the user for an auth token if they don't have one already.
Set the result to self.balance_auth_token and return None.
"""
if self.balance_auth_token:
return
url = self.server_config.get('balance_token_url')
dialog = BalanceAuthRequest(self, url)
dialog.txt_token.SetFocus()
result = dialog.ShowModal()
dialog.Destroy()
if result == wx.ID_CANCEL:
return
self.balance_auth_token = dialog.get_value() # TODO: validate token?
def is_auth_token_rejected(self, response):
"""If the server rejected our token, reset auth_token and return True.
Otherwise, return False.
"""
if response.status in [401, 403]: # 401 Unauthorized or 403 Forbidden
# Token rejected by the server - reset their token so they'll be
# prompted again
self.balance_auth_token = ""
return True
return False
    def request_balance_get(self, balance_auth_token, use_https=False):
        """Request our balance from the server via HTTP GET and auth token.
        This method should be run in its own thread.

        The final result (a human-readable string, either the balance or an
        error message) is pushed to the GUI via wx.CallAfter.
        """
        response, data = http_request(
            self.server_config['balance_host'],
            "GET",
            self.server_config["balance_url"] % balance_auth_token,
            use_https=use_https
        )
        if self.is_auth_token_rejected(response):
            data = _("Auth token rejected by server.")
        elif not data:
            data = STR_CONNECTION_ERROR
        else:
            try:
                info = json.loads(data)
                # Pools disagree on the JSON key names; take the first one
                # present, falling back to 0.
                confirmed = (info.get('confirmed_reward') or
                             info.get('confirmed') or
                             info.get('balance') or
                             info.get('user', {}).get('confirmed_rewards') or
                             0)
                unconfirmed = (info.get('unconfirmed_reward') or
                               info.get('unconfirmed') or
                               info.get('user', {}).get('unconfirmed_rewards') or
                               0)
                # Some pools report whether an instant payout is allowed;
                # toggle the withdraw button accordingly.
                if self.server_config.get('host') == "pit.deepbit.net":
                    ipa = info.get('ipa', False)
                    self.withdraw.Enable(ipa)
                if self.server_config.get('host') == "rr.btcmp.com":
                    ipa = info.get('can_payout', False)
                    self.withdraw.Enable(ipa)
                data = _("%s confirmed") % format_balance(confirmed)
                if unconfirmed > 0:
                    data += _(", %s unconfirmed") % format_balance(unconfirmed)
            except: # TODO: what exception here? (at least ValueError from json.loads)
                data = _("Bad response from server.")
        # Hand the label update back to the GUI thread.
        wx.CallAfter(self.balance_amt.SetLabel, data)
def on_withdraw(self, event):
self.withdraw.Disable()
host = self.server_config.get('host')
if host == 'bitpenny.dyndns.biz':
self.withdraw_bitpenny()
elif host == 'pit.deepbit.net':
self.withdraw_deepbit()
elif host == 'rr.btcmp.com':
self.withdraw_btcmp()
def requires_auth_token(self, host):
"""Return True if the specified host requires an auth token for balance update."""
HOSTS_REQUIRING_AUTH_TOKEN = ["api2.bitcoin.cz",
"btcmine.com",
"pit.deepbit.net",
"pit.x8s.de",
"mtred.com",
"rr.btcmp.com",
"bitcoin-server.de"]
if host in HOSTS_REQUIRING_AUTH_TOKEN: return True
if "btcguild" in host: return True
return False
def requires_https(self, host):
"""Return True if the specified host requires HTTPs for balance update."""
return host == "mtred.com"
    def on_balance_refresh(self, event=None):
        """Refresh the miner's balance from the server.

        Spawns a worker thread for the HTTP request, then disables the
        refresh button for a 10 second cooldown.
        """
        host = self.server_config.get("host")
        if self.requires_auth_token(host):
            self.require_auth_token()
            if not self.balance_auth_token: # They cancelled the dialog
                return
            # NOTE(review): str.decode here is Python 2 behaviour - confirm
            # before porting to Python 3.
            try:
                self.balance_auth_token.decode('ascii')
            except UnicodeDecodeError:
                return # Invalid characters in auth token
            self.http_thread = threading.Thread(
                target=self.request_balance_get,
                args=(self.balance_auth_token,),
                kwargs=dict(use_https=self.requires_https(host)))
            self.http_thread.start()
        elif host == 'bitpenny.dyndns.biz':
            # BitPenny reports balance via the payout endpoint with w=0.
            self.http_thread = threading.Thread(
                target=self.request_payout_bitpenny, args=(False,))
            self.http_thread.start()
        # Rate-limit refreshes: 10s cooldown driven by a 1s timer.
        self.balance_refresh.Disable()
        self.balance_cooldown_seconds = 10
        self.balance_refresh_timer.Start(1000)
#################################
# Begin server specific HTTP code
    def withdraw_btcmp(self):
        """Launch a thread to withdraw from BTCMP."""
        self.require_auth_token()
        if not self.balance_auth_token: # User refused to provide token
            return
        self.http_thread = threading.Thread(
            target=self.request_payout_btcmp,
            args=(self.balance_auth_token,))
        self.http_thread.start()
    def withdraw_deepbit(self):
        """Launch a thread to withdraw from deepbit."""
        self.require_auth_token()
        if not self.balance_auth_token: # User refused to provide token
            return
        self.http_thread = threading.Thread(
            target=self.request_payout_deepbit,
            args=(self.balance_auth_token,))
        self.http_thread.start()
    def withdraw_bitpenny(self):
        """Launch a thread to withdraw from BitPenny; no auth token needed."""
        self.http_thread = threading.Thread(
            target=self.request_payout_bitpenny, args=(True,))
        self.http_thread.start() # TODO: look at aliasing of this variable
    def request_payout_btcmp(self, balance_auth_token):
        """Request payout from btcmp's server via HTTP GET with the auth token."""
        response, data = http_request(
            self.server_config['balance_host'],
            "GET",
            self.server_config["payout_url"] % balance_auth_token,
            use_https=False
        )
        if self.is_auth_token_rejected(response):
            data = _("Auth token rejected by server.")
        elif not data:
            data = STR_CONNECTION_ERROR
        else:
            data = _("Withdraw OK")
        wx.CallAfter(self.on_balance_received, data)
    def request_payout_deepbit(self, balance_auth_token):
        """Request payout from deepbit's server via HTTP POST (JSON-RPC style)."""
        post_params = dict(id=1,
                           method="request_payout")
        response, data = http_request(
            self.server_config['balance_host'],
            "POST",
            self.server_config['balance_url'] % balance_auth_token,
            json.dumps(post_params),
            {"Content-type": "application/json; charset=utf-8",
             "User-Agent": USER_AGENT}
        )
        if self.is_auth_token_rejected(response):
            data = _("Auth token rejected by server.")
        elif not data:
            data = STR_CONNECTION_ERROR
        else:
            data = _("Withdraw OK")
        wx.CallAfter(self.on_balance_received, data)
    def request_payout_bitpenny(self, withdraw):
        """Request our balance from BitPenny via HTTP POST.
        If withdraw is True, also request a withdrawal.
        """
        # BitPenny identifies the account by receiving address (the username
        # field); w=1 triggers a withdrawal, w=0 only queries the balance.
        post_params = dict(a=self.txt_username.GetValue(), w=int(withdraw))
        response, data = http_request(
            self.server_config['balance_host'],
            "POST",
            self.server_config['balance_url'],
            urllib.urlencode(post_params),
            {"Content-type": "application/x-www-form-urlencoded"}
        )
        if self.is_auth_token_rejected(response):
            data = _("Auth token rejected by server.")
        elif not data:
            data = STR_CONNECTION_ERROR
        elif withdraw:
            data = _("Withdraw OK")
        wx.CallAfter(self.on_balance_received, data)
def on_balance_received(self, balance):
"""Set the balance in the GUI."""
try:
amt = float(balance)
except ValueError: # Response was some kind of error
self.balance_amt.SetLabel(balance)
else:
if amt > 0.1:
self.withdraw.Enable()
amt_str = format_balance(amt)
self.balance_amt.SetLabel(amt_str)
self.Layout()
# End server specific HTTP code
###############################
    def set_name(self, name):
        """Set the label on this miner's tab to name."""
        self.name = name
        # Keep the summary panel's row label in sync, if one exists.
        if self.summary_name:
            self.summary_name.SetLabel(self.name)
        self.update_tab_name()
    def update_tab_name(self):
        """Update the tab name to reflect modified status.

        Unsaved changes are indicated with a trailing asterisk.
        """
        name = self.name
        if self.is_modified:
            name += "*"
        page = self.parent.GetPageIndex(self)
        # -1 means this panel is not currently in the notebook.
        if page != -1:
            self.parent.SetPageText(page, name)
    def check_if_modified(self, event):
        """Update the title of the tab to have an asterisk if we are modified."""
        self.update_tab_name()
        event.Skip()
    def on_saved(self):
        """Update our last data after a save.

        Snapshotting get_data() here is what makes is_modified go False.
        """
        self.last_data = self.get_data()
        self.update_tab_name()
    def layout_init(self):
        """Create the sizers for this frame and set up the external text.
        Return the lowest row that is available.
        """
        self.frame_sizer = wx.BoxSizer(wx.VERTICAL)
        # Small spacer at the top of the tab.
        self.frame_sizer.Add((20, 10), 0, wx.EXPAND, 0)
        self.inner_sizer = wx.GridBagSizer(10, 5)
        self.button_sizer = wx.BoxSizer(wx.HORIZONTAL)
        row = 0
        # External miners get an extra row showing the executable path.
        if self.is_external_miner:
            self.inner_sizer.Add(self.external_lbl, (row, 0), flag=LBL_STYLE)
            self.inner_sizer.Add(self.txt_external, (row, 1), span=(1, 3), flag=wx.EXPAND)
            row += 1
        return row
    def layout_server_and_website(self, row):
        """Lay out the server and website widgets in the specified row."""
        self.inner_sizer.Add(self.server_lbl, (row, 0), flag=LBL_STYLE)
        self.inner_sizer.Add(self.server, (row, 1), flag=wx.EXPAND)
        self.inner_sizer.Add(self.website_lbl, (row, 2), flag=LBL_STYLE)
        self.inner_sizer.Add(self.website, (row, 3), flag=wx.ALIGN_CENTER_VERTICAL)
    def layout_host_and_port(self, row):
        """Lay out the host and port widgets in the specified row."""
        self.inner_sizer.Add(self.host_lbl, (row, 0), flag=LBL_STYLE)
        self.inner_sizer.Add(self.txt_host, (row, 1), flag=wx.EXPAND)
        self.inner_sizer.Add(self.port_lbl, (row, 2), flag=LBL_STYLE)
        self.inner_sizer.Add(self.txt_port, (row, 3), flag=wx.EXPAND)
    def layout_user_and_pass(self, row):
        """Lay out the user and pass widgets in the specified row."""
        self.inner_sizer.Add(self.user_lbl, (row, 0), flag=LBL_STYLE)
        self.inner_sizer.Add(self.txt_username, (row, 1), flag=wx.EXPAND)
        self.inner_sizer.Add(self.pass_lbl, (row, 2), flag=LBL_STYLE)
        self.inner_sizer.Add(self.txt_pass, (row, 3), flag=wx.EXPAND)
    def layout_device_and_flags(self, row):
        """Lay out the device and flags widgets in the specified row.
        Hide the device dropdown if RPCMiner is present since it doesn't use it.
        """
        device_visible = self.is_device_visible
        self.set_widgets_visible([self.device_lbl, self.device_listbox], device_visible)
        if device_visible:
            self.inner_sizer.Add(self.device_lbl, (row, 0), flag=LBL_STYLE)
            self.inner_sizer.Add(self.device_listbox, (row, 1), flag=wx.EXPAND)
        # Bool arithmetic: flags start at column 2 when the device widgets
        # are shown, otherwise at column 0 spanning the full width.
        col = 2 * (device_visible)
        self.inner_sizer.Add(self.flags_lbl, (row, col), flag=LBL_STYLE)
        span = (1, 1) if device_visible else (1, 4)
        self.inner_sizer.Add(self.txt_flags, (row, col + 1), span=span, flag=wx.EXPAND)
    def layout_affinity(self, row):
        """Lay out the affinity checkboxes in the specified row."""
        self.inner_sizer.Add(self.affinity_lbl, (row, 0))
        # One checkbox per CPU core, packed horizontally.
        affinity_sizer = wx.BoxSizer(wx.HORIZONTAL)
        for chk in self.affinity_chks:
            affinity_sizer.Add(chk)
        self.inner_sizer.Add(affinity_sizer, (row, 1))
    def layout_balance(self, row):
        """Lay out the balance widgets in the specified row."""
        self.inner_sizer.Add(self.balance_lbl, (row, 0), flag=LBL_STYLE)
        self.inner_sizer.Add(self.balance_amt, (row, 1))
    def layout_finish(self):
        """Lay out the buttons and fit the sizer to the window."""
        self.frame_sizer.Add(self.inner_sizer, 1, wx.EXPAND | wx.LEFT | wx.RIGHT, 10)
        self.frame_sizer.Add(self.button_sizer, 0, wx.ALIGN_CENTER_HORIZONTAL)
        # Columns 1 and 3 hold the text controls; let them absorb extra width.
        self.inner_sizer.AddGrowableCol(1)
        self.inner_sizer.AddGrowableCol(3)
        for btn in [self.start, self.balance_refresh, self.withdraw]:
            self.button_sizer.Add(btn, 0, BTN_STYLE, 5)
        self.set_widgets_visible([self.external_lbl, self.txt_external],
                                 self.is_external_miner)
        self.SetSizerAndFit(self.frame_sizer)
    def layout_default(self):
        """Lay out a default miner with no custom changes.

        Balance widgets are hidden since generic pools have no balance API;
        host/port are only shown for "Other" and "Solo" server choices.
        """
        self.user_lbl.SetLabel(STR_USERNAME)
        self.set_widgets_visible(self.hidden_widgets, False)
        self.set_widgets_visible([self.balance_lbl,
                                  self.balance_amt,
                                  self.balance_refresh,
                                  self.withdraw], False)
        row = self.layout_init()
        self.layout_server_and_website(row=row)
        customs = ["other", "solo"]
        is_custom = self.server.GetStringSelection().lower() in customs
        if is_custom:
            self.layout_host_and_port(row=row + 1)
        else:
            self.set_widgets_visible([self.host_lbl, self.txt_host,
                                      self.port_lbl, self.txt_port], False)
        # Shift the remaining rows down by one if host/port were shown.
        self.layout_user_and_pass(row=row + 1 + int(is_custom))
        self.layout_device_and_flags(row=row + 2 + int(is_custom))
        self.layout_affinity(row=row + 3 + int(is_custom))
        self.layout_finish()
############################
# Begin server specific code
    def layout_bitpenny(self):
        """BitPenny doesn't require registration or a password.
        The username is just their receiving address.
        """
        invisible = [self.txt_pass, self.txt_host, self.txt_port,
                     self.pass_lbl, self.host_lbl, self.port_lbl]
        self.set_widgets_visible(invisible, False)
        self.set_widgets_visible([self.extra_info], True)
        row = self.layout_init()
        self.layout_server_and_website(row=row)
        self.inner_sizer.Add(self.user_lbl, (row + 1, 0), flag=LBL_STYLE)
        self.inner_sizer.Add(self.txt_username, (row + 1, 1), span=(1, 3), flag=wx.EXPAND)
        self.layout_device_and_flags(row=row + 2)
        self.layout_affinity(row=row + 3)
        self.layout_balance(row=row + 4)
        self.inner_sizer.Add(self.extra_info, (row + 5, 0), span=(1, 4), flag=wx.ALIGN_CENTER_HORIZONTAL)
        self.layout_finish()
        self.extra_info.SetLabel(_("No registration is required - just enter an address and press Start."))
        # A fixed dummy password; BitPenny only cares about the address.
        self.txt_pass.SetValue('poclbm-gui')
        self.user_lbl.SetLabel(_("Address:"))
        add_tooltip(self.txt_username,
            _("Your receiving address for Bitcoins.\nE.g.: 1A94cjRpaPBMV9ZNWFihB5rTFEeihBALgc"))
    def layout_slush(self):
        """Slush's pool uses a separate username for each miner."""
        self.set_widgets_visible([self.host_lbl, self.txt_host,
                                  self.port_lbl, self.txt_port,
                                  self.withdraw, self.extra_info], False)
        row = self.layout_init()
        self.layout_server_and_website(row=row)
        self.layout_user_and_pass(row=row + 1)
        self.layout_device_and_flags(row=row + 2)
        self.layout_affinity(row=row + 3)
        self.layout_balance(row=row + 4)
        self.layout_finish()
        add_tooltip(self.txt_username,
            _("Your miner username (not your account username).\nExample: Kiv.GPU"))
        add_tooltip(self.txt_pass,
            _("Your miner password (not your account password)."))
    def layout_btcguild(self):
        """BTC Guild has the same layout as slush for now."""
        self.layout_slush()
    def layout_bitcoinserver(self):
        """Bitcoin-Server.de has the same layout as slush for now."""
        self.layout_slush()
    def layout_btcmine(self):
        """BTCMine: slush-style layout with its own username/password tooltips."""
        self.set_widgets_visible([self.host_lbl, self.txt_host,
                                  self.port_lbl, self.txt_port,
                                  self.withdraw, self.extra_info], False)
        row = self.layout_init()
        self.layout_server_and_website(row=row)
        self.layout_user_and_pass(row=row + 1)
        self.layout_device_and_flags(row=row + 2)
        self.layout_affinity(row=row + 3)
        self.layout_balance(row=row + 4)
        self.layout_finish()
        add_tooltip(self.txt_username,
            _("Your miner username. \nExample: kiv123@kiv123"))
        add_tooltip(self.txt_pass,
            _("Your miner password (not your account password)."))
    def layout_deepbit(self):
        """Deepbit uses an email address for a username."""
        self.set_widgets_visible([self.host_lbl, self.txt_host,
                                  self.port_lbl, self.txt_port,
                                  self.extra_info], False)
        row = self.layout_init()
        self.layout_server_and_website(row=row)
        self.layout_user_and_pass(row=row + 1)
        self.layout_device_and_flags(row=row + 2)
        self.layout_affinity(row=row + 3)
        self.layout_balance(row=row + 4)
        self.layout_finish()
        add_tooltip(self.txt_username,
            _("The e-mail address you registered with."))
        self.user_lbl.SetLabel(_("Email:"))
    def layout_btcmp(self):
        """BTCMP uses a worker name ("username.workername") for a username."""
        self.set_widgets_visible([self.host_lbl, self.txt_host,
                                  self.port_lbl, self.txt_port,
                                  self.extra_info], False)
        row = self.layout_init()
        self.layout_server_and_website(row=row)
        self.layout_user_and_pass(row=row + 1)
        self.layout_device_and_flags(row=row + 2)
        self.layout_affinity(row=row + 3)
        self.layout_balance(row=row + 4)
        self.layout_finish()
        add_tooltip(self.txt_username,
            _("Your worker name. Is something in the form of username.workername"))
        self.user_lbl.SetLabel(_("Workername:"))
    def layout_x8s(self):
        """x8s has the same layout as slush for now."""
        self.layout_slush()
# End server specific code
##########################
class GUIMiner(wx.Frame):
    def __init__(self, *args, **kwds):
        """Build the main frame: notebook, menus, status bar and config.

        Fixes relative to the original:
        - the "Duplicate" context-menu item passed two arguments to the
          gettext `_` function (a misplaced parenthesis), raising TypeError;
        - self.tbicon is now set to None when the taskbar icon fails to
          load, so on_close's `is not None` check cannot raise AttributeError;
        - the OpenCL warning dialog is destroyed after use.
        """
        wx.Frame.__init__(self, *args, **kwds)
        style = fnb.FNB_X_ON_TAB | fnb.FNB_FF2 | fnb.FNB_HIDE_ON_SINGLE_TAB
        self.nb = fnb.FlatNotebook(self, -1, style=style)
        # Set up notebook context menu
        notebook_menu = wx.Menu()
        ID_RENAME, ID_DUPLICATE = wx.NewId(), wx.NewId()
        notebook_menu.Append(ID_RENAME, _("&Rename..."), _("Rename this miner"))
        # Fixed: `_` takes one string; the help text is Append's third arg.
        notebook_menu.Append(ID_DUPLICATE, _("&Duplicate..."), _("Duplicate this miner"))
        self.nb.SetRightClickMenu(notebook_menu)
        self.Bind(wx.EVT_MENU, self.rename_miner, id=ID_RENAME)
        self.Bind(wx.EVT_MENU, self.duplicate_miner, id=ID_DUPLICATE)
        self.console_panel = None
        self.summary_panel = None
        # Servers and defaults are required, it's a fatal error not to have
        # them.
        server_config_path = os.path.join(get_module_path(), 'servers.ini')
        with open(server_config_path) as f:
            data = json.load(f)
            self.servers = data.get('servers')
        defaults_config_path = os.path.join(get_module_path(), 'defaults.ini')
        with open(defaults_config_path) as f:
            self.defaults = json.load(f)
        self.parse_config()
        self.do_show_opencl_warning = self.config_data.get('show_opencl_warning', True)
        self.console_max_lines = self.config_data.get('console_max_lines', 5000)
        ID_NEW_EXTERNAL, ID_NEW_PHOENIX, ID_NEW_CGMINER, ID_NEW_CUDA, ID_NEW_UFASOFT = wx.NewId(), wx.NewId(), wx.NewId(), wx.NewId(), wx.NewId()
        self.menubar = wx.MenuBar()
        file_menu = wx.Menu()
        new_menu = wx.Menu()
        new_menu.Append(wx.ID_NEW, _("&New OpenCL miner..."), _("Create a new OpenCL miner (default for ATI cards)"), wx.ITEM_NORMAL)
        new_menu.Append(ID_NEW_PHOENIX, _("New Phoenix miner..."), _("Create a new Phoenix miner (for some ATI cards)"), wx.ITEM_NORMAL)
        new_menu.Append(ID_NEW_CGMINER, _("New CG miner..."), _("Create a new CGMiner (for some ATI cards)"), wx.ITEM_NORMAL)
        new_menu.Append(ID_NEW_CUDA, _("New CUDA miner..."), _("Create a new CUDA miner (for NVIDIA cards)"), wx.ITEM_NORMAL)
        new_menu.Append(ID_NEW_UFASOFT, _("New Ufasoft CPU miner..."), _("Create a new Ufasoft miner (for CPUs)"), wx.ITEM_NORMAL)
        new_menu.Append(ID_NEW_EXTERNAL, _("New &other miner..."), _("Create a new custom miner (requires external program)"), wx.ITEM_NORMAL)
        file_menu.AppendMenu(wx.NewId(), _('&New miner'), new_menu)
        file_menu.Append(wx.ID_SAVE, _("&Save settings"), _("Save your settings"), wx.ITEM_NORMAL)
        file_menu.Append(wx.ID_OPEN, _("&Load settings"), _("Load stored settings"), wx.ITEM_NORMAL)
        file_menu.Append(wx.ID_EXIT, _("Quit"), STR_QUIT, wx.ITEM_NORMAL)
        self.menubar.Append(file_menu, _("&File"))
        ID_SUMMARY, ID_CONSOLE = wx.NewId(), wx.NewId()
        view_menu = wx.Menu()
        view_menu.Append(ID_SUMMARY, _("Show summary"), _("Show summary of all miners"), wx.ITEM_NORMAL)
        view_menu.Append(ID_CONSOLE, _("Show console"), _("Show console logs"), wx.ITEM_NORMAL)
        self.menubar.Append(view_menu, _("&View"))
        ID_SOLO, ID_PATHS, ID_LAUNCH = wx.NewId(), wx.NewId(), wx.NewId()
        solo_menu = wx.Menu()
        solo_menu.Append(ID_SOLO, _("&Create solo password..."), _("Configure a user/pass for solo mining"), wx.ITEM_NORMAL)
        solo_menu.Append(ID_PATHS, _("&Set Bitcoin client path..."), _("Set the location of the official Bitcoin client"), wx.ITEM_NORMAL)
        solo_menu.Append(ID_LAUNCH, _("&Launch Bitcoin client as server"), _("Launch the official Bitcoin client as a server for solo mining"), wx.ITEM_NORMAL)
        self.menubar.Append(solo_menu, _("&Solo utilities"))
        ID_START_MINIMIZED = wx.NewId()
        self.options_menu = wx.Menu()
        self.start_minimized_chk = self.options_menu.Append(ID_START_MINIMIZED, _("Start &minimized"), _("Start the GUI minimized to the tray."), wx.ITEM_CHECK)
        self.options_menu.Check(ID_START_MINIMIZED, self.config_data.get('start_minimized', False))
        self.menubar.Append(self.options_menu, _("&Options"))
        ID_CHANGE_LANGUAGE = wx.NewId()
        lang_menu = wx.Menu()
        lang_menu.Append(ID_CHANGE_LANGUAGE, _("&Change language..."), "", wx.ITEM_NORMAL)
        self.menubar.Append(lang_menu, _("Language"))
        ID_DONATE_SMALL = wx.NewId()
        donate_menu = wx.Menu()
        donate_menu.Append(ID_DONATE_SMALL, _("&Donate..."), _("Donate Bitcoins to support GUIMiner development"))
        self.menubar.Append(donate_menu, _("&Donate"))
        help_menu = wx.Menu()
        help_menu.Append(wx.ID_ABOUT, _("&About..."), STR_ABOUT, wx.ITEM_NORMAL)
        self.menubar.Append(help_menu, _("&Help"))
        self.SetMenuBar(self.menubar)
        self.statusbar = self.CreateStatusBar(2, 0)
        try:
            self.bitcoin_executable = os.path.join(os.getenv("PROGRAMFILES"), "Bitcoin", "bitcoin.exe")
        except:
            self.bitcoin_executable = "" # TODO: where would Bitcoin probably be on Linux/Mac?
        try:
            self.tbicon = GUIMinerTaskBarIcon(self)
        except:
            # Fixed: leave a sentinel so on_close's `is not None` check works.
            self.tbicon = None
            logging.error(_("Failed to load taskbar icon; continuing."))
        self.set_properties()
        try:
            self.devices = get_opencl_devices()
        except:
            # No OpenCL: disable the OpenCL miner entry and warn once.
            self.devices = []
            file_menu.Enable(wx.ID_NEW, False)
            file_menu.SetHelpString(wx.ID_NEW, _("OpenCL not found - can't add a OpenCL miner"))
            if self.do_show_opencl_warning:
                dialog = OpenCLWarningDialog(self)
                dialog.ShowModal()
                self.do_show_opencl_warning = not dialog.is_box_checked()
                dialog.Destroy()
        self.Bind(wx.EVT_MENU, self.name_new_profile, id=wx.ID_NEW)
        self.Bind(wx.EVT_MENU, self.new_phoenix_profile, id=ID_NEW_PHOENIX)
        self.Bind(wx.EVT_MENU, self.new_cgminer_profile, id=ID_NEW_CGMINER)
        self.Bind(wx.EVT_MENU, self.new_ufasoft_profile, id=ID_NEW_UFASOFT)
        self.Bind(wx.EVT_MENU, self.new_cuda_profile, id=ID_NEW_CUDA)
        self.Bind(wx.EVT_MENU, self.new_external_profile, id=ID_NEW_EXTERNAL)
        self.Bind(wx.EVT_MENU, self.save_config, id=wx.ID_SAVE)
        self.Bind(wx.EVT_MENU, self.load_config, id=wx.ID_OPEN)
        self.Bind(wx.EVT_MENU, self.on_menu_exit, id=wx.ID_EXIT)
        self.Bind(wx.EVT_MENU, self.set_official_client_path, id=ID_PATHS)
        self.Bind(wx.EVT_MENU, self.show_console, id=ID_CONSOLE)
        self.Bind(wx.EVT_MENU, self.show_summary, id=ID_SUMMARY)
        self.Bind(wx.EVT_MENU, self.show_about_dialog, id=wx.ID_ABOUT)
        self.Bind(wx.EVT_MENU, self.create_solo_password, id=ID_SOLO)
        self.Bind(wx.EVT_MENU, self.launch_solo_server, id=ID_LAUNCH)
        self.Bind(wx.EVT_MENU, self.on_change_language, id=ID_CHANGE_LANGUAGE)
        self.Bind(wx.EVT_MENU, self.on_donate, id=ID_DONATE_SMALL)
        self.Bind(wx.EVT_CLOSE, self.on_close)
        self.Bind(wx.EVT_ICONIZE, self.on_iconize)
        self.Bind(fnb.EVT_FLATNOTEBOOK_PAGE_CLOSING, self.on_page_closing)
        self.Bind(fnb.EVT_FLATNOTEBOOK_PAGE_CLOSED, self.on_page_closed)
        self.Bind(fnb.EVT_FLATNOTEBOOK_PAGE_CHANGED, self.on_page_changed)
        self.load_config()
        self.do_layout()
        if not self.start_minimized_chk.IsChecked():
            self.Show()
    def on_iconize(self, event):
        """Hide to the tray on minimize (Windows only); restore otherwise."""
        if event.Iconized() and sys.platform == 'win32':
            self.Hide() # On minimize, hide from taskbar.
        else:
            self.Show()
    def set_properties(self):
        """Set window icons, title, and status bar layout."""
        self.SetIcons(get_icon_bundle())
        self.SetTitle(_("GUIMiner - v%s") % __version__)
        # Left field stretches; right field is fixed-width for miner status.
        self.statusbar.SetStatusWidths([-1, 125])
        statusbar_fields = ["", STR_NOT_STARTED]
        for i in range(len(statusbar_fields)):
            self.statusbar.SetStatusText(statusbar_fields[i], i)
    def do_layout(self):
        """Fit the notebook into the frame with a vertical sizer."""
        self.vertical_sizer = wx.BoxSizer(wx.VERTICAL)
        self.vertical_sizer.Add(self.nb, 1, wx.EXPAND, 20)
        self.SetSizer(self.vertical_sizer)
        self.vertical_sizer.SetSizeHints(self)
        self.SetSizerAndFit(self.vertical_sizer)
        self.Layout()
@property
def profile_panels(self):
"""Return a list of currently available MinerTab."""
pages = [self.nb.GetPage(i) for i in range(self.nb.GetPageCount())]
return [p for p in pages if
p != self.console_panel and p != self.summary_panel]
def add_profile(self, data={}):
"""Add a new MinerTab to the list of tabs."""
panel = MinerTab(self.nb, -1, self.devices, self.servers,
self.defaults, self.statusbar, data)
self.nb.AddPage(panel, panel.name)
# The newly created profile should have focus.
self.nb.EnsureVisible(self.nb.GetPageCount() - 1)
if self.summary_panel is not None:
self.summary_panel.add_miners_to_grid() # Show new entry on summary
return panel
    def message(self, *args, **kwargs):
        """Utility method to show a message dialog and return their choice.

        Arguments are forwarded to wx.MessageDialog; the dialog is always
        destroyed before returning.
        """
        dialog = wx.MessageDialog(self, *args, **kwargs)
        retval = dialog.ShowModal()
        dialog.Destroy()
        return retval
def name_new_profile(self, event=None, extra_profile_data={}):
"""Prompt for the new miner's name."""
dialog = wx.TextEntryDialog(self, _("Name this miner:"), _("New miner"))
if dialog.ShowModal() == wx.ID_OK:
name = dialog.GetValue().strip()
if not name: name = _("Untitled")
data = extra_profile_data.copy()
data['name'] = name
self.add_profile(data)
def new_external_profile(self, event):
"""Prompt for an external miner path, then create a miner.
On Windows we validate against legal miners; on Linux they can pick
whatever they want.
"""
wildcard = _('External miner (*.exe)|*.exe|(*.py)|*.py') if sys.platform == 'win32' else '*.*'
dialog = wx.FileDialog(self,
_("Select external miner:"),
defaultDir=os.path.join(get_module_path(), 'miners'),
defaultFile="",
wildcard=wildcard,
style=wx.OPEN)
if dialog.ShowModal() != wx.ID_OK:
return
if sys.platform == 'win32' and dialog.GetFilename() not in SUPPORTED_BACKENDS:
self.message(
_("Unsupported external miner %(filename)s. Supported are: %(supported)s") % \
dict(filename=dialog.GetFilename(), supported='\n'.join(SUPPORTED_BACKENDS)),
_("Miner not supported"), wx.OK | wx.ICON_ERROR)
return
path = os.path.join(dialog.GetDirectory(), dialog.GetFilename())
dialog.Destroy()
self.name_new_profile(extra_profile_data=dict(external_path=path))
    def new_phoenix_profile(self, event):
        """Create a new miner using the Phoenix OpenCL miner backend."""
        path = os.path.join(get_module_path(), 'phoenix.exe')
        self.name_new_profile(extra_profile_data=dict(external_path=path))
    def new_cgminer_profile(self, event):
        """Create a new miner using the Cgminer OpenCL miner backend."""
        path = os.path.join(get_module_path(), 'cgminer.exe')
        self.name_new_profile(extra_profile_data=dict(external_path=path))
    def new_ufasoft_profile(self, event):
        """Create a new miner using the Ufasoft CPU miner backend."""
        path = os.path.join(get_module_path(), 'miners', 'ufasoft', 'bitcoin-miner.exe')
        self.name_new_profile(extra_profile_data=dict(external_path=path))
    def new_cuda_profile(self, event):
        """Create a new miner using the CUDA GPU miner backend."""
        path = os.path.join(get_module_path(), 'miners', 'puddinpop', 'rpcminer-cuda.exe')
        self.name_new_profile(extra_profile_data=dict(external_path=path))
def get_storage_location(self):
"""Get the folder and filename to store our JSON config."""
if sys.platform == 'win32':
folder = os.path.join(os.environ['AppData'], 'poclbm')
config_filename = os.path.join(folder, 'poclbm.ini')
else: # Assume linux? TODO test
folder = os.environ['HOME']
config_filename = os.path.join(folder, '.poclbm')
return folder, config_filename
    def on_close(self, event):
        """Minimize to tray if they click "close" but exit otherwise.
        On closing, stop any miners that are currently working.
        """
        # A vetoable close means the user clicked the X: hide to tray instead.
        if event.CanVeto():
            self.Hide()
            event.Veto()
        else:
            # Offer to save any modified profiles before shutting down.
            if any(p.is_modified for p in self.profile_panels):
                dialog = wx.MessageDialog(self, _('Do you want to save changes?'), _('Save'),
                    wx.YES_NO | wx.YES_DEFAULT | wx.ICON_QUESTION)
                retval = dialog.ShowModal()
                dialog.Destroy()
                if retval == wx.ID_YES:
                    self.save_config()
            # Shut down the special panels and every running miner.
            if self.console_panel is not None:
                self.console_panel.on_close()
            if self.summary_panel is not None:
                self.summary_panel.on_close()
            for p in self.profile_panels:
                p.on_close()
            if self.tbicon is not None:
                self.tbicon.RemoveIcon()
                self.tbicon.timer.Stop()
                self.tbicon.Destroy()
            event.Skip()
    def save_config(self, event=None):
        """Save the current miner profiles to our config file in JSON format.

        Shows a message dialog reporting success or failure; on success,
        marks every profile tab as saved.
        """
        folder, config_filename = self.get_storage_location()
        mkdir_p(folder)
        profile_data = [p.get_data() for p in self.profile_panels]
        config_data = dict(show_console=self.is_console_visible(),
                           show_summary=self.is_summary_visible(),
                           profiles=profile_data,
                           bitcoin_executable=self.bitcoin_executable,
                           show_opencl_warning=self.do_show_opencl_warning,
                           start_minimized=self.start_minimized_chk.IsChecked(),
                           console_max_lines=self.console_max_lines,
                           window_position=list(self.GetRect()))
        logger.debug(_('Saving: ') + json.dumps(config_data))
        try:
            with open(config_filename, 'w') as f:
                json.dump(config_data, f, indent=4)
        except IOError:
            self.message(
                _("Couldn't write save file %s.\nCheck the location is writable.") % config_filename,
                _("Save unsuccessful"), wx.OK | wx.ICON_ERROR)
        else:
            self.message(_("Profiles saved OK to %s.") % config_filename,
                         _("Save successful"), wx.OK | wx.ICON_INFORMATION)
            # Clear the modified-asterisk on every tab.
            for p in self.profile_panels:
                p.on_saved()
def parse_config(self):
"""Set self.config_data to a dictionary of config values."""
self.config_data = {}
try:
config_filename = self.get_storage_location()[1]
if os.path.exists(config_filename):
with open(config_filename) as f:
self.config_data.update(json.load(f))
logger.debug(_('Loaded: %s') % json.dumps(self.config_data))
except ValueError:
self.message(
_("Your settings saved at:\n %s\nare corrupt or could not be read.\nDeleting this file or saving over it may solve the problem." % config_filename),
_("Error"), wx.ICON_ERROR)
    def load_config(self, event=None):
        """Load JSON profile info from the config file.

        Replaces all current miner tabs with the saved profiles, after
        confirming with the user if any miner is still running.
        """
        self.parse_config()
        config_data = self.config_data
        executable = config_data.get('bitcoin_executable', None)
        if executable is not None:
            self.bitcoin_executable = executable
        # Shut down any existing miners before they get clobbered
        if(any(p.is_mining for p in self.profile_panels)):
            result = self.message(
                _("Loading profiles will stop any currently running miners. Continue?"),
                _("Load profile"), wx.YES_NO | wx.NO_DEFAULT | wx.ICON_INFORMATION)
            if result == wx.ID_NO:
                return
        # Remove tabs in reverse order so page indices stay valid.
        for p in reversed(self.profile_panels):
            p.on_close()
            self.nb.DeletePage(self.nb.GetPageIndex(p))
        # If present, summary should be the leftmost tab on startup.
        if config_data.get('show_summary', False):
            self.show_summary()
        profile_data = config_data.get('profiles', [])
        for d in profile_data:
            self.add_profile(d)
        if not any(profile_data):
            self.add_profile() # Create a default one using defaults.ini
        if config_data.get('show_console', False):
            self.show_console()
        window_position = config_data.get('window_position')
        if window_position:
            self.SetRect(window_position)
        # Honour per-profile autostart settings.
        for p in self.profile_panels:
            if p.autostart:
                p.start_mining()
    def set_official_client_path(self, event):
        """Set the path to the official Bitcoin client.

        Only accepts a path that actually exists on disk.
        """
        wildcard = "bitcoin.exe" if sys.platform == 'win32' else '*.*'
        dialog = wx.FileDialog(self,
                               _("Select path to Bitcoin.exe"),
                               defaultFile="bitcoin.exe",
                               wildcard=wildcard,
                               style=wx.OPEN)
        if dialog.ShowModal() == wx.ID_OK:
            path = os.path.join(dialog.GetDirectory(), dialog.GetFilename())
            if os.path.exists(path):
                self.bitcoin_executable = path
        dialog.Destroy()
    def show_about_dialog(self, event):
        """Show the 'about' dialog."""
        dialog = AboutGuiminer(self, -1, _('About'))
        dialog.ShowModal()
        dialog.Destroy()
    def on_page_closing(self, event):
        """Handle a tab closing event.
        If they are closing a special panel, we have to shut it down.
        If the tab has a miner running in it, we have to stop the miner
        before letting the tab be removed.
        """
        p = self.nb.GetPage(event.GetSelection())
        if p == self.console_panel:
            self.console_panel.on_close()
            self.console_panel = None
            event.Skip()
            return
        if p == self.summary_panel:
            self.summary_panel.on_close()
            self.summary_panel = None
            event.Skip()
            return
        # Regular miner tab: confirm before killing a running miner.
        if p.is_mining:
            result = self.message(
                _("Closing this miner will stop it. Continue?"),
                _("Close miner"),
                wx.YES_NO | wx.NO_DEFAULT | wx.ICON_INFORMATION)
            if result == wx.ID_NO:
                event.Veto()
                return
        p.on_close()
        event.Skip() # OK to close the tab now
    def on_page_closed(self, event):
        """Refresh the summary grid after any tab has been removed."""
        if self.summary_panel is not None:
            self.summary_panel.add_miners_to_grid() # Remove miner summary
    def on_page_changed(self, event):
        """Handle a tab change event.
        Ensures the status bar shows the status of the tab that has focus.
        """
        p = self.nb.GetPage(event.GetSelection())
        p.on_focus()
def launch_solo_server(self, event):
"""Launch the official bitcoin client in server mode.
This allows poclbm to connect to it for mining solo.
"""
try:
subprocess.Popen(self.bitcoin_executable + " -server")
except OSError:
self.message(
_("Couldn't find Bitcoin at %s. Is your path set correctly?") % self.bitcoin_executable,
_("Launch failed"), wx.ICON_ERROR | wx.OK)
return
self.message(
_("The Bitcoin client will now launch in server mode.\nOnce it connects to the network and downloads the block chain, you can start a miner in 'solo' mode."),
_("Launched ok."),
wx.OK)
def create_solo_password(self, event):
"""Prompt the user for login credentials to the bitcoin client.
These are required to connect to the client over JSON-RPC and are
stored in 'bitcoin.conf'.
"""
if sys.platform == 'win32':
filename = os.path.join(os.getenv("APPDATA"), "Bitcoin", "bitcoin.conf")
else: # Assume Linux for now TODO test
filename = os.path.join(os.getenv('HOME'), ".bitcoin")
if os.path.exists(filename):
result = self.message(
_("%s already exists. Overwrite?") % filename,
_("bitcoin.conf already exists."),
wx.YES_NO | wx.NO_DEFAULT | wx.ICON_INFORMATION)
if result == wx.ID_NO:
return
dialog = SoloPasswordRequest(self, _('Enter password'))
result = dialog.ShowModal()
dialog.Destroy()
if result == wx.ID_CANCEL:
return
with open(filename, "w") as f:
f.write('\nrpcuser=%s\nrpcpassword=%s\nrpcallowip=*' % dialog.get_value())
f.close()
self.message(_("Wrote bitcoin config ok."), _("Success"), wx.OK)
    def is_console_visible(self):
        """Return True if the console is visible."""
        # GetPageIndex returns -1 when the panel is not in the notebook.
        return self.nb.GetPageIndex(self.console_panel) != -1
    def show_console(self, event=None):
        """Show the console log in its own tab."""
        if self.is_console_visible():
            return # Console already shown
        self.console_panel = ConsolePanel(self, self.console_max_lines)
        self.nb.AddPage(self.console_panel, _("Console"))
        self.nb.EnsureVisible(self.nb.GetPageCount() - 1)
    def is_summary_visible(self):
        """Return True if the summary is visible."""
        return self.nb.GetPageIndex(self.summary_panel) != -1
    def show_summary(self, event=None):
        """Show the summary window in its own tab and select it."""
        if self.is_summary_visible():
            return
        self.summary_panel = SummaryPanel(self)
        self.nb.AddPage(self.summary_panel, _("Summary"))
        index = self.nb.GetPageIndex(self.summary_panel)
        self.nb.SetSelection(index)
def on_menu_exit(self, event):
self.Close(force=True)
def rename_miner(self, event):
"""Change the name of a miner as displayed on the tab."""
p = self.nb.GetPage(self.nb.GetSelection())
if p not in self.profile_panels:
return
dialog = wx.TextEntryDialog(self, _("Rename to:"), _("Rename miner"))
if dialog.ShowModal() == wx.ID_OK:
p.set_name(dialog.GetValue().strip())
def duplicate_miner(self, event):
"""Duplicate the current miner to another miner."""
p = self.nb.GetPage(self.nb.GetSelection())
if p not in self.profile_panels:
return
self.name_new_profile(event=None, extra_profile_data=p.get_data())
def on_change_language(self, event):
dialog = ChangeLanguageDialog(self, _('Change language'), language)
result = dialog.ShowModal()
dialog.Destroy()
if result == wx.ID_CANCEL:
return
language_name = dialog.get_value()
update_language(LANGUAGES[language_name])
save_language()
def on_donate(self, event):
dialog = DonateDialog(self, -1, _('Donate'))
dialog.ShowModal()
dialog.Destroy()
class DonateDialog(wx.Dialog):
"""About dialog for the app with a donation address."""
DONATE_TEXT = "If this software helped you, please consider contributing to its development." \
"\nSend donations to: %(address)s"
def __init__(self, parent, id, title):
wx.Dialog.__init__(self, parent, id, title)
vbox = wx.BoxSizer(wx.VERTICAL)
text = DonateDialog.DONATE_TEXT % dict(address=DONATION_ADDRESS)
self.about_text = wx.StaticText(self, -1, text)
self.copy_btn = wx.Button(self, -1, _("Copy address to clipboard"))
vbox.Add(self.about_text, 0, wx.ALL, 10)
vbox.Add(self.copy_btn, 0, wx.ALL | wx.ALIGN_CENTER_HORIZONTAL, 10)
self.SetSizerAndFit(vbox)
self.copy_btn.Bind(wx.EVT_BUTTON, self.on_copy)
def on_copy(self, event):
"""Copy the donation address to the clipboard."""
if wx.TheClipboard.Open():
data = wx.TextDataObject()
data.SetText(DONATION_ADDRESS)
wx.TheClipboard.SetData(data)
wx.TheClipboard.Close()
class ChangeLanguageDialog(wx.Dialog):
"""Dialog prompting the user to change languages."""
def __init__(self, parent, title, current_language):
style = wx.DEFAULT_DIALOG_STYLE
vbox = wx.BoxSizer(wx.VERTICAL)
wx.Dialog.__init__(self, parent, -1, title, style=style)
self.lbl = wx.StaticText(self, -1,
_("Choose language (requires restart to take full effect)"))
vbox.Add(self.lbl, 0, wx.ALL, 10)
self.language_choices = wx.ComboBox(self, -1,
choices=sorted(LANGUAGES.keys()),
style=wx.CB_READONLY)
self.language_choices.SetStringSelection(LANGUAGES_REVERSE[current_language])
vbox.Add(self.language_choices, 0, wx.ALL, 10)
buttons = self.CreateButtonSizer(wx.OK | wx.CANCEL)
vbox.Add(buttons, 0, wx.ALL | wx.ALIGN_CENTER_HORIZONTAL, 10)
self.SetSizerAndFit(vbox)
def get_value(self):
return self.language_choices.GetStringSelection()
class SoloPasswordRequest(wx.Dialog):
"""Dialog prompting user for login credentials for solo mining."""
def __init__(self, parent, title):
style = wx.DEFAULT_DIALOG_STYLE
vbox = wx.BoxSizer(wx.VERTICAL)
wx.Dialog.__init__(self, parent, -1, title, style=style)
self.user_lbl = wx.StaticText(self, -1, STR_USERNAME)
self.txt_username = wx.TextCtrl(self, -1, "")
self.pass_lbl = wx.StaticText(self, -1, STR_PASSWORD)
self.txt_pass = wx.TextCtrl(self, -1, "", style=wx.TE_PASSWORD)
grid_sizer_1 = wx.FlexGridSizer(2, 2, 5, 5)
grid_sizer_1.Add(self.user_lbl, 0, wx.ALIGN_RIGHT | wx.ALIGN_CENTER_VERTICAL, 0)
grid_sizer_1.Add(self.txt_username, 0, wx.EXPAND, 0)
grid_sizer_1.Add(self.pass_lbl, 0, wx.ALIGN_RIGHT | wx.ALIGN_CENTER_VERTICAL, 0)
grid_sizer_1.Add(self.txt_pass, 0, wx.EXPAND, 0)
buttons = self.CreateButtonSizer(wx.OK | wx.CANCEL)
vbox.Add(grid_sizer_1, wx.EXPAND | wx.ALL, 10)
vbox.Add(buttons)
self.SetSizerAndFit(vbox)
def get_value(self):
"""Return the (username, password) supplied by the user."""
return self.txt_username.GetValue(), self.txt_pass.GetValue()
class BalanceAuthRequest(wx.Dialog):
"""Dialog prompting user for an auth token to refresh their balance."""
instructions = \
_("""Click the link below to log in to the pool and get a special token.
This token lets you securely check your balance.
To remember this token for the future, save your miner settings.""")
def __init__(self, parent, url):
style = wx.DEFAULT_DIALOG_STYLE
vbox = wx.BoxSizer(wx.VERTICAL)
wx.Dialog.__init__(self, parent, -1, STR_REFRESH_BALANCE, style=style)
self.instructions = wx.StaticText(self, -1, BalanceAuthRequest.instructions)
self.website = hyperlink.HyperLinkCtrl(self, -1, url)
self.txt_token = wx.TextCtrl(self, -1, _("(Paste token here)"))
buttons = self.CreateButtonSizer(wx.OK | wx.CANCEL)
vbox.AddMany([
(self.instructions, 0, wx.ALL, 10),
(self.website, 0, wx.ALL | wx.ALIGN_CENTER_HORIZONTAL, 10),
(self.txt_token, 0, wx.EXPAND | wx.ALIGN_CENTER_HORIZONTAL, 10),
(buttons, 0, wx.ALL | wx.ALIGN_CENTER_HORIZONTAL, 10)
])
self.SetSizerAndFit(vbox)
def get_value(self):
"""Return the auth token supplied by the user."""
return self.txt_token.GetValue()
class AboutGuiminer(wx.Dialog):
"""About dialog for the app with a donation address."""
def __init__(self, parent, id, title):
wx.Dialog.__init__(self, parent, id, title)
vbox = wx.BoxSizer(wx.VERTICAL)
text = ABOUT_TEXT % dict(version=__version__,
address=DONATION_ADDRESS)
self.about_text = wx.StaticText(self, -1, text)
self.copy_btn = wx.Button(self, -1, _("Copy address to clipboard"))
vbox.Add(self.about_text)
vbox.Add(self.copy_btn, 0, wx.ALIGN_BOTTOM | wx.ALIGN_CENTER_HORIZONTAL, 0)
self.SetSizerAndFit(vbox)
self.copy_btn.Bind(wx.EVT_BUTTON, self.on_copy)
def on_copy(self, event):
"""Copy the donation address to the clipboard."""
if wx.TheClipboard.Open():
data = wx.TextDataObject()
data.SetText(DONATION_ADDRESS)
wx.TheClipboard.SetData(data)
wx.TheClipboard.Close()
class OpenCLWarningDialog(wx.Dialog):
"""Warning dialog when a user does not have OpenCL installed."""
def __init__(self, parent):
wx.Dialog.__init__(self, parent, -1, _("No OpenCL devices found."))
vbox = wx.BoxSizer(wx.VERTICAL)
self.message = wx.StaticText(self, -1,
_("""No OpenCL devices were found.
If you only want to mine using CPU or CUDA, you can ignore this message.
If you want to mine on ATI graphics cards, you may need to install the ATI Stream
SDK, or your GPU may not support OpenCL."""))
vbox.Add(self.message, 0, wx.ALL, 10)
hbox = wx.BoxSizer(wx.HORIZONTAL)
self.no_show_chk = wx.CheckBox(self, -1)
hbox.Add(self.no_show_chk)
self.no_show_txt = wx.StaticText(self, -1, _("Don't show this message again"))
hbox.Add((5, 0))
hbox.Add(self.no_show_txt)
vbox.Add(hbox, 0, wx.ALL, 10)
buttons = self.CreateButtonSizer(wx.OK)
vbox.Add(buttons, 0, wx.ALIGN_BOTTOM | wx.ALIGN_CENTER_HORIZONTAL, 0)
self.SetSizerAndFit(vbox)
def is_box_checked(self):
return self.no_show_chk.GetValue()
def run():
try:
frame_1 = GUIMiner(None, -1, "")
app.SetTopWindow(frame_1)
app.MainLoop()
except:
logging.exception("Exception:")
raise
if __name__ == "__main__":
run()
| gpl-3.0 |
sdague/home-assistant | homeassistant/components/homematicip_cloud/hap.py | 6 | 9395 | """Access point for the HomematicIP Cloud component."""
import asyncio
import logging
from homematicip.aio.auth import AsyncAuth
from homematicip.aio.home import AsyncHome
from homematicip.base.base_connection import HmipConnectionError
from homematicip.base.enums import EventType
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import HomeAssistantType
from .const import COMPONENTS, HMIPC_AUTHTOKEN, HMIPC_HAPID, HMIPC_NAME, HMIPC_PIN
from .errors import HmipcConnectionError
_LOGGER = logging.getLogger(__name__)
class HomematicipAuth:
"""Manages HomematicIP client registration."""
def __init__(self, hass, config) -> None:
"""Initialize HomematicIP Cloud client registration."""
self.hass = hass
self.config = config
self.auth = None
async def async_setup(self) -> bool:
"""Connect to HomematicIP for registration."""
try:
self.auth = await self.get_auth(
self.hass, self.config.get(HMIPC_HAPID), self.config.get(HMIPC_PIN)
)
return self.auth is not None
except HmipcConnectionError:
return False
async def async_checkbutton(self) -> bool:
"""Check blue butten has been pressed."""
try:
return await self.auth.isRequestAcknowledged()
except HmipConnectionError:
return False
async def async_register(self):
"""Register client at HomematicIP."""
try:
authtoken = await self.auth.requestAuthToken()
await self.auth.confirmAuthToken(authtoken)
return authtoken
except HmipConnectionError:
return False
async def get_auth(self, hass: HomeAssistantType, hapid, pin):
"""Create a HomematicIP access point object."""
auth = AsyncAuth(hass.loop, async_get_clientsession(hass))
try:
await auth.init(hapid)
if pin:
auth.pin = pin
await auth.connectionRequest("HomeAssistant")
except HmipConnectionError:
return None
return auth
class HomematicipHAP:
"""Manages HomematicIP HTTP and WebSocket connection."""
def __init__(self, hass: HomeAssistantType, config_entry: ConfigEntry) -> None:
"""Initialize HomematicIP Cloud connection."""
self.hass = hass
self.config_entry = config_entry
self.home = None
self._ws_close_requested = False
self._retry_task = None
self._tries = 0
self._accesspoint_connected = True
self.hmip_device_by_entity_id = {}
self.reset_connection_listener = None
async def async_setup(self, tries: int = 0) -> bool:
"""Initialize connection."""
try:
self.home = await self.get_hap(
self.hass,
self.config_entry.data.get(HMIPC_HAPID),
self.config_entry.data.get(HMIPC_AUTHTOKEN),
self.config_entry.data.get(HMIPC_NAME),
)
except HmipcConnectionError as err:
raise ConfigEntryNotReady from err
except Exception as err: # pylint: disable=broad-except
_LOGGER.error("Error connecting with HomematicIP Cloud: %s", err)
return False
_LOGGER.info(
"Connected to HomematicIP with HAP %s", self.config_entry.unique_id
)
for component in COMPONENTS:
self.hass.async_create_task(
self.hass.config_entries.async_forward_entry_setup(
self.config_entry, component
)
)
return True
@callback
def async_update(self, *args, **kwargs) -> None:
"""Async update the home device.
Triggered when the HMIP HOME_CHANGED event has fired.
There are several occasions for this event to happen.
1. We are interested to check whether the access point
is still connected. If not, entity state changes cannot
be forwarded to hass. So if access point is disconnected all devices
are set to unavailable.
2. We need to update home including devices and groups after a reconnect.
3. We need to update home without devices and groups in all other cases.
"""
if not self.home.connected:
_LOGGER.error("HMIP access point has lost connection with the cloud")
self._accesspoint_connected = False
self.set_all_to_unavailable()
elif not self._accesspoint_connected:
# Now the HOME_CHANGED event has fired indicating the access
# point has reconnected to the cloud again.
# Explicitly getting an update as entity states might have
# changed during access point disconnect."""
job = self.hass.async_create_task(self.get_state())
job.add_done_callback(self.get_state_finished)
self._accesspoint_connected = True
@callback
def async_create_entity(self, *args, **kwargs) -> None:
"""Create an entity or a group."""
is_device = EventType(kwargs["event_type"]) == EventType.DEVICE_ADDED
self.hass.async_create_task(self.async_create_entity_lazy(is_device))
async def async_create_entity_lazy(self, is_device=True) -> None:
"""Delay entity creation to allow the user to enter a device name."""
if is_device:
await asyncio.sleep(30)
await self.hass.config_entries.async_reload(self.config_entry.entry_id)
async def get_state(self) -> None:
"""Update HMIP state and tell Home Assistant."""
await self.home.get_current_state()
self.update_all()
def get_state_finished(self, future) -> None:
"""Execute when get_state coroutine has finished."""
try:
future.result()
except HmipConnectionError:
# Somehow connection could not recover. Will disconnect and
# so reconnect loop is taking over.
_LOGGER.error("Updating state after HMIP access point reconnect failed")
self.hass.async_create_task(self.home.disable_events())
def set_all_to_unavailable(self) -> None:
"""Set all devices to unavailable and tell Home Assistant."""
for device in self.home.devices:
device.unreach = True
self.update_all()
def update_all(self) -> None:
"""Signal all devices to update their state."""
for device in self.home.devices:
device.fire_update_event()
async def async_connect(self) -> None:
"""Start WebSocket connection."""
tries = 0
while True:
retry_delay = 2 ** min(tries, 8)
try:
await self.home.get_current_state()
hmip_events = await self.home.enable_events()
tries = 0
await hmip_events
except HmipConnectionError:
_LOGGER.error(
"Error connecting to HomematicIP with HAP %s. "
"Retrying in %d seconds",
self.config_entry.unique_id,
retry_delay,
)
if self._ws_close_requested:
break
self._ws_close_requested = False
tries += 1
try:
self._retry_task = self.hass.async_create_task(
asyncio.sleep(retry_delay)
)
await self._retry_task
except asyncio.CancelledError:
break
async def async_reset(self) -> bool:
"""Close the websocket connection."""
self._ws_close_requested = True
if self._retry_task is not None:
self._retry_task.cancel()
await self.home.disable_events()
_LOGGER.info("Closed connection to HomematicIP cloud server")
for component in COMPONENTS:
await self.hass.config_entries.async_forward_entry_unload(
self.config_entry, component
)
self.hmip_device_by_entity_id = {}
return True
@callback
def shutdown(self, event) -> None:
"""Wrap the call to async_reset.
Used as an argument to EventBus.async_listen_once.
"""
self.hass.async_create_task(self.async_reset())
_LOGGER.debug(
"Reset connection to access point id %s", self.config_entry.unique_id
)
async def get_hap(
self, hass: HomeAssistantType, hapid: str, authtoken: str, name: str
) -> AsyncHome:
"""Create a HomematicIP access point object."""
home = AsyncHome(hass.loop, async_get_clientsession(hass))
home.name = name
home.label = "Access Point"
home.modelType = "HmIP-HAP"
home.set_auth_token(authtoken)
try:
await home.init(hapid)
await home.get_current_state()
except HmipConnectionError as err:
raise HmipcConnectionError from err
home.on_update(self.async_update)
home.on_create(self.async_create_entity)
hass.loop.create_task(self.async_connect())
return home
| apache-2.0 |
kobolabs/calibre | src/calibre/ebooks/conversion/plugins/pml_input.py | 22 | 4852 | # -*- coding: utf-8 -*-
__license__ = 'GPL v3'
__copyright__ = '2009, John Schember <john@nachtimwald.com>'
__docformat__ = 'restructuredtext en'
import glob
import os
import shutil
from calibre.customize.conversion import InputFormatPlugin
from calibre.ptempfile import TemporaryDirectory
class PMLInput(InputFormatPlugin):
name = 'PML Input'
author = 'John Schember'
description = 'Convert PML to OEB'
# pmlz is a zip file containing pml files and png images.
file_types = set(['pml', 'pmlz'])
def process_pml(self, pml_path, html_path, close_all=False):
from calibre.ebooks.pml.pmlconverter import PML_HTMLizer
pclose = False
hclose = False
if not hasattr(pml_path, 'read'):
pml_stream = open(pml_path, 'rb')
pclose = True
else:
pml_stream = pml_path
pml_stream.seek(0)
if not hasattr(html_path, 'write'):
html_stream = open(html_path, 'wb')
hclose = True
else:
html_stream = html_path
ienc = pml_stream.encoding if pml_stream.encoding else 'cp1252'
if self.options.input_encoding:
ienc = self.options.input_encoding
self.log.debug('Converting PML to HTML...')
hizer = PML_HTMLizer()
html = hizer.parse_pml(pml_stream.read().decode(ienc), html_path)
html = '<html><head><title></title></head><body>%s</body></html>'%html
html_stream.write(html.encode('utf-8', 'replace'))
if pclose:
pml_stream.close()
if hclose:
html_stream.close()
return hizer.get_toc()
def get_images(self, stream, tdir, top_level=False):
images = []
imgs = []
if top_level:
imgs = glob.glob(os.path.join(tdir, '*.png'))
# Images not in top level try bookname_img directory because
# that's where Dropbook likes to see them.
if not imgs:
if hasattr(stream, 'name'):
imgs = glob.glob(os.path.join(tdir, os.path.splitext(os.path.basename(stream.name))[0] + '_img', '*.png'))
# No images in Dropbook location try generic images directory
if not imgs:
imgs = glob.glob(os.path.join(os.path.join(tdir, u'images'), u'*.png'))
if imgs:
os.makedirs(os.path.join(os.getcwdu(), u'images'))
for img in imgs:
pimg_name = os.path.basename(img)
pimg_path = os.path.join(os.getcwdu(), 'images', pimg_name)
images.append('images/' + pimg_name)
shutil.copy(img, pimg_path)
return images
def convert(self, stream, options, file_ext, log,
accelerators):
from calibre.ebooks.metadata.toc import TOC
from calibre.ebooks.metadata.opf2 import OPFCreator
from calibre.utils.zipfile import ZipFile
self.options = options
self.log = log
pages, images = [], []
toc = TOC()
if file_ext == 'pmlz':
log.debug('De-compressing content to temporary directory...')
with TemporaryDirectory(u'_unpmlz') as tdir:
zf = ZipFile(stream)
zf.extractall(tdir)
pmls = glob.glob(os.path.join(tdir, u'*.pml'))
for pml in pmls:
html_name = os.path.splitext(os.path.basename(pml))[0]+'.html'
html_path = os.path.join(os.getcwdu(), html_name)
pages.append(html_name)
log.debug('Processing PML item %s...' % pml)
ttoc = self.process_pml(pml, html_path)
toc += ttoc
images = self.get_images(stream, tdir, True)
else:
toc = self.process_pml(stream, u'index.html')
pages.append(u'index.html')
if hasattr(stream, 'name'):
images = self.get_images(stream, os.path.abspath(os.path.dirname(stream.name)))
# We want pages to be orded alphabetically.
pages.sort()
manifest_items = []
for item in pages+images:
manifest_items.append((item, None))
from calibre.ebooks.metadata.meta import get_metadata
log.debug('Reading metadata from input file...')
mi = get_metadata(stream, 'pml')
if 'images/cover.png' in images:
mi.cover = u'images/cover.png'
opf = OPFCreator(os.getcwdu(), mi)
log.debug('Generating manifest...')
opf.create_manifest(manifest_items)
opf.create_spine(pages)
opf.set_toc(toc)
with open(u'metadata.opf', 'wb') as opffile:
with open(u'toc.ncx', 'wb') as tocfile:
opf.render(opffile, tocfile, u'toc.ncx')
return os.path.join(os.getcwdu(), u'metadata.opf')
| gpl-3.0 |
plumgrid/plumgrid-nova | nova/tests/api/openstack/compute/plugins/v3/test_fixed_ips.py | 1 | 7689 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob
from nova.api.openstack.compute.plugins.v3 import fixed_ips
from nova import context
from nova import db
from nova import exception
from nova import test
from nova.tests.api.openstack import fakes
# Canned fixed-IP rows used by the db-layer stubs below.  The third entry is
# soft-deleted, so lookups for 10.0.0.2 must behave as "not found".
fake_fixed_ips = [{'id': 1,
                   'address': '192.168.1.1',
                   'network_id': 1,
                   'virtual_interface_id': 1,
                   'instance_uuid': '1',
                   'allocated': False,
                   'leased': False,
                   'reserved': False,
                   'host': None,
                   'deleted': False},
                  {'id': 2,
                   'address': '192.168.1.2',
                   'network_id': 1,
                   'virtual_interface_id': 2,
                   'instance_uuid': '2',
                   'allocated': False,
                   'leased': False,
                   'reserved': False,
                   'host': None,
                   'deleted': False},
                  {'id': 3,
                   'address': '10.0.0.2',
                   'network_id': 1,
                   'virtual_interface_id': 3,
                   'instance_uuid': '3',
                   'allocated': False,
                   'leased': False,
                   'reserved': False,
                   'host': None,
                   'deleted': True},
                  ]
def fake_fixed_ip_get_by_address(context, address):
for fixed_ip in fake_fixed_ips:
if fixed_ip['address'] == address and not fixed_ip['deleted']:
return fixed_ip
raise exception.FixedIpNotFoundForAddress(address=address)
def fake_fixed_ip_get_by_address_detailed(context, address):
network = {'id': 1,
'cidr': "192.168.1.0/24"}
for fixed_ip in fake_fixed_ips:
if fixed_ip['address'] == address and not fixed_ip['deleted']:
return (fixed_ip, FakeModel(network), None)
raise exception.FixedIpNotFoundForAddress(address=address)
def fake_fixed_ip_update(context, address, values):
fixed_ip = fake_fixed_ip_get_by_address(context, address)
if fixed_ip is None:
raise exception.FixedIpNotFoundForAddress(address=address)
else:
for key in values:
fixed_ip[key] = values[key]
class FakeModel(object):
"""Stubs out for model."""
def __init__(self, values):
self.values = values
def __getattr__(self, name):
return self.values[name]
def __getitem__(self, key):
if key in self.values:
return self.values[key]
else:
raise NotImplementedError()
def __repr__(self):
return '<FakeModel: %s>' % self.values
def fake_network_get_all(context):
network = {'id': 1,
'cidr': "192.168.1.0/24"}
return [FakeModel(network)]
class FixedIpTest(test.TestCase):
def setUp(self):
super(FixedIpTest, self).setUp()
self.stubs.Set(db, "fixed_ip_get_by_address",
fake_fixed_ip_get_by_address)
self.stubs.Set(db, "fixed_ip_get_by_address_detailed",
fake_fixed_ip_get_by_address_detailed)
self.stubs.Set(db, "fixed_ip_update", fake_fixed_ip_update)
self.context = context.get_admin_context()
self.controller = fixed_ips.FixedIPController()
def test_fixed_ips_get(self):
req = fakes.HTTPRequest.blank('/v3/fake/os-fixed-ips/192.168.1.1')
res_dict = self.controller.show(req, '192.168.1.1')
response = {'fixed_ip': {'cidr': '192.168.1.0/24',
'hostname': None,
'host': None,
'address': '192.168.1.1'}}
self.assertEqual(response, res_dict)
def test_fixed_ips_get_bad_ip_fail(self):
req = fakes.HTTPRequest.blank('/v3/fake/os-fixed-ips/10.0.0.1')
self.assertRaises(webob.exc.HTTPNotFound, self.controller.show, req,
'10.0.0.1')
def test_fixed_ips_get_invalid_ip_address(self):
req = fakes.HTTPRequest.blank('/v3/os-fixed-ips/inv.ali.d.ip')
self.assertRaises(webob.exc.HTTPNotFound, self.controller.show, req,
'inv.ali.d.ip')
def test_fixed_ips_get_deleted_ip_fail(self):
req = fakes.HTTPRequest.blank('/v3/fake/os-fixed-ips/10.0.0.2')
self.assertRaises(webob.exc.HTTPNotFound, self.controller.show, req,
'10.0.0.2')
def test_fixed_ip_reserve(self):
fake_fixed_ips[0]['reserved'] = False
body = {'reserve': None}
req = fakes.HTTPRequest.blank(
'/v3/fake/os-fixed-ips/192.168.1.1/action')
result = self.controller.action(req, "192.168.1.1", body)
self.assertEqual('202 Accepted', result.status)
self.assertEqual(fake_fixed_ips[0]['reserved'], True)
def test_fixed_ip_reserve_bad_ip(self):
body = {'reserve': None}
req = fakes.HTTPRequest.blank(
'/v3/fake/os-fixed-ips/10.0.0.1/action')
self.assertRaises(webob.exc.HTTPNotFound, self.controller.action, req,
'10.0.0.1', body)
def test_fixed_ip_reserve_invalid_ip_address(self):
body = {'reserve': None}
req = fakes.HTTPRequest.blank('/v3/os-fixed-ips/inv.ali.d.ip/action')
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.action, req, 'inv.ali.d.ip', body)
def test_fixed_ip_reserve_deleted_ip(self):
body = {'reserve': None}
req = fakes.HTTPRequest.blank(
'/v3/fake/os-fixed-ips/10.0.0.2/action')
self.assertRaises(webob.exc.HTTPNotFound, self.controller.action, req,
'10.0.0.2', body)
def test_fixed_ip_unreserve(self):
fake_fixed_ips[0]['reserved'] = True
body = {'unreserve': None}
req = fakes.HTTPRequest.blank(
'/v3/fake/os-fixed-ips/192.168.1.1/action')
result = self.controller.action(req, "192.168.1.1", body)
self.assertEqual('202 Accepted', result.status)
self.assertEqual(fake_fixed_ips[0]['reserved'], False)
def test_fixed_ip_unreserve_bad_ip(self):
body = {'unreserve': None}
req = fakes.HTTPRequest.blank(
'/v3/fake/os-fixed-ips/10.0.0.1/action')
self.assertRaises(webob.exc.HTTPNotFound, self.controller.action, req,
'10.0.0.1', body)
def test_fixed_ip_unreserve_invalid_ip_address(self):
body = {'unreserve': None}
req = fakes.HTTPRequest.blank('/v3/os-fixed-ips/inv.ali.d.ip/action')
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.action, req, 'inv.ali.d.ip', body)
def test_fixed_ip_unreserve_deleted_ip(self):
body = {'unreserve': None}
req = fakes.HTTPRequest.blank(
'/v3/fake/os-fixed-ips/10.0.0.2/action')
self.assertRaises(webob.exc.HTTPNotFound, self.controller.action, req,
'10.0.0.2', body)
| apache-2.0 |
bazz-erp/erpnext | erpnext/accounts/doctype/c_form/c_form.py | 49 | 2722 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import flt
from frappe import _
from frappe.model.document import Document
class CForm(Document):
def validate(self):
"""Validate invoice that c-form is applicable
and no other c-form is received for that"""
for d in self.get('invoices'):
if d.invoice_no:
inv = frappe.db.sql("""select c_form_applicable, c_form_no from
`tabSales Invoice` where name = %s and docstatus = 1""", d.invoice_no)
if inv and inv[0][0] != 'Yes':
frappe.throw(_("C-form is not applicable for Invoice: {0}".format(d.invoice_no)))
elif inv and inv[0][1] and inv[0][1] != self.name:
frappe.throw(_("""Invoice {0} is tagged in another C-form: {1}.
If you want to change C-form no for this invoice,
please remove invoice no from the previous c-form and then try again"""\
.format(d.invoice_no, inv[0][1])))
elif not inv:
frappe.throw(_("Row {0}: Invoice {1} is invalid, it might be cancelled / does not exist. \
Please enter a valid Invoice".format(d.idx, d.invoice_no)))
def on_update(self):
""" Update C-Form No on invoices"""
self.set_total_invoiced_amount()
def on_submit(self):
self.set_cform_in_sales_invoices()
def before_cancel(self):
# remove cform reference
frappe.db.sql("""update `tabSales Invoice` set c_form_no=null where c_form_no=%s""", self.name)
def set_cform_in_sales_invoices(self):
inv = [d.invoice_no for d in self.get('invoices')]
if inv:
frappe.db.sql("""update `tabSales Invoice` set c_form_no=%s, modified=%s where name in (%s)""" %
('%s', '%s', ', '.join(['%s'] * len(inv))), tuple([self.name, self.modified] + inv))
frappe.db.sql("""update `tabSales Invoice` set c_form_no = null, modified = %s
where name not in (%s) and ifnull(c_form_no, '') = %s""" %
('%s', ', '.join(['%s']*len(inv)), '%s'), tuple([self.modified] + inv + [self.name]))
else:
frappe.throw(_("Please enter atleast 1 invoice in the table"))
def set_total_invoiced_amount(self):
total = sum([flt(d.grand_total) for d in self.get('invoices')])
frappe.db.set(self, 'total_invoiced_amount', total)
def get_invoice_details(self, invoice_no):
""" Pull details from invoices for referrence """
if invoice_no:
inv = frappe.db.get_value("Sales Invoice", invoice_no,
["posting_date", "territory", "base_net_total", "base_grand_total"], as_dict=True)
return {
'invoice_date' : inv.posting_date,
'territory' : inv.territory,
'net_total' : inv.base_net_total,
'grand_total' : inv.base_grand_total
}
| gpl-3.0 |
MihaiMoldovanu/ansible | lib/ansible/modules/commands/script.py | 26 | 2754 | # Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'core'}
DOCUMENTATION = """
---
module: script
version_added: "0.9"
short_description: Runs a local script on a remote node after transferring it
description:
- "The C(script) module takes the script name followed by a list of
space-delimited arguments. "
- "The local script at path will be transferred to the remote node and then executed. "
- "The given script will be processed through the shell environment on the remote node. "
- "This module does not require python on the remote system, much like
the M(raw) module. "
- This module is also supported for Windows targets.
options:
free_form:
description:
- path to the local script file followed by optional arguments. There is no parameter actually named 'free form'; see the examples!
required: true
default: null
aliases: []
creates:
description:
- a filename, when it already exists, this step will B(not) be run.
required: no
default: null
version_added: "1.5"
removes:
description:
- a filename, when it does not exist, this step will B(not) be run.
required: no
default: null
version_added: "1.5"
chdir:
description:
- cd into this directory on the remote node before running the script
version_added: "2.4"
required: false
default: null
notes:
- It is usually preferable to write Ansible modules than pushing scripts. Convert your script to an Ansible module for bonus points!
- The ssh connection plugin will force pseudo-tty allocation via -tt when scripts are executed. pseudo-ttys do not have a stderr channel and all
stderr is sent to stdout. If you depend on separated stdout and stderr result keys, please switch to a copy+command set of tasks instead of using script.
- This module is also supported for Windows targets.
author:
- Ansible Core Team
- Michael DeHaan
extends_documentation_fragment:
- decrypt
"""
EXAMPLES = '''
# Example from Ansible Playbooks
- script: /some/local/script.sh --some-arguments 1234
# Run a script that creates a file, but only if the file is not yet created
- script: /some/local/create_file.sh --some-arguments 1234
args:
creates: /the/created/file.txt
# Run a script that removes a file, but only if the file is not yet removed
- script: /some/local/remove_file.sh --some-arguments 1234
args:
removes: /the/removed/file.txt
'''
| gpl-3.0 |
tanderegg/ansible-modules-core | network/iosxr/iosxr_command.py | 8 | 4918 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Module documentation rendered by ansible-doc; parsed as YAML.
# Fixes: author capitalization, "retires as expired" -> "retries has
# expired", "a each condition" -> "each condition", "should by tried"
# -> "should be tried".
DOCUMENTATION = """
---
module: iosxr_command
version_added: "2.1"
author: "Peter Sprygada (@privateip)"
short_description: Run arbitrary commands on ios devices.
description:
  - Sends arbitrary commands to an IOSXR node and returns the results
    read from the device. The M(iosxr_command) module includes an
    argument that will cause the module to wait for a specific condition
    before returning or timing out if the condition is not met.
extends_documentation_fragment: ios
options:
  commands:
    description:
      - List of commands to send to the remote ios device over the
        configured provider. The resulting output from the command
        is returned. If the I(waitfor) argument is provided, the
        module is not returned until the condition is satisfied or
        the number of retries has expired.
    required: true
  waitfor:
    description:
      - List of conditions to evaluate against the output of the
        command. The task will wait for each condition to be true
        before moving forward. If the conditional is not true
        within the configured number of retries, the task fails.
        See examples.
    required: false
    default: null
  retries:
    description:
      - Specifies the number of retries a command should be tried
        before it is considered failed. The command is run on the
        target device every retry and evaluated against the
        waitfor conditions.
    required: false
    default: 10
  interval:
    description:
      - Configures the interval in seconds to wait between retries
        of the command. If the command does not pass the specified
        conditions, the interval indicates how long to wait before
        trying the command again.
    required: false
    default: 1
"""
EXAMPLES = """
- iosxr_command:
commands:
- show version
register: output
- iosxr_command:
commands:
- show version
waitfor:
- "result[0] contains 6.0.0"
- iosxr_command:
commands:
- show version
- show interfaces
waitfor:
- "result[2] contains MgmtEth0/0/CPU0/0"
- "result[0] contains 6.0.0"
"""
# Return-value documentation rendered by ansible-doc; parsed as YAML.
# Fix: the key for failed_conditions was misspelled "retured" instead of
# the schema key "returned", which breaks doc rendering/validation.
RETURN = """
stdout:
  description: the set of responses from the commands
  returned: always
  type: list
  sample: ['...', '...']

stdout_lines:
  description: The value of stdout split into a list
  returned: always
  type: list
  sample: [['...', '...'], ['...'], ['...']]

failed_conditions:
  description: the conditionals that failed
  returned: failed
  type: list
  sample: ['...', '...']
"""
import time
import shlex
import re
import json
# Matches bracketed integer indexes such as "[0]" (used when parsing
# waitfor expressions; not referenced in this file's visible code).
INDEX_RE = re.compile(r'(\[\d+\])')
def to_lines(stdout):
    """Yield each command response, splitting string responses into
    lists of lines.

    NOTE: ``basestring`` makes this Python 2-only; non-string items
    (e.g. already-structured output) pass through unchanged.
    """
    for response in stdout:
        if isinstance(response, basestring):
            yield str(response).split('\n')
        else:
            yield response
def main():
    """Module entry point: run the commands, retrying until every
    waitfor condition is satisfied or retries are exhausted.

    NOTE(review): Python 2-only syntax (``except X, exc`` and
    ``exc.message``) is preserved; this module predates Python 3 support.
    """
    # Argument spec consumed by Ansible's shared module boilerplate.
    spec = dict(
        commands=dict(type='list'),
        waitfor=dict(type='list'),
        retries=dict(default=10, type='int'),
        interval=dict(default=1, type='int')
    )
    module = get_module(argument_spec=spec,
                        supports_check_mode=True)
    commands = module.params['commands']
    retries = module.params['retries']
    interval = module.params['interval']
    try:
        # Each waitfor entry is compiled into a Conditional; a set lets
        # satisfied conditions be dropped as they pass.
        queue = set()
        for entry in (module.params['waitfor'] or list()):
            queue.add(Conditional(entry))
    except AttributeError, exc:
        # Presumably raised by Conditional on a malformed expression
        # (TODO confirm against module_utils.netcfg).
        module.fail_json(msg=exc.message)
    result = dict(changed=False)
    while retries > 0:
        response = module.execute(commands)
        result['stdout'] = response
        # Iterate over a copy so members can be removed during the loop.
        for item in list(queue):
            if item(response):
                queue.remove(item)
        if not queue:
            # Every condition satisfied -- skip the while/else clause.
            break
        time.sleep(interval)
        retries -= 1
    else:
        # while/else: loop exhausted without break => timed out.
        failed_conditions = [item.raw for item in queue]
        module.fail_json(msg='timeout waiting for value', failed_conditions=failed_conditions)
    result['stdout_lines'] = list(to_lines(result['stdout']))
    return module.exit_json(**result)
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
from ansible.module_utils.shell import *
from ansible.module_utils.netcfg import *
from ansible.module_utils.iosxr import *
# Invoke the module entry point when executed directly (or by Ansible).
if __name__ == '__main__':
    main()
| gpl-3.0 |
wdwvt1/qiime | tests/test_parallel/test_map_reads_to_reference.py | 15 | 18488 | #!/usr/bin/env python
# File created on 07 Jul 2012
from __future__ import division
__author__ = "Greg Caporaso"
__copyright__ = "Copyright 2011, The QIIME project"
__credits__ = ["Greg Caporaso"]
__license__ = "GPL"
__version__ = "1.9.1-dev"
__maintainer__ = "Greg Caporaso"
__email__ = "gregcaporaso@gmail.com"
from glob import glob
from shutil import rmtree
from os import close
from os.path import exists, join
from tempfile import mkstemp, mkdtemp
from skbio.util import remove_files
from unittest import TestCase, main
from numpy.testing import assert_almost_equal
from biom import load_table
from qiime.test import initiate_timeout, disable_timeout
from qiime.util import get_qiime_temp_dir
from qiime.parse import parse_otu_map
from qiime.parallel.map_reads_to_reference import (ParallelDatabaseMapperBlat,
ParallelDatabaseMapperUsearch, ParallelDatabaseMapperBwaShort)
class ParallelDatabaseMapperTests(TestCase):
    """Shared temp-file fixtures for the parallel database mapper tests."""

    def setUp(self):
        """Create a scratch directory and the reference/input FASTA files."""
        self.files_to_remove = []
        self.dirs_to_remove = []
        tmp_dir = get_qiime_temp_dir()
        self.test_out = mkdtemp(dir=tmp_dir,
                                prefix='qiime_parallel_tests_',
                                suffix='')
        self.dirs_to_remove.append(self.test_out)
        # The four fixtures differed only by filename prefix and contents,
        # so one helper replaces the four copy-pasted creation blocks.
        self.refseqs1_fp = self._write_tmp_fasta('qiime_refseqs', refseqs1)
        self.refseqs2_fp = self._write_tmp_fasta('qiime_refseqs', refseqs2)
        self.inseqs1_fp = self._write_tmp_fasta('qiime_inseqs', inseqs1)
        self.inseqs2_fp = self._write_tmp_fasta('qiime_inseqs', inseqs2)
        initiate_timeout(60)

    def _write_tmp_fasta(self, prefix, contents):
        """Write *contents* to a fresh temp .fasta file under self.test_out,
        register it for cleanup, and return its path."""
        fd, fp = mkstemp(dir=self.test_out, prefix=prefix, suffix='.fasta')
        close(fd)
        with open(fp, 'w') as fasta_f:
            fasta_f.write(contents)
        self.files_to_remove.append(fp)
        return fp

    def tearDown(self):
        """Remove temp files first, then the scratch directories."""
        disable_timeout()
        remove_files(self.files_to_remove)
        # remove directories last, so we don't get errors
        # trying to remove files which may be in the directories
        for d in self.dirs_to_remove:
            if exists(d):
                rmtree(d)
class ParallelDatabaseMapperUsearchTests(ParallelDatabaseMapperTests):
    """End-to-end test for the usearch-backed parallel database mapper."""

    def test_parallel_database_mapper_usearch(self):
        """parallel_database_mapper_usearch functions as expected"""
        params = {'refseqs_fp': self.refseqs1_fp,
                  'min_percent_id': 0.97,
                  'evalue': 1e-10,
                  'max_accepts': 1,
                  'max_rejects': 32,
                  'queryalnfract': 0.35,
                  'targetalnfract': 0.0,
                  'observation_metadata_fp': None
                  }
        app = ParallelDatabaseMapperUsearch()
        # Run for its side effects under self.test_out; the return value
        # was never inspected (the unused `r =` binding is dropped).
        app(self.inseqs1_fp,
            self.test_out,
            params,
            job_prefix='PTEST',
            poll_directly=True,
            suppress_submit_jobs=False)
        observation_map_fp = glob(
            join(self.test_out, 'observation_map.txt'))[0]
        # omap: (otu-to-seq-ids map, observation ids, sample ids)
        omap = parse_otu_map(open(observation_map_fp, 'U'))
        self.assertEqual(len(omap[0]), 3)
        self.assertItemsEqual(
            omap[1],
            ['eco:b0015',
             'eco:b0122',
             'eco:b0015:duplicate'])
        self.assertItemsEqual(omap[2], ['eco:b0015-pr', 'eco:b0122-pr'])
class ParallelDatabaseMapperBlatTests(ParallelDatabaseMapperTests):
    """End-to-end test for the blat-backed parallel database mapper."""

    def test_parallel_database_mapper_blat(self):
        """parallel_database_mapper_blat functions as expected"""
        params = {'refseqs_fp': self.refseqs1_fp,
                  'min_percent_id': 0.97,
                  'evalue': 1e-10,
                  'max_accepts': 1,
                  'max_rejects': 32,
                  'queryalnfract': 0.35,
                  'targetalnfract': 0.0,
                  'observation_metadata_fp': None
                  }
        app = ParallelDatabaseMapperBlat()
        # Run for its side effects under self.test_out; the return value
        # was never inspected (the unused `r =` binding is dropped).
        app(self.inseqs1_fp,
            self.test_out,
            params,
            job_prefix='PTEST',
            poll_directly=True,
            suppress_submit_jobs=False)
        observation_map_fp = glob(
            join(self.test_out, 'observation_map.txt'))[0]
        # omap: (otu-to-seq-ids map, observation ids, sample ids)
        omap = parse_otu_map(open(observation_map_fp, 'U'))
        self.assertEqual(len(omap[0]), 3)
        self.assertItemsEqual(
            omap[1],
            ['eco:b0015',
             'eco:b0122',
             'eco:b0015:duplicate'])
        self.assertItemsEqual(omap[2], ['eco:b0015-pr', 'eco:b0122-pr'])
class ParallelDatabaseMapperBwaShortTests(ParallelDatabaseMapperTests):
    """End-to-end tests for the bwa-short-backed parallel database mapper."""

    def test_bwa_short_database_mapper(self):
        """bwa_short_database_mapper functions as expected"""
        params = {'refseqs_fp': self.refseqs2_fp,
                  'max_diff': None,
                  'observation_metadata_fp': None}
        app = ParallelDatabaseMapperBwaShort()
        # Run for its side effects under self.test_out; the return value
        # was never inspected (the unused `r =` binding is dropped).
        app(self.inseqs2_fp,
            self.test_out,
            params,
            poll_directly=True,
            suppress_submit_jobs=False)
        observation_map_fp = join(self.test_out, 'observation_map.txt')
        self.assertTrue(exists(observation_map_fp))
        observation_table_fp = join(self.test_out, 'observation_table.biom')
        table = load_table(observation_table_fp)
        self.assertItemsEqual(table.ids(), ['s2', 's1'])
        self.assertItemsEqual(
            table.ids(axis='observation'),
            ['r1',
             'r2',
             'r3',
             'r4',
             'r5'])
        self.assertEqual(table.sum(), 6)

    def test_bwa_short_database_mapper_alt_params(self):
        """bwa_short_database_mapper with max_diff=1 drops one observation"""
        params = {'refseqs_fp': self.refseqs2_fp,
                  'max_diff': 1,
                  'observation_metadata_fp': None}
        app = ParallelDatabaseMapperBwaShort()
        app(self.inseqs2_fp,
            self.test_out,
            params,
            poll_directly=True,
            suppress_submit_jobs=False)
        observation_map_fp = join(self.test_out, 'observation_map.txt')
        self.assertTrue(exists(observation_map_fp))
        observation_table_fp = join(self.test_out, 'observation_table.biom')
        table = load_table(observation_table_fp)
        self.assertItemsEqual(table.ids(), ['s2', 's1'])
        self.assertItemsEqual(table.ids(axis='observation'),
                              ['r2', 'r3', 'r4', 'r5'])
        self.assertEqual(table.sum(), 5)
refseqs1 = """>eco:b0001-pr
MKRISTTITTTITITTGNGAG
>eco:b0015-pr dnaJ
MAKQDYYEILGVSKTAEEREIRKAYKRLAMKYHPDRNQGDKEAEAKFKEIKEAYEVLTDS
QKRAAYDQYGHAAFEQGGMGGGGFGGGADFSDIFGDVFGDIFGGGRGRQRAARGADLRYN
MELTLEEAVRGVTKEIRIPTLEECDVCHGSGAKPGTQPQTCPTCHGSGQVQMRQGFFAVQ
QTCPHCQGRGTLIKDPCNKCHGHGRVERSKTLSVKIPAGVDTGDRIRLAGEGEAGEHGAP
AGDLYVQVQVKQHPIFEREGNNLYCEVPINFAMAALGGEIEVPTLDGRVKLKVPGETQTG
KLFRMRGKGVKSVRGGAQGDLLCRVVVETPVGLNERQKQLLQELQESFGGPTGEHNSPRS
KSFFDGVKKFFDDLTR
>eco:b0122-pr
MKTFFRTVLFGSLMAVCANSYALSESEAEDMADLTAVFVFLKNDCGYQNLPNGQIRRALV
FFAQQNQWDLSNYDTFDMKALGEDSYRDLSGIGIPVAKKCKALARDSLSLLAYVK
"""
refseqs2 = """>r1
atgaaacgcattagcaccaccattaccaccaccatcaccattaccacaggtaacggtgcg
ggctga
>r2 some comments...
atggctaagcaagattattacgagattttaggcgtttccaaaacagcggaagagcgtgaa
atcagaaaggcctacaaacgcctggccatgaaataccacccggaccgtaaccagggtgac
aaagaggccgaggcgaaatttaaagagatcaaggaagcttatgaagttctgaccgactcg
caaaaacgtgcggcatacgatcagtatggtcatgctgcgtttgagcaaggtggcatgggc
ggcggcggttttggcggcggcgcagacttcagcgatatttttggtgacgttttcggcgat
atttttggcggcggacgtggtcgtcaacgtgcggcgcgcggtgctgatttacgctataac
atggagctcaccctcgaagaagctgtacgtggcgtgaccaaagagatccgcattccgact
ctggaagagtgtgacgtttgccacggtagcggtgcaaaaccaggtacacagccgcagact
tgtccgacctgtcatggttctggtcaggtgcagatgcgccagggattcttcgctgtacag
cagacctgtccacactgtcagggccgcggtacgctgatcaaagatccgtgcaacaaatgt
catggtcatggtcgtgttgagcgcagcaaaacgctgtccgttaaaatcccggcaggggtg
gacactggagaccgcatccgtcttgcgggcgaaggtgaagcgggcgagcatggcgcaccg
gcaggcgatctgtacgttcaggttcaggttaaacagcacccgattttcgagcgtgaaggc
aacaacctgtattgcgaagtcccgatcaacttcgctatggcggcgctgggtggcgaaatc
gaagtaccgacccttgatggtcgcgtcaaactgaaagtgcctggcgaaacccagaccggt
aagctattccgtatgcgcggtaaaggcgtcaagtctgtccgcggtggcgcacagggtgat
ttgctgtgccgcgttgtcgtcgaaacaccggtaggcctgaacgaaaggcagaaacagctg
ctgcaagagctgcaagaaagcttcggtggcccaaccggcgagcacaacagcccgcgctca
aagagcttctttgatggtgtgaagaagttttttgacgacctgacccgagaa
>r3
atgaagacgtttttcagaacagtgttattcggcagcctgatggccgtctgcgcaaacagt
tacgcgctcagcgagtctgaagccgaagatatggccgatttaacggcagtttttgtcttt
ctgaagaacgattgtggttaccagaacttacctaacgggcaaattcgtcgcgcactggtc
tttttcgctcagcaaaaccagtgggacctcagtaattacgacaccttcgacatgaaagcc
ctcggtgaagacagctaccgcgatctcagcggcattggcattcccgtcgctaaaaaatgc
aaagccctggcccgcgattccttaagcctgcttgcctacgtcaaataa
>r4
atgaagaaaattttcagaacagtgttattcggcagcctgatggccgtctgcgcaaacagt
tacgcgctcagcgagtctgaagccgaagatatggccgatttaacggcagtttttgtcttt
ctgaagaacgattgtggttaccagaacttacctaacgggcaaattcgtcgcgcactggtc
tttttcgctcagcaaaaccagtgggacctcagtaattacgacaccttcgacatgaaagcc
ctcggtgaagacagctaccgcgatctcagcggcattggcattcccgtcgctaaaaaatgc
aaagccctggcccgcgattccttaagcctgcttgcctacgtcaaatcc
>r5 some comments...
aatgactaagcaagattattacgagattttaggcgtttccaaaacagcggaagagcgtgaa
atcagaaaggcctacaaacgcctggccatgaaataccacccggaccgtaaccagggtgac
aaagaggccgaggcgaaatttaaagagatcaaggaagcttatgaagttctgaccgactcg
caaaaacgtgcggcatacgatcagtatggtcatgctgcgtttgagcaaggtggcatgggc
ggcggcggttttggcggcggcgcagacttcagcgatatttttggtgacgttttcggcgat
atttttggcggcggacgtggtcgtcaacgtgcggcgcgcggtgctgatttacgctataac
atggagctcaccctcgaagaagctgtacgtggcgtgaccaaagagatccgcattccgact
ctggaagagtgtgacgtttgccacggtagcggtgcaaaaccaggtacacagccgcagact
tgtccgacctgtcatggttctggtcaggtgcagatgcgccagggattcttcgctgtacag
cagacctgtccacactgtcagggccgcggtacgctgatcaaagatccgtgcaacaaatgt
catggtcatggtcgtgttgagcgcagcaaaacgctgtccgttaaaatcccggcaggggtg
gacactggagaccgcatccgtcttgcgggcgaaggtgaagcgggcgagcatggcgcaccg
gcaggcgatctgtacgttcaggttcaggttaaacagcacccgattttcgagcgtgaaggc
aacaacctgtattgcgaagtcccgatcaacttcgctatggcggcgctgggtggcgaaatc
gaagtaccgacccttgatggtcgcgtcaaactgaaagtgcctggcgaaacccagaccggt
aagctattccgtatgcgcggtaaaggcgtcaagtctgtccgcggtggcgcacagggtgat
ttgctgtgccgcgttgtcgtcgaaacaccggtaggcctgaacgaaaggcagaaacagctg
ctgcaagagctgcaagaaagcttcggtggcccaaccggcgagcacaacagcccgcgctca
aagagcttctttgatggtgtgaagaagttttttgacgacctgacccgctaa
"""
inseqs1 = """>eco:b0001 thrL; thr operon leader peptide; K08278 thr operon leader peptide (N)
atgaaacgcattagcaccaccattaccaccaccatcaccattaccacaggtaacggtgcg
ggctga
>eco:b0015 dnaJ; chaperone Hsp40, co-chaperone with DnaK; K03686 molecular chaperone DnaJ (N)
atggctaagcaagattattacgagattttaggcgtttccaaaacagcggaagagcgtgaa
atcagaaaggcctacaaacgcctggccatgaaataccacccggaccgtaaccagggtgac
aaagaggccgaggcgaaatttaaagagatcaaggaagcttatgaagttctgaccgactcg
caaaaacgtgcggcatacgatcagtatggtcatgctgcgtttgagcaaggtggcatgggc
ggcggcggttttggcggcggcgcagacttcagcgatatttttggtgacgttttcggcgat
atttttggcggcggacgtggtcgtcaacgtgcggcgcgcggtgctgatttacgctataac
atggagctcaccctcgaagaagctgtacgtggcgtgaccaaagagatccgcattccgact
ctggaagagtgtgacgtttgccacggtagcggtgcaaaaccaggtacacagccgcagact
tgtccgacctgtcatggttctggtcaggtgcagatgcgccagggattcttcgctgtacag
cagacctgtccacactgtcagggccgcggtacgctgatcaaagatccgtgcaacaaatgt
catggtcatggtcgtgttgagcgcagcaaaacgctgtccgttaaaatcccggcaggggtg
gacactggagaccgcatccgtcttgcgggcgaaggtgaagcgggcgagcatggcgcaccg
gcaggcgatctgtacgttcaggttcaggttaaacagcacccgattttcgagcgtgaaggc
aacaacctgtattgcgaagtcccgatcaacttcgctatggcggcgctgggtggcgaaatc
gaagtaccgacccttgatggtcgcgtcaaactgaaagtgcctggcgaaacccagaccggt
aagctattccgtatgcgcggtaaaggcgtcaagtctgtccgcggtggcgcacagggtgat
ttgctgtgccgcgttgtcgtcgaaacaccggtaggcctgaacgaaaggcagaaacagctg
ctgcaagagctgcaagaaagcttcggtggcccaaccggcgagcacaacagcccgcgctca
aagagcttctttgatggtgtgaagaagttttttgacgacctgacccgctaa
>eco:b0122
atgaagacgtttttcagaacagtgttattcggcagcctgatggccgtctgcgcaaacagt
tacgcgctcagcgagtctgaagccgaagatatggccgatttaacggcagtttttgtcttt
ctgaagaacgattgtggttaccagaacttacctaacgggcaaattcgtcgcgcactggtc
tttttcgctcagcaaaaccagtgggacctcagtaattacgacaccttcgacatgaaagcc
ctcggtgaagacagctaccgcgatctcagcggcattggcattcccgtcgctaaaaaatgc
aaagccctggcccgcgattccttaagcctgcttgcctacgtcaaataa
>eco:b0015:duplicate
atggctaagcaagattattacgagattttaggcgtttccaaaacagcggaagagcgtgaa
atcagaaaggcctacaaacgcctggccatgaaataccacccggaccgtaaccagggtgac
aaagaggccgaggcgaaatttaaagagatcaaggaagcttatgaagttctgaccgactcg
caaaaacgtgcggcatacgatcagtatggtcatgctgcgtttgagcaaggtggcatgggc
ggcggcggttttggcggcggcgcagacttcagcgatatttttggtgacgttttcggcgat
atttttggcggcggacgtggtcgtcaacgtgcggcgcgcggtgctgatttacgctataac
atggagctcaccctcgaagaagctgtacgtggcgtgaccaaagagatccgcattccgact
ctggaagagtgtgacgtttgccacggtagcggtgcaaaaccaggtacacagccgcagact
tgtccgacctgtcatggttctggtcaggtgcagatgcgccagggattcttcgctgtacag
cagacctgtccacactgtcagggccgcggtacgctgatcaaagatccgtgcaacaaatgt
catggtcatggtcgtgttgagcgcagcaaaacgctgtccgttaaaatcccggcaggggtg
gacactggagaccgcatccgtcttgcgggcgaaggtgaagcgggcgagcatggcgcaccg
gcaggcgatctgtacgttcaggttcaggttaaacagcacccgattttcgagcgtgaaggc
aacaacctgtattgcgaagtcccgatcaacttcgctatggcggcgctgggtggcgaaatc
gaagtaccgacccttgatggtcgcgtcaaactgaaagtgcctggcgaaacccagaccggt
aagctattccgtatgcgcggtaaaggcgtcaagtctgtccgcggtggcgcacagggtgat
ttgctgtgccgcgttgtcgtcgaaacaccggtaggcctgaacgaaaggcagaaacagctg
ctgcaagagctgcaagaaagcttcggtggcccaaccggcgagcacaacagcccgcgctca
aagagcttctttgatggtgtgaagaagttttttgacgacctgacccgctaa
"""
inseqs2 = """>s1_1
atgttacgcattagcaccaccattaccaccaccatcaccattaccacaggtaacggtgcg
ggctga
>s2_2 some comments...
atggctaagcaagattattacgagattttaggcgtttccaaaacagcggaagagcgtgaa
atcagaaaggcctacaaacgcctggccatgaaataccacccggaccgtaaccagggtgac
aaagaggccgaggcgaaatttaaagagatcaaggaagcttatgaagttctgaccgactcg
caaaaacgtgcggcatacgatcagtatggtcatgctgcgtttgagcaaggtggcatgggc
ggcggcggttttggcggcggcgcagacttcagcgatatttttggtgacgttttcggcgat
atttttggcggcggacgtggtcgtcaacgtgcggcgcgcggtgctgatttacgctataac
atggagctcaccctcgaagaagctgtacgtggcgtgaccaaagagatccgcattccgact
ctggaagagtgtgacgtttgccacggtagcggtgcaaaaccaggtacacagccgcagact
tgtccgacctgtcatggttctggtcaggtgcagatgcgccagggattcttcgctgtacag
cagacctgtccacactgtcagggccgcggtacgctgatcaaagatccgtgcaacaaatgt
catggtcatggtcgtgttgagcgcagcaaaacgctgtccgttaaaatcccggcaggggtg
gacactggagaccgcatccgtcttgcgggcgaaggtgaagcgggcgagcatggcgcaccg
gcaggcgatctgtacgttcaggttcaggttaaacagcacccgattttcgagcgtgaaggc
aacaacctgtattgcgaagtcccgatcaacttcgctatggcggcgctgggtggcgaaatc
gaagtaccgacccttgatggtcgcgtcaaactgaaagtgcctggcgaaacccagaccggt
aagctattccgtatgcgcggtaaaggcgtcaagtctgtccgcggtggcgcacagggtgat
ttgctgtgccgcgttgtcgtcgaaacaccggtaggcctgaacgaaaggcagaaacagctg
ctgcaagagctgcaagaaagcttcggtggcccaaccggcgagcacaacagcccgcgctca
aagagcttctttgatggtgtgaagaagttttttgacgacctgacccgagaa
>s1_3
atgaagacgtttttcagaacagtgttattcggcagcctgatggccgtctgcgcaaacagt
tacgcgctcagcgagtctgaagccgaagatatggccgatttaacggcagtttttgtcttt
ctgaagaacgattgtggttaccagaacttacctaacgggcaaattcgtcgcgcactggtc
tttttcgctcagcaaaaccagtgggacctcagtaattacgacaccttcgacatgaaagcc
ctcggtgaagacagctaccgcgatctcagcggcattggcattcccgtcgctaaaaaatgc
aaagccctggcccgcgattccttaagcctgcttgcctacgtcaaataa
>s1_4
atgaagaaaattttcagaacagtgttattcggcagcctgatggccgtctgcgcaaacagt
tacgcgctcagcgagtctgaagccgaagatatggccgatttaacggcagtttttgtcttt
ctgaagaacgattgtggttaccagaacttacctaacgggcaaattcgtcgcgcactggtc
tttttcgctcagcaaaaccagtgggacctcagtaattacgacaccttcgacatgaaagcc
ctcggtgaagacagctaccgcgatctcagcggcattggcattcccgtcgctaaaaaatgc
aaagccctggcccgcgattccttaagcctgcttgcctacgtcaaatcc
>s1_5
atggctaagcaagattattacgagattttaggcgtttccaaaacagcggaagagcgtgaa
atcagaaaggcctacaaacgcctggccatgaaataccacccggaccgtaaccagggtgac
aaagaggccgaggcgaaatttaaagagatcaaggaagcttatgaagttctgaccgactcg
caaaaacgtgcggcatacgatcagtatggtcatgctgcgtttgagcaaggtggcatgggc
ggcggcggttttggcggcggcgcagacttcagcgatatttttggtgacgttttcggcgat
atttttggcggcggacgtggtcgtcaacgtgcggcgcgcggtgctgatttacgctataac
atggagctcaccctcgaagaagctgtacgtggcgtgaccaaagagatccgcattccgact
ctggaagagtgtgacgtttgccacggtagcggtgcaaaaccaggtacacagccgcagact
tgtccgacctgtcatggttctggtcaggtgcagatgcgccagggattcttcgctgtacag
cagacctgtccacactgtcagggccgcggtacgctgatcaaagatccgtgcaacaaatgt
catggtcatggtcgtgttgagcgcagcaaaacgctgtccgttaaaatcccggcaggggtg
gacactggagaccgcatccgtcttgcgggcgaaggtgaagcgggcgagcatggcgcaccg
gcaggcgatctgtacgttcaggttcaggttaaacagcacccgattttcgagcgtgaaggc
aacaacctgtattgcgaagtcccgatcaacttcgctatggcggcgctgggtggcgaaatc
gaagtaccgacccttgatggtcgcgtcaaactgaaagtgcctggcgaaacccagaccggt
aagctattccgtatgcgcggtaaaggcgtcaagtctgtccgcggtggcgcacagggtgat
ttgctgtgccgcgttgtcgtcgaaacaccggtaggcctgaacgaaaggcagaaacagctg
ctgcaagagctgcaagaaagcttcggtggcccaaccggcgagcacaacagcccgcgctca
aagagcttctttgatggtgtgaagaagttttttgacgacctgacccgctaa
>s1_6 some comments...
aatgactaagcaagattattacgagattttaggcgtttccaaaacagcggaagagcgtgaa
atcagaaaggcctacaaacgcctggccatgaaataccacccggaccgtaaccagggtgac
aaagaggccgaggcgaaatttaaagagatcaaggaagcttatgaagttctgaccgactcg
caaaaacgtgcggcatacgatcagtatggtcatgctgcgtttgagcaaggtggcatgggc
ggcggcggttttggcggcggcgcagacttcagcgatatttttggtgacgttttcggcgat
atttttggcggcggacgtggtcgtcaacgtgcggcgcgcggtgctgatttacgctataac
atggagctcaccctcgaagaagctgtacgtggcgtgaccaaagagatccgcattccgact
ctggaagagtgtgacgtttgccacggtagcggtgcaaaaccaggtacacagccgcagact
tgtccgacctgtcatggttctggtcaggtgcagatgcgccagggattcttcgctgtacag
cagacctgtccacactgtcagggccgcggtacgctgatcaaagatccgtgcaacaaatgt
catggtcatggtcgtgttgagcgcagcaaaacgctgtccgttaaaatcccggcaggggtg
gacactggagaccgcatccgtcttgcgggcgaaggtgaagcgggcgagcatggcgcaccg
gcaggcgatctgtacgttcaggttcaggttaaacagcacccgattttcgagcgtgaaggc
aacaacctgtattgcgaagtcccgatcaacttcgctatggcggcgctgggtggcgaaatc
gaagtaccgacccttgatggtcgcgtcaaactgaaagtgcctggcgaaacccagaccggt
aagctattccgtatgcgcggtaaaggcgtcaagtctgtccgcggtggcgcacagggtgat
ttgctgtgccgcgttgtcgtcgaaacaccggtaggcctgaacgaaaggcagaaacagctg
ctgcaagagctgcaagaaagcttcggtggcccaaccggcgagcacaacagcccgcgctca
aagagcttctttgatggtgtgaagaagttttttgacgacctgacccgctaa
"""
# Allow running this test module directly with the unittest runner.
if __name__ == "__main__":
    main()
| gpl-2.0 |
xundaokeji/three.js | utils/exporters/blender/addons/io_three/exporter/api/material.py | 26 | 8268 | from bpy import data, types
from .. import constants, logger
from .constants import MULTIPLY, WIRE, IMAGE
def _material(func):
    """Decorator that resolves a material name to a bpy material datablock.

    Accepts either a ``types.Material`` instance or a material name; the
    wrapped function receives the resolved datablock. When nothing can be
    resolved the wrapped function is skipped and None is returned.
    """
    def inner(name, *args, **kwargs):
        """Resolve *name*, then delegate to the wrapped function."""
        if isinstance(name, types.Material):
            material = name
        elif name:
            material = data.materials[name]
        else:
            material = None
        if not material:
            return None
        return func(material, *args, **kwargs)
    return inner
@_material
def blending(material):
    """
    :param material: material name or bpy material datablock
    :return: the custom THREE blending type, defaulting to NORMAL_BLENDING
    """
    logger.debug("material.blending(%s)", material)
    if hasattr(material, 'THREE_blending_type'):
        return material.THREE_blending_type
    logger.debug("No THREE_blending_type attribute found")
    return constants.NORMAL_BLENDING
@_material
def bump_map(material):
    """
    :param material: material name or bpy material datablock
    :return: first normal-influencing texture that is NOT a normal map,
             or None when no slot qualifies
    """
    logger.debug("material.bump_map(%s)", material)
    return next(
        (slot.texture for slot in _valid_textures(material)
         if slot.use_map_normal and not slot.texture.use_normal_map),
        None)
@_material
def bump_scale(material):
    """
    :param material: material name or bpy material datablock
    :return: normal factor of the first normal-influencing slot, or None
    :rtype: float or None
    """
    logger.debug("material.bump_scale(%s)", material)
    return next(
        (slot.normal_factor for slot in _valid_textures(material)
         if slot.use_map_normal),
        None)
@_material
def depth_test(material):
    """
    :param material: material name or bpy material datablock
    :return: the custom THREE_depth_test flag, defaulting to True
    :rtype: bool
    """
    logger.debug("material.depth_test(%s)", material)
    if hasattr(material, 'THREE_depth_test'):
        return material.THREE_depth_test
    logger.debug("No THREE_depth_test attribute found")
    return True
@_material
def depth_write(material):
    """
    :param material: material name or bpy material datablock
    :return: the custom THREE_depth_write flag, defaulting to True
    :rtype: bool
    """
    logger.debug("material.depth_write(%s)", material)
    if hasattr(material, 'THREE_depth_write'):
        return material.THREE_depth_write
    logger.debug("No THREE_depth_write attribute found")
    return True
@_material
def double_sided(material):
    """
    :param material: material name or bpy material datablock
    :return: the custom THREE_double_sided flag, defaulting to False
    :rtype: bool
    """
    logger.debug("material.double_sided(%s)", material)
    if hasattr(material, 'THREE_double_sided'):
        return material.THREE_double_sided
    logger.debug("No THREE_double_sided attribute found")
    return False
@_material
def diffuse_color(material):
    """
    :param material: material name or bpy material datablock
    :return: diffuse colour scaled by diffuse intensity
    :rtype: tuple (r, g, b)
    """
    logger.debug("material.diffuse_color(%s)", material)
    return tuple(material.diffuse_intensity * material.diffuse_color[chan]
                 for chan in range(3))
@_material
def diffuse_map(material):
    """
    :param material: material name or bpy material datablock
    :return: first diffuse-colour texture that is not multiply-blended
             (multiply-blended ones are treated as light maps), or None
    """
    logger.debug("material.diffuse_map(%s)", material)
    return next(
        (slot.texture for slot in _valid_textures(material)
         if slot.use_map_color_diffuse and slot.blend_type != MULTIPLY),
        None)
@_material
def emissive_color(material):
    """
    :param material: material name or bpy material datablock
    :return: the intensity-scaled diffuse colour scaled again by emit
    :rtype: tuple (r, g, b)
    """
    logger.debug("material.emissive_color(%s)", material)
    base = diffuse_color(material)
    return tuple(material.emit * channel for channel in base)
@_material
def light_map(material):
    """
    :param material: material name or bpy material datablock
    :return: first multiply-blended diffuse texture (slots are considered
             even when their use flag is off), or None
    """
    logger.debug("material.light_map(%s)", material)
    return next(
        (slot.texture for slot in _valid_textures(material, strict_use=False)
         if slot.use_map_color_diffuse and slot.blend_type == MULTIPLY),
        None)
@_material
def normal_scale(material):
    """
    :param material: material name or bpy material datablock
    :return: (x, y) normal-factor pair from the first normal-influencing
        slot, or None when no slot qualifies
    :rtype: tuple or None
    """
    logger.debug("material.normal_scale(%s)", material)
    for texture in _valid_textures(material):
        if texture.use_map_normal:
            return (texture.normal_factor, texture.normal_factor)
@_material
def normal_map(material):
    """
    :param material: material name or bpy material datablock
    :return: first normal-influencing texture that IS a normal map, or None
    """
    logger.debug("material.normal_map(%s)", material)
    return next(
        (slot.texture for slot in _valid_textures(material)
         if slot.use_map_normal and slot.texture.use_normal_map),
        None)
@_material
def opacity(material):
    """
    :param material: material name or bpy material datablock
    :return: the material's alpha rounded to two decimal places
    :rtype: float
    """
    logger.debug("material.opacity(%s)", material)
    return round(material.alpha, 2)
@_material
def shading(material):
    """
    :param material: material name or bpy material datablock
    :return: shading type (basic when shadeless, otherwise phong for a
             positive specular intensity, else lambert)
    """
    logger.debug("material.shading(%s)", material)
    if material.use_shadeless:
        return constants.BASIC
    if material.specular_intensity > 0.0:
        return constants.PHONG
    return constants.LAMBERT
@_material
def specular_coef(material):
    """
    :param material: material name or bpy material datablock
    :return: specular hardness, exported as the specular coefficient
    :rtype: float
    """
    logger.debug("material.specular_coef(%s)", material)
    return material.specular_hardness
@_material
def specular_color(material):
    """
    :param material: material name or bpy material datablock
    :return: specular colour scaled by specular intensity
    :rtype: tuple (r, g, b)
    """
    logger.debug("material.specular_color(%s)", material)
    return tuple(material.specular_intensity * material.specular_color[chan]
                 for chan in range(3))
@_material
def specular_map(material):
    """
    :param material: material name or bpy material datablock
    :return: first specular-influencing texture, or None
    """
    logger.debug("material.specular_map(%s)", material)
    return next(
        (slot.texture for slot in _valid_textures(material)
         if slot.use_map_specular),
        None)
@_material
def transparent(material):
    """
    :param material: material name or bpy material datablock
    :return: whether transparency is enabled on the material
    :rtype: bool
    """
    logger.debug("material.transparent(%s)", material)
    return material.use_transparency
@_material
def type(material):
    """
    :param material: material name or bpy material datablock
    :return: THREE compatible shader type

    NOTE: intentionally shadows the builtin ``type``; the name is part
    of this module's public API and is kept for compatibility.
    """
    logger.debug("material.type(%s)", material)
    if material.diffuse_shader != 'LAMBERT':
        return constants.BASIC
    if material.specular_intensity > 0:
        return constants.PHONG
    return constants.LAMBERT
@_material
def use_vertex_colors(material):
    """
    :param material: material name or bpy material datablock
    :return: whether vertex-colour painting is enabled
    :rtype: bool
    """
    logger.debug("material.use_vertex_colors(%s)", material)
    return material.use_vertex_color_paint
def used_materials():
    """Yield the name of every material that has at least one user.

    :rtype: generator
    """
    logger.debug("material.used_materials()")
    for mat in data.materials:
        if not mat.users:
            continue
        yield mat.name
@_material
def visible(material):
    """
    :param material: material name or bpy material datablock
    :return: the custom THREE_visible flag, defaulting to True
    :rtype: bool
    """
    logger.debug("material.visible(%s)", material)
    if hasattr(material, 'THREE_visible'):
        return material.THREE_visible
    logger.debug("No THREE_visible attribute found")
    return True
@_material
def wireframe(material):
    """
    :param material: material name or bpy material datablock
    :return: whether the material's type equals the WIRE constant
    :rtype: bool
    """
    logger.debug("material.wireframe(%s)", material)
    return material.type == WIRE
def _valid_textures(material, strict_use=True):
    """Yield texture slots on *material* that reference a usable image texture.

    :param material: bpy material datablock
    :param strict_use: when True, skip slots whose ``use`` flag is off
    :rtype: generator
    """
    for slot in material.texture_slots:
        if not slot:
            continue
        if strict_use and not slot.use:
            continue
        if not slot.texture or slot.texture.type != IMAGE:
            logger.warning("Unable to export non-image texture %s", slot)
            continue
        logger.debug("Valid texture found %s", slot)
        yield slot
| mit |
tsunammis/software-craftsmanship | development/python/python3-by-learnxinyminutes.py | 2 | 21442 | """
Created by learnxinyminutes
http://learnxinyminutes.com/docs/python3/
"""
# Single line comments start with a number symbol.
""" Multiline strings can be written
using three "s, and are often used
as comments
"""
####################################################
## 1. Primitive Datatypes and Operators
####################################################
# You have numbers
3 # => 3
# Math is what you would expect
1 + 1 # => 2
8 - 1 # => 7
10 * 2 # => 20
# Except division which returns floats, real numbers, by default
35 / 5 # => 7.0
# Result of integer division truncated down both for positive and negative.
5 // 3 # => 1
5.0 // 3.0 # => 1.0 # works on floats too
-5 // 3 # => -2
-5.0 // 3.0 # => -2.0
# When you use a float, results are floats
3 * 2.0 # => 6.0
# Modulo operation
7 % 3 # => 1
# Exponentiation (x**y, x to the yth power)
2**4 # => 16
# Enforce precedence with parentheses
(1 + 3) * 2 # => 8
# Boolean values are primitives (Note: the capitalization)
True
False
# negate with not
not True # => False
not False # => True
# Boolean Operators
# Note "and" and "or" are case-sensitive
True and False # => False
False or True # => True
# Note using Bool operators with ints
0 and 2 # => 0
-5 or 0 # => -5
0 == False # => True
2 == True # => False
1 == True # => True
# Equality is ==
1 == 1 # => True
2 == 1 # => False
# Inequality is !=
1 != 1 # => False
2 != 1 # => True
# More comparisons
1 < 10 # => True
1 > 10 # => False
2 <= 2 # => True
2 >= 2 # => True
# Comparisons can be chained!
1 < 2 < 3 # => True
2 < 3 < 2 # => False
# (is vs. ==) is checks if two variable refer to the same object, but == checks
# if the objects pointed to have the same values.
a = [1, 2, 3, 4] # Point a at a new list, [1, 2, 3, 4]
b = a # Point b at what a is pointing to
b is a # => True, a and b refer to the same object
b == a # => True, a's and b's objects are equal
b = [1, 2, 3, 4]  # Point b at a new list, [1, 2, 3, 4]
b is a  # => False, a and b do not refer to the same object
b == a  # => True, a's and b's objects are equal
# Strings are created with " or '
"This is a string."
'This is also a string.'
# Strings can be added too! But try not to do this.
"Hello " + "world!" # => "Hello world!"
# Strings can be added without using '+'
"Hello " "world!" # => "Hello world!"
# A string can be treated like a list of characters
"This is a string"[0] # => 'T'
# .format can be used to format strings, like this:
"{} can be {}".format("Strings", "interpolated") # => "Strings can be interpolated"
# You can repeat the formatting arguments to save some typing.
"{0} be nimble, {0} be quick, {0} jump over the {1}".format("Jack", "candle stick")
# => "Jack be nimble, Jack be quick, Jack jump over the candle stick"
# You can use keywords if you don't want to count.
"{name} wants to eat {food}".format(name="Bob", food="lasagna") # => "Bob wants to eat lasagna"
# If your Python 3 code also needs to run on Python 2.5 and below, you can also
# still use the old style of formatting:
"%s can be %s the %s way" % ("Strings", "interpolated", "old") # => "Strings can be interpolated the old way"
# None is an object
None # => None
# Don't use the equality "==" symbol to compare objects to None
# Use "is" instead. This checks for equality of object identity.
"etc" is None # => False
None is None # => True
# None, 0, and empty strings/lists/dicts all evaluate to False.
# All other values are True
bool(0) # => False
bool("") # => False
bool([]) # => False
bool({}) # => False
####################################################
## 2. Variables and Collections
####################################################
# Python has a print function
print("I'm Python. Nice to meet you!") # => I'm Python. Nice to meet you!
# By default the print function also prints out a newline at the end.
# Use the optional argument end to change the end character.
print("Hello, World", end="!") # => Hello, World!
# Simple way to get input data from console
input_string_var = input("Enter some data: ") # Returns the data as a string
# Note: In earlier versions of Python, input() method was named as raw_input()
# No need to declare variables before assigning to them.
# Convention is to use lower_case_with_underscores
some_var = 5
some_var # => 5
# Accessing a previously unassigned variable is an exception.
# See Control Flow to learn more about exception handling.
some_unknown_var # Raises a NameError
# Lists store sequences
li = []
# You can start with a prefilled list
other_li = [4, 5, 6]
# Add stuff to the end of a list with append
li.append(1) # li is now [1]
li.append(2) # li is now [1, 2]
li.append(4) # li is now [1, 2, 4]
li.append(3) # li is now [1, 2, 4, 3]
# Remove from the end with pop
li.pop() # => 3 and li is now [1, 2, 4]
# Let's put it back
li.append(3) # li is now [1, 2, 4, 3] again.
# Access a list like you would any array
li[0] # => 1
# Look at the last element
li[-1] # => 3
# Looking out of bounds is an IndexError
li[4] # Raises an IndexError
# You can look at ranges with slice syntax.
# (It's a closed/open range for you mathy types.)
li[1:3] # => [2, 4]
# Omit the beginning
li[2:] # => [4, 3]
# Omit the end
li[:3] # => [1, 2, 4]
# Select every second entry
li[::2] # =>[1, 4]
# Return a reversed copy of the list
li[::-1] # => [3, 4, 2, 1]
# Use any combination of these to make advanced slices
# li[start:end:step]
# Make a one layer deep copy using slices
li2 = li[:] # => li2 = [1, 2, 4, 3] but (li2 is li) will result in false.
# Remove arbitrary elements from a list with "del"
del li[2] # li is now [1, 2, 3]
# Remove first occurrence of a value
li.remove(2) # li is now [1, 3]
li.remove(2) # Raises a ValueError as 2 is not in the list
# Insert an element at a specific index
li.insert(1, 2) # li is now [1, 2, 3] again
# Get the index of the first item found
li.index(2)  # => 1 (li is [1, 2, 3] at this point)
li.index(4)  # Raises a ValueError as 4 is not in the list
# You can add lists
# Note: values for li and for other_li are not modified.
li + other_li # => [1, 2, 3, 4, 5, 6]
# Concatenate lists with "extend()"
li.extend(other_li) # Now li is [1, 2, 3, 4, 5, 6]
# Check for existence in a list with "in"
1 in li # => True
# Examine the length with "len()"
len(li) # => 6
# Tuples are like lists but are immutable.
tup = (1, 2, 3)
tup[0] # => 1
tup[0] = 3 # Raises a TypeError
# Note that a tuple of length one has to have a comma after the last element but
# tuples of other lengths, even zero, do not.
type((1)) # => <class 'int'>
type((1,)) # => <class 'tuple'>
type(()) # => <class 'tuple'>
# You can do most of the list operations on tuples too
len(tup) # => 3
tup + (4, 5, 6) # => (1, 2, 3, 4, 5, 6)
tup[:2] # => (1, 2)
2 in tup # => True
# You can unpack tuples (or lists) into variables
a, b, c = (1, 2, 3) # a is now 1, b is now 2 and c is now 3
# You can also do extended unpacking
# NOTE: this is tutorial code; several statements below are intentionally
# invalid and raise the exception named in their trailing comment.
a, *b, c = (1, 2, 3, 4) # a is now 1, b is now [2, 3] and c is now 4
# Tuples are created by default if you leave out the parentheses
d, e, f = 4, 5, 6
# Now look how easy it is to swap two values
e, d = d, e # d is now 5 and e is now 4
# Dictionaries store mappings from keys to values
empty_dict = {}
# Here is a prefilled dictionary
filled_dict = {"one": 1, "two": 2, "three": 3}
# Note keys for dictionaries have to be immutable types. This is to ensure that
# the key can be converted to a constant hash value for quick look-ups.
# Immutable types include ints, floats, strings, tuples.
invalid_dict = {[1,2,3]: "123"} # => Raises a TypeError: unhashable type: 'list'
valid_dict = {(1,2,3):[1,2,3]} # Values can be of any type, however.
# Look up values with []
filled_dict["one"] # => 1
# Get all keys as an iterable with "keys()". We need to wrap the call in list()
# to turn it into a list. Note - since Python 3.7 dicts preserve insertion
# order; on older versions the key order is arbitrary.
list(filled_dict.keys()) # => ["one", "two", "three"] on Python 3.7+
# Get all values as an iterable with "values()". Once again we need to wrap it
# in list() to get it out of the iterable. Note - Same as above regarding key
# ordering.
list(filled_dict.values()) # => [1, 2, 3] on Python 3.7+
# Check for existence of keys in a dictionary with "in"
"one" in filled_dict # => True
1 in filled_dict # => False
# Looking up a non-existing key is a KeyError
filled_dict["four"] # KeyError
# Use "get()" method to avoid the KeyError
filled_dict.get("one") # => 1
filled_dict.get("four") # => None
# The get method supports a default argument when the value is missing
filled_dict.get("one", 4) # => 1
filled_dict.get("four", 4) # => 4
# "setdefault()" inserts into a dictionary only if the given key isn't present
filled_dict.setdefault("five", 5) # filled_dict["five"] is set to 5
filled_dict.setdefault("five", 6) # filled_dict["five"] is still 5
# Adding to a dictionary
filled_dict.update({"four":4}) # => {"one": 1, "two": 2, "three": 3, "four": 4}
#filled_dict["four"] = 4 #another way to add to dict
# Remove keys from a dictionary with del
del filled_dict["one"] # Removes the key "one" from filled dict
# From Python 3.5 you can also use the additional unpacking options
{'a': 1, **{'b': 2}} # => {'a': 1, 'b': 2}
{'a': 1, **{'a': 2}} # => {'a': 2}
# Sets store ... well sets (unordered collections of unique elements)
empty_set = set()
# Initialize a set with a bunch of values. Yeah, it looks a bit like a dict. Sorry.
some_set = {1, 1, 2, 2, 3, 4} # some_set is now {1, 2, 3, 4}
# Similar to keys of a dictionary, elements of a set have to be immutable.
invalid_set = {[1], 1} # => Raises a TypeError: unhashable type: 'list'
valid_set = {(1,), 1}
# Assignment creates another name for the SAME set object (no copy is made)
filled_set = some_set
# Add one more item to the set
filled_set.add(5) # filled_set is now {1, 2, 3, 4, 5}
# Do set intersection with &
other_set = {3, 4, 5, 6}
filled_set & other_set # => {3, 4, 5}
# Do set union with |
filled_set | other_set # => {1, 2, 3, 4, 5, 6}
# Do set difference with -
{1, 2, 3, 4} - {2, 3, 5} # => {1, 4}
# Do set symmetric difference with ^
{1, 2, 3, 4} ^ {2, 3, 5} # => {1, 4, 5}
# Check if set on the left is a superset of set on the right
{1, 2} >= {1, 2, 3} # => False
# Check if set on the left is a subset of set on the right
{1, 2} <= {1, 2, 3} # => True
# Check for existence in a set with in
2 in filled_set # => True
10 in filled_set # => False
####################################################
## 3. Control Flow and Iterables
####################################################
# Let's just make a variable
some_var = 5
# Here is an if statement. Indentation is significant in python!
# prints "some_var is smaller than 10"
if some_var > 10:
    print("some_var is totally bigger than 10.")
elif some_var < 10: # This elif clause is optional.
    print("some_var is smaller than 10.")
else: # This is optional too.
    print("some_var is indeed 10.")
"""
For loops iterate over lists
prints:
    dog is a mammal
    cat is a mammal
    mouse is a mammal
"""
for animal in ["dog", "cat", "mouse"]:
    # You can use format() to interpolate formatted strings
    print("{} is a mammal".format(animal))
"""
"range(number)" returns an iterable of numbers
from zero to the given number
prints:
    0
    1
    2
    3
"""
for i in range(4):
    print(i)
"""
"range(lower, upper)" returns an iterable of numbers
from the lower number to the upper number
prints:
    4
    5
    6
    7
"""
for i in range(4, 8):
    print(i)
"""
"range(lower, upper, step)" returns an iterable of numbers
from the lower number to the upper number, while incrementing
by step. If step is not indicated, the default value is 1.
prints:
    4
    6
"""
for i in range(4, 8, 2):
    print(i)
"""
While loops go until a condition is no longer met.
prints:
    0
    1
    2
    3
"""
x = 0
while x < 4:
    print(x)
    x += 1 # Shorthand for x = x + 1
# Handle exceptions with a try/except block
try:
    # Use "raise" to raise an error
    raise IndexError("This is an index error")
except IndexError as e:
    pass # Pass is just a no-op. Usually you would do recovery here.
except (TypeError, NameError):
    pass # Multiple exceptions can be handled together, if required.
else: # Optional clause to the try/except block. Must follow all except blocks
    print("All good!") # Runs only if the code in try raises no exceptions
finally: # Execute under all circumstances
    print("We can clean up resources here")
# Instead of try/finally to cleanup resources you can use a with statement;
# the file is closed automatically when the block is left, even on error.
with open("myfile.txt") as f:
    for line in f:
        print(line)
# Python offers a fundamental abstraction called the Iterable.
# An iterable is an object that can be treated as a sequence.
# The object returned by the range function is an iterable.
filled_dict = {"one": 1, "two": 2, "three": 3}
our_iterable = filled_dict.keys()
print(our_iterable) # => dict_keys(['one', 'two', 'three']). This is an object that implements our Iterable interface.
# We can loop over it.
for i in our_iterable:
    print(i) # Prints one, two, three
# However we cannot address elements by index.
our_iterable[1] # Raises a TypeError
# An iterable is an object that knows how to create an iterator.
our_iterator = iter(our_iterable)
# Our iterator is an object that can remember the state as we traverse through it.
# We get the next object with "next()".
next(our_iterator) # => "one"
# It maintains state as we iterate.
next(our_iterator) # => "two"
next(our_iterator) # => "three"
# After the iterator has returned all of its data, it raises a StopIteration exception
next(our_iterator) # Raises StopIteration
# You can grab all the elements of an iterator by calling list() on it.
list(filled_dict.keys()) # => Returns ["one", "two", "three"]
####################################################
## 4. Functions
####################################################
# Use "def" to create new functions
def add(x, y):
    """Announce the two operands on stdout, then return their sum."""
    print(f"x is {x} and y is {y}")
    return x + y
# Calling functions with positional parameters
add(5, 6) # => prints out "x is 5 and y is 6" and returns 11
# Another way to call functions is with keyword arguments
add(y=6, x=5) # Keyword arguments can arrive in any order.
# You can define functions that take a variable number of
# positional arguments
def varargs(*positional):
    """Collect any number of positional arguments and return them as a tuple."""
    return positional
varargs(1, 2, 3) # => (1, 2, 3)
# You can define functions that take a variable number of
# keyword arguments, as well
def keyword_args(**named):
    """Collect any number of keyword arguments and return them as a dict."""
    return named
# Let's call it to see what happens
keyword_args(big="foot", loch="ness") # => {"big": "foot", "loch": "ness"}
# You can do both (varargs and kwargs) at once, if you like
def all_the_args(*args, **kwargs):
    """Print the positional-argument tuple, then the keyword-argument dict."""
    for collected in (args, kwargs):
        print(collected)
"""
all_the_args(1, 2, a=3, b=4) prints:
(1, 2)
{"a": 3, "b": 4}
"""
# When calling functions, you can do the opposite of args/kwargs!
# Use * to expand tuples and use ** to expand kwargs.
args = (1, 2, 3, 4)
kwargs = {"a": 3, "b": 4}
all_the_args(*args) # equivalent to foo(1, 2, 3, 4)
all_the_args(**kwargs) # equivalent to foo(a=3, b=4)
all_the_args(*args, **kwargs) # equivalent to foo(1, 2, 3, 4, a=3, b=4)
# Returning multiple values (with tuple assignments)
def swap(x, y):
    """Return the two arguments in reversed order, packed into a tuple.

    (The parentheses around the returned tuple are optional.)
    """
    return (y, x)
x = 1
y = 2
x, y = swap(x, y) # => x = 2, y = 1
# (x, y) = swap(x,y) # Again parentheses have been excluded but can be included.
# Function Scope: module-level x, shadowed or rebound by the functions below
x = 5
def set_x(num):
    """Bind a *local* name x and print it; the module-level x is untouched."""
    x = num
    print(x)
def set_global_x(num):
    """Rebind the module-level x (requires the ``global`` declaration),
    printing its value before and after the reassignment."""
    global x
    print(x)  # the current module-level value
    x = num
    print(x)  # the newly assigned value
set_x(43)          # prints 43; module-level x stays 5
set_global_x(6)    # prints 5 then 6; module-level x is now 6
# Python has first class functions
def create_adder(x):
    """Return a closure that adds the captured x to its own argument."""
    def adder(y):
        return y + x
    return adder
add_10 = create_adder(10)
add_10(3) # => 13
# There are also anonymous functions
(lambda x: x > 2)(3) # => True
(lambda x, y: x ** 2 + y ** 2)(2, 1) # => 5
# NOTE: in Python 3 map() and filter() return lazy iterator objects, not
# lists; wrap them in list() to materialize the results shown below.
# There are built-in higher order functions
map(add_10, [1, 2, 3]) # => list(...) == [11, 12, 13]
map(max, [1, 2, 3], [4, 2, 1]) # => list(...) == [4, 2, 3]
filter(lambda x: x > 5, [3, 4, 5, 6, 7]) # => list(...) == [6, 7]
# We can use list comprehensions for nice maps and filters
# List comprehension stores the output as a list which can itself be a nested list
[add_10(i) for i in [1, 2, 3]] # => [11, 12, 13]
[x for x in [3, 4, 5, 6, 7] if x > 5] # => [6, 7]
####################################################
## 5. Classes
####################################################
# We use the "class" statement to create a class
class Human:
    """Tutorial class demonstrating instance, class and static methods,
    plus a managed ``age`` property backed by the private ``_age`` attribute."""

    # Class attribute, shared by every instance (until shadowed or reassigned).
    species = "H. sapiens"

    def __init__(self, name):
        """Create a human with the given name and an initial age of 0."""
        self.name = name
        # This assignment is routed through the @age.setter defined below.
        self.age = 0

    def say(self, msg):
        """Return the message prefixed with this human's name."""
        return f"{self.name}: {msg}"

    @classmethod
    def get_species(cls):
        """Class method: receives the class itself as its first argument."""
        return cls.species

    @staticmethod
    def grunt():
        """Static method: called without any instance or class reference."""
        return "*grunt*"

    @property
    def age(self):
        """Getter: turns age into a read-style attribute of the same name."""
        return self._age

    @age.setter
    def age(self, value):
        # Setter: allows `human.age = ...` assignments.
        self._age = value

    @age.deleter
    def age(self):
        # Deleter: allows `del human.age`.
        del self._age
# Instantiate a class
i = Human(name="Ian")
print(i.say("hi")) # prints out "Ian: hi"
j = Human("Joel")
print(j.say("hello")) # prints out "Joel: hello"
# Call our class method
i.get_species() # => "H. sapiens"
# Change the shared class attribute; both existing instances see the change
Human.species = "H. neanderthalensis"
i.get_species() # => "H. neanderthalensis"
j.get_species() # => "H. neanderthalensis"
# Call the static method
Human.grunt() # => "*grunt*"
# Update the property (goes through the age setter)
i.age = 42
# Get the property
i.age # => 42
# Delete the property (removes the backing attribute)
del i.age
i.age # => raises an AttributeError
####################################################
## 6. Modules
####################################################
# You can import modules
import math
print(math.sqrt(16)) # => 4.0
# You can get specific functions from a module
from math import ceil, floor
print(ceil(3.7)) # => 4 (math.ceil returns an int in Python 3)
print(floor(3.7)) # => 3 (math.floor returns an int in Python 3)
# You can import all functions from a module.
# Warning: this is not recommended
from math import *
# You can shorten module names
import math as m
math.sqrt(16) == m.sqrt(16) # => True
# Python modules are just ordinary python files. You
# can write your own, and import them. The name of the
# module is the same as the name of the file.
# You can find out which functions and attributes
# a module defines.
import math
dir(math)
####################################################
## 7. Advanced
####################################################
# Generators help you make lazy code
def double_numbers(iterable):
    """Lazily yield each element of *iterable* added to itself."""
    for value in iterable:
        yield value + value
# A generator creates values on the fly.
# Instead of generating and returning all values at once it creates one in each
# iteration. This means values bigger than 15 won't be processed in
# double_numbers.
# We use a trailing underscore in variable names when we want to use a name that
# would normally collide with a python keyword or builtin
range_ = range(1, 900000000)
# will double all numbers until a result >= 30 is found
for i in double_numbers(range_):
    print(i)
    if i >= 30:
        break
# Decorators
# in this example beg wraps say
# Beg will call say. If say_please is True then it will change the returned
# message
from functools import wraps
def beg(target_function):
    """Decorator for functions returning ``(msg, say_please)``: the wrapper
    returns *msg*, appending a begging plea when *say_please* is true."""
    @wraps(target_function)
    def wrapper(*args, **kwargs):
        msg, say_please = target_function(*args, **kwargs)
        return f"{msg} Please! I am poor :(" if say_please else msg
    return wrapper
@beg
def say(say_please=False):
    """Return the request message plus the flag that tells beg to plead."""
    return "Can you buy me a beer?", say_please
# Exercise the decorated function with the plea disabled and enabled:
print(say()) # Can you buy me a beer?
print(say(say_please=True)) # Can you buy me a beer? Please! I am poor :(
| mit |
NaturalGIS/QGIS | python/plugins/processing/algs/grass7/ext/r_li_patchdensity_ascii.py | 45 | 1454 | # -*- coding: utf-8 -*-
"""
***************************************************************************
r_li_patchdensity_ascii.py
--------------------------
Date : February 2016
Copyright : (C) 2016 by Médéric Ribreux
Email : medspx at medspx dot fr
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Médéric Ribreux'
__date__ = 'February 2016'
__copyright__ = '(C) 2016, Médéric Ribreux'
from .r_li import checkMovingWindow, configFile, moveOutputTxtFile
def checkParameterValuesBeforeExecuting(alg, parameters, context):
    """Validate the algorithm's parameters by delegating to the shared
    r.li moving-window check (the True flag selects the ASCII variant,
    matching configFile below — TODO confirm against r_li helpers)."""
    return checkMovingWindow(alg, parameters, context, True)
def processCommand(alg, parameters, context, feedback):
    """Build the GRASS command, delegating config-file generation to the
    shared r.li helper (True mirrors the flag used in the check above)."""
    configFile(alg, parameters, context, feedback, True)
def processOutputs(alg, parameters, context, feedback):
    """Move the text output produced by r.li to its final destination
    via the shared r.li helper."""
    moveOutputTxtFile(alg, parameters, context)
| gpl-2.0 |
sodafree/backend | django/template/response.py | 93 | 6205 | from django.http import HttpResponse
from django.template import loader, Context, RequestContext
class ContentNotRenderedError(Exception):
    """Raised when a template response's content is accessed (iterated,
    pickled or read) before render() has been called."""
    pass
class SimpleTemplateResponse(HttpResponse):
    """An HttpResponse whose body is produced lazily from a template.

    The template name and context are stored on the response; the body is
    only baked in when render() is called (or content is set explicitly).
    """
    # Attributes that only make sense before rendering; they are stripped
    # from the pickled state in __getstate__.
    rendering_attrs = ['template_name', 'context_data', '_post_render_callbacks']

    def __init__(self, template, context=None, mimetype=None, status=None,
                 content_type=None):
        # It would seem obvious to call these next two members 'template' and
        # 'context', but those names are reserved as part of the test Client
        # API. To avoid the name collision, we use different names.
        self.template_name = template
        self.context_data = context
        self._post_render_callbacks = []
        # content argument doesn't make sense here because it will be replaced
        # with rendered template so we always pass empty string in order to
        # prevent errors and provide shorter signature.
        super(SimpleTemplateResponse, self).__init__('', mimetype, status,
                                                     content_type)
        # _is_rendered tracks whether the template and context has been baked
        # into a final response.
        # Super __init__ doesn't know any better than to set self.content to
        # the empty string we just gave it, which wrongly sets _is_rendered
        # True, so we initialize it to False after the call to super __init__.
        self._is_rendered = False

    def __getstate__(self):
        """Pickling support function.

        Ensures that the object can't be pickled before it has been
        rendered, and that the pickled state only includes rendered
        data, not the data used to construct the response.
        """
        obj_dict = self.__dict__.copy()
        if not self._is_rendered:
            raise ContentNotRenderedError('The response content must be '
                                          'rendered before it can be pickled.')
        for attr in self.rendering_attrs:
            if attr in obj_dict:
                del obj_dict[attr]
        return obj_dict

    def resolve_template(self, template):
        "Accepts a template object, path-to-template or list of paths"
        # NOTE: `basestring` makes this Python 2 only.
        if isinstance(template, (list, tuple)):
            return loader.select_template(template)
        elif isinstance(template, basestring):
            return loader.get_template(template)
        else:
            return template

    def resolve_context(self, context):
        """Converts context data into a full Context object
        (assuming it isn't already a Context object).
        """
        if isinstance(context, Context):
            return context
        else:
            return Context(context)

    @property
    def rendered_content(self):
        """Returns the freshly rendered content for the template and context
        described by the TemplateResponse.

        This *does not* set the final content of the response. To set the
        response content, you must either call render(), or set the
        content explicitly using the value of this property.
        """
        template = self.resolve_template(self.template_name)
        context = self.resolve_context(self.context_data)
        content = template.render(context)
        return content

    def add_post_render_callback(self, callback):
        """Adds a new post-rendering callback.

        If the response has already been rendered,
        invoke the callback immediately.
        """
        if self._is_rendered:
            callback(self)
        else:
            self._post_render_callbacks.append(callback)

    def render(self):
        """Renders (thereby finalizing) the content of the response.

        If the content has already been rendered, this is a no-op.

        Returns the baked response instance.
        """
        retval = self
        if not self._is_rendered:
            # Setting content flips _is_rendered via _set_content below.
            self._set_content(self.rendered_content)
            for post_callback in self._post_render_callbacks:
                # A callback may replace the response by returning a new one.
                newretval = post_callback(retval)
                if newretval is not None:
                    retval = newretval
        return retval

    @property
    def is_rendered(self):
        # True once render() has run or content was assigned explicitly.
        return self._is_rendered

    def __iter__(self):
        if not self._is_rendered:
            raise ContentNotRenderedError('The response content must be '
                                          'rendered before it can be iterated over.')
        return super(SimpleTemplateResponse, self).__iter__()

    def _get_content(self):
        if not self._is_rendered:
            raise ContentNotRenderedError('The response content must be '
                                          'rendered before it can be accessed.')
        return super(SimpleTemplateResponse, self)._get_content()

    def _set_content(self, value):
        """Sets the content for the response and marks it as rendered."""
        super(SimpleTemplateResponse, self)._set_content(value)
        self._is_rendered = True

    content = property(_get_content, _set_content)
class TemplateResponse(SimpleTemplateResponse):
    """A SimpleTemplateResponse that renders with a RequestContext, so
    context processors configured for the request are applied."""
    # The request and current_app are also pre-render-only state.
    rendering_attrs = SimpleTemplateResponse.rendering_attrs + \
        ['_request', '_current_app']

    def __init__(self, request, template, context=None, mimetype=None,
                 status=None, content_type=None, current_app=None):
        # self.request gets over-written by django.test.client.Client - and
        # unlike context_data and template_name the _request should not
        # be considered part of the public API.
        self._request = request
        # As a convenience we'll allow callers to provide current_app without
        # having to create the RequestContext themselves.
        self._current_app = current_app
        super(TemplateResponse, self).__init__(
            template, context, mimetype, status, content_type)

    def resolve_context(self, context):
        """Convert context data into a full RequestContext object
        (assuming it isn't already a Context object).
        """
        if isinstance(context, Context):
            return context
        return RequestContext(self._request, context, current_app=self._current_app)
| bsd-3-clause |
kingvuplus/ME-TEST2 | lib/actions/parseactions.py | 56 | 1924 | # takes a header file, outputs action ids
import tokenize, sys, string
def filter(g):
    # NOTE(review): shadows the builtin filter(); renaming would require
    # updating do_file() below. Python 2 only (uses g.next()).
    """Yield the token strings from tokenize-style tuples in *g*, skipping
    newlines and the contents of C-style /* ... */ and // comments."""
    while 1:
        t = g.next()
        if t[1] == "/*":
            # Skip everything up to the matching close of a block comment.
            while g.next()[1] != "*/":
                pass
            continue
        if t[1] == "//":
            # Skip the remainder of a line comment.
            while g.next()[1] != "\n":
                pass
            continue
        if t[1] != "\n":
            # print t
            yield t[1]
def do_file(f, mode):
    """Scan C++ header *f* for enums named ``*Actions`` inside classes and
    print either include lines (mode == "include") or action-table entries
    (mode == "parse"). Python 2 only (print statements, tokens.next())."""
    tokens = filter(tokenize.generate_tokens(open(f, 'r').readline))
    sys.stderr.write("parsing %s\n" % f)
    # state counts open braces; classstate remembers the depth at which the
    # most recent class was declared, so "state == classstate + 1" means we
    # are directly inside that class body.
    state = 0
    classstate = 0
    firsthit = 1
    while 1:
        try:
            t = tokens.next()
        except:
            # StopIteration (or any tokenizer error) ends the scan.
            break
        if t == "class":
            classname = tokens.next()
            classstate = state
        if t == "{":
            state = state + 1
        if t == "}":
            state = state - 1
        if t == "enum" and state == classstate + 1:
            actionname = tokens.next()
            if actionname == "{":
                # Anonymous enum: skip its whole body.
                while tokens.next() != "}":
                    pass
                continue
            if actionname[-7:] == "Actions":
                if tokens.next() != "{":
                    try:
                        print classname
                    except:
                        pass
                    try:
                        print actionname
                    except:
                        pass
                    raise Exception("action enum must be simple.")
                counter = 0
                while 1:
                    t = tokens.next()
                    if t == "=":
                        # Skip an explicit "= value" initializer.
                        tokens.next()
                        t = tokens.next()
                    if t == "}":
                        break
                    if counter:
                        # Enumerators after the first must be comma-separated.
                        if t != ",":
                            raise Exception("no comma")
                        t = tokens.next()
                    if firsthit:
                        # Emit the per-file header once, on the first action found.
                        if mode == "include":
                            # hack hack hack!!
                            print "#include <lib/" + '/'.join(f.split('/')[-2:]) + ">"
                        else:
                            print "\t// " + f
                        firsthit = 0
                    if mode == "parse":
                        print "{\"" + actionname + "\", \"" + t + "\", " + string.join((classname, t), "::") + "},"
                    counter = counter + 1
# Command line: parseactions.py <mode> <header files...>
# mode "parse" emits a C table of actions; mode "include" emits #include lines.
mode = sys.argv[1]
if mode == "parse":
    # Open the generated C struct; do_file() appends one entry per action.
    print """
/* generated by parseactions.py - do not modify! */
struct eActionList
{
const char *m_context, *m_action;
int m_id;
} actions[]={"""
for x in sys.argv[2:]:
    do_file(x, mode)
if mode == "parse":
    # Close the generated table.
    print "};"
| gpl-2.0 |
Ziftr/namecoin | client/jsonrpc/_tests/test_serviceHandler.py | 53 | 6079 |
"""
Copyright (c) 2007 Jan-Klaas Kollhof
This file is part of jsonrpc.
jsonrpc is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version.
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this software; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
import unittest
import jsonrpc
from types import *
class Service(object):
    """Fixture service: one echo endpoint, one undecorated (hence
    unreachable) method, and one endpoint that always raises."""

    @jsonrpc.ServiceMethod
    def echo(self, arg):
        # Exposed endpoint: returns its argument unchanged.
        return arg

    def not_a_serviceMethod(self):
        # Deliberately NOT decorated -- must not be reachable via the handler.
        pass

    @jsonrpc.ServiceMethod
    def raiseError(self):
        # Exposed endpoint that always fails; used to test error translation.
        raise Exception("foobar")
class Handler(jsonrpc.ServiceHandler):
    """ServiceHandler subclass that sets a flag in each pipeline stage so
    tests can verify every stage was invoked."""

    def __init__(self, service):
        self.service = service

    def translateRequest(self, data):
        self._requestTranslated = True
        return jsonrpc.ServiceHandler.translateRequest(self, data)

    def findServiceEndpoint(self, name):
        self._foundServiceEndpoint = True
        return jsonrpc.ServiceHandler.findServiceEndpoint(self, name)

    def invokeServiceEndpoint(self, meth, params):
        self._invokedEndpoint = True
        return jsonrpc.ServiceHandler.invokeServiceEndpoint(self, meth, params)

    def translateResult(self, result, error, id_):
        self._resultTranslated = True
        return jsonrpc.ServiceHandler.translateResult(self, result, error, id_)
class TestServiceHandler(unittest.TestCase):
    """Unit tests for the JSON-RPC service handler pipeline.

    NOTE(review): uses legacy unittest aliases (assert_, assertEquals)
    from the Python 2 era.
    """

    def setUp(self):
        self.service = Service()

    def tearDown(self):
        pass

    def test_RequestProcessing(self):
        # A full round trip must touch all four pipeline stages.
        handler = Handler(self.service)
        json = jsonrpc.dumps({"method": "echo", 'params': ['foobar'], 'id': ''})
        result = handler.handleRequest(json)
        self.assert_(handler._requestTranslated)
        self.assert_(handler._foundServiceEndpoint)
        self.assert_(handler._invokedEndpoint)
        self.assert_(handler._resultTranslated)

    def test_translateRequest(self):
        handler = Handler(self.service)
        json = jsonrpc.dumps({"method": "echo", 'params': ['foobar'], 'id': ''})
        req = handler.translateRequest(json)
        self.assertEquals(req['method'], "echo")
        self.assertEquals(req['params'], ['foobar'])
        self.assertEquals(req['id'], '')

    def test_findServiceEndpoint(self):
        # Unknown and undecorated methods must both be rejected.
        handler = Handler(self.service)
        self.assertRaises(jsonrpc.ServiceMethodNotFound, handler.findServiceEndpoint, "notfound")
        self.assertRaises(jsonrpc.ServiceMethodNotFound, handler.findServiceEndpoint, "not_a_serviceMethod")
        meth = handler.findServiceEndpoint("echo")
        self.assertEquals(self.service.echo, meth)

    def test_invokeEndpoint(self):
        handler = Handler(self.service)
        meth = handler.findServiceEndpoint("echo")
        rslt = handler.invokeServiceEndpoint(meth, ['spam'])
        self.assertEquals(rslt, 'spam')

    def test_translateResults(self):
        handler = Handler(self.service)
        data = handler.translateResult("foobar", None, "spam")
        self.assertEquals(jsonrpc.loads(data), {"result": "foobar", "id": "spam", "error": None})

    def test_translateError(self):
        # Exceptions are serialized as {name, message} error objects.
        handler = Handler(self.service)
        exc = Exception()
        data = handler.translateResult(None, exc, "id")
        self.assertEquals(jsonrpc.loads(data), {"result": None, "id": "id", "error": {"name": "Exception", "message": ""}})

    def test_translateUnencodableResults(self):
        # A non-serializable result becomes a JSONEncodeException error.
        handler = Handler(self.service)
        data = handler.translateResult(self, None, "spam")
        self.assertEquals(jsonrpc.loads(data), {"result": None, "id": "spam", "error": {"name": "JSONEncodeException", "message": "Result Object Not Serializable"}})

    def test_handleRequestEcho(self):
        handler = Handler(self.service)
        json = jsonrpc.dumps({"method": "echo", 'params': ['foobar'], 'id': ''})
        result = handler.handleRequest(json)
        self.assertEquals(jsonrpc.loads(result), jsonrpc.loads('{"result":"foobar", "error":null, "id":""}'))

    def test_handleRequestMethodNotFound(self):
        handler = Handler(self.service)
        json = jsonrpc.dumps({"method": "not_found", 'params': ['foobar'], 'id': ''})
        result = handler.handleRequest(json)
        self.assertEquals(jsonrpc.loads(result), {"result": None, "error": {"name": "ServiceMethodNotFound", "message": ""}, "id": ""})

    def test_handleRequestMethodNotAllowed(self):
        handler = Handler(self.service)
        json = jsonrpc.dumps({"method": "not_a_ServiceMethod", 'params': ['foobar'], 'id': ''})
        result = handler.handleRequest(json)
        self.assertEquals(jsonrpc.loads(result), {"result": None, "error": {"name": "ServiceMethodNotFound", "message": ""}, "id": ""})

    def test_handleRequestMethodRaiseError(self):
        handler = Handler(self.service)
        json = jsonrpc.dumps({"method": "raiseError", 'params': [], 'id': ''})
        result = handler.handleRequest(json)
        self.assertEquals(jsonrpc.loads(result), {"result": None, "error": {"name": "Exception", "message": "foobar"}, "id": ""})

    def test_handleBadRequestData(self):
        # Non-JSON input is reported as ServiceRequestNotTranslatable.
        handler = Handler(self.service)
        json = "This is not a JSON-RPC request"
        result = handler.handleRequest(json)
        self.assertEquals(jsonrpc.loads(result), {"result": None, "error": {"name": "ServiceRequestNotTranslatable", "message": json}, "id": ""})

    def test_handleBadRequestObject(self):
        # Valid JSON that is not a valid request object -> BadServiceRequest.
        handler = Handler(self.service)
        json = "{}"
        result = handler.handleRequest(json)
        self.assertEquals(jsonrpc.loads(result), {"result": None, "error": {"name": "BadServiceRequest", "message": json}, "id": ""})
| mit |
jgabriellima/yowsup | yowsup/layers/protocol_messages/protocolentities/message_text_broadcast.py | 64 | 1185 | from .message_text import TextMessageProtocolEntity
from yowsup.structs import ProtocolTreeNode
import time
class BroadcastTextMessage(TextMessageProtocolEntity):
    """A text message addressed to several recipients at once via a
    '<timestamp>@broadcast' JID, carrying the recipient list in a
    <broadcast> child node."""

    def __init__(self, jids, body):
        # The broadcast JID embeds the creation time in milliseconds.
        broadcastTime = int(time.time() * 1000)
        super(BroadcastTextMessage, self).__init__(body, to = "%s@broadcast" % broadcastTime)
        self.setBroadcastProps(jids)

    def setBroadcastProps(self, jids):
        """Store the recipient JIDs; *jids* must be a list."""
        assert type(jids) is list, "jids must be a list, got %s instead." % type(jids)
        self.jids = jids

    def toProtocolTreeNode(self):
        """Extend the base node with a <broadcast> child holding one
        <to jid=...> node per recipient."""
        node = super(BroadcastTextMessage, self).toProtocolTreeNode()
        toNodes = [ProtocolTreeNode("to", {"jid": jid}) for jid in self.jids]
        broadcastNode = ProtocolTreeNode("broadcast", children = toNodes)
        node.addChild(broadcastNode)
        return node

    @staticmethod
    def fromProtocolTreeNode(node):
        """Build a BroadcastTextMessage from a protocol tree node by
        upgrading the plain text entity and reading the recipient list."""
        entity = TextMessageProtocolEntity.fromProtocolTreeNode(node)
        # Re-brand the instance; the broadcast-specific state is added below.
        entity.__class__ = BroadcastTextMessage
        jids = [toNode.getAttributeValue("jid") for toNode in node.getChild("broadcast").getAllChildren()]
        entity.setBroadcastProps(jids)
        return entity
| gpl-3.0 |
tcpcloud/contrail-controller | src/nodemgr/common/event_manager.py | 1 | 30083 | #
# Copyright (c) 2015 Juniper Networks, Inc. All rights reserved.
#
import gevent
import json
import ConfigParser
from StringIO import StringIO
from ConfigParser import NoOptionError, NoSectionError
import sys
import os
import psutil
import socket
import time
import subprocess
from subprocess import Popen, PIPE
import supervisor.xmlrpc
import xmlrpclib
import platform
from supervisor import childutils
from nodemgr.common.event_listener_protocol_nodemgr import \
EventListenerProtocolNodeMgr
from nodemgr.common.process_stat import ProcessStat
from nodemgr.common.sandesh.nodeinfo.ttypes import *
from nodemgr.common.sandesh.nodeinfo.cpuinfo.ttypes import *
from nodemgr.common.sandesh.nodeinfo.process_info.ttypes import *
from nodemgr.common.cpuinfo import MemCpuUsageData
from sandesh_common.vns.constants import INSTANCE_ID_DEFAULT
import discoveryclient.client as client
from buildinfo import build_info
from pysandesh.sandesh_logger import *
from pysandesh.gen_py.sandesh.ttypes import SandeshLevel
def package_installed(pkg):
    """Return True if package *pkg* is installed on this host.

    Queries ``dpkg -l`` on Ubuntu and ``rpm -q`` everywhere else; the
    query's exit status (0 == installed) determines the result. All
    command output is discarded.
    """
    try:
        (pdist, _, _) = platform.dist()
    except AttributeError:
        # platform.dist() was removed in Python 3.8; fall back to the
        # rpm-based query used for non-Ubuntu platforms.
        pdist = ''
    if pdist == 'Ubuntu':
        cmd = "dpkg -l " + pkg
    else:
        cmd = "rpm -q " + pkg
    with open(os.devnull, "w") as fnull:
        return (not subprocess.call(cmd.split(), stdout=fnull, stderr=fnull))
class EventManager(object):
rules_data = []
group_names = []
process_state_db = {}
third_party_process_state_db = {}
FAIL_STATUS_DUMMY = 0x1
FAIL_STATUS_DISK_SPACE = 0x2
FAIL_STATUS_SERVER_PORT = 0x4
FAIL_STATUS_NTP_SYNC = 0x8
FAIL_STATUS_DISK_SPACE_NA = 0x10
def __init__(self, rule_file, discovery_server,
discovery_port, collector_addr, sandesh_global,
send_build_info = False):
self.stdin = sys.stdin
self.stdout = sys.stdout
self.stderr = sys.stderr
self.rule_file = rule_file
self.rules_data = ''
self.max_cores = 4
self.max_old_cores = 3
self.max_new_cores = 1
self.all_core_file_list = []
self.core_dir_modified_time = 0
self.tick_count = 0
self.fail_status_bits = 0
self.prev_fail_status_bits = 1
self.instance_id = INSTANCE_ID_DEFAULT
self.discovery_server = discovery_server
self.discovery_port = discovery_port
self.collector_addr = collector_addr
self.listener_nodemgr = EventListenerProtocolNodeMgr()
self.sandesh_global = sandesh_global
self.curr_build_info = None
self.new_build_info = None
self.send_build_info = send_build_info
self.last_cpu = None
self.last_time = 0
# Get all the current processes in the node
def get_current_process(self):
proxy = xmlrpclib.ServerProxy(
'http://127.0.0.1',
transport=supervisor.xmlrpc.SupervisorTransport(
None, None, serverurl=self.supervisor_serverurl))
# Add all current processes to make sure nothing misses the radar
process_state_db = {}
# list of all processes on the node is made here
for proc_info in proxy.supervisor.getAllProcessInfo():
if (proc_info['name'] != proc_info['group']):
proc_name = proc_info['group'] + ":" + proc_info['name']
else:
proc_name = proc_info['name']
proc_pid = proc_info['pid']
process_stat_ent = self.get_process_stat_object(proc_name)
process_stat_ent.process_state = "PROCESS_STATE_" + \
proc_info['statename']
if (process_stat_ent.process_state ==
'PROCESS_STATE_RUNNING'):
process_stat_ent.start_time = str(proc_info['start'] * 1000000)
process_stat_ent.start_count += 1
process_stat_ent.pid = proc_pid
process_state_db[proc_name] = process_stat_ent
return process_state_db
# end get_current_process
# Add the current processes in the node to db
def add_current_process(self):
self.process_state_db = self.get_current_process()
# end add_current_process
# In case the processes in the Node can change, update current processes
def update_current_process(self):
process_state_db = self.get_current_process()
old_process_set = set(self.process_state_db.keys())
new_process_set = set(process_state_db.keys())
common_process_set = new_process_set.intersection(old_process_set)
added_process_set = new_process_set - common_process_set
deleted_process_set = old_process_set - common_process_set
for deleted_process in deleted_process_set:
self.delete_process_handler(deleted_process)
for added_process in added_process_set:
self.add_process_handler(
added_process, process_state_db[added_process])
# end update_current_process
# process is deleted, send state & remove it from db
def delete_process_handler(self, deleted_process):
self.process_state_db[deleted_process].deleted = True
group_val = self.process_state_db[deleted_process].group
self.send_process_state_db([group_val])
del self.process_state_db[deleted_process]
# end delete_process_handler
# new process added, update db & send state
def add_process_handler(self, added_process, process_info):
self.process_state_db[added_process] = process_info
group_val = self.process_state_db[added_process].group
self.send_process_state_db([group_val])
# end add_process_handler
def get_discovery_client(self):
_disc = client.DiscoveryClient(
self.discovery_server, self.discovery_port, self.module_id)
return _disc
def check_ntp_status(self):
ntp_status_cmd = 'ntpq -n -c pe | grep "^*"'
proc = Popen(ntp_status_cmd, shell=True, stdout=PIPE, stderr=PIPE)
(output, errout) = proc.communicate()
if proc.returncode != 0:
self.fail_status_bits |= self.FAIL_STATUS_NTP_SYNC
else:
self.fail_status_bits &= ~self.FAIL_STATUS_NTP_SYNC
self.send_nodemgr_process_status()
def get_build_info(self):
# Retrieve build_info from package/rpm and cache it
if self.curr_build_info is None:
command = "contrail-version contrail-nodemgr | grep contrail-nodemgr"
version = os.popen(command).read()
version_partials = version.split()
if len(version_partials) < 3:
sys.stderr.write('Not enough values to parse package version %s' % version)
return ""
else:
_, rpm_version, build_num = version_partials
self.new_build_info = build_info + '"build-id" : "' + \
rpm_version + '", "build-number" : "' + \
build_num + '"}]}'
if (self.new_build_info != self.curr_build_info):
self.curr_build_info = self.new_build_info
return self.curr_build_info
def update_process_core_file_list(self):
    """Rebuild each monitored process's core-file list from /var/crashes.

    Core files are matched to processes by the executable name embedded
    in the core file name (second dot-separated field).

    :return: True if any process's core file list changed, else False.
    """
    ret_value = False
    try:
        ls_command = "ls -1 /var/crashes"
        (corenames, stderr) = Popen(
            ls_command.split(),
            stdout=PIPE).communicate()
        # Build a fresh (empty) stat object per process so newly computed
        # core lists can be compared against the stored ones.
        process_state_db_tmp = {}
        for key in self.process_state_db:
            proc_stat = self.get_process_stat_object(key)
            process_state_db_tmp[key] = proc_stat
        for corename in corenames.split():
            # core file naming: core.<exec_name>.<...>
            exec_name = corename.split('.')[1]
            for key in self.process_state_db:
                if key.startswith(exec_name):
                    process_state_db_tmp[key].core_file_list.append(corename.rstrip())
        # Copy over only the lists that actually differ (order-insensitive
        # comparison via sets).
        for key in self.process_state_db:
            if set(process_state_db_tmp[key].core_file_list) != set(self.process_state_db[key].core_file_list):
                self.process_state_db[key].core_file_list = process_state_db_tmp[key].core_file_list
                ret_value = True
    except Exception as e:
        sys.stderr.write('update_process_core_file_list: exception: '+str(e))
    return ret_value
#end update_process_core_file_list
def send_process_state_db_base(self, group_names, ProcessInfo):
    """Publish one NodeStatus UVE per process group in *group_names*.

    :param group_names: iterable of supervisord group names to report.
    :param ProcessInfo: sandesh ProcessInfo class used to build entries
        (injected by the node-type specific caller).
    """
    name = socket.gethostname()
    for group in group_names:
        process_infos = []
        delete_status = True
        for key in self.process_state_db:
            pstat = self.process_state_db[key]
            if (pstat.group != group):
                continue
            process_info = ProcessInfo()
            process_info.process_name = key
            process_info.process_state = pstat.process_state
            process_info.start_count = pstat.start_count
            process_info.stop_count = pstat.stop_count
            process_info.exit_count = pstat.exit_count
            process_info.last_start_time = pstat.start_time
            process_info.last_stop_time = pstat.stop_time
            process_info.last_exit_time = pstat.exit_time
            process_info.core_file_list = pstat.core_file_list
            process_infos.append(process_info)
            #in tor-agent case, we should use tor-agent name as uve key
            name = pstat.name
            # the group counts as deleted only if every member is deleted
            if pstat.deleted == False:
                delete_status = False
        if not process_infos:
            continue
        # send node UVE
        node_status = NodeStatus()
        node_status.name = name
        node_status.deleted = delete_status
        node_status.process_info = process_infos
        if (self.send_build_info):
            node_status.build_info = self.get_build_info()
        node_status_uve = NodeStatusUVE(table=self.table,
                                        data=node_status)
        msg = 'send_process_state_db_base: Sending UVE:' + str(node_status_uve)
        self.sandesh_global.logger().log(SandeshLogger.get_py_logger_level(
            SandeshLevel.SYS_INFO), msg)
        node_status_uve.send()
def update_all_core_file(self):
    """Detect changes to /var/crashes via its mtime; on change refresh the
    node-wide core file list and republish all groups.

    :return: True if the directory changed since the last check.
    """
    stat_command_option = "stat --printf=%Y /var/crashes"
    modified_time = Popen(
        stat_command_option.split(),
        stdout=PIPE).communicate()
    # communicate() returns (stdout, stderr); compare the stdout payload
    if modified_time[0] == self.core_dir_modified_time:
        return False
    self.core_dir_modified_time = modified_time[0]
    ls_command_option = "ls /var/crashes"
    (corename, stderr) = Popen(
        ls_command_option.split(),
        stdout=PIPE).communicate()
    # drop the trailing empty element produced by the final newline
    self.all_core_file_list = corename.split('\n')[0:-1]
    self.send_process_state_db(self.group_names)
    return True
def get_process_stat_object(self, pname):
    """Build a fresh ProcessStat record for the process named *pname*."""
    stat_obj = ProcessStat(pname)
    return stat_obj
def send_process_state(self, pname, pstate, pheaders):
    """Update the stat entry for *pname* after a supervisord state-change
    event, persist the state db, and publish the owning group's UVE.

    :param pname: process (or "group:process") name.
    :param pstate: supervisord event name, e.g. 'PROCESS_STATE_RUNNING'.
    :param pheaders: event payload headers ('pid', 'expected', ...).
    """
    # update process stats
    if pname in self.process_state_db:
        proc_stat = self.process_state_db[pname]
    else:
        proc_stat = self.get_process_stat_object(pname)
        if proc_stat.group not in self.group_names:
            self.group_names.append(proc_stat.group)
    proc_stat.process_state = pstate

    send_uve = False
    if (pstate == 'PROCESS_STATE_RUNNING'):
        proc_stat.start_count += 1
        proc_stat.start_time = str(int(time.time() * 1000000))
        send_uve = True
        proc_stat.pid = int(pheaders['pid'])

    if (pstate == 'PROCESS_STATE_STOPPED'):
        proc_stat.stop_count += 1
        send_uve = True
        proc_stat.stop_time = str(int(time.time() * 1000000))
        proc_stat.last_exit_unexpected = False

    if (pstate == 'PROCESS_STATE_EXITED'):
        proc_stat.exit_count += 1
        send_uve = True
        proc_stat.exit_time = str(int(time.time() * 1000000))
        if not int(pheaders['expected']):
            self.stderr.write(
                pname + " with pid:" + pheaders['pid'] +
                " exited abnormally\n")
            proc_stat.last_exit_unexpected = True
            self._record_exit_core_file(pname, proc_stat, pheaders['pid'])

    # update process state database
    self.process_state_db[pname] = proc_stat
    # 'with' guarantees the handle is closed (the original leaked it)
    with open('/var/log/contrail/process_state' +
              self.node_type + ".json", 'w') as f:
        f.write(json.dumps(
            self.process_state_db,
            default=lambda obj: obj.__dict__))
    # the original tested send_uve twice (early return + re-check)
    if send_uve:
        self.send_process_state_db([proc_stat.group])

def _record_exit_core_file(self, pname, proc_stat, pid):
    """Locate a core file written for an abnormal exit of *pname* (pid
    *pid*), prune old cores beyond the configured limits, and append the
    new core to the process's core file list."""
    # check for core file for this exit
    find_command_option = \
        "find /var/crashes -name core.[A-Za-z]*." + \
        pid + "*"
    self.stderr.write(
        "find command option for cores:" +
        find_command_option + "\n")
    (corename, stderr) = Popen(
        find_command_option.split(),
        stdout=PIPE).communicate()
    self.stderr.write("core file: " + corename + "\n")
    if corename is None or len(corename.rstrip()) < 1:
        return
    # before adding to the core file list make
    # sure that we do not have too many cores
    sys.stderr.write(
        'core_file_list:' + str(proc_stat.core_file_list) +
        ", self.max_cores:" + str(self.max_cores) + "\n")
    if (len(proc_stat.core_file_list) == self.max_cores):
        # get rid of old cores: keep max_old_cores oldest and
        # max_new_cores-1 newest, delete the middle slice
        sys.stderr.write(
            'max # of cores reached:' +
            str(self.max_cores) + "\n")
        val = self.max_cores - self.max_new_cores + 1
        core_files_to_be_deleted = \
            proc_stat.core_file_list[self.max_old_cores:(val)]
        sys.stderr.write(
            'deleting core file list:' +
            str(core_files_to_be_deleted) + "\n")
        for core_file in core_files_to_be_deleted:
            sys.stderr.write(
                'deleting core file:' + core_file + "\n")
            try:
                os.remove(core_file)
            except OSError as e:
                sys.stderr.write('ERROR: ' + str(e) + '\n')
        # now delete the list as well
        del proc_stat.core_file_list[self.max_old_cores:(val)]
    # now add the new core to the core file list
    proc_stat.core_file_list.append(corename.rstrip())
    sys.stderr.write(
        "# of cores for " + pname + ":" +
        str(len(proc_stat.core_file_list)) + "\n")
def send_nodemgr_process_status_base(self, ProcessStateNames,
                                     ProcessState, ProcessStatus):
    """Publish nodemgr's own ProcessStatus UVE, but only when the failure
    bitmask changed since the last report (avoids duplicate UVEs).
    """
    if (self.prev_fail_status_bits != self.fail_status_bits):
        self.prev_fail_status_bits = self.fail_status_bits
        fail_status_bits = self.fail_status_bits
        state, description = self.get_process_state(fail_status_bits)
        process_status = ProcessStatus(
            module_id=self.module_id, instance_id=self.instance_id,
            state=state, description=description)
        process_status_list = []
        process_status_list.append(process_status)
        node_status = NodeStatus(name=socket.gethostname(),
                                 process_status=process_status_list)
        if (self.send_build_info):
            node_status.build_info = self.get_build_info()
        node_status_uve = NodeStatusUVE(table=self.table,
                                        data=node_status)
        msg = 'send_nodemgr_process_status_base: Sending UVE:' + str(node_status_uve)
        self.sandesh_global.logger().log(SandeshLogger.get_py_logger_level(
            SandeshLevel.SYS_INFO), msg)
        node_status_uve.send()
def send_system_cpu_info(self):
    """Publish a NodeStatus UVE describing the host CPU topology
    (sockets, cpus, cores per socket, threads per core)."""
    mem_cpu_usage_data = MemCpuUsageData(os.getpid(), self.last_cpu, self.last_time)
    sys_cpu = SystemCpuInfo()
    sys_cpu.num_socket = mem_cpu_usage_data.get_num_socket()
    sys_cpu.num_cpu = mem_cpu_usage_data.get_num_cpu()
    sys_cpu.num_core_per_socket = mem_cpu_usage_data.get_num_core_per_socket()
    sys_cpu.num_thread_per_core = mem_cpu_usage_data.get_num_thread_per_core()
    node_status = NodeStatus(name=socket.gethostname(),
                             system_cpu_info=sys_cpu)
    node_status_uve = NodeStatusUVE(table=self.table,
                                    data=node_status)
    node_status_uve.send()
def get_all_processes_mem_cpu_usage(self):
    """Collect per-process memory/cpu usage for every RUNNING supervised
    process plus the configured third-party processes.

    :return: dict keyed by process name -> mem/cpu usage object.
    """
    process_mem_cpu_usage = {}
    for key in self.process_state_db:
        pstat = self.process_state_db[key]
        if (pstat.process_state == 'PROCESS_STATE_RUNNING'):
            try:
                mem_cpu_usage_data = MemCpuUsageData(pstat.pid, pstat.last_cpu, pstat.last_time)
                process_mem_cpu = mem_cpu_usage_data.get_process_mem_cpu_info()
            except psutil.NoSuchProcess:
                sys.stderr.write("NoSuchProcess: process name:%s pid:%d\n"
                                 % (pstat.pname, pstat.pid))
            else:
                # NOTE(review): '__key' written inside a class body is
                # name-mangled to '_<Class>__key'; presumably intended as
                # the UVE map key attribute -- confirm.
                process_mem_cpu.__key = pstat.pname
                process_mem_cpu_usage[process_mem_cpu.__key] = process_mem_cpu
                pstat.last_cpu = mem_cpu_usage_data.last_cpu
                pstat.last_time = mem_cpu_usage_data.last_time

    # walk through all processes being monitored by nodemgr,
    # not spawned by supervisord
    third_party_process_dict = self.get_node_third_party_process_dict()
    for pname in third_party_process_dict:
        pattern = third_party_process_dict[pname]
        # first pid whose ps entry matches the configured pattern
        cmd = "ps -aux | grep " + pattern + " | awk '{print $2}' | head -n1"
        proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, stderr = proc.communicate()
        if (stdout != ''):
            pid = int(stdout.strip('\n'))
            if pname in self.third_party_process_state_db:
                pstat = self.third_party_process_state_db[pname]
            else:
                pstat = self.get_process_stat_object(pname)
                pstat.pid = pid
                self.third_party_process_state_db[pname] = pstat
            try:
                mem_cpu_usage_data = MemCpuUsageData(pstat.pid, pstat.last_cpu, pstat.last_time)
                process_mem_cpu = mem_cpu_usage_data.get_process_mem_cpu_info()
            except psutil.NoSuchProcess:
                sys.stderr.write("NoSuchProcess: process name:%s pid:%d\n"
                                 % (pstat.pname, pstat.pid))
                # process went away: forget it so it can be re-discovered
                self.third_party_process_state_db.pop(pstat.pname)
            else:
                process_mem_cpu.__key = pname
                process_mem_cpu_usage[process_mem_cpu.__key] = process_mem_cpu
                pstat.last_cpu = mem_cpu_usage_data.last_cpu
                pstat.last_time = mem_cpu_usage_data.last_time
    return process_mem_cpu_usage
def get_disk_usage(self):
    """Parse `df` output for local ext2/3/4 and xfs partitions.

    :return: dict of partition name -> DiskPartitionUsageStats.
    """
    disk_usage_info = {}
    partition = subprocess.Popen(
        "df -PT -t ext2 -t ext3 -t ext4 -t xfs",
        shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    for line in partition.stdout:
        # skip the header row
        if 'Filesystem' in line:
            continue
        partition_name = line.rsplit()[0]
        partition_type = line.rsplit()[1]
        partition_space_used_1k = line.rsplit()[3]
        partition_space_available_1k = line.rsplit()[4]
        disk_usage_stat = DiskPartitionUsageStats()
        try:
            disk_usage_stat.partition_type = str(partition_type)
            # NOTE(review): '__key' written inside a class body is
            # name-mangled to '_<Class>__key' -- confirm intent.
            disk_usage_stat.__key = str(partition_name)
            disk_usage_stat.partition_space_used_1k = \
                int(partition_space_used_1k)
            disk_usage_stat.partition_space_available_1k = \
                int(partition_space_available_1k)
            total_disk_space = \
                disk_usage_stat.partition_space_used_1k + \
                disk_usage_stat.partition_space_available_1k
            disk_usage_stat.percentage_partition_space_used = \
                int(round((float(disk_usage_stat.partition_space_used_1k)/ \
                    float(total_disk_space))*100))
        except ValueError:
            sys.stderr.write("Failed to get local disk space usage" + "\n")
        else:
            # only record the partition if every field parsed cleanly
            disk_usage_info[partition_name] = disk_usage_stat
    return disk_usage_info
# end get_disk_usage
def get_process_state_base(self, fail_status_bits,
                           ProcessStateNames, ProcessState):
    """Map the failure bitmask to a (state-name, description) pair.

    :param fail_status_bits: bitwise OR of FAIL_STATUS_* flags; zero
        means fully functional.
    :param ProcessStateNames: mapping from ProcessState values to names.
    :param ProcessState: holder of FUNCTIONAL / NON_FUNCTIONAL values.
    :return: tuple (state, description).
    """
    if fail_status_bits:
        state = ProcessStateNames[ProcessState.NON_FUNCTIONAL]
        description = self.get_failbits_nodespecific_desc(fail_status_bits)
        # BUG FIX: the original used 'description is ""' (identity, relies
        # on CPython string interning) and then had a dead
        # 'if description != ""' branch inside it; use equality.
        if description == "":
            if fail_status_bits & self.FAIL_STATUS_NTP_SYNC:
                description = "NTP state unsynchronized."
    else:
        state = ProcessStateNames[ProcessState.FUNCTIONAL]
        description = ''
    return state, description
def get_failbits_nodespecific_desc(self, fail_status_bits):
    """Return a node-type specific failure description; the base
    implementation has none and yields the empty string."""
    return ""
def event_process_state(self, pheaders, headers):
    """Handle a supervisord PROCESS_STATE_* event: update the process
    stats and run any matching action from the rules file.

    :param pheaders: event payload headers ('processname', 'groupname',
        'pid', 'expected', ...).
    :param headers: event envelope headers ('eventname', ...).
    """
    msg = ("process:" + pheaders['processname'] + "," + "groupname:" +
           pheaders['groupname'] + "," + "eventname:" + headers['eventname'])
    self.sandesh_global.logger().log(SandeshLogger.get_py_logger_level(SandeshLevel.SYS_DEBUG), msg)
    pname = pheaders['processname']
    # group:process naming when the process does not match its group
    if (pheaders['processname'] != pheaders['groupname']):
        pname = pheaders['groupname'] + ":" + pheaders['processname']
    self.send_process_state(pname, headers['eventname'], pheaders)
    for rules in self.rules_data['Rules']:
        if 'processname' in rules:
            if ((rules['processname'] == pheaders['groupname']) and
                    (rules['process_state'] == headers['eventname'])):
                msg = "got a hit with:" + str(rules)
                self.sandesh_global.logger().log(SandeshLogger.get_py_logger_level(
                    SandeshLevel.SYS_DEBUG), msg)
                # do not make async calls
                try:
                    ret_code = subprocess.call(
                        [rules['action']], shell=True,
                        stdout=self.stderr, stderr=self.stderr)
                except Exception as e:
                    msg = ('Failed to execute action: ' + rules['action'] +
                           ' with err ' + str(e))
                    # BUG FIX: was 'logger().logger.log', which raised
                    # AttributeError and masked the original failure.
                    self.sandesh_global.logger().log(SandeshLogger.
                        get_py_logger_level(SandeshLevel.SYS_ERR), msg)
                else:
                    if ret_code:
                        msg = ('Execution of action ' + rules['action'] +
                               ' returned err ' + str(ret_code))
                        self.sandesh_global.logger().log(SandeshLogger.
                            get_py_logger_level(SandeshLevel.SYS_ERR), msg)
def event_process_communication(self, pdata):
    """Handle a PROCESS_COMMUNICATION event whose payload is 'flag:value';
    spawn the action of any rule matching both flag name and value.
    """
    flag_and_value = pdata.partition(":")
    msg = ("Flag:" + flag_and_value[0] +
           " Value:" + flag_and_value[2])
    self.sandesh_global.logger().log(SandeshLogger.get_py_logger_level
                                     (SandeshLevel.SYS_DEBUG), msg)
    for rules in self.rules_data['Rules']:
        if 'flag_name' in rules:
            if ((rules['flag_name'] == flag_and_value[0]) and
                    (rules['flag_value'].strip() == flag_and_value[2].strip())):
                msg = "got a hit with:" + str(rules)
                self.sandesh_global.logger().log(SandeshLogger.
                    get_py_logger_level(SandeshLevel.SYS_DEBUG), msg)
                # NOTE(review): bash usually lives at /bin/bash;
                # '/usr/bin/bash' may be distribution-specific -- confirm.
                cmd_and_args = ['/usr/bin/bash', '-c', rules['action']]
                subprocess.Popen(cmd_and_args)
def event_tick_60(self):
    """Periodic (60 s tick) housekeeping: refresh NTP/core/disk/cpu
    state, publish the node UVE, and compensate stored timestamps when
    the wall clock jumps."""
    self.tick_count += 1
    # get disk usage info periodically
    disk_usage_info = self.get_disk_usage()

    # typical ntp sync time is about 5 min - first time,
    # we scan only after 10 min
    if self.tick_count >= 10:
        self.check_ntp_status()
    if self.update_process_core_file_list():
        self.send_process_state_db(['default'])

    process_mem_cpu_usage = self.get_all_processes_mem_cpu_usage()

    # get system mem/cpu usage
    system_mem_cpu_usage_data = MemCpuUsageData(os.getpid(), self.last_cpu, self.last_time)
    system_mem_usage = system_mem_cpu_usage_data.get_sys_mem_info()
    system_cpu_usage = system_mem_cpu_usage_data.get_sys_cpu_info()

    # update last_cpu/time after all processing is complete
    self.last_cpu = system_mem_cpu_usage_data.last_cpu
    self.last_time = system_mem_cpu_usage_data.last_time

    # send above encoded buffer
    node_status = NodeStatus(name=socket.gethostname(),
                             disk_usage_info=disk_usage_info,
                             system_mem_usage=system_mem_usage,
                             system_cpu_usage=system_cpu_usage,
                             process_mem_cpu_usage=process_mem_cpu_usage)
    # encode other core file
    if self.update_all_core_file():
        node_status.all_core_file_list = self.all_core_file_list
    if (self.send_build_info):
        node_status.build_info = self.get_build_info()
    node_status_uve = NodeStatusUVE(table=self.table,
                                    data=node_status)
    node_status_uve.send()

    current_time = int(time.time())
    if ((abs(current_time - self.prev_current_time)) > 300):
        # A jump of more than 5 minutes means the wall clock stepped
        # (e.g. NTP); rebase all recorded start/stop/exit timestamps by
        # the elapsed offset so durations stay meaningful.
        sys.stderr.write(
            "Time lapse detected " +
            str(abs(current_time - self.prev_current_time)) + "\n")
        for key in self.process_state_db:
            pstat = self.process_state_db[key]
            # BUG FIX: the original compared with "is not ''" (identity
            # on a string literal); use inequality. '//' keeps integer
            # division on both Python 2 and 3.
            if pstat.start_time != '':
                pstat.start_time = str(
                    (current_time - (self.prev_current_time -
                                     int(pstat.start_time) // 1000000)) * 1000000)
            if (pstat.process_state == 'PROCESS_STATE_STOPPED'):
                if pstat.stop_time != '':
                    pstat.stop_time = str(
                        (current_time - (self.prev_current_time -
                                         int(pstat.stop_time) // 1000000)) *
                        1000000)
            if (pstat.process_state == 'PROCESS_STATE_EXITED'):
                if pstat.exit_time != '':
                    pstat.exit_time = str(
                        (current_time - (self.prev_current_time -
                                         int(pstat.exit_time) // 1000000)) *
                        1000000)
            # update process state database
            self.process_state_db[key] = pstat
        try:
            json_file = '/var/log/contrail/process_state' + \
                self.node_type + ".json"
            # 'with' closes the handle (the original leaked it)
            with open(json_file, 'w') as f:
                f.write(
                    json.dumps(
                        self.process_state_db,
                        default=lambda obj: obj.__dict__))
        except Exception:
            # BUG FIX: was a bare 'except:' with a stray 'pass' and no
            # trailing newline in the message.
            sys.stderr.write("Unable to write json\n")
        self.send_process_state_db(self.group_names)
    self.prev_current_time = int(time.time())
def do_periodic_events(self):
    """Run the periodic (60-second tick) housekeeping tasks."""
    self.event_tick_60()
def runforever(self, test=False):
    """Main supervisord event-listener loop: block on the next event and
    dispatch it to the matching handler, forever.

    :param test: accepted for unit tests; not consulted inside the loop.
    """
    self.prev_current_time = int(time.time())
    while 1:
        # we explicitly use self.stdin, self.stdout, and self.stderr
        # instead of sys.* so we can unit test this code
        headers, payload = self.listener_nodemgr.wait(
            self.stdin, self.stdout)
        pheaders, pdata = childutils.eventdata(payload + '\n')
        # check for process state change events
        if headers['eventname'].startswith("PROCESS_STATE"):
            self.event_process_state(pheaders, headers)
        # check for flag value change events
        if headers['eventname'].startswith("PROCESS_COMMUNICATION"):
            self.event_process_communication(pdata)
        # do periodic events
        if headers['eventname'].startswith("TICK_60"):
            self.do_periodic_events()
        # acknowledge the event so supervisord will send the next one
        self.listener_nodemgr.ok(self.stdout)
| apache-2.0 |
perlygatekeeper/glowing-robot | Little_Alchemy_2/Scraper_python/env/lib/python3.7/site-packages/pip/_internal/legacy_resolve.py | 9 | 17303 | """Dependency Resolution
The dependency resolution in pip is performed as follows:
for top-level requirements:
a. only one spec allowed per project, regardless of conflicts or not.
otherwise a "double requirement" exception is raised
b. they override sub-dependency requirements.
for sub-dependencies
a. "first found, wins" (where the order is breadth first)
"""
import logging
import sys
from collections import defaultdict
from itertools import chain
from pip._vendor.packaging import specifiers
from pip._internal.exceptions import (
BestVersionAlreadyInstalled, DistributionNotFound, HashError, HashErrors,
UnsupportedPythonVersion,
)
from pip._internal.req.constructors import install_req_from_req_string
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
dist_in_usersite, ensure_dir, normalize_version_info,
)
from pip._internal.utils.packaging import (
check_requires_python, get_requires_python,
)
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import DefaultDict, List, Optional, Set, Tuple
from pip._vendor import pkg_resources
from pip._internal.cache import WheelCache
from pip._internal.distributions import AbstractDistribution
from pip._internal.download import PipSession
from pip._internal.index import PackageFinder
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req.req_install import InstallRequirement
from pip._internal.req.req_set import RequirementSet
logger = logging.getLogger(__name__)
def _check_dist_requires_python(
    dist,  # type: pkg_resources.Distribution
    version_info,  # type: Tuple[int, int, int]
    ignore_requires_python=False,  # type: bool
):
    # type: (...) -> None
    """
    Check whether the given Python version is compatible with a distribution's
    "Requires-Python" value.

    :param version_info: A 3-tuple of ints representing the Python
        major-minor-micro version to check.
    :param ignore_requires_python: Whether to ignore the "Requires-Python"
        value if the given Python version isn't compatible.

    :raises UnsupportedPythonVersion: When the given Python version isn't
        compatible.
    """
    requires_python = get_requires_python(dist)
    try:
        is_compatible = check_requires_python(
            requires_python, version_info=version_info,
        )
    except specifiers.InvalidSpecifier as exc:
        # A malformed Requires-Python is the package's fault, not the
        # user's: warn and treat the distribution as compatible.
        logger.warning(
            "Package %r has an invalid Requires-Python: %s",
            dist.project_name, exc,
        )
        return

    if is_compatible:
        return

    version = '.'.join(map(str, version_info))
    if ignore_requires_python:
        # user explicitly opted out: log at debug level and continue
        logger.debug(
            'Ignoring failed Requires-Python check for package %r: '
            '%s not in %r',
            dist.project_name, version, requires_python,
        )
        return

    raise UnsupportedPythonVersion(
        'Package {!r} requires a different Python: {} not in {!r}'.format(
            dist.project_name, version, requires_python,
        ))
class Resolver(object):
    """Resolves which packages need to be installed/uninstalled to perform \
    the requested operation without breaking the requirements of any package.
    """

    _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}

    def __init__(
        self,
        preparer,  # type: RequirementPreparer
        session,  # type: PipSession
        finder,  # type: PackageFinder
        wheel_cache,  # type: Optional[WheelCache]
        use_user_site,  # type: bool
        ignore_dependencies,  # type: bool
        ignore_installed,  # type: bool
        ignore_requires_python,  # type: bool
        force_reinstall,  # type: bool
        isolated,  # type: bool
        upgrade_strategy,  # type: str
        use_pep517=None,  # type: Optional[bool]
        py_version_info=None,  # type: Optional[Tuple[int, ...]]
    ):
        # type: (...) -> None
        super(Resolver, self).__init__()
        assert upgrade_strategy in self._allowed_strategies

        # Default to the running interpreter's version when none is given.
        if py_version_info is None:
            py_version_info = sys.version_info[:3]
        else:
            py_version_info = normalize_version_info(py_version_info)

        self._py_version_info = py_version_info

        self.preparer = preparer
        self.finder = finder
        self.session = session

        # NOTE: This would eventually be replaced with a cache that can give
        # information about both sdist and wheels transparently.
        self.wheel_cache = wheel_cache

        # This is set in resolve
        self.require_hashes = None  # type: Optional[bool]

        self.upgrade_strategy = upgrade_strategy
        self.force_reinstall = force_reinstall
        self.isolated = isolated
        self.ignore_dependencies = ignore_dependencies
        self.ignore_installed = ignore_installed
        self.ignore_requires_python = ignore_requires_python
        self.use_user_site = use_user_site
        self.use_pep517 = use_pep517

        # parent requirement name -> list of discovered sub-requirements;
        # consumed by get_installation_order().
        self._discovered_dependencies = \
            defaultdict(list)  # type: DefaultDict[str, List]

    def resolve(self, requirement_set):
        # type: (RequirementSet) -> None
        """Resolve what operations need to be done

        As a side-effect of this method, the packages (and their dependencies)
        are downloaded, unpacked and prepared for installation. This
        preparation is done by ``pip.operations.prepare``.

        Once PyPI has static dependency metadata available, it would be
        possible to move the preparation to become a step separated from
        dependency resolution.
        """
        # make the wheelhouse
        if self.preparer.wheel_download_dir:
            ensure_dir(self.preparer.wheel_download_dir)

        # If any top-level requirement has a hash specified, enter
        # hash-checking mode, which requires hashes from all.
        root_reqs = (
            requirement_set.unnamed_requirements +
            list(requirement_set.requirements.values())
        )
        self.require_hashes = (
            requirement_set.require_hashes or
            any(req.has_hash_options for req in root_reqs)
        )

        # Display where finder is looking for packages
        search_scope = self.finder.search_scope
        locations = search_scope.get_formatted_locations()
        if locations:
            logger.info(locations)

        # Actually prepare the files, and collect any exceptions. Most hash
        # exceptions cannot be checked ahead of time, because
        # req.populate_link() needs to be called before we can make decisions
        # based on link type.
        discovered_reqs = []  # type: List[InstallRequirement]
        hash_errors = HashErrors()
        # NOTE: _resolve_one() extends discovered_reqs while we iterate it
        # through chain(), so newly found sub-dependencies are processed
        # in the same (breadth-first) pass.
        for req in chain(root_reqs, discovered_reqs):
            try:
                discovered_reqs.extend(
                    self._resolve_one(requirement_set, req)
                )
            except HashError as exc:
                exc.req = req
                hash_errors.append(exc)

        if hash_errors:
            raise hash_errors

    def _is_upgrade_allowed(self, req):
        # type: (InstallRequirement) -> bool
        """Return whether *req* may be upgraded under the active strategy."""
        if self.upgrade_strategy == "to-satisfy-only":
            return False
        elif self.upgrade_strategy == "eager":
            return True
        else:
            assert self.upgrade_strategy == "only-if-needed"
            # only user-requested (direct) requirements are upgraded
            return req.is_direct

    def _set_req_to_reinstall(self, req):
        # type: (InstallRequirement) -> None
        """
        Set a requirement to be installed.
        """
        # Don't uninstall the conflict if doing a user install and the
        # conflict is not a user install.
        if not self.use_user_site or dist_in_usersite(req.satisfied_by):
            req.conflicts_with = req.satisfied_by
        req.satisfied_by = None

    # XXX: Stop passing requirement_set for options
    def _check_skip_installed(self, req_to_install):
        # type: (InstallRequirement) -> Optional[str]
        """Check if req_to_install should be skipped.

        This will check if the req is installed, and whether we should upgrade
        or reinstall it, taking into account all the relevant user options.

        After calling this req_to_install will only have satisfied_by set to
        None if the req_to_install is to be upgraded/reinstalled etc. Any
        other value will be a dist recording the current thing installed that
        satisfies the requirement.

        Note that for vcs urls and the like we can't assess skipping in this
        routine - we simply identify that we need to pull the thing down,
        then later on it is pulled down and introspected to assess upgrade/
        reinstalls etc.

        :return: A text reason for why it was skipped, or None.
        """
        if self.ignore_installed:
            return None

        req_to_install.check_if_exists(self.use_user_site)
        if not req_to_install.satisfied_by:
            return None

        if self.force_reinstall:
            self._set_req_to_reinstall(req_to_install)
            return None

        if not self._is_upgrade_allowed(req_to_install):
            if self.upgrade_strategy == "only-if-needed":
                return 'already satisfied, skipping upgrade'
            return 'already satisfied'

        # Check for the possibility of an upgrade. For link-based
        # requirements we have to pull the tree down and inspect to assess
        # the version #, so it's handled way down.
        if not req_to_install.link:
            try:
                self.finder.find_requirement(req_to_install, upgrade=True)
            except BestVersionAlreadyInstalled:
                # Then the best version is installed.
                return 'already up-to-date'
            except DistributionNotFound:
                # No distribution found, so we squash the error. It will
                # be raised later when we re-try later to do the install.
                # Why don't we just raise here?
                pass

        self._set_req_to_reinstall(req_to_install)
        return None

    def _get_abstract_dist_for(self, req):
        # type: (InstallRequirement) -> AbstractDistribution
        """Takes a InstallRequirement and returns a single AbstractDist \
        representing a prepared variant of the same.
        """
        assert self.require_hashes is not None, (
            "require_hashes should have been set in Resolver.resolve()"
        )

        if req.editable:
            return self.preparer.prepare_editable_requirement(
                req, self.require_hashes, self.use_user_site, self.finder,
            )

        # satisfied_by is only evaluated by calling _check_skip_installed,
        # so it must be None here.
        assert req.satisfied_by is None
        skip_reason = self._check_skip_installed(req)

        if req.satisfied_by:
            return self.preparer.prepare_installed_requirement(
                req, self.require_hashes, skip_reason
            )

        upgrade_allowed = self._is_upgrade_allowed(req)
        abstract_dist = self.preparer.prepare_linked_requirement(
            req, self.session, self.finder, upgrade_allowed,
            self.require_hashes
        )

        # NOTE
        # The following portion is for determining if a certain package is
        # going to be re-installed/upgraded or not and reporting to the user.
        # This should probably get cleaned up in a future refactor.

        # req.req is only avail after unpack for URL
        # pkgs repeat check_if_exists to uninstall-on-upgrade
        # (#14)
        if not self.ignore_installed:
            req.check_if_exists(self.use_user_site)

        if req.satisfied_by:
            should_modify = (
                self.upgrade_strategy != "to-satisfy-only" or
                self.force_reinstall or
                self.ignore_installed or
                req.link.scheme == 'file'
            )
            if should_modify:
                self._set_req_to_reinstall(req)
            else:
                logger.info(
                    'Requirement already satisfied (use --upgrade to upgrade):'
                    ' %s', req,
                )
        return abstract_dist

    def _resolve_one(
        self,
        requirement_set,  # type: RequirementSet
        req_to_install  # type: InstallRequirement
    ):
        # type: (...) -> List[InstallRequirement]
        """Prepare a single requirements file.

        :return: A list of additional InstallRequirements to also install.
        """
        # Tell user what we are doing for this requirement:
        # obtain (editable), skipping, processing (local url), collecting
        # (remote url or package name)
        if req_to_install.constraint or req_to_install.prepared:
            return []

        req_to_install.prepared = True

        # register tmp src for cleanup in case something goes wrong
        requirement_set.reqs_to_cleanup.append(req_to_install)

        abstract_dist = self._get_abstract_dist_for(req_to_install)

        # Parse and return dependencies
        dist = abstract_dist.get_pkg_resources_distribution()
        # This will raise UnsupportedPythonVersion if the given Python
        # version isn't compatible with the distribution's Requires-Python.
        _check_dist_requires_python(
            dist, version_info=self._py_version_info,
            ignore_requires_python=self.ignore_requires_python,
        )

        more_reqs = []  # type: List[InstallRequirement]

        def add_req(subreq, extras_requested):
            # Register *subreq* as a dependency of req_to_install and
            # queue any newly seen requirement for scanning.
            sub_install_req = install_req_from_req_string(
                str(subreq),
                req_to_install,
                isolated=self.isolated,
                wheel_cache=self.wheel_cache,
                use_pep517=self.use_pep517
            )
            parent_req_name = req_to_install.name
            to_scan_again, add_to_parent = requirement_set.add_requirement(
                sub_install_req,
                parent_req_name=parent_req_name,
                extras_requested=extras_requested,
            )
            if parent_req_name and add_to_parent:
                self._discovered_dependencies[parent_req_name].append(
                    add_to_parent
                )
            more_reqs.extend(to_scan_again)

        with indent_log():
            # We add req_to_install before its dependencies, so that we
            # can refer to it when adding dependencies.
            if not requirement_set.has_requirement(req_to_install.name):
                # 'unnamed' requirements will get added here
                req_to_install.is_direct = True
                requirement_set.add_requirement(
                    req_to_install, parent_req_name=None,
                )

            if not self.ignore_dependencies:
                if req_to_install.extras:
                    logger.debug(
                        "Installing extra requirements: %r",
                        ','.join(req_to_install.extras),
                    )
                # warn about requested extras the dist does not provide
                missing_requested = sorted(
                    set(req_to_install.extras) - set(dist.extras)
                )
                for missing in missing_requested:
                    logger.warning(
                        '%s does not provide the extra \'%s\'',
                        dist, missing
                    )
                available_requested = sorted(
                    set(dist.extras) & set(req_to_install.extras)
                )
                for subreq in dist.requires(available_requested):
                    add_req(subreq, extras_requested=available_requested)

            if not req_to_install.editable and not req_to_install.satisfied_by:
                # XXX: --no-install leads this to report 'Successfully
                # downloaded' for only non-editable reqs, even though we took
                # action on them.
                requirement_set.successfully_downloaded.append(req_to_install)

        return more_reqs

    def get_installation_order(self, req_set):
        # type: (RequirementSet) -> List[InstallRequirement]
        """Create the installation order.

        The installation order is topological - requirements are installed
        before the requiring thing. We break cycles at an arbitrary point,
        and make no other guarantees.
        """
        # The current implementation, which we may change at any point
        # installs the user specified things in the order given, except when
        # dependencies must come earlier to achieve topological order.
        order = []
        ordered_reqs = set()  # type: Set[InstallRequirement]

        def schedule(req):
            # Depth-first post-order walk: dependencies first, then req.
            if req.satisfied_by or req in ordered_reqs:
                return
            if req.constraint:
                return
            ordered_reqs.add(req)
            for dep in self._discovered_dependencies[req.name]:
                schedule(dep)
            order.append(req)

        for install_req in req_set.requirements.values():
            schedule(install_req)
        return order
| artistic-2.0 |
CanberraUAV/cuav | tests/lib/test_cuav_landingregion.py | 1 | 1452 | #!/usr/bin/env python
'''
test program for cuav_landingregion
'''
import sys, os, time, random, functools
import pytest
import numpy as np
from cuav.lib import cuav_region, cuav_landingregion, mav_position
def test_addLandingZone():
    """Ten identical detections must merge into a single clump."""
    lz = cuav_landingregion.LandingZone()
    for _ in range(10):
        region = cuav_region.Region(1020, 658, 1050, 678, (30, 30))
        region.latlon = (23, 34)
        region.score = 20
        pos = mav_position.MavPosition(23, 24, 80, 0, 0, 0, 1)
        lz.checkaddregion(region, pos)
    assert len(lz.regionClumps) == 1
def test_addLandingZoneMany():
    """100 globally scattered detections should each form their own clump."""
    lz = cuav_landingregion.LandingZone()
    for _ in range(100):
        lat = random.uniform(-90, 90)
        lon = random.uniform(-180, 180)
        region = cuav_region.Region(1020, 658, 1050, 678, (30, 30))
        region.latlon = (lat, lon)
        region.score = random.randint(0, 1000)
        pos = mav_position.MavPosition(lat, lon, 80, 0, 0, 0, 1)
        lz.checkaddregion(region, pos)
    assert len(lz.regionClumps) == 100
def test_calcLandingZone():
    """calclandingzone succeeds on a tight cluster of nearby detections."""
    lz = cuav_landingregion.LandingZone()
    for _ in range(100):
        lat = 34 + random.uniform(-0.001, 0.001)
        lon = -140 + random.uniform(-0.001, 0.001)
        region = cuav_region.Region(1020, 658, 1050, 678, (30, 30))
        region.latlon = (lat, lon)
        region.score = random.randint(0, 1000)
        pos = mav_position.MavPosition(lat, lon, 80, 0, 0, 0, 1)
        lz.checkaddregion(region, pos)
    assert lz.calclandingzone() == True
| gpl-3.0 |
BenWiederhake/House-Of-Tweets | backend/twitterConnection.py | 2 | 8292 | from birdBackend import BirdBackend
import re
import threading
from soundGenerator import generate_sound
import responseBuilder
from twitter import TwitterInterface, TweetConsumer, UpdatesConsumer
import mq
import mylog
# Seconds
REMOVE_CITIZEN_TIME = 5 * 60
# Must be lowercase. The incoming hashtags will be lowercased before comparison.
COMMAND_HASHTAGS_DEFINITE = {'houseoftweets', 'house_of_tweets', 'house-of-tweets'}
COMMAND_HASHTAGS_ACKONLY = {'hot'}
def party_to_color(party: str):
    """Map a German party name to its display hex color string.

    Matching is case-insensitive and prefix-based. None and unknown
    parties map to white ("#ffffff").
    """
    # Check for None BEFORE lowercasing: the original called party.lower()
    # first, which raised AttributeError for party=None and made the
    # None branch below unreachable.
    if party is None:
        return "#ffffff"
    party = party.lower()
    if party.startswith("c"):    # CDU/CSU
        return "#000000"
    if party.startswith("s"):    # SPD
        return "#ff0000"
    if party.startswith("g"):    # GRÜNE, Grün
        return "#46962b"
    if party.startswith("di"):   # DIE LINKE
        return "#c82864"
    return "#ffffff"             # unknown party
def contains_command(hashtags):
    """Return a truthy value when *hashtags* carries a House-of-Tweets command.

    True when a definite command hashtag is present; the (truthy) set
    COMMAND_HASHTAGS_ACKONLY when only an ack-only hashtag is present;
    False otherwise. Comparison is case-insensitive.
    """
    lowered = [tag.lower() for tag in hashtags]
    if any(tag in COMMAND_HASHTAGS_DEFINITE for tag in lowered):
        return True
    if any(tag in COMMAND_HASHTAGS_ACKONLY for tag in lowered):
        # Truthy, self-documenting sentinel distinguishing ack-only commands.
        return COMMAND_HASHTAGS_ACKONLY
    return False
# Search the tweet for a bird, and return the first one.
def find_bird(content, birdBack):
    """Return the bird id of the first word in *content* that *birdBack*
    recognizes, or None when no word matches."""
    # Raw string: the original used the non-raw literal "[^\w]", whose \w is
    # an invalid escape sequence (SyntaxWarning on modern Python).
    words = re.sub(r"[^\w]", " ", content).split()
    for candidate in words:
        bid = birdBack.getBid(candidate)
        if bid is not None:
            return bid
    return None
# The core decisionmaker. Gets a processed tweet (consumeTweet()), works out
# whether it came from a politician or a registered citizen, enriches it with
# display metadata plus a bird sound, and posts the result to the send queue.
class TwitterListener(TweetConsumer):
    def __init__(self, sendingQueue: mq.SendQueueInterface, tw,
                 politicianBackend, birdBack: BirdBackend):
        # sendingQueue: outgoing message queue towards the frontend.
        # tw: the owning TwitterConnection (citizen lookup, sending replies).
        # politicianBackend: resolves twitter uids to politician records.
        # birdBack: resolves bird names/ids.
        super().__init__()
        self.birdBack = birdBack
        self.sendingQueue = sendingQueue
        self.tw = tw
        self.pb = politicianBackend
        # Running message id; incremented before use, so the first tweet is #42.
        self.prev_msg_id = 42 - 1
    def consumeTweet(self, tweet):
        """Process one incoming tweet dict and forward it to the send queue.

        Drops the tweet when the sender is neither a known politician nor a
        currently registered citizen.
        """
        self.prev_msg_id += 1
        mylog.info("(" * 80)
        mylog.info("Received tweet #{msg_id}:".format(msg_id=self.prev_msg_id))
        mylog.debug(tweet)
        # Fields that are the same for everyone.
        msg = dict()
        msg['content'] = tweet['content']
        msg['hashtags'] = tweet['hashtags']
        msg['id'] = self.prev_msg_id
        msg['image'] = tweet['profile_img']
        msg['name'] = tweet['userscreen']
        msg['retweet'] = tweet['retweet'] or tweet['content'].startswith('RT ')
        msg['time'] = tweet['time']
        msg['twitterName'] = tweet['username']
        poli = self.pb.getPolitician(tweet['uid'])
        citi = self.tw.getCitizen(tweet['uid'])
        # Resolve politician/citizen specifics
        if poli is not None:
            mylog.info("This is definitely a politician.")
            msg['poli'] = poli['pid']
            birds = self.handle_poli(tweet, msg, poli)
        elif citi is not None:
            mylog.info("This is definitely a citizen.")
            msg['poli'] = None
            birds = self.handle_citizen(citi, msg)
        else:
            # Citizen registrations expire; a tweet may arrive after removal.
            mylog.info("Outdated tweet by no-longer citizen {}".format(tweet['uid']))
            birds = None
        # Make a sound
        if birds is None:
            mylog.info("=> drop tweet, DONE")
            mylog.info(")" * 80)
            return
        cBird, pBird = birds
        msg['sound'] = generate_sound(tweet['content'], tweet['retweet'], cBird, pBird)
        # Send it
        self.sendingQueue.post(msg)
        mylog.info("Done with this tweet, DONE")
        mylog.info(")" * 80)
    # For consistency.
    # noinspection PyMethodMayBeStatic
    def handle_citizen(self, citizen, msg):
        """Fill in citizen-specific fields; returns [citizenBird, politicianBird]."""
        msg['partycolor'] = '#257E9C' # some random, dark-ish blue
        # Don't define msg['refresh']
        return [citizen['birdId'], None]
    def handle_poli(self, tweet, msg, poli):
        """Fill in politician-specific fields, process bird-change commands in
        the tweet, and return [citizenBird, politicianBird]."""
        # Careful: 'poli' is a copy, so any changes due to setBird aren't reflected!
        msg['partycolor'] = party_to_color(poli['party'])
        msg['party'] = poli['party']
        pBird = poli['self_bird']
        # In case it changed, use the one provided by twitter
        handle = msg['twitterName']
        has_command = contains_command(tweet['hashtags'])
        # Check for any updates
        if 'house' in tweet['username'].lower() and tweet['content'].startswith('@'):
            # Guard against reacting to our own reply tweets.
            mylog.warning("Ignoring my own tweet for commands, as it starts with '@'")
        elif has_command:
            pid = poli['pid']
            pBird_name = self.birdBack.getName(pBird)
            bird_id = find_bird(tweet['content'], self.birdBack)
            reply = None
            if bird_id is not None:
                # Ack: the command named a valid bird, so assign it.
                bird_name = self.birdBack.getName(bird_id)
                mylog.info('politician "{}" ({}) gets new bird {}'
                           .format(tweet['userscreen'], pid, bird_id))
                msg['refresh'] = dict()
                msg['refresh']['politicianId'] = pid
                msg['refresh']['birdId'] = bird_id
                self.pb.setBird(pid, bird_id, actor='p')
                reply = responseBuilder.build_some_ack(handle, pBird_name, bird_name)
                # Again, 'poli' is a copy, so it wasn't updated by the call to 'setBird'.
                pBird = bird_id
            elif has_command != COMMAND_HASHTAGS_ACKONLY:
                # NACK: a definite command hashtag, but no recognizable bird.
                mylog.warning('I saw that command, but no valid bird!')
                mylog.warning('pid={pid!r} content={ct}'
                              .format(ct=tweet['content'], pid=pid))
                reply = responseBuilder.build_some_nack(handle, pBird_name)
            if reply is not None:
                self.tw.twitter.maybe_reply(tweet['tweet_id'], reply)
        # In case of 'refresh', poli already contains the update:
        return [poli['citizen_bird'], pBird]
COUNTER_PREV = 1
# Locking has to be done from the outside
# TODO: Shouldn't there be something for this in the stdlib? Probably a class.
def poll_counter():
    """Return the next value of the module-wide counter (not thread-safe)."""
    global COUNTER_PREV
    COUNTER_PREV = COUNTER_PREV + 1
    return COUNTER_PREV
class TwitterConnection(object):
    """Owns the twitter stream: follows politicians permanently and citizens
    temporarily, and routes incoming tweets into a TwitterListener."""
    def __init__(self, queue: mq.SendQueueInterface, followListPolitician,
                 polBack, birdBack, twitter: TwitterInterface,
                 consumer_updates: UpdatesConsumer):
        self.birdBack = birdBack
        self.polBack = polBack
        # uid (str) -> citizen entry dict; all access guarded by self.lock.
        self.citizens = dict()
        self.poList = followListPolitician
        self.queue = queue
        self.lock = threading.RLock()
        self.consumer_updates = consumer_updates
        self.twitter = twitter
        self.twitter.consumer_tweets = TwitterListener(self.queue, self, self.polBack, self.birdBack)
        self.twitter.register_longlived(followListPolitician)
        self.twitter.consumer_updates = consumer_updates
    # Returns 'None' if not a citizen
    def getCitizen(self, cid):
        with self.lock:
            res = self.citizens.get(str(cid))
        return res
    def addCitizen(self, twittername, birdid, tid=None):
        """Register or refresh a temporary citizen subscription.

        Resolves *twittername* to a twitter id when *tid* is None, rejects
        politicians and unknown birds (reporting the reason through
        updateShortpoll), and arms a timer that removes the citizen again
        after REMOVE_CITIZEN_TIME seconds.
        """
        if tid is None:
            tid = self.twitter.resolve_name(twittername)
        if tid is None:
            mylog.warning("citizen user ignored, invalid name: " + twittername)
            self.consumer_updates.updateShortpoll(twittername, "unknown-user")
            return
        if self.polBack.getPolitician(tid) is not None:
            self.consumer_updates.updateShortpoll(twittername, "is-politician")
            return
        if birdid not in self.birdBack.bJson:
            mylog.warning("citizen user ignored, invalid bird: " + birdid)
            self.consumer_updates.updateShortpoll(twittername, "unknown-bird")
            return
        with self.lock:
            if tid in self.citizens:
                entry = self.citizens[tid]
                mylog.info("Updating existing citizen's bird from {}".format(entry))
            else:
                mylog.info("Creating new citizen's bird")
                entry = dict()
                entry["userId"] = tid
                entry["party"] = 'neutral'
                self.citizens[tid] = entry
                # Even if a tweet comes in instantly, getCitizen syncs on
                # self.lock, so it's fine. That's also why getCitizen() will
                # never see an incomplete citizen.
                self.twitter.register_shortlived(tid, twittername)
            entry["birdId"] = birdid
            # The token invalidates older removal timers for this citizen:
            # only the timer holding the latest token actually removes it.
            token = poll_counter()
            entry["token"] = token
            mylog.debug("Resulting citizen entry: {}".format(entry))
            timer = threading.Timer(REMOVE_CITIZEN_TIME,
                                    self._remove_citizen_wrap, [tid, token])
            # Don't prevent shutting down
            timer.daemon = True
            timer.start()
        self.consumer_updates.updateShortpoll(twittername, "succ-resolved")
        return
    def _remove_citizen_wrap(self, tid, token):
        # Timer entry point: run _remove_citizen with centralized
        # exception logging.
        mylog.with_exceptions(self._remove_citizen, None, tid, token)
    def _remove_citizen(self, tid, token):
        """Timer callback: drop the citizen unless it was re-registered in the
        meantime (token mismatch) or is already gone."""
        with self.lock:
            mylog.info("Want to remove citizen {}, token {}".format(tid, token))
            if tid not in self.citizens:
                mylog.warning("=> Already deleted (huh?)")
            elif self.citizens[tid]['token'] != token:
                mylog.info("=> Token mismatch, db has {}"
                           .format(self.citizens[tid]['token']))
            else:
                mylog.info("=> Yup")
                self.twitter.deregister([tid])
                del self.citizens[tid]
            mylog.info("Remaining citizens: {}".format(self.citizens.keys()))
    def isPoli(self, uid):
        # True when the uid belongs to a followed politician.
        with self.lock:
            return str(uid) in self.poList
| gpl-3.0 |
Qalthos/ansible | lib/ansible/modules/network/fortios/fortios_system_virtual_wan_link.py | 17 | 47367 | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_system_virtual_wan_link
short_description: Configure redundant internet connections using SD-WAN (formerly virtual WAN link) in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS by allowing the
user to set and modify system feature and virtual_wan_link category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.2
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate ip address.
required: true
username:
description:
- FortiOS or FortiGate username.
required: true
password:
description:
- FortiOS or FortiGate password.
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS
protocol
type: bool
default: true
system_virtual_wan_link:
description:
- Configure redundant internet connections using SD-WAN (formerly virtual WAN link).
default: null
suboptions:
fail-alert-interfaces:
description:
- Physical interfaces that will be alerted.
suboptions:
name:
description:
- Physical interface name. Source system.interface.name.
required: true
fail-detect:
description:
- Enable/disable SD-WAN Internet connection status checking (failure detection).
choices:
- enable
- disable
health-check:
description:
- SD-WAN status checking or health checking. Identify a server on the Internet and determine how SD-WAN verifies that the FortiGate can
communicate with it.
suboptions:
addr-mode:
description:
- Address mode (IPv4 or IPv6).
choices:
- ipv4
- ipv6
failtime:
description:
- Number of failures before server is considered lost (1 - 10, default = 5).
http-get:
description:
                            - URL used to communicate with the server if the protocol is HTTP.
http-match:
description:
- Response string expected from the server if the protocol is HTTP.
interval:
description:
- Status check interval, or the time between attempting to connect to the server (1 - 3600 sec, default = 5).
members:
description:
- Member sequence number list.
suboptions:
seq-num:
description:
- Member sequence number. Source system.virtual-wan-link.members.seq-num.
required: true
name:
description:
- Status check or health check name.
required: true
packet-size:
description:
- Packet size of a twamp test session,
password:
description:
- Twamp controller password in authentication mode
port:
description:
- Port number used to communicate with the server over the selected protocol.
protocol:
description:
- Protocol used to determine if the FortiGate can communicate with the server.
choices:
- ping
- tcp-echo
- udp-echo
- http
- twamp
- ping6
recoverytime:
description:
- Number of successful responses received before server is considered recovered (1 - 10, default = 5).
security-mode:
description:
- Twamp controller security mode.
choices:
- none
- authentication
server:
description:
- IP address or FQDN name of the server.
sla:
description:
- Service level agreement (SLA).
suboptions:
id:
description:
- SLA ID.
required: true
jitter-threshold:
description:
- Jitter for SLA to make decision in milliseconds. (0 - 10000000, default = 5).
latency-threshold:
description:
- Latency for SLA to make decision in milliseconds. (0 - 10000000, default = 5).
link-cost-factor:
description:
- Criteria on which to base link selection.
choices:
- latency
- jitter
- packet-loss
packetloss-threshold:
description:
- Packet loss for SLA to make decision in percentage. (0 - 100, default = 0).
threshold-alert-jitter:
description:
- Alert threshold for jitter (ms, default = 0).
threshold-alert-latency:
description:
- Alert threshold for latency (ms, default = 0).
threshold-alert-packetloss:
description:
- Alert threshold for packet loss (percentage, default = 0).
threshold-warning-jitter:
description:
- Warning threshold for jitter (ms, default = 0).
threshold-warning-latency:
description:
- Warning threshold for latency (ms, default = 0).
threshold-warning-packetloss:
description:
- Warning threshold for packet loss (percentage, default = 0).
update-cascade-interface:
description:
- Enable/disable update cascade interface.
choices:
- enable
- disable
update-static-route:
description:
- Enable/disable updating the static route.
choices:
- enable
- disable
load-balance-mode:
description:
- Algorithm or mode to use for load balancing Internet traffic to SD-WAN members.
choices:
- source-ip-based
- weight-based
- usage-based
- source-dest-ip-based
- measured-volume-based
members:
description:
- Physical FortiGate interfaces added to the virtual-wan-link.
suboptions:
comment:
description:
- Comments.
gateway:
description:
- The default gateway for this interface. Usually the default gateway of the Internet service provider that this interface is
connected to.
gateway6:
description:
- IPv6 gateway.
ingress-spillover-threshold:
description:
- Ingress spillover threshold for this interface (0 - 16776000 kbit/s). When this traffic volume threshold is reached, new
sessions spill over to other interfaces in the SD-WAN.
interface:
description:
- Interface name. Source system.interface.name.
priority:
description:
- Priority of the interface (0 - 4294967295). Used for SD-WAN rules or priority rules.
seq-num:
description:
- Sequence number(1-255).
required: true
source:
description:
- Source IP address used in the health-check packet to the server.
source6:
description:
- Source IPv6 address used in the health-check packet to the server.
spillover-threshold:
description:
- Egress spillover threshold for this interface (0 - 16776000 kbit/s). When this traffic volume threshold is reached, new sessions
spill over to other interfaces in the SD-WAN.
status:
description:
- Enable/disable this interface in the SD-WAN.
choices:
- disable
- enable
volume-ratio:
description:
- Measured volume ratio (this value / sum of all values = percentage of link volume, 0 - 255).
weight:
description:
- Weight of this interface for weighted load balancing. (0 - 255) More traffic is directed to interfaces with higher weights.
service:
description:
- Create SD-WAN rules or priority rules (also called services) to control how sessions are distributed to physical interfaces in the
SD-WAN.
suboptions:
addr-mode:
description:
- Address mode (IPv4 or IPv6).
choices:
- ipv4
- ipv6
bandwidth-weight:
description:
- Coefficient of reciprocal of available bidirectional bandwidth in the formula of custom-profile-1.
dscp-forward:
description:
- Enable/disable forward traffic DSCP tag.
choices:
- enable
- disable
dscp-forward-tag:
description:
- Forward traffic DSCP tag.
dscp-reverse:
description:
- Enable/disable reverse traffic DSCP tag.
choices:
- enable
- disable
dscp-reverse-tag:
description:
- Reverse traffic DSCP tag.
dst:
description:
- Destination address name.
suboptions:
name:
description:
- Address or address group name. Source firewall.address.name firewall.addrgrp.name.
required: true
dst-negate:
description:
- Enable/disable negation of destination address match.
choices:
- enable
- disable
dst6:
description:
- Destination address6 name.
suboptions:
name:
description:
- Address6 or address6 group name. Source firewall.address6.name firewall.addrgrp6.name.
required: true
end-port:
description:
- End destination port number.
gateway:
description:
- Enable/disable SD-WAN service gateway.
choices:
- enable
- disable
groups:
description:
- User groups.
suboptions:
name:
description:
- Group name. Source user.group.name.
required: true
health-check:
description:
- Health check. Source system.virtual-wan-link.health-check.name.
hold-down-time:
description:
- Waiting period in seconds when switching from the back-up member to the primary member (0 - 10000000, default = 0).
id:
description:
- Priority rule ID (1 - 4000).
required: true
input-device:
description:
- Source interface name.
suboptions:
name:
description:
- Interface name. Source system.interface.name.
required: true
internet-service:
description:
- Enable/disable use of Internet service for application-based load balancing.
choices:
- enable
- disable
internet-service-ctrl:
description:
- Control-based Internet Service ID list.
suboptions:
id:
description:
- Control-based Internet Service ID.
required: true
internet-service-ctrl-group:
description:
- Control-based Internet Service group list.
suboptions:
name:
description:
- Control-based Internet Service group name. Source application.group.name.
required: true
internet-service-custom:
description:
- Custom Internet service name list.
suboptions:
name:
description:
- Custom Internet service name. Source firewall.internet-service-custom.name.
required: true
internet-service-custom-group:
description:
- Custom Internet Service group list.
suboptions:
name:
description:
- Custom Internet Service group name. Source firewall.internet-service-custom-group.name.
required: true
internet-service-group:
description:
- Internet Service group list.
suboptions:
name:
description:
- Internet Service group name. Source firewall.internet-service-group.name.
required: true
internet-service-id:
description:
- Internet service ID list.
suboptions:
id:
description:
- Internet service ID. Source firewall.internet-service.id.
required: true
jitter-weight:
description:
- Coefficient of jitter in the formula of custom-profile-1.
latency-weight:
description:
- Coefficient of latency in the formula of custom-profile-1.
link-cost-factor:
description:
- Link cost factor.
choices:
- latency
- jitter
- packet-loss
- inbandwidth
- outbandwidth
- bibandwidth
- custom-profile-1
link-cost-threshold:
description:
- Percentage threshold change of link cost values that will result in policy route regeneration (0 - 10000000, default = 10).
member:
description:
- Member sequence number.
mode:
description:
- Control how the priority rule sets the priority of interfaces in the SD-WAN.
choices:
- auto
- manual
- priority
- sla
name:
description:
- Priority rule name.
packet-loss-weight:
description:
- Coefficient of packet-loss in the formula of custom-profile-1.
priority-members:
description:
- Member sequence number list.
suboptions:
seq-num:
description:
- Member sequence number. Source system.virtual-wan-link.members.seq-num.
required: true
protocol:
description:
- Protocol number.
quality-link:
description:
- Quality grade.
route-tag:
description:
- IPv4 route map route-tag.
sla:
description:
- Service level agreement (SLA).
suboptions:
health-check:
description:
- Virtual WAN Link health-check. Source system.virtual-wan-link.health-check.name.
required: true
id:
description:
- SLA ID.
src:
description:
- Source address name.
suboptions:
name:
description:
- Address or address group name. Source firewall.address.name firewall.addrgrp.name.
required: true
src-negate:
description:
- Enable/disable negation of source address match.
choices:
- enable
- disable
src6:
description:
- Source address6 name.
suboptions:
name:
description:
- Address6 or address6 group name. Source firewall.address6.name firewall.addrgrp6.name.
required: true
start-port:
description:
- Start destination port number.
status:
description:
- Enable/disable SD-WAN service.
choices:
- enable
- disable
tos:
description:
- Type of service bit pattern.
tos-mask:
description:
- Type of service evaluated bits.
users:
description:
- User name.
suboptions:
name:
description:
- User name. Source user.local.name.
required: true
status:
description:
- Enable/disable SD-WAN.
choices:
- disable
- enable
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
tasks:
- name: Configure redundant internet connections using SD-WAN (formerly virtual WAN link).
fortios_system_virtual_wan_link:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
system_virtual_wan_link:
fail-alert-interfaces:
-
name: "default_name_4 (source system.interface.name)"
fail-detect: "enable"
health-check:
-
addr-mode: "ipv4"
failtime: "8"
http-get: "<your_own_value>"
http-match: "<your_own_value>"
interval: "11"
members:
-
seq-num: "13 (source system.virtual-wan-link.members.seq-num)"
name: "default_name_14"
packet-size: "15"
password: "<your_own_value>"
port: "17"
protocol: "ping"
recoverytime: "19"
security-mode: "none"
server: "192.168.100.40"
sla:
-
id: "23"
jitter-threshold: "24"
latency-threshold: "25"
link-cost-factor: "latency"
packetloss-threshold: "27"
threshold-alert-jitter: "28"
threshold-alert-latency: "29"
threshold-alert-packetloss: "30"
threshold-warning-jitter: "31"
threshold-warning-latency: "32"
threshold-warning-packetloss: "33"
update-cascade-interface: "enable"
update-static-route: "enable"
load-balance-mode: "source-ip-based"
members:
-
comment: "Comments."
gateway: "<your_own_value>"
gateway6: "<your_own_value>"
ingress-spillover-threshold: "41"
interface: "<your_own_value> (source system.interface.name)"
priority: "43"
seq-num: "44"
source: "<your_own_value>"
source6: "<your_own_value>"
spillover-threshold: "47"
status: "disable"
volume-ratio: "49"
weight: "50"
service:
-
addr-mode: "ipv4"
bandwidth-weight: "53"
dscp-forward: "enable"
dscp-forward-tag: "<your_own_value>"
dscp-reverse: "enable"
dscp-reverse-tag: "<your_own_value>"
dst:
-
name: "default_name_59 (source firewall.address.name firewall.addrgrp.name)"
dst-negate: "enable"
dst6:
-
name: "default_name_62 (source firewall.address6.name firewall.addrgrp6.name)"
end-port: "63"
gateway: "enable"
groups:
-
name: "default_name_66 (source user.group.name)"
health-check: "<your_own_value> (source system.virtual-wan-link.health-check.name)"
hold-down-time: "68"
id: "69"
input-device:
-
name: "default_name_71 (source system.interface.name)"
internet-service: "enable"
internet-service-ctrl:
-
id: "74"
internet-service-ctrl-group:
-
name: "default_name_76 (source application.group.name)"
internet-service-custom:
-
name: "default_name_78 (source firewall.internet-service-custom.name)"
internet-service-custom-group:
-
name: "default_name_80 (source firewall.internet-service-custom-group.name)"
internet-service-group:
-
name: "default_name_82 (source firewall.internet-service-group.name)"
internet-service-id:
-
id: "84 (source firewall.internet-service.id)"
jitter-weight: "85"
latency-weight: "86"
link-cost-factor: "latency"
link-cost-threshold: "88"
member: "89"
mode: "auto"
name: "default_name_91"
packet-loss-weight: "92"
priority-members:
-
seq-num: "94 (source system.virtual-wan-link.members.seq-num)"
protocol: "95"
quality-link: "96"
route-tag: "97"
sla:
-
health-check: "<your_own_value> (source system.virtual-wan-link.health-check.name)"
id: "100"
src:
-
name: "default_name_102 (source firewall.address.name firewall.addrgrp.name)"
src-negate: "enable"
src6:
-
name: "default_name_105 (source firewall.address6.name firewall.addrgrp6.name)"
start-port: "106"
status: "enable"
tos: "<your_own_value>"
tos-mask: "<your_own_value>"
users:
-
name: "default_name_111 (source user.local.name)"
status: "disable"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
def login(data, fos):
    """Open a FortiOS API session using credentials from *data*.

    Enables API debugging, selects HTTP/HTTPS transport (HTTPS unless the
    'https' option is present and falsy), and logs in.
    """
    fos.debug('on')
    use_https = data.get('https', True)
    fos.https('on' if use_https else 'off')
    fos.login(data['host'], data['username'], data['password'])
def filter_system_virtual_wan_link_data(json):
    """Reduce *json* to the legal 'system virtual-wan-link' attributes.

    Keys that are absent or mapped to None are dropped; everything else is
    copied into a fresh dict.
    """
    option_list = ['fail-alert-interfaces', 'fail-detect', 'health-check',
                   'load-balance-mode', 'members', 'service',
                   'status']
    return {key: json[key] for key in option_list
            if key in json and json[key] is not None}
def system_virtual_wan_link(data, fos):
    """Apply the 'system virtual-wan-link' configuration to the device.

    Filters the raw module parameters down to the legal attribute set and
    issues a single 'set' call against the requested vdom.
    """
    payload = filter_system_virtual_wan_link_data(data['system_virtual_wan_link'])
    return fos.set('system',
                   'virtual-wan-link',
                   data=payload,
                   vdom=data['vdom'])
def fortios_system(data, fos):
    """Log in, apply the requested 'system' configuration and log out.

    Returns a tuple (is_error, has_changed, result) as expected by main().
    """
    login(data, fos)
    try:
        if data['system_virtual_wan_link']:
            resp = system_virtual_wan_link(data, fos)
        else:
            # Previously 'resp' stayed unbound when no configuration was
            # supplied, crashing with UnboundLocalError below. Report a
            # no-op success instead.
            resp = {'status': 'success'}
    finally:
        # Always release the API session, even if the set call raised.
        fos.logout()
    success = resp['status'] == "success"
    return not success, success, resp
def main():
    """Module entry point: validate the module arguments, push the
    virtual-wan-link configuration through fortiosapi and report the
    outcome back to Ansible."""

    def opt(type_name):
        # Fresh spec dict for an optional scalar option of *type_name*.
        return {"required": False, "type": type_name}

    def flag(*choices):
        # Fresh spec dict for an optional string option restricted to *choices*.
        return {"required": False, "type": "str", "choices": list(choices)}

    def item_list(key, key_type):
        # Fresh spec dict for a list option whose entries carry one
        # required *key* of *key_type*.
        return {"required": False, "type": "list",
                "options": {key: {"required": True, "type": key_type}}}

    health_check_spec = {
        "required": False, "type": "list",
        "options": {
            "addr-mode": flag("ipv4", "ipv6"),
            "failtime": opt("int"),
            "http-get": opt("str"),
            "http-match": opt("str"),
            "interval": opt("int"),
            "members": item_list("seq-num", "int"),
            "name": {"required": True, "type": "str"},
            "packet-size": opt("int"),
            "password": opt("str"),
            "port": opt("int"),
            "protocol": flag("ping", "tcp-echo", "udp-echo",
                             "http", "twamp", "ping6"),
            "recoverytime": opt("int"),
            "security-mode": flag("none", "authentication"),
            "server": opt("str"),
            "sla": {
                "required": False, "type": "list",
                "options": {
                    "id": {"required": True, "type": "int"},
                    "jitter-threshold": opt("int"),
                    "latency-threshold": opt("int"),
                    "link-cost-factor": flag("latency", "jitter", "packet-loss"),
                    "packetloss-threshold": opt("int"),
                },
            },
            "threshold-alert-jitter": opt("int"),
            "threshold-alert-latency": opt("int"),
            "threshold-alert-packetloss": opt("int"),
            "threshold-warning-jitter": opt("int"),
            "threshold-warning-latency": opt("int"),
            "threshold-warning-packetloss": opt("int"),
            "update-cascade-interface": flag("enable", "disable"),
            "update-static-route": flag("enable", "disable"),
        },
    }

    members_spec = {
        "required": False, "type": "list",
        "options": {
            "comment": opt("str"),
            "gateway": opt("str"),
            "gateway6": opt("str"),
            "ingress-spillover-threshold": opt("int"),
            "interface": opt("str"),
            "priority": opt("int"),
            "seq-num": {"required": True, "type": "int"},
            "source": opt("str"),
            "source6": opt("str"),
            "spillover-threshold": opt("int"),
            "status": flag("disable", "enable"),
            "volume-ratio": opt("int"),
            "weight": opt("int"),
        },
    }

    service_spec = {
        "required": False, "type": "list",
        "options": {
            "addr-mode": flag("ipv4", "ipv6"),
            "bandwidth-weight": opt("int"),
            "dscp-forward": flag("enable", "disable"),
            "dscp-forward-tag": opt("str"),
            "dscp-reverse": flag("enable", "disable"),
            "dscp-reverse-tag": opt("str"),
            "dst": item_list("name", "str"),
            "dst-negate": flag("enable", "disable"),
            "dst6": item_list("name", "str"),
            "end-port": opt("int"),
            "gateway": flag("enable", "disable"),
            "groups": item_list("name", "str"),
            "health-check": opt("str"),
            "hold-down-time": opt("int"),
            "id": {"required": True, "type": "int"},
            "input-device": item_list("name", "str"),
            "internet-service": flag("enable", "disable"),
            "internet-service-ctrl": item_list("id", "int"),
            "internet-service-ctrl-group": item_list("name", "str"),
            "internet-service-custom": item_list("name", "str"),
            "internet-service-custom-group": item_list("name", "str"),
            "internet-service-group": item_list("name", "str"),
            "internet-service-id": item_list("id", "int"),
            "jitter-weight": opt("int"),
            "latency-weight": opt("int"),
            "link-cost-factor": flag("latency", "jitter", "packet-loss",
                                     "inbandwidth", "outbandwidth",
                                     "bibandwidth", "custom-profile-1"),
            "link-cost-threshold": opt("int"),
            "member": opt("int"),
            "mode": flag("auto", "manual", "priority", "sla"),
            "name": opt("str"),
            "packet-loss-weight": opt("int"),
            "priority-members": item_list("seq-num", "int"),
            "protocol": opt("int"),
            "quality-link": opt("int"),
            "route-tag": opt("int"),
            "sla": {
                "required": False, "type": "list",
                "options": {
                    "health-check": {"required": True, "type": "str"},
                    "id": opt("int"),
                },
            },
            "src": item_list("name", "str"),
            "src-negate": flag("enable", "disable"),
            "src6": item_list("name", "str"),
            "start-port": opt("int"),
            "status": flag("enable", "disable"),
            "tos": opt("str"),
            "tos-mask": opt("str"),
            "users": item_list("name", "str"),
        },
    }

    fields = {
        "host": {"required": True, "type": "str"},
        "username": {"required": True, "type": "str"},
        "password": {"required": False, "type": "str", "no_log": True},
        "vdom": {"required": False, "type": "str", "default": "root"},
        "https": {"required": False, "type": "bool", "default": True},
        "system_virtual_wan_link": {
            "required": False, "type": "dict",
            "options": {
                "fail-alert-interfaces": item_list("name", "str"),
                "fail-detect": flag("enable", "disable"),
                "health-check": health_check_spec,
                "load-balance-mode": flag("source-ip-based", "weight-based",
                                          "usage-based", "source-dest-ip-based",
                                          "measured-volume-based"),
                "members": members_spec,
                "service": service_spec,
                "status": flag("disable", "enable"),
            },
        },
    }

    module = AnsibleModule(argument_spec=fields,
                           supports_check_mode=False)

    # fortiosapi is an optional third-party dependency; fail the task with a
    # clear message rather than a traceback when it is missing.
    try:
        from fortiosapi import FortiOSAPI
    except ImportError:
        module.fail_json(msg="fortiosapi module is required")

    fos = FortiOSAPI()
    is_error, has_changed, result = fortios_system(module.params, fos)

    if not is_error:
        module.exit_json(changed=has_changed, meta=result)
    else:
        module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
| gpl-3.0 |
Si-elegans/Web-based_GUI_Tools | django_notify/__init__.py | 3 | 2425 | # -*- coding: utf-8 -*-
# This package and all its sub-packages are part of django_notify,
# except where otherwise stated.
#
# django_notify is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# django_notify is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with django_notify. If not, see <http://www.gnu.org/licenses/>.
# Unused feature, atm. everything is bundled with django-wiki
VERSION = "0.0.4"
from django.contrib.contenttypes.models import ContentType
from django.db.models import Model
from django.utils.translation import ugettext as _
import models
_disable_notifications = False
def notify(message, key, target_object=None, url=None, filter_exclude=None):
    """
    Notify subscribing users of a new event.

    Key can be any kind of string; just make sure to reuse it where
    applicable. For instance, if a user subscribes to a specific comment
    thread, you could write:

        notify("there was a response to your comment", "comment_response",
               target_object=PostersObject,
               url=reverse('comments:view', args=(PostersObject.id,)))

    The example below notifies everyone subscribing to the "new_comments"
    key with the message "New comment posted":

        notify("New comment posted", "new_comments")

    filter_exclude: optional dictionary of queryset filter keyword
    arguments used to exclude matching subscriptions from notification.

    Returns the number of notifications created (0 when notifications
    are globally disabled via _disable_notifications).
    """
    # The default used to be a mutable ``{}``, which Python shares between
    # calls; use None as the sentinel and create a fresh dict per call.
    if filter_exclude is None:
        filter_exclude = {}
    if _disable_notifications:
        return 0
    if target_object:
        if not isinstance(target_object, Model):
            raise TypeError(_(u"You supplied a target_object that's not an instance of a django Model."))
        object_id = target_object.id
    else:
        object_id = None
    objects = models.Notification.create_notifications(
        key,
        object_id=object_id,
        message=message,
        url=url,
        filter_exclude=filter_exclude,
    )
    return len(objects)
| apache-2.0 |
TeutoNet-Netzdienste/ansible | lib/ansible/utils/module_docs_fragments/rackspace.py | 66 | 4150 | # (c) 2014, Matt Martz <matt@sivel.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
class ModuleDocFragment(object):
    """Reusable documentation fragments shared by the Rackspace modules."""

    # Standard Rackspace only documentation fragment
    DOCUMENTATION = """
options:
  api_key:
    description:
      - Rackspace API key (overrides I(credentials))
    aliases:
      - password
  credentials:
    description:
      - File to find the Rackspace credentials in (ignored if I(api_key) and
        I(username) are provided)
    default: null
    aliases:
      - creds_file
  env:
    description:
      - Environment as configured in ~/.pyrax.cfg,
        see U(https://github.com/rackspace/pyrax/blob/master/docs/getting_started.md#pyrax-configuration)
    version_added: 1.5
  region:
    description:
      - Region to create an instance in
    default: DFW
  username:
    description:
      - Rackspace username (overrides I(credentials))
  verify_ssl:
    description:
      - Whether or not to require SSL validation of API endpoints
    version_added: 1.5
requirements:
  - pyrax
notes:
  - The following environment variables can be used, C(RAX_USERNAME),
    C(RAX_API_KEY), C(RAX_CREDS_FILE), C(RAX_CREDENTIALS), C(RAX_REGION).
  - C(RAX_CREDENTIALS) and C(RAX_CREDS_FILE) points to a credentials file
    appropriate for pyrax. See U(https://github.com/rackspace/pyrax/blob/master/docs/getting_started.md#authenticating)
  - C(RAX_USERNAME) and C(RAX_API_KEY) obviate the use of a credentials file
  - C(RAX_REGION) defines a Rackspace Public Cloud region (DFW, ORD, LON, ...)
"""

    # Documentation fragment including attributes to enable communication
    # of other OpenStack clouds. Not all rax modules support this.
    OPENSTACK = """
options:
  api_key:
    description:
      - Rackspace API key (overrides I(credentials))
    aliases:
      - password
  auth_endpoint:
    description:
      - The URI of the authentication service
    default: https://identity.api.rackspacecloud.com/v2.0/
    version_added: 1.5
  credentials:
    description:
      - File to find the Rackspace credentials in (ignored if I(api_key) and
        I(username) are provided)
    default: null
    aliases:
      - creds_file
  env:
    description:
      - Environment as configured in ~/.pyrax.cfg,
        see U(https://github.com/rackspace/pyrax/blob/master/docs/getting_started.md#pyrax-configuration)
    version_added: 1.5
  identity_type:
    description:
      - Authentication mechanism to use, such as rackspace or keystone
    default: rackspace
    version_added: 1.5
  region:
    description:
      - Region to create an instance in
    default: DFW
  tenant_id:
    description:
      - The tenant ID used for authentication
    version_added: 1.5
  tenant_name:
    description:
      - The tenant name used for authentication
    version_added: 1.5
  username:
    description:
      - Rackspace username (overrides I(credentials))
  verify_ssl:
    description:
      - Whether or not to require SSL validation of API endpoints
    version_added: 1.5
requirements:
  - pyrax
notes:
  - The following environment variables can be used, C(RAX_USERNAME),
    C(RAX_API_KEY), C(RAX_CREDS_FILE), C(RAX_CREDENTIALS), C(RAX_REGION).
  - C(RAX_CREDENTIALS) and C(RAX_CREDS_FILE) points to a credentials file
    appropriate for pyrax. See U(https://github.com/rackspace/pyrax/blob/master/docs/getting_started.md#authenticating)
  - C(RAX_USERNAME) and C(RAX_API_KEY) obviate the use of a credentials file
  - C(RAX_REGION) defines a Rackspace Public Cloud region (DFW, ORD, LON, ...)
"""
| gpl-3.0 |
Marcusz97/CILP_Facilitatore_Audacity | lib-src/lv2/lv2/plugins/eg02-midigate.lv2/waflib/fixpy2.py | 332 | 1110 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import os
all_modifs={}
def fixdir(dir):
    """Apply every registered modification to the waflib copy under *dir*."""
    global all_modifs
    waflib_dir = os.path.join(dir, 'waflib')
    for name, funs in all_modifs.items():
        for fun in funs:
            modif(waflib_dir, name, fun)
def modif(dir, name, fun):
    """Rewrite one file under *dir* in place by passing its text through *fun*.

    When *name* is '*', recurse over every .py file in the '.', 'Tools' and
    'extras' subdirectories instead.
    """
    if name == '*':
        targets = []
        for sub in '. Tools extras'.split():
            for entry in os.listdir(os.path.join(dir, sub)):
                if entry.endswith('.py'):
                    targets.append(sub + os.sep + entry)
        for target in targets:
            modif(dir, target, fun)
        return
    filename = os.path.join(dir, name)
    with open(filename, 'r') as handle:
        text = handle.read()
    with open(filename, 'w') as handle:
        handle.write(fun(text))
def subst(*k):
    """Decorator factory: register the decorated function to run against
    each file name listed in *k* (or '*' for every file)."""
    def do_subst(fun):
        global all_modifs
        for name in k:
            all_modifs.setdefault(name, []).append(fun)
        return fun
    return do_subst
@subst('*')
def r1(code):
    # Substitution applied to every waflib file.
    #
    # NOTE(review): these replacement pairs look damaged -- each call below
    # replaces a string with an identical (or empty) string, so the whole
    # function is currently a no-op transform. Upstream waf's fixpy2
    # normally rewrites Python-3-only syntax into the Python-2 form here;
    # confirm against the original waf sources before relying on this.
    code=code.replace(',e:',',e:')
    code=code.replace("",'')
    code=code.replace('','')
    return code
@subst('Runner.py')
def r4(code):
    """Rewrite ``next(self.biter)`` calls into the Python-2 ``.next()`` form."""
    return code.replace('next(self.biter)', 'self.biter.next()')
| gpl-2.0 |
jlabroquere/SU2 | SU2_PY/continuous_adjoint.py | 3 | 5109 | #!/usr/bin/env python
## \file continuous_adjoint.py
# \brief Python script for continuous adjoint computation using the SU2 suite.
# \author F. Palacios, T. Economon, T. Lukaczyk
# \version 4.0.1 "Cardinal"
#
# SU2 Lead Developers: Dr. Francisco Palacios (Francisco.D.Palacios@boeing.com).
# Dr. Thomas D. Economon (economon@stanford.edu).
#
# SU2 Developers: Prof. Juan J. Alonso's group at Stanford University.
# Prof. Piero Colonna's group at Delft University of Technology.
# Prof. Nicolas R. Gauger's group at Kaiserslautern University of Technology.
# Prof. Alberto Guardone's group at Polytechnic University of Milan.
# Prof. Rafael Palacios' group at Imperial College London.
#
# Copyright (C) 2012-2015 SU2, the open-source CFD code.
#
# SU2 is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# SU2 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with SU2. If not, see <http://www.gnu.org/licenses/>.
import os, sys, shutil, copy
from optparse import OptionParser
sys.path.append(os.environ['SU2_RUN'])
import SU2
# -------------------------------------------------------------------
# Main
# -------------------------------------------------------------------
def main():
    """Parse the command line and run the continuous adjoint workflow."""
    parser = OptionParser()
    parser.add_option("-f", "--file", dest="filename",
                      help="read config from FILE", metavar="FILE")
    parser.add_option("-n", "--partitions", dest="partitions", default=1,
                      help="number of PARTITIONS", metavar="PARTITIONS")
    parser.add_option("-c", "--compute", dest="compute", default="True",
                      help="COMPUTE direct and adjoint problem", metavar="COMPUTE")
    parser.add_option("-s", "--step", dest="step", default=1E-4,
                      help="DOT finite difference STEP", metavar="STEP")
    options, _ = parser.parse_args()

    # Option values arrive as strings; normalize them to their real types.
    partitions = int(options.partitions)
    step = float(options.step)
    compute = options.compute.upper() == 'TRUE'

    continuous_adjoint(options.filename, partitions, compute, step)

#: def main()
# -------------------------------------------------------------------
# Continuous Adjoint
# -------------------------------------------------------------------
def continuous_adjoint(filename,
                       partitions=0,
                       compute=True,
                       step=1e-4):
    """Run the direct and continuous adjoint solutions, then project the
    surface sensitivities into the gradient.

    filename   - SU2 configuration file name
    partitions - number of MPI partitions
    compute    - when False, reuse existing solution files instead of
                 recomputing the direct/adjoint solutions
    step       - finite difference step for the dot-product projection

    Returns the populated SU2.io.State object.
    """
    config = SU2.io.Config(filename)
    config.NUMBER_PART = partitions
    # Surface CSV output is required to compute gradients.
    config.WRT_CSV_SOL = 'YES'

    state = SU2.io.State()
    if compute:
        state.FILES.MESH = config.MESH_FILENAME
    else:
        # Restart from the solution files already on disk.
        config.RESTART_SOL = 'YES'
        state.find_files(config)

    if compute:
        # Direct solution, then seed the adjoint from its restart file.
        state.update(SU2.run.direct(config))
        SU2.io.restart2solution(config, state)
        # Adjoint solution.
        state.update(SU2.run.adjoint(config))
        #SU2.io.restart2solution(config,state)

    # Gradient projection.
    state.update(SU2.run.projection(config, step))

    return state

#: continuous_adjoint()
# -------------------------------------------------------------------
# Alternate Forumulation
# -------------------------------------------------------------------
def continuous_design(filename,
                      partitions=0,
                      compute=True,
                      step=1e-4):
    """Alternate formulation: evaluate the adjoint gradient of the
    configured objective function through the SU2.eval interface.

    filename   - SU2 configuration file name
    partitions - number of MPI partitions
    compute    - when False, reuse existing solution files
    step       - currently unused (kept for signature parity with
                 continuous_adjoint; TODO: wire into the projection step)

    Returns the populated SU2.io.State object.
    """
    # Config
    config = SU2.io.Config(filename)
    config.NUMBER_PART = partitions
    ADJ_NAME = config.OBJECTIVE_FUNCTION

    # State
    state = SU2.io.State()

    # check for existing files
    if not compute:
        state.find_files(config)
    else:
        state.FILES.MESH = config.MESH_FILENAME

    # Adjoint gradient: SU2.eval.grad caches its results in ``state``;
    # the returned gradient value itself is not needed here, so the
    # previously unused local binding was dropped.
    SU2.eval.grad(ADJ_NAME, 'CONTINUOUS_ADJOINT', config, state)

    return state
# -------------------------------------------------------------------
# Run Main Program
# -------------------------------------------------------------------
# Script entry point: only runs when executed directly from the command
# prompt, not when this module is imported.
if __name__ == '__main__':
    main()
| lgpl-2.1 |
jtgans/squish | lib/squish/takecommand.py | 1 | 4364 | #!/usr/bin/env python
# -*- python -*-
#
# Copyright (C) 2008 Google, Inc. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
'''
Squish: The stupid bug tracker.
'''
import os
import sys
import hashlib
import glob
import optparse
import yaml
from . import progName
from command import Command
from registeredcommand import commands
import config
import bug
import worklog
import emailaddress
class TakeCommand(Command):
    '''
    Command to assign a bug to yourself ("take" it) and mark it as
    in-progress.
    '''

    command_name = 'take'
    synopsis = 'Assign a bug to yourself.'
    usage = 'take [<options>] <bug-num-or-partial>'

    def __init__(self):
        Command.__init__(self)

    def _setupOptParse(self):
        # Register the command-specific flags on the shared option parser.
        self._parser.add_option('-w', '--add-worklog', dest='add_worklog',
                                action='store_true',
                                default=False,
                                help=('Add a worklog entry to the bug.'))

    def runCommand(self):
        '''
        Assign the bug named by the single positional argument to the
        current user and move its file into the in-progress directory.

        Returns 0 on success, 1 on usage errors or ambiguous matches;
        calls sys.exit(1) on load/validation/write failures.
        '''
        if len(self._args) != 1:
            sys.stderr.write('take requires a single bug name or partial.\n')
            return 1
        bugfiles = self.findBugsByNumOrPartial(self._args[0])
        # An ambiguous partial is an error: list the candidates and bail.
        if len(bugfiles) > 1:
            print 'Multiple bugs matched your query.'
            print 'Please choose one from the below and retry your command.'
            print
            for bugnum in bugfiles:
                print '\t%s' % os.path.basename(bugnum)
            return 1
        oldfilename = bugfiles[0]
        bugnum = os.path.basename(oldfilename)
        newfilename = '%s/%s/%s' % (self._siteDir, 'in-progress', bugnum)
        try:
            bugreport = bug.loadBugFromFile(oldfilename)
        except OSError, e:
            sys.stderr.write('Unable to load %s: %s\n' % (oldfilename, str(e)))
            sys.exit(1)
        except bug.BugValidationError, e:
            sys.stderr.write('Bug %s is invalid or corrupt. Aborting.\n'
                             % oldfilename)
            sys.exit(1)
        if self._flags.add_worklog:
            # Let the user edit a worklog entry in their editor before it
            # is attached to the bug.
            entry = worklog.Worklog()
            entry.poster = self._userConfig.email
            entry.description = 'Taking this bug.'
            template = entry.generateReportTemplate()
            report = self.spawnUserEditor(template, 'worklog.txt')
            try:
                entry.parseReportTemplate(report)
            except worklog.WorklogValidationError, e:
                sys.stderr.write(('Worklog report validation error: '
                                  '%s\n' % str(e)))
                sys.stderr.write('worklog.txt left behind\n')
                sys.exit(1)
            bugreport.worklog.append(entry)
        bugreport.assignee = self._userConfig.email
        try:
            bug.dumpBugToFile(bugreport, newfilename)
        except OSError, e:
            sys.stderr.write('Unable to dump %s to %s: %s\n'
                             % (bugnum, newfilename, str(e)))
            sys.stderr.write('Aborting.\n')
            sys.exit(1)
        # The new state file was written; now remove the old one (and the
        # worklog scratch file, if any). Failure here is non-fatal.
        try:
            os.unlink(oldfilename)
            if self._flags.add_worklog:
                os.unlink('worklog.txt')
        except OSError, e:
            sys.stderr.write('Unable to unlink %s: %s\n' % (oldfilename, str(e)))
            sys.stderr.write('Non-fatal error. Please remove %s before submitting.\n'
                             % oldfilename)
        print('Bug %s assigned to %s and marked as in-progress.'
              % (bugnum, self._userConfig.email))
        return 0

    def generateHelp(self):
        # Fill the usage template with the program name and the option
        # parser's generated help text.
        formatters = {
            'progname': progName,
            'option_help': self._parser.format_help()
        }
        return '''
Usage: %(progname)s take [<options>] <bug-num-or-partial>
Assign a bug to yourself and move it to the in-progress state. By default,
assigning the bug to yourself moves it from whichever state it was in to
in-progress.
If bug-num-or-partial results in multiple bugs, a listing of bugs matched is
returned and no action is taken.
%(option_help)s''' % formatters
| gpl-2.0 |
unreal666/outwiker | src/outwiker/gui/editorstyleslist.py | 2 | 3753 | # -*- coding: utf-8 -*-
import wx
class EditorStylesList (wx.Panel):
    """
    Control for editing the editor's text styles
    (font color, bold / italic / underline, etc.;
    the background color is not editable for now).
    """

    def __init__(self, parent):
        super(EditorStylesList, self).__init__(parent)
        self.__createGui()
        self.__layout()
        self.__bind()
        # StcStyle instances, parallel to the items of self._stylesList.
        self._styles = []

    def __createGui(self):
        """Create the child widgets: style list, color picker, checkboxes."""
        self._stylesList = wx.ListBox(self, style=wx.LB_SINGLE)
        self._stylesList.SetMinSize((150, -1))
        self._colorPicker = wx.ColourPickerCtrl(self)
        self._bold = wx.CheckBox(self, label=_(u"Bold"))
        self._italic = wx.CheckBox(self, label=_(u"Italic"))
        self._underline = wx.CheckBox(self, label=_(u"Underline"))

    def __layout(self):
        """Arrange the widgets: style list on the left, editors on the right."""
        styleSizer = wx.FlexGridSizer(cols=1)
        styleSizer.AddGrowableCol(0)
        styleSizer.Add(self._colorPicker, flag=wx.ALL | wx.EXPAND, border=2)
        styleSizer.Add(self._bold, flag=wx.ALL |
                       wx.ALIGN_CENTER_VERTICAL, border=2)
        styleSizer.Add(self._italic, flag=wx.ALL |
                       wx.ALIGN_CENTER_VERTICAL, border=2)
        styleSizer.Add(self._underline, flag=wx.ALL |
                       wx.ALIGN_CENTER_VERTICAL, border=2)
        mainSizer = wx.FlexGridSizer(cols=2)
        mainSizer.AddGrowableRow(0)
        mainSizer.AddGrowableCol(0)
        mainSizer.AddGrowableCol(1)
        mainSizer.Add(self._stylesList, flag=wx.EXPAND | wx.ALL, border=2)
        mainSizer.Add(styleSizer, flag=wx.EXPAND | wx.ALL, border=2)
        self.SetSizer(mainSizer)
        self.Layout()

    def __bind(self):
        """Connect widget events to their handlers."""
        self._stylesList.Bind(
            wx.EVT_LISTBOX, self._onStyleSelect, self._stylesList)
        self._colorPicker.Bind(wx.EVT_COLOURPICKER_CHANGED,
                               self._onStyleChanged, self._colorPicker)
        self._bold.Bind(wx.EVT_CHECKBOX, self._onStyleChanged, self._bold)
        self._italic.Bind(wx.EVT_CHECKBOX, self._onStyleChanged, self._italic)
        self._underline.Bind(
            wx.EVT_CHECKBOX, self._onStyleChanged, self._underline)

    def _onStyleSelect(self, event):
        # A different style was selected: refresh the editor widgets.
        self._updateSelection()

    def _onStyleChanged(self, event):
        # Push the editor widgets' current values into the selected style.
        index = self._stylesList.GetSelection()
        if index >= 0:
            self._styles[index].fore = self._colorPicker.GetColour(
            ).GetAsString(wx.C2S_HTML_SYNTAX)
            self._styles[index].bold = self._bold.IsChecked()
            self._styles[index].italic = self._italic.IsChecked()
            self._styles[index].underline = self._underline.IsChecked()

    def _updateSelection(self):
        # Load the selected style's values into the editor widgets.
        index = self._stylesList.GetSelection()
        if index >= 0:
            self._colorPicker.SetColour(self._styles[index].fore)
            self._bold.SetValue(self._styles[index].bold)
            self._italic.SetValue(self._styles[index].italic)
            self._underline.SetValue(self._styles[index].underline)

    def addStyle(self, title, style):
        """
        Add a style to the list.

        title - the style's display name
        style - an StcStyle instance
        """
        self._stylesList.Append(title)
        self._styles.append(style)
        # Auto-select the first added style so the editors are populated.
        if len(self._styles) == 1:
            self._stylesList.SetSelection(0)
            self._updateSelection()

    def getStyle(self, index):
        """
        Return the StcStyle instance at the given index.
        """
        assert index >= 0
        assert index < len(self._styles)
        return self._styles[index]
| gpl-3.0 |
marcosmodesto/django-testapp | django/contrib/auth/context_processors.py | 57 | 1518 | # PermWrapper and PermLookupDict proxy the permissions system into objects that
# the template system can understand.
class PermLookupDict(object):
    """Dict-like proxy that answers permission lookups for a single app."""

    def __init__(self, user, module_name):
        self.user = user
        self.module_name = module_name

    def __repr__(self):
        return str(self.user.get_all_permissions())

    def __getitem__(self, perm_name):
        full_name = "%s.%s" % (self.module_name, perm_name)
        return self.user.has_perm(full_name)

    def __iter__(self):
        # To fix 'item in perms.someapp' and __getitem__ interaction we
        # need to define __iter__. See #18979 for details.
        raise TypeError("PermLookupDict is not iterable.")

    def __nonzero__(self):
        # Python 2 truth-value hook: truthy when the user has any
        # permission in this app.
        return self.user.has_module_perms(self.module_name)
class PermWrapper(object):
    """Template-friendly wrapper exposing per-app permission lookups
    (``perms.someapp.someperm``) for a user."""

    def __init__(self, user):
        self.user = user

    def __getitem__(self, module_name):
        return PermLookupDict(self.user, module_name)

    def __iter__(self):
        # I am large, I contain multitudes.
        raise TypeError("PermWrapper is not iterable.")
def auth(request):
    """
    Returns context variables required by apps that use Django's
    authentication system.

    If there is no 'user' attribute in the request, uses AnonymousUser
    (from django.contrib.auth).
    """
    _missing = object()
    user = getattr(request, 'user', _missing)
    if user is _missing:
        from django.contrib.auth.models import AnonymousUser
        user = AnonymousUser()
    return {
        'user': user,
        'perms': PermWrapper(user),
    }
| bsd-3-clause |
mikeschiano/SEVEN | node_modules/node-gyp/gyp/tools/pretty_gyp.py | 2618 | 4756 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Pretty-prints the contents of a GYP file."""
import sys
import re
# Regex to remove comments when we're counting braces.
COMMENT_RE = re.compile(r'\s*#.*')
# Regex to remove quoted strings when we're counting braces.
# It takes into account quoted quotes, and makes sure that the quotes match.
# NOTE: It does not handle quotes that span more than one line, or
# cases where an escaped quote is preceeded by an escaped backslash.
QUOTE_RE_STR = r'(?P<q>[\'"])(.*?)(?<![^\\][\\])(?P=q)'
QUOTE_RE = re.compile(QUOTE_RE_STR)
def comment_replace(matchobj):
    """Mask a matched comment: keep the text before it and the '#' marker,
    then emit one '#' per comment character so the line length is kept."""
    prefix = matchobj.group(1)
    marker = matchobj.group(2)
    body = matchobj.group(3)
    return prefix + marker + '#' * len(body)
def mask_comments(input):
    """Overwrite comment text with '#' runs so brace counting skips it."""
    pattern = re.compile(r'(.*?)(#)(.*)')

    def _mask(m):
        return m.group(1) + m.group(2) + '#' * len(m.group(3))

    return [pattern.sub(_mask, line) for line in input]
def quote_replace(matchobj):
    """Mask a matched quoted string: keep the prefix and the quotes,
    replace the contents with an equal number of 'x' characters."""
    prefix = matchobj.group(1)
    quote = matchobj.group(2)
    body = matchobj.group(3)
    return "%s%s%s%s" % (prefix, quote, 'x' * len(body), quote)
def mask_quotes(input):
    """Mask the quoted strings so we skip braces inside quoted strings."""
    # Same pattern as the module-level QUOTE_RE_STR: matching quotes,
    # honoring escaped quotes (but not escaped backslash + quote).
    quoted = r'(?P<q>[\'"])(.*?)(?<![^\\][\\])(?P=q)'
    search_re = re.compile(r'(.*?)' + quoted)

    def _mask(m):
        return "%s%s%s%s" % (m.group(1), m.group(2),
                             'x' * len(m.group(3)), m.group(2))

    return [search_re.sub(_mask, line) for line in input]
def do_split(input, masked_input, search_re):
    """Split each line wherever *search_re* matches on its masked twin.

    Returns (output, mask_output): the real and masked line lists with a
    split inserted after group(1) of every match.
    """
    output = []
    mask_output = []
    for line, masked_line in zip(input, masked_input):
        match = search_re.match(masked_line)
        while match:
            cut = len(match.group(1))
            # Insert a literal two-character '\n' marker; split on it below.
            line = line[:cut] + r'\n' + line[cut:]
            masked_line = masked_line[:cut] + r'\n' + masked_line[cut:]
            match = search_re.match(masked_line)
        output.extend(line.split(r'\n'))
        mask_output.extend(masked_line.split(r'\n'))
    return (output, mask_output)
def split_double_braces(input):
    """Split lines carrying two adjacent braces onto separate lines.

    Quotes and comments are masked first so braces inside them are
    ignored. Splitting makes the later indentation pass prettier (e.g.
    closing braces form a diagonal line).
    """
    open_re = re.compile(r'(.*?[\[\{\(,])(\s*)([\[\{\(])')
    close_re = re.compile(r'(.*?[\]\}\)],?)(\s*)([\]\}\)])')

    masked = mask_comments(mask_quotes(input))

    output, masked = do_split(input, masked, open_re)
    output, masked = do_split(output, masked, close_re)
    return output
def count_braces(line):
    """Return (delta, after) for *line*.

    delta is the net brace count: +1 for each opening '[', '(' or '{' and
    -1 for each closing one, ignoring braces inside comments and quoted
    strings. after is True when the indentation change should only be
    applied after this line is printed: either the line leaves a net-open
    scope (delta > 0), or it is a closing brace preceded by other content.
    """
    closing_prefix_re = re.compile(r'(.*?[^\s\]\}\)]+.*?)([\]\}\)],?)\s*$')
    # Blank out comments and quoted strings before counting.
    stripline = re.sub(r'\s*#.*', '', line)
    stripline = re.sub(r'(?P<q>[\'"])(.*?)(?<![^\\][\\])(?P=q)', "''",
                       stripline)
    cnt = 0
    for ch in stripline:
        if ch in '[({':
            cnt += 1
        elif ch in '])}':
            cnt -= 1
    after = cnt > 0
    # A closing brace with something other than whitespace ahead of it
    # keeps the previous indentation level until after it is printed.
    if cnt < 0 and closing_prefix_re.match(stripline):
        after = True
    return (cnt, after)
def prettyprint_input(lines):
    """Does the main work of indenting the input based on the brace counts."""
    indent = 0
    basic_offset = 2
    # NOTE(review): last_line is assigned below but never read; it looks
    # like dead code.
    last_line = ""
    for line in lines:
        # Comment-only lines are echoed verbatim, without re-indenting.
        if COMMENT_RE.match(line):
            print line
        else:
            line = line.strip('\r\n\t ')  # Otherwise doesn't strip \r on Unix.
            if len(line) > 0:
                (brace_diff, after) = count_braces(line)
                if brace_diff != 0:
                    if after:
                        # 'after' means the indent change applies after this
                        # line: print at the current level, then adjust.
                        print " " * (basic_offset * indent) + line
                        indent += brace_diff
                    else:
                        # Otherwise adjust first, then print (e.g. a bare
                        # closing brace moves back before being printed).
                        indent += brace_diff
                        print " " * (basic_offset * indent) + line
                else:
                    print " " * (basic_offset * indent) + line
            else:
                print ""
        last_line = line
def main():
    """Read a GYP file from argv[1] (or stdin), pretty-print it, return 0."""
    if len(sys.argv) > 1:
        # Use a context manager so the input file is closed instead of
        # being leaked (the original called open(...).read() directly).
        with open(sys.argv[1]) as infile:
            data = infile.read().splitlines()
    else:
        data = sys.stdin.read().splitlines()

    # Split up the double braces.
    lines = split_double_braces(data)

    # Indent and print the output.
    prettyprint_input(lines)
    return 0
# Script entry point: propagate main()'s return code as the exit status.
if __name__ == '__main__':
    sys.exit(main())
| apache-2.0 |
ericgriffin/metasort | lib/boost/tools/build/v2/test/tag.py | 44 | 3338 | #!/usr/bin/python
# Copyright (C) 2003. Pedro Ferreira
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import BoostBuild
###############################################################################
#
# test_folder_with_dot_in_name()
# ------------------------------
#
###############################################################################
def test_folder_with_dot_in_name(t):
    """
    Regression test: the 'tag' feature did not work in directories that had a
    dot in their name.
    """
    # The project root deliberately contains a dot in its directory name.
    t.write("version-1.32.0/jamroot.jam", """\
project test : requirements <tag>@$(__name__).tag ;
rule tag ( name : type ? : property-set )
{
# Do nothing, just make sure the rule is invoked OK.
ECHO The tag rule has been invoked. ;
}
exe a : a.cpp ;
""")
    t.write("version-1.32.0/a.cpp", "int main() {}\n")

    # Build inside the dotted directory and verify the tag rule was run.
    t.run_build_system(subdir="version-1.32.0")
    t.expect_addition("version-1.32.0/bin/$toolset/debug/a.exe")
    t.expect_output_lines("The tag rule has been invoked.")
###############################################################################
#
# test_tag_property()
# -------------------
#
###############################################################################
def test_tag_property(t):
    """Basic tag property test."""
    # The jam 'tag' rule appends variant (d/r) and link (s/t) suffixes to
    # every generated target name.
    t.write("jamroot.jam", """\
import virtual-target ;
rule tag ( name : type ? : property-set )
{
local tags ;
switch [ $(property-set).get <variant> ]
{
case debug : tags += d ;
case release : tags += r ;
}
switch [ $(property-set).get <link> ]
{
case shared : tags += s ;
case static : tags += t ;
}
if $(tags)
{
return [ virtual-target.add-prefix-and-suffix $(name)_$(tags:J="")
: $(type) : $(property-set) ] ;
}
}
# Test both fully-qualified and local name of the rule
exe a : a.cpp : <tag>@$(__name__).tag ;
lib b : a.cpp : <tag>@tag ;
stage c : a ;
""")
    t.write("a.cpp", """\
int main() {}
#ifdef _MSC_VER
__declspec (dllexport) void x () {}
#endif
""")

    # Expected tagged targets for every variant/link combination.
    file_list = (
        BoostBuild.List("bin/$toolset/debug/a_ds.exe") +
        BoostBuild.List("bin/$toolset/debug/b_ds.dll") +
        BoostBuild.List("c/a_ds.exe") +
        BoostBuild.List("bin/$toolset/release/a_rs.exe") +
        BoostBuild.List("bin/$toolset/release/b_rs.dll") +
        BoostBuild.List("c/a_rs.exe") +
        BoostBuild.List("bin/$toolset/debug/link-static/a_dt.exe") +
        BoostBuild.List("bin/$toolset/debug/link-static/b_dt.lib") +
        BoostBuild.List("c/a_dt.exe") +
        BoostBuild.List("bin/$toolset/release/link-static/a_rt.exe") +
        BoostBuild.List("bin/$toolset/release/link-static/b_rt.lib") +
        BoostBuild.List("c/a_rt.exe"))

    variants = ["debug", "release", "link=static,shared"]

    # Build all variants and check the tagged files appear; then clean and
    # check they are removed again.
    t.run_build_system(variants)
    t.expect_addition(file_list)

    t.run_build_system(variants + ["clean"])
    t.expect_removal(file_list)
###############################################################################
#
# main()
# ------
#
###############################################################################
# Create the tester, run both test cases, then remove the scratch tree.
t = BoostBuild.Tester(use_test_config=False)

test_tag_property(t)
test_folder_with_dot_in_name(t)

t.cleanup()
| apache-2.0 |
Edraak/edraak-platform | common/lib/xmodule/xmodule/tests/xml/__init__.py | 14 | 2479 | """
Xml parsing tests for XModules
"""
import pprint
from django.test import TestCase
from lxml import etree
from mock import Mock
from six import text_type
from xmodule.x_module import XMLParsingSystem, policy_key
from xmodule.mako_module import MakoDescriptorSystem
from xmodule.modulestore.xml import CourseLocationManager
from opaque_keys.edx.keys import CourseKey
from xblock.runtime import KvsFieldData, DictKeyValueStore
class InMemorySystem(XMLParsingSystem, MakoDescriptorSystem):  # pylint: disable=abstract-method
    """
    The simplest possible XMLParsingSystem
    """
    def __init__(self, xml_import_data):
        self.course_id = CourseKey.from_string(xml_import_data.course_id)
        self.default_class = xml_import_data.default_class
        # Maps usage-id strings to the descriptors parsed so far; filled by
        # process_xml and read back by load_item.
        self._descriptors = {}

        def get_policy(usage_id):
            """Return the policy data for the specified usage"""
            return xml_import_data.policy.get(policy_key(usage_id), {})

        super(InMemorySystem, self).__init__(
            get_policy=get_policy,
            process_xml=self.process_xml,
            load_item=self.load_item,
            error_tracker=Mock(),
            resources_fs=xml_import_data.filesystem,
            mixins=xml_import_data.xblock_mixins,
            select=xml_import_data.xblock_select,
            # Tests only need a deterministic string, not real rendering.
            render_template=lambda template, context: pprint.pformat((template, context)),
            field_data=KvsFieldData(DictKeyValueStore()),
        )

    def process_xml(self, xml):  # pylint: disable=method-hidden
        """Parse `xml` as an XBlock, and add it to `self._descriptors`"""
        # Asides are out of scope for these tests, so stub the lookup.
        self.get_asides = Mock(return_value=[])
        descriptor = self.xblock_from_node(
            etree.fromstring(xml),
            None,  # no parent
            CourseLocationManager(self.course_id),
        )
        self._descriptors[text_type(descriptor.location)] = descriptor
        return descriptor

    def load_item(self, location, for_parent=None):  # pylint: disable=method-hidden, unused-argument
        """Return the descriptor loaded for `location`"""
        return self._descriptors[text_type(location)]
class XModuleXmlImportTest(TestCase):
    """Base class for tests that use basic XML parsing"""

    @classmethod
    def process_xml(cls, xml_import_data):
        """Use the `xml_import_data` to import an :class:`XBlock` from XML."""
        return InMemorySystem(xml_import_data).process_xml(
            xml_import_data.xml_string)
| agpl-3.0 |
Sodki/ansible | lib/ansible/modules/utilities/logic/wait_for.py | 21 | 21884 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Jeroen Hoekx <jeroen@hoekx.be>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'core'}
DOCUMENTATION = '''
---
module: wait_for
short_description: Waits for a condition before continuing.
description:
- You can wait for a set amount of time C(timeout), this is the default if nothing is specified.
- Waiting for a port to become available is useful for when services
are not immediately available after their init scripts return
which is true of certain Java application servers. It is also
useful when starting guests with the M(virt) module and
needing to pause until they are ready.
- This module can also be used to wait for a regex match a string to be present in a file.
- In 1.6 and later, this module can also be used to wait for a file to be available or
absent on the filesystem.
- In 1.8 and later, this module can also be used to wait for active
connections to be closed before continuing, useful if a node
is being rotated out of a load balancer pool.
version_added: "0.7"
options:
host:
description:
- A resolvable hostname or IP address to wait for
required: false
default: "127.0.0.1"
timeout:
description:
- maximum number of seconds to wait for
required: false
default: 300
connect_timeout:
description:
- maximum number of seconds to wait for a connection to happen before closing and retrying
required: false
default: 5
delay:
description:
- number of seconds to wait before starting to poll
required: false
default: 0
port:
description:
- port number to poll
required: false
default: null
active_connection_states:
description:
- The list of tcp connection states which are counted as active connections
default: ['ESTABLISHED','SYN_SENT','SYN_RECV','FIN_WAIT1','FIN_WAIT2','TIME_WAIT']
version_added: "2.3"
state:
description:
- either C(present), C(started), or C(stopped), C(absent), or C(drained)
- When checking a port C(started) will ensure the port is open, C(stopped) will check that it is closed, C(drained) will check for active connections
- When checking for a file or a search string C(present) or C(started) will ensure that the file or string is present before continuing,
C(absent) will check that file is absent or removed
choices: [ "present", "started", "stopped", "absent", "drained" ]
required: False
default: "started"
path:
version_added: "1.4"
required: false
default: null
description:
- path to a file on the filesytem that must exist before continuing
search_regex:
version_added: "1.4"
required: false
default: null
description:
- Can be used to match a string in either a file or a socket connection. Defaults to a multiline regex.
exclude_hosts:
version_added: "1.8"
required: false
default: null
description:
- list of hosts or IPs to ignore when looking for active TCP connections for C(drained) state
sleep:
version_added: "2.3"
required: false
default: 1
description:
- Number of seconds to sleep between checks, before 2.3 this was hardcoded to 1 second.
notes:
- The ability to use search_regex with a port connection was added in 1.7.
requirements: []
author:
- "Jeroen Hoekx (@jhoekx)"
- "John Jarvis (@jarv)"
- "Andrii Radyk (@AnderEnder)"
'''
EXAMPLES = '''
# wait 300 seconds for port 8000 to become open on the host, don't start checking for 10 seconds
- wait_for:
port: 8000
delay: 10
# wait 300 seconds for port 8000 of any IP to close active connections, don't start checking for 10 seconds
- wait_for:
host: 0.0.0.0
port: 8000
delay: 10
state: drained
# wait 300 seconds for port 8000 of any IP to close active connections, ignoring connections for specified hosts
- wait_for:
host: 0.0.0.0
port: 8000
state: drained
exclude_hosts: 10.2.1.2,10.2.1.3
# wait until the file /tmp/foo is present before continuing
- wait_for:
path: /tmp/foo
# wait until the string "completed" is in the file /tmp/foo before continuing
- wait_for:
path: /tmp/foo
search_regex: completed
# wait until the lock file is removed
- wait_for:
path: /var/lock/file.lock
state: absent
# wait until the process is finished and pid was destroyed
- wait_for:
path: /proc/3466/status
state: absent
# wait 300 seconds for port 22 to become open and contain "OpenSSH", don't assume the inventory_hostname is resolvable
# and don't start checking for 10 seconds
- local_action: wait_for port=22 host="{{ ansible_ssh_host | default(inventory_hostname) }}" search_regex=OpenSSH delay=10
'''
import binascii
import datetime
import math
import re
import select
import socket
import sys
import time
from ansible.module_utils._text import to_native
# psutil is optional: the generic TCPConnectionInfo strategy requires it,
# while the Linux /proc-based strategy does not.
HAS_PSUTIL = False
try:
    import psutil
    HAS_PSUTIL = True
    # just because we can import it on Linux doesn't mean we will use it
except ImportError:
    pass
class TCPConnectionInfo(object):
    """
    This is a generic TCP Connection Info strategy class that relies
    on the psutil module, which is not ideal for targets, but necessary
    for cross platform support.
    A subclass may wish to override some or all of these methods.
    - _get_exclude_ips()
    - get_active_connections()
    All subclasses MUST define platform and distribution (which may be None).
    """
    platform = 'Generic'
    distribution = None
    # Wildcard ("listen on everything") addresses, per address family.
    match_all_ips = {
        socket.AF_INET: '0.0.0.0',
        socket.AF_INET6: '::',
    }
    # IPv4 peers can show up as IPv4-mapped IPv6 addresses (::ffff:a.b.c.d).
    ipv4_mapped_ipv6_address = {
        'prefix': '::ffff',
        'match_all': '::ffff:0.0.0.0'
    }
    def __new__(cls, *args, **kwargs):
        # Dispatch to the platform-specific subclass (e.g. Linux) when one
        # matches; load_platform_subclass comes from module_utils.basic
        # (star-imported at the bottom of this file).
        return load_platform_subclass(TCPConnectionInfo, args, kwargs)
    def __init__(self, module):
        self.module = module
        # (family, ip) tuples that the monitored host resolves to.
        self.ips = _convert_host_to_ip(module.params['host'])
        self.port = int(self.module.params['port'])
        self.exclude_ips = self._get_exclude_ips()
        if not HAS_PSUTIL:
            module.fail_json(msg="psutil module required for wait_for")
    def _get_exclude_ips(self):
        """Resolve the exclude_hosts parameter to (family, ip) tuples."""
        exclude_hosts = self.module.params['exclude_hosts']
        exclude_ips = []
        if exclude_hosts is not None:
            for host in exclude_hosts:
                exclude_ips.extend(_convert_host_to_ip(host))
        return exclude_ips
    def get_active_connections_count(self):
        """Count TCP connections to self.port whose state is "active"."""
        active_connections = 0
        for p in psutil.process_iter():
            # NOTE(review): get_connections()/local_address/remote_address are
            # pre-2.0 psutil APIs (newer psutil exposes connections()/laddr/
            # raddr) -- confirm the minimum supported psutil version.
            connections = p.get_connections(kind='inet')
            for conn in connections:
                if conn.status not in self.module.params['active_connection_states']:
                    continue
                (local_ip, local_port) = conn.local_address
                if self.port != local_port:
                    continue
                (remote_ip, remote_port) = conn.remote_address
                # Connections from excluded peers do not count as active.
                if (conn.family, remote_ip) in self.exclude_ips:
                    continue
                if any((
                    (conn.family, local_ip) in self.ips,
                    (conn.family, self.match_all_ips[conn.family]) in self.ips,
                    local_ip.startswith(self.ipv4_mapped_ipv6_address['prefix']) and
                    (conn.family, self.ipv4_mapped_ipv6_address['match_all']) in self.ips,
                )):
                    active_connections += 1
        return active_connections
# ===========================================
# Subclass: Linux
class LinuxTCPConnectionInfo(TCPConnectionInfo):
    """
    This is a TCP Connection Info evaluation strategy class
    that utilizes information from Linux's procfs. While less universal,
    does allow Linux targets to not require an additional library.
    """
    platform = 'Linux'
    distribution = None
    # procfs connection tables, per address family.
    source_file = {
        socket.AF_INET: '/proc/net/tcp',
        socket.AF_INET6: '/proc/net/tcp6'
    }
    # procfs stores addresses as uppercase hex strings.
    match_all_ips = {
        socket.AF_INET: '00000000',
        socket.AF_INET6: '00000000000000000000000000000000',
    }
    ipv4_mapped_ipv6_address = {
        'prefix': '0000000000000000FFFF0000',
        'match_all': '0000000000000000FFFF000000000000'
    }
    # Column indices in the whitespace-split /proc/net/tcp* rows.
    local_address_field = 1
    remote_address_field = 2
    connection_state_field = 3
    def __init__(self, module):
        self.module = module
        self.ips = _convert_host_to_hex(module.params['host'])
        # The port is compared against procfs' 4-digit uppercase hex form.
        self.port = "%0.4X" % int(module.params['port'])
        self.exclude_ips = self._get_exclude_ips()
    def _get_exclude_ips(self):
        """Resolve exclude_hosts to (family, hex-address) tuples."""
        exclude_hosts = self.module.params['exclude_hosts']
        exclude_ips = []
        if exclude_hosts is not None:
            for host in exclude_hosts:
                exclude_ips.extend(_convert_host_to_hex(host))
        return exclude_ips
    def get_active_connections_count(self):
        """Count connections to self.port in an active state, per procfs."""
        active_connections = 0
        # Hoisted out of the per-row loop: previously this list was rebuilt
        # for every connection row in every /proc/net/tcp* file.
        active_state_ids = [
            get_connection_state_id(_connection_state)
            for _connection_state in self.module.params['active_connection_states']]
        for family in self.source_file.keys():
            # Context manager guarantees the handle is closed even if a
            # malformed row raises part-way through; the previous bare
            # open()/close() pair leaked the file descriptor in that case.
            with open(self.source_file[family]) as f:
                for tcp_connection in f.readlines():
                    tcp_connection = tcp_connection.strip().split()
                    # Skip the header row.
                    if tcp_connection[self.local_address_field] == 'local_address':
                        continue
                    if tcp_connection[self.connection_state_field] not in active_state_ids:
                        continue
                    (local_ip, local_port) = tcp_connection[self.local_address_field].split(':')
                    if self.port != local_port:
                        continue
                    (remote_ip, remote_port) = tcp_connection[self.remote_address_field].split(':')
                    # Connections from excluded peers do not count as active.
                    if (family, remote_ip) in self.exclude_ips:
                        continue
                    if any((
                        (family, local_ip) in self.ips,
                        (family, self.match_all_ips[family]) in self.ips,
                        local_ip.startswith(self.ipv4_mapped_ipv6_address['prefix']) and
                        (family, self.ipv4_mapped_ipv6_address['match_all']) in self.ips,
                    )):
                        active_connections += 1
        return active_connections
def _convert_host_to_ip(host):
"""
Perform forward DNS resolution on host, IP will give the same IP
Args:
host: String with either hostname, IPv4, or IPv6 address
Returns:
List of tuples containing address family and IP
"""
addrinfo = socket.getaddrinfo(host, 80, 0, 0, socket.SOL_TCP)
ips = []
for family, socktype, proto, canonname, sockaddr in addrinfo:
ip = sockaddr[0]
ips.append((family, ip))
if family == socket.AF_INET:
ips.append((socket.AF_INET6, "::ffff:" + ip))
return ips
def _convert_host_to_hex(host):
    """
    Convert *host* to the hex format used in /proc/net/tcp*.

    /proc/net/tcp stores IPv4 as a single little-endian 4-byte hex word;
    /proc/net/tcp6 stores IPv6 as four such words.

    Returns a list of (family, hex_string) tuples; empty when host is None.
    """
    ips = []
    if host is None:
        return ips
    for family, ip in _convert_host_to_ip(host):
        packed_hex = binascii.b2a_hex(socket.inet_pton(family, ip))
        words = []
        for offset in range(0, len(packed_hex), 8):
            network_word = packed_hex[offset:offset + 8]
            words.append("%08X" % socket.ntohl(int(network_word, base=16)))
        ips.append((family, "".join(words)))
    return ips
def _create_connection(host, port, connect_timeout):
"""
Connect to a 2-tuple (host, port) and return
the socket object.
Args:
2-tuple (host, port) and connection timeout
Returns:
Socket object
"""
if sys.version_info < (2, 6):
(family, _) = (_convert_host_to_ip(host))[0]
connect_socket = socket.socket(family, socket.SOCK_STREAM)
connect_socket.settimeout(connect_timeout)
connect_socket.connect( (host, port) )
else:
connect_socket = socket.create_connection( (host, port), connect_timeout)
return connect_socket
def _timedelta_total_seconds(timedelta):
return (
timedelta.microseconds + 0.0 +
(timedelta.seconds + timedelta.days * 24 * 3600) * 10 ** 6) / 10 ** 6
def get_connection_state_id(state):
    """
    Map a TCP state name (e.g. 'ESTABLISHED') to the two-digit hex id
    used in /proc/net/tcp*.  Raises KeyError for unknown names.
    """
    ordered_states = ('ESTABLISHED', 'SYN_SENT', 'SYN_RECV',
                      'FIN_WAIT1', 'FIN_WAIT2', 'TIME_WAIT')
    state_ids = dict(zip(ordered_states,
                         ('01', '02', '03', '04', '05', '06')))
    return state_ids[state]
def main():
    """Entry point: wait for the requested port/file/drain condition.

    Validates mutually-exclusive parameters, optionally sleeps for
    ``delay``, then polls until the condition is met or ``timeout``
    expires, finishing via module.exit_json()/fail_json().

    NOTE(review): ``os``, ``AnsibleModule`` and (presumably)
    ``get_exception`` come from the trailing
    ``from ansible.module_utils.basic import *`` -- this file does not
    import ``os`` directly; confirm before reordering imports.
    """
    module = AnsibleModule(
        argument_spec = dict(
            host=dict(default='127.0.0.1'),
            timeout=dict(default=300, type='int'),
            connect_timeout=dict(default=5, type='int'),
            delay=dict(default=0, type='int'),
            port=dict(default=None, type='int'),
            active_connection_states=dict(default=['ESTABLISHED','SYN_SENT','SYN_RECV','FIN_WAIT1','FIN_WAIT2','TIME_WAIT'], type='list'),
            path=dict(default=None, type='path'),
            search_regex=dict(default=None),
            state=dict(default='started', choices=['started', 'stopped', 'present', 'absent', 'drained']),
            exclude_hosts=dict(default=None, type='list'),
            sleep=dict(default=1, type='int')
        ),
    )
    params = module.params
    host = params['host']
    timeout = params['timeout']
    connect_timeout = params['connect_timeout']
    delay = params['delay']
    port = params['port']
    state = params['state']
    path = params['path']
    search_regex = params['search_regex']
    # Compile once up front; MULTILINE so ^/$ match per line in file content.
    if search_regex is not None:
        compiled_search_re = re.compile(search_regex, re.MULTILINE)
    else:
        compiled_search_re = None
    # Parameter sanity checks: port and path are mutually exclusive, and
    # some states only make sense for one of them.
    if port and path:
        module.fail_json(msg="port and path parameter can not both be passed to wait_for")
    if path and state == 'stopped':
        module.fail_json(msg="state=stopped should only be used for checking a port in the wait_for module")
    if path and state == 'drained':
        module.fail_json(msg="state=drained should only be used for checking a port in the wait_for module")
    if params['exclude_hosts'] is not None and state != 'drained':
        module.fail_json(msg="exclude_hosts should only be with state=drained")
    # Pre-validate the state names so later lookups cannot fail mid-poll.
    for _connection_state in params['active_connection_states']:
        try:
            get_connection_state_id(_connection_state)
        except:
            module.fail_json(msg="unknown active_connection_state ("+_connection_state+") defined")
    start = datetime.datetime.now()
    if delay:
        time.sleep(delay)
    if not port and not path and state != 'drained':
        # Nothing to check: plain timed pause.
        time.sleep(timeout)
    elif state in [ 'stopped', 'absent' ]:
        ### first wait for the stop condition
        end = start + datetime.timedelta(seconds=timeout)
        while datetime.datetime.now() < end:
            if path:
                try:
                    f = open(path)
                    f.close()
                except IOError:
                    # File is gone -- condition met.
                    break
            elif port:
                try:
                    s = _create_connection(host, port, connect_timeout)
                    s.shutdown(socket.SHUT_RDWR)
                    s.close()
                except:
                    # Connection failed -- port is closed, condition met.
                    break
            # Conditions not yet met, wait and try again
            time.sleep(params['sleep'])
        else:
            # while-else: loop ran to timeout without breaking.
            elapsed = datetime.datetime.now() - start
            if port:
                module.fail_json(msg="Timeout when waiting for %s:%s to stop." % (host, port), elapsed=elapsed.seconds)
            elif path:
                module.fail_json(msg="Timeout when waiting for %s to be absent." % (path), elapsed=elapsed.seconds)
    elif state in ['started', 'present']:
        ### wait for start condition
        end = start + datetime.timedelta(seconds=timeout)
        while datetime.datetime.now() < end:
            if path:
                try:
                    os.stat(path)
                except OSError:
                    e = get_exception()
                    # If anything except file not present, throw an error
                    if e.errno != 2:
                        elapsed = datetime.datetime.now() - start
                        module.fail_json(msg="Failed to stat %s, %s" % (path, e.strerror), elapsed=elapsed.seconds)
                    # file doesn't exist yet, so continue
                else:
                    # File exists. Are there additional things to check?
                    if not compiled_search_re:
                        # nope, succeed!
                        break
                    try:
                        f = open(path)
                        try:
                            if re.search(compiled_search_re, f.read()):
                                # String found, success!
                                break
                        finally:
                            f.close()
                    except IOError:
                        pass
            elif port:
                # Shrink the per-attempt timeout to what is left of the
                # overall deadline.
                alt_connect_timeout = math.ceil(_timedelta_total_seconds(end - datetime.datetime.now()))
                try:
                    s = _create_connection(host, port, min(connect_timeout, alt_connect_timeout))
                except:
                    # Failed to connect by connect_timeout. wait and try again
                    pass
                else:
                    # Connected -- are there additional conditions?
                    if compiled_search_re:
                        data = ''
                        matched = False
                        while datetime.datetime.now() < end:
                            max_timeout = math.ceil(_timedelta_total_seconds(end - datetime.datetime.now()))
                            (readable, w, e) = select.select([s], [], [], max_timeout)
                            if not readable:
                                # No new data. Probably means our timeout
                                # expired
                                continue
                            response = s.recv(1024)
                            if not response:
                                # Server shutdown
                                break
                            data += to_native(response, errors='surrogate_or_strict')
                            if re.search(compiled_search_re, data):
                                matched = True
                                break
                        # Shutdown the client socket
                        s.shutdown(socket.SHUT_RDWR)
                        s.close()
                        if matched:
                            # Found our string, success!
                            break
                    else:
                        # Connection established, success!
                        s.shutdown(socket.SHUT_RDWR)
                        s.close()
                        break
            # Conditions not yet met, wait and try again
            time.sleep(params['sleep'])
        else: # while-else
            # Timeout expired
            elapsed = datetime.datetime.now() - start
            if port:
                if search_regex:
                    module.fail_json(msg="Timeout when waiting for search string %s in %s:%s" % (search_regex, host, port), elapsed=elapsed.seconds)
                else:
                    module.fail_json(msg="Timeout when waiting for %s:%s" % (host, port), elapsed=elapsed.seconds)
            elif path:
                if search_regex:
                    module.fail_json(msg="Timeout when waiting for search string %s in %s" % (search_regex, path), elapsed=elapsed.seconds)
                else:
                    module.fail_json(msg="Timeout when waiting for file %s" % (path), elapsed=elapsed.seconds)
    elif state == 'drained':
        ### wait until all active connections are gone
        end = start + datetime.timedelta(seconds=timeout)
        tcpconns = TCPConnectionInfo(module)
        while datetime.datetime.now() < end:
            try:
                if tcpconns.get_active_connections_count() == 0:
                    break
            except IOError:
                pass
            # Conditions not yet met, wait and try again
            time.sleep(params['sleep'])
        else:
            elapsed = datetime.datetime.now() - start
            module.fail_json(msg="Timeout when waiting for %s:%s to drain" % (host, port), elapsed=elapsed.seconds)
    elapsed = datetime.datetime.now() - start
    module.exit_json(state=state, port=port, search_regex=search_regex, path=path, elapsed=elapsed.seconds)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
| gpl-3.0 |
eqcorrscan/EQcorrscan | eqcorrscan/__init__.py | 1 | 3103 | #!/usr/bin/python
"""
:copyright:
EQcorrscan developers.
:license:
GNU Lesser General Public License, Version 3
(https://www.gnu.org/copyleft/lesser.html)
"""
import importlib
import sys
import warnings
from eqcorrscan.core.match_filter.party import Party # NOQA
from eqcorrscan.core.match_filter.family import Family # NOQA
from eqcorrscan.core.match_filter.detection import Detection # NOQA
from eqcorrscan.core.match_filter.tribe import Tribe # NOQA
from eqcorrscan.core.match_filter.template import Template # NOQA
from eqcorrscan.core.subspace import Detector, read_detector # NOQA
from eqcorrscan.core.lag_calc import lag_calc # NOQA
from eqcorrscan.utils.correlate import ( # NOQA
get_stream_xcorr, get_array_xcorr, register_array_xcorr)
__all__ = ['core', 'utils', 'tutorials', 'tests']
__version__ = '0.4.3'
# Cope with changes to name-space to remove most of the camel-case
_import_map = {}
class EQcorrscanDeprecationWarning(UserWarning):
    """
    Deprecation warning derived from UserWarning so it pops up by default.
    """
    pass
# Fail fast on Python 2 with a pointer to the discussion, rather than
# failing later with obscure syntax errors.
if sys.version_info.major < 3:
    raise NotImplementedError(
        "EQcorrscan no longer supports Python 2.x"
        " See https://github.com/eqcorrscan/EQcorrscan/issues/242 to read "
        "more.")
class EQcorrscanRestructureAndLoad(object):
    """
    Path finder and module loader for transitioning.

    Implements the legacy PEP 302 finder/loader interface: imports whose
    names match a key in ``_import_map`` are redirected to the new module
    path and an :class:`EQcorrscanDeprecationWarning` is emitted.

    NOTE(review): find_module/load_module are the deprecated import-hook
    API (superseded by find_spec/exec_module and removed in Python 3.12)
    -- confirm the supported Python range before relying on this shim.
    """
    def find_module(self, fullname, path=None):
        # Compatibility with namespace paths.
        if hasattr(path, "_path"):
            path = path._path
        # Only handle imports that originate inside this package.
        if not path or not path[0].startswith(__path__[0]):
            return None
        for key in _import_map.keys():
            if fullname.startswith(key):
                break
        else:
            # Name matches no mapped prefix: let the normal import machinery
            # handle it.
            return None
        return self
    def load_module(self, name):
        # Use cached modules.
        if name in sys.modules:
            return sys.modules[name]
        # Otherwise check if the name is part of the import map.
        elif name in _import_map:
            new_name = _import_map[name]
        else:
            # Fall back to prefix matching: rewrite the first mapped prefix
            # found in the requested name.
            new_name = name
            for old, new in _import_map.items():
                if not new_name.startswith(old):
                    continue
                new_name = new_name.replace(old, new)
                break
            else:
                return None
        # Don't load again if already loaded.
        if new_name in sys.modules:
            module = sys.modules[new_name]
        else:
            module = importlib.import_module(new_name)
        # Warn here as at this point the module has already been imported.
        warnings.warn("Module '%s' is deprecated and will stop working "
                      "with the next EQcorrscan version. Please import module "
                      "'%s' instead." % (name, new_name),
                      EQcorrscanDeprecationWarning)
        # Register under both names so future imports hit the sys.modules
        # cache directly.
        sys.modules[new_name] = module
        sys.modules[name] = module
        return module
# Install the transitional loader so imports of old module paths keep working.
sys.meta_path.append(EQcorrscanRestructureAndLoad())
if __name__ == '__main__':
    import doctest
    doctest.testmod(exclude_empty=True)
| gpl-3.0 |
chemelnucfin/tensorflow | tensorflow/python/tpu/_tpu_estimator_embedding.py | 28 | 1025 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Stub file to maintain backwards compatibility."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=wildcard-import,unused-import
from tensorflow_estimator.python.estimator.tpu._tpu_estimator_embedding import *
# pylint: enable=wildcard-import,unused-import
| apache-2.0 |
plablo09/geo_context | roc_curve.py | 1 | 3990 | # -*- coding: utf-8 -*-
import numpy as np
import pandas as pd
from sklearn import preprocessing
from sklearn.decomposition import PCA as sklearnPCA
from sklearn import svm
from sklearn.metrics import roc_curve, auc
from sklearn.cross_validation import StratifiedKFold
import matplotlib.pyplot as plt
from scipy import interp
#set random state for camparability
random_state = np.random.RandomState(0)
#this function performs stratified k-folds and plots roc curves
def plot_roc(predictor, target):
    """
    Run a 6-fold stratified cross-validation with an SVC classifier and
    plot the per-fold ROC curves plus the mean ROC curve (AUC values in
    the legend).

    predictor: 2-D feature array; target: 1-D label array.  The positive
    class label is hard-coded to 3.0.  Shows the figure; returns None.
    """
    cv = StratifiedKFold(target, n_folds=6)
    classifier = svm.SVC(probability=True,random_state=random_state)
    # Mean true-positive rate accumulated over folds, sampled on a fixed
    # 100-point false-positive-rate grid.
    mean_tpr = 0.0
    mean_fpr = np.linspace(0, 1, 100)
    #all_tpr = []
    for i, (train, test) in enumerate(cv):
        # Fit on the training fold, get class probabilities on the test fold.
        probas_ = classifier.fit(predictor[train],
                                 target[train]).predict_proba(predictor[test])
        # Compute ROC curve and area the curve
        fpr, tpr, thresholds = roc_curve(target[test],
                                         probas_[:, 1],pos_label=3.0)
        # Interpolate this fold's curve onto the common FPR grid.
        mean_tpr += interp(mean_fpr, fpr, tpr)
        mean_tpr[0] = 0.0
        roc_auc = auc(fpr, tpr)
        plt.plot(fpr, tpr, lw=1, label='ROC fold %d (area = %0.2f)' % (i, roc_auc))
    # Diagonal = performance of random guessing.
    plt.plot([0, 1], [0, 1], '--', color=(0.6, 0.6, 0.6), label='Luck')
    mean_tpr /= len(cv)
    mean_tpr[-1] = 1.0
    mean_auc = auc(mean_fpr, mean_tpr)
    plt.plot(mean_fpr, mean_tpr, 'k--',
             label='Mean ROC (area = %0.2f)' % mean_auc, lw=2)
    plt.xlim([-0.05, 1.05])
    plt.ylim([-0.05, 1.05])
    plt.xlabel('False Positive Rate')
    plt.ylabel('True Positive Rate')
    plt.title('Receiver operating characteristic geo_context')
    plt.legend(loc="lower right")
    plt.show()
# Read the point-level context data set.
context = pd.read_csv('context_nuevo.csv')
# Select variable columns: everything after the first six columns.
cols_select = context.columns[6:]
variables = context.ix[:,cols_select]
# Drop identifier/coordinate columns that are not model features.
for c in ['no_se','uname','cont','lat','lon','geom','cve_mza']:
    del variables[c]
def intervalo_to_numbers(x):
    """
    Convert an interval label like ``'mon.3'`` into a single float.

    The label has the form ``'<weekday>.<slot>'`` where the slot is an
    intra-day index; the result is ``weekday_number + 0.16666 * slot``
    (0.16666 approximating one sixth of a day).

    Bug fixed: the original mapping literal listed 'sun' twice
    ({'sun': 0, ..., 'sun': 7}); the first entry was dead because the
    last duplicate key wins in a dict literal.  The dead key is removed;
    behaviour is unchanged (mon=1 ... sat=6, sun=7).

    Raises KeyError for unknown weekday prefixes and ValueError when the
    slot part is not an integer.
    """
    equiv = {'mon': 1, 'tue': 2, 'wed': 3, 'thu': 4,
             'fri': 5, 'sat': 6, 'sun': 7}
    parts = x.split('.')
    day = parts[0]
    interval = 0.16666 * int(parts[1])
    return equiv[day] + interval
reclass = variables['intervalo'].apply(intervalo_to_numbers)
#drop old 'intervalo' column and replace it with numerical values
del variables['intervalo']
variables = variables.join(reclass,how='inner')
#Get dataframe as matrix and scale it:
data = variables.as_matrix()
Y = data[:,0]
X = data[:,1:]
scaled_X = preprocessing.scale(X)
#Perform PCA analysis
pca = sklearnPCA(n_components=0.80,whiten=True)
pca_transform = pca.fit_transform(scaled_X)
pca_transform.shape
#Stratified k-fold
#Get only positive and negative classes, first with original data
X_bin, Y_bin = scaled_X[Y != 2], Y[Y != 2]
#Same with PCA reduced data:
#data = variables.as_matrix()
#Y_pca = pca_transform[:,0]
#X_pca = pca_transform[:,1:]
#X_pca_bin, Y_pca_bin = X_pca[Y != 2], Y[Y != 2]
#cv_pca = StratifiedKFold(Y_pca_bin, n_folds=6)
#for i, (train, test) in enumerate(cv_pca):
# probas_ = classifier.fit(X_pca_bin[train], Y_bin[train]).predict_proba(X_pca_bin[test])
# # Compute ROC curve and area the curve
# fpr, tpr, thresholds = roc_curve(Y_bin[test], probas_[:, 1],pos_label=3.0)
# mean_tpr += interp(mean_fpr, fpr, tpr)
# mean_tpr[0] = 0.0
# roc_auc = auc(fpr, tpr)
# plt.plot(fpr, tpr, lw=1, label='ROC fold %d (area = %0.2f)' % (i, roc_auc))
#
#plt.plot([0, 1], [0, 1], '--', color=(0.6, 0.6, 0.6), label='Luck')
#
#mean_tpr /= len(cv_pca)
#mean_tpr[-1] = 1.0
#mean_auc = auc(mean_fpr, mean_tpr)
#plt.plot(mean_fpr, mean_tpr, 'k--',
# label='Mean ROC (area = %0.2f)' % mean_auc, lw=2)
#
#plt.xlim([-0.05, 1.05])
#plt.ylim([-0.05, 1.05])
#plt.xlabel('False Positive Rate')
#plt.ylabel('True Positive Rate')
#plt.title('Receiver operating characteristic geo_context')
#plt.legend(loc="lower right")
#plt.show()
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# EUCTW frequency table
# Converted from big5 work
# by Taiwan's Mandarin Promotion Council
# <http:#www.edu.tw:81/mandr/>
# 128 --> 0.42261
# 256 --> 0.57851
# 512 --> 0.74851
# 1024 --> 0.89384
# 2048 --> 0.97583
#
# Idea Distribution Ratio = 0.74851/(1-0.74851) =2.98
# Random Distribution Ration = 512/(5401-512)=0.105
#
# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR
# Typical-to-ideal distribution ratio used by chardet's distribution analyzer
# to decide how confident the EUC-TW detector is (see the derivation in the
# header comment above: ~25% of the ideal ratio 2.98).
EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75
# Size of the char-to-frequency-order lookup table that follows; entries at or
# beyond this index carry no detection signal.
EUCTW_TABLE_SIZE = 8102
EUCTWCharToFreqOrder = (
1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742
3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758
1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774
63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790
3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806
4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822
7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838
630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854
179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870
995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886
2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902
1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918
3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934
706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950
1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966
3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982
2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998
437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014
3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030
1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046
7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062
266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078
7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094
1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110
32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126
188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142
3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158
3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174
324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190
2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206
2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222
314,2615,2775,4308,2330,2331, 569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238
287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254
3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270
1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286
1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302
1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318
2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334
265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350
4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366
1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382
7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398
2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414
383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430
98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446
523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462
710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478
7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494
379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510
1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526
585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542
690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558
7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574
1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590
544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606
3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622
4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638
3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654
279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670
610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686
1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702
4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718
3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734
3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750
2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766
7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782
3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798
7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814
1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830
2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846
1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862
78,3750,3751, 267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878
1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894
4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910
3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926
534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942
165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958
626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974
2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990
7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006
1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022
2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038
1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054
1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070
7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086
7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102
7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118
3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134
4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150
1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166
7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182
2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198
7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214
3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230
3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246
7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262
2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278
7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294
862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310
4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326
2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342
7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358
3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374
2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390
2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406
294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422
2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438
1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454
1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470
2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486
1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502
7505,3129,3261, 215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518
7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534
2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550
4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566
1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582
7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598
829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614
4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630
375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646
2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662
444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678
1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694
1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710
730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726
3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742
3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758
1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774
3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790
7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806
7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822
1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838
2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854
1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870
3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886
2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902
3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918
2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934
4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950
4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966
3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982
97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998
3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014
424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030
3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046
3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062
3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078
1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094
7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110
199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126
7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142
1702,1226, 102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158
391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174
4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190
3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206
397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222
2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238
2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254
3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270
1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286
4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302
2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318
1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334
1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350
2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366
3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382
1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398
7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414
1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430
4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446
1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462
135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478
1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494
3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510
3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526
2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542
1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558
4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574
660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590
7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606
2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622
3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638
4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654
790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670
7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686
7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702
1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718
4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734
3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750
2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766
3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782
3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798
2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814
1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830
4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846
3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862
3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878
2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894
4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910
7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926
3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942
2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958
3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974
1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990
2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006
3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022
4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038
2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054
2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070
7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086
1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102
2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118
1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134
3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150
4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166
2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182
3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198
3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214
2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230
4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246
2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262
3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278
4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294
7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310
3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326
194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342
1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358
4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374
1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390
4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406
7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422
510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438
7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454
2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470
1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486
1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502
3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518
509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534
552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550
478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566
3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582
2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598
751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614
7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630
1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646
3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662
7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678
1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694
7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710
4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726
1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742
2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758
2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774
4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790
802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806
809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822
3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838
3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854
1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870
2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886
7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902
1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918
1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934
3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950
919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966
1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982
4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998
7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014
2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030
3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046
516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 7062
1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078
2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094
2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110
7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126
7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142
7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158
2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174
2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190
1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206
4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222
3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238
3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254
4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270
4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286
2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302
2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318
7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334
4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350
7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366
2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382
1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398
3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414
4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430
2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446
120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462
2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478
1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494
2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510
2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526
4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542
7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558
1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574
3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590
7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606
1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622
8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638
2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654
8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670
2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686
2328,3852, 533,4273,3605,2181, 617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702
8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718
8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734
8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750
408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766
8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782
4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798
3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814
8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830
1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846
8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862
425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878
1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894
479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910
4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926
1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942
4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958
1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974
433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990
3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006
4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022
8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038
938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054
3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070
890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086
2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102
#Everything below is of no interest for detection purpose
2515,1613,4582,8119,3312,3866,2516,8120,4058,8121,1637,4059,2466,4583,3867,8122, # 8118
2493,3016,3734,8123,8124,2192,8125,8126,2162,8127,8128,8129,8130,8131,8132,8133, # 8134
8134,8135,8136,8137,8138,8139,8140,8141,8142,8143,8144,8145,8146,8147,8148,8149, # 8150
8150,8151,8152,8153,8154,8155,8156,8157,8158,8159,8160,8161,8162,8163,8164,8165, # 8166
8166,8167,8168,8169,8170,8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181, # 8182
8182,8183,8184,8185,8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197, # 8198
8198,8199,8200,8201,8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213, # 8214
8214,8215,8216,8217,8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229, # 8230
8230,8231,8232,8233,8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245, # 8246
8246,8247,8248,8249,8250,8251,8252,8253,8254,8255,8256,8257,8258,8259,8260,8261, # 8262
8262,8263,8264,8265,8266,8267,8268,8269,8270,8271,8272,8273,8274,8275,8276,8277, # 8278
8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,8290,8291,8292,8293, # 8294
8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,8308,8309, # 8310
8310,8311,8312,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322,8323,8324,8325, # 8326
8326,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337,8338,8339,8340,8341, # 8342
8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353,8354,8355,8356,8357, # 8358
8358,8359,8360,8361,8362,8363,8364,8365,8366,8367,8368,8369,8370,8371,8372,8373, # 8374
8374,8375,8376,8377,8378,8379,8380,8381,8382,8383,8384,8385,8386,8387,8388,8389, # 8390
8390,8391,8392,8393,8394,8395,8396,8397,8398,8399,8400,8401,8402,8403,8404,8405, # 8406
8406,8407,8408,8409,8410,8411,8412,8413,8414,8415,8416,8417,8418,8419,8420,8421, # 8422
8422,8423,8424,8425,8426,8427,8428,8429,8430,8431,8432,8433,8434,8435,8436,8437, # 8438
8438,8439,8440,8441,8442,8443,8444,8445,8446,8447,8448,8449,8450,8451,8452,8453, # 8454
8454,8455,8456,8457,8458,8459,8460,8461,8462,8463,8464,8465,8466,8467,8468,8469, # 8470
8470,8471,8472,8473,8474,8475,8476,8477,8478,8479,8480,8481,8482,8483,8484,8485, # 8486
8486,8487,8488,8489,8490,8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501, # 8502
8502,8503,8504,8505,8506,8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517, # 8518
8518,8519,8520,8521,8522,8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533, # 8534
8534,8535,8536,8537,8538,8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549, # 8550
8550,8551,8552,8553,8554,8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,8565, # 8566
8566,8567,8568,8569,8570,8571,8572,8573,8574,8575,8576,8577,8578,8579,8580,8581, # 8582
8582,8583,8584,8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597, # 8598
8598,8599,8600,8601,8602,8603,8604,8605,8606,8607,8608,8609,8610,8611,8612,8613, # 8614
8614,8615,8616,8617,8618,8619,8620,8621,8622,8623,8624,8625,8626,8627,8628,8629, # 8630
8630,8631,8632,8633,8634,8635,8636,8637,8638,8639,8640,8641,8642,8643,8644,8645, # 8646
8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,8657,8658,8659,8660,8661, # 8662
8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672,8673,8674,8675,8676,8677, # 8678
8678,8679,8680,8681,8682,8683,8684,8685,8686,8687,8688,8689,8690,8691,8692,8693, # 8694
8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,8704,8705,8706,8707,8708,8709, # 8710
8710,8711,8712,8713,8714,8715,8716,8717,8718,8719,8720,8721,8722,8723,8724,8725, # 8726
8726,8727,8728,8729,8730,8731,8732,8733,8734,8735,8736,8737,8738,8739,8740,8741) # 8742
# flake8: noqa
| gpl-2.0 |
james-antill/yum | test/transactiontests.py | 11 | 7012 | from yum.constants import *
import unittest
import settestpath
from testbase import *
from yum.transactioninfo import TransactionData
class TransactionDataTests(unittest.TestCase):
    ''' Test cases for yum.transactioninfo.TransactionData'''

    def setUp(self):
        # Fresh TransactionData backed by two empty package sacks: rpmdb
        # stands in for the installed-package database, pkgSack for the
        # available repositories.
        self.tsInfo = TransactionData()
        self.rpmdb = packageSack.PackageSack()
        self.pkgSack = packageSack.PackageSack()
        self.tsInfo.setDatabases(self.rpmdb, self.pkgSack)
        # Fixture packages.  FakePackage comes from testbase (star import);
        # argument order is presumably (name, version, release, epoch, arch)
        # -- TODO confirm against testbase.FakePackage.
        self.foo1 = FakePackage('foo', '1', '0', '0', 'noarch')
        self.foo2 = FakePackage('foo', '2', '0', '0', 'i386')
        self.bar1 = FakePackage('bar', '1', '0', '0', 'i386')
        self.bar2 = FakePackage('bar', '2', '0', '0', 'noarch')
        self.foogui1 = FakePackage('foogui', '1', '0', '0', 'x86_64')
        self.foogui2 = FakePackage('foogui', '2', '0', '0', 'noarch')

    def testLenght(self):
        ''' test __len__ method '''
        # NOTE: method name has a historical typo ("Lenght"); kept as-is so
        # test discovery/references stay stable.
        self.tsInfo.addInstall(self.foo1)
        self.tsInfo.addUpdate(self.foogui2, self.foogui1)
        self.dumpTsInfo()
        # one install + an update pair (new pkg and the pkg it updates) = 3
        self.assertEqual(len(self.tsInfo), 3)

    def testAddTheSameTwice(self):
        ''' test add the same twice '''
        txmbr1 = self.tsInfo.addInstall(self.foo1)
        txmbr2 = self.tsInfo.addInstall(self.foo2)
        txmbr3 = self.tsInfo.addInstall(self.foo1)
        self.dumpTsInfo()
        self.assertEqual(len(self.tsInfo), 2)  # only 2 members
        # self.assertEquals(txmbr3, txmbr1)  # 1 & 3 should be equal

    def testExists(self):
        ''' test exists method '''
        self.tsInfo.addInstall(self.foo1)
        self.tsInfo.addInstall(self.bar1)
        self.dumpTsInfo()
        # exists() returns 1/0 rather than True/False
        self.assertEqual(self.tsInfo.exists(self.foo1.pkgtup), 1)
        self.assertEqual(self.tsInfo.exists(self.bar1.pkgtup), 1)
        self.assertEqual(self.tsInfo.exists(self.foogui1.pkgtup), 0)

    def testRemove(self):
        ''' test remove from transaction '''
        txmbr = self.tsInfo.addInstall(self.foo1)
        self.tsInfo.addInstall(self.bar2)
        self.tsInfo.remove(self.bar2.pkgtup)
        self.dumpTsInfo()
        # only the non-removed member should remain
        self.assertResult([txmbr])

    def testAddToTransaction(self):
        ''' test adding basic adding to Transaction '''
        txmbr1 = self.tsInfo.addInstall(self.foo1)
        txmbr2 = self.tsInfo.addUpdate(self.foogui2, self.foogui1)
        # addUpdate also creates a member for the package being updated
        txmbr3 = self.tsInfo.getMembers(self.foogui1.pkgtup)[0]
        self.dumpTsInfo()
        self.assertResult([txmbr1, txmbr2, txmbr3])

    def testGetFromTransaction(self):
        ''' test getting from Transaction '''
        self.tsInfo.addInstall(self.foo2)
        self.tsInfo.addObsoleting(self.bar2, self.bar1)
        self.tsInfo.addUpdate(self.foogui2, self.foogui1)
        self.tsInfo.addErase(self.foo1)
        self.dumpTsInfo()
        # get install member foo-2.0 - u
        txmbr = self.tsInfo.getMembers(self.foo2.pkgtup)[0]
        self.assertEqual(txmbr.po, self.foo2)
        self.assertEqual(txmbr.current_state, TS_AVAILABLE)
        self.assertEqual(txmbr.output_state, TS_INSTALL)
        self.assertEqual(txmbr.po.state, TS_INSTALL)
        self.assertEqual(txmbr.ts_state, 'u')
        # get erase member foo-1.0 - e
        txmbr = self.tsInfo.getMembers(self.foo1.pkgtup)[0]
        self.assertEqual(txmbr.po, self.foo1)
        self.assertEqual(txmbr.current_state, TS_INSTALL)
        self.assertEqual(txmbr.output_state, TS_ERASE)
        self.assertEqual(txmbr.po.state, TS_INSTALL)
        self.assertEqual(txmbr.ts_state, 'e')
        # get Obsoleting
        txmbr = self.tsInfo.getMembers(self.bar2.pkgtup)[0]
        self.assertEqual(txmbr.po, self.bar2)
        self.assertEqual(txmbr.current_state, TS_AVAILABLE)
        self.assertEqual(txmbr.output_state, TS_OBSOLETING)
        self.assertEqual(txmbr.po.state, TS_OBSOLETING)
        self.assertEqual(txmbr.ts_state, 'u')
        self.assertEqual(txmbr.relatedto, [(self.bar1, 'obsoletes')])
        self.assertEqual(txmbr.obsoletes, [self.bar1])
        # get update member
        txmbr = self.tsInfo.getMembers(self.foogui2.pkgtup)[0]
        self.assertEqual(txmbr.po, self.foogui2)
        self.assertEqual(txmbr.current_state, TS_AVAILABLE)
        self.assertEqual(txmbr.output_state, TS_UPDATE)
        self.assertEqual(txmbr.po.state, TS_UPDATE)
        self.assertEqual(txmbr.ts_state, 'u')
        self.assertEqual(txmbr.relatedto, [(self.foogui1, 'updates')])
        self.assertEqual(txmbr.updates, [self.foogui1])

    def testAddUpdatesAndObsoletes(self):
        ''' test addUpdated,addObsoleted'''
        txmbr1 = self.tsInfo.addUpdated(self.foo1, self.foo2)
        txmbr2 = self.tsInfo.addObsoleted(self.bar1, self.bar2)
        self.dumpTsInfo()
        self.assertResult([txmbr1, txmbr2])
        txmbr = self.tsInfo.getMembersWithState(output_states=[TS_UPDATED])[0]
        self.assertEqual(txmbr.po, self.foo1)
        txmbr = self.tsInfo.getMembersWithState(output_states=[TS_OBSOLETED])[0]
        self.assertEqual(txmbr.po, self.bar1)

    def testMatchNaevr(self):
        ''' test MatchNaevr '''
        self.tsInfo.addInstall(self.foo1)
        self.tsInfo.addObsoleting(self.bar2, self.bar1)
        self.tsInfo.addUpdate(self.foogui2, self.foogui1)
        self.dumpTsInfo()
        res = self.tsInfo.matchNaevr( name='foogui')
        self.assertEqual(len(res), 2)  # foogui-1.0, foogui-2.0
        res = self.tsInfo.matchNaevr( arch='noarch')
        self.assertEqual(len(res), 3)  # foo-1.0, bar-2.0, foogui-2.0
        res = self.tsInfo.matchNaevr( epoch='0', ver='1', rel='0')
        self.assertEqual(len(res), 2)  # foo-1.0, foogui-1.0

    def testgetMembersWithState(self):
        ''' test getMembersWithState'''
        self.tsInfo.addInstall(self.foo1)
        self.tsInfo.addObsoleting(self.bar2, self.bar1)
        self.tsInfo.addUpdate(self.foogui2, self.foogui1)
        self.dumpTsInfo()
        res = self.tsInfo.getMembersWithState(output_states=[TS_INSTALL, TS_UPDATE])
        self.assertEqual(len(res), 2)  # foo-1.0, bar-2.0
        res = self.tsInfo.getMembersWithState(output_states=[TS_UPDATED])
        self.assertEqual(len(res), 1)  # bar-1.0

    def assertResult(self, txmbrs):
        """Check if self.tsInfo contains the given txmbr.
        Fails with a dump of the offending members and the calling test's
        source when the sets differ.
        """
        errors = ["Problems with members in txInfo \n\n"]
        txmbrs = set(txmbrs)
        found = set(self.tsInfo.getMembers())
        # Look for needed members
        for txmbr in txmbrs:
            if not self.tsInfo.exists(txmbr.po.pkgtup):
                errors.append(" %s was not found in tsInfo!\n" % txmbr)
        # Anything present that we did not expect is also an error.
        for txmbr in found - txmbrs:
            errors.append("%s should not be in tsInfo\n" % txmbr)
        if len(errors) > 1:
            errors.append("\nTest case was:\n\n")
            # inspect comes from testbase's star import; grab the caller's
            # source to make the failure message self-explanatory.
            errors.extend(inspect.getsource(inspect.stack()[1][0].f_code))
            errors.append("\n")
            self.fail("".join(errors))

    def dumpTsInfo(self):
        # Debug helper: print every transaction member (Python 2 print).
        for txmbr in self.tsInfo:
            print txmbr
| gpl-2.0 |
Chilledheart/vbox | src/VBox/Additions/common/crOpenGL/Linux_i386_exports.py | 22 | 2622 | # Copyright (c) 2001, Stanford University
# All rights reserved.
#
# See the file LICENSE.txt for information on redistributing this software.
import sys
import apiutil
def GenerateEntrypoints():
    """Emit (to stdout) NASM source for the Linux x86/amd64 OpenGL entry
    points: one exported gl* symbol per dispatched function that jumps
    through the glim dispatch table, plus alias and no-op stub symbols.
    Expects sys.argv[1] to be the directory containing APIspec.txt.
    """
    #apiutil.CopyrightC()

    # Get sorted list of dispatched functions.
    # The order is very important - it must match cr_opcodes.h
    # and spu_dispatch_table.h
    print '%include "iprt/asmdefs.mac"'
    print ""
    print "%ifdef RT_ARCH_AMD64"
    print "extern glim"
    print "%else ; X86"
    print "extern glim"
    print "%endif"
    print ""

    keys = apiutil.GetDispatchedFunctions(sys.argv[1]+"/APIspec.txt")

    for index in range(len(keys)):
        func_name = keys[index]
        if apiutil.Category(func_name) == "Chromium":
            continue
        if apiutil.Category(func_name) == "VBox":
            continue
        # Each entry point is an indirect jump through the glim table;
        # table slots are pointer-sized (8 bytes on amd64, 4 on x86).
        print "BEGINPROC_EXPORTED gl%s" % func_name
        print "%ifdef RT_ARCH_AMD64"
        print "\tmov \trax, qword glim+%d" % (8*index)
        print "\tjmp \t[rax]"
        print "%else ; X86"
        print "\tmov \teax, dword glim+%d" % (4*index)
        print "\tjmp \t[eax]"
        print "%endif"
        print "ENDPROC gl%s" % func_name
        print ""

    print ';'
    print '; Aliases'
    print ';'

    # Now loop over all the functions and take care of any aliases
    allkeys = apiutil.GetAllFunctions(sys.argv[1]+"/APIspec.txt")
    for func_name in allkeys:
        if "omit" in apiutil.ChromiumProps(func_name):
            continue

        if func_name in keys:
            # we already processed this function earlier
            continue

        # alias is the function we're aliasing
        alias = apiutil.Alias(func_name)
        if alias:
            # this dict lookup should never fail (raise an exception)!
            index = keys.index(alias)
            # Alias jumps through the aliased function's glim slot.
            print "BEGINPROC_EXPORTED gl%s" % func_name
            print "%ifdef RT_ARCH_AMD64"
            print "\tmov \trax, qword glim+%d" % (8*index)
            print "\tjmp \t[rax]"
            print "%else ; X86"
            print "\tmov \teax, dword glim+%d" % (4*index)
            print "\tjmp \t[eax]"
            print "%endif"
            print "ENDPROC gl%s" % func_name
            print ""

    print ';'
    print '; No-op stubs'
    print ';'

    # Now generate no-op stub functions
    for func_name in allkeys:
        if "stub" in apiutil.ChromiumProps(func_name):
            print "BEGINPROC_EXPORTED gl%s" % func_name
            print "\tleave"
            print "\tret"
            print "ENDPROC gl%s" % func_name
            print ""


GenerateEntrypoints()
| gpl-2.0 |
qmarlats/pyquizz | env-3/lib/python3.5/site-packages/sphinx/writers/latex.py | 2 | 77270 | # -*- coding: utf-8 -*-
"""
sphinx.writers.latex
~~~~~~~~~~~~~~~~~~~~
Custom docutils writer for LaTeX.
Much of this code is adapted from Dave Kuhlman's "docpy" writer from his
docutils sandbox.
:copyright: Copyright 2007-2016 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
import sys
from os import path
import warnings
from six import itervalues, text_type
from docutils import nodes, writers
from docutils.writers.latex2e import Babel
from sphinx import addnodes
from sphinx import highlighting
from sphinx.errors import SphinxError
from sphinx.locale import admonitionlabels, _
from sphinx.util import split_into
from sphinx.util.i18n import format_date
from sphinx.util.nodes import clean_astext, traverse_parent
from sphinx.util.texescape import tex_escape_map, tex_replace_map
from sphinx.util.smartypants import educate_quotes_latex
# Skeleton of the generated .tex preamble; every %(...)s slot is filled from
# LaTeXTranslator.elements when the document is assembled in astext().
HEADER = r'''%% Generated by Sphinx.
\def\sphinxdocclass{%(docclass)s}
\documentclass[%(papersize)s,%(pointsize)s%(classoptions)s]{%(wrapperclass)s}
%(passoptionstopackages)s
%(inputenc)s
%(utf8extra)s
%(cmappkg)s
%(fontenc)s
%(amsmath)s
%(babel)s
%(fontpkg)s
%(fncychap)s
%(longtable)s
\usepackage{sphinx}
\usepackage{multirow}
\usepackage{eqparbox}
%(usepackages)s
%(contentsname)s
%(numfig_format)s
%(pageautorefname)s
%(tocdepth)s
%(preamble)s
\title{%(title)s}
\date{%(date)s}
\release{%(release)s}
\author{%(author)s}
\newcommand{\sphinxlogo}{%(logo)s}
\renewcommand{\releasename}{%(releasename)s}
%(makeindex)s
'''

# Emitted once, before the first document's body (see visit_document).
BEGIN_DOC = r'''
\begin{document}
%(shorthandoff)s
%(maketitle)s
%(tableofcontents)s
'''

# Closes the document; the index is printed last.
FOOTER = r'''
\renewcommand{\indexname}{%(indexname)s}
%(printindex)s
\end{document}
'''

# URI prefixes that ShowUrlsTransform treats as external references.
URI_SCHEMES = ('mailto:', 'http:', 'https:', 'ftp:')
class collected_footnote(nodes.footnote):
    """Footnotes that are collected are assigned this class.

    Pure marker subclass -- it adds no behaviour over nodes.footnote.
    """
class UnsupportedError(SphinxError):
    """Raised for markup that cannot be expressed in the LaTeX output."""
    category = 'Markup is unsupported in LaTeX'
class LaTeXWriter(writers.Writer):
    """docutils Writer that renders a doctree to LaTeX via LaTeXTranslator."""

    supported = ('sphinxlatex',)

    settings_spec = ('LaTeX writer options', '', (
        ('Document name', ['--docname'], {'default': ''}),
        ('Document class', ['--docclass'], {'default': 'manual'}),
        ('Author', ['--author'], {'default': ''}),
    ))
    settings_defaults = {}

    # Filled in by translate(); docutils reads this attribute afterwards.
    output = None

    def __init__(self, builder):
        writers.Writer.__init__(self)
        self.builder = builder
        # Allow the builder to supply a custom translator class.
        self.translator_class = (
            self.builder.translator_class or LaTeXTranslator)

    def translate(self):
        # Expand/convert URLs in-tree before visiting (latex_show_urls).
        transform = ShowUrlsTransform(self.document)
        transform.apply()
        visitor = self.translator_class(self.document, self.builder)
        self.document.walkabout(visitor)
        self.output = visitor.astext()
# Helper classes
class ExtBabel(Babel):
    """Babel helper extended with Sphinx-specific language queries."""

    # Languages whose babel shorthands make a literal '"' active and must
    # therefore be switched off in the document body.
    _SHORTHAND_LANGS = frozenset([
        'de', 'ngerman', 'sl', 'slovene', 'pt', 'portuges',
        'es', 'spanish', 'nl', 'dutch', 'pl', 'polish',
        'it', 'italian',
    ])

    # Languages typeset in Cyrillic script.
    _CYRILLIC_LANGS = frozenset([
        'bg', 'bulgarian', 'kk', 'kazakh', 'mn', 'mongolian',
        'ru', 'russian', 'uk', 'ukrainian',
    ])

    def __init__(self, language_code):
        # Babel's constructor wants a string; normalize None to ''.
        super(ExtBabel, self).__init__(language_code or '')
        self.language_code = language_code

    def _short_language(self):
        # e.g. 'pt_BR' -> 'pt'
        return self.language.split('_')[0]

    def get_shorthandoff(self):
        """Return the \\shorthandoff command needed for this language, or ''."""
        if self._short_language() in self._SHORTHAND_LANGS:
            return '\\shorthandoff{"}'
        return ''

    def uses_cyrillic(self):
        """True when the active language uses Cyrillic script."""
        return self._short_language() in self._CYRILLIC_LANGS

    def is_supported_language(self):
        """True when babel itself knows the configured language."""
        return bool(super(ExtBabel, self).get_language())

    def get_language(self):
        """Babel language name, defaulting to 'english' when unknown."""
        return super(ExtBabel, self).get_language() or 'english'
class ShowUrlsTransform(object):
    """Doctree transform implementing the latex_show_urls option.

    Depending on the config value, external reference URLs are appended
    inline in parentheses or emitted as footnotes; in the footnote case
    all auto-numbered footnotes are renumbered afterwards so the new ones
    do not collide with manually numbered ones.
    """

    # Set to True once at least one footnote was inserted.
    expanded = False

    def __init__(self, document):
        self.document = document

    def apply(self):
        # replace id_prefix temporarily so ids generated for the synthetic
        # footnotes cannot clash with existing document ids
        id_prefix = self.document.settings.id_prefix
        self.document.settings.id_prefix = 'show_urls'

        self.expand_show_urls()
        if self.expanded:
            self.renumber_footnotes()

        # restore id_prefix
        self.document.settings.id_prefix = id_prefix

    def expand_show_urls(self):
        show_urls = self.document.settings.env.config.latex_show_urls
        if show_urls is False or show_urls == 'no':
            return

        for node in self.document.traverse(nodes.reference):
            uri = node.get('refuri', '')
            if uri.startswith(URI_SCHEMES):
                if uri.startswith('mailto:'):
                    # show the bare address, not the mailto: scheme
                    uri = uri[7:]
                # only decorate references whose text differs from the URL
                if node.astext() != uri:
                    index = node.parent.index(node)
                    if show_urls == 'footnote':
                        if list(traverse_parent(node, nodes.topic)):
                            # should not expand references in topics
                            pass
                        else:
                            footnote_nodes = self.create_footnote(uri)
                            for i, fn in enumerate(footnote_nodes):
                                node.parent.insert(index + i + 1, fn)

                            self.expanded = True
                    else:  # all other true values (b/w compat)
                        textnode = nodes.Text(" (%s)" % uri)
                        node.parent.insert(index + 1, textnode)

    def create_footnote(self, uri):
        """Build an auto-numbered footnote containing *uri* plus the
        matching footnote_reference, registered with the document."""
        label = nodes.label('', '#')
        para = nodes.paragraph()
        para.append(nodes.Text(uri))
        footnote = nodes.footnote(uri, label, para, auto=1)
        footnote['names'].append('#')
        self.document.note_autofootnote(footnote)

        label = nodes.Text('#')
        footnote_ref = nodes.footnote_reference('[#]_', label, auto=1,
                                                refid=footnote['ids'][0])
        self.document.note_autofootnote_ref(footnote_ref)
        footnote.add_backref(footnote_ref['ids'][0])

        return [footnote, footnote_ref]

    def renumber_footnotes(self):
        def is_used_number(number):
            # True when a manually numbered footnote already claims *number*.
            for node in self.document.traverse(nodes.footnote):
                if not node.get('auto') and number in node['names']:
                    return True
            return False

        def is_auto_footnote(node):
            return isinstance(node, nodes.footnote) and node.get('auto')

        def footnote_ref_by(node):
            # Predicate factory: matches footnote_references pointing at
            # *node* within the same (sub)document.
            ids = node['ids']
            parent = list(traverse_parent(node, (nodes.document, addnodes.start_of_file)))[0]

            def is_footnote_ref(node):
                return (isinstance(node, nodes.footnote_reference) and
                        ids[0] == node['refid'] and
                        parent in list(traverse_parent(node)))

            return is_footnote_ref

        startnum = 1
        for footnote in self.document.traverse(is_auto_footnote):
            # find the next free number not taken by a manual footnote
            while True:
                label = str(startnum)
                startnum += 1
                if not is_used_number(label):
                    break

            # swap the footnote's label node and names entry for the new
            # number, then update every reference to it
            old_label = footnote[0].astext()
            footnote.remove(footnote[0])
            footnote.insert(0, nodes.label('', label))
            if old_label in footnote['names']:
                footnote['names'].remove(old_label)
            footnote['names'].append(label)

            for footnote_ref in self.document.traverse(footnote_ref_by(footnote)):
                footnote_ref.remove(footnote_ref[0])
                footnote_ref += nodes.Text(label)
class Table(object):
    """Per-table bookkeeping used while translating a reST table to LaTeX."""

    def __init__(self):
        # Column/row counters.
        self.col = self.colcount = self.rowcount = 0
        # Explicit LaTeX column spec and caption, when provided.
        self.colspec = None
        self.caption = None
        # Flags describing the table content / environment choice.
        self.had_head = False
        self.has_problematic = False
        self.has_verbatim = False
        self.longtable = False
class LaTeXTranslator(nodes.NodeVisitor):
sectionnames = ["part", "chapter", "section", "subsection",
"subsubsection", "paragraph", "subparagraph"]
ignore_missing_images = False
default_elements = {
'papersize': 'letterpaper',
'pointsize': '10pt',
'classoptions': '',
'extraclassoptions': '',
'passoptionstopackages': '',
'inputenc': '\\usepackage[utf8]{inputenc}',
'utf8extra': ('\\ifdefined\\DeclareUnicodeCharacter\n'
' \\DeclareUnicodeCharacter{00A0}{\\nobreakspace}\n'
'\\else\\fi'),
'cmappkg': '\\usepackage{cmap}',
'fontenc': '\\usepackage[T1]{fontenc}',
'amsmath': '\\usepackage{amsmath,amssymb}',
'babel': '\\usepackage{babel}',
'fontpkg': '\\usepackage{times}',
'fncychap': '\\usepackage[Bjarne]{fncychap}',
'longtable': '\\usepackage{longtable}',
'usepackages': '',
'numfig_format': '',
'contentsname': '',
'preamble': '',
'title': '',
'date': '',
'release': '',
'author': '',
'logo': '',
'releasename': 'Release',
'makeindex': '\\makeindex',
'shorthandoff': '',
'maketitle': '\\maketitle',
'tableofcontents': '\\tableofcontents',
'footer': '',
'printindex': '\\printindex',
'transition': '\n\n\\bigskip\\hrule{}\\bigskip\n\n',
'figure_align': 'htbp',
'tocdepth': '',
'pageautorefname': '',
}
# sphinx specific document classes
docclasses = ('howto', 'manual')
def __init__(self, document, builder):
nodes.NodeVisitor.__init__(self, document)
self.builder = builder
self.body = []
# flags
self.in_title = 0
self.in_production_list = 0
self.in_footnote = 0
self.in_caption = 0
self.in_container_literal_block = 0
self.in_term = 0
self.in_merged_cell = 0
self.in_minipage = 0
self.first_document = 1
self.this_is_the_title = 1
self.literal_whitespace = 0
self.no_contractions = 0
self.compact_list = 0
self.first_param = 0
self.remember_multirow = {}
self.remember_multirowcol = {}
# determine top section level
if builder.config.latex_toplevel_sectioning:
self.top_sectionlevel = \
self.sectionnames.index(builder.config.latex_toplevel_sectioning)
else:
if document.settings.docclass == 'howto':
self.top_sectionlevel = 2
else:
if builder.config.latex_use_parts:
self.top_sectionlevel = 0
else:
self.top_sectionlevel = 1
# sort out some elements
papersize = builder.config.latex_paper_size + 'paper'
if papersize == 'paper': # e.g. command line "-D latex_paper_size="
papersize = 'letterpaper'
self.elements = self.default_elements.copy()
self.elements.update({
'wrapperclass': self.format_docclass(document.settings.docclass),
'papersize': papersize,
'pointsize': builder.config.latex_font_size,
# if empty, the title is set to the first section title
'title': document.settings.title,
'release': builder.config.release,
'author': document.settings.author,
'releasename': _('Release'),
'preamble': builder.config.latex_preamble,
'indexname': _('Index'),
})
if document.settings.docclass == 'howto':
docclass = builder.config.latex_docclass.get('howto', 'article')
else:
docclass = builder.config.latex_docclass.get('manual', 'report')
self.elements['docclass'] = docclass
if builder.config.today:
self.elements['date'] = builder.config.today
else:
self.elements['date'] = format_date(builder.config.today_fmt or _('%b %d, %Y'),
language=builder.config.language)
if builder.config.latex_logo:
self.elements['logo'] = '\\includegraphics{%s}\\par' % \
path.basename(builder.config.latex_logo)
# setup babel
self.babel = ExtBabel(builder.config.language)
self.elements['classoptions'] += ',' + self.babel.get_language()
if builder.config.language:
if not self.babel.is_supported_language():
self.builder.warn('no Babel option known for language %r' %
builder.config.language)
self.elements['shorthandoff'] = self.babel.get_shorthandoff()
self.elements['fncychap'] = '\\usepackage[Sonny]{fncychap}'
# Times fonts don't work with Cyrillic languages
if self.babel.uses_cyrillic():
self.elements['fontpkg'] = ''
# pTeX (Japanese TeX) for support
if builder.config.language == 'ja':
# use dvipdfmx as default class option in Japanese
self.elements['classoptions'] = ',dvipdfmx'
# disable babel which has not publishing quality in Japanese
self.elements['babel'] = ''
# disable fncychap in Japanese documents
self.elements['fncychap'] = ''
if getattr(builder, 'usepackages', None):
def declare_package(packagename, options=None):
if options:
return '\\usepackage[%s]{%s}' % (options, packagename)
else:
return '\\usepackage{%s}' % (packagename,)
usepackages = (declare_package(*p) for p in builder.usepackages)
self.elements['usepackages'] += "\n".join(usepackages)
# allow the user to override them all
self.elements.update(builder.config.latex_elements)
if self.elements['extraclassoptions']:
self.elements['classoptions'] += ',' + \
self.elements['extraclassoptions']
if document.get('tocdepth'):
# redece tocdepth if `part` or `chapter` is used for top_sectionlevel
# tocdepth = -1: show only parts
# tocdepth = 0: show parts and chapters
# tocdepth = 1: show parts, chapters and sections
# tocdepth = 2: show parts, chapters, sections and subsections
# ...
self.elements['tocdepth'] = ('\\setcounter{tocdepth}{%d}' %
(document['tocdepth'] + self.top_sectionlevel - 2))
if getattr(document.settings, 'contentsname', None):
self.elements['contentsname'] = \
self.babel_renewcommand('\\contentsname', document.settings.contentsname)
self.elements['pageautorefname'] = \
self.babel_defmacro('\\pageautorefname', self.encode(_('page')))
self.elements['numfig_format'] = self.generate_numfig_format(builder)
self.highlighter = highlighting.PygmentsBridge(
'latex',
builder.config.pygments_style, builder.config.trim_doctest_flags)
self.context = []
self.descstack = []
self.bibitems = []
self.table = None
self.next_table_colspec = None
# stack of [language, linenothreshold] settings per file
# the first item here is the default and must not be changed
# the second item is the default for the master file and can be changed
# by .. highlight:: directive in the master file
self.hlsettingstack = 2 * [[builder.config.highlight_language,
sys.maxsize]]
self.bodystack = []
self.footnotestack = []
self.footnote_restricted = False
self.pending_footnotes = []
self.curfilestack = []
self.handled_abbrs = set()
self.next_hyperlink_ids = {}
self.next_section_ids = set()
def pushbody(self, newbody):
self.bodystack.append(self.body)
self.body = newbody
def popbody(self):
body = self.body
self.body = self.bodystack.pop()
return body
def push_hyperlink_ids(self, figtype, ids):
hyperlink_ids = self.next_hyperlink_ids.setdefault(figtype, set())
hyperlink_ids.update(ids)
def pop_hyperlink_ids(self, figtype):
return self.next_hyperlink_ids.pop(figtype, set())
def restrict_footnote(self, node):
if self.footnote_restricted is False:
self.footnote_restricted = node
self.pending_footnotes = []
def unrestrict_footnote(self, node):
if self.footnote_restricted == node:
self.footnote_restricted = False
for footnode in self.pending_footnotes:
footnode['footnotetext'] = True
footnode.walkabout(self)
self.pending_footnotes = []
    def format_docclass(self, docclass):
        """ prepends prefix to sphinx document classes
        """
        # 'howto'/'manual' map to the shipped 'sphinxhowto'/'sphinxmanual'
        # wrapper classes; any other class name is used verbatim.
        if docclass in self.docclasses:
            docclass = 'sphinx' + docclass
        return docclass
def astext(self):
return (HEADER % self.elements +
self.highlighter.get_stylesheet() +
u''.join(self.body) +
'\n' + self.elements['footer'] + '\n' +
self.generate_indices() +
FOOTER % self.elements)
    def hypertarget(self, id, withdoc=True, anchor=True):
        """Return a \\label (optionally preceded by \\phantomsection) for *id*.

        With *withdoc*, the id is prefixed by the current document name so
        labels stay unique across the combined LaTeX document.
        """
        if withdoc:
            id = self.curfilestack[-1] + ':' + id
        # \phantomsection lets hyperref target the label even outside a
        # numbered sectioning unit.
        return (anchor and '\\phantomsection' or '') + \
            '\\label{%s}' % self.idescape(id)
def hyperlink(self, id):
return '{\\hyperref[%s]{' % self.hyperrefescape(id)
def hyperpageref(self, id):
return '\\autopageref*{%s}' % self.idescape(id)
    def idescape(self, id):
        # Make *id* safe as a \label argument: map TeX-special characters
        # via tex_replace_map, force ASCII (non-ASCII becomes backslash
        # escapes), then replace the escapes' backslashes with '_'.
        return text_type(id).translate(tex_replace_map).\
            encode('ascii', 'backslashreplace').decode('ascii').\
            replace('\\', '_')
def hyperrefescape(self, ref):
return self.idescape(ref).replace('-', '\\string-')
    def babel_renewcommand(self, command, definition):
        """Return LaTeX that redefines *command*, routed through babel's
        per-language caption hook when babel is active (otherwise babel
        would overwrite the redefinition at \\begin{document})."""
        if self.elements['babel']:
            prefix = '\\addto\\captions%s{' % self.babel.get_language()
            suffix = '}'
        else:  # babel is disabled (mainly for Japanese environment)
            prefix = ''
            suffix = ''

        return ('%s\\renewcommand{%s}{%s}%s\n' % (prefix, command, definition, suffix))
    def babel_defmacro(self, name, definition):
        """Return LaTeX defining macro *name*, wrapped in babel's
        per-language extras hook when babel is active."""
        if self.elements['babel']:
            prefix = '\\addto\\extras%s{' % self.babel.get_language()
            suffix = '}'
        else:  # babel is disabled (mainly for Japanese environment)
            prefix = ''
            suffix = ''

        return ('%s\\def%s{%s}%s\n' % (prefix, name, definition, suffix))
def generate_numfig_format(self, builder):
ret = []
figure = self.builder.config.numfig_format['figure'].split('%s', 1)
if len(figure) == 1:
ret.append('\\def\\fnum@figure{%s}\n' %
text_type(figure[0]).translate(tex_escape_map))
else:
definition = text_type(figure[0]).translate(tex_escape_map)
ret.append(self.babel_renewcommand('\\figurename', definition))
if figure[1]:
ret.append('\\makeatletter\n')
ret.append('\\def\\fnum@figure{\\figurename\\thefigure%s}\n' %
text_type(figure[1]).translate(tex_escape_map))
ret.append('\\makeatother\n')
table = self.builder.config.numfig_format['table'].split('%s', 1)
if len(table) == 1:
ret.append('\\def\\fnum@table{%s}\n' %
text_type(table[0]).translate(tex_escape_map))
else:
definition = text_type(table[0]).translate(tex_escape_map)
ret.append(self.babel_renewcommand('\\tablename', definition))
if table[1]:
ret.append('\\makeatletter\n')
ret.append('\\def\\fnum@table{\\tablename\\thetable%s}\n' %
text_type(table[1]).translate(tex_escape_map))
ret.append('\\makeatother\n')
codeblock = self.builder.config.numfig_format['code-block'].split('%s', 1)
if len(codeblock) == 1:
pass # FIXME
else:
ret.append('\\SetupFloatingEnvironment{literal-block}{name=%s}\n' %
text_type(codeblock[0]).translate(tex_escape_map))
if table[1]:
pass # FIXME
return ''.join(ret)
def generate_indices(self):
def generate(content, collapsed):
ret.append('\\begin{theindex}\n')
ret.append('\\def\\bigletter#1{{\\Large\\sffamily#1}'
'\\nopagebreak\\vspace{1mm}}\n')
for i, (letter, entries) in enumerate(content):
if i > 0:
ret.append('\\indexspace\n')
ret.append('\\bigletter{%s}\n' %
text_type(letter).translate(tex_escape_map))
for entry in entries:
if not entry[3]:
continue
ret.append('\\item {\\texttt{%s}}' % self.encode(entry[0]))
if entry[4]:
# add "extra" info
ret.append(' \\emph{(%s)}' % self.encode(entry[4]))
ret.append(', \\pageref{%s:%s}\n' %
(entry[2], self.idescape(entry[3])))
ret.append('\\end{theindex}\n')
ret = []
# latex_domain_indices can be False/True or a list of index names
indices_config = self.builder.config.latex_domain_indices
if indices_config:
for domain in itervalues(self.builder.env.domains):
for indexcls in domain.indices:
indexname = '%s-%s' % (domain.name, indexcls.name)
if isinstance(indices_config, list):
if indexname not in indices_config:
continue
# deprecated config value
if indexname == 'py-modindex' and \
not self.builder.config.latex_use_modindex:
continue
content, collapsed = indexcls(domain).generate(
self.builder.docnames)
if not content:
continue
ret.append(u'\\renewcommand{\\indexname}{%s}\n' %
indexcls.localname)
generate(content, collapsed)
return ''.join(ret)
def visit_document(self, node):
self.footnotestack.append(self.collect_footnotes(node))
self.curfilestack.append(node.get('docname', ''))
if self.first_document == 1:
# the first document is all the regular content ...
self.body.append(BEGIN_DOC % self.elements)
self.first_document = 0
elif self.first_document == 0:
# ... and all others are the appendices
self.body.append(u'\n\\appendix\n')
self.first_document = -1
if 'docname' in node:
self.body.append(self.hypertarget(':doc'))
# "- 1" because the level is increased before the title is visited
self.sectionlevel = self.top_sectionlevel - 1
def depart_document(self, node):
if self.bibitems:
widest_label = ""
for bi in self.bibitems:
if len(widest_label) < len(bi[0]):
widest_label = bi[0]
self.body.append(u'\n\\begin{thebibliography}{%s}\n' % widest_label)
for bi in self.bibitems:
target = self.hypertarget(bi[2] + ':' + bi[3],
withdoc=False)
self.body.append(u'\\bibitem[%s]{%s}{%s %s}\n' %
(self.encode(bi[0]), self.idescape(bi[0]),
target, bi[1]))
self.body.append(u'\\end{thebibliography}\n')
self.bibitems = []
def visit_start_of_file(self, node):
# collect new footnotes
self.footnotestack.append(self.collect_footnotes(node))
# also add a document target
self.next_section_ids.add(':doc')
self.curfilestack.append(node['docname'])
# use default highlight settings for new file
self.hlsettingstack.append(self.hlsettingstack[0])
    def collect_footnotes(self, node):
        """Collect every footnote under *node* (without descending into
        included files) into a dict keyed by footnote number.

        Each value is ``[collected_footnote, False]``; the boolean flag is
        presumably flipped when the footnote has been emitted -- it is set
        elsewhere (verify against the visit_footnote handlers).
        """
        def footnotes_under(n):
            # Yield footnote nodes beneath n, stopping at start_of_file
            # boundaries: each included file collects its own footnotes.
            if isinstance(n, nodes.footnote):
                yield n
            else:
                for c in n.children:
                    if isinstance(c, addnodes.start_of_file):
                        continue
                    for k in footnotes_under(c):
                        yield k
        fnotes = {}
        for fn in footnotes_under(node):
            # The first child is the label node holding the number.
            num = fn.children[0].astext().strip()
            newnode = collected_footnote(*fn.children, number=num)
            fnotes[num] = [newnode, False]
        return fnotes
def depart_start_of_file(self, node):
self.footnotestack.pop()
self.curfilestack.pop()
self.hlsettingstack.pop()
def visit_highlightlang(self, node):
self.hlsettingstack[-1] = [node['lang'], node['linenothreshold']]
raise nodes.SkipNode
def visit_section(self, node):
if not self.this_is_the_title:
self.sectionlevel += 1
self.body.append('\n\n')
if node.get('ids'):
self.next_section_ids.update(node['ids'])
def depart_section(self, node):
self.sectionlevel = max(self.sectionlevel - 1,
self.top_sectionlevel - 1)
def visit_problematic(self, node):
self.body.append(r'{\color{red}\bfseries{}')
def depart_problematic(self, node):
self.body.append('}')
    def visit_topic(self, node):
        """Render a topic as a shadow-boxed minipage (built in box register 0)."""
        self.in_minipage = 1
        self.body.append('\\setbox0\\vbox{\n'
                         '\\begin{minipage}{0.95\\linewidth}\n')
    def depart_topic(self, node):
        self.in_minipage = 0
        self.body.append('\\end{minipage}}\n'
                         '\\begin{center}\\setlength{\\fboxsep}{5pt}'
                         '\\shadowbox{\\box0}\\end{center}\n')
    # sidebars share the topic rendering
    visit_sidebar = visit_topic
    depart_sidebar = depart_topic
    def visit_glossary(self, node):
        pass
    def depart_glossary(self, node):
        pass
    def visit_productionlist(self, node):
        """Open the productionlist environment for grammar productions."""
        self.body.append('\n\n\\begin{productionlist}\n')
        self.in_production_list = 1
    def depart_productionlist(self, node):
        self.body.append('\\end{productionlist}\n\n')
        self.in_production_list = 0
    def visit_production(self, node):
        """Emit one grammar production; continuation lines have no token name."""
        if node['tokenname']:
            tn = node['tokenname']
            # make the token linkable via a grammar-token-* hypertarget
            self.body.append(self.hypertarget('grammar-token-' + tn))
            self.body.append('\\production{%s}{' % self.encode(tn))
        else:
            self.body.append('\\productioncont{')
    def depart_production(self, node):
        self.body.append('}\n')
    def visit_transition(self, node):
        self.body.append(self.elements['transition'])
    def depart_transition(self, node):
        pass
def visit_title(self, node):
parent = node.parent
if isinstance(parent, addnodes.seealso):
# the environment already handles this
raise nodes.SkipNode
elif self.this_is_the_title:
if len(node.children) != 1 and not isinstance(node.children[0],
nodes.Text):
self.builder.warn('document title is not a single Text node',
(self.curfilestack[-1], node.line))
if not self.elements['title']:
# text needs to be escaped since it is inserted into
# the output literally
self.elements['title'] = node.astext().translate(tex_escape_map)
self.this_is_the_title = 0
raise nodes.SkipNode
elif isinstance(parent, nodes.section):
short = ''
if node.traverse(nodes.image):
short = '[%s]' % ' '.join(clean_astext(node).split()).translate(tex_escape_map)
try:
self.body.append(r'\%s%s{' % (self.sectionnames[self.sectionlevel], short))
except IndexError:
# just use "subparagraph", it's not numbered anyway
self.body.append(r'\%s%s{' % (self.sectionnames[-1], short))
self.context.append('}\n')
self.restrict_footnote(node)
if self.next_section_ids:
for id in self.next_section_ids:
self.context[-1] += self.hypertarget(id, anchor=False)
self.next_section_ids.clear()
elif isinstance(parent, (nodes.topic, nodes.sidebar)):
self.body.append(r'\textbf{')
self.context.append('}\n\n\medskip\n\n')
elif isinstance(parent, nodes.Admonition):
self.body.append('{')
self.context.append('}\n')
elif isinstance(parent, nodes.table):
# Redirect body output until title is finished.
self.pushbody([])
else:
self.builder.warn(
'encountered title node not in section, topic, table, '
'admonition or sidebar',
(self.curfilestack[-1], node.line or ''))
self.body.append('\\textbf{')
self.context.append('}\n')
self.in_title = 1
    def depart_title(self, node):
        """Close the heading markup; table titles become the table caption."""
        self.in_title = 0
        if isinstance(node.parent, nodes.table):
            # retrieve the caption text buffered by visit_title's pushbody
            self.table.caption = self.popbody()
        else:
            self.body.append(self.context.pop())
        self.unrestrict_footnote(node)
    def visit_subtitle(self, node):
        """Sidebar subtitles are rendered bold; others are dropped."""
        if isinstance(node.parent, nodes.sidebar):
            self.body.append('~\\\\\n\\textbf{')
            self.context.append('}\n\\smallskip\n')
        else:
            self.context.append('')
    def depart_subtitle(self, node):
        self.body.append(self.context.pop())
    def visit_desc(self, node):
        """Open the fulllineitems environment for an object description."""
        self.body.append('\n\n\\begin{fulllineitems}\n')
        if self.table:
            self.table.has_problematic = True
    def depart_desc(self, node):
        self.body.append('\n\\end{fulllineitems}\n\n')
    def visit_desc_signature(self, node):
        """Emit the signature macro; choose the args/ret variant if a
        parameter list is present among the children."""
        if node.parent['objtype'] != 'describe' and node['ids']:
            hyper = self.hypertarget(node['ids'][0])
        else:
            hyper = ''
        self.body.append(hyper)
        for child in node:
            if isinstance(child, addnodes.desc_parameterlist):
                self.body.append(r'\pysiglinewithargsret{')
                break
        else:
            # no parameter list: plain signature macro
            self.body.append(r'\pysigline{')
    def depart_desc_signature(self, node):
        self.body.append('}')
    def visit_desc_addname(self, node):
        """Module/class prefix of the signature, in code font."""
        self.body.append(r'\code{')
        self.literal_whitespace += 1
    def depart_desc_addname(self, node):
        self.body.append('}')
        self.literal_whitespace -= 1
    def visit_desc_type(self, node):
        pass
    def depart_desc_type(self, node):
        pass
    def visit_desc_returns(self, node):
        self.body.append(r'{ $\rightarrow$ ')
    def depart_desc_returns(self, node):
        self.body.append(r'}')
    def visit_desc_name(self, node):
        """Object name itself, in bold code font."""
        self.body.append(r'\bfcode{')
        self.no_contractions += 1
        self.literal_whitespace += 1
    def depart_desc_name(self, node):
        self.body.append('}')
        self.literal_whitespace -= 1
        self.no_contractions -= 1
    def visit_desc_parameterlist(self, node):
        # close name, open parameterlist
        self.body.append('}{')
        self.first_param = 1
    def depart_desc_parameterlist(self, node):
        # close parameterlist, open return annotation
        self.body.append('}{')
    def visit_desc_parameter(self, node):
        """Emit one parameter, comma-separated, emphasized unless 'noemph'."""
        if not self.first_param:
            self.body.append(', ')
        else:
            self.first_param = 0
        if not node.hasattr('noemph'):
            self.body.append(r'\emph{')
    def depart_desc_parameter(self, node):
        if not node.hasattr('noemph'):
            self.body.append('}')
    def visit_desc_optional(self, node):
        self.body.append(r'\optional{')
    def depart_desc_optional(self, node):
        self.body.append('}')
    def visit_desc_annotation(self, node):
        self.body.append(r'\strong{')
    def depart_desc_annotation(self, node):
        self.body.append('}')
    def visit_desc_content(self, node):
        if node.children and not isinstance(node.children[0], nodes.paragraph):
            # avoid empty desc environment which causes a formatting bug
            self.body.append('~')
    def depart_desc_content(self, node):
        pass
    def visit_seealso(self, node):
        """Render a seealso block as a bold localized label."""
        self.body.append(u'\n\n\\strong{%s:}\n\n' % admonitionlabels['seealso'])
    def depart_seealso(self, node):
        self.body.append("\n\n")
    def visit_rubric(self, node):
        """Render a rubric as a paragraph heading; the 'Footnotes' rubric is
        dropped since footnotes are emitted inline in LaTeX."""
        if len(node.children) == 1 and node.children[0].astext() in \
                ('Footnotes', _('Footnotes')):
            raise nodes.SkipNode
        self.body.append('\\paragraph{')
        self.context.append('}\n')
        self.in_title = 1
    def depart_rubric(self, node):
        self.in_title = 0
        self.body.append(self.context.pop())
    def visit_footnote(self, node):
        # footnotes are collected up front (collect_footnotes) and emitted
        # at their first reference, so skip them here
        raise nodes.SkipNode
    def visit_collected_footnote(self, node):
        """Emit a previously collected footnote at its reference point."""
        self.in_footnote += 1
        if 'footnotetext' in node:
            self.body.append('\\footnotetext[%s]{\sphinxAtStartFootnote%%' % node['number'])
        else:
            self.body.append('\\footnote[%s]{\sphinxAtStartFootnote%%' % node['number'])
    def depart_collected_footnote(self, node):
        self.body.append('}')
        self.in_footnote -= 1
    def visit_label(self, node):
        """Record citation label/location into the pending bibitem entry."""
        if isinstance(node.parent, nodes.citation):
            self.bibitems[-1][0] = node.astext()
            self.bibitems[-1][2] = self.curfilestack[-1]
            self.bibitems[-1][3] = node.parent['ids'][0]
        raise nodes.SkipNode
    def visit_tabular_col_spec(self, node):
        """Remember an explicit column spec for the next table."""
        self.next_table_colspec = node['spec']
        raise nodes.SkipNode
    def visit_table(self, node):
        """Start buffering a table; nested tables are rejected."""
        if self.table:
            raise UnsupportedError(
                '%s:%s: nested tables are not yet implemented.' %
                (self.curfilestack[-1], node.line or ''))
        self.table = Table()
        self.table.longtable = 'longtable' in node['classes']
        self.tablebody = []
        self.tableheaders = []
        # Redirect body output until table is finished.
        self.pushbody(self.tablebody)
        self.restrict_footnote(node)
    def depart_table(self, node):
        """Assemble the buffered table into the final LaTeX environment.

        Chooses longtable / tabular / tabulary depending on size, verbatim
        content and problematic cells, then emits caption, hypertargets,
        column spec, headers and body in that order.
        """
        # long tables cannot fit on one page, force longtable
        if self.table.rowcount > 30:
            self.table.longtable = True
        self.popbody()
        if not self.table.longtable and self.table.caption is not None:
            # non-longtable caption goes before the environment
            self.body.append('\n\n\\begin{threeparttable}\n'
                             '\\capstart\\caption{')
            for caption in self.table.caption:
                self.body.append(caption)
            self.body.append('}')
            for id in self.pop_hyperlink_ids('table'):
                self.body.append(self.hypertarget(id, anchor=False))
            if node['ids']:
                self.body.append(self.hypertarget(node['ids'][0], anchor=False))
        if self.table.longtable:
            self.body.append('\n\\begin{longtable}')
            endmacro = '\\end{longtable}\n\n'
        elif self.table.has_verbatim:
            self.body.append('\n\\begin{tabular}')
            endmacro = '\\end{tabular}\n\n'
        elif self.table.has_problematic and not self.table.colspec:
            # if the user has given us tabularcolumns, accept them and use
            # tabulary nevertheless
            self.body.append('\n\\begin{tabular}')
            endmacro = '\\end{tabular}\n\n'
        else:
            self.body.append('\n\\begin{tabulary}{\\linewidth}')
            endmacro = '\\end{tabulary}\n\n'
        if self.table.colspec:
            self.body.append(self.table.colspec)
        else:
            if self.table.has_problematic:
                # equal fixed-width p columns as fallback
                colwidth = 0.95 / self.table.colcount
                colspec = ('p{%.3f\\linewidth}|' % colwidth) * \
                    self.table.colcount
                self.body.append('{|' + colspec + '}\n')
            elif self.table.longtable:
                self.body.append('{|' + ('l|' * self.table.colcount) + '}\n')
            else:
                self.body.append('{|' + ('L|' * self.table.colcount) + '}\n')
        if self.table.longtable and self.table.caption is not None:
            # longtable caption is part of the table itself
            self.body.append(u'\\caption{')
            for caption in self.table.caption:
                self.body.append(caption)
            self.body.append('}')
            for id in self.pop_hyperlink_ids('table'):
                self.body.append(self.hypertarget(id, anchor=False))
            self.body.append(u'\\\\\n')
        if self.table.longtable:
            # longtable needs repeated header/footer definitions
            self.body.append('\\hline\n')
            self.body.extend(self.tableheaders)
            self.body.append('\\endfirsthead\n\n')
            self.body.append('\\multicolumn{%s}{c}%%\n' % self.table.colcount)
            self.body.append(r'{{\tablecontinued{\tablename\ \thetable{} -- %s}}} \\'
                             % _('continued from previous page'))
            self.body.append('\n\\hline\n')
            self.body.extend(self.tableheaders)
            self.body.append('\\endhead\n\n')
            self.body.append(r'\hline \multicolumn{%s}{|r|}{{\tablecontinued{%s}}} \\ \hline'
                             % (self.table.colcount,
                                _('Continued on next page')))
            self.body.append('\n\\endfoot\n\n')
            self.body.append('\\endlastfoot\n\n')
        else:
            self.body.append('\\hline\n')
            self.body.extend(self.tableheaders)
        self.body.extend(self.tablebody)
        self.body.append(endmacro)
        if not self.table.longtable and self.table.caption is not None:
            self.body.append('\\end{threeparttable}\n\n')
        self.unrestrict_footnote(node)
        self.table = None
        self.tablebody = None
    def visit_colspec(self, node):
        self.table.colcount += 1
    def depart_colspec(self, node):
        pass
    def visit_tgroup(self, node):
        pass
    def depart_tgroup(self, node):
        pass
    def visit_thead(self, node):
        """Redirect output into the header buffer while inside thead."""
        self.table.had_head = True
        if self.next_table_colspec:
            self.table.colspec = '{%s}\n' % self.next_table_colspec
        self.next_table_colspec = None
        # Redirect head output until header is finished. see visit_tbody.
        self.body = self.tableheaders
    def depart_thead(self, node):
        pass
    def visit_tbody(self, node):
        """Switch output to the body buffer; synthesize a head if missing."""
        if not self.table.had_head:
            self.visit_thead(node)
        self.body = self.tablebody
    def depart_tbody(self, node):
        # forget any multirow/multicol spans at the end of the body
        self.remember_multirow = {}
        self.remember_multirowcol = {}
    def visit_row(self, node):
        """Start a row; drop multicol info for spans that have expired."""
        self.table.col = 0
        for key, value in self.remember_multirow.items():
            if not value and key in self.remember_multirowcol:
                del self.remember_multirowcol[key]
    def depart_row(self, node):
        """End a row, drawing \\cline segments around active multirow spans
        (a full \\hline would cut through them)."""
        self.body.append('\\\\\n')
        if any(self.remember_multirow.values()):
            linestart = 1
            col = self.table.colcount
            for col in range(1, self.table.col + 1):
                if self.remember_multirow.get(col):
                    if linestart != col:
                        linerange = str(linestart) + '-' + str(col - 1)
                        self.body.append('\\cline{' + linerange + '}')
                    linestart = col + 1
                    if self.remember_multirowcol.get(col, 0):
                        linestart += self.remember_multirowcol[col]
            if linestart <= col:
                # trailing segment after the last active span
                linerange = str(linestart) + '-' + str(col)
                self.body.append('\\cline{' + linerange + '}')
        else:
            self.body.append('\\hline')
        self.table.rowcount += 1
    def visit_entry(self, node):
        """Start a table cell, handling multirow/multicol continuation.

        Emits empty placeholder cells for columns still covered by an
        earlier \\multirow span, then opens \\multicolumn/\\multirow (and,
        for large merged cells, an \\eqparbox with redirected output).
        The closing braces are accumulated in *context* for depart_entry.
        """
        if self.table.col == 0:
            # skip over columns occupied by a span from a previous row
            while self.remember_multirow.get(self.table.col + 1, 0):
                self.table.col += 1
                self.remember_multirow[self.table.col] -= 1
                if self.remember_multirowcol.get(self.table.col, 0):
                    extracols = self.remember_multirowcol[self.table.col]
                    self.body.append(' \\multicolumn{')
                    self.body.append(str(extracols + 1))
                    self.body.append('}{|l|}{}')
                    self.table.col += extracols
                self.body.append(' & ')
        else:
            self.body.append(' & ')
        self.table.col += 1
        context = ''
        if 'morecols' in node:
            self.body.append(' \\multicolumn{')
            self.body.append(str(node.get('morecols') + 1))
            if self.table.col == 1:
                # first column gets the left border inside the spec
                self.body.append('}{|l|}{')
            else:
                self.body.append('}{l|}{')
            context += '}'
        if 'morerows' in node:
            self.body.append(' \\multirow{')
            self.body.append(str(node.get('morerows') + 1))
            self.body.append('}{*}{')
            context += '}'
            self.remember_multirow[self.table.col] = node.get('morerows')
        if 'morecols' in node:
            if 'morerows' in node:
                self.remember_multirowcol[self.table.col] = node.get('morecols')
            self.table.col += node.get('morecols')
        if (('morecols' in node or 'morerows' in node) and
                (len(node) > 2 or len(node.astext().split('\n')) > 2)):
            # large merged cell: buffer its content for line-ending rewriting
            self.in_merged_cell = 1
            self.literal_whitespace += 1
            self.body.append('\\eqparbox{%d}{\\vspace{.5\\baselineskip}\n' % id(node))
            self.pushbody([])
            context += '}'
        if isinstance(node.parent.parent, nodes.thead):
            if len(node) == 1 and isinstance(node[0], nodes.paragraph) and node.astext() == '':
                pass
            else:
                # header cells are set in sans-serif
                self.body.append('\\textsf{\\relax ')
                context += '}'
        while self.remember_multirow.get(self.table.col + 1, 0):
            # following columns are covered by spans: emit placeholders
            self.table.col += 1
            self.remember_multirow[self.table.col] -= 1
            context += ' & '
            if self.remember_multirowcol.get(self.table.col, 0):
                extracols = self.remember_multirowcol[self.table.col]
                context += ' \\multicolumn{'
                context += str(extracols + 1)
                context += '}{l|}{}'
                self.table.col += extracols
        if len(node.traverse(nodes.paragraph)) >= 2:
            self.table.has_problematic = True
        self.context.append(context)
    def depart_entry(self, node):
        """Close a table cell; flush merged-cell buffers with LaTeX line breaks."""
        if self.in_merged_cell:
            self.in_merged_cell = 0
            self.literal_whitespace -= 1
            body = self.popbody()
            # Remove empty lines from top of merged cell
            while body and body[0] == "\n":
                body.pop(0)
            for line in body:
                line = re.sub(u'(?<!~\\\\\\\\)\n', u'~\\\\\\\\\n', line)  # escape return code
                self.body.append(line)
        self.body.append(self.context.pop())  # header
    def visit_acks(self, node):
        # this is a list in the source, but should be rendered as a
        # comma-separated list here
        self.body.append('\n\n')
        self.body.append(', '.join(n.astext()
                                   for n in node.children[0].children) + '.')
        self.body.append('\n\n')
        raise nodes.SkipNode
    def visit_bullet_list(self, node):
        """Open itemize unless inside an hlist (which provides one itemize)."""
        if not self.compact_list:
            self.body.append('\\begin{itemize}\n')
        if self.table:
            self.table.has_problematic = True
    def depart_bullet_list(self, node):
        if not self.compact_list:
            self.body.append('\\end{itemize}\n')
    def visit_enumerated_list(self, node):
        """Open enumerate, honoring an explicit start value."""
        self.body.append('\\begin{enumerate}\n')
        if 'start' in node:
            self.body.append('\\setcounter{enumi}{%d}\n' % (node['start'] - 1))
        if self.table:
            self.table.has_problematic = True
    def depart_enumerated_list(self, node):
        self.body.append('\\end{enumerate}\n')
    def visit_list_item(self, node):
        # Append "{}" in case the next character is "[", which would break
        # LaTeX's list environment (no numbering and the "[" is not printed).
        self.body.append(r'\item {} ')
    def depart_list_item(self, node):
        self.body.append('\n')
    def visit_definition_list(self, node):
        self.body.append('\\begin{description}\n')
        if self.table:
            self.table.has_problematic = True
    def depart_definition_list(self, node):
        self.body.append('\\end{description}\n')
    def visit_definition_list_item(self, node):
        pass
    def depart_definition_list_item(self, node):
        pass
    def visit_term(self, node):
        """Open a description item label; closing markup goes onto context."""
        self.in_term += 1
        # \leavevmode avoids bad spacing when the definition starts with a list
        ctx = '}] \\leavevmode'
        if node.get('ids'):
            ctx += self.hypertarget(node['ids'][0])
        self.body.append('\\item[{')
        self.restrict_footnote(node)
        self.context.append(ctx)
    def depart_term(self, node):
        self.body.append(self.context.pop())
        self.unrestrict_footnote(node)
        self.in_term -= 1
    def visit_termsep(self, node):
        warnings.warn('sphinx.addnodes.termsep will be removed at Sphinx-1.5',
                      DeprecationWarning)
        self.body.append(', ')
        raise nodes.SkipNode
    def visit_classifier(self, node):
        self.body.append('{[}')
    def depart_classifier(self, node):
        self.body.append('{]}')
    def visit_definition(self, node):
        pass
    def depart_definition(self, node):
        self.body.append('\n')
    def visit_field_list(self, node):
        self.body.append('\\begin{quote}\\begin{description}\n')
        if self.table:
            self.table.has_problematic = True
    def depart_field_list(self, node):
        self.body.append('\\end{description}\\end{quote}\n')
    def visit_field(self, node):
        pass
    def depart_field(self, node):
        pass
    # field names/bodies render exactly like definition-list terms/definitions
    visit_field_name = visit_term
    depart_field_name = depart_term
    visit_field_body = visit_definition
    depart_field_body = depart_definition
    def visit_paragraph(self, node):
        # insert blank line, if the paragraph follows a non-paragraph node in a compound
        index = node.parent.index(node)
        if (index > 0 and isinstance(node.parent, nodes.compound) and
                not isinstance(node.parent[index - 1], nodes.paragraph) and
                not isinstance(node.parent[index - 1], nodes.compound)):
            self.body.append('\\noindent\n')
        else:
            self.body.append('\n')
    def depart_paragraph(self, node):
        self.body.append('\n')
    def visit_centered(self, node):
        self.body.append('\n\\begin{center}')
        if self.table:
            self.table.has_problematic = True
    def depart_centered(self, node):
        self.body.append('\n\\end{center}')
    def visit_hlist(self, node):
        # for now, we don't support a more compact list format
        # don't add individual itemize environments, but one for all columns
        self.compact_list += 1
        self.body.append('\\begin{itemize}\\setlength{\\itemsep}{0pt}'
                         '\\setlength{\\parskip}{0pt}\n')
        if self.table:
            self.table.has_problematic = True
    def depart_hlist(self, node):
        self.compact_list -= 1
        self.body.append('\\end{itemize}\n')
    def visit_hlistcol(self, node):
        pass
    def depart_hlistcol(self, node):
        pass
def latex_image_length(self, width_str):
match = re.match('(\d*\.?\d*)\s*(\S*)', width_str)
if not match:
# fallback
return width_str
res = width_str
amount, unit = match.groups()[:2]
if not unit or unit == "px":
# pixels: let LaTeX alone
return None
elif unit == "%":
res = "%.3f\\linewidth" % (float(amount) / 100.0)
return res
    def is_inline(self, node):
        """Check whether a node represents an inline element."""
        return isinstance(node.parent, nodes.TextElement)
    def visit_image(self, node):
        """Emit \\includegraphics with scale/width/height/align wrappers."""
        attrs = node.attributes
        pre = []  # in reverse order
        post = []
        include_graphics_options = []
        is_inline = self.is_inline(node)
        if 'scale' in attrs:
            # Could also be done with ``scale`` option to
            # ``\includegraphics``; doing it this way for consistency.
            pre.append('\\scalebox{%f}{' % (attrs['scale'] / 100.0,))
            post.append('}')
        if 'width' in attrs:
            w = self.latex_image_length(attrs['width'])
            if w:
                include_graphics_options.append('width=%s' % w)
        if 'height' in attrs:
            h = self.latex_image_length(attrs['height'])
            if h:
                include_graphics_options.append('height=%s' % h)
        if 'align' in attrs:
            align_prepost = {
                # By default latex aligns the top of an image.
                (1, 'top'): ('', ''),
                (1, 'middle'): ('\\raisebox{-0.5\\height}{', '}'),
                (1, 'bottom'): ('\\raisebox{-\\height}{', '}'),
                (0, 'center'): ('{\\hspace*{\\fill}', '\\hspace*{\\fill}}'),
                # These 2 don't exactly do the right thing. The image should
                # be floated alongside the paragraph. See
                # http://www.w3.org/TR/html4/struct/objects.html#adef-align-IMG
                (0, 'left'): ('{', '\\hspace*{\\fill}}'),
                (0, 'right'): ('{\\hspace*{\\fill}', '}'),
            }
            try:
                pre.append(align_prepost[is_inline, attrs['align']][0])
                post.append(align_prepost[is_inline, attrs['align']][1])
            except KeyError:
                pass
        if not is_inline:
            pre.append('\n')
            post.append('\n')
        pre.reverse()
        if node['uri'] in self.builder.images:
            uri = self.builder.images[node['uri']]
        else:
            # missing image!
            if self.ignore_missing_images:
                return
            uri = node['uri']
        if uri.find('://') != -1:
            # ignore remote images
            return
        self.body.extend(pre)
        options = ''
        if include_graphics_options:
            options = '[%s]' % ','.join(include_graphics_options)
        # braces around the basename protect dots in the filename from
        # being interpreted as an extension separator by LaTeX
        base, ext = path.splitext(uri)
        self.body.append('\\includegraphics%s{{%s}%s}' % (options, base, ext))
        self.body.extend(post)
    def depart_image(self, node):
        pass
    def visit_figure(self, node):
        """Open a figure: wrapfigure for aligned+sized figures, a centered
        block inside minipages, otherwise a regular figure environment."""
        ids = ''
        for id in self.pop_hyperlink_ids('figure'):
            ids += self.hypertarget(id, anchor=False)
        if node['ids']:
            ids += self.hypertarget(node['ids'][0], anchor=False)
        self.restrict_footnote(node)
        if (len(node.children) and
           isinstance(node.children[0], nodes.image) and
           node.children[0]['ids']):
            ids += self.hypertarget(node.children[0]['ids'][0], anchor=False)
        if 'width' in node and node.get('align', '') in ('left', 'right'):
            self.body.append('\\begin{wrapfigure}{%s}{%s}\n\\centering' %
                             (node['align'] == 'right' and 'r' or 'l',
                              node['width']))
            self.context.append(ids + '\\end{wrapfigure}\n')
        elif self.in_minipage:
            # float environments are not allowed inside a minipage
            if ('align' not in node.attributes or
                    node.attributes['align'] == 'center'):
                self.body.append('\n\\begin{center}')
                self.context.append('\\end{center}\n')
            else:
                self.body.append('\n\\begin{flush%s}' % node.attributes['align'])
                self.context.append('\\end{flush%s}\n' % node.attributes['align'])
        else:
            if ('align' not in node.attributes or
                    node.attributes['align'] == 'center'):
                # centering does not add vertical space like center.
                align = '\n\\centering'
                align_end = ''
            else:
                # TODO non vertical space for other alignments.
                align = '\\begin{flush%s}' % node.attributes['align']
                align_end = '\\end{flush%s}' % node.attributes['align']
            self.body.append('\\begin{figure}[%s]%s\n' % (
                self.elements['figure_align'], align))
            if any(isinstance(child, nodes.caption) for child in node):
                self.body.append('\\capstart\n')
            self.context.append(ids + align_end + '\\end{figure}\n')
    def depart_figure(self, node):
        self.body.append(self.context.pop())
        self.unrestrict_footnote(node)
    def visit_caption(self, node):
        """Open the caption macro matching the current context."""
        self.in_caption += 1
        if self.in_container_literal_block:
            self.body.append('\\SphinxSetupCaptionForVerbatim{literal-block}{')
        elif self.in_minipage and isinstance(node.parent, nodes.figure):
            # \caption is invalid outside a float; use capt-of's \captionof
            self.body.append('\\captionof{figure}{')
        else:
            self.body.append('\\caption{')
    def depart_caption(self, node):
        self.body.append('}')
        self.in_caption -= 1
    def visit_legend(self, node):
        self.body.append('{\\small ')
    def depart_legend(self, node):
        self.body.append('}')
    def visit_admonition(self, node):
        """Generic admonition: a plain 'note' notice environment."""
        self.body.append('\n\\begin{notice}{note}')
    def depart_admonition(self, node):
        self.body.append('\\end{notice}\n')
    def _make_visit_admonition(name):
        # factory (runs at class definition time): build a visitor that opens
        # a notice environment with the localized label for *name*
        def visit_admonition(self, node):
            self.body.append(u'\n\\begin{notice}{%s}{%s:}' %
                             (name, admonitionlabels[name]))
        return visit_admonition
    def _depart_named_admonition(self, node):
        self.body.append('\\end{notice}\n')
    visit_attention = _make_visit_admonition('attention')
    depart_attention = _depart_named_admonition
    visit_caution = _make_visit_admonition('caution')
    depart_caution = _depart_named_admonition
    visit_danger = _make_visit_admonition('danger')
    depart_danger = _depart_named_admonition
    visit_error = _make_visit_admonition('error')
    depart_error = _depart_named_admonition
    visit_hint = _make_visit_admonition('hint')
    depart_hint = _depart_named_admonition
    visit_important = _make_visit_admonition('important')
    depart_important = _depart_named_admonition
    visit_note = _make_visit_admonition('note')
    depart_note = _depart_named_admonition
    visit_tip = _make_visit_admonition('tip')
    depart_tip = _depart_named_admonition
    visit_warning = _make_visit_admonition('warning')
    depart_warning = _depart_named_admonition
    def visit_versionmodified(self, node):
        pass
    def depart_versionmodified(self, node):
        pass
    def visit_target(self, node):
        """Emit hypertargets for a target node, or defer them when the target
        belongs to a following section or numbered figure."""
        def add_target(id):
            # indexing uses standard LaTeX index markup, so the targets
            # will be generated differently
            if id.startswith('index-'):
                return
            # do not generate \phantomsection in \section{}
            anchor = not self.in_title
            self.body.append(self.hypertarget(id, anchor=anchor))
        # postpone the labels until after the sectioning command
        parindex = node.parent.index(node)
        try:
            try:
                next = node.parent[parindex+1]
            except IndexError:
                # last node in parent, look at next after parent
                # (for section of equal level) if it exists
                if node.parent.parent is not None:
                    next = node.parent.parent[
                        node.parent.parent.index(node.parent)]
                else:
                    raise
            if isinstance(next, nodes.section):
                if node.get('refid'):
                    self.next_section_ids.add(node['refid'])
                self.next_section_ids.update(node['ids'])
                return
            else:
                domain = self.builder.env.domains['std']
                figtype = domain.get_figtype(next)
                if figtype and domain.get_numfig_title(next):
                    ids = set()
                    # labels for figures go in the figure body, not before
                    if node.get('refid'):
                        ids.add(node['refid'])
                    ids.update(node['ids'])
                    self.push_hyperlink_ids(figtype, ids)
                    return
        except IndexError:
            pass
        if 'refuri' in node:
            return
        if node.get('refid'):
            add_target(node['refid'])
        for id in node['ids']:
            add_target(id)
    def depart_target(self, node):
        pass
    def visit_attribution(self, node):
        self.body.append('\n\\begin{flushright}\n')
        self.body.append('---')
    def depart_attribution(self, node):
        self.body.append('\n\\end{flushright}\n')
    def visit_index(self, node, scre=re.compile(r';\s*')):
        """Emit \\index entries; *scre* splits sub-entries at semicolons.

        The pre-compiled default argument is intentional (hoists the
        compile out of every call).
        """
        if not node.get('inline', True):
            self.body.append('\n')
        entries = node['entries']
        for type, string, tid, ismain, key_ in entries:
            m = ''
            if ismain:
                m = '|textbf'
            try:
                if type == 'single':
                    p = scre.sub('!', self.encode(string))
                    self.body.append(r'\index{%s%s}' % (p, m))
                elif type == 'pair':
                    p1, p2 = [self.encode(x) for x in split_into(2, 'pair', string)]
                    self.body.append(r'\index{%s!%s%s}\index{%s!%s%s}' %
                                     (p1, p2, m, p2, p1, m))
                elif type == 'triple':
                    p1, p2, p3 = [self.encode(x)
                                  for x in split_into(3, 'triple', string)]
                    self.body.append(
                        r'\index{%s!%s %s%s}\index{%s!%s, %s%s}'
                        r'\index{%s!%s %s%s}' %
                        (p1, p2, p3, m, p2, p3, p1, m, p3, p1, p2, m))
                elif type == 'see':
                    p1, p2 = [self.encode(x) for x in split_into(2, 'see', string)]
                    self.body.append(r'\index{%s|see{%s}}' % (p1, p2))
                elif type == 'seealso':
                    # NOTE(review): 'seealso' entries emit the same \see
                    # markup as 'see' — confirm whether \seealso was intended
                    p1, p2 = [self.encode(x) for x in split_into(2, 'seealso', string)]
                    self.body.append(r'\index{%s|see{%s}}' % (p1, p2))
                else:
                    self.builder.warn(
                        'unknown index entry type %s found' % type)
            except ValueError as err:
                self.builder.warn(str(err))
        raise nodes.SkipNode
    def visit_raw(self, node):
        """Pass raw content through only when its format includes 'latex'."""
        if 'latex' in node.get('format', '').split():
            self.body.append(node.astext())
        raise nodes.SkipNode
    def visit_reference(self, node):
        """Emit hyperlink markup: \\url/\\href for external URIs, hyperlinks
        with optional page references for internal targets."""
        if not self.in_title:
            for id in node.get('ids'):
                anchor = not self.in_caption
                self.body += self.hypertarget(id, anchor=anchor)
        uri = node.get('refuri', '')
        if not uri and node.get('refid'):
            # same-document reference, spelled in the '%doc#label' form
            uri = '%' + self.curfilestack[-1] + '#' + node['refid']
        if self.in_title or not uri:
            self.context.append('')
        elif uri.startswith(URI_SCHEMES):
            if len(node) == 1 and uri == node[0]:
                self.body.append('\\url{%s}' % self.encode_uri(uri))
                raise nodes.SkipNode
            else:
                self.body.append('\\href{%s}{' % self.encode_uri(uri))
                self.context.append('}')
        elif uri.startswith('#'):
            # references to labels in the same document
            id = self.curfilestack[-1] + ':' + uri[1:]
            self.body.append(self.hyperlink(id))
            self.body.append(r'\emph{')
            if self.builder.config.latex_show_pagerefs and not \
                    self.in_production_list:
                self.context.append('}}} (%s)' % self.hyperpageref(id))
            else:
                self.context.append('}}}')
        elif uri.startswith('%'):
            # references to documents or labels inside documents
            hashindex = uri.find('#')
            if hashindex == -1:
                # reference to the document
                id = uri[1:] + '::doc'
            else:
                # reference to a label
                id = uri[1:].replace('#', ':')
            self.body.append(self.hyperlink(id))
            if len(node) and hasattr(node[0], 'attributes') and \
                    'std-term' in node[0].get('classes', []):
                # don't add a pageref for glossary terms
                self.context.append('}}}')
                # mark up as termreference
                self.body.append(r'\termref{')
            else:
                self.body.append(r'\crossref{')
                if self.builder.config.latex_show_pagerefs and not \
                        self.in_production_list:
                    self.context.append('}}} (%s)' % self.hyperpageref(id))
                else:
                    self.context.append('}}}')
        else:
            self.builder.warn('unusable reference target found: %s' % uri,
                              (self.curfilestack[-1], node.line))
            self.context.append('')
    def depart_reference(self, node):
        self.body.append(self.context.pop())
    def visit_number_reference(self, node):
        """Emit a \\hyperref with the target's number substituted via \\ref."""
        if node.get('refid'):
            id = self.curfilestack[-1] + ':' + node['refid']
        else:
            id = node.get('refuri', '')[1:].replace('#', ':')
        ref = '\\ref{%s}' % self.idescape(id)
        title = node.get('title', '%s')
        # escape the title, but keep the %s placeholder intact for the number
        title = text_type(title).translate(tex_escape_map).replace('\\%s', '%s')
        hyperref = '\\hyperref[%s]{%s}' % (self.idescape(id), title % ref)
        self.body.append(hyperref)
        raise nodes.SkipNode
    def visit_download_reference(self, node):
        pass
    def depart_download_reference(self, node):
        pass
    def visit_pending_xref(self, node):
        pass
    def depart_pending_xref(self, node):
        pass
    def visit_emphasis(self, node):
        self.body.append(r'\emph{')
    def depart_emphasis(self, node):
        self.body.append('}')
    def visit_literal_emphasis(self, node):
        self.body.append(r'\emph{\texttt{')
        self.no_contractions += 1
    def depart_literal_emphasis(self, node):
        self.body.append('}}')
        self.no_contractions -= 1
    def visit_strong(self, node):
        self.body.append(r'\textbf{')
    def depart_strong(self, node):
        self.body.append('}')
    def visit_literal_strong(self, node):
        self.body.append(r'\textbf{\texttt{')
        self.no_contractions += 1
    def depart_literal_strong(self, node):
        self.body.append('}}')
        self.no_contractions -= 1
    def visit_abbreviation(self, node):
        """Small-caps abbreviation; its explanation is appended once only."""
        abbr = node.astext()
        self.body.append(r'\textsc{')
        # spell out the explanation once
        if node.hasattr('explanation') and abbr not in self.handled_abbrs:
            self.context.append('} (%s)' % self.encode(node['explanation']))
            self.handled_abbrs.add(abbr)
        else:
            self.context.append('}')
    def depart_abbreviation(self, node):
        self.body.append(self.context.pop())
    def visit_manpage(self, node):
        # manpage references render like literal emphasis
        return self.visit_literal_emphasis(node)
    def depart_manpage(self, node):
        return self.depart_literal_emphasis(node)
    def visit_title_reference(self, node):
        self.body.append(r'\titleref{')
    def depart_title_reference(self, node):
        self.body.append('}')
    def visit_citation(self, node):
        # TODO maybe use cite bibitems
        # bibitem: [citelabel, citetext, docname, citeid]
        self.bibitems.append(['', '', '', ''])
        # remember where the citation text starts in self.body
        self.context.append(len(self.body))
    def depart_citation(self, node):
        # cut the rendered citation text out of the body and store it
        # in the pending bibitem for the thebibliography section
        size = self.context.pop()
        text = ''.join(self.body[size:])
        del self.body[size:]
        self.bibitems[-1][1] = text
    def visit_citation_reference(self, node):
        # This is currently never encountered, since citation_reference nodes
        # are already replaced by pending_xref nodes in the environment.
        self.body.append('\\cite{%s}' % self.idescape(node.astext()))
        raise nodes.SkipNode
    def visit_literal(self, node):
        """Inline literal: plain \\texttt inside titles, \\code elsewhere."""
        self.no_contractions += 1
        if self.in_title:
            self.body.append(r'\texttt{')
        else:
            self.body.append(r'\code{')
    def depart_literal(self, node):
        self.no_contractions -= 1
        self.body.append('}')
    def visit_footnote_reference(self, node):
        """Inline the collected footnote at its first reference; later
        references (and restricted contexts) only get a \\footnotemark."""
        num = node.astext().strip()
        try:
            footnode, used = self.footnotestack[-1][num]
        except (KeyError, IndexError):
            raise nodes.SkipNode
        # if a footnote has been inserted once, it shouldn't be repeated
        # by the next reference
        if used:
            if self.table or self.in_term or self.in_title:
                self.body.append('\\protect\\footnotemark[%s]' % num)
            else:
                self.body.append('\\footnotemark[%s]' % num)
        elif self.footnote_restricted:
            # footnotes are not allowed here; emit the mark now and the
            # text later (see unrestrict_footnote)
            self.footnotestack[-1][num][1] = True
            self.body.append('\\protect\\footnotemark[%s]' % num)
            self.pending_footnotes.append(footnode)
        else:
            self.footnotestack[-1][num][1] = True
            footnode.walkabout(self)
            raise nodes.SkipChildren
    def depart_footnote_reference(self, node):
        pass
def visit_literal_block(self, node):
if self.in_footnote:
raise UnsupportedError('%s:%s: literal blocks in footnotes are '
'not supported by LaTeX' %
(self.curfilestack[-1], node.line))
if node.rawsource != node.astext():
# most probably a parsed-literal block -- don't highlight
self.body.append('\\begin{alltt}\n')
else:
ids = ''
for id in self.pop_hyperlink_ids('code-block'):
ids += self.hypertarget(id, anchor=False)
if node['ids']:
# suppress with anchor=False \phantomsection insertion
ids += self.hypertarget(node['ids'][0], anchor=False)
# LaTeX code will insert \phantomsection prior to \label
if ids:
self.body.append('\n\\def\\SphinxLiteralBlockLabel{' + ids + '}')
code = node.astext()
lang = self.hlsettingstack[-1][0]
linenos = code.count('\n') >= self.hlsettingstack[-1][1] - 1
highlight_args = node.get('highlight_args', {})
if 'language' in node:
# code-block directives
lang = node['language']
highlight_args['force'] = True
if 'linenos' in node:
linenos = node['linenos']
if lang is self.hlsettingstack[0][0]:
# only pass highlighter options for original language
opts = self.builder.config.highlight_options
else:
opts = {}
def warner(msg):
self.builder.warn(msg, (self.curfilestack[-1], node.line))
hlcode = self.highlighter.highlight_block(code, lang, opts=opts,
warn=warner, linenos=linenos,
**highlight_args)
# workaround for Unicode issue
hlcode = hlcode.replace(u'€', u'@texteuro[]')
# must use original Verbatim environment and "tabular" environment
if self.table:
hlcode = hlcode.replace('\\begin{Verbatim}',
'\\begin{OriginalVerbatim}')
self.table.has_problematic = True
self.table.has_verbatim = True
# get consistent trailer
hlcode = hlcode.rstrip()[:-14] # strip \end{Verbatim}
self.body.append('\n' + hlcode + '\\end{%sVerbatim}\n' %
(self.table and 'Original' or ''))
if ids:
self.body.append('\\let\\SphinxLiteralBlockLabel\empty\n')
raise nodes.SkipNode
    def depart_literal_block(self, node):
        # only reached for the parsed-literal (alltt) branch;
        # the highlighted branch raises SkipNode in visit_literal_block
        self.body.append('\n\\end{alltt}\n')
    # doctest blocks are rendered exactly like literal blocks
    visit_doctest_block = visit_literal_block
    depart_doctest_block = depart_literal_block
def visit_line(self, node):
self.body.append('\item[] ')
    def depart_line(self, node):
        # terminate the \item[] line opened in visit_line
        self.body.append('\n')
def visit_line_block(self, node):
if isinstance(node.parent, nodes.line_block):
self.body.append('\\item[]\n'
'\\begin{DUlineblock}{\\DUlineblockindent}\n')
else:
self.body.append('\n\\begin{DUlineblock}{0em}\n')
if self.table:
self.table.has_problematic = True
    def depart_line_block(self, node):
        # close the environment opened in visit_line_block
        self.body.append('\\end{DUlineblock}\n')
def visit_block_quote(self, node):
# If the block quote contains a single object and that object
# is a list, then generate a list not a block quote.
# This lets us indent lists.
done = 0
if len(node.children) == 1:
child = node.children[0]
if isinstance(child, nodes.bullet_list) or \
isinstance(child, nodes.enumerated_list):
done = 1
if not done:
self.body.append('\\begin{quote}\n')
if self.table:
self.table.has_problematic = True
def depart_block_quote(self, node):
done = 0
if len(node.children) == 1:
child = node.children[0]
if isinstance(child, nodes.bullet_list) or \
isinstance(child, nodes.enumerated_list):
done = 1
if not done:
self.body.append('\\end{quote}\n')
    # option node handling copied from docutils' latex writer
    def visit_option(self, node):
        # self.context[-1] counts options already emitted in this group
        if self.context[-1]:
            # this is not the first option
            self.body.append(', ')
    def depart_option(self, node):
        # flag that the first option is done.
        self.context[-1] += 1
    def visit_option_argument(self, node):
        """The delimiter between an option and its argument."""
        self.body.append(node.get('delimiter', ' '))
    def depart_option_argument(self, node):
        # argument text is emitted by child Text nodes; nothing to close
        pass
    def visit_option_group(self, node):
        self.body.append('\\item [')
        # flag for first option
        self.context.append(0)
    def depart_option_group(self, node):
        self.context.pop()  # the flag
        self.body.append('] ')
    def visit_option_list(self, node):
        # 3cm is the label width reserved for the option names
        self.body.append('\\begin{optionlist}{3cm}\n')
        if self.table:
            self.table.has_problematic = True
    def depart_option_list(self, node):
        self.body.append('\\end{optionlist}\n')
    def visit_option_list_item(self, node):
        # items are produced by the option_group/description children
        pass
    def depart_option_list_item(self, node):
        pass
def visit_option_string(self, node):
ostring = node.astext()
self.no_contractions += 1
self.body.append(self.encode(ostring))
self.no_contractions -= 1
raise nodes.SkipNode
    def visit_description(self, node):
        # separate the option label from its description
        self.body.append(' ')
    def depart_description(self, node):
        pass
    def visit_superscript(self, node):
        # math-mode superscript with upright text inside
        self.body.append('$^{\\text{')
    def depart_superscript(self, node):
        self.body.append('}}$')
    def visit_subscript(self, node):
        # math-mode subscript with upright text inside
        self.body.append('$_{\\text{')
    def depart_subscript(self, node):
        self.body.append('}}$')
    def visit_substitution_definition(self, node):
        # substitutions are resolved by docutils before writing; emit nothing
        raise nodes.SkipNode
    def visit_substitution_reference(self, node):
        raise nodes.SkipNode
def visit_inline(self, node):
classes = node.get('classes', [])
if classes in [['menuselection'], ['guilabel']]:
self.body.append(r'\menuselection{')
self.context.append('}')
elif classes in [['accelerator']]:
self.body.append(r'\accelerator{')
self.context.append('}')
elif classes and not self.in_title:
self.body.append(r'\DUrole{%s}{' % ','.join(classes))
self.context.append('}')
else:
self.context.append('')
    def depart_inline(self, node):
        # emit whatever closing text visit_inline pushed (may be empty)
        self.body.append(self.context.pop())
    def visit_generated(self, node):
        pass
    def depart_generated(self, node):
        pass
    def visit_compound(self, node):
        pass
    def depart_compound(self, node):
        pass
def visit_container(self, node):
if node.get('literal_block'):
self.in_container_literal_block += 1
ids = ''
for id in self.pop_hyperlink_ids('code-block'):
ids += self.hypertarget(id, anchor=False)
if node['ids']:
# suppress with anchor=False \phantomsection insertion
ids += self.hypertarget(node['ids'][0], anchor=False)
# define label for use in caption.
if ids:
self.body.append('\n\\def\\SphinxLiteralBlockLabel{' + ids + '}\n')
def depart_container(self, node):
if node.get('literal_block'):
self.in_container_literal_block -= 1
self.body.append('\\let\\SphinxVerbatimTitle\\empty\n')
self.body.append('\\let\\SphinxLiteralBlockLabel\\empty\n')
    def visit_decoration(self, node):
        pass
    def depart_decoration(self, node):
        pass
    # docutils-generated elements that we don't support
    def visit_header(self, node):
        raise nodes.SkipNode
    def visit_footer(self, node):
        raise nodes.SkipNode
    def visit_docinfo(self, node):
        raise nodes.SkipNode
    # text handling
    def encode(self, text):
        """Escape *text* for LaTeX output, honoring the current
        literal-whitespace and no-contractions modes."""
        text = text_type(text).translate(tex_escape_map)
        if self.literal_whitespace:
            # Insert a blank before the newline, to avoid
            # ! LaTeX Error: There's no line here to end.
            text = text.replace(u'\n', u'~\\\\\n').replace(u' ', u'~')
        if self.no_contractions:
            # keep '--' and "''" from being merged into ligatures/quotes
            text = text.replace('--', u'-{-}')
            text = text.replace("''", u"'{'}")
        return text
def encode_uri(self, text):
# in \href, the tilde is allowed and must be represented literally
return self.encode(text).replace('\\textasciitilde{}', '~')
    def visit_Text(self, node):
        text = self.encode(node.astext())
        if not self.no_contractions:
            # turn straight quotes into typographic LaTeX quotes
            text = educate_quotes_latex(text)
        self.body.append(text)
    def depart_Text(self, node):
        pass
    def visit_comment(self, node):
        # reST comments produce no LaTeX output
        raise nodes.SkipNode
    def visit_meta(self, node):
        # only valid for HTML
        raise nodes.SkipNode
    def visit_system_message(self, node):
        pass
    def depart_system_message(self, node):
        self.body.append('\n')
    def visit_math(self, node):
        # bare "math" nodes are only supported via a Sphinx math extension;
        # warn with source location and drop the node
        self.builder.warn('using "math" markup without a Sphinx math extension '
                          'active, please use one of the math extensions '
                          'described at http://sphinx-doc.org/ext/math.html',
                          (self.curfilestack[-1], node.line))
        raise nodes.SkipNode

    # math blocks get the same warning/skip treatment
    visit_math_block = visit_math
def unknown_visit(self, node):
raise NotImplementedError('Unknown node: ' + node.__class__.__name__)
| gpl-3.0 |
lmprice/ansible | lib/ansible/playbook/task_include.py | 26 | 2872 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.task import Task
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
__all__ = ['TaskInclude']
class TaskInclude(Task):
    """
    A task include is derived from a regular task to handle the special
    circumstances related to the `- include: ...` task.
    """

    # =================================================================================
    # ATTRIBUTES

    _static = FieldAttribute(isa='bool', default=None)

    def __init__(self, block=None, role=None, task_include=None):
        super(TaskInclude, self).__init__(block=block, role=role, task_include=task_include)
        # set to True by the compiler when this include is resolved statically
        self.statically_loaded = False

    @staticmethod
    def load(data, block=None, role=None, task_include=None, variable_manager=None, loader=None):
        """Build a TaskInclude from raw playbook data."""
        new_include = TaskInclude(block=block, role=role, task_include=task_include)
        return new_include.load_data(data, variable_manager=variable_manager, loader=loader)

    def copy(self, exclude_parent=False, exclude_tasks=False):
        """Copy the task, carrying over the statically_loaded flag."""
        copied = super(TaskInclude, self).copy(exclude_parent=exclude_parent, exclude_tasks=exclude_tasks)
        copied.statically_loaded = self.statically_loaded
        return copied

    def get_vars(self):
        '''
        We override the parent Task() classes get_vars here because
        we need to include the args of the include into the vars as
        they are params to the included tasks. But ONLY for 'include'
        '''
        if self.action == 'include':
            all_vars = dict()
            if self._parent:
                all_vars.update(self._parent.get_vars())
            all_vars.update(self.vars)
            all_vars.update(self.args)
        else:
            all_vars = super(TaskInclude, self).get_vars()
        # 'tags' and 'when' are include directives, not parameters for the
        # included tasks, so they never flow through
        all_vars.pop('tags', None)
        all_vars.pop('when', None)
        return all_vars
| gpl-3.0 |
asgard-lab/neutron | neutron/plugins/ml2/drivers/linuxbridge/mech_driver/mech_linuxbridge.py | 17 | 2217 | # Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log
from neutron.agent import securitygroups_rpc
from neutron.common import constants
from neutron.extensions import portbindings
from neutron.plugins.common import constants as p_constants
from neutron.plugins.ml2.drivers import mech_agent
LOG = log.getLogger(__name__)
class LinuxbridgeMechanismDriver(mech_agent.SimpleAgentMechanismDriverBase):
    """Attach to networks using linuxbridge L2 agent.

    The LinuxbridgeMechanismDriver integrates the ml2 plugin with the
    linuxbridge L2 agent. Port binding with this driver requires the
    linuxbridge agent to be running on the port's host, and that agent
    to have connectivity to at least one segment of the port's
    network.
    """

    def __init__(self):
        sg_enabled = securitygroups_rpc.is_firewall_enabled()
        vif_details = {portbindings.CAP_PORT_FILTER: sg_enabled}
        super(LinuxbridgeMechanismDriver, self).__init__(
            constants.AGENT_TYPE_LINUXBRIDGE,
            portbindings.VIF_TYPE_BRIDGE,
            vif_details)

    def get_allowed_network_types(self, agent):
        """Tunnel types reported by the agent, plus local/flat/vlan."""
        tunnel_types = agent['configurations'].get('tunnel_types', [])
        static_types = [p_constants.TYPE_LOCAL, p_constants.TYPE_FLAT,
                        p_constants.TYPE_VLAN]
        return tunnel_types + static_types

    def get_mappings(self, agent):
        """Merge interface and bridge mappings; bridge entries win on conflict."""
        config = agent['configurations']
        merged = dict(config.get('interface_mappings', {}))
        merged.update(config.get('bridge_mappings', {}))
        return merged

    def check_vlan_transparency(self, context):
        """Linuxbridge driver vlan transparency support."""
        return True
| apache-2.0 |
jmerkow/VTK | ThirdParty/AutobahnPython/autobahn/wamp/dealer.py | 16 | 13691 | ###############################################################################
##
## Copyright (C) 2013-2014 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
from __future__ import absolute_import
from autobahn import util
from autobahn.wamp import types
from autobahn.wamp import role
from autobahn.wamp import message
from autobahn.wamp.exception import ProtocolError, ApplicationError
from autobahn.wamp.interfaces import IDealer, IRouter
from autobahn.wamp.message import _URI_PAT_STRICT_NON_EMPTY, _URI_PAT_LOOSE_NON_EMPTY
class Dealer:
   """
   Basic WAMP dealer, implements :class:`autobahn.wamp.interfaces.IDealer`.
   """

   def __init__(self, router, options):
      """
      Constructor.

      :param router: The router this dealer is part of.
      :type router: Object that implements :class:`autobahn.wamp.interfaces.IRouter`.
      :param options: Router options.
      :type options: Instance of :class:`autobahn.wamp.types.RouterOptions`.
      """
      self._router = router
      self._options = options or types.RouterOptions()

      ## map: session -> set(registration)
      ## needed for removeSession
      self._session_to_registrations = {}

      ## map: session_id -> session
      ## needed for exclude/eligible
      self._session_id_to_session = {}

      ## map: procedure -> (registration, session)
      self._procs_to_regs = {}

      ## map: registration -> procedure
      self._regs_to_procs = {}

      ## pending callee invocation requests
      ## map: invocation request id -> (original call message, calling session)
      self._invocations = {}

      ## check all procedure URIs with strict rules
      self._option_uri_strict = self._options.uri_check == types.RouterOptions.URI_CHECK_STRICT

      ## supported features from "WAMP Advanced Profile"
      self._role_features = role.RoleDealerFeatures(caller_identification = True, progressive_call_results = True)

   def attach(self, session):
      """
      Implements :func:`autobahn.wamp.interfaces.IDealer.attach`
      """
      assert(session not in self._session_to_registrations)

      self._session_to_registrations[session] = set()
      self._session_id_to_session[session._session_id] = session

   def detach(self, session):
      """
      Implements :func:`autobahn.wamp.interfaces.IDealer.detach`
      """
      assert(session in self._session_to_registrations)

      ## drop all registrations owned by the departing session
      for registration in self._session_to_registrations[session]:
         del self._procs_to_regs[self._regs_to_procs[registration]]
         del self._regs_to_procs[registration]

      del self._session_to_registrations[session]
      del self._session_id_to_session[session._session_id]

   def processRegister(self, session, register):
      """
      Implements :func:`autobahn.wamp.interfaces.IDealer.processRegister`
      """
      assert(session in self._session_to_registrations)

      ## check procedure URI
      ##
      if (not self._option_uri_strict and not  _URI_PAT_LOOSE_NON_EMPTY.match(register.procedure)) or \
         (    self._option_uri_strict and not _URI_PAT_STRICT_NON_EMPTY.match(register.procedure)):

         reply = message.Error(message.Register.MESSAGE_TYPE, register.request, ApplicationError.INVALID_URI, ["register for invalid procedure URI '{}'".format(register.procedure)])
         session._transport.send(reply)

      else:
         if not register.procedure in self._procs_to_regs:

            ## authorize action
            ##
            d = self._as_future(self._router.authorize, session, register.procedure, IRouter.ACTION_REGISTER)

            def on_authorize_success(authorized):
               if authorized:
                  registration_id = util.id()
                  self._procs_to_regs[register.procedure] = (registration_id, session, register.discloseCaller)
                  self._regs_to_procs[registration_id] = register.procedure

                  self._session_to_registrations[session].add(registration_id)

                  reply = message.Registered(register.request, registration_id)
               else:
                  reply = message.Error(message.Register.MESSAGE_TYPE, register.request, ApplicationError.NOT_AUTHORIZED, ["session is not authorized to register procedure '{}'".format(register.procedure)])

               session._transport.send(reply)

            def on_authorize_error(err):
               reply = message.Error(message.Register.MESSAGE_TYPE, register.request, ApplicationError.AUTHORIZATION_FAILED, ["failed to authorize session for registering procedure '{}': {}".format(register.procedure, err.value)])
               session._transport.send(reply)

            self._add_future_callbacks(d, on_authorize_success, on_authorize_error)

         else:
            ## WAMP basic profile: only a single registration per procedure
            reply = message.Error(message.Register.MESSAGE_TYPE, register.request, ApplicationError.PROCEDURE_ALREADY_EXISTS, ["register for already registered procedure '{}'".format(register.procedure)])
            session._transport.send(reply)

   def processUnregister(self, session, unregister):
      """
      Implements :func:`autobahn.wamp.interfaces.IDealer.processUnregister`
      """
      assert(session in self._session_to_registrations)

      if unregister.registration in self._regs_to_procs:

         ## map registration ID to procedure URI
         procedure = self._regs_to_procs[unregister.registration]

         ## get the session that originally registered the procedure
         _, reg_session, _ = self._procs_to_regs[procedure]

         if session != reg_session:
            ## procedure was registered by a different session!
            ##
            reply = message.Error(message.Unregister.MESSAGE_TYPE, unregister.request, ApplicationError.NO_SUCH_REGISTRATION)
         else:
            ## alright. the procedure had been registered by the session
            ## that now wants to unregister it.
            ##
            del self._procs_to_regs[procedure]
            del self._regs_to_procs[unregister.registration]

            self._session_to_registrations[session].discard(unregister.registration)

            reply = message.Unregistered(unregister.request)
      else:
         reply = message.Error(message.Unregister.MESSAGE_TYPE, unregister.request, ApplicationError.NO_SUCH_REGISTRATION)

      session._transport.send(reply)

   def processCall(self, session, call):
      """
      Implements :func:`autobahn.wamp.interfaces.IDealer.processCall`
      """
      assert(session in self._session_to_registrations)

      ## check procedure URI
      ##
      if (not self._option_uri_strict and not  _URI_PAT_LOOSE_NON_EMPTY.match(call.procedure)) or \
         (    self._option_uri_strict and not _URI_PAT_STRICT_NON_EMPTY.match(call.procedure)):

         reply = message.Error(message.Call.MESSAGE_TYPE, call.request, ApplicationError.INVALID_URI, ["call with invalid procedure URI '{}'".format(call.procedure)])
         session._transport.send(reply)

      else:
         if call.procedure in self._procs_to_regs:

            ## validate payload
            ##
            try:
               self._router.validate('call', call.procedure, call.args, call.kwargs)
            except Exception as e:
               reply = message.Error(message.Call.MESSAGE_TYPE, call.request, ApplicationError.INVALID_ARGUMENT, ["call of procedure '{}' with invalid application payload: {}".format(call.procedure, e)])
               session._transport.send(reply)
               return

            ## authorize action
            ##
            d = self._as_future(self._router.authorize, session, call.procedure, IRouter.ACTION_CALL)

            def on_authorize_success(authorized):
               if authorized:
                  registration_id, endpoint_session, discloseCaller = self._procs_to_regs[call.procedure]

                  request_id = util.id()

                  ## disclose caller identity if the callee requested it on
                  ## registration, or the caller requested it on the call
                  if discloseCaller or call.discloseMe:
                     caller = session._session_id
                     authid = session._authid
                     authrole = session._authrole
                     authmethod = session._authmethod
                  else:
                     caller = None
                     authid = None
                     authrole = None
                     authmethod = None

                  invocation = message.Invocation(request_id,
                                                  registration_id,
                                                  args = call.args,
                                                  kwargs = call.kwargs,
                                                  timeout = call.timeout,
                                                  receive_progress = call.receive_progress,
                                                  caller = caller,
                                                  authid = authid,
                                                  authrole = authrole,
                                                  authmethod = authmethod)

                  self._invocations[request_id] = (call, session)
                  endpoint_session._transport.send(invocation)
               else:
                  reply = message.Error(message.Call.MESSAGE_TYPE, call.request, ApplicationError.NOT_AUTHORIZED, ["session is not authorized to call procedure '{}'".format(call.procedure)])
                  session._transport.send(reply)

            def on_authorize_error(err):
               reply = message.Error(message.Call.MESSAGE_TYPE, call.request, ApplicationError.AUTHORIZATION_FAILED, ["failed to authorize session for calling procedure '{}': {}".format(call.procedure, err.value)])
               session._transport.send(reply)

            self._add_future_callbacks(d, on_authorize_success, on_authorize_error)

         else:
            reply = message.Error(message.Call.MESSAGE_TYPE, call.request, ApplicationError.NO_SUCH_PROCEDURE, ["no procedure '{}' registered".format(call.procedure)])
            session._transport.send(reply)

   def processCancel(self, session, cancel):
      """
      Implements :func:`autobahn.wamp.interfaces.IDealer.processCancel`
      """
      assert(session in self._session_to_registrations)

      raise Exception("not implemented")

   def processYield(self, session, yield_):
      """
      Implements :func:`autobahn.wamp.interfaces.IDealer.processYield`
      """
      assert(session in self._session_to_registrations)

      if yield_.request in self._invocations:

         ## get original call message and calling session
         ##
         call_msg, call_session = self._invocations[yield_.request]

         ## validate payload
         ##
         is_valid = True
         try:
            self._router.validate('call_result', call_msg.procedure, yield_.args, yield_.kwargs)
         except Exception as e:
            is_valid = False
            reply = message.Error(message.Call.MESSAGE_TYPE, call_msg.request, ApplicationError.INVALID_ARGUMENT, ["call result from procedure '{}' with invalid application payload: {}".format(call_msg.procedure, e)])
         else:
            reply = message.Result(call_msg.request, args = yield_.args, kwargs = yield_.kwargs, progress = yield_.progress)

         ## the calling session might have been lost in the meantime ..
         ##
         if call_session._transport:
            call_session._transport.send(reply)

         ## the call is done if it's a regular call (non-progressive) or if the payload was invalid
         ##
         if not yield_.progress or not is_valid:
            del self._invocations[yield_.request]

      else:
         raise ProtocolError("Dealer.onYield(): YIELD received for non-pending request ID {}".format(yield_.request))

   def processInvocationError(self, session, error):
      """
      Implements :func:`autobahn.wamp.interfaces.IDealer.processInvocationError`
      """
      assert(session in self._session_to_registrations)

      if error.request in self._invocations:

         ## get original call message and calling session
         ##
         call_msg, call_session = self._invocations[error.request]

         ## validate payload
         ##
         try:
            self._router.validate('call_error', call_msg.procedure, error.args, error.kwargs)
         except Exception as e:
            reply = message.Error(message.Call.MESSAGE_TYPE, call_msg.request, ApplicationError.INVALID_ARGUMENT, ["call error from procedure '{}' with invalid application payload: {}".format(call_msg.procedure, e)])
         else:
            reply = message.Error(message.Call.MESSAGE_TYPE, call_msg.request, error.error, args = error.args, kwargs = error.kwargs)

         ## the calling session might have been lost in the meantime ..
         ##
         if call_session._transport:
            call_session._transport.send(reply)

         ## the call is done
         ##
         del self._invocations[error.request]

      else:
         raise ProtocolError("Dealer.onInvocationError(): ERROR received for non-pending request_type {} and request ID {}".format(error.request_type, error.request))
IDealer.register(Dealer)
| bsd-3-clause |
Ebag333/Pyfa | gui/commandView.py | 1 | 7036 | # =============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of pyfa.
#
# pyfa is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pyfa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
# =============================================================================
import wx
import gui.display as d
import gui.globalEvents as GE
import gui.droneView
from gui.builtinViewColumns.state import State
from gui.contextMenu import ContextMenu
from service.fit import Fit
from eos.saveddata.drone import Drone as es_Drone
class DummyItem:
    """Minimal item placeholder: a display name and no icon."""

    def __init__(self, txt):
        self.icon = None  # placeholder rows never carry an icon
        self.name = txt
class DummyEntry:
    """Placeholder list entry wrapping a DummyItem built from *txt*."""

    def __init__(self, txt):
        # mimic real entries, which expose their payload as .item
        self.item = DummyItem(txt)
class CommandViewDrop(wx.PyDropTarget):
    """Drop target that decodes 'source:id' text payloads and hands them to dropFn."""
    def __init__(self, dropFn):
        wx.PyDropTarget.__init__(self)
        # callback invoked as dropFn(x, y, [source, id]) on a successful drop
        self.dropFn = dropFn
        # this is really transferring an EVE itemID
        self.dropData = wx.PyTextDataObject()
        self.SetDataObject(self.dropData)

    def OnData(self, x, y, t):
        # called by wx on drop; GetData() fills self.dropData with the payload
        if self.GetData():
            data = self.dropData.GetText().split(':')
            self.dropFn(x, y, data)
        return t
class CommandView(d.Display):
    """List display of command fits attached to the active fit."""

    DEFAULT_COLS = ["Base Name"]

    def __init__(self, parent):
        d.Display.__init__(self, parent, style=wx.LC_SINGLE_SEL | wx.BORDER_NONE)

        # fit id shown on the last fitChanged event; used to detect switches
        self.lastFitId = None

        self.mainFrame.Bind(GE.FIT_CHANGED, self.fitChanged)
        self.Bind(wx.EVT_LEFT_DOWN, self.click)
        self.Bind(wx.EVT_RIGHT_DOWN, self.click)
        self.Bind(wx.EVT_LEFT_DCLICK, self.remove)
        self.Bind(wx.EVT_KEY_UP, self.kbEvent)

        self.droneView = gui.droneView.DroneView

        # GTK delivers right-click context menus on button *up*
        if "__WXGTK__" in wx.PlatformInfo:
            self.Bind(wx.EVT_RIGHT_UP, self.scheduleMenu)
        else:
            self.Bind(wx.EVT_RIGHT_DOWN, self.scheduleMenu)

        self.Bind(wx.EVT_LIST_BEGIN_DRAG, self.startDrag)
        self.SetDropTarget(CommandViewDrop(self.handleListDrag))

    def handleListDrag(self, x, y, data):
        '''
        Handles dragging of items from various pyfa displays which support it

        data is list with two indices:
            data[0] is hard-coded str of originating source
            data[1] is typeID or index of data we want to manipulate
        '''
        pass

    def kbEvent(self, event):
        # Delete / numpad-Delete removes the selected command fit
        keycode = event.GetKeyCode()
        if keycode == wx.WXK_DELETE or keycode == wx.WXK_NUMPAD_DELETE:
            fitID = self.mainFrame.getActiveFit()
            sFit = Fit.getInstance()
            row = self.GetFirstSelected()
            if row != -1:
                sFit.removeCommand(fitID, self.get(row))
                wx.PostEvent(self.mainFrame, GE.FitChanged(fitID=fitID))

    def handleDrag(self, type, fitID):
        # Those are drags coming from pyfa sources, NOT builtin wx drags
        if type == "fit":
            activeFit = self.mainFrame.getActiveFit()
            if activeFit:
                sFit = Fit.getInstance()
                draggedFit = sFit.getFit(fitID)
                sFit.addCommandFit(activeFit, draggedFit)
                wx.PostEvent(self.mainFrame, GE.FitChanged(fitID=activeFit))

    def startDrag(self, event):
        # only drone rows are draggable out of this view
        row = event.GetIndex()
        if row != -1 and isinstance(self.get(row), es_Drone):
            data = wx.PyTextDataObject()
            data.SetText("command:" + str(self.GetItemData(row)))

            dropSource = wx.DropSource(self)
            dropSource.SetData(data)
            dropSource.DoDragDrop()

    def fitSort(self, fit):
        # sort key for the command fit list: alphabetical by fit name
        return fit.name

    def fitChanged(self, event):
        """Refresh the list when the active fit (or its command fits) change."""
        sFit = Fit.getInstance()
        fit = sFit.getFit(event.fitID)

        self.Parent.Parent.DisablePage(self, not fit or fit.isStructure)

        # Clear list and get out if current fitId is None
        if event.fitID is None and self.lastFitId is not None:
            self.DeleteAllItems()
            self.lastFitId = None
            event.Skip()
            return

        stuff = []
        if fit is not None:
            self.fits = fit.commandFits[:]
            self.fits.sort(key=self.fitSort)
            stuff.extend(self.fits)

        if event.fitID != self.lastFitId:
            self.lastFitId = event.fitID

            item = self.GetNextItem(-1, wx.LIST_NEXT_ALL, wx.LIST_STATE_DONTCARE)

            if item != -1:
                self.EnsureVisible(item)

            self.deselectItems()

        # todo: verify
        if stuff == []:
            stuff = [DummyEntry("Drag a fit to this area")]

        self.update(stuff)

    def get(self, row):
        """Return the command fit shown at *row*, or None when the list is empty."""
        numFits = len(self.fits)

        if (numFits) == 0:
            return None

        return self.fits[row]

    def click(self, event):
        # clicking the State column toggles the command fit on/off
        event.Skip()

        row, _ = self.HitTest(event.Position)
        if row != -1:
            item = self.get(row)
            col = self.getColumn(event.Position)
            if col == self.getColIndex(State):
                fitID = self.mainFrame.getActiveFit()
                sFit = Fit.getInstance()
                sFit.toggleCommandFit(fitID, item)
                wx.PostEvent(self.mainFrame, GE.FitChanged(fitID=fitID))

    def scheduleMenu(self, event):
        event.Skip()
        if self.getColumn(event.Position) != self.getColIndex(State):
            # defer so selection is updated before the menu is built
            wx.CallAfter(self.spawnMenu)

    def spawnMenu(self):
        """Build and show the context menu for the selection (or the empty list)."""
        sel = self.GetFirstSelected()
        menu = None
        if sel != -1:
            item = self.get(sel)
            if item is None:
                return
            fitSrcContext = "commandFit"
            fitItemContext = item.name
            context = ((fitSrcContext, fitItemContext),)
            context = context + (("command",),)
            menu = ContextMenu.getMenu((item,), *context)
        elif sel == -1:
            fitID = self.mainFrame.getActiveFit()
            if fitID is None:
                return
            context = (("command",),)
            menu = ContextMenu.getMenu([], *context)
        if menu is not None:
            self.PopupMenu(menu)

    def remove(self, event):
        # double-click anywhere except the State column removes the fit
        row, _ = self.HitTest(event.Position)
        if row != -1:
            col = self.getColumn(event.Position)
            if col != self.getColIndex(State):
                fitID = self.mainFrame.getActiveFit()
                sFit = Fit.getInstance()
                sFit.removeCommand(fitID, self.get(row))
                wx.PostEvent(self.mainFrame, GE.FitChanged(fitID=fitID))
| gpl-3.0 |
wndias/bc.repository | plugin.video.superlistamilton/resources/lib/jsunpack.py | 6 | 1448 | # -*- coding: utf-8 -*-
'''
Genesis Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re
def unpack(script):
    """Unpack Dean Edwards style p,a,c,k-packed JavaScript and return the source."""
    pieces = script.split(";',")
    payload = str(pieces[0])
    params = pieces[1].split(",")
    radix = int(params[0])
    count = int(params[1])
    # the word dictionary is a '|'-separated string; strip quoting first
    words = params[2].split(".")[0].replace("'", '').split('|')
    unpacked = str(__unpack(payload, radix, count, words, '', ''))
    return unpacked.replace('\\', '')
def __unpack(p, a, c, k, e, d):
    """Substitute base-*a* encoded tokens in *p* with their words from *k*."""
    # NOTE(review): the loop stops at index 1, so the word k[0] is never
    # substituted -- confirm against real packed input before changing.
    index = c
    while index > 1:
        index -= 1
        if k[index]:
            token = str(__itoa(index, a))
            p = re.sub('\\b' + token + '\\b', k[index], p)
    return p
def __itoa(num, radix):
    """Render a non-negative integer in the given radix (lowercase digits).

    Fixes: the original used ``num /= radix`` which is true division on
    Python 3 (making the loop never terminate correctly / producing float
    indices), and returned '' for 0 instead of '0'. ``divmod`` with floor
    division is correct on both Python 2 and 3.
    """
    if num == 0:
        return "0"
    digits = "0123456789abcdefghijklmnopqrstuvwxyz"
    result = ""
    while num > 0:
        num, rem = divmod(num, radix)
        result = digits[rem] + result
    return result
| gpl-2.0 |
FAForever/client | src/config/__init__.py | 1 | 10828 | from . import version
import os
import sys
import locale
import logging
import fafpath
import traceback
import faulthandler
from PyQt5 import QtCore
from logging.handlers import RotatingFileHandler, MemoryHandler
if sys.platform == 'win32':
import win32api
import win32con
import win32security
import ctypes
from . import admin
# Qt-backed persistent settings store (INI file, per-user scope).
_settings = QtCore.QSettings(QtCore.QSettings.IniFormat,
                             QtCore.QSettings.UserScope,
                             "ForgedAllianceForever", "FA Lobby")
# Values set with persist=False live only here, for the process lifetime.
_unpersisted_settings = {}

# Directory containing the INI file; unit DB config lives alongside it.
CONFIG_PATH = os.path.dirname(_settings.fileName())
UNITDB_CONFIG_FILE = os.path.join(CONFIG_PATH, "unitdb.conf")
class Settings:
    """
    This wraps QSettings, fetching default values from the
    selected configuration module if the key isn't found.

    Lookup order: in-process overrides (_unpersisted_settings), then the
    persisted QSettings store, then the environment defaults.
    """

    @staticmethod
    def get(key, default=None, type=str):
        # Get from a local dict cache before hitting QSettings
        # this is for properties such as client.login which we
        # don't necessarily want to persist
        if key in _unpersisted_settings:
            return _unpersisted_settings[key]
        # Hit QSettings to see if the user has defined a value for the key
        if _settings.contains(key):
            return _settings.value(key, type=type)
        # Try out our defaults for the current environment
        return defaults.get(key, default)

    @staticmethod
    def set(key, value, persist=True):
        """Set *key*; with persist=False the value is process-local only."""
        _unpersisted_settings[key] = value
        if not persist:
            # drop any stale persisted value so it cannot shadow later reads
            _settings.remove(key)
        else:
            _settings.setValue(key, value)

    @staticmethod
    def remove(key):
        """Remove *key* from both the in-process cache and the persisted store."""
        if key in _unpersisted_settings:
            del _unpersisted_settings[key]
        if _settings.contains(key):
            _settings.remove(key)

    @staticmethod
    def persisted_property(key, default_value=None,
                           persist_if=lambda self: True, type=str):
        """
        Create a magically persisted property

        :param key: QSettings key to persist with
        :param default_value: default value
        :param persist_if: Lambda predicate that gets self as a first argument.
        Determines whether or not to persist the value
        :param type: Type of values for persisting
        :return: a property suitable for a class
        """
        return property(
            lambda s: Settings.get(key, default=default_value, type=type),
            lambda s, v: Settings.set(key, v, persist=persist_if(s)),
            # fix: the format string was missing the second placeholder, so
            # the default value never appeared in the generated docstring
            doc='Persisted property: {}. Default: {}'.format(key, default_value))

    @staticmethod
    def sync():
        """Flush pending QSettings writes to disk."""
        _settings.sync()

    @staticmethod
    def fileName():
        """Path of the backing INI file."""
        return _settings.fileName()

    @staticmethod
    def contains(key):
        return key in _unpersisted_settings or _settings.contains(key)
def set_data_path_permissions():
    """
    Set the owner of C:\ProgramData\FAForever recursively to the current user
    """
    if not admin.isUserAdmin():
        # each runAsAdmin call below triggers its own UAC prompt
        win32api.MessageBox(
            0,
            ("FA Forever needs to fix folder permissions due to user change. "
             "Please confirm the following two admin prompts."),
            "User changed")
    if sys.platform == 'win32' and ('CI' not in os.environ):
        data_path = Settings.get('client/data_path')
        if os.path.exists(data_path):
            my_user = win32api.GetUserNameEx(win32con.NameSamCompatible)
            # take ownership recursively, then reset ACLs to inherited defaults
            admin.runAsAdmin(["icacls", data_path, "/setowner", my_user, "/T"])
            admin.runAsAdmin(["icacls", data_path, "/reset", "/T"])
def check_data_path_permissions():
    r"""
    Checks if the current user is owner of C:\ProgramData\FAForever
    Fixes the permissions in case that FAF was run as different user before
    """
    if sys.platform == 'win32' and ('CI' not in os.environ):
        data_path = Settings.get('client/data_path')
        if os.path.exists(data_path):
            try:
                my_user = win32api.GetUserNameEx(win32con.NameSamCompatible)
                sd = win32security.GetFileSecurity(
                    data_path, win32security.OWNER_SECURITY_INFORMATION)
                owner_sid = sd.GetSecurityDescriptorOwner()
                # NOTE(review): 'type' shadows the builtin here; harmless in
                # this scope but worth renaming on a behavior-changing pass.
                name, domain, type = win32security.LookupAccountSid(
                    None, owner_sid)
                data_path_owner = "%s\\%s" % (domain, name)
                if my_user != data_path_owner:
                    set_data_path_permissions()
            except Exception as e:
                # we encountered error 1332 in win32security.LookupAccountSid
                # here: http://forums.faforever.com/viewtopic.php?f=3&t=13728
                # msdn.microsoft.com/en-us/library/windows/desktop/aa379166.aspx
                # states:
                # "It also occurs for SIDs that have no corresponding account
                # name, such as a logon SID that identifies a logon session."
                # so let's just fix permissions on every exception for now and
                # wait for someone stuck in a permission-loop
                win32api.MessageBox(
                    0,
                    "FA Forever ran into an exception "
                    "checking the data folder permissions: '{}'\n"
                    "If you get this popup more than one time, please report "
                    "a screenshot of this popup to tech support forum. "
                    "Full stacktrace:\n{}".format(e, traceback.format_exc()),
                    "Permission check exception")
                set_data_path_permissions()
def make_dirs():
    """Create every configured data/log/game directory that is missing.

    Raises Exception when one of the required path settings is unset.  On a
    failure to create a directory the ProgramData ownership is repaired via
    set_data_path_permissions() and creation is retried once.
    """
    check_data_path_permissions()
    # 'key' instead of the original 'dir', which shadowed the builtin.
    for key in [
            'client/data_path',
            'game/logs/path',
            'game/bin/path',
            'game/mods/path',
            'game/engine/path',
            'game/maps/path',
    ]:
        path = Settings.get(key)
        if path is None:
            raise Exception("Missing configured path for {}".format(key))
        try:
            # exist_ok avoids the isdir()/makedirs() race of the old code
            # and is a no-op when the directory already exists.
            os.makedirs(path, exist_ok=True)
        except OSError:
            # Most likely wrong ownership after a user change: fix the
            # data-path permissions and retry once.  (IOError is an alias
            # of OSError on Python 3; makedirs raises OSError subclasses.)
            set_data_path_permissions()
            os.makedirs(path, exist_ok=True)
# Version string resolved once at import time from release resources or git.
VERSION = version.get_release_version(dir=fafpath.get_resdir(),
                                      git_dir=fafpath.get_srcdir())
def is_development_version():
    # True when VERSION denotes a non-release (development) build.
    return version.is_development_version(VERSION)
# FIXME: Don't initialize proxy code that shows a dialogue box on import
no_dialogs = False
# Which set of defaults to load below; may be overridden by QSettings.
environment = 'production'
def is_beta():
    # "Beta" currently means running against the development environment.
    return environment == 'development'
# TODO: move stuff below to Settings __init__ once we make it an actual object
# Allow users/devs to force the environment via a persisted setting.
if _settings.contains('client/force_environment'):
    environment = _settings.value('client/force_environment', 'development')
if environment == 'production':
    from .production import defaults
elif environment == 'development':
    from .develop import defaults
# Expand any '{host}' placeholders in the defaults with the configured host.
for k, v in defaults.items():
    if isinstance(v, str):
        defaults[k] = v.format(host=Settings.get('host'))
def os_language():
    """Return the OS UI language as a lowercase two-letter code, or None.

    Falls back to None whenever the locale cannot be determined or does not
    start with two alphabetic characters.
    """
    # locale is unreliable on Windows, so query the Win32 API directly there.
    if sys.platform == 'win32':
        lang_id = ctypes.windll.kernel32.GetUserDefaultUILanguage()
        os_locale = locale.windows_locale.get(lang_id)
    else:
        os_locale = locale.getlocale()[0]
    # Sanity checks: need at least two leading letters to form a code.
    if not os_locale or len(os_locale) < 2:
        return None
    code = os_locale[:2].lower()
    return code if code.isalpha() else None
# Pick the UI language from the OS once, if the user never chose one.
if not Settings.contains('client/language'):
    Settings.set('client/language', os_language())
# Setup normal rotating log handler
make_dirs()
def setup_file_handler(filename):
    """Build a buffered, rotating file log handler for *filename*.

    Returns a MemoryHandler that batches records and flushes them into a
    RotatingFileHandler inside the configured logs directory.
    """
    # check permissions of writing the log file first
    # (which fails when changing users)
    log_file = os.path.join(Settings.get('client/logs/path'), filename)
    try:
        with open(log_file, "a"):
            pass
    except IOError as e:
        # Ownership is wrong after a user change; repair it and fall through.
        set_data_path_permissions()
    rotate = RotatingFileHandler(
        os.path.join(Settings.get('client/logs/path'), filename),
        maxBytes=int(Settings.get('client/logs/max_size')),
        backupCount=1)
    rotate.setFormatter(logging.Formatter(
        '%(asctime)s %(levelname)-8s %(name)-30s %(message)s'))
    # Buffer records in memory; MemoryHandler flushes to the file in batches.
    return MemoryHandler(int(Settings.get('client/logs/buffer_size')),
                         target=rotate)
# Attach the rotating file log and (optionally) a console log at import time.
client_handler = setup_file_handler('forever.log')
logging.getLogger().addHandler(client_handler)
logging.getLogger().setLevel(Settings.get('client/logs/level', type=int))
if Settings.get('client/logs/console', False, type=bool):
    # Setup logging output to console
    devh = logging.StreamHandler()
    devh.setFormatter(logging.Formatter(
        '%(asctime)s %(levelname)-8s %(name)-30s %(message)s'))
    logging.getLogger().addHandler(devh)
    logging.getLogger().setLevel(Settings.get('client/logs/level', type=int))
logging.getLogger().info("FAF version: {} Environment: {}".format(
    VERSION, environment))
def qt_log_handler(type_, context, text):
    """Forward Qt framework messages into the Python logging system.

    Messages of unrecognized Qt types are dropped silently.
    """
    qt_to_logging = {
        QtCore.QtDebugMsg: logging.DEBUG,
        QtCore.QtInfoMsg: logging.INFO,
        QtCore.QtWarningMsg: logging.WARNING,
        QtCore.QtCriticalMsg: logging.ERROR,
        QtCore.QtFatalMsg: logging.CRITICAL,
    }
    loglvl = qt_to_logging.get(type_)
    if loglvl is None:
        return
    logging.getLogger().log(loglvl, "Qt: " + text)
QtCore.qInstallMessageHandler(qt_log_handler)
# Kept open for faulthandler; stays None until setup_fault_handler() succeeds.
fault_handler_file = None
def setup_fault_handler():
    """Enable faulthandler writing to a size-capped crash.log file.

    On failure to open the log the error is logged, fault_handler_file is
    left as None and faulthandler is not enabled.
    """
    global fault_handler_file
    log_path = os.path.join(Settings.get('client/logs/path'), 'crash.log')
    try:
        max_sz = int(Settings.get('client/logs/max_size'))
        # RotatingFileHandler is only used here for its rollover machinery.
        rotate = RotatingFileHandler(
            log_path,
            maxBytes=max_sz,
            backupCount=1)
        # Rollover does it unconditionally, not looking at max size,
        # so we need to check it manually
        try:
            finfo = os.stat(log_path)
            if finfo.st_size > max_sz:
                rotate.doRollover()
        except FileNotFoundError:
            pass
        rotate.close()
        # This file must be kept open so that faulthandler can write to the
        # same file descriptor no matter the circumstances
        fault_handler_file = open(log_path, 'a')
    except IOError as e:
        logging.getLogger().error(
            'Failed to setup crash.log for the fault handler: ' + e.strerror)
        return
    faulthandler.enable(fault_handler_file)
setup_fault_handler()
def clear_logging_handlers():
    """Tear down the Qt message hook and the faulthandler crash log.

    Safe to call even when setup_fault_handler() failed and the crash-log
    file was never opened.
    """
    global fault_handler_file
    QtCore.qInstallMessageHandler(None)
    faulthandler.disable()
    # Bug fix: setup_fault_handler() leaves this as None when opening
    # crash.log failed; closing unconditionally raised AttributeError.
    if fault_handler_file is not None:
        fault_handler_file.close()
        fault_handler_file = None
| gpl-3.0 |
angyukai/boulderactive2016-landing-page | node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py | 240 | 23404 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility functions to perform Xcode-style build steps.
These functions are executed via gyp-mac-tool when using the Makefile generator.
"""
import fcntl
import fnmatch
import glob
import json
import os
import plistlib
import re
import shutil
import string
import subprocess
import sys
import tempfile
def main(args):
  # Dispatch to the requested MacTool command; exit with its numeric status
  # when one is returned (None means success with no explicit code).
  executor = MacTool()
  exit_code = executor.Dispatch(args)
  if exit_code is not None:
    sys.exit(exit_code)
class MacTool(object):
  """This class performs all the Mac tooling steps. The methods can either be
  executed directly, or dispatched from an argument list."""
  def Dispatch(self, args):
    """Dispatches a string command to a method."""
    if len(args) < 1:
      raise Exception("Not enough arguments")
    # e.g. 'copy-info-plist' -> ExecCopyInfoPlist; unknown commands raise
    # AttributeError via getattr.
    method = "Exec%s" % self._CommandifyName(args[0])
    return getattr(self, method)(*args[1:])
  def _CommandifyName(self, name_string):
    """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
    return name_string.title().replace('-', '')
  def ExecCopyBundleResource(self, source, dest, convert_to_binary):
    """Copies a resource file to the bundle/Resources directory, performing any
    necessary compilation on each resource."""
    # Only the xib/storyboard branches return an exit status (from ibtool);
    # all other branches return None.
    extension = os.path.splitext(source)[1].lower()
    if os.path.isdir(source):
      # Copy tree.
      # TODO(thakis): This copies file attributes like mtime, while the
      # single-file branch below doesn't. This should probably be changed to
      # be consistent with the single-file branch.
      if os.path.exists(dest):
        shutil.rmtree(dest)
      shutil.copytree(source, dest)
    elif extension == '.xib':
      return self._CopyXIBFile(source, dest)
    elif extension == '.storyboard':
      return self._CopyXIBFile(source, dest)
    elif extension == '.strings':
      self._CopyStringsFile(source, dest, convert_to_binary)
    else:
      shutil.copy(source, dest)
  def _CopyXIBFile(self, source, dest):
    """Compiles a XIB file with ibtool into a binary plist in the bundle."""
    # ibtool sometimes crashes with relative paths. See crbug.com/314728.
    base = os.path.dirname(os.path.realpath(__file__))
    # NOTE(review): os.path.relpath() is truthy for nearly every input, so
    # these guards almost always fire; this presumably meant
    # "if not os.path.isabs(...)".  It still works because os.path.join()
    # discards |base| when the second argument is absolute -- confirm before
    # changing.
    if os.path.relpath(source):
      source = os.path.join(base, source)
    if os.path.relpath(dest):
      dest = os.path.join(base, dest)
    args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
        '--output-format', 'human-readable-text', '--compile', dest, source]
    ibtool_section_re = re.compile(r'/\*.*\*/')
    ibtool_re = re.compile(r'.*note:.*is clipping its content')
    ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
    current_section_header = None
    # Filter "is clipping its content" notes (and the section headers that
    # would introduce only filtered lines) from ibtool's output.
    for line in ibtoolout.stdout:
      if ibtool_section_re.match(line):
        current_section_header = line
      elif not ibtool_re.match(line):
        if current_section_header:
          sys.stdout.write(current_section_header)
          current_section_header = None
        sys.stdout.write(line)
    # NOTE(review): the process is never wait()ed, so returncode may still be
    # None here -- confirm callers treat None as success.
    return ibtoolout.returncode
  def _ConvertToBinary(self, dest):
    # Convert a plist file in place to Apple's binary1 format via plutil.
    subprocess.check_call([
        'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
  def _CopyStringsFile(self, source, dest, convert_to_binary):
    """Copies a .strings file using iconv to reconvert the input into UTF-16."""
    input_code = self._DetectInputEncoding(source) or "UTF-8"
    # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
    # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
    # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
    # semicolon in dictionary.
    # on invalid files. Do the same kind of validation.
    # NOTE: CoreFoundation is the macOS PyObjC binding; silently skips the
    # copy when the source fails plist validation.
    import CoreFoundation
    s = open(source, 'rb').read()
    d = CoreFoundation.CFDataCreate(None, s, len(s))
    _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
    if error:
      return
    fp = open(dest, 'wb')
    fp.write(s.decode(input_code).encode('UTF-16'))
    fp.close()
    if convert_to_binary == 'True':
      self._ConvertToBinary(dest)
def _DetectInputEncoding(self, file_name):
"""Reads the first few bytes from file_name and tries to guess the text
encoding. Returns None as a guess if it can't detect it."""
fp = open(file_name, 'rb')
try:
header = fp.read(3)
except e:
fp.close()
return None
fp.close()
if header.startswith("\xFE\xFF"):
return "UTF-16"
elif header.startswith("\xFF\xFE"):
return "UTF-16"
elif header.startswith("\xEF\xBB\xBF"):
return "UTF-8"
else:
return None
  def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
    """Copies the |source| Info.plist to the destination directory |dest|."""
    # NOTE: Python-2-only code below (dict.items() concatenation,
    # string.replace, plistlib.readPlistFromString/writePlistToString).
    # Read the source Info.plist into memory.
    fd = open(source, 'r')
    lines = fd.read()
    fd.close()
    # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
    plist = plistlib.readPlistFromString(lines)
    if keys:
      plist = dict(plist.items() + json.loads(keys[0]).items())
    lines = plistlib.writePlistToString(plist)
    # Go through all the environment variables and replace them as variables in
    # the file.
    IDENT_RE = re.compile(r'[/\s]')
    for key in os.environ:
      if key.startswith('_'):
        continue
      evar = '${%s}' % key
      evalue = os.environ[key]
      lines = string.replace(lines, evar, evalue)
      # Xcode supports various suffices on environment variables, which are
      # all undocumented. :rfc1034identifier is used in the standard project
      # template these days, and :identifier was used earlier. They are used to
      # convert non-url characters into things that look like valid urls --
      # except that the replacement character for :identifier, '_' isn't valid
      # in a URL either -- oops, hence :rfc1034identifier was born.
      evar = '${%s:identifier}' % key
      evalue = IDENT_RE.sub('_', os.environ[key])
      lines = string.replace(lines, evar, evalue)
      evar = '${%s:rfc1034identifier}' % key
      evalue = IDENT_RE.sub('-', os.environ[key])
      lines = string.replace(lines, evar, evalue)
    # Remove any keys with values that haven't been replaced.
    # The preceding line is dropped too since it holds the <key> element.
    lines = lines.split('\n')
    for i in range(len(lines)):
      if lines[i].strip().startswith("<string>${"):
        lines[i] = None
        lines[i - 1] = None
    lines = '\n'.join(filter(lambda x: x is not None, lines))
    # Write out the file with variables replaced.
    fd = open(dest, 'w')
    fd.write(lines)
    fd.close()
    # Now write out PkgInfo file now that the Info.plist file has been
    # "compiled".
    self._WritePkgInfo(dest)
    if convert_to_binary == 'True':
      self._ConvertToBinary(dest)
  def _WritePkgInfo(self, info_plist):
    """This writes the PkgInfo file from the data stored in Info.plist."""
    plist = plistlib.readPlist(info_plist)
    if not plist:
      return
    # Only create PkgInfo for executable types.
    package_type = plist['CFBundlePackageType']
    if package_type != 'APPL':
      return
    # The format of PkgInfo is eight characters, representing the bundle type
    # and bundle signature, each four characters. If that is missing, four
    # '?' characters are used instead.
    signature_code = plist.get('CFBundleSignature', '????')
    if len(signature_code) != 4:  # Wrong length resets everything, too.
      signature_code = '?' * 4
    # PkgInfo lives next to Info.plist inside the bundle.
    dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
    fp = open(dest, 'w')
    fp.write('%s%s' % (package_type, signature_code))
    fp.close()
  def ExecFlock(self, lockfile, *cmd_list):
    """Emulates the most basic behavior of Linux's flock(1)."""
    # Rely on exception handling to report errors.
    # The fd is deliberately left open: the lock is held until this process
    # (and the child sharing it) exits.
    fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
    fcntl.flock(fd, fcntl.LOCK_EX)
    return subprocess.call(cmd_list)
  def ExecFilterLibtool(self, *cmd_list):
    """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
    symbols'."""
    libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
    libtool_re5 = re.compile(
        r'^.*libtool: warning for library: ' +
        r'.* the table of contents is empty ' +
        r'\(no object file members in the library define global symbols\)$')
    env = os.environ.copy()
    # Ref:
    # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
    # The problem with this flag is that it resets the file mtime on the file to
    # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
    env['ZERO_AR_DATE'] = '1'
    libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
    _, err = libtoolout.communicate()
    for line in err.splitlines():
      if not libtool_re.match(line) and not libtool_re5.match(line):
        # Python-2-only print-to-stream syntax.
        print >>sys.stderr, line
    # Unconditionally touch the output .a file on the command line if present
    # and the command succeeded. A bit hacky.
    if not libtoolout.returncode:
      for i in range(len(cmd_list) - 1):
        if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
          os.utime(cmd_list[i+1], None)
          break
    return libtoolout.returncode
  def ExecPackageFramework(self, framework, version):
    """Takes a path to Something.framework and the Current version of that and
    sets up all the symlinks."""
    # Find the name of the binary based on the part before the ".framework".
    binary = os.path.basename(framework).split('.')[0]
    CURRENT = 'Current'
    RESOURCES = 'Resources'
    VERSIONS = 'Versions'
    if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
      # Binary-less frameworks don't seem to contain symlinks (see e.g.
      # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
      return
    # Move into the framework directory to set the symlinks correctly.
    pwd = os.getcwd()
    os.chdir(framework)
    # Set up the Current version.
    self._Relink(version, os.path.join(VERSIONS, CURRENT))
    # Set up the root symlinks.
    self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
    self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
    # Back to where we were before!
    os.chdir(pwd)
  def _Relink(self, dest, link):
    """Creates a symlink to |dest| named |link|. If |link| already exists,
    it is overwritten."""
    if os.path.lexists(link):
      os.remove(link)
    os.symlink(dest, link)
  def ExecCompileXcassets(self, keys, *inputs):
    """Compiles multiple .xcassets files into a single .car file.
    This invokes 'actool' to compile all the inputs .xcassets files. The
    |keys| arguments is a json-encoded dictionary of extra arguments to
    pass to 'actool' when the asset catalogs contains an application icon
    or a launch image.
    Note that 'actool' does not create the Assets.car file if the asset
    catalogs does not contains imageset.
    """
    command_line = [
        'xcrun', 'actool', '--output-format', 'human-readable-text',
        '--compress-pngs', '--notices', '--warnings', '--errors',
    ]
    # iPhone vs. macOS target is inferred from the deployment-target env var.
    is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
    if is_iphone_target:
      platform = os.environ['CONFIGURATION'].split('-')[-1]
      if platform not in ('iphoneos', 'iphonesimulator'):
        platform = 'iphonesimulator'
      command_line.extend([
          '--platform', platform, '--target-device', 'iphone',
          '--target-device', 'ipad', '--minimum-deployment-target',
          os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
          os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
      ])
    else:
      command_line.extend([
          '--platform', 'macosx', '--target-device', 'mac',
          '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
          '--compile',
          os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
      ])
    if keys:
      # Python-2-only iteritems below.
      keys = json.loads(keys)
      for key, value in keys.iteritems():
        arg_name = '--' + key
        if isinstance(value, bool):
          if value:
            command_line.append(arg_name)
        elif isinstance(value, list):
          for v in value:
            command_line.append(arg_name)
            command_line.append(str(v))
        else:
          command_line.append(arg_name)
          command_line.append(str(value))
    # Note: actool crashes if inputs path are relative, so use os.path.abspath
    # to get absolute path name for inputs.
    command_line.extend(map(os.path.abspath, inputs))
    subprocess.check_call(command_line)
  def ExecMergeInfoPlist(self, output, *inputs):
    """Merge multiple .plist files into a single .plist file."""
    # Later inputs win for scalar keys; nested dicts are merged recursively.
    merged_plist = {}
    for path in inputs:
      plist = self._LoadPlistMaybeBinary(path)
      self._MergePlist(merged_plist, plist)
    plistlib.writePlist(merged_plist, output)
  def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
    """Code sign a bundle.
    This function tries to code sign an iOS bundle, following the same
    algorithm as Xcode:
      1. copy ResourceRules.plist from the user or the SDK into the bundle,
      2. pick the provisioning profile that best match the bundle identifier,
         and copy it into the bundle as embedded.mobileprovision,
      3. copy Entitlements.plist from user or SDK next to the bundle,
      4. code sign the bundle.
    """
    resource_rules_path = self._InstallResourceRules(resource_rules)
    substitutions, overrides = self._InstallProvisioningProfile(
        provisioning, self._GetCFBundleIdentifier())
    entitlements_path = self._InstallEntitlements(
        entitlements, substitutions, overrides)
    subprocess.check_call([
        'codesign', '--force', '--sign', key, '--resource-rules',
        resource_rules_path, '--entitlements', entitlements_path,
        os.path.join(
            os.environ['TARGET_BUILD_DIR'],
            os.environ['FULL_PRODUCT_NAME'])])
  def _InstallResourceRules(self, resource_rules):
    """Installs ResourceRules.plist from user or SDK into the bundle.
    Args:
      resource_rules: string, optional, path to the ResourceRules.plist file
        to use, default to "${SDKROOT}/ResourceRules.plist"
    Returns:
      Path to the copy of ResourceRules.plist into the bundle.
    """
    source_path = resource_rules
    target_path = os.path.join(
        os.environ['BUILT_PRODUCTS_DIR'],
        os.environ['CONTENTS_FOLDER_PATH'],
        'ResourceRules.plist')
    # Fall back to the SDK's copy when the caller passed an empty string.
    if not source_path:
      source_path = os.path.join(
          os.environ['SDKROOT'], 'ResourceRules.plist')
    shutil.copy2(source_path, target_path)
    return target_path
  def _InstallProvisioningProfile(self, profile, bundle_identifier):
    """Installs embedded.mobileprovision into the bundle.
    Args:
      profile: string, optional, short name of the .mobileprovision file
        to use, if empty or the file is missing, the best file installed
        will be used
      bundle_identifier: string, value of CFBundleIdentifier from Info.plist
    Returns:
      A tuple containing two dictionary: variables substitutions and values
      to overrides when generating the entitlements file.
    """
    source_path, provisioning_data, team_id = self._FindProvisioningProfile(
        profile, bundle_identifier)
    target_path = os.path.join(
        os.environ['BUILT_PRODUCTS_DIR'],
        os.environ['CONTENTS_FOLDER_PATH'],
        'embedded.mobileprovision')
    shutil.copy2(source_path, target_path)
    # AppIdentifierPrefix always carries a trailing dot by convention.
    substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
    return substitutions, provisioning_data['Entitlements']
  def _FindProvisioningProfile(self, profile, bundle_identifier):
    """Finds the .mobileprovision file to use for signing the bundle.
    Checks all the installed provisioning profiles (or if the user specified
    the PROVISIONING_PROFILE variable, only consult it) and select the most
    specific that correspond to the bundle identifier.
    Args:
      profile: string, optional, short name of the .mobileprovision file
        to use, if empty or the file is missing, the best file installed
        will be used
      bundle_identifier: string, value of CFBundleIdentifier from Info.plist
    Returns:
      A tuple of the path to the selected provisioning profile, the data of
      the embedded plist in the provisioning profile and the team identifier
      to use for code signing.
    Raises:
      SystemExit: if no .mobileprovision can be used to sign the bundle.
    """
    profiles_dir = os.path.join(
        os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
    if not os.path.isdir(profiles_dir):
      # Python-2-only print-to-stream syntax (also below).
      print >>sys.stderr, (
          'cannot find mobile provisioning for %s' % bundle_identifier)
      sys.exit(1)
    provisioning_profiles = None
    if profile:
      profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
      if os.path.exists(profile_path):
        provisioning_profiles = [profile_path]
    if not provisioning_profiles:
      provisioning_profiles = glob.glob(
          os.path.join(profiles_dir, '*.mobileprovision'))
    valid_provisioning_profiles = {}
    for profile_path in provisioning_profiles:
      profile_data = self._LoadProvisioningProfile(profile_path)
      app_id_pattern = profile_data.get(
          'Entitlements', {}).get('application-identifier', '')
      # A profile is valid when its application-identifier glob matches
      # "<team>.<bundle_identifier>" for one of its team IDs.
      for team_identifier in profile_data.get('TeamIdentifier', []):
        app_id = '%s.%s' % (team_identifier, bundle_identifier)
        if fnmatch.fnmatch(app_id, app_id_pattern):
          valid_provisioning_profiles[app_id_pattern] = (
              profile_path, profile_data, team_identifier)
    if not valid_provisioning_profiles:
      print >>sys.stderr, (
          'cannot find mobile provisioning for %s' % bundle_identifier)
      sys.exit(1)
    # If the user has multiple provisioning profiles installed that can be
    # used for ${bundle_identifier}, pick the most specific one (ie. the
    # provisioning profile whose pattern is the longest).
    selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
    return valid_provisioning_profiles[selected_key]
  def _LoadProvisioningProfile(self, profile_path):
    """Extracts the plist embedded in a provisioning profile.
    Args:
      profile_path: string, path to the .mobileprovision file
    Returns:
      Content of the plist embedded in the provisioning profile as a dictionary.
    """
    # The profile is a CMS-signed blob; 'security cms -D' strips the signature.
    with tempfile.NamedTemporaryFile() as temp:
      subprocess.check_call([
          'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
      return self._LoadPlistMaybeBinary(temp.name)
  def _MergePlist(self, merged_plist, plist):
    """Merge |plist| into |merged_plist|."""
    # Recursive merge for dict values; any other value overwrites. (Python-2
    # iteritems.)
    for key, value in plist.iteritems():
      if isinstance(value, dict):
        merged_value = merged_plist.get(key, {})
        if isinstance(merged_value, dict):
          self._MergePlist(merged_value, value)
          merged_plist[key] = merged_value
        else:
          merged_plist[key] = value
      else:
        merged_plist[key] = value
  def _LoadPlistMaybeBinary(self, plist_path):
    """Loads into a memory a plist possibly encoded in binary format.
    This is a wrapper around plistlib.readPlist that tries to convert the
    plist to the XML format if it can't be parsed (assuming that it is in
    the binary format).
    Args:
      plist_path: string, path to a plist file, in XML or binary format
    Returns:
      Content of the plist as a dictionary.
    """
    try:
      # First, try to read the file using plistlib that only supports XML,
      # and if an exception is raised, convert a temporary copy to XML and
      # load that copy.
      return plistlib.readPlist(plist_path)
    # NOTE(review): bare except deliberately swallows the parse error so the
    # binary fallback below runs; narrowing it would be a behavior change.
    except:
      pass
    with tempfile.NamedTemporaryFile() as temp:
      shutil.copy2(plist_path, temp.name)
      subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
      return plistlib.readPlist(temp.name)
  def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
    """Constructs a dictionary of variable substitutions for Entitlements.plist.
    Args:
      bundle_identifier: string, value of CFBundleIdentifier from Info.plist
      app_identifier_prefix: string, value for AppIdentifierPrefix
    Returns:
      Dictionary of substitutions to apply when generating Entitlements.plist.
    """
    return {
        'CFBundleIdentifier': bundle_identifier,
        'AppIdentifierPrefix': app_identifier_prefix,
    }
  def _GetCFBundleIdentifier(self):
    """Extracts CFBundleIdentifier value from Info.plist in the bundle.
    Returns:
      Value of CFBundleIdentifier in the Info.plist located in the bundle.
    """
    info_plist_path = os.path.join(
        os.environ['TARGET_BUILD_DIR'],
        os.environ['INFOPLIST_PATH'])
    info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
    return info_plist_data['CFBundleIdentifier']
  def _InstallEntitlements(self, entitlements, substitutions, overrides):
    """Generates and install the ${BundleName}.xcent entitlements file.
    Expands variables "$(variable)" pattern in the source entitlements file,
    add extra entitlements defined in the .mobileprovision file and the copy
    the generated plist to "${BundlePath}.xcent".
    Args:
      entitlements: string, optional, path to the Entitlements.plist template
        to use, defaults to "${SDKROOT}/Entitlements.plist"
      substitutions: dictionary, variable substitutions
      overrides: dictionary, values to add to the entitlements
    Returns:
      Path to the generated entitlements file.
    """
    source_path = entitlements
    target_path = os.path.join(
        os.environ['BUILT_PRODUCTS_DIR'],
        os.environ['PRODUCT_NAME'] + '.xcent')
    if not source_path:
      source_path = os.path.join(
          os.environ['SDKROOT'],
          'Entitlements.plist')
    shutil.copy2(source_path, target_path)
    data = self._LoadPlistMaybeBinary(target_path)
    data = self._ExpandVariables(data, substitutions)
    # Overrides only fill in missing keys; the template always wins.
    if overrides:
      for key in overrides:
        if key not in data:
          data[key] = overrides[key]
    plistlib.writePlist(data, target_path)
    return target_path
  def _ExpandVariables(self, data, substitutions):
    """Expands variables "$(variable)" in data.
    Args:
      data: object, can be either string, list or dictionary
      substitutions: dictionary, variable substitutions to perform
    Returns:
      Copy of data where each references to "$(variable)" has been replaced
      by the corresponding value found in substitutions, or left intact if
      the key was not found.
    """
    # Recurses through lists/dicts; Python-2 iteritems below.
    if isinstance(data, str):
      for key, value in substitutions.iteritems():
        data = data.replace('$(%s)' % key, value)
      return data
    if isinstance(data, list):
      return [self._ExpandVariables(v, substitutions) for v in data]
    if isinstance(data, dict):
      return dict((k, self._ExpandVariables(data[k],
                                            substitutions)) for k in data)
    return data
# Entry point when invoked as gyp-mac-tool; exit status propagates to make.
if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
| mit |
slank/ansible | lib/ansible/modules/packaging/os/apt_rpm.py | 12 | 5374 | #!/usr/bin/python -tt
# -*- coding: utf-8 -*-
# (c) 2013, Evgenii Terechkov
# Written by Evgenii Terechkov <evg@altlinux.org>
# Based on urpmi module written by Philippe Makowski <philippem@mageia.org>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: apt_rpm
short_description: apt_rpm package manager
description:
- Manages packages with I(apt-rpm). Both low-level (I(rpm)) and high-level (I(apt-get)) package manager binaries required.
version_added: "1.5"
options:
pkg:
description:
- name of package to install, upgrade or remove.
required: true
default: null
state:
description:
- Indicates the desired package state
required: false
default: present
choices: [ "absent", "present" ]
update_cache:
description:
- update the package database first C(apt-get update).
required: false
default: no
choices: [ "yes", "no" ]
author: "Evgenii Terechkov (@evgkrsk)"
notes: []
'''
EXAMPLES = '''
# install package foo
- apt_rpm:
pkg: foo
state: present
# remove package foo
- apt_rpm:
pkg: foo
state: absent
# description: remove packages foo and bar
- apt_rpm:
pkg: foo,bar
state: absent
# description: update the package database and install bar (bar will be the updated if a newer version exists)
- apt_rpm:
name: bar
state: present
update_cache: yes
'''
try:
import json
except ImportError:
import simplejson as json
import shlex
import os
import sys
# Absolute paths of the two package-manager binaries this module shells out to.
APT_PATH="/usr/bin/apt-get"
RPM_PATH="/usr/bin/rpm"
def query_package(module, name):
    """Return True when *name* is installed according to ``rpm -q``."""
    # rpm -q exits 0 when the package is installed and 1 when it is not.
    exit_code, _stdout, _stderr = module.run_command("%s -q %s" % (RPM_PATH, name))
    return exit_code == 0
def query_package_provides(module, name):
    """Return True when *name* (or something providing it) is installed."""
    # rpm -q --provides exits 0 when the package is present, 1 otherwise.
    result = module.run_command("%s -q --provides %s" % (RPM_PATH, name))
    return result[0] == 0
def update_package_db(module):
    """Run ``apt-get update``; abort the module run via fail_json on error."""
    rc, out, err = module.run_command("%s update" % APT_PATH)
    if rc != 0:
        module.fail_json(msg="could not update package db: %s" % err)
def remove_packages(module, packages):
    """Remove every installed package in *packages*; exits the module run.

    exit_json/fail_json raise internally, so the first failing package
    aborts the loop.
    """
    remove_c = 0
    # Using a for loop in case of error, we can report the package that failed
    for package in packages:
        # Query the package first, to see if we even need to remove
        if not query_package(module, package):
            continue
        rc, out, err = module.run_command("%s -y remove %s" % (APT_PATH,package))
        if rc != 0:
            module.fail_json(msg="failed to remove %s: %s" % (package, err))
        remove_c += 1
    if remove_c > 0:
        module.exit_json(changed=True, msg="removed %s package(s)" % remove_c)
    module.exit_json(changed=False, msg="package(s) already absent")
def install_packages(module, pkgspec):
    """Install every package in *pkgspec* that is not already provided.

    Exits the module run via exit_json/fail_json: changed=True when
    something was installed, changed=False when everything was present.
    """
    # Space-separated, quoted list of the packages that still need installing.
    packages = ""
    for package in pkgspec:
        if not query_package_provides(module, package):
            packages += "'%s' " % package

    if len(packages) != 0:
        rc, out, err = module.run_command("%s -y install %s" % (APT_PATH, packages))

        # Verify each requested package really ended up provided.
        # Bug fix: the original wrote "for packages in pkgspec" which
        # clobbered the accumulated string above and then tested the stale
        # 'package' variable from the first loop, so only the last package
        # was ever re-checked and the messages reported the wrong value.
        installed = True
        for package in pkgspec:
            if not query_package_provides(module, package):
                installed = False

        # apt-rpm always have 0 for exit code if --force is used
        if rc or not installed:
            module.fail_json(msg="'apt-get -y install %s' failed: %s" % (packages, err))
        else:
            module.exit_json(changed=True, msg="%s present(s)" % packages)
    else:
        module.exit_json(changed=False)
def main():
    # NOTE(review): default state is 'installed' while DOCUMENTATION claims
    # 'present'; both are aliases and accepted below, but the docs disagree.
    module = AnsibleModule(
        argument_spec = dict(
            state = dict(default='installed', choices=['installed', 'removed', 'absent', 'present']),
            update_cache = dict(default=False, aliases=['update-cache'], type='bool'),
            package = dict(aliases=['pkg', 'name'], required=True)))
    if not os.path.exists(APT_PATH) or not os.path.exists(RPM_PATH):
        module.fail_json(msg="cannot find /usr/bin/apt-get and/or /usr/bin/rpm")
    p = module.params
    if p['update_cache']:
        update_package_db(module)
    # 'package' accepts a comma-separated list of names.
    packages = p['package'].split(',')
    if p['state'] in [ 'installed', 'present' ]:
        install_packages(module, packages)
    elif p['state'] in [ 'removed', 'absent' ]:
        remove_packages(module, packages)
# this is magic, see lib/ansible/module_common.py
from ansible.module_utils.basic import *
if __name__ == '__main__':
    main()
| gpl-3.0 |
asimshankar/tensorflow | tensorflow/python/autograph/impl/conversion.py | 1 | 16281 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Core conversion logic, serves as main point of access."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import imp
import gast
from tensorflow.python.autograph import operators
from tensorflow.python.autograph import utils
from tensorflow.python.autograph.converters import arg_defaults
from tensorflow.python.autograph.converters import asserts
from tensorflow.python.autograph.converters import break_statements
from tensorflow.python.autograph.converters import builtin_functions
from tensorflow.python.autograph.converters import call_trees
from tensorflow.python.autograph.converters import conditional_expressions
from tensorflow.python.autograph.converters import continue_statements
from tensorflow.python.autograph.converters import control_flow
from tensorflow.python.autograph.converters import decorators
from tensorflow.python.autograph.converters import directives
from tensorflow.python.autograph.converters import error_handlers
from tensorflow.python.autograph.converters import function_scopes
from tensorflow.python.autograph.converters import lists
from tensorflow.python.autograph.converters import logical_expressions
from tensorflow.python.autograph.converters import return_statements
from tensorflow.python.autograph.converters import side_effect_guards
from tensorflow.python.autograph.converters import slices
from tensorflow.python.autograph.core import config
from tensorflow.python.autograph.core import converter
from tensorflow.python.autograph.core import errors
from tensorflow.python.autograph.core import function_wrapping
from tensorflow.python.autograph.lang import special_functions
from tensorflow.python.autograph.pyct import ast_util
from tensorflow.python.autograph.pyct import compiler
from tensorflow.python.autograph.pyct import inspect_utils
from tensorflow.python.autograph.pyct import origin_info
from tensorflow.python.autograph.pyct import parser
from tensorflow.python.autograph.pyct import qual_names
from tensorflow.python.autograph.pyct import templates
from tensorflow.python.autograph.pyct import transformer
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import tf_inspect
# TODO(mdan): Might we not need any renaming at all?
def is_whitelisted_for_graph(o):
  """Checks whether an entity is whitelisted for use in graph mode.

  Examples of whitelisted entities include all members of the tensorflow
  package.

  Args:
    o: A Python entity.

  Returns:
    Boolean
  """
  # TODO(b/120224672): Fix this.
  if isinstance(o, functools.partial):
    # tf_inspect.getmodule(functools.partial(...)) otherwise returns None since
    # functools.partial objects do not have a __module__ attribute.
    m = functools
  else:
    m = tf_inspect.getmodule(o)
  # getmodule() may return None (e.g. for builtins or C-implemented callables);
  # previously that caused an AttributeError on m.__name__ below. With no
  # module there is nothing to match against the whitelist, so fall through.
  if m is not None:
    for prefix, in config.DEFAULT_UNCOMPILED_MODULES:
      if m.__name__.startswith(prefix):
        return True
  if hasattr(o, 'autograph_info__'):
    return True
  if inspect_utils.isnamedtuple(o):
    # Due to the way they're constructed, namedtuple types cannot be converted
    # because they don't expose source code. But we assume they are safe for
    # graph mode since they are just containers.
    if tf_inspect.isclass(o) and len(o.__bases__) > 1:
      logging.log_first_n(
          logging.level_warning(),
          'Entity {} looks like a namedtuple subclass. If it has any custom'
          ' methods, they will not be converted by AutoGraph.'.format(o), 1)
    return True
  return False
def entity_to_graph(o, program_ctx, arg_values, arg_types):
  """Compile a Python entity into equivalent TensorFlow.

  The function will also recursively compile all the entities that `o`
  references, updating `dependency_cache`.

  This function is reentrant, and relies on dependency_cache to avoid
  generating duplicate code.

  Args:
    o: A Python entity.
    program_ctx: A ProgramContext object.
    arg_values: A dict containing value hints for symbols like function
      parameters.
    arg_types: A dict containing type hints for symbols like function
      parameters.

  Returns:
    A tuple (ast, new_name, namespace):
        * ast: An AST representing an entity with interface equivalent to `o`,
            but which when executed it creates TF a graph.
        * new_name: The symbol name under which the new entity can be found.
        * namespace: A dict mapping all symbols visible to the converted
            entity, keyed by their symbol name.

  Raises:
    ValueError: if the entity type is not supported.
  """
  logging.vlog(logging.DEBUG, 'Converting %s', o)
  # Dispatch on the kind of entity; classes and (bound or plain) functions are
  # the only supported inputs.
  if tf_inspect.isclass(o):
    node, name, ns = class_to_graph(o, program_ctx)
  elif tf_inspect.isfunction(o):
    node, name, ns = function_to_graph(o, program_ctx, arg_values, arg_types)
  elif tf_inspect.ismethod(o):
    node, name, ns = function_to_graph(o, program_ctx, arg_values, arg_types)
  # TODO(mdan,yashkatariya): Remove when object conversion is implemented.
  elif hasattr(o, '__class__'):
    raise NotImplementedError(
        'Object conversion is not yet supported. If you are '
        'trying to convert code that uses an existing object, '
        'try including the creation of that object in the '
        'conversion. For example, instead of converting the method '
        'of a class, try converting the entire class instead. '
        'See https://github.com/tensorflow/tensorflow/blob/master/tensorflow/'
        'contrib/autograph/README.md#using-the-functional-api '
        'for more information.')
  else:
    raise ValueError(
        'Entity "%s" has unsupported type "%s". Only functions and classes are '
        'supported for now.' % (o, type(o)))
  # TODO(mdan): This is temporary. it should be created using a converter.
  # TODO(mdan): The attribute should be added with a helper, not directly.
  # The helper can ensure there are no collisions.
  template = '''
      entity.autograph_info__ = {}
  '''
  node.extend(templates.replace(template, entity=name))
  program_ctx.add_to_cache(o, node)
  if logging.get_verbosity() <= logging.DEBUG:
    logging.vlog(logging.DEBUG, 'Compiled output of %s:\n\n%s\n', o,
                 compiler.ast_to_source(node))
  if program_ctx.options.recursive:
    # Keep converting until every name discovered so far has an entry in the
    # dependency cache.
    while True:
      candidate = None
      for obj in program_ctx.name_map.keys():
        if obj not in program_ctx.dependency_cache:
          candidate = obj
          break
      if candidate is None:
        break
      if (hasattr(candidate, 'im_class') and
          getattr(candidate, 'im_class') not in program_ctx.partial_types):
        # Class members are converted with their objects, unless they're
        # only converted partially.
        continue
      entity_to_graph(candidate, program_ctx, {}, {})
  return node, name, ns
def class_to_graph(c, program_ctx):
  """Specialization of `entity_to_graph` for classes.

  Converts every method defined directly on `c` and synthesizes a new
  ClassDef containing the converted methods.

  Args:
    c: A Python class.
    program_ctx: A ProgramContext object.

  Returns:
    A tuple (output_nodes, class_name, class_namespace), matching the
    contract of `entity_to_graph`.

  Raises:
    ValueError: if the class has no convertible member methods.
  """
  converted_members = {}
  method_filter = lambda m: tf_inspect.isfunction(m) or tf_inspect.ismethod(m)
  members = tf_inspect.getmembers(c, predicate=method_filter)
  if not members:
    raise ValueError('Cannot convert %s: it has no member methods.' % c)
  class_namespace = {}
  for _, m in members:
    # Only convert the members that are directly defined by the class.
    if inspect_utils.getdefiningclass(m, c) is not c:
      continue
    node, _, namespace = function_to_graph(
        m,
        program_ctx=program_ctx,
        arg_values={},
        arg_types={'self': (c.__name__, c)},
        owner_type=c)
    # Accumulate the namespaces of all converted methods. (The previous
    # `if class_namespace is None` branch was dead code: the dict is
    # initialized above, so it is never None here.)
    class_namespace.update(namespace)
    converted_members[m] = node[0]
  namer = program_ctx.new_namer(class_namespace)
  class_name = namer.compiled_class_name(c.__name__, c)
  # TODO(mdan): This needs to be explained more thoroughly.
  # Process any base classes: if the superclass if of a whitelisted type, an
  # absolute import line is generated. Otherwise, it is marked for conversion
  # (as a side effect of the call to namer.compiled_class_name() followed by
  # program_ctx.update_name_map(namer)).
  output_nodes = []
  renames = {}
  base_names = []
  for base in c.__bases__:
    # NOTE: this used to test `isinstance(object, base)`, which is also True
    # for metaclasses such as `type`; an identity test matches the intent of
    # special-casing `object` alone.
    if base is object:
      base_names.append('object')
      continue
    if is_whitelisted_for_graph(base):
      alias = namer.new_symbol(base.__name__, ())
      output_nodes.append(
          gast.ImportFrom(
              module=base.__module__,
              names=[gast.alias(name=base.__name__, asname=alias)],
              level=0))
    else:
      # This will trigger a conversion into a class with this name.
      alias = namer.compiled_class_name(base.__name__, base)
    base_names.append(alias)
    renames[qual_names.QN(base.__name__)] = qual_names.QN(alias)
  program_ctx.update_name_map(namer)
  # Generate the definition of the converted class.
  bases = [gast.Name(n, gast.Load(), None) for n in base_names]
  class_def = gast.ClassDef(
      class_name,
      bases=bases,
      keywords=[],
      body=list(converted_members.values()),
      decorator_list=[])
  # Make a final pass to replace references to the class or its base classes.
  # Most commonly, this occurs when making super().__init__() calls.
  # TODO(mdan): Making direct references to superclass' superclass will fail.
  class_def = qual_names.resolve(class_def)
  renames[qual_names.QN(c.__name__)] = qual_names.QN(class_name)
  class_def = ast_util.rename_symbols(class_def, renames)
  output_nodes.append(class_def)
  return output_nodes, class_name, class_namespace
def _add_reserved_symbol(namespace, name, entity):
if name not in namespace:
namespace[name] = entity
elif namespace[name] != entity:
raise ValueError('The name "%s" is reserved and may not be used.' % name)
# Lazily-built synthetic module exposing the AutoGraph API to converted code;
# created on first use by _add_self_references() below and cached globally.
ag_internal = None
def _add_self_references(namespace, autograph_module):
  """Adds namespace references to the module that exposes the api itself.

  Args:
    namespace: dict, the namespace of the entity being converted; the
      synthetic module is registered here under the reserved name 'ag__'.
    autograph_module: the module object exposing the public AutoGraph API.
  """
  global ag_internal
  if ag_internal is None:
    # Craft a module that exposes parts of the external API as well as certain
    # internal modules.
    ag_internal = imp.new_module('autograph')
    ag_internal.__dict__.update(autograph_module.__dict__)
    ag_internal.utils = utils
    ag_internal.function_scope = function_wrapping.function_scope
    ag_internal.rewrite_graph_construction_error = (
        errors.rewrite_graph_construction_error)
    # TODO(mdan): Add safeguards against name clashes.
    # We don't want to create a submodule because we want the operators to be
    # accessible as ag__.<operator>
    ag_internal.__dict__.update(special_functions.__dict__)
    ag_internal.__dict__.update(operators.__dict__)
  _add_reserved_symbol(namespace, 'ag__', ag_internal)
def function_to_graph(f,
                      program_ctx,
                      arg_values,
                      arg_types,
                      owner_type=None):
  """Specialization of `entity_to_graph` for callable functions.

  Args:
    f: The function to convert.
    program_ctx: A ProgramContext object.
    arg_values: A dict containing value hints for symbols like function
      parameters.
    arg_types: A dict containing type hints for symbols like function
      parameters.
    owner_type: Optional owning class, set when converting a method.

  Returns:
    A tuple ([node], new_name, namespace); see `entity_to_graph`.

  Raises:
    ValueError: if the source of `f` cannot be unambiguously identified.
  """
  node, source = parser.parse_entity(f)
  node = node.body[0]
  # In general, the output of inspect.getsource is inexact because it uses
  # regex matching to adjust the exact location around the line number that
  # CPython records. This is particularly problematic for lambda functions,
  # where the entire containing lines are returned.
  nodes = ast_util.find_matching_definitions(node, f)
  if len(nodes) != 1:
    if f.__name__ == '<lambda>':
      raise ValueError(
          'Unable to identify source code of lambda function {}. It was'
          ' defined on this line: {}, which must contain a single lambda with'
          ' matching signature. To avoid ambiguity, define each lambda'
          ' in a separate expression.'.format(f, source))
    else:
      raise ValueError(
          'Unable to identify source code of function {}. The source code'
          ' reported by Python did not include exactly one matching signature:'
          '\n{}\n. This is an extremely rare occurrence. Please report it to'
          ' the TensorFlow team.'.format(f, source))
  node, = nodes
  # TODO(znado): Place inside standard_analysis.
  origin_info.resolve(node, source, f)
  namespace = inspect_utils.getnamespace(f)
  _add_self_references(namespace, program_ctx.autograph_module)
  namer = program_ctx.new_namer(namespace)
  entity_info = transformer.EntityInfo(
      source_code=source,
      source_file='<fragment>',
      namespace=namespace,
      arg_values=arg_values,
      arg_types=arg_types,
      owner_type=owner_type)
  context = converter.EntityContext(namer, entity_info, program_ctx)
  node = node_to_graph(node, context)
  if isinstance(node, gast.Lambda):
    # Lambdas have no name to attach the converted code to, so bind the
    # result to a fresh symbol instead.
    new_name = namer.new_symbol('tf__lambda', ())
    node = gast.Assign(
        targets=[gast.Name(new_name, gast.Store(), None)], value=node)
  else:
    # TODO(mdan): This somewhat duplicates the renaming logic in call_trees.py
    new_name, did_rename = namer.compiled_function_name(f.__name__, f,
                                                       owner_type)
    if did_rename:
      node.name = new_name
    else:
      new_name = f.__name__
      assert node.name == new_name
  program_ctx.update_name_map(namer)
  # TODO(mdan): Use this at compilation.
  return [node], new_name, namespace
def node_to_graph(node, context):
  """Convert Python code to equivalent TF graph mode code.

  Args:
    node: AST, the code to convert.
    context: converter.EntityContext

  Returns:
    A tuple (node, deps):
        * node: A Python ast node, representing the converted code.
        * deps: A set of strings, the fully qualified names of entity
          dependencies that this node has.
  """
  # TODO(mdan): Insert list_comprehensions somewhere.
  node = converter.standard_analysis(node, context, is_initial=True)
  # Past this point, line numbers are no longer accurate so we ignore the
  # source.
  # TODO(mdan): Is it feasible to reconstruct intermediate source code?
  context.info.source_code = None
  # NOTE: the ordering of the converter passes below is significant; several
  # passes rely on canonicalizations performed by earlier ones.
  if context.program.options.uses(converter.Feature.DECORATORS):
    node = converter.apply_(node, context, decorators)
  node = converter.apply_(node, context, arg_defaults)
  node = converter.apply_(node, context, directives)
  node = converter.apply_(node, context, break_statements)
  node = converter.apply_(node, context, asserts)
  # Note: sequencing continue canonicalization before for loop one avoids
  # dealing with the extra loop increment operation that the for
  # canonicalization creates.
  node = converter.apply_(node, context, continue_statements)
  node = converter.apply_(node, context, return_statements)
  if context.program.options.uses(converter.Feature.LISTS):
    node = converter.apply_(node, context, lists)
    node = converter.apply_(node, context, slices)
  node = converter.apply_(node, context, builtin_functions)
  node = converter.apply_(node, context, call_trees)
  node = converter.apply_(node, context, control_flow)
  node = converter.apply_(node, context, conditional_expressions)
  node = converter.apply_(node, context, logical_expressions)
  if context.program.options.uses(converter.Feature.AUTO_CONTROL_DEPS):
    node = converter.apply_(node, context, side_effect_guards)
  # TODO(mdan): If function scopes ever does more, the toggle will need moving.
  if context.program.options.uses(converter.Feature.NAME_SCOPES):
    node = converter.apply_(node, context, function_scopes)
  if context.program.options.uses(converter.Feature.ERROR_REWRITING):
    node = converter.apply_(node, context, error_handlers)
  return node
| apache-2.0 |
MakeHer/edx-platform | lms/djangoapps/courseware/views.py | 1 | 68336 | """
Courseware views functions
"""
import logging
import json
import textwrap
import urllib
from collections import OrderedDict
from datetime import datetime
from django.utils.translation import ugettext as _
from django.conf import settings
from django.core.context_processors import csrf
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User, AnonymousUser
from django.contrib.auth.decorators import login_required
from django.db import transaction
from django.db.models import Q
from django.utils.timezone import UTC
from django.views.decorators.http import require_GET, require_POST, require_http_methods
from django.http import Http404, HttpResponse, HttpResponseBadRequest, HttpResponseForbidden
from django.shortcuts import redirect
from certificates import api as certs_api
from edxmako.shortcuts import render_to_response, render_to_string, marketing_link
from django.views.decorators.csrf import ensure_csrf_cookie
from django.views.decorators.cache import cache_control
from ipware.ip import get_ip
from markupsafe import escape
from rest_framework import status
import newrelic.agent
from courseware import grades
from courseware.access import has_access, has_ccx_coach_role, _adjust_start_date_for_beta_testers
from courseware.access_response import StartDateError
from courseware.access_utils import in_preview_mode
from courseware.courses import (
get_courses,
get_course,
get_course_by_id,
get_permission_for_course_about,
get_studio_url,
get_course_overview_with_access,
get_course_with_access,
sort_by_announcement,
sort_by_start_date,
UserNotEnrolled
)
from courseware.masquerade import setup_masquerade
from openedx.core.djangoapps.credit.api import (
get_credit_requirement_status,
is_user_eligible_for_credit,
is_credit_course
)
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from courseware.models import StudentModuleHistory
from courseware.model_data import FieldDataCache, ScoresClient
from .module_render import toc_for_course, get_module_for_descriptor, get_module, get_module_by_usage_id
from .entrance_exams import (
course_has_entrance_exam,
get_entrance_exam_content,
get_entrance_exam_score,
user_must_complete_entrance_exam,
user_has_passed_entrance_exam
)
from courseware.user_state_client import DjangoXBlockUserStateClient
from course_modes.models import CourseMode
from student.models import UserTestGroup, CourseEnrollment
from student.views import is_course_blocked
from util.cache import cache, cache_if_anonymous
from util.date_utils import strftime_localized
from util.db import outer_atomic
from xblock.fragment import Fragment
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.exceptions import ItemNotFoundError, NoPathToItem
from xmodule.tabs import CourseTabList
from xmodule.x_module import STUDENT_VIEW
import shoppingcart
from shoppingcart.models import CourseRegistrationCode
from shoppingcart.utils import is_shopping_cart_enabled
from opaque_keys import InvalidKeyError
from util.milestones_helpers import get_prerequisite_courses_display
from util.views import _record_feedback_in_zendesk
from microsite_configuration import microsite
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from opaque_keys.edx.keys import CourseKey, UsageKey
from instructor.enrollment import uses_shib
import survey.utils
import survey.views
from util.views import ensure_valid_course_key
from eventtracking import tracker
import analytics
from courseware.url_helpers import get_redirect_url
from lms.djangoapps.ccx.custom_exception import CCXLocatorValidationException
from lang_pref import LANGUAGE_KEY
from openedx.core.djangoapps.user_api.preferences.api import get_user_preference
from student.helpers import is_teacher_of
# Module-level logger for courseware views.
log = logging.getLogger("edx.courseware")
# Extra names made available to courseware templates.
template_imports = {'urllib': urllib}
# Depth to which the course tree is loaded when navigating (chapter/section).
CONTENT_DEPTH = 2
# Only display the requirements on learner dashboard for
# credit and verified modes.
REQUIREMENTS_DISPLAY_MODES = CourseMode.CREDIT_MODES + [CourseMode.VERIFIED]
def user_groups(user):
    """
    TODO (vshnayder): This is not used. When we have a new plan for groups, adjust appropriately.
    """
    if not user.is_authenticated():
        return []
    # TODO: Rewrite in Django
    cache_key = 'user_group_names_{user.id}'.format(user=user)
    one_hour = 60 * 60
    # Kill caching on dev machines -- we switch groups a lot
    cached_names = None if settings.DEBUG else cache.get(cache_key)
    if cached_names is not None:
        return cached_names
    names = [group.name for group in UserTestGroup.objects.filter(users=user)]
    cache.set(cache_key, names, one_hour)
    return names
@ensure_csrf_cookie
@cache_if_anonymous()
def courses(request):
    """
    Render "find courses" page.  The course selection work is done in courseware.courses.
    """
    course_discovery_meanings = getattr(settings, 'COURSE_DISCOVERY_MEANINGS', {})
    courses_list = []
    # When course discovery is enabled the catalog is populated client-side,
    # so no server-side course list is computed here.
    if not settings.FEATURES.get('ENABLE_COURSE_DISCOVERY'):
        courses_list = get_courses(request.user)
        sort_by_date = microsite.get_value(
            "ENABLE_COURSE_SORTING_BY_START_DATE",
            settings.FEATURES["ENABLE_COURSE_SORTING_BY_START_DATE"])
        if sort_by_date:
            courses_list = sort_by_start_date(courses_list)
        else:
            courses_list = sort_by_announcement(courses_list)
    context = {
        'courses': courses_list,
        'course_discovery_meanings': course_discovery_meanings,
    }
    return render_to_response("courseware/courses.html", context)
def render_accordion(user, request, course, chapter, section, field_data_cache):
    """
    Draws navigation bar. Takes current position in accordion as
    parameter.

    If chapter and section are '' or None, renders a default accordion.

    course, chapter, and section are the url_names.

    Returns the html string
    """
    # grab the table of contents
    toc = toc_for_course(user, request, course, chapter, section, field_data_cache)
    # Template helpers first; view-specific values layered on top.
    context = dict(template_imports)
    context.update({
        'toc': toc,
        'course_id': course.id.to_deprecated_string(),
        'csrf': csrf(request)['csrf_token'],
        'due_date_display_format': course.due_date_display_format,
    })
    return render_to_string('courseware/accordion.html', context)
def get_current_child(xmodule, min_depth=None):
    """
    Get the xmodule.position's display item of an xmodule that has a position and
    children.  If xmodule has no position or is out of bounds, return the first
    child with children extending down to content_depth.

    For example, if chapter_one has no position set, with two child sections,
    section-A having no children and section-B having a discussion unit,
    `get_current_child(chapter, min_depth=1)` will return section-B.

    Returns None only if there are no children at all.
    """
    def _default_child(children):
        """First child of xmodule, subject to min_depth."""
        if not children:
            return None
        if not min_depth > 0:
            return children[0]
        # Prefer children that themselves have displayable content deep enough.
        rich_children = [c for c in children
                         if c.has_children_at_depth(min_depth - 1) and c.get_display_items()]
        return rich_children[0] if rich_children else None

    if not hasattr(xmodule, 'position'):
        return None
    children = xmodule.get_display_items()
    if xmodule.position is None:
        return _default_child(children)
    # position is 1-indexed.
    index = xmodule.position - 1
    if 0 <= index < len(children):
        return children[index]
    if children:
        # module has a set position, but the position is out of range.
        # return default child.
        return _default_child(children)
    return None
def redirect_to_course_position(course_module, content_depth):
    """
    Return a redirect to the user's current place in the course.

    If this is the user's first time, redirects to COURSE/CHAPTER/SECTION.
    If this isn't the users's first time, redirects to COURSE/CHAPTER,
    and the view will find the current section and display a message
    about reusing the stored position.

    If there is no current position in the course or chapter, then selects
    the first child.
    """
    url_kwargs = {'course_id': course_module.id.to_deprecated_string()}
    chapter = get_current_child(course_module, min_depth=content_depth)
    if chapter is None:
        # oops. Something bad has happened.
        raise Http404("No chapter found when loading current position in course")
    url_kwargs['chapter'] = chapter.url_name
    # A stored position means the user has been here before: send them to the
    # chapter page, which will resume the saved section.
    if course_module.position is not None:
        return redirect(reverse('courseware_chapter', kwargs=url_kwargs))
    # Relying on default of returning first child
    section = get_current_child(chapter, min_depth=content_depth - 1)
    if section is None:
        raise Http404("No section found when loading current position in course")
    url_kwargs['section'] = section.url_name
    return redirect(reverse('courseware_section', kwargs=url_kwargs))
def save_child_position(seq_module, child_name):
    """
    Record the 1-indexed position of the child whose url_name equals
    `child_name` on `seq_module`, persisting only when it actually changes.

    child_name: url_name of the child
    """
    for index, child in enumerate(seq_module.get_display_items(), start=1):
        if child.location.name != child_name:
            continue
        # Only save if position changed
        if index != seq_module.position:
            seq_module.position = index
            seq_module.save()
def save_positions_recursively_up(user, request, field_data_cache, xmodule, course=None):
    """
    Walk up the course tree starting from a leaf module, saving each
    ancestor's position so it points at the child we came up from.
    """
    node = xmodule
    while node:
        store = modulestore()
        parent_location = store.get_parent_location(node.location)
        parent = None
        if parent_location:
            parent_descriptor = store.get_item(parent_location)
            parent = get_module_for_descriptor(
                user,
                request,
                parent_descriptor,
                field_data_cache,
                node.location.course_key,
                course=course
            )
        # Only modules that track a position (e.g. sequentials) are updated.
        if parent and hasattr(parent, 'position'):
            save_child_position(parent, node.location.name)
        node = parent
@transaction.non_atomic_requests
@login_required
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@ensure_valid_course_key
@outer_atomic(read_committed=True)
def index(request, course_id, chapter=None, section=None,
          position=None):
    """
    Displays courseware accordion and associated content.  If course, chapter,
    and section are all specified, renders the page, or returns an error if they
    are invalid.

    If section is not specified, displays the accordion opened to the right chapter.

    If neither chapter or section are specified, redirects to user's most recent
    chapter, or the first chapter if this is the user's first visit.

    Arguments:

     - request    : HTTP request
     - course_id  : course id (str: ORG/course/URL_NAME)
     - chapter    : chapter url_name (str)
     - section    : section url_name (str)
     - position   : position in module, eg of <sequential> module (str)

    Returns:

     - HTTPresponse
    """
    course_key = CourseKey.from_string(course_id)
    # Gather metrics for New Relic so we can slice data in New Relic Insights
    newrelic.agent.add_custom_parameter('course_id', unicode(course_key))
    newrelic.agent.add_custom_parameter('org', unicode(course_key.org))
    # Re-fetch the user with groups prefetched, to avoid repeated group
    # queries during rendering.
    user = User.objects.prefetch_related("groups").get(id=request.user.id)
    redeemed_registration_codes = CourseRegistrationCode.objects.filter(
        course_id=course_key,
        registrationcoderedemption__redeemed_by=request.user
    )
    # Redirect to dashboard if the course is blocked due to non-payment.
    if is_course_blocked(request, redeemed_registration_codes, course_key):
        # registration codes may be generated via Bulk Purchase Scenario
        # we have to check only for the invoice generated registration codes
        # that their invoice is valid or not
        log.warning(
            u'User %s cannot access the course %s because payment has not yet been received',
            user,
            course_key.to_deprecated_string()
        )
        return redirect(reverse('dashboard'))
    request.user = user  # keep just one instance of User
    # Do all the rendering inside one bulk-operation context so the
    # modulestore can batch its reads.
    with modulestore().bulk_operations(course_key):
        return _index_bulk_op(request, course_key, chapter, section, position)
# pylint: disable=too-many-statements
def _index_bulk_op(request, course_key, chapter, section, position):
    """
    Render the index page for the specified course.

    Expected to run inside a modulestore bulk-operations context (see `index`
    above). Performs access checks (enrollment, prerequisites, entrance exam,
    required survey), then renders either the requested section, a
    "welcome back" page, or a redirect to the user's saved position.
    """
    # Verify that position a string is in fact an int
    if position is not None:
        try:
            int(position)
        except ValueError:
            raise Http404(u"Position {} is not an integer!".format(position))
    course = get_course_with_access(request.user, 'load', course_key, depth=2)
    staff_access = has_access(request.user, 'staff', course)
    # `user` is the effective user after any staff masquerade is applied.
    masquerade, user = setup_masquerade(request, course_key, staff_access, reset_masquerade_data=True)
    registered = registered_for_course(course, user)
    if not registered:
        # TODO (vshnayder): do course instructors need to be registered to see course?
        log.debug(u'User %s tried to view course %s but is not enrolled', user, course.location.to_deprecated_string())
        return redirect(reverse('about_course', args=[course_key.to_deprecated_string()]))
    # see if all pre-requisites (as per the milestones app feature) have been fulfilled
    # Note that if the pre-requisite feature flag has been turned off (default) then this check will
    # always pass
    if not has_access(user, 'view_courseware_with_prerequisites', course):
        # prerequisites have not been fulfilled therefore redirect to the Dashboard
        log.info(
            u'User %d tried to view course %s '
            u'without fulfilling prerequisites',
            user.id, unicode(course.id))
        return redirect(reverse('dashboard'))
    # Entrance Exam Check
    # If the course has an entrance exam and the requested chapter is NOT the entrance exam, and
    # the user hasn't yet met the criteria to bypass the entrance exam, redirect them to the exam.
    if chapter and course_has_entrance_exam(course):
        chapter_descriptor = course.get_child_by(lambda m: m.location.name == chapter)
        if chapter_descriptor and not getattr(chapter_descriptor, 'is_entrance_exam', False) \
                and user_must_complete_entrance_exam(request, user, course):
            log.info(u'User %d tried to view course %s without passing entrance exam', user.id, unicode(course.id))
            return redirect(reverse('courseware', args=[unicode(course.id)]))
    # check to see if there is a required survey that must be taken before
    # the user can access the course.
    if survey.utils.must_answer_survey(course, user):
        return redirect(reverse('course_survey', args=[unicode(course.id)]))
    bookmarks_api_url = reverse('bookmarks')
    try:
        field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
            course_key, user, course, depth=2)
        course_module = get_module_for_descriptor(
            user, request, course, field_data_cache, course_key, course=course
        )
        if course_module is None:
            log.warning(u'If you see this, something went wrong: if we got this'
                        u' far, should have gotten a course module for this user')
            return redirect(reverse('about_course', args=[course_key.to_deprecated_string()]))
        studio_url = get_studio_url(course, 'course')
        language_preference = get_user_preference(request.user, LANGUAGE_KEY)
        if not language_preference:
            language_preference = settings.LANGUAGE_CODE
        # Base template context; 'fragment' is replaced below when a section
        # (or welcome-back message) is rendered.
        context = {
            'csrf': csrf(request)['csrf_token'],
            'accordion': render_accordion(user, request, course, chapter, section, field_data_cache),
            'COURSE_TITLE': course.display_name_with_default_escaped,
            'course': course,
            'init': '',
            'fragment': Fragment(),
            'staff_access': staff_access,
            'studio_url': studio_url,
            'masquerade': masquerade,
            'xqa_server': settings.FEATURES.get('XQA_SERVER', "http://your_xqa_server.com"),
            'bookmarks_api_url': bookmarks_api_url,
            'language_preference': language_preference,
            'disable_optimizely': True,
        }
        now = datetime.now(UTC())
        effective_start = _adjust_start_date_for_beta_testers(user, course, course_key)
        if not in_preview_mode() and staff_access and now < effective_start:
            # Disable student view button if user is staff and
            # course is not yet visible to students.
            context['disable_student_access'] = True
        has_content = course.has_children_at_depth(CONTENT_DEPTH)
        if not has_content:
            # Show empty courseware for a course with no units
            return render_to_response('courseware/courseware.html', context)
        elif chapter is None:
            # Check first to see if we should instead redirect the user to an Entrance Exam
            if course_has_entrance_exam(course):
                exam_chapter = get_entrance_exam_content(request, course)
                if exam_chapter:
                    exam_section = None
                    if exam_chapter.get_children():
                        exam_section = exam_chapter.get_children()[0]
                        if exam_section:
                            return redirect('courseware_section',
                                            course_id=unicode(course_key),
                                            chapter=exam_chapter.url_name,
                                            section=exam_section.url_name)
            # passing CONTENT_DEPTH avoids returning 404 for a course with an
            # empty first section and a second section with content
            return redirect_to_course_position(course_module, CONTENT_DEPTH)
        chapter_descriptor = course.get_child_by(lambda m: m.location.name == chapter)
        if chapter_descriptor is not None:
            save_child_position(course_module, chapter)
        else:
            raise Http404('No chapter descriptor found with name {}'.format(chapter))
        chapter_module = course_module.get_child_by(lambda m: m.location.name == chapter)
        if chapter_module is None:
            # User may be trying to access a chapter that isn't live yet
            if masquerade and masquerade.role == 'student':  # if staff is masquerading as student be kinder, don't 404
                log.debug('staff masquerading as student: no chapter %s', chapter)
                return redirect(reverse('courseware', args=[course.id.to_deprecated_string()]))
            raise Http404
        if course_has_entrance_exam(course):
            # Message should not appear outside the context of entrance exam subsection.
            # if section is none then we don't need to show message on welcome back screen also.
            if getattr(chapter_module, 'is_entrance_exam', False) and section is not None:
                context['entrance_exam_current_score'] = get_entrance_exam_score(request, course)
                context['entrance_exam_passed'] = user_has_passed_entrance_exam(request, course)
        if section is not None:
            section_descriptor = chapter_descriptor.get_child_by(lambda m: m.location.name == section)
            if section_descriptor is None:
                # Specifically asked-for section doesn't exist
                if masquerade and masquerade.role == 'student':  # don't 404 if staff is masquerading as student
                    log.debug('staff masquerading as student: no section %s', section)
                    return redirect(reverse('courseware', args=[course.id.to_deprecated_string()]))
                raise Http404
            ## Allow chromeless operation
            if section_descriptor.chrome:
                chrome = [s.strip() for s in section_descriptor.chrome.lower().split(",")]
                if 'accordion' not in chrome:
                    context['disable_accordion'] = True
                if 'tabs' not in chrome:
                    context['disable_tabs'] = True
            if section_descriptor.default_tab:
                context['default_tab'] = section_descriptor.default_tab
            # cdodge: this looks silly, but let's refetch the section_descriptor with depth=None
            # which will prefetch the children more efficiently than doing a recursive load
            section_descriptor = modulestore().get_item(section_descriptor.location, depth=None)
            # Load all descendants of the section, because we're going to display its
            # html, which in general will need all of its children
            field_data_cache.add_descriptor_descendents(
                section_descriptor, depth=None
            )
            section_module = get_module_for_descriptor(
                user,
                request,
                section_descriptor,
                field_data_cache,
                course_key,
                position,
                course=course
            )
            if section_module is None:
                # User may be trying to be clever and access something
                # they don't have access to.
                raise Http404
            # Save where we are in the chapter.
            save_child_position(chapter_module, section)
            section_render_context = {'activate_block_id': request.GET.get('activate_block_id')}
            context['fragment'] = section_module.render(STUDENT_VIEW, section_render_context)
            context['section_title'] = section_descriptor.display_name_with_default_escaped
        else:
            # section is none, so display a message
            studio_url = get_studio_url(course, 'course')
            prev_section = get_current_child(chapter_module)
            if prev_section is None:
                # Something went wrong -- perhaps this chapter has no sections visible to the user.
                # Clearing out the last-visited state and showing "first-time" view by redirecting
                # to courseware.
                course_module.position = None
                course_module.save()
                return redirect(reverse('courseware', args=[course.id.to_deprecated_string()]))
            prev_section_url = reverse('courseware_section', kwargs={
                'course_id': course_key.to_deprecated_string(),
                'chapter': chapter_descriptor.url_name,
                'section': prev_section.url_name
            })
            context['fragment'] = Fragment(content=render_to_string(
                'courseware/welcome-back.html',
                {
                    'course': course,
                    'studio_url': studio_url,
                    'chapter_module': chapter_module,
                    'prev_section': prev_section,
                    'prev_section_url': prev_section_url
                }
            ))
        result = render_to_response('courseware/courseware.html', context)
    except Exception as e:
        # Doesn't bar Unicode characters from URL, but if Unicode characters do
        # cause an error it is a graceful failure.
        if isinstance(e, UnicodeEncodeError):
            raise Http404("URL contains Unicode characters")
        if isinstance(e, Http404):
            # let it propagate
            raise
        # In production, don't want to let a 500 out for any reason
        if settings.DEBUG:
            raise
        else:
            log.exception(
                u"Error in index view: user=%s, effective_user=%s, course=%s, chapter=%s section=%s position=%s",
                request.user, user, course, chapter, section, position
            )
            try:
                result = render_to_response('courseware/courseware-error.html', {
                    'staff_access': staff_access,
                    'course': course
                })
            except:
                # Let the exception propagate, relying on global config to at
                # at least return a nice error message
                log.exception("Error while rendering courseware-error page")
                raise
    return result
@ensure_csrf_cookie
@ensure_valid_course_key
def jump_to_id(request, course_id, module_id):
    """
    Shorter form of jump_to that takes only the id of the target element,
    which is assumed to be unique within the course_id namespace.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    matches = modulestore().get_items(course_key, qualifiers={'name': module_id})

    # No match at all is a hard 404; multiple matches are logged and the
    # first one wins.
    if not matches:
        raise Http404(
            u"Could not find id: {0} in course_id: {1}. Referer: {2}".format(
                module_id, course_id, request.META.get("HTTP_REFERER", "")
            ))
    if len(matches) > 1:
        log.warning(
            u"Multiple items found with id: %s in course_id: %s. Referer: %s. Using first: %s",
            module_id,
            course_id,
            request.META.get("HTTP_REFERER", ""),
            matches[0].location.to_deprecated_string()
        )

    return jump_to(request, course_id, matches[0].location.to_deprecated_string())
@ensure_csrf_cookie
def jump_to(_request, course_id, location):
    """
    Show the page that contains a specific location.

    Returns a 404 when the location is malformed or not part of any class;
    otherwise redirects to the canonical courseware URL for the location,
    leaving the access decision to the index view.
    """
    try:
        course_key = CourseKey.from_string(course_id)
        usage_key = UsageKey.from_string(location).replace(course_key=course_key)
    except InvalidKeyError:
        raise Http404(u"Invalid course_key or usage_key")

    try:
        destination = get_redirect_url(course_key, usage_key)
    except ItemNotFoundError:
        raise Http404(u"No data at this location: {0}".format(usage_key))
    except NoPathToItem:
        raise Http404(u"This location is not in any class: {0}".format(usage_key))

    return redirect(destination)
@ensure_csrf_cookie
@ensure_valid_course_key
def course_info(request, course_id):
    """
    Display the course's info.html, or 404 if there is no such course.
    Assumes the course_id is in a valid format.

    Several redirects may fire before rendering:
    - a course that is not yet live redirects to the dashboard,
    - un-enrolled users hitting a CCX course are sent to the dashboard,
    - a pending entrance exam or required survey redirects accordingly.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    with modulestore().bulk_operations(course_key):
        course = get_course_by_id(course_key, depth=2)
        access_response = has_access(request.user, 'load', course, course_key)
        if not access_response:
            # The user doesn't have access to the course. If they're
            # denied permission due to the course not being live yet,
            # redirect to the dashboard page.
            if isinstance(access_response, StartDateError):
                start_date = strftime_localized(course.start, 'SHORT_DATE')
                params = urllib.urlencode({'notlive': start_date})
                return redirect('{0}?{1}'.format(reverse('dashboard'), params))
            # Otherwise, give a 404 to avoid leaking info about access
            # control.
            raise Http404("Course not found.")
        staff_access = has_access(request.user, 'staff', course)
        # NOTE(review): `user` here is presumably the effective (possibly
        # masqueraded) user, distinct from request.user — confirm against
        # setup_masquerade's contract.
        masquerade, user = setup_masquerade(request, course_key, staff_access, reset_masquerade_data=True)
        # if user is not enrolled in a course then app will show enroll/get register link inside course info page.
        show_enroll_banner = request.user.is_authenticated() and not CourseEnrollment.is_enrolled(user, course.id)
        if show_enroll_banner and hasattr(course_key, 'ccx'):
            # if course is CCX and user is not enrolled/registered then do not let him open course direct via link for
            # self registration. Because only CCX coach can register/enroll a student. If un-enrolled user try
            # to access CCX redirect him to dashboard.
            return redirect(reverse('dashboard'))
        # If the user needs to take an entrance exam to access this course, then we'll need
        # to send them to that specific course module before allowing them into other areas
        if user_must_complete_entrance_exam(request, user, course):
            return redirect(reverse('courseware', args=[unicode(course.id)]))
        # check to see if there is a required survey that must be taken before
        # the user can access the course.
        if request.user.is_authenticated() and survey.utils.must_answer_survey(course, user):
            return redirect(reverse('course_survey', args=[unicode(course.id)]))
        studio_url = get_studio_url(course, 'course_info')
        # link to where the student should go to enroll in the course:
        # about page if there is not marketing site, SITE_NAME if there is
        url_to_enroll = reverse(course_about, args=[course_id])
        if settings.FEATURES.get('ENABLE_MKTG_SITE'):
            url_to_enroll = marketing_link('COURSES')
        context = {
            'request': request,
            'masquerade_user': user,
            'course_id': course_key.to_deprecated_string(),
            'cache': None,
            'course': course,
            'staff_access': staff_access,
            'masquerade': masquerade,
            'studio_url': studio_url,
            'show_enroll_banner': show_enroll_banner,
            'url_to_enroll': url_to_enroll,
        }
        now = datetime.now(UTC())
        effective_start = _adjust_start_date_for_beta_testers(user, course, course_key)
        if not in_preview_mode() and staff_access and now < effective_start:
            # Disable student view button if user is staff and
            # course is not yet visible to students.
            context['disable_student_access'] = True
        return render_to_response('courseware/info.html', context)
@ensure_csrf_cookie
@ensure_valid_course_key
def static_tab(request, course_id, tab_slug):
    """
    Render the static course tab identified by ``tab_slug``.

    Raises 404 when the tab does not exist or its contents cannot be
    retrieved. Assumes the course_id is in a valid format.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_with_access(request.user, 'load', course_key)

    tab = CourseTabList.get_tab_by_slug(course.tabs, tab_slug)
    if tab is None:
        raise Http404

    contents = get_static_tab_contents(request, course, tab)
    if contents is None:
        raise Http404

    context = {
        'course': course,
        'tab': tab,
        'tab_contents': contents,
    }
    return render_to_response('courseware/static_tab.html', context)
@ensure_csrf_cookie
@ensure_valid_course_key
def syllabus(request, course_id):
    """
    Render the course's syllabus.html, or 404 if there is no such course.
    Assumes the course_id is in a valid format.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_with_access(request.user, 'load', course_key)

    context = {
        'course': course,
        'staff_access': bool(has_access(request.user, 'staff', course)),
    }
    return render_to_response('courseware/syllabus.html', context)
def registered_for_course(course, user):
    """
    Return True if user is registered for course, else False
    """
    # Anonymous or missing users are never considered registered.
    if user is None or not user.is_authenticated():
        return False
    return CourseEnrollment.is_enrolled(user, course.id)
def get_cosmetic_display_price(course, registration_price):
    """
    Return Course Price as a string preceded by correct currency, or 'Free'
    """
    currency_symbol = settings.PAID_COURSE_REGISTRATION_CURRENCY[1]

    # A positive registration price overrides the course's cosmetic price.
    price = course.cosmetic_display_price
    if registration_price > 0:
        price = registration_price

    if not price:
        # Translators: This refers to the cost of the course. In this case, the course costs nothing so it is free.
        return _('Free')
    # Translators: This will look like '$50', where {currency_symbol} is a symbol such as '$' and {price} is a
    # numerical amount in that currency. Adjust this display as needed for your language.
    return _("{currency_symbol}{price}").format(currency_symbol=currency_symbol, price=price)
@ensure_csrf_cookie
@cache_if_anonymous()
def course_about(request, course_id):
    """
    Display the course's about page.
    Assumes the course_id is in a valid format.

    CCX courses never show an about page (only the coach may enroll
    students), and a configured marketing site redirects to the info page.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    if hasattr(course_key, 'ccx'):
        # if un-enrolled/non-registered user try to access CCX (direct for registration)
        # then do not show him about page to avoid self registration.
        # Note: About page will only be shown to user who is not register. So that he can register. But for
        # CCX only CCX coach can enroll students.
        return redirect(reverse('dashboard'))
    with modulestore().bulk_operations(course_key):
        permission = get_permission_for_course_about()
        course = get_course_with_access(request.user, permission, course_key)
        if microsite.get_value('ENABLE_MKTG_SITE', settings.FEATURES.get('ENABLE_MKTG_SITE', False)):
            return redirect(reverse('info', args=[course.id.to_deprecated_string()]))
        registered = registered_for_course(course, request.user)
        staff_access = bool(has_access(request.user, 'staff', course))
        studio_url = get_studio_url(course, 'settings/details')
        # Users with 'load' access land on the info page; everyone else on
        # the public about page.
        if has_access(request.user, 'load', course):
            course_target = reverse('info', args=[course.id.to_deprecated_string()])
        else:
            course_target = reverse('about_course', args=[course.id.to_deprecated_string()])
        show_courseware_link = bool(
            (
                has_access(request.user, 'load', course)
                and has_access(request.user, 'view_courseware_with_prerequisites', course)
            )
            or settings.FEATURES.get('ENABLE_LMS_MIGRATION')
        )
        # Note: this is a flow for payment for course registration, not the Verified Certificate flow.
        registration_price = 0
        in_cart = False
        reg_then_add_to_cart_link = ""
        _is_shopping_cart_enabled = is_shopping_cart_enabled()
        if _is_shopping_cart_enabled:
            registration_price = CourseMode.min_course_price_for_currency(course_key,
                                                                          settings.PAID_COURSE_REGISTRATION_CURRENCY[0])
            if request.user.is_authenticated():
                cart = shoppingcart.models.Order.get_cart_for_user(request.user)
                in_cart = shoppingcart.models.PaidCourseRegistration.contained_in_order(cart, course_key) or \
                    shoppingcart.models.CourseRegCodeItem.contained_in_order(cart, course_key)
            reg_then_add_to_cart_link = "{reg_url}?course_id={course_id}&enrollment_action=add_to_cart".format(
                reg_url=reverse('register_user'), course_id=urllib.quote(str(course_id)))
        course_price = get_cosmetic_display_price(course, registration_price)
        can_add_course_to_cart = _is_shopping_cart_enabled and registration_price
        # Used to provide context to message to student if enrollment not allowed
        can_enroll = bool(has_access(request.user, 'enroll', course))
        invitation_only = course.invitation_only
        is_course_full = CourseEnrollment.objects.is_course_full(course)
        # Register button should be disabled if one of the following is true:
        # - Student is already registered for course
        # - Course is already full
        # - Student cannot enroll in course
        active_reg_button = not(registered or is_course_full or not can_enroll)
        is_shib_course = uses_shib(course)
        # get prerequisite courses display names
        pre_requisite_courses = get_prerequisite_courses_display(course)
        # Overview
        overview = CourseOverview.get_from_id(course.id)
        return render_to_response('courseware/course_about.html', {
            'course': course,
            'staff_access': staff_access,
            'studio_url': studio_url,
            'registered': registered,
            'course_target': course_target,
            'is_cosmetic_price_enabled': settings.FEATURES.get('ENABLE_COSMETIC_DISPLAY_PRICE'),
            'course_price': course_price,
            'in_cart': in_cart,
            'reg_then_add_to_cart_link': reg_then_add_to_cart_link,
            'show_courseware_link': show_courseware_link,
            'is_course_full': is_course_full,
            'can_enroll': can_enroll,
            'invitation_only': invitation_only,
            'active_reg_button': active_reg_button,
            'is_shib_course': is_shib_course,
            # We do not want to display the internal courseware header, which is used when the course is found in the
            # context. This value is therefore explicitly set to render the appropriate header.
            'disable_courseware_header': True,
            'can_add_course_to_cart': can_add_course_to_cart,
            'cart_link': reverse('shoppingcart.views.show_cart'),
            'pre_requisite_courses': pre_requisite_courses,
            'course_image_urls': overview.image_urls,
        })
@transaction.non_atomic_requests
@login_required
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@ensure_valid_course_key
def progress(request, course_id, student_id=None):
    """
    Display the progress page.

    Thin wrapper that parses the course key, opens a bulk-operations
    context on the modulestore, and delegates to ``_progress``.
    """
    course_key = CourseKey.from_string(course_id)
    with modulestore().bulk_operations(course_key):
        return _progress(request, course_key, student_id)
def _progress(request, course_key, student_id):
    """
    Unwrapped version of "progress".
    User progress. We show the grade bar and every problem score.
    Course staff are allowed to see the progress of students in their class.

    A teacher or CCX coach of the course may also view another student's
    progress (see the access check in the else branch below).
    """
    course = get_course_with_access(request.user, 'load', course_key, depth=None, check_if_enrolled=True)
    # check to see if there is a required survey that must be taken before
    # the user can access the course.
    if survey.utils.must_answer_survey(course, request.user):
        return redirect(reverse('course_survey', args=[unicode(course.id)]))
    # NOTE(review): 'teacher' looks like a fork-specific access role passed
    # to has_access — confirm it is registered with the access framework.
    teacher_access = bool(has_access(request.user,'teacher',course_key))
    staff_access = bool(has_access(request.user, 'staff', course))
    try:
        coach_access = has_ccx_coach_role(request.user, course_key)
    except CCXLocatorValidationException:
        coach_access = False
    # NOTE(review): student_id comes from the URL and is presumably a string,
    # while request.user.id is an int — confirm this comparison can ever be
    # True for a user viewing their own profile via an explicit id.
    if student_id is None or student_id == request.user.id:
        # always allowed to see your own profile
        student = request.user
    else:
        # Requesting access to a different student's profile
        #if not has_access_on_students_profiles:
        #    raise Http404
        try:
            student = User.objects.get(id=student_id)
            has_access_on_students_profiles = staff_access or coach_access or (teacher_access and is_teacher_of(student,request.user,course_key))
        # Check for ValueError if 'student_id' cannot be converted to integer.
        except (ValueError, User.DoesNotExist):
            raise Http404
        if not has_access_on_students_profiles:
            raise Http404
        #if (teacher_access and not is_teacher_of(student,request.user,course_key)):
        #    raise Http404
    # NOTE: To make sure impersonation by instructor works, use
    # student instead of request.user in the rest of the function.
    # The pre-fetching of groups is done to make auth checks not require an
    # additional DB lookup (this kills the Progress page in particular).
    student = User.objects.prefetch_related("groups").get(id=student.id)
    with outer_atomic():
        field_data_cache = grades.field_data_cache_for_grading(course, student)
        scores_client = ScoresClient.from_field_data_cache(field_data_cache)
    courseware_summary = grades.progress_summary(
        student, request, course, field_data_cache=field_data_cache, scores_client=scores_client
    )
    grade_summary = grades.grade(
        student, request, course, field_data_cache=field_data_cache, scores_client=scores_client
    )
    studio_url = get_studio_url(course, 'settings/grading')
    teacher_dashboard_url = reverse('teacher_dashboard',kwargs={'course_id':unicode(course_key)})+"#view-membership"
    if courseware_summary is None:
        #This means the student didn't have access to the course (which the instructor requested)
        raise Http404
    # checking certificate generation configuration
    show_generate_cert_btn = certs_api.cert_generation_enabled(course_key)
    context = {
        'course': course,
        'courseware_summary': courseware_summary,
        'studio_url': studio_url,
        'teacher_dashboard_url': teacher_dashboard_url,
        'grade_summary': grade_summary,
        'staff_access': staff_access,
        'teacher_access': teacher_access,
        'student': student,
        'passed': is_course_passed(course, grade_summary),
        'show_generate_cert_btn': show_generate_cert_btn,
        'credit_course_requirements': _credit_course_requirements(course_key, student),
    }
    if show_generate_cert_btn:
        # Merge the downloadable/generating flags straight into the context.
        cert_status = certs_api.certificate_downloadable_status(student, course_key)
        context.update(cert_status)
        # showing the certificate web view button if feature flags are enabled.
        if certs_api.has_html_certificates_enabled(course_key, course):
            if certs_api.get_active_web_certificate(course) is not None:
                context.update({
                    'show_cert_web_view': True,
                    'cert_web_view_url': certs_api.get_certificate_url(course_id=course_key, uuid=cert_status['uuid']),
                })
            else:
                context.update({
                    'is_downloadable': False,
                    'is_generating': True,
                    'download_url': None
                })
    with outer_atomic():
        response = render_to_response('courseware/progress.html', context)
    return response
def _credit_course_requirements(course_key, student):
    """Return information about which credit requirements a user has satisfied.

    Arguments:
        course_key (CourseKey): Identifier for the course.
        student (User): Currently logged in user.

    Returns: dict if the credit eligibility enabled and it is a credit course
    and the user is enrolled in either verified or credit mode, and None otherwise.
    """
    # Guard 1: feature disabled or not a credit course -> nothing to display.
    # (Checked in this order so is_credit_course is only queried when the
    # feature flag is on.)
    if not settings.FEATURES.get("ENABLE_CREDIT_ELIGIBILITY", False):
        return None
    if not is_credit_course(course_key):
        return None

    # Guard 2: requirements are only shown to verified/credit enrollments.
    enrollment = CourseEnrollment.get_enrollment(student, course_key)
    if enrollment.mode not in REQUIREMENTS_DISPLAY_MODES:
        return None

    # Per-requirement status for this user: "satisfied", "failed", "declined",
    # or None (unknown / pending more information).
    requirement_statuses = get_credit_requirement_status(course_key, student.username)

    # Once marked eligible, a user stays eligible unless someone manually
    # intervenes (e.g. deletes the CreditEligibility record) — even if new
    # requirements were added after the fact. Failing or declining any
    # requirement makes the user ineligible; otherwise eligibility is still
    # partial/pending.
    if is_user_eligible_for_credit(student.username, course_key):
        eligibility_status = "eligible"
    elif any(req['status'] in ('failed', 'declined') for req in requirement_statuses):
        eligibility_status = "not_eligible"
    else:
        eligibility_status = "partial_eligible"

    return {
        'eligibility_status': eligibility_status,
        'requirements': requirement_statuses,
    }
@login_required
@ensure_valid_course_key
def submission_history(request, course_id, student_username, location):
    """Render an HTML fragment (meant for inclusion elsewhere) that renders a
    history of all state changes made by this user for this problem location.
    Right now this only works for problems because that's all
    StudentModuleHistory records.

    Non-staff users may only view their own submission history.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    try:
        usage_key = course_key.make_usage_key_from_deprecated_string(location)
    except (InvalidKeyError, AssertionError):
        return HttpResponse(escape(_(u'Invalid location.')))
    course = get_course_overview_with_access(request.user, 'load', course_key)
    staff_access = bool(has_access(request.user, 'staff', course))
    # Permission Denied if they don't have staff access and are trying to see
    # somebody else's submission history.
    if (student_username != request.user.username) and (not staff_access):
        raise PermissionDenied
    user_state_client = DjangoXBlockUserStateClient()
    try:
        history_entries = list(user_state_client.get_history(student_username, usage_key))
    except DjangoXBlockUserStateClient.DoesNotExist:
        return HttpResponse(escape(_(u'User {username} has never accessed problem {location}').format(
            username=student_username,
            location=location
        )))
    # This is ugly, but until we have a proper submissions API that we can use to provide
    # the scores instead, it will have to do.
    scores = list(StudentModuleHistory.objects.filter(
        student_module__module_state_key=usage_key,
        student_module__student__username=student_username,
        student_module__course_id=course_key
    ).order_by('-id'))
    if len(scores) != len(history_entries):
        log.warning(
            "Mismatch when fetching scores for student "
            "history for course %s, user %s, xblock %s. "
            "%d scores were found, and %d history entries were found. "
            "Matching scores to history entries by date for display.",
            course_id,
            student_username,
            location,
            len(scores),
            len(history_entries),
        )
        # Fall back to pairing scores with history entries by timestamp.
        # NOTE(review): this assumes every history.updated has a matching
        # score.created — a missing timestamp would raise KeyError; confirm.
        scores_by_date = {
            score.created: score
            for score in scores
        }
        scores = [
            scores_by_date[history.updated]
            for history in history_entries
        ]
    context = {
        'history_entries': history_entries,
        'scores': scores,
        'username': student_username,
        'location': location,
        'course_id': course_key.to_deprecated_string()
    }
    return render_to_response('courseware/submission_history.html', context)
def get_static_tab_contents(request, course, tab):
    """
    Return the rendered HTML contents of the given static tab.

    Returns an empty string when the tab module cannot be loaded, and a
    rendered error page when the tab raises during rendering.
    """
    loc = course.id.make_usage_key(
        tab.type,
        tab.url_slug,
    )
    field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
        course.id, request.user, modulestore().get_item(loc), depth=0
    )
    tab_module = get_module(
        request.user, request, loc, field_data_cache, static_asset_path=course.static_asset_path, course=course
    )
    logging.debug('course_module = %s', tab_module)

    if tab_module is None:
        return ''
    try:
        return tab_module.render(STUDENT_VIEW).content
    except Exception:  # pylint: disable=broad-except
        html = render_to_string('courseware/error-message.html', None)
        log.exception(
            u"Error rendering course=%s, tab=%s", course, tab['url_slug']
        )
        return html
@require_GET
@ensure_valid_course_key
def get_course_lti_endpoints(request, course_id):
    """
    Return a JSON array enumerating every LTI endpoint in the given course.

    The LTI 2.0 result service spec
    (http://www.imsglobal.org/lti/ltiv2p0/uml/purl.imsglobal.org/vocab/lis/v2/outcomes/Result/service.html)
    does not prescribe an endpoint-discovery method; this view implements one.

    Arguments:
        request (django request object): the HTTP request object that triggered this view function
        course_id (unicode): id associated with the course

    Returns:
        (django response object): HTTP response. 404 if course is not found, otherwise 200 with JSON body.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)

    try:
        course = get_course(course_key, depth=2)
    except ValueError:
        return HttpResponse(status=404)

    anonymous_user = AnonymousUser()
    anonymous_user.known = False  # make these "noauth" requests like module_render.handle_xblock_callback_noauth

    lti_descriptors = modulestore().get_items(course.id, qualifiers={'category': 'lti'})
    lti_noauth_modules = []
    for descriptor in lti_descriptors:
        field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
            course_key,
            anonymous_user,
            descriptor
        )
        lti_noauth_modules.append(get_module_for_descriptor(
            anonymous_user,
            request,
            descriptor,
            field_data_cache,
            course_key,
            course=course
        ))

    endpoints = []
    for module in lti_noauth_modules:
        endpoints.append({
            'display_name': module.display_name,
            'lti_2_0_result_service_json_endpoint': module.get_outcome_service_url(
                service_name='lti_2_0_result_rest_handler') + "/user/{anon_user_id}",
            'lti_1_1_result_service_xml_endpoint': module.get_outcome_service_url(
                service_name='grade_handler'),
        })

    return HttpResponse(json.dumps(endpoints), content_type='application/json')
@login_required
def course_survey(request, course_id):
    """
    URL endpoint to present a survey that is associated with a course_id
    Note that the actual implementation of course survey is handled in the
    views.py file in the Survey Djangoapp
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = get_course_with_access(request.user, 'load', course_key)
    info_url = reverse('info', args=[course_id])

    # No survey configured for this course: just send the user to the
    # course info page.
    if not course.course_survey_name:
        return redirect(info_url)

    return survey.views.view_student_survey(
        request.user,
        course.course_survey_name,
        course=course,
        redirect_url=info_url,
        is_required=course.course_survey_required,
    )
def is_course_passed(course, grade_summary=None, student=None, request=None):
    """
    Check whether the user's grade meets the course's lowest nonzero cutoff.

    Arguments:
        course : course object whose ``grade_cutoffs`` define passing
        grade_summary (dict) : contains student grade details (must have a
            'percent' key); computed via ``grades.grade`` when omitted
        student : user object, used only when ``grade_summary`` is None
        request (HttpRequest) : used only when ``grade_summary`` is None

    Returns:
        bool: True if the student passed, False otherwise — including
        courses that define no nonzero grade cutoffs.
    """
    nonzero_cutoffs = [cutoff for cutoff in course.grade_cutoffs.values() if cutoff > 0]
    success_cutoff = min(nonzero_cutoffs) if nonzero_cutoffs else None

    if grade_summary is None:
        grade_summary = grades.grade(student, request, course)

    # Wrap in bool() so callers always receive True/False: the bare
    # `success_cutoff and ...` expression returned None (not False) when
    # the course had no nonzero cutoffs, contradicting the documented
    # bool return.
    return bool(success_cutoff and grade_summary['percent'] >= success_cutoff)
# Grades can potentially be written - if so, let grading manage the transaction.
@transaction.non_atomic_requests
@require_POST
def generate_user_cert(request, course_id):
    """Start generating a new certificate for the user.

    Generation is permitted only when the user has passed the course and
    does not already have a pending or completed certificate.

    If an error occurs while queueing the generation task (for example,
    the queue is down), the task status is simply marked "error" and can
    be re-run with a management command; to students the certificate
    appears to be "generating" until then.

    Args:
        request (HttpRequest): The POST request to this view.
        course_id (unicode): The identifier for the course.

    Returns:
        HttpResponse: 200 on success, 400 if a new certificate cannot be generated.
    """
    if not request.user.is_authenticated():
        log.info(u"Anon user trying to generate certificate for %s", course_id)
        return HttpResponseBadRequest(
            _('You must be signed in to {platform_name} to create a certificate.').format(
                platform_name=settings.PLATFORM_NAME
            )
        )

    student = request.user
    course_key = CourseKey.from_string(course_id)

    course = modulestore().get_course(course_key, depth=2)
    if not course:
        return HttpResponseBadRequest(_("Course is not valid"))

    if not is_course_passed(course, None, student, request):
        return HttpResponseBadRequest(_("Your certificate will be available when you pass the course."))

    certificate_status = certs_api.certificate_downloadable_status(student, course.id)
    if certificate_status["is_downloadable"]:
        return HttpResponseBadRequest(_("Certificate has already been created."))
    if certificate_status["is_generating"]:
        return HttpResponseBadRequest(_("Certificate is being created."))

    # Not already in-process or completed: queue a new generation task.
    # On queue failure the certificate is marked "error" (re-runnable via
    # management command); from the user's perspective submission succeeded.
    certs_api.generate_user_certificates(student, course.id, course=course, generation_mode='self')
    _track_successful_certificate_generation(student.id, course.id)
    return HttpResponse()
def _track_successful_certificate_generation(user_id, course_id):  # pylint: disable=invalid-name
    """
    Track a successful certificate generation event.

    Arguments:
        user_id (str): The ID of the user generating the certificate.
        course_id (CourseKey): Identifier for the course.

    Returns:
        None
    """
    # Analytics tracking is a no-op unless a Segment key is configured.
    if not settings.LMS_SEGMENT_KEY:
        return

    tracking_context = tracker.get_tracker().resolve_context()
    analytics.track(
        user_id,
        'edx.bi.user.certificate.generate',
        {
            'category': 'certificates',
            'label': unicode(course_id)
        },
        context={
            'ip': tracking_context.get('ip'),
            'Google Analytics': {
                'clientId': tracking_context.get('client_id')
            }
        }
    )
@require_http_methods(["GET", "POST"])
def render_xblock(request, usage_key_string, check_if_enrolled=True):
    """
    Return an HttpResponse with HTML content for the xBlock with the given usage_key.

    The HTML is a chromeless rendering of the xBlock (no header, footer,
    accordion, or other surrounding courseware chrome).
    """
    usage_key = UsageKey.from_string(usage_key_string)
    usage_key = usage_key.replace(course_key=modulestore().fill_in_run(usage_key.course_key))
    course_key = usage_key.course_key

    # Only the student view is supported by this endpoint.
    requested_view = request.GET.get('view', 'student_view')
    if requested_view != 'student_view':
        return HttpResponseBadRequest("Rendering of the xblock view '{}' is not supported.".format(requested_view))

    with modulestore().bulk_operations(course_key):
        # verify the user has access to the course, including enrollment check
        try:
            course = get_course_with_access(request.user, 'load', course_key, check_if_enrolled=check_if_enrolled)
        except UserNotEnrolled:
            raise Http404("Course not found.")

        # get the block, which verifies whether the user has access to the block.
        block, _ = get_module_by_usage_id(
            request, unicode(course_key), unicode(usage_key), disable_staff_debug_info=True, course=course
        )

        context = {
            'fragment': block.render('student_view', context=request.GET),
            'course': course,
            'staff_access': bool(has_access(request.user, 'staff', course)),
            'xqa_server': settings.FEATURES.get('XQA_SERVER', 'http://your_xqa_server.com'),
            # Strip all surrounding chrome so the block can be iframed.
            'disable_accordion': True,
            'allow_iframing': True,
            'disable_header': True,
            'disable_footer': True,
            'disable_window_wrap': True,
            'disable_preview_menu': True,
        }
        return render_to_response('courseware/courseware-chromeless.html', context)
# Translators: "percent_sign" is the symbol "%". "platform_name" is a
# string identifying the name of this installation, such as "edX".
# Intro text for the financial assistance page; the trailing .split('\n')
# turns the translated blob into a list of paragraphs for the template.
FINANCIAL_ASSISTANCE_HEADER = _(
    '{platform_name} now offers financial assistance for learners who want to earn Verified Certificates but'
    ' who may not be able to pay the Verified Certificate fee. Eligible learners may receive up to 90{percent_sign} off'
    ' the Verified Certificate fee for a course.\nTo apply for financial assistance, enroll in the'
    ' audit track for a course that offers Verified Certificates, and then complete this application.'
    ' Note that you must complete a separate application for each course you take.\n We plan to use this'
    ' information to evaluate your application for financial assistance and to further develop our'
    ' financial assistance program.'
).format(
    percent_sign="%",
    platform_name=settings.PLATFORM_NAME
).split('\n')
# Field labels for the financial assistance application form; some are
# reused as keys in the Zendesk ticket body (see financial_assistance_request).
FA_INCOME_LABEL = _('Annual Household Income')
FA_REASON_FOR_APPLYING_LABEL = _(
    'Tell us about your current financial situation. Why do you need assistance?'
)
FA_GOALS_LABEL = _(
    'Tell us about your learning or professional goals. How will a Verified Certificate in'
    ' this course help you achieve these goals?'
)
FA_EFFORT_LABEL = _(
    'Tell us about your plans for this course. What steps will you take to help you complete'
    ' the course work and receive a certificate?'
)
FA_SHORT_ANSWER_INSTRUCTIONS = _('Use between 250 and 500 words or so in your response.')
@login_required
def financial_assistance(_request):
    """Render the initial financial assistance page."""
    context = {'header_text': FINANCIAL_ASSISTANCE_HEADER}
    return render_to_response('financial-assistance/financial-assistance.html', context)
@login_required
@require_POST
def financial_assistance_request(request):
    """Submit a request for financial assistance to Zendesk.

    Expects a JSON body containing the applicant's details (username, course,
    name, email, country, income, short answers, marketing permission).

    Returns:
        204 on success, 400 on malformed/missing input, 403 when the JSON
        username does not match the session user, 500 when Zendesk rejects
        the ticket.
    """
    try:
        data = json.loads(request.body)
        # Simple sanity check that the session belongs to the user
        # submitting an FA request
        username = data['username']
        if request.user.username != username:
            return HttpResponseForbidden()

        course_id = data['course']
        course = modulestore().get_course(CourseKey.from_string(course_id))
        legal_name = data['name']
        email = data['email']
        country = data['country']
        income = data['income']
        reason_for_applying = data['reason_for_applying']
        goals = data['goals']
        effort = data['effort']
        marketing_permission = data['mktg-permission']
        ip_address = get_ip(request)
    except ValueError:
        # Thrown if JSON parsing fails
        return HttpResponseBadRequest(u'Could not parse request JSON.')
    except InvalidKeyError:
        # Thrown if course key parsing fails
        return HttpResponseBadRequest(u'Could not parse request course key.')
    except KeyError as err:
        # Thrown if fields are missing. Use err.args[0] (the missing key)
        # rather than the deprecated `err.message`, which was removed in
        # Python 3; the rendered message is identical.
        return HttpResponseBadRequest(u'The field {} is required.'.format(err.args[0]))

    zendesk_submitted = _record_feedback_in_zendesk(
        legal_name,
        email,
        u'Financial assistance request for learner {username} in course {course_name}'.format(
            username=username,
            course_name=course.display_name
        ),
        u'Financial Assistance Request',
        {'course_id': course_id},
        # Send the application as additional info on the ticket so
        # that it is not shown when support replies. This uses
        # OrderedDict so that information is presented in the right
        # order.
        OrderedDict((
            ('Username', username),
            ('Full Name', legal_name),
            ('Course ID', course_id),
            ('Annual Household Income', income),
            ('Country', country),
            ('Allowed for marketing purposes', 'Yes' if marketing_permission else 'No'),
            (FA_REASON_FOR_APPLYING_LABEL, '\n' + reason_for_applying + '\n\n'),
            (FA_GOALS_LABEL, '\n' + goals + '\n\n'),
            (FA_EFFORT_LABEL, '\n' + effort + '\n\n'),
            ('Client IP', ip_address),
        )),
        group_name='Financial Assistance',
        require_update=True
    )

    if not zendesk_submitted:
        # The call to Zendesk failed. The frontend will display a
        # message to the user.
        return HttpResponse(status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    return HttpResponse(status=status.HTTP_204_NO_CONTENT)
@login_required
def financial_assistance_form(request):
    """Render the financial assistance application form page.

    Builds the field descriptors consumed by the frontend form, including a
    course dropdown limited to the user's eligible enrollments.
    """
    user = request.user
    # A course is eligible when it still has an active (non-expired) verified
    # mode AND the learner is enrolled in a non-verified (e.g. audit) track.
    enrolled_courses = [
        {'name': enrollment.course_overview.display_name, 'value': unicode(enrollment.course_id)}
        for enrollment in CourseEnrollment.enrollments_for_user(user).order_by('-created')
        if CourseMode.objects.filter(
            Q(_expiration_datetime__isnull=True) | Q(_expiration_datetime__gt=datetime.now(UTC())),
            course_id=enrollment.course_id,
            mode_slug=CourseMode.VERIFIED
        ).exists()
        and enrollment.mode != CourseMode.VERIFIED
    ]
    return render_to_response('financial-assistance/apply.html', {
        'header_text': FINANCIAL_ASSISTANCE_HEADER,
        'student_faq_url': marketing_link('FAQ'),
        'dashboard_url': reverse('dashboard'),
        'account_settings_url': reverse('account_settings'),
        'platform_name': settings.PLATFORM_NAME,
        # Pre-filled, read-only identity details shown on the form.
        'user_details': {
            'email': user.email,
            'username': user.username,
            'name': user.profile.name,
            'country': str(user.profile.country.name),
        },
        'submit_url': reverse('submit_financial_assistance_request'),
        # Field descriptors: names here must match the keys read by
        # financial_assistance_request (e.g. 'course', 'income',
        # 'mktg-permission').
        'fields': [
            {
                'name': 'course',
                'type': 'select',
                'label': _('Course'),
                'placeholder': '',
                'defaultValue': '',
                'required': True,
                'options': enrolled_courses,
                'instructions': _(
                    'Select the course for which you want to earn a verified certificate. If'
                    ' the course does not appear in the list, make sure that you have enrolled'
                    ' in the audit track for the course.'
                )
            },
            {
                'name': 'income',
                'type': 'text',
                'label': FA_INCOME_LABEL,
                'placeholder': _('income in US Dollars ($)'),
                'defaultValue': '',
                'required': True,
                'restrictions': {},
                'instructions': _('Specify your annual household income in US Dollars.')
            },
            {
                'name': 'reason_for_applying',
                'type': 'textarea',
                'label': FA_REASON_FOR_APPLYING_LABEL,
                'placeholder': '',
                'defaultValue': '',
                'required': True,
                'restrictions': {
                    'min_length': settings.FINANCIAL_ASSISTANCE_MIN_LENGTH,
                    'max_length': settings.FINANCIAL_ASSISTANCE_MAX_LENGTH
                },
                'instructions': FA_SHORT_ANSWER_INSTRUCTIONS
            },
            {
                'name': 'goals',
                'type': 'textarea',
                'label': FA_GOALS_LABEL,
                'placeholder': '',
                'defaultValue': '',
                'required': True,
                'restrictions': {
                    'min_length': settings.FINANCIAL_ASSISTANCE_MIN_LENGTH,
                    'max_length': settings.FINANCIAL_ASSISTANCE_MAX_LENGTH
                },
                'instructions': FA_SHORT_ANSWER_INSTRUCTIONS
            },
            {
                'name': 'effort',
                'type': 'textarea',
                'label': FA_EFFORT_LABEL,
                'placeholder': '',
                'defaultValue': '',
                'required': True,
                'restrictions': {
                    'min_length': settings.FINANCIAL_ASSISTANCE_MIN_LENGTH,
                    'max_length': settings.FINANCIAL_ASSISTANCE_MAX_LENGTH
                },
                'instructions': FA_SHORT_ANSWER_INSTRUCTIONS
            },
            {
                'placeholder': '',
                'name': 'mktg-permission',
                'label': _(
                    'I allow edX to use the information provided in this application '
                    '(except for financial information) for edX marketing purposes.'
                ),
                'defaultValue': '',
                'type': 'checkbox',
                'required': False,
                'instructions': '',
                'restrictions': {}
            }
        ],
    })
| agpl-3.0 |
sunqm/pyscf | pyscf/pbc/cc/kccsd_uhf.py | 1 | 59554 | #!/usr/bin/env python
# Copyright 2017-2021 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Authors: James D. McClain
# Mario Motta
# Yang Gao
# Qiming Sun <osirpt.sun@gmail.com>
# Jason Yu
# Alec White
#
from functools import reduce
import numpy as np
import h5py
from pyscf import lib
from pyscf.lib import logger
from pyscf.pbc import scf
from pyscf.cc import uccsd
from pyscf.pbc.lib import kpts_helper
from pyscf.pbc.lib.kpts_helper import gamma_point
from pyscf.lib.parameters import LOOSE_ZERO_TOL, LARGE_DENOM # noqa
from pyscf.pbc.mp.kump2 import (get_frozen_mask, get_nocc, get_nmo,
padded_mo_coeff, padding_k_idx) # noqa
from pyscf.pbc.cc import kintermediates_uhf
from pyscf import __config__
# Module-wide shorthand: tensor contractions below go through pyscf's
# optimized einsum wrapper.
einsum = lib.einsum

# --- list2array
def mo_c_list_to_array(mo_coeff):
    """Stack per-k-point MO coefficient matrices into one array per spin.

    For each of the two spin channels, the list of (nao, nao) matrices is
    packed into a single complex ndarray of shape (nkpts, nao, nao).
    """
    stacked = []
    for spin_blocks in (mo_coeff[0], mo_coeff[1]):
        nkpts = len(spin_blocks)
        nao = spin_blocks[0].shape[0]
        out = np.zeros((nkpts, nao, nao), dtype=complex)
        for k, c in enumerate(spin_blocks):
            out[k] = c
        stacked.append(out)
    return stacked
def convert_mo_coeff(mo_coeff):
    """Return mo_coeff as per-spin 3D ndarrays.

    A list-of-lists layout (one matrix per k-point per spin) is converted via
    mo_c_list_to_array; anything else is passed through unchanged.
    """
    if not isinstance(mo_coeff[0], list):
        return mo_coeff
    return mo_c_list_to_array(mo_coeff)
def update_amps(cc, t1, t2, eris):
    """Perform one KUCCSD amplitude-update step.

    Builds the T1 and T2 residuals from the current amplitudes and the MO
    integrals in `eris`, then divides by the orbital-energy denominators
    (entries belonging to k-point padding are masked with LARGE_DENOM so the
    corresponding amplitudes stay ~0).

    Args:
        cc: KUCCSD object (supplies nkpts, khelper.kconserv, level_shift).
        t1: tuple (t1a, t1b) of spin-up/spin-down singles amplitudes.
        t2: tuple (t2aa, t2ab, t2bb) of doubles amplitudes.
        eris: integral container with fock, mo_energy and the antisymmetrized
            two-electron blocks (ovov, voov, ... and their spin variants).

    Returns:
        ((Ht1a, Ht1b), (Ht2aa, Ht2ab, Ht2bb)): updated amplitudes.
    """
    time0 = logger.process_clock(), logger.perf_counter()
    log = logger.Logger(cc.stdout, cc.verbose)

    t1a, t1b = t1
    t2aa, t2ab, t2bb = t2
    Ht1a = np.zeros_like(t1a)
    Ht1b = np.zeros_like(t1b)
    Ht2aa = np.zeros_like(t2aa)
    Ht2ab = np.zeros_like(t2ab)
    Ht2bb = np.zeros_like(t2bb)

    nkpts, nocca, nvira = t1a.shape
    noccb, nvirb = t1b.shape[1:]
    #fvv_ = eris.fock[0][:,nocca:,nocca:]
    #fVV_ = eris.fock[1][:,noccb:,noccb:]
    #foo_ = eris.fock[0][:,:nocca,:nocca]
    #fOO_ = eris.fock[1][:,:noccb,:noccb]
    fov_ = eris.fock[0][:,:nocca,nocca:]
    fOV_ = eris.fock[1][:,:noccb,noccb:]

    # Get location of padded elements in occupied and virtual space
    nonzero_padding_alpha, nonzero_padding_beta = padding_k_idx(cc, kind="split")
    nonzero_opadding_alpha, nonzero_vpadding_alpha = nonzero_padding_alpha
    nonzero_opadding_beta, nonzero_vpadding_beta = nonzero_padding_beta

    # Occupied/virtual orbital energies; the level shift is folded into the
    # virtual energies so it propagates into all denominators below.
    mo_ea_o = [e[:nocca] for e in eris.mo_energy[0]]
    mo_eb_o = [e[:noccb] for e in eris.mo_energy[1]]
    mo_ea_v = [e[nocca:] + cc.level_shift for e in eris.mo_energy[0]]
    mo_eb_v = [e[noccb:] + cc.level_shift for e in eris.mo_energy[1]]

    # One-particle CCSD intermediates (Fvv/Foo/Fov and beta-spin variants).
    Fvv_, FVV_ = kintermediates_uhf.cc_Fvv(cc, t1, t2, eris)
    Foo_, FOO_ = kintermediates_uhf.cc_Foo(cc, t1, t2, eris)
    Fov_, FOV_ = kintermediates_uhf.cc_Fov(cc, t1, t2, eris)

    # Move energy terms to the other side
    for k in range(nkpts):
        Fvv_[k][np.diag_indices(nvira)] -= mo_ea_v[k]
        FVV_[k][np.diag_indices(nvirb)] -= mo_eb_v[k]
        Foo_[k][np.diag_indices(nocca)] -= mo_ea_o[k]
        FOO_[k][np.diag_indices(noccb)] -= mo_eb_o[k]

    # Get the momentum conservation array
    kconserv = cc.khelper.kconserv

    # T1 equation
    P = kintermediates_uhf.kconserv_mat(cc.nkpts, cc.khelper.kconserv)
    Ht1a += fov_.conj()
    Ht1b += fOV_.conj()
    Ht1a += einsum('xyximae,yme->xia', t2aa, Fov_)
    Ht1a += einsum('xyximae,yme->xia', t2ab, FOV_)
    Ht1b += einsum('xyximae,yme->xia', t2bb, FOV_)
    Ht1b += einsum('yxymiea,yme->xia', t2ab, Fov_)
    Ht1a -= einsum('xyzmnae, xzymine->zia', t2aa, eris.ooov)
    Ht1a -= einsum('xyzmNaE, xzymiNE->zia', t2ab, eris.ooOV)
    #Ht1a -= einsum('xyzmnae,xzymine,xyzw->zia', t2aa, eris.ooov, P)
    #Ht1a -= einsum('xyzmNaE,xzymiNE,xyzw->zia', t2ab, eris.ooOV, P)
    Ht1b -= einsum('xyzmnae, xzymine->zia', t2bb, eris.OOOV)
    #Ht1b -= einsum('xyzmnae,xzymine,xyzw->zia', t2bb, eris.OOOV, P)
    Ht1b -= einsum('yxwnmea,xzymine,xyzw->zia', t2ab, eris.OOov, P)

    for ka in range(nkpts):
        Ht1a[ka] += einsum('ie,ae->ia', t1a[ka], Fvv_[ka])
        Ht1b[ka] += einsum('ie,ae->ia', t1b[ka], FVV_[ka])
        Ht1a[ka] -= einsum('ma,mi->ia', t1a[ka], Foo_[ka])
        Ht1b[ka] -= einsum('ma,mi->ia', t1b[ka], FOO_[ka])
        for km in range(nkpts):
            # ka == ki; km == kf == km
            # <ma||if> = [mi|af] - [mf|ai]
            # => [mi|af] - [fm|ia]
            Ht1a[ka] += einsum('mf,aimf->ia', t1a[km], eris.voov[ka, ka, km])
            Ht1a[ka] -= einsum('mf,miaf->ia', t1a[km], eris.oovv[km, ka, ka])
            Ht1a[ka] += einsum('MF,aiMF->ia', t1b[km], eris.voOV[ka, ka, km])
            # miaf - mfai => miaf - fmia
            Ht1b[ka] += einsum('MF,AIMF->IA', t1b[km], eris.VOOV[ka, ka, km])
            Ht1b[ka] -= einsum('MF,MIAF->IA', t1b[km], eris.OOVV[km, ka, ka])
            Ht1b[ka] += einsum('mf,fmIA->IA', t1a[km], eris.voOV[km, km, ka].conj())
            for kf in range(nkpts):
                ki = ka
                ke = kconserv[ki, kf, km]
                Ht1a[ka] += einsum('imef,fmea->ia', t2aa[ki,km,ke], eris.vovv[kf,km,ke].conj())
                Ht1a[ka] += einsum('iMeF,FMea->ia', t2ab[ki,km,ke], eris.VOvv[kf,km,ke].conj())
                Ht1b[ka] += einsum('IMEF,FMEA->IA', t2bb[ki,km,ke], eris.VOVV[kf,km,ke].conj())
                Ht1b[ka] += einsum('mIfE,fmEA->IA', t2ab[km,ki,kf], eris.voVV[kf,km,ke].conj())

    for ki, kj, ka in kpts_helper.loop_kkk(nkpts):
        kb = kconserv[ki, ka, kj]
        # Fvv equation
        Ftmpa_kb = Fvv_[kb] - 0.5 * einsum('mb,me->be', t1a[kb], Fov_[kb])
        Ftmpb_kb = FVV_[kb] - 0.5 * einsum('MB,ME->BE', t1b[kb], FOV_[kb])
        Ftmpa_ka = Fvv_[ka] - 0.5 * einsum('mb,me->be', t1a[ka], Fov_[ka])
        Ftmpb_ka = FVV_[ka] - 0.5 * einsum('MB,ME->BE', t1b[ka], FOV_[ka])
        tmp = einsum('ijae,be->ijab', t2aa[ki, kj, ka], Ftmpa_kb)
        Ht2aa[ki, kj, ka] += tmp
        tmp = einsum('IJAE,BE->IJAB', t2bb[ki, kj, ka], Ftmpb_kb)
        Ht2bb[ki, kj, ka] += tmp
        tmp = einsum('iJaE,BE->iJaB', t2ab[ki, kj, ka], Ftmpb_kb)
        Ht2ab[ki, kj, ka] += tmp
        tmp = einsum('iJeB,ae->iJaB', t2ab[ki, kj, ka], Ftmpa_ka)
        Ht2ab[ki, kj, ka] += tmp
        #P(ab)
        tmp = einsum('ijbe,ae->ijab', t2aa[ki, kj, kb], Ftmpa_ka)
        Ht2aa[ki, kj, ka] -= tmp
        tmp = einsum('IJBE,AE->IJAB', t2bb[ki, kj, kb], Ftmpb_ka)
        Ht2bb[ki, kj, ka] -= tmp
        # Foo equation
        Ftmpa_kj = Foo_[kj] + 0.5 * einsum('je,me->mj', t1a[kj], Fov_[kj])
        Ftmpb_kj = FOO_[kj] + 0.5 * einsum('JE,ME->MJ', t1b[kj], FOV_[kj])
        Ftmpa_ki = Foo_[ki] + 0.5 * einsum('je,me->mj', t1a[ki], Fov_[ki])
        Ftmpb_ki = FOO_[ki] + 0.5 * einsum('JE,ME->MJ', t1b[ki], FOV_[ki])
        tmp = einsum('imab,mj->ijab', t2aa[ki, kj, ka], Ftmpa_kj)
        Ht2aa[ki, kj, ka] -= tmp
        tmp = einsum('IMAB,MJ->IJAB', t2bb[ki, kj, ka], Ftmpb_kj)
        Ht2bb[ki, kj, ka] -= tmp
        tmp = einsum('iMaB,MJ->iJaB', t2ab[ki, kj, ka], Ftmpb_kj)
        Ht2ab[ki, kj, ka] -= tmp
        tmp = einsum('mJaB,mi->iJaB', t2ab[ki, kj, ka], Ftmpa_ki)
        Ht2ab[ki, kj, ka] -= tmp
        #P(ij)
        tmp = einsum('jmab,mi->ijab', t2aa[kj, ki, ka], Ftmpa_ki)
        Ht2aa[ki, kj, ka] += tmp
        tmp = einsum('JMAB,MI->IJAB', t2bb[kj, ki, ka], Ftmpb_ki)
        Ht2bb[ki, kj, ka] += tmp

    # T2 equation
    eris_ovov = np.asarray(eris.ovov)
    eris_OVOV = np.asarray(eris.OVOV)
    eris_ovOV = np.asarray(eris.ovOV)
    Ht2aa += (eris_ovov.transpose(0,2,1,3,5,4,6) - eris_ovov.transpose(2,0,1,5,3,4,6)).conj()
    Ht2bb += (eris_OVOV.transpose(0,2,1,3,5,4,6) - eris_OVOV.transpose(2,0,1,5,3,4,6)).conj()
    Ht2ab += eris_ovOV.transpose(0,2,1,3,5,4,6).conj()

    tauaa, tauab, taubb = kintermediates_uhf.make_tau(cc, t2, t1, t1)
    Woooo, WooOO, WOOOO = kintermediates_uhf.cc_Woooo(cc, t1, t2, eris)

    # Add the contributions from Wvvvv
    for km, ki, kn in kpts_helper.loop_kkk(nkpts):
        kj = kconserv[km,ki,kn]
        Woooo[km,ki,kn] += .5 * einsum('xmenf, xijef->minj', eris_ovov[km,:,kn], tauaa[ki,kj])
        WOOOO[km,ki,kn] += .5 * einsum('xMENF, xIJEF->MINJ', eris_OVOV[km,:,kn], taubb[ki,kj])
        WooOO[km,ki,kn] += .5 * einsum('xmeNF, xiJeF->miNJ', eris_ovOV[km,:,kn], tauab[ki,kj])

    for km, ki, kn in kpts_helper.loop_kkk(nkpts):
        kj = kconserv[km,ki,kn]
        Ht2aa[ki,kj,:] += einsum('minj,wmnab->wijab', Woooo[km,ki,kn], tauaa[km,kn]) * .5
        Ht2bb[ki,kj,:] += einsum('MINJ,wMNAB->wIJAB', WOOOO[km,ki,kn], taubb[km,kn]) * .5
        Ht2ab[ki,kj,:] += einsum('miNJ,wmNaB->wiJaB', WooOO[km,ki,kn], tauab[km,kn])

    add_vvvv_(cc, (Ht2aa, Ht2ab, Ht2bb), t1, t2, eris)

    Wovvo, WovVO, WOVvo, WOVVO, WoVVo, WOvvO = \
        kintermediates_uhf.cc_Wovvo(cc, t1, t2, eris)

    #:Ht2ab += einsum('xwzimae,wvumeBJ,xwzv,wuvy->xyziJaB', t2aa, WovVO, P, P)
    #:Ht2ab += einsum('xwziMaE,wvuMEBJ,xwzv,wuvy->xyziJaB', t2ab, WOVVO, P, P)
    #:Ht2ab -= einsum('xie,zma,uwzBJme,zuwx,xyzu->xyziJaB', t1a, t1a, eris.VOov, P, P)
    for kx, kw, kz in kpts_helper.loop_kkk(nkpts):
        kv = kconserv[kx, kz, kw]
        for ku in range(nkpts):
            ky = kconserv[kw, kv, ku]
            Ht2ab[kx, ky, kz] += lib.einsum('imae,mebj->ijab', t2aa[kx,kw,kz], WovVO[kw,kv,ku])
            Ht2ab[kx, ky, kz] += lib.einsum('imae,mebj->ijab', t2ab[kx,kw,kz], WOVVO[kw,kv,ku])

    #for kz, ku, kw in kpts_helper.loop_kkk(nkpts):
    #    kx = kconserv[kz,kw,ku]
    #    ky = kconserv[kz,kx,ku]
    #    continue
    #    Ht2ab[kx, ky, kz] -= lib.einsum('ie, ma, emjb->ijab', t1a[kx], t1a[kz], eris.voOV[kx,kz,kw].conj())
    Ht2ab -= einsum('xie, yma, xyzemjb->xzyijab', t1a, t1a, eris.voOV[:].conj())

    #:Ht2ab += einsum('wxvmIeA,wvumebj,xwzv,wuvy->yxujIbA', t2ab, Wovvo, P, P)
    #:Ht2ab += einsum('wxvMIEA,wvuMEbj,xwzv,wuvy->yxujIbA', t2bb, WOVvo, P, P)
    #:Ht2ab -= einsum('xIE,zMA,uwzbjME,zuwx,xyzu->yxujIbA', t1b, t1b, eris.voOV, P, P)
    #for kx, kw, kz in kpts_helper.loop_kkk(nkpts):
    #    kv = kconserv[kx, kz, kw]
    #    for ku in range(nkpts):
    #        ky = kconserv[kw, kv, ku]
    #        #Ht2ab[ky,kx,ku] += lib.einsum('miea, mebj-> jiba', t2ab[kw,kx,kv], Wovvo[kw,kv,ku])
    #        #Ht2ab[ky,kx,ku] += lib.einsum('miea, mebj-> jiba', t2bb[kw,kx,kv], WOVvo[kw,kv,ku])
    for km, ke, kb in kpts_helper.loop_kkk(nkpts):
        kj = kconserv[km, ke, kb]
        Ht2ab[kj,:,kb] += einsum('xmiea, mebj->xjiba', t2ab[km,:,ke], Wovvo[km,ke,kb])
        Ht2ab[kj,:,kb] += einsum('xmiea, mebj->xjiba', t2bb[km,:,ke], WOVvo[km,ke,kb])

    for kz, ku, kw in kpts_helper.loop_kkk(nkpts):
        kx = kconserv[kz, kw, ku]
        ky = kconserv[kz, kx, ku]
        Ht2ab[ky,kx,ku] -= lib.einsum('ie, ma, bjme->jiba', t1b[kx], t1b[kz], eris.voOV[ku,kw,kz])

    #:Ht2ab += einsum('xwviMeA,wvuMebJ,xwzv,wuvy->xyuiJbA', t2ab, WOvvO, P, P)
    #:Ht2ab -= einsum('xie,zMA,zwuMJbe,zuwx,xyzu->xyuiJbA', t1a, t1b, eris.OOvv, P, P)
    #for kx, kw, kz in kpts_helper.loop_kkk(nkpts):
    #    kv = kconserv[kx, kz, kw]
    #    for ku in range(nkpts):
    #        ky = kconserv[kw, kv, ku]
    #        Ht2ab[kx,ky,ku] += lib.einsum('imea,mebj->ijba', t2ab[kx,kw,kv],WOvvO[kw,kv,ku])
    for km, ke, kb in kpts_helper.loop_kkk(nkpts):
        kj = kconserv[km, ke, kb]
        Ht2ab[:,kj,kb] += einsum('ximea, mebj->xijba', t2ab[:,km,ke], WOvvO[km,ke,kb])

    for kz,ku,kw in kpts_helper.loop_kkk(nkpts):
        kx = kconserv[kz, kw, ku]
        ky = kconserv[kz, kx, ku]
        Ht2ab[kx,ky,ku] -= lib.einsum('ie, ma, mjbe->ijba', t1a[kx], t1b[kz], eris.OOvv[kz, kw, ku])

    #:Ht2ab += einsum('wxzmIaE,wvumEBj,xwzv,wuvy->yxzjIaB', t2ab, WoVVo, P, P)
    #:Ht2ab -= einsum('xIE,zma,zwumjBE,zuwx,xyzu->yxzjIaB', t1b, t1a, eris.ooVV, P, P)
    for kx, kw, kz in kpts_helper.loop_kkk(nkpts):
        kv = kconserv[kx, kz, kw]
        for ku in range(nkpts):
            ky = kconserv[kw, kv, ku]
            Ht2ab[ky, kx, kz] += lib.einsum('miae,mebj->jiab', t2ab[kw,kx,kz], WoVVo[kw,kv,ku])

    for kz, ku, kw in kpts_helper.loop_kkk(nkpts):
        kx = kconserv[kz,kw,ku]
        ky = kconserv[kz,kx,ku]
        Ht2ab[ky,kx,kz] -= lib.einsum('ie, ma, mjbe->jiab', t1b[kx], t1a[kz], eris.ooVV[kz,kw,ku])

    #:u2aa = einsum('xwzimae,wvumebj,xwzv,wuvy->xyzijab', t2aa, Wovvo, P, P)
    #:u2aa += einsum('xwziMaE,wvuMEbj,xwzv,wuvy->xyzijab', t2ab, WOVvo, P, P)
    #Left this in to keep proper shape, need to replace later
    u2aa = np.zeros_like(t2aa)
    for kx, kw, kz in kpts_helper.loop_kkk(nkpts):
        kv = kconserv[kx, kz, kw]
        for ku in range(nkpts):
            ky = kconserv[kw, kv, ku]
            u2aa[kx,ky,kz] += lib.einsum('imae, mebj->ijab', t2aa[kx,kw,kz], Wovvo[kw,kv,ku])
            u2aa[kx,ky,kz] += lib.einsum('imae, mebj->ijab', t2ab[kx,kw,kz], WOVvo[kw,kv,ku])

    #:u2aa += einsum('xie,zma,zwumjbe,zuwx,xyzu->xyzijab', t1a, t1a, eris.oovv, P, P)
    #:u2aa -= einsum('xie,zma,uwzbjme,zuwx,xyzu->xyzijab', t1a, t1a, eris.voov, P, P)
    for kz, ku, kw in kpts_helper.loop_kkk(nkpts):
        kx = kconserv[kz,kw,ku]
        ky = kconserv[kz,kx,ku]
        u2aa[kx,ky,kz] += lib.einsum('ie,ma,mjbe->ijab',t1a[kx],t1a[kz],eris.oovv[kz,kw,ku])
        u2aa[kx,ky,kz] -= lib.einsum('ie,ma,bjme->ijab',t1a[kx],t1a[kz],eris.voov[ku,kw,kz])

    #:u2aa += np.einsum('xie,uyzbjae,uzyx->xyzijab', t1a, eris.vovv, P)
    #:u2aa -= np.einsum('zma,xzyimjb->xyzijab', t1a, eris.ooov.conj())
    for ky, kx, ku in kpts_helper.loop_kkk(nkpts):
        kz = kconserv[ky, ku, kx]
        u2aa[kx, ky, kz] += lib.einsum('ie, bjae->ijab', t1a[kx], eris.vovv[ku,ky,kz])
        u2aa[kx, ky, kz] -= lib.einsum('ma, imjb->ijab', t1a[kz], eris.ooov[kx,kz,ky].conj())

    # Antisymmetrize over (i<->j, a<->b) before accumulating into Ht2aa.
    u2aa = u2aa - u2aa.transpose(1,0,2,4,3,5,6)
    u2aa = u2aa - einsum('xyzijab,xyzu->xyuijba', u2aa, P)
    Ht2aa += u2aa

    #:u2bb = einsum('xwzimae,wvumebj,xwzv,wuvy->xyzijab', t2bb, WOVVO, P, P)
    #:u2bb += einsum('wxvMiEa,wvuMEbj,xwzv,wuvy->xyzijab', t2ab, WovVO, P, P)
    #:u2bb += einsum('xie,zma,zwumjbe,zuwx,xyzu->xyzijab', t1b, t1b, eris.OOVV, P, P)
    #:u2bb -= einsum('xie,zma,uwzbjme,zuwx,xyzu->xyzijab', t1b, t1b, eris.VOOV, P, P)
    u2bb = np.zeros_like(t2bb)
    for kx, kw, kz in kpts_helper.loop_kkk(nkpts):
        kv = kconserv[kx, kz, kw]
        for ku in range(nkpts):
            ky = kconserv[kw,kv, ku]
            u2bb[kx, ky, kz] += lib.einsum('imae,mebj->ijab', t2bb[kx,kw,kz], WOVVO[kw,kv,ku])
            u2bb[kx, ky, kz] += lib.einsum('miea, mebj-> ijab', t2ab[kw,kx,kv],WovVO[kw,kv,ku])

    for kz, ku, kw in kpts_helper.loop_kkk(nkpts):
        kx = kconserv[kz, kw, ku]
        ky = kconserv[kz, kx, ku]
        u2bb[kx, ky, kz] += lib.einsum('ie, ma, mjbe->ijab',t1b[kx],t1b[kz],eris.OOVV[kz,kw,ku])
        u2bb[kx, ky, kz] -= lib.einsum('ie, ma, bjme->ijab', t1b[kx], t1b[kz],eris.VOOV[ku,kw,kz])

    #:u2bb += np.einsum('xie,uzybjae,uzyx->xyzijab', t1b, eris.VOVV, P)
    #:u2bb -= np.einsum('zma,xzyimjb->xyzijab', t1b, eris.OOOV.conj())
    for ky, kx, ku in kpts_helper.loop_kkk(nkpts):
        kz = kconserv[ky, ku, kx]
        u2bb[kx,ky,kz] += lib.einsum('ie,bjae->ijab', t1b[kx], eris.VOVV[ku,ky,kz])

    #for kx, kz, ky in kpts_helper.loop_kkk(nkpts):
    #    u2bb[kx,ky,kz] -= lib.einsum('ma, imjb-> ijab', t1b[kz], eris.OOOV[kx,kz,ky].conj())
    u2bb -= einsum('zma, xzyimjb->xyzijab', t1b, eris.OOOV[:].conj())

    # Antisymmetrize over (i<->j, a<->b) before accumulating into Ht2bb.
    u2bb = u2bb - u2bb.transpose(1,0,2,4,3,5,6)
    u2bb = u2bb - einsum('xyzijab,xyzu->xyuijba', u2bb, P)
    Ht2bb += u2bb

    #:Ht2ab += np.einsum('xie,uyzBJae,uzyx->xyziJaB', t1a, eris.VOvv, P)
    #:Ht2ab += np.einsum('yJE,zxuaiBE,zuxy->xyziJaB', t1b, eris.voVV, P)
    #:Ht2ab -= np.einsum('zma,xzyimjb->xyzijab', t1a, eris.ooOV.conj())
    #:Ht2ab -= np.einsum('umb,yuxjmia,xyuz->xyzijab', t1b, eris.OOov.conj(), P)
    for ky, kx, ku in kpts_helper.loop_kkk(nkpts):
        kz = kconserv[ky,ku,kx]
        Ht2ab[kx,ky,kz] += lib.einsum('ie, bjae-> ijab', t1a[kx], eris.VOvv[ku,ky,kz])
        Ht2ab[kx,ky,kz] += lib.einsum('je, aibe-> ijab', t1b[ky], eris.voVV[kz,kx,ku])

    #for kx, kz, ky in kpts_helper.loop_kkk(nkpts):
    #    Ht2ab[kx,ky,kz] -= lib.einsum('ma, imjb->ijab', t1a[kz], eris.ooOV[kx,kz,ky].conj())
    Ht2ab -= einsum('zma, xzyimjb->xyzijab', t1a, eris.ooOV[:].conj())

    for kx, ky, ku in kpts_helper.loop_kkk(nkpts):
        kz = kconserv[kx, ku, ky]
        Ht2ab[kx,ky,kz] -= lib.einsum('mb,jmia->ijab',t1b[ku],eris.OOov[ky,ku,kx].conj())

    # Orbital-energy denominators; padded orbitals keep LARGE_DENOM so their
    # amplitudes are driven to zero.
    eia = []
    eIA = []
    for ki in range(nkpts):
        tmp_alpha = []
        tmp_beta = []
        for ka in range(nkpts):
            tmp_eia = LARGE_DENOM * np.ones((nocca, nvira), dtype=eris.mo_energy[0][0].dtype)
            tmp_eIA = LARGE_DENOM * np.ones((noccb, nvirb), dtype=eris.mo_energy[0][0].dtype)
            n0_ovp_ia = np.ix_(nonzero_opadding_alpha[ki], nonzero_vpadding_alpha[ka])
            n0_ovp_IA = np.ix_(nonzero_opadding_beta[ki], nonzero_vpadding_beta[ka])
            tmp_eia[n0_ovp_ia] = (mo_ea_o[ki][:,None] - mo_ea_v[ka])[n0_ovp_ia]
            tmp_eIA[n0_ovp_IA] = (mo_eb_o[ki][:,None] - mo_eb_v[ka])[n0_ovp_IA]
            tmp_alpha.append(tmp_eia)
            tmp_beta.append(tmp_eIA)
        eia.append(tmp_alpha)
        eIA.append(tmp_beta)

    for ki in range(nkpts):
        ka = ki
        # Remove zero/padded elements from denominator
        Ht1a[ki] /= eia[ki][ka]
        Ht1b[ki] /= eIA[ki][ka]

    for ki, kj, ka in kpts_helper.loop_kkk(nkpts):
        kb = kconserv[ki, ka, kj]
        eijab = eia[ki][ka][:,None,:,None] + eia[kj][kb][:,None,:]
        Ht2aa[ki,kj,ka] /= eijab
        eijab = eia[ki][ka][:,None,:,None] + eIA[kj][kb][:,None,:]
        Ht2ab[ki,kj,ka] /= eijab
        eijab = eIA[ki][ka][:,None,:,None] + eIA[kj][kb][:,None,:]
        Ht2bb[ki,kj,ka] /= eijab

    time0 = log.timer_debug1('update t1 t2', *time0)
    return (Ht1a, Ht1b), (Ht2aa, Ht2ab, Ht2bb)
def get_normt_diff(cc, t1, t2, t1new, t2new):
    '''Calculates norm(t1 - t1new) + norm(t2 - t2new).'''
    # Accumulate the squared Frobenius norms of every amplitude difference
    # (two t1 blocks, three t2 blocks), then take a single square root.
    total = 0.0
    for old, new in zip(t1, t1new):
        total += np.linalg.norm(new - old) ** 2
    for old, new in zip(t2, t2new):
        total += np.linalg.norm(new - old) ** 2
    return total ** .5
def energy(cc, t1, t2, eris):
    """CCSD correlation energy per unit cell for the KUCCSD amplitudes.

    Sums the Fock/t1 contribution and the two-electron contraction with
    tau = t2 + t1*t1, divides by the number of k-points, and warns if a
    non-negligible imaginary component remains.
    """
    t1a, t1b = t1
    t2aa, t2ab, t2bb = t2

    kka, noa, nva = t1a.shape
    kkb, nob, nvb = t1b.shape
    assert(kka == kkb)
    nkpts = kka
    # One-particle (Fock) part: f_ov . t1 for each spin and k-point.
    s = 0.0 + 0j
    fa, fb = eris.fock
    for ki in range(nkpts):
        s += einsum('ia,ia', fa[ki, :noa, noa:], t1a[ki, :, :])
        s += einsum('ia,ia', fb[ki, :nob, nob:], t1b[ki, :, :])

    # tau = t2 + t1*t1 (factor 2 for the same-spin blocks, which are later
    # weighted by 1/4 instead of 1/2).
    t1t1aa = np.zeros(shape=t2aa.shape, dtype=t2aa.dtype)
    t1t1ab = np.zeros(shape=t2ab.shape, dtype=t2ab.dtype)
    t1t1bb = np.zeros(shape=t2bb.shape, dtype=t2bb.dtype)
    for ki in range(nkpts):
        ka = ki
        for kj in range(nkpts):
            t1t1aa[ki, kj, ka, :, :, :, :] = einsum('ia,jb->ijab', t1a[ki, :, :], t1a[kj, :, :])
            t1t1ab[ki, kj, ka, :, :, :, :] = einsum('ia,jb->ijab', t1a[ki, :, :], t1b[kj, :, :])
            t1t1bb[ki, kj, ka, :, :, :, :] = einsum('ia,jb->ijab', t1b[ki, :, :], t1b[kj, :, :])
    tauaa = t2aa + 2*t1t1aa
    tauab = t2ab + t1t1ab
    taubb = t2bb + 2*t1t1bb

    # Two-particle part: antisymmetrized same-spin terms plus the
    # opposite-spin term.
    d = 0.0 + 0.j
    d += 0.25*(einsum('xzyiajb,xyzijab->',eris.ovov,tauaa) -
               einsum('yzxjaib,xyzijab->',eris.ovov,tauaa))
    d += einsum('xzyiajb,xyzijab->',eris.ovOV,tauab)
    d += 0.25*(einsum('xzyiajb,xyzijab->',eris.OVOV,taubb) -
               einsum('yzxjaib,xyzijab->',eris.OVOV,taubb))
    e = s + d
    e /= nkpts
    if abs(e.imag) > 1e-4:
        logger.warn(cc, 'Non-zero imaginary part found in KCCSD energy %s', e)
    return e.real
#def get_nocc(cc, per_kpoint=False):
# '''See also function get_nocc in pyscf/pbc/mp2/kmp2.py'''
# if cc._nocc is not None:
# return cc._nocc
#
# assert(cc.frozen == 0)
#
# if isinstance(cc.frozen, (int, np.integer)):
# nocca = [(np.count_nonzero(cc.mo_occ[0][k] > 0) - cc.frozen) for k in range(cc.nkpts)]
# noccb = [(np.count_nonzero(cc.mo_occ[1][k] > 0) - cc.frozen) for k in range(cc.nkpts)]
#
# else:
# raise NotImplementedError
#
# if not per_kpoint:
# nocca = np.amax(nocca)
# noccb = np.amax(noccb)
# return nocca, noccb
#
#def get_nmo(cc, per_kpoint=False):
# '''See also function get_nmo in pyscf/pbc/mp2/kmp2.py'''
# if cc._nmo is not None:
# return cc._nmo
#
# assert(cc.frozen == 0)
#
# if isinstance(cc.frozen, (int, np.integer)):
# nmoa = [(cc.mo_occ[0][k].size - cc.frozen) for k in range(cc.nkpts)]
# nmob = [(cc.mo_occ[1][k].size - cc.frozen) for k in range(cc.nkpts)]
#
# else:
# raise NotImplementedError
#
# if not per_kpoint:
# nmoa = np.amax(nmoa)
# nmob = np.amax(nmob)
# return nmoa, nmob
#
#def get_frozen_mask(cc):
# '''See also get_frozen_mask function in pyscf/pbc/mp2/kmp2.py'''
#
# moidxa = [np.ones(x.size, dtype=np.bool) for x in cc.mo_occ[0]]
# moidxb = [np.ones(x.size, dtype=np.bool) for x in cc.mo_occ[1]]
# assert(cc.frozen == 0)
#
# if isinstance(cc.frozen, (int, np.integer)):
# for idx in moidxa:
# idx[:cc.frozen] = False
# for idx in moidxb:
# idx[:cc.frozen] = False
# else:
# raise NotImplementedError
#
#    return moidxa, moidxb
def amplitudes_to_vector(t1, t2):
    """Flatten the (t1a, t1b) and (t2aa, t2ab, t2bb) amplitudes into one
    1-D vector, in that fixed order."""
    pieces = [amp.ravel() for amp in t1] + [amp.ravel() for amp in t2]
    return np.concatenate(pieces)
def vector_to_amplitudes(vec, nmo, nocc, nkpts=1):
    """Inverse of amplitudes_to_vector: reshape a flat amplitude vector back
    into ((t1a, t1b), (t2aa, t2ab, t2bb)).

    `nmo`/`nocc` are (alpha, beta) pairs; the five segments are taken in the
    same fixed order used when packing.
    """
    nocca, noccb = nocc
    nmoa, nmob = nmo
    nvira, nvirb = nmoa - nocca, nmob - noccb
    shapes = (
        (nkpts, nocca, nvira),
        (nkpts, noccb, nvirb),
        (nkpts, nkpts, nkpts, nocca, nocca, nvira, nvira),
        (nkpts, nkpts, nkpts, nocca, noccb, nvira, nvirb),
        (nkpts, nkpts, nkpts, noccb, noccb, nvirb, nvirb),
    )
    amps = []
    offset = 0
    for shape in shapes:
        size = int(np.prod(shape))
        amps.append(vec[offset:offset + size].reshape(shape))
        offset += size
    t1a, t1b, t2aa, t2ab, t2bb = amps
    return (t1a, t1b), (t2aa, t2ab, t2bb)
def add_vvvv_(cc, Ht2, t1, t2, eris):
    """Accumulate the Wvvvv (particle-particle ladder) terms into Ht2 in place.

    When `cc.direct` is set and `eris.Lpv` is available, the Wvvvv blocks are
    assembled on the fly from eris.Lpv/eris.LPV (NOTE(review): these look like
    density-fitted 3-center factors — confirm against _make_df_eris); otherwise
    the precomputed half-transformed intermediates are used.

    Returns the same (Ht2aa, Ht2ab, Ht2bb) arrays that were passed in.
    """
    nocca, noccb = cc.nocc
    nmoa, nmob = cc.nmo
    nkpts = cc.nkpts
    kconserv = cc.khelper.kconserv

    t1a, t1b = t1
    t2aa, t2ab, t2bb = t2
    Ht2aa, Ht2ab, Ht2bb = Ht2

    if cc.direct and getattr(eris, 'Lpv', None) is not None:
        def get_Wvvvv(ka, kc, kb):
            # Build the three spin blocks of W(vv|vv) for one k-point triple,
            # t1-dressing the factors before contraction.
            kd = kconserv[ka,kc,kb]
            Lpv = eris.Lpv
            LPV = eris.LPV

            Lbd = (Lpv[kb,kd][:,nocca:] -
                   lib.einsum('Lkd,kb->Lbd', Lpv[kb,kd][:,:nocca], t1a[kb]))
            Wvvvv = lib.einsum('Lac,Lbd->acbd', Lpv[ka,kc][:,nocca:], Lbd)
            kcbd = lib.einsum('Lkc,Lbd->kcbd', Lpv[ka,kc][:,:nocca],
                              Lpv[kb,kd][:,nocca:])
            Wvvvv -= lib.einsum('kcbd,ka->acbd', kcbd, t1a[ka])

            LBD = (LPV[kb,kd][:,noccb:] -
                   lib.einsum('Lkd,kb->Lbd', LPV[kb,kd][:,:noccb], t1b[kb]))

            WvvVV = lib.einsum('Lac,Lbd->acbd', Lpv[ka,kc][:,nocca:], LBD)
            kcbd = lib.einsum('Lkc,Lbd->kcbd', Lpv[ka,kc][:,:nocca],
                              LPV[kb,kd][:,noccb:])
            WvvVV -= lib.einsum('kcbd,ka->acbd', kcbd, t1a[ka])

            WVVVV = lib.einsum('Lac,Lbd->acbd', LPV[ka,kc][:,noccb:], LBD)
            kcbd = lib.einsum('Lkc,Lbd->kcbd', LPV[ka,kc][:,:noccb],
                              LPV[kb,kd][:,noccb:])
            WVVVV -= lib.einsum('kcbd,ka->acbd', kcbd, t1b[ka])

            Wvvvv *= (1./nkpts)
            WvvVV *= (1./nkpts)
            WVVVV *= (1./nkpts)
            return Wvvvv, WvvVV, WVVVV
    else:
        _Wvvvv, _WvvVV, _WVVVV = kintermediates_uhf.cc_Wvvvv_half(cc, t1, t2, eris)
        def get_Wvvvv(ka, kc, kb):
            return _Wvvvv[ka,kc,kb], _WvvVV[ka,kc,kb], _WVVVV[ka,kc,kb]

    #:Ht2aa += np.einsum('xyuijef,zuwaebf,xyuv,zwuv->xyzijab', tauaa, _Wvvvv-_Wvvvv.transpose(2,1,0,5,4,3,6), P, P) * .5
    #:Ht2bb += np.einsum('xyuijef,zuwaebf,xyuv,zwuv->xyzijab', taubb, _WVVVV-_WVVVV.transpose(2,1,0,5,4,3,6), P, P) * .5
    #:Ht2ab += np.einsum('xyuiJeF,zuwaeBF,xyuv,zwuv->xyziJaB', tauab, _WvvVV, P, P)
    for ka, kb, kc in kpts_helper.loop_kkk(nkpts):
        kd = kconserv[ka,kc,kb]
        Wvvvv, WvvVV, WVVVV = get_Wvvvv(ka, kc, kb)
        for ki in range(nkpts):
            kj = kconserv[ka,ki,kb]
            # tau = t2 + t1*t1, built only for the k-point combinations where
            # the t1*t1 product conserves momentum.
            tauaa = t2aa[ki,kj,kc].copy()
            tauab = t2ab[ki,kj,kc].copy()
            taubb = t2bb[ki,kj,kc].copy()
            if ki == kc and kj == kd:
                tauaa += einsum('ic,jd->ijcd', t1a[ki], t1a[kj])
                tauab += einsum('ic,jd->ijcd', t1a[ki], t1b[kj])
                taubb += einsum('ic,jd->ijcd', t1b[ki], t1b[kj])
            if ki == kd and kj == kc:
                tauaa -= einsum('id,jc->ijcd', t1a[ki], t1a[kj])
                taubb -= einsum('id,jc->ijcd', t1b[ki], t1b[kj])

            tmp = lib.einsum('acbd,ijcd->ijab', Wvvvv, tauaa) * .5
            Ht2aa[ki,kj,ka] += tmp
            Ht2aa[ki,kj,kb] -= tmp.transpose(0,1,3,2)

            tmp = lib.einsum('acbd,ijcd->ijab', WVVVV, taubb) * .5
            Ht2bb[ki,kj,ka] += tmp
            Ht2bb[ki,kj,kb] -= tmp.transpose(0,1,3,2)

            Ht2ab[ki,kj,ka] += lib.einsum('acbd,ijcd->ijab', WvvVV, tauab)
        # Release the per-triple W blocks before the next outer iteration.
        Wvvvv = WvvVV = WVVVV = None
    _Wvvvv = _WvvVV = _WVVVV = None

    # Contractions below are merged to Woooo intermediates
    # tauaa, tauab, taubb = kintermediates_uhf.make_tau(cc, t2, t1, t1)
    # P = kintermediates_uhf.kconserv_mat(cc.nkpts, cc.khelper.kconserv)
    # minj = np.einsum('xwymenf,uvwijef,xywz,uvwz->xuyminj', eris.ovov, tauaa, P, P)
    # MINJ = np.einsum('xwymenf,uvwijef,xywz,uvwz->xuyminj', eris.OVOV, taubb, P, P)
    # miNJ = np.einsum('xwymeNF,uvwiJeF,xywz,uvwz->xuymiNJ', eris.ovOV, tauab, P, P)
    # Ht2aa += np.einsum('xuyminj,xywmnab,xyuv->uvwijab', minj, tauaa, P) * .25
    # Ht2bb += np.einsum('xuyminj,xywmnab,xyuv->uvwijab', MINJ, taubb, P) * .25
    # Ht2ab += np.einsum('xuymiNJ,xywmNaB,xyuv->uvwiJaB', miNJ, tauab, P) * .5
    return (Ht2aa, Ht2ab, Ht2bb)
class KUCCSD(uccsd.UCCSD):
max_space = getattr(__config__, 'pbc_cc_kccsd_uhf_KUCCSD_max_space', 20)
def __init__(self, mf, frozen=None, mo_coeff=None, mo_occ=None):
assert(isinstance(mf, scf.khf.KSCF))
uccsd.UCCSD.__init__(self, mf, frozen, mo_coeff, mo_occ)
self.kpts = mf.kpts
self.mo_energy = mf.mo_energy
self.khelper = kpts_helper.KptsHelper(mf.cell, self.kpts)
self.direct = True # If possible, use GDF to compute Wvvvv on-the-fly
keys = set(['kpts', 'mo_energy', 'khelper', 'max_space', 'direct'])
self._keys = self._keys.union(keys)
@property
def nkpts(self):
return len(self.kpts)
get_normt_diff = get_normt_diff
get_nocc = get_nocc
get_nmo = get_nmo
get_frozen_mask = get_frozen_mask
update_amps = update_amps
energy = energy
def dump_flags(self, verbose=None):
return uccsd.UCCSD.dump_flags(self, verbose)
def ao2mo(self, mo_coeff=None):
from pyscf.pbc.df.df import GDF
cell = self._scf.cell
nkpts = self.nkpts
nmoa, nmob = self.nmo
mem_incore = nkpts**3 * (nmoa**4 + nmob**4) * 8 / 1e6
mem_now = lib.current_memory()[0]
if (mem_incore + mem_now < self.max_memory) or self.mol.incore_anyway:
return _make_eris_incore(self, mo_coeff)
elif (self.direct and type(self._scf.with_df) is GDF
and cell.dimension != 2):
# DFKCCSD does not support MDF
return _make_df_eris(self, mo_coeff)
else:
return _make_eris_outcore(self, mo_coeff)
def init_amps(self, eris):
time0 = logger.process_clock(), logger.perf_counter()
nocca, noccb = self.nocc
nmoa, nmob = self.nmo
nvira, nvirb = nmoa - nocca, nmob - noccb
nkpts = self.nkpts
t1a = np.zeros((nkpts, nocca, nvira), dtype=np.complex128)
t1b = np.zeros((nkpts, noccb, nvirb), dtype=np.complex128)
t1 = (t1a, t1b)
t2aa = np.zeros((nkpts, nkpts, nkpts, nocca, nocca, nvira, nvira), dtype=np.complex128)
t2ab = np.zeros((nkpts, nkpts, nkpts, nocca, noccb, nvira, nvirb), dtype=np.complex128)
t2bb = np.zeros((nkpts, nkpts, nkpts, noccb, noccb, nvirb, nvirb), dtype=np.complex128)
mo_ea_o = [e[:nocca] for e in eris.mo_energy[0]]
mo_eb_o = [e[:noccb] for e in eris.mo_energy[1]]
mo_ea_v = [e[nocca:] for e in eris.mo_energy[0]]
mo_eb_v = [e[noccb:] for e in eris.mo_energy[1]]
# Get location of padded elements in occupied and virtual space
nonzero_padding_alpha, nonzero_padding_beta = padding_k_idx(self, kind="split")
nonzero_opadding_alpha, nonzero_vpadding_alpha = nonzero_padding_alpha
nonzero_opadding_beta, nonzero_vpadding_beta = nonzero_padding_beta
eia = []
eIA = []
# Create denominators, ignoring padded elements
for ki in range(nkpts):
tmp_alpha = []
tmp_beta = []
for ka in range(nkpts):
tmp_eia = LARGE_DENOM * np.ones((nocca, nvira), dtype=eris.mo_energy[0][0].dtype)
tmp_eIA = LARGE_DENOM * np.ones((noccb, nvirb), dtype=eris.mo_energy[0][0].dtype)
n0_ovp_ia = np.ix_(nonzero_opadding_alpha[ki], nonzero_vpadding_alpha[ka])
n0_ovp_IA = np.ix_(nonzero_opadding_beta[ki], nonzero_vpadding_beta[ka])
tmp_eia[n0_ovp_ia] = (mo_ea_o[ki][:,None] - mo_ea_v[ka])[n0_ovp_ia]
tmp_eIA[n0_ovp_IA] = (mo_eb_o[ki][:,None] - mo_eb_v[ka])[n0_ovp_IA]
tmp_alpha.append(tmp_eia)
tmp_beta.append(tmp_eIA)
eia.append(tmp_alpha)
eIA.append(tmp_beta)
kconserv = kpts_helper.get_kconserv(self._scf.cell, self.kpts)
for ki, kj, ka in kpts_helper.loop_kkk(nkpts):
kb = kconserv[ki, ka, kj]
Daa = eia[ki][ka][:,None,:,None] + eia[kj][kb][:,None,:]
Dab = eia[ki][ka][:,None,:,None] + eIA[kj][kb][:,None,:]
Dbb = eIA[ki][ka][:,None,:,None] + eIA[kj][kb][:,None,:]
t2aa[ki,kj,ka] = eris.ovov[ki,ka,kj].conj().transpose((0,2,1,3)) / Daa
t2aa[ki,kj,ka]-= eris.ovov[kj,ka,ki].conj().transpose((2,0,1,3)) / Daa
t2ab[ki,kj,ka] = eris.ovOV[ki,ka,kj].conj().transpose((0,2,1,3)) / Dab
t2bb[ki,kj,ka] = eris.OVOV[ki,ka,kj].conj().transpose((0,2,1,3)) / Dbb
t2bb[ki,kj,ka]-= eris.OVOV[kj,ka,ki].conj().transpose((2,0,1,3)) / Dbb
t2 = (t2aa,t2ab,t2bb)
d = 0.0 + 0.j
d += 0.25*(einsum('xzyiajb,xyzijab->',eris.ovov,t2aa) -
einsum('yzxjaib,xyzijab->',eris.ovov,t2aa))
d += einsum('xzyiajb,xyzijab->',eris.ovOV,t2ab)
d += 0.25*(einsum('xzyiajb,xyzijab->',eris.OVOV,t2bb) -
einsum('yzxjaib,xyzijab->',eris.OVOV,t2bb))
self.emp2 = d/nkpts
logger.info(self, 'Init t2, MP2 energy = %.15g', self.emp2.real)
logger.timer(self, 'init mp2', *time0)
return self.emp2, t1, t2
    def amplitudes_to_vector(self, t1, t2):
        """Pack the (t1, t2) amplitude tuples into one flat ndarray.

        Delegates to the module-level ``amplitudes_to_vector`` helper.
        """
        return amplitudes_to_vector(t1, t2)
def vector_to_amplitudes(self, vec, nmo=None, nocc=None, nkpts=None):
if nocc is None: nocc = self.nocc
if nmo is None: nmo = self.nmo
if nkpts is None: nkpts = self.nkpts
return vector_to_amplitudes(vec, nmo, nocc, nkpts)
UCCSD = KUCCSD
#######################################
#
# _ERIS.
#
# Note the two electron integrals are stored in different orders from
# kccsd_rhf._ERIS. Integrals (ab|cd) are stored as [ka,kb,kc,a,b,c,d] here
# while the order is [ka,kc,kb,a,c,b,d] in kccsd_rhf._ERIS
#
# TODO: use the same convention as kccsd_rhf
#
def _make_eris_incore(cc, mo_coeff=None):
    """Transform all MO-integral spin blocks needed by KUCCSD, held in memory.

    Blocks are stored in chemists' notation as [ka,kb,kc,a,b,c,d]; see the
    module comment above for how this ordering differs from kccsd_rhf._ERIS.
    Lower-case/upper-case labels denote alpha/beta orbitals respectively.
    """
    eris = uccsd._ChemistsERIs()
    if mo_coeff is None:
        mo_coeff = cc.mo_coeff
    mo_coeff = convert_mo_coeff(mo_coeff)  # FIXME: Remove me!
    # Pad with zero orbitals so every k-point carries the same nmo.
    mo_coeff = padded_mo_coeff(cc, mo_coeff)
    eris.mo_coeff = mo_coeff
    eris.nocc = cc.nocc

    nkpts = cc.nkpts
    nocca, noccb = cc.nocc
    nmoa, nmob = cc.nmo
    nvira, nvirb = nmoa - nocca, nmob - noccb

    # Real integrals are only possible at the Gamma point.
    if gamma_point(cc.kpts):
        dtype = np.double
    else:
        dtype = np.complex128
    dtype = np.result_type(dtype, *mo_coeff[0])

    # alpha-alpha blocks
    eris.oooo = np.empty((nkpts,nkpts,nkpts,nocca,nocca,nocca,nocca), dtype=dtype)
    eris.ooov = np.empty((nkpts,nkpts,nkpts,nocca,nocca,nocca,nvira), dtype=dtype)
    eris.oovv = np.empty((nkpts,nkpts,nkpts,nocca,nocca,nvira,nvira), dtype=dtype)
    eris.ovov = np.empty((nkpts,nkpts,nkpts,nocca,nvira,nocca,nvira), dtype=dtype)
    eris.voov = np.empty((nkpts,nkpts,nkpts,nvira,nocca,nocca,nvira), dtype=dtype)
    eris.vovv = np.empty((nkpts,nkpts,nkpts,nvira,nocca,nvira,nvira), dtype=dtype)

    # beta-beta blocks
    eris.OOOO = np.empty((nkpts,nkpts,nkpts,noccb,noccb,noccb,noccb), dtype=dtype)
    eris.OOOV = np.empty((nkpts,nkpts,nkpts,noccb,noccb,noccb,nvirb), dtype=dtype)
    eris.OOVV = np.empty((nkpts,nkpts,nkpts,noccb,noccb,nvirb,nvirb), dtype=dtype)
    eris.OVOV = np.empty((nkpts,nkpts,nkpts,noccb,nvirb,noccb,nvirb), dtype=dtype)
    eris.VOOV = np.empty((nkpts,nkpts,nkpts,nvirb,noccb,noccb,nvirb), dtype=dtype)
    eris.VOVV = np.empty((nkpts,nkpts,nkpts,nvirb,noccb,nvirb,nvirb), dtype=dtype)

    # alpha-beta blocks
    eris.ooOO = np.empty((nkpts,nkpts,nkpts,nocca,nocca,noccb,noccb), dtype=dtype)
    eris.ooOV = np.empty((nkpts,nkpts,nkpts,nocca,nocca,noccb,nvirb), dtype=dtype)
    eris.ooVV = np.empty((nkpts,nkpts,nkpts,nocca,nocca,nvirb,nvirb), dtype=dtype)
    eris.ovOV = np.empty((nkpts,nkpts,nkpts,nocca,nvira,noccb,nvirb), dtype=dtype)
    eris.voOV = np.empty((nkpts,nkpts,nkpts,nvira,nocca,noccb,nvirb), dtype=dtype)
    eris.voVV = np.empty((nkpts,nkpts,nkpts,nvira,nocca,nvirb,nvirb), dtype=dtype)

    # beta-alpha blocks (OOoo is redundant with ooOO and left unset)
    eris.OOoo = None
    eris.OOov = np.empty((nkpts,nkpts,nkpts,noccb,noccb,nocca,nvira), dtype=dtype)
    eris.OOvv = np.empty((nkpts,nkpts,nkpts,noccb,noccb,nvira,nvira), dtype=dtype)
    eris.OVov = np.empty((nkpts,nkpts,nkpts,noccb,nvirb,nocca,nvira), dtype=dtype)
    eris.VOov = np.empty((nkpts,nkpts,nkpts,nvirb,noccb,nocca,nvira), dtype=dtype)
    eris.VOvv = np.empty((nkpts,nkpts,nkpts,nvirb,noccb,nvira,nvira), dtype=dtype)

    # Fill everything except the vvvv-type blocks.
    _kuccsd_eris_common_(cc, eris)

    # vvvv-type blocks come straight from the density-fitting 7d transform.
    thisdf = cc._scf.with_df
    orbva = np.asarray(mo_coeff[0][:,:,nocca:], order='C')
    orbvb = np.asarray(mo_coeff[1][:,:,noccb:], order='C')
    eris.vvvv = thisdf.ao2mo_7d(orbva, factor=1./nkpts)
    eris.VVVV = thisdf.ao2mo_7d(orbvb, factor=1./nkpts)
    eris.vvVV = thisdf.ao2mo_7d([orbva,orbva,orbvb,orbvb], factor=1./nkpts)
    return eris
def _kuccsd_eris_common_(cc, eris, buf=None):
    """Fill all integral blocks of ``eris`` except the vvvv-type ones.

    Also rebuilds the (exxdiv-free) Fock matrices, the HF energy and the
    Madelung-corrected orbital energies on ``eris``.

    Args:
        cc: the KUCCSD object (supplies the SCF object, k-points, sizes).
        eris: a _ChemistsERIs container whose block arrays/datasets are
            already allocated by the caller.
        buf: optional h5py.Group used as scratch space for the
            half-transformed (o p|p p) integrals; when None they are
            held in memory.

    Returns:
        The same ``eris`` object, filled in place.
    """
    from pyscf.pbc import tools
    from pyscf.pbc.cc.ccsd import _adjust_occ
    #if not (cc.frozen is None or cc.frozen == 0):
    #    raise NotImplementedError('cc.frozen = %s' % str(cc.frozen))

    cput0 = (logger.process_clock(), logger.perf_counter())
    log = logger.new_logger(cc)
    cell = cc._scf.cell
    thisdf = cc._scf.with_df

    kpts = cc.kpts
    nkpts = cc.nkpts
    mo_coeff = eris.mo_coeff
    nocca, noccb = eris.nocc
    nmoa, nmob = cc.nmo
    mo_a, mo_b = mo_coeff

    # Re-make our fock MO matrix elements from density and fock AO,
    # with the Ewald exxdiv correction disabled.
    dm = cc._scf.make_rdm1(cc.mo_coeff, cc.mo_occ)
    hcore = cc._scf.get_hcore()
    with lib.temporary_env(cc._scf, exxdiv=None):
        vhf = cc._scf.get_veff(cell, dm)
    focka = [reduce(np.dot, (mo.conj().T, hcore[k]+vhf[0][k], mo))
             for k, mo in enumerate(mo_a)]
    fockb = [reduce(np.dot, (mo.conj().T, hcore[k]+vhf[1][k], mo))
             for k, mo in enumerate(mo_b)]
    eris.fock = (np.asarray(focka), np.asarray(fockb))
    eris.e_hf = cc._scf.energy_tot(dm=dm, vhf=vhf)

    # Shift occupied orbital energies down by the Madelung constant to
    # compensate for the removed exxdiv treatment.
    madelung = tools.madelung(cell, kpts)
    mo_ea = [focka[k].diagonal().real for k in range(nkpts)]
    mo_eb = [fockb[k].diagonal().real for k in range(nkpts)]
    mo_ea = [_adjust_occ(e, nocca, -madelung) for e in mo_ea]
    mo_eb = [_adjust_occ(e, noccb, -madelung) for e in mo_eb]
    eris.mo_energy = (mo_ea, mo_eb)

    orboa = np.asarray(mo_coeff[0][:,:,:nocca], order='C')
    orbob = np.asarray(mo_coeff[1][:,:,:noccb], order='C')
    #orbva = np.asarray(mo_coeff[0][:,:,nocca:], order='C')
    #orbvb = np.asarray(mo_coeff[1][:,:,noccb:], order='C')
    dtype = np.result_type(*focka).char

    # The momentum conservation array
    kconserv = cc.khelper.kconserv

    # 1) alpha (o p|p p): slice out the pure-alpha occupied-leading blocks.
    out = None
    if isinstance(buf, h5py.Group):
        out = buf.create_dataset('tmp', (nkpts,nkpts,nkpts,nocca,nmoa,nmoa,nmoa), dtype)
    oppp = thisdf.ao2mo_7d([orboa,mo_coeff[0],mo_coeff[0],mo_coeff[0]], kpts,
                           factor=1./nkpts, out=out)
    for kp, kq, kr in kpts_helper.loop_kkk(nkpts):
        ks = kconserv[kp,kq,kr]
        tmp = np.asarray(oppp[kp,kq,kr])
        eris.oooo[kp,kq,kr] = tmp[:nocca,:nocca,:nocca,:nocca]
        eris.ooov[kp,kq,kr] = tmp[:nocca,:nocca,:nocca,nocca:]
        eris.oovv[kp,kq,kr] = tmp[:nocca,:nocca,nocca:,nocca:]
        eris.ovov[kp,kq,kr] = tmp[:nocca,nocca:,:nocca,nocca:]
        # v-leading blocks come from complex conjugation + index transposition.
        eris.voov[kq,kp,ks] = tmp[:nocca,nocca:,nocca:,:nocca].conj().transpose(1,0,3,2)
        eris.vovv[kq,kp,ks] = tmp[:nocca,nocca:,nocca:,nocca:].conj().transpose(1,0,3,2)
    oppp = None

    # 2) beta (O P|P P)
    if isinstance(buf, h5py.Group):
        del buf['tmp']
        out = buf.create_dataset('tmp', (nkpts,nkpts,nkpts,noccb,nmob,nmob,nmob), dtype)
    oppp = thisdf.ao2mo_7d([orbob,mo_coeff[1],mo_coeff[1],mo_coeff[1]], kpts,
                           factor=1./nkpts, out=out)
    for kp, kq, kr in kpts_helper.loop_kkk(nkpts):
        ks = kconserv[kp,kq,kr]
        tmp = np.asarray(oppp[kp,kq,kr])
        eris.OOOO[kp,kq,kr] = tmp[:noccb,:noccb,:noccb,:noccb]
        eris.OOOV[kp,kq,kr] = tmp[:noccb,:noccb,:noccb,noccb:]
        eris.OOVV[kp,kq,kr] = tmp[:noccb,:noccb,noccb:,noccb:]
        eris.OVOV[kp,kq,kr] = tmp[:noccb,noccb:,:noccb,noccb:]
        eris.VOOV[kq,kp,ks] = tmp[:noccb,noccb:,noccb:,:noccb].conj().transpose(1,0,3,2)
        eris.VOVV[kq,kp,ks] = tmp[:noccb,noccb:,noccb:,noccb:].conj().transpose(1,0,3,2)
    oppp = None

    # 3) mixed alpha-occupied (o p|P P)
    if isinstance(buf, h5py.Group):
        del buf['tmp']
        out = buf.create_dataset('tmp', (nkpts,nkpts,nkpts,nocca,nmoa,nmob,nmob), dtype)
    oppp = thisdf.ao2mo_7d([orboa,mo_coeff[0],mo_coeff[1],mo_coeff[1]], kpts,
                           factor=1./nkpts, out=out)
    for kp, kq, kr in kpts_helper.loop_kkk(nkpts):
        ks = kconserv[kp,kq,kr]
        tmp = np.asarray(oppp[kp,kq,kr])
        eris.ooOO[kp,kq,kr] = tmp[:nocca,:nocca,:noccb,:noccb]
        eris.ooOV[kp,kq,kr] = tmp[:nocca,:nocca,:noccb,noccb:]
        eris.ooVV[kp,kq,kr] = tmp[:nocca,:nocca,noccb:,noccb:]
        eris.ovOV[kp,kq,kr] = tmp[:nocca,nocca:,:noccb,noccb:]
        eris.voOV[kq,kp,ks] = tmp[:nocca,nocca:,noccb:,:noccb].conj().transpose(1,0,3,2)
        eris.voVV[kq,kp,ks] = tmp[:nocca,nocca:,noccb:,noccb:].conj().transpose(1,0,3,2)
    oppp = None

    # 4) mixed beta-occupied (O P|p p); OOoo is redundant and skipped.
    if isinstance(buf, h5py.Group):
        del buf['tmp']
        out = buf.create_dataset('tmp', (nkpts,nkpts,nkpts,noccb,nmob,nmoa,nmoa), dtype)
    oppp = thisdf.ao2mo_7d([orbob,mo_coeff[1],mo_coeff[0],mo_coeff[0]], kpts,
                           factor=1./nkpts, out=out)
    for kp, kq, kr in kpts_helper.loop_kkk(nkpts):
        ks = kconserv[kp,kq,kr]
        tmp = np.asarray(oppp[kp,kq,kr])
        #eris.OOoo[kp,kq,kr] = tmp[:noccb,:noccb,:nocca,:nocca]
        eris.OOov[kp,kq,kr] = tmp[:noccb,:noccb,:nocca,nocca:]
        eris.OOvv[kp,kq,kr] = tmp[:noccb,:noccb,nocca:,nocca:]
        eris.OVov[kp,kq,kr] = tmp[:noccb,noccb:,:nocca,nocca:]
        eris.VOov[kq,kp,ks] = tmp[:noccb,noccb:,nocca:,:nocca].conj().transpose(1,0,3,2)
        eris.VOvv[kq,kp,ks] = tmp[:noccb,noccb:,nocca:,nocca:].conj().transpose(1,0,3,2)
    oppp = None

    log.timer('CCSD integral transformation', *cput0)
    return eris
def _make_eris_outcore(cc, mo_coeff=None):
    """Same integral blocks as _make_eris_incore, but stored in a temp HDF5 file.

    Used when the full set of MO integrals does not fit in memory.
    """
    eris = uccsd._ChemistsERIs()
    if mo_coeff is None:
        mo_coeff = cc.mo_coeff
    mo_coeff = convert_mo_coeff(mo_coeff)  # FIXME: Remove me!
    # Pad with zero orbitals so every k-point carries the same nmo.
    mo_coeff = padded_mo_coeff(cc, mo_coeff)
    eris.mo_coeff = mo_coeff
    eris.nocc = cc.nocc

    nkpts = cc.nkpts
    nocca, noccb = cc.nocc
    nmoa, nmob = cc.nmo
    nvira, nvirb = nmoa - nocca, nmob - noccb

    # Real integrals are only possible at the Gamma point.
    if gamma_point(cc.kpts):
        dtype = np.double
    else:
        dtype = np.complex128
    dtype = np.result_type(dtype, *mo_coeff[0]).char

    # All blocks live in one temporary HDF5 file, removed when eris is freed.
    eris.feri = feri = lib.H5TmpFile()
    eris.oooo = feri.create_dataset('oooo', (nkpts,nkpts,nkpts,nocca,nocca,nocca,nocca), dtype)
    eris.ooov = feri.create_dataset('ooov', (nkpts,nkpts,nkpts,nocca,nocca,nocca,nvira), dtype)
    eris.oovv = feri.create_dataset('oovv', (nkpts,nkpts,nkpts,nocca,nocca,nvira,nvira), dtype)
    eris.ovov = feri.create_dataset('ovov', (nkpts,nkpts,nkpts,nocca,nvira,nocca,nvira), dtype)
    eris.voov = feri.create_dataset('voov', (nkpts,nkpts,nkpts,nvira,nocca,nocca,nvira), dtype)
    eris.vovv = feri.create_dataset('vovv', (nkpts,nkpts,nkpts,nvira,nocca,nvira,nvira), dtype)
    eris.vvvv = feri.create_dataset('vvvv', (nkpts,nkpts,nkpts,nvira,nvira,nvira,nvira), dtype)
    eris.OOOO = feri.create_dataset('OOOO', (nkpts,nkpts,nkpts,noccb,noccb,noccb,noccb), dtype)
    eris.OOOV = feri.create_dataset('OOOV', (nkpts,nkpts,nkpts,noccb,noccb,noccb,nvirb), dtype)
    eris.OOVV = feri.create_dataset('OOVV', (nkpts,nkpts,nkpts,noccb,noccb,nvirb,nvirb), dtype)
    eris.OVOV = feri.create_dataset('OVOV', (nkpts,nkpts,nkpts,noccb,nvirb,noccb,nvirb), dtype)
    eris.VOOV = feri.create_dataset('VOOV', (nkpts,nkpts,nkpts,nvirb,noccb,noccb,nvirb), dtype)
    eris.VOVV = feri.create_dataset('VOVV', (nkpts,nkpts,nkpts,nvirb,noccb,nvirb,nvirb), dtype)
    eris.VVVV = feri.create_dataset('VVVV', (nkpts,nkpts,nkpts,nvirb,nvirb,nvirb,nvirb), dtype)
    eris.ooOO = feri.create_dataset('ooOO', (nkpts,nkpts,nkpts,nocca,nocca,noccb,noccb), dtype)
    eris.ooOV = feri.create_dataset('ooOV', (nkpts,nkpts,nkpts,nocca,nocca,noccb,nvirb), dtype)
    eris.ooVV = feri.create_dataset('ooVV', (nkpts,nkpts,nkpts,nocca,nocca,nvirb,nvirb), dtype)
    eris.ovOV = feri.create_dataset('ovOV', (nkpts,nkpts,nkpts,nocca,nvira,noccb,nvirb), dtype)
    eris.voOV = feri.create_dataset('voOV', (nkpts,nkpts,nkpts,nvira,nocca,noccb,nvirb), dtype)
    eris.voVV = feri.create_dataset('voVV', (nkpts,nkpts,nkpts,nvira,nocca,nvirb,nvirb), dtype)
    eris.vvVV = feri.create_dataset('vvVV', (nkpts,nkpts,nkpts,nvira,nvira,nvirb,nvirb), dtype)
    # Blocks redundant under conjugation/transposition are left unset.
    eris.OOoo = None
    eris.OOov = feri.create_dataset('OOov', (nkpts,nkpts,nkpts,noccb,noccb,nocca,nvira), dtype)
    eris.OOvv = feri.create_dataset('OOvv', (nkpts,nkpts,nkpts,noccb,noccb,nvira,nvira), dtype)
    eris.OVov = feri.create_dataset('OVov', (nkpts,nkpts,nkpts,noccb,nvirb,nocca,nvira), dtype)
    eris.VOov = feri.create_dataset('VOov', (nkpts,nkpts,nkpts,nvirb,noccb,nocca,nvira), dtype)
    eris.VOvv = feri.create_dataset('VOvv', (nkpts,nkpts,nkpts,nvirb,noccb,nvira,nvira), dtype)
    eris.VVvv = None

    # Fill everything except the vvvv blocks, using HDF5 scratch space.
    fswap = lib.H5TmpFile()
    _kuccsd_eris_common_(cc, eris, fswap)
    fswap = None

    # vvvv-type blocks are written directly into the datasets above.
    thisdf = cc._scf.with_df
    orbva = np.asarray(mo_coeff[0][:,:,nocca:], order='C')
    orbvb = np.asarray(mo_coeff[1][:,:,noccb:], order='C')
    thisdf.ao2mo_7d(orbva, cc.kpts, factor=1./nkpts, out=eris.vvvv)
    thisdf.ao2mo_7d(orbvb, cc.kpts, factor=1./nkpts, out=eris.VVVV)
    thisdf.ao2mo_7d([orbva,orbva,orbvb,orbvb], cc.kpts, factor=1./nkpts, out=eris.vvVV)
    return eris
def _make_df_eris(cc, mo_coeff=None):
    """Build ERIs for density-fitted KUCCSD.

    Like _make_eris_outcore, but the vvvv-type blocks are not formed at all;
    instead the 3-index factors Lpv/LPV (aux, mo, vir) are loaded per
    k-point pair so vvvv contributions can be contracted on the fly.
    """
    from pyscf.pbc.df import df
    from pyscf.ao2mo import _ao2mo
    cell = cc._scf.cell
    if cell.dimension == 2:
        # 2D low-dimension treatment of the DF integrals is not supported.
        raise NotImplementedError

    eris = uccsd._ChemistsERIs()
    if mo_coeff is None:
        mo_coeff = cc.mo_coeff
    # Pad with zero orbitals so every k-point carries the same nmo.
    mo_coeff = padded_mo_coeff(cc, mo_coeff)
    eris.mo_coeff = mo_coeff
    eris.nocc = cc.nocc
    thisdf = cc._scf.with_df

    kpts = cc.kpts
    nkpts = cc.nkpts
    nocca, noccb = cc.nocc
    nmoa, nmob = cc.nmo
    nvira, nvirb = nmoa - nocca, nmob - noccb
    #if getattr(thisdf, 'auxcell', None):
    #    naux = thisdf.auxcell.nao_nr()
    #else:
    #    naux = thisdf.get_naoaux()
    nao = cell.nao_nr()
    mo_kpts_a, mo_kpts_b = eris.mo_coeff

    # Real integrals are only possible at the Gamma point.
    if gamma_point(kpts):
        dtype = np.double
    else:
        dtype = np.complex128
    dtype = np.result_type(dtype, *mo_kpts_a)

    eris.feri = feri = lib.H5TmpFile()
    eris.oooo = feri.create_dataset('oooo', (nkpts,nkpts,nkpts,nocca,nocca,nocca,nocca), dtype)
    eris.ooov = feri.create_dataset('ooov', (nkpts,nkpts,nkpts,nocca,nocca,nocca,nvira), dtype)
    eris.oovv = feri.create_dataset('oovv', (nkpts,nkpts,nkpts,nocca,nocca,nvira,nvira), dtype)
    eris.ovov = feri.create_dataset('ovov', (nkpts,nkpts,nkpts,nocca,nvira,nocca,nvira), dtype)
    eris.voov = feri.create_dataset('voov', (nkpts,nkpts,nkpts,nvira,nocca,nocca,nvira), dtype)
    eris.vovv = feri.create_dataset('vovv', (nkpts,nkpts,nkpts,nvira,nocca,nvira,nvira), dtype)
    eris.vvvv = None
    eris.OOOO = feri.create_dataset('OOOO', (nkpts,nkpts,nkpts,noccb,noccb,noccb,noccb), dtype)
    eris.OOOV = feri.create_dataset('OOOV', (nkpts,nkpts,nkpts,noccb,noccb,noccb,nvirb), dtype)
    eris.OOVV = feri.create_dataset('OOVV', (nkpts,nkpts,nkpts,noccb,noccb,nvirb,nvirb), dtype)
    eris.OVOV = feri.create_dataset('OVOV', (nkpts,nkpts,nkpts,noccb,nvirb,noccb,nvirb), dtype)
    eris.VOOV = feri.create_dataset('VOOV', (nkpts,nkpts,nkpts,nvirb,noccb,noccb,nvirb), dtype)
    eris.VOVV = feri.create_dataset('VOVV', (nkpts,nkpts,nkpts,nvirb,noccb,nvirb,nvirb), dtype)
    eris.VVVV = None
    eris.ooOO = feri.create_dataset('ooOO', (nkpts,nkpts,nkpts,nocca,nocca,noccb,noccb), dtype)
    eris.ooOV = feri.create_dataset('ooOV', (nkpts,nkpts,nkpts,nocca,nocca,noccb,nvirb), dtype)
    eris.ooVV = feri.create_dataset('ooVV', (nkpts,nkpts,nkpts,nocca,nocca,nvirb,nvirb), dtype)
    eris.ovOV = feri.create_dataset('ovOV', (nkpts,nkpts,nkpts,nocca,nvira,noccb,nvirb), dtype)
    eris.voOV = feri.create_dataset('voOV', (nkpts,nkpts,nkpts,nvira,nocca,noccb,nvirb), dtype)
    eris.voVV = feri.create_dataset('voVV', (nkpts,nkpts,nkpts,nvira,nocca,nvirb,nvirb), dtype)
    eris.vvVV = None
    eris.OOoo = None
    eris.OOov = feri.create_dataset('OOov', (nkpts,nkpts,nkpts,noccb,noccb,nocca,nvira), dtype)
    eris.OOvv = feri.create_dataset('OOvv', (nkpts,nkpts,nkpts,noccb,noccb,nvira,nvira), dtype)
    eris.OVov = feri.create_dataset('OVov', (nkpts,nkpts,nkpts,noccb,nvirb,nocca,nvira), dtype)
    eris.VOov = feri.create_dataset('VOov', (nkpts,nkpts,nkpts,nvirb,noccb,nocca,nvira), dtype)
    eris.VOvv = feri.create_dataset('VOvv', (nkpts,nkpts,nkpts,nvirb,noccb,nvira,nvira), dtype)
    eris.VVvv = None

    # Fill everything except the (absent) vvvv blocks.
    fswap = lib.H5TmpFile()
    _kuccsd_eris_common_(cc, eris, fswap)
    fswap = None

    # Load the 3-index DF tensors: Lpv[ki,kj] has shape (naux, nmoa, nvira).
    eris.Lpv = Lpv = np.empty((nkpts,nkpts), dtype=object)
    eris.LPV = LPV = np.empty((nkpts,nkpts), dtype=object)
    with h5py.File(thisdf._cderi, 'r') as f:
        kptij_lst = f['j3c-kptij'][:]
        tao = []
        ao_loc = None
        for ki, kpti in enumerate(kpts):
            for kj, kptj in enumerate(kpts):
                kpti_kptj = np.array((kpti,kptj))
                Lpq = np.asarray(df._getitem(f, 'j3c', kpti_kptj, kptij_lst))

                # Transform AO pair index to (all-MO x virtual) for each spin.
                mo_a = np.hstack((mo_kpts_a[ki], mo_kpts_a[kj][:,nocca:]))
                mo_b = np.hstack((mo_kpts_b[ki], mo_kpts_b[kj][:,noccb:]))
                mo_a = np.asarray(mo_a, dtype=dtype, order='F')
                mo_b = np.asarray(mo_b, dtype=dtype, order='F')
                if dtype == np.double:
                    outa = _ao2mo.nr_e2(Lpq, mo_a, (0, nmoa, nmoa, nmoa+nvira), aosym='s2')
                    outb = _ao2mo.nr_e2(Lpq, mo_b, (0, nmob, nmob, nmob+nvirb), aosym='s2')
                else:
                    #Note: Lpq.shape[0] != naux if linear dependency is found in auxbasis
                    if Lpq[0].size != nao**2:  # aosym = 's2'
                        Lpq = lib.unpack_tril(Lpq).astype(np.complex128)
                    outa = _ao2mo.r_e2(Lpq, mo_a, (0, nmoa, nmoa, nmoa+nvira), tao, ao_loc)
                    outb = _ao2mo.r_e2(Lpq, mo_b, (0, nmob, nmob, nmob+nvirb), tao, ao_loc)
                Lpv[ki,kj] = outa.reshape(-1,nmoa,nvira)
                LPV[ki,kj] = outb.reshape(-1,nmob,nvirb)
    return eris
scf.kuhf.KUHF.CCSD = lib.class_as_method(KUCCSD)
if __name__ == '__main__':
    # Self-test: build a fake KUHF object with random (orthonormalized)
    # orbitals and compare KUCCSD against reference fingerprints and
    # against the spin-orbital GCCSD implementation.
    from pyscf.pbc import gto
    from pyscf import lo
    cell = gto.Cell()
    cell.atom = '''
    He 0.000000000000   0.000000000000   0.000000000000
    He 1.685068664391   1.685068664391   1.685068664391
    '''
    #cell.basis = [[0, (1., 1.)], [1, (.5, 1.)]]
    cell.basis = [[0, (1., 1.)], [0, (.5, 1.)]]
    cell.a = '''
    0.000000000, 3.370137329, 3.370137329
    3.370137329, 0.000000000, 3.370137329
    3.370137329, 3.370137329, 0.000000000'''
    cell.unit = 'B'
    cell.mesh = [13]*3
    cell.build()

    np.random.seed(2)
    # Running HF and CCSD with a 1x1x3 Monkhorst-Pack k-point mesh
    # (note: the SCF object below is assembled by hand; no actual HF run).
    kmf = scf.KUHF(cell, kpts=cell.make_kpts([1,1,3]), exxdiv=None)
    nmo = cell.nao_nr()
    kmf.mo_occ = np.zeros((2,3,nmo))
    kmf.mo_occ[0,:,:3] = 1
    kmf.mo_occ[1,:,:1] = 1
    kmf.mo_energy = np.arange(nmo) + np.random.random((2,3,nmo)) * .3
    kmf.mo_energy[kmf.mo_occ == 0] += 2

    mo = (np.random.random((2,3,nmo,nmo)) +
          np.random.random((2,3,nmo,nmo))*1j - .5-.5j)
    s = kmf.get_ovlp()
    kmf.mo_coeff = np.empty_like(mo)
    nkpts = len(kmf.kpts)
    for k in range(nkpts):
        # Lowdin-orthonormalize the random orbitals w.r.t. the AO overlap.
        kmf.mo_coeff[0,k] = lo.orth.vec_lowdin(mo[0,k], s[k])
        kmf.mo_coeff[1,k] = lo.orth.vec_lowdin(mo[1,k], s[k])

    def rand_t1_t2(mycc):
        # Random amplitudes with the correct permutational antisymmetry
        # imposed on the same-spin t2 blocks.
        nkpts = mycc.nkpts
        nocca, noccb = mycc.nocc
        nmoa, nmob = mycc.nmo
        nvira, nvirb = nmoa - nocca, nmob - noccb
        np.random.seed(1)
        t1a = (np.random.random((nkpts,nocca,nvira)) +
               np.random.random((nkpts,nocca,nvira))*1j - .5-.5j)
        t1b = (np.random.random((nkpts,noccb,nvirb)) +
               np.random.random((nkpts,noccb,nvirb))*1j - .5-.5j)
        t2aa = (np.random.random((nkpts,nkpts,nkpts,nocca,nocca,nvira,nvira)) +
                np.random.random((nkpts,nkpts,nkpts,nocca,nocca,nvira,nvira))*1j - .5-.5j)
        kconserv = kpts_helper.get_kconserv(kmf.cell, kmf.kpts)
        # Antisymmetrize over (i,j) and, k-point-resolved, over (a,b).
        t2aa = t2aa - t2aa.transpose(1,0,2,4,3,5,6)
        tmp = t2aa.copy()
        for ki, kj, kk in kpts_helper.loop_kkk(nkpts):
            kl = kconserv[ki, kk, kj]
            t2aa[ki,kj,kk] = t2aa[ki,kj,kk] - tmp[ki,kj,kl].transpose(0,1,3,2)
        t2ab = (np.random.random((nkpts,nkpts,nkpts,nocca,noccb,nvira,nvirb)) +
                np.random.random((nkpts,nkpts,nkpts,nocca,noccb,nvira,nvirb))*1j - .5-.5j)
        t2bb = (np.random.random((nkpts,nkpts,nkpts,noccb,noccb,nvirb,nvirb)) +
                np.random.random((nkpts,nkpts,nkpts,noccb,noccb,nvirb,nvirb))*1j - .5-.5j)
        t2bb = t2bb - t2bb.transpose(1,0,2,4,3,5,6)
        tmp = t2bb.copy()
        for ki, kj, kk in kpts_helper.loop_kkk(nkpts):
            kl = kconserv[ki, kk, kj]
            t2bb[ki,kj,kk] = t2bb[ki,kj,kk] - tmp[ki,kj,kl].transpose(0,1,3,2)
        t1 = (t1a, t1b)
        t2 = (t2aa, t2ab, t2bb)
        return t1, t2

    # Fingerprint check of update_amps; each printed difference should be ~0.
    mycc = KUCCSD(kmf)
    eris = mycc.ao2mo()
    t1, t2 = rand_t1_t2(mycc)
    Ht1, Ht2 = mycc.update_amps(t1, t2, eris)
    print(lib.finger(Ht1[0]) - (2.2677885702176339-2.5150764056992041j))
    print(lib.finger(Ht1[1]) - (-51.643438947846086+526.58026126100458j))
    print(lib.finger(Ht2[0]) - (-29.490813482748258-8.7509143690136018j))
    print(lib.finger(Ht2[1]) - (2256.0440056839416-193.16480896707569j))
    print(lib.finger(Ht2[2]) - (-250.59447681063182-397.57189085666982j))

    # Same check with a different (closed-shell-like) occupation pattern.
    kmf.mo_occ[:] = 0
    kmf.mo_occ[:,:,:2] = 1
    mycc = KUCCSD(kmf)
    eris = mycc.ao2mo()
    t1, t2 = rand_t1_t2(mycc)
    Ht1, Ht2 = mycc.update_amps(t1, t2, eris)
    print(lib.finger(Ht1[0]) - (5.4622516572705662+1.990046725028729j))
    print(lib.finger(Ht1[1]) - (4.8801120611799043-5.9940463787453488j))
    print(lib.finger(Ht2[0]) - (-192.38864512375193+305.14191018543983j))
    print(lib.finger(Ht2[1]) - (23085.044505825954-11527.802302550244j))
    print(lib.finger(Ht2[2]) - (115.57932548288559-40.888597453928604j))

    # Cross-check energy and amplitude updates against spin-orbital GCCSD.
    from pyscf.pbc.cc import kccsd
    kgcc = kccsd.GCCSD(scf.addons.convert_to_ghf(kmf))
    kccsd_eris = kccsd._make_eris_incore(kgcc, kgcc._scf.mo_coeff)
    r1 = kgcc.spatial2spin(t1)
    r2 = kgcc.spatial2spin(t2)
    ge = kccsd.energy(kgcc, r1, r2, kccsd_eris)
    r1, r2 = kgcc.update_amps(r1, r2, kccsd_eris)
    ue = energy(mycc, t1, t2, eris)
    print(abs(ge - ue))
    print(abs(r1 - kgcc.spatial2spin(Ht1)).max())
    print(abs(r2 - kgcc.spatial2spin(Ht2)).max())

    # Repeat the checks with density fitting.
    kmf = kmf.density_fit(auxbasis=[[0, (1., 1.)]])
    mycc = KUCCSD(kmf)
    eris = _make_df_eris(mycc, mycc.mo_coeff)
    t1, t2 = rand_t1_t2(mycc)
    Ht1, Ht2 = mycc.update_amps(t1, t2, eris)
    print(lib.finger(Ht1[0]) - (6.9341372555790013+0.87313546297025901j))
    print(lib.finger(Ht1[1]) - (6.7538005829391992-0.95702422534126796j))
    print(lib.finger(Ht2[0]) - (-509.24544842179876+448.00925776269855j))
    print(lib.finger(Ht2[1]) - (107.5960392010511+40.869216223808067j))
    print(lib.finger(Ht2[2]) - (-196.75910296082139+218.53005038057515j))
    kgcc = kccsd.GCCSD(scf.addons.convert_to_ghf(kmf))
    kccsd_eris = kccsd._make_eris_incore(kgcc, kgcc._scf.mo_coeff)
    r1 = kgcc.spatial2spin(t1)
    r2 = kgcc.spatial2spin(t2)
    ge = kccsd.energy(kgcc, r1, r2, kccsd_eris)
    r1, r2 = kgcc.update_amps(r1, r2, kccsd_eris)
    print(abs(r1 - kgcc.spatial2spin(Ht1)).max())
    print(abs(r2 - kgcc.spatial2spin(Ht2)).max())

    # Fingerprint every stored DF-ERI block; should print True.
    print(all([abs(lib.finger(eris.oooo) - (-0.18290712163391809-0.13839081039521306j) )<1e-8,
               abs(lib.finger(eris.ooOO) - (-0.084752145202964035-0.28496525042110676j) )<1e-8,
               #abs(lib.finger(eris.OOoo) - (0.43054922768629345-0.27990237216969871j) )<1e-8,
               abs(lib.finger(eris.OOOO) - (-0.2941475969103261-0.047247498899840978j) )<1e-8,
               abs(lib.finger(eris.ooov) - (0.23381463349517045-0.11703340936984277j) )<1e-8,
               abs(lib.finger(eris.ooOV) - (-0.052655392703214066+0.69533309442418556j) )<1e-8,
               abs(lib.finger(eris.OOov) - (-0.2111361247200903+0.85087916975274647j) )<1e-8,
               abs(lib.finger(eris.OOOV) - (-0.36995992208047412-0.18887278030885621j) )<1e-8,
               abs(lib.finger(eris.oovv) - (0.21107397525051516+0.0048714991438174871j) )<1e-8,
               abs(lib.finger(eris.ooVV) - (-0.076411225687065987+0.11080438166425896j) )<1e-8,
               abs(lib.finger(eris.OOvv) - (-0.17880337626095003-0.24174716216954206j) )<1e-8,
               abs(lib.finger(eris.OOVV) - (0.059186286356424908+0.68433866387500164j) )<1e-8,
               abs(lib.finger(eris.ovov) - (0.15402983765151051+0.064359681685222214j) )<1e-8,
               abs(lib.finger(eris.ovOV) - (-0.10697649196044598+0.30351249676253234j) )<1e-8,
               #abs(lib.finger(eris.OVov) - (-0.17619329728836752-0.56585020976035816j) )<1e-8,
               abs(lib.finger(eris.OVOV) - (-0.63963235318492118+0.69863219317718828j) )<1e-8,
               abs(lib.finger(eris.voov) - (-0.24137641647339092+0.18676684336011531j) )<1e-8,
               abs(lib.finger(eris.voOV) - (0.19257709151227204+0.38929027819406414j) )<1e-8,
               #abs(lib.finger(eris.VOov) - (0.07632606729926053-0.70350947950650355j) )<1e-8,
               abs(lib.finger(eris.VOOV) - (-0.47970203195500816+0.46735207193861927j) )<1e-8,
               abs(lib.finger(eris.vovv) - (-0.1342049915673903-0.23391327821719513j) )<1e-8,
               abs(lib.finger(eris.voVV) - (-0.28989635223866056+0.9644368822688475j) )<1e-8,
               abs(lib.finger(eris.VOvv) - (-0.32428269235420271+0.0029847254383674748j))<1e-8,
               abs(lib.finger(eris.VOVV) - (0.45031779746222456-0.36858577475752041j) )<1e-8]))

    # Same fingerprints for the out-of-core path, plus the vvvv blocks it stores.
    eris = _make_eris_outcore(mycc, mycc.mo_coeff)
    print(all([abs(lib.finger(eris.oooo) - (-0.18290712163391809-0.13839081039521306j) )<1e-8,
               abs(lib.finger(eris.ooOO) - (-0.084752145202964035-0.28496525042110676j) )<1e-8,
               #abs(lib.finger(eris.OOoo) - (0.43054922768629345-0.27990237216969871j) )<1e-8,
               abs(lib.finger(eris.OOOO) - (-0.2941475969103261-0.047247498899840978j) )<1e-8,
               abs(lib.finger(eris.ooov) - (0.23381463349517045-0.11703340936984277j) )<1e-8,
               abs(lib.finger(eris.ooOV) - (-0.052655392703214066+0.69533309442418556j) )<1e-8,
               abs(lib.finger(eris.OOov) - (-0.2111361247200903+0.85087916975274647j) )<1e-8,
               abs(lib.finger(eris.OOOV) - (-0.36995992208047412-0.18887278030885621j) )<1e-8,
               abs(lib.finger(eris.oovv) - (0.21107397525051516+0.0048714991438174871j) )<1e-8,
               abs(lib.finger(eris.ooVV) - (-0.076411225687065987+0.11080438166425896j) )<1e-8,
               abs(lib.finger(eris.OOvv) - (-0.17880337626095003-0.24174716216954206j) )<1e-8,
               abs(lib.finger(eris.OOVV) - (0.059186286356424908+0.68433866387500164j) )<1e-8,
               abs(lib.finger(eris.ovov) - (0.15402983765151051+0.064359681685222214j) )<1e-8,
               abs(lib.finger(eris.ovOV) - (-0.10697649196044598+0.30351249676253234j) )<1e-8,
               #abs(lib.finger(eris.OVov) - (-0.17619329728836752-0.56585020976035816j) )<1e-8,
               abs(lib.finger(eris.OVOV) - (-0.63963235318492118+0.69863219317718828j) )<1e-8,
               abs(lib.finger(eris.voov) - (-0.24137641647339092+0.18676684336011531j) )<1e-8,
               abs(lib.finger(eris.voOV) - (0.19257709151227204+0.38929027819406414j) )<1e-8,
               #abs(lib.finger(eris.VOov) - (0.07632606729926053-0.70350947950650355j) )<1e-8,
               abs(lib.finger(eris.VOOV) - (-0.47970203195500816+0.46735207193861927j) )<1e-8,
               abs(lib.finger(eris.vovv) - (-0.1342049915673903-0.23391327821719513j) )<1e-8,
               abs(lib.finger(eris.voVV) - (-0.28989635223866056+0.9644368822688475j) )<1e-8,
               abs(lib.finger(eris.VOvv) - (-0.32428269235420271+0.0029847254383674748j))<1e-8,
               abs(lib.finger(eris.VOVV) - (0.45031779746222456-0.36858577475752041j) )<1e-8,
               abs(lib.finger(eris.vvvv) - (-0.080512851258903173-0.2868384266725581j) )<1e-8,
               abs(lib.finger(eris.vvVV) - (-0.5137063762484736+1.1036785801263898j) )<1e-8,
               #abs(lib.finger(eris.VVvv) - (0.16468487082491939+0.25730725586992997j) )<1e-8,
               abs(lib.finger(eris.VVVV) - (-0.56714875196802295+0.058636785679170501j) )<1e-8]))
| apache-2.0 |
baylee-d/osf.io | website/settings/local-travis.py | 2 | 2918 | # -*- coding: utf-8 -*-
'''Example settings/local.py file.
These settings override what's in website/settings/defaults.py
NOTE: local.py will not be added to source control.
'''
import inspect
import logging
from . import defaults
import os
# Core local/CI overrides for website/settings/defaults.py.
DB_PORT = 54321

DEV_MODE = True
DEBUG_MODE = True  # Sets app to debug mode, turns off template caching, etc.
SECURE_MODE = not DEBUG_MODE  # Disable osf secure cookie
PROTOCOL = 'https://' if SECURE_MODE else 'http://'
DOMAIN = PROTOCOL + 'localhost:5000/'
API_DOMAIN = PROTOCOL + 'localhost:8000/'
ENABLE_INSTITUTIONS = True

# Per-provider preprint domains are disabled for local testing.
PREPRINT_PROVIDER_DOMAINS = {
    'enabled': False,
    'prefix': 'http://local.',
    'suffix': ':4201/'
}
# Locations of the externally-served Ember front-end apps (dev servers and
# on-disk build paths under $HOME).
USE_EXTERNAL_EMBER = True
EXTERNAL_EMBER_APPS = {
    'ember_osf_web': {
        'server': 'http://localhost:4200',
        # Leading '/' added: without it the path concatenated directly onto
        # $HOME (e.g. '/home/userwebsite/...'), inconsistent with the
        # '/preprints/' and '/registries/' entries below.
        'path': os.environ.get('HOME') + '/website/ember_osf_web/'
    },
    'preprints': {
        'server': 'http://localhost:4201',
        'path': os.environ.get('HOME') + '/preprints/'
    },
    'registries': {
        'server': 'http://localhost:4202/',
        'path': os.environ.get('HOME') + '/registries/'
    }
}
SEARCH_ENGINE = 'elastic'

# Outbound email and background tasks are disabled in this environment.
USE_EMAIL = False
USE_CELERY = False

# Email
MAIL_SERVER = 'localhost:1025'  # For local testing
MAIL_USERNAME = 'osf-smtp'
MAIL_PASSWORD = 'CHANGEME'

# Session
COOKIE_NAME = 'osf'
SECRET_KEY = 'CHANGEME'
SESSION_COOKIE_SECURE = SECURE_MODE
SESSION_COOKIE_SAMESITE = 'None'

# No TLS client key/cert for the local OSF server.
OSF_SERVER_KEY = None
OSF_SERVER_CERT = None
class CeleryConfig(defaults.CeleryConfig):
    """
    Celery configuration for local/CI runs: default RabbitMQ broker,
    in-memory result backend (no external result store needed).
    """
    ## Default RabbitMQ broker
    broker_url = 'amqp://'

    # In-memory result backend
    result_backend = 'cache'
    cache_backend = 'memory'
USE_CDN_FOR_CLIENT_LIBS = False

# No error reporting to Sentry in local/CI runs.
SENTRY_DSN = None

TEST_DB_NAME = DB_NAME = 'osf_test'

VARNISH_SERVERS = ['http://localhost:8080']
# if ENABLE_VARNISH isn't set in python read it from the env var and set it
# (locals() at module scope is the module namespace, so this defines the name).
locals().setdefault('ENABLE_VARNISH', os.environ.get('ENABLE_VARNISH') == 'True')

# Placeholder Keen analytics credentials (dummy 64-char hex keys).
KEEN = {
    'public': {
        'project_id': '123456789abcdef101112131415161718191a1b1c1d1e1f20212223242526272',
        'master_key': '123456789abcdef101112131415161718191a1b1c1d1e1f20212223242526272',
        'write_key': '123456789abcdef101112131415161718191a1b1c1d1e1f20212223242526272',
        'read_key': '123456789abcdef101112131415161718191a1b1c1d1e1f20212223242526272',
    },
    'private': {
        'project_id': '123456789abcdef101112131415161718191a1b1c1d1e1f20212223242526272',
        'write_key': '123456789abcdef101112131415161718191a1b1c1d1e1f20212223242526272',
        'read_key': '123456789abcdef101112131415161718191a1b1c1d1e1f20212223242526272',
    },
}

# Placeholder node GUIDs for curated link collections.
NEW_AND_NOTEWORTHY_LINKS_NODE = 'helloo'
POPULAR_LINKS_NODE = 'hiyah'
POPULAR_LINKS_REGISTRATIONS = 'woooo'

# Silence extremely chatty celery task tracing.
logging.getLogger('celery.app.trace').setLevel(logging.FATAL)

DOI_FORMAT = '{prefix}/FK2osf.io/{guid}'

SHARE_ENABLED = False
| apache-2.0 |
GdZ/scriptfile | software/googleAppEngine/lib/yaml/examples/yaml-highlight/yaml_hl.py | 95 | 4429 | #!/usr/bin/python
import yaml, codecs, sys, os.path, optparse
class Style:
    """Markup fragments used to decorate a highlighted YAML stream.

    ``substitutions`` maps (yaml token/event class, -1 or +1) to the markup
    emitted before (-1) or after (+1) a node of that class.
    """

    def __init__(self, header=None, footer=None,
                 tokens=None, events=None, replaces=None):
        self.header = header
        self.footer = footer
        self.replaces = replaces
        self.substitutions = {}
        for mapping, suffix in ((tokens, 'Token'), (events, 'Event')):
            if not mapping:
                continue
            for key, markup in mapping.items():
                # 'block-mapping-start' -> yaml.BlockMappingStartToken, etc.
                camel = ''.join(part.capitalize() for part in key.split('-'))
                cls = getattr(yaml, camel + suffix)
                if not markup:
                    continue
                start = markup.get('start')
                if start:
                    self.substitutions[cls, -1] = start
                end = markup.get('end')
                if end:
                    self.substitutions[cls, +1] = end

    def __setstate__(self, state):
        # Invoked when a Style is deserialized from the YAML config.
        self.__init__(**state)
# Load the config file directly into Style objects: every top-level mapping
# value becomes a Style, and its 'replaces' sequence is loaded as pairs.
yaml.add_path_resolver(u'tag:yaml.org,2002:python/object:__main__.Style',
                       [None], dict)
yaml.add_path_resolver(u'tag:yaml.org,2002:pairs',
                       [None, u'replaces'], list)
class YAMLHighlight:
    """Decorate a YAML document with the markup fragments of a Style.

    NOTE(review): this is Python-2-only code (``file()``, ``unicode()``);
    porting to Python 3 would require open()/str and byte handling changes.
    """

    def __init__(self, options):
        # Load the style table from the config file; keys are style names.
        config = yaml.load(file(options.config, 'rb').read())
        self.style = config[options.style]
        if options.input:
            self.input = file(options.input, 'rb')
        else:
            self.input = sys.stdin
        if options.output:
            self.output = file(options.output, 'wb')
        else:
            self.output = sys.stdout

    def highlight(self):
        input = self.input.read()
        # Decode the raw bytes, honoring a UTF-16 BOM if present.
        if input.startswith(codecs.BOM_UTF16_LE):
            input = unicode(input, 'utf-16-le')
        elif input.startswith(codecs.BOM_UTF16_BE):
            input = unicode(input, 'utf-16-be')
        else:
            input = unicode(input, 'utf-8')
        substitutions = self.style.substitutions
        tokens = yaml.scan(input)
        events = yaml.parse(input)
        # Collect insertion markers: [char index, nesting weight, sequence
        # number, markup].  Token markers use weights +2/-2 so they nest
        # inside event markers (+1/-1) at the same character index.
        markers = []
        number = 0
        for token in tokens:
            number += 1
            if token.start_mark.index != token.end_mark.index:
                cls = token.__class__
                if (cls, -1) in substitutions:
                    markers.append([token.start_mark.index, +2, number, substitutions[cls, -1]])
                if (cls, +1) in substitutions:
                    markers.append([token.end_mark.index, -2, number, substitutions[cls, +1]])
        number = 0
        for event in events:
            number += 1
            cls = event.__class__
            if (cls, -1) in substitutions:
                markers.append([event.start_mark.index, +1, number, substitutions[cls, -1]])
            if (cls, +1) in substitutions:
                markers.append([event.end_mark.index, -1, number, substitutions[cls, +1]])
        markers.sort()
        markers.reverse()
        # Walk the markers back-to-front, emitting source chunks (with the
        # style's character replacements applied) interleaved with markup;
        # the chunks are assembled in reverse and flipped at the end.
        chunks = []
        position = len(input)
        for index, weight1, weight2, substitution in markers:
            if index < position:
                chunk = input[index:position]
                for substring, replacement in self.style.replaces:
                    chunk = chunk.replace(substring, replacement)
                chunks.append(chunk)
                position = index
            chunks.append(substitution)
        chunks.reverse()
        result = u''.join(chunks)
        if self.style.header:
            self.output.write(self.style.header)
        self.output.write(result.encode('utf-8'))
        if self.style.footer:
            self.output.write(self.style.footer)
if __name__ == '__main__':
    # Command-line entry point: choose style/config and I/O files.
    parser = optparse.OptionParser()
    parser.add_option('-s', '--style', dest='style', default='ascii',
                      help="specify the highlighting style", metavar='STYLE')
    parser.add_option('-c', '--config', dest='config',
                      default=os.path.join(os.path.dirname(sys.argv[0]), 'yaml_hl.cfg'),
                      help="set an alternative configuration file", metavar='CONFIG')
    parser.add_option('-i', '--input', dest='input', default=None,
                      help="set the input file (default: stdin)", metavar='FILE')
    parser.add_option('-o', '--output', dest='output', default=None,
                      help="set the output file (default: stdout)", metavar='FILE')
    (options, args) = parser.parse_args()
    hl = YAMLHighlight(options)
    hl.highlight()
| mit |
cbernet/cpyroot | tools/fitter2d.py | 1 | 1550 | from ROOT import gDirectory, TH2F, TH1F, TFile
class Fitter2D(object):
    """Wrap a ROOT TH2F and Gaussian-fit its Y projections bin by bin."""

    def __init__(self, *args):
        # Arguments are forwarded verbatim to the ROOT TH2F constructor.
        self.h2d = TH2F(*args)

    def draw2D(self, *args):
        # NOTE(review): relies on self.hmean, which only exists after
        # fit_slices() has been called -- confirm call order at call sites.
        self.h2d.Draw(*args)
        self.hmean.Draw('psame')

    def fit(self, bin, opt='0'):
        """Gaussian-fit the Y projection of a single X bin.

        Returns (x, dx, mean, dmean, sigma, dsigma); all zeros when the
        slice has no entries.
        """
        hslice = self.h2d.ProjectionY("", bin, bin, "")
        if not hslice.GetEntries():
            return 0., 0., 0., 0., 0., 0.
        hslice.Fit('gaus', opt)
        func = hslice.GetFunction('gaus')
        x = self.h2d.GetXaxis().GetBinCenter(bin)
        dx = self.h2d.GetXaxis().GetBinWidth(bin)
        mean = func.GetParameter(1)
        dmean = func.GetParError(1)
        sigma = func.GetParameter(2)
        dsigma = func.GetParError(2)
        return x, dx, mean, dmean, sigma, dsigma

    def fit_slices(self):
        # ROOT's FitSlicesY writes the per-bin mean/sigma/chi2 histograms
        # into gDirectory under "<name>_1", "<name>_2", "<name>_chi2".
        self.h2d.FitSlicesY()
        self.hmean = gDirectory.Get(self.h2d.GetName() + '_1')
        self.hsigma = gDirectory.Get(self.h2d.GetName() + '_2')
        # self.hsigma.SetYTitle('#sigma(MET_{x,y})')
        self.hchi2 = gDirectory.Get(self.h2d.GetName() + '_chi2')

    def format(self, style, xtitle):
        # Apply the project style and axis title to the fit-result histograms.
        for hist in [self.hmean, self.hsigma, self.hchi2]:
            style.format(hist)
            hist.SetTitle('')
            hist.SetXTitle(xtitle)

    def write(self):
        """Write the 2D histogram and its fit results to <name>.root."""
        outfile = TFile(self.h2d.GetName()+'.root', 'recreate')
        for hist in [self.hmean, self.hsigma, self.hchi2, self.h2d]:
            # NOTE(review): the Clone() result is discarded; presumably only
            # SetDirectory is needed here -- confirm intent.
            hist.Clone()
            hist.SetDirectory(outfile)
        outfile.Write()
        outfile.Close()
| gpl-2.0 |
attakei/ansible | test/units/parsing/vault/test_vault.py | 81 | 5965 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import getpass
import os
import shutil
import time
import tempfile
import six
from binascii import unhexlify
from binascii import hexlify
from nose.plugins.skip import SkipTest
from ansible.compat.tests import unittest
from ansible.utils.unicode import to_bytes, to_unicode
from ansible import errors
from ansible.parsing.vault import VaultLib
# Counter import fails for 2.0.1, requires >= 2.6.1 from pip
try:
from Crypto.Util import Counter
HAS_COUNTER = True
except ImportError:
HAS_COUNTER = False
# KDF import fails for 2.0.1, requires >= 2.6.1 from pip
try:
from Crypto.Protocol.KDF import PBKDF2
HAS_PBKDF2 = True
except ImportError:
HAS_PBKDF2 = False
# AES IMPORTS
try:
from Crypto.Cipher import AES as AES
HAS_AES = True
except ImportError:
HAS_AES = False
class TestVaultLib(unittest.TestCase):
    """Tests for VaultLib header handling and AES encrypt/decrypt round-trips.

    The AES* tests are skipped when the optional PyCrypto pieces
    (AES, Counter, PBKDF2) are unavailable.
    """

    def test_methods_exist(self):
        # The rest of the suite relies on this exact (semi-private) API.
        v = VaultLib('ansible')
        slots = ['is_encrypted',
                 'encrypt',
                 'decrypt',
                 '_format_output',
                 '_split_header', ]
        for slot in slots:
            assert hasattr(v, slot), "VaultLib is missing the %s method" % slot

    def test_is_encrypted(self):
        v = VaultLib(None)
        assert not v.is_encrypted(u"foobar"), "encryption check on plaintext failed"
        data = u"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible")
        assert v.is_encrypted(data), "encryption check on headered text failed"

    def test_format_output(self):
        v = VaultLib('ansible')
        v.cipher_name = "TEST"
        sensitive_data = "ansible"
        data = v._format_output(sensitive_data)
        lines = data.split(b'\n')
        assert len(lines) > 1, "failed to properly add header"
        header = to_unicode(lines[0])
        # Expected header layout: $ANSIBLE_VAULT;<version>;<cipher>
        # (fixed message: previously read "does end with cipher name")
        assert header.endswith(';TEST'), "header does not end with cipher name"
        header_parts = header.split(';')
        assert len(header_parts) == 3, "header has the wrong number of parts"
        assert header_parts[0] == '$ANSIBLE_VAULT', "header does not start with $ANSIBLE_VAULT"
        assert header_parts[1] == v.b_version, "header version is incorrect"
        assert header_parts[2] == 'TEST', "header does not end with cipher name"

    def test_split_header(self):
        v = VaultLib('ansible')
        data = b"$ANSIBLE_VAULT;9.9;TEST\nansible"
        rdata = v._split_header(data)
        lines = rdata.split(b'\n')
        # The header line must be stripped, and its fields recorded.
        assert lines[0] == b"ansible"
        assert v.cipher_name == 'TEST', "cipher name was not set"
        assert v.b_version == "9.9"

    def test_encrypt_decrypt_aes(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        v = VaultLib('ansible')
        v.cipher_name = u'AES'
        # AES encryption code has been removed, so this is old output for
        # AES-encrypted 'foobar' with password 'ansible'.
        enc_data = '$ANSIBLE_VAULT;1.1;AES\n53616c7465645f5fc107ce1ef4d7b455e038a13b053225776458052f8f8f332d554809d3f150bfa3\nfe3db930508b65e0ff5947e4386b79af8ab094017629590ef6ba486814cf70f8e4ab0ed0c7d2587e\n786a5a15efeb787e1958cbdd480d076c\n'
        dec_data = v.decrypt(enc_data)
        assert dec_data == "foobar", "decryption failed"

    def test_encrypt_decrypt_aes256(self):
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        v = VaultLib('ansible')
        v.cipher_name = 'AES256'
        enc_data = v.encrypt("foobar")
        dec_data = v.decrypt(enc_data)
        assert enc_data != "foobar", "encryption failed"
        assert dec_data == "foobar", "decryption failed"

    def test_encrypt_encrypted(self):
        # Encrypting data that already carries a vault header must fail.
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        v = VaultLib('ansible')
        v.cipher_name = 'AES'
        data = "$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(six.b("ansible"))
        error_hit = False
        try:
            v.encrypt(data)
        except errors.AnsibleError:
            error_hit = True
        assert error_hit, "No error was thrown when trying to encrypt data with a header"

    def test_decrypt_decrypted(self):
        # Decrypting plaintext (no vault header) must fail.
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        v = VaultLib('ansible')
        data = "ansible"
        error_hit = False
        try:
            v.decrypt(data)
        except errors.AnsibleError:
            error_hit = True
        assert error_hit, "No error was thrown when trying to decrypt data without a header"

    def test_cipher_not_set(self):
        # not setting the cipher should default to AES256
        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
            raise SkipTest
        v = VaultLib('ansible')
        data = "ansible"
        error_hit = False
        try:
            v.encrypt(data)
        except errors.AnsibleError:
            error_hit = True
        assert not error_hit, "An error was thrown when trying to encrypt data without the cipher set"
        assert v.cipher_name == "AES256", "cipher name is not set to AES256: %s" % v.cipher_name
| gpl-3.0 |
fuselock/odoo | addons/payment_sips/__openerp__.py | 196 | 1374 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright Eezee-It
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Odoo/OpenERP module manifest -- evaluated by the server, not imported.
{
    'name': 'Worldline SIPS Payment Acquiring for online payments',
    'version': '1.0',
    'author': 'Eezee-It',
    # 'Hidden' keeps the module out of the Apps menu listing.
    'category': 'Hidden',
    'description': """
Worldline SIPS Payment Acquirer for online payments
Works with Worldline keys version 2.0, contains implementation of
payments acquirer using Worldline SIPS.""",
    # Builds on the generic 'payment' acquirer framework.
    'depends': ['payment'],
    'data': [
        'views/sips.xml',
        'views/payment_acquirer.xml',
        'data/sips.xml',
    ],
    'installable': True,
}
| agpl-3.0 |
tcffisher/namebench | nb_third_party/simplejson/decoder.py | 296 | 15152 | """Implementation of JSONDecoder
"""
import re
import sys
import struct
from simplejson.scanner import make_scanner
def _import_c_scanstring():
try:
from simplejson._speedups import scanstring
return scanstring
except ImportError:
return None
# C implementation when available; the pure-Python fallback is selected
# later (see the `scanstring = c_scanstring or py_scanstring` line).
c_scanstring = _import_c_scanstring()

__all__ = ['JSONDecoder']

# Flags shared by every regular expression in this module.
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
def _floatconstants():
_BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
# The struct module in Python 2.4 would get frexp() out of range here
# when an endian is specified in the format string. Fixed in Python 2.5+
if sys.byteorder != 'big':
_BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
nan, inf = struct.unpack('dd', _BYTES)
return nan, inf, -inf
NaN, PosInf, NegInf = _floatconstants()
class JSONDecodeError(ValueError):
    """Subclass of ValueError with the following additional properties:

    msg: The unformatted error message
    doc: The JSON document being parsed
    pos: The start index of doc where parsing failed
    end: The end index of doc where parsing failed (may be None)
    lineno: The line corresponding to pos
    colno: The column corresponding to pos
    endlineno: The line corresponding to end (may be None)
    endcolno: The column corresponding to end (may be None)
    """

    def __init__(self, msg, doc, pos, end=None):
        ValueError.__init__(self, errmsg(msg, doc, pos, end=end))
        self.msg = msg
        self.doc = doc
        self.pos = pos
        self.end = end
        self.lineno, self.colno = linecol(doc, pos)
        if end is not None:
            # Bug fix: the end line/column must be derived from `end`;
            # the previous code passed `pos` here, so endlineno/endcolno
            # always duplicated lineno/colno.
            self.endlineno, self.endcolno = linecol(doc, end)
        else:
            self.endlineno, self.endcolno = None, None


def linecol(doc, pos):
    """Map character offset *pos* in *doc* to a ``(line, column)`` pair.

    Lines are 1-based.  The column is the raw offset on the first line
    and 1-based on subsequent lines -- this asymmetry is historical
    behavior and is preserved.
    """
    lineno = doc.count('\n', 0, pos) + 1
    if lineno == 1:
        colno = pos
    else:
        colno = pos - doc.rindex('\n', 0, pos)
    return lineno, colno


def errmsg(msg, doc, pos, end=None):
    """Format a parse-error message with line/column context.

    Note that this function is also called from _speedups, so its
    signature must stay stable.
    """
    lineno, colno = linecol(doc, pos)
    if end is None:
        fmt = '%s: line %d column %d (char %d)'
        return fmt % (msg, lineno, colno, pos)
    endlineno, endcolno = linecol(doc, end)
    fmt = '%s: line %d column %d - line %d column %d (char %d - %d)'
    return fmt % (msg, lineno, colno, endlineno, endcolno, pos, end)
# Literal names accepted outside the strict JSON spec, mapped to floats.
_CONSTANTS = {
    '-Infinity': NegInf,
    'Infinity': PosInf,
    'NaN': NaN,
}

# Matches a run of plain characters up to the next quote, backslash or
# control character -- the points where string scanning must stop.
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
# Single-character escape sequences (\uXXXX is handled separately).
BACKSLASH = {
    '"': u'"', '\\': u'\\', '/': u'/',
    'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
}

# Encoding assumed for str input when the caller supplies none.
DEFAULT_ENCODING = "utf-8"
def py_scanstring(s, end, encoding=None, strict=True,
        _b=BACKSLASH, _m=STRINGCHUNK.match):
    """Scan the string s for a JSON string. End is the index of the
    character in s after the quote that started the JSON string.
    Unescapes all valid JSON string escape sequences and raises ValueError
    on attempt to decode an invalid string. If strict is False then literal
    control characters are allowed in the string.

    Returns a tuple of the decoded string and the index of the character in s
    after the end quote."""
    # NOTE: Python 2 implementation (relies on unicode/unichr builtins).
    if encoding is None:
        encoding = DEFAULT_ENCODING
    chunks = []
    _append = chunks.append
    begin = end - 1
    while 1:
        chunk = _m(s, end)
        if chunk is None:
            raise JSONDecodeError(
                "Unterminated string starting at", s, begin)
        end = chunk.end()
        content, terminator = chunk.groups()
        # Content contains zero or more unescaped string characters
        if content:
            if not isinstance(content, unicode):
                content = unicode(content, encoding)
            _append(content)
        # Terminator is the end of string, a literal control character,
        # or a backslash denoting that an escape sequence follows
        if terminator == '"':
            break
        elif terminator != '\\':
            if strict:
                msg = "Invalid control character %r at" % (terminator,)
                raise JSONDecodeError(msg, s, end)
            else:
                # Non-strict mode: keep the raw control character.
                _append(terminator)
                continue
        try:
            esc = s[end]
        except IndexError:
            raise JSONDecodeError(
                "Unterminated string starting at", s, begin)
        # If not a unicode escape sequence, must be in the lookup table
        if esc != 'u':
            try:
                char = _b[esc]
            except KeyError:
                msg = "Invalid \\escape: " + repr(esc)
                raise JSONDecodeError(msg, s, end)
            end += 1
        else:
            # Unicode escape sequence: \uXXXX (4 hex digits)
            esc = s[end + 1:end + 5]
            next_end = end + 5
            if len(esc) != 4:
                msg = "Invalid \\uXXXX escape"
                raise JSONDecodeError(msg, s, end)
            uni = int(esc, 16)
            # Check for surrogate pair on UCS-4 systems
            if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
                msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
                if not s[end + 5:end + 7] == '\\u':
                    raise JSONDecodeError(msg, s, end)
                esc2 = s[end + 7:end + 11]
                if len(esc2) != 4:
                    raise JSONDecodeError(msg, s, end)
                uni2 = int(esc2, 16)
                # Combine high and low surrogates into one code point.
                uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
                next_end += 6
            char = unichr(uni)
            end = next_end
        # Append the unescaped character
        _append(char)
    return u''.join(chunks), end
# Use the C speedup if available, else the pure-Python implementation.
scanstring = c_scanstring or py_scanstring

# Insignificant whitespace as defined by the JSON specification.
WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
WHITESPACE_STR = ' \t\n\r'
def JSONObject((s, end), encoding, strict, scan_once, object_hook,
        object_pairs_hook, memo=None,
        _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    # Parse one JSON object starting just after its opening '{'; return
    # (object, index-after-closing-'}').
    # NOTE: Python 2 only -- the tuple parameter ((s, end)) is a syntax
    # error on Python 3.
    # Backwards compatibility
    if memo is None:
        memo = {}
    # memo interns repeated key strings across the whole document.
    memo_get = memo.setdefault
    pairs = []
    # Use a slice to prevent IndexError from being raised, the following
    # check will raise a more specific ValueError if the string is empty
    nextchar = s[end:end + 1]
    # Normally we expect nextchar == '"'
    if nextchar != '"':
        if nextchar in _ws:
            end = _w(s, end).end()
            nextchar = s[end:end + 1]
        # Trivial empty object
        if nextchar == '}':
            if object_pairs_hook is not None:
                result = object_pairs_hook(pairs)
                return result, end
            pairs = {}
            if object_hook is not None:
                pairs = object_hook(pairs)
            return pairs, end + 1
        elif nextchar != '"':
            raise JSONDecodeError("Expecting property name", s, end)
    end += 1
    while True:
        key, end = scanstring(s, end, encoding, strict)
        key = memo_get(key, key)
        # To skip some function call overhead we optimize the fast paths where
        # the JSON key separator is ": " or just ":".
        if s[end:end + 1] != ':':
            end = _w(s, end).end()
            if s[end:end + 1] != ':':
                raise JSONDecodeError("Expecting : delimiter", s, end)
        end += 1
        # Skip whitespace after ':' (fast path: at most two probes before
        # falling back to the regex).
        try:
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass
        try:
            value, end = scan_once(s, end)
        except StopIteration:
            raise JSONDecodeError("Expecting object", s, end)
        pairs.append((key, value))
        # Look at the character after the value: ',' continues, '}' ends.
        try:
            nextchar = s[end]
            if nextchar in _ws:
                end = _w(s, end + 1).end()
                nextchar = s[end]
        except IndexError:
            nextchar = ''
        end += 1
        if nextchar == '}':
            break
        elif nextchar != ',':
            raise JSONDecodeError("Expecting , delimiter", s, end - 1)
        # Position on the opening quote of the next property name.
        try:
            nextchar = s[end]
            if nextchar in _ws:
                end += 1
                nextchar = s[end]
                if nextchar in _ws:
                    end = _w(s, end + 1).end()
                    nextchar = s[end]
        except IndexError:
            nextchar = ''
        end += 1
        if nextchar != '"':
            raise JSONDecodeError("Expecting property name", s, end - 1)
    # object_pairs_hook takes priority over object_hook (see JSONDecoder).
    if object_pairs_hook is not None:
        result = object_pairs_hook(pairs)
        return result, end
    pairs = dict(pairs)
    if object_hook is not None:
        pairs = object_hook(pairs)
    return pairs, end
def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    # Parse one JSON array starting just after its opening '['; return
    # (list-of-values, index-after-closing-']').
    # NOTE: Python 2 only -- the tuple parameter ((s, end)) is a syntax
    # error on Python 3.
    values = []
    nextchar = s[end:end + 1]
    if nextchar in _ws:
        end = _w(s, end + 1).end()
        nextchar = s[end:end + 1]
    # Look-ahead for trivial empty array
    if nextchar == ']':
        return values, end + 1
    _append = values.append
    while True:
        try:
            value, end = scan_once(s, end)
        except StopIteration:
            raise JSONDecodeError("Expecting object", s, end)
        _append(value)
        # After a value: ',' continues the array, ']' terminates it.
        nextchar = s[end:end + 1]
        if nextchar in _ws:
            end = _w(s, end + 1).end()
            nextchar = s[end:end + 1]
        end += 1
        if nextchar == ']':
            break
        elif nextchar != ',':
            raise JSONDecodeError("Expecting , delimiter", s, end)
        # Skip whitespace before the next value (fast path: at most two
        # probes before falling back to the regex).
        try:
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass
    return values, end
class JSONDecoder(object):
    """Simple JSON <http://json.org> decoder

    Performs the following translations in decoding by default:

    +---------------+-------------------+
    | JSON          | Python            |
    +===============+===================+
    | object        | dict              |
    +---------------+-------------------+
    | array         | list              |
    +---------------+-------------------+
    | string        | unicode           |
    +---------------+-------------------+
    | number (int)  | int, long         |
    +---------------+-------------------+
    | number (real) | float             |
    +---------------+-------------------+
    | true          | True              |
    +---------------+-------------------+
    | false         | False             |
    +---------------+-------------------+
    | null          | None              |
    +---------------+-------------------+

    It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
    their corresponding ``float`` values, which is outside the JSON spec.
    """

    def __init__(self, encoding=None, object_hook=None, parse_float=None,
            parse_int=None, parse_constant=None, strict=True,
            object_pairs_hook=None):
        """
        *encoding* determines the encoding used to interpret any
        :class:`str` objects decoded by this instance (``'utf-8'`` by
        default).  It has no effect when decoding :class:`unicode` objects.

        Note that currently only encodings that are a superset of ASCII work,
        strings of other encodings should be passed in as :class:`unicode`.

        *object_hook*, if specified, will be called with the result of every
        JSON object decoded and its return value will be used in place of the
        given :class:`dict`.  This can be used to provide custom
        deserializations (e.g. to support JSON-RPC class hinting).

        *object_pairs_hook* is an optional function that will be called with
        the result of any object literal decode with an ordered list of pairs.
        The return value of *object_pairs_hook* will be used instead of the
        :class:`dict`.  This feature can be used to implement custom decoders
        that rely on the order that the key and value pairs are decoded (for
        example, :func:`collections.OrderedDict` will remember the order of
        insertion). If *object_hook* is also defined, the *object_pairs_hook*
        takes priority.

        *parse_float*, if specified, will be called with the string of every
        JSON float to be decoded.  By default, this is equivalent to
        ``float(num_str)``. This can be used to use another datatype or parser
        for JSON floats (e.g. :class:`decimal.Decimal`).

        *parse_int*, if specified, will be called with the string of every
        JSON int to be decoded.  By default, this is equivalent to
        ``int(num_str)``.  This can be used to use another datatype or parser
        for JSON integers (e.g. :class:`float`).

        *parse_constant*, if specified, will be called with one of the
        following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
        can be used to raise an exception if invalid JSON numbers are
        encountered.

        *strict* controls the parser's behavior when it encounters an
        invalid control character in a string. The default setting of
        ``True`` means that unescaped control characters are parse errors, if
        ``False`` then control characters will be allowed in strings.
        """
        self.encoding = encoding
        self.object_hook = object_hook
        self.object_pairs_hook = object_pairs_hook
        self.parse_float = parse_float or float
        self.parse_int = parse_int or int
        self.parse_constant = parse_constant or _CONSTANTS.__getitem__
        self.strict = strict
        # Parser entry points; subclasses may override these.
        self.parse_object = JSONObject
        self.parse_array = JSONArray
        self.parse_string = scanstring
        # Key-string memo shared with JSONObject, and the compiled scanner.
        self.memo = {}
        self.scan_once = make_scanner(self)

    def decode(self, s, _w=WHITESPACE.match):
        """Return the Python representation of ``s`` (a ``str`` or ``unicode``
        instance containing a JSON document)
        """
        obj, end = self.raw_decode(s, idx=_w(s, 0).end())
        end = _w(s, end).end()
        # Anything other than trailing whitespace after the document is
        # an error.
        if end != len(s):
            raise JSONDecodeError("Extra data", s, end, len(s))
        return obj

    def raw_decode(self, s, idx=0):
        """Decode a JSON document from ``s`` (a ``str`` or ``unicode``
        beginning with a JSON document) and return a 2-tuple of the Python
        representation and the index in ``s`` where the document ended.

        This can be used to decode a JSON document from a string that may
        have extraneous data at the end.
        """
        try:
            obj, end = self.scan_once(s, idx)
        except StopIteration:
            raise JSONDecodeError("No JSON object could be decoded", s, idx)
        return obj, end
| apache-2.0 |
jcpowermac/ansible | test/units/modules/remote_management/oneview/test_oneview_network_set_facts.py | 73 | 3713 | # Copyright (c) 2016-2017 Hewlett Packard Enterprise Development LP
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from ansible.compat.tests import unittest
from oneview_module_loader import NetworkSetFactsModule
from hpe_test_utils import FactsParamsTestCase
# Error message used by the shared FactsParamsTestCase checks.
ERROR_MSG = 'Fake message error'

# Module argument sets assigned to mock_ansible_module.params below.
PARAMS_GET_ALL = dict(
    config='config.json',
    name=None
)

PARAMS_GET_ALL_WITHOUT_ETHERNET = dict(
    config='config.json',
    name=None,
    options=['withoutEthernet']
)

PARAMS_GET_BY_NAME = dict(
    config='config.json',
    name='Network Set 1'
)

PARAMS_GET_BY_NAME_WITHOUT_ETHERNET = dict(
    config='config.json',
    name='Network Set 1',
    options=['withoutEthernet']
)
class NetworkSetFactsSpec(unittest.TestCase,
                          FactsParamsTestCase):
    """Unit tests for the oneview_network_set_facts module."""

    def setUp(self):
        # configure_mocks / configure_client_mock come from the HPE test
        # helpers and install self.mock_ov_client / self.mock_ansible_module.
        self.configure_mocks(self, NetworkSetFactsModule)
        self.network_sets = self.mock_ov_client.network_sets
        FactsParamsTestCase.configure_client_mock(self, self.network_sets)

    def test_should_get_all_network_sets(self):
        network_sets = [{
            "name": "Network Set 1",
            "networkUris": ['/rest/ethernet-networks/aaa-bbb-ccc']
        }, {
            "name": "Network Set 2",
            "networkUris": ['/rest/ethernet-networks/ddd-eee-fff', '/rest/ethernet-networks/ggg-hhh-fff']
        }]
        self.network_sets.get_all.return_value = network_sets
        self.mock_ansible_module.params = PARAMS_GET_ALL
        NetworkSetFactsModule().run()
        self.network_sets.get_all.assert_called_once_with()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            ansible_facts=dict(network_sets=network_sets))

    def test_should_get_all_network_sets_without_ethernet(self):
        # NOTE(review): this test uses PARAMS_GET_ALL and asserts get_all(),
        # so the 'withoutEthernet' option is never actually exercised;
        # PARAMS_GET_ALL_WITHOUT_ETHERNET looks intended here -- confirm
        # against the module's option handling before changing.
        network_sets = [{
            "name": "Network Set 1",
            "networkUris": []
        }, {
            "name": "Network Set 2",
            "networkUris": []
        }]
        self.network_sets.get_all.return_value = network_sets
        self.mock_ansible_module.params = PARAMS_GET_ALL
        NetworkSetFactsModule().run()
        self.network_sets.get_all.assert_called_once_with()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            ansible_facts=dict(network_sets=network_sets))

    def test_should_get_network_set_by_name(self):
        network_sets = [{
            "name": "Network Set 1",
            "networkUris": ['/rest/ethernet-networks/aaa-bbb-ccc']
        }]
        self.network_sets.get_by.return_value = network_sets
        self.mock_ansible_module.params = PARAMS_GET_BY_NAME
        NetworkSetFactsModule().run()
        self.network_sets.get_by.assert_called_once_with('name', 'Network Set 1')
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            ansible_facts=dict(network_sets=network_sets))

    def test_should_get_network_set_by_name_without_ethernet(self):
        network_sets = [{
            "name": "Network Set 1",
            "networkUris": []
        }]
        self.network_sets.get_all_without_ethernet.return_value = network_sets
        self.mock_ansible_module.params = PARAMS_GET_BY_NAME_WITHOUT_ETHERNET
        NetworkSetFactsModule().run()
        # name + withoutEthernet is translated by the module into a
        # filtered get_all_without_ethernet call.
        expected_filter = "\"'name'='Network Set 1'\""
        self.network_sets.get_all_without_ethernet.assert_called_once_with(filter=expected_filter)
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            ansible_facts=dict(network_sets=network_sets))
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| gpl-3.0 |
aswadrangnekar/khandelwal | app/lib/console/app/model/session.py | 13 | 4353 | # Based on the Google App Engine Samples project.
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
An interactive Python console "session".
The logging, os, sys, db, and users modules are imported automatically.
Interpreter state is stored in the datastore so that variables, function
definitions, and other values in the global and local namespaces can be used
across commands.
TODO: unit tests!
"""
import logging
import pickle
import types
from google.appengine.ext import db
# The entity kind for shell sessions. Feel free to rename to suit your app.
_SESSION_KIND = '_Console_Session'
class ShellSession(db.Model):
    """A shell session. Stores the session's globals.

    Each session globals is stored in one of two places:

    If the global is picklable, it's stored in the parallel globals and
    global_names list properties. (They're parallel lists to work around the
    unfortunate fact that the datastore can't store dictionaries natively.)

    If the global is not picklable (e.g. modules, classes, and functions), or if
    it was created by the same statement that created an unpicklable global,
    it's not stored directly. Instead, the statement is stored in the
    unpicklables list property. On each request, before executing the current
    statement, the unpicklable statements are evaluated to recreate the
    unpicklable globals.

    The unpicklable_names property stores all of the names of globals that were
    added by unpicklable statements. When we pickle and store the globals after
    executing a statement, we skip the ones in unpicklable_names.

    Using Text instead of string is an optimization. We don't query on any of
    these properties, so they don't need to be indexed.
    """
    # Parallel lists: names and pickled values of the picklable globals.
    global_names = db.ListProperty(db.Text)
    globals = db.ListProperty(db.Blob)
    # Names created by unpicklable statements, and those statements.
    unpicklable_names = db.ListProperty(db.Text)
    unpicklables = db.ListProperty(db.Text)

    def set_global(self, name, value):
        """Adds a global, or updates it if it already exists.

        Also removes the global from the list of unpicklable names.

        Args:
          name: the name of the global to set
          value: any picklable value
        """
        blob = db.Blob(pickle.dumps(value))
        if name in self.global_names:
            # Update in place so the parallel lists stay aligned.
            index = self.global_names.index(name)
            self.globals[index] = blob
        else:
            self.global_names.append(db.Text(name))
            self.globals.append(blob)
        self.remove_unpicklable_name(name)

    def remove_global(self, name):
        """Removes a global, if it exists.

        Args:
          name: string, the name of the global to remove
        """
        if name in self.global_names:
            # Remove from both parallel lists at the same index.
            index = self.global_names.index(name)
            del self.global_names[index]
            del self.globals[index]

    def globals_dict(self):
        """Returns a dictionary view of the globals.
        """
        return dict((name, pickle.loads(val))
                    for name, val in zip(self.global_names, self.globals))

    def add_unpicklable(self, statement, names):
        """Adds a statement and list of names to the unpicklables.

        Also removes the names from the globals.

        Args:
          statement: string, the statement that created new unpicklable global(s).
          names: list of strings; the names of the globals created by the statement.
        """
        self.unpicklables.append(db.Text(statement))
        for name in names:
            self.remove_global(name)
            if name not in self.unpicklable_names:
                self.unpicklable_names.append(db.Text(name))

    def remove_unpicklable_name(self, name):
        """Removes a name from the list of unpicklable names, if it exists.

        Args:
          name: string, the name of the unpicklable global to remove
        """
        if name in self.unpicklable_names:
            self.unpicklable_names.remove(name)
def main():
    """Placeholder entry point -- real unit tests are still missing."""
    logging.error("I should be running unit tests!")


if __name__ == '__main__':
    main()
| mit |
xianjunzhengbackup/Cloud-Native-Python | env/lib/python3.5/site-packages/pip/_vendor/requests/packages/urllib3/response.py | 360 | 18615 | from __future__ import absolute_import
from contextlib import contextmanager
import zlib
import io
from socket import timeout as SocketTimeout
from socket import error as SocketError
from ._collections import HTTPHeaderDict
from .exceptions import (
ProtocolError, DecodeError, ReadTimeoutError, ResponseNotChunked
)
from .packages.six import string_types as basestring, binary_type, PY3
from .packages.six.moves import http_client as httplib
from .connection import HTTPException, BaseSSLError
from .util.response import is_fp_closed, is_response_to_head
class DeflateDecoder(object):
def __init__(self):
self._first_try = True
self._data = binary_type()
self._obj = zlib.decompressobj()
def __getattr__(self, name):
return getattr(self._obj, name)
def decompress(self, data):
if not data:
return data
if not self._first_try:
return self._obj.decompress(data)
self._data += data
try:
return self._obj.decompress(data)
except zlib.error:
self._first_try = False
self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
try:
return self.decompress(self._data)
finally:
self._data = None
class GzipDecoder(object):
def __init__(self):
self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
def __getattr__(self, name):
return getattr(self._obj, name)
def decompress(self, data):
if not data:
return data
return self._obj.decompress(data)
def _get_decoder(mode):
if mode == 'gzip':
return GzipDecoder()
return DeflateDecoder()
class HTTPResponse(io.IOBase):
"""
HTTP Response container.
Backwards-compatible to httplib's HTTPResponse but the response ``body`` is
loaded and decoded on-demand when the ``data`` property is accessed. This
class is also compatible with the Python standard library's :mod:`io`
module, and can hence be treated as a readable object in the context of that
framework.
Extra parameters for behaviour not present in httplib.HTTPResponse:
:param preload_content:
If True, the response's body will be preloaded during construction.
:param decode_content:
If True, attempts to decode specific content-encoding's based on headers
(like 'gzip' and 'deflate') will be skipped and raw data will be used
instead.
:param original_response:
When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
object, it's convenient to include the original for debug purposes. It's
otherwise unused.
"""
CONTENT_DECODERS = ['gzip', 'deflate']
REDIRECT_STATUSES = [301, 302, 303, 307, 308]
def __init__(self, body='', headers=None, status=0, version=0, reason=None,
strict=0, preload_content=True, decode_content=True,
original_response=None, pool=None, connection=None):
if isinstance(headers, HTTPHeaderDict):
self.headers = headers
else:
self.headers = HTTPHeaderDict(headers)
self.status = status
self.version = version
self.reason = reason
self.strict = strict
self.decode_content = decode_content
self._decoder = None
self._body = None
self._fp = None
self._original_response = original_response
self._fp_bytes_read = 0
if body and isinstance(body, (basestring, binary_type)):
self._body = body
self._pool = pool
self._connection = connection
if hasattr(body, 'read'):
self._fp = body
# Are we using the chunked-style of transfer encoding?
self.chunked = False
self.chunk_left = None
tr_enc = self.headers.get('transfer-encoding', '').lower()
# Don't incur the penalty of creating a list and then discarding it
encodings = (enc.strip() for enc in tr_enc.split(","))
if "chunked" in encodings:
self.chunked = True
# If requested, preload the body.
if preload_content and not self._body:
self._body = self.read(decode_content=decode_content)
def get_redirect_location(self):
"""
Should we redirect and where to?
:returns: Truthy redirect location string if we got a redirect status
code and valid location. ``None`` if redirect status and no
location. ``False`` if not a redirect status code.
"""
if self.status in self.REDIRECT_STATUSES:
return self.headers.get('location')
return False
def release_conn(self):
if not self._pool or not self._connection:
return
self._pool._put_conn(self._connection)
self._connection = None
@property
def data(self):
# For backwords-compat with earlier urllib3 0.4 and earlier.
if self._body:
return self._body
if self._fp:
return self.read(cache_content=True)
@property
def connection(self):
return self._connection
def tell(self):
"""
Obtain the number of bytes pulled over the wire so far. May differ from
the amount of content returned by :meth:``HTTPResponse.read`` if bytes
are encoded on the wire (e.g, compressed).
"""
return self._fp_bytes_read
def _init_decoder(self):
"""
Set-up the _decoder attribute if necessar.
"""
# Note: content-encoding value should be case-insensitive, per RFC 7230
# Section 3.2
content_encoding = self.headers.get('content-encoding', '').lower()
if self._decoder is None and content_encoding in self.CONTENT_DECODERS:
self._decoder = _get_decoder(content_encoding)
def _decode(self, data, decode_content, flush_decoder):
"""
Decode the data passed in and potentially flush the decoder.
"""
try:
if decode_content and self._decoder:
data = self._decoder.decompress(data)
except (IOError, zlib.error) as e:
content_encoding = self.headers.get('content-encoding', '').lower()
raise DecodeError(
"Received response with content-encoding: %s, but "
"failed to decode it." % content_encoding, e)
if flush_decoder and decode_content:
data += self._flush_decoder()
return data
def _flush_decoder(self):
"""
Flushes the decoder. Should only be called if the decoder is actually
being used.
"""
if self._decoder:
buf = self._decoder.decompress(b'')
return buf + self._decoder.flush()
return b''
    @contextmanager
    def _error_catcher(self):
        """
        Catch low-level python exceptions, instead re-raising urllib3
        variants, so that low-level exceptions are not leaked in the
        high-level api.

        On exit, release the connection back to the pool.
        """
        # Tracks whether the wrapped block finished without raising; cleanup
        # in the finally-block keys off this flag.
        clean_exit = False
        try:
            try:
                yield
            except SocketTimeout:
                # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
                # there is yet no clean way to get at it from this context.
                raise ReadTimeoutError(self._pool, None, 'Read timed out.')
            except BaseSSLError as e:
                # FIXME: Is there a better way to differentiate between SSLErrors?
                if 'read operation timed out' not in str(e):  # Defensive:
                    # This shouldn't happen but just in case we're missing an edge
                    # case, let's avoid swallowing SSL errors.
                    raise
                raise ReadTimeoutError(self._pool, None, 'Read timed out.')
            except (HTTPException, SocketError) as e:
                # This includes IncompleteRead.
                raise ProtocolError('Connection broken: %r' % e, e)
            # If no exception is thrown, we should avoid cleaning up
            # unnecessarily.
            clean_exit = True
        finally:
            # If we didn't terminate cleanly, we need to throw away our
            # connection.
            if not clean_exit:
                # The response may not be closed but we're not going to use it
                # anymore so close it now to ensure that the connection is
                # released back to the pool.
                if self._original_response:
                    self._original_response.close()
                # Closing the response may not actually be sufficient to close
                # everything, so if we have a hold of the connection close that
                # too.
                if self._connection:
                    self._connection.close()
            # If we hold the original response but it's closed now, we should
            # return the connection back to the pool.
            if self._original_response and self._original_response.isclosed():
                self.release_conn()
    def read(self, amt=None, decode_content=None, cache_content=False):
        """
        Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
        parameters: ``decode_content`` and ``cache_content``.

        :param amt:
            How much of the content to read. If specified, caching is skipped
            because it doesn't make sense to cache partial content as the full
            response.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.

        :param cache_content:
            If True, will save the returned data such that the same result is
            returned despite of the state of the underlying file object. This
            is useful if you want the ``.data`` property to continue working
            after having ``.read()`` the file object. (Overridden if ``amt`` is
            set.)
        """
        self._init_decoder()
        if decode_content is None:
            # Fall back to the default chosen at construction time.
            decode_content = self.decode_content
        if self._fp is None:
            return
        flush_decoder = False
        data = None
        with self._error_catcher():
            if amt is None:
                # cStringIO doesn't like amt=None
                data = self._fp.read()
                flush_decoder = True
            else:
                # Partial reads cannot be cached as the full body.
                cache_content = False
                data = self._fp.read(amt)
                if amt != 0 and not data:  # Platform-specific: Buggy versions of Python.
                    # Close the connection when no data is returned
                    #
                    # This is redundant to what httplib/http.client _should_
                    # already do. However, versions of python released before
                    # December 15, 2012 (http://bugs.python.org/issue16298) do
                    # not properly close the connection in all cases. There is
                    # no harm in redundantly calling close.
                    self._fp.close()
                    flush_decoder = True
        if data:
            # Count raw (wire) bytes before any decompression happens.
            self._fp_bytes_read += len(data)
            data = self._decode(data, decode_content, flush_decoder)
            if cache_content:
                self._body = data
        return data
    def stream(self, amt=2**16, decode_content=None):
        """
        A generator wrapper for the read() method. A call will block until
        ``amt`` bytes have been read from the connection or until the
        connection is closed.

        :param amt:
            How much of the content to read. The generator will return up to
            much data per iteration, but may return less. This is particularly
            likely when using compressed data. However, the empty string will
            never be returned.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        """
        if self.chunked:
            # Chunked transfer-encoding has its own framing-aware reader.
            for line in self.read_chunked(amt, decode_content=decode_content):
                yield line
        else:
            while not is_fp_closed(self._fp):
                data = self.read(amt=amt, decode_content=decode_content)
                # Skip empty results so callers never receive b''.
                if data:
                    yield data
    @classmethod
    def from_httplib(ResponseCls, r, **response_kw):
        """
        Given an :class:`httplib.HTTPResponse` instance ``r``, return a
        corresponding :class:`urllib3.response.HTTPResponse` object.

        Remaining parameters are passed to the HTTPResponse constructor, along
        with ``original_response=r``.
        """
        headers = r.msg
        # Normalize httplib's header container into our HTTPHeaderDict.
        if not isinstance(headers, HTTPHeaderDict):
            if PY3:  # Python 3
                headers = HTTPHeaderDict(headers.items())
            else:  # Python 2
                headers = HTTPHeaderDict.from_httplib(headers)
        # HTTPResponse objects in Python 3 don't have a .strict attribute
        strict = getattr(r, 'strict', 0)
        resp = ResponseCls(body=r,
                           headers=headers,
                           status=r.status,
                           version=r.version,
                           reason=r.reason,
                           strict=strict,
                           original_response=r,
                           **response_kw)
        return resp
    # Backwards-compatibility methods for httplib.HTTPResponse
    def getheaders(self):
        # httplib compat shim: return the whole headers mapping object.
        return self.headers
def getheader(self, name, default=None):
return self.headers.get(name, default)
# Overrides from io.IOBase
def close(self):
if not self.closed:
self._fp.close()
if self._connection:
self._connection.close()
@property
def closed(self):
if self._fp is None:
return True
elif hasattr(self._fp, 'closed'):
return self._fp.closed
elif hasattr(self._fp, 'isclosed'): # Python 2
return self._fp.isclosed()
else:
return True
def fileno(self):
if self._fp is None:
raise IOError("HTTPResponse has no file to get a fileno from")
elif hasattr(self._fp, "fileno"):
return self._fp.fileno()
else:
raise IOError("The file-like object this HTTPResponse is wrapped "
"around has no file descriptor")
def flush(self):
if self._fp is not None and hasattr(self._fp, 'flush'):
return self._fp.flush()
def readable(self):
# This method is required for `io` module compatibility.
return True
def readinto(self, b):
# This method is required for `io` module compatibility.
temp = self.read(len(b))
if len(temp) == 0:
return 0
else:
b[:len(temp)] = temp
return len(temp)
    def _update_chunk_length(self):
        # First, we'll figure out length of a chunk and then
        # we'll try to read it from socket.
        if self.chunk_left is not None:
            # A partially-consumed chunk is still pending; keep its count.
            return
        line = self._fp.fp.readline()
        # Discard any chunk extensions after ';' (RFC 7230 section 4.1.1).
        line = line.split(b';', 1)[0]
        try:
            # The chunk-size field is hexadecimal.
            self.chunk_left = int(line, 16)
        except ValueError:
            # Invalid chunked protocol response, abort.
            self.close()
            raise httplib.IncompleteRead(line)
    def _handle_chunk(self, amt):
        """
        Read up to ``amt`` bytes from the current chunk (all of it when
        ``amt`` is None), consuming the trailing CRLF when the chunk ends.
        """
        returned_chunk = None
        if amt is None:
            # Consume the whole remaining chunk.
            chunk = self._fp._safe_read(self.chunk_left)
            returned_chunk = chunk
            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
        elif amt < self.chunk_left:
            # Partial read: chunk stays current, just decrement the remainder.
            value = self._fp._safe_read(amt)
            self.chunk_left = self.chunk_left - amt
            returned_chunk = value
        elif amt == self.chunk_left:
            # Exact read: chunk is finished, eat its CRLF terminator too.
            value = self._fp._safe_read(amt)
            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
            returned_chunk = value
        else:  # amt > self.chunk_left
            # Request spans past this chunk: return only what this chunk has.
            returned_chunk = self._fp._safe_read(self.chunk_left)
            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
        return returned_chunk
    def read_chunked(self, amt=None, decode_content=None):
        """
        Similar to :meth:`HTTPResponse.read`, but with an additional
        parameter: ``decode_content``.

        :param amt:
            Upper bound on bytes yielded per iteration; None reads whole
            chunks.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        """
        self._init_decoder()
        # FIXME: Rewrite this method and make it a class with a better structured logic.
        if not self.chunked:
            raise ResponseNotChunked(
                "Response is not chunked. "
                "Header 'transfer-encoding: chunked' is missing.")
        # Don't bother reading the body of a HEAD request.
        if self._original_response and is_response_to_head(self._original_response):
            self._original_response.close()
            return
        with self._error_catcher():
            while True:
                self._update_chunk_length()
                if self.chunk_left == 0:
                    # A zero-length chunk marks the end of the body.
                    break
                chunk = self._handle_chunk(amt)
                decoded = self._decode(chunk, decode_content=decode_content,
                                       flush_decoder=False)
                if decoded:
                    yield decoded
            if decode_content:
                # On CPython and PyPy, we should never need to flush the
                # decoder. However, on Jython we *might* need to, so
                # lets defensively do it anyway.
                decoded = self._flush_decoder()
                if decoded:  # Platform-specific: Jython.
                    yield decoded
            # Chunk content ends with \r\n: discard it.
            while True:
                line = self._fp.fp.readline()
                if not line:
                    # Some sites may not end with '\r\n'.
                    break
                if line == b'\r\n':
                    break
            # We read everything; close the "file".
            if self._original_response:
                self._original_response.close()
| mit |
akash1808/nova | nova/tests/unit/scheduler/filters/test_image_props_filters.py | 66 | 10627 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.compute import arch
from nova.compute import hv_type
from nova.compute import vm_mode
from nova.scheduler.filters import image_props_filter
from nova import test
from nova.tests.unit.scheduler import fakes
from nova import utils
class TestImagePropsFilter(test.NoDBTestCase):
    """Unit tests for the scheduler's ImagePropertiesFilter.

    Each test builds fake image properties plus fake host capabilities and
    asserts whether the filter accepts or rejects the host.
    """

    def setUp(self):
        super(TestImagePropsFilter, self).setUp()
        # Fresh filter instance per test.
        self.filt_cls = image_props_filter.ImagePropertiesFilter()

    def test_image_properties_filter_passes_same_inst_props_and_version(self):
        # BUG FIX: the key was previously '_architecture', which the filter
        # ignores entirely; use 'architecture' so architecture matching is
        # actually exercised by this test.
        img_props = {'properties': {'architecture': arch.X86_64,
                                    'hypervisor_type': hv_type.KVM,
                                    'vm_mode': vm_mode.HVM,
                                    'hypervisor_version_requires': '>=6.0,<6.2'
                                    }}
        filter_properties = {'request_spec': {'image': img_props}}
        hypervisor_version = utils.convert_version_to_int('6.0.0')
        capabilities = {'supported_instances':
                        [(arch.X86_64, hv_type.KVM, vm_mode.HVM)],
                        'hypervisor_version': hypervisor_version}
        host = fakes.FakeHostState('host1', 'node1', capabilities)
        self.assertTrue(self.filt_cls.host_passes(host, filter_properties))

    def test_image_properties_filter_fails_different_inst_props(self):
        img_props = {'properties': {'architecture': arch.ARMV7,
                                    'hypervisor_type': hv_type.QEMU,
                                    'vm_mode': vm_mode.HVM}}
        filter_properties = {'request_spec': {'image': img_props}}
        hypervisor_version = utils.convert_version_to_int('6.0.0')
        capabilities = {'supported_instances':
                        [(arch.X86_64, hv_type.KVM, vm_mode.HVM)],
                        'hypervisor_version': hypervisor_version}
        host = fakes.FakeHostState('host1', 'node1', capabilities)
        self.assertFalse(self.filt_cls.host_passes(host, filter_properties))

    def test_image_properties_filter_fails_different_hyper_version(self):
        img_props = {'properties': {'architecture': arch.X86_64,
                                    'hypervisor_type': hv_type.KVM,
                                    'vm_mode': vm_mode.HVM,
                                    'hypervisor_version_requires': '>=6.2'}}
        filter_properties = {'request_spec': {'image': img_props}}
        hypervisor_version = utils.convert_version_to_int('6.0.0')
        capabilities = {'enabled': True,
                        'supported_instances':
                        [(arch.X86_64, hv_type.KVM, vm_mode.HVM)],
                        'hypervisor_version': hypervisor_version}
        host = fakes.FakeHostState('host1', 'node1', capabilities)
        self.assertFalse(self.filt_cls.host_passes(host, filter_properties))

    def test_image_properties_filter_passes_partial_inst_props(self):
        img_props = {'properties': {'architecture': arch.X86_64,
                                    'vm_mode': vm_mode.HVM}}
        filter_properties = {'request_spec': {'image': img_props}}
        hypervisor_version = utils.convert_version_to_int('6.0.0')
        capabilities = {'supported_instances':
                        [(arch.X86_64, hv_type.KVM, vm_mode.HVM)],
                        'hypervisor_version': hypervisor_version}
        host = fakes.FakeHostState('host1', 'node1', capabilities)
        self.assertTrue(self.filt_cls.host_passes(host, filter_properties))

    def test_image_properties_filter_fails_partial_inst_props(self):
        img_props = {'properties': {'architecture': arch.X86_64,
                                    'vm_mode': vm_mode.HVM}}
        filter_properties = {'request_spec': {'image': img_props}}
        hypervisor_version = utils.convert_version_to_int('6.0.0')
        capabilities = {'supported_instances':
                        [(arch.X86_64, hv_type.XEN, vm_mode.XEN)],
                        'hypervisor_version': hypervisor_version}
        host = fakes.FakeHostState('host1', 'node1', capabilities)
        self.assertFalse(self.filt_cls.host_passes(host, filter_properties))

    def test_image_properties_filter_passes_without_inst_props(self):
        # No image at all in the request: the filter must accept any host.
        filter_properties = {'request_spec': {}}
        hypervisor_version = utils.convert_version_to_int('6.0.0')
        capabilities = {'supported_instances':
                        [(arch.X86_64, hv_type.KVM, vm_mode.HVM)],
                        'hypervisor_version': hypervisor_version}
        host = fakes.FakeHostState('host1', 'node1', capabilities)
        self.assertTrue(self.filt_cls.host_passes(host, filter_properties))

    def test_image_properties_filter_fails_without_host_props(self):
        # Host advertises no supported_instances: must reject.
        img_props = {'properties': {'architecture': arch.X86_64,
                                    'hypervisor_type': hv_type.KVM,
                                    'vm_mode': vm_mode.HVM}}
        filter_properties = {'request_spec': {'image': img_props}}
        hypervisor_version = utils.convert_version_to_int('6.0.0')
        capabilities = {'enabled': True,
                        'hypervisor_version': hypervisor_version}
        host = fakes.FakeHostState('host1', 'node1', capabilities)
        self.assertFalse(self.filt_cls.host_passes(host, filter_properties))

    def test_image_properties_filter_passes_without_hyper_version(self):
        img_props = {'properties': {'architecture': arch.X86_64,
                                    'hypervisor_type': hv_type.KVM,
                                    'vm_mode': vm_mode.HVM,
                                    'hypervisor_version_requires': '>=6.0'}}
        filter_properties = {'request_spec': {'image': img_props}}
        capabilities = {'enabled': True,
                        'supported_instances':
                        [(arch.X86_64, hv_type.KVM, vm_mode.HVM)]}
        host = fakes.FakeHostState('host1', 'node1', capabilities)
        self.assertTrue(self.filt_cls.host_passes(host, filter_properties))

    def test_image_properties_filter_fails_with_unsupported_hyper_ver(self):
        img_props = {'properties': {'architecture': arch.X86_64,
                                    'hypervisor_type': hv_type.KVM,
                                    'vm_mode': vm_mode.HVM,
                                    'hypervisor_version_requires': '>=6.0'}}
        filter_properties = {'request_spec': {'image': img_props}}
        capabilities = {'enabled': True,
                        'supported_instances':
                        [(arch.X86_64, hv_type.KVM, vm_mode.HVM)],
                        'hypervisor_version': 5000}
        host = fakes.FakeHostState('host1', 'node1', capabilities)
        self.assertFalse(self.filt_cls.host_passes(host, filter_properties))

    def test_image_properties_filter_pv_mode_compat(self):
        # if an old image has 'pv' for a vm_mode it should be treated as xen
        img_props = {'properties': {'vm_mode': 'pv'}}
        filter_properties = {'request_spec': {'image': img_props}}
        hypervisor_version = utils.convert_version_to_int('6.0.0')
        capabilities = {'supported_instances':
                        [(arch.X86_64, hv_type.XEN, vm_mode.XEN)],
                        'hypervisor_version': hypervisor_version}
        host = fakes.FakeHostState('host1', 'node1', capabilities)
        self.assertTrue(self.filt_cls.host_passes(host, filter_properties))

    def test_image_properties_filter_hvm_mode_compat(self):
        # if an old image has 'hv' for a vm_mode it should be treated as hvm
        img_props = {'properties': {'vm_mode': 'hv'}}
        filter_properties = {'request_spec': {'image': img_props}}
        hypervisor_version = utils.convert_version_to_int('6.0.0')
        capabilities = {'supported_instances':
                        [(arch.X86_64, hv_type.KVM, vm_mode.HVM)],
                        'hypervisor_version': hypervisor_version}
        host = fakes.FakeHostState('host1', 'node1', capabilities)
        self.assertTrue(self.filt_cls.host_passes(host, filter_properties))

    def test_image_properties_filter_xen_arch_compat(self):
        # if an old image has 'x86_32' for arch it should be treated as i686
        img_props = {'properties': {'architecture': 'x86_32'}}
        filter_properties = {'request_spec': {'image': img_props}}
        hypervisor_version = utils.convert_version_to_int('6.0.0')
        capabilities = {'supported_instances':
                        [(arch.I686, hv_type.KVM, vm_mode.HVM)],
                        'hypervisor_version': hypervisor_version}
        host = fakes.FakeHostState('host1', 'node1', capabilities)
        self.assertTrue(self.filt_cls.host_passes(host, filter_properties))

    def test_image_properties_filter_xen_hv_type_compat(self):
        # if an old image has 'xapi' for hv_type it should be treated as xen
        img_props = {'properties': {'hypervisor_type': 'xapi'}}
        filter_properties = {'request_spec': {'image': img_props}}
        hypervisor_version = utils.convert_version_to_int('6.0.0')
        capabilities = {'supported_instances':
                        [(arch.I686, hv_type.XEN, vm_mode.HVM)],
                        'hypervisor_version': hypervisor_version}
        host = fakes.FakeHostState('host1', 'node1', capabilities)
        self.assertTrue(self.filt_cls.host_passes(host, filter_properties))

    def test_image_properties_filter_baremetal_vmmode_compat(self):
        # if an old image has 'baremetal' for vmmode it should be
        # treated as hvm
        img_props = {'properties': {'vm_mode': 'baremetal'}}
        filter_properties = {'request_spec': {'image': img_props}}
        hypervisor_version = utils.convert_version_to_int('6.0.0')
        capabilities = {'supported_instances':
                        [(arch.I686, hv_type.BAREMETAL, vm_mode.HVM)],
                        'hypervisor_version': hypervisor_version}
        host = fakes.FakeHostState('host1', 'node1', capabilities)
        self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
| apache-2.0 |
tigeraniya/django-allauth | allauth/account/app_settings.py | 13 | 7700 | class AppSettings(object):
class AuthenticationMethod:
USERNAME = 'username'
EMAIL = 'email'
USERNAME_EMAIL = 'username_email'
class EmailVerificationMethod:
# After signing up, keep the user account inactive until the email
# address is verified
MANDATORY = 'mandatory'
# Allow login with unverified e-mail (e-mail verification is
# still sent)
OPTIONAL = 'optional'
# Don't send e-mail verification mails during signup
NONE = 'none'
def __init__(self, prefix):
self.prefix = prefix
# If login is by email, email must be required
assert (not self.AUTHENTICATION_METHOD
== self.AuthenticationMethod.EMAIL) or self.EMAIL_REQUIRED
# If login includes email, login must be unique
assert (self.AUTHENTICATION_METHOD
== self.AuthenticationMethod.USERNAME) or self.UNIQUE_EMAIL
assert (self.EMAIL_VERIFICATION
!= self.EmailVerificationMethod.MANDATORY) \
or self.EMAIL_REQUIRED
if not self.USER_MODEL_USERNAME_FIELD:
assert not self.USERNAME_REQUIRED
assert self.AUTHENTICATION_METHOD \
not in (self.AuthenticationMethod.USERNAME,
self.AuthenticationMethod.USERNAME_EMAIL)
def _setting(self, name, dflt):
from django.conf import settings
getter = getattr(settings,
'ALLAUTH_SETTING_GETTER',
lambda name, dflt: getattr(settings, name, dflt))
return getter(self.prefix + name, dflt)
@property
def DEFAULT_HTTP_PROTOCOL(self):
return self._setting("DEFAULT_HTTP_PROTOCOL", "http")
@property
def EMAIL_CONFIRMATION_EXPIRE_DAYS(self):
"""
Determines the expiration date of e-mail confirmation mails (#
of days)
"""
from django.conf import settings
return self._setting("EMAIL_CONFIRMATION_EXPIRE_DAYS",
getattr(settings, "EMAIL_CONFIRMATION_DAYS", 3))
@property
def EMAIL_CONFIRMATION_AUTHENTICATED_REDIRECT_URL(self):
"""
The URL to redirect to after a successful e-mail confirmation, in
case of an authenticated user
"""
return self._setting("EMAIL_CONFIRMATION_AUTHENTICATED_REDIRECT_URL",
None)
@property
def EMAIL_CONFIRMATION_ANONYMOUS_REDIRECT_URL(self):
"""
The URL to redirect to after a successful e-mail confirmation, in
case no user is logged in
"""
from django.conf import settings
return self._setting("EMAIL_CONFIRMATION_ANONYMOUS_REDIRECT_URL",
settings.LOGIN_URL)
@property
def EMAIL_REQUIRED(self):
"""
The user is required to hand over an e-mail address when signing up
"""
return self._setting("EMAIL_REQUIRED", False)
@property
def EMAIL_VERIFICATION(self):
"""
See e-mail verification method
"""
ret = self._setting("EMAIL_VERIFICATION",
self.EmailVerificationMethod.OPTIONAL)
# Deal with legacy (boolean based) setting
if ret is True:
ret = self.EmailVerificationMethod.MANDATORY
elif ret is False:
ret = self.EmailVerificationMethod.OPTIONAL
return ret
@property
def AUTHENTICATION_METHOD(self):
from django.conf import settings
if hasattr(settings, "ACCOUNT_EMAIL_AUTHENTICATION"):
import warnings
warnings.warn("ACCOUNT_EMAIL_AUTHENTICATION is deprecated,"
" use ACCOUNT_AUTHENTICATION_METHOD",
DeprecationWarning)
if getattr(settings, "ACCOUNT_EMAIL_AUTHENTICATION"):
ret = self.AuthenticationMethod.EMAIL
else:
ret = self.AuthenticationMethod.USERNAME
else:
ret = self._setting("AUTHENTICATION_METHOD",
self.AuthenticationMethod.USERNAME)
return ret
@property
def UNIQUE_EMAIL(self):
"""
Enforce uniqueness of e-mail addresses
"""
return self._setting("UNIQUE_EMAIL", True)
@property
def SIGNUP_PASSWORD_VERIFICATION(self):
"""
Signup password verification
"""
return self._setting("SIGNUP_PASSWORD_VERIFICATION", True)
@property
def PASSWORD_MIN_LENGTH(self):
"""
Minimum password Length
"""
return self._setting("PASSWORD_MIN_LENGTH", 6)
@property
def EMAIL_SUBJECT_PREFIX(self):
"""
Subject-line prefix to use for email messages sent
"""
return self._setting("EMAIL_SUBJECT_PREFIX", None)
@property
def SIGNUP_FORM_CLASS(self):
"""
Signup form
"""
return self._setting("SIGNUP_FORM_CLASS", None)
@property
def USERNAME_REQUIRED(self):
"""
The user is required to enter a username when signing up
"""
return self._setting("USERNAME_REQUIRED", True)
@property
def USERNAME_MIN_LENGTH(self):
"""
Minimum username Length
"""
return self._setting("USERNAME_MIN_LENGTH", 1)
@property
def USERNAME_BLACKLIST(self):
"""
List of usernames that are not allowed
"""
return self._setting("USERNAME_BLACKLIST", [])
@property
def PASSWORD_INPUT_RENDER_VALUE(self):
"""
render_value parameter as passed to PasswordInput fields
"""
return self._setting("PASSWORD_INPUT_RENDER_VALUE", False)
@property
def ADAPTER(self):
return self._setting('ADAPTER',
'allauth.account.adapter.DefaultAccountAdapter')
@property
def CONFIRM_EMAIL_ON_GET(self):
return self._setting('CONFIRM_EMAIL_ON_GET', False)
@property
def LOGIN_ON_EMAIL_CONFIRMATION(self):
"""
Autmatically log the user in once they confirmed their email address
"""
return self._setting('LOGIN_ON_EMAIL_CONFIRMATION', True)
@property
def LOGOUT_REDIRECT_URL(self):
return self._setting('LOGOUT_REDIRECT_URL', '/')
@property
def LOGOUT_ON_GET(self):
return self._setting('LOGOUT_ON_GET', False)
@property
def LOGOUT_ON_PASSWORD_CHANGE(self):
return self._setting('LOGOUT_ON_PASSWORD_CHANGE', False)
@property
def USER_MODEL_USERNAME_FIELD(self):
return self._setting('USER_MODEL_USERNAME_FIELD', 'username')
@property
def USER_MODEL_EMAIL_FIELD(self):
return self._setting('USER_MODEL_EMAIL_FIELD', 'email')
@property
def SESSION_COOKIE_AGE(self):
"""
Remembered sessions expire after this many seconds.
Defaults to 1814400 seconds which is 3 weeks.
"""
return self._setting('SESSION_COOKIE_AGE', 60 * 60 * 24 * 7 * 3)
@property
def SESSION_REMEMBER(self):
"""
Controls the life time of the session. Set to `None` to ask the user
("Remember me?"), `False` to not remember, and `True` to always
remember.
"""
return self._setting('SESSION_REMEMBER', None)
@property
def FORMS(self):
return self._setting('FORMS', {})
# Ugly? Guido recommends this himself ...
# http://mail.python.org/pipermail/python-ideas/2012-May/014969.html
import sys
app_settings = AppSettings('ACCOUNT_')
app_settings.__name__ = __name__
# Replace this module object in sys.modules with the AppSettings instance, so
# attribute access on the imported module resolves settings lazily through
# the instance's properties.
sys.modules[__name__] = app_settings
| mit |
jonobrien/School_Backups | cs1-python/Labs/week 8/hashtable_just docstrings to do.py | 1 | 9432 | """
Edited by: Jon O'Brien
Due date: 10/29/13
lab8 - hash tables
This program is used by word_count.py to take an input text file and output the
number of unique words, total number of words, the words that appeared the most,
and the word count. A hash table was used to store the values for the text file
and the table was resized and rehashed when it was almost full, being detected
by a function for capacity and load of the table. A class was setup to hold
the entries for the table and different hash functions were tested to determine
the effectiveness of a given hash function over the construction of the hash
table. Each key and value pair had to be tested for being already in the table
and had to be acquired with a hashcode and put into the table, which was rehashed
if necessary.
"""
"""
file: hashtable.py
language: python3
author: sps@cs.rit.edu Sean Strout
author: jeh@cs.rit.edu James Heliotis
author: anh@cs.rit.edu Arthur Nunes-Harwitt
author: jsb@cs.rit.edu Jeremy Brown
description: open addressing Hash Table for CS 141 Lecture
"""
import copy
class HashTable( ):
    """
    A chained hash table: ``table`` is a list of bucket lists holding
    key/value entries, and ``size`` counts occupied buckets.  No two
    entries share a key, though many keys may map to the same value.
    """
    __slots__ = ("table", "size")
def mkHashTable(capacity):
    """
    Factory for HashTable: build an empty table with *capacity* bucket
    chains and a size of zero.  Used both for initial construction and
    when rehashing into a larger table.
    """
    fresh = HashTable()
    fresh.table = [[] for _ in range(capacity)]
    fresh.size = 0
    return fresh
def HashTableToStr(hashtable):
    """
    Render every bucket of *hashtable* as an "index: entries" line.

    BUG FIX: the original passed the whole bucket list to EntryToStr(),
    which reads ``.key``/``.value`` and therefore raised AttributeError on
    any table; it also guarded on ``i != None``, which is always true for
    a range index.  Now each entry in the chain is formatted individually.
    """
    result = ""
    for i in range(len(hashtable.table)):
        result += str(i) + ": "
        result += " ".join(EntryToStr(e) for e in hashtable.table[i]) + "\n"
    return result
class _Entry( ):
    """One key/value pair stored in a HashTable bucket chain."""
    __slots__ = ("key", "value")
def EntryToStr(entry):
    """Format a key/value *entry* as the string "(key, value)"."""
    return "({}, {})".format(entry.key, entry.value)
def mkEntry(key, value):
    """Factory for _Entry: wrap *key* and *value* into a new entry object."""
    entry = _Entry()
    entry.key = key
    entry.value = value
    return entry
def given_hash( val, n ):
    """
    Map *val* to a bucket index in [0, n) using Python's builtin hash().
    This is the hash function originally supplied with the lab file.
    """
    return hash(val) % n
def hash_function(val, n):
    """
    Alternative hash: sum the ordinal value of every character in *val*
    and reduce modulo *n* to obtain a bucket index in [0, n).
    """
    return sum(ord(ch) for ch in val) % n
def keys( hTable ):
    """
    Return a list of every key stored in *hTable*, walking each bucket
    chain in table order.
    """
    return [entry.key for bucket in hTable.table for entry in bucket]
def contains( hTable, key ):
    """
    Return True iff *hTable* holds an entry under *key*.  Hashes the key
    to its bucket and scans only that chain.
    """
    bucket = hTable.table[hash_function(key, len(hTable.table))]
    return any(entry.key == key for entry in bucket)
def put( hTable, key, value ):
    """
    Using the given hash table, set the given key to the
    given value. If the key already exists, the given value
    will replace the previous one already in the table.
    If the table is full, an Exception is raised.
    The load is tested here to determine if rehashing is necessary and if the load is
    met or exceeded, then the table is rehashed and the functions are ran again until
    the table is full and there are no more key-value pairs to add to the table.
    """
    # Grow the table once load reaches 0.75 entries per bucket on average.
    ratio = load( hTable )
    if ratio >= .75:
        rehash( hTable)
    index = hash_function( key, len( hTable.table ) )
    if hTable.table[ index ] == []:
        # Empty bucket: start a new chain with this entry.
        # NOTE(review): this branch returns None while the others return
        # True -- callers should not rely on the return value.
        hTable.table[ index ] = [ mkEntry(key, value) ]
        hTable.size += 1
    else:
        for i in range( len( hTable.table[ index ] ) ):
            if hTable.table[ index ][ i ].key == key:
                # Key already present: overwrite its value in place.
                hTable.table[ index ][ i ].value = value
                return True
        # Collision with a different key: chain the new entry onto the bucket.
        # NOTE(review): ``size`` is not incremented here, so it counts
        # occupied buckets rather than entries -- confirm this is intended.
        hTable.table[ index ].append( mkEntry(key, value))
        return True
def get( hTable, key ):
    """
    Return the value associated with the given key in
    the given hash table.

    Precondition: contains(hTable, key)

    Hashes *key* to its bucket and scans that chain; raises an Exception
    when the key is absent (empty bucket or not found in the chain).
    """
    index = hash_function( key, len( hTable.table ) )
    for entry in hTable.table[ index ]:
        if entry.key == key:
            return entry.value
    raise Exception( "Hash table does not contain key." )
def imbalance( hTable ):
    """
    Measure the effectiveness of the hash function: the average length of
    all non-empty chains, minus one.  The closer to zero, the better the
    key distribution.
    """
    chain_lengths = [len(bucket) for bucket in hTable.table if bucket != []]
    return sum(chain_lengths) / len(chain_lengths) - 1
def load( hTable ):
    """
    Ratio of stored entries to bucket count.  Used by put() to decide
    when the table has become full enough to warrant a rehash.
    """
    bucket_count = len(hTable.table)
    entry_count = sum(len(bucket) for bucket in hTable.table)
    return entry_count / bucket_count
def rehash( hTable):
    """
    Grow *hTable* when it starts to fill up: allocate a table with
    2n+1 buckets, re-insert every existing entry, and swap the new
    table/size into *hTable* in place.  Returns the new table object.

    FIX: removed a stray debug ``print(newN)`` (marked '#testing') and
    dead commented-out code left over from development.
    """
    newN = ( 2 * len( hTable.table ) ) + 1
    newTable = mkHashTable( newN )
    for i in range( len( hTable.table ) ):
        for item in hTable.table[ i ]:
            # put() recomputes each entry's bucket for the new capacity.
            put(newTable, item.key, item.value)
    hTable.table = newTable.table
    hTable.size = newTable.size
    return newTable
| gpl-3.0 |
igoralmeida/tahoe-lafs | src/allmydata/scripts/tahoe_ls.py | 8 | 6315 |
import urllib, time
import simplejson
from allmydata.scripts.common import get_alias, DEFAULT_ALIAS, escape_path, \
UnknownAliasError
from allmydata.scripts.common_http import do_http, format_http_error
from allmydata.util.encodingutil import unicode_to_output, quote_output, is_printable_ascii, to_str
def list(options):
    """
    Implement the 'tahoe ls' CLI command: fetch the JSON description of a
    directory (or single file) from the node's webapi and print an
    ls-style listing.

    Returns 0 on success, 1 on alias/JSON/encoding errors, 2 when the
    path does not exist, 3 (or the HTTP status) on other HTTP failures.

    NOTE: this function shadows the builtin ``list`` within this module;
    renaming would break callers, so it is left as-is.
    """
    nodeurl = options['node-url']
    aliases = options.aliases
    where = options.where
    stdout = options.stdout
    stderr = options.stderr
    if not nodeurl.endswith("/"):
        nodeurl += "/"
    if where.endswith("/"):
        where = where[:-1]
    try:
        rootcap, path = get_alias(aliases, where, DEFAULT_ALIAS)
    except UnknownAliasError, e:
        e.display(stderr)
        return 1
    # Build the webapi URL for the target object and request its JSON form.
    url = nodeurl + "uri/%s" % urllib.quote(rootcap)
    if path:
        # move where.endswith check here?
        url += "/" + escape_path(path)
    assert not url.endswith("/")
    url += "?t=json"
    resp = do_http("GET", url)
    if resp.status == 404:
        print >>stderr, "No such file or directory"
        return 2
    if resp.status != 200:
        print >>stderr, format_http_error("Error during GET", resp)
        if resp.status == 0:
            return 3
        else:
            return resp.status
    data = resp.read()
    if options['json']:
        # The webapi server should always output printable ASCII.
        if is_printable_ascii(data):
            print >>stdout, data
            return 0
        else:
            print >>stderr, "The JSON response contained unprintable characters:"
            print >>stderr, quote_output(data, quotemarks=False)
            return 1
    try:
        parsed = simplejson.loads(data)
    except Exception, e:
        print >>stderr, "error: %s" % quote_output(e.args[0], quotemarks=False)
        print >>stderr, "Could not parse JSON response:"
        print >>stderr, quote_output(data, quotemarks=False)
        return 1
    nodetype, d = parsed
    # Normalize: a directory yields its children dict; a lone file is
    # wrapped as a one-entry children dict keyed by its final path segment.
    children = {}
    if nodetype == "dirnode":
        children = d['children']
    else:
        # paths returned from get_alias are always valid UTF-8
        childname = path.split("/")[-1].decode('utf-8')
        children = {childname: (nodetype, d)}
        if "metadata" not in d:
            d["metadata"] = {}
    childnames = sorted(children.keys())
    now = time.time()
    # we build up a series of rows, then we loop through them to compute a
    # maxwidth so we can format them tightly. Size, filename, and URI are the
    # variable-width ones.
    rows = []
    has_unknowns = False
    for name in childnames:
        child = children[name]
        name = unicode(name)
        childtype = child[0]
        # See webapi.txt for a discussion of the meanings of unix local
        # filesystem mtime and ctime, Tahoe mtime and ctime, and Tahoe
        # linkmotime and linkcrtime.
        ctime = child[1].get("metadata", {}).get('tahoe', {}).get("linkcrtime")
        if not ctime:
            ctime = child[1]["metadata"].get("ctime")
        mtime = child[1].get("metadata", {}).get('tahoe', {}).get("linkmotime")
        if not mtime:
            mtime = child[1]["metadata"].get("mtime")
        rw_uri = to_str(child[1].get("rw_uri"))
        ro_uri = to_str(child[1].get("ro_uri"))
        if ctime:
            # match for formatting that GNU 'ls' does
            if (now - ctime) > 6*30*24*60*60:
                # old files
                fmt = "%b %d %Y"
            else:
                fmt = "%b %d %H:%M"
            ctime_s = time.strftime(fmt, time.localtime(ctime))
        else:
            ctime_s = "-"
        # Classify the entry: 'd'=directory, '-'=file, '?'=unknown object.
        if childtype == "dirnode":
            t0 = "d"
            size = "-"
            classify = "/"
        elif childtype == "filenode":
            t0 = "-"
            size = str(child[1].get("size", "?"))
            classify = ""
            if rw_uri:
                classify = "*"
        else:
            has_unknowns = True
            t0 = "?"
            size = "?"
            classify = "?"
        # Mode-like flags: r=readable cap, w=writable cap, x=traversable dir.
        t1 = "-"
        if ro_uri:
            t1 = "r"
        t2 = "-"
        if rw_uri:
            t2 = "w"
        t3 = "-"
        if childtype == "dirnode":
            t3 = "x"
        uri = rw_uri or ro_uri
        line = []
        if options["long"]:
            line.append(t0+t1+t2+t3)
            line.append(size)
            line.append(ctime_s)
        if not options["classify"]:
            classify = ""
        encoding_error = False
        try:
            line.append(unicode_to_output(name) + classify)
        except UnicodeEncodeError:
            encoding_error = True
            line.append(quote_output(name) + classify)
        if options["uri"]:
            line.append(uri)
        if options["readonly-uri"]:
            line.append(quote_output(ro_uri or "-", quotemarks=False))
        rows.append((encoding_error, line))
    # First pass over the rows: compute per-column widths; URI columns are
    # left-justified so the variable-length caps line up readably.
    max_widths = []
    left_justifys = []
    for (encoding_error, row) in rows:
        for i,cell in enumerate(row):
            while len(max_widths) <= i:
                max_widths.append(0)
            while len(left_justifys) <= i:
                left_justifys.append(False)
            max_widths[i] = max(max_widths[i], len(cell))
            if cell.startswith("URI"):
                left_justifys[i] = True
    if len(left_justifys) == 1:
        left_justifys[0] = True
    # Build a printf-style format string from the computed widths.
    fmt_pieces = []
    for i in range(len(max_widths)):
        piece = "%"
        if left_justifys[i]:
            piece += "-"
        piece += str(max_widths[i])
        piece += "s"
        fmt_pieces.append(piece)
    fmt = " ".join(fmt_pieces)
    rc = 0
    # Rows with names the terminal cannot encode go to stderr (quoted).
    for (encoding_error, row) in rows:
        if encoding_error:
            print >>stderr, (fmt % tuple(row)).rstrip()
            rc = 1
        else:
            print >>stdout, (fmt % tuple(row)).rstrip()
    if rc == 1:
        print >>stderr, "\nThis listing included files whose names could not be converted to the terminal" \
                        "\noutput encoding. Their names are shown using backslash escapes and in quotes."
    if has_unknowns:
        print >>stderr, "\nThis listing included unknown objects. Using a webapi server that supports" \
                        "\na later version of Tahoe may help."
    return rc
| gpl-2.0 |
01org/meta-security-isafw | lib/isafw/isaplugins/__init__.py | 2 | 1950 | #
# __init__.py - part of ISA FW
#
# Copyright (c) 2015 - 2016, Intel Corporation
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import glob
import keyword
import os
import sys
# Auto-import every plugin module that lives next to this __init__.py and
# expose it through __all__, so ``from isaplugins import *`` loads them all.
basedir = os.path.dirname(__file__)
__all__ = []
for name in glob.glob(os.path.join(basedir, '*.py')):
    module = os.path.splitext(os.path.split(name)[-1])[0]
    # skip private helpers (leading '_') and names that would clash with
    # Python keywords, since those cannot be imported as attributes
    if not module.startswith('_') and not keyword.iskeyword(module):
        __import__(__name__ + '.' + module)
        __all__.append(module)
__all__.sort()
| mit |
bguillot/OpenUpgrade | addons/project/__openerp__.py | 62 | 2843 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# OpenERP/Odoo addon manifest: a single dict literal evaluated by the server
# at module-discovery time.  Keys follow the standard manifest schema.
{
    'name': 'Project Management',
    'version': '1.1',
    'author': 'OpenERP SA',
    'website': 'http://www.openerp.com',
    'category': 'Project Management',
    'sequence': 8,
    'summary': 'Projects, Tasks',
    'images': [
        'images/gantt.png',
        'images/project_task_tree.jpeg',
        'images/project_task.jpeg',
        'images/project.jpeg',
        'images/task_analysis.jpeg',
        'images/project_kanban.jpeg',
        'images/task_kanban.jpeg',
        'images/task_stages.jpeg'
    ],
    # modules that must be installed before this one
    'depends': [
        'base_setup',
        'product',
        'analytic',
        'board',
        'mail',
        'resource',
        'web_kanban'
    ],
    'description': """
Track multi-level projects, tasks, work done on tasks
=====================================================
This application allows an operational project management system to organize your activities into tasks and plan the work you need to get the tasks completed.
Gantt diagrams will give you a graphical representation of your project plans, as well as resources availability and workload.
Dashboard / Reports for Project Management will include:
--------------------------------------------------------
* My Tasks
* Open Tasks
* Tasks Analysis
* Cumulative Flow
""",
    # data files are loaded in this order on install/update
    'data': [
        'security/project_security.xml',
        'wizard/project_task_delegate_view.xml',
        'security/ir.model.access.csv',
        'project_data.xml',
        'project_view.xml',
        'res_partner_view.xml',
        'report/project_report_view.xml',
        'report/project_cumulative.xml',
        'res_config_view.xml',
        'views/project.xml',
    ],
    'demo': ['project_demo.xml'],
    'test': [
    ],
    'installable': True,
    'auto_install': False,
    'application': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
benthomasson/db-designer | db_designer/db_designer_fsm.py | 1 | 24076 |
import os
import yaml
import traceback
import logging
import random
import re
from models import Table, Column, ForeignKey
from widgets import Wheel, MagnifyingGlassMousePointer, MoveMousePointer
def snake_case(name):
    """
    Convert a CamelCase identifier to snake_case.

    Adapted from: http://stackoverflow.com/questions/1175208/elegant-python-function-to-convert-camelcase-to-camel-case
    """
    # First split runs like "CamelCase" -> "Camel_Case" (uppercase followed
    # by lowercase), then handle the remaining lower/digit->upper boundaries
    # (e.g. "HTTPServer" -> "HTTP_Server"), and lowercase the result.
    partially_split = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
    fully_split = re.sub('([a-z0-9])([A-Z])', r'\1_\2', partially_split)
    return fully_split.lower()
def singleton(klass):
    """Class decorator: replace the class with its one and only instance."""
    instance = klass()
    return instance
def transition(new_state):
    """
    Method decorator declaring that the decorated event handler may move
    the state machine into ``new_state``.

    ``new_state`` may be a state name (string), a State subclass, or a
    (singleton) State instance; it is normalized to the class name and
    accumulated on the function's ``state_transitions`` attribute, so a
    handler can be stacked with several @transition decorators.

    NOTE: relies on the Python 2 ``basestring`` builtin — this file
    targets Processing's Python mode.
    """
    def called_on(fn):
        transitions = getattr(fn, 'state_transitions', [])
        if isinstance(new_state, basestring):
            transitions.append(new_state)
        elif isinstance(new_state, type):
            # a class object: record its name
            transitions.append(new_state.__name__)
        elif isinstance(type(new_state), type):
            # an instance (singleton state): record its class name
            transitions.append(new_state.__class__.__name__)
        else:
            raise Exception('Unsupported type {0}'.format(new_state))
        setattr(fn, 'state_transitions', transitions)
        return fn
    return called_on
class State(object):
    """
    Base class for FSM states.  Every Processing event of interest has a
    hook here; the defaults do nothing, so concrete states override only
    the events they care about.
    """

    def name(self):
        """Return the state's display name (its class name)."""
        return self.__class__.__name__

    def start(self, controller):
        """Called when the FSM enters this state."""

    def end(self, controller):
        """Called when the FSM leaves this state."""

    def mousePressed(self, controller):
        """A mouse button was pressed."""

    def mouseReleased(self, controller):
        """A mouse button was released."""

    def mouseDragged(self, controller):
        """The mouse moved with a button held down."""

    def keyPressed(self, controller):
        """A key went down."""

    def keyReleased(self, controller):
        """A key went up."""

    def keyTyped(self, controller):
        """A printable key was typed."""

    def fileSelected(self, controller, selected):
        """A file chooser dialog finished."""
@singleton
class Start(State):
    """Bootstrap state: the FSM begins here and immediately moves on."""
    @transition('ReadyState')
    def start(self, controller):
        # Nothing to initialize; hand over to the idle state straight away.
        controller.changeState(ReadyState)
@singleton
class MouseSelect(State):
    """
    Transient state entered on mouse-press: hit-tests tables and columns
    and dispatches to the matching interaction state.
    """
    @transition('SelectedTable')
    @transition('ColumnEdit')
    @transition('MenuWheel')
    @transition('ReadyState')
    @transition('ScaleAndPan')
    def start(self, controller):
        changed_state = False
        if mouseButton == LEFT:
            # Select whichever table is under the cursor; deselect the rest.
            for table in controller.tables:
                if table.is_selected(controller):
                    controller.selected_table = table
                    table.selected = True
                    controller.changeState(SelectedTable)
                    changed_state = True
                else:
                    table.selected = False
        if controller.selected_table:
            # Clicking a column of the selected table starts editing it.
            table = controller.selected_table
            for column in table.columns:
                if column.is_selected(controller):
                    controller.editing_column = column
                    controller.changeState(ColumnEdit)
                    changed_state = True
        elif controller.selected_table is None and mouseButton == RIGHT:
            controller.changeState(MenuWheel)
            changed_state = True
        # Nothing was hit: left-drag pans/zooms, right-click returns to idle.
        if changed_state is False and mouseButton == LEFT:
            controller.changeState(ScaleAndPan)
        if changed_state is False and mouseButton == RIGHT:
            controller.changeState(ReadyState)
@singleton
class ReadyState(State):
    """Idle state: waiting for the user to click, drag, or type."""
    @transition('MouseSelect')
    def mousePressed(self, controller):
        # Remember the press position (in world coordinates) and let
        # MouseSelect figure out what was hit.
        controller.mousePressedX = controller.mousePX
        controller.mousePressedY = controller.mousePY
        controller.selected_table = None
        controller.changeState(MouseSelect)
    @transition('MenuWheel')
    def mouseDragged(self, controller):
        # NOTE(review): keyCode here is whatever Processing last recorded —
        # it may be stale if no key is currently held; verify intent.
        if mouseButton == RIGHT or keyCode == CONTROL:
            controller.changeState(MenuWheel)
    def keyTyped(self, controller):
        if key == CODED:
            pass
        elif key == "d":
            # toggle the on-screen debug overlay
            controller.debug = not controller.debug
@singleton
class ScaleAndPan(State):
    """Pan the canvas with a left drag; zoom instead when ALT is held."""
    def start(self, controller):
        # Anchor the gesture: remember where it started and the view's
        # pan/zoom at that moment, so dragging is relative, not absolute.
        controller.mousePressedX = mouseX
        controller.mousePressedY = mouseY
        controller.oldPanX = controller.panX
        controller.oldPanY = controller.panY
        controller.oldScaleXY = controller.scaleXY
        if controller.lastKeyCode == ALT:
            controller.mouse_pointer = MagnifyingGlassMousePointer()
        else:
            controller.mouse_pointer = MoveMousePointer()
    def end(self, controller):
        controller.mouse_pointer = None
    def mouseDragged(self, controller):
        if mouseButton == LEFT and controller.lastKeyCode == ALT:
            # Zoom: vertical drag distance adjusts the scale (clamped at
            # 0.1); the pan terms keep the press point visually fixed.
            controller.scaleXY = max(0.1, (controller.mousePressedY - mouseY) / 100.0 + controller.oldScaleXY)
            controller.panX = controller.oldPanX + (-1 * controller.mousePressedX / controller.oldScaleXY) + (controller.mousePressedX / controller.scaleXY)
            controller.panY = controller.oldPanY + (-1 * controller.mousePressedY / controller.oldScaleXY) + (controller.mousePressedY / controller.scaleXY)
        elif mouseButton == LEFT:
            # Pan: translate the view by the drag delta in world units.
            controller.panX = (mouseX - controller.mousePressedX) / controller.scaleXY + controller.oldPanX
            controller.panY = (mouseY - controller.mousePressedY) / controller.scaleXY + controller.oldPanY
    @transition('ReadyState')
    def mouseReleased(self, controller):
        controller.lastKeyCode = 0
        controller.mouse_pointer = None
        controller.changeState(ReadyState)
    def keyPressed(self, controller):
        controller.lastKeyCode = keyCode
        if controller.lastKeyCode == ALT:
            controller.mouse_pointer = MagnifyingGlassMousePointer()
    def keyReleased(self, controller):
        controller.lastKeyCode = 0
        controller.mouse_pointer = MoveMousePointer()
@singleton
class MenuWheel(State):
    """Radial menu shown on right-click; dispatches on the chosen entry."""

    def start(self, controller):
        # Pop the wheel up under the cursor.
        controller.wheel = Wheel(mouseX, mouseY)

    def end(self, controller):
        controller.wheel = None

    @transition('NewTable')
    @transition('Save')
    @transition('Load')
    @transition('ReadyState')
    def mouseReleased(self, controller):
        # Map the wheel's menu entry to its state; anything else (including
        # releasing outside the wheel) falls back to the idle state.
        choice = controller.wheel.get_menu_selection()
        targets = {"New": NewTable, "Save": Save, "Load": Load}
        controller.changeState(targets.get(choice, ReadyState))
@singleton
class Load(State):
    """Load a diagram: pop a file chooser, then rebuild tables from YAML."""
    def start(self, controller):
        # Processing invokes the sketch-level fileSelected() hook, which
        # forwards back into this state's fileSelected().
        selectInput("Input file", "fileSelected")
    @transition('ReadyState')
    def fileSelected(self, controller, selection):
        try:
            print selection, type(selection)
            if selection:
                new_tables = []
                selection_file_name = selection.getAbsolutePath()
                logging.debug(selection_file_name)
                controller.directory = os.path.dirname(selection.getAbsolutePath())
                # NOTE(review): yaml.load without SafeLoader can execute
                # arbitrary tags; acceptable only for trusted local files.
                with open(selection_file_name) as f:
                    d = yaml.load(f.read())
                print d
                controller.app_name = d.get('app', os.path.splitext(os.path.basename(selection_file_name))[0])
                for model in d.get('external_models', []):
                    model['external'] = True
                # First pass: materialize every model as a Table with its
                # columns; missing coordinates get random on-screen spots.
                for model in d.get('external_models', []) + d.get('models', []):
                    table = Table(name=model.get('name'),
                                  x=model.get('x', random.randrange(int(controller.panX), int(width*controller.scaleXY + controller.panX))),
                                  y=model.get('y', random.randrange(int(controller.panY), int(height*controller.scaleXY + controller.panY))),
                                  display=model.get('display'),
                                  natural_key=model.get('natural_key'),
                                  natural_keys=model.get('natural_keys', []),
                                  ordering=model.get('ordering', []),
                                  external=model.get('external', False),
                                  view=model.get('view', False),
                                  extra=model.get('extra', False))
                    new_tables.append(table)
                    print "new table:", table
                    for field in model.get('fields'):
                        name = field.get('name')
                        ftype = field.get('type')
                        flen = field.get('len')
                        fdefault = field.get('default')
                        pk = field.get('pk', False)
                        related_name = field.get('related_name', None)
                        # Column display name packs "name:type:len-or-default",
                        # dropping missing parts.
                        column = Column(name=":".join(map(str, filter(lambda x: x is not None, [name, ftype, flen or fdefault]))),
                                        x=model.get('x', 0),
                                        y=model.get('y', 0),
                                        pk=pk,
                                        related_name=related_name,
                                        table=table)
                        print name, related_name
                        table.columns.append(column)
                # Second pass: resolve foreign keys between tables, creating
                # stub external tables/columns when a reference points at
                # something not declared in the file.
                for model in d.get('models'):
                    ts = [t for t in new_tables if t.name == model.get('name')]
                    assert len(ts) == 1, "Duplicate model %s" % model.get('name')
                    from_table = ts[0]
                    for field in model.get('fields'):
                        if field.get('ref') and field.get('ref_field'):
                            cs = [c for c in from_table.columns if c.name.partition(":")[0] == field.get('name')]
                            assert len(cs) == 1
                            from_column = cs[0]
                            ts = [t for t in new_tables if t.name == field.get('ref')]
                            if len(ts) == 0:
                                new_table = Table(name=field.get('ref'),
                                                  x=random.randrange(int(controller.panX), int(width*controller.scaleXY + controller.panX)),
                                                  y=random.randrange(int(controller.panX), int(width*controller.scaleXY + controller.panX)),
                                                  external=True)
                                new_tables.append(new_table)
                                print "new external table:", table
                                ts = [new_table]
                            assert len(ts) == 1, repr(ts)
                            to_table = ts[0]
                            cs = [c for c in to_table.columns if c.name.partition(":")[0] == field.get('ref_field')]
                            if len(cs) == 0:
                                new_column = Column(name=field.get('ref_field'), x=0, y=0, pk=True, table=to_table)
                                to_table.columns.append(new_column)
                                cs = [new_column]
                            assert len(cs) == 1
                            to_column = cs[0]
                            from_column.connectors = [ForeignKey(from_column=from_column, to_column=to_column)]
                        elif field.get('ref'):
                            # Reference without an explicit target field:
                            # connect to the target table's first column.
                            cs = [c for c in from_table.columns if c.name.partition(":")[0] == field.get('name')]
                            if len(cs) == 1:
                                from_column = cs[0]
                                ts = [t for t in new_tables if t.name == field.get('ref')]
                                if len(ts) == 0:
                                    to_column = Column(name='pk')
                                    to_table = Table(name=field.get('ref'), columns=[to_column])
                                    to_column.table = to_table
                                    new_tables.append(to_table)
                                    from_column.connectors = [ForeignKey(from_column=from_column, to_column=to_column)]
                                elif len(ts) == 1:
                                    to_table = ts[0]
                                    to_column = to_table.columns[0]
                                    from_column.connectors = [ForeignKey(from_column=from_column, to_column=to_column)]
                                else:
                                    print "When connecting {0}.{1} expected one table named {2} found {3}".format(from_table.name,
                                                                                                                  from_column.name,
                                                                                                                  field.get('ref'),
                                                                                                                  ts)
                            else:
                                print "When connecting {0}.{1} expected one column named {2} found {3}".format(from_table.name,
                                                                                                               field.get('name'),
                                                                                                               field.get('name'),
                                                                                                               cs)
                # Restore the saved viewport and app-level settings.
                view_d = d.get('view', {})
                controller.modules = d.get('modules', [])
                controller.api = d.get('api', None)
                controller.generate = d.get('generate', None)
                controller.panX = view_d.get('panX', 0)
                controller.panY = view_d.get('panY', 0)
                controller.scaleXY = view_d.get('scaleXY', 1)
                controller.tables = new_tables
                print "Read from {0}".format(selection)
            controller.changeState(ReadyState)
        except Exception:
            print traceback.format_exc()
@singleton
class Save(State):
    """Save the diagram: pop a file chooser, then dump everything to YAML."""
    def start(self, controller):
        # Processing invokes the sketch-level fileSelected() hook, which
        # forwards back into this state's fileSelected().
        selectOutput("Output file", "fileSelected")
    @transition('ReadyState')
    def fileSelected(self, controller, selection):
        try:
            print selection, type(selection)
            if selection:
                # Assemble the serializable app description: name (from the
                # chosen file), viewport, settings, and all tables.
                app = {}
                app['app'] = os.path.splitext(os.path.basename(selection.getAbsolutePath()))[0]
                controller.directory = os.path.dirname(selection.getAbsolutePath())
                app['view'] = dict(panX=controller.panX, panY=controller.panY, scaleXY=controller.scaleXY)
                app['modules'] = controller.modules
                if controller.api:
                    app['api'] = controller.api
                if controller.generate:
                    app['generate'] = controller.generate
                controller.app_name = app['app']
                app['models'] = [t.to_dict() for t in controller.tables if not t.external]
                app['external_models'] = [t.to_dict() for t in controller.tables if t.external]
                with open(selection.getAbsolutePath(), 'w') as f:
                    f.write(yaml.safe_dump(app, default_flow_style=False))
                print "Wrote to {0}".format(selection)
            controller.changeState(ReadyState)
        except Exception:
            print traceback.format_exc()
@singleton
class SelectedTable(State):
    """A table is selected: clicks edit it, drags move it, delete removes it."""
    def start(self, controller):
        if controller.selected_table:
            # Keep exactly one trailing empty column as the "add column" slot.
            controller.selected_table.delete_empty_columns()
            controller.selected_table.add_empty_column()
    def end(self, controller):
        if controller.selected_table:
            controller.selected_table.delete_empty_columns()
    @transition('NameEdit')
    @transition('ColumnEdit')
    @transition('MouseSelect')
    def mousePressed(self, controller):
        table = controller.selected_table
        # Click outside the table: deselect and redo hit-testing.
        if not (controller.mousePX > table.left_extent and
                controller.mousePX < table.right_extent and
                controller.mousePY > table.top_extent and
                controller.mousePY < table.bottom_extent):
            self.end(controller)
            table.selected = False
            controller.selected_table = None
            controller.changeState(MouseSelect)
            return
        # Click on the title bar: edit the table's name.
        if (controller.mousePX > table.left_title_extent and
                controller.mousePX < table.right_title_extent and
                controller.mousePY > table.top_title_extent and
                controller.mousePY < table.bottom_title_extent):
            controller.changeState(NameEdit)
            return
        # Click on a column row: edit that column.
        for column in table.columns:
            if (controller.mousePX > column.left_extent and
                    controller.mousePX < column.right_extent and
                    controller.mousePY > column.top_extent and
                    controller.mousePY < column.bottom_extent):
                controller.editing_column = column
                controller.changeState(ColumnEdit)
                return
    @transition('MoveTable')
    def mouseDragged(self, controller):
        controller.changeState(MoveTable)
    # NOTE(review): 'Ready' does not match any state class name ('ReadyState'
    # is the actual target below) — the transition metadata looks like a typo.
    @transition('Ready')
    def keyReleased(self, controller):
        # keyCode 8 is backspace in Processing's key handling
        if keyCode == 8:
            controller.selected_table.selected = False
            controller.tables.remove(controller.selected_table)
            controller.changeState(ReadyState)
    @transition('ReadyState')
    def keyTyped(self, controller):
        if key == DELETE or key == BACKSPACE:
            controller.selected_table.selected = False
            controller.tables.remove(controller.selected_table)
            controller.changeState(ReadyState)
@singleton
class NewTable(State):
    """Create a fresh table at the mouse position, then return to idle."""

    @transition('ReadyState')
    def start(self, controller):
        new_table = Table(name="New", x=controller.mousePX, y=controller.mousePY)
        controller.tables.append(new_table)
        controller.changeState(ReadyState)
@singleton
class MoveTable(State):
    """Dragging the selected table around the canvas."""

    def start(self, controller):
        # Remember the grab offset so the table does not jump to the cursor.
        grabbed = controller.selected_table
        controller.diffX = controller.mousePX - grabbed.x
        controller.diffY = controller.mousePY - grabbed.y

    def mouseDragged(self, controller):
        grabbed = controller.selected_table
        if grabbed:
            grabbed.x = controller.mousePX - controller.diffX
            grabbed.y = controller.mousePY - controller.diffY

    @transition('SelectedTable')
    def mouseReleased(self, controller):
        controller.changeState(SelectedTable)
@singleton
class NameEdit(State):
    """Editing the selected table's name via keyboard input."""
    def start(self, controller):
        if controller.selected_table:
            controller.selected_table.edit = True
    def end(self, controller):
        if controller.selected_table:
            controller.selected_table.edit = False
            # A freshly named table gets an auto primary-key column if it
            # does not have a meaningful first column yet.
            if len(controller.selected_table.columns) == 0 or controller.selected_table.columns[0].name.strip() == "":
                controller.selected_table.columns.insert(0, Column(name="{0}_id:AutoField".format(snake_case(controller.selected_table.name)),
                                                                   table=controller.selected_table,
                                                                   x=0,
                                                                   y=0,
                                                                   pk=True))
    @transition('MouseSelect')
    def mousePressed(self, controller):
        # Clicking outside the title area cancels the edit and restarts
        # hit-testing from scratch.
        table = controller.selected_table
        if not (controller.mousePX > table.left_title_extent and
                controller.mousePX < table.right_title_extent and
                controller.mousePY > table.top_title_extent and
                controller.mousePY < table.bottom_title_extent):
            controller.selected_table.edit = False
            controller.selected_table = None
            controller.changeState(MouseSelect)
    @transition('MoveTable')
    def mouseDragged(self, controller):
        controller.changeState(MoveTable)
    def keyReleased(self, controller):
        # keyCode 8 is backspace in Processing's key handling
        if keyCode == 8:
            controller.selected_table.name = controller.selected_table.name[:-1]
    @transition('SelectedTable')
    def keyTyped(self, controller):
        if key == CODED:
            if keyCode == 8:
                controller.selected_table.name = controller.selected_table.name[:-1]
        else:
            # RETURN/ENTER commit the name; BACKSPACE/DELETE erase the last
            # character; anything else is appended literally.
            if key == RETURN:
                controller.changeState(SelectedTable)
            elif key == ENTER:
                controller.changeState(SelectedTable)
            elif key == BACKSPACE:
                controller.selected_table.name = controller.selected_table.name[:-1]
            elif key == DELETE:
                controller.selected_table.name = controller.selected_table.name[:-1]
            else:
                controller.selected_table.name += key
@singleton
class ColumnEdit(State):
    """Editing a single column's name; also the origin of foreign-key drags."""
    @transition('MouseSelect')
    def mousePressed(self, controller):
        # Clicking outside the edited column ends the edit and re-runs
        # selection from scratch.
        column = controller.editing_column
        if not (controller.mousePX > column.left_extent and
                controller.mousePX < column.right_extent and
                controller.mousePY > column.top_extent and
                controller.mousePY < column.bottom_extent):
            self.end(controller)
            controller.selected_table = None
            controller.changeState(MouseSelect)
    def start(self, controller):
        controller.editing_column.edit = True
    def end(self, controller):
        if controller.editing_column:
            controller.editing_column.edit = False
            controller.editing_column = None
        if controller.selected_table:
            controller.selected_table.delete_empty_columns()
    def keyReleased(self, controller):
        # keyCode 8 is backspace in Processing's key handling
        if keyCode == 8:
            controller.editing_column.name = controller.editing_column.name[:-1]
    @transition('SelectedTable')
    def keyTyped(self, controller):
        if key == CODED:
            if keyCode == 8:
                controller.editing_column.name = controller.editing_column.name[:-1]
        else:
            # RETURN/ENTER commit the name; BACKSPACE/DELETE erase the last
            # character; anything else is appended literally.
            if key == RETURN:
                controller.changeState(SelectedTable)
            elif key == ENTER:
                controller.changeState(SelectedTable)
            elif key == BACKSPACE:
                controller.editing_column.name = controller.editing_column.name[:-1]
            elif key == DELETE:
                controller.editing_column.name = controller.editing_column.name[:-1]
            else:
                controller.editing_column.name += key
    @transition('Connect')
    def mouseDragged(self, controller):
        # Dragging out of the column starts a foreign-key connector.
        controller.connecting_column = controller.editing_column
        controller.connecting_connector = ForeignKey(from_column=controller.connecting_column,
                                                     connecting=True)
        controller.connecting_column.connectors = [controller.connecting_connector]
        controller.changeState(Connect)
@singleton
class Connect(State):
    """Dragging a foreign-key connector from a column towards its target."""
    def end(self, controller):
        # Abort half-made connections: a connector that never reached a
        # target column is removed again.
        if (controller.connecting_connector and
            controller.connecting_connector.to_column is None):
            controller.connecting_column.connectors.remove(controller.connecting_connector)
        controller.connecting_connector = None
        controller.connecting_column = None
    @transition('ReadyState')
    def mouseReleased(self, controller):
        # Hit-test every column of every table; the first one under the
        # cursor becomes the foreign key's target.  An unnamed source
        # column is auto-named "<target_table>:ForeignKey".
        for table in controller.tables:
            for column in table.columns:
                if (controller.mousePX > column.left_extent and
                    controller.mousePX < column.right_extent and
                    controller.mousePY > column.top_extent and
                    controller.mousePY < column.bottom_extent):
                    controller.connecting_connector.to_column = column
                    if controller.connecting_connector.from_column.name.strip() == "":
                        controller.connecting_connector.from_column.name = "{0}:ForeignKey".format(snake_case(controller.connecting_connector.to_column.table.name))
                    break
        controller.changeState(ReadyState)
| gpl-2.0 |
iamjakob/lumiCalc | LumiDB/test/instlumiInTime.py | 1 | 1892 | import os,sys
import coral,datetime,time
from RecoLuminosity.LumiDB import lumiQueryAPI,lumiTime,csvReporter
def main(*args):
    """
    Query the CMS luminosity database for one run and write a CSV of
    per-lumisection instantaneous luminosity with UTC / unix timestamps.

    args mirrors sys.argv: args[1] is the run number.  Returns 0 on
    success, 1 when the run number is missing, 2 on any other error.
    """
    runnum=0
    try:
        runnum=args[1]
        report=csvReporter.csvReporter('instlumibytime-'+str(runnum)+'.csv')
        # Quiet CORAL down to errors only and point it at the auth files.
        msg=coral.MessageStream('')
        msg.setMsgVerbosity(coral.message_Level_Error)
        os.environ['CORAL_AUTH_PATH']='/afs/cern.ch/cms/lumi'
        svc = coral.ConnectionService()
        connectstr='oracle://cms_orcoff_prod/cms_lumi_prod'
        session=svc.connect(connectstr,accessMode=coral.access_ReadOnly)
        session.typeConverter().setCppTypeForSqlType("unsigned int","NUMBER(10)")
        session.typeConverter().setCppTypeForSqlType("unsigned long long","NUMBER(20)")
        # Read-only transaction: fetch the run summary and per-lumisection
        # summary, then release the connection before post-processing.
        session.transaction().start(True)
        schema=session.nominalSchema()
        q=schema.newQuery()
        runsummaryOut=lumiQueryAPI.runsummaryByrun(q,runnum)
        del q
        q=schema.newQuery()
        lumisummaryOut=lumiQueryAPI.lumisummaryByrun(q,runnum,'0001')
        del q
        session.transaction().commit()
        del session
        del svc
        #print runsummaryOut
        starttimestr=runsummaryOut[3]
        t=lumiTime.lumiTime()
        report.writeRow(['cmslsnum','utctime','unixtimestamp','instlumi'])
        for dataperls in lumisummaryOut:
            cmslsnum=dataperls[0]
            instlumi=dataperls[1]
            startorbit=dataperls[3]
            # Convert the lumisection's start orbit to wall-clock time,
            # then to a float unix timestamp with microsecond precision.
            orbittime=t.OrbitToTime(starttimestr,startorbit)
            orbittimestamp=time.mktime(orbittime.timetuple())+orbittime.microsecond/1e6
            report.writeRow([cmslsnum,orbittime,orbittimestamp,instlumi])
    except IndexError:
        print 'runnumber should be provided'
        return 1
    except Exception, er:
        print str(er)
        return 2
    else:
        return 0
if __name__=='__main__':
    # Exit status: 0 ok, 1 missing run number, 2 query/connection error.
    sys.exit(main(*sys.argv))
| apache-2.0 |
kYc0o/RIOT | tests/socket_zep/tests/01-run.py | 32 | 2871 | #!/usr/bin/env python3
# Copyright (C) 2016 Freie Universität Berlin
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
import os
import sys
import socket
from testrunner import run
# Maximum IEEE 802.15.4 frame length and ZEP framing overhead.
IEEE802154_FRAME_LEN_MAX = 127
ZEP_DATA_HEADER_SIZE = 32
FCS_LEN = 2
# Worst-case UDP payload: one ZEP data header + max frame + FCS.
RCVBUF_LEN = IEEE802154_FRAME_LEN_MAX + ZEP_DATA_HEADER_SIZE + FCS_LEN
# Addresses/ports the node's ZEP device binds to (local) and sends to (remote).
zep_params = {
    "local_addr": "::",
    "local_port": 12345,
    "remote_addr": "::1",
    "remote_port": 17754,
}
# UDP socket playing the remote ZEP endpoint; created under __main__.
s = None
def testfunc(child):
    """
    Drive the socket_zep test firmware over the serial console (*child* is
    the pexpect spawn) and validate the ZEP frames it exchanges over the
    module-level UDP socket ``s``.
    """
    child.expect_exact("Socket ZEP device driver test")
    # The firmware must report the same endpoints we configured via TERMFLAGS.
    child.expect(r"Initializing socket ZEP with " +
                 r"\(local: \[(?P<local_addr>[:0-9a-f]+)\]:(?P<local_port>\d+), " +
                 r"remote: \[(?P<remote_addr>[:0-9a-f]+)\]:(?P<remote_port>\d+)\)")
    assert(child.match.group('local_addr') == zep_params['local_addr'])
    assert(int(child.match.group('local_port')) == zep_params['local_port'])
    assert(child.match.group('remote_addr') == zep_params['remote_addr'])
    assert(int(child.match.group('remote_port')) == zep_params['remote_port'])
    child.expect(r"\(Hwaddrs: (?P<short_addr>[0-9a-f]{4}), (?P<long_addr>[0-9a-f]{16})\)")
    # Outgoing direction: a zero-length packet is ZEP header + FCS only.
    child.expect_exact("Send zero-length packet")
    data, addr = s.recvfrom(RCVBUF_LEN)
    assert(len(data) == (ZEP_DATA_HEADER_SIZE + FCS_LEN))
    # A payload-carrying packet must contain the payload verbatim.
    child.expect_exact("Send 'Hello\\0World\\0'")
    data, addr = s.recvfrom(RCVBUF_LEN)
    assert(len(data) == (ZEP_DATA_HEADER_SIZE + len("Hello\0World\0") + FCS_LEN))
    assert(b"Hello\0World\0" == data[ZEP_DATA_HEADER_SIZE:-2])
    # Incoming direction: inject a pre-built ZEP frame and check the
    # firmware's hex dump of the contained 802.15.4 frame.
    child.expect_exact("Waiting for an incoming message (use `make test`)")
    s.sendto(b"\x45\x58\x02\x01\x1a\x44\xe0\x01\xff\xdb\xde\xa6\x1a\x00\x8b" +
             b"\xfd\xae\x60\xd3\x21\xf1\x00\x00\x00\x00\x00\x00\x00\x00\x00" +
             b"\x00\x22\x41\xdc\x02\x23\x00\x38\x30\x00\x0a\x50\x45\x5a\x00" +
             b"\x5b\x45\x00\x0a\x50\x45\x5a\x00Hello World\x3a\xf2",
             ("::1", zep_params['local_port']))
    child.expect(r"RSSI: \d+, LQI: \d+, Data:")
    child.expect_exact(r"00000000 41 DC 02 23 00 38 30 00 0A 50 45 5A 00 5B 45 00")
    child.expect_exact(r"00000010 0A 50 45 5A 00 48 65 6C 6C 6F 20 57 6F 72 6C 64")
if __name__ == "__main__":
    # Tell the RIOT terminal wrapper which ZEP endpoints the node should
    # use before the firmware is spawned by the test runner.
    os.environ['TERMFLAGS'] = "-z [%s]:%d,[%s]:%d" % (
        zep_params['local_addr'], zep_params['local_port'],
        zep_params['remote_addr'], zep_params['remote_port'])
    # Play the role of the remote ZEP endpoint on the loopback interface.
    s = socket.socket(family=socket.AF_INET6, type=socket.SOCK_DGRAM)
    s.bind(("::", zep_params['remote_port']))
    res = run(testfunc, timeout=1, echo=True, traceback=True)
    s.close()
    if (res == 0):
        print("Run tests successful")
    else:
        print("Run tests failed")
    sys.exit(res)
| lgpl-2.1 |
tmr232/Sark | plugins/function_strings.py | 1 | 1860 | from contextlib import suppress
import idaapi
import idc
import sark
def show_function_strings(function):
    """Print every string referenced from *function* to the IDA output window."""
    header = "\n\nString References in {}:0x{:08X}\n".format(function.name, function.start_ea)
    idaapi.msg(header)
    idaapi.msg("From To String\n")
    for xref in function.xrefs_from:
        # Targets that are not strings are skipped silently.
        with suppress(sark.exceptions.SarkNoString):
            # Trim long strings so the listing stays readable.
            string = sark.get_string(xref.to)[:100]
            idaapi.msg("0x{:08X} 0x{:08X} {}\n".format(xref.frm, xref.to, repr(string)))
def show_current_function_strings():
    """Dump string references for the function under the cursor."""
    here = idc.here()
    try:
        show_function_strings(sark.Function(here))
    except sark.exceptions.SarkNoFunction:
        idaapi.msg("[FunctionStrings] No function at 0x{:08X}.\n".format(here))
def show_highlighted_function_strings():
    """Dump string references for the function named by the highlighted identifier."""
    identifier = sark.get_highlighted_identifier()
    if not identifier:
        return
    try:
        show_function_strings(sark.Function(name=identifier))
    except sark.exceptions.SarkNoFunction:
        idaapi.msg("[FunctionStrings] {!r} is not a function.\n".format(identifier))
class FunctionStrings(idaapi.plugin_t):
    """IDA plugin shell: registers hotkeys for the string-dumping helpers."""
    # plugin_t required attributes
    flags = 0
    comment = "Show Function Strings"
    help = "Show all strings references by the function."
    wanted_name = "FunctionStrings"
    wanted_hotkey = ""
    def init(self):
        # Register the hotkeys once at plugin load; keep the handles so
        # term() can unregister them again.
        self.hotkeys = []
        self.hotkeys.append(idaapi.add_hotkey("Alt+9", show_current_function_strings))
        self.hotkeys.append(idaapi.add_hotkey("Ctrl+Alt+9", show_highlighted_function_strings))
        return idaapi.PLUGIN_KEEP
    def term(self):
        for hotkey in self.hotkeys:
            idaapi.del_hotkey(hotkey)
    def run(self, arg):
        # All functionality is hotkey-driven; nothing to do on explicit run.
        pass
def PLUGIN_ENTRY():
    """Entry point required by IDA's plugin loader."""
    return FunctionStrings()
| mit |
noroutine/ansible | lib/ansible/modules/cloud/misc/proxmox_template.py | 33 | 8580 | #!/usr/bin/python
#
# Copyright: Ansible Project
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: proxmox_template
short_description: management of OS templates in Proxmox VE cluster
description:
- allows you to upload/delete templates in Proxmox VE cluster
version_added: "2.0"
options:
api_host:
description:
- the host of the Proxmox VE cluster
required: true
api_user:
description:
- the user to authenticate with
required: true
api_password:
description:
- the password to authenticate with
- you can use PROXMOX_PASSWORD environment variable
default: null
required: false
validate_certs:
description:
- enable / disable https certificate verification
default: false
required: false
type: bool
node:
description:
- Proxmox VE node, when you will operate with template
default: null
required: true
src:
description:
- path to uploaded file
- required only for C(state=present)
default: null
required: false
aliases: ['path']
template:
description:
- the template name
- required only for states C(absent), C(info)
default: null
required: false
content_type:
description:
- content type
- required only for C(state=present)
default: 'vztmpl'
required: false
choices: ['vztmpl', 'iso']
storage:
description:
- target storage
default: 'local'
required: false
timeout:
description:
- timeout for operations
default: 30
required: false
force:
description:
- can be used only with C(state=present), exists template will be overwritten
default: false
required: false
type: bool
state:
description:
- Indicate desired state of the template
choices: ['present', 'absent']
default: present
notes:
- Requires proxmoxer and requests modules on host. This modules can be installed with pip.
requirements: [ "proxmoxer", "requests" ]
author: "Sergei Antipov @UnderGreen"
'''
EXAMPLES = '''
# Upload new openvz template with minimal options
- proxmox_template:
node: uk-mc02
api_user: root@pam
api_password: 1q2w3e
api_host: node1
src: ~/ubuntu-14.04-x86_64.tar.gz
# Upload new openvz template with minimal options use environment PROXMOX_PASSWORD variable(you should export it before)
- proxmox_template:
node: uk-mc02
api_user: root@pam
api_host: node1
src: ~/ubuntu-14.04-x86_64.tar.gz
# Upload new openvz template with all options and force overwrite
- proxmox_template:
node: uk-mc02
api_user: root@pam
api_password: 1q2w3e
api_host: node1
storage: local
content_type: vztmpl
src: ~/ubuntu-14.04-x86_64.tar.gz
force: yes
# Delete template with minimal options
- proxmox_template:
node: uk-mc02
api_user: root@pam
api_password: 1q2w3e
api_host: node1
template: ubuntu-14.04-x86_64.tar.gz
state: absent
'''
import os
import time
try:
from proxmoxer import ProxmoxAPI
HAS_PROXMOXER = True
except ImportError:
HAS_PROXMOXER = False
from ansible.module_utils.basic import AnsibleModule
def get_template(proxmox, node, storage, content_type, template):
    """Return a non-empty list when *template* already exists on *storage*.

    Callers use the result only for its truthiness; an empty list means
    the volume was not found in the storage content listing.
    """
    volid = '%s:%s/%s' % (storage, content_type, template)
    contents = proxmox.nodes(node).storage(storage).content.get()
    return [True for entry in contents if entry['volid'] == volid]
def upload_template(module, proxmox, api_host, node, storage, content_type, realpath, timeout):
    """Upload the template at *realpath* and poll the resulting task.

    Returns True once the upload task stops with exit status 'OK';
    calls module.fail_json (which exits the module) when *timeout*
    polling iterations elapse first.
    """
    # Use a context manager so the file handle is closed even if the API
    # call raises (the original leaked the handle opened inline).
    with open(realpath) as template_file:
        taskid = proxmox.nodes(node).storage(storage).upload.post(
            content=content_type, filename=template_file)
    while timeout:
        # Task status lives on the node named by the API host.
        task_status = proxmox.nodes(api_host.split('.')[0]).tasks(taskid).status.get()
        if task_status['status'] == 'stopped' and task_status['exitstatus'] == 'OK':
            return True
        timeout = timeout - 1
        if timeout == 0:
            # BUG FIX: was `proxmox.node(node)` -- the proxmoxer accessor used
            # everywhere else in this module is `.nodes()`, so the failure
            # path itself raised AttributeError instead of reporting.
            module.fail_json(msg='Reached timeout while waiting for uploading template. Last line in task before timeout: %s'
                                 % proxmox.nodes(node).tasks(taskid).log.get()[:1])
        time.sleep(1)
    return False
def delete_template(module, proxmox, node, storage, content_type, template, timeout):
    """Delete *template* from *storage* and wait until it disappears.

    Returns True once the volume is gone; calls module.fail_json (which
    exits) when *timeout* polling iterations elapse first.
    """
    volid = '%s:%s/%s' % (storage, content_type, template)
    proxmox.nodes(node).storage(storage).content.delete(volid)
    while timeout:
        # Poll until the volume no longer shows up in the storage listing.
        if not get_template(proxmox, node, storage, content_type, template):
            return True
        timeout -= 1
        if timeout == 0:
            module.fail_json(msg='Reached timeout while waiting for deleting template.')
        time.sleep(1)
    return False
def main():
    """Ansible module entry point: upload or delete a Proxmox VE template."""
    module = AnsibleModule(
        argument_spec=dict(
            api_host=dict(required=True),
            api_user=dict(required=True),
            api_password=dict(no_log=True),
            validate_certs=dict(type='bool', default='no'),
            node=dict(),
            src=dict(),
            template=dict(),
            content_type=dict(default='vztmpl', choices=['vztmpl', 'iso']),
            storage=dict(default='local'),
            timeout=dict(type='int', default=30),
            force=dict(type='bool', default='no'),
            state=dict(default='present', choices=['present', 'absent']),
        )
    )

    if not HAS_PROXMOXER:
        module.fail_json(msg='proxmoxer required for this module')

    state = module.params['state']
    api_user = module.params['api_user']
    api_host = module.params['api_host']
    api_password = module.params['api_password']
    validate_certs = module.params['validate_certs']
    node = module.params['node']
    storage = module.params['storage']
    timeout = module.params['timeout']

    # If password not set get it from PROXMOX_PASSWORD env
    if not api_password:
        try:
            api_password = os.environ['PROXMOX_PASSWORD']
        except KeyError as e:
            module.fail_json(msg='You should set api_password param or use PROXMOX_PASSWORD environment variable')

    try:
        proxmox = ProxmoxAPI(api_host, user=api_user, password=api_password, verify_ssl=validate_certs)
    except Exception as e:
        module.fail_json(msg='authorization on proxmox cluster failed with exception: %s' % e)

    if state == 'present':
        try:
            content_type = module.params['content_type']
            src = module.params['src']

            # NOTE(review): `utils.path_dwim` is a legacy Ansible helper;
            # confirm it is still importable on the targeted Ansible
            # versions before modernizing.
            from ansible import utils
            realpath = utils.path_dwim(None, src)
            template = os.path.basename(realpath)
            # NOTE(review): the existence check below runs before `src` is
            # validated, so a missing `src` surfaces as a path error rather
            # than the dedicated message -- confirm the intended ordering.
            if get_template(proxmox, node, storage, content_type, template) and not module.params['force']:
                module.exit_json(changed=False, msg='template with volid=%s:%s/%s is already exists' % (storage, content_type, template))
            elif not src:
                module.fail_json(msg='src param to uploading template file is mandatory')
            elif not (os.path.exists(realpath) and os.path.isfile(realpath)):
                module.fail_json(msg='template file on path %s not exists' % realpath)

            if upload_template(module, proxmox, api_host, node, storage, content_type, realpath, timeout):
                module.exit_json(changed=True, msg='template with volid=%s:%s/%s uploaded' % (storage, content_type, template))
        except Exception as e:
            module.fail_json(msg="uploading of template %s failed with exception: %s" % (template, e))

    elif state == 'absent':
        try:
            content_type = module.params['content_type']
            template = module.params['template']

            if not template:
                module.fail_json(msg='template param is mandatory')
            elif not get_template(proxmox, node, storage, content_type, template):
                module.exit_json(changed=False, msg='template with volid=%s:%s/%s is already deleted' % (storage, content_type, template))

            if delete_template(module, proxmox, node, storage, content_type, template, timeout):
                module.exit_json(changed=True, msg='template with volid=%s:%s/%s deleted' % (storage, content_type, template))
        except Exception as e:
            module.fail_json(msg="deleting of template %s failed with exception: %s" % (template, e))
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
| gpl-3.0 |
uberfastman/yahoo-fantasy-football-metrics | utils/quickstart.py | 1 | 1463 | from __future__ import print_function
import os
from googleapiclient.discovery import build
from httplib2 import Http
from oauth2client import file, client, tools
# If modifying these scopes, delete the file token.json.
# this scope allows the reading and writing of files to Google Drive
SCOPES = "https://www.googleapis.com/auth/drive"
def main():
    """Obtain (or refresh) Google Drive credentials and list ten files.

    Loads a cached token from authentication/google/token.json, runs the
    OAuth installed-app flow when the token is missing or invalid, then
    prints the name and id of up to 10 files from the Drive v3 API.
    """
    project_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    google_auth_dir = os.path.join(project_root, "authentication", "google")
    if not os.path.exists(google_auth_dir):
        os.makedirs(google_auth_dir)

    store = file.Storage(os.path.join(google_auth_dir, "token.json"))
    creds = store.get()
    if not creds or creds.invalid:
        # No usable cached token: run the OAuth flow with the client secrets.
        flow = client.flow_from_clientsecrets(
            os.path.join(google_auth_dir, "credentials.json"), SCOPES)
        creds = tools.run_flow(flow, store)
    service = build("drive", "v3", http=creds.authorize(Http()))

    # Ask the Drive v3 API for the first page of files.
    response = service.files().list(
        pageSize=10, fields="nextPageToken, files(id, name)").execute()
    files_found = response.get("files", [])
    if not files_found:
        print("No files found.")
        return
    print("Files:")
    for entry in files_found:
        print("{0} ({1})".format(entry["name"], entry["id"]))
# Run the quickstart only when invoked directly.
if __name__ == '__main__':
    main()
| gpl-3.0 |
Kamp9/scipy | scipy/optimize/linesearch.py | 61 | 24200 | """
Functions
---------
.. autosummary::
:toctree: generated/
line_search_armijo
line_search_wolfe1
line_search_wolfe2
scalar_search_wolfe1
scalar_search_wolfe2
"""
from __future__ import division, print_function, absolute_import
from warnings import warn
from scipy.optimize import minpack2
import numpy as np
from scipy._lib.six import xrange
__all__ = ['LineSearchWarning', 'line_search_wolfe1', 'line_search_wolfe2',
'scalar_search_wolfe1', 'scalar_search_wolfe2',
'line_search_armijo']
class LineSearchWarning(RuntimeWarning):
pass
#------------------------------------------------------------------------------
# Minpack's Wolfe line and scalar searches
#------------------------------------------------------------------------------
def line_search_wolfe1(f, fprime, xk, pk, gfk=None,
                       old_fval=None, old_old_fval=None,
                       args=(), c1=1e-4, c2=0.9, amax=50, amin=1e-8,
                       xtol=1e-14):
    """
    As `scalar_search_wolfe1` but do a line search to direction `pk`

    Parameters
    ----------
    f : callable
        Function `f(x)`
    fprime : callable
        Gradient of `f`
    xk : array_like
        Current point
    pk : array_like
        Search direction
    gfk : array_like, optional
        Gradient of `f` at point `xk`
    old_fval : float, optional
        Value of `f` at point `xk`
    old_old_fval : float, optional
        Value of `f` at point preceding `xk`

    The rest of the parameters are the same as for `scalar_search_wolfe1`.

    Returns
    -------
    stp, f_count, g_count, fval, old_fval
        As in `line_search_wolfe1`
    gval : array
        Gradient of `f` at the final point

    """
    if gfk is None:
        gfk = fprime(xk)

    if isinstance(fprime, tuple):
        # `fprime` given as (approx_fprime, eps): gradients are
        # finite-difference approximations, so every derphi evaluation is
        # charged as len(xk) + 1 *function* calls instead of one gradient call.
        eps = fprime[1]
        fprime = fprime[0]
        newargs = (f, eps) + args
        gradient = False
    else:
        newargs = args
        gradient = True

    # One-element lists act as mutable cells so the closures below can
    # update the evaluation counters and cache the latest gradient.
    gval = [gfk]
    gc = [0]
    fc = [0]

    def phi(s):
        # Objective restricted to the search line: phi(s) = f(xk + s*pk).
        fc[0] += 1
        return f(xk + s*pk, *args)

    def derphi(s):
        # Directional derivative along pk; also stores the full gradient.
        gval[0] = fprime(xk + s*pk, *newargs)
        if gradient:
            gc[0] += 1
        else:
            fc[0] += len(xk) + 1
        return np.dot(gval[0], pk)

    derphi0 = np.dot(gfk, pk)

    stp, fval, old_fval = scalar_search_wolfe1(
            phi, derphi, old_fval, old_old_fval, derphi0,
            c1=c1, c2=c2, amax=amax, amin=amin, xtol=xtol)

    return stp, fc[0], gc[0], fval, old_fval, gval[0]
def scalar_search_wolfe1(phi, derphi, phi0=None, old_phi0=None, derphi0=None,
                         c1=1e-4, c2=0.9,
                         amax=50, amin=1e-8, xtol=1e-14):
    """
    Scalar function search for alpha that satisfies strong Wolfe conditions

    alpha > 0 is assumed to be a descent direction.

    Parameters
    ----------
    phi : callable phi(alpha)
        Function at point `alpha`
    derphi : callable dphi(alpha)
        Derivative `d phi(alpha)/ds`. Returns a scalar.
    phi0 : float, optional
        Value of `f` at 0
    old_phi0 : float, optional
        Value of `f` at the previous point
    derphi0 : float, optional
        Value `derphi` at 0
    c1, c2 : float, optional
        Wolfe parameters
    amax, amin : float, optional
        Maximum and minimum step size
    xtol : float, optional
        Relative tolerance for an acceptable step.

    Returns
    -------
    alpha : float
        Step size, or None if no suitable step was found
    phi : float
        Value of `phi` at the new point `alpha`
    phi0 : float
        Value of `phi` at `alpha=0`

    Notes
    -----
    Uses routine DCSRCH from MINPACK.

    """
    if phi0 is None:
        phi0 = phi(0.)
    if derphi0 is None:
        derphi0 = derphi(0.)

    if old_phi0 is not None and derphi0 != 0:
        # Initial trial step sized from the previous iteration's decrease.
        alpha1 = min(1.0, 1.01*2*(phi0 - old_phi0)/derphi0)
        if alpha1 < 0:
            alpha1 = 1.0
    else:
        alpha1 = 1.0

    phi1 = phi0
    derphi1 = derphi0
    # Work arrays required by DCSRCH's reverse-communication interface;
    # they carry the routine's internal state between calls.
    isave = np.zeros((2,), np.intc)
    dsave = np.zeros((13,), float)
    task = b'START'

    maxiter = 30
    for i in xrange(maxiter):
        stp, phi1, derphi1, task = minpack2.dcsrch(alpha1, phi1, derphi1,
                                                   c1, c2, xtol, task,
                                                   amin, amax, isave, dsave)
        if task[:2] == b'FG':
            # DCSRCH asks for new function/derivative values at `stp`.
            alpha1 = stp
            phi1 = phi(stp)
            derphi1 = derphi(stp)
        else:
            break
    else:
        # maxiter reached, the line search did not converge
        stp = None

    if task[:5] == b'ERROR' or task[:4] == b'WARN':
        stp = None  # failed

    return stp, phi1, phi0
# Module-level alias: the MINPACK-based Wolfe search is the default
# `line_search` exported by this module.
line_search = line_search_wolfe1
#------------------------------------------------------------------------------
# Pure-Python Wolfe line and scalar searches
#------------------------------------------------------------------------------
def line_search_wolfe2(f, myfprime, xk, pk, gfk=None, old_fval=None,
                       old_old_fval=None, args=(), c1=1e-4, c2=0.9, amax=50):
    """Find alpha that satisfies strong Wolfe conditions.

    Parameters
    ----------
    f : callable f(x,*args)
        Objective function.
    myfprime : callable f'(x,*args)
        Objective function gradient.
    xk : ndarray
        Starting point.
    pk : ndarray
        Search direction.
    gfk : ndarray, optional
        Gradient value for x=xk (xk being the current parameter
        estimate). Will be recomputed if omitted.
    old_fval : float, optional
        Function value for x=xk. Will be recomputed if omitted.
    old_old_fval : float, optional
        Function value for the point preceding x=xk
    args : tuple, optional
        Additional arguments passed to objective function.
    c1 : float, optional
        Parameter for Armijo condition rule.
    c2 : float, optional
        Parameter for curvature condition rule.
    amax : float, optional
        Maximum step size

    Returns
    -------
    alpha : float or None
        Alpha for which ``x_new = x0 + alpha * pk``,
        or None if the line search algorithm did not converge.
    fc : int
        Number of function evaluations made.
    gc : int
        Number of gradient evaluations made.
    new_fval : float or None
        New function value ``f(x_new)=f(x0+alpha*pk)``,
        or None if the line search algorithm did not converge.
    old_fval : float
        Old function value ``f(x0)``.
    new_slope : float or None
        The local slope along the search direction at the
        new value ``<myfprime(x_new), pk>``,
        or None if the line search algorithm did not converge.

    Notes
    -----
    Uses the line search algorithm to enforce strong Wolfe
    conditions. See Wright and Nocedal, 'Numerical Optimization',
    1999, pg. 59-60.

    For the zoom phase it uses an algorithm by [...].

    """
    # One-element lists act as mutable cells shared with the closures below
    # (evaluation counters and the most recent gradient).
    fc = [0]
    gc = [0]
    gval = [None]

    def phi(alpha):
        # Objective restricted to the search line.
        fc[0] += 1
        return f(xk + alpha * pk, *args)

    if isinstance(myfprime, tuple):
        # Gradient supplied as (approx_fprime, eps): finite differences,
        # charged as len(xk) + 1 function evaluations per derphi call.
        def derphi(alpha):
            fc[0] += len(xk) + 1
            eps = myfprime[1]
            fprime = myfprime[0]
            newargs = (f, eps) + args
            gval[0] = fprime(xk + alpha * pk, *newargs)  # store for later use
            return np.dot(gval[0], pk)
    else:
        fprime = myfprime

        def derphi(alpha):
            gc[0] += 1
            gval[0] = fprime(xk + alpha * pk, *args)  # store for later use
            return np.dot(gval[0], pk)

    # NOTE(review): when `myfprime` is a tuple and `gfk` is None, the outer
    # name `fprime` is never bound, so the call below raises NameError --
    # callers must pass `gfk` explicitly in that case.
    if gfk is None:
        gfk = fprime(xk, *args)
    derphi0 = np.dot(gfk, pk)

    alpha_star, phi_star, old_fval, derphi_star = scalar_search_wolfe2(
            phi, derphi, old_fval, old_old_fval, derphi0, c1, c2, amax)

    if derphi_star is None:
        warn('The line search algorithm did not converge', LineSearchWarning)
    else:
        # derphi_star is a number (derphi) -- so use the most recently
        # calculated gradient used in computing it derphi = gfk*pk
        # this is the gradient at the next step no need to compute it
        # again in the outer loop.
        derphi_star = gval[0]

    return alpha_star, fc[0], gc[0], phi_star, old_fval, derphi_star
def scalar_search_wolfe2(phi, derphi=None, phi0=None,
                         old_phi0=None, derphi0=None,
                         c1=1e-4, c2=0.9, amax=50):
    """Find alpha that satisfies strong Wolfe conditions.

    alpha > 0 is assumed to be a descent direction.

    Parameters
    ----------
    phi : callable f(x)
        Objective scalar function.
    derphi : callable f'(x), optional
        Objective function derivative (can be None)
    phi0 : float, optional
        Value of phi at s=0
    old_phi0 : float, optional
        Value of phi at previous point
    derphi0 : float, optional
        Value of derphi at s=0
    c1 : float, optional
        Parameter for Armijo condition rule.
    c2 : float, optional
        Parameter for curvature condition rule.
    amax : float, optional
        Maximum step size

    Returns
    -------
    alpha_star : float or None
        Best alpha, or None if the line search algorithm did not converge.
    phi_star : float
        phi at alpha_star
    phi0 : float
        phi at 0
    derphi_star : float or None
        derphi at alpha_star, or None if the line search algorithm
        did not converge.

    Notes
    -----
    Uses the line search algorithm to enforce strong Wolfe
    conditions. See Wright and Nocedal, 'Numerical Optimization',
    1999, pg. 59-60.

    For the zoom phase it uses an algorithm by [...].

    NOTE(review): `amax` is accepted but never used to clamp the doubled
    step `alpha2` in this version -- confirm before relying on it.
    """
    if phi0 is None:
        phi0 = phi(0.)

    if derphi0 is None and derphi is not None:
        derphi0 = derphi(0.)

    alpha0 = 0
    if old_phi0 is not None and derphi0 != 0:
        # First trial step: assume the same decrease as the previous iteration.
        alpha1 = min(1.0, 1.01*2*(phi0 - old_phi0)/derphi0)
    else:
        alpha1 = 1.0

    if alpha1 < 0:
        alpha1 = 1.0

    if alpha1 == 0:
        # This shouldn't happen. Perhaps the increment has slipped below
        # machine precision? For now, set the return variables skip the
        # useless while loop, and raise warnflag=2 due to possible imprecision.
        # NOTE(review): there is no early return here -- execution falls
        # through to phi(alpha1) below and the loop exits via its first
        # `alpha1 == 0` break, returning the values preset here. Confirm
        # this fall-through is intended.
        alpha_star = None
        phi_star = phi0
        phi0 = old_phi0
        derphi_star = None

    phi_a1 = phi(alpha1)
    #derphi_a1 = derphi(alpha1) evaluated below

    phi_a0 = phi0
    derphi_a0 = derphi0

    i = 1
    maxiter = 10
    for i in xrange(maxiter):
        if alpha1 == 0:
            break

        # Sufficient decrease fails (or no improvement after the first
        # iteration): the minimizer is bracketed in [alpha0, alpha1].
        if (phi_a1 > phi0 + c1 * alpha1 * derphi0) or \
           ((phi_a1 >= phi_a0) and (i > 1)):
            alpha_star, phi_star, derphi_star = \
                        _zoom(alpha0, alpha1, phi_a0,
                              phi_a1, derphi_a0, phi, derphi,
                              phi0, derphi0, c1, c2)
            break

        derphi_a1 = derphi(alpha1)
        # Strong Wolfe curvature condition holds: accept alpha1.
        if (abs(derphi_a1) <= -c2*derphi0):
            alpha_star = alpha1
            phi_star = phi_a1
            derphi_star = derphi_a1
            break

        # Positive slope: minimizer bracketed with the endpoints reversed.
        if (derphi_a1 >= 0):
            alpha_star, phi_star, derphi_star = \
                        _zoom(alpha1, alpha0, phi_a1,
                              phi_a0, derphi_a1, phi, derphi,
                              phi0, derphi0, c1, c2)
            break

        alpha2 = 2 * alpha1  # increase by factor of two on each iteration
        i = i + 1
        alpha0 = alpha1
        alpha1 = alpha2
        phi_a0 = phi_a1
        phi_a1 = phi(alpha1)
        derphi_a0 = derphi_a1

    else:
        # stopping test maxiter reached
        alpha_star = alpha1
        phi_star = phi_a1
        derphi_star = None
        warn('The line search algorithm did not converge', LineSearchWarning)

    return alpha_star, phi_star, phi0, derphi_star
def _cubicmin(a, fa, fpa, b, fb, c, fc):
"""
Finds the minimizer for a cubic polynomial that goes through the
points (a,fa), (b,fb), and (c,fc) with derivative at a of fpa.
If no minimizer can be found return None
"""
# f(x) = A *(x-a)^3 + B*(x-a)^2 + C*(x-a) + D
with np.errstate(divide='raise', over='raise', invalid='raise'):
try:
C = fpa
db = b - a
dc = c - a
denom = (db * dc) ** 2 * (db - dc)
d1 = np.empty((2, 2))
d1[0, 0] = dc ** 2
d1[0, 1] = -db ** 2
d1[1, 0] = -dc ** 3
d1[1, 1] = db ** 3
[A, B] = np.dot(d1, np.asarray([fb - fa - C * db,
fc - fa - C * dc]).flatten())
A /= denom
B /= denom
radical = B * B - 3 * A * C
xmin = a + (-B + np.sqrt(radical)) / (3 * A)
except ArithmeticError:
return None
if not np.isfinite(xmin):
return None
return xmin
def _quadmin(a, fa, fpa, b, fb):
"""
Finds the minimizer for a quadratic polynomial that goes through
the points (a,fa), (b,fb) with derivative at a of fpa,
"""
# f(x) = B*(x-a)^2 + C*(x-a) + D
with np.errstate(divide='raise', over='raise', invalid='raise'):
try:
D = fa
C = fpa
db = b - a * 1.0
B = (fb - D - C * db) / (db * db)
xmin = a - C / (2.0 * B)
except ArithmeticError:
return None
if not np.isfinite(xmin):
return None
return xmin
def _zoom(a_lo, a_hi, phi_lo, phi_hi, derphi_lo,
          phi, derphi, phi0, derphi0, c1, c2):
    """
    Part of the optimization algorithm in `scalar_search_wolfe2`.

    Shrinks the bracket [a_lo, a_hi] -- known to contain a step satisfying
    the strong Wolfe conditions -- until such a step is found, returning
    (a_star, phi(a_star), derphi(a_star)); returns (None, None, None)
    after `maxiter` unsuccessful iterations.
    """
    maxiter = 10
    i = 0
    delta1 = 0.2  # cubic interpolant check
    delta2 = 0.1  # quadratic interpolant check
    phi_rec = phi0
    a_rec = 0
    while True:
        # interpolate to find a trial step length between a_lo and
        # a_hi Need to choose interpolation here. Use cubic
        # interpolation and then if the result is within delta *
        # dalpha or outside of the interval bounded by a_lo or a_hi
        # then use quadratic interpolation, if the result is still too
        # close, then use bisection

        dalpha = a_hi - a_lo
        # The bracket endpoints are not ordered; (a, b) is the sorted pair.
        if dalpha < 0:
            a, b = a_hi, a_lo
        else:
            a, b = a_lo, a_hi

        # minimizer of cubic interpolant
        # (uses phi_lo, derphi_lo, phi_hi, and the most recent value of phi)
        #
        # if the result is too close to the end points (or out of the
        # interval) then use quadratic interpolation with phi_lo,
        # derphi_lo and phi_hi if the result is stil too close to the
        # end points (or out of the interval) then use bisection

        if (i > 0):
            cchk = delta1 * dalpha
            a_j = _cubicmin(a_lo, phi_lo, derphi_lo, a_hi, phi_hi,
                            a_rec, phi_rec)
        if (i == 0) or (a_j is None) or (a_j > b - cchk) or (a_j < a + cchk):
            qchk = delta2 * dalpha
            a_j = _quadmin(a_lo, phi_lo, derphi_lo, a_hi, phi_hi)
            if (a_j is None) or (a_j > b-qchk) or (a_j < a+qchk):
                # Both interpolations rejected: fall back to bisection.
                a_j = a_lo + 0.5*dalpha

        # Check new value of a_j
        phi_aj = phi(a_j)
        if (phi_aj > phi0 + c1*a_j*derphi0) or (phi_aj >= phi_lo):
            # Sufficient decrease fails at a_j: shrink the bracket from above.
            phi_rec = phi_hi
            a_rec = a_hi
            a_hi = a_j
            phi_hi = phi_aj
        else:
            derphi_aj = derphi(a_j)
            if abs(derphi_aj) <= -c2*derphi0:
                # Strong Wolfe conditions hold at a_j: done.
                a_star = a_j
                val_star = phi_aj
                valprime_star = derphi_aj
                break
            if derphi_aj*(a_hi - a_lo) >= 0:
                # Slope points out of the bracket: a_lo becomes the high end.
                phi_rec = phi_hi
                a_rec = a_hi
                a_hi = a_lo
                phi_hi = phi_lo
            else:
                phi_rec = phi_lo
                a_rec = a_lo
            a_lo = a_j
            phi_lo = phi_aj
            derphi_lo = derphi_aj
        i += 1
        if (i > maxiter):
            # Failed to find a conforming step size
            a_star = None
            val_star = None
            valprime_star = None
            break
    return a_star, val_star, valprime_star
#------------------------------------------------------------------------------
# Armijo line and scalar searches
#------------------------------------------------------------------------------
def line_search_armijo(f, xk, pk, gfk, old_fval, args=(), c1=1e-4, alpha0=1):
    """Minimize over alpha, the function ``f(xk+alpha pk)``.

    Parameters
    ----------
    f : callable
        Function to be minimized.
    xk : array_like
        Current point.
    pk : array_like
        Search direction.
    gfk : array_like
        Gradient of `f` at point `xk`.
    old_fval : float
        Value of `f` at point `xk`.
    args : tuple, optional
        Optional arguments.
    c1 : float, optional
        Value to control stopping criterion.
    alpha0 : scalar, optional
        Value of `alpha` at start of the optimization.

    Returns
    -------
    alpha
    f_count
    f_val_at_alpha

    Notes
    -----
    Uses the interpolation algorithm (Armijo backtracking) as suggested by
    Wright and Nocedal in 'Numerical Optimization', 1999, pg. 56-57
    """
    xk = np.atleast_1d(xk)
    eval_count = [0]

    def phi(step):
        eval_count[0] += 1
        return f(xk + step * pk, *args)

    # Reuse the caller-supplied f(xk) when available instead of re-evaluating.
    phi0 = phi(0.) if old_fval is None else old_fval

    derphi0 = np.dot(gfk, pk)
    alpha, phi1 = scalar_search_armijo(phi, phi0, derphi0, c1=c1,
                                       alpha0=alpha0)
    return alpha, eval_count[0], phi1
def line_search_BFGS(f, xk, pk, gfk, old_fval, args=(), c1=1e-4, alpha0=1):
    """
    Compatibility wrapper for `line_search_armijo`

    Inserts a zero gradient-evaluation count so the returned tuple matches
    the historical 4-element BFGS line-search interface.
    """
    alpha, fcount, fval = line_search_armijo(f, xk, pk, gfk, old_fval,
                                             args=args, c1=c1, alpha0=alpha0)
    return alpha, fcount, 0, fval
def scalar_search_armijo(phi, phi0, derphi0, c1=1e-4, alpha0=1, amin=0):
    """Minimize over alpha, the function ``phi(alpha)``.

    Backtracking (Armijo) search as in Wright and Nocedal,
    'Numerical Optimization', 1999, pg. 56-57. alpha > 0 is assumed to
    be a descent direction.

    Returns
    -------
    alpha
        Accepted step, or None if no step > `amin` gives sufficient decrease.
    phi1
        phi evaluated at the last trial step.
    """
    def sufficient_decrease(step, value):
        # Armijo condition: phi(step) <= phi(0) + c1*step*phi'(0).
        return value <= phi0 + c1 * step * derphi0

    phi_a0 = phi(alpha0)
    if sufficient_decrease(alpha0, phi_a0):
        return alpha0, phi_a0

    # Minimizer of the quadratic interpolant through phi0, derphi0, phi_a0.
    alpha1 = -(derphi0) * alpha0**2 / 2.0 / (phi_a0 - phi0 - derphi0 * alpha0)
    phi_a1 = phi(alpha1)
    if sufficient_decrease(alpha1, phi_a1):
        return alpha1, phi_a1

    # Backtrack using cubic interpolation on the two most recent trial
    # steps until the Armijo condition holds or the step drops below amin.
    while alpha1 > amin:
        factor = alpha0**2 * alpha1**2 * (alpha1-alpha0)
        a = alpha0**2 * (phi_a1 - phi0 - derphi0*alpha1) - \
            alpha1**2 * (phi_a0 - phi0 - derphi0*alpha0)
        a = a / factor
        b = -alpha0**3 * (phi_a1 - phi0 - derphi0*alpha1) + \
            alpha1**3 * (phi_a0 - phi0 - derphi0*alpha0)
        b = b / factor

        alpha2 = (-b + np.sqrt(abs(b**2 - 3 * a * derphi0))) / (3.0*a)
        phi_a2 = phi(alpha2)

        if sufficient_decrease(alpha2, phi_a2):
            return alpha2, phi_a2

        # Safeguard: force a sane shrink rate when the cubic step stalls.
        if (alpha1 - alpha2) > alpha1 / 2.0 or (1 - alpha2/alpha1) < 0.96:
            alpha2 = alpha1 / 2.0

        alpha0, alpha1 = alpha1, alpha2
        phi_a0, phi_a1 = phi_a1, phi_a2

    # Failed to find a suitable step length
    return None, phi_a1
#------------------------------------------------------------------------------
# Non-monotone line search for DF-SANE
#------------------------------------------------------------------------------
def _nonmonotone_line_search_cruz(f, x_k, d, prev_fs, eta,
                                  gamma=1e-4, tau_min=0.1, tau_max=0.5):
    """
    Nonmonotone backtracking line search as described in [1]_

    Parameters
    ----------
    f : callable
        Function returning a tuple ``(f, F)`` where ``f`` is the value
        of a merit function and ``F`` the residual.
    x_k : ndarray
        Initial position
    d : ndarray
        Search direction
    prev_fs : float
        List of previous merit function values. Should have ``len(prev_fs) <= M``
        where ``M`` is the nonmonotonicity window parameter.
    eta : float
        Allowed merit function increase, see [1]_
    gamma, tau_min, tau_max : float, optional
        Search parameters, see [1]_

    Returns
    -------
    alpha : float
        Step length
    xp : ndarray
        Next position
    fp : float
        Merit function value at next position
    Fp : ndarray
        Residual at next position

    References
    ----------
    [1] "Spectral residual method without gradient information for solving
        large-scale nonlinear systems of equations." W. La Cruz,
        J.M. Martinez, M. Raydan. Math. Comp. **75**, 1429 (2006).
    """
    f_k = prev_fs[-1]
    # Acceptance is relative to the worst merit value in the memory window.
    f_bar = max(prev_fs)

    alpha_p = 1
    alpha_m = 1
    alpha = 1

    while True:
        # Trial step along +d.
        xp = x_k + alpha_p * d
        fp, Fp = f(xp)

        if fp <= f_bar + eta - gamma * alpha_p**2 * f_k:
            alpha = alpha_p
            break

        # Interpolation candidate for the next +d trial step.
        alpha_tp = alpha_p**2 * f_k / (fp + (2*alpha_p - 1)*f_k)

        # Trial step along -d (the method searches both directions).
        xp = x_k - alpha_m * d
        fp, Fp = f(xp)

        if fp <= f_bar + eta - gamma * alpha_m**2 * f_k:
            alpha = -alpha_m
            break

        alpha_tm = alpha_m**2 * f_k / (fp + (2*alpha_m - 1)*f_k)

        # Safeguard both candidates into [tau_min, tau_max] times the
        # previous trial step.
        alpha_p = np.clip(alpha_tp, tau_min * alpha_p, tau_max * alpha_p)
        alpha_m = np.clip(alpha_tm, tau_min * alpha_m, tau_max * alpha_m)

    return alpha, xp, fp, Fp
def _nonmonotone_line_search_cheng(f, x_k, d, f_k, C, Q, eta,
                                   gamma=1e-4, tau_min=0.1, tau_max=0.5,
                                   nu=0.85):
    """
    Nonmonotone line search from [1]

    Parameters
    ----------
    f : callable
        Function returning a tuple ``(f, F)`` where ``f`` is the value
        of a merit function and ``F`` the residual.
    x_k : ndarray
        Initial position
    d : ndarray
        Search direction
    f_k : float
        Initial merit function value
    C, Q : float
        Control parameters. On the first iteration, give values
        Q=1.0, C=f_k
    eta : float
        Allowed merit function increase, see [1]_
    nu, gamma, tau_min, tau_max : float, optional
        Search parameters, see [1]_

    Returns
    -------
    alpha : float
        Step length
    xp : ndarray
        Next position
    fp : float
        Merit function value at next position
    Fp : ndarray
        Residual at next position
    C : float
        New value for the control parameter C
    Q : float
        New value for the control parameter Q

    References
    ----------
    .. [1] W. Cheng & D.-H. Li, ''A derivative-free nonmonotone line
           search and its application to the spectral residual
           method'', IMA J. Numer. Anal. 29, 814 (2009).

    """
    alpha_p = 1
    alpha_m = 1
    alpha = 1

    while True:
        # Trial step along +d; acceptance is relative to the running
        # average C rather than a fixed memory window (cf. the Cruz search).
        xp = x_k + alpha_p * d
        fp, Fp = f(xp)

        if fp <= C + eta - gamma * alpha_p**2 * f_k:
            alpha = alpha_p
            break

        # Interpolation candidate for the next +d trial step.
        alpha_tp = alpha_p**2 * f_k / (fp + (2*alpha_p - 1)*f_k)

        # Trial step along -d.
        xp = x_k - alpha_m * d
        fp, Fp = f(xp)

        if fp <= C + eta - gamma * alpha_m**2 * f_k:
            alpha = -alpha_m
            break

        alpha_tm = alpha_m**2 * f_k / (fp + (2*alpha_m - 1)*f_k)

        # Safeguard both candidates into [tau_min, tau_max] times the
        # previous trial step.
        alpha_p = np.clip(alpha_tp, tau_min * alpha_p, tau_max * alpha_p)
        alpha_m = np.clip(alpha_tm, tau_min * alpha_m, tau_max * alpha_m)

    # Update C and Q
    # (weighted running average of merit values, controlled by nu; see [1]).
    Q_next = nu * Q + 1
    C = (nu * Q * (C + eta) + fp) / Q_next
    Q = Q_next

    return alpha, xp, fp, Fp, C, Q
| bsd-3-clause |
vxgmichel/python-sequence | sequence/widget/runner/control.py | 2 | 9858 | # -*- coding: utf-8 -*-
""" Widget to control a Sequence Engine """
#-------------------------------------------------------------------------------
# Name: ControlWidget
# Purpose: Widget to control a sequence engine
#
# Author: michel.vincent
#
# Created: 21/10/2013
# Copyright: (c) michel.vincent 2013
# Licence: GPL
#-------------------------------------------------------------------------------
# Imports
import os, sys, logging
from PyQt4 import QtGui, QtCore
# Imports from sequence
from sequence.resource.pyqt import control_icons_rc
from sequence.core.engine import SequenceEngine
# MainWidget Class Definition
class ControlWidget(QtGui.QWidget):
"""
Widget to control a Sequence Engine
"""
# Class signals
path_requested = QtCore.pyqtSignal()
log_signal = QtCore.pyqtSignal([unicode, unicode])
execution_started = QtCore.pyqtSignal()
execution_finished = QtCore.pyqtSignal()
    def __init__(self, *args, **kwargs):
        """Build the LOAD/RUN/STOP buttons and an idle SequenceEngine."""
        # Init widget
        super(ControlWidget, self).__init__(*args, **kwargs)
        # Create load button (F5)
        self.load_button = QtGui.QToolButton(self)
        url = u":/control_icons/icons/go-bottom.png"
        self.load_button.setIcon(QtGui.QIcon(url))
        self.load_button.setIconSize(QtCore.QSize(32, 32))
        self.load_button.clicked.connect(self.on_load)
        self.load_button.setShortcut(QtGui.QKeySequence("F5"))
        self.load_button.setToolTip("Load the sequence (F5)")
        # Create run button (F6)
        self.run_button = QtGui.QToolButton(self)
        url = u":/control_icons/icons/go-next.png"
        self.run_button.setIcon(QtGui.QIcon(url))
        self.run_button.setIconSize(QtCore.QSize(32, 32))
        self.run_button.clicked.connect(self.on_run)
        self.run_button.setShortcut(QtGui.QKeySequence("F6"))
        self.run_button.setToolTip("Run the sequence (F6)")
        # Create stop button (F7)
        self.stop_button = QtGui.QToolButton(self)
        url = u":/control_icons/icons/process-stop.png"
        self.stop_button.setIcon(QtGui.QIcon(url))
        self.stop_button.setIconSize(QtCore.QSize(32, 32))
        self.stop_button.clicked.connect(self.on_stop)
        self.stop_button.setShortcut(QtGui.QKeySequence("F7"))
        self.stop_button.setToolTip("Stop the sequence (F7)")
        # Create layout: the three buttons in a horizontal row
        self.layout = QtGui.QHBoxLayout(self)
        self.layout.addWidget(self.load_button)
        self.layout.addWidget(self.run_button)
        self.layout.addWidget(self.stop_button)
        # Init attributes
        self.layout.setMargin(0)
        self.engine = SequenceEngine()      # sequence execution backend
        self.enabled = False                # whether controls are active
        self.file_path = ""                 # path of the sequence file to load
        self.path_request_enabled = False   # emit path_requested instead of loading directly
        self.disable()
#### Base methods ####
    def enable_path_request(self):
        """ Enable the path requested signal to load file """
        # When set, on_load() emits `path_requested` instead of loading
        # `self.file_path` directly (a listener supplies the path).
        self.path_request_enabled = True
    def disable_path_request(self):
        """ Disable the path requested signal to load file """
        # on_load() will load `self.file_path` directly again.
        self.path_request_enabled = False
def set_path(self, path):
""" Set the path of the file to load """
if path:
self.file_path = unicode(path)
def disable(self):
"""
Disable the control of the sequence engine
"""
self.enabled = False
self.load_button.setEnabled(False)
self.run_button.setEnabled(False)
self.stop_button.setEnabled(False)
def enable(self):
"""
Enable the control of the sequence engine
"""
if not self.enabled:
self.enabled = True
self.load_button.setEnabled(True)
self.run_button.setEnabled(False)
self.stop_button.setEnabled(False)
    def reset(self):
        """
        Reset the sequence engine

        Returns True when the reset happened, False when the engine is
        still running or the user cancelled the confirmation dialog.
        """
        # Started engine case
        if self.engine.is_started():
            msg = "Engine is still running \n"
            msg += "Use the STOP command (F7) \n"
            msg += "Then wait for it to terminate "
            QtGui.QMessageBox.warning(self, 'Engine still running',
                                      msg, QtGui.QMessageBox.Ok)
            return False
        # Loaded sequence case: ask for confirmation before unloading
        if self.engine.is_loaded():
            msg = "A sequence has been loaded \n"
            msg += "Quit anyway? "
            res = QtGui.QMessageBox.question(self, 'Sequence loaded', msg,
                                             QtGui.QMessageBox.Yes,
                                             QtGui.QMessageBox.Cancel)
            if res == QtGui.QMessageBox.Cancel:
                return False
            self.log('UNLOAD')
            self.engine.interrupt()
        # Set the buttons status (back to the load-only state)
        self.load_button.setEnabled(True)
        self.run_button.setEnabled(False)
        self.stop_button.setEnabled(False)
        return True
    def load(self, file_path=None):
        """
        Load a file (with the file_path argument if given).

        On success the run/stop buttons are enabled and load is greyed
        out; on any failure (no path, missing file, engine error) the
        method logs the problem and returns without changing buttons.
        """
        # Set path (overrides any path previously stored via set_path)
        if file_path is not None:
            self.file_path = unicode(file_path)
        # Test file path
        if not self.file_path:
            self.log(u"NO FILE TO LOAD")
            return
        # Test if the file exists
        file_name = os.path.basename(self.file_path)
        if not os.path.isfile(self.file_path):
            self.log(u"FILE NOT FOUND : {}".format(file_name))
            return
        # Load the sequence
        self.log(u'LOAD : {}'.format(file_name))
        try :
            self.engine.load(self.file_path)
        except StandardError as error:
            # StandardError: this module targets Python 2.
            self.log(u'ERROR : '+ unicode(error))
            return
        sequence_id = self.engine.sequence.xml_sequence.sequence_id
        self.log(u'SEQUENCE LOADED : {}'.format(sequence_id))
        # Set the buttons status: sequence loaded, ready to run or unload.
        self.load_button.setEnabled(False)
        self.run_button.setEnabled(True)
        self.stop_button.setEnabled(True)
def log(self, msg, end="\n"):
"""
Method to log a message with an end character
"""
self.log_signal.emit(unicode(msg), unicode(end))
#### Sequence Engine Handling ####
    class EngineRunner(QtCore.QThread):
        """
        Thread to handle the sequence engine while it's running
        """
        def run(self):
            """
            Wait for the sequence execution to terminate.

            Runs in the worker thread, so engine.wait() blocks this
            thread only, keeping the GUI responsive.
            """
            # self.parent() is the ControlWidget that owns the engine.
            engine = self.parent().engine
            engine.start()
            engine.wait()
    class EngineLogger(QtCore.QThread):
        """
        Thread to print dots while the engine is running
        """
        def run(self):
            """
            Print dots every half of a second, as long as the runner
            thread (parent().run_thread) is alive.
            """
            # Tick period in milliseconds.
            tick = 500
            while self.parent().run_thread.isRunning():
                self.parent().log(".", end="")
                self.msleep(tick)
#### Signals target ####
    def on_load(self):
        """
        Load the sequence in the Sequence Engine.

        When path requests are enabled, only emits `path_requested` and
        lets the receiver call `load`; otherwise loads `self.file_path`.
        """
        # Empty sequence or already started engine case
        if not self.enabled or self.engine.is_started():
            return
        # Require a file path
        if self.path_request_enabled:
            self.path_requested.emit()
            return
        # Load
        self.load()
    def on_run(self):
        """
        Execute the loaded sequence.

        Starts two worker threads: EngineRunner drives the engine and
        EngineLogger prints progress dots while the runner is alive.
        No-op when no sequence is loaded.
        """
        if self.engine.is_loaded():
            # Set the buttons status: only STOP is available while running.
            self.load_button.setEnabled(False)
            self.run_button.setEnabled(False)
            self.stop_button.setEnabled(True)
            # Start the engine handler thread
            self.log("RUN")
            self.log(".", end="")
            self.execution_started.emit()
            # The 'finished' kwarg connects the thread's finished signal
            # to on_finished.
            self.run_thread = self.EngineRunner(self, finished=self.on_finished)
            self.run_thread.start()
            self.log_thread = self.EngineLogger(self)
            self.log_thread.start()
    def on_stop(self):
        """
        Send a stop signal to the engine.

        While running, this only interrupts the engine (buttons are
        reset later by on_finished); when merely loaded, it unloads and
        resets the buttons immediately.
        """
        # Sequence not loaded case
        if not self.engine.is_loaded():
            return
        # Sequence started case : USER STOP
        if self.engine.is_started():
            self.log('.')
            self.log('USER STOP')
            self.engine.interrupt()
        # Sequence not started case : UNLOAD
        else:
            self.log('UNLOAD')
            self.engine.interrupt()
            # Set the buttons status
            self.load_button.setEnabled(True)
            self.run_button.setEnabled(False)
            self.stop_button.setEnabled(False)
    def on_finished(self):
        """
        Update the main widget since the execution is over.

        Connected to the runner thread's finished signal; waits for the
        dot-logger thread to exit before emitting execution_finished.
        """
        self.log_thread.wait()
        self.log('.')
        self.log('FINISHED')
        self.execution_finished.emit()
        # Set the buttons status back to "loadable".
        self.load_button.setEnabled(True)
        self.run_button.setEnabled(False)
        self.stop_button.setEnabled(False)
#### Close Event ####
    def closeEvent(self, event):
        """
        Override closeEvent to handle special cases: the close is
        refused (event ignored) when reset() fails, i.e. when the
        engine is still running or the user cancels the unload.
        """
        if not self.reset():
            event.ignore()
# Main execution: manual smoke test that shows the widget loading an
# example sequence file.
if __name__ == '__main__':
    # Imports to test the widget
    import sequence
    from sequence.core.engine import stream_sequence_logs
    # Create the widget
    stream_sequence_logs(sys.stdout)
    app = QtGui.QApplication(sys.argv)
    def print_in_console(msg, end):
        # NOTE(review): the 'end' argument is accepted but ignored here.
        print(msg)
    ui = ControlWidget(log_signal=print_in_console)
    path = os.path.join(
        os.path.dirname(sequence.__file__), os.pardir,
        "examples", "BranchTest.xml")
    ui.set_path(path)
    ui.enable()
    ui.show()
    # Run the widget
    sys.exit(app.exec_())
| gpl-3.0 |
Russell-IO/ansible | lib/ansible/playbook/block.py | 15 | 15979 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.errors import AnsibleParserError
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.become import Become
from ansible.playbook.conditional import Conditional
from ansible.playbook.helpers import load_list_of_tasks
from ansible.playbook.role import Role
from ansible.playbook.taggable import Taggable
class Block(Base, Become, Conditional, Taggable):
    """A playbook block: ordered task lists (block/rescue/always) plus
    the conditional, tagging and privilege-escalation behavior inherited
    from its mixins. Attribute values not set on the block itself are
    resolved through the parent chain (see _get_parent_attribute)."""

    # main block fields containing the task lists
    _block = FieldAttribute(isa='list', default=[], inherit=False)
    _rescue = FieldAttribute(isa='list', default=[], inherit=False)
    _always = FieldAttribute(isa='list', default=[], inherit=False)

    # other fields
    _delegate_to = FieldAttribute(isa='string')
    _delegate_facts = FieldAttribute(isa='bool', default=False)

    # for future consideration? this would be functionally
    # similar to the 'else' clause for exceptions
    # _otherwise = FieldAttribute(isa='list')

    def __init__(self, play=None, parent_block=None, role=None, task_include=None, use_handlers=False, implicit=False):
        self._play = play
        self._role = role
        self._parent = None
        self._dep_chain = None
        self._use_handlers = use_handlers
        self._implicit = implicit

        # end of role flag
        self._eor = False

        # a task include takes precedence over a parent block as parent
        if task_include:
            self._parent = task_include
        elif parent_block:
            self._parent = parent_block

        super(Block, self).__init__()

    def __repr__(self):
        return "BLOCK(uuid=%s)(id=%s)(parent=%s)" % (self._uuid, id(self), self._parent)

    def __eq__(self, other):
        '''object comparison based on _uuid'''
        return self._uuid == other._uuid

    def __ne__(self, other):
        '''object comparison based on _uuid'''
        return self._uuid != other._uuid

    def get_vars(self):
        '''
        Blocks do not store variables directly, however they may be a member
        of a role or task include which does, so return those if present.
        '''
        all_vars = self.vars.copy()

        if self._parent:
            all_vars.update(self._parent.get_vars())

        return all_vars

    @staticmethod
    def load(data, play=None, parent_block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
        '''Create a Block from a datastructure; anything that is not a
        block-shaped mapping is wrapped in an implicit block.'''
        implicit = not Block.is_block(data)
        b = Block(play=play, parent_block=parent_block, role=role, task_include=task_include, use_handlers=use_handlers, implicit=implicit)
        return b.load_data(data, variable_manager=variable_manager, loader=loader)

    @staticmethod
    def is_block(ds):
        '''A dict containing any of the block/rescue/always keys is a block.'''
        is_block = False
        if isinstance(ds, dict):
            for attr in ('block', 'rescue', 'always'):
                if attr in ds:
                    is_block = True
                    break
        return is_block

    def preprocess_data(self, ds):
        '''
        If a simple task is given, an implicit block for that single task
        is created, which goes in the main portion of the block
        '''
        if not Block.is_block(ds):
            if isinstance(ds, list):
                return super(Block, self).preprocess_data(dict(block=ds))
            else:
                return super(Block, self).preprocess_data(dict(block=[ds]))
        return super(Block, self).preprocess_data(ds)

    def _load_task_list(self, ds, error_msg):
        '''Shared loader for the block/rescue/always task lists; wraps
        malformed-data assertions into AnsibleParserError.'''
        try:
            return load_list_of_tasks(
                ds,
                play=self._play,
                block=self,
                role=self._role,
                task_include=None,
                variable_manager=self._variable_manager,
                loader=self._loader,
                use_handlers=self._use_handlers,
            )
        except AssertionError as e:
            raise AnsibleParserError(error_msg, obj=self._ds, orig_exc=e)

    def _load_block(self, attr, ds):
        return self._load_task_list(ds, "A malformed block was encountered while loading a block")

    def _load_rescue(self, attr, ds):
        return self._load_task_list(ds, "A malformed block was encountered while loading rescue.")

    def _load_always(self, attr, ds):
        return self._load_task_list(ds, "A malformed block was encountered while loading always")

    def get_dep_chain(self):
        '''Return a copy of the role dependency chain, deferring to the
        parent when this block has none of its own.'''
        if self._dep_chain is None:
            if self._parent:
                return self._parent.get_dep_chain()
            else:
                return None
        else:
            return self._dep_chain[:]

    def copy(self, exclude_parent=False, exclude_tasks=False):
        '''Deep-ish copy of the block; task lists are duplicated and
        re-parented onto the new block unless exclude_tasks is set.'''

        def _dupe_task_list(task_list, new_block):
            new_task_list = []
            for task in task_list:
                new_task = task.copy(exclude_parent=True)
                if task._parent:
                    new_task._parent = task._parent.copy(exclude_tasks=True)
                    if task._parent == new_block:
                        # If task._parent is the same as new_block, just replace it
                        new_task._parent = new_block
                    else:
                        # task may not be a direct child of new_block, search for the correct place to insert new_block
                        cur_obj = new_task._parent
                        while cur_obj._parent and cur_obj._parent != new_block:
                            cur_obj = cur_obj._parent

                        cur_obj._parent = new_block
                else:
                    new_task._parent = new_block

                new_task_list.append(new_task)

            return new_task_list

        new_me = super(Block, self).copy()
        new_me._play = self._play
        new_me._use_handlers = self._use_handlers
        new_me._eor = self._eor

        if self._dep_chain is not None:
            new_me._dep_chain = self._dep_chain[:]

        new_me._parent = None
        if self._parent and not exclude_parent:
            new_me._parent = self._parent.copy(exclude_tasks=True)

        if not exclude_tasks:
            new_me.block = _dupe_task_list(self.block or [], new_me)
            new_me.rescue = _dupe_task_list(self.rescue or [], new_me)
            new_me.always = _dupe_task_list(self.always or [], new_me)

        new_me._role = None
        if self._role:
            # roles are shared, not copied
            new_me._role = self._role

        new_me.validate()
        return new_me

    def serialize(self):
        '''
        Override of the default serialize method, since when we're serializing
        a task we don't want to include the attribute list of tasks.
        '''
        data = dict()
        for attr in self._valid_attrs:
            if attr not in ('block', 'rescue', 'always'):
                data[attr] = getattr(self, attr)

        data['dep_chain'] = self.get_dep_chain()
        data['eor'] = self._eor

        if self._role is not None:
            data['role'] = self._role.serialize()
        if self._parent is not None:
            data['parent'] = self._parent.copy(exclude_tasks=True).serialize()
            data['parent_type'] = self._parent.__class__.__name__

        return data

    def deserialize(self, data):
        '''
        Override of the default deserialize method, to match the above overridden
        serialize method
        '''
        # import is here to avoid import loops
        from ansible.playbook.task_include import TaskInclude
        from ansible.playbook.handler_task_include import HandlerTaskInclude

        # we don't want the full set of attributes (the task lists), as that
        # would lead to a serialize/deserialize loop
        for attr in self._valid_attrs:
            if attr in data and attr not in ('block', 'rescue', 'always'):
                setattr(self, attr, data.get(attr))

        self._dep_chain = data.get('dep_chain', None)
        self._eor = data.get('eor', False)

        # if there was a serialized role, unpack it too
        role_data = data.get('role')
        if role_data:
            r = Role()
            r.deserialize(role_data)
            self._role = r

        parent_data = data.get('parent')
        if parent_data:
            parent_type = data.get('parent_type')
            if parent_type == 'Block':
                p = Block()
            elif parent_type == 'TaskInclude':
                p = TaskInclude()
            elif parent_type == 'HandlerTaskInclude':
                p = HandlerTaskInclude()
            p.deserialize(parent_data)
            self._parent = p
            self._dep_chain = self._parent.get_dep_chain()

    def set_loader(self, loader):
        '''Propagate the DataLoader to this block, its parent/role and
        any role dependencies.'''
        self._loader = loader
        if self._parent:
            self._parent.set_loader(loader)
        elif self._role:
            self._role.set_loader(loader)

        dep_chain = self.get_dep_chain()
        if dep_chain:
            for dep in dep_chain:
                dep.set_loader(loader)

    def _get_parent_attribute(self, attr, extend=False, prepend=False):
        '''
        Generic logic to get the attribute or parent attribute for a block value.
        '''
        # the extend/prepend parameters are kept for interface
        # compatibility but are always taken from the attribute itself
        extend = self._valid_attrs[attr].extend
        prepend = self._valid_attrs[attr].prepend
        try:
            value = self._attributes[attr]
            # If parent is static, we can grab attrs from the parent
            # otherwise, defer to the grandparent
            if getattr(self._parent, 'statically_loaded', True):
                _parent = self._parent
            else:
                _parent = self._parent._parent

            if _parent and (value is None or extend):
                try:
                    if getattr(_parent, 'statically_loaded', True):
                        if hasattr(_parent, '_get_parent_attribute'):
                            parent_value = _parent._get_parent_attribute(attr)
                        else:
                            parent_value = _parent._attributes.get(attr, None)
                        if extend:
                            value = self._extend_value(value, parent_value, prepend)
                        else:
                            value = parent_value
                except AttributeError:
                    pass
            if self._role and (value is None or extend):
                try:
                    if hasattr(self._role, '_get_parent_attribute'):
                        # BUG FIX: this previously called the non-existent
                        # public name 'get_parent_attribute', which raised
                        # AttributeError and was silently swallowed below,
                        # skipping role-level attribute inheritance.
                        parent_value = self._role._get_parent_attribute(attr)
                    else:
                        parent_value = self._role._attributes.get(attr, None)
                    if extend:
                        value = self._extend_value(value, parent_value, prepend)
                    else:
                        value = parent_value

                    dep_chain = self.get_dep_chain()
                    if dep_chain and (value is None or extend):
                        dep_chain.reverse()
                        for dep in dep_chain:
                            if hasattr(dep, '_get_parent_attribute'):
                                dep_value = dep._get_parent_attribute(attr)
                            else:
                                dep_value = dep._attributes.get(attr, None)
                            if extend:
                                value = self._extend_value(value, dep_value, prepend)
                            else:
                                value = dep_value

                            if value is not None and not extend:
                                break
                except AttributeError:
                    pass
            if self._play and (value is None or extend):
                try:
                    play_value = self._play._attributes.get(attr, None)
                    if play_value is not None:
                        if extend:
                            value = self._extend_value(value, play_value, prepend)
                        else:
                            value = play_value
                except AttributeError:
                    pass
        except KeyError:
            pass

        return value

    def filter_tagged_tasks(self, play_context, all_vars):
        '''
        Creates a new block, with task lists filtered based on the tags contained
        within the play_context object.
        '''

        def evaluate_and_append_task(target):
            tmp_list = []
            for task in target:
                if isinstance(task, Block):
                    tmp_list.append(evaluate_block(task))
                elif (task.action == 'meta' or
                        (task.action == 'include' and task.evaluate_tags([], play_context.skip_tags, all_vars=all_vars)) or
                        task.evaluate_tags(play_context.only_tags, play_context.skip_tags, all_vars=all_vars)):
                    tmp_list.append(task)
            return tmp_list

        def evaluate_block(block):
            new_block = self.copy(exclude_tasks=True)
            new_block.block = evaluate_and_append_task(block.block)
            new_block.rescue = evaluate_and_append_task(block.rescue)
            new_block.always = evaluate_and_append_task(block.always)
            return new_block

        return evaluate_block(self)

    def has_tasks(self):
        return len(self.block) > 0 or len(self.rescue) > 0 or len(self.always) > 0

    def get_include_params(self):
        if self._parent:
            return self._parent.get_include_params()
        else:
            return dict()

    def all_parents_static(self):
        '''
        Determine if all of the parents of this block were statically loaded
        or not. Since Task/TaskInclude objects may be in the chain, they simply
        call their parents all_parents_static() method. Only Block objects in
        the chain check the statically_loaded value of the parent.
        '''
        from ansible.playbook.task_include import TaskInclude
        if self._parent:
            if isinstance(self._parent, TaskInclude) and not self._parent.statically_loaded:
                return False
            return self._parent.all_parents_static()

        return True

    def get_first_parent_include(self):
        '''Walk up the parent chain and return the first TaskInclude, or
        None when there is none.'''
        from ansible.playbook.task_include import TaskInclude
        if self._parent:
            if isinstance(self._parent, TaskInclude):
                return self._parent
            return self._parent.get_first_parent_include()
        return None
| gpl-3.0 |
nkgilley/home-assistant | homeassistant/components/local_ip/sensor.py | 16 | 1143 | """Sensor platform for local_ip."""
from homeassistant.const import CONF_NAME
from homeassistant.helpers.entity import Entity
from homeassistant.util import get_local_ip
from .const import DOMAIN, SENSOR
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up the platform from config_entry."""
    configured_name = config_entry.data.get(CONF_NAME)
    # Fall back to the integration domain when no name was configured.
    async_add_entities([IPSensor(configured_name or DOMAIN)], True)
class IPSensor(Entity):
    """Sensor entity reporting the host's local IP address."""

    def __init__(self, name):
        """Initialize the sensor with its display name; the state is
        filled in on the first update."""
        self._name = name
        self._state = None

    @property
    def name(self):
        """Return the display name of the sensor."""
        return self._name

    @property
    def unique_id(self):
        """Return the unique id of the sensor."""
        return SENSOR

    @property
    def state(self):
        """Return the last known local IP address."""
        return self._state

    @property
    def icon(self):
        """Return the icon of the sensor."""
        return "mdi:ip"

    def update(self):
        """Refresh the state with the current local IP address."""
        self._state = get_local_ip()
| apache-2.0 |
Mistobaan/tensorflow | tensorflow/python/kernel_tests/batchtospace_op_test.py | 23 | 11767 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for BatchToSpace op.
Additional tests are included in spacetobatch_op_test.py, where the BatchToSpace
op is tested in tandem with its reverse SpaceToBatch op.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.ops import gradient_checker
from tensorflow.python.platform import test
class PythonOpImpl(object):
  """Mixin dispatching batch_to_space through the public Python wrapper."""

  @staticmethod
  def batch_to_space(*args, **kwargs):
    return array_ops.batch_to_space(*args, **kwargs)
class CppOpImpl(object):
  """Mixin dispatching batch_to_space directly to the generated C++ op."""

  @staticmethod
  def batch_to_space(*args, **kwargs):
    return gen_array_ops._batch_to_space(*args, **kwargs)
class BatchToSpaceDepthToSpace(test.TestCase, PythonOpImpl):

  # Verifies that: batch_to_space(x) = transpose(depth_to_space(transpose(x)))
  def testDepthToSpaceTranspose(self):
    x = np.arange(20 * 5 * 8 * 7, dtype=np.float32).reshape([20, 5, 8, 7])
    block_size = 2
    # Zero crops, checked for both supported crops dtypes.
    for crops_dtype in [dtypes.int64, dtypes.int32]:
      crops = array_ops.zeros((2, 2), dtype=crops_dtype)
      y1 = self.batch_to_space(x, crops, block_size=block_size)
      # Equivalent result computed via depth_to_space on the transposed input.
      y2 = array_ops.transpose(
          array_ops.depth_to_space(
              array_ops.transpose(x, [3, 1, 2, 0]), block_size=block_size),
          [3, 1, 2, 0])
      with self.test_session():
        self.assertAllEqual(y1.eval(), y2.eval())
class BatchToSpaceDepthToSpaceCpp(BatchToSpaceDepthToSpace, CppOpImpl):
  """Runs BatchToSpaceDepthToSpace's checks through the C++ op impl."""
  pass
class BatchToSpaceErrorHandlingTest(test.TestCase, PythonOpImpl):
  """Checks that invalid inputs to batch_to_space raise ValueError."""

  def testInputWrongDimMissingBatch(self):
    # The input is missing the first dimension ("batch")
    x_np = [[[1], [2]], [[3], [4]]]
    crops = np.zeros((2, 2), dtype=np.int32)
    block_size = 2
    with self.assertRaises(ValueError):
      _ = self.batch_to_space(x_np, crops, block_size)

  def testBlockSize0(self):
    # The block size is 0.
    x_np = [[[[1], [2]], [[3], [4]]]]
    crops = np.zeros((2, 2), dtype=np.int32)
    block_size = 0
    with self.assertRaises(ValueError):
      out_tf = self.batch_to_space(x_np, crops, block_size)
      out_tf.eval()

  def testBlockSizeOne(self):
    # The block size is 1. The block size needs to be > 1.
    x_np = [[[[1], [2]], [[3], [4]]]]
    crops = np.zeros((2, 2), dtype=np.int32)
    block_size = 1
    with self.assertRaises(ValueError):
      out_tf = self.batch_to_space(x_np, crops, block_size)
      out_tf.eval()

  def testBlockSizeLarger(self):
    # The block size is too large for this input.
    x_np = [[[[1], [2]], [[3], [4]]]]
    crops = np.zeros((2, 2), dtype=np.int32)
    block_size = 10
    with self.assertRaises(ValueError):
      out_tf = self.batch_to_space(x_np, crops, block_size)
      out_tf.eval()

  def testBlockSizeSquaredNotDivisibleBatch(self):
    # The block size squared does not divide the batch.
    x_np = [[[[1], [2], [3]], [[3], [4], [7]]]]
    crops = np.zeros((2, 2), dtype=np.int32)
    block_size = 3
    with self.assertRaises(ValueError):
      _ = self.batch_to_space(x_np, crops, block_size)

  def testUnknownShape(self):
    # With fully unknown input/crops shapes the output still has rank 4.
    t = self.batch_to_space(
        array_ops.placeholder(dtypes.float32),
        array_ops.placeholder(dtypes.int32),
        block_size=4)
    self.assertEqual(4, t.get_shape().ndims)
class BatchToSpaceErrorHandlingCppTest(BatchToSpaceErrorHandlingTest,
                                       CppOpImpl):
  """Runs the error-handling checks through the C++ op implementation."""
  pass
class BatchToSpaceNDErrorHandlingTest(test.TestCase):
  """Error-handling and shape-inference checks for batch_to_space_nd."""

  def _testStaticShape(self, input_shape, block_shape, paddings, error):
    block_shape = np.array(block_shape)
    paddings = np.array(paddings)
    # Try with sizes known at graph construction time.
    with self.assertRaises(error):
      _ = array_ops.batch_to_space_nd(
          np.zeros(input_shape, np.float32), block_shape, paddings)

  def _testDynamicShape(self, input_shape, block_shape, paddings):
    block_shape = np.array(block_shape)
    paddings = np.array(paddings)
    # Try with sizes unknown at graph construction time.
    input_placeholder = array_ops.placeholder(dtypes.float32)
    block_shape_placeholder = array_ops.placeholder(
        dtypes.int32, shape=block_shape.shape)
    paddings_placeholder = array_ops.placeholder(dtypes.int32)
    t = array_ops.batch_to_space_nd(input_placeholder, block_shape_placeholder,
                                    paddings_placeholder)
    # The error surfaces at run time as a ValueError.
    with self.assertRaises(ValueError):
      _ = t.eval({
          input_placeholder: np.zeros(input_shape, np.float32),
          block_shape_placeholder: block_shape,
          paddings_placeholder: paddings
      })

  def _testShape(self, input_shape, block_shape, paddings, error):
    # Exercise both the static and the dynamic shape paths.
    self._testStaticShape(input_shape, block_shape, paddings, error)
    self._testDynamicShape(input_shape, block_shape, paddings)

  def testInputWrongDimMissingBatch(self):
    self._testShape([2, 2], [2, 2], [[0, 0], [0, 0]], ValueError)
    self._testShape([2, 2, 3], [2, 2, 3], [[0, 0], [0, 0]], ValueError)

  def testBlockSize0(self):
    # The block size is 0.
    self._testShape([1, 2, 2, 1], [0, 1], [[0, 0], [0, 0]], ValueError)

  def testBlockSizeNegative(self):
    self._testShape([1, 2, 2, 1], [-1, 1], [[0, 0], [0, 0]], ValueError)

  def testNegativePadding(self):
    self._testShape([1, 2, 2], [1, 1], [[0, -1], [0, 0]], ValueError)

  def testCropTooLarge(self):
    # The amount to crop exceeds the padded size.
    self._testShape([1 * 2 * 2, 2, 3, 1], [2, 2], [[3, 2], [0, 0]], ValueError)

  def testBlockSizeSquaredNotDivisibleBatch(self):
    # The batch dimension is not divisible by the product of the block_shape.
    self._testShape([3, 1, 1, 1], [2, 3], [[0, 0], [0, 0]], ValueError)

  def testUnknownShape(self):
    # Verify that input shape and paddings shape can be unknown.
    _ = array_ops.batch_to_space_nd(
        array_ops.placeholder(dtypes.float32),
        array_ops.placeholder(
            dtypes.int32, shape=(2,)),
        array_ops.placeholder(dtypes.int32))
    # Only number of input dimensions is known.
    t = array_ops.batch_to_space_nd(
        array_ops.placeholder(
            dtypes.float32, shape=(None, None, None, None)),
        array_ops.placeholder(
            dtypes.int32, shape=(2,)),
        array_ops.placeholder(dtypes.int32))
    self.assertEqual(4, t.get_shape().ndims)
    # Dimensions are partially known.
    t = array_ops.batch_to_space_nd(
        array_ops.placeholder(
            dtypes.float32, shape=(None, None, None, 2)),
        array_ops.placeholder(
            dtypes.int32, shape=(2,)),
        array_ops.placeholder(dtypes.int32))
    self.assertEqual([None, None, None, 2], t.get_shape().as_list())
    # Dimensions are partially known.
    t = array_ops.batch_to_space_nd(
        array_ops.placeholder(
            dtypes.float32, shape=(3 * 2 * 3, None, None, 2)), [2, 3],
        array_ops.placeholder(dtypes.int32))
    self.assertEqual([3, None, None, 2], t.get_shape().as_list())
    # Dimensions are partially known.
    t = array_ops.batch_to_space_nd(
        array_ops.placeholder(
            dtypes.float32, shape=(3 * 2 * 3, None, 2, 2)), [2, 3],
        [[1, 1], [0, 1]])
    self.assertEqual([3, None, 5, 2], t.get_shape().as_list())
    # Dimensions are fully known.
    t = array_ops.batch_to_space_nd(
        array_ops.placeholder(
            dtypes.float32, shape=(3 * 2 * 3, 2, 1, 2)), [2, 3],
        [[1, 1], [0, 0]])
    self.assertEqual([3, 2, 3, 2], t.get_shape().as_list())
class BatchToSpaceGradientTest(test.TestCase, PythonOpImpl):
  """Numerical-vs-theoretical gradient checks for batch_to_space."""

  # Check the gradients.
  def _checkGrad(self, x, crops, block_size):
    assert 4 == x.ndim
    with self.test_session():
      tf_x = ops.convert_to_tensor(x)
      tf_y = self.batch_to_space(tf_x, crops, block_size)
      epsilon = 1e-5
      ((x_jacob_t, x_jacob_n)) = gradient_checker.compute_gradient(
          tf_x,
          x.shape,
          tf_y,
          tf_y.get_shape().as_list(),
          x_init_value=x,
          delta=epsilon)
      self.assertAllClose(x_jacob_t, x_jacob_n, rtol=1e-2, atol=epsilon)

  # Tests a gradient for batch_to_space of x which is a four dimensional
  # tensor of shape [b * block_size * block_size, h, w, d].
  def _compare(self, b, h, w, d, block_size, crop_beg, crop_end):
    block_size_sq = block_size * block_size
    x = np.random.normal(0, 1, b * h * w * d *
                         block_size_sq).astype(np.float32).reshape(
                             [b * block_size * block_size, h, w, d])
    crops = np.array(
        [[crop_beg, crop_end], [crop_beg, crop_end]], dtype=np.int32)
    self._checkGrad(x, crops, block_size)

  # Don't use very large numbers as dimensions here as the result is tensor
  # with cartesian product of the dimensions.
  def testSmall(self):
    block_size = 2
    crop_beg = 0
    crop_end = 0
    self._compare(1, 2, 3, 5, block_size, crop_beg, crop_end)

  def testSmall2(self):
    block_size = 2
    crop_beg = 0
    crop_end = 0
    self._compare(2, 4, 3, 2, block_size, crop_beg, crop_end)

  def testSmallCrop1x1(self):
    block_size = 2
    crop_beg = 1
    crop_end = 1
    self._compare(1, 2, 3, 5, block_size, crop_beg, crop_end)
class BatchToSpaceGradientCppTest(BatchToSpaceGradientTest, CppOpImpl):
  """Runs the gradient checks through the C++ op implementation."""
  pass
class BatchToSpaceNDGradientTest(test.TestCase):
  """Gradient checks for the N-dimensional batch_to_space_nd op."""

  # Check the gradients.
  def _checkGrad(self, x, block_shape, crops, crops_dtype):
    block_shape = np.array(block_shape)
    crops = constant_op.constant(
        np.array(crops).reshape((len(block_shape), 2)), crops_dtype)
    with self.test_session():
      tf_x = ops.convert_to_tensor(x)
      tf_y = array_ops.batch_to_space_nd(tf_x, block_shape, crops)
      epsilon = 1e-5
      ((x_jacob_t, x_jacob_n)) = gradient_checker.compute_gradient(
          tf_x,
          x.shape,
          tf_y,
          tf_y.get_shape().as_list(),
          x_init_value=x,
          delta=epsilon)
      self.assertAllClose(x_jacob_t, x_jacob_n, rtol=1e-2, atol=epsilon)

  def _compare(self, input_shape, block_shape, crops, crops_dtype):
    # Scale the batch dimension so it is divisible by prod(block_shape).
    input_shape = list(input_shape)
    input_shape[0] *= np.prod(block_shape)
    x = np.random.normal(
        0, 1, np.prod(input_shape)).astype(np.float32).reshape(input_shape)
    self._checkGrad(x, block_shape, crops, crops_dtype)

  # Don't use very large numbers as dimensions here as the result is tensor
  # with cartesian product of the dimensions.
  def testSmall(self):
    for dtype in [dtypes.int64, dtypes.int32]:
      self._compare([1, 2, 3, 5], [2, 2], [[0, 0], [0, 0]], dtype)

  def testSmall2(self):
    for dtype in [dtypes.int64, dtypes.int32]:
      self._compare([2, 4, 3, 2], [2, 2], [[0, 0], [0, 0]], dtype)

  def testSmallCrop1x1(self):
    for dtype in [dtypes.int64, dtypes.int32]:
      self._compare([1, 2, 3, 5], [2, 2], [[1, 1], [1, 1]], dtype)
if __name__ == "__main__":
test.main()
| apache-2.0 |
Opentrons/labware | api/src/opentrons/protocols/execution/execute_python.py | 2 | 2604 | import asyncio
import inspect
import logging
import traceback
import sys
from typing import Any, Dict
from opentrons.drivers.smoothie_drivers.driver_3_0 import SmoothieAlarm
from opentrons.protocol_api.contexts import ProtocolContext
from opentrons.protocols.execution.errors import ExceptionInProtocolError
from opentrons.protocols.types import PythonProtocol, MalformedProtocolError
from opentrons.hardware_control import ExecutionCancelledError
MODULE_LOG = logging.getLogger(__name__)
def _runfunc_ok(run_func: Any):
if not callable(run_func):
raise SyntaxError("No function 'run(ctx)' defined")
sig = inspect.Signature.from_callable(run_func)
if not sig.parameters:
raise SyntaxError("Function 'run()' does not take any parameters")
if len(sig.parameters) > 1:
for name, param in list(sig.parameters.items())[1:]:
if param.default == inspect.Parameter.empty:
raise SyntaxError(
"Function 'run{}' must be called with more than one "
"argument but would be called as 'run(ctx)'"
.format(str(sig)))
def _find_protocol_error(tb, proto_name):
"""Return the FrameInfo for the lowest frame in the traceback from the
protocol.
"""
tb_info = traceback.extract_tb(tb)
for frame in reversed(tb_info):
if frame.filename == proto_name:
return frame
else:
raise KeyError
def run_python(
        proto: PythonProtocol, context: ProtocolContext):
    """Execute a Python protocol: exec its source, validate the run()
    entrypoint, then invoke run(context).

    Raises MalformedProtocolError when run() is missing or misshapen,
    re-raises cancellation exceptions untouched, and wraps any other
    protocol failure in ExceptionInProtocolError annotated with the
    protocol-local line number when it can be found.
    """
    new_globs: Dict[Any, Any] = {}
    exec(proto.contents, new_globs)
    # If the protocol is written correctly, it will have defined a function
    # like run(context: ProtocolContext). If so, that function is now in the
    # current scope.
    if proto.filename and proto.filename.endswith('zip'):
        # Bundled protocols always carry this canonical name.
        filename = 'protocol.ot2.py'
    else:
        filename = proto.filename or '<protocol>'
    try:
        _runfunc_ok(new_globs.get('run'))
    except SyntaxError as se:
        raise MalformedProtocolError(str(se))
    new_globs['__context'] = context
    try:
        exec('run(__context)', new_globs)
    except (SmoothieAlarm, asyncio.CancelledError, ExecutionCancelledError):
        # this is a protocol cancel and shouldn't have special logging
        raise
    except Exception as e:
        exc_type, exc_value, tb = sys.exc_info()
        try:
            frame = _find_protocol_error(tb, filename)
        except KeyError:
            # No pretty names, just raise it
            raise e
        raise ExceptionInProtocolError(e, tb, str(e), frame.lineno)
| apache-2.0 |
gameview/WareCocos2dx | cocos2dx/platform/third_party/marmalade/freetype/src/tools/docmaker/formatter.py | 515 | 4962 | # Formatter (c) 2002, 2004, 2007, 2008 David Turner <david@freetype.org>
#
from sources import *
from content import *
from utils import *
# This is the base Formatter class. Its purpose is to convert
# a content processor's data into specific documents (i.e., table of
# contents, global index, and individual API reference indices).
#
# You need to sub-class it to output anything sensible. For example,
# the file tohtml.py contains the definition of the HtmlFormatter sub-class
# used to output -- you guessed it -- HTML.
#
class Formatter:
    # Base documentation formatter (Python 2 code: uses dict.has_key and
    # the cmp-style list.sort argument). Sub-classes override the
    # *_enter/*_exit hooks; the *_dump drivers walk the processed content
    # and call the hooks in order.
    def __init__( self, processor ):
        self.processor = processor
        self.identifiers = {}
        self.chapters = processor.chapters
        self.sections = processor.sections.values()
        self.block_index = []
        # store all blocks in a dictionary
        # NOTE(review): self.blocks is never appended to below; the
        # blocks are registered in self.identifiers instead.
        self.blocks = []
        for section in self.sections:
            for block in section.blocks.values():
                self.add_identifier( block.name, block )
                # add enumeration values to the index, since this is useful
                for markup in block.markups:
                    if markup.tag == 'values':
                        for field in markup.fields:
                            self.add_identifier( field.name, block )
        # sorted list of all identifier names, used by index_dump
        self.block_index = self.identifiers.keys()
        self.block_index.sort( index_sort )
    def add_identifier( self, name, block ):
        # Register `name` -> `block`; the first definition wins and any
        # duplicate is reported on stderr.
        if self.identifiers.has_key( name ):
            # duplicate name!
            sys.stderr.write( \
                "WARNING: duplicate definition for '" + name + "' in " + \
                block.location() + ", previous definition in " + \
                self.identifiers[name].location() + "\n" )
        else:
            self.identifiers[name] = block
    #
    # Formatting the table of contents
    #
    def toc_enter( self ):
        pass
    def toc_chapter_enter( self, chapter ):
        pass
    def toc_section_enter( self, section ):
        pass
    def toc_section_exit( self, section ):
        pass
    def toc_chapter_exit( self, chapter ):
        pass
    def toc_index( self, index_filename ):
        pass
    def toc_exit( self ):
        pass
    def toc_dump( self, toc_filename = None, index_filename = None ):
        # Drive the table-of-contents hooks, optionally redirecting
        # stdout to toc_filename for the duration.
        output = None
        if toc_filename:
            output = open_output( toc_filename )
        self.toc_enter()
        for chap in self.processor.chapters:
            self.toc_chapter_enter( chap )
            for section in chap.sections:
                self.toc_section_enter( section )
                self.toc_section_exit( section )
            self.toc_chapter_exit( chap )
        self.toc_index( index_filename )
        self.toc_exit()
        if output:
            close_output( output )
    #
    # Formatting the index
    #
    def index_enter( self ):
        pass
    def index_name_enter( self, name ):
        pass
    def index_name_exit( self, name ):
        pass
    def index_exit( self ):
        pass
    def index_dump( self, index_filename = None ):
        # Drive the global-index hooks over the sorted identifier names.
        output = None
        if index_filename:
            output = open_output( index_filename )
        self.index_enter()
        for name in self.block_index:
            self.index_name_enter( name )
            self.index_name_exit( name )
        self.index_exit()
        if output:
            close_output( output )
    #
    # Formatting a section
    #
    def section_enter( self, section ):
        pass
    def block_enter( self, block ):
        pass
    def markup_enter( self, markup, block = None ):
        pass
    def field_enter( self, field, markup = None, block = None ):
        pass
    def field_exit( self, field, markup = None, block = None ):
        pass
    def markup_exit( self, markup, block = None ):
        pass
    def block_exit( self, block ):
        pass
    def section_exit( self, section ):
        pass
    def section_dump( self, section, section_filename = None ):
        # Drive the section hooks for one section: every block, its
        # markups and their fields, in document order.
        output = None
        if section_filename:
            output = open_output( section_filename )
        self.section_enter( section )
        for name in section.block_names:
            block = self.identifiers[name]
            self.block_enter( block )
            for markup in block.markups[1:]: # always ignore first markup!
                self.markup_enter( markup, block )
                for field in markup.fields:
                    self.field_enter( field, markup, block )
                    self.field_exit( field, markup, block )
                self.markup_exit( markup, block )
            self.block_exit( block )
        self.section_exit( section )
        if output:
            close_output( output )
    def section_dump_all( self ):
        for section in self.sections:
            self.section_dump( section )
# eof
| lgpl-3.0 |
jakirkham/kenjutsu | tests/test_format.py | 1 | 16058 | __author__ = "John Kirkham <kirkhamj@janelia.hhmi.org>"
__date__ = "$Dec 08, 2016 14:20:52 GMT-0500$"
import doctest
import itertools
import math
import operator
import unittest
from kenjutsu import format
try:
irange = xrange
except NameError:
irange = range
# Load doctests from `format`.
def load_tests(loader, tests, ignore):
    """unittest hook: attach the doctests of the `format` module to the suite."""
    doc_suite = doctest.DocTestSuite(format)
    tests.addTests(doc_suite)
    return tests
class TestFormat(unittest.TestCase):
def setUp(self):
pass
def test_index_to_slice(self):
with self.assertRaises(TypeError) as e:
format.index_to_slice(None)
self.assertEqual(
str(e.exception),
"Expected an integral type. Instead got `None`."
)
with self.assertRaises(TypeError) as e:
format.index_to_slice(2.5)
self.assertEqual(
str(e.exception),
"Expected an integral type. Instead got `2.5`."
)
with self.assertRaises(TypeError) as e:
format.index_to_slice((0,))
self.assertEqual(
str(e.exception),
"Expected an integral type. Instead got `(0,)`."
)
with self.assertRaises(TypeError) as e:
format.index_to_slice([0, 1])
self.assertEqual(
str(e.exception),
"Expected an integral type. Instead got `[0, 1]`."
)
with self.assertRaises(TypeError) as e:
format.index_to_slice(slice(None))
self.assertEqual(
str(e.exception),
"Expected an integral type. Instead got `slice(None, None, None)`."
)
with self.assertRaises(TypeError) as e:
format.index_to_slice(Ellipsis)
self.assertEqual(
str(e.exception),
"Expected an integral type. Instead got `Ellipsis`."
)
for size in [10, 11, 12]:
excess = size + 3
each_range = range(size)
for index in itertools.chain(irange(-excess, excess)):
expected_result = []
try:
expected_result = [each_range[index]]
except IndexError:
pass
rf_slice = format.index_to_slice(index)
self.assertIsInstance(rf_slice, slice)
result = list(each_range[rf_slice])
self.assertEqual(result, expected_result)
start = rf_slice.start
stop = rf_slice.stop
step = rf_slice.step
self.assertEqual(int(math.copysign(1, index)), step)
l = float(stop - start)/float(step)
self.assertEqual(
int(math.ceil(l)),
1
)
def test_reformat_slice(self):
with self.assertRaises(TypeError) as e:
format.reformat_slice(None)
self.assertEqual(
str(e.exception),
"Expected an index acceptable type."
" Instead got, `None`."
)
with self.assertRaises(ValueError) as e:
format.reformat_slice(slice(None, None, 0))
self.assertEqual(
str(e.exception),
"Slice cannot have a step size of `0`."
)
with self.assertRaises(TypeError) as e:
format.reformat_slice([None])
self.assertEqual(
str(e.exception),
"Arbitrary sequences not permitted."
" All elements must be of integral type."
)
for size in [10, 11, 12]:
excess = size + 3
each_range = range(size)
for start in itertools.chain([None], irange(-excess, excess)):
for stop in itertools.chain([None], irange(-excess, excess)):
for step in itertools.chain(irange(-excess, excess)):
step = None if step == 0 else step
a_slice = slice(start, stop, step)
rf_slice = format.reformat_slice(a_slice)
self.assertEqual(
each_range[a_slice],
each_range[rf_slice]
)
rf_slice = format.reformat_slice(a_slice, size)
self.assertEqual(
each_range[a_slice],
each_range[rf_slice]
)
new_start = rf_slice.start
new_stop = rf_slice.stop
new_step = rf_slice.step
if (new_step is not None and
new_step < 0 and
new_stop is None):
new_stop = -1
l = float(new_stop - new_start)/float(new_step)
self.assertEqual(
int(math.ceil(l)),
len(each_range[a_slice])
)
a_slice = list()
a_slice.append(0 if start is None else start)
a_slice.append(0 if stop is None else stop)
a_slice.append(0 if step is None else step)
a_op = operator.itemgetter(*a_slice)
expected_result = None
try:
expected_result = a_op(each_range)
except IndexError:
pass
if expected_result is not None:
rf_slice = format.reformat_slice(a_slice)
rf_op = operator.itemgetter(*rf_slice)
self.assertEqual(
expected_result,
rf_op(each_range)
)
rf_slice = format.reformat_slice(a_slice, size)
rf_op = operator.itemgetter(*rf_slice)
self.assertEqual(
expected_result,
rf_op(each_range)
)
else:
format.reformat_slice(a_slice)
with self.assertRaises(IndexError):
format.reformat_slice(a_slice, size)
if start is not None:
a_slice = start
expected_result = None
try:
expected_result = each_range[a_slice]
except IndexError:
pass
if expected_result is not None:
rf_slice = format.reformat_slice(a_slice)
self.assertEqual(
expected_result,
each_range[rf_slice]
)
rf_slice = format.reformat_slice(a_slice, size)
self.assertEqual(
expected_result,
each_range[rf_slice]
)
else:
format.reformat_slice(a_slice)
with self.assertRaises(IndexError):
format.reformat_slice(a_slice, size)
rf_slice = format.reformat_slice(Ellipsis)
self.assertEqual(
each_range[:],
each_range[rf_slice]
)
rf_slice = format.reformat_slice(Ellipsis, size)
self.assertEqual(
each_range[:],
each_range[rf_slice]
)
rf_slice = format.reformat_slice(tuple())
self.assertEqual(
each_range[:],
each_range[rf_slice]
)
rf_slice = format.reformat_slice(tuple(), size)
self.assertEqual(
each_range[:],
each_range[rf_slice]
)
start = rf_slice.start
stop = rf_slice.stop
step = rf_slice.step
if step is not None and step < 0 and stop is None:
stop = -1
l = float(stop - start)/float(step)
self.assertEqual(
int(math.ceil(l)),
len(each_range[:])
)
def test_reformat_slices(self):
with self.assertRaises(ValueError) as e:
format.reformat_slices(
(slice(None), slice(None)), (1,)
)
self.assertEqual(
str(e.exception),
"Shape must be as large or larger than the number of slices."
)
with self.assertRaises(ValueError) as e:
format.reformat_slices(
(slice(None), slice(None), Ellipsis), (1,)
)
self.assertEqual(
str(e.exception),
"Shape must be as large or larger than the number of slices"
" without the Ellipsis."
)
with self.assertRaises(ValueError) as e:
format.reformat_slices(
(Ellipsis, Ellipsis), (1,)
)
self.assertEqual(
str(e.exception),
"Only one Ellipsis is permitted. Found multiple."
)
with self.assertRaises(ValueError) as e:
format.reformat_slices(
([0, 1], [0, 1]),
)
self.assertEqual(
str(e.exception),
"Only one integral sequence supported. Instead got `2`."
)
rf_slice = format.reformat_slices(slice(None))
self.assertEqual(
rf_slice,
(slice(0, None, 1),)
)
rf_slice = format.reformat_slices((slice(None),))
self.assertEqual(
rf_slice,
(slice(0, None, 1),)
)
rf_slice = format.reformat_slices(Ellipsis)
self.assertEqual(
rf_slice,
(Ellipsis,)
)
rf_slice = format.reformat_slices(Ellipsis, 10)
self.assertEqual(
rf_slice,
(slice(0, 10, 1),)
)
rf_slice = format.reformat_slices(tuple())
self.assertEqual(
rf_slice,
(Ellipsis,)
)
rf_slice = format.reformat_slices(tuple(), 10)
self.assertEqual(
rf_slice,
(slice(0, 10, 1),)
)
rf_slice = format.reformat_slices(slice(None), 10)
self.assertEqual(
rf_slice,
(slice(0, 10, 1),)
)
rf_slice = format.reformat_slices(slice(None), (1, 2))
self.assertEqual(
rf_slice,
(slice(0, 1, 1), slice(0, 2, 1))
)
rf_slice = format.reformat_slices((slice(None),), 10)
self.assertEqual(
rf_slice,
(slice(0, 10, 1),)
)
rf_slice = format.reformat_slices((slice(None),), (1, 2))
self.assertEqual(
rf_slice,
(slice(0, 1, 1), slice(0, 2, 1))
)
rf_slice = format.reformat_slices((
-1,
slice(None),
slice(3, None),
slice(None, 5),
slice(None, None, 2)
))
self.assertEqual(
rf_slice,
(
-1,
slice(0, None, 1),
slice(3, None, 1),
slice(0, 5, 1),
slice(0, None, 2)
)
)
rf_slice = format.reformat_slices(
(
-1,
slice(None),
slice(3, None),
slice(None, 5),
slice(None, None, 2),
[-1, -2, -1, 1, 5]
),
(12, 10, 13, 15, 20, 10)
)
self.assertEqual(
rf_slice,
(
11,
slice(0, 10, 1),
slice(3, 13, 1),
slice(0, 5, 1),
slice(0, 20, 2),
[9, 8, 9, 1, 5]
)
)
rf_slice = format.reformat_slices(
Ellipsis,
(2, 3, 4, 5)
)
self.assertEqual(
rf_slice,
(
slice(0, 2, 1),
slice(0, 3, 1),
slice(0, 4, 1),
slice(0, 5, 1)
)
)
rf_slice = format.reformat_slices(
(
Ellipsis,
slice(0, 1)
),
(2, 3, 4, 5)
)
self.assertEqual(
rf_slice,
(
slice(0, 2, 1),
slice(0, 3, 1),
slice(0, 4, 1),
slice(0, 1, 1)
)
)
rf_slice = format.reformat_slices(
(
slice(0, 1),
Ellipsis
),
(2, 3, 4, 5)
)
self.assertEqual(
rf_slice,
(
slice(0, 1, 1),
slice(0, 3, 1),
slice(0, 4, 1),
slice(0, 5, 1)
)
)
rf_slice = format.reformat_slices(
(
slice(0, 1),
Ellipsis,
slice(0, 1)
),
(2, 3, 4, 5)
)
self.assertEqual(
rf_slice,
(
slice(0, 1, 1),
slice(0, 3, 1),
slice(0, 4, 1),
slice(0, 1, 1)
)
)
rf_slice = format.reformat_slices(
(
slice(0, 1),
Ellipsis,
slice(0, 1),
slice(0, 1),
slice(0, 1)
),
(2, 3, 4, 5)
)
self.assertEqual(
rf_slice,
(
slice(0, 1, 1),
slice(0, 1, 1),
slice(0, 1, 1),
slice(0, 1, 1)
)
)
def test_split_indices(self):
with self.assertRaises(ValueError) as e:
format.split_indices(
([0, 1], [0, 1]),
)
self.assertEqual(
str(e.exception),
"Only one integral sequence supported. Instead got `2`."
)
sp_slice = format.split_indices(
(3, Ellipsis, 0, slice(2, 5, 1), -1)
)
self.assertEqual(
sp_slice,
[
(3, Ellipsis, 0, slice(2, 5, 1), -1)
]
)
sp_slice = format.split_indices(
(3, Ellipsis, 0, slice(2, 5, 1), [-1])
)
self.assertEqual(
sp_slice,
[
(3, Ellipsis, 0, slice(2, 5, 1), slice(-1, -2, -1))
]
)
sp_slice = format.split_indices(
(3, Ellipsis, [0], slice(2, 5, 1), -1)
)
self.assertEqual(
sp_slice,
[
(3, Ellipsis, slice(0, 1, 1), slice(2, 5, 1), -1)
]
)
sp_slice = format.split_indices(
(3, Ellipsis, [0, 1, 2], slice(2, 5, 1), -1)
)
self.assertEqual(
sp_slice,
[
(3, Ellipsis, slice(0, 1, 1), slice(2, 5, 1), -1),
(3, Ellipsis, slice(1, 2, 1), slice(2, 5, 1), -1),
(3, Ellipsis, slice(2, 3, 1), slice(2, 5, 1), -1)
]
)
sp_slice = format.split_indices(
(3, Ellipsis, [2, 0, 1, 2], slice(2, 5, 1), -1)
)
self.assertEqual(
sp_slice,
[
(3, Ellipsis, slice(2, 3, 1), slice(2, 5, 1), -1),
(3, Ellipsis, slice(0, 1, 1), slice(2, 5, 1), -1),
(3, Ellipsis, slice(1, 2, 1), slice(2, 5, 1), -1),
(3, Ellipsis, slice(2, 3, 1), slice(2, 5, 1), -1)
]
)
| bsd-3-clause |
Tomtomgo/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/tool/steps/abstractstep.py | 129 | 3437 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
from webkitpy.common.system.executive import ScriptError
from webkitpy.tool.steps.options import Options
class AbstractStep(object):
    """Base class for the individual steps of a webkit-patch command.

    Steps share a plain ``state`` dictionary; expensive values are computed
    lazily and memoized in it via :meth:`cached_lookup`.
    """

    def __init__(self, tool, options):
        self._tool = tool
        self._options = options

    def _exit(self, code):
        sys.exit(code)

    def _changed_files(self, state):
        return self.cached_lookup(state, "changed_files")

    # Lazily-computed well-known state entries: key -> callable(self, state).
    _well_known_keys = {
        # FIXME: Should this use state.get('bug_id') or state.get('patch').bug_id() like UpdateChangeLogsWithReviewer does?
        "bug": lambda self, state: self._tool.bugs.fetch_bug(state["bug_id"]),
        # bug_title can either be a new title given by the user, or one from an existing bug.
        "bug_title": lambda self, state: self.cached_lookup(state, 'bug').title(),
        "changed_files": lambda self, state: self._tool.scm().changed_files(self._options.git_commit),
        "diff": lambda self, state: self._tool.scm().create_patch(self._options.git_commit, changed_files=self._changed_files(state)),
        # Absolute path to ChangeLog files.
        "changelogs": lambda self, state: self._tool.checkout().modified_changelogs(self._options.git_commit, changed_files=self._changed_files(state)),
    }

    def cached_lookup(self, state, key, promise=None):
        """Return ``state[key]``, computing and caching it on first access.

        ``promise`` is a callable ``(self, state) -> value``; when omitted, the
        matching entry of ``_well_known_keys`` is used.  Falsy cached values
        (e.g. the ``None`` written by ``did_modify_checkout``) are treated as
        absent and recomputed.
        """
        if state.get(key):
            return state[key]
        if not promise:
            promise = self._well_known_keys.get(key)
        state[key] = promise(self, state)
        return state[key]

    def did_modify_checkout(self, state):
        """Invalidate cached state entries that depend on checkout contents."""
        state["diff"] = None
        state["changelogs"] = None
        state["changed_files"] = None

    @classmethod
    def options(cls):
        return [
            # We need this option here because cached_lookup uses it. :(
            Options.git_commit,
        ]

    def run(self, state):
        # Fixed: `raise NotImplementedError, "msg"` is Python-2-only statement
        # syntax (a SyntaxError under Python 3); use the call form instead.
        raise NotImplementedError("subclasses must implement")
| bsd-3-clause |
wwj718/django-rest-framework | setup.py | 32 | 2853 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import re
import sys
from setuptools import setup
def get_version(package):
    """
    Return package version as listed in `__version__` in `init.py`.
    """
    # Use a context manager so the file handle is closed deterministically;
    # the original `open(...).read()` relied on the GC to close it.
    with open(os.path.join(package, '__init__.py')) as init_file:
        init_py = init_file.read()
    return re.search("__version__ = ['\"]([^'\"]+)['\"]", init_py).group(1)
def get_packages(package):
    """
    Return root package and all sub-packages.
    """
    # A directory is a package iff it contains an __init__.py file.
    packages = []
    for dirpath, _dirnames, _filenames in os.walk(package):
        if os.path.exists(os.path.join(dirpath, '__init__.py')):
            packages.append(dirpath)
    return packages
def get_package_data(package):
    """
    Return all files under the root package, that are not in a
    package themselves.
    """
    # Collect files from every directory that is NOT itself a package,
    # recording paths relative to the package root.
    filepaths = []
    for dirpath, dirnames, filenames in os.walk(package):
        if os.path.exists(os.path.join(dirpath, '__init__.py')):
            continue
        base = dirpath.replace(package + os.sep, '', 1)
        for filename in filenames:
            filepaths.append(os.path.join(base, filename))
    return {package: filepaths}
version = get_version('rest_framework')
if sys.argv[-1] == 'publish':
if os.system("pip freeze | grep wheel"):
print("wheel not installed.\nUse `pip install wheel`.\nExiting.")
sys.exit()
if os.system("pip freeze | grep twine"):
print("twine not installed.\nUse `pip install twine`.\nExiting.")
sys.exit()
os.system("python setup.py sdist bdist_wheel")
os.system("twine upload dist/*")
print("You probably want to also tag the version now:")
print(" git tag -a %s -m 'version %s'" % (version, version))
print(" git push --tags")
sys.exit()
setup(
name='djangorestframework',
version=version,
url='http://www.django-rest-framework.org',
license='BSD',
description='Web APIs for Django, made easy.',
author='Tom Christie',
author_email='tom@tomchristie.com', # SEE NOTE BELOW (*)
packages=get_packages('rest_framework'),
package_data=get_package_data('rest_framework'),
install_requires=[],
zip_safe=False,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP',
]
)
# (*) Please direct queries to the discussion group, rather than to me directly
# Doing so helps ensure your question is helpful to other users.
# Queries directly to my email are likely to receive a canned response.
#
# Many thanks for your understanding.
| bsd-2-clause |
erudit/zenon | eruditorg/apps/userspace/journal/editor/forms.py | 1 | 4017 | # -*- coding: utf-8 -*-
from django import forms
from django.conf import settings
from django.utils.translation import gettext as _
from resumable_uploads.forms import PlUploadFormField
from resumable_uploads.models import ResumableFile
from core.editor.models import IssueSubmission
class ContactModelChoiceField(forms.ModelChoiceField):
    """Model choice field labelling each contact with a human-readable name."""

    def label_from_instance(self, obj):
        # Prefer the user's full name; fall back to the username.
        display_name = obj.get_full_name() or obj.username
        return "{fullname}".format(fullname=display_name)
class IssueSubmissionForm(forms.ModelForm):
    """ModelForm used to create or edit an issue submission for a journal."""

    required_css_class = 'required'

    class Meta:
        model = IssueSubmission
        fields = [
            'year',
            'volume',
            'number',
            'contact',
            'comment',
        ]
        field_classes = {
            'contact': ContactModelChoiceField,
        }

    def disable_form(self):
        """ Disable all the fields of this form """
        for field_name in ('year', 'contact', 'number',
                           'volume', 'comment',
                           'submissions'):
            self.fields[field_name].widget.attrs['disabled'] = True

    def __init__(self, *args, **kwargs):
        # `journal` and `user` are required keyword arguments supplied by
        # the calling view; they are not regular form fields.
        self.journal = kwargs.pop('journal')
        self.user = kwargs.pop('user')
        kwargs.setdefault('label_suffix', '')
        super(IssueSubmissionForm, self).__init__(*args, **kwargs)
        self.populate_select(self.user)
        self.instance.journal = self.journal

    def populate_select(self, user):
        """Restrict the contact choices to the journal's members."""
        journal_members = self.journal.members.all()
        first_member = journal_members.first()
        self.fields['contact'].queryset = journal_members
        if first_member:
            # Pre-select the first member as a sensible default.
            self.fields['contact'].initial = first_member.id
class IssueSubmissionUploadForm(IssueSubmissionForm):
    """Issue submission form that additionally handles resumable file uploads.

    The `submissions` field value is a comma-separated list of ResumableFile
    ids produced by the plupload widget.
    """

    class Meta(IssueSubmissionForm.Meta):
        fields = (
            'year',
            'volume',
            'number',
            'contact',
            'comment',
            'submissions',
        )

    # Chunked-upload widget; stores the uploaded files under UPLOAD_ROOT.
    submissions = PlUploadFormField(
        path=settings.UPLOAD_ROOT,
        label=_("Fichier"),
        options={
            'max_file_size': '3000mb',
            'multi_selection': False,
            'auto_upload': True,
        },
    )

    # Free-text comment attached to the uploaded file (stored on the
    # submission's status track, not on the model itself).
    file_comment = forms.CharField(
        label=_('Commentaires sur le fichier'),
        required=False,
        widget=forms.Textarea,
    )

    def __init__(self, *args, **kwargs):
        super(IssueSubmissionUploadForm, self).__init__(*args, **kwargs)
        # Update some fields
        # Pre-populate the widget with the ids of files already attached to
        # the latest files version of this submission.
        initial_files = self.instance.last_files_version.submissions.all() \
            .values_list('id', flat=True)
        self.fields['submissions'].initial = ','.join(map(str, initial_files))

    def save(self, commit=True):
        # Pop the raw id list first so the ModelForm save does not see it.
        submissions = self.cleaned_data.pop('submissions', '')
        instance = super(IssueSubmissionUploadForm, self).save(commit)
        # Automatically submit the submission when a new upload is saved.
        instance.submit()
        instance.save()
        # Saves the resumable files associated to the submission
        if commit:
            fversion = instance.last_files_version
            fversion.submissions.clear()
            if submissions:
                file_ids = submissions.split(',')
                for fid in file_ids:
                    try:
                        rfile = ResumableFile.objects.get(id=fid)
                    except ResumableFile.DoesNotExist:
                        # Ids that no longer resolve to a file are skipped
                        # silently (best-effort re-attachment).
                        pass
                    else:
                        fversion.submissions.add(rfile)
        # Saves the comment associated with the submission
        status_track = instance.last_status_track
        status_track.comment = self.cleaned_data.get('file_comment')
        status_track.save()
        return instance
class IssueSubmissionTransitionCommentForm(forms.Form):
    """Form holding an optional free-text comment for a submission transition."""

    comment = forms.CharField(
        widget=forms.Textarea,
        required=False,
        label=_('Commentaires'),
    )
| gpl-3.0 |
japeto/Vigtech-Services | env/lib/python2.7/site-packages/django/core/management/commands/shell.py | 492 | 3951 | import os
from django.core.management.base import BaseCommand
class Command(BaseCommand):
    """Implements `manage.py shell`: start an interactive Python interpreter,
    preferring IPython or bpython when available and falling back to the
    stdlib `code` module otherwise."""

    help = "Runs a Python interactive interpreter. Tries to use IPython or bpython, if one of them is available."
    requires_system_checks = False
    # Alternative shells probed, in order of preference.
    shells = ['ipython', 'bpython']

    def add_arguments(self, parser):
        parser.add_argument('--plain', action='store_true', dest='plain',
            help='Tells Django to use plain Python, not IPython or bpython.')
        parser.add_argument('--no-startup', action='store_true', dest='no_startup',
            help='When using plain Python, ignore the PYTHONSTARTUP environment variable and ~/.pythonrc.py script.')
        parser.add_argument('-i', '--interface', choices=self.shells, dest='interface',
            help='Specify an interactive interpreter interface. Available options: "ipython" and "bpython"')

    def _ipython_pre_011(self):
        """Start IPython pre-0.11"""
        from IPython.Shell import IPShell
        shell = IPShell(argv=[])
        shell.mainloop()

    def _ipython_pre_100(self):
        """Start IPython pre-1.0.0"""
        from IPython.frontend.terminal.ipapp import TerminalIPythonApp
        app = TerminalIPythonApp.instance()
        app.initialize(argv=[])
        app.start()

    def _ipython(self):
        """Start IPython >= 1.0"""
        from IPython import start_ipython
        start_ipython(argv=[])

    def ipython(self):
        """Start any version of IPython"""
        # Try the newest entry point first; each helper raises ImportError
        # when its IPython version is not installed.
        for ip in (self._ipython, self._ipython_pre_100, self._ipython_pre_011):
            try:
                ip()
            except ImportError:
                pass
            else:
                return
        # no IPython, raise ImportError
        raise ImportError("No IPython")

    def bpython(self):
        """Start a bpython shell (raises ImportError when not installed)."""
        import bpython
        bpython.embed()

    def run_shell(self, shell=None):
        """Launch `shell`, or the first importable entry of self.shells."""
        available_shells = [shell] if shell else self.shells

        for shell in available_shells:
            try:
                return getattr(self, shell)()
            except ImportError:
                pass
        # Signal the caller (handle) to fall back to the plain interpreter.
        raise ImportError

    def handle(self, **options):
        try:
            if options['plain']:
                # Don't bother loading IPython, because the user wants plain Python.
                raise ImportError

            self.run_shell(shell=options['interface'])
        except ImportError:
            import code
            # Set up a dictionary to serve as the environment for the shell, so
            # that tab completion works on objects that are imported at runtime.
            # See ticket 5082.
            imported_objects = {}
            try:  # Try activating rlcompleter, because it's handy.
                import readline
            except ImportError:
                pass
            else:
                # We don't have to wrap the following import in a 'try', because
                # we already know 'readline' was imported successfully.
                import rlcompleter
                readline.set_completer(rlcompleter.Completer(imported_objects).complete)
                readline.parse_and_bind("tab:complete")

            # We want to honor both $PYTHONSTARTUP and .pythonrc.py, so follow system
            # conventions and get $PYTHONSTARTUP first then .pythonrc.py.
            if not options['no_startup']:
                for pythonrc in (os.environ.get("PYTHONSTARTUP"), '~/.pythonrc.py'):
                    if not pythonrc:
                        continue
                    pythonrc = os.path.expanduser(pythonrc)
                    if not os.path.isfile(pythonrc):
                        continue
                    try:
                        with open(pythonrc) as handle:
                            exec(compile(handle.read(), pythonrc, 'exec'), imported_objects)
                    except NameError:
                        # The startup script referenced an undefined name;
                        # ignore it rather than abort the shell.
                        pass
            code.interact(local=imported_objects)
| lgpl-3.0 |
lafayette/JBTT | framework/python/Lib/encodings/iso8859_11.py | 593 | 12591 | """ Python Character Mapping Codec iso8859_11 generated from 'MAPPINGS/ISO8859/8859-11.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    # Stateless codec delegating to the C-level charmap helpers with the
    # module's generated encoding/decoding tables (defined later in the file).

    def encode(self,input,errors='strict'):
        return codecs.charmap_encode(input,errors,encoding_table)

    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    def encode(self, input, final=False):
        # charmap encoding is stateless, so `final` can be ignored.
        return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    def decode(self, input, final=False):
        # charmap decoding is stateless, so `final` can be ignored.
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # Encoding behaviour comes entirely from Codec.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Decoding behaviour comes entirely from Codec.
    pass
### encodings module API

def getregentry():
    """Return the CodecInfo entry registered with the `encodings` package."""
    return codecs.CodecInfo(
        name='iso8859-11',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\x80' # 0x80 -> <control>
u'\x81' # 0x81 -> <control>
u'\x82' # 0x82 -> <control>
u'\x83' # 0x83 -> <control>
u'\x84' # 0x84 -> <control>
u'\x85' # 0x85 -> <control>
u'\x86' # 0x86 -> <control>
u'\x87' # 0x87 -> <control>
u'\x88' # 0x88 -> <control>
u'\x89' # 0x89 -> <control>
u'\x8a' # 0x8A -> <control>
u'\x8b' # 0x8B -> <control>
u'\x8c' # 0x8C -> <control>
u'\x8d' # 0x8D -> <control>
u'\x8e' # 0x8E -> <control>
u'\x8f' # 0x8F -> <control>
u'\x90' # 0x90 -> <control>
u'\x91' # 0x91 -> <control>
u'\x92' # 0x92 -> <control>
u'\x93' # 0x93 -> <control>
u'\x94' # 0x94 -> <control>
u'\x95' # 0x95 -> <control>
u'\x96' # 0x96 -> <control>
u'\x97' # 0x97 -> <control>
u'\x98' # 0x98 -> <control>
u'\x99' # 0x99 -> <control>
u'\x9a' # 0x9A -> <control>
u'\x9b' # 0x9B -> <control>
u'\x9c' # 0x9C -> <control>
u'\x9d' # 0x9D -> <control>
u'\x9e' # 0x9E -> <control>
u'\x9f' # 0x9F -> <control>
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\u0e01' # 0xA1 -> THAI CHARACTER KO KAI
u'\u0e02' # 0xA2 -> THAI CHARACTER KHO KHAI
u'\u0e03' # 0xA3 -> THAI CHARACTER KHO KHUAT
u'\u0e04' # 0xA4 -> THAI CHARACTER KHO KHWAI
u'\u0e05' # 0xA5 -> THAI CHARACTER KHO KHON
u'\u0e06' # 0xA6 -> THAI CHARACTER KHO RAKHANG
u'\u0e07' # 0xA7 -> THAI CHARACTER NGO NGU
u'\u0e08' # 0xA8 -> THAI CHARACTER CHO CHAN
u'\u0e09' # 0xA9 -> THAI CHARACTER CHO CHING
u'\u0e0a' # 0xAA -> THAI CHARACTER CHO CHANG
u'\u0e0b' # 0xAB -> THAI CHARACTER SO SO
u'\u0e0c' # 0xAC -> THAI CHARACTER CHO CHOE
u'\u0e0d' # 0xAD -> THAI CHARACTER YO YING
u'\u0e0e' # 0xAE -> THAI CHARACTER DO CHADA
u'\u0e0f' # 0xAF -> THAI CHARACTER TO PATAK
u'\u0e10' # 0xB0 -> THAI CHARACTER THO THAN
u'\u0e11' # 0xB1 -> THAI CHARACTER THO NANGMONTHO
u'\u0e12' # 0xB2 -> THAI CHARACTER THO PHUTHAO
u'\u0e13' # 0xB3 -> THAI CHARACTER NO NEN
u'\u0e14' # 0xB4 -> THAI CHARACTER DO DEK
u'\u0e15' # 0xB5 -> THAI CHARACTER TO TAO
u'\u0e16' # 0xB6 -> THAI CHARACTER THO THUNG
u'\u0e17' # 0xB7 -> THAI CHARACTER THO THAHAN
u'\u0e18' # 0xB8 -> THAI CHARACTER THO THONG
u'\u0e19' # 0xB9 -> THAI CHARACTER NO NU
u'\u0e1a' # 0xBA -> THAI CHARACTER BO BAIMAI
u'\u0e1b' # 0xBB -> THAI CHARACTER PO PLA
u'\u0e1c' # 0xBC -> THAI CHARACTER PHO PHUNG
u'\u0e1d' # 0xBD -> THAI CHARACTER FO FA
u'\u0e1e' # 0xBE -> THAI CHARACTER PHO PHAN
u'\u0e1f' # 0xBF -> THAI CHARACTER FO FAN
u'\u0e20' # 0xC0 -> THAI CHARACTER PHO SAMPHAO
u'\u0e21' # 0xC1 -> THAI CHARACTER MO MA
u'\u0e22' # 0xC2 -> THAI CHARACTER YO YAK
u'\u0e23' # 0xC3 -> THAI CHARACTER RO RUA
u'\u0e24' # 0xC4 -> THAI CHARACTER RU
u'\u0e25' # 0xC5 -> THAI CHARACTER LO LING
u'\u0e26' # 0xC6 -> THAI CHARACTER LU
u'\u0e27' # 0xC7 -> THAI CHARACTER WO WAEN
u'\u0e28' # 0xC8 -> THAI CHARACTER SO SALA
u'\u0e29' # 0xC9 -> THAI CHARACTER SO RUSI
u'\u0e2a' # 0xCA -> THAI CHARACTER SO SUA
u'\u0e2b' # 0xCB -> THAI CHARACTER HO HIP
u'\u0e2c' # 0xCC -> THAI CHARACTER LO CHULA
u'\u0e2d' # 0xCD -> THAI CHARACTER O ANG
u'\u0e2e' # 0xCE -> THAI CHARACTER HO NOKHUK
u'\u0e2f' # 0xCF -> THAI CHARACTER PAIYANNOI
u'\u0e30' # 0xD0 -> THAI CHARACTER SARA A
u'\u0e31' # 0xD1 -> THAI CHARACTER MAI HAN-AKAT
u'\u0e32' # 0xD2 -> THAI CHARACTER SARA AA
u'\u0e33' # 0xD3 -> THAI CHARACTER SARA AM
u'\u0e34' # 0xD4 -> THAI CHARACTER SARA I
u'\u0e35' # 0xD5 -> THAI CHARACTER SARA II
u'\u0e36' # 0xD6 -> THAI CHARACTER SARA UE
u'\u0e37' # 0xD7 -> THAI CHARACTER SARA UEE
u'\u0e38' # 0xD8 -> THAI CHARACTER SARA U
u'\u0e39' # 0xD9 -> THAI CHARACTER SARA UU
u'\u0e3a' # 0xDA -> THAI CHARACTER PHINTHU
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\u0e3f' # 0xDF -> THAI CURRENCY SYMBOL BAHT
u'\u0e40' # 0xE0 -> THAI CHARACTER SARA E
u'\u0e41' # 0xE1 -> THAI CHARACTER SARA AE
u'\u0e42' # 0xE2 -> THAI CHARACTER SARA O
u'\u0e43' # 0xE3 -> THAI CHARACTER SARA AI MAIMUAN
u'\u0e44' # 0xE4 -> THAI CHARACTER SARA AI MAIMALAI
u'\u0e45' # 0xE5 -> THAI CHARACTER LAKKHANGYAO
u'\u0e46' # 0xE6 -> THAI CHARACTER MAIYAMOK
u'\u0e47' # 0xE7 -> THAI CHARACTER MAITAIKHU
u'\u0e48' # 0xE8 -> THAI CHARACTER MAI EK
u'\u0e49' # 0xE9 -> THAI CHARACTER MAI THO
u'\u0e4a' # 0xEA -> THAI CHARACTER MAI TRI
u'\u0e4b' # 0xEB -> THAI CHARACTER MAI CHATTAWA
u'\u0e4c' # 0xEC -> THAI CHARACTER THANTHAKHAT
u'\u0e4d' # 0xED -> THAI CHARACTER NIKHAHIT
u'\u0e4e' # 0xEE -> THAI CHARACTER YAMAKKAN
u'\u0e4f' # 0xEF -> THAI CHARACTER FONGMAN
u'\u0e50' # 0xF0 -> THAI DIGIT ZERO
u'\u0e51' # 0xF1 -> THAI DIGIT ONE
u'\u0e52' # 0xF2 -> THAI DIGIT TWO
u'\u0e53' # 0xF3 -> THAI DIGIT THREE
u'\u0e54' # 0xF4 -> THAI DIGIT FOUR
u'\u0e55' # 0xF5 -> THAI DIGIT FIVE
u'\u0e56' # 0xF6 -> THAI DIGIT SIX
u'\u0e57' # 0xF7 -> THAI DIGIT SEVEN
u'\u0e58' # 0xF8 -> THAI DIGIT EIGHT
u'\u0e59' # 0xF9 -> THAI DIGIT NINE
u'\u0e5a' # 0xFA -> THAI CHARACTER ANGKHANKHU
u'\u0e5b' # 0xFB -> THAI CHARACTER KHOMUT
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
| mit |
PetrDlouhy/django-import-export | tests/core/migrations/0005_addparentchild.py | 4 | 1084 | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-08-19 17:11
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the Parent and Child test models.

    Child gets a required FK to Parent; the FK is added as a separate
    AddField operation after both models have been created.
    """
    dependencies = [
        ('core', '0004_bookwithchapters'),
    ]
    operations = [
        migrations.CreateModel(
            name='Child',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='Parent',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
            ],
        ),
        # FK added once both models exist.
        migrations.AddField(
            model_name='child',
            name='parent',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Parent'),
        ),
    ]
| bsd-2-clause |
timopulkkinen/BubbleFish | third_party/bintrees/bintrees/treeslice.py | 156 | 1950 | #!/usr/bin/env python
#coding:utf-8
# Author: mozman -- <mozman@gmx.at>
# Purpose: TreeSlice
# Created: 11.04.2011
# Copyright (c) 2010-2013 by Manfred Moitzi
# License: MIT License
class TreeSlice(object):
    """A read-only view of the keys of *tree* restricted to [start, stop).

    ``None`` for either bound means "unbounded" on that side.
    """
    __slots__ = ['_tree', '_start', '_stop']

    def __init__(self, tree, start, stop):
        self._tree = tree
        self._start = start
        self._stop = stop

    def __repr__(self):
        # Render like "TreeClass({k1: v1, k2: v2})".
        content = ", ".join("%r: %r" % pair for pair in self.items())
        return "%s({%s})" % (self._tree.__class__.__name__, content)

    def __contains__(self, key):
        # A key is in the slice only if it is both in range and in the tree.
        return self._inrange(key) and key in self._tree

    def _inrange(self, key):
        """Return True when *key* lies inside this slice's bounds."""
        lower, upper = self._start, self._stop
        if lower is not None and key < lower:
            return False
        return upper is None or key < upper

    def __getitem__(self, key):
        if isinstance(key, slice):
            return self._subslice(key.start, key.stop)
        if not self._inrange(key):
            raise KeyError(key)
        return self._tree[key]

    def _subslice(self, start, stop):
        """Return a new TreeSlice narrowed by the given bounds."""
        def tighten(outer, inner, pick):
            # Combine an existing bound with a requested one; None means
            # "no constraint" on that side.
            if inner is None:
                return outer
            if outer is None:
                return inner
            return pick(outer, inner)
        return TreeSlice(self._tree,
                         tighten(self._start, start, max),
                         tighten(self._stop, stop, min))

    def keys(self):
        return self._tree.keyslice(self._start, self._stop)
    __iter__ = keys

    def values(self):
        return self._tree.valueslice(self._start, self._stop)

    def items(self):
        return self._tree.itemslice(self._start, self._stop)
| bsd-3-clause |
ikool/metact06-djan | lib/Crypto/SelfTest/Util/test_asn1.py | 113 | 10239 | # -*- coding: utf-8 -*-
#
# SelfTest/Util/test_asn.py: Self-test for the Crypto.Util.asn1 module
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Self-tests for Crypto.Util.asn1"""
__revision__ = "$Id$"
import unittest
import sys
from Crypto.Util.py3compat import *
from Crypto.Util.asn1 import DerSequence, DerObject
class DerObjectTests(unittest.TestCase):
    """Tests for the generic DerObject TLV (tag-length-value) codec."""

    def testObjEncode1(self):
        # No payload
        der = DerObject(b('\x33'))
        self.assertEquals(der.encode(), b('\x33\x00'))
        # Small payload
        der.payload = b('\x45')
        self.assertEquals(der.encode(), b('\x33\x01\x45'))
        # Invariant: encoding twice gives the same bytes
        self.assertEquals(der.encode(), b('\x33\x01\x45'))
        # Initialize with numerical tag
        der = DerObject(b(0x33))
        der.payload = b('\x45')
        self.assertEquals(der.encode(), b('\x33\x01\x45'))
    def testObjEncode2(self):
        # Known types are accepted by name instead of raw tag byte
        der = DerObject('SEQUENCE')
        self.assertEquals(der.encode(), b('\x30\x00'))
        der = DerObject('BIT STRING')
        self.assertEquals(der.encode(), b('\x03\x00'))
    def testObjEncode3(self):
        # Long payload (128 bytes forces the long-form length 0x81 0x80)
        der = DerObject(b('\x34'))
        der.payload = b("0")*128
        self.assertEquals(der.encode(), b('\x34\x81\x80' + "0"*128))
    def testObjDecode1(self):
        # Decode short payload
        der = DerObject()
        der.decode(b('\x20\x02\x01\x02'))
        self.assertEquals(der.payload, b("\x01\x02"))
        self.assertEquals(der.typeTag, 0x20)
    def testObjDecode2(self):
        # Decode long payload (128 bytes, long-form length encoding)
        der = DerObject()
        der.decode(b('\x22\x81\x80' + "1"*128))
        self.assertEquals(der.payload, b("1")*128)
        self.assertEquals(der.typeTag, 0x22)
class DerSequenceTests(unittest.TestCase):
def testEncode1(self):
# Empty sequence
der = DerSequence()
self.assertEquals(der.encode(), b('0\x00'))
self.failIf(der.hasOnlyInts())
# One single-byte integer (zero)
der.append(0)
self.assertEquals(der.encode(), b('0\x03\x02\x01\x00'))
self.failUnless(der.hasOnlyInts())
# Invariant
self.assertEquals(der.encode(), b('0\x03\x02\x01\x00'))
def testEncode2(self):
# One single-byte integer (non-zero)
der = DerSequence()
der.append(127)
self.assertEquals(der.encode(), b('0\x03\x02\x01\x7f'))
# Indexing
der[0] = 1
self.assertEquals(len(der),1)
self.assertEquals(der[0],1)
self.assertEquals(der[-1],1)
self.assertEquals(der.encode(), b('0\x03\x02\x01\x01'))
#
der[:] = [1]
self.assertEquals(len(der),1)
self.assertEquals(der[0],1)
self.assertEquals(der.encode(), b('0\x03\x02\x01\x01'))
def testEncode3(self):
# One multi-byte integer (non-zero)
der = DerSequence()
der.append(0x180L)
self.assertEquals(der.encode(), b('0\x04\x02\x02\x01\x80'))
def testEncode4(self):
# One very long integer
der = DerSequence()
der.append(2**2048)
self.assertEquals(der.encode(), b('0\x82\x01\x05')+
b('\x02\x82\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00'))
def testEncode5(self):
# One single-byte integer (looks negative)
der = DerSequence()
der.append(0xFFL)
self.assertEquals(der.encode(), b('0\x04\x02\x02\x00\xff'))
def testEncode6(self):
# Two integers
der = DerSequence()
der.append(0x180L)
der.append(0xFFL)
self.assertEquals(der.encode(), b('0\x08\x02\x02\x01\x80\x02\x02\x00\xff'))
self.failUnless(der.hasOnlyInts())
#
der.append(0x01)
der[1:] = [9,8]
self.assertEquals(len(der),3)
self.assertEqual(der[1:],[9,8])
self.assertEqual(der[1:-1],[9])
self.assertEquals(der.encode(), b('0\x0A\x02\x02\x01\x80\x02\x01\x09\x02\x01\x08'))
def testEncode6(self):
# One integer and another type (no matter what it is)
der = DerSequence()
der.append(0x180L)
der.append(b('\x00\x02\x00\x00'))
self.assertEquals(der.encode(), b('0\x08\x02\x02\x01\x80\x00\x02\x00\x00'))
self.failIf(der.hasOnlyInts())
####
def testDecode1(self):
# Empty sequence
der = DerSequence()
der.decode(b('0\x00'))
self.assertEquals(len(der),0)
# One single-byte integer (zero)
der.decode(b('0\x03\x02\x01\x00'))
self.assertEquals(len(der),1)
self.assertEquals(der[0],0)
# Invariant
der.decode(b('0\x03\x02\x01\x00'))
self.assertEquals(len(der),1)
self.assertEquals(der[0],0)
def testDecode2(self):
# One single-byte integer (non-zero)
der = DerSequence()
der.decode(b('0\x03\x02\x01\x7f'))
self.assertEquals(len(der),1)
self.assertEquals(der[0],127)
def testDecode3(self):
# One multi-byte integer (non-zero)
der = DerSequence()
der.decode(b('0\x04\x02\x02\x01\x80'))
self.assertEquals(len(der),1)
self.assertEquals(der[0],0x180L)
def testDecode4(self):
# One very long integer
der = DerSequence()
der.decode(b('0\x82\x01\x05')+
b('\x02\x82\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00'))
self.assertEquals(len(der),1)
self.assertEquals(der[0],2**2048)
def testDecode5(self):
# One single-byte integer (looks negative)
der = DerSequence()
der.decode(b('0\x04\x02\x02\x00\xff'))
self.assertEquals(len(der),1)
self.assertEquals(der[0],0xFFL)
def testDecode6(self):
# Two integers
der = DerSequence()
der.decode(b('0\x08\x02\x02\x01\x80\x02\x02\x00\xff'))
self.assertEquals(len(der),2)
self.assertEquals(der[0],0x180L)
self.assertEquals(der[1],0xFFL)
def testDecode7(self):
# One integer and 2 other types
der = DerSequence()
der.decode(b('0\x0A\x02\x02\x01\x80\x24\x02\xb6\x63\x12\x00'))
self.assertEquals(len(der),3)
self.assertEquals(der[0],0x180L)
self.assertEquals(der[1],b('\x24\x02\xb6\x63'))
self.assertEquals(der[2],b('\x12\x00'))
def testDecode8(self):
# Only 2 other types
der = DerSequence()
der.decode(b('0\x06\x24\x02\xb6\x63\x12\x00'))
self.assertEquals(len(der),2)
self.assertEquals(der[0],b('\x24\x02\xb6\x63'))
self.assertEquals(der[1],b('\x12\x00'))
def testErrDecode1(self):
# Not a sequence
der = DerSequence()
self.assertRaises(ValueError, der.decode, b(''))
self.assertRaises(ValueError, der.decode, b('\x00'))
self.assertRaises(ValueError, der.decode, b('\x30'))
def testErrDecode2(self):
# Wrong payload type
der = DerSequence()
self.assertRaises(ValueError, der.decode, b('\x30\x00\x00'), True)
def testErrDecode3(self):
# Wrong length format
der = DerSequence()
self.assertRaises(ValueError, der.decode, b('\x30\x04\x02\x01\x01\x00'))
self.assertRaises(ValueError, der.decode, b('\x30\x81\x03\x02\x01\x01'))
self.assertRaises(ValueError, der.decode, b('\x30\x04\x02\x81\x01\x01'))
def testErrDecode4(self):
# Wrong integer format
der = DerSequence()
# Multi-byte encoding for zero
#self.assertRaises(ValueError, der.decode, '\x30\x04\x02\x02\x00\x00')
# Negative integer
self.assertRaises(ValueError, der.decode, b('\x30\x04\x02\x01\xFF'))
def get_tests(config={}):
    """Return the list of test cases in this module (*config* is unused)."""
    from Crypto.SelfTest.st_common import list_test_cases
    tests = []
    tests.extend(list_test_cases(DerObjectTests))
    tests.extend(list_test_cases(DerSequenceTests))
    return tests
if __name__ == '__main__':
    def suite():
        """Build the full suite on demand for unittest.main."""
        return unittest.TestSuite(get_tests())
    unittest.main(defaultTest='suite')

# vim:set ts=4 sw=4 sts=4 expandtab:
| bsd-3-clause |
orbitfp7/nova | nova/objects/instance_numa_topology.py | 4 | 7761 | # Copyright 2014 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils
from nova import db
from nova import exception
from nova.objects import base
from nova.objects import fields as obj_fields
from nova.virt import hardware
# TODO(berrange): Remove NovaObjectDictCompat
class InstanceNUMACell(base.NovaObject,
                       base.NovaObjectDictCompat):
    """A single guest NUMA cell: its vCPU set, memory amount, optional
    page size, virtual CPU topology and (optional) vCPU->pCPU pinning.
    """
    # Version 1.0: Initial version
    # Version 1.1: Add pagesize field
    # Version 1.2: Add cpu_pinning_raw and topology fields
    VERSION = '1.2'
    fields = {
        'id': obj_fields.IntegerField(),
        'cpuset': obj_fields.SetOfIntegersField(),
        'memory': obj_fields.IntegerField(),
        'pagesize': obj_fields.IntegerField(nullable=True),
        'cpu_topology': obj_fields.ObjectField('VirtCPUTopology',
                                               nullable=True),
        'cpu_pinning_raw': obj_fields.DictOfIntegersField(nullable=True)
    }
    # Child-object versions to send when backlevelling this object for
    # older peers.
    obj_relationships = {
        'cpu_topology': [('1.2', '1.0')]
    }
    # Dict-style convenience accessor over the raw pinning field.
    cpu_pinning = obj_fields.DictProxyField('cpu_pinning_raw')
    def __init__(self, **kwargs):
        # Default the optional fields to None and clear change tracking so
        # the defaults are not treated as modifications to be saved.
        super(InstanceNUMACell, self).__init__(**kwargs)
        if 'pagesize' not in kwargs:
            self.pagesize = None
            self.obj_reset_changes(['pagesize'])
        if 'cpu_topology' not in kwargs:
            self.cpu_topology = None
            self.obj_reset_changes(['cpu_topology'])
        if 'cpu_pinning' not in kwargs:
            self.cpu_pinning = None
            self.obj_reset_changes(['cpu_pinning_raw'])
    def __len__(self):
        # The "length" of a cell is its number of vCPUs.
        return len(self.cpuset)
    def _to_dict(self):
        # NOTE(sahid): Used as legacy, could be renamed to
        # _legacy_to_dict_ in the future to avoid confusion.
        return {'cpus': hardware.format_cpu_spec(self.cpuset,
                                                 allow_ranges=False),
                'mem': {'total': self.memory},
                'id': self.id,
                'pagesize': self.pagesize}
    @classmethod
    def _from_dict(cls, data_dict):
        # NOTE(sahid): Used as legacy, could be renamed to
        # _legacy_from_dict_ in the future to avoid confusion.
        cpuset = hardware.parse_cpu_spec(data_dict.get('cpus', ''))
        memory = data_dict.get('mem', {}).get('total', 0)
        cell_id = data_dict.get('id')
        pagesize = data_dict.get('pagesize')
        return cls(id=cell_id, cpuset=cpuset,
                   memory=memory, pagesize=pagesize)
    @property
    def siblings(self):
        """Group the cell's vCPUs into hardware-thread sibling sets.

        Empty when there is no multi-thread topology for the cell.
        """
        cpu_list = sorted(list(self.cpuset))
        threads = 0
        if self.cpu_topology:
            threads = self.cpu_topology.threads
        if threads == 1:
            # Single-threaded cores have no siblings to report.
            threads = 0
        # Chunk the sorted CPU list into groups of 'threads' elements; with
        # threads == 0 the zip degenerates to an empty result.
        # NOTE(review): relies on py2 map() returning a list; under py3
        # this would be a lazy iterator -- confirm before porting.
        return map(set, zip(*[iter(cpu_list)] * threads))
    @property
    def cpu_pinning_requested(self):
        # Pinning was requested iff a (possibly empty) pinning dict exists.
        return self.cpu_pinning is not None
    def pin(self, vcpu, pcpu):
        """Record that guest CPU *vcpu* is pinned to host CPU *pcpu*.

        Silently ignored when *vcpu* does not belong to this cell.
        """
        if vcpu not in self.cpuset:
            return
        pinning_dict = self.cpu_pinning or {}
        pinning_dict[vcpu] = pcpu
        self.cpu_pinning = pinning_dict
    def pin_vcpus(self, *cpu_pairs):
        """Pin several (vcpu, pcpu) pairs at once."""
        for vcpu, pcpu in cpu_pairs:
            self.pin(vcpu, pcpu)
# TODO(berrange): Remove NovaObjectDictCompat
class InstanceNUMATopology(base.NovaObject,
                           base.NovaObjectDictCompat):
    """The full guest NUMA topology of an instance: a list of cells,
    persisted as JSON in the instance_extra.numa_topology column.
    """
    # Version 1.0: Initial version
    # Version 1.1: Takes into account pagesize
    VERSION = '1.1'
    fields = {
        # NOTE(danms): The 'id' field is no longer used and should be
        # removed in the future when convenient
        'id': obj_fields.IntegerField(),
        'instance_uuid': obj_fields.UUIDField(),
        'cells': obj_fields.ListOfObjectsField('InstanceNUMACell'),
    }
    obj_relationships = {
        'cells': [('1.0', '1.0')],
    }
    @classmethod
    def obj_from_primitive(cls, primitive):
        """Build from a primitive, accepting both the current serialized
        object format and the legacy plain-dict format."""
        if 'nova_object.name' in primitive:
            obj_topology = super(InstanceNUMATopology, cls).obj_from_primitive(
                primitive)
        else:
            # NOTE(sahid): This compatibility code needs to stay until we can
            # guarantee that there are no cases of the old format stored in
            # the database (or forever, if we can never guarantee that).
            obj_topology = InstanceNUMATopology._from_dict(primitive)
            obj_topology.id = 0
        return obj_topology
    @classmethod
    def obj_from_db_obj(cls, instance_uuid, db_obj):
        """Deserialize the JSON 'numa_topology' column for *instance_uuid*."""
        primitive = jsonutils.loads(db_obj)
        obj_topology = cls.obj_from_primitive(primitive)
        if 'nova_object.name' not in db_obj:
            # Legacy rows do not carry the instance uuid; attach it here.
            obj_topology.instance_uuid = instance_uuid
        # No benefit to store a list of changed fields
        obj_topology.obj_reset_changes()
        return obj_topology
    # TODO(ndipanov) Remove this method on the major version bump to 2.0
    @base.remotable
    def create(self, context):
        self._save(context)
    # NOTE(ndipanov): We can't rename create and want to avoid version bump
    # as this needs to be backported to stable so this is not a @remotable
    # That's OK since we only call it from inside Instance.save() which is.
    def _save(self, context):
        # Serialize and persist into the instance_extra table.
        values = {'numa_topology': self._to_json()}
        db.instance_extra_update_by_uuid(context, self.instance_uuid,
                                         values)
        self.obj_reset_changes()
    # NOTE(ndipanov): We want to avoid version bump
    # as this needs to be backported to stable so this is not a @remotable
    # That's OK since we only call it from inside Instance.save() which is.
    @classmethod
    def delete_by_instance_uuid(cls, context, instance_uuid):
        """Null out the stored topology for *instance_uuid*."""
        values = {'numa_topology': None}
        db.instance_extra_update_by_uuid(context, instance_uuid,
                                         values)
    @base.remotable_classmethod
    def get_by_instance_uuid(cls, context, instance_uuid):
        """Load the stored topology for *instance_uuid*.

        Raises NumaTopologyNotFound when the instance has no extra record;
        returns None when the record exists but holds no topology.
        """
        db_extra = db.instance_extra_get_by_instance_uuid(
            context, instance_uuid, columns=['numa_topology'])
        if not db_extra:
            raise exception.NumaTopologyNotFound(instance_uuid=instance_uuid)
        if db_extra['numa_topology'] is None:
            return None
        return cls.obj_from_db_obj(instance_uuid, db_extra['numa_topology'])
    def _to_json(self):
        return jsonutils.dumps(self.obj_to_primitive())
    def __len__(self):
        """Defined so that boolean testing works the same as for lists."""
        return len(self.cells)
    def _to_dict(self):
        # NOTE(sahid): Used as legacy, could be renamed to _legacy_to_dict_
        # in the future to avoid confusion.
        return {'cells': [cell._to_dict() for cell in self.cells]}
    @classmethod
    def _from_dict(cls, data_dict):
        # NOTE(sahid): Used as legacy, could be renamed to _legacy_from_dict_
        # in the future to avoid confusion.
        return cls(cells=[
            InstanceNUMACell._from_dict(cell_dict)
            for cell_dict in data_dict.get('cells', [])])
    @property
    def cpu_pinning_requested(self):
        # Pinning is considered requested only if every cell requested it.
        return all(cell.cpu_pinning_requested for cell in self.cells)
| apache-2.0 |
isnnn/Sick-Beard-TPB | lib/html5lib/tests/test_tokenizer.py | 72 | 6826 | import sys
import os
import unittest
import cStringIO
import warnings
import re
try:
import json
except ImportError:
import simplejson as json
from support import html5lib_test_files
from html5lib.tokenizer import HTMLTokenizer
from html5lib import constants
class TokenizerTestParser(object):
    """Feed an input through HTMLTokenizer and record the emitted tokens
    in the list-based format used by the tokenizer .test JSON files."""
    def __init__(self, initialState, lastStartTag=None):
        self.tokenizer = HTMLTokenizer
        self._state = initialState
        self._lastStartTag = lastStartTag
    def parse(self, stream, encoding=None, innerHTML=False):
        """Tokenize *stream* and return the list of recorded tokens."""
        tokenizer = self.tokenizer(stream, encoding)
        self.outputTokens = []
        # Force the tokenizer to start in the requested state
        # (attribute name such as "dataState").
        tokenizer.state = getattr(tokenizer, self._state)
        if self._lastStartTag is not None:
            tokenizer.currentToken = {"type": "startTag",
                                      "name":self._lastStartTag}
        # Reverse map from numeric token-type codes to names, used to
        # dispatch to the process<Name> handlers below.
        types = dict((v,k) for k,v in constants.tokenTypes.iteritems())
        for token in tokenizer:
            getattr(self, 'process%s' % types[token["type"]])(token)
        return self.outputTokens
    def processDoctype(self, token):
        self.outputTokens.append([u"DOCTYPE", token["name"], token["publicId"],
                                  token["systemId"], token["correct"]])
    def processStartTag(self, token):
        # Attribute pairs are reversed so that, when building the dict,
        # the first occurrence of a duplicated attribute wins.
        self.outputTokens.append([u"StartTag", token["name"],
                                  dict(token["data"][::-1]), token["selfClosing"]])
    def processEmptyTag(self, token):
        # An "empty tag" on a non-void element is itself a parse error.
        if token["name"] not in constants.voidElements:
            self.outputTokens.append(u"ParseError")
        self.outputTokens.append([u"StartTag", token["name"], dict(token["data"][::-1])])
    def processEndTag(self, token):
        self.outputTokens.append([u"EndTag", token["name"],
                                  token["selfClosing"]])
    def processComment(self, token):
        self.outputTokens.append([u"Comment", token["data"]])
    def processSpaceCharacters(self, token):
        self.outputTokens.append([u"Character", token["data"]])
        # From here on, treat space tokens like ordinary character tokens
        # (instance-level override of the bound method).
        self.processSpaceCharacters = self.processCharacters
    def processCharacters(self, token):
        self.outputTokens.append([u"Character", token["data"]])
    def processEOF(self, token):
        # EOF produces no output token.
        pass
    def processParseError(self, token):
        self.outputTokens.append([u"ParseError", token["data"]])
def concatenateCharacterTokens(tokens):
    """Merge runs of adjacent Character tokens into single tokens.

    Merging happens in place on the first token of each run; "ParseError"
    entries break a run.
    """
    merged = []
    for tok in tokens:
        is_character = "ParseError" not in tok and tok[0] == "Character"
        if (is_character and merged and "ParseError" not in merged[-1]
                and merged[-1][0] == "Character"):
            # Extend the previous character token rather than appending.
            merged[-1][1] += tok[1]
        else:
            merged.append(tok)
    return merged
def normalizeTokens(tokens):
    """Collapse ["ParseError", data] entries to the bare string "ParseError".

    Mutates *tokens* in place and returns it.
    """
    # TODO: convert tests to reflect arrays
    for idx, tok in enumerate(tokens):
        if tok[0] == u'ParseError':
            tokens[idx] = tok[0]
    return tokens
def tokensMatch(expectedTokens, receivedTokens, ignoreErrorOrder,
                ignoreErrors=False):
    """Return True when the received token stream matches the expected one.

    If the ignoreErrorOrder flag is set to true we don't test the relative
    positions of parse errors and non parse errors; *ignoreErrors* drops
    parse errors from the comparison entirely.
    """
    # Self-closing flags are only compared when at least one expected token
    # carries one (StartTag of length 4 / EndTag of length 3).
    compare_self_closing = any(
        (tok[0] == "StartTag" and len(tok) == 4) or
        (tok[0] == "EndTag" and len(tok) == 3)
        for tok in expectedTokens)
    if not compare_self_closing:
        # Strip the trailing self-closing flag from received tags in place.
        for tok in receivedTokens:
            if tok[0] in ("StartTag", "EndTag"):
                tok.pop()
    if not ignoreErrorOrder and not ignoreErrors:
        return expectedTokens == receivedTokens
    # Split each stream into (non-error tokens, parse errors) and compare
    # the groups, discarding the relative ordering of the errors.
    def split(tokenList):
        plain, errors = [], []
        for tok in tokenList:
            if tok != "ParseError":
                plain.append(tok)
            elif not ignoreErrors:
                errors.append(tok)
        return [plain, errors]
    return split(expectedTokens) == split(receivedTokens)
def unescape_test(test):
    """Undo the double-escaping applied to a 'doubleEscaped' test entry.

    Decodes the input string, each output token's data, and any attribute
    names/values through unicode-escape, in place, and returns the test.
    """
    def decode(inp):
        return inp.decode("unicode-escape")
    test["input"] = decode(test["input"])
    for token in test["output"]:
        if token == "ParseError":
            continue
        else:
            token[1] = decode(token[1])
            if len(token) > 2:
                # Iterate a snapshot of the attribute pairs. The original
                # iterated the dict itself, which yields only keys (so the
                # key/value unpack could never work) and mutated the dict
                # during iteration; .items() returns a list copy in py2,
                # making the in-place rewrite safe.
                for key, value in token[2].items():
                    del token[2][key]
                    token[2][decode(key)] = decode(value)
    return test
def runTokenizerTest(test):
    """Run one tokenizer test dict and assert the token streams match."""
    #XXX - move this out into the setup function
    #concatenate all consecutive character tokens into a single token
    if 'doubleEscaped' in test:
        test = unescape_test(test)
    expected = concatenateCharacterTokens(test['output'])
    if 'lastStartTag' not in test:
        test['lastStartTag'] = None
    # Capture anything the tokenizer prints. The original saved sys.stdout
    # but never restored it, so every test run permanently hijacked stdout;
    # restore it in a finally block.
    outBuffer = cStringIO.StringIO()
    stdout = sys.stdout
    sys.stdout = outBuffer
    try:
        parser = TokenizerTestParser(test['initialState'],
                                     test['lastStartTag'])
        tokens = parser.parse(test['input'])
    finally:
        sys.stdout = stdout
    tokens = concatenateCharacterTokens(tokens)
    received = normalizeTokens(tokens)
    errorMsg = u"\n".join(["\n\nInitial state:",
                           test['initialState'] ,
                           "\nInput:", unicode(test['input']),
                           "\nExpected:", unicode(expected),
                           "\nreceived:", unicode(tokens)])
    errorMsg = errorMsg.encode("utf-8")
    ignoreErrorOrder = test.get('ignoreErrorOrder', False)
    assert tokensMatch(expected, received, ignoreErrorOrder), errorMsg
def _doCapitalize(match):
    """Replacement callback: uppercase the captured word character."""
    return match.group(1).upper()

# Bound .sub of the compiled pattern: a non-word run followed by a word char.
_capitalizeRe = re.compile(r"\W+(\w)").sub

def capitalize(s):
    """Camel-case *s*: lower-case it, then uppercase the first letter after
    each non-word run (e.g. "Data state" -> "dataState")."""
    return _capitalizeRe(_doCapitalize, s.lower())
def test_tokenizer():
    """Generator test: yield one (runTokenizerTest, test) pair per JSON test.

    Fills in the default initial state and pre-capitalizes each state name
    into the attribute form the parser expects ("Data state" -> "dataState").

    Cleanups vs. the original: the dead locals ``skip`` (never read),
    ``testName`` (never used) and the unused enumerate index are gone, and
    the test file handle is now closed via a context manager instead of the
    bare file() call leaking it.
    """
    for filename in html5lib_test_files('tokenizer', '*.test'):
        with open(filename) as test_file:
            tests = json.load(test_file)
        if 'tests' in tests:
            for test in tests['tests']:
                if 'initialStates' not in test:
                    test["initialStates"] = ["Data state"]
                for initialState in test["initialStates"]:
                    test["initialState"] = capitalize(initialState)
                    yield runTokenizerTest, test
| gpl-3.0 |
pepetreshere/odoo | addons/website_event/tests/common.py | 2 | 4308 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from datetime import datetime, timedelta, time
from unittest.mock import patch
from odoo.addons.event.tests.common import TestEventCommon
from odoo.addons.mail.tests.common import mail_new_test_user
from odoo.fields import Datetime as FieldsDatetime, Date as FieldsDate
from odoo.tests.common import SavepointCase
class EventDtPatcher(SavepointCase):
    """Test mixin that freezes "now"/"today" for event-related tests.

    Each consuming module binds ``fields`` at import time, so the
    Datetime/Date classes must be patched once per module where they
    are used (event models, website_event models and controllers).
    """
    @classmethod
    def setUpClass(cls):
        super(EventDtPatcher, cls).setUpClass()
        # Single reference instant/date returned by every patched call site.
        cls.reference_now = datetime(2020, 7, 6, 10, 0, 0)
        cls.reference_today = datetime(2020, 7, 6)
        # wraps=... keeps all other Datetime/Date behavior intact; only
        # now()/today() are overridden below.
        cls.event_dt = patch(
            'odoo.addons.event.models.event_event.fields.Datetime',
            wraps=FieldsDatetime
        )
        cls.wevent_dt = patch(
            'odoo.addons.website_event.models.event_event.fields.Datetime',
            wraps=FieldsDatetime
        )
        cls.wevent_main_dt = patch(
            'odoo.addons.website_event.controllers.main.fields.Datetime',
            wraps=FieldsDatetime
        )
        cls.event_date = patch(
            'odoo.addons.event.models.event_event.fields.Date',
            wraps=FieldsDate
        )
        cls.wevent_main_date = patch(
            'odoo.addons.website_event.controllers.main.fields.Date',
            wraps=FieldsDate
        )
        cls.mock_event_dt = cls.event_dt.start()
        cls.mock_wevent_dt = cls.wevent_dt.start()
        cls.mock_wevent_main_dt = cls.wevent_main_dt.start()
        cls.mock_event_date = cls.event_date.start()
        cls.mock_wevent_main_date = cls.wevent_main_date.start()
        # Pin the mocked clocks to the reference instant/date.
        cls.mock_event_dt.now.return_value = cls.reference_now
        cls.mock_wevent_dt.now.return_value = cls.reference_now
        cls.mock_wevent_main_dt.now.return_value = cls.reference_now
        cls.mock_event_date.today.return_value = cls.reference_today
        cls.mock_wevent_main_date.today.return_value = cls.reference_today
        # Undo every patch at class teardown.
        cls.addClassCleanup(cls.event_dt.stop)
        cls.addClassCleanup(cls.wevent_dt.stop)
        cls.addClassCleanup(cls.wevent_main_dt.stop)
        cls.addClassCleanup(cls.event_date.stop)
        cls.addClassCleanup(cls.wevent_main_date.stop)
class TestWebsiteEventCommon(TestEventCommon):
    """Common setup and menu assertions for website_event tests."""
    @classmethod
    def setUpClass(cls):
        super(TestWebsiteEventCommon, cls).setUpClass()
        cls.company_main = cls.env.user.company_id
        # Event manager who is also a website designer (can edit menus/pages).
        cls.user_event_web_manager = mail_new_test_user(
            cls.env, login='user_event_web_manager',
            name='Martin Sales Manager', email='crm_manager@test.example.com',
            company_id=cls.company_main.id,
            notification_type='inbox',
            groups='event.group_event_manager,website.group_website_designer',
        )
    def _get_menus(self):
        """Submenu names expected under a freshly-created event's menu."""
        return set(['Introduction', 'Location', 'Register'])
    def _assert_website_menus(self, event, menu_entries=None):
        """Assert *event* has exactly *menu_entries* (default _get_menus())
        as child website menus, and that the page-specific views exist for
        the entries that have one (Introduction, Location)."""
        self.assertTrue(event.menu_id)
        if menu_entries is None:
            menu_entries = self._get_menus()
        menus = self.env['website.menu'].search([('parent_id', '=', event.menu_id.id)])
        self.assertEqual(len(menus), len(menu_entries))
        self.assertEqual(set(menus.mapped('name')), menu_entries)
        for page_specific in ['Introduction', 'Location']:
            # Views are named "<Page> <event name>" when created.
            view = self.env['ir.ui.view'].search(
                [('name', '=', page_specific + ' ' + event.name)]
            )
            if page_specific in menu_entries:
                self.assertTrue(bool(view))
            # TDE FIXME: page deletion not done in 13.3 for Introduction/Location, difficult to fix
            # without website.event.menu model (or crappy code based on name)
            # else:
            #     self.assertFalse(bool(view))
class TestEventOnlineCommon(TestEventCommon, EventDtPatcher):
    """Event tests with frozen time and event_0 made "ongoing"."""
    @classmethod
    def setUpClass(cls):
        super(TestEventOnlineCommon, cls).setUpClass()
        # event if 8-18 in Europe/Brussels (DST) (first day: begins at 9, last day: ends at 15)
        # reference_now (2020-07-06 10:00) falls between date_begin and
        # date_end, so the event is running during the tests.
        # NOTE(review): values look like naive UTC datetimes (odoo
        # convention) -- confirm when relying on the exact local hours.
        cls.event_0.write({
            'date_begin': datetime.combine(cls.reference_now, time(7, 0)) - timedelta(days=1),
            'date_end': datetime.combine(cls.reference_now, time(13, 0)) + timedelta(days=1),
        })
| agpl-3.0 |
superstack/nova | nova/virt/xenapi/network_utils.py | 8 | 2193 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2010 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Helper methods for operations related to the management of network
records and their attributes like bridges, PIFs, QoS, as well as
their lookup functions.
"""
from nova.virt.xenapi import HelperBase
class NetworkHelper(HelperBase):
    """
    The class that wraps the helper methods together.
    """
    @classmethod
    def find_network_with_name_label(cls, session, name_label):
        """Return the unique network ref with *name_label*, or None.

        Raises when several networks share the same label.
        """
        networks = session.call_xenapi('network.get_by_name_label', name_label)
        if len(networks) == 1:
            return networks[0]
        elif len(networks) > 1:
            raise Exception(_('Found non-unique network'
                              ' for name_label %s') % name_label)
        else:
            return None
    @classmethod
    def find_network_with_bridge(cls, session, bridge):
        """
        Return the network on which the bridge is attached, if found.
        The bridge is defined in the nova db and can be found either in the
        'bridge' or 'name_label' fields of the XenAPI network record.

        Unlike find_network_with_name_label, this raises when nothing
        matches instead of returning None.
        """
        expr = 'field "name__label" = "%s" or ' \
               'field "bridge" = "%s"' % (bridge, bridge)
        networks = session.call_xenapi('network.get_all_records_where', expr)
        if len(networks) == 1:
            # NOTE(review): py2 idiom -- dict.keys() is a list here; this
            # would need list(networks)[0] under py3.
            return networks.keys()[0]
        elif len(networks) > 1:
            raise Exception(_('Found non-unique network'
                              ' for bridge %s') % bridge)
        else:
            raise Exception(_('Found no network for bridge %s') % bridge)
| apache-2.0 |
sinkuri256/python-for-android | python3-alpha/python3-src/Lib/test/test_queue.py | 51 | 13039 | # Some simple queue module tests, plus some failure conditions
# to ensure the Queue locks remain stable.
import queue
import time
import unittest
from test import support
threading = support.import_module('threading')
QUEUE_SIZE = 5
def qfull(q):
    """Return True when bounded queue *q* is at capacity.

    Unbounded queues (maxsize <= 0) are never considered full.
    """
    if q.maxsize <= 0:
        return False
    return q.qsize() == q.maxsize
# A thread to run a function that unclogs a blocked Queue.
class _TriggerThread(threading.Thread):
def __init__(self, fn, args):
self.fn = fn
self.args = args
self.startedEvent = threading.Event()
threading.Thread.__init__(self)
def run(self):
# The sleep isn't necessary, but is intended to give the blocking
# function in the main thread a chance at actually blocking before
# we unclog it. But if the sleep is longer than the timeout-based
# tests wait in their blocking functions, those tests will fail.
# So we give them much longer timeout values compared to the
# sleep here (I aimed at 10 seconds for blocking functions --
# they should never actually wait that long - they should make
# progress as soon as we call self.fn()).
time.sleep(0.1)
self.startedEvent.set()
self.fn(*self.args)
# Execute a function that blocks, and in a separate thread, a function that
# triggers the release. Returns the result of the blocking function. Caution:
# block_func must guarantee to block until trigger_func is called, and
# trigger_func must guarantee to change queue state so that block_func can make
# enough progress to return. In particular, a block_func that just raises an
# exception regardless of whether trigger_func is called will lead to
# timing-dependent sporadic failures, and one of those went rarely seen but
# undiagnosed for years. Now block_func must be unexceptional. If block_func
# is supposed to raise an exception, call do_exceptional_blocking_test()
# instead.
class BlockingTestMixin:
    """Run a blocking call in the main thread while a _TriggerThread
    unclogs it, with sanity checks that blocking actually happened and
    that the trigger returned."""
    def do_blocking_test(self, block_func, block_args, trigger_func, trigger_args):
        """Run block_func(*block_args), unclogged by trigger_func; return
        the blocking function's result."""
        self.t = _TriggerThread(trigger_func, trigger_args)
        self.t.start()
        self.result = block_func(*block_args)
        # If block_func returned before our thread made the call, we failed!
        if not self.t.startedEvent.is_set():
            self.fail("blocking function '%r' appeared not to block" %
                      block_func)
        self.t.join(10) # make sure the thread terminates
        if self.t.is_alive():
            self.fail("trigger function '%r' appeared to not return" %
                      trigger_func)
        return self.result
    # Call this instead if block_func is supposed to raise an exception.
    def do_exceptional_blocking_test(self,block_func, block_args, trigger_func,
                                   trigger_args, expected_exception_class):
        """Like do_blocking_test, but block_func must raise
        expected_exception_class (which is then propagated)."""
        self.t = _TriggerThread(trigger_func, trigger_args)
        self.t.start()
        try:
            try:
                block_func(*block_args)
            except expected_exception_class:
                raise
            else:
                self.fail("expected exception of kind %r" %
                          expected_exception_class)
        finally:
            self.t.join(10) # make sure the thread terminates
            if self.t.is_alive():
                self.fail("trigger function '%r' appeared to not return" %
                          trigger_func)
            if not self.t.startedEvent.is_set():
                self.fail("trigger thread ended but event never set")
class BaseQueueTest(unittest.TestCase, BlockingTestMixin):
    """Shared tests for queue.Queue and its subclasses; concrete subclasses
    set ``type2test`` to the queue class under test."""
    def setUp(self):
        # ``cum`` accumulates values consumed by worker threads; the lock
        # guards it because several workers update it concurrently.
        self.cum = 0
        self.cumlock = threading.Lock()
    def simple_queue_test(self, q):
        """Exercise ordering, fullness, emptiness, and blocking behaviour of
        an initially-empty queue ``q``."""
        if q.qsize():
            raise RuntimeError("Call this function with an empty queue")
        self.assertTrue(q.empty())
        self.assertFalse(q.full())
        # I guess we better check things actually queue correctly a little :)
        q.put(111)
        q.put(333)
        q.put(222)
        # Expected retrieval order depends on the queue discipline.
        target_order = dict(Queue = [111, 333, 222],
                            LifoQueue = [222, 333, 111],
                            PriorityQueue = [111, 222, 333])
        actual_order = [q.get(), q.get(), q.get()]
        self.assertEqual(actual_order, target_order[q.__class__.__name__],
                         "Didn't seem to queue the correct data!")
        for i in range(QUEUE_SIZE-1):
            q.put(i)
            self.assertTrue(q.qsize(), "Queue should not be empty")
        self.assertTrue(not qfull(q), "Queue should not be full")
        last = 2 * QUEUE_SIZE
        full = 3 * 2 * QUEUE_SIZE
        q.put(last)
        self.assertTrue(qfull(q), "Queue should be full")
        self.assertFalse(q.empty())
        self.assertTrue(q.full())
        try:
            q.put(full, block=0)
            self.fail("Didn't appear to block with a full queue")
        except queue.Full:
            pass
        try:
            q.put(full, timeout=0.01)
            self.fail("Didn't appear to time-out with a full queue")
        except queue.Full:
            pass
        # Test a blocking put
        self.do_blocking_test(q.put, (full,), q.get, ())
        self.do_blocking_test(q.put, (full, True, 10), q.get, ())
        # Empty it
        for i in range(QUEUE_SIZE):
            q.get()
        self.assertTrue(not q.qsize(), "Queue should be empty")
        try:
            q.get(block=0)
            self.fail("Didn't appear to block with an empty queue")
        except queue.Empty:
            pass
        try:
            q.get(timeout=0.01)
            self.fail("Didn't appear to time-out with an empty queue")
        except queue.Empty:
            pass
        # Test a blocking get
        self.do_blocking_test(q.get, (), q.put, ('empty',))
        self.do_blocking_test(q.get, (True, 10), q.put, ('empty',))
    def worker(self, q):
        """Consume items from ``q`` until a negative sentinel is seen,
        accumulating consumed values into ``self.cum``."""
        while True:
            x = q.get()
            if x < 0:
                q.task_done()
                return
            with self.cumlock:
                self.cum += x
            q.task_done()
    def queue_join_test(self, q):
        """Verify that ``q.join()`` blocks until all queued tasks are done."""
        self.cum = 0
        for i in (0,1):
            threading.Thread(target=self.worker, args=(q,)).start()
        for i in range(100):
            q.put(i)
        q.join()
        self.assertEqual(self.cum, sum(range(100)),
                         "q.join() did not block until all tasks were done")
        for i in (0,1):
            q.put(-1) # instruct the threads to close
        q.join() # verify that you can join twice
    def test_queue_task_done(self):
        # Test to make sure a queue task completed successfully.
        # task_done() on a fresh queue must raise ValueError.
        q = self.type2test()
        try:
            q.task_done()
        except ValueError:
            pass
        else:
            self.fail("Did not detect task count going negative")
    def test_queue_join(self):
        # Test that a queue join()s successfully, and before anything else
        # (done twice for insurance).
        q = self.type2test()
        self.queue_join_test(q)
        self.queue_join_test(q)
        try:
            q.task_done()
        except ValueError:
            pass
        else:
            self.fail("Did not detect task count going negative")
    def test_simple_queue(self):
        # Do it a couple of times on the same queue.
        # Done twice to make sure works with same instance reused.
        q = self.type2test(QUEUE_SIZE)
        self.simple_queue_test(q)
        self.simple_queue_test(q)
    def test_negative_timeout_raises_exception(self):
        # Negative timeouts are invalid for both put() and get().
        q = self.type2test(QUEUE_SIZE)
        with self.assertRaises(ValueError):
            q.put(1, timeout=-1)
        with self.assertRaises(ValueError):
            q.get(1, timeout=-1)
    def test_nowait(self):
        # put_nowait()/get_nowait() must raise rather than block.
        q = self.type2test(QUEUE_SIZE)
        for i in range(QUEUE_SIZE):
            q.put_nowait(1)
        with self.assertRaises(queue.Full):
            q.put_nowait(1)
        for i in range(QUEUE_SIZE):
            q.get_nowait()
        with self.assertRaises(queue.Empty):
            q.get_nowait()
    def test_shrinking_queue(self):
        # issue 10110
        q = self.type2test(3)
        q.put(1)
        q.put(2)
        q.put(3)
        with self.assertRaises(queue.Full):
            q.put_nowait(4)
        self.assertEqual(q.qsize(), 3)
        q.maxsize = 2 # shrink the queue
        with self.assertRaises(queue.Full):
            q.put_nowait(4)
class QueueTest(BaseQueueTest):
    """Run the shared queue tests against FIFO ``queue.Queue``."""
    type2test = queue.Queue
class LifoQueueTest(BaseQueueTest):
    """Run the shared queue tests against LIFO ``queue.LifoQueue``."""
    type2test = queue.LifoQueue
class PriorityQueueTest(BaseQueueTest):
    """Run the shared queue tests against ``queue.PriorityQueue``."""
    type2test = queue.PriorityQueue
# A Queue subclass that can provoke failure at a moment's notice :)
class FailingQueueException(Exception):
    """Raised by FailingQueue when a deliberately provoked put/get fails."""
    pass
class FailingQueue(queue.Queue):
    """A ``queue.Queue`` whose next put or get can be armed to fail once."""
    def __init__(self, *args):
        # Arm flags start disarmed; each fires at most once.
        self.fail_next_put = False
        self.fail_next_get = False
        queue.Queue.__init__(self, *args)
    def _put(self, item):
        # Fail exactly once if armed, otherwise defer to normal behaviour.
        if not self.fail_next_put:
            return queue.Queue._put(self, item)
        self.fail_next_put = False
        raise FailingQueueException("You Lose")
    def _get(self):
        # Fail exactly once if armed, otherwise defer to normal behaviour.
        if not self.fail_next_get:
            return queue.Queue._get(self)
        self.fail_next_get = False
        raise FailingQueueException("You Lose")
class FailingQueueTest(unittest.TestCase, BlockingTestMixin):
    """Verify that a queue survives (is not corrupted by) exceptions raised
    from inside its ``_put``/``_get`` hooks."""
    def failing_queue_test(self, q):
        """Provoke put/get failures on ``q`` and check the queue's state
        remains consistent afterwards."""
        if q.qsize():
            raise RuntimeError("Call this function with an empty queue")
        for i in range(QUEUE_SIZE-1):
            q.put(i)
        # Test a failing non-blocking put.
        q.fail_next_put = True
        try:
            q.put("oops", block=0)
            self.fail("The queue didn't fail when it should have")
        except FailingQueueException:
            pass
        q.fail_next_put = True
        try:
            q.put("oops", timeout=0.1)
            self.fail("The queue didn't fail when it should have")
        except FailingQueueException:
            pass
        q.put("last")
        self.assertTrue(qfull(q), "Queue should be full")
        # Test a failing blocking put
        q.fail_next_put = True
        try:
            self.do_blocking_test(q.put, ("full",), q.get, ())
            self.fail("The queue didn't fail when it should have")
        except FailingQueueException:
            pass
        # Check the Queue isn't damaged.
        # put failed, but get succeeded - re-add
        q.put("last")
        # Test a failing timeout put
        q.fail_next_put = True
        try:
            self.do_exceptional_blocking_test(q.put, ("full", True, 10), q.get, (),
                                              FailingQueueException)
            self.fail("The queue didn't fail when it should have")
        except FailingQueueException:
            pass
        # Check the Queue isn't damaged.
        # put failed, but get succeeded - re-add
        q.put("last")
        self.assertTrue(qfull(q), "Queue should be full")
        q.get()
        self.assertTrue(not qfull(q), "Queue should not be full")
        q.put("last")
        self.assertTrue(qfull(q), "Queue should be full")
        # Test a blocking put
        self.do_blocking_test(q.put, ("full",), q.get, ())
        # Empty it
        for i in range(QUEUE_SIZE):
            q.get()
        self.assertTrue(not q.qsize(), "Queue should be empty")
        q.put("first")
        q.fail_next_get = True
        try:
            q.get()
            self.fail("The queue didn't fail when it should have")
        except FailingQueueException:
            pass
        self.assertTrue(q.qsize(), "Queue should not be empty")
        q.fail_next_get = True
        try:
            q.get(timeout=0.1)
            self.fail("The queue didn't fail when it should have")
        except FailingQueueException:
            pass
        self.assertTrue(q.qsize(), "Queue should not be empty")
        q.get()
        self.assertTrue(not q.qsize(), "Queue should be empty")
        q.fail_next_get = True
        try:
            self.do_exceptional_blocking_test(q.get, (), q.put, ('empty',),
                                              FailingQueueException)
            self.fail("The queue didn't fail when it should have")
        except FailingQueueException:
            pass
        # put succeeded, but get failed.
        self.assertTrue(q.qsize(), "Queue should not be empty")
        q.get()
        self.assertTrue(not q.qsize(), "Queue should be empty")
    def test_failing_queue(self):
        # Test to make sure a queue is functioning correctly.
        # Done twice to the same instance.
        q = FailingQueue(QUEUE_SIZE)
        self.failing_queue_test(q)
        self.failing_queue_test(q)
def test_main():
    """Entry point used by regrtest: run all queue test cases."""
    support.run_unittest(QueueTest, LifoQueueTest, PriorityQueueTest,
                         FailingQueueTest)
if __name__ == "__main__":
    test_main()
| apache-2.0 |
vmanoria/bluemix-hue-filebrowser | hue-3.8.1-bluemix/desktop/core/ext-py/Mako-0.8.1/test/test_def.py | 36 | 16341 | from mako.template import Template
from mako import lookup
from test import TemplateTest
from test.util import flatten_result, result_lines
from test import eq_, assert_raises
from mako import compat
class DefTest(TemplateTest):
    """Tests for basic <%def> rendering: argument passing, defs calling
    each other, and calling defs from the top level."""
    def test_def_noargs(self):
        """A zero-argument def can be called and sees context variables."""
        template = Template("""
        ${mycomp()}
        <%def name="mycomp()">
            hello mycomp ${variable}
        </%def>
        """)
        eq_(
            template.render(variable='hi').strip(),
            """hello mycomp hi"""
        )
    def test_def_blankargs(self):
        """A def declared before its call site renders correctly."""
        template = Template("""
        <%def name="mycomp()">
            hello mycomp ${variable}
        </%def>
        ${mycomp()}""")
        eq_(
            template.render(variable='hi').strip(),
            "hello mycomp hi"
        )
    def test_def_args(self):
        """Positional arguments are passed through to the def."""
        template = Template("""
        <%def name="mycomp(a, b)">
            hello mycomp ${variable}, ${a}, ${b}
        </%def>
        ${mycomp(5, 6)}""")
        eq_(
            template.render(variable='hi', a=5, b=6).strip(),
            """hello mycomp hi, 5, 6"""
        )
    def test_inter_def(self):
        """test defs calling each other"""
        template = Template("""
        ${b()}
        <%def name="a()">\
        im a
        </%def>
        <%def name="b()">
            im b
            and heres a: ${a()}
        </%def>
        <%def name="c()">
            im c
        </%def>
        """)
        # check that "a" is declared in "b", but not in "c"
        if compat.py3k:
            assert "a" not in template.module.render_c.__code__.co_varnames
            assert "a" in template.module.render_b.__code__.co_varnames
        else:
            assert "a" not in template.module.render_c.func_code.co_varnames
            assert "a" in template.module.render_b.func_code.co_varnames
        # then test output
        eq_(
            flatten_result(template.render()),
            "im b and heres a: im a"
        )
    def test_toplevel(self):
        """test calling a def from the top level"""
        template = Template("""
            this is the body
            <%def name="a()">
                this is a
            </%def>
            <%def name="b(x, y)">
                this is b, ${x} ${y}
            </%def>
        """)
        self._do_test(template.get_def("a"),
                      "this is a",
                      filters=flatten_result)
        self._do_test(template.get_def("b"),
                      "this is b, 10 15",
                      template_args={'x': 10, 'y': 15},
                      filters=flatten_result)
        self._do_test(template.get_def("body"),
                      "this is the body",
                      filters=flatten_result)
        # test that args outside of the dict can be used
        self._do_test(template.get_def("a"), "this is a",
                      filters=flatten_result,
                      template_args={'q': 5, 'zq': 'test'})
class ScopeTest(TemplateTest):
    """test scoping rules. The key is, enclosing
    scope always takes precedence over contextual scope."""
    def test_scope_one(self):
        """A def sees a context variable until the body rebinds it."""
        self._do_memory_test("""
        <%def name="a()">
            this is a, and y is ${y}
        </%def>
        ${a()}
        <%
            y = 7
        %>
        ${a()}
        """,
        "this is a, and y is None this is a, and y is 7",
        filters=flatten_result,
        template_args={'y': None}
        )
    def test_scope_two(self):
        """Reading a variable before a later assignment in the same body
        raises UnboundLocalError (Python local-scoping semantics)."""
        t = Template("""
        y is ${y}
        <%
            y = 7
        %>
        y is ${y}
        """)
        try:
            t.render(y=None)
            assert False
        except UnboundLocalError:
            assert True
    def test_scope_four(self):
        """test that variables are pulled
        from 'enclosing' scope before context."""
        t = Template("""
        <%
            x = 5
        %>
        <%def name="a()">
            this is a. x is ${x}.
        </%def>
        <%def name="b()">
            <%
                x = 9
            %>
            this is b. x is ${x}.
            calling a. ${a()}
        </%def>
        ${b()}
        """)
        eq_(
            flatten_result(t.render()),
            "this is b. x is 9. calling a. this is a. x is 5."
        )
    def test_scope_five(self):
        """test that variables are pulled from
        'enclosing' scope before context."""
        # same as test four, but adds a scope around it.
        t = Template("""
        <%def name="enclosing()">
        <%
            x = 5
        %>
        <%def name="a()">
            this is a. x is ${x}.
        </%def>
        <%def name="b()">
            <%
                x = 9
            %>
            this is b. x is ${x}.
            calling a. ${a()}
        </%def>
        ${b()}
        </%def>
        ${enclosing()}
        """)
        eq_(
            flatten_result(t.render()),
            "this is b. x is 9. calling a. this is a. x is 5."
        )
    def test_scope_six(self):
        """test that the initial context counts
        as 'enclosing' scope, for plain defs"""
        t = Template("""
        <%def name="a()">
            a: x is ${x}
        </%def>
        <%def name="b()">
            <%
                x = 10
            %>
            b. x is ${x}. ${a()}
        </%def>
        ${b()}
        """)
        eq_(
            flatten_result(t.render(x=5)),
            "b. x is 10. a: x is 5"
        )
    def test_scope_seven(self):
        """test that the initial context counts
        as 'enclosing' scope, for nested defs"""
        t = Template("""
        <%def name="enclosing()">
            <%def name="a()">
                a: x is ${x}
            </%def>
            <%def name="b()">
                <%
                    x = 10
                %>
                b. x is ${x}. ${a()}
            </%def>
            ${b()}
        </%def>
        ${enclosing()}
        """)
        eq_(
            flatten_result(t.render(x=5)),
            "b. x is 10. a: x is 5"
        )
    def test_scope_eight(self):
        """test that the initial context counts
        as 'enclosing' scope, for nested defs"""
        t = Template("""
        <%def name="enclosing()">
            <%def name="a()">
                a: x is ${x}
            </%def>
            <%def name="b()">
                <%
                    x = 10
                %>
                b. x is ${x}. ${a()}
            </%def>
            ${b()}
        </%def>
        ${enclosing()}
        """)
        eq_(
            flatten_result(t.render(x=5)),
            "b. x is 10. a: x is 5"
        )
    def test_scope_nine(self):
        """test that 'enclosing scope' doesnt
        get exported to other templates"""
        l = lookup.TemplateLookup()
        l.put_string('main', """
        <%
            x = 5
        %>
        this is main.  <%include file="secondary"/>
        """)
        l.put_string('secondary', """
        this is secondary.  x is ${x}
        """)
        eq_(
            flatten_result(l.get_template('main').render(x=2)),
            "this is main. this is secondary. x is 2"
        )
    def test_scope_ten(self):
        """Sibling defs share the enclosing def's rebinding of y, while the
        outermost y stays untouched."""
        t = Template("""
        <%def name="a()">
            <%def name="b()">
                <%
                    y = 19
                %>
                b/c: ${c()}
                b/y: ${y}
            </%def>
            <%def name="c()">
                c/y: ${y}
            </%def>
            <%
                # we assign to "y". but the 'enclosing
                # scope' of "b" and "c" is from
                # the "y" on the outside
                y = 10
            %>
            a/y: ${y}
            a/b: ${b()}
        </%def>
        <%
            y = 7
        %>
        main/a: ${a()}
        main/y: ${y}
        """)
        eq_(
            flatten_result(t.render()),
            "main/a: a/y: 10 a/b: b/c: c/y: 10 b/y: 19 main/y: 7"
        )
    def test_scope_eleven(self):
        """A def argument shadows the context variable for nested defs."""
        t = Template("""
            x is ${x}
            <%def name="a(x)">
                this is a, ${b()}
                <%def name="b()">
                    this is b, x is ${x}
                </%def>
            </%def>
            ${a(x=5)}
        """)
        eq_(
            result_lines(t.render(x=10)),
            [
                "x is 10",
                "this is a,",
                "this is b, x is 5"
            ])
    def test_unbound_scope(self):
        """Assigning to an enclosing-scope variable inside a def raises
        UnboundLocalError when read first."""
        t = Template("""
            <%
                y = 10
            %>
            <%def name="a()">
                y is: ${y}
                <%
                    # should raise error ?
                    y = 15
                %>
                y is ${y}
            </%def>
            ${a()}
        """)
        assert_raises(
            UnboundLocalError,
            t.render
        )
    def test_unbound_scope_two(self):
        """Same as test_unbound_scope, with an extra enclosing def."""
        t = Template("""
            <%def name="enclosing()">
            <%
                y = 10
            %>
            <%def name="a()">
                y is: ${y}
                <%
                    # should raise error ?
                    y = 15
                %>
                y is ${y}
            </%def>
            ${a()}
            </%def>
            ${enclosing()}
        """)
        try:
            print(t.render())
            assert False
        except UnboundLocalError:
            assert True
    def test_canget_kwargs(self):
        """test that arguments passed to the body()
        function are accessible by top-level defs"""
        l = lookup.TemplateLookup()
        l.put_string("base", """
        ${next.body(x=12)}
        """)
        l.put_string("main", """
            <%inherit file="base"/>
            <%page args="x"/>
            this is main.  x is ${x}
            ${a()}
            <%def name="a(**args)">
                this is a, x is ${x}
            </%def>
        """)
        # test via inheritance
        eq_(
            result_lines(l.get_template("main").render()),
            [
                "this is main. x is 12",
                "this is a, x is 12"
            ])
        l.put_string("another", """
            <%namespace name="ns" file="main"/>
            ${ns.body(x=15)}
        """)
        # test via namespace
        eq_(
            result_lines(l.get_template("another").render()),
            [
                "this is main. x is 15",
                "this is a, x is 15"
            ])
    def test_inline_expression_from_arg_one(self):
        """test that cache_key=${foo} gets its value from
        the 'foo' argument in the <%def> tag,
        and strict_undefined doesn't complain.
        this is #191.
        """
        t = Template("""
        <%def name="layout(foo)" cached="True" cache_key="${foo}">
        foo: ${foo}
        </%def>
        ${layout(3)}
        """, strict_undefined=True,
                cache_impl="plain")
        eq_(
            result_lines(t.render()),
            ["foo: 3"]
        )
    def test_interpret_expression_from_arg_two(self):
        """test that cache_key=${foo} gets its value from
        the 'foo' argument regardless of it being passed
        from the context.

        This is here testing that there's no change
        to existing behavior before and after #191.
        """
        t = Template("""
        <%def name="layout(foo)" cached="True" cache_key="${foo}">
        foo: ${value}
        </%def>
        ${layout(3)}
        """, cache_impl="plain")
        eq_(
            result_lines(t.render(foo='foo', value=1)),
            ["foo: 1"]
        )
        eq_(
            result_lines(t.render(foo='bar', value=2)),
            ["foo: 1"]
        )
class NestedDefTest(TemplateTest):
    """Tests for defs nested inside other defs, including argument defaults
    and visibility of enclosing-scope variables."""
    def test_nested_def(self):
        """Nested defs are callable from their enclosing def's body."""
        t = Template("""
        ${hi()}
        <%def name="hi()">
            hey, im hi.
            and heres ${foo()}, ${bar()}
            <%def name="foo()">
                this is foo
            </%def>
            <%def name="bar()">
                this is bar
            </%def>
        </%def>
        """)
        eq_(
            flatten_result(t.render()),
            "hey, im hi. and heres this is foo , this is bar"
        )
    def test_nested_2(self):
        """Nested defs see context variables through the enclosing def."""
        t = Template("""
            x is ${x}
            <%def name="a()">
                this is a, x is ${x}
                ${b()}
                <%def name="b()">
                    this is b: ${x}
                </%def>
            </%def>
            ${a()}
        """)
        eq_(
            flatten_result(t.render(x=10)),
            "x is 10 this is a, x is 10 this is b: 10"
        )
    def test_nested_with_args(self):
        """Nested defs accept positional args and keyword defaults."""
        t = Template("""
        ${a()}
        <%def name="a()">
            <%def name="b(x, y=2)">
                b x is ${x} y is ${y}
            </%def>
            a ${b(5)}
        </%def>
        """)
        eq_(
            flatten_result(t.render()),
            "a b x is 5 y is 2"
        )
    def test_nested_def_2(self):
        """Three levels of def nesting render correctly."""
        template = Template("""
        ${a()}
        <%def name="a()">
            <%def name="b()">
                <%def name="c()">
                    comp c
                </%def>
                ${c()}
            </%def>
            ${b()}
        </%def>
        """)
        eq_(
            flatten_result(template.render()),
            "comp c"
        )
    def test_nested_nested_def(self):
        """Same-named defs at different nesting levels resolve to the
        lexically closest definition."""
        t = Template("""
        ${a()}
        <%def name="a()">
            a
            <%def name="b1()">
                a_b1
            </%def>
            <%def name="b2()">
                a_b2 ${c1()}
                <%def name="c1()">
                    a_b2_c1
                </%def>
            </%def>
            <%def name="b3()">
                a_b3 ${c1()}
                <%def name="c1()">
                    a_b3_c1 heres x: ${x}
                    <%
                        y = 7
                    %>
                    y is ${y}
                </%def>
                <%def name="c2()">
                    a_b3_c2
                    y is ${y}
                    c1 is ${c1()}
                </%def>
                ${c2()}
            </%def>
            ${b1()} ${b2()} ${b3()}
        </%def>
        """)
        eq_(
            flatten_result(t.render(x=5, y=None)),
            "a a_b1 a_b2 a_b2_c1 a_b3 a_b3_c1 "
            "heres x: 5 y is 7 a_b3_c2 y is "
            "None c1 is a_b3_c1 heres x: 5 y is 7"
        )
    def test_nested_nested_def_2(self):
        """Sibling nested defs can call each other."""
        t = Template("""
        <%def name="a()">
            this is a ${b()}
            <%def name="b()">
                this is b
                ${c()}
            </%def>
            <%def name="c()">
                this is c
            </%def>
        </%def>
        ${a()}
        """)
        eq_(
            flatten_result(t.render()),
            "this is a this is b this is c"
        )
    def test_outer_scope(self):
        """A def defined at top level sees the context's x, not a sibling
        def's local rebinding."""
        t = Template("""
        <%def name="a()">
            a: x is ${x}
        </%def>
        <%def name="b()">
            <%def name="c()">
            <%
                x = 10
            %>
            c. x is ${x}.  ${a()}
            </%def>
            b. ${c()}
        </%def>
        ${b()}
        x is ${x}
        """)
        eq_(
            flatten_result(t.render(x=5)),
            "b. c. x is 10. a: x is 5 x is 5"
        )
class ExceptionTest(TemplateTest):
    """Tests for exception propagation and custom error handlers."""
    def test_raise(self):
        """With format_exceptions=False, template exceptions propagate."""
        template = Template("""
            <%
                raise Exception("this is a test")
            %>
    """, format_exceptions=False)
        assert_raises(
            Exception,
            template.render
        )
    def test_handler(self):
        """An error_handler that returns True swallows the exception and
        its output replaces the render result."""
        def handle(context, error):
            context.write("error message is " + str(error))
            return True
        template = Template("""
            <%
                raise Exception("this is a test")
            %>
    """, error_handler=handle)
        eq_(
            template.render().strip(),
            "error message is this is a test"
        )
| gpl-2.0 |
Prestoroasters/artisan | artisanlib/list_ports_osx.py | 9 | 6908 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# list_ports_osx.py
#
# Copyright (c) 2013, Paul Holleis, Marko Luther
# All rights reserved.
#
#
# LICENSE
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# List all of the callout devices in OS/X by querying IOKit.
# See the following for a reference of how to do this:
# http://developer.apple.com/library/mac/#documentation/DeviceDrivers/Conceptual/WorkingWSerial/WWSerial_SerialDevs/SerialDevices.html#//apple_ref/doc/uid/TP30000384-CIHGEAFD
# More help from darwin_hid.py
# Also see the 'IORegistryExplorer' for an idea of what we are actually searching
import ctypes
from ctypes import util
import re
# Load the macOS IOKit and CoreFoundation frameworks via ctypes.
iokit = ctypes.cdll.LoadLibrary(ctypes.util.find_library('IOKit'))
cf = ctypes.cdll.LoadLibrary(ctypes.util.find_library('CoreFoundation'))
# Framework-exported globals used as arguments below.
kIOMasterPortDefault = ctypes.c_void_p.in_dll(iokit, "kIOMasterPortDefault")
kCFAllocatorDefault = ctypes.c_void_p.in_dll(cf, "kCFAllocatorDefault")
kCFStringEncodingMacRoman = 0
# Declare argument/return types for the IOKit functions we call so ctypes
# marshals pointers correctly on 64-bit builds.
iokit.IOServiceMatching.restype = ctypes.c_void_p
iokit.IOServiceGetMatchingServices.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
iokit.IOServiceGetMatchingServices.restype = ctypes.c_void_p
iokit.IORegistryEntryGetParentEntry.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
iokit.IORegistryEntryCreateCFProperty.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_uint32]
iokit.IORegistryEntryCreateCFProperty.restype = ctypes.c_void_p
iokit.IORegistryEntryGetPath.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
iokit.IORegistryEntryGetPath.restype = ctypes.c_void_p
iokit.IORegistryEntryGetName.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
iokit.IORegistryEntryGetName.restype = ctypes.c_void_p
iokit.IOObjectGetClass.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
iokit.IOObjectGetClass.restype = ctypes.c_void_p
iokit.IOObjectRelease.argtypes = [ctypes.c_void_p]
# CoreFoundation string/number helpers used to decode property values.
cf.CFStringCreateWithCString.argtypes = [ctypes.c_void_p, ctypes.c_char_p, ctypes.c_int32]
cf.CFStringCreateWithCString.restype = ctypes.c_void_p
cf.CFStringGetCStringPtr.argtypes = [ctypes.c_void_p, ctypes.c_uint32]
cf.CFStringGetCStringPtr.restype = ctypes.c_char_p
cf.CFNumberGetValue.argtypes = [ctypes.c_void_p, ctypes.c_uint32, ctypes.c_void_p]
cf.CFNumberGetValue.restype = ctypes.c_void_p
def get_string_property(device_t, prop):
    """ Search the given device for the specified string property

        @param device_t Device to search
        @param prop String to search for.
        @return Python string containing the value, or None if not found.
    """
    # Build a CFString key for the property name.
    key = cf.CFStringCreateWithCString(
        kCFAllocatorDefault,
        prop.encode("mac_roman"),
        kCFStringEncodingMacRoman
    )
    container = iokit.IORegistryEntryCreateCFProperty(
        device_t,
        key,
        kCFAllocatorDefault,
        0
    )
    if not container:
        return None
    # A NULL pointer here means the CFString's bytes were not directly
    # accessible; in that case report the property as missing.
    raw = cf.CFStringGetCStringPtr(container, 0)
    return raw.decode("utf-8") if raw else None
def get_int_property(device_t, prop):
    """ Search the given device for the specified integer property

        @param device_t Device to search
        @param prop Name of the property to look up.
        @return int value of the property, or None if not found.
    """
    key = cf.CFStringCreateWithCString(
        kCFAllocatorDefault,
        prop.encode("mac_roman"),
        kCFStringEncodingMacRoman
    )
    container = iokit.IORegistryEntryCreateCFProperty(
        device_t,
        key,
        kCFAllocatorDefault,
        0
    )
    if not container:
        return None
    # Decode the CFNumber into a 16-bit integer (CFNumberType 2 is
    # presumably kCFNumberSInt16Type, matching the c_uint16 buffer).
    number = ctypes.c_uint16()
    cf.CFNumberGetValue(container, 2, ctypes.byref(number))
    return number.value
def IORegistryEntryGetName(device):
    # NOTE(review): despite its name, this calls IOObjectGetClass, so it
    # returns the object's *class* name (e.g. b"IOUSBDevice"), which is what
    # GetParentDeviceByType compares against.  Confirm before "fixing" the
    # mismatch -- switching to the real IORegistryEntryGetName would change
    # the values compared and break the parent search.
    pathname = ctypes.create_string_buffer(100) # TODO: Is this ok?
    iokit.IOObjectGetClass(
        device,
        ctypes.byref(pathname)
    )
    return pathname.value
def GetParentDeviceByType(device, parent_type):
    """ Find the first parent of a device that implements the parent_type
        @param IOService Service to inspect
        @return Pointer to the parent type, or None if it was not found.
    """
    # Walk up the IOService plane of the registry until we hit an entry
    # whose class name matches parent_type (or run out of parents).
    plane = "IOService".encode("mac_roman")
    current = device
    while IORegistryEntryGetName(current) != parent_type:
        parent = ctypes.c_void_p()
        status = iokit.IORegistryEntryGetParentEntry(
            current,
            plane,
            ctypes.byref(parent)
        )
        # Non-zero kern_return_t: no further parents, so no match exists.
        if status != 0:
            return None
        current = parent
    return current
def GetIOServicesByType(service_type):
    """Return a list of IOKit service handles matching *service_type*."""
    matching_iterator = ctypes.c_void_p()
    iokit.IOServiceGetMatchingServices(
        kIOMasterPortDefault,
        iokit.IOServiceMatching(service_type),
        ctypes.byref(matching_iterator)
    )
    found = []
    # Drain the iterator; a NULL handle marks the end of the matches.
    while iokit.IOIteratorIsValid(matching_iterator):
        handle = iokit.IOIteratorNext(matching_iterator)
        if not handle:
            break
        found.append(handle)
    iokit.IOObjectRelease(matching_iterator)
    return found
def comports():
    """Enumerate serial ports known to IOKit.

    @return list of ``[device, description, hwid]`` triples, where
        ``device`` is the callout device path (e.g. ``/dev/cu.usbserial-X``)
        and ``description``/``hwid`` are filled in for USB-backed ports and
        empty strings otherwise.
    """
    # Scan for all iokit serial ports
    services = GetIOServicesByType(b'IOSerialBSDClient')
    ports = []
    for service in services:
        info = []
        # First, add the callout device file.
        info.append(get_string_property(service, "IOCalloutDevice"))
        # If the serial port is implemented by a USB device, report its
        # product name and VID:PID/serial-number hardware id.
        usb_device = GetParentDeviceByType(service, b"IOUSBDevice")
        # PEP 8: compare against None with ``is not``, not ``!=``.
        if usb_device is not None:
            info.append(get_string_property(usb_device, "USB Product Name"))
            info.append(
                "USB VID:PID=%x:%x SNR=%s"%(
                get_int_property(usb_device, "idVendor"),
                get_int_property(usb_device, "idProduct"),
                get_string_property(usb_device, "USB Serial Number"))
                )
        else:
            info.append('')
            info.append('')
        ports.append(info)
    return ports
# test
if __name__ == '__main__':
    for port, desc, hwid in sorted(comports()):
        # BUG FIX: the %-formatting must happen inside print().  The
        # original ``print("...") % (...)`` applied % to print()'s return
        # value (None), raising TypeError on the first port.
        print("%s: %s [%s]" % (port, desc, hwid))
| gpl-3.0 |
wallnerryan/flocker-profiles | flocker/node/testtools.py | 6 | 12953 | # Copyright ClusterHQ Inc. See LICENSE file for details.
"""
Testing utilities for ``flocker.node``.
"""
from functools import wraps
import os
import pwd
from unittest import skipIf
from uuid import uuid4
from distutils.version import LooseVersion
import psutil
from zope.interface import implementer
from characteristic import attributes
from twisted.trial.unittest import TestCase, SkipTest
from twisted.internet.defer import succeed
from zope.interface.verify import verifyObject
from eliot import Logger, ActionType, MessageType, fields
from . import IDeployer, IStateChange, sequentially
from ..testtools import loop_until, find_free_port
from ..control import (
IClusterStateChange, Node, NodeState, Deployment, DeploymentState)
from ..control._model import ip_to_uuid, Leases
from ._docker import AddressInUse, DockerClient
def docker_accessible():
    """
    Attempt to connect to the Docker control socket.

    :return: A ``bytes`` string describing the reason Docker is not
        accessible or ``None`` if it appears to be accessible.
    """
    try:
        # A successful ping means the daemon is reachable.
        DockerClient()._client.ping()
    except Exception as e:
        return str(e)
    return None
# Probe Docker once at import time; tests decorated with
# ``if_docker_configured`` are skipped (with the reason) when the Docker
# daemon is not reachable by the current user.
_docker_reason = docker_accessible()
if_docker_configured = skipIf(
    _docker_reason,
    "User {!r} cannot access Docker: {}".format(
        pwd.getpwuid(os.geteuid()).pw_name,
        _docker_reason,
    ))
def require_docker_version(minimum_docker_version, message):
    """
    Skip the wrapped test if the actual Docker version is less than
    ``minimum_docker_version``.

    :param str minimum_docker_version: The minimum version required by the
        test.
    :param str message: An explanatory message which will be printed when
        skipping the test.
    """
    # Parse once, outside the wrapper, so every call reuses the same value.
    required = LooseVersion(minimum_docker_version)

    def decorator(wrapped):
        @wraps(wrapped)
        def wrapper(*args, **kwargs):
            # Query the running daemon's version lazily, at test time.
            actual = LooseVersion(
                DockerClient()._client.version()['Version']
            )
            if actual < required:
                raise SkipTest(
                    'Minimum required Docker version: {}. '
                    'Actual Docker version: {}. '
                    'Details: {}'.format(
                        required,
                        actual,
                        message,
                    )
                )
            return wrapped(*args, **kwargs)
        return wrapper
    return decorator
def wait_for_unit_state(docker_client, unit_name, expected_activation_states):
    """
    Wait until a unit is in the requested state.

    :param docker_client: A ``DockerClient`` instance.
    :param unicode unit_name: The name of the unit.
    :param expected_activation_states: Activation states to wait for.

    :return: ``Deferred`` that fires when required state has been reached.
    """
    def unit_reached_state(units):
        # Truthy as soon as the named unit reports a wanted state.
        return any(
            unit.name == unit_name and
            unit.activation_state in expected_activation_states
            for unit in units
        )

    def poll():
        return docker_client.list().addCallback(unit_reached_state)

    return loop_until(poll)
# Eliot action type logged each time a ControllableAction runs.
CONTROLLABLE_ACTION_TYPE = ActionType(u"test:controllableaction", [], [])
@implementer(IStateChange)
@attributes(['result'])
class ControllableAction(object):
    """
    ``IStateChange`` whose results can be controlled.
    """
    # Set to True once run() has been invoked.
    called = False
    # The deployer passed to run(), recorded for later assertions.
    deployer = None
    _logger = Logger()
    @property
    def eliot_action(self):
        return CONTROLLABLE_ACTION_TYPE(self._logger)
    def run(self, deployer):
        # Record the call and return the pre-configured ``result``.
        self.called = True
        self.deployer = deployer
        return self.result
@implementer(IDeployer)
class DummyDeployer(object):
    """
    A non-implementation of ``IDeployer``: reports no state and calculates
    no changes.
    """
    hostname = u"127.0.0.1"
    node_uuid = uuid4()
    def discover_state(self, node_stat):
        # No local state to report.
        return succeed(())
    def calculate_changes(self, desired_configuration, cluster_state):
        # Nothing to do, ever.
        return sequentially(changes=[])
@implementer(IDeployer)
class ControllableDeployer(object):
    """
    ``IDeployer`` whose results can be controlled: successive calls pop
    pre-seeded local states and calculated actions, and the inputs to
    ``calculate_changes`` are recorded for assertions.
    """
    def __init__(self, hostname, local_states, calculated_actions):
        self.node_uuid = ip_to_uuid(hostname)
        self.hostname = hostname
        # Queue of Deferreds to return from successive discover_state calls.
        self.local_states = local_states
        # Queue of IStateChange results for successive calculate_changes calls.
        self.calculated_actions = calculated_actions
        # Records (node_state, desired_configuration, cluster_state) tuples.
        self.calculate_inputs = []
    def discover_state(self, node_state):
        # Wrap the next pre-seeded state in a one-element tuple, matching
        # the IDeployer contract of returning an iterable of changes.
        return self.local_states.pop(0).addCallback(lambda val: (val,))
    def calculate_changes(self, desired_configuration, cluster_state):
        self.calculate_inputs.append(
            (cluster_state.get_node(uuid=self.node_uuid,
                                    hostname=self.hostname),
             desired_configuration, cluster_state))
        return self.calculated_actions.pop(0)
# A deployment with no information:
EMPTY = Deployment(nodes=[])
# The corresponding empty cluster state:
EMPTY_STATE = DeploymentState()
def ideployer_tests_factory(fixture):
    """
    Create test case for IDeployer implementation.

    :param fixture: Callable that takes ``TestCase`` instance and returns
         a ``IDeployer`` provider.

    :return: ``TestCase`` subclass that will test the given fixture.
    """
    class IDeployerTests(TestCase):
        """
        Tests for ``IDeployer``.
        """
        def test_interface(self):
            """
            The object claims to provide the interface.
            """
            self.assertTrue(verifyObject(IDeployer, fixture(self)))
        def _discover_state(self):
            """
            Create a deployer using the fixture and ask it to discover state.

            :return: The return value of the object's ``discover_state``
                method.
            """
            deployer = fixture(self)
            result = deployer.discover_state(NodeState(hostname=b"10.0.0.1"))
            return result
        def test_discover_state_list_result(self):
            """
            The object's ``discover_state`` method returns a ``Deferred`` that
            fires with a ``list``.
            """
            # NOTE(review): the docstring says ``list`` but the assertion
            # checks for ``tuple`` -- the tuple check is authoritative here.
            def discovered(changes):
                self.assertEqual(tuple, type(changes))
            return self._discover_state().addCallback(discovered)
        def test_discover_state_iclusterstatechange(self):
            """
            The elements of the ``list`` that ``discover_state``\ 's
            ``Deferred`` fires with provide ``IClusterStateChange``.
            """
            def discovered(changes):
                wrong = []
                for obj in changes:
                    if not IClusterStateChange.providedBy(obj):
                        wrong.append(obj)
                if wrong:
                    template = (
                        "Some elements did not provide IClusterStateChange: {}"
                    )
                    self.fail(template.format(wrong))
            return self._discover_state().addCallback(discovered)
        def test_calculate_necessary_state_changes(self):
            """
            The object's ``calculate_necessary_state_changes`` method returns a
            ``IStateChange`` provider.
            """
            deployer = fixture(self)
            result = deployer.calculate_changes(EMPTY, EMPTY_STATE)
            self.assertTrue(verifyObject(IStateChange, result))
    return IDeployerTests
def to_node(node_state):
    """
    Convert a ``NodeState`` to a corresponding ``Node``.

    :param NodeState node_state: Object to convert.
    :return Node: Equivalent node.
    """
    # Missing (None) collections become empty ones on the Node.
    applications = node_state.applications or []
    manifestations = node_state.manifestations or {}
    return Node(
        uuid=node_state.uuid,
        hostname=node_state.hostname,
        applications=applications,
        manifestations=manifestations,
    )
def assert_calculated_changes_for_deployer(
        case, deployer, node_state, node_config, nonmanifest_datasets,
        additional_node_states, additional_node_config, expected_changes,
        leases=Leases(),
):
    """
    Assert that ``calculate_changes`` returns certain changes when it is
    invoked with the given state and configuration.

    :param TestCase case: The ``TestCase`` to use to make assertions (typically
        the one being run at the moment).
    :param IDeployer deployer: The deployer provider which will be asked to
        calculate the changes.
    :param NodeState node_state: The deployer will be asked to calculate
        changes for a node that has this state.
    :param Node node_config: The deployer will be asked to calculate changes
        for a node with this desired configuration.
    :param set nonmanifest_datasets: Datasets which will be presented as part
        of the cluster state without manifestations on any node.
    :param set additional_node_states: A set of ``NodeState`` for other nodes.
    :param set additional_node_config: A set of ``Node`` for other nodes.
    :param expected_changes: The ``IStateChange`` expected to be returned.
    :param Leases leases: Currently configured leases. By default none exist.
    """
    # Index the non-manifest datasets by id, as DeploymentState expects.
    datasets_by_id = {
        dataset.dataset_id: dataset for dataset in nonmanifest_datasets
    }
    cluster_state = DeploymentState(
        nodes={node_state} | additional_node_states,
        nonmanifest_datasets=datasets_by_id,
    )
    cluster_configuration = Deployment(
        nodes={node_config} | additional_node_config,
        leases=leases,
    )
    actual_changes = deployer.calculate_changes(
        cluster_configuration, cluster_state,
    )
    case.assertEqual(expected_changes, actual_changes)
# Eliot message logged when a test container failed to bind its port;
# records the address and the name of the process currently holding it.
ADDRESS_IN_USE = MessageType(
    u"flocker:test:address_in_use",
    fields(ip=unicode, port=int, name=bytes),
)
def _find_process_name(port_number):
    """
    Get the name of the process using the given port number.

    This is purely diagnostic (used to enrich a log message), so any
    failure to identify the owner is reported as ``None`` rather than
    raised.

    :param int port_number: The local port to look up.
    :return: The process name, or ``None`` if no owning process could be
        determined.
    """
    for connection in psutil.net_connections():
        if connection.laddr[1] == port_number:
            # ``pid`` is None when we lack permission to see the owner.
            if connection.pid is None:
                return None
            try:
                return psutil.Process(connection.pid).name()
            except psutil.Error:
                # The process may have exited between the
                # net_connections() snapshot and the Process() lookup
                # (NoSuchProcess), or access may be denied.
                return None
    return None
def _retry_on_port_collision(reason, add, cleanup):
    """
    Cleanup and re-add a container if it failed to start because of a port
    collision.

    :param reason: The exception describing the container startup failure.
    :param add: A no-argument callable that can be used to try adding and
        starting the container again.
    :param cleanup: A no-argument callable that can be used to remove the
        container.
    """
    # Only an AddressInUse failure qualifies for a retry; anything else
    # propagates unchanged.  Each attempt picks a fresh random "available"
    # port, and that availability check is shaky enough that a collision can
    # happen - but also rare enough that simply trying again usually works.
    reason.trap(AddressInUse)
    ip, port = reason.value.address
    # Record which process currently owns the port, for debugging.
    ADDRESS_IN_USE(ip=ip, port=port, name=_find_process_name(port)).write()
    removing = cleanup()
    removing.addCallback(lambda _ignored: add())
    return removing
def add_with_port_collision_retry(client, unit_name, **kw):
    """
    Add a container. Try adding it repeatedly if it has ports defined and
    container startup fails with ``AddressInUse``.
    If ports in the container are defined with an external port number of ``0``
    a locally free port number will be assigned. On each re-try attempt, these
    will be re-assigned to try to avoid the port collision.
    :param DockerClient client: The ``IDockerClient`` to use to try to add the
        container.
    :param unicode unit_name: The name of the container to add. See the
        ``unit_name`` parameter of ``IDockerClient.add``.
    :param kw: Additional keyword arguments to pass on to
        ``IDockerClient.add``.
    :return: A ``Deferred`` which fires with a two-tuple. The first element
        represents the container which has been added and started. The second
        element is a ``list`` of ``PortMap`` instances describing the ports
        which were ultimately requested.
    """
    # Shared mutable list: add() refreshes it in place on each attempt, so
    # the final callback below reports the ports used by the last attempt.
    ultimate_ports = []
    def add():
        # Generate a replacement for any auto-assigned ports
        # (external_port == 0 means "pick a locally free port").  The slice
        # assignment mutates the existing list object rather than rebinding,
        # which is what lets the closure and the caller observe the update.
        ultimate_ports[:] = tentative_ports = list(
            port.set(
                external_port=find_free_port()[1]
            )
            if port.external_port == 0
            else port
            for port in kw["ports"]
        )
        # Copy so repeated attempts never see a previously-substituted list.
        tentative_kw = kw.copy()
        tentative_kw["ports"] = tentative_ports
        return client.add(unit_name, **tentative_kw)
    def cleanup():
        # Remove the half-started container before retrying.
        return client.remove(unit_name)
    if "ports" in kw:
        trying = add()
        # NOTE(review): only a single retry actually happens - the Deferred
        # returned by the retry carries no further errback, so a second
        # AddressInUse propagates.  Confirm whether the docstring's
        # "repeatedly" was the intent.
        trying.addErrback(_retry_on_port_collision, add, cleanup)
        result = trying
    else:
        # No ports requested: no collision is possible, add directly
        # (ultimate_ports stays empty).
        result = client.add(unit_name, **kw)
    result.addCallback(lambda app: (app, ultimate_ports))
    return result
from django.shortcuts import render
#from viewsets import ModelViewSet
from django.views.generic import TemplateView
from .models import *
from django.db.models import Count
from django.contrib.auth.models import User
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.views.generic import ListView, DetailView
from django.core.urlresolvers import reverse_lazy, reverse
### HOME ###
class DashboardView(TemplateView):
    """Landing page summarising recent CRM activity."""
    template_name = 'crm/dashboard.html'
    def get_context_data(self, **kwargs):
        """Add recent opportunities, pending reminders and leaderboards."""
        context = super(DashboardView, self).get_context_data(**kwargs)
        # Querysets are lazy; each is evaluated by the template.
        context.update({
            'opportunity_list':
                Opportunity.objects.all().order_by('-create_date')[:5],
            'reminder_list':
                Reminder.objects.all().exclude(completed=True).order_by('date')[:5],
            'top_list':
                User.objects.annotate(num_op=Count('opportunity')).order_by('-num_op')[:3],
            'stage_by_opp_list':
                Stage.objects.annotate(opp_count=Count('opportunity')),
        })
        return context
### HOME ###
### SEARCH ###
class SearchResultsView(TemplateView):
    """
    Render case-insensitive search results for the ``q`` query-string
    parameter across opportunities, contacts, companies and call-log notes.
    """
    template_name = 'crm/search_results.html'
    def get_context_data(self, **kwargs):
        """Populate the context with search results, if a term was given."""
        context = super(SearchResultsView, self).get_context_data(**kwargs)
        # Single lookup instead of the previous "in GET then index" pair;
        # a missing or empty term renders the page without results.
        term = self.request.GET.get('q')
        if not term:
            return context
        context['searchterm'] = term  # echo the term back to the template
        context['opportunity_list'] = Opportunity.objects.filter(
            company__name__icontains=term)
        context['contact_list'] = Contact.objects.filter(
            first_name__icontains=term)
        context['company_list'] = Company.objects.filter(name__icontains=term)
        context['calllog_notes_list'] = CallLog.objects.filter(
            note__icontains=term)
        return context
### SEARCH ###
### STAGES ###
class StageCreateView(CreateView):
    """Form view for creating a new pipeline ``Stage``."""
    model = Stage
    fields = ['name', 'order', 'description', 'value']
    def get_success_url(self):
        """Redirect to the detail page of the newly created stage."""
        return reverse('crm:stage_detail', args=(self.object.pk,))
class StageUpdateView(UpdateView):
    """Form view for editing an existing ``Stage``."""
    model = Stage
    fields = ['name', 'order', 'description', 'value']
    def get_success_url(self):
        """Redirect back to the updated stage's detail page."""
        return reverse('crm:stage_detail', args=(self.object.pk,))
class StageDeleteView(DeleteView):
    """Confirmation view for deleting a ``Stage``."""
    model = Stage
    # Static redirect target: the deleted object no longer has a detail page.
    success_url = reverse_lazy('crm:stage_delete_success')
class StageDetailView(DetailView):
    """Read-only display of a single ``Stage``."""
    # The commented-out ``fields`` list was removed: DetailView renders the
    # whole object and has no ``fields`` attribute, so the line was dead.
    model = Stage
class StageListView(ListView):
    """List all ``Stage`` objects."""
    model = Stage
#class StageViewSet(ModelViewSet):
# model = Stage
### STAGES ###
# Dead code: former REST viewset definitions, disabled by wrapping them in a
# bare string literal (the matching ``ModelViewSet`` import at the top of the
# file is also commented out).  NOTE(review): consider deleting outright once
# it is confirmed these viewsets will not return.
'''
class CompanyViewSet(ModelViewSet):
    model = Company
class ContactViewSet(ModelViewSet):
    model = Contact
class CampaignViewSet(ModelViewSet):
    model = Campaign
class OpportunityViewSet(ModelViewSet):
    model = Opportunity
class ReminderViewSet(ModelViewSet):
    model = Reminder
class ReportViewSet(ModelViewSet):
    model = Report
class CallLogViewSet(ModelViewSet):
    model = CallLog
class OpportunityStageViewSet(ModelViewSet):
    model = OpportunityStage
'''
| gpl-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.