text stringlengths 4 1.02M | meta dict |
|---|---|
"""Unittests for lkgm_manager"""
from __future__ import print_function
import contextlib
import os
import sys
import tempfile
from xml.dom import minidom
import mock
from chromite.cbuildbot import lkgm_manager
from chromite.cbuildbot import manifest_version
from chromite.cbuildbot import repository
from chromite.lib import config_lib
from chromite.lib import constants
from chromite.lib import cros_build_lib
from chromite.lib import cros_logging as logging
from chromite.lib import cros_test_lib
from chromite.lib import git
from chromite.lib import osutils
from chromite.lib.buildstore import FakeBuildStore
assert sys.version_info >= (3, 6), 'This module requires Python 3.6+'

# Canned version strings shared by the tests below.
FAKE_VERSION_STRING = '1.2.4-rc3'
FAKE_VERSION_STRING_NEXT = '1.2.4-rc4'
CHROME_BRANCH = '13'

# Fake version-file contents (CHROMEOS_* key=value lines) matching the
# FAKE_VERSION_STRING components above.
FAKE_VERSION = """
CHROMEOS_BUILD=1
CHROMEOS_BRANCH=2
CHROMEOS_PATCH=4
CHROME_BRANCH=13
"""
# pylint: disable=protected-access
class LKGMCandidateInfoTest(cros_test_lib.TestCase):
  """Test methods testing methods in _LKGMCandidateInfo class."""

  def testLoadFromString(self):
    """Tests whether we can load from a string."""
    info = lkgm_manager._LKGMCandidateInfo(version_string=FAKE_VERSION_STRING,
                                           chrome_branch=CHROME_BRANCH)
    self.assertEqual(info.VersionString(), FAKE_VERSION_STRING)

  def testIncrementVersionPatch(self):
    """Tests whether we can increment a lkgm info."""
    info = lkgm_manager._LKGMCandidateInfo(version_string=FAKE_VERSION_STRING,
                                           chrome_branch=CHROME_BRANCH)
    info.IncrementVersion()
    self.assertEqual(info.VersionString(), FAKE_VERSION_STRING_NEXT)

  def testVersionCompare(self):
    """Tests whether our comparision method works."""
    # Candidates listed in strictly increasing version order.  The previous
    # hand-expanded assertions checked (info4, info1) twice and never checked
    # (info4, info2); looping over all ordered pairs covers every
    # greater/not-equal combination exactly once, plus self-equality.
    infos_ascending = [
        lkgm_manager._LKGMCandidateInfo('1.2.3-rc1'),
        lkgm_manager._LKGMCandidateInfo('1.2.3-rc2'),
        lkgm_manager._LKGMCandidateInfo('1.2.200-rc1'),
        lkgm_manager._LKGMCandidateInfo('1.4.3-rc1'),
        lkgm_manager._LKGMCandidateInfo('5.2.3-rc100'),
    ]
    for i, smaller in enumerate(infos_ascending):
      # Each candidate compares equal to itself.
      self.assertEqual(smaller, smaller)
      for larger in infos_ascending[i + 1:]:
        self.assertGreater(larger, smaller)
        self.assertNotEqual(larger, smaller)
        self.assertNotEqual(smaller, larger)
@contextlib.contextmanager
def TemporaryManifest():
  """Yields a temp file holding a minimal, empty <manifest/> document."""
  with tempfile.NamedTemporaryFile(mode='w') as manifest_file:
    # Build an XML document whose only node is an empty <manifest> root.
    impl = minidom.getDOMImplementation()
    doc = impl.createDocument(None, 'manifest', None)
    print(doc.toxml())
    doc.writexml(manifest_file)
    manifest_file.flush()
    yield manifest_file
class LKGMManagerTest(cros_test_lib.MockTempDirTestCase):
  """Tests for the BuildSpecs manager."""

  def setUp(self):
    # Stub git pushes up front so no test can ever touch a real remote.
    self.push_mock = self.PatchObject(git, 'CreatePushBranch')

    self.source_repo = 'ssh://source/repo'
    self.manifest_repo = 'ssh://manifest/repo'
    self.version_file = 'version-file.sh'
    self.branch = 'master'
    self.build_name = 'amd64-generic'
    self.incr_type = 'branch'
    self.buildstore = FakeBuildStore()

    # Create tmp subdirs based on the one provided TempDirMixin.
    self.tmpdir = os.path.join(self.tempdir, 'base')
    osutils.SafeMakedirs(self.tmpdir)
    self.tmpmandir = os.path.join(self.tempdir, 'man')
    osutils.SafeMakedirs(self.tmpmandir)

    repo = repository.RepoRepository(
        self.source_repo, self.tmpdir, self.branch, depth=1)
    self.manager = lkgm_manager.LKGMManager(
        repo, self.manifest_repo, self.build_name, constants.PFQ_TYPE, 'branch',
        force=False, branch=self.branch, buildstore=self.buildstore,
        dry_run=True)
    # Point all manifest bookkeeping at the temp dirs created above.
    self.manager.manifest_dir = self.tmpmandir
    self.manager.lkgm_path = os.path.join(
        self.tmpmandir, constants.LKGM_MANIFEST)

    self.manager.all_specs_dir = '/LKGM/path'
    manifest_dir = self.manager.manifest_dir
    self.manager.specs_for_builder = os.path.join(manifest_dir,
                                                  self.manager.rel_working_dir,
                                                  'build-name', '%(builder)s')
    # No need to wait between polls inside unittests.
    self.manager.SLEEP_TIMEOUT = 0

  def _GetPathToManifest(self, info):
    """Returns the expected spec path for the given version |info|."""
    return os.path.join(self.manager.all_specs_dir, '%s.xml' %
                        info.VersionString())

  def testCreateFromManifest(self):
    """Tests that we can create a new candidate from another manifest."""
    # Let's stub out other LKGMManager calls cause they're already
    # unit tested.
    version = '2010.0.0-rc7'
    my_info = lkgm_manager._LKGMCandidateInfo('2010.0.0')
    new_candidate = lkgm_manager._LKGMCandidateInfo(version)
    manifest = ('/tmp/manifest-versions-internal/paladin/buildspecs/'
                '20/%s.xml' % version)
    new_manifest = '/path/to/tmp/file.xml'
    build_id = 20162
    site_params = config_lib.GetSiteParams()

    # Patch out our RepoRepository to make sure we don't corrupt real repo.
    self.PatchObject(self.manager, 'cros_source')
    filter_mock = self.PatchObject(manifest_version, 'FilterManifest',
                                   return_value=new_manifest)

    # Do manifest refresh work.
    self.PatchObject(lkgm_manager.LKGMManager, 'GetCurrentVersionInfo',
                     return_value=my_info)
    self.PatchObject(lkgm_manager.LKGMManager, 'RefreshManifestCheckout')
    init_mock = self.PatchObject(lkgm_manager.LKGMManager,
                                 'InitializeManifestVariables')

    # Publish new candidate.
    publish_mock = self.PatchObject(lkgm_manager.LKGMManager, 'PublishManifest')

    candidate_path = self.manager.CreateFromManifest(manifest,
                                                     build_id=build_id)
    # The returned path must name the new candidate's version, and the
    # manager must have adopted that version as current.
    self.assertEqual(candidate_path, self._GetPathToManifest(new_candidate))
    self.assertEqual(self.manager.current_version, version)

    # Each collaborator must have been driven exactly once.
    filter_mock.assert_called_once_with(
        manifest, whitelisted_remotes=site_params.EXTERNAL_REMOTES)
    publish_mock.assert_called_once_with(new_manifest, version,
                                         build_id=build_id)
    init_mock.assert_called_once_with(my_info)
    self.push_mock.assert_called_once_with(mock.ANY, mock.ANY, sync=False)

  def testCreateNewCandidateReturnNoneIfNoWorkToDo(self):
    """Tests that we return nothing if there is nothing to create."""
    new_manifest = 'some_manifest'
    my_info = lkgm_manager._LKGMCandidateInfo('1.2.3')

    # Patch out our RepoRepository to make sure we don't corrupt real repo.
    cros_source_mock = self.PatchObject(self.manager, 'cros_source')
    cros_source_mock.branch = 'master'
    cros_source_mock.directory = '/foo/repo'

    self.PatchObject(lkgm_manager.LKGMManager, 'CheckoutSourceCode')
    self.PatchObject(lkgm_manager.LKGMManager, 'CreateManifest',
                     return_value=new_manifest)
    self.PatchObject(lkgm_manager.LKGMManager, 'RefreshManifestCheckout')
    self.PatchObject(lkgm_manager.LKGMManager, 'GetCurrentVersionInfo',
                     return_value=my_info)
    init_mock = self.PatchObject(lkgm_manager.LKGMManager,
                                 'InitializeManifestVariables')
    # Pretend the current checkout was already built, so there is no new
    # work and no candidate should be produced.
    self.PatchObject(lkgm_manager.LKGMManager, 'HasCheckoutBeenBuilt',
                     return_value=True)

    candidate = self.manager.CreateNewCandidate()
    self.assertEqual(candidate, None)
    init_mock.assert_called_once_with(my_info)

  def _CreateManifest(self):
    """Returns a created test manifest in tmpdir with its dir_pfx."""
    self.manager.current_version = '1.2.4-rc21'
    dir_pfx = CHROME_BRANCH
    manifest = os.path.join(self.manager.manifest_dir,
                            self.manager.rel_working_dir, 'buildspecs',
                            dir_pfx, '1.2.4-rc21.xml')
    osutils.Touch(manifest)
    return manifest, dir_pfx

  def _MockParseGitLog(self, fake_git_log, project):
    """Stubs git plumbing so |fake_git_log| appears as |project|'s history.

    Returns:
      (exists_mock, link_mock) for asserting on path checks / buildbot links.
    """
    exists_mock = self.PatchObject(os.path, 'exists', return_value=True)
    link_mock = self.PatchObject(logging, 'PrintBuildbotLink')
    fake_project_handler = mock.Mock(spec=git.Manifest)
    fake_project_handler.checkouts_by_path = {project['path']: project}
    self.PatchObject(git, 'Manifest', return_value=fake_project_handler)
    fake_result = cros_build_lib.CommandResult(output=fake_git_log)
    self.PatchObject(git, 'RunGit', return_value=fake_result)
    return exists_mock, link_mock

  def testAddChromeVersionToManifest(self):
    """Tests whether we can write the chrome version to the manifest file."""
    with TemporaryManifest() as f:
      chrome_version = '35.0.1863.0'
      # Write the chrome element to manifest.
      self.manager._AddChromeVersionToManifest(f.name, chrome_version)

      # Read the manifest file.
      new_doc = minidom.parse(f.name)
      elements = new_doc.getElementsByTagName(lkgm_manager.CHROME_ELEMENT)
      self.assertEqual(len(elements), 1)
      self.assertEqual(
          elements[0].getAttribute(lkgm_manager.CHROME_VERSION_ATTR),
          chrome_version)
| {
"content_hash": "6da2cec79802a5e7e2d6e4e634542d21",
"timestamp": "",
"source": "github",
"line_count": 260,
"max_line_length": 80,
"avg_line_length": 38.565384615384616,
"alnum_prop": 0.6886406701904857,
"repo_name": "endlessm/chromium-browser",
"id": "16549dbd08d2d56a4a10f04f66ebbe40ed0ba848",
"size": "10221",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "third_party/chromite/cbuildbot/lkgm_manager_unittest.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
int_or_none,
unified_strdate,
)
class ZDFIE(InfoExtractor):
    """Extractor for videos hosted on the ZDF Mediathek."""

    _VALID_URL = r'^https?://www\.zdf\.de/ZDFmediathek(?P<hash>#)?/(.*beitrag/(?:video/)?)(?P<video_id>[0-9]+)(?:/[^/?]+)?(?:\?.*)?'
    _TEST = {
        'url': 'http://www.zdf.de/ZDFmediathek/beitrag/video/2037704/ZDFspezial---Ende-des-Machtpokers--?bc=sts;stt',
        'info_dict': {
            'id': '2037704',
            'ext': 'webm',
            'title': 'ZDFspezial - Ende des Machtpokers',
            'description': 'Union und SPD haben sich auf einen Koalitionsvertrag geeinigt. Aber was bedeutet das für die Bürger? Sehen Sie hierzu das ZDFspezial "Ende des Machtpokers - Große Koalition für Deutschland".',
            'duration': 1022,
            'uploader': 'spezial',
            'uploader_id': '225948',
            'upload_date': '20131127',
        },
        'skip': 'Videos on ZDF.de are depublicised in short order',
    }

    def _real_extract(self, url):
        video_id = re.match(self._VALID_URL, url).group('video_id')

        # The XML service carries both the metadata and the format list.
        xml_url = 'http://www.zdf.de/ZDFmediathek/xmlservice/web/beitragsDetails?ak=web&id=%s' % video_id
        doc = self._download_xml(
            xml_url, video_id,
            note='Downloading video info',
            errnote='Failed to download video info')

        title = doc.find('.//information/title').text
        description = doc.find('.//information/detail').text
        duration = int(doc.find('.//details/lengthSec').text)
        uploader_node = doc.find('.//details/originChannelTitle')
        uploader = uploader_node.text if uploader_node is not None else None
        uploader_id_node = doc.find('.//details/originChannelId')
        uploader_id = uploader_id_node.text if uploader_id_node is not None else None
        upload_date = unified_strdate(doc.find('.//details/airtime').text)

        def xml_to_format(fnode):
            """Turn one <formitaet> node into an info-dict format entry."""
            video_url = fnode.find('url').text
            # Placeholder URLs point at the metafile generator, not media.
            is_available = 'http://www.metafilegenerator' not in video_url

            # basetype packs codecs/container/protocol into one token.
            format_id = fnode.attrib['basetype']
            format_m = re.match(r'''(?x)
                (?P<vcodec>[^_]+)_(?P<acodec>[^_]+)_(?P<container>[^_]+)_
                (?P<proto>[^_]+)_(?P<index>[^_]+)_(?P<indexproto>[^_]+)
            ''', format_id)

            ext = format_m.group('container')
            proto = format_m.group('proto').lower()

            quality = fnode.find('./quality').text
            abr = int(fnode.find('./audioBitrate').text) // 1000
            vbr_node = fnode.find('./videoBitrate')
            vbr = int(vbr_node.text) // 1000 if vbr_node is not None else None

            width_node = fnode.find('./width')
            width = int_or_none(width_node.text) if width_node is not None else None
            height_node = fnode.find('./height')
            height = int_or_none(height_node.text) if height_node is not None else None

            return {
                'format_id': format_id + '-' + quality,
                'url': video_url,
                'ext': ext,
                'acodec': format_m.group('acodec'),
                'vcodec': format_m.group('vcodec'),
                'abr': abr,
                'vbr': vbr,
                'width': width,
                'height': height,
                'filesize': int_or_none(fnode.find('./filesize').text),
                # The original computed an always-empty note, i.e. None.
                'format_note': None,
                'protocol': proto,
                '_available': is_available,
            }

        formats = [
            fmt for fmt in map(xml_to_format,
                               doc.findall('.//formitaeten/formitaet'))
            if fmt['_available']]
        self._sort_formats(formats)

        return {
            'id': video_id,
            'title': title,
            'description': description,
            'duration': duration,
            'uploader': uploader,
            'uploader_id': uploader_id,
            'upload_date': upload_date,
            'formats': formats,
        }
"content_hash": "9a660b832f81403c8b076a2e2a031cc8",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 220,
"avg_line_length": 38.69444444444444,
"alnum_prop": 0.5340990667623834,
"repo_name": "Grassboy/plugin.video.plurkTrend",
"id": "3b1ac4e9f5246e268e0c0b49d64249196270e9d4",
"size": "4199",
"binary": false,
"copies": "16",
"ref": "refs/heads/master",
"path": "youtube_dl/extractor/zdf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1059158"
},
{
"name": "Shell",
"bytes": "115"
}
],
"symlink_target": ""
} |
import time
import subprocess
# Path where the last successfully-applied git rev is persisted across restarts.
SAVE_FILE = '/tmp/udmi_agent.applied'
class GitManager:
  """Applies and persists git versions for the agent.

  apply() checks out a target rev and returns a nonce; once the system
  reports steady() with that nonce, the applied rev is committed to
  SAVE_FILE so restore() can re-apply it after a restart.
  """

  def __init__(self):
    self._nonce = None    # Opaque token returned by the last apply().
    self._applied = None  # Target rev passed to the last apply().

  def restore(self):
    """Restore HEAD to the previously committed (saved) version, if any."""
    try:
      current = self._exec('git rev-list -n 1 HEAD')
      print('Current version', current)
      print('Loading committed version from %s' % SAVE_FILE)
      with open(SAVE_FILE, 'r') as fd:
        saved = fd.readlines()[0].strip()
      if current != saved:
        print('Committed != saved, restoring', saved)
        self.apply(saved)
      else:
        print('Committed == saved', saved)
    except Exception as e:
      # Chain the original exception so the root cause is not lost.
      raise Exception('Error loading committed version: %s' % str(e)) from e

  def _exec(self, cmd):
    """Run a command and return its stripped stdout.

    NOTE: naive space split -- arguments must not contain spaces.

    Raises:
      Exception: with the command's stderr on a nonzero exit code.
    """
    print('executing: %s' % cmd)
    cmd_args = cmd.split(' ')
    process = subprocess.run(cmd_args, capture_output=True, check=False)
    if process.returncode:
      print('execution failed: %s, %s, %s' % (
          process.returncode, process.stdout, process.stderr))
      message = process.stderr.decode('utf-8').strip()
      raise Exception('Failed subshell execution: %s' % message)
    return process.stdout.decode('utf-8').strip()

  def steady(self, target):
    """Record steady state: persist the applied rev if |target| matches the nonce.

    Returns:
      The current 'git describe' output.
    """
    if self._applied and target == self._nonce:
      print('Target/nonce match, writing', self._applied, SAVE_FILE)
      with open(SAVE_FILE, 'w') as fd:
        fd.write(self._applied)
    elif target:
      print('Target/nonce mismatch', target, self._nonce)
    result = self._exec('git describe')
    print('HEAD description', result)
    return result

  def fetch(self, target):
    """Fetch |target| from origin and return its resolved commit hash."""
    self._exec('git fetch origin %s' % target)
    result = self._exec('git rev-list -n 1 %s' % target)
    print('Target rev', result)
    return result

  def apply(self, target):
    """Hard-reset HEAD to |target|, rolling back on mismatch.

    Returns:
      A fresh nonce identifying this successful apply.
    """
    self._nonce = None
    self._applied = target
    previous = self._exec('git rev-list -n 1 HEAD')
    print(self._exec('git reset --hard %s' % target))
    current = self._exec('git rev-list -n 1 HEAD')
    if current != target:
      # Roll back to the previous rev before reporting failure.
      print(self._exec('git reset --hard %s' % previous))
      raise Exception('Target HEAD mismatch')
    self._nonce = str(time.time())
    print('Apply nonce', self._nonce)
    return self._nonce
| {
"content_hash": "7cb7fed95d8d6d1e65e05a11bffae0c8",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 76,
"avg_line_length": 37.44615384615385,
"alnum_prop": 0.5579293344289236,
"repo_name": "faucetsdn/udmi",
"id": "70b17f1756062bd7f846c634743c4ea6b7739a01",
"size": "2434",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "agent/git_manager.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1294"
},
{
"name": "HCL",
"bytes": "16883"
},
{
"name": "HTML",
"bytes": "15862"
},
{
"name": "Java",
"bytes": "495172"
},
{
"name": "JavaScript",
"bytes": "35643"
},
{
"name": "Python",
"bytes": "58201"
},
{
"name": "SCSS",
"bytes": "6256"
},
{
"name": "Shell",
"bytes": "79835"
},
{
"name": "Smarty",
"bytes": "4333"
},
{
"name": "TypeScript",
"bytes": "233396"
}
],
"symlink_target": ""
} |
import fnmatch
import re
import sys
from basename_ranker import BasenameRanker
from trace_event import *
class DBIndexShard(object):
  """One shard of the filename index; matches queries against basenames.

  NOTE(review): Python 2 only -- relies on reload()/setdefaultencoding,
  the unicode type, dict.iteritems and cmp-style sorting.
  """

  def __init__(self, basenames):
    reload(sys)
    sys.setdefaultencoding('utf8')
    # The basenames come out of a hashtable so they are usually pretty badly
    # shuffled around. Sort them here so that we get somewhat predictable results
    # as a query is incrementally refined.
    basenames.sort()
    # Build the lower basenames list, removing dupes as needed.
    lower_basenames = set()
    for basename in basenames:
      lower_basename = basename.lower()
      lower_basenames.add(lower_basename)
    # Build two giant strings that contain all the basenames [and lowercase basenames]
    # concatenated together. This is what we will use to handle fuzzy queries.
    # Entries are newline-delimited so the regex filters below can anchor on \n.
    self.basenames_unsplit = (u"\n" + u"\n".join(basenames) + u"\n")
    self.lower_basenames_unsplit = (u"\n" + u"\n".join(lower_basenames) + u"\n")
    assert type(self.lower_basenames_unsplit) == unicode
    self._basename_ranker = BasenameRanker()
    # Map every word-start prefix of length >= 2 (e.g. "ab", "abc" for
    # "alpha_beta_charlie") to the lower-cased basenames containing it.
    wordstarts = {}
    for basename in basenames:
      start_letters = self._basename_ranker.get_start_letters(basename)
      if len(start_letters) <= 1:
        continue
      lower_basename = basename.lower()
      for i in range(len(start_letters) + 1 - 2): # abcd -> ab abc abcd
        ws = ''.join(start_letters[0:2+i])
        if ws not in wordstarts:
          wordstarts[ws] = []
        # loss = word-start letters beyond this prefix; lower loss means the
        # prefix covers more of the basename, i.e. a higher-quality match.
        loss = len(start_letters) - (2 + i)
        wordstarts[ws].append((lower_basename, loss))
    # now, order the actual entries so high qualities are at front
    self.basenames_by_wordstarts = {}
    for ws,items in wordstarts.iteritems():
      items.sort(lambda x,y: cmp(x[1],y[1]))
      self.basenames_by_wordstarts[ws] = [i[0] for i in items]

  @traced
  def search_basenames(self, query):
    """
    Searches index for basenames matching the query.
    Returns (hits, truncated) where:
      hits is an array of basenames that matched.
      truncated is a bool indicated whether not all possible matches were found.
    """
    lower_query = query.lower()
    lower_hits = set()
    # Cap on result-set size; each pass stops once this many hits exist.
    max_hits_hint = 25
    # add exact matches first
    trace_begin("exact")
    self.add_all_matching( lower_hits, query, self.get_exact_match_filter(lower_query), max_hits_hint )
    trace_end("exact")
    # add in word starts
    trace_begin("wordstarts")
    self.add_all_wordstarts_matching( lower_hits, query, max_hits_hint )
    trace_end("wordstarts")
    # add in substring matches
    trace_begin("substrings")
    self.add_all_matching( lower_hits, query, self.get_substring_filter(lower_query), max_hits_hint )
    trace_end("substrings")
    # add in superfuzzy matches ONLY if we have no high-quality hit
    has_hq = False
    for lower_hit in lower_hits:
      rank = self._basename_ranker.rank_query(query, lower_hit)
      if rank > 2:
        has_hq = True
        break
    if not has_hq:
      trace_begin("superfuzzy")
      self.add_all_matching( lower_hits, query, self.get_superfuzzy_filter(lower_query), max_hits_hint )
      trace_end("superfuzzy")
    return lower_hits, len(lower_hits) == max_hits_hint

  def add_all_wordstarts_matching( self, lower_hits, query, max_hits_hint ):
    """Adds basenames whose word-start letters begin with |query|."""
    lower_query = query.lower()
    if lower_query in self.basenames_by_wordstarts:
      for basename in self.basenames_by_wordstarts[lower_query]:
        lower_hits.add(basename)
        if len(lower_hits) >= max_hits_hint:
          return

  def get_exact_match_filter(self, query):
    """Filter matching |query| exactly, optionally followed by an extension."""
    query = re.escape(query.lower())
    # abc -> abc(\..*)?
    flt = "\n%s(?:\\..*)?\n" % query
    return (flt, False)

  def get_delimited_wordstart_filter(self, query):
    """Filter where query letters start successive underscore-delimited words."""
    query = [re.escape(query[i]) for i in range(len(query))]
    # abc -> ^a.*_b.*_c
    # abc -> .*_a.*_b.*_c
    tmp = []
    tmp.append("(?:(?:%s)|(?:.*_%s))" % (query[0], query[0]))
    for i in range(1, len(query)):
      c = query[i]
      tmp.append("_%s" % query[i])
    flt = "\n%s.*\n" % '.*'.join(tmp)
    return (flt, False)

  def get_camelcase_wordstart_filter(self, query):
    """Filter where query letters start successive CamelCase words.

    Case-sensitive: the returned tuple's second element is True.
    """
    query = query.upper()
    query = [re.escape(query[i]) for i in range(len(query))]
    # abc -> A.*B.*C
    # .*[^A-Z]A.*
    tmp = []
    tmp.append("(?:(?:%s)|(?:.*[^A-Z\n]%s))" % (query[0], query[0]))
    for i in range(1, len(query)):
      tmp.append("[^A-Z\n]%s" % query[i])
    flt = "\n.*%s.*\n" % '.*'.join(tmp)
    return (flt, True)

  def get_substring_filter(self, query):
    """Filter matching |query| anywhere in a basename."""
    query = re.escape(query.lower())
    # abc -> *abc*
    flt = "\n.*%s.*\n" % query
    return (flt, False)

  def get_superfuzzy_filter(self, query):
    """Filter matching query letters in order, with anything in between."""
    tmp = []
    for i in range(len(query)):
      tmp.append(re.escape(query[i]))
    flt = "\n.*%s.*\n" % '.*'.join(tmp)
    return (flt, False)

  def add_all_matching(self, lower_hits, query, flt_tuple, max_hits_hint):
    """
    lower_hits is the dictionary to put results in
    query is the query string originally entered by user, used by ranking
    flt_tuple is [filter_regex, case_sensitive_bool]
    max_hits_hint is largest hits should grow before matching terminates.
    """
    flt, case_sensitive = flt_tuple
    regex = re.compile(flt)
    base = 0
    # Case-insensitive filters scan the lower-cased index string.
    if not case_sensitive:
      index = self.lower_basenames_unsplit
    else:
      index = self.basenames_unsplit
    while True:
      m = regex.search(index, base)
      if m:
        # Strip the leading/trailing "\n" delimiters from the match.
        hit = m.group(0)[1:-1]
        if hit.find('\n') != -1:
          raise Exception("Somethign is messed up with flt=[%s] query=[%s] hit=[%s]" % (flt,query,hit))
        if case_sensitive:
          hit = hit.lower()
        lower_hits.add(hit)
        # Back up one char so the trailing "\n" can begin the next match.
        base = m.end() - 1
        if len(lower_hits) >= max_hits_hint:
          truncated = True  # NOTE(review): unused local; caller recomputes this.
          break
      else:
        break
| {
"content_hash": "83a8fc42d4d76ec9fab34281deda1db7",
"timestamp": "",
"source": "github",
"line_count": 175,
"max_line_length": 104,
"avg_line_length": 33.27428571428572,
"alnum_prop": 0.615146831530139,
"repo_name": "natduca/quickopen",
"id": "24242635572c0e2ef5525e4f26ad8992ec2f2c5f",
"size": "6399",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/db_index_shard.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "13"
},
{
"name": "C++",
"bytes": "10"
},
{
"name": "CSS",
"bytes": "1370"
},
{
"name": "E",
"bytes": "453"
},
{
"name": "Emacs Lisp",
"bytes": "13092"
},
{
"name": "HTML",
"bytes": "919"
},
{
"name": "JavaScript",
"bytes": "9356"
},
{
"name": "Python",
"bytes": "312244"
},
{
"name": "Vim script",
"bytes": "2411"
}
],
"symlink_target": ""
} |
"""
Models the WGS raw sequence set object.
"""
import json
import logging
import os
import string
from itertools import count
from cutlass.iHMPSession import iHMPSession
from cutlass.Base import Base
from cutlass.aspera import aspera
from cutlass.Util import *
# pylint: disable=W0703, C1801
# Create a module logger named after the module
module_logger = logging.getLogger(__name__)
# Add a NullHandler for the case if no logging is configured by the application
# (prevents "no handlers could be found" warnings when used as a library).
module_logger.addHandler(logging.NullHandler())
class WgsRawSeqSet(Base):
"""
The class encapsulating the Wgs Raw Sequence Set data for an iHMP instance.
This class contains all the fields required to save a Wgs Raw Sequence Set
object in the OSDF instance.
Attributes:
namespace (str): The namespace this class will use in the OSDF instance
"""
namespace = "ihmp"
aspera_server = "aspera2.ihmpdcc.org"
    def __init__(self, *args, **kwargs):
        """
        Constructor for the WgsRawSeqSet class. This initializes the fields
        specific to the WgsRawSeqSet class, and inherits from the Base class.

        Args:
            None
        """
        self.logger = logging.getLogger(self.__module__ + '.' + self.__class__.__name__)
        self.logger.addHandler(logging.NullHandler())
        # These are common to all objects
        self._id = None        # OSDF node ID (assigned on save)
        self._version = None   # OSDF node version
        self._links = {}       # Linkage/edges to other OSDF nodes
        self._tags = []        # Free-form tag strings
        # These are particular to WgsRawSeqSet objects
        self._checksums = None
        self._comment = None
        self._exp_length = None
        self._format = None
        self._format_doc = None
        self._local_file = None
        self._seq_model = None
        self._sequence_type = None
        self._size = None
        self._study = None
        self._urls = ['']
        # Optional properties
        self._private_files = None
        super(WgsRawSeqSet, self).__init__(*args, **kwargs)
def is_valid(self):
"""
Validates the current object's data/JSON against the current schema
in the OSDF instance for the specific object. However, unlike
validates(), this method does not provide exact error messages,
it states if the validation was successful or not.
Args:
None
Returns:
True if the data validates, False if the current state of
fields in the instance do not validate with OSDF or
other node requirements.
"""
self.logger.debug("In is_valid.")
problems = self.validate()
valid = True
if len(problems):
self.logger.error("There were %s problems.", len(problems))
valid = False
self.logger.debug("Valid? %s", str(valid))
return valid
@property
def checksums(self):
"""
str: One or more checksums used to ensure file integrity.
"""
self.logger.debug("In 'checksums' getter.")
return self._checksums
@checksums.setter
@enforce_dict
def checksums(self, checksums):
"""
The setter for the checksum data.
Args:
checksums (dict): The checksums for the data file.
Returns:
None
"""
self.logger.debug("In 'checksums' setter.")
self._checksums = checksums
@property
def comment(self):
"""
str: Free-text comment.
"""
self.logger.debug("In 'comment' getter.")
return self._comment
@comment.setter
@enforce_string
def comment(self, comment):
"""
The setter for the comment field. The comment must be a string.
Args:
comment (str): The new comment to add to the string.
Returns:
None
"""
self.logger.debug("In 'comment' setter.")
self._comment = comment
@property
def exp_length(self):
"""
int: The number of raw bases or color space calls expected for the read,
includes both mate pairs and all technical portions.
"""
self.logger.debug("In 'exp_length' getter.")
return self._exp_length
@exp_length.setter
@enforce_int
def exp_length(self, exp_length):
"""
The setter for the WgsRawSeqSet exp length.
Args:
exp_length (int): The new exp_length for the current instance.
Returns:
None
"""
self.logger.debug("In exp_length setter.")
if exp_length < 0:
raise ValueError("The 'exp_length' must be non-negative.")
self._exp_length = exp_length
@property
def format(self):
"""
str: The file format of the sequence file.
"""
self.logger.debug("In 'format' getter.")
return self._format
@format.setter
@enforce_string
def format(self, format_str):
"""
The setter for the WgsRawSeqSet format. This must be either fasta or fastq.
Args:
format_str (str): The new format string for the current object.
Returns:
None
"""
self.logger.debug("In 'format' setter.")
formats = ["fasta", "fastq"]
if format_str in formats:
self._format = format_str
else:
raise Exception("Format must be either fasta or fastq.")
@property
def format_doc(self):
"""
str: URL for documentation of file format.
"""
self.logger.debug("In 'format_doc' getter.")
return self._format_doc
@format_doc.setter
@enforce_string
def format_doc(self, format_doc):
"""
The setter for the WgsRawSeqSet format doc.
Args:
format_doc (str): The new format_doc for the current object.
Returns:
None
"""
self.logger.debug("In 'format_doc' setter.")
self._format_doc = format_doc
@property
def local_file(self):
"""
str: URL to the local file to upload to the server.
"""
self.logger.debug("In 'local_file' getter.")
return self._local_file
@local_file.setter
@enforce_string
def local_file(self, local_file):
"""
The setter for the WgsRawSeqSet local file.
Args:
local_file (str): The URL to the local file that should
be uploaded to the server.
Returns:
None
"""
self.logger.debug("In 'local_file' setter.")
self._local_file = local_file
@property
def private_files(self):
"""
bool: Whether this object describes private data that should not
be uploaded to the DCC. Defaults to false.
"""
self.logger.debug("In 'private_files' getter.")
return self._private_files
@private_files.setter
@enforce_bool
def private_files(self, private_files):
"""
The setter for the private files flag to denote this object
describes data that should not be uploaded to the DCC.
Args:
private_files (bool):
Returns:
None
"""
self.logger.debug("In 'private_files' setter.")
self._private_files = private_files
@property
def seq_model(self):
"""
str: Sequencing instrument model.
"""
self.logger.debug("In 'seq_model' getter.")
return self._seq_model
@seq_model.setter
@enforce_string
def seq_model(self, seq_model):
"""
The setter for the WgsRawSeqSet seq model.
Args:
seq_model (str): The new seq model.
Returns:
None
"""
self.logger.debug("In 'seq_model' setter.")
self._seq_model = seq_model
@property
def sequence_type(self):
"""
str: Specifies whether the file contains peptide or nucleotide data.
"""
self.logger.debug("In 'sequence_type' getter.")
return self._sequence_type
@sequence_type.setter
@enforce_string
def sequence_type(self, sequence_type):
"""
The setter for the WgsRawSeqSet sequence type. This must be either
peptide or nucleotide.
Args:
sequence_type (str): The new sequence type.
Returns:
None
"""
self.logger.debug("In 'sequence_type' setter.")
types = ["peptide", "nucleotide"]
if sequence_type in types:
self._sequence_type = sequence_type
else:
raise Exception("Sequence type must be either peptide or nucleotide")
@property
def size(self):
"""
int: The size of the file in bytes.
"""
self.logger.debug("In 'size' getter.")
return self._size
@size.setter
@enforce_int
def size(self, size):
"""
The setter for the WgsRawSeqSet size.
Args:
size (int): The size of the seq set in bytes.
Returns:
None
"""
self.logger.debug("In 'size' setter.")
if size < 0:
raise ValueError("The size must be non-negative.")
self._size = size
@property
def study(self):
"""
str: One of the 3 studies that are part of the iHMP.
"""
self.logger.debug("In 'study' getter.")
return self._study
@study.setter
@enforce_string
def study(self, study):
"""
The setter for the WgsRawSeqSet study. This is restricted to be either
preg_preterm, ibd, or prediabetes.
Args:
study (str): The study of the seq set.
Returns:
None
"""
self.logger.debug("In 'study' setter.")
studies = ["preg_preterm", "ibd", "prediabetes"]
if study in studies:
self._study = study
else:
raise Exception("Not a valid study")
@property
def urls(self):
"""
array: An array of URL from where the file can be obtained,
http, ftp, fasp, etc...
"""
self.logger.debug("In 'urls' getter.")
return self._urls
    def validate(self):
        """
        Validates the current object's data/JSON against the current
        schema in the OSDF instance for that specific object. All required
        fields for that specific object must be present.

        Args:
            None

        Returns:
            A list of strings, where each string is the error that the
            validation raised during OSDF validation
        """
        self.logger.debug("In validate.")
        document = self._get_raw_doc()
        session = iHMPSession.get_session()
        self.logger.info("Got iHMP session.")
        # First let OSDF validate the raw JSON against the node schema.
        (valid, error_message) = session.get_osdf().validate_node(document)
        problems = []
        if not valid:
            self.logger.info("Validation did not succeed for " + __name__ + ".")
            problems.append(error_message)
        # A local file is only required when the data is not private.
        if self._private_files:
            self.logger.info("User specified the files are private.")
        else:
            self.logger.info("Data is NOT private, so check that local_file is set.")
            if self._local_file is None:
                problems.append("Local file is not yet set.")
            elif not os.path.isfile(self._local_file):
                problems.append("Local file does not point to an actual file.")
        # Every raw seq set must link back to what it was sequenced from.
        if 'sequenced_from' not in self._links.keys():
            problems.append("Must add a 'sequenced_from' link.")
        self.logger.debug("Number of validation problems: %s.", len(problems))
        return problems
@staticmethod
def required_fields():
"""
A static method. The required fields for the class.
Args:
None
Returns:
None
"""
module_logger.debug("In required fields.")
return ("checksums", "comment", "exp_length", "format", "format_doc",
"seq_model", "size", "study", "tags", "urls")
    def _get_raw_doc(self):
        """
        Generates the raw JSON document for the current object. All required
        fields are filled into the JSON document, regardless they are set or
        not. Any remaining fields are included only if they are set. This
        allows the user to visualize the JSON to ensure fields are set
        appropriately before saving into the database.

        Args:
            None

        Returns:
            A dictionary representation of the JSON document.
        """
        self.logger.debug("In _get_raw_doc.")
        doc = {
            'acl': {
                'read': ['all'],
                'write': [WgsRawSeqSet.namespace]
            },
            'linkage': self._links,
            'ns': WgsRawSeqSet.namespace,
            'node_type': 'wgs_raw_seq_set',
            'meta': {
                "checksums": self._checksums,
                "comment": self._comment,
                "exp_length": self._exp_length,
                "format": self._format,
                "format_doc": self._format_doc,
                # NOTE(review): uses the property here while the sibling
                # fields use the private attribute; equivalent but inconsistent.
                "seq_model": self.seq_model,
                "size": self._size,
                "study": self._study,
                "urls": self._urls,
                "subtype": "wgs",
                'tags': self._tags
            }
        }
        # id/ver only exist once the node has been saved to OSDF.
        if self._id is not None:
            self.logger.debug("Object has the OSDF id set.")
            doc['id'] = self._id
        if self._version is not None:
            self.logger.debug("Object has the OSDF version set.")
            doc['ver'] = self._version
        # Handle optional properties
        if self._sequence_type is not None:
            self.logger.debug("Object has the 'sequence_type' set.")
            doc['meta']['sequence_type'] = self._sequence_type
        if self._private_files is not None:
            self.logger.debug("Object has the 'private_files' property set.")
            doc['meta']['private_files'] = self._private_files
        return doc
@staticmethod
def search(query="\"wgs_raw_seq_set\"[node_type]"):
    """
    Searches the OSDF database through all WgsRawSeqSet node types. Any
    criteria the user wishes to add is provided by the user in the query
    language specifications provided in the OSDF documentation. A general
    format is (including the quotes and brackets):

        "search criteria"[field to search]

    If there are any results, they are returned as WgsRawSeqSet
    instances, otherwise an empty list will be returned.

    Args:
        query (str): The query for the OSDF framework. Defaults to the
                     WgsRawSeqSet node type.

    Returns:
        Returns an array of WgsRawSeqSet objects. It returns an empty
        list if there are no results.
    """
    module_logger.debug("In search.")

    session = iHMPSession.get_session()
    module_logger.info("Got iHMP session.")

    # Scope any user-supplied criteria to the wgs_raw_seq_set node type.
    if query != '"wgs_raw_seq_set"[node_type]':
        query = '({}) && "wgs_raw_seq_set"[node_type]'.format(query)

    module_logger.debug("Submitting OQL query: %s", query)

    wgsRawSeqSet_data = session.get_osdf().oql_query("ihmp", query)
    all_results = wgsRawSeqSet_data['results']

    # A comprehension naturally yields [] when there are no results,
    # replacing the explicit len() check and append loop.
    return [WgsRawSeqSet.load_wgsRawSeqSet(result) for result in all_results]
@staticmethod
def load_wgsRawSeqSet(seq_set_data):
    """
    Takes the provided JSON string and converts it to a WgsRawSeqSet
    object.

    Args:
        seq_set_data (str): The JSON string to convert

    Returns:
        Returns a WgsRawSeqSet instance.
    """
    # Lazy %-style logging args (consistent with the other methods in
    # this file) instead of eager string concatenation.
    module_logger.info("Creating a template %s.", __name__)
    seq_set = WgsRawSeqSet()

    module_logger.debug("Filling in %s details.", __name__)

    # The attributes common to all iHMP nodes
    seq_set._set_id(seq_set_data['id'])
    seq_set.version = seq_set_data['ver']
    seq_set.links = seq_set_data['linkage']

    # The attributes that are particular to WgsRawSeqSet documents
    seq_set.checksums = seq_set_data['meta']['checksums']
    seq_set.comment = seq_set_data['meta']['comment']
    seq_set.exp_length = seq_set_data['meta']['exp_length']
    seq_set.format = seq_set_data['meta']['format']
    seq_set.format_doc = seq_set_data['meta']['format_doc']
    seq_set.seq_model = seq_set_data['meta']['seq_model']
    seq_set.size = seq_set_data['meta']['size']
    seq_set.study = seq_set_data['meta']['study']
    seq_set.tags = seq_set_data['meta']['tags']
    # NOTE(review): assigns the private attribute directly, bypassing
    # whatever the 'urls' property setter does -- presumably intentional
    # for data already stored in OSDF; confirm before changing.
    seq_set._urls = seq_set_data['meta']['urls']

    # Optional fields
    if 'sequence_type' in seq_set_data['meta']:
        seq_set.sequence_type = seq_set_data['meta']['sequence_type']

    if 'private_files' in seq_set_data['meta']:
        seq_set.private_files = seq_set_data['meta']['private_files']

    module_logger.debug("Returning loaded %s.", __name__)
    return seq_set
@staticmethod
def load(seq_set_id):
    """
    Load the OSDF document with the given ID into a fresh WgsRawSeqSet.
    If the provided ID does not exist, an error message is produced
    stating the project does not exist.

    Args:
        seq_set_id (str): The OSDF ID for the document to load.

    Returns:
        A WgsRawSeqSet object with all the available OSDF data loaded
        into it.
    """
    module_logger.debug("In load. Specified ID: %s", seq_set_id)

    session = iHMPSession.get_session()
    module_logger.info("Got iHMP session.")

    node_data = session.get_osdf().get_node(seq_set_id)
    loaded_set = WgsRawSeqSet.load_wgsRawSeqSet(node_data)

    module_logger.debug("Returning loaded %s.", __name__)
    return loaded_set
def _upload_data(self):
    """
    Upload the local file to the iHMP Aspera server and record the
    resulting fasp:// URL in self._urls.

    Raises:
        ValueError: If the study has no known remote directory mapping.
        Exception: If the Aspera upload reports failure.
    """
    self.logger.debug("In _upload_data.")

    session = iHMPSession.get_session()
    study = self._study

    # Map the study name to its directory name on the Aspera server.
    study2dir = {
        "ibd": "ibd",
        "preg_preterm": "ptb",
        "prediabetes": "t2d"
    }

    if study not in study2dir:
        raise ValueError("Invalid study. No directory mapping for %s" % study)

    study_dir = study2dir[study]

    # Sanitize the remote file name down to a safe character set.
    remote_base = os.path.basename(self._local_file)
    valid_chars = "-_.%s%s" % (string.ascii_letters, string.digits)
    remote_base = ''.join(c for c in remote_base if c in valid_chars)
    remote_base = remote_base.replace(' ', '_') # No spaces in filenames

    remote_path = "/".join(["/" + study_dir, "genome", "microbiome", "wgs",
                            "raw", remote_base])
    self.logger.debug("Remote path for this file will be %s.", remote_path)

    # Upload the file to the iHMP aspera server
    upload_result = aspera.upload_file(WgsRawSeqSet.aspera_server,
                                       session.username,
                                       session.password,
                                       self._local_file,
                                       remote_path)

    if not upload_result:
        self.logger.error("Experienced an error uploading the data. " + \
                          "Aborting save.")
        raise Exception("Unable to upload WGS raw sequence set")
    else:
        # Record where the uploaded file can be fetched from.
        self._urls = ["fasp://" + WgsRawSeqSet.aspera_server + remote_path]
def save(self):
    """
    Saves the data in OSDF. The JSON form of the current data for the
    instance is validated in the save function. If the data is not valid,
    then the data will not be saved. If the instance was saved previously,
    then the node ID is assigned the alpha numeric found in the OSDF
    instance. If not saved previously, then the node ID is 'None', and upon
    a successful save, will be assigned to the alpha numeric ID found in
    OSDF.

    Args:
        None

    Returns:
        True if successful, False otherwise.
    """
    self.logger.debug("In save.")

    # Refuse to save an invalid document.
    if not self.is_valid():
        self.logger.error("Cannot save, data is invalid")
        return False

    session = iHMPSession.get_session()
    self.logger.info("Got iHMP session.")

    success = False

    # Private files are never uploaded; a placeholder URL is recorded.
    if self._private_files:
        self._urls = ["<private>"]
    else:
        try:
            self._upload_data()
        except Exception as upload_exception:
            self.logger.exception(upload_exception)
            # Don't bother continuing...
            return False

    osdf = session.get_osdf()

    if self._id is None:
        # First save: insert a brand-new node and adopt its ID/version.
        self.logger.info("About to insert a new %s OSDF node.", __name__)

        # Get the JSON form of the data and load it
        self.logger.debug("Converting %s to parsed JSON form.", __name__)
        data = json.loads(self.to_json())

        try:
            self.logger.info("Attempting to save a new node.")
            node_id = osdf.insert_node(data)

            self._set_id(node_id)
            self._version = 1

            self.logger.info("Save for %s %s successful.", __name__, node_id)
            self.logger.info("Setting ID for %s %s.", __name__, node_id)

            success = True
        except Exception as save_exception:
            self.logger.exception(save_exception)
            self.logger.error("An error occurred while saving %s. " + \
                              "Reason: %s", __name__, save_exception)
    else:
        # Subsequent saves: edit the existing node, then re-read it to
        # pick up the version number OSDF assigned.
        self.logger.info("%s already has an ID, so we do an update (not an insert).", __name__)

        try:
            seq_set_data = self._get_raw_doc()
            seq_set_id = self._id
            self.logger.info("Attempting to update %s with ID: %s.", __name__, seq_set_id)
            osdf.edit_node(seq_set_data)
            self.logger.info("Update for %s %s successful.", __name__, seq_set_id)

            seq_set_data = osdf.get_node(seq_set_id)
            latest_version = seq_set_data['ver']

            self.logger.debug("The version of this %s is now %s", __name__, str(latest_version))
            self._version = latest_version
            success = True
        except Exception as edit_exception:
            self.logger.exception(edit_exception)
            self.logger.error("An error occurred while updating %s %s. " + \
                              "Reason: %s", __name__, self._id,
                              edit_exception
                              )

    self.logger.debug("Returning " + str(success))
    return success
def viral_seq_sets(self):
    """
    Returns an iterator of all ViralSeqSet nodes connected to this object.

    Yields:
        ViralSeqSet: each node whose 'computed_from' linkage points at
        this sequence set.
    """
    self.logger.debug("In viral_seq_sets().")
    linkage_query = '"{}"[linkage.computed_from]'.format(self.id)
    query = iHMPSession.get_session().get_osdf().oql_query

    # Local import -- presumably to avoid a circular import between
    # cutlass modules at load time; confirm before moving to file scope.
    from cutlass.ViralSeqSet import ViralSeqSet

    # Page through the OSDF results until the reported count is exhausted.
    for page_no in count(1):
        res = query(WgsRawSeqSet.namespace, linkage_query, page=page_no)
        res_count = res['result_count']

        for doc in res['results']:
            yield ViralSeqSet.load_viral_seq_set(doc)

        res_count -= len(res['results'])

        if res_count < 1:
            break
| {
"content_hash": "c876d3c3889424498875cc748ac360a2",
"timestamp": "",
"source": "github",
"line_count": 790,
"max_line_length": 100,
"avg_line_length": 30.29367088607595,
"alnum_prop": 0.5567858933645329,
"repo_name": "carze/cutlass",
"id": "0c71d078ab1a3767e72eb2084971009ac6709999",
"size": "23932",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cutlass/WgsRawSeqSet.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1327389"
}
],
"symlink_target": ""
} |
import os
import setuptools
# Read the long description from the README that lives next to this
# setup.py. Using a context manager (instead of a bare open().read())
# guarantees the file handle is closed promptly.
_here = os.path.dirname(os.path.abspath(__file__))
with open(os.path.join(_here, 'README.rst')) as _readme:
    _long_description = _readme.read()

setuptools.setup(
    name='climate',
    version='0.4.6',
    packages=setuptools.find_packages(),
    author='Leif Johnson',
    author_email='leif@lmjohns3.com',
    description='Command-line utilities',
    long_description=_long_description,
    license='MIT',
    url='http://github.com/lmjohns3/py-cli/',
    keywords=('command-line '
              'logging '
              'arguments '
              ),
    install_requires=['plac'],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Intended Audience :: Science/Research',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Topic :: Utilities',
    ],
)
| {
"content_hash": "3326ca1506744bf99d51deae3f012b28",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 105,
"avg_line_length": 31.724137931034484,
"alnum_prop": 0.5902173913043478,
"repo_name": "lmjohns3/py-cli",
"id": "88b5e90bb9828fd3576a39a636e1ccc5cd24f823",
"size": "920",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9650"
}
],
"symlink_target": ""
} |
def print_string(str):
    """Print the given value to stdout.

    NOTE(review): the parameter shadows the builtin ``str``; renaming it
    would change the keyword-argument interface for callers, so it is
    documented rather than fixed here.
    """
    print(str)
"content_hash": "a288f2621f790b5e08d963bb96b5d869",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 22,
"avg_line_length": 17.5,
"alnum_prop": 0.7142857142857143,
"repo_name": "zzcclp/carbondata",
"id": "aff3d818853e51ab53ce792982a9c91cc7f21ec1",
"size": "36",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "python/pycarbon/tests/im/test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "16022"
},
{
"name": "Batchfile",
"bytes": "1639"
},
{
"name": "C#",
"bytes": "86"
},
{
"name": "C++",
"bytes": "110888"
},
{
"name": "CMake",
"bytes": "1555"
},
{
"name": "Java",
"bytes": "7859129"
},
{
"name": "Python",
"bytes": "368778"
},
{
"name": "Scala",
"bytes": "12011736"
},
{
"name": "Shell",
"bytes": "7259"
},
{
"name": "Thrift",
"bytes": "23385"
}
],
"symlink_target": ""
} |
from scrapy.item import Item, Field


class AnalyticsCounterItem(Item):
    """Scrapy item for the analytics-counter spider.

    No fields are declared yet; add them as ``name = Field()`` entries.
    """
    # define the fields for your item here like:
    # name = Field()
    pass
| {
"content_hash": "6520d7b2f458fe92a378dae710a85826",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 48,
"avg_line_length": 25,
"alnum_prop": 0.7,
"repo_name": "ikeikeikeike/analytics-counter",
"id": "537b6b2a0c68833b7b47a7fdee530d1b71ca0dc0",
"size": "277",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "analytics_counter/analytics_counter/items.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "22075"
}
],
"symlink_target": ""
} |
from xd.tool.layer import *
from case import *
import os
import configparser
class ManifestStub(object):
    """Minimal manifest stand-in exposing per-layer priority lookups."""

    def __init__(self, topdir, priority=None):
        """Record the top directory and an optional priority mapping."""
        self.topdir = topdir
        self.priority = {} if priority is None else priority

    def get_priority(self, layer):
        """Return the priority recorded for *layer*, or None if unknown."""
        return self.priority.get(layer)
class tests(TestCase):
    """Tests for xd.tool.layer.Layer construction and priority lookup.

    NOTE(review): the relative paths below assume the ``case`` module's
    TestCase changes into ``self.testdir`` before each test -- confirm
    against the test harness.
    """

    def test_init_no_priority(self):
        """A directory whose .xd has a '[layer]' section is a valid layer."""
        os.mkdir('layer')
        config = configparser.ConfigParser()
        config.add_section('layer')
        with open(os.path.join('layer', '.xd'), 'w') as f:
            config.write(f)
        manifest = ManifestStub(self.testdir)
        layer = Layer(manifest, 'layer')

    def test_init_not_a_layer_1(self):
        """A .xd file lacking a '[layer]' section raises NotALayer."""
        os.mkdir('layer')
        config = configparser.ConfigParser()
        with open(os.path.join('layer', '.xd'), 'w') as f:
            config.write(f)
        manifest = ManifestStub(self.testdir)
        with self.assertRaises(NotALayer):
            layer = Layer(manifest, 'layer')

    def test_init_not_a_layer_2(self):
        """A directory with no .xd file at all raises NotALayer."""
        os.mkdir('layer')
        manifest = ManifestStub(self.testdir)
        with self.assertRaises(NotALayer):
            layer = Layer(manifest, 'layer')

    def test_init_manifest_layer(self):
        """The manifest top directory itself ('.') can be a layer."""
        config = configparser.ConfigParser()
        config.add_section('layer')
        with open('.xd', 'w') as f:
            config.write(f)
        manifest = ManifestStub(self.testdir)
        layer = Layer(manifest, '.')

    def test_priority(self):
        """A 'priority' key in the '[layer]' section is parsed as an int."""
        os.mkdir('layer')
        config = configparser.ConfigParser()
        config.add_section('layer')
        config['layer']['priority'] = '10'
        with open(os.path.join('layer', '.xd'), 'w') as f:
            config.write(f)
        manifest = ManifestStub(self.testdir)
        layer = Layer(manifest, 'layer')
        self.assertEqual(layer.priority(), 10)

    def test_no_priority(self):
        """Without a configured priority, Layer.priority() returns None."""
        os.mkdir('layer')
        config = configparser.ConfigParser()
        config.add_section('layer')
        with open(os.path.join('layer', '.xd'), 'w') as f:
            config.write(f)
        manifest = ManifestStub(self.testdir)
        layer = Layer(manifest, 'layer')
        self.assertIsNone(layer.priority())
| {
"content_hash": "edc3c17c5e324917d8a75f60b1683ace",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 58,
"avg_line_length": 30.756756756756758,
"alnum_prop": 0.5852372583479789,
"repo_name": "esben/xd-tool",
"id": "7e2994306973bc09df0618e5e78905429b95aba0",
"size": "2276",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/layer_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "42858"
},
{
"name": "Shell",
"bytes": "105"
}
],
"symlink_target": ""
} |
import logging
import os
from ctypes import CDLL
import pytest
from py4j.java_gateway import JVMView
from py4j.java_gateway import JavaGateway
from conftest import skipif_no_pythonnet
from conftest import skipif_no_server32
from msl.examples.loadlib import Cpp64
from msl.examples.loadlib import EXAMPLES_DIR
from msl.loadlib import DEFAULT_EXTENSION
from msl.loadlib import IS_PYTHON_64BIT
from msl.loadlib import LoadLibrary
from msl.loadlib.load_library import DotNet
from msl.loadlib.utils import logger
# Example library filenames are suffixed with the interpreter's bitness.
suffix = '64' if IS_PYTHON_64BIT else '32'
def test_raises():
    """LoadLibrary raises OSError for a missing path and ValueError for
    an unknown libtype."""
    with pytest.raises(OSError, match=r'^Cannot find'):
        with LoadLibrary('doesnotexist'):
            pass

    path = os.path.join(EXAMPLES_DIR, 'Trig.class')
    with pytest.raises(ValueError, match=r'^Cannot load libtype'):
        with LoadLibrary(path, libtype='invalid'):
            pass
def test_cpp():
    """A C++ shared library loads as a ctypes.CDLL and is released when
    the context manager exits."""
    path = os.path.join(EXAMPLES_DIR, 'cpp_lib' + suffix + DEFAULT_EXTENSION)
    with LoadLibrary(path) as library:
        assert library.assembly is None
        assert library.gateway is None
        assert library.path == path
        assert isinstance(library.lib, CDLL)
        assert library.lib.add(1, 2) == 3
    # After exiting the context only the path survives; lib is released.
    assert library.path == path
    assert library.assembly is None
    assert library.gateway is None
    assert library.lib is None

    # can still call this (even multiple times)
    for _ in range(10):
        library.cleanup()

    assert 'libtype=NoneType' in str(library)
    assert 'libtype=NoneType' in repr(library)
@skipif_no_pythonnet
def test_dotnet():
    """A .NET assembly loads through pythonnet and is released when the
    context manager exits."""
    path = os.path.join(EXAMPLES_DIR, 'dotnet_lib' + suffix + '.dll')
    with LoadLibrary(path, libtype='net') as library:
        assert isinstance(library.assembly, library.lib.System.Reflection.Assembly)
        assert library.assembly is not None
        assert library.gateway is None
        assert library.path == path
        assert isinstance(library.lib, DotNet)
        assert library.lib.DotNetMSL.BasicMath().add_integers(1, 2) == 3
    # After exiting the context the assembly and lib are released.
    assert library.path == path
    assert library.assembly is None
    assert library.gateway is None
    assert library.lib is None

    # can still call this (even multiple times)
    for _ in range(10):
        library.cleanup()

    assert 'libtype=NoneType' in str(library)
    assert 'libtype=NoneType' in repr(library)
def test_java(caplog):
    """A Java .class loads through a Py4J gateway which is shut down when
    the context manager exits; both events are logged at DEBUG."""
    caplog.set_level(logging.DEBUG, logger.name)
    path = os.path.join(EXAMPLES_DIR, 'Trig.class')
    with LoadLibrary(path) as library:
        assert library.assembly is None
        assert isinstance(library.gateway, JavaGateway)
        assert library.path == path
        assert isinstance(library.lib, JVMView)
        assert library.lib.Trig.cos(0.0) == 1.0
    assert library.path == path
    assert library.assembly is None
    assert library.gateway is None
    assert library.lib is None

    # The debug log records the load and the gateway shutdown, in order.
    record = caplog.records[0]
    assert record.levelname == 'DEBUG'
    assert record.msg == 'Loaded %s'
    record = caplog.records[1]
    assert record.levelname == 'DEBUG'
    assert record.msg == 'shutdown Py4J.GatewayServer'

    # can still call this (even multiple times)
    for _ in range(10):
        library.cleanup()

    assert 'libtype=NoneType' in str(library)
    assert 'libtype=NoneType' in repr(library)
@skipif_no_server32
def test_client():
    """The 32-bit server client drops its connection on context exit and
    closes the server's stdout/stderr streams exactly once."""
    with Cpp64() as cpp:
        assert cpp.connection is not None
        assert cpp.add(1, -1) == 0
    assert cpp.connection is None

    # can still call this (even multiple times)
    for _ in range(10):
        out, err = cpp.shutdown_server32()
        assert out.closed
        assert err.closed

    with Cpp64() as cpp:
        # Inside the context the streams are open and empty.
        out, err = cpp.shutdown_server32()
        assert not out.closed
        assert not err.closed
        assert out.read() == b''
        assert err.read() == b''
    # Exiting the context closes the streams.
    out, err = cpp.shutdown_server32()
    assert out.closed
    assert err.closed

    assert 'lib=None address=None' in str(cpp)
    assert 'lib=None address=None' in repr(cpp)
| {
"content_hash": "74fa35a6e511ea8ea2175024769fcbef",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 83,
"avg_line_length": 30.770992366412212,
"alnum_prop": 0.6772513024063508,
"repo_name": "MSLNZ/msl-loadlib",
"id": "49aab8fcf2e0a892dc59a023f48bfe886107e6ed",
"size": "4031",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/test_context_manager.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1669"
},
{
"name": "C#",
"bytes": "5516"
},
{
"name": "C++",
"bytes": "2630"
},
{
"name": "Fortran",
"bytes": "4961"
},
{
"name": "Java",
"bytes": "15092"
},
{
"name": "Python",
"bytes": "286889"
}
],
"symlink_target": ""
} |
from datetime import datetime
from django.db import models
from pybaulicht import BaulichtDbus
# Module-level D-Bus connection shared by all Message instances.
bus = BaulichtDbus()
class Message(models.Model):
    """A user-submitted message that can be pushed to, and removed from,
    the Baulicht display service over D-Bus."""

    created = models.DateTimeField(auto_now_add=True)
    created_ip = models.GenericIPAddressField(null=True)
    name = models.CharField(blank=True, max_length=32, default="")
    email = models.EmailField(blank=True, max_length=128, default="")
    text = models.TextField(max_length=512)
    # D-Bus object path returned when the text was published; empty
    # string means the message has never been sent to the bus.
    dbus_path = models.CharField(blank=True, default="", max_length=32)
    # NOTE(review): NullBooleanField is deprecated in modern Django in
    # favor of BooleanField(null=True); changing it needs a migration.
    accepted = models.NullBooleanField(null=True, default=None)
    is_active = models.NullBooleanField(null=True, default=None)
    activated_at = models.DateTimeField(null=True)

    def send_to_dbus(self):
        """Publish the text on D-Bus (only once) and mark the message as
        accepted and active."""
        if not self.dbus_path:
            self.dbus_path = bus.add_text(self.text)
        # NOTE(review): datetime.now() is naive; if USE_TZ is enabled,
        # django.utils.timezone.now() would be correct -- confirm settings.
        self.activated_at = datetime.now()
        self.accepted = True
        self.is_active = True
        self.save()

    def remove_from_dbus(self):
        """Remove the text from D-Bus (if it was published) and mark the
        message inactive."""
        if self.dbus_path:
            bus.remove_text(self.dbus_path)
        self.is_active = False
        self.save()
| {
"content_hash": "a2795254af012b540f4ee33cde09defe",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 71,
"avg_line_length": 32.61764705882353,
"alnum_prop": 0.6591523895401262,
"repo_name": "elnappo/Baulicht",
"id": "99c6777a9c7087714c92090d51b65531a68437db",
"size": "1109",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Webinterface/main/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "21843"
},
{
"name": "CSS",
"bytes": "1444"
},
{
"name": "Groovy",
"bytes": "1145"
},
{
"name": "Java",
"bytes": "21611"
},
{
"name": "Python",
"bytes": "14729"
},
{
"name": "Shell",
"bytes": "141"
}
],
"symlink_target": ""
} |
import optparse
from locale import gettext as _
from gi.repository import Gtk # pylint: disable=E0611
from malstor import MalstorWindow
from malstor_lib import set_up_logging, get_version
def parse_options():
    """Parse command line options and configure logging verbosity.

    Returns:
        The parsed optparse options object, so callers can inspect the
        chosen verbosity. (Previously the value was discarded; returning
        it is backward compatible because the old return was None and
        no caller used it.)
    """
    parser = optparse.OptionParser(version="%%prog %s" % get_version())
    parser.add_option(
        "-v", "--verbose", action="count", dest="verbose",
        help=_("Show debug messages (-vv debugs malstor_lib also)"))
    # Positional arguments are not used by this application; '_args'
    # avoids shadowing the gettext alias '_' imported at module scope.
    options, _args = parser.parse_args()

    set_up_logging(options)
    return options
def main():
    """Application entry point: parse CLI options, show the main window,
    and run the GTK main loop."""
    parse_options()

    # Run the application.
    window = MalstorWindow.MalstorWindow()
    window.show()
    Gtk.main()
| {
"content_hash": "27af759829da12b1d24b459ef6c7b99a",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 71,
"avg_line_length": 26.25,
"alnum_prop": 0.673469387755102,
"repo_name": "cfossace/test",
"id": "3e7d0c802f0efa77bbe1891cb12fdaf145cd10f7",
"size": "881",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "malstor/malstor/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "8694"
},
{
"name": "CSS",
"bytes": "390510"
},
{
"name": "HTML",
"bytes": "475647"
},
{
"name": "JavaScript",
"bytes": "3515690"
},
{
"name": "Makefile",
"bytes": "5580"
},
{
"name": "Python",
"bytes": "2316851"
},
{
"name": "Shell",
"bytes": "32614"
}
],
"symlink_target": ""
} |
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South migration creating the freshbooks Client table.

    NOTE(review): a 'p_state' column exists for the primary address but
    there is no matching 's_state' for the secondary address -- possibly
    an omission in the original model. A shipped migration must not be
    edited to change it; fix forward in a later migration if needed.
    """

    def forwards(self, orm):
        # Adding model 'Client'
        db.create_table(u'freshbooks_client', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('freshbooks_id', self.gf('django.db.models.fields.IntegerField')()),
            ('first_name', self.gf('django.db.models.fields.CharField')(max_length=128, null=True, blank=True)),
            ('last_name', self.gf('django.db.models.fields.CharField')(max_length=128, null=True, blank=True)),
            ('organization', self.gf('django.db.models.fields.CharField')(max_length=128, null=True, blank=True)),
            ('email', self.gf('django.db.models.fields.EmailField')(max_length=75)),
            ('username', self.gf('django.db.models.fields.CharField')(max_length=128, null=True, blank=True)),
            ('password', self.gf('django.db.models.fields.CharField')(max_length=128, null=True, blank=True)),
            ('work_phone', self.gf('django.db.models.fields.CharField')(max_length=32, null=True, blank=True)),
            ('home_phone', self.gf('django.db.models.fields.CharField')(max_length=32, null=True, blank=True)),
            ('mobile', self.gf('django.db.models.fields.CharField')(max_length=32, null=True, blank=True)),
            ('fax', self.gf('django.db.models.fields.CharField')(max_length=32, null=True, blank=True)),
            ('notes', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
            ('p_street1', self.gf('django.db.models.fields.CharField')(max_length=128, null=True, blank=True)),
            ('p_street2', self.gf('django.db.models.fields.CharField')(max_length=128, null=True, blank=True)),
            ('p_city', self.gf('django.db.models.fields.CharField')(max_length=128, null=True, blank=True)),
            ('p_state', self.gf('django.db.models.fields.CharField')(max_length=128, null=True, blank=True)),
            ('p_country', self.gf('django.db.models.fields.CharField')(max_length=128, null=True, blank=True)),
            ('p_code', self.gf('django.db.models.fields.CharField')(max_length=16, null=True, blank=True)),
            ('s_street1', self.gf('django.db.models.fields.CharField')(max_length=128, null=True, blank=True)),
            ('s_street2', self.gf('django.db.models.fields.CharField')(max_length=128, null=True, blank=True)),
            ('s_city', self.gf('django.db.models.fields.CharField')(max_length=128, null=True, blank=True)),
            ('s_country', self.gf('django.db.models.fields.CharField')(max_length=128, null=True, blank=True)),
            ('s_code', self.gf('django.db.models.fields.CharField')(max_length=16, null=True, blank=True)),
            ('url', self.gf('django.db.models.fields.CharField')(max_length=256, null=True, blank=True)),
        ))
        db.send_create_signal(u'freshbooks', ['Client'])

    def backwards(self, orm):
        # Deleting model 'Client'
        db.delete_table(u'freshbooks_client')

    # Frozen ORM snapshot used by South when applying this migration.
    models = {
        u'freshbooks.client': {
            'Meta': {'object_name': 'Client'},
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
            'fax': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'freshbooks_id': ('django.db.models.fields.IntegerField', [], {}),
            'home_phone': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'mobile': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
            'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'organization': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'p_city': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'p_code': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'}),
            'p_country': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'p_state': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'p_street1': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'p_street2': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            's_city': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            's_code': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'}),
            's_country': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            's_street1': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            's_street2': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'url': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'work_phone': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'})
        }
    }

    complete_apps = ['freshbooks']
"content_hash": "64cde14da830aac857382a5f71439bdf",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 126,
"avg_line_length": 80.55844155844156,
"alnum_prop": 0.5845558600677092,
"repo_name": "unomena/django-freshbooks",
"id": "90e529643cdc40aeee41caabb9587733309ec9b5",
"size": "6227",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "freshbooks/migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "61253"
}
],
"symlink_target": ""
} |
import requests
import re
import time
import datetime
import settings
from selenium import webdriver
from w3lib.html import remove_tags
from lxml import etree
from raven import Client
from lib.tools.spider_tools import get_one_ua
from control.db.api.account_api import DBAccountApi
from control.db.api.article_api import DBArticleApi
from control.db.api.image_api import DBImageApi
from lib.tools.custom_exception import CustomException
from lib.tools.log_mgr import get_logger
from lib.mp.base import Base
from control.db.redis_mgr import RedisApi
class Twitter(Base):
    """Publishes content to Twitter by driving its web endpoints
    directly (form login, typeahead JSON, chunked media upload)."""

    logger = get_logger()
    rs = RedisApi()
    login_url = 'https://twitter.com/login'
    post_url = 'https://twitter.com/sessions'
    home_url = 'https://twitter.com/home'
    # Typeahead endpoint; also used to read the logged-in user's profile.
    user_url = 'https://twitter.com/i/search/typeahead.json?count=1200&media_tagging_in_prefetch=true&prefetch=true&result_type=users&users_cache_age=-1'
    create_url = 'https://twitter.com/i/tweet/create'
    mp_id = 10
    zh_name = 'Twitter'

    @staticmethod
    def login(user, pswd, **kw):
        """Log in with username/password and return (cookies, display name).

        Raises a generic Exception (message in Chinese: 'login failed')
        when the profile cannot be read back after the login POST.
        """
        _session = requests.session()
        # Scrape the CSRF token, ui_metrics blob and seed cookies first.
        token, rf, cookies = Twitter._get_values()
        _session.cookies = requests.cookies.cookiejar_from_dict(cookies)
        _session.headers.update({
            'User-Agent': get_one_ua(0), 'Referer': 'https://twitter.com/'})
        # NOTE(review): 'authenticity_token' appears twice in this dict
        # literal; the second occurrence silently wins.
        post_data = {
            'session[username_or_email]': user,
            'session[password]': pswd,
            'authenticity_token': token,
            'ui_metrics': rf,
            'scribe_log': '',
            'redirect_after_login': '',
            'authenticity_token': token,
            'remember_me': 1,
        }
        print(post_data)
        _session.post(Twitter.post_url, data=post_data, allow_redirects=False, timeout=30)
        cookies = _session.cookies.get_dict()
        _session.get(Twitter.home_url, allow_redirects=False)
        try:
            # Reading the profile back confirms the session is live.
            resp = _session.get(Twitter.user_url)
            if resp.status_code == 200:
                name = resp.json()['users'][0]['name']
                Twitter.logger.info(name)
                return cookies, name
        except:
            raise Exception('登录失败')

    @staticmethod
    def _get_values():
        """Scrape the login page with headless Chrome; return
        (authenticity token, ui_metrics value, seed cookies)."""
        opt = webdriver.ChromeOptions()
        opt.add_argument("--no-sandbox")
        opt.add_argument('user-agent={}'.format(get_one_ua(0)))
        opt.set_headless()
        chrome = webdriver.Chrome(options=opt)
        chrome.get(Twitter.login_url)
        resp = chrome.page_source
        token = re.compile(r'<input type="hidden" value="(.*?)" name="authenticity_token"', re.S).findall(resp)
        rf = re.compile(r'<input type="hidden" name="ui_metrics" autocomplete="off" value="(.*?)" />', re.S).findall(
            resp)
        if token:
            token = token[0]
        else:
            chrome.quit()
            raise Exception('token 获取失败')
        if rf:
            rf = rf[0]
            # NOTE(review): this replace is a no-op as written; it was
            # presumably meant to unescape HTML entities (e.g. '&quot;')
            # and may have been mangled -- confirm against the original.
            rf = rf.replace('"', '"')
            # print(json.loads(rt))
        else:
            chrome.quit()
            raise Exception('rf获取错')
        cc = chrome.get_cookies()
        cookies = {}
        for s in cc:
            cookies[s['name']] = s['value']
        cookies['app_shell_visited'] = '1'
        cookies['path'] = '/'
        cookies['max-age'] = '5'
        chrome.quit()
        return token, rf, cookies

    def publish(self, content):
        """Create a tweet from HTML *content* (up to 4 embedded images).

        Returns (2, '', tweet_id) on success, (3, cause, '') on failure.
        """
        self.session.headers['Referer'] = 'https://twitter.com/'
        result = re.compile(r'<img.*?src="(.*?)".*?>', re.S).findall(content)  # find <img> src URLs
        media_id_list = []
        if len(result) > 4:
            raise Exception('图片不能超过4张')
        # Download each referenced image and upload it to Twitter.
        for item in result:
            res = requests.get(item)
            img_data = res.content
            s = self._upload_img(img_data)
            media_id_list.append(str(s))
        media_id = ','.join(media_id_list)
        self.logger.info(media_id)
        # strip the HTML tags, leaving plain text for the tweet body
        content = remove_tags(content)
        # if len(content.encode(encoding='gbk')) > 280:
        #     raise Exception('文章内容已超过最大长度')
        data = {
            "authenticity_token": "285baf78b4de28459a94ec0c0a3c5fa3f62fdfd5",
            "batch_mode": "off",
            "is_permalink_page": "false",
            "media_ids": media_id,
            "place_id": "",
            "status": content,
            "tagged_users": "",
        }
        # Text-only tweets must not carry the media-related fields.
        if not media_id_list:
            del data['tagged_users']
            del data['media_ids']
        resp = self.session.post(
            self.create_url, data=data, timeout=30)
        resp_json = resp.json()
        try:
            tweet_id = resp_json['tweet_id']
            return 2, '', tweet_id
        except Exception as e:
            cause = resp_json['message']
            self.logger.error(e)
            return 3, cause, ''

    def _upload_img(self, img_data):
        """Upload raw image bytes through the chunked media endpoint
        (INIT -> APPEND -> FINALIZE) and return the media_id."""
        b_img_l = len(img_data)
        url = 'https://upload.twitter.com/i/media/upload.json?command=INIT&total_bytes={}&media_type=image%2Fjpeg&media_' \
              'category=tweet_image'.format(b_img_l)
        data = {
            "command": "INIT",
            "total_bytes": b_img_l,
            "media_type": "image/jpeg",
            "media_category": "tweet_image",
        }
        resp = self.session.post(url, data=data, timeout=30).json()  # obtain the media id
        media_id = resp['media_id']
        up_url = 'https://upload.twitter.com/i/media/upload.json?command=APPEND&media_id={}&segment_index=0'.format(
            media_id)
        file = {
            'media': ('blob', img_data, 'image/jpeg', {
                'Content-Type': 'application/octet-stream'
            })}
        # upload the binary chunk
        self.session.post(up_url, files=file)
        # NOTE(review): the URL below has no '{}' placeholder, so
        # .format(media_id) appends nothing and the FINALIZE query string
        # lacks the media_id -- likely a bug (the form data still carries
        # it, which may be why this works).
        self.session.post(
            'https://upload.twitter.com/i/media/upload.json?command=FINALIZE&media_id='.format(media_id),
            data={
                'command': 'FINALIZE',
                'media_id': media_id
            }, timeout=30)
        return media_id

    def upload_image(self, image_name, image_data):
        """Unused for Twitter; images go through _upload_img instead."""
        return ''

    def fetch_article_status(self, mp_article_id):
        """Build the public status URL for a tweet id.

        Returns (4, '', url).
        """
        resp = self.session.get(self.user_url, timeout=30)
        resp_json = resp.json()
        screen_name = resp_json['users'][0]['screen_name']
        url = 'https://twitter.com/{}/status/{}?conversation_id={}'.format(screen_name, mp_article_id, mp_article_id)
        return 4, '', url

    def query_article_data(self, mp_article_id):
        """Scrape comment/retweet/like counts from the tweet's page.

        Returns a dict of counts, or '' when the counters are absent.
        """
        resp = self.session.get(self.user_url, timeout=30)
        resp_json = resp.json()
        screen_name = resp_json['users'][0]['screen_name']
        url = 'https://twitter.com/{}/status/{}?conversation_id={}'.format(screen_name, mp_article_id, mp_article_id)
        self.session.headers['Accept'] = 'application/json, text/javascript, */*; q=0.01'
        self.session.headers['Referer'] = 'https://twitter.com/'
        resp = self.session.get(url, timeout=30)
        html = etree.HTML(resp.text)
        datas = html.xpath('//span[@class="ProfileTweet-actionCountForAria"]/text()')
        if not datas:
            return ''
        # NOTE(review): only the first character of each count string is
        # parsed, so multi-digit counts are truncated -- confirm the
        # page's text format before relying on these numbers.
        reads = dict(
            comment_num=int(datas[0][0]),
            follow_num=int(datas[1][0]),
            like_num=int(datas[2][0])
        )
        return reads

    def check_user_cookies(self):
        """Return True if the stored session cookies are still valid."""
        try:
            resp = self.session.get(self.user_url, timeout=30)
            resp_json = resp.json()
            screen_name = resp_json['users'][0]['screen_name']
            if screen_name:
                return True
            else:
                self.logger.error('Twitter 规则改变')
                return False
        except:
            return False
if __name__ == '__main__':
    from control.db.api.account_api import DBAccountApi
    from lib.tools.log_mgr import Flogger

    # Ad-hoc manual check: load a stored account and query engagement
    # numbers for a known tweet id.
    # SECURITY(review): a block of commented-out experiments that contained
    # a plaintext email/password pair was removed here; credentials must
    # never live in source, even commented out.
    Flogger()
    db_account_api = DBAccountApi()
    account = db_account_api.query(uid=1, mp_id=10)
    if account:
        account = account[0]
        tw = Twitter(account)
        title = '1082156664759869440'
        content = 'Hello, this is my talk'
        resp = tw.query_article_data(title)
        print(resp)
| {
"content_hash": "ed204d8ff8cb13596a1b7b4525e218fc",
"timestamp": "",
"source": "github",
"line_count": 233,
"max_line_length": 153,
"avg_line_length": 36.60944206008584,
"alnum_prop": 0.5624853458382181,
"repo_name": "Svolcano/python_exercise",
"id": "4e8327bbc0d5e601253e4514ece909abe92548b3",
"size": "8622",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spiders/twitter.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "732"
},
{
"name": "JavaScript",
"bytes": "157614"
},
{
"name": "Python",
"bytes": "3292561"
},
{
"name": "Shell",
"bytes": "1417"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Rename the three category columns to use an underscore separator
    (category1 -> category_1, etc.)."""

    dependencies = [
        ('topthree', '0001_initial'),
    ]

    # The three renames are identical apart from the index, so generate
    # them instead of spelling each one out.
    operations = [
        migrations.RenameField(
            model_name='categories',
            old_name='category{}'.format(i),
            new_name='category_{}'.format(i),
        )
        for i in (1, 2, 3)
    ]
| {
"content_hash": "afd44cd2b3953110d65d067e9d0b4984",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 39,
"avg_line_length": 23.357142857142858,
"alnum_prop": 0.5336391437308868,
"repo_name": "vipulkanade/EventbriteDjango",
"id": "bf88d32da996cd73aa8a99a79d8c9db53bc3af73",
"size": "726",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src_eventbrite_django/topthree/migrations/0002_auto_20160125_0354.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "469417"
},
{
"name": "C++",
"bytes": "2005"
},
{
"name": "CSS",
"bytes": "278453"
},
{
"name": "HTML",
"bytes": "159434"
},
{
"name": "JavaScript",
"bytes": "518191"
},
{
"name": "Makefile",
"bytes": "69338"
},
{
"name": "Python",
"bytes": "8532178"
},
{
"name": "Shell",
"bytes": "17841"
}
],
"symlink_target": ""
} |
import os
import cattle
import pytest
import gdapi
from docker import Client
from docker.utils import kwargs_from_env
# Copy the default API headers and scope admin requests to the 'USER' project.
ADMIN_HEADERS = dict(gdapi.HEADERS)
ADMIN_HEADERS['X-API-Project-Id'] = 'USER'
class CattleConfig(object):
    """Reads a cattle API endpoint from an environment variable and
    provides assertions over the server's active settings."""

    def __init__(self, url_env):
        # Name of the environment variable that holds the cattle URL.
        self.url_env = url_env

    def _get_client(self):
        """Build a validated cattle client for this endpoint."""
        api_client = cattle.from_env(url=self.cattle_url(),
                                     cache=False,
                                     headers=ADMIN_HEADERS)
        assert api_client.valid()
        return api_client

    def cattle_url(self):
        """Return the endpoint URL, or None when the env var is unset."""
        return os.environ.get(self.url_env)

    def _get_setting(self, setting):
        """Fetch the active value of a server setting by its id."""
        return self._get_client().by_id_setting('1as!{0}'.format(setting))['activeValue']

    def assert_setting(self, setting, value):
        """Assert that the server's active value for *setting* equals *value*."""
        assert self._get_setting(setting) == value
class DockerContainerTester(object):
    """Inspects local docker containers and asserts on the commands
    running inside them."""

    def __init__(self, assert_hostname=True):
        client_kwargs = kwargs_from_env(assert_hostname=assert_hostname)
        self.client = Client(**client_kwargs)
        self.containers = self._load_containers()

    def _load_containers(self):
        """Map container name (leading '/' stripped) to its container dict."""
        by_name = {}
        for info in self.client.containers():
            by_name[info['Names'][0][1:]] = info
        return by_name

    def _get_container(self, container_name):
        '''
        Return the named container from the cache, refreshing the cache
        once when the name is not present. Returns None if it still
        cannot be found after the refresh.
        '''
        cached = self.containers.get(container_name, None)
        if cached is None:
            self.containers = self._load_containers()
            cached = self.containers.get(container_name, None)
        return cached

    def _get_process_commands(self, container_name):
        # Only the command column is inspected for now; the process user
        # may become important later.
        container = self._get_container(container_name)
        if container is None:
            return []
        top = self.client.top(container['Id'])
        try:
            cmd_idx = top['Titles'].index("COMMAND")
        except ValueError:
            # Some docker versions title the column CMD instead.
            cmd_idx = top['Titles'].index("CMD")
        return [proc[cmd_idx] for proc in top['Processes']]

    def assert_command_running(self, container_name, process_name):
        """Assert *process_name* is among the container's running commands."""
        assert process_name in self._get_process_commands(container_name)

    def assert_command_not_running(self, container_name, process_name):
        """Assert *process_name* is absent from the container's commands."""
        assert process_name not in self._get_process_commands(container_name)
@pytest.fixture()
def docker_containers():
    """Container tester with hostname verification disabled (needed for
    docker-machine style TLS setups)."""
    tester = DockerContainerTester(assert_hostname=False)
    return tester
@pytest.fixture()
def mysql_command():
    """The exact mysqld command line expected inside a local-DB cattle
    container."""
    args = [
        "/usr/sbin/mysqld",
        "--basedir=/usr",
        "--datadir=/var/lib/mysql",
        "--plugin-dir=/usr/lib/mysql/plugin",
        "--user=mysql",
        "--log-error=/var/log/mysql/error.log",
        "--pid-file=/var/run/mysqld/mysqld.pid",
        "--socket=/var/run/mysqld/mysqld.sock",
        "--port=3306",
    ]
    return " ".join(args)
@pytest.fixture()
def h2_cattle_config():
    """Config pointing at the H2-backed cattle test server."""
    return CattleConfig('CATTLE_H2DB_TEST_URL')
@pytest.fixture()
def mysql_link_cattle_config():
    """Config pointing at the cattle server linked to an external MySQL."""
    return CattleConfig('CATTLE_MYSQL_LINK_TEST_URL')
@pytest.fixture()
def mysql_local_cattle_config():
    """Config pointing at the cattle server with an in-container MySQL."""
    return CattleConfig('CATTLE_MYSQL_LOCAL_TEST_URL')
@pytest.fixture()
def mysql_manual_cattle_config():
    """Config pointing at the cattle server with a manually configured MySQL."""
    return CattleConfig('CATTLE_MYSQL_MANUAL_TEST_URL')
def test_h2_database_overrides_mysql(h2_cattle_config):
    """The H2 server must report 'h2' as its active database setting."""
    h2_cattle_config.assert_setting('db.cattle.database', 'h2')
def test_mysql_link_database_db(mysql_link_cattle_config):
    """The linked-MySQL server must report 'mysql' as its database."""
    mysql_link_cattle_config.assert_setting('db.cattle.database', 'mysql')
def test_mysql_local_database_db(mysql_local_cattle_config):
    """The local-MySQL server must report 'mysql' as its database."""
    mysql_local_cattle_config.assert_setting('db.cattle.database', 'mysql')
def test_mysql_manual_database_db(mysql_manual_cattle_config):
    """The manually-configured MySQL server must report 'mysql'."""
    mysql_manual_cattle_config.assert_setting('db.cattle.database', 'mysql')
def test_local_cattle_db_has_mysql_process(docker_containers, mysql_command):
    """A local-DB cattle container must run mysqld inside itself."""
    docker_containers.assert_command_running(
        'server_localmysqlcattle_1', mysql_command)
def test_link_cattle_no_mysql_process(docker_containers, mysql_command):
    """A linked-DB cattle container must NOT run its own mysqld."""
    docker_containers.assert_command_not_running(
        'server_mysqllinkcattle_1', mysql_command)
def test_manual_cattle_no_mysql_process(docker_containers, mysql_command):
    """A manually-configured cattle container must NOT run its own mysqld."""
    docker_containers.assert_command_not_running(
        'server_mysqlmanualcattle_1', mysql_command)
def test_h2_cattle_no_mysql_process(docker_containers, mysql_command):
    """An H2-backed cattle container must NOT run mysqld."""
    docker_containers.assert_command_not_running(
        'server_h2dbcattle_1', mysql_command)
| {
"content_hash": "e4141614e167373139efff977abb022f",
"timestamp": "",
"source": "github",
"line_count": 157,
"max_line_length": 77,
"avg_line_length": 30.97452229299363,
"alnum_prop": 0.6559736787990952,
"repo_name": "imikushin/rancher",
"id": "9d61b43c457e5e450dace51524251d0be025ba88",
"size": "4863",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/server/ranchertests/test_cattles.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "6945"
},
{
"name": "Shell",
"bytes": "19765"
}
],
"symlink_target": ""
} |
"""Auto-generated file, do not edit by hand. ST metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
# Short-number metadata only: ST declares the single code 112 as both the
# emergency and short-code pattern (general pattern allows any 3-digit 1xx).
# This file is auto-generated; changes belong in the metadata generator.
PHONE_METADATA_ST = PhoneMetadata(id='ST', country_code=None, international_prefix=None,
    general_desc=PhoneNumberDesc(national_number_pattern='1\\d{2}', possible_length=(3,)),
    emergency=PhoneNumberDesc(national_number_pattern='112', example_number='112', possible_length=(3,)),
    short_code=PhoneNumberDesc(national_number_pattern='112', example_number='112', possible_length=(3,)),
    short_data=True)
| {
"content_hash": "aecb61ea9b0ed2964540b5eca8789454",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 106,
"avg_line_length": 68.5,
"alnum_prop": 0.75,
"repo_name": "gencer/python-phonenumbers",
"id": "e65a6b3a50b3745417b731b10294139bb8682b1a",
"size": "548",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "python/phonenumbers/shortdata/region_ST.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "23039485"
}
],
"symlink_target": ""
} |
import os
from sikwidgets.widgets.widget import Widget
from sikwidgets.widgets.widget import WidgetError
from sikwidgets.widgets.button import Button
from sikwidgets import settings
class ScrollableWidget(Widget):
    """A widget with a virtual scrollbar made of four directional buttons."""

    def __init__(self, parent, name):
        Widget.__init__(self, parent, name)
        # A virtual child widget exists solely to own the four buttons.
        # TODO: is it worth it to create a Scrollbar widget?
        self._scrollbar = Widget(self, "__scrollbar__")
        self._top_button = Button(self._scrollbar, "top")
        self._bottom_button = Button(self._scrollbar, "bottom")
        self._left_button = Button(self._scrollbar, "left")
        self._right_button = Button(self._scrollbar, "right")

    def capture_screenshots(self):
        """Capture screenshots of all four scrollbar button states."""
        for button in (self._top_button, self._bottom_button,
                       self._left_button, self._right_button):
            button.capture_screenshots()

    def has_vertical_scrollbar(self):
        """True when both the top and bottom buttons are present on screen."""
        return (self._top_button.exists(force_check=True) and
                self._bottom_button.exists(force_check=True))

    def has_horizontal_scrollbar(self):
        """True when both the left and right buttons are present on screen."""
        return (self._left_button.exists(force_check=True) and
                self._right_button.exists(force_check=True))

    def is_scrollable(self):
        """ Although a widget could be scrollable, it may not actually
        be. First, check if scroll states actually exist. Second, look
        for them in the current widget.

        Note that this checks whether the widget can be scrolled *at
        this moment in time*.
        """
        return self.has_vertical_scrollbar() or self.has_horizontal_scrollbar()

    def scrollbar_at_top(self):
        """True when at the top edge (or there is no vertical scrollbar)."""
        return (not self.has_vertical_scrollbar()
                or self._top_button.is_touching())

    def scrollbar_at_bottom(self):
        """True when at the bottom edge (or there is no vertical scrollbar)."""
        return (not self.has_vertical_scrollbar()
                or self._bottom_button.is_touching())

    def scrollbar_at_left(self):
        """True when at the left edge (or there is no horizontal scrollbar)."""
        return (not self.has_horizontal_scrollbar()
                or self._left_button.is_touching())

    def scrollbar_at_right(self):
        """True when at the right edge (or there is no horizontal scrollbar)."""
        return (not self.has_horizontal_scrollbar()
                or self._right_button.is_touching())

    def _scroll(self, at_edge, button, amount):
        # Click *button* up to *amount* times, stopping early when
        # *at_edge* reports the edge has been reached. Returns how many
        # clicks were actually performed.
        for clicks_done in range(amount):
            if at_edge():
                return clicks_done
            button.click()
        return amount

    def scroll_up(self, amount=1):
        """Scroll up by *amount* clicks; returns clicks actually performed."""
        return self._scroll(self.scrollbar_at_top, self._top_button, amount)

    def scroll_down(self, amount=1):
        """Scroll down by *amount* clicks; returns clicks actually performed."""
        return self._scroll(self.scrollbar_at_bottom, self._bottom_button, amount)

    def scroll_left(self, amount=1):
        """Scroll left by *amount* clicks; returns clicks actually performed."""
        return self._scroll(self.scrollbar_at_left, self._left_button, amount)

    def scroll_right(self, amount=1):
        """Scroll right by *amount* clicks; returns clicks actually performed."""
        return self._scroll(self.scrollbar_at_right, self._right_button, amount)

    def scroll_to_top(self):
        while self.scroll_up():
            pass

    def scroll_to_bottom(self):
        while self.scroll_down():
            pass

    def scroll_to_left(self):
        while self.scroll_left():
            pass

    def scroll_to_right(self):
        while self.scroll_right():
            pass
| {
"content_hash": "2d40225afaab11c6c4f5b9a50bf29268",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 79,
"avg_line_length": 33.31818181818182,
"alnum_prop": 0.6065484311050477,
"repo_name": "griffy/sikwidgets",
"id": "da1dc4ca16cee2218e40718c752322b40737f011",
"size": "3665",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sikwidgets/widgets/scrollable_widget.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "88682"
},
{
"name": "Shell",
"bytes": "1728"
},
{
"name": "Visual Basic",
"bytes": "759"
}
],
"symlink_target": ""
} |
import os
import sys
if __name__ == "__main__":
    # Fall back to the production settings when the caller has not set
    # DJANGO_SETTINGS_MODULE explicitly.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "seeker.settings.prod")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| {
"content_hash": "c3066192b401ded6d730f62f532a7a11",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 75,
"avg_line_length": 25.77777777777778,
"alnum_prop": 0.7112068965517241,
"repo_name": "LomaxRx/service-seeker",
"id": "f409ea3320994c017196a27b4a8464ea73c126c9",
"size": "254",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "manage.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "31363"
},
{
"name": "Shell",
"bytes": "73"
}
],
"symlink_target": ""
} |
"""This module contains a Google ML Engine Hook."""
import logging
import random
import time
from typing import Callable, Dict, List, Optional
from googleapiclient.discovery import Resource, build
from googleapiclient.errors import HttpError
from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
from airflow.version import version as airflow_version
# Module-level logger used by free functions that have no hook instance.
log = logging.getLogger(__name__)
# Airflow version rendered as a valid Cloud label value: dots and pluses
# are replaced with dashes because label values disallow them.
_AIRFLOW_VERSION = 'v' + airflow_version.replace('.', '-').replace('+', '-')
def _poll_with_exponential_delay(request, execute_num_retries, max_n, is_done_func, is_error_func):
    """
    Execute request with exponential delay.

    This method is intended to handle and retry in case of api-specific errors,
    such as 429 "Too Many Requests", unlike the `request.execute` which handles
    lower level errors like `ConnectionError`/`socket.timeout`/`ssl.SSLError`.

    :param request: request to be executed.
    :param execute_num_retries: num_retries for `request.execute` method.
    :param max_n: number of times to retry request in this method.
    :param is_done_func: callable to determine if operation is done.
    :param is_error_func: callable to determine if operation is failed.
    :return: response
    :rtype: httplib2.Response
    :raises ValueError: when the response reports an error, or when the
        operation is still not done after ``max_n`` attempts.
    """
    for i in range(max_n):
        try:
            response = request.execute(num_retries=execute_num_retries)
            if is_error_func(response):
                raise ValueError(f'The response contained an error: {response}')
            if is_done_func(response):
                log.info('Operation is done: %s', response)
                return response
            # Not done yet: back off exponentially with up to 1s of jitter.
            time.sleep((2**i) + (random.randint(0, 1000) / 1000))
        except HttpError as e:
            # Only 429 (rate limiting) is retryable here; anything else
            # propagates to the caller immediately.
            if e.resp.status != 429:
                log.info('Something went wrong. Not retrying: %s', e)
                raise
            time.sleep((2**i) + (random.randint(0, 1000) / 1000))
    raise ValueError(f'Connection could not be established after {max_n} retries.')
class MLEngineHook(GoogleBaseHook):
    """
    Hook for Google ML Engine APIs.
    All the methods in the hook where project_id is used must be called with
    keyword arguments rather than positional.
    """
    def get_conn(self) -> Resource:
        """
        Retrieves the connection to MLEngine.
        :return: Google MLEngine services object.
        """
        authed_http = self._authorize()
        # cache_discovery=False avoids oauth2client cache warnings/issues.
        return build('ml', 'v1', http=authed_http, cache_discovery=False)
    @GoogleBaseHook.fallback_to_default_project_id
    def create_job(self, job: dict, project_id: str, use_existing_job_fn: Optional[Callable] = None) -> dict:
        """
        Launches a MLEngine job and wait for it to reach a terminal state.
        :param project_id: The Google Cloud project id within which MLEngine
            job will be launched. If set to None or missing, the default project_id from the Google Cloud
            connection is used.
        :param job: MLEngine Job object that should be provided to the MLEngine
            API, such as: ::
                {
                    'jobId': 'my_job_id',
                    'trainingInput': {
                        'scaleTier': 'STANDARD_1',
                        ...
                    }
                }
        :param use_existing_job_fn: In case that a MLEngine job with the same
            job_id already exist, this method (if provided) will decide whether
            we should use this existing job, continue waiting for it to finish
            and returning the job object. It should accepts a MLEngine job
            object, and returns a boolean value indicating whether it is OK to
            reuse the existing job. If 'use_existing_job_fn' is not provided,
            we by default reuse the existing MLEngine job.
        :return: The MLEngine job object if the job successfully reach a
            terminal state (which might be FAILED or CANCELLED state).
        :rtype: dict
        """
        hook = self.get_conn()
        # Tag the job with the airflow version label for traceability.
        self._append_label(job)
        self.log.info("Creating job.")
        request = hook.projects().jobs().create(parent=f'projects/{project_id}', body=job)
        job_id = job['jobId']
        try:
            request.execute(num_retries=self.num_retries)
        except HttpError as e:
            # 409 means there is an existing job with the same job ID.
            if e.resp.status == 409:
                if use_existing_job_fn is not None:
                    existing_job = self._get_job(project_id, job_id)
                    if not use_existing_job_fn(existing_job):
                        self.log.error(
                            'Job with job_id %s already exist, but it does not match our expectation: %s',
                            job_id,
                            existing_job,
                        )
                        raise
                # Reusing the existing job: fall through to wait for it below.
                self.log.info('Job with job_id %s already exist. Will waiting for it to finish', job_id)
            else:
                self.log.error('Failed to create MLEngine job: %s', e)
                raise
        # Block until the (new or reused) job reaches a terminal state.
        return self._wait_for_job_done(project_id, job_id)
    @GoogleBaseHook.fallback_to_default_project_id
    def cancel_job(
        self,
        job_id: str,
        project_id: str,
    ) -> dict:
        """
        Cancels a MLEngine job.
        :param project_id: The Google Cloud project id within which MLEngine
            job will be cancelled. If set to None or missing, the default project_id from the Google Cloud
            connection is used.
        :param job_id: A unique id for the want-to-be cancelled Google MLEngine training job.
        :return: Empty dict if cancelled successfully
        :rtype: dict
        :raises: googleapiclient.errors.HttpError
        """
        hook = self.get_conn()
        request = hook.projects().jobs().cancel(name=f'projects/{project_id}/jobs/{job_id}')
        try:
            return request.execute(num_retries=self.num_retries)
        except HttpError as e:
            if e.resp.status == 404:
                # Unknown job id: surface the error to the caller.
                self.log.error('Job with job_id %s does not exist. ', job_id)
                raise
            elif e.resp.status == 400:
                # 400 here means the job already finished; treat as success.
                self.log.info('Job with job_id %s is already complete, cancellation aborted.', job_id)
                return {}
            else:
                self.log.error('Failed to cancel MLEngine job: %s', e)
                raise
    def _get_job(self, project_id: str, job_id: str) -> dict:
        """
        Gets a MLEngine job based on the job id.
        :param project_id: The project in which the Job is located. If set to None or missing, the default
            project_id from the Google Cloud connection is used. (templated)
        :param job_id: A unique id for the Google MLEngine job. (templated)
        :return: MLEngine job object if succeed.
        :rtype: dict
        :raises: googleapiclient.errors.HttpError
        """
        hook = self.get_conn()
        job_name = f'projects/{project_id}/jobs/{job_id}'
        request = hook.projects().jobs().get(name=job_name)
        # NOTE(review): 429 (quota) is retried indefinitely with a fixed
        # 30s pause — there is no upper bound. Confirm this is intended.
        while True:
            try:
                return request.execute(num_retries=self.num_retries)
            except HttpError as e:
                if e.resp.status == 429:
                    # polling after 30 seconds when quota failure occurs
                    time.sleep(30)
                else:
                    self.log.error('Failed to get MLEngine job: %s', e)
                    raise
    def _wait_for_job_done(self, project_id: str, job_id: str, interval: int = 30) -> dict:
        """
        Waits for the Job to reach a terminal state.
        This method will periodically check the job state until the job reach
        a terminal state.
        :param project_id: The project in which the Job is located. If set to None or missing, the default
            project_id from the Google Cloud connection is used. (templated)
        :param job_id: A unique id for the Google MLEngine job. (templated)
        :param interval: Time expressed in seconds after which the job status is checked again. (templated)
        :raises: googleapiclient.errors.HttpError
        """
        self.log.info("Waiting for job. job_id=%s", job_id)
        if interval <= 0:
            raise ValueError("Interval must be > 0")
        while True:
            job = self._get_job(project_id, job_id)
            if job['state'] in ['SUCCEEDED', 'FAILED', 'CANCELLED']:
                return job
            time.sleep(interval)
    @GoogleBaseHook.fallback_to_default_project_id
    def create_version(
        self,
        model_name: str,
        version_spec: Dict,
        project_id: str,
    ) -> dict:
        """
        Creates the Version on Google Cloud ML Engine.
        :param version_spec: A dictionary containing the information about the version. (templated)
        :param model_name: The name of the Google Cloud ML Engine model that the version belongs to.
            (templated)
        :param project_id: The Google Cloud project name to which MLEngine model belongs.
            If set to None or missing, the default project_id from the Google Cloud connection is used.
            (templated)
        :return: If the version was created successfully, returns the operation.
            Otherwise raises an error .
        :rtype: dict
        """
        hook = self.get_conn()
        parent_name = f'projects/{project_id}/models/{model_name}'
        self._append_label(version_spec)
        create_request = hook.projects().models().versions().create(parent=parent_name, body=version_spec)
        response = create_request.execute(num_retries=self.num_retries)
        # Version creation is a long-running operation: poll it until done.
        get_request = hook.projects().operations().get(name=response['name'])
        return _poll_with_exponential_delay(
            request=get_request,
            execute_num_retries=self.num_retries,
            max_n=9,
            is_done_func=lambda resp: resp.get('done', False),
            is_error_func=lambda resp: resp.get('error', None) is not None,
        )
    @GoogleBaseHook.fallback_to_default_project_id
    def set_default_version(
        self,
        model_name: str,
        version_name: str,
        project_id: str,
    ) -> dict:
        """
        Sets a version to be the default. Blocks until finished.
        :param model_name: The name of the Google Cloud ML Engine model that the version belongs to.
            (templated)
        :param version_name: A name to use for the version being operated upon. (templated)
        :param project_id: The Google Cloud project name to which MLEngine model belongs. If set to None
            or missing, the default project_id from the Google Cloud connection is used. (templated)
        :return: If successful, return an instance of Version.
            Otherwise raises an error.
        :rtype: dict
        :raises: googleapiclient.errors.HttpError
        """
        hook = self.get_conn()
        full_version_name = f'projects/{project_id}/models/{model_name}/versions/{version_name}'
        request = hook.projects().models().versions().setDefault(name=full_version_name, body={})
        try:
            response = request.execute(num_retries=self.num_retries)
            self.log.info('Successfully set version: %s to default', response)
            return response
        except HttpError as e:
            self.log.error('Something went wrong: %s', e)
            raise
    @GoogleBaseHook.fallback_to_default_project_id
    def list_versions(
        self,
        model_name: str,
        project_id: str,
    ) -> List[dict]:
        """
        Lists all available versions of a model. Blocks until finished.
        :param model_name: The name of the Google Cloud ML Engine model that the version
            belongs to. (templated)
        :param project_id: The Google Cloud project name to which MLEngine model belongs. If set to None or
            missing, the default project_id from the Google Cloud connection is used. (templated)
        :return: return an list of instance of Version.
        :rtype: List[Dict]
        :raises: googleapiclient.errors.HttpError
        """
        hook = self.get_conn()
        result: List[Dict] = []
        full_parent_name = f'projects/{project_id}/models/{model_name}'
        request = hook.projects().models().versions().list(parent=full_parent_name, pageSize=100)
        # Walk all result pages; list_next returns None after the last page.
        while request is not None:
            response = request.execute(num_retries=self.num_retries)
            result.extend(response.get('versions', []))
            request = (
                hook.projects()
                .models()
                .versions()
                .list_next(previous_request=request, previous_response=response)
            )
            # Pause between pages, presumably to stay under the API
            # quota — TODO(review): confirm, and whether 5s is required.
            time.sleep(5)
        return result
    @GoogleBaseHook.fallback_to_default_project_id
    def delete_version(
        self,
        model_name: str,
        version_name: str,
        project_id: str,
    ) -> dict:
        """
        Deletes the given version of a model. Blocks until finished.
        :param model_name: The name of the Google Cloud ML Engine model that the version
            belongs to. (templated)
        :param version_name: The name of the version to delete. (templated)
        :param project_id: The Google Cloud project name to which MLEngine
            model belongs.
        :return: If the version was deleted successfully, returns the operation.
            Otherwise raises an error.
        :rtype: Dict
        """
        hook = self.get_conn()
        full_name = f'projects/{project_id}/models/{model_name}/versions/{version_name}'
        delete_request = hook.projects().models().versions().delete(name=full_name)
        response = delete_request.execute(num_retries=self.num_retries)
        # Deletion is a long-running operation: poll it until done.
        get_request = hook.projects().operations().get(name=response['name'])
        return _poll_with_exponential_delay(
            request=get_request,
            execute_num_retries=self.num_retries,
            max_n=9,
            is_done_func=lambda resp: resp.get('done', False),
            is_error_func=lambda resp: resp.get('error', None) is not None,
        )
    @GoogleBaseHook.fallback_to_default_project_id
    def create_model(
        self,
        model: dict,
        project_id: str,
    ) -> dict:
        """
        Create a Model. Blocks until finished.
        :param model: A dictionary containing the information about the model.
        :param project_id: The Google Cloud project name to which MLEngine model belongs. If set to None or
            missing, the default project_id from the Google Cloud connection is used. (templated)
        :return: If the version was created successfully, returns the instance of Model.
            Otherwise raises an error.
        :rtype: Dict
        :raises: googleapiclient.errors.HttpError
        """
        hook = self.get_conn()
        if 'name' not in model or not model['name']:
            raise ValueError("Model name must be provided and could not be an empty string")
        project = f'projects/{project_id}'
        self._append_label(model)
        try:
            request = hook.projects().models().create(parent=project, body=model)
            response = request.execute(num_retries=self.num_retries)
        except HttpError as e:
            if e.resp.status != 409:
                raise e
            str(e)  # Fills in the error_details field
            # The 409 is only swallowed when it is precisely the
            # "model already exists" violation; anything else re-raises.
            if not e.error_details or len(e.error_details) != 1:
                raise e
            error_detail = e.error_details[0]
            if error_detail["@type"] != 'type.googleapis.com/google.rpc.BadRequest':
                raise e
            if "fieldViolations" not in error_detail or len(error_detail['fieldViolations']) != 1:
                raise e
            field_violation = error_detail['fieldViolations'][0]
            if (
                field_violation["field"] != "model.name"
                or field_violation["description"] != "A model with the same name already exists."
            ):
                raise e
            # Idempotent create: return the pre-existing model instead.
            response = self.get_model(model_name=model['name'], project_id=project_id)
        return response
    @GoogleBaseHook.fallback_to_default_project_id
    def get_model(
        self,
        model_name: str,
        project_id: str,
    ) -> Optional[dict]:
        """
        Gets a Model. Blocks until finished.
        :param model_name: The name of the model.
        :param project_id: The Google Cloud project name to which MLEngine model belongs. If set to None
            or missing, the default project_id from the Google Cloud connection is used. (templated)
        :return: If the model exists, returns the instance of Model.
            Otherwise return None.
        :rtype: Dict
        :raises: googleapiclient.errors.HttpError
        """
        hook = self.get_conn()
        if not model_name:
            raise ValueError("Model name must be provided and it could not be an empty string")
        full_model_name = f'projects/{project_id}/models/{model_name}'
        request = hook.projects().models().get(name=full_model_name)
        try:
            return request.execute(num_retries=self.num_retries)
        except HttpError as e:
            if e.resp.status == 404:
                # Missing model is an expected outcome; signalled via None.
                self.log.error('Model was not found: %s', e)
                return None
            raise
    @GoogleBaseHook.fallback_to_default_project_id
    def delete_model(
        self,
        model_name: str,
        project_id: str,
        delete_contents: bool = False,
    ) -> None:
        """
        Delete a Model. Blocks until finished.
        :param model_name: The name of the model.
        :param delete_contents: Whether to force the deletion even if the models is not empty.
            Will delete all version (if any) in the dataset if set to True.
            The default value is False.
        :param project_id: The Google Cloud project name to which MLEngine model belongs. If set to None
            or missing, the default project_id from the Google Cloud connection is used. (templated)
        :raises: googleapiclient.errors.HttpError
        """
        hook = self.get_conn()
        if not model_name:
            raise ValueError("Model name must be provided and it could not be an empty string")
        model_path = f'projects/{project_id}/models/{model_name}'
        if delete_contents:
            # A model with versions cannot be deleted; clear them first.
            self._delete_all_versions(model_name, project_id)
        request = hook.projects().models().delete(name=model_path)
        try:
            request.execute(num_retries=self.num_retries)
        except HttpError as e:
            if e.resp.status == 404:
                # Already gone: deletion is idempotent, so just log.
                self.log.error('Model was not found: %s', e)
                return
            raise
    def _delete_all_versions(self, model_name: str, project_id: str) -> None:
        # Delete every version of the model; non-default versions first.
        versions = self.list_versions(project_id=project_id, model_name=model_name)
        # The default version can only be deleted when it is the last one in the model
        non_default_versions = (version for version in versions if not version.get('isDefault', False))
        for version in non_default_versions:
            _, _, version_name = version['name'].rpartition('/')
            self.delete_version(project_id=project_id, model_name=model_name, version_name=version_name)
        default_versions = (version for version in versions if version.get('isDefault', False))
        for version in default_versions:
            _, _, version_name = version['name'].rpartition('/')
            self.delete_version(project_id=project_id, model_name=model_name, version_name=version_name)
    def _append_label(self, model: dict) -> None:
        # Stamp the resource with the airflow version for traceability.
        model['labels'] = model.get('labels', {})
        model['labels']['airflow-version'] = _AIRFLOW_VERSION
| {
"content_hash": "22ad935b9128d48bcb58c51fa4cf50f4",
"timestamp": "",
"source": "github",
"line_count": 479,
"max_line_length": 109,
"avg_line_length": 41.48434237995825,
"alnum_prop": 0.6026873332997836,
"repo_name": "lyft/incubator-airflow",
"id": "b1b6e83918aa4188842043df3896759acd21c042",
"size": "20658",
"binary": false,
"copies": "3",
"ref": "refs/heads/main",
"path": "airflow/providers/google/cloud/hooks/mlengine.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "13715"
},
{
"name": "Dockerfile",
"bytes": "17280"
},
{
"name": "HTML",
"bytes": "161328"
},
{
"name": "JavaScript",
"bytes": "25360"
},
{
"name": "Jinja",
"bytes": "8565"
},
{
"name": "Jupyter Notebook",
"bytes": "2933"
},
{
"name": "Mako",
"bytes": "1339"
},
{
"name": "Python",
"bytes": "10019710"
},
{
"name": "Shell",
"bytes": "220780"
}
],
"symlink_target": ""
} |
import argparse
import sys
from getpass import getpass
from hashlib import md5
from mushi.core.db import db_session
from mushi.core.db.models import User
from mushi.core.exc import InvalidArgumentError
from mushi.factory import create_app
class Command():
    """CLI command for managing user accounts ('add' and 'list' subcommands)."""
    def __init__(self, argv, **kwargs):
        # argv[0] is used as the program name; the rest are subcommand args.
        self.argv = argv
    def __call__(self):
        """Parse self.argv and run the requested subcommand inside an app context."""
        # Create an application context.
        app = create_app(__name__, [])
        ctx = app.test_request_context()
        ctx.push()
        parser = argparse.ArgumentParser(
            prog=self.argv[0],
            description="Manage the user's account.")
        subparsers = parser.add_subparsers(dest='subcommand')
        subparsers.required = True
        sub = subparsers.add_parser('add', help='add a user')
        sub.add_argument('email', action='store', help="the email of the new user's account")
        sub.add_argument(
            '-n', '--name', dest='name', action='store',
            help='the full name of the user (default: email address)')
        # BUG FIX: the -p help text was a copy-paste of the --name help
        # ("the full name of the user"); it now describes the password.
        sub.add_argument(
            '-p', '--password', dest='password', action='store',
            help='the password of the user (will be asked if not provided)')
        sub = subparsers.add_parser('list', help='list users')
        args = parser.parse_args(self.argv[1:])
        if args.subcommand == 'add':
            new_user = User()
            new_user.email = args.email
            new_user.name = args.name or args.email
            if args.password:
                password = args.password
            else:
                # Prompt twice so a typo does not silently set a wrong password.
                password = getpass('password: ')
                if getpass('confirm: ') != password:
                    raise InvalidArgumentError('Passwords do not match.')
            # SECURITY(review): unsalted MD5 is unsuitable for password
            # storage; a dedicated KDF (bcrypt/scrypt/argon2) should be
            # used here — kept as-is to stay compatible with existing rows.
            new_user.password = md5(password.encode()).hexdigest()
            db_session.add(new_user)
            db_session.commit()
        elif args.subcommand == 'list':
            for user in db_session.query(User):
                print('name: {:>15}, email: {:>15}'.format(user.name, user.email))
        ctx.pop()
| {
"content_hash": "7ba4937ed6d07c0b5673abbdd03e883f",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 93,
"avg_line_length": 33.29032258064516,
"alnum_prop": 0.5760658914728682,
"repo_name": "kyouko-taiga/mushi",
"id": "808688ebd11e61df8d20db0914fb9acd4a6b05c1",
"size": "2644",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mushi/manage/commands/users.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "99671"
},
{
"name": "HTML",
"bytes": "63585"
},
{
"name": "JavaScript",
"bytes": "105700"
},
{
"name": "Python",
"bytes": "66546"
}
],
"symlink_target": ""
} |
from base.AwsService import AwsService
class S3(AwsService):
    """Driver for the S3 section of the AWS Simple Monthly Calculator form.

    Reads and writes the calculator's four S3 sub-sections: Standard Storage,
    Standard-Infrequent Access, Reduced Redundancy, and Data Transfer. Relies
    on the element/value helpers inherited from AwsService.
    """

    def set_serviceConfig(self, config):
        """AwsService entry point: write *config* into the S3 form."""
        self.set_s3Service(config)

    def get_serviceConfig(self):
        """AwsService entry point: read the current S3 form values."""
        return self.get_s3Service()

    def get_s3Service(self):
        """Collect every S3 input currently entered in the calculator form.

        Returns:
            dict: nested mapping of storage classes, request counts and
            data-transfer figures, mirroring the form's sub-sections.
        """
        ## Standard Storage:
        section = self.get_element('table.service.S3Service table.subSection:nth-child(1)')
        # Standard Storage size (value + unit, e.g. GB/TB)
        s3_size, s3_size_unit = self.get_val_and_type("table.SF_S3_STORAGE", section)
        # PUT/COPY/POST/LIST requests
        s3_req_put = int(self.get_value("table.SF_S3_PUT_COPY_POST_LIST_REQUESTS input", section))
        # GET and other requests
        s3_req_get = int(self.get_value("table.SF_S3_GET_OTHER_REQUESTS input", section))

        ## Standard - Infrequent Access Storage:
        section = self.get_element('table.service.S3Service table.subSection:nth-child(2)')
        # Infrequent Access Storage size
        ia_size, ia_size_unit = self.get_val_and_type("table.SF_S3_IA_STORAGE", section)
        # PUT/COPY/POST/LIST requests
        ia_req_put = int(self.get_value("table.SF_S3_PUT_COPY_POST_LIST_REQUESTS input", section))
        # GET and other requests
        ia_req_get = int(self.get_value("table.SF_S3_GET_OTHER_REQUESTS input", section))
        # Lifecycle Transitions
        ia_transitions = int(self.get_value("table.SF_S3_LIFECYCLE_TRANSITION_REQUESTS input", section))
        # Data Retrieval (value + unit)
        ia_retrieval, ia_retrieval_unit = self.get_val_and_type("table.SF_S3_DATA_RETRIEVALS", section)

        ## Reduced Redundancy Storage:
        section = self.get_element('table.service.S3Service table.subSection:nth-child(3)')
        # Reduced Redundancy Storage size
        rr_size, rr_size_unit = self.get_val_and_type("table.SF_S3_RR_STORAGE", section)
        # PUT/COPY/POST/LIST requests
        rr_req_put = int(self.get_value("table.SF_S3_PUT_COPY_POST_LIST_REQUESTS input", section))
        # GET and other requests
        rr_req_get = int(self.get_value("table.SF_S3_GET_OTHER_REQUESTS input", section))

        ## Data Transfer:
        section = self.get_element('table.service.S3Service table.subSection:nth-child(4)')
        # Inter-region data transfer out
        inter_region, inter_region_type = self.get_val_and_type("div.subContent > table:nth-child(1)", section)
        # Data transfer out (to the internet)
        internet_out, internet_out_type = self.get_val_and_type("div.subContent > table:nth-child(2)", section)
        # Data transfer in
        internet_in, internet_in_type = self.get_val_and_type("div.subContent > table:nth-child(3)", section)

        return {
            'StandardStorage': {
                'Size': {
                    'Value': s3_size,
                    'Type': s3_size_unit
                },
                'PutCopyPostListRequests': s3_req_put,
                'GetOtherRequests': s3_req_get,
            },
            'InfrequentAccessStorage': {
                'Size': {
                    'Value': ia_size,
                    'Type': ia_size_unit
                },
                'PutCopyPostListRequests': ia_req_put,
                'GetOtherRequests': ia_req_get,
                'LifecycleTransitions': ia_transitions,
                'DataRetrieval': {
                    'Value': ia_retrieval,
                    'Type': ia_retrieval_unit
                }
            },
            'ReducedRedundancy': {
                'Size': {
                    'Value': rr_size,
                    'Type': rr_size_unit
                },
                'PutCopyPostListRequests': rr_req_put,
                'GetOtherRequests': rr_req_get
            },
            "InterRegion": {
                "Value": inter_region,
                "Type": inter_region_type
            },
            "InternetSend": {
                "Value": internet_out,
                "Type": internet_out_type
            },
            "InternetReceive": {
                "Value": internet_in,
                "Type": internet_in_type
            }
        }

    def set_s3Service(self, conf):
        """Write *conf* (same shape as get_s3Service's return) into the form.

        Each top-level key is optional; absent sections are left untouched.
        """
        # Locate the S3 service table first (result intentionally unused;
        # the lookup ensures the section is present before filling it in).
        self.get_element('table.service.S3Service')

        ## Standard Storage:
        # (the original comment here wrongly said "Infrequent Access")
        section = self.get_element('table.service.S3Service table.subSection:nth-child(1)')
        if 'StandardStorage' in conf:
            s = conf['StandardStorage']
            # Standard Storage size
            self.set_val_and_type('table.SF_S3_STORAGE', s['Size'], section)
            # PUT/COPY/POST/LIST requests
            self.set_value('table.SF_S3_PUT_COPY_POST_LIST_REQUESTS input', s['PutCopyPostListRequests'], section, int)
            # GET and other requests
            self.set_value('table.SF_S3_GET_OTHER_REQUESTS input', s['GetOtherRequests'], section, int)

        ## Standard - Infrequent Access Storage:
        section = self.get_element('table.service.S3Service table.subSection:nth-child(2)')
        if 'InfrequentAccessStorage' in conf:
            s = conf['InfrequentAccessStorage']
            # Infrequent Access Storage size
            self.set_val_and_type('table.SF_S3_IA_STORAGE', s['Size'], section)
            # PUT/COPY/POST/LIST requests
            self.set_value('table.SF_S3_PUT_COPY_POST_LIST_REQUESTS input', s['PutCopyPostListRequests'], section, int)
            # GET and other requests
            self.set_value('table.SF_S3_GET_OTHER_REQUESTS input', s['GetOtherRequests'], section, int)
            # Lifecycle Transitions
            self.set_value('table.SF_S3_LIFECYCLE_TRANSITION_REQUESTS input', s['LifecycleTransitions'], section, int)
            # Data Retrieval
            self.set_val_and_type('table.SF_S3_DATA_RETRIEVALS', s['DataRetrieval'], section)

        ## Reduced Redundancy Storage:
        section = self.get_element('table.service.S3Service table.subSection:nth-child(3)')
        if 'ReducedRedundancy' in conf:
            s = conf['ReducedRedundancy']
            # Reduced Redundancy Storage size
            self.set_val_and_type('table.SF_S3_RR_STORAGE', s['Size'], section)
            # PUT/COPY/POST/LIST requests
            self.set_value('table.SF_S3_PUT_COPY_POST_LIST_REQUESTS input', s['PutCopyPostListRequests'], section, int)
            # GET and other requests
            self.set_value('table.SF_S3_GET_OTHER_REQUESTS input', s['GetOtherRequests'], section, int)

        ## Data Transfer:
        section = self.get_element('table.service.S3Service table.subSection:nth-child(4)')
        # Inter-region data transfer out
        if 'InterRegion' in conf:
            self.set_val_and_type('div.subContent > table:nth-child(1)', conf['InterRegion'], section)
        # Data transfer out (to the internet)
        if 'InternetSend' in conf:
            self.set_val_and_type('div.subContent > table:nth-child(2)', conf['InternetSend'], section)
        # Data transfer in
        if 'InternetReceive' in conf:
            self.set_val_and_type('div.subContent > table:nth-child(3)', conf['InternetReceive'], section)
| {
"content_hash": "ae8ed8ae08f3bc0ab4644b1a08f4381a",
"timestamp": "",
"source": "github",
"line_count": 143,
"max_line_length": 119,
"avg_line_length": 48.02097902097902,
"alnum_prop": 0.5768166593854668,
"repo_name": "mkimura3/auto-simple-calculator",
"id": "ea63a170af2579d658b04ff169305eded4eee6ae",
"size": "7258",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "AwsAutoEstimate/services/S3.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "98079"
},
{
"name": "Shell",
"bytes": "356"
}
],
"symlink_target": ""
} |
"""Classes to aid in working with the STIX2 patterning language."""
import base64
import binascii
import datetime
import re
from .utils import parse_into_datetime
def escape_quotes_and_backslashes(s):
    """Backslash-escape every backslash and single quote in *s*.

    Backslashes are doubled first so the escapes added for the quotes are
    not themselves re-escaped.
    """
    escaped = s.replace(u'\\', u'\\\\')
    return escaped.replace(u"'", u"\\'")
def quote_if_needed(x):
    """Wrap *x* in single quotes when it is an unquoted string containing '-'.

    Anything else (non-strings, hyphen-free strings, already-quoted strings)
    is returned unchanged.
    """
    needs_quoting = (
        isinstance(x, str)
        and "-" in x
        and not x.startswith("'")
    )
    return "'" + x + "'" if needs_quoting else x
class _Constant(object):
    """Base marker class for all STIX pattern constant types."""
    pass
class StringConstant(_Constant):
    """Pattern string constant.

    Args:
        value (str): string value
        from_parse_tree (bool): True when *value* came from the pattern
            parser and therefore is already escaped
    """
    def __init__(self, value, from_parse_tree=False):
        # Hand-built values still need their quotes/backslashes escaped;
        # parser-supplied values arrive pre-escaped.
        self.needs_to_be_quoted = not from_parse_tree
        self.value = value

    def __str__(self):
        if self.needs_to_be_quoted:
            rendered = escape_quotes_and_backslashes(self.value)
        else:
            rendered = self.value
        return "'%s'" % rendered
class TimestampConstant(_Constant):
    """Pattern timestamp constant.

    Args:
        value (datetime.datetime OR str): if string, must be a timestamp string
    """
    def __init__(self, value):
        try:
            parsed = parse_into_datetime(value)
        except Exception:
            raise ValueError("Must be a datetime object or timestamp string.")
        self.value = parsed

    def __str__(self):
        # Rendered in STIX pattern form: t'2016-01-01T00:00:00Z' (repr quotes it).
        return "t" + repr(self.value)
class IntegerConstant(_Constant):
    """Pattern integer constant.

    Args:
        value (int): integer value (anything int() accepts)
    """
    def __init__(self, value):
        try:
            parsed = int(value)
        except Exception:
            raise ValueError("must be an integer.")
        self.value = parsed

    def __str__(self):
        return str(self.value)
class FloatConstant(_Constant):
    """Pattern floating-point constant.

    Args:
        value (float): float value (anything float() accepts)
    """
    def __init__(self, value):
        try:
            parsed = float(value)
        except Exception:
            raise ValueError("must be a float.")
        self.value = parsed

    def __str__(self):
        return str(self.value)
class BooleanConstant(_Constant):
    """Pattern boolean constant.

    Args:
        value (bool OR str OR int):
            (bool) used as-is
            (str) 'true', 't', '1' for True; 'false', 'f', '0' for False
            (int) 1 for True; 0 for False
    """
    def __init__(self, value):
        if isinstance(value, bool):
            self.value = value
            return

        truthy = ('true', 't', '1')
        falsy = ('false', 'f', '0')
        try:
            lowered = value.lower()
        except AttributeError:
            # Not string-like; fall back to the numeric encodings.
            if value == 1:
                self.value = True
                return
            elif value == 0:
                self.value = False
                return
        else:
            if lowered in truthy:
                self.value = True
                return
            elif lowered in falsy:
                self.value = False
                return
        raise ValueError("must be a boolean value.")

    def __str__(self):
        # STIX patterns use lowercase 'true'/'false'.
        return str(self.value).lower()
# Map of normalized hash-algorithm keys (upper-cased, hyphens stripped by
# HashConstant) to (validation regex, canonical vocabulary name) pairs.
_HASH_REGEX = {
    "MD5": (r"^[a-fA-F0-9]{32}$", "MD5"),
    "MD6": (r"^[a-fA-F0-9]{32}|[a-fA-F0-9]{40}|[a-fA-F0-9]{56}|[a-fA-F0-9]{64}|[a-fA-F0-9]{96}|[a-fA-F0-9]{128}$", "MD6"),
    "RIPEMD160": (r"^[a-fA-F0-9]{40}$", "RIPEMD-160"),
    "SHA1": (r"^[a-fA-F0-9]{40}$", "SHA-1"),
    "SHA224": (r"^[a-fA-F0-9]{56}$", "SHA-224"),
    "SHA256": (r"^[a-fA-F0-9]{64}$", "SHA-256"),
    "SHA384": (r"^[a-fA-F0-9]{96}$", "SHA-384"),
    "SHA512": (r"^[a-fA-F0-9]{128}$", "SHA-512"),
    "SHA3224": (r"^[a-fA-F0-9]{56}$", "SHA3-224"),
    "SHA3256": (r"^[a-fA-F0-9]{64}$", "SHA3-256"),
    "SHA3384": (r"^[a-fA-F0-9]{96}$", "SHA3-384"),
    "SHA3512": (r"^[a-fA-F0-9]{128}$", "SHA3-512"),
    "SSDEEP": (r"^[a-zA-Z0-9/+:.]{1,128}$", "SSDEEP"),
    "WHIRLPOOL": (r"^[a-fA-F0-9]{128}$", "WHIRLPOOL"),
    "TLSH": (r"^[a-fA-F0-9]{70}$", "TLSH"),
}
class HashConstant(StringConstant):
    """Pattern hash constant: a StringConstant validated against a hash regex.

    Args:
        value (str): hash value
        type (str): hash algorithm name. Supported algorithms:
            "MD5", "MD6", "RIPEMD160", "SHA1", "SHA224", "SHA256",
            "SHA384", "SHA512", "SHA3224", "SHA3256", "SHA3384",
            "SHA3512", "SSDEEP", "WHIRLPOOL", "TLSH"
    """
    def __init__(self, value, type):
        # Normalize e.g. "sha-256" -> "SHA256" before the table lookup;
        # unknown algorithms are accepted without validation.
        key = type.upper().replace('-', '')
        if key in _HASH_REGEX:
            pattern, vocab_key = _HASH_REGEX[key]
            if re.match(pattern, value) is None:
                raise ValueError("'%s' is not a valid %s hash" % (value, vocab_key))
        super(HashConstant, self).__init__(value)
class BinaryConstant(_Constant):
    """Pattern binary constant.

    Args:
        value (str): base64 encoded string value
        from_parse_tree (bool): True when *value* may be wrapped in the
            pattern form b'...'
    """
    def __init__(self, value, from_parse_tree=False):
        # Accept either a bare base64 string or the pattern form b'...'.
        if from_parse_tree:
            m = re.match("^b'(.+)'$", value)
            if m:
                value = m.group(1)
        try:
            base64.b64decode(value)
        except (binascii.Error, TypeError):
            raise ValueError("must contain a base64 encoded string")
        self.value = value

    def __str__(self):
        return "b'{}'".format(self.value)
class HexConstant(_Constant):
    """Pattern hexadecimal constant.

    Args:
        value (str): hexadecimal value, bare or in the pattern form h'...'
        from_parse_tree (bool): True when *value* must carry the h'...' wrapper
    """
    def __init__(self, value, from_parse_tree=False):
        # A bare, even-length hex string is accepted directly ...
        if not from_parse_tree and re.match('^([a-fA-F0-9]{2})+$', value):
            self.value = value
            return
        # ... otherwise require the wrapped h'...' form.
        m = re.match("^h'(([a-fA-F0-9]{2})+)'$", value)
        if m:
            self.value = m.group(1)
        else:
            raise ValueError("must contain an even number of hexadecimal characters")

    def __str__(self):
        return "h'%s'" % self.value
class ListConstant(_Constant):
    """Pattern list constant.

    Args:
        values (list): list of values; non-_Constant entries are converted
            via make_constant()
    """
    def __init__(self, values):
        converted = []
        for item in values:
            if isinstance(item, _Constant):
                converted.append(item)
            else:
                converted.append(make_constant(item))
        self.value = converted

    def __str__(self):
        return "(%s)" % ", ".join(str(item) for item in self.value)
def make_constant(value):
    """Convert *value* to a pattern constant, best-effort.

    Args:
        value: value to convert; existing _Constant instances pass through.

    Raises:
        ValueError: when no constant type matches *value*.
    """
    if isinstance(value, _Constant):
        return value

    # Try timestamps first, since timestamp strings would otherwise match str.
    try:
        return TimestampConstant(value)
    except (ValueError, TypeError):
        pass

    # Order matters: bool must be tested before int (bool subclasses int).
    dispatch = (
        (str, StringConstant),
        (bool, BooleanConstant),
        (int, IntegerConstant),
        (float, FloatConstant),
        (list, ListConstant),
    )
    for value_type, constant_type in dispatch:
        if isinstance(value, value_type):
            return constant_type(value)

    raise ValueError("Unable to create a constant from %s" % value)
class _ObjectPathComponent(object):
    """Base class for object-path components; provides the factory method."""

    @staticmethod
    def create_ObjectPathComponent(component_name):
        """Build the appropriate component subtype from *component_name*."""
        # A quoted (StringConstant) name is always a basic component.
        if isinstance(component_name, StringConstant):
            return BasicObjectPathComponent(component_name.value, False)
        if component_name.endswith("_ref"):
            return ReferenceObjectPathComponent(component_name)
        if "[" in component_name:
            # e.g. "values[1]" -> property "values", index "1"
            pieces = component_name.split("[")
            return ListObjectPathComponent(pieces[0], pieces[1][:-1])
        return BasicObjectPathComponent(component_name, False)

    def __str__(self):
        return quote_if_needed(self.property_name)
class BasicObjectPathComponent(_ObjectPathComponent):
    """Basic object path component (for an observation or expression)

    By "Basic", implies that the object path component is not a
    list, object reference or further referenced property, i.e. terminal
    component

    Args:
        property_name (str): object property name
        is_key (bool): is dictionary key, default: False
    """
    def __init__(self, property_name, is_key):
        self.property_name = property_name
        # is_key is currently accepted but ignored (see TODO below).
        # TODO: set is_key to True if this component is a dictionary key
        # self.is_key = is_key
class ListObjectPathComponent(_ObjectPathComponent):
    """Object path component addressing one element of a list property.

    Args:
        property_name (str): list object property name
        index (int): index of the list property's value that is specified
    """
    def __init__(self, property_name, index):
        self.property_name = property_name
        self.index = index

    def __str__(self):
        return "{0}[{1}]".format(quote_if_needed(self.property_name), self.index)
class ReferenceObjectPathComponent(_ObjectPathComponent):
    """Reference object path component (for an observation or expression)

    Args:
        reference_property_name (str): reference object property name
    """
    def __init__(self, reference_property_name):
        # Stored under the generic attribute name used by the base __str__.
        self.property_name = reference_property_name
class ObjectPath(object):
    """Pattern operand object (property) path, e.g. "file:name".

    Args:
        object_type_name (str): name of the object type
        property_path (list of _ObjectPathComponent OR str): path components;
            plain strings are converted via the component factory
    """
    def __init__(self, object_type_name, property_path):
        self.object_type_name = object_type_name
        self.property_path = []
        for component in property_path:
            if not isinstance(component, _ObjectPathComponent):
                component = _ObjectPathComponent.create_ObjectPathComponent(component)
            self.property_path.append(component)

    def __str__(self):
        rendered_path = ".".join("%s" % quote_if_needed(part) for part in self.property_path)
        return "%s:%s" % (self.object_type_name, rendered_path)

    def merge(self, other):
        """Append *other*'s components to this path; returns self for chaining."""
        self.property_path.extend(other.property_path)
        return self

    @staticmethod
    def make_object_path(lhs):
        """Create an ObjectPath from a string such as "file:hashes.'SHA-256'".

        Args:
            lhs (str): object path of left-hand-side component of expression
        """
        pieces = lhs.split(":")
        return ObjectPath(pieces[0], pieces[1].split("."))
class _PatternExpression(object):
    """Base marker class for all STIX pattern expression types."""
    pass
class _ComparisonExpression(_PatternExpression):
    """Pattern comparison expression: <object path> <operator> <constant>.

    Args:
        operator (str): operator of comparison expression
        lhs (ObjectPath OR str): left-hand-side object path
        rhs (_Constant OR value): right-hand side; converted via make_constant
        negated (bool): comparison expression negated. Default: False
    """
    def __init__(self, operator, lhs, rhs, negated=False):
        # An equality test against a list is really a membership test.
        if operator == "=" and isinstance(rhs, (ListConstant, list)):
            self.operator = "IN"
        else:
            self.operator = operator
        self.lhs = lhs if isinstance(lhs, ObjectPath) else ObjectPath.make_object_path(lhs)
        self.rhs = rhs if isinstance(rhs, _Constant) else make_constant(rhs)
        self.negated = negated
        # The only object type this comparison can match.
        self.root_types = {self.lhs.object_type_name}

    def __str__(self):
        if self.negated:
            template = "%s NOT %s %s"
        else:
            template = "%s %s %s"
        return template % (self.lhs, self.operator, self.rhs)
class EqualityComparisonExpression(_ComparisonExpression):
    """Comparison expression using the '=' operator.

    Args:
        lhs (ObjectPath OR str): left-hand side of the comparison
        rhs: right-hand side of the comparison
        negated (bool): whether the comparison is negated (default: False)
    """
    def __init__(self, lhs, rhs, negated=False):
        _ComparisonExpression.__init__(self, "=", lhs, rhs, negated)
class GreaterThanComparisonExpression(_ComparisonExpression):
    """Comparison expression using the '>' operator.

    Args:
        lhs (ObjectPath OR str): left-hand side of the comparison
        rhs: right-hand side of the comparison
        negated (bool): whether the comparison is negated (default: False)
    """
    def __init__(self, lhs, rhs, negated=False):
        _ComparisonExpression.__init__(self, ">", lhs, rhs, negated)
class LessThanComparisonExpression(_ComparisonExpression):
    """Comparison expression using the '<' operator.

    Args:
        lhs (ObjectPath OR str): left-hand side of the comparison
        rhs: right-hand side of the comparison
        negated (bool): whether the comparison is negated (default: False)
    """
    def __init__(self, lhs, rhs, negated=False):
        _ComparisonExpression.__init__(self, "<", lhs, rhs, negated)
class GreaterThanEqualComparisonExpression(_ComparisonExpression):
    """Comparison expression using the '>=' operator.

    Args:
        lhs (ObjectPath OR str): left-hand side of the comparison
        rhs: right-hand side of the comparison
        negated (bool): whether the comparison is negated (default: False)
    """
    def __init__(self, lhs, rhs, negated=False):
        _ComparisonExpression.__init__(self, ">=", lhs, rhs, negated)
class LessThanEqualComparisonExpression(_ComparisonExpression):
    """Comparison expression using the '<=' operator.

    Args:
        lhs (ObjectPath OR str): left-hand side of the comparison
        rhs: right-hand side of the comparison
        negated (bool): whether the comparison is negated (default: False)
    """
    def __init__(self, lhs, rhs, negated=False):
        _ComparisonExpression.__init__(self, "<=", lhs, rhs, negated)
class InComparisonExpression(_ComparisonExpression):
    """Comparison expression using the 'IN' (membership) operator.

    Args:
        lhs (ObjectPath OR str): left-hand side of the comparison
        rhs: right-hand side of the comparison
        negated (bool): whether the comparison is negated (default: False)
    """
    def __init__(self, lhs, rhs, negated=False):
        _ComparisonExpression.__init__(self, "IN", lhs, rhs, negated)
class LikeComparisonExpression(_ComparisonExpression):
    """Comparison expression using the 'LIKE' operator.

    Args:
        lhs (ObjectPath OR str): left-hand side of the comparison
        rhs: right-hand side of the comparison
        negated (bool): whether the comparison is negated (default: False)
    """
    def __init__(self, lhs, rhs, negated=False):
        _ComparisonExpression.__init__(self, "LIKE", lhs, rhs, negated)
class MatchesComparisonExpression(_ComparisonExpression):
    """Comparison expression using the 'MATCHES' (regex) operator.

    Args:
        lhs (ObjectPath OR str): left-hand side of the comparison
        rhs: right-hand side of the comparison
        negated (bool): whether the comparison is negated (default: False)
    """
    def __init__(self, lhs, rhs, negated=False):
        _ComparisonExpression.__init__(self, "MATCHES", lhs, rhs, negated)
class IsSubsetComparisonExpression(_ComparisonExpression):
    """Comparison expression using the 'ISSUBSET' operator.

    Args:
        lhs (ObjectPath OR str): left-hand side of the comparison
        rhs: right-hand side of the comparison
        negated (bool): whether the comparison is negated (default: False)
    """
    def __init__(self, lhs, rhs, negated=False):
        _ComparisonExpression.__init__(self, "ISSUBSET", lhs, rhs, negated)
class IsSupersetComparisonExpression(_ComparisonExpression):
    """Comparison expression using the 'ISSUPERSET' operator.

    Args:
        lhs (ObjectPath OR str): left-hand side of the comparison
        rhs: right-hand side of the comparison
        negated (bool): whether the comparison is negated (default: False)
    """
    def __init__(self, lhs, rhs, negated=False):
        _ComparisonExpression.__init__(self, "ISSUPERSET", lhs, rhs, negated)
class _BooleanExpression(_PatternExpression):
    """Boolean pattern expression combining comparison expressions.

    Tracks `root_types`, the set of object types the combined expression can
    still be satisfied by: intersected for AND, unioned for OR.

    Args:
        operator (str): boolean operator ("AND" or "OR")
        operands (list): boolean operands

    Raises:
        ValueError: when an AND's operands share no common object type.
    """
    def __init__(self, operator, operands):
        self.operator = operator
        self.operands = list(operands)
        for arg in self.operands:
            if not hasattr(self, "root_types"):
                # Copy the first operand's set: the previous code aliased it,
                # and the in-place &=/|= below then mutated that operand's own
                # root_types, corrupting it for reuse in other expressions.
                self.root_types = set(arg.root_types)
            elif operator == "AND":
                self.root_types &= arg.root_types
            else:
                self.root_types |= arg.root_types
        if not self.root_types:
            raise ValueError("All operands to an 'AND' expression must be satisfiable with the same object type")

    def __str__(self):
        sub_exprs = []
        for o in self.operands:
            sub_exprs.append(str(o))
        return (" " + self.operator + " ").join(sub_exprs)
class AndBooleanExpression(_BooleanExpression):
    """'AND' boolean pattern expression; all operands must share a root object type.

    Args:
        operands (list): AND operands
    """
    def __init__(self, operands):
        _BooleanExpression.__init__(self, "AND", operands)
class OrBooleanExpression(_BooleanExpression):
    """'OR' boolean pattern expression.

    Args:
        operands (list): OR operands
    """
    def __init__(self, operands):
        _BooleanExpression.__init__(self, "OR", operands)
class ObservationExpression(_PatternExpression):
    """Observation expression: renders its operand wrapped in [...] brackets.

    Args:
        operand: observation expression operand
    """
    def __init__(self, operand):
        self.operand = operand

    def __str__(self):
        # Operands that are themselves observation expressions (simple or
        # compound) already carry their own brackets.
        if isinstance(self.operand, (ObservationExpression, _CompoundObservationExpression)):
            return "%s" % self.operand
        return "[%s]" % self.operand
class _CompoundObservationExpression(_PatternExpression):
    """Compound observation expression joining operands with an operator.

    Args:
        operator (str): compound observation operator
        operands: compound observation operands
    """
    def __init__(self, operator, operands):
        self.operator = operator
        self.operands = operands

    def __str__(self):
        joiner = " %s " % self.operator
        return joiner.join("%s" % operand for operand in self.operands)
class AndObservationExpression(_CompoundObservationExpression):
    """'AND' compound observation pattern expression.

    Args:
        operands: compound observation operands
    """
    def __init__(self, operands):
        _CompoundObservationExpression.__init__(self, "AND", operands)
class OrObservationExpression(_CompoundObservationExpression):
    """'OR' compound observation pattern expression.

    Args:
        operands: compound observation operands
    """
    def __init__(self, operands):
        _CompoundObservationExpression.__init__(self, "OR", operands)
class FollowedByObservationExpression(_CompoundObservationExpression):
    """'FOLLOWEDBY' compound observation pattern expression.

    Args:
        operands: compound observation operands
    """
    def __init__(self, operands):
        _CompoundObservationExpression.__init__(self, "FOLLOWEDBY", operands)
class ParentheticalExpression(_PatternExpression):
    """Parenthesized observation expression.

    Args:
        exp: the wrapped observation expression
    """
    def __init__(self, exp):
        self.expression = exp
        # Propagate root-type information when the wrapped expression has it.
        try:
            self.root_types = exp.root_types
        except AttributeError:
            pass

    def __str__(self):
        return "(%s)" % self.expression
class _ExpressionQualifier(_PatternExpression):
    """Base marker class for observation-expression qualifiers (REPEATS/WITHIN/START-STOP)."""
    pass
class RepeatQualifier(_ExpressionQualifier):
    """'REPEATS n TIMES' qualifier.

    Args:
        times_to_repeat (int OR IntegerConstant): repetition count
    """
    def __init__(self, times_to_repeat):
        if isinstance(times_to_repeat, int):
            times_to_repeat = IntegerConstant(times_to_repeat)
        if not isinstance(times_to_repeat, IntegerConstant):
            raise ValueError("%s is not a valid argument for a Repeat Qualifier" % times_to_repeat)
        self.times_to_repeat = times_to_repeat

    def __str__(self):
        return "REPEATS %s TIMES" % self.times_to_repeat
class WithinQualifier(_ExpressionQualifier):
    """'WITHIN n SECONDS' qualifier.

    Args:
        number_of_seconds (int OR IntegerConstant): window length in seconds
    """
    def __init__(self, number_of_seconds):
        if isinstance(number_of_seconds, int):
            number_of_seconds = IntegerConstant(number_of_seconds)
        if not isinstance(number_of_seconds, IntegerConstant):
            raise ValueError("%s is not a valid argument for a Within Qualifier" % number_of_seconds)
        self.number_of_seconds = number_of_seconds

    def __str__(self):
        return "WITHIN %s SECONDS" % self.number_of_seconds
class StartStopQualifier(_ExpressionQualifier):
    """'START t1 STOP t2' qualifier.

    Args:
        start_time (TimestampConstant OR datetime.date OR StringConstant)
        stop_time (TimestampConstant OR datetime.date OR StringConstant)
    """
    @staticmethod
    def _coerce_time(value):
        # Normalize the accepted input forms; both endpoints share this logic.
        if isinstance(value, TimestampConstant):
            return value
        elif isinstance(value, datetime.date):
            return TimestampConstant(value)
        elif isinstance(value, StringConstant):
            return StringConstant(value.value)
        raise ValueError("%s is not a valid argument for a Start/Stop Qualifier" % value)

    def __init__(self, start_time, stop_time):
        self.start_time = self._coerce_time(start_time)
        self.stop_time = self._coerce_time(stop_time)

    def __str__(self):
        return "START %s STOP %s" % (self.start_time, self.stop_time)
class QualifiedObservationExpression(_PatternExpression):
    """Observation expression paired with a qualifier (REPEATS/WITHIN/START-STOP).

    Args:
        observation_expression: pattern observation expression
        qualifier (_ExpressionQualifier): pattern expression qualifier
    """
    def __init__(self, observation_expression, qualifier):
        self.observation_expression = observation_expression
        self.qualifier = qualifier

    def __str__(self):
        return "{0} {1}".format(self.observation_expression, self.qualifier)
| {
"content_hash": "bd643795644af417cad731af86e49399",
"timestamp": "",
"source": "github",
"line_count": 704,
"max_line_length": 146,
"avg_line_length": 33.13210227272727,
"alnum_prop": 0.6247374062165059,
"repo_name": "oasis-open/cti-python-stix2",
"id": "c53a83f729ae564f85886f24f33e01b6f18a2974",
"size": "23325",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "stix2/patterns.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "1737742"
}
],
"symlink_target": ""
} |
from meowurl import app, db
from meowurl.dbmodels import InviteCode
@app.cli.command('initdb')
def initdb():
    """Flask CLI command: create all database tables for the registered models."""
    db.create_all()
@app.cli.command('dropdb')
def dropdb():
    """Flask CLI command: drop all database tables (destructive)."""
    db.drop_all()
@app.cli.command('gencode')
def gencode():
    """Flask CLI command: generate one invite code, persist it, and print it."""
    # generate_code(1) presumably returns a sequence; take its single element.
    code = InviteCode.generate_code(1)[0]
    db.session.add(code)
    db.session.commit()
    print(code)
| {
"content_hash": "ba52b712caae3ca9cbf7dfd8baee567e",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 41,
"avg_line_length": 17.2,
"alnum_prop": 0.6715116279069767,
"repo_name": "maomihz/MeowURL",
"id": "b5e617221099d0f06a3d508bc9a77ad8730d76e0",
"size": "344",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "meowurl/cli.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "33523"
},
{
"name": "Python",
"bytes": "29210"
}
],
"symlink_target": ""
} |
from jupyterworkflow.data import get_fremont_data
import pandas as pd
import numpy as np
def test_fremont_data():
    """Smoke-test the Fremont dataset loader: columns, index type, hourly coverage."""
    frame = get_fremont_data()
    assert all(frame.columns == ['west', 'east', 'Total'])
    assert isinstance(frame.index, pd.DatetimeIndex)
    # Every hour of the day should be represented in the index.
    assert len(np.unique(frame.index.time)) == 24
| {
"content_hash": "c612dc2fdddcfa4be813599782fea878",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 57,
"avg_line_length": 30.5,
"alnum_prop": 0.7049180327868853,
"repo_name": "muxuezi/jupyterworkflow",
"id": "5236528405a0102389fc69a4b50520aba2c39e84",
"size": "305",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "103bestpractice/jupyterworkflow/test/test_data.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "1324530"
},
{
"name": "Makefile",
"bytes": "34"
},
{
"name": "Python",
"bytes": "1314"
}
],
"symlink_target": ""
} |
"""
MIT License
Copyright (c) 2016-2018 Madcore Ltd
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import collections
import yamlordereddictloader
import os
import yaml
import subprocess
#from fabric.api import *
import re
from os.path import expanduser
import errno
import cmdkubectl
import os.path
from static import Static
from cmd import Cmd
class Struct:
    """Lightweight record type: exposes keyword arguments as instance attributes."""

    def __init__(self, **entries):
        for attr_name, attr_value in entries.items():
            setattr(self, attr_name, attr_value)
class Settings(object):
# full arguments object passed
args = None
aws_zone = None
master_ip = None
ingress_ips = []
cluster = None
provision = None
elements = None
#current_context = None
#data_path = None
#config = None
#config_locust = None
settings = None
filepath_settings = None
filepath_clusterfile = None
folder_user = None
folder_user_populated = None
folder_user_clusters = None
folder_app_templates = None
folder_app_clusters = None
    def __init__(self, args):
        """Store parsed CLI args and bootstrap folder layout and settings file.

        The three setup calls are defined elsewhere in this class; they
        populate the folder_* attributes and load self.settings.
        """
        self.args = args
        self.set_user_folders()
        self.set_app_folders()
        self.load_settings_file()
def set_zone(self):
if self.provision.cloud == "aws":
self.aws_zone = "{0}{1}".format(
self.provision.region, self.provision.zone_id)
    def set_clusterfile(self):
        """Resolve which clusterfile path to use and store it in self.settings.

        Precedence: --provision arg, then --clusterfile arg, then the path
        already in self.settings. Args are resolved against the app-space
        clusters folder first, then the user-space one. Exits the process
        (SystemExit 98/99) when no candidate file exists on disk.
        """
        # name is file without extension
        # save name and filename
        # init has to replace internal name

        #is in args
        # is in app or user space
        #is same as settings
        #pass

        def switch_check(in_passed):
            # Legacy helper retained from an earlier implementation; only the
            # dead code in the triple-quoted block below ever called it.
            # check if different
            different = False
            if in_passed != self.settings.clusterfile:
                different = True
            else:
                return False
            if different:
                if os.path.isfile(os.path.join(os.path.join(os.path.dirname(os.path.realpath(__file__)), "clusters/"), in_passed)):
                    return True
                else:
                    Static.msg_bold(
                        "Error", "Clusterfile file {0} does not exist. Cannot continue.".format(in_passed))
                    raise SystemExit(99)

        # NOTE(review): these two candidate paths are computed but never used.
        filepath_app_settings_clusterfile = os.path.join(self.folder_app_clusters, self.settings.clusterfile)
        filepath_user_settings_clusterfile = os.path.join(self.folder_user_clusters, self.settings.clusterfile)

        # --provision takes precedence over --clusterfile.
        args_clusterfile = None
        if self.args.provision:
            args_clusterfile = self.args.provision
        elif self.args.clusterfile:
            args_clusterfile = self.args.clusterfile

        filepath_app_args_clusterfile = None
        filepath_user_args_clusterfile = None
        if args_clusterfile:
            # args present - settings UPDATE will happen
            filepath_app_args_clusterfile = os.path.join(self.folder_app_clusters, args_clusterfile)
            filepath_user_args_clusterfile = os.path.join(self.folder_user_clusters, args_clusterfile)
            # check which one exist
            # put that one in settings
            if os.path.isfile(filepath_app_args_clusterfile):
                self.settings.clusterfile = filepath_app_args_clusterfile
            elif os.path.isfile(filepath_user_args_clusterfile):
                self.settings.clusterfile = filepath_user_args_clusterfile
            else:
                Static.msg_bold("Clusterfile Not Found. Cannot Continue. Run madcore with -c flag", filepath_app_args_clusterfile)
                raise SystemExit(98)
        else:
            # using from settings, check if exists
            if os.path.isfile(self.settings.clusterfile):
                Static.msg("Default clusterfile remains as", self.settings.clusterfile)
            else:
                Static.msg_bold("Clusterfile Not Found. Cannot Continue. Run madcore with -c flag to specify new.", self.settings.clusterfile)
                raise SystemExit(99)
        # Dead code kept as-is below (string literal, never executed).
        '''
        # switch if required
        switched = False
        if self.args.provision:
            if switch_check(self.args.provision):
                self.settings.settings = self.args.provision
                switched = True
        elif self.args.clusterfile:
            if switch_check(self.args.clusterfile):
                self.settings.settings = self.args.clusterfile
                switched = True
        if switched:
            self.save_settings_file()
            Static.msg("Default clusterfile set to", self.settings.clusterfile)
        else:
            Static.msg("Default clusterfile remains as", self.settings.clusterfile)
        '''
def load_clusterfile(self):
    """Load the active clusterfile (YAML) into self.cluster, self.provision and self.elements.

    Aborts with SystemExit(99) when the configured clusterfile does not exist.
    Previously the missing-file branch only printed a warning and fell through,
    crashing with a TypeError on Struct(**None).
    """
    if not os.path.isfile(self.settings.clusterfile):
        Static.msg_bold("Clusterfile not found", self.settings.clusterfile)
        raise SystemExit(99)
    # Close the handle explicitly; yaml.load(open(...)) leaked it.
    with open(self.settings.clusterfile) as clusterfile_handle:
        clusterfile_data = yaml.load(clusterfile_handle,
                                     Loader=yamlordereddictloader.Loader)
    clusterfile_struct = Struct(**clusterfile_data)
    self.cluster = Struct(**clusterfile_struct.cluster)
    self.provision = Struct(**clusterfile_struct.provision)
    self.elements = clusterfile_data['elements']
def initialize_new_clusterfile(self):
clusterfile_src = os.path.join(self.folder_app_clusters, self.args.init[0])
clusterfile_dst = os.path.join(self.folder_user_clusters, self.args.init[1])
if os.path.isfile(clusterfile_dst):
print "DESTINATION FILE {0} ALREADY EXISTS".format(clusterfile_dst)
if not os.path.isfile(clusterfile_src):
print "SOURCE FILE {0} NOT FOUND".format(clusterfile_src)
Static.msg("Initializing New Clusterfile", clusterfile_dst)
Cmd.local_run_get_out("INITIATED CLUSTERFILE {0}".format(clusterfile_src),
"cp {0} {1}".format(clusterfile_src, clusterfile_dst)
)
self.settings.clusterfile = clusterfile_dst
self.save_settings_file()
#new_template_name = args.init[0].split(".")
#Cmd.local_run_get_out("UPDATING TEMPLATE NAME", "cp {0} {1}".format(clusterfile_src, clusterfile_dst))
#sed -i 's/name:*/name: {0}/g'.format(args.init[1]) clusterfile_dst
# new file goes to ~/.madcore/templates
#self.settings.folder_clusters #copy to this folder form existing
def set_user_folders(self):
    """Resolve the per-user ~/.madcore folder layout and create any missing subfolders."""
    self.folder_user = os.path.join(expanduser("~"), ".madcore")
    self.folder_user_populated = os.path.join(self.folder_user, "rendered")
    self.folder_user_clusters = os.path.join(self.folder_user, "clusters")
    # Same creation order as before: rendered/ first, then clusters/.
    for subfolder in (self.folder_user_populated, self.folder_user_clusters):
        if not os.path.exists(subfolder):
            self.mkdir_p(subfolder)
def mkdir_p(self, path):
    """Create *path* and any missing parents; an already-existing directory is fine (mkdir -p)."""
    try:
        os.makedirs(path)
    except OSError as exc:  # Python >2.5
        # Re-raise anything that is not "directory already exists".
        if not (exc.errno == errno.EEXIST and os.path.isdir(path)):
            raise
def set_app_folders(self):
    """Point the app folder attributes at the templates/ and clusters/ dirs shipped next to this module."""
    app_root = os.path.dirname(os.path.realpath(__file__))
    # os.path.join with a single path argument was a no-op in the original;
    # the nesting is dropped without changing the resulting paths.
    self.folder_app_templates = os.path.join(app_root, "templates/")
    self.folder_app_clusters = os.path.join(app_root, "clusters/")
def load_settings_file(self):
    """Load settings.yaml from the user folder into self.settings, creating a default file on first run."""
    self.filepath_settings = os.path.join(self.folder_user, 'settings.yaml')
    if os.path.isfile(self.filepath_settings):
        # Fix: close the handle (it was leaked via yaml.load(open(...))).
        with open(self.filepath_settings) as settings_handle:
            settings_raw = yaml.load(settings_handle)
        settings_raw_struct = Struct(**settings_raw)
        self.settings = Struct(**settings_raw_struct.settings)
    else:
        # NOTE(review): 'clusterfile' must be nested under 'settings:' for the
        # YAML to parse into a mapping (Struct(**...) requires it). The source
        # copy lost its indentation -- confirm two spaces matches the original.
        new_settings = """settings:
  clusterfile: minikube.yaml
"""
        with open(self.filepath_settings, 'wb') as settings_file:
            settings_file.write(new_settings)
        # Recurse once to load the freshly written defaults.
        self.load_settings_file()
def save_settings_file(self):
    """Serialize self.settings back to settings.yaml in block (non-flow) style."""
    payload = dict()
    payload['settings'] = self.settings.__dict__
    with open(self.filepath_settings, 'w') as settings_file:
        settings_file.write(yaml.dump(payload, default_flow_style=False))
def get_populated_filename(self, name):
    """Return the path of *name* inside the user's rendered/ folder."""
    populated_path = os.path.join(self.folder_user_populated, name)
    return populated_path
| {
"content_hash": "966402b3eaf4a1b614fa9fd2de54ada9",
"timestamp": "",
"source": "github",
"line_count": 296,
"max_line_length": 142,
"avg_line_length": 36.79391891891892,
"alnum_prop": 0.6341933706730328,
"repo_name": "madcore-ai/cli",
"id": "f78589ba0eb546c2450779c98c8bc4229aec4cbb",
"size": "10891",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "madcore/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "64157"
},
{
"name": "Shell",
"bytes": "489"
}
],
"symlink_target": ""
} |
from elasticsearch_dsl import mapping
def test_mapping_saved_into_es(write_client):
    """Two doc-type mappings saved into one index must both appear in get_mapping."""
    for doc_type, analyzed_field, raw_field in (
            ('test-type', 'name', 'tags'),
            ('other-type', 'title', 'categories')):
        m = mapping.Mapping(doc_type)
        # .field() mutates and returns the mapping; split calls behave the
        # same as the original chained form.
        m.field(analyzed_field, 'string')
        m.field(raw_field, 'string', index='not_analyzed')
        m.save('test-mapping', using=write_client)

    assert write_client.indices.exists_type(index='test-mapping', doc_type='test-type')
    expected = {
        'test-mapping': {
            'mappings': {
                'test-type': {
                    'properties': {
                        'name': {'type': 'string'},
                        'tags': {'index': 'not_analyzed', 'type': 'string'}
                    }
                },
                'other-type': {
                    'properties': {
                        'title': {'type': 'string'},
                        'categories': {'index': 'not_analyzed', 'type': 'string'}
                    }
                }
            }
        }
    }
    assert expected == write_client.indices.get_mapping(index='test-mapping')
def test_mapping_gets_updated_from_es(write_client):
    """Mapping.from_es must reconstruct a mapping previously created directly in ES.

    Creates an index with a nested 'my_doc' mapping via the raw client, pulls it
    back with Mapping.from_es, and checks both the property names and the full
    to_dict() round-trip (including ES-added defaults such as the
    'dateOptionalTime' date format).
    """
    write_client.indices.create(
        index='test-mapping',
        body={
            'settings': {'number_of_shards': 1, 'number_of_replicas': 0},
            'mappings': {
                'my_doc': {
                    'properties': {
                        'title': {
                            'type': 'string',
                            'analyzer': 'snowball',
                            'fields': {
                                'raw': {'type': 'string', 'index': 'not_analyzed'}
                            }
                        },
                        'created_at': {'type': 'date'},
                        'comments': {
                            'type': 'nested',
                            'properties': {
                                'created': {'type': 'date'},
                                'author': {
                                    'type': 'string',
                                    'analyzer': 'snowball',
                                    'fields': {
                                        'raw': {'type': 'string', 'index': 'not_analyzed'}
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    )
    m = mapping.Mapping.from_es('test-mapping', 'my_doc', using=write_client)
    # Property names survive the round-trip.
    assert ['comments', 'created_at', 'title'] == list(sorted(m.properties.properties._d_.keys()))
    # Full structure matches what ES stores, defaults included.
    assert {
        'my_doc': {
            'properties': {
                'comments': {
                    'type': 'nested',
                    'properties': {
                        'created': {'type': 'date', 'format': 'dateOptionalTime'},
                        'author': {'analyzer': 'snowball', 'fields': {'raw': {'index': 'not_analyzed', 'type': 'string'}}, 'type': 'string'}
                    },
                },
                'created_at': {'format': 'dateOptionalTime', 'type': 'date'},
                'title': {'analyzer': 'snowball', 'fields': {'raw': {'index': 'not_analyzed', 'type': 'string'}}, 'type': 'string'}
            }
        }
    } == m.to_dict()
| {
"content_hash": "9de65205ad5a9e2ceb2f9b5889878b2c",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 140,
"avg_line_length": 38.57471264367816,
"alnum_prop": 0.3787246722288439,
"repo_name": "ngokevin/elasticsearch-dsl-py",
"id": "3c0492adb15db8652b4cd452267b9765378b68be",
"size": "3356",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "test_elasticsearch_dsl/test_integration/test_mapping.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "142626"
}
],
"symlink_target": ""
} |
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: add the 'review' text field to BookReview.

    Auto-generated by South; the frozen ORM state below must not be edited
    by hand.
    """

    def forwards(self, orm):
        """Apply: add the column with a temporary default so existing rows validate."""
        # Adding field 'BookReview.review'
        db.add_column(u'demo4_bookreview', 'review',
                      self.gf('django.db.models.fields.TextField')(default=u'-'),
                      keep_default=False)

    def backwards(self, orm):
        """Revert: drop the column added in forwards()."""
        # Deleting field 'BookReview.review'
        db.delete_column(u'demo4_bookreview', 'review')

    # Frozen ORM snapshot South uses to build the 'orm' object passed above.
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'demo4.author': {
            'Meta': {'ordering': "('last_name',)", 'object_name': 'Author'},
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '40'})
        },
        u'demo4.book': {
            'Meta': {'ordering': "('title',)", 'object_name': 'Book'},
            'abstract': ('django.db.models.fields.TextField', [], {}),
            'authors': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'book_authors'", 'symmetrical': 'False', 'through': u"orm['demo4.BookAuthors']", 'to': u"orm['demo4.Author']"}),
            'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'demo4_book_created_by'", 'to': u"orm['auth.User']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
            'price_net': ('django.db.models.fields.DecimalField', [], {'max_digits': '8', 'decimal_places': '2'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        u'demo4.bookauthors': {
            'Meta': {'ordering': "('order',)", 'object_name': 'BookAuthors'},
            'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['demo4.Author']"}),
            'book': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['demo4.Book']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'order': ('django.db.models.fields.PositiveIntegerField', [], {})
        },
        u'demo4.bookreview': {
            'Meta': {'ordering': "('-created_at',)", 'object_name': 'BookReview'},
            'book': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'review_book'", 'to': u"orm['demo4.Book']"}),
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'review_user'", 'to': u"orm['auth.User']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'review': ('django.db.models.fields.TextField', [], {})
        }
    }
complete_apps = ['demo4'] | {
"content_hash": "4e063730e116da6c8e1301ac2765944b",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 209,
"avg_line_length": 68.67032967032966,
"alnum_prop": 0.5525684109457513,
"repo_name": "stephanpoetschner/django-vienna",
"id": "06efb08c5139aa48f15ec61d8c8a36a566a13c28",
"size": "6273",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "2014/03/django-rest-framework/src/demo4/migrations/0003_auto__add_field_bookreview_review.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "131200"
},
{
"name": "HTML",
"bytes": "51555"
},
{
"name": "JavaScript",
"bytes": "215136"
},
{
"name": "Python",
"bytes": "85666"
}
],
"symlink_target": ""
} |
import builtins
import copyreg
import gc
import itertools
import math
import pickle
import sys
import types
import unittest
import warnings
import weakref
from copy import deepcopy
from test import support
class OperatorsTest(unittest.TestCase):
def __init__(self, *args, **kwargs):
    """Build name -> eval-able expression tables for binary and unary operators."""
    unittest.TestCase.__init__(self, *args, **kwargs)
    binop_symbols = {
        'add': '+',
        'sub': '-',
        'mul': '*',
        'matmul': '@',
        'truediv': '/',
        'floordiv': '//',
        'divmod': 'divmod',
        'pow': '**',
        'lshift': '<<',
        'rshift': '>>',
        'and': '&',
        'xor': '^',
        'or': '|',
        'cmp': 'cmp',
        'lt': '<',
        'le': '<=',
        'eq': '==',
        'ne': '!=',
        'gt': '>',
        'ge': '>=',
    }
    # Lower-case entries are function names (e.g. "divmod"); everything else
    # is an infix symbol.
    self.binops = {
        name: (sym + "(a, b)" if sym.islower() else 'a %s b' % sym)
        for name, sym in binop_symbols.items()
    }
    unop_symbols = {
        'pos': '+',
        'neg': '-',
        'abs': 'abs',
        'invert': '~',
        'int': 'int',
        'float': 'float',
    }
    self.unops = {
        name: (sym + "(a)" if sym.islower() else '%s a' % sym)
        for name, sym in unop_symbols.items()
    }
def unop_test(self, a, res, expr="len(a)", meth="__len__"):
    """Check a unary operation three ways: eval'd expression, unbound method, bound method."""
    d = {'a': a}
    self.assertEqual(eval(expr, d), res)
    t = type(a)
    m = getattr(t, meth)
    # Find method in parent class
    while meth not in t.__dict__:
        t = t.__bases__[0]
    # in some implementations (e.g. PyPy), 'm' can be a regular unbound
    # method object; the getattr() below obtains its underlying function.
    self.assertEqual(getattr(m, 'im_func', m), t.__dict__[meth])
    self.assertEqual(m(a), res)
    bm = getattr(a, meth)
    self.assertEqual(bm(), res)
def binop_test(self, a, b, res, expr="a+b", meth="__add__"):
    """Check a binary operation three ways: eval'd expression, unbound method, bound method."""
    d = {'a': a, 'b': b}
    self.assertEqual(eval(expr, d), res)
    t = type(a)
    m = getattr(t, meth)
    # Walk up to the class that actually defines the method.
    while meth not in t.__dict__:
        t = t.__bases__[0]
    # in some implementations (e.g. PyPy), 'm' can be a regular unbound
    # method object; the getattr() below obtains its underlying function.
    self.assertEqual(getattr(m, 'im_func', m), t.__dict__[meth])
    self.assertEqual(m(a, b), res)
    bm = getattr(a, meth)
    self.assertEqual(bm(b), res)
def sliceop_test(self, a, b, c, res, expr="a[b:c]", meth="__getitem__"):
    """Check slicing via eval'd expression, unbound method with slice(), and bound method."""
    d = {'a': a, 'b': b, 'c': c}
    self.assertEqual(eval(expr, d), res)
    t = type(a)
    m = getattr(t, meth)
    # Walk up to the class that actually defines the method.
    while meth not in t.__dict__:
        t = t.__bases__[0]
    # in some implementations (e.g. PyPy), 'm' can be a regular unbound
    # method object; the getattr() below obtains its underlying function.
    self.assertEqual(getattr(m, 'im_func', m), t.__dict__[meth])
    self.assertEqual(m(a, slice(b, c)), res)
    bm = getattr(a, meth)
    self.assertEqual(bm(slice(b, c)), res)
def setop_test(self, a, b, res, stmt="a+=b", meth="__iadd__"):
    """Check an augmented-assignment op via exec'd statement, unbound method, bound method.

    deepcopy keeps each of the three runs independent of *a*'s original value.
    """
    d = {'a': deepcopy(a), 'b': b}
    exec(stmt, d)
    self.assertEqual(d['a'], res)
    t = type(a)
    m = getattr(t, meth)
    # Walk up to the class that actually defines the method.
    while meth not in t.__dict__:
        t = t.__bases__[0]
    # in some implementations (e.g. PyPy), 'm' can be a regular unbound
    # method object; the getattr() below obtains its underlying function.
    self.assertEqual(getattr(m, 'im_func', m), t.__dict__[meth])
    d['a'] = deepcopy(a)
    m(d['a'], b)
    self.assertEqual(d['a'], res)
    d['a'] = deepcopy(a)
    bm = getattr(d['a'], meth)
    bm(b)
    self.assertEqual(d['a'], res)
def set2op_test(self, a, b, c, res, stmt="a[b]=c", meth="__setitem__"):
    """Check a two-argument mutating op via exec'd statement, unbound method, bound method."""
    d = {'a': deepcopy(a), 'b': b, 'c': c}
    exec(stmt, d)
    self.assertEqual(d['a'], res)
    t = type(a)
    m = getattr(t, meth)
    # Walk up to the class that actually defines the method.
    while meth not in t.__dict__:
        t = t.__bases__[0]
    # in some implementations (e.g. PyPy), 'm' can be a regular unbound
    # method object; the getattr() below obtains its underlying function.
    self.assertEqual(getattr(m, 'im_func', m), t.__dict__[meth])
    d['a'] = deepcopy(a)
    m(d['a'], b, c)
    self.assertEqual(d['a'], res)
    d['a'] = deepcopy(a)
    bm = getattr(d['a'], meth)
    bm(b, c)
    self.assertEqual(d['a'], res)
def setsliceop_test(self, a, b, c, d, res, stmt="a[b:c]=d", meth="__setitem__"):
    """Check slice assignment via exec'd statement, unbound method with slice(), bound method.

    Uses 'dictionary' (not 'd') for the exec namespace because 'd' is the
    value being assigned into the slice here.
    """
    dictionary = {'a': deepcopy(a), 'b': b, 'c': c, 'd': d}
    exec(stmt, dictionary)
    self.assertEqual(dictionary['a'], res)
    t = type(a)
    # Walk up to the class that actually defines the method.
    while meth not in t.__dict__:
        t = t.__bases__[0]
    m = getattr(t, meth)
    # in some implementations (e.g. PyPy), 'm' can be a regular unbound
    # method object; the getattr() below obtains its underlying function.
    self.assertEqual(getattr(m, 'im_func', m), t.__dict__[meth])
    dictionary['a'] = deepcopy(a)
    m(dictionary['a'], slice(b, c), d)
    self.assertEqual(dictionary['a'], res)
    dictionary['a'] = deepcopy(a)
    bm = getattr(dictionary['a'], meth)
    bm(slice(b, c), d)
    self.assertEqual(dictionary['a'], res)
def test_lists(self):
    """List operator protocol: concat, contains, indexing, slicing, and in-place ops."""
    # Testing list operations...
    # Asserts are within individual test methods
    self.binop_test([1], [2], [1,2], "a+b", "__add__")
    self.binop_test([1,2,3], 2, 1, "b in a", "__contains__")
    self.binop_test([1,2,3], 4, 0, "b in a", "__contains__")
    self.binop_test([1,2,3], 1, 2, "a[b]", "__getitem__")
    self.sliceop_test([1,2,3], 0, 2, [1,2], "a[b:c]", "__getitem__")
    self.setop_test([1], [2], [1,2], "a+=b", "__iadd__")
    self.setop_test([1,2], 3, [1,2,1,2,1,2], "a*=b", "__imul__")
    self.unop_test([1,2,3], 3, "len(a)", "__len__")
    self.binop_test([1,2], 3, [1,2,1,2,1,2], "a*b", "__mul__")
    self.binop_test([1,2], 3, [1,2,1,2,1,2], "b*a", "__rmul__")
    self.set2op_test([1,2], 1, 3, [1,3], "a[b]=c", "__setitem__")
    self.setsliceop_test([1,2,3,4], 1, 3, [5,6], [1,5,6,4], "a[b:c]=d",
                         "__setitem__")
def test_dicts(self):
    """Dict operator protocol: contains, item access, iteration, len, repr, setitem."""
    self.binop_test({1: 2, 3: 4}, 1, 1, "b in a", "__contains__")
    self.binop_test({1: 2, 3: 4}, 2, 0, "b in a", "__contains__")
    self.binop_test({1: 2, 3: 4}, 1, 2, "a[b]", "__getitem__")
    d = {1: 2, 3: 4}
    expected_keys = [key for key in list(d.keys())]
    # Iterating the dict via iter(), __iter__() and dict.__iter__ must all
    # yield the same key sequence.
    for key_iterator in (iter(d), d.__iter__(), dict.__iter__(d)):
        self.assertEqual([key for key in key_iterator], expected_keys)
    d = {1: 2, 3: 4}
    self.unop_test(d, 2, "len(a)", "__len__")
    self.assertEqual(eval(repr(d), {}), d)
    self.assertEqual(eval(d.__repr__(), {}), d)
    self.set2op_test({1: 2, 3: 4}, 2, 3, {1: 2, 2: 3, 3: 4}, "a[b]=c",
                     "__setitem__")
# Tests for unary and binary operators
def number_operators(self, a, b, skip=()):
    """Run every applicable entry of self.binops/self.unops against *a* and *b*.

    *skip* lists operator names (without dunder underscores) to leave out.
    Fixes: the default was a mutable list ([]) -- a tuple is safe and
    membership tests behave identically; the eval namespace no longer shadows
    the builtin 'dict'.
    """
    context = {'a': a, 'b': b}
    for name, expr in self.binops.items():
        if name not in skip:
            name = "__%s__" % name
            if hasattr(a, name):
                res = eval(expr, context)
                self.binop_test(a, b, res, expr, name)
    for name, expr in list(self.unops.items()):
        if name not in skip:
            name = "__%s__" % name
            if hasattr(a, name):
                res = eval(expr, context)
                self.unop_test(a, res, expr, name)
def test_ints(self):
    """Int operator protocol, __bool__, and NotImplemented -> TypeError propagation."""
    # Testing int operations...
    self.number_operators(100, 3)
    # The following crashes in Python 2.2
    self.assertEqual((1).__bool__(), 1)
    self.assertEqual((0).__bool__(), 0)
    # This returns 'NotImplemented' in Python 2.2
    class C(int):
        def __add__(self, other):
            return NotImplemented
    self.assertEqual(C(5), 5)
    try:
        C() + ""
    except TypeError:
        pass
    else:
        self.fail("NotImplemented should have caused TypeError")
def test_floats(self):
    """Float operator protocol via the shared operator tables."""
    # Testing float operations...
    self.number_operators(100.0, 3.0)
def test_complexes(self):
    """Complex operator protocol plus a complex subclass with __slots__ and custom repr."""
    # Testing complex operations...
    # Ordering/int/float/floordiv-style ops are undefined for complex, so skip them.
    self.number_operators(100.0j, 3.0j, skip=['lt', 'le', 'gt', 'ge',
                                              'int', 'float',
                                              'floordiv', 'divmod', 'mod'])

    class Number(complex):
        __slots__ = ['prec']
        def __new__(cls, *args, **kwds):
            result = complex.__new__(cls, *args)
            result.prec = kwds.get('prec', 12)
            return result
        def __repr__(self):
            prec = self.prec
            if self.imag == 0.0:
                return "%.*g" % (prec, self.real)
            if self.real == 0.0:
                return "%.*gj" % (prec, self.imag)
            return "(%.*g+%.*gj)" % (prec, self.real, prec, self.imag)
        __str__ = __repr__

    a = Number(3.14, prec=6)
    self.assertEqual(repr(a), "3.14")
    self.assertEqual(a.prec, 6)
    # Re-wrapping an existing Number keeps the value but takes the new precision.
    a = Number(a, prec=2)
    self.assertEqual(repr(a), "3.1")
    self.assertEqual(a.prec, 2)
    a = Number(234.5)
    self.assertEqual(repr(a), "234.5")
    self.assertEqual(a.prec, 12)
def test_explicit_reverse_methods(self):
    """Reflected numeric methods (__radd__/__rsub__) can be called explicitly (issue 9930)."""
    # see issue 9930
    self.assertEqual(complex.__radd__(3j, 4.0), complex(4.0, 3.0))
    self.assertEqual(float.__rsub__(3.0, 1), -2.0)
@support.impl_detail("the module 'xxsubtype' is internal")
def test_spam_lists(self):
    """spamlist (C-extension list subtype) mirrors the list operator-protocol checks."""
    # Testing spamlist operations...
    import copy, xxsubtype as spam

    def spamlist(l, memo=None):
        import xxsubtype as spam
        return spam.spamlist(l)
    # This is an ugly hack:
    # deepcopy (used by the set*op helpers) needs to know how to copy spamlists.
    copy._deepcopy_dispatch[spam.spamlist] = spamlist

    self.binop_test(spamlist([1]), spamlist([2]), spamlist([1,2]), "a+b",
                    "__add__")
    self.binop_test(spamlist([1,2,3]), 2, 1, "b in a", "__contains__")
    self.binop_test(spamlist([1,2,3]), 4, 0, "b in a", "__contains__")
    self.binop_test(spamlist([1,2,3]), 1, 2, "a[b]", "__getitem__")
    self.sliceop_test(spamlist([1,2,3]), 0, 2, spamlist([1,2]), "a[b:c]",
                      "__getitem__")
    self.setop_test(spamlist([1]), spamlist([2]), spamlist([1,2]), "a+=b",
                    "__iadd__")
    self.setop_test(spamlist([1,2]), 3, spamlist([1,2,1,2,1,2]), "a*=b",
                    "__imul__")
    self.unop_test(spamlist([1,2,3]), 3, "len(a)", "__len__")
    self.binop_test(spamlist([1,2]), 3, spamlist([1,2,1,2,1,2]), "a*b",
                    "__mul__")
    self.binop_test(spamlist([1,2]), 3, spamlist([1,2,1,2,1,2]), "b*a",
                    "__rmul__")
    self.set2op_test(spamlist([1,2]), 1, 3, spamlist([1,3]), "a[b]=c",
                     "__setitem__")
    self.setsliceop_test(spamlist([1,2,3,4]), 1, 3, spamlist([5,6]),
                         spamlist([1,5,6,4]), "a[b:c]=d", "__setitem__")
    # Test subclassing
    class C(spam.spamlist):
        def foo(self): return 1
    a = C()
    self.assertEqual(a, [])
    self.assertEqual(a.foo(), 1)
    a.append(100)
    self.assertEqual(a, [100])
    self.assertEqual(a.getstate(), 0)
    a.setstate(42)
    self.assertEqual(a.getstate(), 42)
@support.impl_detail("the module 'xxsubtype' is internal")
def test_spam_dicts(self):
    """spamdict (C-extension dict subtype) mirrors the dict operator-protocol checks."""
    # Testing spamdict operations...
    import copy, xxsubtype as spam

    def spamdict(d, memo=None):
        import xxsubtype as spam
        sd = spam.spamdict()
        for k, v in list(d.items()):
            sd[k] = v
        return sd
    # This is an ugly hack:
    # deepcopy (used by the set*op helpers) needs to know how to copy spamdicts.
    copy._deepcopy_dispatch[spam.spamdict] = spamdict

    self.binop_test(spamdict({1:2,3:4}), 1, 1, "b in a", "__contains__")
    self.binop_test(spamdict({1:2,3:4}), 2, 0, "b in a", "__contains__")
    self.binop_test(spamdict({1:2,3:4}), 1, 2, "a[b]", "__getitem__")
    d = spamdict({1:2,3:4})
    l1 = []
    for i in list(d.keys()):
        l1.append(i)
    l = []
    for i in iter(d):
        l.append(i)
    self.assertEqual(l, l1)
    l = []
    for i in d.__iter__():
        l.append(i)
    self.assertEqual(l, l1)
    l = []
    for i in type(spamdict({})).__iter__(d):
        l.append(i)
    self.assertEqual(l, l1)
    straightd = {1:2, 3:4}
    spamd = spamdict(straightd)
    self.unop_test(spamd, 2, "len(a)", "__len__")
    self.unop_test(spamd, repr(straightd), "repr(a)", "__repr__")
    self.set2op_test(spamdict({1:2,3:4}), 2, 3, spamdict({1:2,2:3,3:4}),
                     "a[b]=c", "__setitem__")
    # Test subclassing
    class C(spam.spamdict):
        def foo(self): return 1
    a = C()
    self.assertEqual(list(a.items()), [])
    self.assertEqual(a.foo(), 1)
    a['foo'] = 'bar'
    self.assertEqual(list(a.items()), [('foo', 'bar')])
    self.assertEqual(a.getstate(), 0)
    a.setstate(100)
    self.assertEqual(a.getstate(), 100)
class ClassPropertiesAndMethods(unittest.TestCase):
def assertHasAttr(self, obj, name):
    """Fail unless *obj* has an attribute called *name*."""
    message = '%r has no attribute %r' % (obj, name)
    self.assertTrue(hasattr(obj, name), message)
def assertNotHasAttr(self, obj, name):
    """Fail if *obj* has an attribute called *name*."""
    message = '%r has unexpected attribute %r' % (obj, name)
    self.assertFalse(hasattr(obj, name), message)
def test_python_dicts(self):
    """Python subclass of dict: overridden item access plus custom state methods."""
    # Testing Python subclass of dict...
    self.assertTrue(issubclass(dict, dict))
    self.assertIsInstance({}, dict)
    d = dict()
    self.assertEqual(d, {})
    self.assertIs(d.__class__, dict)
    self.assertIsInstance(d, dict)
    class C(dict):
        state = -1
        # NOTE: 'self_local' is the C instance; 'self' still closes over the
        # enclosing TestCase so these methods can assert on their own arguments.
        def __init__(self_local, *a, **kw):
            if a:
                self.assertEqual(len(a), 1)
                self_local.state = a[0]
            if kw:
                # Deliberately inverted: keyword VALUES become keys.
                for k, v in list(kw.items()):
                    self_local[v] = k
        def __getitem__(self, key):
            # Missing keys read as 0 instead of raising KeyError.
            return self.get(key, 0)
        def __setitem__(self_local, key, value):
            self.assertIsInstance(key, type(0))
            dict.__setitem__(self_local, key, value)
        def setstate(self, state):
            self.state = state
        def getstate(self):
            return self.state
    self.assertTrue(issubclass(C, dict))
    a1 = C(12)
    self.assertEqual(a1.state, 12)
    a2 = C(foo=1, bar=2)
    self.assertEqual(a2[1] == 'foo' and a2[2], 'bar')
    a = C()
    self.assertEqual(a.state, -1)
    self.assertEqual(a.getstate(), -1)
    a.setstate(0)
    self.assertEqual(a.state, 0)
    self.assertEqual(a.getstate(), 0)
    a.setstate(10)
    self.assertEqual(a.state, 10)
    self.assertEqual(a.getstate(), 10)
    self.assertEqual(a[42], 0)
    a[42] = 24
    self.assertEqual(a[42], 24)
    N = 50
    for i in range(N):
        a[i] = C()
        for j in range(N):
            a[i][j] = i*j
    for i in range(N):
        for j in range(N):
            self.assertEqual(a[i][j], i*j)
def test_python_lists(self):
# Testing Python subclass of list...
class C(list):
def __getitem__(self, i):
if isinstance(i, slice):
return i.start, i.stop
return list.__getitem__(self, i) + 100
a = C()
a.extend([0,1,2])
self.assertEqual(a[0], 100)
self.assertEqual(a[1], 101)
self.assertEqual(a[2], 102)
self.assertEqual(a[100:200], (100,200))
def test_metaclass(self):
# Testing metaclasses...
class C(metaclass=type):
def __init__(self):
self.__state = 0
def getstate(self):
return self.__state
def setstate(self, state):
self.__state = state
a = C()
self.assertEqual(a.getstate(), 0)
a.setstate(10)
self.assertEqual(a.getstate(), 10)
class _metaclass(type):
def myself(cls): return cls
class D(metaclass=_metaclass):
pass
self.assertEqual(D.myself(), D)
d = D()
self.assertEqual(d.__class__, D)
class M1(type):
def __new__(cls, name, bases, dict):
dict['__spam__'] = 1
return type.__new__(cls, name, bases, dict)
class C(metaclass=M1):
pass
self.assertEqual(C.__spam__, 1)
c = C()
self.assertEqual(c.__spam__, 1)
class _instance(object):
pass
class M2(object):
@staticmethod
def __new__(cls, name, bases, dict):
self = object.__new__(cls)
self.name = name
self.bases = bases
self.dict = dict
return self
def __call__(self):
it = _instance()
# Early binding of methods
for key in self.dict:
if key.startswith("__"):
continue
setattr(it, key, self.dict[key].__get__(it, self))
return it
class C(metaclass=M2):
def spam(self):
return 42
self.assertEqual(C.name, 'C')
self.assertEqual(C.bases, ())
self.assertIn('spam', C.dict)
c = C()
self.assertEqual(c.spam(), 42)
# More metaclass examples
class autosuper(type):
# Automatically add __super to the class
# This trick only works for dynamic classes
def __new__(metaclass, name, bases, dict):
cls = super(autosuper, metaclass).__new__(metaclass,
name, bases, dict)
# Name mangling for __super removes leading underscores
while name[:1] == "_":
name = name[1:]
if name:
name = "_%s__super" % name
else:
name = "__super"
setattr(cls, name, super(cls))
return cls
class A(metaclass=autosuper):
def meth(self):
return "A"
class B(A):
def meth(self):
return "B" + self.__super.meth()
class C(A):
def meth(self):
return "C" + self.__super.meth()
class D(C, B):
def meth(self):
return "D" + self.__super.meth()
self.assertEqual(D().meth(), "DCBA")
class E(B, C):
def meth(self):
return "E" + self.__super.meth()
self.assertEqual(E().meth(), "EBCA")
class autoproperty(type):
# Automatically create property attributes when methods
# named _get_x and/or _set_x are found
def __new__(metaclass, name, bases, dict):
hits = {}
for key, val in dict.items():
if key.startswith("_get_"):
key = key[5:]
get, set = hits.get(key, (None, None))
get = val
hits[key] = get, set
elif key.startswith("_set_"):
key = key[5:]
get, set = hits.get(key, (None, None))
set = val
hits[key] = get, set
for key, (get, set) in hits.items():
dict[key] = property(get, set)
return super(autoproperty, metaclass).__new__(metaclass,
name, bases, dict)
class A(metaclass=autoproperty):
def _get_x(self):
return -self.__x
def _set_x(self, x):
self.__x = -x
a = A()
self.assertNotHasAttr(a, "x")
a.x = 12
self.assertEqual(a.x, 12)
self.assertEqual(a._A__x, -12)
class multimetaclass(autoproperty, autosuper):
# Merge of multiple cooperating metaclasses
pass
class A(metaclass=multimetaclass):
def _get_x(self):
return "A"
class B(A):
def _get_x(self):
return "B" + self.__super._get_x()
class C(A):
def _get_x(self):
return "C" + self.__super._get_x()
class D(C, B):
def _get_x(self):
return "D" + self.__super._get_x()
self.assertEqual(D().x, "DCBA")
# Make sure type(x) doesn't call x.__class__.__init__
class T(type):
counter = 0
def __init__(self, *args):
T.counter += 1
class C(metaclass=T):
pass
self.assertEqual(T.counter, 1)
a = C()
self.assertEqual(type(a), C)
self.assertEqual(T.counter, 1)
class C(object): pass
c = C()
try: c()
except TypeError: pass
else: self.fail("calling object w/o call method should raise "
"TypeError")
# Testing code to find most derived baseclass
class A(type):
def __new__(*args, **kwargs):
return type.__new__(*args, **kwargs)
class B(object):
pass
class C(object, metaclass=A):
pass
# The most derived metaclass of D is A rather than type.
class D(B, C):
pass
self.assertIs(A, type(D))
# issue1294232: correct metaclass calculation
new_calls = [] # to check the order of __new__ calls
class AMeta(type):
@staticmethod
def __new__(mcls, name, bases, ns):
new_calls.append('AMeta')
return super().__new__(mcls, name, bases, ns)
@classmethod
def __prepare__(mcls, name, bases):
return {}
class BMeta(AMeta):
@staticmethod
def __new__(mcls, name, bases, ns):
new_calls.append('BMeta')
return super().__new__(mcls, name, bases, ns)
@classmethod
def __prepare__(mcls, name, bases):
ns = super().__prepare__(name, bases)
ns['BMeta_was_here'] = True
return ns
class A(metaclass=AMeta):
pass
self.assertEqual(['AMeta'], new_calls)
new_calls.clear()
class B(metaclass=BMeta):
pass
# BMeta.__new__ calls AMeta.__new__ with super:
self.assertEqual(['BMeta', 'AMeta'], new_calls)
new_calls.clear()
class C(A, B):
pass
# The most derived metaclass is BMeta:
self.assertEqual(['BMeta', 'AMeta'], new_calls)
new_calls.clear()
# BMeta.__prepare__ should've been called:
self.assertIn('BMeta_was_here', C.__dict__)
# The order of the bases shouldn't matter:
class C2(B, A):
pass
self.assertEqual(['BMeta', 'AMeta'], new_calls)
new_calls.clear()
self.assertIn('BMeta_was_here', C2.__dict__)
# Check correct metaclass calculation when a metaclass is declared:
class D(C, metaclass=type):
pass
self.assertEqual(['BMeta', 'AMeta'], new_calls)
new_calls.clear()
self.assertIn('BMeta_was_here', D.__dict__)
class E(C, metaclass=AMeta):
pass
self.assertEqual(['BMeta', 'AMeta'], new_calls)
new_calls.clear()
self.assertIn('BMeta_was_here', E.__dict__)
# Special case: the given metaclass isn't a class,
# so there is no metaclass calculation.
marker = object()
def func(*args, **kwargs):
return marker
class X(metaclass=func):
pass
class Y(object, metaclass=func):
pass
class Z(D, metaclass=func):
pass
self.assertIs(marker, X)
self.assertIs(marker, Y)
self.assertIs(marker, Z)
# The given metaclass is a class,
# but not a descendant of type.
prepare_calls = [] # to track __prepare__ calls
class ANotMeta:
def __new__(mcls, *args, **kwargs):
new_calls.append('ANotMeta')
return super().__new__(mcls)
@classmethod
def __prepare__(mcls, name, bases):
prepare_calls.append('ANotMeta')
return {}
class BNotMeta(ANotMeta):
def __new__(mcls, *args, **kwargs):
new_calls.append('BNotMeta')
return super().__new__(mcls)
@classmethod
def __prepare__(mcls, name, bases):
prepare_calls.append('BNotMeta')
return super().__prepare__(name, bases)
class A(metaclass=ANotMeta):
pass
self.assertIs(ANotMeta, type(A))
self.assertEqual(['ANotMeta'], prepare_calls)
prepare_calls.clear()
self.assertEqual(['ANotMeta'], new_calls)
new_calls.clear()
class B(metaclass=BNotMeta):
pass
self.assertIs(BNotMeta, type(B))
self.assertEqual(['BNotMeta', 'ANotMeta'], prepare_calls)
prepare_calls.clear()
self.assertEqual(['BNotMeta', 'ANotMeta'], new_calls)
new_calls.clear()
class C(A, B):
pass
self.assertIs(BNotMeta, type(C))
self.assertEqual(['BNotMeta', 'ANotMeta'], new_calls)
new_calls.clear()
self.assertEqual(['BNotMeta', 'ANotMeta'], prepare_calls)
prepare_calls.clear()
class C2(B, A):
pass
self.assertIs(BNotMeta, type(C2))
self.assertEqual(['BNotMeta', 'ANotMeta'], new_calls)
new_calls.clear()
self.assertEqual(['BNotMeta', 'ANotMeta'], prepare_calls)
prepare_calls.clear()
# This is a TypeError, because of a metaclass conflict:
# BNotMeta is neither a subclass, nor a superclass of type
with self.assertRaises(TypeError):
class D(C, metaclass=type):
pass
class E(C, metaclass=ANotMeta):
pass
self.assertIs(BNotMeta, type(E))
self.assertEqual(['BNotMeta', 'ANotMeta'], new_calls)
new_calls.clear()
self.assertEqual(['BNotMeta', 'ANotMeta'], prepare_calls)
prepare_calls.clear()
class F(object(), C):
pass
self.assertIs(BNotMeta, type(F))
self.assertEqual(['BNotMeta', 'ANotMeta'], new_calls)
new_calls.clear()
self.assertEqual(['BNotMeta', 'ANotMeta'], prepare_calls)
prepare_calls.clear()
class F2(C, object()):
pass
self.assertIs(BNotMeta, type(F2))
self.assertEqual(['BNotMeta', 'ANotMeta'], new_calls)
new_calls.clear()
self.assertEqual(['BNotMeta', 'ANotMeta'], prepare_calls)
prepare_calls.clear()
# TypeError: BNotMeta is neither a
# subclass, nor a superclass of int
with self.assertRaises(TypeError):
class X(C, int()):
pass
with self.assertRaises(TypeError):
class X(int(), C):
pass
def test_module_subclasses(self):
    """Subclasses of the module type can override the attribute hooks."""
    # Testing Python subclass of module...
    log = []
    MT = type(sys)  # the module type, obtained without importing types
    class MM(MT):
        def __init__(self, name):
            MT.__init__(self, name)
        def __getattribute__(self, name):
            log.append(("getattr", name))
            return MT.__getattribute__(self, name)
        def __setattr__(self, name, value):
            log.append(("setattr", name, value))
            MT.__setattr__(self, name, value)
        def __delattr__(self, name):
            log.append(("delattr", name))
            MT.__delattr__(self, name)
    a = MM("a")
    # Each operation below should route through the overridden hook
    # exactly once, in order.
    a.foo = 12
    x = a.foo
    del a.foo
    self.assertEqual(log, [("setattr", "foo", 12),
                           ("getattr", "foo"),
                           ("delattr", "foo")])
    # http://python.org/sf/1174712
    try:
        class Module(types.ModuleType, str):
            pass
    except TypeError:
        pass
    else:
        self.fail("inheriting from ModuleType and str at the same time "
                  "should fail")
def test_multiple_inheritance(self):
# Testing multiple inheritance...
class C(object):
def __init__(self):
self.__state = 0
def getstate(self):
return self.__state
def setstate(self, state):
self.__state = state
a = C()
self.assertEqual(a.getstate(), 0)
a.setstate(10)
self.assertEqual(a.getstate(), 10)
class D(dict, C):
def __init__(self):
type({}).__init__(self)
C.__init__(self)
d = D()
self.assertEqual(list(d.keys()), [])
d["hello"] = "world"
self.assertEqual(list(d.items()), [("hello", "world")])
self.assertEqual(d["hello"], "world")
self.assertEqual(d.getstate(), 0)
d.setstate(10)
self.assertEqual(d.getstate(), 10)
self.assertEqual(D.__mro__, (D, dict, C, object))
# SF bug #442833
class Node(object):
def __int__(self):
return int(self.foo())
def foo(self):
return "23"
class Frag(Node, list):
def foo(self):
return "42"
self.assertEqual(Node().__int__(), 23)
self.assertEqual(int(Node()), 23)
self.assertEqual(Frag().__int__(), 42)
self.assertEqual(int(Frag()), 42)
def test_diamond_inheritance(self):
# Testing multiple inheritance special cases...
class A(object):
def spam(self): return "A"
self.assertEqual(A().spam(), "A")
class B(A):
def boo(self): return "B"
def spam(self): return "B"
self.assertEqual(B().spam(), "B")
self.assertEqual(B().boo(), "B")
class C(A):
def boo(self): return "C"
self.assertEqual(C().spam(), "A")
self.assertEqual(C().boo(), "C")
class D(B, C): pass
self.assertEqual(D().spam(), "B")
self.assertEqual(D().boo(), "B")
self.assertEqual(D.__mro__, (D, B, C, A, object))
class E(C, B): pass
self.assertEqual(E().spam(), "B")
self.assertEqual(E().boo(), "C")
self.assertEqual(E.__mro__, (E, C, B, A, object))
# MRO order disagreement
try:
class F(D, E): pass
except TypeError:
pass
else:
self.fail("expected MRO order disagreement (F)")
try:
class G(E, D): pass
except TypeError:
pass
else:
self.fail("expected MRO order disagreement (G)")
# see thread python-dev/2002-October/029035.html
def test_ex5_from_c3_switch(self):
# Testing ex5 from C3 switch discussion...
class A(object): pass
class B(object): pass
class C(object): pass
class X(A): pass
class Y(A): pass
class Z(X,B,Y,C): pass
self.assertEqual(Z.__mro__, (Z, X, B, Y, A, C, object))
# see "A Monotonic Superclass Linearization for Dylan",
# by Kim Barrett et al. (OOPSLA 1996)
def test_monotonicity(self):
# Testing MRO monotonicity...
class Boat(object): pass
class DayBoat(Boat): pass
class WheelBoat(Boat): pass
class EngineLess(DayBoat): pass
class SmallMultihull(DayBoat): pass
class PedalWheelBoat(EngineLess,WheelBoat): pass
class SmallCatamaran(SmallMultihull): pass
class Pedalo(PedalWheelBoat,SmallCatamaran): pass
self.assertEqual(PedalWheelBoat.__mro__,
(PedalWheelBoat, EngineLess, DayBoat, WheelBoat, Boat, object))
self.assertEqual(SmallCatamaran.__mro__,
(SmallCatamaran, SmallMultihull, DayBoat, Boat, object))
self.assertEqual(Pedalo.__mro__,
(Pedalo, PedalWheelBoat, EngineLess, SmallCatamaran,
SmallMultihull, DayBoat, WheelBoat, Boat, object))
# see "A Monotonic Superclass Linearization for Dylan",
# by Kim Barrett et al. (OOPSLA 1996)
def test_consistency_with_epg(self):
# Testing consistency with EPG...
class Pane(object): pass
class ScrollingMixin(object): pass
class EditingMixin(object): pass
class ScrollablePane(Pane,ScrollingMixin): pass
class EditablePane(Pane,EditingMixin): pass
class EditableScrollablePane(ScrollablePane,EditablePane): pass
self.assertEqual(EditableScrollablePane.__mro__,
(EditableScrollablePane, ScrollablePane, EditablePane, Pane,
ScrollingMixin, EditingMixin, object))
def test_mro_disagreement(self):
    """Error messages raised for duplicate bases and inconsistent MROs."""
    # Testing error messages for MRO disagreement...
    mro_err_msg = """Cannot create a consistent method resolution
order (MRO) for bases """
    def raises(exc, expected, callable, *args):
        # Helper: invoke callable(*args) and require *exc* whose message
        # starts with *expected*; message text is checked only where the
        # implementation promises it (CPython).
        try:
            callable(*args)
        except exc as msg:
            # the exact msg is generally considered an impl detail
            if support.check_impl_detail():
                if not str(msg).startswith(expected):
                    self.fail("Message %r, expected %r" %
                              (str(msg), expected))
        else:
            self.fail("Expected %s" % exc)
    class A(object): pass
    class B(A): pass
    class C(object): pass
    # Test some very simple errors
    raises(TypeError, "duplicate base class A",
           type, "X", (A, A), {})
    raises(TypeError, mro_err_msg,
           type, "X", (A, B), {})
    raises(TypeError, mro_err_msg,
           type, "X", (A, C, B), {})
    # Test a slightly more complex error
    class GridLayout(object): pass
    class HorizontalGrid(GridLayout): pass
    class VerticalGrid(GridLayout): pass
    class HVGrid(HorizontalGrid, VerticalGrid): pass
    class VHGrid(VerticalGrid, HorizontalGrid): pass
    raises(TypeError, mro_err_msg,
           type, "ConfusedGrid", (HVGrid, VHGrid), {})
def test_object_class(self):
    """Bare object() instances: identity, hashability, no attribute dict."""
    # Testing object class...
    a = object()
    self.assertEqual(a.__class__, object)
    self.assertEqual(type(a), object)
    b = object()
    self.assertNotEqual(a, b)
    self.assertNotHasAttr(a, "foo")
    try:
        a.foo = 12
    except (AttributeError, TypeError):
        pass
    else:
        self.fail("object() should not allow setting a foo attribute")
    self.assertNotHasAttr(object(), "__dict__")
    # A plain object subclass, by contrast, does get a __dict__.
    class Cdict(object):
        pass
    x = Cdict()
    self.assertEqual(x.__dict__, {})
    x.foo = 1
    self.assertEqual(x.foo, 1)
    self.assertEqual(x.__dict__, {'foo': 1})
def test_object_class_assignment_between_heaptypes_and_nonheaptypes(self):
    """__class__ assignment works between ModuleType and a heap subtype,
    but instances of immutable builtins must reject it."""
    class SubType(types.ModuleType):
        a = 1
    m = types.ModuleType("m")
    self.assertTrue(m.__class__ is types.ModuleType)
    self.assertFalse(hasattr(m, "a"))
    # Switching to the subtype makes the class attribute visible...
    m.__class__ = SubType
    self.assertTrue(m.__class__ is SubType)
    self.assertTrue(hasattr(m, "a"))
    # ...and switching back hides it again.
    m.__class__ = types.ModuleType
    self.assertTrue(m.__class__ is types.ModuleType)
    self.assertFalse(hasattr(m, "a"))
    # Make sure that builtin immutable objects don't support __class__
    # assignment, because the object instances may be interned.
    # We set __slots__ = () to ensure that the subclasses are
    # memory-layout compatible, and thus otherwise reasonable candidates
    # for __class__ assignment.
    # The following types have immutable instances, but are not
    # subclassable and thus don't need to be checked:
    # NoneType, bool
    class MyInt(int):
        __slots__ = ()
    with self.assertRaises(TypeError):
        (1).__class__ = MyInt
    class MyFloat(float):
        __slots__ = ()
    with self.assertRaises(TypeError):
        (1.0).__class__ = MyFloat
    class MyComplex(complex):
        __slots__ = ()
    with self.assertRaises(TypeError):
        (1 + 2j).__class__ = MyComplex
    class MyStr(str):
        __slots__ = ()
    with self.assertRaises(TypeError):
        "a".__class__ = MyStr
    class MyBytes(bytes):
        __slots__ = ()
    with self.assertRaises(TypeError):
        b"a".__class__ = MyBytes
    class MyTuple(tuple):
        __slots__ = ()
    with self.assertRaises(TypeError):
        ().__class__ = MyTuple
    class MyFrozenSet(frozenset):
        __slots__ = ()
    with self.assertRaises(TypeError):
        frozenset().__class__ = MyFrozenSet
def test_slots(self):
    """Exercise __slots__: attribute storage, slot-name validation,
    name mangling, inheritance, reference leaks, and gc interaction."""
    # Testing __slots__...
    class C0(object):
        __slots__ = []
    x = C0()
    # Empty __slots__: no __dict__ and no settable attributes.
    self.assertNotHasAttr(x, "__dict__")
    self.assertNotHasAttr(x, "foo")
    class C1(object):
        __slots__ = ['a']
    x = C1()
    self.assertNotHasAttr(x, "__dict__")
    # A slot is unset until assigned, and unset again after del.
    self.assertNotHasAttr(x, "a")
    x.a = 1
    self.assertEqual(x.a, 1)
    x.a = None
    self.assertEqual(x.a, None)
    del x.a
    self.assertNotHasAttr(x, "a")
    class C3(object):
        __slots__ = ['a', 'b', 'c']
    x = C3()
    self.assertNotHasAttr(x, "__dict__")
    self.assertNotHasAttr(x, 'a')
    self.assertNotHasAttr(x, 'b')
    self.assertNotHasAttr(x, 'c')
    x.a = 1
    x.b = 2
    x.c = 3
    self.assertEqual(x.a, 1)
    self.assertEqual(x.b, 2)
    self.assertEqual(x.c, 3)
    class C4(object):
        """Validate name mangling"""
        __slots__ = ['__a']
        def __init__(self, value):
            self.__a = value
        def get(self):
            return self.__a
    x = C4(5)
    self.assertNotHasAttr(x, '__dict__')
    # '__a' is only reachable under its mangled name from outside.
    self.assertNotHasAttr(x, '__a')
    self.assertEqual(x.get(), 5)
    try:
        x.__a = 6
    except AttributeError:
        pass
    else:
        self.fail("Double underscored names not mangled")
    # Make sure slot names are proper identifiers
    try:
        class C(object):
            __slots__ = [None]
    except TypeError:
        pass
    else:
        self.fail("[None] slots not caught")
    try:
        class C(object):
            __slots__ = ["foo bar"]
    except TypeError:
        pass
    else:
        self.fail("['foo bar'] slots not caught")
    try:
        class C(object):
            __slots__ = ["foo\0bar"]
    except TypeError:
        pass
    else:
        self.fail("['foo\\0bar'] slots not caught")
    try:
        class C(object):
            __slots__ = ["1"]
    except TypeError:
        pass
    else:
        self.fail("['1'] slots not caught")
    try:
        class C(object):
            __slots__ = [""]
    except TypeError:
        pass
    else:
        self.fail("[''] slots not caught")
    class C(object):
        __slots__ = ["a", "a_b", "_a", "A0123456789Z"]
    # XXX(nnorwitz): was there supposed to be something tested
    # from the class above?
    # Test a single string is not expanded as a sequence.
    class C(object):
        __slots__ = "abc"
    c = C()
    c.abc = 5
    self.assertEqual(c.abc, 5)
    # Test unicode slot names
    # Test a single unicode string is not expanded as a sequence.
    class C(object):
        __slots__ = "abc"
    c = C()
    c.abc = 5
    self.assertEqual(c.abc, 5)
    # _unicode_to_string used to modify slots in certain circumstances
    slots = ("foo", "bar")
    class C(object):
        __slots__ = slots
    x = C()
    x.foo = 5
    self.assertEqual(x.foo, 5)
    self.assertIs(type(slots[0]), str)
    # this used to leak references
    try:
        class C(object):
            __slots__ = [chr(128)]
    except (TypeError, UnicodeEncodeError):
        pass
    else:
        self.fail("[chr(128)] slots not caught")
    # Test leaks
    class Counted(object):
        counter = 0    # counts the number of instances alive
        def __init__(self):
            Counted.counter += 1
        def __del__(self):
            Counted.counter -= 1
    class C(object):
        __slots__ = ['a', 'b', 'c']
    x = C()
    x.a = Counted()
    x.b = Counted()
    x.c = Counted()
    self.assertEqual(Counted.counter, 3)
    del x
    support.gc_collect()
    self.assertEqual(Counted.counter, 0)
    # D has no __slots__ of its own, so it gains a __dict__ (hence .z works).
    class D(C):
        pass
    x = D()
    x.a = Counted()
    x.z = Counted()
    self.assertEqual(Counted.counter, 2)
    del x
    support.gc_collect()
    self.assertEqual(Counted.counter, 0)
    class E(D):
        __slots__ = ['e']
    x = E()
    x.a = Counted()
    x.z = Counted()
    x.e = Counted()
    self.assertEqual(Counted.counter, 3)
    del x
    support.gc_collect()
    self.assertEqual(Counted.counter, 0)
    # Test cyclical leaks [SF bug 519621]
    class F(object):
        __slots__ = ['a', 'b']
    s = F()
    s.a = [Counted(), s]
    self.assertEqual(Counted.counter, 1)
    s = None
    support.gc_collect()
    self.assertEqual(Counted.counter, 0)
    # Test lookup leaks [SF bug 572567]
    if hasattr(gc, 'get_objects'):
        class G(object):
            def __eq__(self, other):
                return False
        g = G()
        orig_objects = len(gc.get_objects())
        for i in range(10):
            g==g
        new_objects = len(gc.get_objects())
        self.assertEqual(orig_objects, new_objects)
    class H(object):
        __slots__ = ['a', 'b']
        def __init__(self):
            self.a = 1
            self.b = 2
        def __del__(self_):
            # Deliberate: 'self_' is the H instance; 'self' is the
            # closed-over test case, so these checks run at finalization.
            self.assertEqual(self_.a, 1)
            self.assertEqual(self_.b, 2)
    with support.captured_output('stderr') as s:
        h = H()
        del h
    # Nothing may be written to stderr during H's finalization.
    self.assertEqual(s.getvalue(), '')
    class X(object):
        __slots__ = "a"
    # Deleting a never-assigned slot raises AttributeError.
    with self.assertRaises(AttributeError):
        del X().a
def test_slots_special(self):
    """'__dict__' and '__weakref__' may themselves be listed in __slots__."""
    # Testing __dict__ and __weakref__ in __slots__...
    class D(object):
        __slots__ = ["__dict__"]
    a = D()
    self.assertHasAttr(a, "__dict__")
    self.assertNotHasAttr(a, "__weakref__")
    a.foo = 42
    self.assertEqual(a.__dict__, {"foo": 42})
    class W(object):
        __slots__ = ["__weakref__"]
    a = W()
    self.assertHasAttr(a, "__weakref__")
    self.assertNotHasAttr(a, "__dict__")
    try:
        a.foo = 42
    except AttributeError:
        pass
    else:
        self.fail("shouldn't be allowed to set a.foo")
    # Combining the two bases (in either order) yields both features.
    class C1(W, D):
        __slots__ = []
    a = C1()
    self.assertHasAttr(a, "__dict__")
    self.assertHasAttr(a, "__weakref__")
    a.foo = 42
    self.assertEqual(a.__dict__, {"foo": 42})
    class C2(D, W):
        __slots__ = []
    a = C2()
    self.assertHasAttr(a, "__dict__")
    self.assertHasAttr(a, "__weakref__")
    a.foo = 42
    self.assertEqual(a.__dict__, {"foo": 42})
def test_slots_special2(self):
    """'__qualname__' and '__classcell__' in __slots__ (with restrictions)."""
    # Testing __qualname__ and __classcell__ in __slots__
    class Meta(type):
        def __new__(cls, name, bases, namespace, attr):
            # 'self' is the closed-over test case; verify the special
            # name reached the class namespace.
            self.assertIn(attr, namespace)
            return super().__new__(cls, name, bases, namespace)
    class C1:
        def __init__(self):
            self.b = 42
    class C2(C1, metaclass=Meta, attr="__classcell__"):
        __slots__ = ["__classcell__"]
        def __init__(self):
            super().__init__()
    self.assertIsInstance(C2.__dict__["__classcell__"],
                          types.MemberDescriptorType)
    c = C2()
    self.assertEqual(c.b, 42)
    self.assertNotHasAttr(c, "__classcell__")
    c.__classcell__ = 42
    self.assertEqual(c.__classcell__, 42)
    with self.assertRaises(TypeError):
        class C3:
            __classcell__ = 42
            __slots__ = ["__classcell__"]
    class Q1(metaclass=Meta, attr="__qualname__"):
        __slots__ = ["__qualname__"]
    self.assertEqual(Q1.__qualname__, C1.__qualname__[:-2] + "Q1")
    self.assertIsInstance(Q1.__dict__["__qualname__"],
                          types.MemberDescriptorType)
    q = Q1()
    self.assertNotHasAttr(q, "__qualname__")
    q.__qualname__ = "q"
    self.assertEqual(q.__qualname__, "q")
    with self.assertRaises(TypeError):
        class Q2:
            __qualname__ = object()
            __slots__ = ["__qualname__"]
def test_slots_descriptor(self):
    """Slot descriptors must type-check the instance they are applied to."""
    # Issue2115: slot descriptors did not correctly check
    # the type of the given object
    import abc
    class MyABC(metaclass=abc.ABCMeta):
        __slots__ = "a"
    class Unrelated(object):
        pass
    # ABC registration makes isinstance() true without real inheritance,
    # so the slot descriptor must still reject the unrelated instance.
    MyABC.register(Unrelated)
    u = Unrelated()
    self.assertIsInstance(u, MyABC)
    # This used to crash
    self.assertRaises(TypeError, MyABC.a.__set__, u, 3)
def test_dynamics(self):
    """Attributes and special methods added to a class after creation
    are visible to existing instances and to subclasses."""
    # Testing class attribute propagation...
    class D(object):
        pass
    class E(D):
        pass
    class F(D):
        pass
    D.foo = 1
    self.assertEqual(D.foo, 1)
    # Test that dynamic attributes are inherited
    self.assertEqual(E.foo, 1)
    self.assertEqual(F.foo, 1)
    # Test dynamic instances
    class C(object):
        pass
    a = C()
    self.assertNotHasAttr(a, "foobar")
    C.foobar = 2
    self.assertEqual(a.foobar, 2)
    C.method = lambda self: 42
    self.assertEqual(a.method(), 42)
    C.__repr__ = lambda self: "C()"
    self.assertEqual(repr(a), "C()")
    C.__int__ = lambda self: 100
    self.assertEqual(int(a), 100)
    self.assertEqual(a.foobar, 2)
    self.assertNotHasAttr(a, "spam")
    def mygetattr(self, name):
        if name == "spam":
            return "spam"
        raise AttributeError
    C.__getattr__ = mygetattr
    self.assertEqual(a.spam, "spam")
    a.new = 12
    self.assertEqual(a.new, 12)
    def mysetattr(self, name, value):
        if name == "spam":
            raise AttributeError
        return object.__setattr__(self, name, value)
    C.__setattr__ = mysetattr
    try:
        a.spam = "not spam"
    except AttributeError:
        pass
    else:
        self.fail("expected AttributeError")
    # The rejected assignment must not have changed the attribute.
    self.assertEqual(a.spam, "spam")
    class D(C):
        pass
    d = D()
    d.foo = 1
    self.assertEqual(d.foo, 1)
    # Test handling of int*seq and seq*int
    class I(int):
        pass
    self.assertEqual("a"*I(2), "aa")
    self.assertEqual(I(2)*"a", "aa")
    self.assertEqual(2*I(3), 6)
    self.assertEqual(I(3)*2, 6)
    self.assertEqual(I(3)*I(2), 6)
    # Test comparison of classes with dynamic metaclasses
    class dynamicmetaclass(type):
        pass
    class someclass(metaclass=dynamicmetaclass):
        pass
    self.assertNotEqual(someclass, object)
def test_errors(self):
    """Illegal base combinations and malformed __slots__ raise TypeError."""
    # Testing errors...
    try:
        # list and dict have incompatible instance layouts.
        class C(list, dict):
            pass
    except TypeError:
        pass
    else:
        self.fail("inheritance from both list and dict should be illegal")
    try:
        class C(object, None):
            pass
    except TypeError:
        pass
    else:
        self.fail("inheritance from non-type should be illegal")
    class Classic:
        pass
    try:
        # type(len) is the builtin-function type, which is not subclassable.
        class C(type(len)):
            pass
    except TypeError:
        pass
    else:
        self.fail("inheritance from CFunction should be illegal")
    try:
        class C(object):
            __slots__ = 1
    except TypeError:
        pass
    else:
        self.fail("__slots__ = 1 should be illegal")
    try:
        class C(object):
            __slots__ = [1]
    except TypeError:
        pass
    else:
        self.fail("__slots__ = [1] should be illegal")
    class M1(type):
        pass
    class M2(type):
        pass
    class A1(object, metaclass=M1):
        pass
    class A2(object, metaclass=M2):
        pass
    try:
        # M1 and M2 are unrelated, so no most-derived metaclass exists.
        class B(A1, A2):
            pass
    except TypeError:
        pass
    else:
        self.fail("finding the most derived metaclass should have failed")
def test_classmethods(self):
    """classmethod binding on classes, subclasses, super(), and __get__."""
    # Testing class methods...
    class C(object):
        def foo(*a): return a
        goo = classmethod(foo)
    c = C()
    self.assertEqual(C.goo(1), (C, 1))
    self.assertEqual(c.goo(1), (C, 1))
    self.assertEqual(c.foo(1), (c, 1))
    class D(C):
        pass
    d = D()
    # Inherited classmethods bind to the subclass.
    self.assertEqual(D.goo(1), (D, 1))
    self.assertEqual(d.goo(1), (D, 1))
    self.assertEqual(d.foo(1), (d, 1))
    self.assertEqual(D.foo(d, 1), (d, 1))
    # Test for a specific crash (SF bug 528132)
    def f(cls, arg): return (cls, arg)
    ff = classmethod(f)
    self.assertEqual(ff.__get__(0, int)(42), (int, 42))
    self.assertEqual(ff.__get__(0)(42), (int, 42))
    # Test super() with classmethods (SF bug 535444)
    self.assertEqual(C.goo.__self__, C)
    self.assertEqual(D.goo.__self__, D)
    self.assertEqual(super(D,D).goo.__self__, D)
    self.assertEqual(super(D,d).goo.__self__, D)
    self.assertEqual(super(D,D).goo(), (D,))
    self.assertEqual(super(D,d).goo(), (D,))
    # Verify that a non-callable will raise
    meth = classmethod(1).__get__(1)
    self.assertRaises(TypeError, meth)
    # Verify that classmethod() doesn't allow keyword args
    try:
        classmethod(f, kw=1)
    except TypeError:
        pass
    else:
        self.fail("classmethod shouldn't accept keyword args")
    # A classmethod object supports arbitrary instance attributes.
    cm = classmethod(f)
    self.assertEqual(cm.__dict__, {})
    cm.x = 42
    self.assertEqual(cm.x, 42)
    self.assertEqual(cm.__dict__, {"x" : 42})
    del cm.x
    self.assertNotHasAttr(cm, "x")
@support.refcount_test
def test_refleaks_in_classmethod___init__(self):
    """Repeated classmethod.__init__ calls must not leak references."""
    gettotalrefcount = support.get_attribute(sys, 'gettotalrefcount')
    cm = classmethod(None)
    refs_before = gettotalrefcount()
    for i in range(100):
        cm.__init__(None)
    # Allow a small delta for interpreter-internal refcount noise.
    self.assertAlmostEqual(gettotalrefcount() - refs_before, 0, delta=10)
@support.impl_detail("the module 'xxsubtype' is internal")
def test_classmethods_in_c(self):
    """C-implemented class methods receive the class (or subclass) first
    and pass positional and keyword arguments through unchanged."""
    # Testing C-based class methods...
    import xxsubtype as spam
    a = (1, 2, 3)
    d = {'abc': 123}
    x, a1, d1 = spam.spamlist.classmeth(*a, **d)
    self.assertEqual(x, spam.spamlist)
    self.assertEqual(a, a1)
    self.assertEqual(d, d1)
    x, a1, d1 = spam.spamlist().classmeth(*a, **d)
    self.assertEqual(x, spam.spamlist)
    self.assertEqual(a, a1)
    self.assertEqual(d, d1)
    # Calling the raw classmethod object requires an explicit class.
    spam_cm = spam.spamlist.__dict__['classmeth']
    x2, a2, d2 = spam_cm(spam.spamlist, *a, **d)
    self.assertEqual(x2, spam.spamlist)
    self.assertEqual(a2, a1)
    self.assertEqual(d2, d1)
    class SubSpam(spam.spamlist): pass
    x2, a2, d2 = spam_cm(SubSpam, *a, **d)
    self.assertEqual(x2, SubSpam)
    self.assertEqual(a2, a1)
    self.assertEqual(d2, d1)
    with self.assertRaises(TypeError):
        spam_cm()
    with self.assertRaises(TypeError):
        spam_cm(spam.spamlist())
    with self.assertRaises(TypeError):
        spam_cm(list)
def test_staticmethods(self):
    """staticmethod wrapping, inheritance, and attribute-dict behavior."""
    class C(object):
        def foo(*a): return a
        goo = staticmethod(foo)
    inst = C()
    # Static methods never receive an implicit first argument.
    self.assertEqual(C.goo(1), (1,))
    self.assertEqual(inst.goo(1), (1,))
    self.assertEqual(inst.foo(1), (inst, 1,))
    class D(C):
        pass
    dinst = D()
    self.assertEqual(D.goo(1), (1,))
    self.assertEqual(dinst.goo(1), (1,))
    self.assertEqual(dinst.foo(1), (dinst, 1))
    self.assertEqual(D.foo(dinst, 1), (dinst, 1))
    # A staticmethod object supports arbitrary instance attributes.
    sm = staticmethod(None)
    self.assertEqual(sm.__dict__, {})
    sm.x = 42
    self.assertEqual(sm.x, 42)
    self.assertEqual(sm.__dict__, {"x" : 42})
    del sm.x
    self.assertNotHasAttr(sm, "x")
@support.refcount_test
def test_refleaks_in_staticmethod___init__(self):
    """Repeated staticmethod.__init__ calls must not leak references."""
    gettotalrefcount = support.get_attribute(sys, 'gettotalrefcount')
    sm = staticmethod(None)
    refs_before = gettotalrefcount()
    for i in range(100):
        sm.__init__(None)
    # Allow a small delta for interpreter-internal refcount noise.
    self.assertAlmostEqual(gettotalrefcount() - refs_before, 0, delta=10)
@support.impl_detail("the module 'xxsubtype' is internal")
def test_staticmethods_in_c(self):
    """C-implemented static methods ignore the class/instance they are
    retrieved from and receive positional and keyword args unchanged."""
    # Testing C-based static methods...
    import xxsubtype as spam
    a = (1, 2, 3)
    d = {"abc": 123}
    x, a1, d1 = spam.spamlist.staticmeth(*a, **d)
    self.assertEqual(x, None)
    self.assertEqual(a, a1)
    self.assertEqual(d, d1)
    x, a1, d2 = spam.spamlist().staticmeth(*a, **d)
    self.assertEqual(x, None)
    self.assertEqual(a, a1)
    # Fix: verify the kwargs returned by the *second* (instance) call.
    # Previously this re-checked d1, leaving d2 unused and the
    # instance-call kwargs unverified.
    self.assertEqual(d, d2)
def test_classic(self):
# Testing classic classes...
class C:
def foo(*a): return a
goo = classmethod(foo)
c = C()
self.assertEqual(C.goo(1), (C, 1))
self.assertEqual(c.goo(1), (C, 1))
self.assertEqual(c.foo(1), (c, 1))
class D(C):
pass
d = D()
self.assertEqual(D.goo(1), (D, 1))
self.assertEqual(d.goo(1), (D, 1))
self.assertEqual(d.foo(1), (d, 1))
self.assertEqual(D.foo(d, 1), (d, 1))
class E: # *not* subclassing from C
foo = C.foo
self.assertEqual(E().foo.__func__, C.foo) # i.e., unbound
self.assertTrue(repr(C.foo.__get__(C())).startswith("<bound method "))
def test_compattr(self):
    """A hand-rolled property-like descriptor defined inside the class body."""
    # Testing computed attributes...
    class C(object):
        class computed_attribute(object):
            def __init__(self, get, set=None, delete=None):
                self.__get = get
                self.__set = set
                self.__delete = delete
            def __get__(self, obj, type=None):
                return self.__get(obj)
            def __set__(self, obj, value):
                return self.__set(obj, value)
            def __delete__(self, obj):
                return self.__delete(obj)
        def __init__(self):
            self.__x = 0
        def __get_x(self):
            # Reading x also increments the stored value; two consecutive
            # reads differ by one (verified below).
            x = self.__x
            self.__x = x+1
            return x
        def __set_x(self, x):
            self.__x = x
        def __delete_x(self):
            del self.__x
        # Name mangling applies consistently to the defs above and to
        # these bare references, so they resolve inside the class body.
        x = computed_attribute(__get_x, __set_x, __delete_x)
    a = C()
    self.assertEqual(a.x, 0)
    self.assertEqual(a.x, 1)
    a.x = 10
    self.assertEqual(a.x, 10)
    self.assertEqual(a.x, 11)
    del a.x
    self.assertNotHasAttr(a, 'x')
def test_newslots(self):
# Testing __new__ slot override...
class C(list):
def __new__(cls):
self = list.__new__(cls)
self.foo = 1
return self
def __init__(self):
self.foo = self.foo + 2
a = C()
self.assertEqual(a.foo, 3)
self.assertEqual(a.__class__, C)
class D(C):
pass
b = D()
self.assertEqual(b.foo, 3)
self.assertEqual(b.__class__, D)
@unittest.expectedFailure
def test_bad_new(self):
    """Mismatched or mis-targeted __new__ calls should raise TypeError.

    NOTE(review): marked expectedFailure here; presumably these checks
    are not (yet) enforced by this implementation -- confirm.
    """
    self.assertRaises(TypeError, object.__new__)
    self.assertRaises(TypeError, object.__new__, '')
    self.assertRaises(TypeError, list.__new__, object)
    self.assertRaises(TypeError, object.__new__, list)
    class C(object):
        __new__ = list.__new__
    self.assertRaises(TypeError, C)
    class C(list):
        __new__ = object.__new__
    self.assertRaises(TypeError, C)
def test_object_new(self):
    """object.__new__/__init__ accept excess arguments only when the
    *sibling* method is overridden."""
    class A(object):
        pass
    object.__new__(A)
    self.assertRaises(TypeError, object.__new__, A, 5)
    object.__init__(A())
    self.assertRaises(TypeError, object.__init__, A(), 5)
    class A(object):
        def __init__(self, foo):
            self.foo = foo
    object.__new__(A)
    object.__new__(A, 5)   # excess args OK: __init__ overridden, __new__ not
    object.__init__(A(3))
    self.assertRaises(TypeError, object.__init__, A(3), 5)
    class A(object):
        def __new__(cls, foo):
            return object.__new__(cls)
    object.__new__(A)
    self.assertRaises(TypeError, object.__new__, A, 5)
    object.__init__(A(3))
    object.__init__(A(3), 5)   # excess args OK: __new__ overridden, __init__ not
    class A(object):
        def __new__(cls, foo):
            return object.__new__(cls)
        def __init__(self, foo):
            self.foo = foo
    # Both overridden: neither object method tolerates excess args.
    object.__new__(A)
    self.assertRaises(TypeError, object.__new__, A, 5)
    object.__init__(A(3))
    self.assertRaises(TypeError, object.__init__, A(3), 5)
@unittest.expectedFailure
def test_restored_object_new(self):
    """Deleting an overriding __new__ restores default construction.

    NOTE(review): marked expectedFailure here; presumably not supported
    by this implementation -- confirm.
    """
    class A(object):
        def __new__(cls, *args, **kwargs):
            raise AssertionError
    self.assertRaises(AssertionError, A)
    class B(A):
        __new__ = object.__new__
        def __init__(self, foo):
            self.foo = foo
    with warnings.catch_warnings():
        warnings.simplefilter('error', DeprecationWarning)
        b = B(3)
    self.assertEqual(b.foo, 3)
    self.assertEqual(b.__class__, B)
    # Removing B's __new__ re-exposes A's raising __new__.
    del B.__new__
    self.assertRaises(AssertionError, B)
    # Removing A's too falls back to object.__new__.
    del A.__new__
    with warnings.catch_warnings():
        warnings.simplefilter('error', DeprecationWarning)
        b = B(3)
    self.assertEqual(b.foo, 3)
    self.assertEqual(b.__class__, B)
def test_altmro(self):
    """mro() can be overridden by a metaclass, but its result is validated."""
    # Testing mro() and overriding it...
    class A(object):
        def f(self): return "A"
    class B(A):
        pass
    class C(A):
        def f(self): return "C"
    class D(B, C):
        pass
    self.assertEqual(A.mro(), [A, object])
    self.assertEqual(A.__mro__, (A, object))
    self.assertEqual(B.mro(), [B, A, object])
    self.assertEqual(B.__mro__, (B, A, object))
    self.assertEqual(C.mro(), [C, A, object])
    self.assertEqual(C.__mro__, (C, A, object))
    self.assertEqual(D.mro(), [D, B, C, A, object])
    self.assertEqual(D.__mro__, (D, B, C, A, object))
    self.assertEqual(D().f(), "C")
    # A metaclass may return a legal but unusual linearization.
    class PerverseMetaType(type):
        def mro(cls):
            L = type.mro(cls)
            L.reverse()
            return L
    class X(D,B,C,A, metaclass=PerverseMetaType):
        pass
    self.assertEqual(X.__mro__, (object, A, C, B, D, X))
    self.assertEqual(X().f(), "A")
    try:
        class _metaclass(type):
            def mro(self):
                return [self, dict, object]
        class X(object, metaclass=_metaclass):
            pass
        # In CPython, the class creation above already raises
        # TypeError, as a protection against the fact that
        # instances of X would segfault it. In other Python
        # implementations it would be ok to let the class X
        # be created, but instead get a clean TypeError on the
        # __setitem__ below.
        x = object.__new__(X)
        x[5] = 6
    except TypeError:
        pass
    else:
        self.fail("devious mro() return not caught")
    try:
        class _metaclass(type):
            def mro(self):
                return [1]
        class X(object, metaclass=_metaclass):
            pass
    except TypeError:
        pass
    else:
        self.fail("non-class mro() return not caught")
    try:
        class _metaclass(type):
            def mro(self):
                return 1
        class X(object, metaclass=_metaclass):
            pass
    except TypeError:
        pass
    else:
        self.fail("non-sequence mro() return not caught")
def test_overloading(self):
    """Attribute and item hooks (__getattr__, __setitem__, ...) are honored,
    including for slice syntax (which passes a slice object as the key)."""
    # Testing operator overloading...
    class B(object):
        "Intermediate class because object doesn't have a __setattr__"
    class C(B):
        def __getattr__(self, name):
            if name == "foo":
                return ("getattr", name)
            else:
                raise AttributeError
        def __setattr__(self, name, value):
            if name == "foo":
                self.setattr = (name, value)
            else:
                return B.__setattr__(self, name, value)
        def __delattr__(self, name):
            if name == "foo":
                self.delattr = name
            else:
                return B.__delattr__(self, name)
        def __getitem__(self, key):
            return ("getitem", key)
        def __setitem__(self, key, value):
            self.setitem = (key, value)
        def __delitem__(self, key):
            self.delitem = key
    a = C()
    self.assertEqual(a.foo, ("getattr", "foo"))
    a.foo = 12
    self.assertEqual(a.setattr, ("foo", 12))
    del a.foo
    self.assertEqual(a.delattr, "foo")
    self.assertEqual(a[12], ("getitem", 12))
    a[12] = 21
    self.assertEqual(a.setitem, (12, 21))
    del a[12]
    self.assertEqual(a.delitem, 12)
    # Slicing reaches the same item hooks with a slice object.
    self.assertEqual(a[0:10], ("getitem", slice(0, 10)))
    a[0:10] = "foo"
    self.assertEqual(a.setitem, (slice(0, 10), "foo"))
    del a[0:10]
    self.assertEqual(a.delitem, (slice(0, 10)))
def test_methods(self):
# Testing methods...
class C(object):
def __init__(self, x):
self.x = x
def foo(self):
return self.x
c1 = C(1)
self.assertEqual(c1.foo(), 1)
class D(C):
boo = C.foo
goo = c1.foo
d2 = D(2)
self.assertEqual(d2.foo(), 2)
self.assertEqual(d2.boo(), 2)
self.assertEqual(d2.goo(), 1)
class E(object):
foo = C.foo
self.assertEqual(E().foo.__func__, C.foo) # i.e., unbound
self.assertTrue(repr(C.foo.__get__(C(1))).startswith("<bound method "))
def test_special_method_lookup(self):
    """Implicit special-method lookup must skip the instance and the
    __getattr__/__getattribute__ hooks, yet still invoke descriptors
    found on the type."""
    # The lookup of special methods bypasses __getattr__ and
    # __getattribute__, but they still can be descriptors.
    def run_context(manager):
        with manager:
            pass
    def iden(self):
        return self
    def hello(self):
        return b"hello"
    def empty_seq(self):
        return []
    def zero(self):
        return 0
    def complex_num(self):
        return 1j
    def stop(self):
        raise StopIteration
    def return_true(self, thing=None):
        return True
    def do_isinstance(obj):
        return isinstance(int, obj)
    def do_issubclass(obj):
        return issubclass(int, obj)
    def do_dict_missing(checker):
        class DictSub(checker.__class__, dict):
            pass
        self.assertEqual(DictSub()["hi"], 4)
    def some_number(self_, key):
        self.assertEqual(key, "hi")
        return 4
    def swallow(*args): pass
    def format_impl(self, spec):
        return "hello"
    # It would be nice to have every special method tested here, but I'm
    # only listing the ones I can remember outside of typeobject.c, since it
    # does it right.
    # Table rows: (special name, triggering callable, implementation,
    #              attribute names allowed through __getattribute__,
    #              extra class attributes needed for the trigger).
    specials = [
        ("__bytes__", bytes, hello, set(), {}),
        ("__reversed__", reversed, empty_seq, set(), {}),
        ("__length_hint__", list, zero, set(),
         {"__iter__" : iden, "__next__" : stop}),
        ("__sizeof__", sys.getsizeof, zero, set(), {}),
        ("__instancecheck__", do_isinstance, return_true, set(), {}),
        ("__missing__", do_dict_missing, some_number,
         set(("__class__",)), {}),
        ("__subclasscheck__", do_issubclass, return_true,
         set(("__bases__",)), {}),
        ("__enter__", run_context, iden, set(), {"__exit__" : swallow}),
        ("__exit__", run_context, swallow, set(), {"__enter__" : iden}),
        ("__complex__", complex, complex_num, set(), {}),
        ("__format__", format, format_impl, set(), {}),
        ("__floor__", math.floor, zero, set(), {}),
        ("__trunc__", math.trunc, zero, set(), {}),
        ("__trunc__", int, zero, set(), {}),
        ("__ceil__", math.ceil, zero, set(), {}),
        ("__dir__", dir, empty_seq, set(), {}),
        ("__round__", round, zero, set(), {}),
    ]
    class Checker(object):
        # Fails the test if any attribute lookup goes through the
        # instance hooks; names in 'ok' (bound per table row at call
        # time) are exempt.
        def __getattr__(self, attr, test=self):
            test.fail("__getattr__ called with {0}".format(attr))
        def __getattribute__(self, attr, test=self):
            if attr not in ok:
                test.fail("__getattribute__ called with {0}".format(attr))
            return object.__getattribute__(self, attr)
    class SpecialDescr(object):
        # Descriptor that records each __get__ before delegating.
        def __init__(self, impl):
            self.impl = impl
        def __get__(self, obj, owner):
            record.append(1)
            return self.impl.__get__(obj, owner)
    class MyException(Exception):
        pass
    class ErrDescr(object):
        def __get__(self, obj, owner):
            raise MyException
    for name, runner, meth_impl, ok, env in specials:
        # 1) A plain implementation on the class must be found and used.
        class X(Checker):
            pass
        for attr, obj in env.items():
            setattr(X, attr, obj)
        setattr(X, name, meth_impl)
        runner(X())
        # 2) A descriptor on the class must be invoked exactly once.
        record = []
        class X(Checker):
            pass
        for attr, obj in env.items():
            setattr(X, attr, obj)
        setattr(X, name, SpecialDescr(meth_impl))
        runner(X())
        self.assertEqual(record, [1], name)
        # 3) An exception raised by the descriptor must propagate.
        class X(Checker):
            pass
        for attr, obj in env.items():
            setattr(X, attr, obj)
        setattr(X, name, ErrDescr())
        self.assertRaises(MyException, runner, X())
    def test_specials(self):
        """Default and overridden special-method behavior: truth value,
        hashing, equality/inequality, str/repr, and containment."""
        # Testing special operators...
        # Test operators like __hash__ for which a built-in default exists
        # Test the default behavior for static classes
        class C(object):
            def __getitem__(self, i):
                if 0 <= i < 10: return i
                raise IndexError
        c1 = C()
        c2 = C()
        # Default truth value: every plain instance is truthy.
        self.assertFalse(not c1)
        self.assertNotEqual(id(c1), id(c2))
        # Default __hash__ exists and does not raise.
        hash(c1)
        hash(c2)
        # Default equality is identity-based.
        self.assertEqual(c1, c1)
        self.assertTrue(c1 != c2)
        self.assertFalse(c1 != c1)
        self.assertFalse(c1 == c2)
        # Note that the module name appears in str/repr, and that varies
        # depending on whether this test is run standalone or from a framework.
        self.assertGreaterEqual(str(c1).find('C object at '), 0)
        self.assertEqual(str(c1), repr(c1))
        # Without __contains__, "in" falls back to __getitem__ iteration.
        self.assertNotIn(-1, c1)
        for i in range(10):
            self.assertIn(i, c1)
        self.assertNotIn(10, c1)
        # Test the default behavior for dynamic classes
        class D(object):
            def __getitem__(self, i):
                if 0 <= i < 10: return i
                raise IndexError
        d1 = D()
        d2 = D()
        self.assertFalse(not d1)
        self.assertNotEqual(id(d1), id(d2))
        hash(d1)
        hash(d2)
        self.assertEqual(d1, d1)
        self.assertNotEqual(d1, d2)
        self.assertFalse(d1 != d1)
        self.assertFalse(d1 == d2)
        # Note that the module name appears in str/repr, and that varies
        # depending on whether this test is run standalone or from a framework.
        self.assertGreaterEqual(str(d1).find('D object at '), 0)
        self.assertEqual(str(d1), repr(d1))
        self.assertNotIn(-1, d1)
        for i in range(10):
            self.assertIn(i, d1)
        self.assertNotIn(10, d1)
        # Test overridden behavior
        class Proxy(object):
            # Forwards truth value, hashing, all six comparisons,
            # str/repr and containment to the wrapped object self.x.
            def __init__(self, x):
                self.x = x
            def __bool__(self):
                return not not self.x
            def __hash__(self):
                return hash(self.x)
            def __eq__(self, other):
                return self.x == other
            def __ne__(self, other):
                return self.x != other
            def __ge__(self, other):
                return self.x >= other
            def __gt__(self, other):
                return self.x > other
            def __le__(self, other):
                return self.x <= other
            def __lt__(self, other):
                return self.x < other
            def __str__(self):
                return "Proxy:%s" % self.x
            def __repr__(self):
                return "Proxy(%r)" % self.x
            def __contains__(self, value):
                return value in self.x
        p0 = Proxy(0)
        p1 = Proxy(1)
        p_1 = Proxy(-1)
        self.assertFalse(p0)
        self.assertFalse(not p1)
        self.assertEqual(hash(p0), hash(0))
        self.assertEqual(p0, p0)
        self.assertNotEqual(p0, p1)
        self.assertFalse(p0 != p0)
        # not p0 is the bool True, which compares equal to Proxy(1).
        self.assertEqual(not p0, p1)
        self.assertTrue(p0 < p1)
        self.assertTrue(p0 <= p1)
        self.assertTrue(p1 > p0)
        self.assertTrue(p1 >= p0)
        self.assertEqual(str(p0), "Proxy:0")
        self.assertEqual(repr(p0), "Proxy(0)")
        p10 = Proxy(range(10))
        self.assertNotIn(-1, p10)
        for i in range(10):
            self.assertIn(i, p10)
        self.assertNotIn(10, p10)
    def test_weakrefs(self):
        """Weak references to ordinary instances work; a __slots__ class
        supports them only when '__weakref__' appears in __slots__."""
        # Testing weak references...
        import weakref
        class C(object):
            pass
        c = C()
        r = weakref.ref(c)
        self.assertEqual(r(), c)
        del c
        support.gc_collect()
        # After collection the ref is dead and calls return None.
        self.assertEqual(r(), None)
        del r
        # __slots__ without '__weakref__' leaves no room for a weakref slot.
        class NoWeak(object):
            __slots__ = ['foo']
        no = NoWeak()
        try:
            weakref.ref(no)
        except TypeError as msg:
            self.assertIn("weak reference", str(msg))
        else:
            self.fail("weakref.ref(no) should be illegal")
        # Explicitly listing '__weakref__' re-enables weak references.
        class Weak(object):
            __slots__ = ['foo', '__weakref__']
        yes = Weak()
        r = weakref.ref(yes)
        self.assertEqual(r(), yes)
        del yes
        support.gc_collect()
        self.assertEqual(r(), None)
        del r
    def test_properties(self):
        """property(): attribute get/set/delete, direct descriptor-protocol
        calls, and the read-only fget/fset/fdel accessors."""
        # Testing property...
        class C(object):
            def getx(self):
                return self.__x
            def setx(self, value):
                self.__x = value
            def delx(self):
                del self.__x
            x = property(getx, setx, delx, doc="I'm the x property.")
        a = C()
        self.assertNotHasAttr(a, "x")
        a.x = 42
        # Underlying storage is the name-mangled attribute _C__x.
        self.assertEqual(a._C__x, 42)
        self.assertEqual(a.x, 42)
        del a.x
        self.assertNotHasAttr(a, "x")
        self.assertNotHasAttr(a, "_C__x")
        # The descriptor-protocol methods also work when invoked directly.
        C.x.__set__(a, 100)
        self.assertEqual(C.x.__get__(a), 100)
        C.x.__delete__(a)
        self.assertNotHasAttr(a, "x")
        raw = C.__dict__['x']
        self.assertIsInstance(raw, property)
        attrs = dir(raw)
        self.assertIn("__doc__", attrs)
        self.assertIn("fget", attrs)
        self.assertIn("fset", attrs)
        self.assertIn("fdel", attrs)
        self.assertEqual(raw.__doc__, "I'm the x property.")
        self.assertIs(raw.fget, C.__dict__['getx'])
        self.assertIs(raw.fset, C.__dict__['setx'])
        self.assertIs(raw.fdel, C.__dict__['delx'])
        # fget/fset/fdel are read-only attributes of the property object.
        for attr in "fget", "fset", "fdel":
            try:
                setattr(raw, attr, 42)
            except AttributeError as msg:
                if str(msg).find('readonly') < 0:
                    self.fail("when setting readonly attr %r on a property, "
                              "got unexpected AttributeError msg %r" % (attr, str(msg)))
            else:
                self.fail("expected AttributeError from trying to set readonly %r "
                          "attr on a property" % attr)
        # __doc__, by contrast, is writable on a property instance.
        raw.__doc__ = 42
        self.assertEqual(raw.__doc__, 42)
        # A property used as __getitem__ that raises inside its getter
        # must propagate the original exception during iteration.
        class D(object):
            __getitem__ = property(lambda s: 1/0)
        d = D()
        try:
            for i in d:
                str(i)
        except ZeroDivisionError:
            pass
        else:
            self.fail("expected ZeroDivisionError from bad property")
    @unittest.skipIf(sys.flags.optimize >= 2,
                     "Docstrings are omitted with -O2 and above")
    def test_properties_doc_attrib(self):
        """property() copies __doc__ from fget; without an fget the
        property gets no docstring."""
        class E(object):
            def getter(self):
                "getter method"
                return 0
            # 'self_' avoids shadowing the TestCase 'self' that the class
            # body below reads from the enclosing function scope.
            def setter(self_, value):
                "setter method"
                pass
            prop = property(getter)
            self.assertEqual(prop.__doc__, "getter method")
            prop2 = property(fset=setter)
            self.assertEqual(prop2.__doc__, None)
    @support.cpython_only
    def test_testcapi_no_segfault(self):
        """Regression: property(_testcapi.test_with_docstring) in a class
        body must not crash the interpreter."""
        # this segfaulted in 2.5b2
        try:
            import _testcapi
        except ImportError:
            # _testcapi is optional; nothing to check without it.
            pass
        else:
            class X(object):
                p = property(_testcapi.test_with_docstring)
    def test_properties_plus(self):
        """The property.getter/.setter/.deleter decorators: incremental
        construction, replacement of accessors, and overriding single
        accessors in subclasses."""
        class C(object):
            foo = property(doc="hello")
            @foo.getter
            def foo(self):
                return self._foo
            @foo.setter
            def foo(self, value):
                self._foo = abs(value)
            @foo.deleter
            def foo(self):
                del self._foo
        c = C()
        # The doc string passed to property() survives the decorators.
        self.assertEqual(C.foo.__doc__, "hello")
        self.assertNotHasAttr(c, "foo")
        c.foo = -42
        self.assertHasAttr(c, '_foo')
        self.assertEqual(c._foo, 42)
        self.assertEqual(c.foo, 42)
        del c.foo
        self.assertNotHasAttr(c, '_foo')
        self.assertNotHasAttr(c, "foo")
        # A subclass can replace just the deleter of an inherited property.
        class D(C):
            @C.foo.deleter
            def foo(self):
                try:
                    del self._foo
                except AttributeError:
                    pass
        d = D()
        d.foo = 24
        self.assertEqual(d.foo, 24)
        del d.foo
        # Second delete is tolerated by D's forgiving deleter.
        del d.foo
        # A later @foo.setter replaces the earlier one.
        class E(object):
            @property
            def foo(self):
                return self._foo
            @foo.setter
            def foo(self, value):
                raise RuntimeError
            @foo.setter
            def foo(self, value):
                self._foo = abs(value)
            @foo.deleter
            def foo(self, value=None):
                del self._foo
        e = E()
        e.foo = -42
        self.assertEqual(e.foo, 42)
        del e.foo
        class F(E):
            @E.foo.deleter
            def foo(self):
                del self._foo
            @foo.setter
            def foo(self, value):
                self._foo = max(0, value)
        f = F()
        f.foo = -10
        self.assertEqual(f.foo, 0)
        del f.foo
def test_dict_constructors(self):
# Testing dict constructor ...
d = dict()
self.assertEqual(d, {})
d = dict({})
self.assertEqual(d, {})
d = dict({1: 2, 'a': 'b'})
self.assertEqual(d, {1: 2, 'a': 'b'})
self.assertEqual(d, dict(list(d.items())))
self.assertEqual(d, dict(iter(d.items())))
d = dict({'one':1, 'two':2})
self.assertEqual(d, dict(one=1, two=2))
self.assertEqual(d, dict(**d))
self.assertEqual(d, dict({"one": 1}, two=2))
self.assertEqual(d, dict([("two", 2)], one=1))
self.assertEqual(d, dict([("one", 100), ("two", 200)], **d))
self.assertEqual(d, dict(**d))
for badarg in 0, 0, 0j, "0", [0], (0,):
try:
dict(badarg)
except TypeError:
pass
except ValueError:
if badarg == "0":
# It's a sequence, and its elements are also sequences (gotta
# love strings <wink>), but they aren't of length 2, so this
# one seemed better as a ValueError than a TypeError.
pass
else:
self.fail("no TypeError from dict(%r)" % badarg)
else:
self.fail("no TypeError from dict(%r)" % badarg)
try:
dict({}, {})
except TypeError:
pass
else:
self.fail("no TypeError from dict({}, {})")
class Mapping:
# Lacks a .keys() method; will be added later.
dict = {1:2, 3:4, 'a':1j}
try:
dict(Mapping())
except TypeError:
pass
else:
self.fail("no TypeError from dict(incomplete mapping)")
Mapping.keys = lambda self: list(self.dict.keys())
Mapping.__getitem__ = lambda self, i: self.dict[i]
d = dict(Mapping())
self.assertEqual(d, Mapping.dict)
# Init from sequence of iterable objects, each producing a 2-sequence.
class AddressBookEntry:
def __init__(self, first, last):
self.first = first
self.last = last
def __iter__(self):
return iter([self.first, self.last])
d = dict([AddressBookEntry('Tim', 'Warsaw'),
AddressBookEntry('Barry', 'Peters'),
AddressBookEntry('Tim', 'Peters'),
AddressBookEntry('Barry', 'Warsaw')])
self.assertEqual(d, {'Barry': 'Warsaw', 'Tim': 'Peters'})
d = dict(zip(range(4), range(1, 5)))
self.assertEqual(d, dict([(i, i+1) for i in range(4)]))
# Bad sequence lengths.
for bad in [('tooshort',)], [('too', 'long', 'by 1')]:
try:
dict(bad)
except ValueError:
pass
else:
self.fail("no ValueError from dict(%r)" % bad)
def test_dir(self):
# Testing dir() ...
junk = 12
self.assertEqual(dir(), ['junk', 'self'])
del junk
# Just make sure these don't blow up!
for arg in 2, 2, 2j, 2e0, [2], "2", b"2", (2,), {2:2}, type, self.test_dir:
dir(arg)
# Test dir on new-style classes. Since these have object as a
# base class, a lot more gets sucked in.
def interesting(strings):
return [s for s in strings if not s.startswith('_')]
class C(object):
Cdata = 1
def Cmethod(self): pass
cstuff = ['Cdata', 'Cmethod']
self.assertEqual(interesting(dir(C)), cstuff)
c = C()
self.assertEqual(interesting(dir(c)), cstuff)
## self.assertIn('__self__', dir(C.Cmethod))
c.cdata = 2
c.cmethod = lambda self: 0
self.assertEqual(interesting(dir(c)), cstuff + ['cdata', 'cmethod'])
## self.assertIn('__self__', dir(c.Cmethod))
class A(C):
Adata = 1
def Amethod(self): pass
astuff = ['Adata', 'Amethod'] + cstuff
self.assertEqual(interesting(dir(A)), astuff)
## self.assertIn('__self__', dir(A.Amethod))
a = A()
self.assertEqual(interesting(dir(a)), astuff)
a.adata = 42
a.amethod = lambda self: 3
self.assertEqual(interesting(dir(a)), astuff + ['adata', 'amethod'])
## self.assertIn('__self__', dir(a.Amethod))
# Try a module subclass.
class M(type(sys)):
pass
minstance = M("m")
minstance.b = 2
minstance.a = 1
default_attributes = ['__name__', '__doc__', '__package__',
'__loader__', '__spec__']
names = [x for x in dir(minstance) if x not in default_attributes]
self.assertEqual(names, ['a', 'b'])
class M2(M):
def getdict(self):
return "Not a dict!"
__dict__ = property(getdict)
m2instance = M2("m2")
m2instance.b = 2
m2instance.a = 1
self.assertEqual(m2instance.__dict__, "Not a dict!")
try:
dir(m2instance)
except TypeError:
pass
# Two essentially featureless objects, just inheriting stuff from
# object.
self.assertEqual(dir(NotImplemented), dir(Ellipsis))
# Nasty test case for proxied objects
class Wrapper(object):
def __init__(self, obj):
self.__obj = obj
def __repr__(self):
return "Wrapper(%s)" % repr(self.__obj)
def __getitem__(self, key):
return Wrapper(self.__obj[key])
def __len__(self):
return len(self.__obj)
def __getattr__(self, name):
return Wrapper(getattr(self.__obj, name))
class C(object):
def __getclass(self):
return Wrapper(type(self))
__class__ = property(__getclass)
dir(C()) # This used to segfault
def test_supers(self):
# Testing super...
class A(object):
def meth(self, a):
return "A(%r)" % a
self.assertEqual(A().meth(1), "A(1)")
class B(A):
def __init__(self):
self.__super = super(B, self)
def meth(self, a):
return "B(%r)" % a + self.__super.meth(a)
self.assertEqual(B().meth(2), "B(2)A(2)")
class C(A):
def meth(self, a):
return "C(%r)" % a + self.__super.meth(a)
C._C__super = super(C)
self.assertEqual(C().meth(3), "C(3)A(3)")
class D(C, B):
def meth(self, a):
return "D(%r)" % a + super(D, self).meth(a)
self.assertEqual(D().meth(4), "D(4)C(4)B(4)A(4)")
# Test for subclassing super
class mysuper(super):
def __init__(self, *args):
return super(mysuper, self).__init__(*args)
class E(D):
def meth(self, a):
return "E(%r)" % a + mysuper(E, self).meth(a)
self.assertEqual(E().meth(5), "E(5)D(5)C(5)B(5)A(5)")
class F(E):
def meth(self, a):
s = self.__super # == mysuper(F, self)
return "F(%r)[%s]" % (a, s.__class__.__name__) + s.meth(a)
F._F__super = mysuper(F)
self.assertEqual(F().meth(6), "F(6)[mysuper]E(6)D(6)C(6)B(6)A(6)")
# Make sure certain errors are raised
try:
super(D, 42)
except TypeError:
pass
else:
self.fail("shouldn't allow super(D, 42)")
try:
super(D, C())
except TypeError:
pass
else:
self.fail("shouldn't allow super(D, C())")
try:
super(D).__get__(12)
except TypeError:
pass
else:
self.fail("shouldn't allow super(D).__get__(12)")
try:
super(D).__get__(C())
except TypeError:
pass
else:
self.fail("shouldn't allow super(D).__get__(C())")
# Make sure data descriptors can be overridden and accessed via super
# (new feature in Python 2.3)
class DDbase(object):
def getx(self): return 42
x = property(getx)
class DDsub(DDbase):
def getx(self): return "hello"
x = property(getx)
dd = DDsub()
self.assertEqual(dd.x, "hello")
self.assertEqual(super(DDsub, dd).x, 42)
# Ensure that super() lookup of descriptor from classmethod
# works (SF ID# 743627)
class Base(object):
aProp = property(lambda self: "foo")
class Sub(Base):
@classmethod
def test(klass):
return super(Sub,klass).aProp
self.assertEqual(Sub.test(), Base.aProp)
# Verify that super() doesn't allow keyword args
try:
super(Base, kw=1)
except TypeError:
pass
else:
self.assertEqual("super shouldn't accept keyword args")
    def test_basic_inheritance(self):
        """Subclassing the built-in types int, float, complex, tuple, str
        and list: arithmetic, slicing and method results come back as the
        base type, not the subclass type."""
        # Testing inheritance from basic types...
        class hexint(int):
            def __repr__(self):
                return hex(self)
            def __add__(self, other):
                return hexint(int.__add__(self, other))
            # (Note that overriding __radd__ doesn't work,
            # because the int type gets first dibs.)
        self.assertEqual(repr(hexint(7) + 9), "0x10")
        self.assertEqual(repr(hexint(1000) + 7), "0x3ef")
        a = hexint(12345)
        self.assertEqual(a, 12345)
        self.assertEqual(int(a), 12345)
        # int() on a subclass instance returns an exact int.
        self.assertIs(int(a).__class__, int)
        self.assertEqual(hash(a), hash(12345))
        # Unary/binary int operations drop the subclass type.
        self.assertIs((+a).__class__, int)
        self.assertIs((a >> 0).__class__, int)
        self.assertIs((a << 0).__class__, int)
        self.assertIs((hexint(0) << 12).__class__, int)
        self.assertIs((hexint(0) >> 12).__class__, int)
        class octlong(int):
            __slots__ = []
            def __str__(self):
                return oct(self)
            def __add__(self, other):
                return self.__class__(super(octlong, self).__add__(other))
            __radd__ = __add__
        self.assertEqual(str(octlong(3) + 5), "0o10")
        # (Note that overriding __radd__ here only seems to work
        # because the example uses a short int left argument.)
        self.assertEqual(str(5 + octlong(3000)), "0o5675")
        a = octlong(12345)
        self.assertEqual(a, 12345)
        self.assertEqual(int(a), 12345)
        self.assertEqual(hash(a), hash(12345))
        self.assertIs(int(a).__class__, int)
        self.assertIs((+a).__class__, int)
        self.assertIs((-a).__class__, int)
        self.assertIs((-octlong(0)).__class__, int)
        self.assertIs((a >> 0).__class__, int)
        self.assertIs((a << 0).__class__, int)
        self.assertIs((a - 0).__class__, int)
        self.assertIs((a * 1).__class__, int)
        self.assertIs((a ** 1).__class__, int)
        self.assertIs((a // 1).__class__, int)
        self.assertIs((1 * a).__class__, int)
        self.assertIs((a | 0).__class__, int)
        self.assertIs((a ^ 0).__class__, int)
        self.assertIs((a & -1).__class__, int)
        self.assertIs((octlong(0) << 12).__class__, int)
        self.assertIs((octlong(0) >> 12).__class__, int)
        self.assertIs(abs(octlong(0)).__class__, int)
        # Because octlong overrides __add__, we can't check the absence of +0
        # optimizations using octlong.
        class longclone(int):
            pass
        a = longclone(1)
        self.assertIs((a + 0).__class__, int)
        self.assertIs((0 + a).__class__, int)
        # Check that negative clones don't segfault
        a = longclone(-1)
        self.assertEqual(a.__dict__, {})
        self.assertEqual(int(a), -1) # self.assertTrue PyNumber_Long() copies the sign bit
        class precfloat(float):
            __slots__ = ['prec']
            def __init__(self, value=0.0, prec=12):
                self.prec = int(prec)
            def __repr__(self):
                return "%.*g" % (self.prec, self)
        self.assertEqual(repr(precfloat(1.1)), "1.1")
        a = precfloat(12345)
        self.assertEqual(a, 12345.0)
        self.assertEqual(float(a), 12345.0)
        self.assertIs(float(a).__class__, float)
        self.assertEqual(hash(a), hash(12345.0))
        self.assertIs((+a).__class__, float)
        class madcomplex(complex):
            def __repr__(self):
                return "%.17gj%+.17g" % (self.imag, self.real)
        a = madcomplex(-3, 4)
        self.assertEqual(repr(a), "4j-3")
        base = complex(-3, 4)
        self.assertEqual(base.__class__, complex)
        self.assertEqual(a, base)
        self.assertEqual(complex(a), base)
        self.assertEqual(complex(a).__class__, complex)
        a = madcomplex(a) # just trying another form of the constructor
        self.assertEqual(repr(a), "4j-3")
        self.assertEqual(a, base)
        self.assertEqual(complex(a), base)
        self.assertEqual(complex(a).__class__, complex)
        self.assertEqual(hash(a), hash(base))
        # Arithmetic on the subclass yields plain complex results.
        self.assertEqual((+a).__class__, complex)
        self.assertEqual((a + 0).__class__, complex)
        self.assertEqual(a + 0, base)
        self.assertEqual((a - 0).__class__, complex)
        self.assertEqual(a - 0, base)
        self.assertEqual((a * 1).__class__, complex)
        self.assertEqual(a * 1, base)
        self.assertEqual((a / 1).__class__, complex)
        self.assertEqual(a / 1, base)
        class madtuple(tuple):
            # Caches the reversed tuple on first use.
            _rev = None
            def rev(self):
                if self._rev is not None:
                    return self._rev
                L = list(self)
                L.reverse()
                self._rev = self.__class__(L)
                return self._rev
        a = madtuple((1,2,3,4,5,6,7,8,9,0))
        self.assertEqual(a, (1,2,3,4,5,6,7,8,9,0))
        self.assertEqual(a.rev(), madtuple((0,9,8,7,6,5,4,3,2,1)))
        self.assertEqual(a.rev().rev(), madtuple((1,2,3,4,5,6,7,8,9,0)))
        for i in range(512):
            t = madtuple(range(i))
            u = t.rev()
            v = u.rev()
            self.assertEqual(v, t)
        a = madtuple((1,2,3,4,5))
        self.assertEqual(tuple(a), (1,2,3,4,5))
        self.assertIs(tuple(a).__class__, tuple)
        self.assertEqual(hash(a), hash((1,2,3,4,5)))
        # Slicing, repetition and concatenation all return plain tuples.
        self.assertIs(a[:].__class__, tuple)
        self.assertIs((a * 1).__class__, tuple)
        self.assertIs((a * 0).__class__, tuple)
        self.assertIs((a + ()).__class__, tuple)
        a = madtuple(())
        self.assertEqual(tuple(a), ())
        self.assertIs(tuple(a).__class__, tuple)
        self.assertIs((a + a).__class__, tuple)
        self.assertIs((a * 0).__class__, tuple)
        self.assertIs((a * 1).__class__, tuple)
        self.assertIs((a * 2).__class__, tuple)
        self.assertIs(a[:].__class__, tuple)
        class madstring(str):
            # Caches the reversed string on first use.
            _rev = None
            def rev(self):
                if self._rev is not None:
                    return self._rev
                L = list(self)
                L.reverse()
                self._rev = self.__class__("".join(L))
                return self._rev
        s = madstring("abcdefghijklmnopqrstuvwxyz")
        self.assertEqual(s, "abcdefghijklmnopqrstuvwxyz")
        self.assertEqual(s.rev(), madstring("zyxwvutsrqponmlkjihgfedcba"))
        self.assertEqual(s.rev().rev(), madstring("abcdefghijklmnopqrstuvwxyz"))
        for i in range(256):
            s = madstring("".join(map(chr, range(i))))
            t = s.rev()
            u = t.rev()
            self.assertEqual(u, s)
        s = madstring("12345")
        self.assertEqual(str(s), "12345")
        self.assertIs(str(s).__class__, str)
        base = "\x00" * 5
        s = madstring(base)
        self.assertEqual(s, base)
        self.assertEqual(str(s), base)
        self.assertIs(str(s).__class__, str)
        self.assertEqual(hash(s), hash(base))
        # Subclass instances hash/compare equal to the plain string,
        # so either works as a dict key for the other.
        self.assertEqual({s: 1}[base], 1)
        self.assertEqual({base: 1}[s], 1)
        # Every string operation below returns an exact str.
        self.assertIs((s + "").__class__, str)
        self.assertEqual(s + "", base)
        self.assertIs(("" + s).__class__, str)
        self.assertEqual("" + s, base)
        self.assertIs((s * 0).__class__, str)
        self.assertEqual(s * 0, "")
        self.assertIs((s * 1).__class__, str)
        self.assertEqual(s * 1, base)
        self.assertIs((s * 2).__class__, str)
        self.assertEqual(s * 2, base + base)
        self.assertIs(s[:].__class__, str)
        self.assertEqual(s[:], base)
        self.assertIs(s[0:0].__class__, str)
        self.assertEqual(s[0:0], "")
        self.assertIs(s.strip().__class__, str)
        self.assertEqual(s.strip(), base)
        self.assertIs(s.lstrip().__class__, str)
        self.assertEqual(s.lstrip(), base)
        self.assertIs(s.rstrip().__class__, str)
        self.assertEqual(s.rstrip(), base)
        identitytab = {}
        self.assertIs(s.translate(identitytab).__class__, str)
        self.assertEqual(s.translate(identitytab), base)
        self.assertIs(s.replace("x", "x").__class__, str)
        self.assertEqual(s.replace("x", "x"), base)
        self.assertIs(s.ljust(len(s)).__class__, str)
        self.assertEqual(s.ljust(len(s)), base)
        self.assertIs(s.rjust(len(s)).__class__, str)
        self.assertEqual(s.rjust(len(s)), base)
        self.assertIs(s.center(len(s)).__class__, str)
        self.assertEqual(s.center(len(s)), base)
        self.assertIs(s.lower().__class__, str)
        self.assertEqual(s.lower(), base)
        class madunicode(str):
            # Same caching-reverse pattern as madstring above.
            _rev = None
            def rev(self):
                if self._rev is not None:
                    return self._rev
                L = list(self)
                L.reverse()
                self._rev = self.__class__("".join(L))
                return self._rev
        u = madunicode("ABCDEF")
        self.assertEqual(u, "ABCDEF")
        self.assertEqual(u.rev(), madunicode("FEDCBA"))
        self.assertEqual(u.rev().rev(), madunicode("ABCDEF"))
        base = "12345"
        u = madunicode(base)
        self.assertEqual(str(u), base)
        self.assertIs(str(u).__class__, str)
        self.assertEqual(hash(u), hash(base))
        self.assertEqual({u: 1}[base], 1)
        self.assertEqual({base: 1}[u], 1)
        self.assertIs(u.strip().__class__, str)
        self.assertEqual(u.strip(), base)
        self.assertIs(u.lstrip().__class__, str)
        self.assertEqual(u.lstrip(), base)
        self.assertIs(u.rstrip().__class__, str)
        self.assertEqual(u.rstrip(), base)
        self.assertIs(u.replace("x", "x").__class__, str)
        self.assertEqual(u.replace("x", "x"), base)
        self.assertIs(u.replace("xy", "xy").__class__, str)
        self.assertEqual(u.replace("xy", "xy"), base)
        self.assertIs(u.center(len(u)).__class__, str)
        self.assertEqual(u.center(len(u)), base)
        self.assertIs(u.ljust(len(u)).__class__, str)
        self.assertEqual(u.ljust(len(u)), base)
        self.assertIs(u.rjust(len(u)).__class__, str)
        self.assertEqual(u.rjust(len(u)), base)
        self.assertIs(u.lower().__class__, str)
        self.assertEqual(u.lower(), base)
        self.assertIs(u.upper().__class__, str)
        self.assertEqual(u.upper(), base)
        self.assertIs(u.capitalize().__class__, str)
        self.assertEqual(u.capitalize(), base)
        self.assertIs(u.title().__class__, str)
        self.assertEqual(u.title(), base)
        self.assertIs((u + "").__class__, str)
        self.assertEqual(u + "", base)
        self.assertIs(("" + u).__class__, str)
        self.assertEqual("" + u, base)
        self.assertIs((u * 0).__class__, str)
        self.assertEqual(u * 0, "")
        self.assertIs((u * 1).__class__, str)
        self.assertEqual(u * 1, base)
        self.assertIs((u * 2).__class__, str)
        self.assertEqual(u * 2, base + base)
        self.assertIs(u[:].__class__, str)
        self.assertEqual(u[:], base)
        self.assertIs(u[0:0].__class__, str)
        self.assertEqual(u[0:0], "")
        # A list subclass keeps full list mutability.
        class sublist(list):
            pass
        a = sublist(range(5))
        self.assertEqual(a, list(range(5)))
        a.append("hello")
        self.assertEqual(a, list(range(5)) + ["hello"])
        a[5] = 5
        self.assertEqual(a, list(range(6)))
        a.extend(range(6, 20))
        self.assertEqual(a, list(range(20)))
        a[-5:] = []
        self.assertEqual(a, list(range(15)))
        del a[10:15]
        self.assertEqual(len(a), 10)
        self.assertEqual(a, list(range(10)))
        self.assertEqual(list(a), list(range(10)))
        self.assertEqual(a[0], 0)
        self.assertEqual(a[9], 9)
        self.assertEqual(a[-10], 0)
        self.assertEqual(a[-1], 9)
        self.assertEqual(a[:5], list(range(5)))
        ## class CountedInput(file):
        ##    """Counts lines read by self.readline().
        ##
        ##    self.lineno is the 0-based ordinal of the last line read, up to
        ##    a maximum of one greater than the number of lines in the file.
        ##
        ##    self.ateof is true if and only if the final "" line has been read,
        ##    at which point self.lineno stops incrementing, and further calls
        ##    to readline() continue to return "".
        ##    """
        ##
        ##    lineno = 0
        ##    ateof = 0
        ##    def readline(self):
        ##        if self.ateof:
        ##            return ""
        ##        s = file.readline(self)
        ##        # Next line works too.
        ##        # s = super(CountedInput, self).readline()
        ##        self.lineno += 1
        ##        if s == "":
        ##            self.ateof = 1
        ##        return s
        ##
        ## f = file(name=support.TESTFN, mode='w')
        ## lines = ['a\n', 'b\n', 'c\n']
        ## try:
        ##     f.writelines(lines)
        ##     f.close()
        ##     f = CountedInput(support.TESTFN)
        ##     for (i, expected) in zip(range(1, 5) + [4], lines + 2 * [""]):
        ##         got = f.readline()
        ##         self.assertEqual(expected, got)
        ##         self.assertEqual(f.lineno, i)
        ##         self.assertEqual(f.ateof, (i > len(lines)))
        ##     f.close()
        ## finally:
        ##     try:
        ##         f.close()
        ##     except:
        ##         pass
        ##     support.unlink(support.TESTFN)
def test_keywords(self):
# Testing keyword args to basic type constructors ...
with self.assertRaisesRegex(TypeError, 'keyword argument'):
int(x=1)
with self.assertRaisesRegex(TypeError, 'keyword argument'):
float(x=2)
with self.assertRaisesRegex(TypeError, 'keyword argument'):
bool(x=2)
self.assertEqual(complex(imag=42, real=666), complex(666, 42))
self.assertEqual(str(object=500), '500')
self.assertEqual(str(object=b'abc', errors='strict'), 'abc')
with self.assertRaisesRegex(TypeError, 'keyword argument'):
tuple(sequence=range(3))
with self.assertRaisesRegex(TypeError, 'keyword argument'):
list(sequence=(0, 1, 2))
# note: as of Python 2.3, dict() no longer has an "items" keyword arg
for constructor in (int, float, int, complex, str, str,
tuple, list):
try:
constructor(bogus_keyword_arg=1)
except TypeError:
pass
else:
self.fail("expected TypeError from bogus keyword argument to %r"
% constructor)
    def test_str_subclass_as_dict_key(self):
        """A str subclass with case-insensitive __eq__/__hash__ works
        correctly as a dict key."""
        # Testing a str subclass used as dict key ..
        class cistr(str):
            """Subclass of str that computes __eq__ case-insensitively.
            Also computes a hash code of the string in canonical form.
            """
            def __init__(self, value):
                self.canonical = value.lower()
                self.hashcode = hash(self.canonical)
            def __eq__(self, other):
                if not isinstance(other, cistr):
                    other = cistr(other)
                return self.canonical == other.canonical
            def __hash__(self):
                return self.hashcode
        self.assertEqual(cistr('ABC'), 'abc')
        self.assertEqual('aBc', cistr('ABC'))
        # str() still yields the original spelling.
        self.assertEqual(str(cistr('ABC')), 'ABC')
        # Lookups succeed regardless of case.
        d = {cistr('one'): 1, cistr('two'): 2, cistr('tHree'): 3}
        self.assertEqual(d[cistr('one')], 1)
        self.assertEqual(d[cistr('tWo')], 2)
        self.assertEqual(d[cistr('THrEE')], 3)
        self.assertIn(cistr('ONe'), d)
        self.assertEqual(d.get(cistr('thrEE')), 3)
def test_classic_comparisons(self):
# Testing classic comparisons...
class classic:
pass
for base in (classic, int, object):
class C(base):
def __init__(self, value):
self.value = int(value)
def __eq__(self, other):
if isinstance(other, C):
return self.value == other.value
if isinstance(other, int) or isinstance(other, int):
return self.value == other
return NotImplemented
def __ne__(self, other):
if isinstance(other, C):
return self.value != other.value
if isinstance(other, int) or isinstance(other, int):
return self.value != other
return NotImplemented
def __lt__(self, other):
if isinstance(other, C):
return self.value < other.value
if isinstance(other, int) or isinstance(other, int):
return self.value < other
return NotImplemented
def __le__(self, other):
if isinstance(other, C):
return self.value <= other.value
if isinstance(other, int) or isinstance(other, int):
return self.value <= other
return NotImplemented
def __gt__(self, other):
if isinstance(other, C):
return self.value > other.value
if isinstance(other, int) or isinstance(other, int):
return self.value > other
return NotImplemented
def __ge__(self, other):
if isinstance(other, C):
return self.value >= other.value
if isinstance(other, int) or isinstance(other, int):
return self.value >= other
return NotImplemented
c1 = C(1)
c2 = C(2)
c3 = C(3)
self.assertEqual(c1, 1)
c = {1: c1, 2: c2, 3: c3}
for x in 1, 2, 3:
for y in 1, 2, 3:
for op in "<", "<=", "==", "!=", ">", ">=":
self.assertEqual(eval("c[x] %s c[y]" % op),
eval("x %s y" % op),
"x=%d, y=%d" % (x, y))
self.assertEqual(eval("c[x] %s y" % op),
eval("x %s y" % op),
"x=%d, y=%d" % (x, y))
self.assertEqual(eval("x %s c[y]" % op),
eval("x %s y" % op),
"x=%d, y=%d" % (x, y))
def test_rich_comparisons(self):
# Testing rich comparisons...
class Z(complex):
pass
z = Z(1)
self.assertEqual(z, 1+0j)
self.assertEqual(1+0j, z)
class ZZ(complex):
def __eq__(self, other):
try:
return abs(self - other) <= 1e-6
except:
return NotImplemented
zz = ZZ(1.0000003)
self.assertEqual(zz, 1+0j)
self.assertEqual(1+0j, zz)
class classic:
pass
for base in (classic, int, object, list):
class C(base):
def __init__(self, value):
self.value = int(value)
def __cmp__(self_, other):
self.fail("shouldn't call __cmp__")
def __eq__(self, other):
if isinstance(other, C):
return self.value == other.value
if isinstance(other, int) or isinstance(other, int):
return self.value == other
return NotImplemented
def __ne__(self, other):
if isinstance(other, C):
return self.value != other.value
if isinstance(other, int) or isinstance(other, int):
return self.value != other
return NotImplemented
def __lt__(self, other):
if isinstance(other, C):
return self.value < other.value
if isinstance(other, int) or isinstance(other, int):
return self.value < other
return NotImplemented
def __le__(self, other):
if isinstance(other, C):
return self.value <= other.value
if isinstance(other, int) or isinstance(other, int):
return self.value <= other
return NotImplemented
def __gt__(self, other):
if isinstance(other, C):
return self.value > other.value
if isinstance(other, int) or isinstance(other, int):
return self.value > other
return NotImplemented
def __ge__(self, other):
if isinstance(other, C):
return self.value >= other.value
if isinstance(other, int) or isinstance(other, int):
return self.value >= other
return NotImplemented
c1 = C(1)
c2 = C(2)
c3 = C(3)
self.assertEqual(c1, 1)
c = {1: c1, 2: c2, 3: c3}
for x in 1, 2, 3:
for y in 1, 2, 3:
for op in "<", "<=", "==", "!=", ">", ">=":
self.assertEqual(eval("c[x] %s c[y]" % op),
eval("x %s y" % op),
"x=%d, y=%d" % (x, y))
self.assertEqual(eval("c[x] %s y" % op),
eval("x %s y" % op),
"x=%d, y=%d" % (x, y))
self.assertEqual(eval("x %s c[y]" % op),
eval("x %s y" % op),
"x=%d, y=%d" % (x, y))
    def test_descrdoc(self):
        """Built-in getset and member descriptors expose their C-level doc
        strings via __doc__."""
        # Testing descriptor doc strings...
        from _io import FileIO
        def check(descr, what):
            self.assertEqual(descr.__doc__, what)
        check(FileIO.closed, "True if the file is closed") # getset descriptor
        check(complex.real, "the real part of a complex number") # member descriptor
    def test_doc_descriptor(self):
        """__doc__ implemented as a descriptor is honored for both class
        and instance access (SF bug 542984)."""
        # Testing __doc__ descriptor...
        # SF bug 542984
        class DocDescr(object):
            def __get__(self, object, otype):
                # Render both the instance (None for class access) and the
                # owner type into the returned string.
                if object:
                    object = object.__class__.__name__ + ' instance'
                if otype:
                    otype = otype.__name__
                return 'object=%s; type=%s' % (object, otype)
        class OldClass:
            __doc__ = DocDescr()
        class NewClass(object):
            __doc__ = DocDescr()
        self.assertEqual(OldClass.__doc__, 'object=None; type=OldClass')
        self.assertEqual(OldClass().__doc__, 'object=OldClass instance; type=OldClass')
        self.assertEqual(NewClass.__doc__, 'object=None; type=NewClass')
        self.assertEqual(NewClass().__doc__, 'object=NewClass instance; type=NewClass')
    def test_set_class(self):
        """__class__ assignment: permitted between layout-compatible heap
        classes, refused for everything else; also a regression check for
        __class__ reassignment inside __del__ (issue 5283)."""
        # Testing __class__ assignment...
        class C(object): pass
        class D(object): pass
        class E(object): pass
        class F(D, E): pass
        # All four plain heap classes are mutually compatible.
        for cls in C, D, E, F:
            for cls2 in C, D, E, F:
                x = cls()
                x.__class__ = cls2
                self.assertIs(x.__class__, cls2)
                x.__class__ = cls
                self.assertIs(x.__class__, cls)
        def cant(x, C):
            # Assert that assigning C to x.__class__ fails, and that
            # deleting x.__class__ fails too.
            try:
                x.__class__ = C
            except TypeError:
                pass
            else:
                self.fail("shouldn't allow %r.__class__ = %r" % (x, C))
            try:
                delattr(x, "__class__")
            except (TypeError, AttributeError):
                pass
            else:
                self.fail("shouldn't allow del %r.__class__" % x)
        cant(C(), list)
        cant(list(), C)
        cant(C(), 1)
        cant(C(), object)
        cant(object(), list)
        cant(list(), object)
        class Int(int): __slots__ = []
        cant(True, int)
        cant(2, bool)
        o = object()
        cant(o, type(1))
        cant(o, type(None))
        del o
        # Classes with compatible __slots__ layouts allow assignment;
        # different slot names or extra slots/__dict__/__weakref__ do not.
        class G(object):
            __slots__ = ["a", "b"]
        class H(object):
            __slots__ = ["b", "a"]
        class I(object):
            __slots__ = ["a", "b"]
        class J(object):
            __slots__ = ["c", "b"]
        class K(object):
            __slots__ = ["a", "b", "d"]
        class L(H):
            __slots__ = ["e"]
        class M(I):
            __slots__ = ["e"]
        class N(J):
            __slots__ = ["__weakref__"]
        class P(J):
            __slots__ = ["__dict__"]
        class Q(J):
            pass
        class R(J):
            __slots__ = ["__dict__", "__weakref__"]
        for cls, cls2 in ((G, H), (G, I), (I, H), (Q, R), (R, Q)):
            x = cls()
            x.a = 1
            x.__class__ = cls2
            self.assertIs(x.__class__, cls2,
                "assigning %r as __class__ for %r silently failed" % (cls2, x))
            # Attribute values must survive the class switch.
            self.assertEqual(x.a, 1)
            x.__class__ = cls
            self.assertIs(x.__class__, cls,
                "assigning %r as __class__ for %r silently failed" % (cls, x))
            self.assertEqual(x.a, 1)
        for cls in G, J, K, L, M, N, P, R, list, Int:
            for cls2 in G, J, K, L, M, N, P, R, list, Int:
                if cls is cls2:
                    continue
                cant(cls(), cls2)
        # Issue5283: when __class__ changes in __del__, the wrong
        # type gets DECREF'd.
        class O(object):
            pass
        class A(object):
            def __del__(self):
                self.__class__ = O
        l = [A() for x in range(100)]
        del l
    def test_set_dict(self):
        """__dict__ assignment: allowed on plain instances, refused on
        classes and modules; exceptions allow replacement but (on CPython)
        not deletion of their __dict__."""
        # Testing __dict__ assignment...
        class C(object): pass
        a = C()
        a.__dict__ = {'b': 1}
        self.assertEqual(a.b, 1)
        def cant(x, dict):
            # Assert that assigning 'dict' to x.__dict__ fails.
            try:
                x.__dict__ = dict
            except (AttributeError, TypeError):
                pass
            else:
                self.fail("shouldn't allow %r.__dict__ = %r" % (x, dict))
        # Only real dicts are accepted as replacements.
        cant(a, None)
        cant(a, [])
        cant(a, 1)
        del a.__dict__ # Deleting __dict__ is allowed
        class Base(object):
            pass
        def verify_dict_readonly(x):
            """
            x has to be an instance of a class inheriting from Base.
            """
            cant(x, {})
            try:
                del x.__dict__
            except (AttributeError, TypeError):
                pass
            else:
                self.fail("shouldn't allow del %r.__dict__" % x)
            # The inherited __dict__ descriptor must refuse writes too.
            dict_descr = Base.__dict__["__dict__"]
            try:
                dict_descr.__set__(x, {})
            except (AttributeError, TypeError):
                pass
            else:
                self.fail("dict_descr allowed access to %r's dict" % x)
        # Classes don't allow __dict__ assignment and have readonly dicts
        class Meta1(type, Base):
            pass
        class Meta2(Base, type):
            pass
        class D(object, metaclass=Meta1):
            pass
        class E(object, metaclass=Meta2):
            pass
        for cls in C, D, E:
            verify_dict_readonly(cls)
            # A class's __dict__ is a read-only mappingproxy.
            class_dict = cls.__dict__
            try:
                class_dict["spam"] = "eggs"
            except TypeError:
                pass
            else:
                self.fail("%r's __dict__ can be modified" % cls)
        # Modules also disallow __dict__ assignment
        class Module1(types.ModuleType, Base):
            pass
        class Module2(Base, types.ModuleType):
            pass
        for ModuleType in Module1, Module2:
            mod = ModuleType("spam")
            verify_dict_readonly(mod)
            # A module's __dict__ itself remains a writable dict.
            mod.__dict__["spam"] = "eggs"
        # Exception's __dict__ can be replaced, but not deleted
        # (at least not any more than regular exception's __dict__ can
        # be deleted; on CPython it is not the case, whereas on PyPy they
        # can, just like any other new-style instance's __dict__.)
        def can_delete_dict(e):
            try:
                del e.__dict__
            except (TypeError, AttributeError):
                return False
            else:
                return True
        class Exception1(Exception, Base):
            pass
        class Exception2(Base, Exception):
            pass
        for ExceptionType in Exception, Exception1, Exception2:
            e = ExceptionType()
            e.__dict__ = {"a": 1}
            self.assertEqual(e.a, 1)
            self.assertEqual(can_delete_dict(e), can_delete_dict(ValueError()))
def test_binary_operator_override(self):
    """Overridden binary operators on an int subclass, including the
    reflected (__radd__/__rpow__) and three-argument pow() paths."""
    class I(int):
        def __repr__(self):
            return "I(%r)" % int(self)
        def __add__(self, other):
            return I(int(self) + int(other))
        __radd__ = __add__
        def __pow__(self, other, mod=None):
            if mod is None:
                return I(pow(int(self), int(other)))
            else:
                return I(pow(int(self), int(other), int(mod)))
        def __rpow__(self, other, mod=None):
            if mod is None:
                return I(pow(int(other), int(self), mod))
            else:
                return I(pow(int(other), int(self), int(mod)))
    self.assertEqual(repr(I(1) + I(2)), "I(3)")
    self.assertEqual(repr(I(1) + 2), "I(3)")
    self.assertEqual(repr(1 + I(2)), "I(3)")
    self.assertEqual(repr(I(2) ** I(3)), "I(8)")
    self.assertEqual(repr(2 ** I(3)), "I(8)")
    self.assertEqual(repr(I(2) ** 3), "I(8)")
    self.assertEqual(repr(pow(I(2), I(3), I(5))), "I(3)")
    # NOTE(review): S is defined but never exercised here — presumably only
    # checks that a str subclass overriding __eq__ is accepted; confirm.
    class S(str):
        def __eq__(self, other):
            return self.lower() == other.lower()
def test_subclass_propagation(self):
    """Slot functions (__hash__, __getattribute__, __getattr__) assigned on
    base classes must propagate to subclasses through the MRO, and be
    correctly undone when deleted."""
    class A(object):
        pass
    class B(A):
        pass
    class C(A):
        pass
    class D(B, C):
        pass
    d = D()
    orig_hash = hash(d) # related to id(d) in platform-dependent ways
    # Setting __hash__ on any base must be visible through D's MRO
    # (D -> B -> C -> A), nearest definition winning.
    A.__hash__ = lambda self: 42
    self.assertEqual(hash(d), 42)
    C.__hash__ = lambda self: 314
    self.assertEqual(hash(d), 314)
    B.__hash__ = lambda self: 144
    self.assertEqual(hash(d), 144)
    D.__hash__ = lambda self: 100
    self.assertEqual(hash(d), 100)
    # Setting __hash__ = None makes the type unhashable; deleting it
    # re-exposes the next definition in the MRO.
    D.__hash__ = None
    self.assertRaises(TypeError, hash, d)
    del D.__hash__
    self.assertEqual(hash(d), 144)
    B.__hash__ = None
    self.assertRaises(TypeError, hash, d)
    del B.__hash__
    self.assertEqual(hash(d), 314)
    C.__hash__ = None
    self.assertRaises(TypeError, hash, d)
    del C.__hash__
    self.assertEqual(hash(d), 42)
    A.__hash__ = None
    self.assertRaises(TypeError, hash, d)
    del A.__hash__
    self.assertEqual(hash(d), orig_hash)
    d.foo = 42
    d.bar = 42
    self.assertEqual(d.foo, 42)
    self.assertEqual(d.bar, 42)
    # __getattribute__ set on a base intercepts every lookup.
    def __getattribute__(self, name):
        if name == "foo":
            return 24
        return object.__getattribute__(self, name)
    A.__getattribute__ = __getattribute__
    self.assertEqual(d.foo, 24)
    self.assertEqual(d.bar, 42)
    # __getattr__ only fires when the normal lookup fails.
    def __getattr__(self, name):
        if name in ("spam", "foo", "bar"):
            return "hello"
        raise AttributeError(name)
    B.__getattr__ = __getattr__
    self.assertEqual(d.spam, "hello")
    self.assertEqual(d.foo, 24)
    self.assertEqual(d.bar, 42)
    del A.__getattribute__
    self.assertEqual(d.foo, 42)
    del d.foo
    self.assertEqual(d.foo, "hello")
    self.assertEqual(d.bar, 42)
    del B.__getattr__
    try:
        d.foo
    except AttributeError:
        pass
    else:
        self.fail("d.foo should be undefined now")
    # Test a nasty bug in recurse_down_subclasses()
    class A(object):
        pass
    class B(A):
        pass
    del B
    support.gc_collect()
    A.__setitem__ = lambda *a: None # crash
def test_buffer_inheritance(self):
    """The buffer interface is inherited by bytes subclasses but must not
    appear on int subclasses."""
    import binascii
    # SF bug [#470040] ParseTuple t# vs subclasses.
    class MyBytes(bytes):
        pass
    base = b'abc'
    m = MyBytes(base)
    # b2a_hex uses the buffer interface to get its argument's value, via
    # PyArg_ParseTuple 't#' code.
    self.assertEqual(binascii.b2a_hex(m), binascii.b2a_hex(base))
    class MyInt(int):
        pass
    m = MyInt(42)
    try:
        binascii.b2a_hex(m)
        self.fail('subclass of int should not have a buffer interface')
    except TypeError:
        pass
def test_str_of_str_subclass(self):
    """__str__/__repr__ defined on a str subclass must be honoured by
    str(), repr() and print()."""
    import binascii
    import io

    class octetstring(str):
        def __str__(self):
            # hex-encode the character data
            return binascii.b2a_hex(self.encode('ascii')).decode("ascii")
        def __repr__(self):
            return self + " repr"

    o = octetstring('A')
    self.assertEqual(type(o), octetstring)
    self.assertEqual(type(str(o)), str)
    self.assertEqual(type(repr(o)), str)
    self.assertEqual(ord(o), 0x41)
    self.assertEqual(str(o), '41')
    self.assertEqual(repr(o), 'A repr')
    self.assertEqual(o.__str__(), '41')
    self.assertEqual(o.__repr__(), 'A repr')

    capture = io.StringIO()
    # Calling str() or not exercises different internal paths.
    print(o, file=capture)
    print(str(o), file=capture)
    self.assertEqual(capture.getvalue(), '41\n41\n')
    capture.close()
def test_keyword_arguments(self):
# Testing keyword arguments to __init__, __call__...
def f(a): return a
self.assertEqual(f.__call__(a=42), 42)
ba = bytearray()
bytearray.__init__(ba, 'abc\xbd\u20ac',
encoding='latin1', errors='replace')
self.assertEqual(ba, b'abc\xbd?')
def test_recursive_call(self):
# Testing recursive __call__() by setting to instance of class...
class A(object):
pass
A.__call__ = A()
try:
A()()
except RecursionError:
pass
else:
self.fail("Recursion limit should have been reached for __call__()")
def test_delete_hook(self):
    """__del__ runs when the last reference goes away; del on an
    unsubscriptable instance raises TypeError."""
    log = []
    class C(object):
        def __del__(self):
            log.append(1)
    c = C()
    self.assertEqual(log, [])
    del c
    # gc_collect() forces finalization on implementations without
    # refcounting (e.g. PyPy).
    support.gc_collect()
    self.assertEqual(log, [1])

    class D(object): pass
    d = D()
    try: del d[0]
    except TypeError: pass
    else: self.fail("invalid del() didn't raise TypeError")
def test_hash_inheritance(self):
# Testing hash of mutable subclasses...
class mydict(dict):
pass
d = mydict()
try:
hash(d)
except TypeError:
pass
else:
self.fail("hash() of dict subclass should fail")
class mylist(list):
pass
d = mylist()
try:
hash(d)
except TypeError:
pass
else:
self.fail("hash() of list subclass should fail")
def test_str_operations(self):
    """Assorted str operations must raise the documented exception types
    for invalid operands and format strings."""
    try: 'a' + 5
    except TypeError: pass
    else: self.fail("'' + 5 doesn't raise TypeError")

    try: ''.split('')
    except ValueError: pass
    else: self.fail("''.split('') doesn't raise ValueError")

    try: ''.join([0])
    except TypeError: pass
    else: self.fail("''.join([0]) doesn't raise TypeError")

    try: ''.rindex('5')
    except ValueError: pass
    else: self.fail("''.rindex('5') doesn't raise ValueError")

    # %-formatting error cases
    try: '%(n)s' % None
    except TypeError: pass
    else: self.fail("'%(n)s' % None doesn't raise TypeError")

    try: '%(n' % {}
    except ValueError: pass
    else: self.fail("'%(n' % {} '' doesn't raise ValueError")

    try: '%*s' % ('abc')
    except TypeError: pass
    else: self.fail("'%*s' % ('abc') doesn't raise TypeError")

    try: '%*.*s' % ('abc', 5)
    except TypeError: pass
    else: self.fail("'%*.*s' % ('abc', 5) doesn't raise TypeError")

    try: '%s' % (1, 2)
    except TypeError: pass
    else: self.fail("'%s' % (1, 2) doesn't raise TypeError")

    try: '%' % None
    except ValueError: pass
    else: self.fail("'%' % None doesn't raise ValueError")

    self.assertEqual('534253'.isdigit(), 1)
    self.assertEqual('534253x'.isdigit(), 0)
    self.assertEqual('%c' % 5, '\x05')
    self.assertEqual('%c' % '5', '5')
def test_deepcopy_recursive(self):
# Testing deepcopy of recursive objects...
class Node:
pass
a = Node()
b = Node()
a.b = b
b.a = a
z = deepcopy(a) # This blew up before
def test_uninitialized_modules(self):
    """A module created via ModuleType.__new__ (skipping __init__) has no
    name/file attributes but still supports attribute assignment."""
    from types import ModuleType as M
    m = M.__new__(M)
    # str() on the uninitialized module must not crash.
    str(m)
    self.assertNotHasAttr(m, "__name__")
    self.assertNotHasAttr(m, "__file__")
    self.assertNotHasAttr(m, "foo")
    self.assertFalse(m.__dict__) # None or {} are both reasonable answers
    m.foo = 1
    self.assertEqual(m.__dict__, {"foo": 1})
def test_funny_new(self):
    """__new__ returning something unexpected: a non-instance skips
    __init__; an instance of a different subclass runs that subclass's
    __init__; static/class-method __new__ variants get the right args."""
    class C(object):
        def __new__(cls, arg):
            if isinstance(arg, str): return [1, 2, 3]
            elif isinstance(arg, int): return object.__new__(D)
            else: return object.__new__(cls)
    class D(C):
        def __init__(self, arg):
            self.foo = arg
    # __new__ returned a list -> __init__ is never called.
    self.assertEqual(C("1"), [1, 2, 3])
    self.assertEqual(D("1"), [1, 2, 3])
    d = D(None)
    self.assertEqual(d.foo, None)
    # C(1) returns a D instance, so D.__init__ runs.
    d = C(1)
    self.assertIsInstance(d, D)
    self.assertEqual(d.foo, 1)
    d = D(1)
    self.assertIsInstance(d, D)
    self.assertEqual(d.foo, 1)

    # As a staticmethod, __new__ receives only the explicit arguments
    # (the class is passed positionally by the call machinery).
    class C(object):
        @staticmethod
        def __new__(*args):
            return args
    self.assertEqual(C(1, 2), (C, 1, 2))
    class D(C):
        pass
    self.assertEqual(D(1, 2), (D, 1, 2))

    # As a classmethod, the class is bound and then also passed
    # positionally, hence it appears twice.
    class C(object):
        @classmethod
        def __new__(*args):
            return args
    self.assertEqual(C(1, 2), (C, C, 1, 2))
    class D(C):
        pass
    self.assertEqual(D(1, 2), (D, D, 1, 2))
def test_imul_bug(self):
# Testing for __imul__ problems...
# SF bug 544647
class C(object):
def __imul__(self, other):
return (self, other)
x = C()
y = x
y *= 1.0
self.assertEqual(y, (x, 1.0))
y = x
y *= 2
self.assertEqual(y, (x, 2))
y = x
y *= 3
self.assertEqual(y, (x, 3))
y = x
y *= 1<<100
self.assertEqual(y, (x, 1<<100))
y = x
y *= None
self.assertEqual(y, (x, None))
y = x
y *= "foo"
self.assertEqual(y, (x, "foo"))
def test_copy_setstate(self):
    """copy.copy()/deepcopy() must use __getstate__/__setstate__."""
    import copy
    class C(object):
        def __init__(self, foo=None):
            self.foo = foo
            self.__foo = foo
        def setfoo(self, foo=None):
            # Updates only the public attribute, so the private copy
            # records the original construction value.
            self.foo = foo
        def getfoo(self):
            return self.__foo
        def __getstate__(self):
            return [self.foo]
        def __setstate__(self_, lst):
            # `self_` is the instance being restored; `self` (the test
            # case) is captured from the enclosing scope for assertions.
            self.assertEqual(len(lst), 1)
            self_.__foo = self_.foo = lst[0]
    a = C(42)
    a.setfoo(24)
    self.assertEqual(a.foo, 24)
    self.assertEqual(a.getfoo(), 42)
    # Copies go through __getstate__/__setstate__, so both attributes
    # end up equal to the pickled state value.
    b = copy.copy(a)
    self.assertEqual(b.foo, 24)
    self.assertEqual(b.getfoo(), 24)
    b = copy.deepcopy(a)
    self.assertEqual(b.foo, 24)
    self.assertEqual(b.getfoo(), 24)
def test_slices(self):
    """Slice syntax, explicit slice objects, and overridden __getitem__/
    __setitem__ must agree for str, tuple and list (and subclasses)."""
    # Strings
    self.assertEqual("hello"[:4], "hell")
    self.assertEqual("hello"[slice(4)], "hell")
    self.assertEqual(str.__getitem__("hello", slice(4)), "hell")
    class S(str):
        def __getitem__(self, x):
            return str.__getitem__(self, x)
    self.assertEqual(S("hello")[:4], "hell")
    self.assertEqual(S("hello")[slice(4)], "hell")
    self.assertEqual(S("hello").__getitem__(slice(4)), "hell")
    # Tuples
    self.assertEqual((1,2,3)[:2], (1,2))
    self.assertEqual((1,2,3)[slice(2)], (1,2))
    self.assertEqual(tuple.__getitem__((1,2,3), slice(2)), (1,2))
    class T(tuple):
        def __getitem__(self, x):
            return tuple.__getitem__(self, x)
    self.assertEqual(T((1,2,3))[:2], (1,2))
    self.assertEqual(T((1,2,3))[slice(2)], (1,2))
    self.assertEqual(T((1,2,3)).__getitem__(slice(2)), (1,2))
    # Lists
    self.assertEqual([1,2,3][:2], [1,2])
    self.assertEqual([1,2,3][slice(2)], [1,2])
    self.assertEqual(list.__getitem__([1,2,3], slice(2)), [1,2])
    class L(list):
        def __getitem__(self, x):
            return list.__getitem__(self, x)
    self.assertEqual(L([1,2,3])[:2], [1,2])
    self.assertEqual(L([1,2,3])[slice(2)], [1,2])
    self.assertEqual(L([1,2,3]).__getitem__(slice(2)), [1,2])
    # Now do lists and __setitem__
    a = L([1,2,3])
    a[slice(1, 3)] = [3,2]
    self.assertEqual(a, [1,3,2])
    a[slice(0, 2, 1)] = [3,1]
    self.assertEqual(a, [3,1,2])
    a.__setitem__(slice(1, 3), [2,1])
    self.assertEqual(a, [3,2,1])
    a.__setitem__(slice(0, 2, 1), [2,3])
    self.assertEqual(a, [2,3,1])
def test_subtype_resurrection(self):
    """An instance resurrected by its own __del__ must survive collection
    exactly once."""
    class C(object):
        container = []

        def __del__(self):
            # resurrect the instance
            C.container.append(self)

    c = C()
    c.attr = 42

    # The most interesting thing here is whether this blows up, due to
    # flawed GC tracking logic in typeobject.c's call_finalizer() (a 2.2.1
    # bug).
    del c
    support.gc_collect()
    self.assertEqual(len(C.container), 1)

    # Make c mortal again, so that the test framework with -l doesn't report
    # it as a leak.
    del C.__del__
def test_slots_trash(self):
# Testing slot trash...
# Deallocating deeply nested slotted trash caused stack overflows
class trash(object):
__slots__ = ['x']
def __init__(self, x):
self.x = x
o = None
for i in range(50000):
o = trash(o)
del o
def test_slots_multiple_inheritance(self):
    """Multiple inheritance mixing a slotted and a plain base must not
    crash, and the result keeps __dict__/__weakref__ (SF bug 575229)."""
    class A(object):
        __slots__=()
    class B(object):
        pass
    class C(A,B) :
        __slots__=()
    if support.check_impl_detail():
        # __basicsize__ comparison is a CPython layout detail.
        self.assertEqual(C.__basicsize__, B.__basicsize__)
    self.assertHasAttr(C, '__dict__')
    self.assertHasAttr(C, '__weakref__')
    C().x = 2
def test_rmul(self):
# Testing correct invocation of __rmul__...
# SF patch 592646
class C(object):
def __mul__(self, other):
return "mul"
def __rmul__(self, other):
return "rmul"
a = C()
self.assertEqual(a*2, "mul")
self.assertEqual(a*2.2, "mul")
self.assertEqual(2*a, "rmul")
self.assertEqual(2.2*a, "rmul")
def test_ipow(self):
# Testing correct invocation of __ipow__...
# [SF bug 620179]
class C(object):
def __ipow__(self, other):
pass
a = C()
a **= 2
def test_mutable_bases(self):
    """__bases__ assignment: valid retargeting works and is seen by
    subclasses; deletion, empty tuples, cycles and duplicates are
    rejected."""
    # stuff that should work:
    class C(object):
        pass
    class C2(object):
        def __getattribute__(self, attr):
            if attr == 'a':
                return 2
            else:
                return super(C2, self).__getattribute__(attr)
        def meth(self):
            return 1
    class D(C):
        pass
    class E(D):
        pass
    d = D()
    e = E()
    D.__bases__ = (C,)
    D.__bases__ = (C2,)
    # Existing instances (even of the subclass E) now see C2's behavior.
    self.assertEqual(d.meth(), 1)
    self.assertEqual(e.meth(), 1)
    self.assertEqual(d.a, 2)
    self.assertEqual(e.a, 2)
    self.assertEqual(C2.__subclasses__(), [D])

    try:
        del D.__bases__
    except (TypeError, AttributeError):
        pass
    else:
        self.fail("shouldn't be able to delete .__bases__")

    try:
        D.__bases__ = ()
    except TypeError as msg:
        # The pre-3.0 error message about "classic bases" would be wrong.
        if str(msg) == "a new-style class can't have only classic bases":
            self.fail("wrong error message for .__bases__ = ()")
    else:
        self.fail("shouldn't be able to set .__bases__ to ()")

    try:
        D.__bases__ = (D,)
    except TypeError:
        pass
    else:
        # actually, we'll have crashed by here...
        self.fail("shouldn't be able to create inheritance cycles")

    try:
        D.__bases__ = (C, C)
    except TypeError:
        pass
    else:
        self.fail("didn't detect repeated base classes")

    try:
        D.__bases__ = (E,)
    except TypeError:
        pass
    else:
        self.fail("shouldn't be able to create inheritance cycles")
def test_builtin_bases(self):
    """Querying __bases__ on every builtin type must be safe, and
    __bases__ reassignments that change the layout must be rejected."""
    # Make sure all the builtin types can have their base queried without
    # segfaulting. See issue #5787.
    builtin_types = [tp for tp in builtins.__dict__.values()
                     if isinstance(tp, type)]
    for tp in builtin_types:
        object.__getattribute__(tp, "__bases__")
        if tp is not object:
            self.assertEqual(len(tp.__bases__), 1, tp)

    class L(list):
        pass
    class C(object):
        pass
    class D(C):
        pass

    try:
        L.__bases__ = (dict,)
    except TypeError:
        pass
    else:
        self.fail("shouldn't turn list subclass into dict subclass")

    try:
        list.__bases__ = (dict,)
    except TypeError:
        pass
    else:
        self.fail("shouldn't be able to assign to list.__bases__")

    try:
        D.__bases__ = (C, list)
    except TypeError:
        pass
    else:
        assert 0, "best_base calculation found wanting"
def test_unsubclassable_types(self):
    """type(None) cannot appear in a class's bases, neither at class
    creation time nor via later __bases__ assignment."""
    with self.assertRaises(TypeError):
        class X(type(None)):
            pass
    with self.assertRaises(TypeError):
        class X(object, type(None)):
            pass
    with self.assertRaises(TypeError):
        class X(type(None), object):
            pass
    class O(object):
        pass
    with self.assertRaises(TypeError):
        class X(O, type(None)):
            pass
    with self.assertRaises(TypeError):
        class X(type(None), O):
            pass

    # The same combinations must fail for __bases__ assignment too.
    class X(object):
        pass
    with self.assertRaises(TypeError):
        X.__bases__ = type(None),
    with self.assertRaises(TypeError):
        X.__bases__ = object, type(None)
    with self.assertRaises(TypeError):
        X.__bases__ = type(None), object
    with self.assertRaises(TypeError):
        X.__bases__ = O, type(None)
    with self.assertRaises(TypeError):
        X.__bases__ = type(None), O
def test_mutable_bases_with_failing_mro(self):
    """If recomputing a subclass's MRO fails during a __bases__
    assignment, previously adjusted subclasses must be rolled back."""
    class WorkOnce(type):
        def __new__(self, name, bases, ns):
            self.flag = 0
            return super(WorkOnce, self).__new__(WorkOnce, name, bases, ns)
        def mro(self):
            # Succeed on the first call only; the second (triggered by
            # the __bases__ assignment below) raises.
            if self.flag > 0:
                raise RuntimeError("bozo")
            else:
                self.flag += 1
                return type.mro(self)

    class WorkAlways(type):
        def mro(self):
            # this is here to make sure that .mro()s aren't called
            # with an exception set (which was possible at one point).
            # An error message will be printed in a debug build.
            # What's a good way to test for this?
            return type.mro(self)

    class C(object):
        pass

    class C2(object):
        pass

    class D(C):
        pass

    class E(D):
        pass

    class F(D, metaclass=WorkOnce):
        pass

    class G(D, metaclass=WorkAlways):
        pass

    # Immediate subclasses have their mro's adjusted in alphabetical
    # order, so E's will get adjusted before adjusting F's fails. We
    # check here that E's gets restored.
    E_mro_before = E.__mro__
    D_mro_before = D.__mro__

    try:
        D.__bases__ = (C2,)
    except RuntimeError:
        self.assertEqual(E.__mro__, E_mro_before)
        self.assertEqual(D.__mro__, D_mro_before)
    else:
        self.fail("exception not propagated")
def test_mutable_bases_catch_mro_conflict(self):
    """A __bases__ assignment that makes a grandchild's MRO unsolvable
    must raise TypeError."""
    class A(object):
        pass

    class B(object):
        pass

    class C(A, B):
        pass

    class D(A, B):
        pass

    class E(C, D):
        pass

    try:
        # Reversing C's bases conflicts with D's order inside E's MRO.
        C.__bases__ = (B, A)
    except TypeError:
        pass
    else:
        self.fail("didn't catch MRO conflict")
def test_mutable_names(self):
# Testing mutable names...
class C(object):
pass
# C.__module__ could be 'test_descr' or '__main__'
mod = C.__module__
C.__name__ = 'D'
self.assertEqual((C.__module__, C.__name__), (mod, 'D'))
C.__name__ = 'D.E'
self.assertEqual((C.__module__, C.__name__), (mod, 'D.E'))
def test_evil_type_name(self):
    # A badly placed Py_DECREF in type_set_name led to arbitrary code
    # execution while the type structure was not in a sane state, and a
    # possible segmentation fault as a result. See bug #16447.
    class Nasty(str):
        def __del__(self):
            # Reassigning __name__ from inside the old name's finalizer
            # is what used to corrupt the type.
            C.__name__ = "other"

    class C:
        pass

    C.__name__ = Nasty("abc")
    # This assignment drops the Nasty string, firing its __del__.
    C.__name__ = "normal"
def test_subclass_right_op(self):
    """Right-dispatch (__r<op>__) of a subclass must be preferred over
    left-dispatch of its base class."""
    # This code tests various cases where right-dispatch of a subclass
    # should be preferred over left-dispatch of a base class.

    # Case 1: subclass of int; this tests code in abstract.c::binary_op1()

    class B(int):
        def __floordiv__(self, other):
            return "B.__floordiv__"
        def __rfloordiv__(self, other):
            return "B.__rfloordiv__"

    self.assertEqual(B(1) // 1, "B.__floordiv__")
    self.assertEqual(1 // B(1), "B.__rfloordiv__")

    # Case 2: subclass of object; this is just the baseline for case 3

    class C(object):
        def __floordiv__(self, other):
            return "C.__floordiv__"
        def __rfloordiv__(self, other):
            return "C.__rfloordiv__"

    self.assertEqual(C() // 1, "C.__floordiv__")
    self.assertEqual(1 // C(), "C.__rfloordiv__")

    # Case 3: subclass of new-style class; here it gets interesting

    class D(C):
        def __floordiv__(self, other):
            return "D.__floordiv__"
        def __rfloordiv__(self, other):
            return "D.__rfloordiv__"

    self.assertEqual(D() // C(), "D.__floordiv__")
    self.assertEqual(C() // D(), "D.__rfloordiv__")

    # Case 4: this didn't work right in 2.2.2 and 2.3a1

    class E(C):
        pass

    self.assertEqual(E.__rfloordiv__, C.__rfloordiv__)

    self.assertEqual(E() // 1, "C.__floordiv__")
    self.assertEqual(1 // E(), "C.__rfloordiv__")
    self.assertEqual(E() // C(), "C.__floordiv__")
    self.assertEqual(C() // E(), "C.__floordiv__") # This one would fail
@support.impl_detail("testing an internal kind of method object")
def test_meth_class_get(self):
    """__get__ behavior of METH_CLASS C methods (descrobject.c's
    classmethod_get()), via dict.fromkeys."""
    # Full coverage of descrobject.c::classmethod_get()

    # Baseline
    arg = [1, 2, 3]
    res = {1: None, 2: None, 3: None}
    self.assertEqual(dict.fromkeys(arg), res)
    self.assertEqual({}.fromkeys(arg), res)

    # Now get the descriptor
    descr = dict.__dict__["fromkeys"]

    # More baseline using the descriptor directly
    self.assertEqual(descr.__get__(None, dict)(arg), res)
    self.assertEqual(descr.__get__({})(arg), res)

    # Now check various error cases
    try:
        descr.__get__(None, None)
    except TypeError:
        pass
    else:
        self.fail("shouldn't have allowed descr.__get__(None, None)")
    try:
        descr.__get__(42)
    except TypeError:
        pass
    else:
        self.fail("shouldn't have allowed descr.__get__(42)")
    try:
        descr.__get__(None, 42)
    except TypeError:
        pass
    else:
        self.fail("shouldn't have allowed descr.__get__(None, 42)")
    try:
        # int is not a dict subclass, so the owner is invalid.
        descr.__get__(None, int)
    except TypeError:
        pass
    else:
        self.fail("shouldn't have allowed descr.__get__(None, int)")
def test_isinst_isclass(self):
    """isinstance() must see through a proxy object that forwards
    attribute access (including __class__) to a wrapped instance."""
    class Proxy(object):
        def __init__(self, obj):
            self.__obj = obj
        def __getattribute__(self, name):
            if name.startswith("_Proxy__"):
                return object.__getattribute__(self, name)
            else:
                return getattr(self.__obj, name)

    # Test with a classic class
    class C:
        pass

    a = C()
    pa = Proxy(a)
    self.assertIsInstance(a, C)  # Baseline
    self.assertIsInstance(pa, C) # Test

    # Test with a classic subclass
    class D(C):
        pass

    a = D()
    pa = Proxy(a)
    self.assertIsInstance(a, C)  # Baseline
    self.assertIsInstance(pa, C) # Test

    # Test with a new-style class
    class C(object):
        pass

    a = C()
    pa = Proxy(a)
    self.assertIsInstance(a, C)  # Baseline
    self.assertIsInstance(pa, C) # Test

    # Test with a new-style subclass
    class D(C):
        pass

    a = D()
    pa = Proxy(a)
    self.assertIsInstance(a, C)  # Baseline
    self.assertIsInstance(pa, C) # Test
def test_proxy_super(self):
    """super() must work when the unbound method is called with a proxy
    object standing in for the instance."""
    class Proxy(object):
        def __init__(self, obj):
            self.__obj = obj
        def __getattribute__(self, name):
            if name.startswith("_Proxy__"):
                return object.__getattribute__(self, name)
            else:
                return getattr(self.__obj, name)

    class B(object):
        def f(self):
            return "B.f"

    class C(B):
        def f(self):
            return super(C, self).f() + "->C.f"

    obj = C()
    p = Proxy(obj)
    # Call C.f directly from the class dict with the proxy as `self`.
    self.assertEqual(C.__dict__["f"](p), "B.f->C.f")
def test_carloverre(self):
    """Carlo Verre's hack — using object.__setattr__/__delattr__ to
    mutate a builtin type — must be prohibited."""
    try:
        object.__setattr__(str, "foo", 42)
    except TypeError:
        pass
    else:
        self.fail("Carlo Verre __setattr__ succeeded!")
    try:
        object.__delattr__(str, "lower")
    except TypeError:
        pass
    else:
        self.fail("Carlo Verre __delattr__ succeeded!")
def test_weakref_segfault(self):
    """Dereferencing a weakref from __del__ of an attribute of the
    referent must not segfault (SF 742911)."""
    import weakref

    class Provoker:
        def __init__(self, referrent):
            self.ref = weakref.ref(referrent)

        def __del__(self):
            # Runs while `o` is being torn down; the ref may be dead.
            x = self.ref()

    class Oops(object):
        pass

    o = Oops()
    o.whatever = Provoker(o)
    del o
def test_wrapper_segfault(self):
    # SF 927248: deeply nested wrappers could cause stack overflow
    f = lambda:None
    # Each iteration wraps the previous callable one level deeper.
    for i in range(1000000):
        f = f.__call__
    f = None
def test_file_fault(self):
    """print() must survive sys.stdout being swapped out from under it by
    a __getattr__ hook."""
    test_stdout = sys.stdout
    class StdoutGuard:
        def __getattr__(self, attr):
            # Restore the real stdout before failing, so the error
            # message itself can be printed.
            sys.stdout = sys.__stdout__
            raise RuntimeError("Premature access to sys.stdout.%s" % attr)
    sys.stdout = StdoutGuard()
    try:
        print("Oops!")
    except RuntimeError:
        pass
    finally:
        sys.stdout = test_stdout
def test_vicious_descriptor_nonsense(self):
    """A descriptor deleted from the class during the instance-dict lookup
    triggered by attribute access must not crash the interpreter."""
    # A potential segfault spotted by Thomas Wouters in mail to
    # python-dev 2003-04-17, turned into an example & fixed by Michael
    # Hudson just less than four months later...

    class Evil(object):
        def __hash__(self):
            return hash('attr')
        def __eq__(self, other):
            # Comparing the instance-dict key deletes the class attr.
            del C.attr
            return 0

    class Descr(object):
        def __get__(self, ob, type=None):
            return 1

    class C(object):
        attr = Descr()

    c = C()
    c.__dict__[Evil()] = 0

    self.assertEqual(c.attr, 1)
    # this makes a crash more likely:
    support.gc_collect()
    self.assertNotHasAttr(c, 'attr')
def test_init(self):
# SF 1155938
class Foo(object):
def __init__(self):
return 10
try:
Foo()
except TypeError:
pass
else:
self.fail("did not test __init__() for None return")
def test_method_wrapper(self):
    """Reflection on method-wrapper objects: equality, __name__,
    __self__/__objclass__, __doc__, and unhashability of mutable owners."""
    # <type 'method-wrapper'> did not support any reflection before 2.5
    # XXX should methods really support __eq__?

    l = []
    self.assertEqual(l.__add__, l.__add__)
    self.assertEqual(l.__add__, [].__add__)
    self.assertNotEqual(l.__add__, [5].__add__)
    self.assertNotEqual(l.__add__, l.__mul__)
    self.assertEqual(l.__add__.__name__, '__add__')
    if hasattr(l.__add__, '__self__'):
        # CPython
        self.assertIs(l.__add__.__self__, l)
        self.assertIs(l.__add__.__objclass__, list)
    else:
        # Python implementations where [].__add__ is a normal bound method
        self.assertIs(l.__add__.im_self, l)
        self.assertIs(l.__add__.im_class, list)
    self.assertEqual(l.__add__.__doc__, list.__add__.__doc__)
    # A wrapper bound to an unhashable object is itself unhashable.
    try:
        hash(l.__add__)
    except TypeError:
        pass
    else:
        self.fail("no TypeError from hash([].__add__)")

    t = ()
    t += (7,)
    self.assertEqual(t.__add__, (7,).__add__)
    self.assertEqual(hash(t.__add__), hash((7,).__add__))
def test_not_implemented(self):
    """Every binary special method returning NotImplemented must make the
    operation (and its in-place form) raise TypeError."""
    # all binary methods should be able to return a NotImplemented
    import operator

    def specialmethod(self, other):
        return NotImplemented

    def check(expr, x, y):
        # Evaluate expr with the given operands; TypeError is required.
        try:
            exec(expr, {'x': x, 'y': y, 'operator': operator})
        except TypeError:
            pass
        else:
            self.fail("no TypeError from %r" % (expr,))
    N1 = sys.maxsize + 1    # might trigger OverflowErrors instead of
                            # TypeErrors
    N2 = sys.maxsize        # if sizeof(int) < sizeof(long), might trigger
                            #   ValueErrors instead of TypeErrors
    for name, expr, iexpr in [
            ('__add__',      'x + y',                   'x += y'),
            ('__sub__',      'x - y',                   'x -= y'),
            ('__mul__',      'x * y',                   'x *= y'),
            ('__matmul__',   'x @ y',                   'x @= y'),
            ('__truediv__',  'x / y',                   'x /= y'),
            ('__floordiv__', 'x // y',                  'x //= y'),
            ('__mod__',      'x % y',                   'x %= y'),
            ('__divmod__',   'divmod(x, y)',            None),
            ('__pow__',      'x ** y',                  'x **= y'),
            ('__lshift__',   'x << y',                  'x <<= y'),
            ('__rshift__',   'x >> y',                  'x >>= y'),
            ('__and__',      'x & y',                   'x &= y'),
            ('__or__',       'x | y',                   'x |= y'),
            ('__xor__',      'x ^ y',                   'x ^= y')]:
        rname = '__r' + name[2:]
        A = type('A', (), {name: specialmethod})
        a = A()
        check(expr, a, a)
        check(expr, a, N1)
        check(expr, a, N2)
        if iexpr:
            check(iexpr, a, a)
            check(iexpr, a, N1)
            check(iexpr, a, N2)
            iname = '__i' + name[2:]
            C = type('C', (), {iname: specialmethod})
            c = C()
            check(iexpr, c, a)
            check(iexpr, c, N1)
            check(iexpr, c, N2)
def test_assign_slice(self):
# ceval.c's assign_slice used to check for
# tp->tp_as_sequence->sq_slice instead of
# tp->tp_as_sequence->sq_ass_slice
class C(object):
def __setitem__(self, idx, value):
self.value = value
c = C()
c[1:2] = 3
self.assertEqual(c.value, 3)
def test_set_and_no_get(self):
    """A descriptor defining __set__ but not __get__: reads fall through
    to the instance dict; type attribute lookup is unaffected."""
    # See
    # http://mail.python.org/pipermail/python-dev/2010-January/095637.html
    class Descr(object):
        def __init__(self, name):
            self.name = name
        def __set__(self, obj, value):
            obj.__dict__[self.name] = value
    descr = Descr("a")
    class X(object):
        a = descr
    x = X()
    # No __get__, so reading returns the descriptor object itself...
    self.assertIs(x.a, descr)
    x.a = 42
    # ...until __set__ has stashed a value in the instance dict.
    self.assertEqual(x.a, 42)

    # Also check type_getattro for correctness.
    class Meta(type):
        pass
    class X(metaclass=Meta):
        pass
    X.a = 42
    Meta.a = Descr("a")
    self.assertEqual(X.a, 42)
def test_getattr_hooks(self):
    """__getattribute__/__getattr__ provided as descriptors (issue 4230),
    and a self-deleting __getattr__ that used to segfault."""
    class Descriptor(object):
        counter = 0
        def __get__(self, obj, objtype=None):
            def getter(name):
                self.counter += 1
                raise AttributeError(name)
            return getter

    descr = Descriptor()
    class A(object):
        __getattribute__ = descr
    class B(object):
        __getattr__ = descr
    class C(object):
        __getattribute__ = descr
        __getattr__ = descr

    self.assertRaises(AttributeError, getattr, A(), "attr")
    self.assertEqual(descr.counter, 1)
    self.assertRaises(AttributeError, getattr, B(), "attr")
    self.assertEqual(descr.counter, 2)
    self.assertRaises(AttributeError, getattr, C(), "attr")
    # C invokes __getattribute__, which raises, then __getattr__: two hits.
    self.assertEqual(descr.counter, 4)

    class EvilGetattribute(object):
        # This used to segfault
        def __getattr__(self, name):
            raise AttributeError(name)
        def __getattribute__(self, name):
            # Delete the fallback hook mid-lookup and force collections.
            del EvilGetattribute.__getattr__
            for i in range(5):
                gc.collect()
            raise AttributeError(name)

    self.assertRaises(AttributeError, getattr, EvilGetattribute(), "attr")
def test_type___getattribute__(self):
    """type.__getattribute__ must reject a non-string name argument."""
    self.assertRaises(TypeError, type.__getattribute__, list, type)
def test_abstractmethods(self):
    # type pretends not to have __abstractmethods__.
    self.assertRaises(AttributeError, getattr, type, "__abstractmethods__")
    class meta(type):
        pass
    self.assertRaises(AttributeError, getattr, meta, "__abstractmethods__")
    class X(object):
        pass
    with self.assertRaises(AttributeError):
        # Deleting it on a class that never had it set must also raise.
        del X.__abstractmethods__
def test_proxy_call(self):
    """An object faking __class__ = str passes isinstance() but must be
    rejected by real str method/slot descriptors."""
    class FakeStr:
        __class__ = str

    fake_str = FakeStr()
    # isinstance() reads __class__
    self.assertIsInstance(fake_str, str)

    # call a method descriptor
    with self.assertRaises(TypeError):
        str.split(fake_str)

    # call a slot wrapper descriptor
    with self.assertRaises(TypeError):
        str.__add__(fake_str, "abc")
def test_repr_as_str(self):
    # Issue #11603: crash or infinite loop when rebinding __str__ as
    # __repr__.
    class Foo:
        pass
    # object.__str__ falls back to __repr__, which is now __str__ again:
    # the cycle must surface as RecursionError, not a crash.
    Foo.__repr__ = Foo.__str__
    foo = Foo()
    self.assertRaises(RecursionError, str, foo)
    self.assertRaises(RecursionError, repr, foo)
def test_mixing_slot_wrappers(self):
class X(dict):
__setattr__ = dict.__setitem__
x = X()
x.y = 42
self.assertEqual(x["y"], 42)
def test_slot_shadows_class_variable(self):
    """A name appearing both in __slots__ and as a class variable must be
    rejected at class-creation time."""
    with self.assertRaises(ValueError) as cm:
        class X:
            __slots__ = ["foo"]
            foo = None
    m = str(cm.exception)
    self.assertEqual("'foo' in __slots__ conflicts with class variable", m)
def test_set_doc(self):
    """__doc__ is writable on a heap type but neither settable on a
    builtin type nor deletable."""
    class X:
        "elephant"
    X.__doc__ = "banana"
    self.assertEqual(X.__doc__, "banana")

    with self.assertRaises(TypeError) as cm:
        type(list).__dict__["__doc__"].__set__(list, "blah")
    self.assertIn("can't set list.__doc__", str(cm.exception))

    with self.assertRaises(TypeError) as cm:
        type(X).__dict__["__doc__"].__delete__(X)
    self.assertIn("can't delete X.__doc__", str(cm.exception))
    self.assertEqual(X.__doc__, "banana")
def test_qualname(self):
    """__qualname__ of the four C descriptor kinds, its immutability on
    builtin types, and nesting for classes."""
    descriptors = [str.lower, complex.real, float.real, int.__add__]
    types = ['method', 'member', 'getset', 'wrapper']

    # make sure we have an example of each type of descriptor
    for d, n in zip(descriptors, types):
        self.assertEqual(type(d).__name__, n + '_descriptor')

    for d in descriptors:
        qualname = d.__objclass__.__qualname__ + '.' + d.__name__
        self.assertEqual(d.__qualname__, qualname)

    self.assertEqual(str.lower.__qualname__, 'str.lower')
    self.assertEqual(complex.real.__qualname__, 'complex.real')
    self.assertEqual(float.real.__qualname__, 'float.real')
    self.assertEqual(int.__add__.__qualname__, 'int.__add__')

    class X:
        pass
    with self.assertRaises(TypeError):
        del X.__qualname__

    self.assertRaises(TypeError, type.__dict__['__qualname__'].__set__,
                      str, 'Oink')

    # Defined at module level (via `global`) so Y's qualname has no
    # function-scope prefix.
    global Y
    class Y:
        class Inside:
            pass
    self.assertEqual(Y.__qualname__, 'Y')
    self.assertEqual(Y.Inside.__qualname__, 'Y.Inside')
def test_qualname_dict(self):
    """A __qualname__ entry in the namespace passed to type() sets the
    attribute without landing in the class dict, and must be a str."""
    ns = {'__qualname__': 'some.name'}
    tp = type('Foo', (), ns)
    self.assertEqual(tp.__qualname__, 'some.name')
    self.assertNotIn('__qualname__', tp.__dict__)
    # The caller's namespace mapping is not mutated.
    self.assertEqual(ns, {'__qualname__': 'some.name'})

    ns = {'__qualname__': 1}
    self.assertRaises(TypeError, type, 'Foo', (), ns)
def test_cycle_through_dict(self):
    # See bug #1469629
    class X(dict):
        def __init__(self):
            dict.__init__(self)
            # Instance IS its own __dict__ — a reference cycle the GC
            # must still be able to break.
            self.__dict__ = self
    x = X()
    x.attr = 42
    wr = weakref.ref(x)
    del x
    support.gc_collect()
    self.assertIsNone(wr())
    for o in gc.get_objects():
        self.assertIsNot(type(o), X)
def test_object_new_and_init_with_parameters(self):
    # See issue #1683368
    class OverrideNeither:
        pass
    # With neither __new__ nor __init__ overridden, any argument is an error.
    self.assertRaises(TypeError, OverrideNeither, 1)
    self.assertRaises(TypeError, OverrideNeither, kw=1)
    class OverrideNew:
        def __new__(cls, foo, kw=0, *args, **kwds):
            return object.__new__(cls, *args, **kwds)
    class OverrideInit:
        def __init__(self, foo, kw=0, *args, **kwargs):
            return object.__init__(self, *args, **kwargs)
    class OverrideBoth(OverrideNew, OverrideInit):
        pass
    for case in OverrideNew, OverrideInit, OverrideBoth:
        case(1)
        case(1, kw=2)
        # Extra args beyond the override's signature still reach
        # object.__new__/__init__ and must raise there.
        self.assertRaises(TypeError, case, 1, 2, 3)
        self.assertRaises(TypeError, case, 1, 2, foo=3)
def test_subclassing_does_not_duplicate_dict_descriptors(self):
class Base:
pass
class Sub(Base):
pass
self.assertIn("__dict__", Base.__dict__)
self.assertNotIn("__dict__", Sub.__dict__)
def test_bound_method_repr(self):
    """repr() of bound methods names the defining class and the receiver."""
    class Foo:
        def method(self):
            pass
    self.assertRegex(repr(Foo().method),
                     r"<bound method .*Foo\.method of <.*Foo object at .*>>")

    class Base:
        def method(self):
            pass
    class Derived1(Base):
        pass
    class Derived2(Base):
        def method(self):
            pass
    base = Base()
    derived1 = Derived1()
    derived2 = Derived2()
    super_d2 = super(Derived2, derived2)
    # The method part of the repr reflects where the function was defined,
    # while the receiver part shows the actual instance type.
    self.assertRegex(repr(base.method),
                     r"<bound method .*Base\.method of <.*Base object at .*>>")
    self.assertRegex(repr(derived1.method),
                     r"<bound method .*Base\.method of <.*Derived1 object at .*>>")
    self.assertRegex(repr(derived2.method),
                     r"<bound method .*Derived2\.method of <.*Derived2 object at .*>>")
    # Through super(), the implementation comes from Base but the
    # receiver is still the Derived2 instance.
    self.assertRegex(repr(super_d2.method),
                     r"<bound method .*Base\.method of <.*Derived2 object at .*>>")

    class Foo:
        @classmethod
        def method(cls):
            pass
    foo = Foo()
    # Classmethods bind to the class either way.
    self.assertRegex(repr(foo.method),  # access via instance
                     r"<bound method .*Foo\.method of <class '.*Foo'>>")
    self.assertRegex(repr(Foo.method),  # access via the class
                     r"<bound method .*Foo\.method of <class '.*Foo'>>")

    class MyCallable:
        def __call__(self, arg):
            pass
    func = MyCallable()  # func has no __name__ or __qualname__ attributes
    instance = object()
    method = types.MethodType(func, instance)
    # With no name information at all, repr() falls back to '?'.
    self.assertRegex(repr(method),
                     r"<bound method \? of <object object at .*>>")
    func.__name__ = "name"
    self.assertRegex(repr(method),
                     r"<bound method name of <object object at .*>>")
    # __qualname__ takes precedence over __name__ when both are set.
    func.__qualname__ = "qualname"
    self.assertRegex(repr(method),
                     r"<bound method qualname of <object object at .*>>")
class DictProxyTests(unittest.TestCase):
    """Behaviour of the mappingproxy returned by a class's __dict__."""

    def setUp(self):
        # A minimal class whose __dict__ contents are known exactly.
        class C(object):
            def meth(self):
                pass
        self.C = C

    @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(),
                     'trace function introduces __local__')
    def test_iter_keys(self):
        # Testing dict-proxy keys...
        it = self.C.__dict__.keys()
        # The proxy hands out a view object, not a materialized list.
        self.assertNotIsInstance(it, list)
        keys = list(it)
        keys.sort()
        self.assertEqual(keys, ['__dict__', '__doc__', '__module__',
                                '__weakref__', 'meth'])

    @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(),
                     'trace function introduces __local__')
    def test_iter_values(self):
        # Testing dict-proxy values...
        it = self.C.__dict__.values()
        self.assertNotIsInstance(it, list)
        values = list(it)
        # One value per key asserted in test_iter_keys.
        self.assertEqual(len(values), 5)

    @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(),
                     'trace function introduces __local__')
    def test_iter_items(self):
        # Testing dict-proxy iteritems...
        it = self.C.__dict__.items()
        self.assertNotIsInstance(it, list)
        keys = [item[0] for item in it]
        keys.sort()
        self.assertEqual(keys, ['__dict__', '__doc__', '__module__',
                                '__weakref__', 'meth'])

    def test_dict_type_with_metaclass(self):
        # Testing type of __dict__ when metaclass set...
        class B(object):
            pass
        class M(type):
            pass
        class C(metaclass=M):
            # In 2.3a1, C.__dict__ was a real dict rather than a dict proxy
            pass
        self.assertEqual(type(C.__dict__), type(B.__dict__))

    def test_repr(self):
        # Testing mappingproxy.__repr__.
        # We can't blindly compare with the repr of another dict as ordering
        # of keys and values is arbitrary and may differ.
        r = repr(self.C.__dict__)
        self.assertTrue(r.startswith('mappingproxy('), r)
        self.assertTrue(r.endswith(')'), r)
        for k, v in self.C.__dict__.items():
            self.assertIn('{!r}: {!r}'.format(k, v), r)
class PTypesLongInitTest(unittest.TestCase):
    # This is in its own TestCase so that it can be run before any other tests.
    def test_pytype_long_ready(self):
        # Testing SF bug 551412 ...
        # This dumps core when SF bug 551412 isn't fixed --
        # but only when test_descr.py is run separately.
        # (That can't be helped -- as soon as PyType_Ready()
        # is called for PyLong_Type, the bug is gone.)
        class UserLong(object):
            def __pow__(self, *args):
                pass
        try:
            pow(0, UserLong(), 0)
        except:
            # The bare except is deliberate: only reaching this point
            # without crashing matters, not which exception pow() raises.
            pass

        # Another segfault only when run early
        # (before PyType_Ready(tuple) is called)
        type.mro(tuple)
class MiscTests(unittest.TestCase):
    def test_type_lookup_mro_reference(self):
        # Issue #14199: _PyType_Lookup() has to keep a strong reference to
        # the type MRO because it may be modified during the lookup, if
        # __bases__ is set during the lookup for example.
        class MyKey(object):
            def __hash__(self):
                # Collides with the 'mykey' string key so dict lookups on
                # X's namespace end up calling __eq__ below.
                return hash('mykey')
            def __eq__(self, other):
                # Reassigning __bases__ in the middle of the attribute
                # lookup is the whole point of this regression test.
                X.__bases__ = (Base2,)

        class Base(object):
            mykey = 'from Base'
            mykey2 = 'from Base'

        class Base2(object):
            mykey = 'from Base2'
            mykey2 = 'from Base2'

        # The MyKey() entry plants the hash-colliding key in X's namespace.
        X = type('X', (Base,), {MyKey(): 5})
        # mykey is read from Base
        self.assertEqual(X.mykey, 'from Base')
        # mykey2 is read from Base2 because MyKey.__eq__ has set __bases__
        self.assertEqual(X.mykey2, 'from Base2')
class PicklingTests(unittest.TestCase):
def _check_reduce(self, proto, obj, args=(), kwargs={}, state=None,
                  listitems=None, dictitems=None):
    """Verify obj.__reduce_ex__(proto) against the expected components.

    For protocol 2+ the five-tuple (callable, arguments, state,
    list items, dict items) is checked field by field; for protocols
    0/1 the legacy copyreg._reconstructor form is expected instead.
    """
    if proto >= 2:
        reduce_value = obj.__reduce_ex__(proto)
        if kwargs:
            # Keyword arguments require the __newobj_ex__ form.
            self.assertEqual(reduce_value[0], copyreg.__newobj_ex__)
            self.assertEqual(reduce_value[1], (type(obj), args, kwargs))
        else:
            self.assertEqual(reduce_value[0], copyreg.__newobj__)
            self.assertEqual(reduce_value[1], (type(obj),) + args)
        self.assertEqual(reduce_value[2], state)
        if listitems is not None:
            self.assertListEqual(list(reduce_value[3]), listitems)
        else:
            self.assertIsNone(reduce_value[3])
        if dictitems is not None:
            self.assertDictEqual(dict(reduce_value[4]), dictitems)
        else:
            self.assertIsNone(reduce_value[4])
    else:
        base_type = type(obj).__base__
        reduce_value = (copyreg._reconstructor,
                        (type(obj),
                         base_type,
                         None if base_type is object else base_type(obj)))
        if state is not None:
            reduce_value += (state,)
        self.assertEqual(obj.__reduce_ex__(proto), reduce_value)
        self.assertEqual(obj.__reduce__(), reduce_value)
def test_reduce(self):
    """Exercise __getnewargs__/__getnewargs_ex__/__getstate__ handling."""
    protocols = range(pickle.HIGHEST_PROTOCOL + 1)
    args = (-101, "spam")
    kwargs = {'bacon': -201, 'fish': -301}
    state = {'cheese': -401}

    # Valid __getnewargs__, with and without extra instance state.
    class C1:
        def __getnewargs__(self):
            return args
    obj = C1()
    for proto in protocols:
        self._check_reduce(proto, obj, args)
    for name, value in state.items():
        setattr(obj, name, value)
    for proto in protocols:
        self._check_reduce(proto, obj, args, state=state)

    # __getnewargs__ returning a non-tuple is rejected (protocol 2+).
    class C2:
        def __getnewargs__(self):
            return "bad args"
    obj = C2()
    for proto in protocols:
        if proto >= 2:
            with self.assertRaises(TypeError):
                obj.__reduce_ex__(proto)

    # Valid __getnewargs_ex__ returning (args, kwargs).
    class C3:
        def __getnewargs_ex__(self):
            return (args, kwargs)
    obj = C3()
    for proto in protocols:
        if proto >= 2:
            self._check_reduce(proto, obj, args, kwargs)

    # Malformed __getnewargs_ex__ return values of various shapes.
    class C4:
        def __getnewargs_ex__(self):
            return (args, "bad dict")
    class C5:
        def __getnewargs_ex__(self):
            return ("bad tuple", kwargs)
    class C6:
        def __getnewargs_ex__(self):
            return ()
    class C7:
        def __getnewargs_ex__(self):
            return "bad args"
    for proto in protocols:
        for cls in C4, C5, C6, C7:
            obj = cls()
            if proto >= 2:
                with self.assertRaises((TypeError, ValueError)):
                    obj.__reduce_ex__(proto)

    # Empty kwargs collapse back to the plain __newobj__ form.
    class C9:
        def __getnewargs_ex__(self):
            return (args, {})
    obj = C9()
    for proto in protocols:
        self._check_reduce(proto, obj, args)

    # Exceptions raised by the hook must propagate unchanged.
    class C10:
        def __getnewargs_ex__(self):
            raise IndexError
    obj = C10()
    for proto in protocols:
        if proto >= 2:
            with self.assertRaises(IndexError):
                obj.__reduce_ex__(proto)

    # __getstate__ supplies the state component directly...
    class C11:
        def __getstate__(self):
            return state
    obj = C11()
    for proto in protocols:
        self._check_reduce(proto, obj, state=state)

    # ...and its return value is passed through without type-checking.
    class C12:
        def __getstate__(self):
            return "not dict"
    obj = C12()
    for proto in protocols:
        self._check_reduce(proto, obj, state="not dict")

    # Exceptions raised by __getstate__ must propagate as well.
    class C13:
        def __getstate__(self):
            raise IndexError
    obj = C13()
    for proto in protocols:
        with self.assertRaises(IndexError):
            obj.__reduce_ex__(proto)
        if proto < 2:
            with self.assertRaises(IndexError):
                obj.__reduce__()

    # __slots__ without __getstate__: works for protocol 2+ (slot values
    # travel as the second item of the state pair), fails before that.
    class C14:
        __slots__ = tuple(state)
        def __init__(self):
            for name, value in state.items():
                setattr(self, name, value)
    obj = C14()
    for proto in protocols:
        if proto >= 2:
            self._check_reduce(proto, obj, state=(None, state))
        else:
            with self.assertRaises(TypeError):
                obj.__reduce_ex__(proto)
            with self.assertRaises(TypeError):
                obj.__reduce__()

    # dict and list subclasses contribute dictitems/listitems.
    class C15(dict):
        pass
    obj = C15({"quebec": -601})
    for proto in protocols:
        self._check_reduce(proto, obj, dictitems=dict(obj))

    class C16(list):
        pass
    obj = C16(["yukon"])
    for proto in protocols:
        self._check_reduce(proto, obj, listitems=list(obj))
def test_special_method_lookup(self):
    """__reduce_ex__ must look up pickling hooks on the type, not the instance.

    Picky.__getattr__ raises if the instance is ever consulted for
    __getnewargs__ / __getnewargs_ex__.
    """
    class Picky:
        def __getstate__(self):
            return {}

        def __getattr__(self, attr):
            # Instance-level lookup of these hooks would be a bug.
            if attr in ("__getnewargs__", "__getnewargs_ex__"):
                raise AssertionError(attr)
            return None

    for proto in range(pickle.HIGHEST_PROTOCOL + 1):
        expected_state = {} if proto >= 2 else None
        self._check_reduce(proto, Picky(), state=expected_state)
def _assert_is_copy(self, obj, objcopy, msg=None):
    """Assert that *objcopy* is a copy of *obj*: same type and state,
    but not the same object (nor sharing a __dict__)."""
    if msg is None:
        msg = "{!r} is not a copy of {!r}".format(obj, objcopy)
    if type(obj).__repr__ is object.__repr__:
        # The final check below compares repr() output, which delegates
        # the non-generic verification to the objects themselves; the
        # default object repr (embedding the id) would defeat that.
        raise ValueError("object passed to _assert_is_copy must "
                         "override the __repr__ method.")
    self.assertIsNot(obj, objcopy, msg=msg)
    self.assertIs(type(obj), type(objcopy), msg=msg)
    if hasattr(obj, '__dict__'):
        self.assertDictEqual(obj.__dict__, objcopy.__dict__, msg=msg)
        self.assertIsNot(obj.__dict__, objcopy.__dict__, msg=msg)
    if hasattr(obj, '__slots__'):
        self.assertListEqual(obj.__slots__, objcopy.__slots__, msg=msg)
        for slot in obj.__slots__:
            # Each slot must be set (or unset) on both objects alike.
            self.assertEqual(
                hasattr(obj, slot), hasattr(objcopy, slot), msg=msg)
            self.assertEqual(getattr(obj, slot, None),
                             getattr(objcopy, slot, None), msg=msg)
    self.assertEqual(repr(obj), repr(objcopy), msg=msg)
@staticmethod
def _generate_pickle_copiers():
    """Yield one PickleCopier per (protocol, dumps, loads) combination."""
    class PickleCopier:
        "This class copies object using pickle."
        def __init__(self, proto, dumps, loads):
            self.proto = proto
            self.dumps = dumps
            self.loads = loads

        def copy(self, obj):
            return self.loads(self.dumps(obj, self.proto))

        def __repr__(self):
            # We try to be as descriptive as possible here since this is
            # the string which we will allow us to tell the pickle
            # configuration we are using during debugging.
            return ("PickleCopier(proto={}, dumps={}.{}, loads={}.{})"
                    .format(self.proto,
                            self.dumps.__module__, self.dumps.__qualname__,
                            self.loads.__module__, self.loads.__qualname__))

    # Cross every protocol with the C-accelerated and pure-Python
    # implementations of dumps/loads.
    protos = range(pickle.HIGHEST_PROTOCOL + 1)
    dumpers = {pickle.dumps, pickle._dumps}
    loaders = {pickle.loads, pickle._loads}
    return (PickleCopier(proto, dumps, loads)
            for proto, dumps, loads in itertools.product(protos,
                                                         dumpers, loaders))
def test_pickle_slots(self):
    # Tests pickling of classes with __slots__.
    # The test classes are bound as module globals -- presumably so that
    # pickle can locate them by name when round-tripping instances (the
    # `global` declarations below apply to the whole function, including
    # the redefinitions of C and D).  TODO confirm exact lookup path.
    # Pickling of classes with __slots__ but without __getstate__ should
    # fail (if using protocol 0 or 1)
    global C
    class C:
        __slots__ = ['a']
    with self.assertRaises(TypeError):
        pickle.dumps(C(), 0)
    global D
    class D(C):
        pass
    with self.assertRaises(TypeError):
        pickle.dumps(D(), 0)

    class C:
        "A class with __getstate__ and __setstate__ implemented."
        __slots__ = ['a']
        def __getstate__(self):
            state = getattr(self, '__dict__', {}).copy()
            # Collect slot values from the whole MRO, skipping any slot
            # that was never assigned.
            for cls in type(self).__mro__:
                for slot in cls.__dict__.get('__slots__', ()):
                    try:
                        state[slot] = getattr(self, slot)
                    except AttributeError:
                        pass
            return state
        def __setstate__(self, state):
            for k, v in state.items():
                setattr(self, k, v)
        def __repr__(self):
            return "%s()<%r>" % (type(self).__name__, self.__getstate__())

    class D(C):
        "A subclass of a class with slots."
        pass

    global E
    class E(C):
        "A subclass with an extra slot."
        __slots__ = ['b']

    # Now it should work
    for pickle_copier in self._generate_pickle_copiers():
        with self.subTest(pickle_copier=pickle_copier):
            x = C()
            y = pickle_copier.copy(x)
            self._assert_is_copy(x, y)

            x.a = 42
            y = pickle_copier.copy(x)
            self._assert_is_copy(x, y)

            x = D()
            x.a = 42
            x.b = 100
            y = pickle_copier.copy(x)
            self._assert_is_copy(x, y)

            x = E()
            x.a = 42
            x.b = "foo"
            y = pickle_copier.copy(x)
            self._assert_is_copy(x, y)
def test_reduce_copying(self):
    # Tests pickling and copying new-style classes and objects.
    # Each test class advertises, via ARGS/KWARGS/NEED_DICT_COPYING, how
    # it is meant to be reconstructed.  They are bound as module globals,
    # presumably so pickle can find them by name -- TODO confirm.
    global C1
    class C1:
        "The state of this class is copyable via its instance dict."
        ARGS = (1, 2)
        NEED_DICT_COPYING = True
        def __init__(self, a, b):
            super().__init__()
            self.a = a
            self.b = b
        def __repr__(self):
            return "C1(%r, %r)" % (self.a, self.b)

    global C2
    class C2(list):
        "A list subclass copyable via __getnewargs__."
        ARGS = (1, 2)
        NEED_DICT_COPYING = False
        def __new__(cls, a, b):
            self = super().__new__(cls)
            self.a = a
            self.b = b
            return self
        def __init__(self, *args):
            super().__init__()
            # This helps testing that __init__ is not called during the
            # unpickling process, which would cause extra appends.
            self.append("cheese")
        @classmethod
        def __getnewargs__(cls):
            return cls.ARGS
        def __repr__(self):
            return "C2(%r, %r)<%r>" % (self.a, self.b, list(self))

    global C3
    class C3(list):
        "A list subclass copyable via __getstate__."
        ARGS = (1, 2)
        NEED_DICT_COPYING = False
        def __init__(self, a, b):
            self.a = a
            self.b = b
            # This helps testing that __init__ is not called during the
            # unpickling process, which would cause extra appends.
            self.append("cheese")
        @classmethod
        def __getstate__(cls):
            return cls.ARGS
        def __setstate__(self, state):
            a, b = state
            self.a = a
            self.b = b
        def __repr__(self):
            return "C3(%r, %r)<%r>" % (self.a, self.b, list(self))

    global C4
    class C4(int):
        "An int subclass copyable via __getnewargs__."
        ARGS = ("hello", "world", 1)
        NEED_DICT_COPYING = False
        def __new__(cls, a, b, value):
            self = super().__new__(cls, value)
            self.a = a
            self.b = b
            return self
        @classmethod
        def __getnewargs__(cls):
            return cls.ARGS
        def __repr__(self):
            return "C4(%r, %r)<%r>" % (self.a, self.b, int(self))

    global C5
    class C5(int):
        "An int subclass copyable via __getnewargs_ex__."
        ARGS = (1, 2)
        KWARGS = {'value': 3}
        NEED_DICT_COPYING = False
        def __new__(cls, a, b, *, value=0):
            self = super().__new__(cls, value)
            self.a = a
            self.b = b
            return self
        @classmethod
        def __getnewargs_ex__(cls):
            return (cls.ARGS, cls.KWARGS)
        def __repr__(self):
            return "C5(%r, %r)<%r>" % (self.a, self.b, int(self))

    test_classes = (C1, C2, C3, C4, C5)
    # Testing copying through pickle
    pickle_copiers = self._generate_pickle_copiers()
    for cls, pickle_copier in itertools.product(test_classes, pickle_copiers):
        with self.subTest(cls=cls, pickle_copier=pickle_copier):
            kwargs = getattr(cls, 'KWARGS', {})
            obj = cls(*cls.ARGS, **kwargs)
            proto = pickle_copier.proto
            objcopy = pickle_copier.copy(obj)
            self._assert_is_copy(obj, objcopy)
            # For test classes that supports this, make sure we didn't go
            # around the reduce protocol by simply copying the attribute
            # dictionary. We clear attributes using the previous copy to
            # not mutate the original argument.
            if proto >= 2 and not cls.NEED_DICT_COPYING:
                objcopy.__dict__.clear()
                objcopy2 = pickle_copier.copy(objcopy)
                self._assert_is_copy(obj, objcopy2)

    # Testing copying through copy.deepcopy()
    for cls in test_classes:
        with self.subTest(cls=cls):
            kwargs = getattr(cls, 'KWARGS', {})
            obj = cls(*cls.ARGS, **kwargs)
            objcopy = deepcopy(obj)
            self._assert_is_copy(obj, objcopy)
            # For test classes that supports this, make sure we didn't go
            # around the reduce protocol by simply copying the attribute
            # dictionary. We clear attributes using the previous copy to
            # not mutate the original argument.
            if not cls.NEED_DICT_COPYING:
                objcopy.__dict__.clear()
                objcopy2 = deepcopy(objcopy)
                self._assert_is_copy(obj, objcopy2)
def test_issue24097(self):
    # Slot name is freed inside __getattr__ and is later used.
    # S is a str subclass so the slot name is NOT interned; rebinding
    # A.__slotnames__ inside __getattr__ drops the last reference to the
    # original name object while the reduce machinery is still using it.
    class S(str):  # Not interned
        pass
    class A:
        __slotnames__ = [S('spam')]
        def __getattr__(self, attr):
            if attr == 'spam':
                A.__slotnames__[:] = [S('spam')]
                return 42
            else:
                raise AttributeError
    import copyreg
    expected = (copyreg.__newobj__, (A,), (None, {'spam': 42}), None, None)
    self.assertEqual(A().__reduce_ex__(2), expected)  # Shouldn't crash
def test_object_reduce(self):
# Issue #29914
# __reduce__() takes no arguments
object().__reduce__()
with self.assertRaises(TypeError):
object().__reduce__(0)
# __reduce_ex__() takes one integer argument
object().__reduce_ex__(0)
with self.assertRaises(TypeError):
object().__reduce_ex__()
with self.assertRaises(TypeError):
object().__reduce_ex__(None)
class SharedKeyTests(unittest.TestCase):
    """Key-sharing instance dictionaries (PEP 412)."""

    @support.cpython_only
    def test_subclasses(self):
        # Verify that subclasses can share keys (per PEP 412)
        class A:
            pass
        class B(A):
            pass
        a, b = A(), B()
        # Fresh instances of the hierarchy have same-sized dicts, smaller
        # than a regular empty dict.
        self.assertEqual(sys.getsizeof(vars(a)), sys.getsizeof(vars(b)))
        self.assertLess(sys.getsizeof(vars(a)), sys.getsizeof({}))
        # Initial hash table can contain at most 5 elements.
        # Set 6 attributes to cause internal resizing.
        a.x, a.y, a.z, a.w, a.v, a.u = range(6)
        # After resizing, a's dict size no longer matches b's.
        self.assertNotEqual(sys.getsizeof(vars(a)), sys.getsizeof(vars(b)))
        a2 = A()
        self.assertEqual(sys.getsizeof(vars(a)), sys.getsizeof(vars(a2)))
        self.assertLess(sys.getsizeof(vars(a)), sys.getsizeof({}))
        b.u, b.v, b.w, b.t, b.s, b.r = range(6)
        self.assertLess(sys.getsizeof(vars(b)), sys.getsizeof({}))
class DebugHelperMeta(type):
    """
    Sets default __doc__ and simplifies repr() output.
    """
    def __new__(mcls, name, bases, attrs):
        # An explicit docstring wins; otherwise fall back to the class
        # name, which helps when debugging with gdb.
        if attrs.get('__doc__') is None:
            attrs['__doc__'] = name
        return super().__new__(mcls, name, bases, attrs)

    def __repr__(cls):
        return repr(cls.__name__)
class MroTest(unittest.TestCase):
    """
    Regressions for some bugs revealed through
    mcls.mro() customization (typeobject.c: mro_internal()) and
    cls.__bases__ assignment (typeobject.c: type_set_bases()).
    """
    def setUp(self):
        # step/ready are manipulated by the metaclasses defined inside the
        # individual tests to bound their deliberate reentrancy.
        self.step = 0
        self.ready = False

    def step_until(self, limit):
        # Return True (and advance the counter) until *limit* steps taken.
        ret = (self.step < limit)
        if ret:
            self.step += 1
        return ret

    def test_incomplete_set_bases_on_self(self):
        """
        type_set_bases must be aware that type->tp_mro can be NULL.
        """
        class M(DebugHelperMeta):
            def mro(cls):
                if self.step_until(1):
                    assert cls.__mro__ is None
                    cls.__bases__ += ()
                return type.mro(cls)
        class A(metaclass=M):
            pass

    def test_reent_set_bases_on_base(self):
        """
        Deep reentrancy must not over-decref old_mro.
        """
        class M(DebugHelperMeta):
            def mro(cls):
                if cls.__mro__ is not None and cls.__name__ == 'B':
                    # 4-5 steps are usually enough to make it crash somewhere
                    if self.step_until(10):
                        A.__bases__ += ()
                return type.mro(cls)
        class A(metaclass=M):
            pass
        class B(A):
            pass
        B.__bases__ += ()

    def test_reent_set_bases_on_direct_base(self):
        """
        Similar to test_reent_set_bases_on_base, but may crash differently.
        """
        class M(DebugHelperMeta):
            def mro(cls):
                base = cls.__bases__[0]
                if base is not object:
                    if self.step_until(5):
                        base.__bases__ += ()
                return type.mro(cls)
        class A(metaclass=M):
            pass
        class B(A):
            pass
        class C(B):
            pass

    def test_reent_set_bases_tp_base_cycle(self):
        """
        type_set_bases must check for an inheritance cycle not only through
        MRO of the type, which may be not yet updated in case of reentrance,
        but also through tp_base chain, which is assigned before diving into
        inner calls to mro().

        Otherwise, the following snippet can loop forever:
            do {
                // ...
                type = type->tp_base;
            } while (type != NULL);

        Functions that rely on tp_base (like solid_base and PyType_IsSubtype)
        would not be happy in that case, causing a stack overflow.
        """
        class M(DebugHelperMeta):
            def mro(cls):
                # Once armed, each B's mro() points the other B's bases at
                # it, trying to manufacture a cycle mid-assignment.
                if self.ready:
                    if cls.__name__ == 'B1':
                        B2.__bases__ = (B1,)
                    if cls.__name__ == 'B2':
                        B1.__bases__ = (B2,)
                return type.mro(cls)
        class A(metaclass=M):
            pass
        class B1(A):
            pass
        class B2(A):
            pass
        self.ready = True
        with self.assertRaises(TypeError):
            B1.__bases__ += ()

    def test_tp_subclasses_cycle_in_update_slots(self):
        """
        type_set_bases must check for reentrancy upon finishing its job
        by updating tp_subclasses of old/new bases of the type.
        Otherwise, an implicit inheritance cycle through tp_subclasses
        can break functions that recurse on elements of that field
        (like recurse_down_subclasses and mro_hierarchy) eventually
        leading to a stack overflow.
        """
        class M(DebugHelperMeta):
            def mro(cls):
                if self.ready and cls.__name__ == 'C':
                    self.ready = False
                    C.__bases__ = (B2,)
                return type.mro(cls)
        class A(metaclass=M):
            pass
        class B1(A):
            pass
        class B2(A):
            pass
        class C(A):
            pass
        self.ready = True
        C.__bases__ = (B1,)
        B1.__bases__ = (C,)
        self.assertEqual(C.__bases__, (B2,))
        self.assertEqual(B2.__subclasses__(), [C])
        self.assertEqual(B1.__subclasses__(), [])
        self.assertEqual(B1.__bases__, (C,))
        self.assertEqual(C.__subclasses__(), [B1])

    def test_tp_subclasses_cycle_error_return_path(self):
        """
        The same as test_tp_subclasses_cycle_in_update_slots, but tests
        a code path executed on error (goto bail).
        """
        class E(Exception):
            pass
        class M(DebugHelperMeta):
            def mro(cls):
                if self.ready and cls.__name__ == 'C':
                    if C.__bases__ == (B2,):
                        self.ready = False
                    else:
                        C.__bases__ = (B2,)
                        raise E
                return type.mro(cls)
        class A(metaclass=M):
            pass
        class B1(A):
            pass
        class B2(A):
            pass
        class C(A):
            pass
        self.ready = True
        with self.assertRaises(E):
            C.__bases__ = (B1,)
        B1.__bases__ = (C,)
        self.assertEqual(C.__bases__, (B2,))
        self.assertEqual(C.__mro__, tuple(type.mro(C)))

    def test_incomplete_extend(self):
        """
        Extending an uninitialized type with type->tp_mro == NULL must
        throw a reasonable TypeError exception, instead of failing
        with PyErr_BadInternalCall.
        """
        class M(DebugHelperMeta):
            def mro(cls):
                if cls.__mro__ is None and cls.__name__ != 'X':
                    with self.assertRaises(TypeError):
                        class X(cls):
                            pass
                return type.mro(cls)
        class A(metaclass=M):
            pass

    def test_incomplete_super(self):
        """
        Attribute lookup on a super object must be aware that
        its target type can be uninitialized (type->tp_mro == NULL).
        """
        class M(DebugHelperMeta):
            def mro(cls):
                if cls.__mro__ is None:
                    with self.assertRaises(AttributeError):
                        super(cls, cls).xxx
                return type.mro(cls)
        class A(metaclass=M):
            pass
def test_main():
    """Run every local TestCase through support.run_unittest."""
    # Run all local test cases, with PTypesLongInitTest first.
    # PTypesLongInitTest must come first: per its own comments, the
    # crashes it guards against only reproduce before PyType_Ready()
    # has run for the relevant builtin types.
    support.run_unittest(PTypesLongInitTest, OperatorsTest,
                         ClassPropertiesAndMethods, DictProxyTests,
                         MiscTests, PicklingTests, SharedKeyTests,
                         MroTest)

if __name__ == "__main__":
    test_main()
| {
"content_hash": "bb3ff6b47b06cd28495586d985589a2c",
"timestamp": "",
"source": "github",
"line_count": 5535,
"max_line_length": 91,
"avg_line_length": 34.2579945799458,
"alnum_prop": 0.48586632070794966,
"repo_name": "FFMG/myoddweb.piger",
"id": "0e7728ebf2d7a2d9f7bf1debfa95be4854b7f716",
"size": "189618",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "monitor/api/python/Python-3.7.2/Lib/test/test_descr.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ada",
"bytes": "89079"
},
{
"name": "Assembly",
"bytes": "399228"
},
{
"name": "Batchfile",
"bytes": "93889"
},
{
"name": "C",
"bytes": "32256857"
},
{
"name": "C#",
"bytes": "197461"
},
{
"name": "C++",
"bytes": "200544641"
},
{
"name": "CMake",
"bytes": "192771"
},
{
"name": "CSS",
"bytes": "441704"
},
{
"name": "CWeb",
"bytes": "174166"
},
{
"name": "Common Lisp",
"bytes": "24481"
},
{
"name": "Cuda",
"bytes": "52444"
},
{
"name": "DIGITAL Command Language",
"bytes": "33549"
},
{
"name": "DTrace",
"bytes": "2157"
},
{
"name": "Fortran",
"bytes": "1856"
},
{
"name": "HTML",
"bytes": "181677643"
},
{
"name": "IDL",
"bytes": "14"
},
{
"name": "Inno Setup",
"bytes": "9647"
},
{
"name": "JavaScript",
"bytes": "705756"
},
{
"name": "Lex",
"bytes": "1231"
},
{
"name": "Lua",
"bytes": "3332"
},
{
"name": "M4",
"bytes": "259214"
},
{
"name": "Makefile",
"bytes": "1262318"
},
{
"name": "Max",
"bytes": "36857"
},
{
"name": "Module Management System",
"bytes": "1545"
},
{
"name": "Objective-C",
"bytes": "2167778"
},
{
"name": "Objective-C++",
"bytes": "630"
},
{
"name": "PHP",
"bytes": "59030"
},
{
"name": "PLSQL",
"bytes": "22886"
},
{
"name": "Pascal",
"bytes": "75208"
},
{
"name": "Perl",
"bytes": "42080"
},
{
"name": "PostScript",
"bytes": "13803"
},
{
"name": "PowerShell",
"bytes": "11781"
},
{
"name": "Python",
"bytes": "30377308"
},
{
"name": "QML",
"bytes": "593"
},
{
"name": "QMake",
"bytes": "16692"
},
{
"name": "Rebol",
"bytes": "354"
},
{
"name": "Rich Text Format",
"bytes": "6743"
},
{
"name": "Roff",
"bytes": "55661"
},
{
"name": "Ruby",
"bytes": "5532"
},
{
"name": "SAS",
"bytes": "1847"
},
{
"name": "Shell",
"bytes": "783974"
},
{
"name": "TSQL",
"bytes": "1201"
},
{
"name": "Tcl",
"bytes": "1172"
},
{
"name": "TeX",
"bytes": "32117"
},
{
"name": "Visual Basic",
"bytes": "70"
},
{
"name": "XSLT",
"bytes": "552736"
},
{
"name": "Yacc",
"bytes": "19623"
}
],
"symlink_target": ""
} |
try:
from Tkinter import *
import tkMessageBox
import Pmw
from random import randint
from wireless.menu_functions import *
from wireless.iface_list import ListInterfaces, GetInterfaceList
except ImportError, e:
pass
class DisplayBox:
def __init__(self, parent):
    """Build the main window inside *parent*: a toolbar panel on top and
    a container holding the canvas (graph view), the result-list frame
    (table view) and the welcome screen."""
    self.myParent = parent
    Pmw.initialise()
    # Balloon provides hover tooltips for the toolbar widgets.
    self.balloon = Pmw.Balloon(parent)
    # Color declaration
    self.bg = "#009900"
    self.fg = "#ffffff"
    # Creating panel containing main actions' icons
    panel = Frame(parent, background="white")
    # Adding Start Scan button to panel.  Images are kept on self so the
    # Tkinter PhotoImage objects are not garbage-collected.
    self.start_img = PhotoImage(file='gui/images/scan.gif')
    self.start_btn = Button(panel, state='disabled', image=self.start_img, width=25, height=25)
    self.start_btn.pack(side=LEFT, padx=2)
    self.balloon.bind(self.start_btn, 'Start Scanning')
    # Adding Stop Scan button to panel
    self.stop_img = PhotoImage(file='gui/images/stop.gif')
    self.stop_btn = Button(panel, image=self.stop_img, state='disabled')
    self.stop_btn.pack(side=LEFT, padx=2)
    self.balloon.bind(self.stop_btn, 'Stop Scanning')
    # Adding Dump scan results button to panel
    self.dump_img = PhotoImage(file='gui/images/dump.gif')
    dump_btn = Button(panel, image=self.dump_img, state='disabled', width=25, height=25)
    dump_btn.pack(side=LEFT, padx=2)
    self.balloon.bind(dump_btn, 'Dump Scan Results')
    # Adding in-depth scanning of selected device/network button to panel
    self.sniff_img = PhotoImage(file='gui/images/duck.gif')
    self.sniff_btn = Button(panel, state='disabled', image=self.sniff_img, width=25, height=25)
    self.sniff_btn.pack(side=LEFT, padx=2)
    self.balloon.bind(self.sniff_btn, 'Sniff or Capture packets on selected interface')
    # Adding button to choose wireless interface
    self.choose_iface = Button(panel, text="Interface: None", fg="red", activeforeground="blue")
    self.choose_iface.pack(side=LEFT, padx=20, pady=2)
    self.balloon.bind(self.choose_iface, 'The %s has been selected for packet capture.'%self.choose_iface['text'])
    # Adding scan filter area
    filter_area = Frame(panel, highlightbackground="gray", highlightthickness=1)
    # NOTE(review): pack() returns None, so filter_lbl/filter_entry are
    # always None; the widgets stay reachable only through filter_area.
    filter_lbl = Label(filter_area, text="Filter:").pack(side=LEFT)
    filter_entry = Entry(filter_area, width=30, borderwidth=0).pack(side=LEFT,expand=YES, fill=X)
    filter_area.pack(side=LEFT, expand=YES, fill=X, padx=30)
    self.balloon.bind(filter_area, 'Enter the scan filter value here')
    # Adding labels for graph and table views
    self.graph_btn = Button(panel, text="Graph", activeforeground="blue", command=lambda x="graph": self.create_items(x))
    self.graph_btn.pack(side=RIGHT)
    self.balloon.bind(self.graph_btn, 'Click to view Graphical plot of wireless scan results')
    self.table_btn = Button(panel, text="Table", activeforeground="blue", command=lambda y="table": self.create_items(y))
    self.table_btn.pack(side=RIGHT)
    self.balloon.bind(self.table_btn, 'Click to view Tabular view of wireless scan results')
    panel.pack(anchor=N, side=TOP, fill=X, expand=YES)
    # Container contains the canvas
    self.container = Frame(parent, bg="white")
    self.container.configure(highlightbackground="black", highlightthickness=5)
    # Creating a scrollbar for the canvas
    self.scrollY1 = Scrollbar(self.container, orient=VERTICAL)
    self.CANVAS_SIZE = 900
    self.canvas1 = Canvas(self.container, width=self.CANVAS_SIZE, height=670, background='white')
    self.canvas1["scrollregion"] = (0,0,self.CANVAS_SIZE,self.CANVAS_SIZE)
    # Published as a module-level global -- presumably so code in the
    # star-imported wireless modules can draw on the canvas; verify.
    global canvas
    canvas = self.canvas1
    self.scrollY1.configure(command=self.canvas1.yview)
    # Binding Mousewheel event to canvas for vertical scrolling
    # (<MouseWheel> is the Windows event; <4>/<5> are X11 wheel up/down).
    self.canvas1.bind("<MouseWheel>", lambda event: self.canvas1.yview('scroll', 1, 'units'))
    self.canvas1.bind('<4>', lambda event : self.canvas1.yview('scroll', -1, 'units'))
    self.canvas1.bind('<5>', lambda event : self.canvas1.yview('scroll', 1, 'units'))
    self.canvas1["yscrollcommand"] = self.scrollY1.set
    # Frame used by the "table" view; populated later by sniffall().
    self.listresult = Frame(self.container, bg="gray", width=1200, height=700)
    self.create_welcome()
    self.container.pack(fill=BOTH, expand=YES, anchor='center')
    # Flag checked by sniffall() (count == 0) before building its table.
    self.count = 0
# Creating the Home display or Welcome window of the Program
# It lists the available interfaces in the computer on Left side and the Image slider on the right side.
def create_welcome(self):
    """Build the welcome screen: interface lists and help links on the
    left, a static slide image on the right."""
    # Main container in welcome window
    self.win = Frame(self.container, bg="gray")
    # Creates Title or introduction of the Program.
    win_title = Label(self.win, text="Wireless Scanner and Analyzer", font="Verdana 18 bold", bg = self.bg, fg=self.fg).pack(ipadx=10, ipady=10, fill=BOTH, padx=5, pady=5)
    win_body = Frame(self.win, background="gray")
    # Container containing list of available interfaces in the Computer.
    iface_box = Frame(win_body, height=450, bg="gray", highlightbackground="gray", highlightthickness=1)
    # Defining images representing interfaces (kept on self so the
    # PhotoImage objects are not garbage-collected).
    self.img_eth = PhotoImage(file="gui/images/eth.gif")
    self.img_wlan = PhotoImage(file="gui/images/wlan.gif")
    self.img_help = PhotoImage(file="gui/images/help.gif")
    # Getting list of Interfaces available in the computer
    self.interfaces = ListInterfaces().getAllInterfaces()
    iface_box_top = Frame(iface_box, highlightbackground=self.bg, highlightthickness=2)
    iface_title = Label(iface_box_top, text="Interfaces", width=20, bg=self.bg, fg=self.fg, font="Verdana 16 bold").pack(fill=X)
    for i in self.interfaces:
        # Displays Interfaces as a Label
        iface_lbl1 = Label(iface_box_top, text=i.title(), width=20, font="Verdana 12 bold", cursor="hand2", bg="gray")
        img1_lbl = Label(iface_lbl1, image=self.img_eth, bg="gray").pack(side=LEFT, padx=5)
        iface_lbl1.pack(anchor=W, ipady=5, pady=5, padx=2, fill=X)
        # Hover highlight; click selects the interface (iface is bound as
        # a lambda default to avoid the late-binding closure pitfall).
        iface_lbl1.bind("<Enter>", lambda e: e.widget.config(background="tan"))
        iface_lbl1.bind("<Leave>", lambda e: e.widget.config(background="gray"))
        iface_lbl1.bind("<Button-1>", lambda event, iface=i: self.select_interface(event, iface))
    iface_box_top.pack(fill=X)
    # Container for list of Wireless Interfaces.
    iface_box_bottom = Frame(iface_box, highlightbackground=self.bg, highlightthickness=2)
    iface_title2 = Label(iface_box_bottom, text="Wireless Interfaces", bg=self.bg, fg=self.fg, width=20, font="Verdana 16 bold").pack(fill=X)
    # Getting list of wireless interfaces in the system
    self.w_interfaces = GetInterfaceList().getIface()
    for iface, mode in self.w_interfaces:
        text = "%s (Mode: %s)" % (iface.title(), mode.title())
        iface_lbl2 = Label(iface_box_bottom, text=text, width=20, font="Verdana 12 bold", cursor="hand2", bg="gray")
        img2_lbl = Label(iface_lbl2, image=self.img_wlan, bg="gray").pack(side=LEFT, padx=5)
        iface_lbl2.pack(anchor=W, ipady=5, pady=5, padx=2, fill=X)
        iface_lbl2.bind("<Enter>", lambda e: e.widget.config(background="tan"))
        iface_lbl2.bind("<Leave>", lambda e: e.widget.config(background="gray"))
        # iface/mode bound as defaults so each label keeps its own values.
        iface_lbl2.bind("<Button-1>", lambda event, iface=iface, mode=mode: self.select_interface(event, iface, mode))
    iface_box_bottom.pack(pady=10, fill=X)
    # Container for putting Help information
    help_box = Frame(iface_box, highlightbackground=self.bg, highlightthickness=2)
    help_title = Label(help_box, text="Help", width=20, bg=self.bg, fg=self.fg, font="Verdana 16 bold").pack(fill=X)
    # Link to Documentation of the program.
    help_lbl = Label(help_box, text="Read Documentation", width=20, font="Verdana 12 bold", cursor="hand2", bg="gray")
    help_img = Label(help_lbl, image=self.img_help, bg="gray").pack(side=LEFT, padx=5)
    help_lbl.pack(anchor=W, ipady=5, pady=5, padx=2, fill=X)
    help_lbl.bind("<Enter>", lambda e: e.widget.config(background="tan"))
    help_lbl.bind("<Leave>", lambda e: e.widget.config(background="gray"))
    # read_documentation presumably comes from one of the star imports
    # at the top of the file -- verify.
    help_lbl.bind("<Button-1>", read_documentation)
    help_box.pack(fill=X)
    iface_box.pack(side=LEFT, anchor=NW, ipadx=5, ipady=5, padx=5, pady=5, expand=YES, fill=X)
    # Right-hand side: static slide image.
    right_box = Frame(win_body, width=500, height=500, highlightbackground="green", bg="green", highlightthickness=1)
    self.slide1_image = PhotoImage(file="gui/images/slide1.gif")
    slide1 = Label(right_box, image=self.slide1_image, width=500, height=500).pack(expand=YES, fill=BOTH)
    right_box.pack(side=LEFT, anchor=NE, ipadx=5, ipady=5, padx=5, pady=5, expand=YES, fill=BOTH)
    win_body.pack(ipadx=5, ipady=5, fill=BOTH, expand=YES)
    bottom_lbl = Label(self.win, text="Copyright: Sajjan Bhattarai\n2014", font="Verdana 8 underline").pack(fill=X, padx=5, pady=5)
    self.win.pack(expand=YES, fill=BOTH)
# Displays or creates items in the main window according to user's view preference.
# This function is aimed to be run after the capture interface is selected and the Sniffer or Scanner is initiated.
def create_items(self, view):
    """Switch the main area between the canvas ("graph") and the
    result-list frame ("table"), updating the view buttons' colors."""
    if view == "graph":
        # Highlight the graph button, reset the table button.
        self.graph_btn.configure(background=self.bg, foreground=self.fg)
        self.table_btn.configure(background=self.fg, foreground="black")
        self.scrollY1.pack(fill=Y, side=RIGHT, expand=FALSE)
        self.canvas1.pack()
        # Hide the welcome screen and the table view.
        self.win.pack_forget()
        self.listresult.pack_forget()
    elif view == "table":
        self.table_btn.configure(background=self.bg, foreground=self.fg)
        self.graph_btn.configure(background=self.fg, foreground="black")
        self.listresult.pack(expand=YES, fill=BOTH)
        # NOTE(review): the welcome frame (self.win) is not hidden in this
        # branch; presumably the graph view has already been shown once
        # (scan_ap calls create_items("graph") first) -- verify.
        self.scrollY1.pack_forget()
        self.canvas1.pack_forget()
def scan_ap(self, iface, canvas):
    """Start the threaded access-point scanner on *iface*, plotting on *canvas*."""
    # Switch the UI into the graph view before scanning starts.
    self.create_items("graph")
    table = Toplevel()
    table.title("Wireless Scanning: Access Points")
    scan = ThreadedClient()
    scan.main(table, iface, canvas, "position")
    # Let the user stop the running scan from the toolbar.
    self.stop_btn.configure(state='normal', command=scan.endApplication)
    table.mainloop()
# Handler for click events that occur on the interfaces' list
def select_interface(self, event, iface, mode=""):
    """Handle a click on an interface entry.

    *mode* is the interface's wireless mode ("" for non-wireless
    interfaces).  A wireless interface not already in monitor mode is
    offered a switch to monitor mode; AP scanning is only enabled for
    wireless interfaces, sniffing for any selected interface.
    NOTE(review): indentation was reconstructed; the scan button is
    assumed to be enabled for any wireless interface -- confirm intent.
    """
    if mode != "":
        if mode != "monitor":
            ask_user = tkMessageBox.askokcancel(title="Turn on Monitor mode", message="Monitor Mode isn't yet enabled on this interface. Turn Monitor mode on %s interface?"%iface)
            if ask_user > 0 :
                # HACK: shell commands built by string interpolation.
                # iface comes from the detected interface list, but this
                # pattern would be unsafe for untrusted input.
                os.system("sudo -A ifconfig %s down" % iface)
                os.system("sudo iwconfig %s mode monitor" % iface)
                os.system("sudo ifconfig %s up" % iface)
                event.widget.configure(text="%s (Mode: Monitor)" %iface.title())
            else:
                # User declined: leave the interface unselected.
                return
        # Wireless interface ready: allow access-point scanning.
        self.start_btn.configure(state='normal', command=lambda i=iface, canvas=self.canvas1: self.scan_ap(i, canvas))
    else:
        # Non-wireless interface: AP scanning is not applicable.
        self.start_btn.configure(state='disabled')
    self.interface = iface
    # Module-level mirror of the selection -- presumably read elsewhere;
    # verify before removing.
    global interface
    interface = self.interface
    self.sniff_btn.configure(state='normal', command=lambda i=iface: self.sniffall(i))
    self.choose_iface.configure(fg=self.bg, text="Interface: %s"%self.interface)
    self.balloon.bind(self.choose_iface, 'The %s has been selected for packet capture.'%self.choose_iface['text'])
def sniffall(self, iface):
    """Start packet sniffing on *iface* and show results in the table view."""
    self.listresult.pack(expand=YES, fill=BOTH)
    # Build the sniff table and start the capture thread only once;
    # self.count guards against a second start.
    if self.count == 0:
        # SniffTable is called for its side effects (creates self.table
        # and self.canvas); its return value is always None.
        st = self.SniffTable(self.listresult)
        sniff = ThreadSniffer()
        sniff.main(self.table, self.canvas, iface)
        self.stop_btn.configure(state='normal', command=sniff.endApplication)
        self.win.pack_forget()
        self.count += 1
def SniffTable(self, parent):
    """Build the scrollable packet table inside *parent*.

    Creates self.canvas (the scrollable viewport), self.table (the inner
    frame that capture rows are added to) and a static header row.
    Returns None; called for its side effects.
    """
    # Vertical scrollbar on the right side.
    vscrollbar = Scrollbar(parent, orient=VERTICAL)
    # Canvas makes the inner table frame scrollable.
    self.canvas = Canvas(parent, yscrollcommand=vscrollbar.set, height=700)
    vscrollbar.pack(fill=Y, side=RIGHT, expand=FALSE)
    # Inner frame that will hold one row per captured packet.
    self.table = Frame(self.canvas, height=700)
    # Colour scheme shared by the header labels.
    self.bg = "#009900"
    self.fg = "#ffffff"
    # Static header row.  FIX: the original spelled the option
    # 'highlightthicknes' on several labels, silently relying on Tk's
    # option-name abbreviation matching; it is now spelled consistently.
    # The former colN locals only held pack()'s None and were dropped.
    table_head = Frame(parent)
    columns = (
        (3, "SN"),
        (7, "Protocol"),
        (20, "Source"),
        (20, "Destination"),
        (12, "Type"),
        (8, "Subtype"),
        (16, "SSID"),
        (100, "Info"),
    )
    for width, title in columns:
        Label(table_head, width=width, text=title, background=self.bg,
              foreground=self.fg, highlightbackground=self.fg,
              highlightthickness=1).pack(side=LEFT, fill=X)
    # Packing table header and canvas into the GUI layout.
    table_head.pack(side=TOP, fill=X, expand=YES, padx=2)
    self.canvas.pack(expand=YES, padx=2, anchor=NW)
    vscrollbar.configure(command=self.canvas.yview)
    # Embed the table frame inside the canvas so it can scroll.
    self.cw = self.canvas.create_window(0, 0, window=self.table, anchor=NW,
                                        tags="table")
    # Track changes to canvas and frame sizes to keep them and the
    # scrollbar in sync.
    self.table.bind('<Configure>', self._configure_table)
    self.canvas.bind('<Configure>', self._configure_canvas)
# Handlers that keep the canvas size and the inner table size in step.
def _configure_table(self, event):
    """<Configure> handler: sync the canvas scrollregion with the table."""
    req_width = self.table.winfo_reqwidth()
    req_height = self.table.winfo_reqheight()
    # The scrollbar range must cover the whole inner table.
    self.canvas.configure(scrollregion="0 0 %s %s" % (req_width, req_height))
    if req_width != self.canvas.winfo_width():
        # Widen the canvas to fit the inner table.
        self.canvas.configure(width=req_width)
def _configure_canvas(self, event):
    """<Configure> handler: stretch the embedded table to the canvas width."""
    if self.table.winfo_reqwidth() != self.canvas.winfo_width():
        # update the inner table's width to fill the canvas
        self.canvas.itemconfigure(self.cw, width=self.canvas.winfo_width())
| {
"content_hash": "dfb9ccd904418ad4ff5136326d612d24",
"timestamp": "",
"source": "github",
"line_count": 274,
"max_line_length": 171,
"avg_line_length": 52.18978102189781,
"alnum_prop": 0.7227972027972028,
"repo_name": "sajjanbh/WLAN-Monitoring",
"id": "4a57d7da8effe37655a1cfd70bff034d22b5e0b4",
"size": "14323",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gui/container.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Perl",
"bytes": "816"
},
{
"name": "Python",
"bytes": "65863"
}
],
"symlink_target": ""
} |
class SimulationCostStructure(object):
    """Accumulator for the cost components of one simulation run.

    Every component starts at zero and is filled in by the simulation.
    """

    # Names of all cost attributes initialised to zero.
    _FIELDS = (
        "expandTechnology", "reagent", "labor", "facility",
        "equipmentUsage", "qualityControl", "totalCost", "costPerDose",
    )

    def __init__(self):
        for field in self._FIELDS:
            setattr(self, field, 0)
| {
"content_hash": "a482bc152ed66bf2ab3e349e5fec365f",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 38,
"avg_line_length": 31.272727272727273,
"alnum_prop": 0.49127906976744184,
"repo_name": "catiabandeiras/StemFactory",
"id": "ec77d510eb1554d1bda249fe25798d7cba5e139f",
"size": "369",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "webserver/lib/simulation/cost_structure.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3075"
},
{
"name": "HTML",
"bytes": "26135"
},
{
"name": "JavaScript",
"bytes": "3432"
},
{
"name": "Python",
"bytes": "138387"
}
],
"symlink_target": ""
} |
from itty import *
@get('/simple_post')
def simple_post(request):
    """Serve the simple POST demo form.

    FIX: use a context manager so the file handle is closed promptly
    instead of leaking until garbage collection.
    """
    with open('examples/html/simple_post.html', 'r') as html_file:
        return html_file.read()
@post('/test_post')
def test_post(request):
    """Echo the submitted 'foo' form field back to the client."""
    foo_value = request.POST.get('foo', 'not specified')
    return "'foo' is: %s" % foo_value
@get('/complex_post')
def complex_post(request):
    """Serve the complex POST demo form.

    FIX: use a context manager so the file handle is closed promptly
    instead of leaking until garbage collection.
    """
    with open('examples/html/complex_post.html', 'r') as html_file:
        return html_file.read()
@post('/test_complex_post')
def test_complex_post(request):
    """Echo the submitted 'foo' and 'bar' form fields back to the client."""
    foo_value = request.POST.get('foo', 'not specified')
    bar_value = request.POST.get('bar', 'not specified')
    html = """
'foo' is: %s<br>
'bar' is: %s
""" % (foo_value, bar_value)
    return html
run_itty()
| {
"content_hash": "540e01afed0cdee12b4c5759c00040a1",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 94,
"avg_line_length": 25.782608695652176,
"alnum_prop": 0.6290050590219224,
"repo_name": "rystecher/itty",
"id": "be03673026f7356ba6c7a296d512155c4af48cbe",
"size": "593",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "examples/posting_data.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "25614"
}
],
"symlink_target": ""
} |
""" BVT tests for Templates ISO
"""
#Import Local Modules
import marvin
from marvin.cloudstackTestCase import *
from marvin.cloudstackAPI import *
from marvin.integration.lib.utils import *
from marvin.integration.lib.base import *
from marvin.integration.lib.common import *
from nose.plugins.attrib import attr
import urllib
from random import random
#Import System modules
import time
_multiprocess_shared_ = True
class Services:
    """Test ISO Services

    Static fixture data for the ISO test cases: one account definition,
    two ISO definitions and shared defaults.  Zone, domain and ostype
    ids are filled in later by the test set-up code.
    """
    def __init__(self):
        self.services = {
            "account": {
                "email": "test@test.com",
                "firstname": "Test",
                "lastname": "User",
                "username": "test",
                # Random characters are appended in create account to
                # ensure unique username generated each time
                "password": "password",
            },
            "iso_1":
            {
                "displaytext": "Test ISO 1",
                "name": "ISO 1",
                "url": "http://people.apache.org/~tsp/dummy.iso",
                # Source URL where ISO is located
                "isextractable": True,
                "isfeatured": True,
                "ispublic": True,
                "ostype": "CentOS 5.3 (64-bit)",
            },
            "iso_2":
            {
                "displaytext": "Test ISO 2",
                "name": "ISO 2",
                "url": "http://people.apache.org/~tsp/dummy.iso",
                # Source URL where ISO is located
                "isextractable": True,
                "isfeatured": True,
                "ispublic": True,
                "ostype": "CentOS 5.3 (64-bit)",
                "mode": 'HTTP_DOWNLOAD',
                # Used in Extract template, value must be HTTP_DOWNLOAD
            },
            # Shared defaults used by the permission/edit tests below.
            "isfeatured": True,
            "ispublic": True,
            "isextractable": True,
            "bootable": True,  # For edit template
            "passwordenabled": True,
            "sleep": 60,       # seconds to wait for async state changes
            "timeout": 10,     # polling retries before giving up
            "ostype": "CentOS 5.3 (64-bit)",
            # CentOS 5.3 (64 bit)
        }
class TestCreateIso(cloudstackTestCase):
    """Smoke test: register (create) an ISO and verify it via listIsos."""

    def setUp(self):
        # Fresh clients and a dedicated account per test run.
        self.services = Services().services
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        # Get Zone, Domain and templates
        self.domain = get_domain(self.apiclient, self.services)
        self.zone = get_zone(self.apiclient, self.services)
        self.services['mode'] = self.zone.networktype
        self.services["domainid"] = self.domain.id
        self.services["iso_2"]["zoneid"] = self.zone.id
        self.account = Account.create(
            self.apiclient,
            self.services["account"],
            domainid=self.domain.id
        )
        # Finding the OsTypeId from Ostype
        ostypes = list_os_types(
            self.apiclient,
            description=self.services["ostype"]
        )
        if not isinstance(ostypes, list):
            raise unittest.SkipTest("OSTypeId for given description not found")
        self.services["iso_1"]["ostypeid"] = ostypes[0].id
        self.services["iso_2"]["ostypeid"] = ostypes[0].id
        self.services["ostypeid"] = ostypes[0].id
        # Deleting the account cascades to the ISOs it owns.
        self.cleanup = [self.account]
        return

    def tearDown(self):
        try:
            # Clean up, terminate the created ISOs
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags = ["advanced", "basic", "eip", "sg", "advancedns", "smoke"])
    def test_01_create_iso(self):
        """Test create public & private ISO
        """
        # Validate the following:
        # 1. database (vm_template table) should be
        #    updated with newly created ISO
        # 2. UI should show the newly added ISO
        # 3. listIsos API should show the newly added ISO
        iso = Iso.create(
            self.apiclient,
            self.services["iso_2"],
            account=self.account.name,
            domainid=self.account.domainid
        )
        self.debug("ISO created with ID: %s" % iso.id)
        try:
            # Wait until secondary storage has fetched the ISO.
            iso.download(self.apiclient)
        except Exception as e:
            self.fail("Exception while downloading ISO %s: %s"\
                      % (iso.id, e))
        list_iso_response = list_isos(
            self.apiclient,
            id=iso.id
        )
        self.assertEqual(
            isinstance(list_iso_response, list),
            True,
            "Check list response returns a valid list"
        )
        self.assertNotEqual(
            len(list_iso_response),
            0,
            "Check template available in List ISOs"
        )
        # Verify the listed ISO matches what was registered.
        iso_response = list_iso_response[0]
        self.assertEqual(
            iso_response.displaytext,
            self.services["iso_2"]["displaytext"],
            "Check display text of newly created ISO"
        )
        self.assertEqual(
            iso_response.name,
            self.services["iso_2"]["name"],
            "Check name of newly created ISO"
        )
        self.assertEqual(
            iso_response.zoneid,
            self.services["iso_2"]["zoneid"],
            "Check zone ID of newly created ISO"
        )
        return
class TestISO(cloudstackTestCase):
@classmethod
def setUpClass(cls):
    """Create one account and two downloaded ISOs shared by all tests."""
    cls.services = Services().services
    cls.api_client = super(TestISO, cls).getClsTestClient().getApiClient()
    # Get Zone, Domain and templates
    cls.domain = get_domain(cls.api_client, cls.services)
    cls.zone = get_zone(cls.api_client, cls.services)
    cls.services["domainid"] = cls.domain.id
    cls.services["iso_1"]["zoneid"] = cls.zone.id
    cls.services["iso_2"]["zoneid"] = cls.zone.id
    cls.services["sourcezoneid"] = cls.zone.id
    # populate second zone id for iso copy
    cmd = listZones.listZonesCmd()
    cls.zones = cls.api_client.listZones(cmd)
    if not isinstance(cls.zones, list):
        raise Exception("Failed to find zones.")
    # Create an account, ISOs etc.
    cls.account = Account.create(
        cls.api_client,
        cls.services["account"],
        domainid=cls.domain.id
    )
    cls.services["account"] = cls.account.name
    # Finding the OsTypeId from Ostype
    ostypes = list_os_types(
        cls.api_client,
        description=cls.services["ostype"]
    )
    if not isinstance(ostypes, list):
        raise unittest.SkipTest("OSTypeId for given description not found")
    cls.services["iso_1"]["ostypeid"] = ostypes[0].id
    cls.services["iso_2"]["ostypeid"] = ostypes[0].id
    cls.services["ostypeid"] = ostypes[0].id
    # Register and fully download both ISOs before any test runs.
    cls.iso_1 = Iso.create(
        cls.api_client,
        cls.services["iso_1"],
        account=cls.account.name,
        domainid=cls.account.domainid
    )
    try:
        cls.iso_1.download(cls.api_client)
    except Exception as e:
        raise Exception("Exception while downloading ISO %s: %s"\
                        % (cls.iso_1.id, e))
    cls.iso_2 = Iso.create(
        cls.api_client,
        cls.services["iso_2"],
        account=cls.account.name,
        domainid=cls.account.domainid
    )
    try:
        cls.iso_2.download(cls.api_client)
    except Exception as e:
        raise Exception("Exception while downloading ISO %s: %s"\
                        % (cls.iso_2.id, e))
    # Deleting the account cascades to the ISOs it owns.
    cls._cleanup = [cls.account]
    return
@classmethod
def tearDownClass(cls):
    """Delete the shared account (and therefore its ISOs)."""
    try:
        cls.api_client = super(TestISO, cls).getClsTestClient().getApiClient()
        # Clean up, terminate the created templates
        cleanup_resources(cls.api_client, cls._cleanup)
    except Exception as e:
        raise Exception("Warning: Exception during cleanup : %s" % e)
    return
def setUp(self):
    """Per-test clients and an empty per-test cleanup list."""
    self.apiclient = self.testClient.getApiClient()
    self.dbclient = self.testClient.getDbConnection()
    self.cleanup = []
def tearDown(self):
    """Remove any resources a test registered in self.cleanup."""
    try:
        # Clean up, terminate the created ISOs, VMs
        cleanup_resources(self.apiclient, self.cleanup)
    except Exception as e:
        raise Exception("Warning: Exception during cleanup : %s" % e)
    return
@attr(tags = ["advanced", "basic", "eip", "sg", "advancedns", "smoke"])
def test_02_edit_iso(self):
    """Test Edit ISO
    """
    # Validate the following:
    # 1. UI should show the edited values for ISO
    # 2. database (vm_template table) should have updated values
    # Generate random values for updating ISO name and Display text
    new_displayText = random_gen()
    new_name = random_gen()
    self.debug("Updating ISO permissions for ISO: %s" % self.iso_1.id)
    cmd = updateIso.updateIsoCmd()
    # Assign new values to attributes
    cmd.id = self.iso_1.id
    cmd.displaytext = new_displayText
    cmd.name = new_name
    cmd.bootable = self.services["bootable"]
    cmd.passwordenabled = self.services["passwordenabled"]
    self.apiclient.updateIso(cmd)
    # Check whether attributes are updated in ISO using listIsos
    list_iso_response = list_isos(
        self.apiclient,
        id=self.iso_1.id
    )
    self.assertEqual(
        isinstance(list_iso_response, list),
        True,
        "Check list response returns a valid list"
    )
    self.assertNotEqual(
        len(list_iso_response),
        0,
        "Check template available in List ISOs"
    )
    iso_response = list_iso_response[0]
    self.assertEqual(
        iso_response.displaytext,
        new_displayText,
        "Check display text of updated ISO"
    )
    self.assertEqual(
        iso_response.name,
        new_name,
        "Check name of updated ISO"
    )
    self.assertEqual(
        iso_response.bootable,
        self.services["bootable"],
        "Check if image is bootable of updated ISO"
    )
    self.assertEqual(
        iso_response.ostypeid,
        self.services["ostypeid"],
        "Check OSTypeID of updated ISO"
    )
    return
@attr(tags = ["advanced", "basic", "eip", "sg", "advancedns", "smoke"])
def test_03_delete_iso(self):
    """Test delete ISO
    """
    # Validate the following:
    # 1. UI should not show the deleted ISO
    # 2. database (vm_template table) should not contain deleted ISO
    self.debug("Deleting ISO with ID: %s" % self.iso_1.id)
    self.iso_1.delete(self.apiclient)
    # Sleep to ensure that ISO state is reflected in other calls
    time.sleep(self.services["sleep"])
    # ListIsos to verify deleted ISO is properly deleted
    list_iso_response = list_isos(
        self.apiclient,
        id=self.iso_1.id
    )
    # listIsos returns None (not an empty list) when nothing matches.
    self.assertEqual(
        list_iso_response,
        None,
        "Check if ISO exists in ListIsos"
    )
    return
@attr(tags = ["advanced", "basic", "eip", "sg", "advancedns", "smoke"])
def test_04_extract_Iso(self):
    "Test for extract ISO"
    # Validate the following
    # 1. Admin should able extract and download the ISO
    # 2. ListIsos should display all the public templates
    #    for all kind of users
    # 3. ListIsos should not display the system templates
    self.debug("Extracting ISO with ID: %s" % self.iso_2.id)
    cmd = extractIso.extractIsoCmd()
    cmd.id = self.iso_2.id
    cmd.mode = self.services["iso_2"]["mode"]
    cmd.zoneid = self.services["iso_2"]["zoneid"]
    list_extract_response = self.apiclient.extractIso(cmd)
    try:
        # Format URL to ASCII to retrieve response code
        # NOTE: urllib.unquote_plus/urlopen are Python 2 APIs (moved to
        # urllib.parse/urllib.request in Python 3); this suite targets
        # Python 2.
        formatted_url = urllib.unquote_plus(list_extract_response.url)
        url_response = urllib.urlopen(formatted_url)
        response_code = url_response.getcode()
    except Exception:
        self.fail(
            "Extract ISO Failed with invalid URL %s (ISO id: %s)" \
            % (formatted_url, self.iso_2.id)
        )
    self.assertEqual(
        list_extract_response.id,
        self.iso_2.id,
        "Check ID of the downloaded ISO"
    )
    self.assertEqual(
        list_extract_response.extractMode,
        self.services["iso_2"]["mode"],
        "Check mode of extraction"
    )
    self.assertEqual(
        list_extract_response.zoneid,
        self.services["iso_2"]["zoneid"],
        "Check zone ID of extraction"
    )
    # HTTP 200 confirms the extract URL actually serves the ISO.
    self.assertEqual(
        response_code,
        200,
        "Check for a valid response of download URL"
    )
    return
@attr(tags = ["advanced", "basic", "eip", "sg", "advancedns", "smoke"])
def test_05_iso_permissions(self):
    """Update & Test for ISO permissions"""
    # validate the following
    # 1. listIsos returns valid permissions set for ISO
    # 2. permission changes should be reflected in vm_template
    #    table in database
    self.debug("Updating permissions for ISO: %s" % self.iso_2.id)
    cmd = updateIsoPermissions.updateIsoPermissionsCmd()
    cmd.id = self.iso_2.id
    # Update ISO permissions
    cmd.isfeatured = self.services["isfeatured"]
    cmd.ispublic = self.services["ispublic"]
    cmd.isextractable = self.services["isextractable"]
    self.apiclient.updateIsoPermissions(cmd)
    # Verify ListIsos have updated permissions for the ISO for normal user
    list_iso_response = list_isos(
        self.apiclient,
        id=self.iso_2.id,
        account=self.account.name,
        domainid=self.account.domainid
    )
    self.assertEqual(
        isinstance(list_iso_response, list),
        True,
        "Check list response returns a valid list"
    )
    iso_response = list_iso_response[0]
    self.assertEqual(
        iso_response.id,
        self.iso_2.id,
        "Check ISO ID"
    )
    self.assertEqual(
        iso_response.ispublic,
        self.services["ispublic"],
        "Check ispublic permission of ISO"
    )
    self.assertEqual(
        iso_response.isfeatured,
        self.services["isfeatured"],
        "Check isfeatured permission of ISO"
    )
    return
@attr(tags = ["advanced", "basic", "eip", "sg", "advancedns", "smoke", "multizone"])
def test_06_copy_iso(self):
    """Test for copy ISO from one zone to another"""
    # Validate the following
    # 1. copy ISO should be successful and secondary storage
    #    should contain new copied ISO.
    if len(self.zones) <= 1:
        self.skipTest("Not enough zones available to perform copy template")
    # Pick any zone other than the source zone as the destination.
    # NOTE: subscripting filter(...) is Python 2 only (filter returns an
    # iterator in Python 3); this suite targets Python 2.
    self.services["destzoneid"] = filter(lambda z: z.id != self.zone.id, self.zones)[0].id
    self.debug("Copy ISO from %s to %s" % (
        self.zone.id,
        self.services["destzoneid"]
    ))
    cmd = copyIso.copyIsoCmd()
    cmd.id = self.iso_2.id
    cmd.destzoneid = self.services["destzoneid"]
    cmd.sourcezoneid = self.zone.id
    self.apiclient.copyIso(cmd)
    # Verify ISO is copied to another zone using ListIsos
    list_iso_response = list_isos(
        self.apiclient,
        id=self.iso_2.id,
        zoneid=self.services["destzoneid"]
    )
    self.assertEqual(
        isinstance(list_iso_response, list),
        True,
        "Check list response returns a valid list"
    )
    self.assertNotEqual(
        len(list_iso_response),
        0,
        "Check template extracted in List ISO"
    )
    iso_response = list_iso_response[0]
    self.assertEqual(
        iso_response.id,
        self.iso_2.id,
        "Check ID of the downloaded ISO"
    )
    self.assertEqual(
        iso_response.zoneid,
        self.services["destzoneid"],
        "Check zone ID of the copied ISO"
    )
    self.debug("Cleanup copied ISO: %s" % iso_response.id)
    # Cleanup - delete the copied ISO, but only after it is fully
    # downloaded in the destination zone; poll until isready.
    timeout = self.services["timeout"]
    while True:
        time.sleep(self.services["sleep"])
        list_iso_response = list_isos(
            self.apiclient,
            id=self.iso_2.id,
            zoneid=self.services["destzoneid"]
        )
        self.assertEqual(
            isinstance(list_iso_response, list),
            True,
            "Check list response returns a valid list"
        )
        self.assertNotEqual(
            len(list_iso_response),
            0,
            "Check template extracted in List ISO"
        )
        iso_response = list_iso_response[0]
        if iso_response.isready == True:
            break
        if timeout == 0:
            raise Exception(
                "Failed to download copied iso(ID: %s)" % iso_response.id)
        timeout = timeout - 1
    cmd = deleteIso.deleteIsoCmd()
    cmd.id = iso_response.id
    cmd.zoneid = self.services["destzoneid"]
    self.apiclient.deleteIso(cmd)
    return
| {
"content_hash": "6ec4c76b2ed44a3f7e6355af6acdd412",
"timestamp": "",
"source": "github",
"line_count": 550,
"max_line_length": 94,
"avg_line_length": 37.97818181818182,
"alnum_prop": 0.46557832248180775,
"repo_name": "mufaddalq/cloudstack-datera-driver",
"id": "75289b8fbe3e1878a0d3a3439d58e28931a12fbe",
"size": "21675",
"binary": false,
"copies": "1",
"ref": "refs/heads/4.2",
"path": "test/integration/smoke/test_iso.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "250"
},
{
"name": "Batchfile",
"bytes": "6317"
},
{
"name": "CSS",
"bytes": "302008"
},
{
"name": "FreeMarker",
"bytes": "4917"
},
{
"name": "HTML",
"bytes": "38671"
},
{
"name": "Java",
"bytes": "79758943"
},
{
"name": "JavaScript",
"bytes": "4237188"
},
{
"name": "Perl",
"bytes": "1879"
},
{
"name": "Python",
"bytes": "5187499"
},
{
"name": "Shell",
"bytes": "803262"
}
],
"symlink_target": ""
} |
from __future__ import (absolute_import, unicode_literals)
"""
==========
PyOrganism
==========
:Authors:
Moritz Emanuel Beber
:Date:
2012-05-22
:Copyright:
Copyright(c) 2012 Jacobs University of Bremen. All rights reserved.
:File:
organism.py
"""
__all__ = ["Organism"]
import logging
from builtins import str
from future.utils import python_2_unicode_compatible
from . import miscellaneous as misc
LOGGER = logging.getLogger(__name__)
LOGGER.addHandler(misc.NullHandler())
OPTIONS = misc.OptionsManager.get_instance()
@python_2_unicode_compatible
class Organism(object):
    """
    A representation of a living organism with multiple layers of
    organisation.

    As many layers of organisation as are available or desired may be
    included in the `Organism` object; all of them start out empty and
    are populated by the caller.

    Notes
    -----

    Examples
    --------
    """

    def __init__(self, name, **kw_args):
        """
        Parameters
        ----------
        name: str
            The name of the organism. Should be unique but that's not a
            requirement.
        """
        super(Organism, self).__init__(**kw_args)
        self.name = str(name)
        # Layers of organisation; all unset until filled in externally.
        for layer in ("genes", "trn", "gpn", "go", "couplons",
                      "metabolism", "metabolic_network"):
            setattr(self, layer, None)
        # Per-experiment analysis results.
        self.activity = dict()
        self.significant = dict()

    def __str__(self):
        return self.name
| {
"content_hash": "8d07d2effc90b6049e128dc234b799f2",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 79,
"avg_line_length": 19.31578947368421,
"alnum_prop": 0.6008174386920981,
"repo_name": "Midnighter/pyorganism",
"id": "1ee041a2e7de1b5c5122f1d81cdcce2bf0ee4704",
"size": "1494",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyorganism/organism.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "3259"
},
{
"name": "Python",
"bytes": "533487"
},
{
"name": "Shell",
"bytes": "3679"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import collections
import logging
import warnings
import netaddr
from django.conf import settings
from django.utils.datastructures import SortedDict
from django.utils.translation import ugettext_lazy as _
from keystoneclient import exceptions as keystone_exceptions
from neutronclient.common import exceptions as neutron_exc
from neutronclient.v2_0 import client as neutron_client
from cloud.utils import messages
from cloud.utils.memoized import memoized # noqa
from cloud.api import base
from cloud.api import network_base
from cloud.api import nova
#from cloud import policy
LOG = logging.getLogger(__name__)
IP_VERSION_DICT = {4: 'IPv4', 6: 'IPv6'}
OFF_STATE = 'OFF'
ON_STATE = 'ON'
class NeutronAPIDictWrapper(base.APIDictWrapper):
    """Base wrapper adding neutron-specific conveniences to API dicts."""

    def set_id_as_name_if_empty(self, length=8):
        """If the resource has no name, use '(<id prefix>)' as its name.

        length: number of id characters to keep; a falsy value keeps the
        whole id.
        """
        try:
            if not self._apidict['name']:
                id = self._apidict['id']
                if length:
                    id = id[:length]
                self._apidict['name'] = '(%s)' % id
        except KeyError:
            # Resource carries no 'name'/'id' key; nothing to do.
            pass

    def items(self):
        # Expose the underlying dict's items for template iteration.
        return self._apidict.items()

    @property
    def name_or_id(self):
        # Fall back to the first 13 id characters in parentheses.
        return (self._apidict.get('name') or
                '(%s)' % self._apidict['id'][:13])
class Agent(NeutronAPIDictWrapper):
    """Wrapper for neutron agents."""

    def __init__(self, apiresource):
        # Expose the boolean admin_state_up as a display string.
        apiresource['admin_state'] = \
            'UP' if apiresource['admin_state_up'] else 'DOWN'
        super(Agent, self).__init__(apiresource)
class Network(NeutronAPIDictWrapper):
    """Wrapper for neutron Networks."""

    def __init__(self, apiresource):
        # Expose the boolean admin_state_up as a display string.
        apiresource['admin_state'] = \
            'UP' if apiresource['admin_state_up'] else 'DOWN'
        # Django templates cannot handle a key containing a colon (e.g.
        # 'router:external'), so mirror such keys under a
        # double-underscore alias ('router__external').
        # FIX: the original test was `if key.find(':')`, which is truthy
        # for -1 (no colon present) and falsy for a colon at index 0 --
        # a membership test expresses the intent correctly.  Iterate over
        # a snapshot of the keys because the dict is mutated in the loop
        # (iterating .keys() while inserting raises on Python 3).
        for key in list(apiresource):
            if ':' in key:
                apiresource['__'.join(key.split(':'))] = apiresource[key]
        super(Network, self).__init__(apiresource)
class Subnet(NeutronAPIDictWrapper):
    """Wrapper for neutron subnets."""

    def __init__(self, apiresource):
        # Human-readable IP version string (get_ipver_str is defined
        # elsewhere in this module; presumably it maps 4/6 to
        # 'IPv4'/'IPv6' via IP_VERSION_DICT -- verify there).
        apiresource['ipver_str'] = get_ipver_str(apiresource['ip_version'])
        super(Subnet, self).__init__(apiresource)
class Port(NeutronAPIDictWrapper):
    """Wrapper for neutron ports."""

    def __init__(self, apiresource):
        # Expose the boolean admin_state_up as a display string.
        apiresource['admin_state'] = \
            'UP' if apiresource['admin_state_up'] else 'DOWN'
        # mac_learning_enabled is only present with the mac-learning
        # extension; mirror it as an ON/OFF display string when present.
        if 'mac_learning_enabled' in apiresource:
            apiresource['mac_state'] = \
                ON_STATE if apiresource['mac_learning_enabled'] else OFF_STATE
        super(Port, self).__init__(apiresource)
class Profile(NeutronAPIDictWrapper):
    """Wrapper for neutron profiles."""

    # Attributes promoted from the underlying API dict.
    _attrs = ['profile_id', 'name', 'segment_type', 'segment_range',
              'sub_type', 'multicast_ip_index', 'multicast_ip_range']

    def __init__(self, apiresource):
        super(Profile, self).__init__(apiresource)
class Router(NeutronAPIDictWrapper):
    """Wrapper for neutron routers."""

    def __init__(self, apiresource):
        # Expose the boolean admin_state_up as a display string.
        apiresource['admin_state'] = \
            'UP' if apiresource['admin_state_up'] else 'DOWN'
        super(Router, self).__init__(apiresource)
class SecurityGroup(NeutronAPIDictWrapper):
    """Wrapper for a neutron security group."""
    # Required attributes: id, name, description, tenant_id, rules

    def __init__(self, sg, sg_dict=None):
        # sg_dict maps secgroup id -> name for resolving remote groups
        # in rules; default to this group's own mapping.
        if sg_dict is None:
            sg_dict = {sg['id']: sg['name']}
        # Wrap each raw rule so templates get normalized attributes.
        sg['rules'] = [SecurityGroupRule(rule, sg_dict)
                       for rule in sg['security_group_rules']]
        super(SecurityGroup, self).__init__(sg)
class SecurityGroupRule(NeutronAPIDictWrapper):
    """Wrapper normalizing a neutron security group rule for display."""
    # Required attributes:
    #   id, parent_group_id
    #   ip_protocol, from_port, to_port, ip_range, group
    #   ethertype, direction (Neutron specific)

    def _get_secgroup_name(self, sg_id, sg_dict):
        """Resolve a secgroup id to a display name via *sg_dict*.

        Returns u'' for a falsy sg_id.
        """
        if sg_id:
            if sg_dict is None:
                sg_dict = {}
            # If sg name not found in sg_dict,
            # first two parts of UUID is used as sg name.
            return sg_dict.get(sg_id, sg_id[:13])
        else:
            return u''

    def __init__(self, sgr, sg_dict=None):
        """Map raw neutron rule keys onto the nova-style rule schema."""
        # In Neutron, if both remote_ip_prefix and remote_group_id are None,
        # it means all remote IP range is allowed, i.e., 0.0.0.0/0 or ::/0.
        if not sgr['remote_ip_prefix'] and not sgr['remote_group_id']:
            if sgr['ethertype'] == 'IPv6':
                sgr['remote_ip_prefix'] = '::/0'
            else:
                sgr['remote_ip_prefix'] = '0.0.0.0/0'
        rule = {
            'id': sgr['id'],
            'parent_group_id': sgr['security_group_id'],
            'direction': sgr['direction'],
            'ethertype': sgr['ethertype'],
            'ip_protocol': sgr['protocol'],
            'from_port': sgr['port_range_min'],
            'to_port': sgr['port_range_max'],
        }
        # Exactly one of ip_range/group is populated (or neither).
        cidr = sgr['remote_ip_prefix']
        rule['ip_range'] = {'cidr': cidr} if cidr else {}
        group = self._get_secgroup_name(sgr['remote_group_id'], sg_dict)
        rule['group'] = {'name': group} if group else {}
        super(SecurityGroupRule, self).__init__(rule)

    def __unicode__(self):
        """Render e.g. 'ALLOW IPv4 22/tcp from 0.0.0.0/0'.

        NOTE: __unicode__ is a Python 2 protocol; it is not invoked
        implicitly on Python 3.
        """
        if 'name' in self.group:
            remote = self.group['name']
        elif 'cidr' in self.ip_range:
            remote = self.ip_range['cidr']
        else:
            remote = 'ANY'
        direction = 'to' if self.direction == 'egress' else 'from'
        if self.from_port:
            if self.from_port == self.to_port:
                proto_port = ("%s/%s" %
                              (self.from_port, self.ip_protocol.lower()))
            else:
                proto_port = ("%s-%s/%s" %
                              (self.from_port, self.to_port,
                               self.ip_protocol.lower()))
        elif self.ip_protocol:
            try:
                ip_proto = int(self.ip_protocol)
                proto_port = "ip_proto=%d" % ip_proto
            except Exception:
                # well-defined IP protocol name like TCP, UDP, ICMP.
                proto_port = self.ip_protocol
        else:
            proto_port = ''
        return (_('ALLOW %(ethertype)s %(proto_port)s '
                  '%(direction)s %(remote)s') %
                {'ethertype': self.ethertype,
                 'proto_port': proto_port,
                 'remote': remote,
                 'direction': direction})
class SecurityGroupManager(network_base.SecurityGroupManager):
    """Neutron-backed implementation of the security group manager API."""

    backend = 'neutron'

    def __init__(self, request):
        self.request = request
        self.client = neutronclient(request)

    def _list(self, **filters):
        """List security groups matching *filters*, wrapped for display."""
        secgroups = self.client.list_security_groups(**filters)
        return [SecurityGroup(sg) for sg in secgroups.get('security_groups')]

    def list(self):
        """List the current tenant's security groups."""
        # NOTE(review): tenant id is read from the request dict here; the
        # commented-out expression is the usual horizon source -- confirm
        # which one this fork intends.
        tenant_id = self.request.get("tenant_uuid")#self.request.user.tenant_id
        return self._list(tenant_id=tenant_id)

    def _sg_name_dict(self, sg_id, rules):
        """Create a mapping dict from secgroup id to its name."""
        # Include this group plus every remote group referenced by rules.
        related_ids = set([sg_id])
        related_ids |= set(filter(None, [r['remote_group_id'] for r in rules]))
        related_sgs = self.client.list_security_groups(id=related_ids,
                                                       fields=['id', 'name'])
        related_sgs = related_sgs.get('security_groups')
        return dict((sg['id'], sg['name']) for sg in related_sgs)

    def get(self, sg_id):
        """Fetch one security group with its rules' names resolved."""
        secgroup = self.client.show_security_group(sg_id).get('security_group')
        sg_dict = self._sg_name_dict(sg_id, secgroup['security_group_rules'])
        return SecurityGroup(secgroup, sg_dict)

    def create(self, name, desc):
        body = {'security_group': {'name': name,
                                   'description': desc}}
        secgroup = self.client.create_security_group(body)
        return SecurityGroup(secgroup.get('security_group'))

    def update(self, sg_id, name, desc):
        body = {'security_group': {'name': name,
                                   'description': desc}}
        secgroup = self.client.update_security_group(sg_id, body)
        return SecurityGroup(secgroup.get('security_group'))

    def delete(self, sg_id):
        self.client.delete_security_group(sg_id)

    def rule_create(self, parent_group_id,
                    direction=None, ethertype=None,
                    ip_protocol=None, from_port=None, to_port=None,
                    cidr=None, group_id=None):
        """Create a rule; negative/empty values are normalized to None."""
        if not cidr:
            cidr = None
        if from_port < 0:
            from_port = None
        if to_port < 0:
            to_port = None
        if isinstance(ip_protocol, int) and ip_protocol < 0:
            ip_protocol = None
        body = {'security_group_rule':
                {'security_group_id': parent_group_id,
                 'direction': direction,
                 'ethertype': ethertype,
                 'protocol': ip_protocol,
                 'port_range_min': from_port,
                 'port_range_max': to_port,
                 'remote_ip_prefix': cidr,
                 'remote_group_id': group_id}}
        rule = self.client.create_security_group_rule(body)
        rule = rule.get('security_group_rule')
        sg_dict = self._sg_name_dict(parent_group_id, [rule])
        return SecurityGroupRule(rule, sg_dict)

    def rule_delete(self, sgr_id):
        self.client.delete_security_group_rule(sgr_id)

    def list_by_instance(self, instance_id):
        """Gets security groups of an instance."""
        # Collect group ids from every port bound to the instance.
        ports = port_list(self.request, device_id=instance_id)
        sg_ids = []
        for p in ports:
            sg_ids += p.security_groups
        return self._list(id=set(sg_ids)) if sg_ids else []

    def update_instance_security_group(self, instance_id,
                                       new_security_group_ids):
        """Replace the security groups on all of an instance's ports."""
        ports = port_list(self.request, device_id=instance_id)
        for p in ports:
            params = {'security_groups': new_security_group_ids}
            port_update(self.request, p.id, **params)
class FloatingIp(base.APIDictWrapper):
    """Wrapper aliasing neutron floating IP keys to nova-style names."""

    _attrs = ['id', 'ip', 'fixed_ip', 'port_id', 'instance_id',
              'instance_type', 'pool']

    def __init__(self, fip):
        # Alias neutron's attribute names to the nova-style ones the
        # rest of the dashboard expects.
        fip['ip'] = fip['floating_ip_address']
        fip['fixed_ip'] = fip['fixed_ip_address']
        fip['pool'] = fip['floating_network_id']
        super(FloatingIp, self).__init__(fip)
class FloatingIpPool(base.APIDictWrapper):
    """A floating IP pool; in Neutron this is an external network."""
    pass
class FloatingIpTarget(base.APIDictWrapper):
    """A port/fixed-IP pair a floating IP can be associated with."""
    pass
class FloatingIpManager(network_base.FloatingIpManager):
    """Neutron implementation of Horizon's floating IP manager interface."""

    # Maps a port's device_owner prefix to the instance type reported in
    # FloatingIp.instance_type; unmatched owners pass through unchanged.
    device_owner_map = {
        'compute:': 'compute',
        'neutron:LOADBALANCER': 'loadbalancer',
    }

    def __init__(self, request):
        self.request = request
        self.client = neutronclient(request)

    def list_pools(self):
        """Return floating IP pools, i.e. Neutron external networks."""
        search_opts = {'router:external': True}
        return [FloatingIpPool(pool) for pool
                in self.client.list_networks(**search_opts).get('networks')]

    def _get_instance_type_from_device_owner(self, device_owner):
        # Fall back to the raw device_owner string when no prefix matches.
        for key, value in self.device_owner_map.items():
            if device_owner.startswith(key):
                return value
        return device_owner

    def _set_instance_info(self, fip, port=None):
        """Fill fip['instance_id'/'instance_type'] from the attached port.

        Fetches the port from Neutron unless the caller supplies one.
        """
        if fip['port_id']:
            if not port:
                port = port_get(self.request, fip['port_id'])
            fip['instance_id'] = port.device_id
            fip['instance_type'] = self._get_instance_type_from_device_owner(
                port.device_owner)
        else:
            fip['instance_id'] = None
            fip['instance_type'] = None

    def list(self, all_tenants=False, **search_opts):
        """List floating IPs, annotated with their instance information."""
        if not all_tenants:
            tenant_id = self.request.user.tenant_id
            # In Neutron, list_floatingips returns Floating IPs from
            # all tenants when the API is called with admin role, so
            # we need to filter them with tenant_id.
            search_opts['tenant_id'] = tenant_id
            port_search_opts = {'tenant_id': tenant_id}
        else:
            port_search_opts = {}
        fips = self.client.list_floatingips(**search_opts)
        fips = fips.get('floatingips')
        # Get port list to add instance_id to floating IP list
        # instance_id is stored in device_id attribute
        ports = port_list(self.request, **port_search_opts)
        port_dict = SortedDict([(p['id'], p) for p in ports])
        for fip in fips:
            self._set_instance_info(fip, port_dict.get(fip['port_id']))
        return [FloatingIp(fip) for fip in fips]

    def get(self, floating_ip_id):
        """Return a single floating IP with instance info filled in."""
        fip = self.client.show_floatingip(floating_ip_id).get('floatingip')
        self._set_instance_info(fip)
        return FloatingIp(fip)

    def allocate(self, pool):
        """Allocate a new floating IP from ``pool`` (an external net id)."""
        body = {'floatingip': {'floating_network_id': pool}}
        fip = self.client.create_floatingip(body).get('floatingip')
        self._set_instance_info(fip)
        return FloatingIp(fip)

    def release(self, floating_ip_id):
        """Delete (release) the given floating IP."""
        self.client.delete_floatingip(floating_ip_id)

    def associate(self, floating_ip_id, port_id):
        # NOTE: In Neutron Horizon floating IP support, port_id is
        # "<port_id>_<ip_address>" format to identify multiple ports.
        pid, ip_address = port_id.split('_', 1)
        update_dict = {'port_id': pid,
                       'fixed_ip_address': ip_address}
        self.client.update_floatingip(floating_ip_id,
                                      {'floatingip': update_dict})

    def disassociate(self, floating_ip_id, port_id):
        # ``port_id`` is accepted for interface compatibility but not
        # needed: clearing port_id detaches the floating IP.
        update_dict = {'port_id': None}
        self.client.update_floatingip(floating_ip_id,
                                      {'floatingip': update_dict})

    def _get_reachable_subnets(self, ports):
        """Return ids of subnets reachable from an external network."""
        # Retrieve subnet list reachable from external network
        ext_net_ids = [ext_net.id for ext_net in self.list_pools()]
        gw_routers = [r.id for r in router_list(self.request)
                      if (r.external_gateway_info and
                          r.external_gateway_info.get('network_id')
                          in ext_net_ids)]
        reachable_subnets = set([p.fixed_ips[0]['subnet_id'] for p in ports
                                 if ((p.device_owner ==
                                      'network:router_interface')
                                     and (p.device_id in gw_routers))])
        # we have to include any shared subnets as well because we may not
        # have permission to see the router interface to infer connectivity
        shared = set([s.id for n in network_list(self.request, shared=True)
                      for s in n.subnets])
        return reachable_subnets | shared

    def list_targets(self):
        """Return FloatingIpTarget entries for associable fixed IPs."""
        tenant_id = self.request.user.tenant_id
        ports = port_list(self.request, tenant_id=tenant_id)
        servers, has_more = nova.server_list(self.request)
        server_dict = SortedDict([(s.id, s.name) for s in servers])
        reachable_subnets = self._get_reachable_subnets(ports)
        targets = []
        for p in ports:
            # Remove network ports from Floating IP targets
            if p.device_owner.startswith('network:'):
                continue
            port_id = p.id
            server_name = server_dict.get(p.device_id)
            for ip in p.fixed_ips:
                if ip['subnet_id'] not in reachable_subnets:
                    continue
                target = {'name': '%s: %s' % (server_name, ip['ip_address']),
                          'id': '%s_%s' % (port_id, ip['ip_address']),
                          'instance_id': p.device_id}
                targets.append(FloatingIpTarget(target))
        return targets

    def _target_ports_by_instance(self, instance_id):
        """Return the ports attached to ``instance_id`` (or None)."""
        if not instance_id:
            return None
        search_opts = {'device_id': instance_id}
        return port_list(self.request, **search_opts)

    def get_target_id_by_instance(self, instance_id, target_list=None):
        """Return one "<port_id>_<ip>" target id for the instance."""
        if target_list is not None:
            targets = [target for target in target_list
                       if target['instance_id'] == instance_id]
            if not targets:
                return None
            return targets[0]['id']
        else:
            # In Neutron one port can have multiple ip addresses, so this
            # method picks up the first one and generate target id.
            ports = self._target_ports_by_instance(instance_id)
            if not ports:
                return None
            return '{0}_{1}'.format(ports[0].id,
                                    ports[0].fixed_ips[0]['ip_address'])

    def list_target_id_by_instance(self, instance_id, target_list=None):
        """Return all "<port_id>_<ip>" target ids for the instance."""
        if target_list is not None:
            return [target['id'] for target in target_list
                    if target['instance_id'] == instance_id]
        else:
            ports = self._target_ports_by_instance(instance_id)
            return ['{0}_{1}'.format(p.id, p.fixed_ips[0]['ip_address'])
                    for p in ports]

    def is_simple_associate_supported(self):
        # NOTE: There are two reason that simple association support
        # needs more considerations. (1) Neutron does not support the
        # default floating IP pool at the moment. It can be avoided
        # in case where only one floating IP pool exists.
        # (2) Neutron floating IP is associated with each VIF and
        # we need to check whether such VIF is only one for an instance
        # to enable simple association support.
        return False

    def is_supported(self):
        """Floating IP support requires the router feature to be enabled."""
        network_config = getattr(settings, 'OPENSTACK_NEUTRON_NETWORK', {})
        return network_config.get('enable_router', True)
def get_ipver_str(ip_version):
    """Convert an ip version number to a human-friendly string."""
    try:
        return IP_VERSION_DICT[ip_version]
    except KeyError:
        return ''
#@memoized
def neutronclient(request):
    """Construct a neutron API client from credentials in ``request``.

    NOTE(review): upstream Horizon builds the client from the keystone
    token (original construction kept here for reference)::

        insecure = getattr(settings, 'OPENSTACK_SSL_NO_VERIFY', False)
        cacert = getattr(settings, 'OPENSTACK_SSL_CACERT', None)
        c = neutron_client.Client(token=request.user.token.id,
                                  auth_url=base.url_for(request, 'identity'),
                                  endpoint_url=base.url_for(request,
                                                            'network'),
                                  insecure=insecure, ca_cert=cacert)

    This fork instead reads plain credentials via ``request.get(...)`` —
    presumably ``request`` is dict-like here; TODO confirm against callers.
    """
    c = neutron_client.Client(username=request.get("username"),
                              password=request.get("password"),
                              tenant_name=request.get("tenant_name"),
                              auth_url=request.get("auth_url"))
    return c
def list_resources_with_long_filters(list_method,
                                     filter_attr, filter_values, **params):
    """List neutron resources with handling RequestURITooLong exception.

    If filter parameters are long, the list resources API request leads to
    a 414 error (URI is too long). For such a case, this method splits
    the filter condition given by ``filter_attr``/``filter_values`` into
    chunks and calls ``list_method`` repeatedly.

    :param list_method: Method used to retrieve resource list.
    :param filter_attr: attribute name to be filtered. The value
        corresponding to this attribute is specified by "filter_values".
        If you want to specify more attributes for a filter condition,
        pass them as keyword arguments like "attr2=values2".
    :param filter_values: values of "filter_attr" to be filtered.
        If filter_values are too long and the total URI length exceeds the
        maximum length supported by the neutron server, filter_values will
        be split into sub lists if filter_values is a list.
    :param params: parameters to pass a specified listing API call
        without any changes. You can specify more filter conditions
        in addition to a pair of filter_attr and filter_values.
    """
    try:
        params[filter_attr] = filter_values
        return list_method(**params)
    except neutron_exc.RequestURITooLong as uri_len_exc:
        # The URI is too long because of too many filter values.
        # Use the excess attribute of the exception to know how many
        # filter values can be inserted into a single request.
        # We consider only the filter condition from (filter_attr,
        # filter_values) and do not consider other filter conditions
        # which may be specified in **params.
        if not isinstance(filter_values, list):
            filter_values = [filter_values]
        # Length of each query filter is:
        #   <key>=<value>&  (e.g., id=<uuid>)
        # i.e. len(key) + len(value) + 2 per value.
        all_filter_len = sum(len(filter_attr) + len(val) + 2
                             for val in filter_values)
        allowed_filter_len = all_filter_len - uri_len_exc.excess
        val_maxlen = max(len(val) for val in filter_values)
        filter_maxlen = len(filter_attr) + val_maxlen + 2
        # BUG FIX: use floor division — on Python 3 the original "/"
        # produced a float, and range() below raised TypeError. Also
        # guard against a zero chunk size (range() step must be > 0);
        # with chunk_size 1 an oversized value still fails, but with a
        # clear server-side error instead of a local crash.
        chunk_size = max(allowed_filter_len // filter_maxlen, 1)
        resources = []
        for i in range(0, len(filter_values), chunk_size):
            params[filter_attr] = filter_values[i:i + chunk_size]
            resources.extend(list_method(**params))
        return resources
def network_list(request, **params):
    """List networks, expanding each network's subnet ids into subnets."""
    LOG.debug("network_list(): params=%s", params)
    networks = neutronclient(request).list_networks(**params).get('networks')
    # Get subnet list to expand subnet info in network list.
    subnet_dict = {s['id']: s for s in subnet_list(request)}
    # Due to potential timing issues, we can't assume the subnet data is
    # in sync with the network data, so skip subnet ids we don't know.
    for net in networks:
        net['subnets'] = [subnet_dict[sid]
                          for sid in net.get('subnets', [])
                          if sid in subnet_dict]
    return [Network(net) for net in networks]
def network_list_for_tenant(request, tenant_id, **params):
    """Return a network list available for the tenant.

    The list contains networks owned by the tenant and public networks.
    If requested_networks specified, it searches requested_networks only.
    """
    LOG.debug("network_list_for_tenant(): tenant_id=%s, params=%s"
              % (tenant_id, params))
    # An admin sees every tenant's networks, so the first call must be
    # filtered by tenant_id; Neutron cannot return owned plus public
    # networks in a single API call, hence the two requests.
    owned = network_list(request, tenant_id=tenant_id,
                         shared=False, **params)
    public = network_list(request, shared=True, **params)
    return owned + public
def network_get(request, network_id, expand_subnet=True, **params):
    """Return one network, optionally expanding its subnet id list."""
    LOG.debug("network_get(): netid=%s, params=%s" % (network_id, params))
    network = neutronclient(request).show_network(network_id,
                                                  **params).get('network')
    # Since the number of subnets per network must be small,
    # call subnet_get() for each subnet instead of calling
    # subnet_list() once.
    if expand_subnet:
        network['subnets'] = [subnet_get(request, sid)
                              for sid in network['subnets']]
    return Network(network)
def network_create(request, **kwargs):
    """Create a network.

    (The original docstring said "Create a subnet" — a copy/paste error.)

    :param request: request context
    :param tenant_id: (optional) tenant id of the network created
    :param name: (optional) name of the network created
    :returns: Network object
    """
    LOG.debug("network_create(): kwargs = %s" % kwargs)
    # In the case network profiles are being used, profile id is needed.
    if 'net_profile_id' in kwargs:
        kwargs['n1kv:profile_id'] = kwargs.pop('net_profile_id')
    body = {'network': kwargs}
    network = neutronclient(request).create_network(body=body).get('network')
    return Network(network)
def network_update(request, network_id, **kwargs):
    """Update attributes of a network; kwargs become the request body."""
    LOG.debug("network_update(): netid=%s, params=%s" % (network_id, kwargs))
    body = {'network': kwargs}
    network = neutronclient(request).update_network(network_id,
                                                    body=body).get('network')
    return Network(network)
def network_delete(request, network_id):
    """Delete the given network."""
    LOG.debug("network_delete(): netid=%s" % network_id)
    neutronclient(request).delete_network(network_id)
def subnet_list(request, **params):
    """List subnets, optionally filtered by ``params``."""
    LOG.debug("subnet_list(): params=%s" % (params))
    subnets = neutronclient(request).list_subnets(**params).get('subnets')
    return [Subnet(s) for s in subnets]
def subnet_get(request, subnet_id, **params):
    """Return one subnet by id."""
    LOG.debug("subnet_get(): subnetid=%s, params=%s" % (subnet_id, params))
    subnet = neutronclient(request).show_subnet(subnet_id,
                                                **params).get('subnet')
    return Subnet(subnet)
def subnet_create(request, network_id, cidr, ip_version, **kwargs):
    """Create a subnet on a specified network.

    :param request: request context
    :param network_id: network id a subnet is created on
    :param cidr: subnet IP address range
    :param ip_version: IP version (4 or 6)
    :param gateway_ip: (optional) IP address of gateway
    :param tenant_id: (optional) tenant id of the subnet created
    :param name: (optional) name of the subnet created
    :returns: Subnet object
    """
    LOG.debug("subnet_create(): netid=%s, cidr=%s, ipver=%d, kwargs=%s"
              % (network_id, cidr, ip_version, kwargs))
    # Extra keyword arguments are merged into the subnet body verbatim.
    subnet_params = {'network_id': network_id,
                     'ip_version': ip_version,
                     'cidr': cidr}
    subnet_params.update(kwargs)
    data = neutronclient(request).create_subnet(
        body={'subnet': subnet_params})
    return Subnet(data.get('subnet'))
def subnet_update(request, subnet_id, **kwargs):
    """Update attributes of a subnet; kwargs become the request body."""
    LOG.debug("subnet_update(): subnetid=%s, kwargs=%s" % (subnet_id, kwargs))
    body = {'subnet': kwargs}
    subnet = neutronclient(request).update_subnet(subnet_id,
                                                  body=body).get('subnet')
    return Subnet(subnet)
def subnet_delete(request, subnet_id):
    """Delete the given subnet."""
    LOG.debug("subnet_delete(): subnetid=%s" % subnet_id)
    neutronclient(request).delete_subnet(subnet_id)
def port_list(request, **params):
    """List ports, optionally filtered by ``params``."""
    LOG.debug("port_list(): params=%s" % (params))
    ports = neutronclient(request).list_ports(**params).get('ports')
    return [Port(p) for p in ports]
def port_get(request, port_id, **params):
    """Return one port by id."""
    LOG.debug("port_get(): portid=%s, params=%s" % (port_id, params))
    port = neutronclient(request).show_port(port_id, **params).get('port')
    return Port(port)
def port_create(request, network_id, **kwargs):
    """Create a port on a specified network.

    :param request: request context
    :param network_id: network id the port is created on
    :param device_id: (optional) device id attached to the port
    :param tenant_id: (optional) tenant id of the port created
    :param name: (optional) name of the port created
    :returns: Port object
    """
    LOG.debug("port_create(): netid=%s, kwargs=%s" % (network_id, kwargs))
    # In the case policy profiles are being used, profile id is needed.
    if 'policy_profile_id' in kwargs:
        kwargs['n1kv:profile_id'] = kwargs.pop('policy_profile_id')
    body = {'port': {'network_id': network_id}}
    body['port'].update(kwargs)
    port = neutronclient(request).create_port(body=body).get('port')
    return Port(port)
def port_delete(request, port_id):
    """Delete the given port."""
    LOG.debug("port_delete(): portid=%s" % port_id)
    neutronclient(request).delete_port(port_id)
def port_update(request, port_id, **kwargs):
    """Update attributes of a port; kwargs become the request body."""
    LOG.debug("port_update(): portid=%s, kwargs=%s" % (port_id, kwargs))
    body = {'port': kwargs}
    port = neutronclient(request).update_port(port_id, body=body).get('port')
    return Port(port)
def profile_list(request, type_p, **params):
    """List Cisco N1KV profiles.

    :param type_p: profile type, either 'network' or 'policy'
    :raises ValueError: for an unsupported ``type_p`` (the original fell
        through and raised a confusing NameError on the unbound variable).
    """
    LOG.debug("profile_list(): "
              "profile_type=%(profile_type)s, params=%(params)s",
              {'profile_type': type_p, 'params': params})
    if type_p == 'network':
        profiles = neutronclient(request).list_network_profiles(
            **params).get('network_profiles')
    elif type_p == 'policy':
        profiles = neutronclient(request).list_policy_profiles(
            **params).get('policy_profiles')
    else:
        raise ValueError("Unsupported profile type: %s" % type_p)
    return [Profile(n) for n in profiles]
def profile_get(request, profile_id, **params):
    """Return one network profile by id."""
    LOG.debug("profile_get(): "
              "profileid=%(profileid)s, params=%(params)s",
              {'profileid': profile_id, 'params': params})
    profile = neutronclient(request).show_network_profile(
        profile_id, **params).get('network_profile')
    return Profile(profile)
def profile_create(request, **kwargs):
    """Create a network profile from the given keyword arguments."""
    LOG.debug("profile_create(): kwargs=%s", kwargs)
    body = {'network_profile': {}}
    body['network_profile'].update(kwargs)
    profile = neutronclient(request).create_network_profile(
        body=body).get('network_profile')
    return Profile(profile)
def profile_delete(request, profile_id):
    """Delete the given network profile."""
    LOG.debug("profile_delete(): profile_id=%s", profile_id)
    neutronclient(request).delete_network_profile(profile_id)
def profile_update(request, profile_id, **kwargs):
    """Update attributes of a network profile."""
    LOG.debug("profile_update(): "
              "profileid=%(profileid)s, kwargs=%(kwargs)s",
              {'profileid': profile_id, 'kwargs': kwargs})
    body = {'network_profile': kwargs}
    profile = neutronclient(request).update_network_profile(
        profile_id, body=body).get('network_profile')
    return Profile(profile)
def profile_bindings_list(request, type_p, **params):
    """List Cisco N1KV profile bindings.

    :param type_p: profile type, either 'network' or 'policy'
    :raises ValueError: for an unsupported ``type_p`` (the original fell
        through and raised a confusing NameError on the unbound variable).
    """
    LOG.debug("profile_bindings_list(): "
              "profile_type=%(profile_type)s params=%(params)s",
              {'profile_type': type_p, 'params': params})
    if type_p == 'network':
        bindings = neutronclient(request).list_network_profile_bindings(
            **params).get('network_profile_bindings')
    elif type_p == 'policy':
        bindings = neutronclient(request).list_policy_profile_bindings(
            **params).get('policy_profile_bindings')
    else:
        raise ValueError("Unsupported profile type: %s" % type_p)
    return [Profile(n) for n in bindings]
def router_create(request, **kwargs):
    """Create a router from the given keyword arguments."""
    LOG.debug("router_create():, kwargs=%s" % kwargs)
    body = {'router': {}}
    body['router'].update(kwargs)
    router = neutronclient(request).create_router(body=body).get('router')
    return Router(router)
def router_update(request, r_id, **kwargs):
    """Update attributes of a router."""
    LOG.debug("router_update(): router_id=%s, kwargs=%s" % (r_id, kwargs))
    body = {'router': {}}
    body['router'].update(kwargs)
    router = neutronclient(request).update_router(r_id, body=body)
    return Router(router['router'])
def router_get(request, router_id, **params):
    """Return one router by id."""
    router = neutronclient(request).show_router(router_id,
                                                **params).get('router')
    return Router(router)
def router_list(request, **params):
    """List routers, optionally filtered by ``params``."""
    routers = neutronclient(request).list_routers(**params).get('routers')
    return [Router(r) for r in routers]
def router_delete(request, router_id):
    """Delete the given router."""
    neutronclient(request).delete_router(router_id)
def router_add_interface(request, router_id, subnet_id=None, port_id=None):
    """Attach a subnet or an existing port to a router as an interface."""
    # Only include the identifiers the caller actually supplied.
    body = {key: value
            for key, value in (('subnet_id', subnet_id),
                               ('port_id', port_id))
            if value}
    client = neutronclient(request)
    return client.add_interface_router(router_id, body)
def router_remove_interface(request, router_id, subnet_id=None, port_id=None):
    """Detach a subnet or port interface from a router."""
    body = {}
    if subnet_id:
        body['subnet_id'] = subnet_id
    if port_id:
        body['port_id'] = port_id
    neutronclient(request).remove_interface_router(router_id, body)
def router_add_gateway(request, router_id, network_id):
    """Set the router's external gateway to the given network."""
    body = {'network_id': network_id}
    neutronclient(request).add_gateway_router(router_id, body)
def router_remove_gateway(request, router_id):
    """Clear the router's external gateway."""
    neutronclient(request).remove_gateway_router(router_id)
def tenant_quota_get(request, tenant_id):
    """Return the Neutron quota set for a tenant."""
    return base.QuotaSet(neutronclient(request).show_quota(tenant_id)['quota'])
def tenant_quota_update(request, tenant_id, **kwargs):
    """Update a tenant's Neutron quota values; kwargs become the body."""
    quotas = {'quota': kwargs}
    return neutronclient(request).update_quota(tenant_id, quotas)
def agent_list(request, **params):
    """List Neutron agents, optionally filtered by ``params``."""
    agents = neutronclient(request).list_agents(**params)
    return [Agent(a) for a in agents['agents']]
def list_dhcp_agent_hosting_networks(request, network, **params):
    """List the DHCP agents hosting the given network."""
    agents = neutronclient(request).list_dhcp_agent_hosting_networks(network,
                                                                     **params)
    return [Agent(a) for a in agents['agents']]
def add_network_to_dhcp_agent(request, dhcp_agent, network_id):
    """Schedule a network onto the given DHCP agent."""
    body = {'network_id': network_id}
    return neutronclient(request).add_network_to_dhcp_agent(dhcp_agent, body)
def remove_network_from_dhcp_agent(request, dhcp_agent, network_id):
    """Unschedule a network from the given DHCP agent."""
    return neutronclient(request).remove_network_from_dhcp_agent(dhcp_agent,
                                                                 network_id)
def provider_list(request):
    """List the service providers known to Neutron."""
    providers = neutronclient(request).list_service_providers()
    return providers['service_providers']
def servers_update_addresses(request, servers, all_tenants=False):
    """Retrieve servers networking information from Neutron if enabled.

    Should be used when up to date networking information is required,
    and Nova's networking info caching mechanism is not fast enough.

    Mutates each server in ``servers`` in place by setting its
    ``addresses`` attribute. On a Neutron connection failure a user-facing
    error message is emitted and the servers are left untouched.
    """

    # Get all (filtered for relevant servers) information from Neutron
    try:
        ports = list_resources_with_long_filters(
            port_list, 'device_id', [instance.id for instance in servers],
            request=request)
        fips = FloatingIpManager(request)
        if fips.is_supported():
            floating_ips = list_resources_with_long_filters(
                fips.list, 'port_id', [port.id for port in ports],
                all_tenants=all_tenants)
        else:
            floating_ips = []
        networks = list_resources_with_long_filters(
            network_list, 'id', set([port.network_id for port in ports]),
            request=request)
    except Exception:
        error_message = _('Unable to connect to Neutron.')
        LOG.error(error_message)
        messages.error(request, error_message)
        return

    # Map instance to its ports
    instances_ports = collections.defaultdict(list)
    for port in ports:
        instances_ports[port.device_id].append(port)

    # Map port to its floating ips
    ports_floating_ips = collections.defaultdict(list)
    for fip in floating_ips:
        ports_floating_ips[fip.port_id].append(fip)

    # Map network id to its name
    network_names = dict(((network.id, network.name) for network in networks))

    for server in servers:
        try:
            addresses = _server_get_addresses(
                request,
                server,
                instances_ports,
                ports_floating_ips,
                network_names)
        except Exception as e:
            # A failure for one server must not block updating the rest.
            LOG.error(e)
        else:
            server.addresses = addresses
def _server_get_addresses(request, server, ports, floating_ips, network_names):
    """Build a Nova-style 'addresses' mapping for one server.

    :param ports: dict mapping device_id -> list of ports
    :param floating_ips: dict mapping port_id -> list of floating IPs
    :param network_names: dict mapping network_id -> network name
    :returns: dict mapping network name -> list of address dicts
    :raises Exception: re-raises when an IP address cannot be parsed
    """
    # Renamed the third parameter from 'type' (shadowed the builtin) to
    # 'addr_type'; it is u'fixed' or u'floating'.
    def _format_address(mac, ip, addr_type):
        try:
            version = netaddr.IPAddress(ip).version
        except Exception as e:
            error_message = _('Unable to parse IP address %s.') % ip
            LOG.error(error_message)
            messages.error(request, error_message)
            raise e
        return {u'OS-EXT-IPS-MAC:mac_addr': mac,
                u'version': version,
                u'addr': ip,
                u'OS-EXT-IPS:type': addr_type}

    addresses = collections.defaultdict(list)
    instance_ports = ports.get(server.id, [])
    for port in instance_ports:
        network_name = network_names.get(port.network_id)
        # Ports on networks we could not resolve a name for are skipped.
        if network_name is not None:
            for fixed_ip in port.fixed_ips:
                addresses[network_name].append(
                    _format_address(port.mac_address,
                                    fixed_ip['ip_address'],
                                    u'fixed'))
            port_fips = floating_ips.get(port.id, [])
            for fip in port_fips:
                addresses[network_name].append(
                    _format_address(port.mac_address,
                                    fip.floating_ip_address,
                                    u'floating'))

    return dict(addresses)
#@memoized
def list_extensions(request):
    """Return the list of enabled Neutron API extensions.

    Returns an empty dict (note: not a list) when the response carries
    no 'extensions' key.
    """
    extensions_list = neutronclient(request).list_extensions()
    if 'extensions' in extensions_list:
        return extensions_list['extensions']
    else:
        return {}
#@memoized
def is_extension_supported(request, extension_alias):
    """Return True if the Neutron extension ``extension_alias`` is enabled."""
    return any(extension['alias'] == extension_alias
               for extension in list_extensions(request))
def is_enabled_by_config(name, default=True):
    """Look up a feature flag from the Neutron network settings."""
    if hasattr(settings, 'OPENSTACK_QUANTUM_NETWORK'):
        warnings.warn(
            'OPENSTACK_QUANTUM_NETWORK setting is deprecated and will be '
            'removed in the near future. '
            'Please use OPENSTACK_NEUTRON_NETWORK instead.',
            DeprecationWarning)

    # Fall back to the deprecated Quantum-era setting when the new one
    # is absent or empty.
    network_config = (getattr(settings, 'OPENSTACK_NEUTRON_NETWORK', {}) or
                      getattr(settings, 'OPENSTACK_QUANTUM_NETWORK', {}))
    return network_config.get(name, default)
@memoized
def is_service_enabled(request, config_name, ext_name):
    """True when the feature is enabled in settings AND its extension exists."""
    return (is_enabled_by_config(config_name) and
            is_extension_supported(request, ext_name))
@memoized
def is_quotas_extension_supported(request):
    """Return True when quota support is both configured and available."""
    # bool() keeps the strict True/False return of the original if/else.
    return bool(is_enabled_by_config('enable_quotas', False) and
                is_extension_supported(request, 'quotas'))
# Using this mechanism until a better plugin/sub-plugin detection
# mechanism is available.
# When using specific plugins the profile_support can be
# turned on if needed to configure and/or use profiles.
# Since this is a temporary mechanism used to detect profile_support,
# @memoized is not being used.
# TODO(absubram): Change this config variable check with
# subplugin/plugin detection API when it becomes available.
def is_port_profiles_supported():
    """Return True if the vendor (Cisco N1KV) port-profile feature is on."""
    network_config = getattr(settings, 'OPENSTACK_NEUTRON_NETWORK', {})
    # Can be used to check for vendor specific plugin
    profile_support = network_config.get('profile_support', None)
    # Return an explicit bool: the original fell off the end and
    # implicitly returned None in the unsupported case.
    return str(profile_support).lower() == 'cisco'
# FEATURE_MAP is used to define:
# - related neutron extension name (key: "extension")
# - corresponding dashboard config (key: "config")
# - RBAC policies (key: "policies")
# If a key is not contained, the corresponding permission check is skipped.
FEATURE_MAP = {
    'dvr': {
        'extension': 'dvr',
        'config': {
            'name': 'enable_distributed_router',
            'default': False,
        },
        'policies': {
            'get': 'get_router:distributed',
            'create': 'create_router:distributed',
            'update': 'update_router:distributed',
        }
    },
    'l3-ha': {
        'extension': 'l3-ha',
        'config': {'name': 'enable_ha_router',
                   'default': False},
        'policies': {
            'get': 'get_router:ha',
            'create': 'create_router:ha',
            'update': 'update_router:ha',
        }
    },
}
def get_feature_permission(request, feature, operation=None):
    """Check if a feature-specific field can be displayed.

    This method checks a permission for a feature-specific field.
    Such field is usually provided through a Neutron extension.

    :param request: Request Object
    :param feature: feature name defined in FEATURE_MAP
    :param operation (optional): Operation type. The valid value should be
        defined in FEATURE_MAP[feature]['policies'].
        It must be specified if FEATURE_MAP[feature] has 'policies'.
    """
    network_config = getattr(settings, 'OPENSTACK_NEUTRON_NETWORK', {})
    feature_info = FEATURE_MAP.get(feature)
    if not feature_info:
        # Translators: Only used inside Horizon code and invisible to users
        # BUG FIX: the original never applied the '%' substitution, so the
        # literal '%(feature)s' placeholder appeared in the message.
        raise ValueError(_("The requested feature '%(feature)s' is unknown. "
                           "Please make sure to specify a feature defined "
                           "in FEATURE_MAP.") % {'feature': feature})

    # Check dashboard settings
    feature_config = feature_info.get('config')
    if feature_config:
        if not network_config.get(feature_config['name'],
                                  feature_config['default']):
            return False

    # Check policy
    feature_policies = feature_info.get('policies')
    policy_check = getattr(settings, "POLICY_CHECK_FUNCTION", None)
    if feature_policies and policy_check:
        policy_name = feature_policies.get(operation)
        if not policy_name:
            # Translators: Only used inside Horizon code and invisible
            # to users
            raise ValueError(_("The 'operation' parameter for "
                               "get_feature_permission '%(feature)s' "
                               "is invalid. It should be one of %(allowed)s")
                             % {'feature': feature,
                                'allowed': ' '.join(feature_policies.keys())})
        role = (('network', policy_name),)
        if not policy.check(role, request):
            return False

    # Check if a required extension is enabled
    feature_extension = feature_info.get('extension')
    if feature_extension:
        try:
            return is_extension_supported(request, feature_extension)
        except Exception:
            # Message reworded; the original read "Failed to check Neutron
            # '%s' extension is not supported", which was self-contradictory.
            msg = (_("Failed to check if Neutron extension '%s' is supported")
                   % feature_extension)
            LOG.info(msg)
            return False

    # If all checks are passed, now a given feature is allowed.
    return True
def is_neutron_enabled(request):
    """Probe Neutron by listing networks; False only on EndpointNotFound."""
    try:
        network_list(request)
    except keystone_exceptions.EndpointNotFound:
        return False
    except Exception:
        # NOTE(review): any other failure is ignored and Neutron is still
        # reported as enabled — presumably deliberate best-effort; confirm.
        pass
    return True
| {
"content_hash": "59ffb99758ce4ec4f2307b41e70bc206",
"timestamp": "",
"source": "github",
"line_count": 1132,
"max_line_length": 79,
"avg_line_length": 37.91166077738516,
"alnum_prop": 0.6013840991704725,
"repo_name": "zhanghui9700/eonboard",
"id": "5c3143cc0e0de44e7131696103a6766e52fc665f",
"size": "43720",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "eoncloud_web/cloud/api/neutron.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1099594"
},
{
"name": "HTML",
"bytes": "400059"
},
{
"name": "JavaScript",
"bytes": "523547"
},
{
"name": "Python",
"bytes": "649118"
}
],
"symlink_target": ""
} |
import os
import sys
import time
from datetime import datetime
import traceback
import threading
import json
import shutil
import types
# Log level constants, mirroring the numeric values used by the stdlib
# ``logging`` module (higher value == more severe).
CRITICAL = 50
FATAL = CRITICAL
ERROR = 40
WARNING = 30
WARN = WARNING
INFO = 20
DEBUG = 10
NOTSET = 0
class Logger():
def __init__(self, buffer_size=0, file_name=None, roll_num=1):
self.buffer_lock = threading.Lock()
self.buffer = {} # id => line
self.buffer_size = buffer_size
self.last_no = 0
self.min_level = NOTSET
self.log_fd = None
self.set_color()
if file_name:
self.roll_num = roll_num
self.log_to_file(file_name)
def setLevel(self, level):
if level == "DEBUG":
self.min_level = DEBUG
elif level == "INFO":
self.min_level = INFO
elif level == "WARN":
self.min_level = WARN
elif level == "ERROR":
self.min_level = ERROR
elif level == "FATAL":
self.min_level = FATAL
else:
print("log level not support:%s", level)
def set_color(self):
self.err_color = None
self.warn_color = None
self.debug_color = None
self.reset_color = None
self.set_console_color = lambda x: None
if hasattr(sys.stderr, 'isatty') and sys.stderr.isatty():
if os.name == 'nt':
self.err_color = 0x04
self.warn_color = 0x06
self.debug_color = 0x002
self.reset_color = 0x07
import ctypes
SetConsoleTextAttribute = ctypes.windll.kernel32.SetConsoleTextAttribute
GetStdHandle = ctypes.windll.kernel32.GetStdHandle
self.set_console_color = lambda color: SetConsoleTextAttribute(GetStdHandle(-11), color)
elif os.name == 'posix':
self.err_color = '\033[31m'
self.warn_color = '\033[33m'
self.debug_color = '\033[32m'
self.reset_color = '\033[0m'
self.set_console_color = lambda color: sys.stderr.write(color)
def log_to_file(self, file_name):
self.log_filename = file_name
if os.path.isfile(file_name) and os.path.getsize(file_name) > 1024 * 1024:
self.roll_log()
self.log_fd = open(file_name, "w")
def roll_log(self):
for i in range(self.roll_num, 1, -1):
new_name = "%s.%d" % (self.log_filename, i)
old_name = "%s.%d" % (self.log_filename, i - 1)
if not os.path.isfile(old_name):
continue
self.info("roll_log %s -> %s", old_name, new_name)
shutil.move(old_name, new_name)
shutil.move(self.log_filename, self.log_filename + ".0")
def log(self, level, console_color, html_color, fmt, *args, **kwargs):
now = datetime.now()
time_str = now.strftime("%b %d %H:%M:%S.%f")[:19]
string = '%s - [%s] %s\n' % (time_str, level, fmt % args)
self.buffer_lock.acquire()
try:
self.set_console_color(console_color)
sys.stderr.write(string)
self.set_console_color(self.reset_color)
if self.log_fd:
self.log_fd.write(string)
try:
self.log_fd.flush()
except:
pass
if self.buffer_size:
self.last_no += 1
self.buffer[self.last_no] = string
buffer_len = len(self.buffer)
if buffer_len > self.buffer_size:
del self.buffer[self.last_no - self.buffer_size]
except Exception as e:
string = '%s - [%s]LOG_EXCEPT: %s, Except:%s<br>' % (time.ctime()[4:-5], level, fmt % args, e)
self.last_no += 1
self.buffer[self.last_no] = string
buffer_len = len(self.buffer)
if buffer_len > self.buffer_size:
del self.buffer[self.last_no - self.buffer_size]
finally:
self.buffer_lock.release()
def debug(self, fmt, *args, **kwargs):
if self.min_level > DEBUG:
return
self.log('DEBUG', self.debug_color, '21610b', fmt, *args, **kwargs)
def info(self, fmt, *args, **kwargs):
if self.min_level > INFO:
return
self.log('INFO', self.reset_color, '000000', fmt, *args)
def warning(self, fmt, *args, **kwargs):
if self.min_level > WARN:
return
self.log('WARNING', self.warn_color, 'FF8000', fmt, *args, **kwargs)
def warn(self, fmt, *args, **kwargs):
self.warning(fmt, *args, **kwargs)
def error(self, fmt, *args, **kwargs):
if self.min_level > ERROR:
return
self.log('ERROR', self.err_color, 'FE2E2E', fmt, *args, **kwargs)
def exception(self, fmt, *args, **kwargs):
self.error(fmt, *args, **kwargs)
self.error("Except stack:%s", traceback.format_exc(), **kwargs)
def critical(self, fmt, *args, **kwargs):
if self.min_level > CRITICAL:
return
self.log('CRITICAL', self.err_color, 'D7DF01', fmt, *args, **kwargs)
#=================================================================
def set_buffer_size(self, set_size):
self.buffer_lock.acquire()
self.buffer_size = set_size
buffer_len = len(buffer)
if buffer_len > self.buffer_size:
for i in range(self.last_no - buffer_len, self.last_no - self.buffer_size):
try:
del self.buffer[i]
except:
pass
self.buffer_lock.release()
def get_last_lines(self, max_lines):
self.buffer_lock.acquire()
buffer_len = len(self.buffer)
if buffer_len > max_lines:
first_no = self.last_no - max_lines
else:
first_no = self.last_no - buffer_len + 1
jd = {}
if buffer_len > 0:
for i in range(first_no, self.last_no + 1):
jd[i] = self.unicode_line(self.buffer[i])
self.buffer_lock.release()
return json.dumps(jd)
def get_new_lines(self, from_no):
    """Return all buffered lines numbered >= *from_no* as a JSON object
    mapping line-number -> text.  Clamps *from_no* to the oldest line.
    """
    self.buffer_lock.acquire()
    try:
        jd = {}
        first_no = self.last_no - len(self.buffer) + 1
        if from_no < first_no:
            from_no = first_no
        if self.last_no >= from_no:
            for i in range(from_no, self.last_no + 1):
                jd[i] = self.unicode_line(self.buffer[i])
    finally:
        # guarantee the lock is released even if a lookup fails
        self.buffer_lock.release()
    return json.dumps(jd)
def unicode_line(self, line):
    """Return *line* as a unicode string, tolerating undecodable bytes.

    Python 2 only code path: byte strings are decoded with errors ignored;
    anything that still fails is reported and replaced with "".
    """
    try:
        if type(line) is types.UnicodeType:
            return line
        return unicode(line, errors='ignore')
    except Exception as e:
        print("unicode err:%r" % e)
        print("line can't decode:%s" % line)
        print("Except stack:%s" % traceback.format_exc())
        return ""
| {
"content_hash": "9c238ba3708e8d55f245041935d97c79",
"timestamp": "",
"source": "github",
"line_count": 210,
"max_line_length": 106,
"avg_line_length": 33.70952380952381,
"alnum_prop": 0.5187173329566322,
"repo_name": "hexlism/xx_net",
"id": "56602199eb923c4477ad77104792ea1a7e6ee834",
"size": "7080",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python27/1.0/lib/noarch/xlog.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "132"
},
{
"name": "CSS",
"bytes": "84687"
},
{
"name": "HTML",
"bytes": "106131"
},
{
"name": "JavaScript",
"bytes": "345998"
},
{
"name": "PHP",
"bytes": "10671"
},
{
"name": "Python",
"bytes": "6852171"
},
{
"name": "Shell",
"bytes": "1559"
},
{
"name": "Visual Basic",
"bytes": "388"
}
],
"symlink_target": ""
} |
from threading import Lock
from time import sleep, time
class Frequency(object):
    """Throttle helper: allow at most ``n`` entries per ``interval`` seconds.

    Use it as a context manager; ``with frequency:`` blocks (sleeps) until a
    slot is free.  ``Frequency(2, 1)`` lets two tasks start inside any
    one-second window; later entrants wait until a slot's timestamp is old
    enough.  ``Frequency()`` (no ``n``) imposes no limit at all.
    """
    # NOTE: "__enter__" is deliberately listed in __slots__ — the slot
    # descriptor on the class lets the `with` statement's type-level special
    # method lookup reach the per-instance callable assigned in __init__.
    __slots__ = ("gen", "repr", "lock", "__enter__", "n", "interval")
    TIMER = time

    def __init__(self, n=None, interval=0):
        self.n = n
        self.interval = interval
        self.repr = "Frequency(%s, %s)" % (n, interval)
        if n:
            self.lock = Lock()
            # a generator beats a Queue here: faster and lighter on memory
            self.gen = self.generator(n, interval)
            self.__enter__ = self._acquire
        else:
            # unlimited mode: entering is a no-op, so reuse __exit__
            self.gen = None
            self.__enter__ = self.__exit__

    def to_list(self):
        """Return ``[n, interval]``."""
        return [self.n, self.interval]

    def to_dict(self):
        """Return ``{'n': n, 'interval': interval}``."""
        return {'n': self.n, 'interval': self.interval}

    def generator(self, n=2, interval=1):
        """Yield timestamps, sleeping so slot *i* is reused at most once
        per *interval* seconds."""
        slots = [0] * n
        while True:
            for idx in range(n):
                # or timeit.default_timer()
                stamp = self.TIMER()
                wait = interval - (stamp - slots[idx])
                if wait > 0:
                    sleep(wait)
                    stamp = self.TIMER()
                slots[idx] = stamp
                yield stamp

    @classmethod
    def ensure_frequency(cls, frequency):
        """Coerce *frequency* (Frequency / dict / list / tuple) into a
        Frequency instance, passing existing instances through unchanged."""
        if isinstance(frequency, cls):
            return frequency
        if isinstance(frequency, dict):
            return cls(**frequency)
        return cls(*frequency)

    def _acquire(self):
        # serialize generator access across threads
        self.lock.acquire()
        try:
            return next(self.gen)
        finally:
            self.lock.release()

    def __exit__(self, *args):
        pass

    def __str__(self):
        return self.repr

    def __repr__(self):
        return self.repr

    def __bool__(self):
        # limited instances carry a generator; unlimited ones carry None
        return self.gen is not None
| {
"content_hash": "369e78d7c1b797a690ec6dec75bd18c3",
"timestamp": "",
"source": "github",
"line_count": 102,
"max_line_length": 85,
"avg_line_length": 30.470588235294116,
"alnum_prop": 0.5180180180180181,
"repo_name": "ClericPy/torequests",
"id": "cf245620755f9011f9a5a0d40135a72455b93746",
"size": "3108",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "torequests/frequency_controller/sync_tools.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "1052"
},
{
"name": "Python",
"bytes": "221340"
}
],
"symlink_target": ""
} |
from loadwarrior.locustext import setup
from path import path
# Register this module as a locust test, configured by the sibling YAML file.
setup(__name__, conffile=path(__file__).parent/'test_locust.yml')
| {
"content_hash": "4ad5a1cbd0b8c0680d4d70c7847e8a24",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 65,
"avg_line_length": 42.666666666666664,
"alnum_prop": 0.7578125,
"repo_name": "whitmo/loadwarrior",
"id": "4d8e8a11dcff94324036ef8cea62c27501d83345",
"size": "128",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "loadwarrior/tests/locustfile.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "28203"
}
],
"symlink_target": ""
} |
'''
Created on Jun 8, 2011
@author: evan
'''
from lxml import etree
from kayako.core.object import KayakoObject
class TicketPriority(KayakoObject):
    '''
    Kayako TicketPriority API Object.

    id
    title
    displayorder
    frcolorcode
    bgcolorcode
    displayicon
    type
    uservisibilitycustom
    usergroupid
    '''

    controller = '/Tickets/TicketPriority'

    __parameters__ = [
        'id',
        'title',
        'displayorder',
        'frcolorcode',
        'bgcolorcode',
        'displayicon',
        'type',
        'uservisibilitycustom',
        'usergroupid',
    ]

    @classmethod
    def _parse_ticket_priority(cls, ticket_priority_tree):
        """Extract constructor kwargs from a <ticketpriority> XML element."""
        params = dict(
            id=cls._get_int(ticket_priority_tree.find('id')),
            title=cls._get_string(ticket_priority_tree.find('title')),
            displayorder=cls._get_int(ticket_priority_tree.find('displayorder')),
            frcolorcode=cls._get_string(ticket_priority_tree.find('frcolorcode')),
            bgcolorcode=cls._get_string(ticket_priority_tree.find('bgcolorcode')),
            displayicon=cls._get_string(ticket_priority_tree.find('displayicon')),
            type=cls._get_string(ticket_priority_tree.find('type')),
            uservisibilitycustom=cls._get_boolean(ticket_priority_tree.find('uservisibilitycustom')),
            usergroupid=cls._get_int(ticket_priority_tree.find('usergroupid'), required=False),
        )
        return params

    @classmethod
    def get_all(cls, api):
        """Return every TicketPriority exposed by the API."""
        response = api._request(cls.controller, 'GET')
        tree = etree.parse(response)
        return [TicketPriority(api, **cls._parse_ticket_priority(ticket_priority_tree)) for ticket_priority_tree in tree.findall('ticketpriority')]

    @classmethod
    def get(cls, api, id):
        """Return the TicketPriority with *id*, or None when the API has no
        matching element."""
        response = api._request('%s/%s/' % (cls.controller, id), 'GET')
        tree = etree.parse(response)
        node = tree.find('ticketpriority')
        if node is None:
            return None
        # Consistency/efficiency fix: reuse the node already located above
        # instead of calling tree.find('ticketpriority') a second time —
        # this matches the sibling TicketStatus/TicketType implementations.
        params = cls._parse_ticket_priority(node)
        return TicketPriority(api, **params)

    def __str__(self):
        return '<TicketPriority (%s): %s>' % (self.id, self.title)
class TicketStatus(KayakoObject):
    '''
    Kayako TicketStatus API Object.

    id
    title
    displayorder
    departmentid
    displayicon
    type
    displayinmainlist
    markasresolved
    displaycount
    statuscolor
    statusbgcolor
    resetduetime
    triggersurvey
    staffvisibilitycustom
    '''

    controller = '/Tickets/TicketStatus'

    __parameters__ = [
        'id',
        'title',
        'displayorder',
        'departmentid',
        'displayicon',
        'type',
        'displayinmainlist',
        'markasresolved',
        'displaycount',
        'statuscolor',
        'statusbgcolor',
        'resetduetime',
        'triggersurvey',
        'staffvisibilitycustom',
    ]

    @classmethod
    def _parse_ticket_status(cls, ticket_status_tree):
        """Extract constructor kwargs from a <ticketstatus> XML element."""
        find = ticket_status_tree.find
        return {
            'id': cls._get_int(find('id')),
            'title': cls._get_string(find('title')),
            'displayorder': cls._get_int(find('displayorder')),
            'departmentid': cls._get_int(find('departmentid')),
            'displayicon': cls._get_string(find('displayicon')),
            'type': cls._get_string(find('type')),
            'displayinmainlist': cls._get_boolean(find('displayinmainlist')),
            'markasresolved': cls._get_boolean(find('markasresolved')),
            'displaycount': cls._get_int(find('displaycount')),
            'statuscolor': cls._get_string(find('statuscolor')),
            'statusbgcolor': cls._get_string(find('statusbgcolor')),
            'resetduetime': cls._get_boolean(find('resetduetime')),
            'triggersurvey': cls._get_boolean(find('triggersurvey')),
            'staffvisibilitycustom': cls._get_boolean(find('staffvisibilitycustom')),
        }

    @classmethod
    def get_all(cls, api):
        """Return every TicketStatus exposed by the API."""
        response = api._request(cls.controller, 'GET')
        tree = etree.parse(response)
        statuses = []
        for status_node in tree.findall('ticketstatus'):
            statuses.append(TicketStatus(api, **cls._parse_ticket_status(status_node)))
        return statuses

    @classmethod
    def get(cls, api, id):
        """Return the TicketStatus with *id*, or None when absent."""
        response = api._request('%s/%s/' % (cls.controller, id), 'GET')
        tree = etree.parse(response)
        node = tree.find('ticketstatus')
        if node is None:
            return None
        return TicketStatus(api, **cls._parse_ticket_status(node))

    def __str__(self):
        return '<TicketStatus (%s): %s>' % (self.id, self.title)
class TicketType(KayakoObject):
    '''
    Kayako TicketType API Object.

    id
    title
    displayorder
    departmentid
    displayicon
    type
    uservisibilitycustom
    '''

    controller = '/Tickets/TicketType'

    __parameters__ = [
        'id',
        'title',
        'displayorder',
        'departmentid',
        'displayicon',
        'type',
        'uservisibilitycustom',
    ]

    @classmethod
    def _parse_ticket_type(cls, ticket_type_tree):
        """Extract constructor kwargs from a <tickettype> XML element."""
        find = ticket_type_tree.find
        return {
            'id': cls._get_int(find('id')),
            'title': cls._get_string(find('title')),
            'displayorder': cls._get_int(find('displayorder')),
            'departmentid': cls._get_int(find('departmentid')),
            'displayicon': cls._get_string(find('displayicon')),
            'type': cls._get_string(find('type')),
            'uservisibilitycustom': cls._get_boolean(find('uservisibilitycustom')),
        }

    @classmethod
    def get_all(cls, api):
        """Return every TicketType exposed by the API."""
        response = api._request(cls.controller, 'GET')
        tree = etree.parse(response)
        types_found = []
        for type_node in tree.findall('tickettype'):
            types_found.append(TicketType(api, **cls._parse_ticket_type(type_node)))
        return types_found

    @classmethod
    def get(cls, api, id):
        """Return the TicketType with *id*, or None when absent."""
        response = api._request('%s/%s/' % (cls.controller, id), 'GET')
        tree = etree.parse(response)
        node = tree.find('tickettype')
        if node is None:
            return None
        return TicketType(api, **cls._parse_ticket_type(node))

    def __str__(self):
        return '<TicketType (%s): %s>' % (self.id, self.title)
| {
"content_hash": "0452a9126f0bbfe2987a0289bb096a0a",
"timestamp": "",
"source": "github",
"line_count": 213,
"max_line_length": 147,
"avg_line_length": 31.183098591549296,
"alnum_prop": 0.6102077687443541,
"repo_name": "ravi-sharma/python-api-library",
"id": "02526503717a223c43450464a263d95cfa4290c5",
"size": "6936",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "src/kayako/objects/ticket/ticket_enums.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "315429"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
# Import python libs
from StringIO import StringIO
import logging
# Import salt libs
from salt.exceptions import SaltRenderError
import salt.utils.templates
log = logging.getLogger(__name__)
def render(template_file, saltenv='base', sls='', argline='',
           context=None, tmplpath=None, **kws):
    '''
    Render the template_file, passing the functions and grains into the
    Jinja rendering system.

    The only supported renderer option is ``-s`` (render from a string);
    any other argline is rejected.

    :rtype: string
    '''
    # anything other than '' or '-s' is an unsupported renderer option
    if argline and argline != '-s':
        raise SaltRenderError(
            'Unknown renderer option: {opt}'.format(opt=argline)
        )
    tmp_data = salt.utils.templates.JINJA(
        template_file,
        to_str=True,
        salt=__salt__,
        grains=__grains__,
        opts=__opts__,
        pillar=__pillar__,
        saltenv=saltenv,
        sls=sls,
        context=context,
        tmplpath=tmplpath,
        **kws)
    if not tmp_data.get('result', False):
        raise SaltRenderError(
            tmp_data.get('data', 'Unknown render error in jinja renderer')
        )
    return StringIO(tmp_data['data'])
| {
"content_hash": "009f890854d88b7762480b233a0531d3",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 78,
"avg_line_length": 34.83720930232558,
"alnum_prop": 0.4666221628838451,
"repo_name": "MadeiraCloud/salt",
"id": "9ba05e47910d7a05cac88372465b5921c5e7c725",
"size": "1522",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sources/salt/renderers/jinja.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "10058"
},
{
"name": "Makefile",
"bytes": "1815"
},
{
"name": "Python",
"bytes": "4530204"
},
{
"name": "Shell",
"bytes": "169676"
}
],
"symlink_target": ""
} |
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1PortworxVolumeSource(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    swagger_types = {
        'fs_type': 'str',
        'read_only': 'bool',
        'volume_id': 'str'
    }

    attribute_map = {
        'fs_type': 'fsType',
        'read_only': 'readOnly',
        'volume_id': 'volumeID'
    }

    def __init__(self, fs_type=None, read_only=None, volume_id=None):
        """
        V1PortworxVolumeSource - a model defined in Swagger
        """
        self._fs_type = None
        self._read_only = None
        self._volume_id = None
        self.discriminator = None

        # optional fields are only set when provided; volume_id is required
        # and its setter rejects None
        if fs_type is not None:
            self.fs_type = fs_type
        if read_only is not None:
            self.read_only = read_only
        self.volume_id = volume_id

    @property
    def fs_type(self):
        """
        Gets the fs_type of this V1PortworxVolumeSource.
        FSType represents the filesystem type to mount Must be a filesystem type supported by the host operating system. Ex. \"ext4\", \"xfs\". Implicitly inferred to be \"ext4\" if unspecified.

        :return: The fs_type of this V1PortworxVolumeSource.
        :rtype: str
        """
        return self._fs_type

    @fs_type.setter
    def fs_type(self, fs_type):
        """
        Sets the fs_type of this V1PortworxVolumeSource.

        :param fs_type: The fs_type of this V1PortworxVolumeSource.
        :type: str
        """
        self._fs_type = fs_type

    @property
    def read_only(self):
        """
        Gets the read_only of this V1PortworxVolumeSource.
        Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts.

        :return: The read_only of this V1PortworxVolumeSource.
        :rtype: bool
        """
        return self._read_only

    @read_only.setter
    def read_only(self, read_only):
        """
        Sets the read_only of this V1PortworxVolumeSource.

        :param read_only: The read_only of this V1PortworxVolumeSource.
        :type: bool
        """
        self._read_only = read_only

    @property
    def volume_id(self):
        """
        Gets the volume_id of this V1PortworxVolumeSource.
        VolumeID uniquely identifies a Portworx volume

        :return: The volume_id of this V1PortworxVolumeSource.
        :rtype: str
        """
        return self._volume_id

    @volume_id.setter
    def volume_id(self, volume_id):
        """
        Sets the volume_id of this V1PortworxVolumeSource.

        :param volume_id: The volume_id of this V1PortworxVolumeSource.
        :type: str
        """
        if volume_id is None:
            raise ValueError("Invalid value for `volume_id`, must not be `None`")
        self._volume_id = volume_id

    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [item.to_dict() if hasattr(item, "to_dict") else item
                                for item in value]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {k: v.to_dict() if hasattr(v, "to_dict") else v
                                for k, v in value.items()}
            else:
                result[attr] = value
        return result

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        if not isinstance(other, V1PortworxVolumeSource):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
| {
"content_hash": "2c7459fb4d51b85c78bdef38f647f60c",
"timestamp": "",
"source": "github",
"line_count": 181,
"max_line_length": 194,
"avg_line_length": 28.81767955801105,
"alnum_prop": 0.5655674846625767,
"repo_name": "mbohlool/client-python",
"id": "a3702485472915494c82bda9550cc0f9eae4924c",
"size": "5233",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kubernetes/client/models/v1_portworx_volume_source.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "8417639"
},
{
"name": "Shell",
"bytes": "16830"
}
],
"symlink_target": ""
} |
import errno
import os
import random
import signal
import socket
from time import sleep
import unittest
from unittest import mock
try:
from time import monotonic as time
except ImportError:
from time import time as time
try:
import resource
except ImportError:
resource = None
import zmq
from aiozmq.selector import ZmqSelector, EVENT_READ, EVENT_WRITE, SelectorKey
from aiozmq._test_util import requires_mac_ver
# Use the platform's socketpair when available; otherwise emulate it with a
# loopback listener (for platforms whose socket module lacks socketpair).
if hasattr(socket, 'socketpair'):
    socketpair = socket.socketpair
else:
    def socketpair(family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0):
        """Emulate socket.socketpair() via a localhost listen/connect pair."""
        with socket.socket(family, type, proto) as l:
            l.bind(('127.0.0.1', 0))
            l.listen(3)
            c = socket.socket(family, type, proto)
            try:
                c.connect(l.getsockname())
                caddr = c.getsockname()
                while True:
                    a, addr = l.accept()
                    # check that we've got the correct client
                    if addr == caddr:
                        return c, a
                    # someone else connected to our listener; drop them
                    a.close()
            except OSError:
                c.close()
                raise
def find_ready_matching(ready, flag):
    """Return the fileobjs from *ready* (key, events) pairs whose event
    mask contains *flag*."""
    return [key.fileobj for key, events in ready if events & flag]
class SelectorTests(unittest.TestCase):
    """Exercise ZmqSelector against the selectors.BaseSelector contract:
    register/unregister/modify bookkeeping, select() semantics, timeouts,
    signal interruption and FD-scalability."""

    SELECTOR = ZmqSelector

    def make_socketpair(self):
        """Return a connected TCP socket pair, auto-closed at teardown."""
        rd, wr = socketpair()
        self.addCleanup(rd.close)
        self.addCleanup(wr.close)
        return rd, wr

    def test_register(self):
        s = self.SELECTOR()
        self.addCleanup(s.close)
        rd, wr = self.make_socketpair()
        key = s.register(rd, EVENT_READ, "data")
        self.assertIsInstance(key, SelectorKey)
        self.assertEqual(key.fileobj, rd)
        self.assertEqual(key.fd, rd.fileno())
        self.assertEqual(key.events, EVENT_READ)
        self.assertEqual(key.data, "data")
        # register an unknown event
        self.assertRaises(ValueError, s.register, 0, 999999)
        # register an invalid FD
        self.assertRaises(ValueError, s.register, -10, EVENT_READ)
        # register twice
        self.assertRaises(KeyError, s.register, rd, EVENT_READ)
        # register the same FD, but with a different object
        self.assertRaises(KeyError, s.register, rd.fileno(),
                          EVENT_READ)
        # register an invalid fd type
        self.assertRaises(ValueError, s.register, 'abc', EVENT_READ)

    def test_register_with_zmq_error(self):
        # a ZMQError from the underlying poller must surface as OSError
        # and leave the selector's map unchanged
        s = self.SELECTOR()
        self.addCleanup(s.close)
        m = mock.Mock()
        m.side_effect = zmq.ZMQError(errno.EFAULT, 'not a socket')
        s._poller.register = m
        with self.assertRaises(OSError) as ctx:
            s.register(1, EVENT_READ)
        self.assertEqual(errno.EFAULT, ctx.exception.errno)
        self.assertNotIn(1, s.get_map())

    def test_unregister(self):
        s = self.SELECTOR()
        self.addCleanup(s.close)
        rd, wr = self.make_socketpair()
        s.register(rd, EVENT_READ)
        s.unregister(rd)
        # unregister an unknown file obj
        self.assertRaises(KeyError, s.unregister, 999999)
        # unregister twice
        self.assertRaises(KeyError, s.unregister, rd)

    def test_unregister_with_zmq_error(self):
        # on poller failure the key must remain registered
        s = self.SELECTOR()
        self.addCleanup(s.close)
        rd, wr = self.make_socketpair()
        s.register(rd, EVENT_READ)
        m = mock.Mock()
        m.side_effect = zmq.ZMQError(errno.EFAULT, 'not a socket')
        s._poller.unregister = m
        with self.assertRaises(OSError) as ctx:
            s.unregister(rd)
        self.assertEqual(errno.EFAULT, ctx.exception.errno)
        self.assertIn(rd, s.get_map())

    def test_unregister_after_fd_close(self):
        # unregistering a raw fd must work even after the fd was closed
        s = self.SELECTOR()
        self.addCleanup(s.close)
        rd, wr = self.make_socketpair()
        r, w = rd.fileno(), wr.fileno()
        s.register(r, EVENT_READ)
        s.register(w, EVENT_WRITE)
        rd.close()
        wr.close()
        s.unregister(r)
        s.unregister(w)

    @unittest.skipUnless(os.name == 'posix', "requires posix")
    def test_unregister_after_fd_close_and_reuse(self):
        # fd numbers are recycled via dup2; unregister must still target
        # the original registration, not the new occupant
        s = self.SELECTOR()
        self.addCleanup(s.close)
        rd, wr = self.make_socketpair()
        r, w = rd.fileno(), wr.fileno()
        s.register(r, EVENT_READ)
        s.register(w, EVENT_WRITE)
        rd2, wr2 = self.make_socketpair()
        rd.close()
        wr.close()
        os.dup2(rd2.fileno(), r)
        os.dup2(wr2.fileno(), w)
        self.addCleanup(os.close, r)
        self.addCleanup(os.close, w)
        s.unregister(r)
        s.unregister(w)

    def test_unregister_after_socket_close(self):
        s = self.SELECTOR()
        self.addCleanup(s.close)
        rd, wr = self.make_socketpair()
        s.register(rd, EVENT_READ)
        s.register(wr, EVENT_WRITE)
        rd.close()
        wr.close()
        s.unregister(rd)
        s.unregister(wr)

    def test_modify(self):
        s = self.SELECTOR()
        self.addCleanup(s.close)
        rd, wr = self.make_socketpair()
        key = s.register(rd, EVENT_READ)
        # modify events
        key2 = s.modify(rd, EVENT_WRITE)
        self.assertNotEqual(key.events, key2.events)
        self.assertEqual(key2, s.get_key(rd))
        s.unregister(rd)
        # modify data
        d1 = object()
        d2 = object()
        key = s.register(rd, EVENT_READ, d1)
        key2 = s.modify(rd, EVENT_READ, d2)
        self.assertEqual(key.events, key2.events)
        self.assertNotEqual(key.data, key2.data)
        self.assertEqual(key2, s.get_key(rd))
        self.assertEqual(key2.data, d2)
        key3 = s.modify(rd, EVENT_READ, d2)
        self.assertIs(key3, key2)
        # modify unknown file obj
        self.assertRaises(KeyError, s.modify, 999999, EVENT_READ)
        # modify use a shortcut: same events/data must not round-trip
        # through unregister+register
        d3 = object()
        s.register = mock.Mock()
        s.unregister = mock.Mock()
        s.modify(rd, EVENT_READ, d3)
        self.assertFalse(s.register.called)
        self.assertFalse(s.unregister.called)

    def test_modify_with_zmq_error(self):
        # on poller failure the key must remain registered
        s = self.SELECTOR()
        self.addCleanup(s.close)
        rd, wr = self.make_socketpair()
        s.register(rd, EVENT_READ)
        m = mock.Mock()
        m.side_effect = zmq.ZMQError(errno.EFAULT, 'not a socket')
        s._poller.modify = m
        with self.assertRaises(OSError) as ctx:
            s.modify(rd, EVENT_WRITE)
        self.assertEqual(errno.EFAULT, ctx.exception.errno)
        self.assertIn(rd, s.get_map())

    def test_close(self):
        s = self.SELECTOR()
        self.addCleanup(s.close)
        rd, wr = self.make_socketpair()
        s.register(rd, EVENT_READ)
        s.register(wr, EVENT_WRITE)
        s.close()
        self.assertRaises(KeyError, s.get_key, rd)
        self.assertRaises(KeyError, s.get_key, wr)

    def test_get_key(self):
        s = self.SELECTOR()
        self.addCleanup(s.close)
        rd, wr = self.make_socketpair()
        key = s.register(rd, EVENT_READ, "data")
        self.assertEqual(key, s.get_key(rd))
        # unknown file obj
        self.assertRaises(KeyError, s.get_key, 999999)

    def test_get_map(self):
        s = self.SELECTOR()
        self.addCleanup(s.close)
        rd, wr = self.make_socketpair()
        keys = s.get_map()
        self.assertFalse(keys)
        self.assertEqual(len(keys), 0)
        self.assertEqual(list(keys), [])
        key = s.register(rd, EVENT_READ, "data")
        self.assertIn(rd, keys)
        self.assertEqual(key, keys[rd])
        self.assertEqual(len(keys), 1)
        self.assertEqual(list(keys), [rd.fileno()])
        self.assertEqual(list(keys.values()), [key])
        # unknown file obj
        with self.assertRaises(KeyError):
            keys[999999]
        # Read-only mapping
        with self.assertRaises(TypeError):
            del keys[rd]

    def test_select(self):
        s = self.SELECTOR()
        self.addCleanup(s.close)
        rd, wr = self.make_socketpair()
        s.register(rd, EVENT_READ)
        wr_key = s.register(wr, EVENT_WRITE)
        result = s.select()
        for key, events in result:
            self.assertTrue(isinstance(key, SelectorKey))
            self.assertTrue(events)
            self.assertFalse(events & ~(EVENT_READ |
                                        EVENT_WRITE))
        self.assertEqual([(wr_key, EVENT_WRITE)], result)

    def test_select_with_zmq_error(self):
        s = self.SELECTOR()
        self.addCleanup(s.close)
        rd, wr = self.make_socketpair()
        s.register(rd, EVENT_READ)
        m = mock.Mock()
        m.side_effect = zmq.ZMQError(errno.EFAULT, 'not a socket')
        s._poller.poll = m
        with self.assertRaises(OSError) as ctx:
            s.select()
        self.assertEqual(errno.EFAULT, ctx.exception.errno)

    def test_select_without_key(self):
        # events reported for an fd no longer in the key map must be dropped
        s = self.SELECTOR()
        self.addCleanup(s.close)
        rd, wr = self.make_socketpair()
        s.register(wr, EVENT_WRITE)
        s._fd_to_key = {}
        result = s.select()
        self.assertFalse(result)

    def test_context_manager(self):
        # leaving the `with` block closes the selector
        s = self.SELECTOR()
        self.addCleanup(s.close)
        rd, wr = self.make_socketpair()
        with s as sel:
            sel.register(rd, EVENT_READ)
            sel.register(wr, EVENT_WRITE)
        self.assertRaises(KeyError, s.get_key, rd)
        self.assertRaises(KeyError, s.get_key, wr)

    def test_fileno(self):
        s = self.SELECTOR()
        self.addCleanup(s.close)
        if hasattr(s, 'fileno'):
            fd = s.fileno()
            self.assertTrue(isinstance(fd, int))
            self.assertGreaterEqual(fd, 0)

    def test_selector(self):
        """Push one message through each of NUM_SOCKETS pairs, driving all
        I/O with select(), and verify every message arrives intact."""
        s = self.SELECTOR()
        self.addCleanup(s.close)
        NUM_SOCKETS = 12
        MSG = b" This is a test."
        MSG_LEN = len(MSG)
        readers = []
        writers = []
        r2w = {}
        w2r = {}
        for i in range(NUM_SOCKETS):
            rd, wr = self.make_socketpair()
            s.register(rd, EVENT_READ)
            s.register(wr, EVENT_WRITE)
            readers.append(rd)
            writers.append(wr)
            r2w[rd] = wr
            w2r[wr] = rd
        bufs = []
        while writers:
            ready = s.select()
            ready_writers = find_ready_matching(ready, EVENT_WRITE)
            if not ready_writers:
                self.fail("no sockets ready for writing")
            wr = random.choice(ready_writers)
            wr.send(MSG)
            for i in range(10):
                ready = s.select()
                ready_readers = find_ready_matching(ready,
                                                    EVENT_READ)
                if ready_readers:
                    break
                # there might be a delay between the write to the write end and
                # the read end is reported ready
                sleep(0.1)
            else:
                self.fail("no sockets ready for reading")
            self.assertEqual([w2r[wr]], ready_readers)
            rd = ready_readers[0]
            buf = rd.recv(MSG_LEN)
            self.assertEqual(len(buf), MSG_LEN)
            bufs.append(buf)
            s.unregister(r2w[rd])
            s.unregister(rd)
            writers.remove(r2w[rd])
        self.assertEqual(bufs, [MSG] * NUM_SOCKETS)

    def test_timeout(self):
        s = self.SELECTOR()
        self.addCleanup(s.close)
        rd, wr = self.make_socketpair()
        s.register(wr, EVENT_WRITE)
        t = time()
        # a writable socket is reported immediately regardless of timeout
        self.assertEqual(1, len(s.select(0)))
        self.assertEqual(1, len(s.select(-1)))
        self.assertLess(time() - t, 0.5)
        s.unregister(wr)
        s.register(rd, EVENT_READ)
        t = time()
        # nothing readable: zero/negative timeouts must return at once
        self.assertFalse(s.select(0))
        self.assertFalse(s.select(-1))
        self.assertLess(time() - t, 0.5)
        t0 = time()
        self.assertFalse(s.select(1))
        t1 = time()
        dt = t1 - t0
        # generous tolerance: clock granularity and scheduler jitter
        self.assertTrue(0.8 <= dt <= 1.6, dt)

    @unittest.skipUnless(hasattr(signal, "alarm"),
                         "signal.alarm() required for this test")
    def test_select_interrupt(self):
        # a signal arriving mid-select must not abort the wait
        s = self.SELECTOR()
        self.addCleanup(s.close)
        rd, wr = self.make_socketpair()
        orig_alrm_handler = signal.signal(signal.SIGALRM, lambda *args: None)
        self.addCleanup(signal.signal, signal.SIGALRM, orig_alrm_handler)
        self.addCleanup(signal.alarm, 0)
        signal.alarm(1)
        s.register(rd, EVENT_READ)
        t = time()
        self.assertFalse(s.select(2))
        self.assertLess(time() - t, 3.5)

    # see issue #18963 for why it's skipped on older OS X versions
    @requires_mac_ver(10, 5)
    @unittest.skipUnless(resource, "Test needs resource module")
    def test_above_fd_setsize(self):
        # A scalable implementation should have no problem with more than
        # FD_SETSIZE file descriptors. Since we don't know the value, we just
        # try to set the soft RLIMIT_NOFILE to the hard RLIMIT_NOFILE ceiling.
        soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
        try:
            resource.setrlimit(resource.RLIMIT_NOFILE, (hard, hard))
            self.addCleanup(resource.setrlimit, resource.RLIMIT_NOFILE,
                            (soft, hard))
            NUM_FDS = hard
        except (OSError, ValueError):
            NUM_FDS = soft
        # guard for already allocated FDs (stdin, stdout...)
        NUM_FDS -= 32
        s = self.SELECTOR()
        self.addCleanup(s.close)
        for i in range(NUM_FDS // 2):
            try:
                rd, wr = self.make_socketpair()
            except OSError:
                # too many FDs, skip - note that we should only catch EMFILE
                # here, but apparently *BSD and Solaris can fail upon connect()
                # or bind() with EADDRNOTAVAIL, so let's be safe
                self.skipTest("FD limit reached")
            try:
                s.register(rd, EVENT_READ)
                s.register(wr, EVENT_WRITE)
            except OSError as e:
                if e.errno == errno.ENOSPC:
                    # this can be raised by epoll if we go over
                    # fs.epoll.max_user_watches sysctl
                    self.skipTest("FD limit reached")
                raise
        self.assertEqual(NUM_FDS // 2, len(s.select()))
| {
"content_hash": "6dd657af8c2589a4841f1e0d66c359b4",
"timestamp": "",
"source": "github",
"line_count": 490,
"max_line_length": 79,
"avg_line_length": 29.722448979591835,
"alnum_prop": 0.5597363361713815,
"repo_name": "MetaMemoryT/aiozmq",
"id": "e44b1ecd7f90b8ac270b62a109580e1a61d26f78",
"size": "14564",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/selectors_test.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Makefile",
"bytes": "882"
},
{
"name": "Python",
"bytes": "301246"
}
],
"symlink_target": ""
} |
import os
import time
from dbm import whichdb
import py
import pytest
from doit.dependency import DbmDB, Dependency, MD5Checker
from doit.task import Task
def get_abspath(relativePath):
    """Return the absolute path of *relativePath* relative to this file."""
    here = os.path.dirname(__file__)
    return os.path.join(here, relativePath)
# fixture to create a sample file to be used as file_dep
@pytest.fixture
def dependency1(request):
    """Create tests/data/dependency1 with time-stamped content; remove it
    on teardown.  Returns the file's path."""
    path = get_abspath("data/dependency1")
    if os.path.exists(path): os.remove(path)
    # use a context manager so the handle is closed even if the write fails
    with open(path, "w") as ff:
        ff.write("whatever" + str(time.asctime()))
    def remove_dependency():
        if os.path.exists(path):
            os.remove(path)
    request.addfinalizer(remove_dependency)
    return path
# fixture to create a sample file to be used as file_dep
@pytest.fixture
def target1(request):
    """Return the target1 data path, guaranteed absent before and after
    the test (tests create it themselves)."""
    target_path = get_abspath("data/target1")
    if os.path.exists(target_path):  # pragma: no cover
        os.remove(target_path)
    def _cleanup():
        if os.path.exists(target_path):
            os.remove(target_path)
    request.addfinalizer(_cleanup)
    return target_path
# fixture for "doit.db". create/remove for every test
def remove_db(filename):
    """Delete every on-disk file a dbm backend may have created for *filename*."""
    # dbm on some systems adds '.db', on others ('.dir', '.pag');
    # dumbdbm adds '.bak'/'.dat'/'.dir'
    for ext in ('', '.bak', '.dat', '.dir', '.db', '.pag'):
        candidate = filename + ext
        if os.path.exists(candidate):
            os.remove(candidate)
# dbm backends use different file extensions; map the backend name (as
# reported by dbm.whichdb) to the extension(s) its data files carry.
db_ext = {'dbhash': [''],
          'gdbm': [''],
          'dbm': ['.db', '.dir'],
          'dumbdbm': ['.dat'],
          # for python3
          'dbm.ndbm': ['.db'],
          }
@pytest.fixture
def depfile(request):
    """Return a doit Dependency backed by a fresh DB file in a per-test
    tmp dir; the DB class may be overridden via indirect parametrization
    (``request.param``), defaulting to DbmDB.  Files removed on teardown."""
    if hasattr(request, 'param'):
        dep_class = request.param
    else:
        dep_class = DbmDB
    # copied from tempdir plugin: sanitize test name into a dir name
    name = request._pyfuncitem.name
    name = py.std.re.sub("[\W]", "_", name)
    my_tmpdir = request.config._tmpdirhandler.mktemp(name, numbered=True)
    dep_file = Dependency(dep_class, os.path.join(my_tmpdir.strpath, "testdb"))
    # record which dbm backend was chosen so tests can map file extensions
    dep_file.whichdb = whichdb(dep_file.name) if dep_class is DbmDB else 'XXX'
    dep_file.name_ext = db_ext.get(dep_file.whichdb, [''])
    def remove_depfile():
        if not dep_file._closed:
            dep_file.close()
        remove_db(dep_file.name)
    request.addfinalizer(remove_depfile)
    return dep_file
@pytest.fixture
def depfile_name(request):
    """Return a fresh DB file path inside a per-test tmp dir; any backend
    files are removed on teardown."""
    # sanitize the test name so it can be used as a directory name
    safe_name = py.std.re.sub("[\W]", "_", request._pyfuncitem.name)
    tmp_dir = request.config._tmpdirhandler.mktemp(safe_name, numbered=True)
    db_path = os.path.join(tmp_dir.strpath, "testdb")
    request.addfinalizer(lambda: remove_db(db_path))
    return db_path
@pytest.fixture
def dep_manager(request, depfile_name):
    """Return a dbm-backed doit Dependency manager for this test."""
    manager = Dependency(DbmDB, depfile_name)
    return manager
@pytest.fixture
def restore_cwd(request):
    """Restore the working directory to its initial value after the test."""
    original = os.getcwd()
    request.addfinalizer(lambda: os.chdir(original))
# create a list of sample tasks
def tasks_sample():
    """Build the canonical list of six sample tasks used across the tests:
    t1, t2 (with file_dep), group g1 with subtasks g1.a/g1.b, and t3."""
    group = Task("g1", None, doc="g1 doc string", has_subtask=True)
    group.task_dep = ['g1.a', 'g1.b']
    sample = [
        Task("t1", [""], doc="t1 doc string"),
        Task("t2", [""], file_dep=['tests/data/dependency1'],
             doc="t2 doc string"),
        group,
        Task("g1.a", [""], doc="g1.a doc string", is_subtask=True),
        Task("g1.b", [""], doc="g1.b doc string", is_subtask=True),
        Task("t3", [""], doc="t3 doc string", task_dep=["t1"]),
    ]
    return sample
def CmdFactory(cls, outstream=None, task_loader=None, dep_file=None,
               backend=None, task_list=None, sel_tasks=None,
               dep_manager=None, config=None, cmds=None):
    """helper for test code, so test can call _execute() directly"""
    cmd = cls(task_loader=task_loader, config=config, cmds=cmds)

    if outstream:
        cmd.outstream = outstream

    # either build a dbm-backed Dependency or reuse the one handed in
    if backend:
        assert backend == "dbm"  # the only one used on tests
        cmd.dep_manager = Dependency(DbmDB, dep_file, MD5Checker)
    elif dep_manager:
        cmd.dep_manager = dep_manager

    cmd.dep_file = dep_file    # (str) filename usually '.doit.db'
    cmd.task_list = task_list  # list of tasks
    cmd.sel_tasks = sel_tasks  # from command line or default_tasks
    return cmd
| {
"content_hash": "82734be6ee0caedfa38778048b409f7d",
"timestamp": "",
"source": "github",
"line_count": 157,
"max_line_length": 79,
"avg_line_length": 30.05095541401274,
"alnum_prop": 0.6034336583298008,
"repo_name": "Hinidu/doit",
"id": "a2f00ae6cb976ca7fdc0f150cafe5477ce2cf0d6",
"size": "4718",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/conftest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "489292"
}
],
"symlink_target": ""
} |
import pygame
import random
from os import path
# asset directories, located next to this script
img_dir = path.join(path.dirname(__file__), 'img')
snd_dir = path.join(path.dirname(__file__), 'snd')

# window size and frame rate
WIDTH = 480
HEIGHT = 600
FPS = 60

# define colors (R, G, B)
WHITE = (255, 255, 255)
BLACK = (0, 0, 0)
RED = (255, 0, 0)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
YELLOW = (255, 255, 0)

# initialize pygame and create window
pygame.init()
pygame.mixer.init()
screen = pygame.display.set_mode((WIDTH, HEIGHT))
pygame.display.set_caption("Shmup!")
clock = pygame.time.Clock()
font_name = pygame.font.match_font('arial')
def draw_text(surf, text, size, x, y):
    """Render *text* in white at *size* with its top edge centered on (x, y)."""
    font = pygame.font.Font(font_name, size)
    rendered = font.render(text, True, WHITE)
    rect = rendered.get_rect()
    rect.midtop = (x, y)
    surf.blit(rendered, rect)
def newmob():
    """Spawn one Mob and register it with both sprite groups."""
    m = Mob()
    all_sprites.add(m)
    mobs.add(m)
def draw_shield_bar(surf, x, y, pct):
    """Draw a shield bar at (x, y) filled proportionally to *pct* (0-100).

    *pct* is clamped into [0, 100]; the original only guarded against
    negative values, so pct > 100 drew the fill past the outline.
    """
    pct = max(0, min(pct, 100))
    BAR_LENGTH = 100
    BAR_HEIGHT = 10
    fill = (pct / 100) * BAR_LENGTH
    outline_rect = pygame.Rect(x, y, BAR_LENGTH, BAR_HEIGHT)
    fill_rect = pygame.Rect(x, y, fill, BAR_HEIGHT)
    pygame.draw.rect(surf, GREEN, fill_rect)
    pygame.draw.rect(surf, WHITE, outline_rect, 2)
def draw_lives(surf, x, y, lives, img):
    """Blit *lives* copies of *img* in a row, 30 px apart, starting at (x, y)."""
    for slot in range(lives):
        rect = img.get_rect()
        rect.topleft = (x + 30 * slot, y)
        surf.blit(img, rect)
class Player(pygame.sprite.Sprite):
    """The player's ship: moves horizontally, shoots, and hides briefly on death."""

    def __init__(self):
        pygame.sprite.Sprite.__init__(self)
        self.image = pygame.transform.scale(player_img, (50, 38))
        self.image.set_colorkey(BLACK)
        self.rect = self.image.get_rect()
        # collision radius used by collide_circle (smaller than the sprite box)
        self.radius = 20
        # debug: uncomment to visualize the collision circle
        # pygame.draw.circle(self.image, RED, self.rect.center, self.radius)
        self.rect.centerx = WIDTH / 2
        self.rect.bottom = HEIGHT - 10
        self.speedx = 0
        self.shield = 100
        # minimum milliseconds between shots
        self.shoot_delay = 250
        self.last_shot = pygame.time.get_ticks()
        self.lives = 3
        self.hidden = False
        self.hide_timer = pygame.time.get_ticks()

    def update(self):
        # unhide if hidden for more than one second, respawn at start position
        if self.hidden and pygame.time.get_ticks() - self.hide_timer > 1000:
            self.hidden = False
            self.rect.centerx = WIDTH / 2
            self.rect.bottom = HEIGHT - 10
        # reset speed each frame; keys held down re-apply it below
        self.speedx = 0
        keystate = pygame.key.get_pressed()
        if keystate[pygame.K_LEFT]:
            self.speedx = -8
        if keystate[pygame.K_RIGHT]:
            self.speedx = 8
        if keystate[pygame.K_SPACE]:
            self.shoot()
        self.rect.x += self.speedx
        # keep the ship inside the screen
        if self.rect.right > WIDTH:
            self.rect.right = WIDTH
        if self.rect.left < 0:
            self.rect.left = 0

    def shoot(self):
        """Fire a bullet if the shot cooldown has elapsed."""
        now = pygame.time.get_ticks()
        if now - self.last_shot > self.shoot_delay:
            self.last_shot = now
            bullet = Bullet(self.rect.centerx, self.rect.top)
            all_sprites.add(bullet)
            bullets.add(bullet)
            shoot_sound.play()

    def hide(self):
        # hide the player temporarily by parking it below the screen
        self.hidden = True
        self.hide_timer = pygame.time.get_ticks()
        self.rect.center = (WIDTH / 2, HEIGHT + 200)
class Mob(pygame.sprite.Sprite):
    """A meteor falling from the top with random speed, drift and spin."""

    def __init__(self):
        pygame.sprite.Sprite.__init__(self)
        # keep a pristine copy: each rotation is applied to the original image
        # to avoid cumulative distortion
        self.image_orig = random.choice(meteor_images)
        self.image_orig.set_colorkey(BLACK)
        self.image = self.image_orig.copy()
        self.rect = self.image.get_rect()
        # collision circle slightly smaller than the sprite's bounding box
        self.radius = int(self.rect.width * .85 / 2)
        # debug: uncomment to visualize the collision circle
        # pygame.draw.circle(self.image, RED, self.rect.center, self.radius)
        # spawn above the visible screen at a random x
        self.rect.x = random.randrange(WIDTH - self.rect.width)
        self.rect.bottom = random.randrange(-80, -20)
        self.speedy = random.randrange(1, 8)
        self.speedx = random.randrange(-3, 3)
        self.rot = 0
        # degrees per rotation step; may be negative (counter-clockwise)
        self.rot_speed = random.randrange(-8, 8)
        self.last_update = pygame.time.get_ticks()

    def rotate(self):
        """Advance the spin animation, at most once every 50 ms."""
        now = pygame.time.get_ticks()
        if now - self.last_update > 50:
            self.last_update = now
            self.rot = (self.rot + self.rot_speed) % 360
            new_image = pygame.transform.rotate(self.image_orig, self.rot)
            # re-center: rotation changes the bounding box size
            old_center = self.rect.center
            self.image = new_image
            self.rect = self.image.get_rect()
            self.rect.center = old_center

    def update(self):
        self.rotate()
        self.rect.x += self.speedx
        self.rect.y += self.speedy
        # respawn at the top once the meteor leaves the play field
        if self.rect.top > HEIGHT + 10 or self.rect.left < -100 or self.rect.right > WIDTH + 100:
            self.rect.x = random.randrange(WIDTH - self.rect.width)
            self.rect.y = random.randrange(-100, -40)
            self.speedy = random.randrange(1, 8)
class Bullet(pygame.sprite.Sprite):
    """A laser shot that travels straight up and dies once off-screen."""

    def __init__(self, x, y):
        pygame.sprite.Sprite.__init__(self)
        self.image = bullet_img
        self.image.set_colorkey(BLACK)
        self.rect = self.image.get_rect()
        # spawn with the bottom edge at (x, y), i.e. at the ship's nose
        self.rect.bottom = y
        self.rect.centerx = x
        self.speedy = -10

    def update(self):
        self.rect.y += self.speedy
        # kill if it moves off the top of the screen
        if self.rect.bottom < 0:
            self.kill()
class Explosion(pygame.sprite.Sprite):
    """Frame-by-frame explosion animation; kills itself after the last frame.

    *size* selects the animation set in explosion_anim
    ('lg', 'sm' or 'player').
    """

    def __init__(self, center, size):
        pygame.sprite.Sprite.__init__(self)
        self.size = size
        self.image = explosion_anim[self.size][0]
        self.rect = self.image.get_rect()
        self.rect.center = center
        self.frame = 0
        self.last_update = pygame.time.get_ticks()
        # milliseconds between animation frames
        self.frame_rate = 75

    def update(self):
        now = pygame.time.get_ticks()
        if now - self.last_update > self.frame_rate:
            self.last_update = now
            self.frame += 1
            if self.frame == len(explosion_anim[self.size]):
                self.kill()
            else:
                # swap to the next frame while keeping the sprite centered
                center = self.rect.center
                self.image = explosion_anim[self.size][self.frame]
                self.rect = self.image.get_rect()
                self.rect.center = center
# Load all game graphics
background = pygame.image.load(path.join(img_dir, "starfield.png")).convert()
background_rect = background.get_rect()
player_img = pygame.image.load(path.join(img_dir, "playerShip1_orange.png")).convert()
# small ship icon used for the lives display
player_mini_img = pygame.transform.scale(player_img, (25, 19))
player_mini_img.set_colorkey(BLACK)
bullet_img = pygame.image.load(path.join(img_dir, "laserRed16.png")).convert()
meteor_images = []
# NOTE(review): 'meteorBrown_med1.png' appears twice — possibly a typo for
# 'meteorBrown_med2.png'; confirm against the available image assets.
meteor_list = ['meteorBrown_big1.png', 'meteorBrown_med1.png', 'meteorBrown_med1.png',
               'meteorBrown_med3.png', 'meteorBrown_small1.png', 'meteorBrown_small2.png',
               'meteorBrown_tiny1.png']
for img in meteor_list:
    meteor_images.append(pygame.image.load(path.join(img_dir, img)).convert())
# explosion animations: 'lg'/'sm' for meteors, 'player' for the ship's death
explosion_anim = {}
explosion_anim['lg'] = []
explosion_anim['sm'] = []
explosion_anim['player'] = []
for i in range(9):
    filename = 'regularExplosion0{}.png'.format(i)
    img = pygame.image.load(path.join(img_dir, filename)).convert()
    img.set_colorkey(BLACK)
    img_lg = pygame.transform.scale(img, (75, 75))
    explosion_anim['lg'].append(img_lg)
    img_sm = pygame.transform.scale(img, (32, 32))
    explosion_anim['sm'].append(img_sm)
    filename = 'sonicExplosion0{}.png'.format(i)
    img = pygame.image.load(path.join(img_dir, filename)).convert()
    img.set_colorkey(BLACK)
    explosion_anim['player'].append(img)

# Load all game sounds
shoot_sound = pygame.mixer.Sound(path.join(snd_dir, 'pew.wav'))
expl_sounds = []
for snd in ['expl3.wav', 'expl6.wav']:
    expl_sounds.append(pygame.mixer.Sound(path.join(snd_dir, snd)))
player_die_sound = pygame.mixer.Sound(path.join(snd_dir, 'rumble1.ogg'))
pygame.mixer.music.load(path.join(snd_dir, 'tgfcoder-FrozenJam-SeamlessLoop.ogg'))
pygame.mixer.music.set_volume(0.4)

# sprite groups and initial game state
all_sprites = pygame.sprite.Group()
mobs = pygame.sprite.Group()
bullets = pygame.sprite.Group()
player = Player()
all_sprites.add(player)
for i in range(8):
    newmob()
score = 0
pygame.mixer.music.play(loops=-1)
# Game loop
running = True
while running:
    # keep loop running at the right speed
    clock.tick(FPS)
    # Process input (events)
    for event in pygame.event.get():
        # check for closing window
        if event.type == pygame.QUIT:
            running = False

    # Update
    all_sprites.update()

    # check to see if a bullet hit a mob
    hits = pygame.sprite.groupcollide(mobs, bullets, True, True)
    for hit in hits:
        # smaller meteors are worth more points
        score += 50 - hit.radius
        random.choice(expl_sounds).play()
        expl = Explosion(hit.rect.center, 'lg')
        all_sprites.add(expl)
        # keep the meteor count constant
        newmob()

    # check to see if a mob hit the player
    hits = pygame.sprite.spritecollide(player, mobs, True, pygame.sprite.collide_circle)
    for hit in hits:
        # bigger meteors drain more shield
        player.shield -= hit.radius * 2
        expl = Explosion(hit.rect.center, 'sm')
        all_sprites.add(expl)
        newmob()
        if player.shield <= 0:
            player_die_sound.play()
            death_explosion = Explosion(player.rect.center, 'player')
            all_sprites.add(death_explosion)
            player.hide()
            player.lives -= 1
            player.shield = 100

    # if the player died and the explosion has finished playing
    # (death_explosion is only bound after the first death, which always
    # happens before lives reaches 0)
    if player.lives == 0 and not death_explosion.alive():
        running = False

    # Draw / render
    screen.fill(BLACK)
    screen.blit(background, background_rect)
    all_sprites.draw(screen)
    draw_text(screen, str(score), 18, WIDTH / 2, 10)
    draw_shield_bar(screen, 5, 5, player.shield)
    draw_lives(screen, WIDTH - 100, 5, player.lives, player_mini_img)
    # *after* drawing everything, flip the display
    pygame.display.flip()
pygame.quit()
| {
"content_hash": "3f54046fec986a935f6b91b2f9513aa6",
"timestamp": "",
"source": "github",
"line_count": 290,
"max_line_length": 97,
"avg_line_length": 33.96896551724138,
"alnum_prop": 0.6084661455689778,
"repo_name": "kidscancode/gamedev",
"id": "3f858a5164326a94b7a510e66aa8d43a5451044a",
"size": "10103",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tutorials/shmup/shmup-11.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1690942"
}
],
"symlink_target": ""
} |
import json
import responses
import pytest
from django.conf import settings
# canned payload for the fix-my-django search API: one matching error post
TEST_JSON_RESPONSE = {
    "error_post_list": [
        {
            "id": 8,
            "url": "http://www.fixmydjango.com/1/",
            "exception_type": "DataError",
            "error_message": "value too long for type character varying(100)",
            "raised_by": "django/db/backends/util.py",
            "raised_by_line": 56,
            "django_version": "1.6"
        }
    ],
    "list_url": "http://www.fixmydjango.com/test-server"
}

# same payload shape, but with no matching error posts
TEST_JSON_RESPONSE_EMPTY = {
    "error_post_list": [],
    "list_url": "http://www.fixmydjango.com/test-server"
}
@pytest.yield_fixture(scope='function')
def mock_api_response():
    """Mock the search endpoint to serve one matching error post; yield it."""
    with responses.RequestsMock() as rsps:
        rsps.add(
            responses.GET,
            settings.FIX_MY_DJANGO_API_BASE_URL + '/api/search/',
            body=json.dumps(TEST_JSON_RESPONSE),
            status=200,
            content_type='application/json',
        )
        yield TEST_JSON_RESPONSE
@pytest.yield_fixture(scope='function')
def mock_api_response_empty():
    """Mock the search endpoint to serve an empty result list; yield it.

    BUG FIX: the original yielded TEST_JSON_RESPONSE, which does not match
    the empty payload this mock actually serves.
    """
    with responses.RequestsMock() as rsps:
        rsps.add(responses.GET,
                 settings.FIX_MY_DJANGO_API_BASE_URL + '/api/search/',
                 body=json.dumps(TEST_JSON_RESPONSE_EMPTY), status=200,
                 content_type='application/json')
        yield TEST_JSON_RESPONSE_EMPTY
| {
"content_hash": "73fe80ecc7adb30ec3c94e3caa47b269",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 78,
"avg_line_length": 30.755555555555556,
"alnum_prop": 0.5903179190751445,
"repo_name": "vintasoftware/fix-my-django-lib",
"id": "e6a623010077bd97fdf3ad2d47593f785a6d7579",
"size": "1384",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "fixmydjango/tests/conftest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "28534"
}
],
"symlink_target": ""
} |
import time
import sys
import urllib2
import urllib2
n = int(sys.argv[1])
url = sys.argv[2]
headers = {"Accept-Language" : "en" }
req = urllib2.Request(url, None, headers)
t0 = time.time()
for k in xrange(n):
data = urllib2.urlopen(req).read()
print (time.time()-t0)/n
if n==1: print data
| {
"content_hash": "e5319c60d39d78d5107ae755a896ef79",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 41,
"avg_line_length": 19.666666666666668,
"alnum_prop": 0.6745762711864407,
"repo_name": "andersonsilvade/python_C",
"id": "ffb98452ebbea715f2255a30c59b43a9c45ee931",
"size": "295",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Python32/web2py/scripts/bench.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "6031642"
},
{
"name": "C#",
"bytes": "9892"
},
{
"name": "C++",
"bytes": "1800118"
},
{
"name": "CSS",
"bytes": "126965"
},
{
"name": "F#",
"bytes": "4611"
},
{
"name": "JavaScript",
"bytes": "296535"
},
{
"name": "Objective-C",
"bytes": "272866"
},
{
"name": "Python",
"bytes": "1949327"
},
{
"name": "Shell",
"bytes": "54613"
},
{
"name": "Tcl",
"bytes": "3155560"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import compas

# skip the numpy-based implementation when compas.IPY is set
# (presumably flags an IronPython runtime without numpy — confirm upstream)
if not compas.IPY:
    from .fd_numpy import *  # noqa: F401 F403

# export every public name imported above
__all__ = [name for name in dir() if not name.startswith("_")]
| {
"content_hash": "dc2dc3277ad329fa38711aa35533cc70",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 62,
"avg_line_length": 23.272727272727273,
"alnum_prop": 0.6875,
"repo_name": "compas-dev/compas",
"id": "30ab16da8165d1b497e07c15225a7859561b5b79",
"size": "256",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/compas/numerical/fd/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3181804"
}
],
"symlink_target": ""
} |
"""Implements the RelFS entity class."""
#------------------------------------------------------------------------------#
import BTrees.OOBTree
import persistent
from .component import Component
#------------------------------------------------------------------------------#
class EntityBase(persistent.Persistent):
    """Common base class for RelFs persistent entity.

    Components are stored in a persistent BTree keyed by each component
    class' unique_id().
    """
    #--------------------------------------------------------------------------#
    def __init__(self):
        persistent.Persistent.__init__(self)
        # component unique_id -> Component instance
        self._components = BTrees.OOBTree.BTree()
        # mark dirty so the fresh BTree is persisted
        self._p_changed = True
    #--------------------------------------------------------------------------#
    def has_component(self, component_class):
        """
        Indicates if this entity instance has component
        of the specified component_class.
        """
        assert isinstance(component_class, type)
        assert issubclass(component_class, Component)
        return self._components.has_key(component_class.unique_id())
    #--------------------------------------------------------------------------#
    def get_component(self, component_class):
        """
        If this entity does not have a component of the specified
        component_class, then a new default instance is added.
        Then the component of component_class is returned of this
        entity is returned.
        """
        assert isinstance(component_class, type)
        assert issubclass(component_class, Component)
        try:
            return self._components[component_class.unique_id()]
        except KeyError:
            # lazily create, store and persist a default-constructed component
            result = component_class()
            self._components[component_class.unique_id()] = result
            self._p_changed = True
            return result
    #--------------------------------------------------------------------------#
    def find_component(self, component_class):
        """
        Returns the component with the specified component_class,
        if this entity has one, returns None otherwise.
        """
        assert isinstance(component_class, type)
        assert issubclass(component_class, Component)
        try:
            return self._components[component_class.unique_id()]
        except KeyError:
            return None
    #--------------------------------------------------------------------------#
    def find_all_components_by_name(self, *component_names):
        """
        Returns a tuple of components of this entity with the specified
        component class names if this entity has all of them.
        Returns None otherwise.
        """
        # any missing name raises KeyError inside the generator -> None
        try:
            return tuple(self._components[name] for name in component_names)
        except KeyError:
            return None
    #--------------------------------------------------------------------------#
    def all_components(self):
        """
        Yields pairs of type names and instances of all components
        of this entity.
        """
        for name, component in self._components.items():
            yield name, component
    #--------------------------------------------------------------------------#
    def only_components(self, component_names):
        """
        Yields pairs of type names and instances of components with type
        names specified in the component_names list in the this entity.
        """
        for component_name in component_names:
            try:
                yield component_name, self._components[component_name]
            except KeyError:
                # silently skip names this entity does not have
                pass
#------------------------------------------------------------------------------#
class Entity(EntityBase):
    """Class representing an identifiable RelFS entity."""
    #--------------------------------------------------------------------------#
    def __init__(self):
        EntityBase.__init__(self)
    #--------------------------------------------------------------------------#
    def add_component(self, some_component):
        """Adds some_component instance to this entity."""
        assert isinstance(some_component, Component)
        # key by the component's unique_id, replacing any previous instance
        self._components[some_component.unique_id()] = some_component
        self._p_changed = True
#------------------------------------------------------------------------------#
class EntityContext(EntityBase):
    """Class representing shared context for RelFS entities.

    Behaves like EntityBase; components added via get_component are
    shared by whichever entities use this context.
    """
    #--------------------------------------------------------------------------#
    def __init__(self):
        EntityBase.__init__(self)
| {
"content_hash": "fe117d2b5974026ec43bb8a0c5bdf969",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 80,
"avg_line_length": 41.45871559633027,
"alnum_prop": 0.4711219296304492,
"repo_name": "matus-chochlik/various",
"id": "aaf9557ac46e3354041282b7594a90510472994f",
"size": "4615",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "relfs/relfs/components/entity.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "44686"
},
{
"name": "C++",
"bytes": "76296"
},
{
"name": "Makefile",
"bytes": "8751"
},
{
"name": "Python",
"bytes": "295676"
},
{
"name": "QML",
"bytes": "19387"
},
{
"name": "QMake",
"bytes": "3981"
},
{
"name": "Roff",
"bytes": "13504"
},
{
"name": "Shell",
"bytes": "88324"
},
{
"name": "TeX",
"bytes": "199528"
},
{
"name": "Vim script",
"bytes": "1417"
},
{
"name": "XSLT",
"bytes": "2225"
}
],
"symlink_target": ""
} |
import numbers
from itertools import chain
from itertools import count
from math import ceil
import numpy as np
from scipy import sparse
from scipy.stats.mstats import mquantiles
from joblib import Parallel, delayed
from .. import partial_dependence
from ...base import is_regressor
from ...utils import check_array
from ...utils import check_matplotlib_support # noqa
from ...utils import _safe_indexing
from ...utils.validation import _deprecate_positional_args
@_deprecate_positional_args
def plot_partial_dependence(estimator, X, features, *, feature_names=None,
                            target=None, response_method='auto', n_cols=3,
                            grid_resolution=100, percentiles=(0.05, 0.95),
                            method='auto', n_jobs=None, verbose=0,
                            line_kw=None, contour_kw=None, ax=None,
                            kind='average', subsample=1000):
    """Partial dependence (PD) and individual conditional expectation (ICE)
    plots.

    Partial dependence plots, individual conditional expectation plots or an
    overlay of both of them can be plotted by setting the ``kind``
    parameter.

    The ``len(features)`` plots are arranged in a grid with ``n_cols``
    columns. Two-way partial dependence plots are plotted as contour plots. The
    deciles of the feature values will be shown with tick marks on the x-axes
    for one-way plots, and on both axes for two-way plots.

    Read more in the :ref:`User Guide <partial_dependence>`.

    .. note::

        :func:`plot_partial_dependence` does not support using the same axes
        with multiple calls. To plot the the partial dependence for multiple
        estimators, please pass the axes created by the first call to the
        second call::

           >>> from sklearn.inspection import plot_partial_dependence
           >>> from sklearn.datasets import make_friedman1
           >>> from sklearn.linear_model import LinearRegression
           >>> from sklearn.ensemble import RandomForestRegressor
           >>> X, y = make_friedman1()
           >>> est1 = LinearRegression().fit(X, y)
           >>> est2 = RandomForestRegressor().fit(X, y)
           >>> disp1 = plot_partial_dependence(est1, X,
           ...                                 [1, 2])  # doctest: +SKIP
           >>> disp2 = plot_partial_dependence(est2, X, [1, 2],
           ...                                 ax=disp1.axes_)  # doctest: +SKIP

    .. warning::

        For :class:`~sklearn.ensemble.GradientBoostingClassifier` and
        :class:`~sklearn.ensemble.GradientBoostingRegressor`, the
        `'recursion'` method (used by default) will not account for the `init`
        predictor of the boosting process. In practice, this will produce
        the same values as `'brute'` up to a constant offset in the target
        response, provided that `init` is a constant estimator (which is the
        default). However, if `init` is not a constant estimator, the
        partial dependence values are incorrect for `'recursion'` because the
        offset will be sample-dependent. It is preferable to use the `'brute'`
        method. Note that this only applies to
        :class:`~sklearn.ensemble.GradientBoostingClassifier` and
        :class:`~sklearn.ensemble.GradientBoostingRegressor`, not to
        :class:`~sklearn.ensemble.HistGradientBoostingClassifier` and
        :class:`~sklearn.ensemble.HistGradientBoostingRegressor`.

    Parameters
    ----------
    estimator : BaseEstimator
        A fitted estimator object implementing :term:`predict`,
        :term:`predict_proba`, or :term:`decision_function`.
        Multioutput-multiclass classifiers are not supported.

    X : {array-like or dataframe} of shape (n_samples, n_features)
        ``X`` is used to generate a grid of values for the target
        ``features`` (where the partial dependence will be evaluated), and
        also to generate values for the complement features when the
        `method` is `'brute'`.

    features : list of {int, str, pair of int, pair of str}
        The target features for which to create the PDPs.
        If `features[i]` is an integer or a string, a one-way PDP is created;
        if `features[i]` is a tuple, a two-way PDP is created (only supported
        with `kind='average'`). Each tuple must be of size 2.
        if any entry is a string, then it must be in ``feature_names``.

    feature_names : array-like of shape (n_features,), dtype=str, default=None
        Name of each feature; `feature_names[i]` holds the name of the feature
        with index `i`.
        By default, the name of the feature corresponds to their numerical
        index for NumPy array and their column name for pandas dataframe.

    target : int, default=None
        - In a multiclass setting, specifies the class for which the PDPs
          should be computed. Note that for binary classification, the
          positive class (index 1) is always used.
        - In a multioutput setting, specifies the task for which the PDPs
          should be computed.

        Ignored in binary classification or classical regression settings.

    response_method : {'auto', 'predict_proba', 'decision_function'}, \
            default='auto'
        Specifies whether to use :term:`predict_proba` or
        :term:`decision_function` as the target response. For regressors
        this parameter is ignored and the response is always the output of
        :term:`predict`. By default, :term:`predict_proba` is tried first
        and we revert to :term:`decision_function` if it doesn't exist. If
        ``method`` is `'recursion'`, the response is always the output of
        :term:`decision_function`.

    n_cols : int, default=3
        The maximum number of columns in the grid plot. Only active when `ax`
        is a single axis or `None`.

    grid_resolution : int, default=100
        The number of equally spaced points on the axes of the plots, for each
        target feature.

    percentiles : tuple of float, default=(0.05, 0.95)
        The lower and upper percentile used to create the extreme values
        for the PDP axes. Must be in [0, 1].

    method : str, default='auto'
        The method used to calculate the averaged predictions:

        - `'recursion'` is only supported for some tree-based estimators
          (namely
          :class:`~sklearn.ensemble.GradientBoostingClassifier`,
          :class:`~sklearn.ensemble.GradientBoostingRegressor`,
          :class:`~sklearn.ensemble.HistGradientBoostingClassifier`,
          :class:`~sklearn.ensemble.HistGradientBoostingRegressor`,
          :class:`~sklearn.tree.DecisionTreeRegressor`,
          :class:`~sklearn.ensemble.RandomForestRegressor`
          but is more efficient in terms of speed.
          With this method, the target response of a
          classifier is always the decision function, not the predicted
          probabilities. Since the `'recursion'` method implicitely computes
          the average of the ICEs by design, it is not compatible with ICE and
          thus `kind` must be `'average'`.

        - `'brute'` is supported for any estimator, but is more
          computationally intensive.

        - `'auto'`: the `'recursion'` is used for estimators that support it,
          and `'brute'` is used otherwise.

        Please see :ref:`this note <pdp_method_differences>` for
        differences between the `'brute'` and `'recursion'` method.

    n_jobs : int, default=None
        The number of CPUs to use to compute the partial dependences.
        ``None`` means 1 unless in a :obj:`joblib.parallel_backend` context.
        ``-1`` means using all processors. See :term:`Glossary <n_jobs>`
        for more details.

    verbose : int, default=0
        Verbose output during PD computations.

    line_kw : dict, default=None
        Dict with keywords passed to the ``matplotlib.pyplot.plot`` call.
        For one-way partial dependence plots.

    contour_kw : dict, default=None
        Dict with keywords passed to the ``matplotlib.pyplot.contourf`` call.
        For two-way partial dependence plots.

    ax : Matplotlib axes or array-like of Matplotlib axes, default=None
        - If a single axis is passed in, it is treated as a bounding axes
          and a grid of partial dependence plots will be drawn within
          these bounds. The `n_cols` parameter controls the number of
          columns in the grid.
        - If an array-like of axes are passed in, the partial dependence
          plots will be drawn directly into these axes.
        - If `None`, a figure and a bounding axes is created and treated
          as the single axes case.

        .. versionadded:: 0.22

    kind : {'average', 'individual', 'both'}, default='average'
        Whether to plot the partial dependence averaged across all the samples
        in the dataset or one line per sample or both.

        - ``kind='average'`` results in the traditional PD plot;
        - ``kind='individual'`` results in the ICE plot.

        Note that the fast ``method='recursion'`` option is only available for
        ``kind='average'``. Plotting individual dependencies requires using the
        slower ``method='brute'`` option.

        .. versionadded:: 0.24

    subsample : float, int or None, default=1000
        Sampling for ICE curves when `kind` is 'individual' or 'both'.
        If `float`, should be between 0.0 and 1.0 and represent the proportion
        of the dataset to be used to plot ICE curves. If `int`, represents the
        absolute number samples to use.

        Note that the full dataset is still used to calculate averaged partial
        dependence when `kind='both'`.

        .. versionadded:: 0.24

    Returns
    -------
    display: :class:`~sklearn.inspection.PartialDependenceDisplay`

    Examples
    --------
    >>> from sklearn.datasets import make_friedman1
    >>> from sklearn.ensemble import GradientBoostingRegressor
    >>> X, y = make_friedman1()
    >>> clf = GradientBoostingRegressor(n_estimators=10).fit(X, y)
    >>> plot_partial_dependence(clf, X, [0, (0, 1)]) #doctest: +SKIP

    See also
    --------
    sklearn.inspection.partial_dependence: Return raw partial
      dependence values
    """
    check_matplotlib_support('plot_partial_dependence')  # noqa
    import matplotlib.pyplot as plt  # noqa
    from matplotlib import transforms  # noqa
    from matplotlib.ticker import MaxNLocator  # noqa
    from matplotlib.ticker import ScalarFormatter  # noqa

    # set target_idx for multi-class estimators
    if hasattr(estimator, 'classes_') and np.size(estimator.classes_) > 2:
        if target is None:
            raise ValueError('target must be specified for multi-class')
        target_idx = np.searchsorted(estimator.classes_, target)
        if (not (0 <= target_idx < len(estimator.classes_)) or
                estimator.classes_[target_idx] != target):
            raise ValueError('target not in est.classes_, got {}'.format(
                target))
    else:
        # regression and binary classification
        target_idx = 0

    # Use check_array only on lists and other non-array-likes / sparse. Do not
    # convert DataFrame into a NumPy array.
    if not (hasattr(X, '__array__') or sparse.issparse(X)):
        X = check_array(X, force_all_finite='allow-nan', dtype=object)
    n_features = X.shape[1]

    # convert feature_names to list
    if feature_names is None:
        if hasattr(X, "loc"):
            # get the column names for a pandas dataframe
            feature_names = X.columns.tolist()
        else:
            # define a list of numbered indices for a numpy array
            feature_names = [str(i) for i in range(n_features)]
    elif hasattr(feature_names, "tolist"):
        # convert numpy array or pandas index to a list
        feature_names = feature_names.tolist()
    if len(set(feature_names)) != len(feature_names):
        raise ValueError('feature_names should not contain duplicates.')

    def convert_feature(fx):
        # map a feature name to its column index; pass indices through
        if isinstance(fx, str):
            try:
                fx = feature_names.index(fx)
            except ValueError:
                raise ValueError('Feature %s not in feature_names' % fx)
        return int(fx)

    # convert features into a seq of int tuples
    tmp_features = []
    for fxs in features:
        if isinstance(fxs, (numbers.Integral, str)):
            fxs = (fxs,)
        try:
            fxs = tuple(convert_feature(fx) for fx in fxs)
        except TypeError:
            raise ValueError('Each entry in features must be either an int, '
                             'a string, or an iterable of size at most 2.')
        if not 1 <= np.size(fxs) <= 2:
            raise ValueError('Each entry in features must be either an int, '
                             'a string, or an iterable of size at most 2.')
        if kind != 'average' and np.size(fxs) > 1:
            # ICE curves are only defined for a single feature at a time
            raise ValueError(
                f"It is not possible to display individual effects for more "
                f"than one feature at a time. Got: features={features}.")
        tmp_features.append(fxs)
    features = tmp_features

    # Early exit if the axes does not have the correct number of axes
    if ax is not None and not isinstance(ax, plt.Axes):
        axes = np.asarray(ax, dtype=object)
        if axes.size != len(features):
            raise ValueError("Expected ax to have {} axes, got {}".format(
                len(features), axes.size))

    for i in chain.from_iterable(features):
        if i >= len(feature_names):
            raise ValueError('All entries of features must be less than '
                             'len(feature_names) = {0}, got {1}.'
                             .format(len(feature_names), i))

    # validate the ICE subsampling parameter
    if isinstance(subsample, numbers.Integral):
        if subsample <= 0:
            raise ValueError(
                f"When an integer, subsample={subsample} should be positive."
            )
    elif isinstance(subsample, numbers.Real):
        if subsample <= 0 or subsample >= 1:
            raise ValueError(
                f"When a floating-point, subsample={subsample} should be in "
                f"the (0, 1) range."
            )

    # compute predictions and/or averaged predictions
    pd_results = Parallel(n_jobs=n_jobs, verbose=verbose)(
        delayed(partial_dependence)(estimator, X, fxs,
                                    response_method=response_method,
                                    method=method,
                                    grid_resolution=grid_resolution,
                                    percentiles=percentiles,
                                    kind=kind)
        for fxs in features)

    # For multioutput regression, we can only check the validity of target
    # now that we have the predictions.
    # Also note: as multiclass-multioutput classifiers are not supported,
    # multiclass and multioutput scenario are mutually exclusive. So there is
    # no risk of overwriting target_idx here.
    pd_result = pd_results[0]  # checking the first result is enough
    n_tasks = (pd_result.average.shape[0] if kind == 'average'
               else pd_result.individual.shape[0])
    if is_regressor(estimator) and n_tasks > 1:
        if target is None:
            raise ValueError(
                'target must be specified for multi-output regressors')
        # BUG FIX: valid indices are 0 .. n_tasks - 1; the original condition
        # (`target <= n_tasks`) accepted target == n_tasks, which would index
        # `preds[target_idx]` out of bounds below.
        if not 0 <= target < n_tasks:
            raise ValueError(
                'target must be in [0, n_tasks), got {}.'.format(target))
        target_idx = target

    # get global min and max average predictions of PD grouped by plot type
    pdp_lim = {}
    for pdp in pd_results:
        values = pdp["values"]
        preds = (pdp.average if kind == 'average' else pdp.individual)
        min_pd = preds[target_idx].min()
        max_pd = preds[target_idx].max()
        n_fx = len(values)
        old_min_pd, old_max_pd = pdp_lim.get(n_fx, (min_pd, max_pd))
        min_pd = min(min_pd, old_min_pd)
        max_pd = max(max_pd, old_max_pd)
        pdp_lim[n_fx] = (min_pd, max_pd)

    # deciles of each plotted feature, shown as tick marks on the axes
    deciles = {}
    for fx in chain.from_iterable(features):
        if fx not in deciles:
            X_col = _safe_indexing(X, fx, axis=1)
            deciles[fx] = mquantiles(X_col, prob=np.arange(0.1, 1.0, 0.1))

    display = PartialDependenceDisplay(pd_results=pd_results,
                                       features=features,
                                       feature_names=feature_names,
                                       target_idx=target_idx,
                                       pdp_lim=pdp_lim,
                                       deciles=deciles,
                                       kind=kind,
                                       subsample=subsample)
    return display.plot(ax=ax, n_cols=n_cols, line_kw=line_kw,
                        contour_kw=contour_kw)
class PartialDependenceDisplay:
    """Partial Dependence Plot (PDP)

    This can also display individual partial dependencies which are often
    referred to as: Individual Condition Expectation (ICE).

    It is recommended to use
    :func:`~sklearn.inspection.plot_partial_dependence` to create a
    :class:`~sklearn.inspection.PartialDependenceDisplay`. All parameters are
    stored as attributes.

    Read more in
    :ref:`sphx_glr_auto_examples_miscellaneous_plot_partial_dependence_visualization_api.py`
    and the :ref:`User Guide <visualizations>`.

    .. versionadded:: 0.22

    Parameters
    ----------
    pd_results : list of Bunch
        Results of :func:`~sklearn.inspection.partial_dependence` for
        ``features``.

    features : list of (int,) or list of (int, int)
        Indices of features for a given plot. A tuple of one integer will plot
        a partial dependence curve of one feature. A tuple of two integers will
        plot a two-way partial dependence curve as a contour plot.

    feature_names : list of str
        Feature names corresponding to the indices in ``features``.

    target_idx : int
        - In a multiclass setting, specifies the class for which the PDPs
          should be computed. Note that for binary classification, the
          positive class (index 1) is always used.
        - In a multioutput setting, specifies the task for which the PDPs
          should be computed.

        Ignored in binary classification or classical regression settings.

    pdp_lim : dict
        Global min and max average predictions, such that all plots will have
        the same scale and y limits. `pdp_lim[1]` is the global min and max for
        single partial dependence curves. `pdp_lim[2]` is the global min and
        max for two-way partial dependence curves.

    deciles : dict
        Deciles for feature indices in ``features``.

    kind : {'average', 'individual', 'both'}, default='average'
        Whether to plot the partial dependence averaged across all the samples
        in the dataset or one line per sample or both.

        - ``kind='average'`` results in the traditional PD plot;
        - ``kind='individual'`` results in the ICE plot.

        Note that the fast ``method='recursion'`` option is only available for
        ``kind='average'``. Plotting individual dependencies requires using the
        slower ``method='brute'`` option.

        .. versionadded:: 0.24

    subsample : float, int or None, default=1000
        Sampling for ICE curves when `kind` is 'individual' or 'both'.
        If float, should be between 0.0 and 1.0 and represent the proportion
        of the dataset to be used to plot ICE curves. If int, represents the
        maximum absolute number of samples to use.

        Note that the full dataset is still used to calculate partial
        dependence when `kind='both'`.

        .. versionadded:: 0.24

    Attributes
    ----------
    bounding_ax_ : matplotlib Axes or None
        If `ax` is an axes or None, the `bounding_ax_` is the axes where the
        grid of partial dependence plots are drawn. If `ax` is a list of axes
        or a numpy array of axes, `bounding_ax_` is None.

    axes_ : ndarray of matplotlib Axes
        If `ax` is an axes or None, `axes_[i, j]` is the axes on the i-th row
        and j-th column. If `ax` is a list of axes, `axes_[i]` is the i-th item
        in `ax`. Elements that are None correspond to a nonexisting axes in
        that position.

    lines_ : ndarray of matplotlib Artists
        If `ax` is an axes or None, `lines_[i, j]` is the partial dependence
        curve on the i-th row and j-th column. If `ax` is a list of axes,
        `lines_[i]` is the partial dependence curve corresponding to the i-th
        item in `ax`. Elements that are None correspond to a nonexisting axes
        or an axes that does not include a line plot.

    deciles_vlines_ : ndarray of matplotlib LineCollection
        If `ax` is an axes or None, `vlines_[i, j]` is the line collection
        representing the x axis deciles of the i-th row and j-th column. If
        `ax` is a list of axes, `vlines_[i]` corresponds to the i-th item in
        `ax`. Elements that are None correspond to a nonexisting axes or an
        axes that does not include a PDP plot.

        .. versionadded:: 0.23

    deciles_hlines_ : ndarray of matplotlib LineCollection
        If `ax` is an axes or None, `vlines_[i, j]` is the line collection
        representing the y axis deciles of the i-th row and j-th column. If
        `ax` is a list of axes, `vlines_[i]` corresponds to the i-th item in
        `ax`. Elements that are None correspond to a nonexisting axes or an
        axes that does not include a 2-way plot.

        .. versionadded:: 0.23

    contours_ : ndarray of matplotlib Artists
        If `ax` is an axes or None, `contours_[i, j]` is the partial dependence
        plot on the i-th row and j-th column. If `ax` is a list of axes,
        `contours_[i]` is the partial dependence plot corresponding to the i-th
        item in `ax`. Elements that are None correspond to a nonexisting axes
        or an axes that does not include a contour plot.

    figure_ : matplotlib Figure
        Figure containing partial dependence plots.
    """
    @_deprecate_positional_args
    def __init__(self, pd_results, *, features, feature_names, target_idx,
                 pdp_lim, deciles, kind='average', subsample=1000):
        # All constructor parameters are stored verbatim; the heavy
        # lifting happens in plot().
        self.pd_results = pd_results
        self.features = features
        self.feature_names = feature_names
        self.target_idx = target_idx
        self.pdp_lim = pdp_lim
        self.deciles = deciles
        self.kind = kind
        self.subsample = subsample

    def _get_sample_count(self, n_samples):
        # Number of ICE curves to draw out of n_samples: an integral
        # `subsample` acts as a cap, a float `subsample` as a proportion
        # (rounded up).  Any other value means "use all samples".
        if isinstance(self.subsample, numbers.Integral):
            if self.subsample < n_samples:
                return self.subsample
            return n_samples
        elif isinstance(self.subsample, numbers.Real):
            return ceil(n_samples * self.subsample)
        return n_samples

    def plot(self, ax=None, n_cols=3, line_kw=None, contour_kw=None):
        """Plot partial dependence plots.

        Parameters
        ----------
        ax : Matplotlib axes or array-like of Matplotlib axes, default=None
            - If a single axis is passed in, it is treated as a bounding axes
                and a grid of partial dependence plots will be drawn within
                these bounds. The `n_cols` parameter controls the number of
                columns in the grid.
            - If an array-like of axes are passed in, the partial dependence
                plots will be drawn directly into these axes.
            - If `None`, a figure and a bounding axes is created and treated
                as the single axes case.

        n_cols : int, default=3
            The maximum number of columns in the grid plot. Only active when
            `ax` is a single axes or `None`.

        line_kw : dict, default=None
            Dict with keywords passed to the `matplotlib.pyplot.plot` call.
            For one-way partial dependence plots.

        contour_kw : dict, default=None
            Dict with keywords passed to the `matplotlib.pyplot.contourf`
            call for two-way partial dependence plots.

        Returns
        -------
        display: :class:`~sklearn.inspection.PartialDependenceDisplay`
        """
        check_matplotlib_support("plot_partial_dependence")
        import matplotlib.pyplot as plt  # noqa
        from matplotlib import transforms  # noqa
        from matplotlib.ticker import MaxNLocator  # noqa
        from matplotlib.ticker import ScalarFormatter  # noqa
        from matplotlib.gridspec import GridSpecFromSubplotSpec  # noqa

        if line_kw is None:
            line_kw = {}
        if contour_kw is None:
            contour_kw = {}

        if ax is None:
            _, ax = plt.subplots()

        # User kwargs override the defaults.
        default_contour_kws = {"alpha": 0.75}
        contour_kw = {**default_contour_kws, **contour_kw}

        default_line_kws = {'color': 'C0'}
        line_kw = {**default_line_kws, **line_kw}

        # ICE curves are drawn fainter and thinner than the average line.
        individual_line_kw = line_kw.copy()
        if self.kind == 'individual' or self.kind == 'both':
            individual_line_kw['alpha'] = 0.3
            individual_line_kw['linewidth'] = 0.5

        n_features = len(self.features)

        # Third dimension of lines_: one slot per ICE curve, plus one for
        # the average line when kind='both'.
        n_sampled = 1
        if self.kind == 'individual':
            n_instances = len(self.pd_results[0].individual[0])
            n_sampled = self._get_sample_count(n_instances)
        elif self.kind == 'both':
            n_instances = len(self.pd_results[0].individual[0])
            n_sampled = self._get_sample_count(n_instances) + 1

        if isinstance(ax, plt.Axes):
            # If ax was set off, it has most likely been set to off
            # by a previous call to plot.
            if not ax.axison:
                raise ValueError("The ax was already used in another plot "
                                 "function, please set ax=display.axes_ "
                                 "instead")

            ax.set_axis_off()
            self.bounding_ax_ = ax
            self.figure_ = ax.figure

            n_cols = min(n_cols, n_features)
            n_rows = int(np.ceil(n_features / float(n_cols)))

            self.axes_ = np.empty((n_rows, n_cols), dtype=object)
            if self.kind == 'average':
                self.lines_ = np.empty((n_rows, n_cols), dtype=object)
            else:
                self.lines_ = np.empty((n_rows, n_cols, n_sampled),
                                       dtype=object)
            self.contours_ = np.empty((n_rows, n_cols), dtype=object)

            axes_ravel = self.axes_.ravel()

            # Carve the bounding axes into an n_rows x n_cols grid.
            gs = GridSpecFromSubplotSpec(n_rows, n_cols,
                                         subplot_spec=ax.get_subplotspec())
            for i, spec in zip(range(n_features), gs):
                axes_ravel[i] = self.figure_.add_subplot(spec)

        else:  # array-like
            ax = np.asarray(ax, dtype=object)
            if ax.size != n_features:
                raise ValueError("Expected ax to have {} axes, got {}"
                                 .format(n_features, ax.size))

            if ax.ndim == 2:
                n_cols = ax.shape[1]
            else:
                n_cols = None

            self.bounding_ax_ = None
            self.figure_ = ax.ravel()[0].figure
            self.axes_ = ax
            if self.kind == 'average':
                self.lines_ = np.empty_like(ax, dtype=object)
            else:
                self.lines_ = np.empty(ax.shape + (n_sampled,),
                                       dtype=object)
            self.contours_ = np.empty_like(ax, dtype=object)

        # create contour levels for two-way plots
        # (Z_level is only referenced in the two-feature branch below, which
        # is only reached when pdp_lim has a 2-feature entry.)
        if 2 in self.pdp_lim:
            Z_level = np.linspace(*self.pdp_lim[2], num=8)

        self.deciles_vlines_ = np.empty_like(self.axes_, dtype=object)
        self.deciles_hlines_ = np.empty_like(self.axes_, dtype=object)
        # Create 1d views of these 2d arrays for easy indexing
        lines_ravel = self.lines_.ravel(order='C')
        contours_ravel = self.contours_.ravel(order='C')
        vlines_ravel = self.deciles_vlines_.ravel(order='C')
        hlines_ravel = self.deciles_hlines_.ravel(order='C')

        # One iteration per requested feature (tuple) / PD result pair.
        for i, axi, fx, pd_result in zip(count(), self.axes_.ravel(),
                                         self.features, self.pd_results):
            avg_preds = None
            preds = None
            values = pd_result["values"]
            if self.kind == 'individual':
                preds = pd_result.individual
            elif self.kind == 'average':
                avg_preds = pd_result.average
            else:  # kind='both'
                avg_preds = pd_result.average
                preds = pd_result.individual

            if len(values) == 1:
                # One-way plot: line(s) for a single feature.
                if self.kind == 'individual' or self.kind == 'both':
                    n_samples = self._get_sample_count(
                        len(preds[self.target_idx])
                    )
                    ice_lines = preds[self.target_idx]
                    # NOTE(review): uses the global NumPy RNG; the ICE
                    # subsample is not seedable from this API.
                    sampled = ice_lines[np.random.choice(
                        ice_lines.shape[0], n_samples, replace=False
                    ), :]
                    for j, ins in enumerate(sampled):
                        # NOTE(review): `i * j + j` (== j * (i + 1)) looks
                        # suspect for any axes beyond the first; presumably
                        # `i * n_sampled + j` was intended -- confirm
                        # against upstream scikit-learn.
                        lines_ravel[i * j + j] = axi.plot(
                            values[0], ins.ravel(), **individual_line_kw
                        )[0]
                if self.kind == 'average':
                    lines_ravel[i] = axi.plot(
                        values[0], avg_preds[self.target_idx].ravel(),
                        **line_kw
                    )[0]
                elif self.kind == 'both':
                    lines_ravel[i] = axi.plot(
                        values[0], avg_preds[self.target_idx].ravel(),
                        label='average', **line_kw
                    )[0]
                    axi.legend()
            else:
                # contour plot
                XX, YY = np.meshgrid(values[0], values[1])
                Z = avg_preds[self.target_idx].T
                CS = axi.contour(XX, YY, Z, levels=Z_level, linewidths=0.5,
                                 colors='k')
                contours_ravel[i] = axi.contourf(XX, YY, Z, levels=Z_level,
                                                 vmax=Z_level[-1],
                                                 vmin=Z_level[0],
                                                 **contour_kw)
                axi.clabel(CS, fmt='%2.2f', colors='k', fontsize=10,
                           inline=True)

            # Small decile rug along the x axis (data coords in x,
            # axes-fraction coords in y).
            trans = transforms.blended_transform_factory(axi.transData,
                                                         axi.transAxes)
            ylim = axi.get_ylim()
            vlines_ravel[i] = axi.vlines(self.deciles[fx[0]], 0, 0.05,
                                         transform=trans, color='k')
            axi.set_ylim(ylim)

            # Set xlabel if it is not already set
            if not axi.get_xlabel():
                axi.set_xlabel(self.feature_names[fx[0]])

            if len(values) == 1:
                # Share the y scale across one-way plots; only the first
                # column of each row keeps its tick labels.
                if n_cols is None or i % n_cols == 0:
                    axi.set_ylabel('Partial dependence')
                else:
                    axi.set_yticklabels([])
                axi.set_ylim(self.pdp_lim[1])
            else:
                # contour plot
                trans = transforms.blended_transform_factory(axi.transAxes,
                                                             axi.transData)
                xlim = axi.get_xlim()
                hlines_ravel[i] = axi.hlines(self.deciles[fx[1]], 0, 0.05,
                                             transform=trans, color='k')
                # hline erases xlim
                axi.set_ylabel(self.feature_names[fx[1]])
                axi.set_xlim(xlim)
        return self
| {
"content_hash": "a65f9b8d6c1176f43cf54e44bca58bc6",
"timestamp": "",
"source": "github",
"line_count": 730,
"max_line_length": 92,
"avg_line_length": 43.798630136986304,
"alnum_prop": 0.5883088856222438,
"repo_name": "bnaul/scikit-learn",
"id": "504a7a5a334ad36f23b7203cb73283a1615d9de1",
"size": "31973",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sklearn/inspection/_plot/partial_dependence.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "3366"
},
{
"name": "C",
"bytes": "451996"
},
{
"name": "C++",
"bytes": "140322"
},
{
"name": "Makefile",
"bytes": "1512"
},
{
"name": "PowerShell",
"bytes": "17042"
},
{
"name": "Python",
"bytes": "7229182"
},
{
"name": "Shell",
"bytes": "19938"
}
],
"symlink_target": ""
} |
from __future__ import print_function
from collections import namedtuple, defaultdict
import copy
from types import MethodType
import numpy
from llvmlite import ir as llvmir
import llvmlite.llvmpy.core as lc
from llvmlite.llvmpy.core import Type, Constant, LLVMException
import llvmlite.binding as ll
from numba import types, utils, cgutils, typing
from numba import _dynfunc, _helperlib
from numba.pythonapi import PythonAPI
from numba.targets.imputils import (user_function, user_generator,
builtin_registry, impl_attribute,
impl_ret_borrowed)
from . import (
arrayobj, builtins, iterators, rangeobj, optional, slicing, tupleobj)
from numba import datamodel
try:
from . import npdatetime
except NotImplementedError:
pass
# Generic (untyped) LLVM pointer: i8*.
GENERIC_POINTER = Type.pointer(Type.int(8))
# Alias used for CPython object pointers.
PYOBJECT = GENERIC_POINTER
# Alias used where a plain void* is meant.
void_ptr = GENERIC_POINTER

# Mapping of simple scalar Numba types to their LLVM representation.
LTYPEMAP = {
    types.pyobject: PYOBJECT,

    # Booleans occupy a full byte in this mapping.
    types.boolean: Type.int(8),

    types.uint8: Type.int(8),
    types.uint16: Type.int(16),
    types.uint32: Type.int(32),
    types.uint64: Type.int(64),

    types.int8: Type.int(8),
    types.int16: Type.int(16),
    types.int32: Type.int(32),
    types.int64: Type.int(64),

    types.float32: Type.float(),
    types.float64: Type.double(),
}

# Numba types represented as composite (struct) values, mapped to the
# structure proxy class that models them.
STRUCT_TYPES = {
    types.complex64: builtins.Complex64,
    types.complex128: builtins.Complex128,
    types.slice3_type: slicing.Slice,
}
class Overloads(object):
    """Registry of the implementations of a single function, together
    with the typing signature each one was registered for."""

    def __init__(self):
        # (signature, implementation) pairs, kept in registration order.
        self.versions = []

    def find(self, sig):
        """Return the implementation matching *sig*.

        For each registered version, an exact signature match is tried
        first, then a generic match of the argument lists.  Raises
        NotImplementedError when no version fits.
        """
        for candidate_sig, candidate_impl in self.versions:
            exact = candidate_sig == sig
            if exact or self._match_arglist(candidate_sig.args, sig.args):
                return candidate_impl
        raise NotImplementedError(self, sig)

    def _match_arglist(self, formal_args, actual_args):
        """True when *formal_args* generically accepts *actual_args*."""
        # A trailing VarArg expands into however many formal slots are
        # needed to line up with the actual argument count.
        if formal_args and isinstance(formal_args[-1], types.VarArg):
            n_expanded = len(actual_args) - len(formal_args) + 1
            formal_args = (formal_args[:-1]
                           + (formal_args[-1].dtype,) * n_expanded)

        if len(formal_args) != len(actual_args):
            return False
        return all(self._match(formal, actual)
                   for formal, actual in zip(formal_args, actual_args))

    def _match(self, formal, actual):
        """True when one formal argument accepts one actual argument
        (falls through to None -- falsy -- otherwise)."""
        if formal == actual:
            # Exact type match.
            return True
        if types.Any == formal:
            # Formal accepts any type.
            return True
        if isinstance(formal, types.Kind) and isinstance(actual, formal.of):
            # Formal is a kind; actual is an instance of that kind.
            return True

    def append(self, impl, sig):
        """Register *impl* as the implementation for signature *sig*."""
        self.versions.append((sig, impl))
@utils.runonce
def _load_global_helpers():
    """
    Execute once to install special symbols into the LLVM symbol table.
    """
    ll.add_symbol("Py_None", id(None))

    # Register each C helper function under a "numba_" prefix.
    for helper_table in (_helperlib.c_helpers, _dynfunc.c_helpers):
        for helper_name in helper_table:
            ll.add_symbol("numba_" + helper_name, helper_table[helper_name])

    # Expose every built-in exception class by its CPython symbol name.
    for obj in utils.builtins.__dict__.values():
        is_exc_class = isinstance(obj, type) and issubclass(obj, BaseException)
        if is_exc_class:
            ll.add_symbol("PyExc_%s" % (obj.__name__), id(obj))
class BaseContext(object):
    """
    Notes on Structure
    ------------------

    Most objects are lowered as plain-old-data structure in the generated
    llvm. They are passed around by reference (a pointer to the structure).
    Only POD structure can live across function boundaries by copying the
    data.
    """
    # True if the target requires strict alignment
    # Causes exception to be raised if the record members are not aligned.
    strict_alignment = False

    # Use default mangler (no specific requirement)
    mangler = None

    # Force powi implementation as math.pow call
    implement_powi_as_math_call = False
    implement_pow_as_math_call = False

    # Bound checking
    enable_boundcheck = False

    # NRT (disabled by default)
    enable_nrt = False

    # PYCC (ahead-of-time compilation) mode
    aot_mode = False

    # Error model for various operations (only FP exceptions currently)
    error_model = None
    def __init__(self, typing_context):
        # Install process-wide helper symbols into LLVM (the helper is
        # decorated with @utils.runonce, so this only runs once).
        _load_global_helpers()

        self.address_size = utils.MACHINE_BITS
        self.typing_context = typing_context

        # Overload registries: function key -> Overloads and
        # attribute name -> Overloads.
        self.defns = defaultdict(Overloads)
        self.attrs = defaultdict(Overloads)
        # Generator type -> (descriptor, implementation), see
        # insert_generator().
        self.generators = {}
        self.special_ops = {}
        self.install_registry(builtin_registry)
        # Cache keyed by internal helper function, see other methods of
        # this class (populated outside this chunk).
        self.cached_internal_func = {}
        self.data_model_manager = datamodel.default_manager

        # Initialize
        self.init()

    def init(self):
        """
        For subclasses to add initializer
        """
        pass

    def get_arg_packer(self, fe_args):
        # Build an ArgPacker for the given front-end argument types.
        return datamodel.ArgPacker(self.data_model_manager, fe_args)

    @property
    def target_data(self):
        # LLVM target data layout; concrete target contexts must override.
        raise NotImplementedError
def subtarget(self, **kws):
obj = copy.copy(self) # shallow copy
for k, v in kws.items():
if not hasattr(obj, k):
raise NameError("unknown option {0!r}".format(k))
setattr(obj, k, v)
return obj
    def install_registry(self, registry):
        """
        Install a *registry* (a imputils.Registry instance) of function
        and attribute implementations.
        """
        self.insert_func_defn(registry.functions)
        self.insert_attr_defn(registry.attributes)

    def insert_func_defn(self, defns):
        # Register each (implementation, [(function key, signature)])
        # entry under its function key.
        for impl, func_sigs in defns:
            for func, sig in func_sigs:
                self.defns[func].append(impl, sig)

    def insert_attr_defn(self, defns):
        # Attribute implementations carry their own attribute name and
        # signature.
        for impl in defns:
            self.attrs[impl.attr].append(impl, impl.signature)

    def insert_user_function(self, func, fndesc, libs=()):
        """Register a user function implementation for *func*."""
        impl = user_function(fndesc, libs)
        self.defns[func].append(impl, impl.signature)

    def add_user_function(self, func, fndesc, libs=()):
        """Add another overload to an already-registered user function.

        Raises KeyError if *func* was never registered via
        insert_user_function().
        """
        if func not in self.defns:
            msg = "{func} is not a registered user function"
            raise KeyError(msg.format(func=func))
        impl = user_function(fndesc, libs)
        self.defns[func].append(impl, impl.signature)

    def insert_generator(self, genty, gendesc, libs=()):
        # Register the descriptor and implementation for a generator type.
        assert isinstance(genty, types.Generator)
        impl = user_generator(gendesc, libs)
        self.generators[genty] = gendesc, impl

    def remove_user_function(self, func):
        """
        Remove user function *func*.
        KeyError is raised if the function isn't known to us.
        """
        del self.defns[func]
    def get_external_function_type(self, fndesc):
        """Build the raw LLVM function type for an external function
        descriptor (no calling-convention wrapping)."""
        argtypes = [self.get_argument_type(aty)
                    for aty in fndesc.argtypes]
        # don't wrap in pointer
        restype = self.get_argument_type(fndesc.restype)
        fnty = Type.function(restype, argtypes)
        return fnty

    def declare_function(self, module, fndesc):
        """Declare (or fetch) in *module* the LLVM function described by
        *fndesc*, decorated for this target's calling convention."""
        fnty = self.call_conv.get_function_type(fndesc.restype, fndesc.argtypes)
        fn = module.get_or_insert_function(fnty, name=fndesc.mangled_name)
        assert fn.is_declaration
        self.call_conv.decorate_function(fn, fndesc.args, fndesc.argtypes)
        if fndesc.inline:
            fn.attributes.add('alwaysinline')
        return fn

    def declare_external_function(self, module, fndesc):
        """Declare in *module* the external function described by
        *fndesc*, using its raw (unwrapped) function type."""
        fnty = self.get_external_function_type(fndesc)
        fn = module.get_or_insert_function(fnty, name=fndesc.mangled_name)
        assert fn.is_declaration
        # Name the LLVM arguments after the source-level argument names.
        for ak, av in zip(fndesc.args, fn.args):
            av.name = "arg.%s" % ak
        return fn
    def insert_const_string(self, mod, string):
        """
        Insert constant *string* (a str object) into module *mod*.
        Returns a generic i8* pointing at the NUL-terminated UTF-8 data.
        """
        stringtype = GENERIC_POINTER
        name = ".const.%s" % string
        text = cgutils.make_bytearray(string.encode("utf-8") + b"\x00")
        gv = self.insert_unique_const(mod, name, text)
        return Constant.bitcast(gv, stringtype)

    def insert_unique_const(self, mod, name, val):
        """
        Insert a unique internal constant named *name*, with LLVM value
        *val*, into module *mod*.  An existing global of the same name is
        reused instead of being redefined.
        """
        gv = mod.get_global(name)
        if gv is not None:
            return gv
        else:
            return cgutils.global_constant(mod, name, val)
    def get_argument_type(self, ty):
        # LLVM type used when a value of *ty* is passed as an argument.
        return self.data_model_manager[ty].get_argument_type()

    def get_return_type(self, ty):
        # LLVM type used when a value of *ty* is returned.
        return self.data_model_manager[ty].get_return_type()

    def get_data_type(self, ty):
        """
        Get a LLVM data representation of the Numba type *ty* that is safe
        for storage. Record data are stored as byte array.

        The return value is a llvmlite.ir.Type object, or None if the type
        is an opaque pointer (???).
        """
        return self.data_model_manager[ty].get_data_type()

    def get_value_type(self, ty):
        # LLVM type used for local (SSA) values of *ty*.
        return self.data_model_manager[ty].get_value_type()

    def pack_value(self, builder, ty, value, ptr, align=None):
        """
        Pack value into the array storage at *ptr*.
        If *align* is given, it is the guaranteed alignment for *ptr*
        (by default, the standard ABI alignment).
        """
        dataval = self.data_model_manager[ty].as_data(builder, value)
        builder.store(dataval, ptr, align=align)

    def unpack_value(self, builder, ty, ptr, align=None):
        """
        Unpack value from the array storage at *ptr*.
        If *align* is given, it is the guaranteed alignment for *ptr*
        (by default, the standard ABI alignment).
        """
        dm = self.data_model_manager[ty]
        return dm.load_from_data_pointer(builder, ptr, align)

    def is_struct_type(self, ty):
        # True when *ty* is modelled as a composite (struct) value.
        return isinstance(self.data_model_manager[ty], datamodel.CompositeModel)
    def get_constant_generic(self, builder, ty, val):
        """
        Return a LLVM constant representing value *val* of Numba type *ty*.
        """
        if self.is_struct_type(ty):
            struct = self.get_constant_struct(builder, ty, val)
            if isinstance(ty, types.Record):
                # Records are handed around as a pointer to their raw data,
                # so materialize the constant in a stack slot.
                ptrty = self.data_model_manager[ty].get_data_type()
                ptr = cgutils.alloca_once(builder, ptrty)
                builder.store(struct, ptr)
                return ptr
            return struct
        elif isinstance(ty, types.ExternalFunctionPointer):
            ptrty = self.get_function_pointer_type(ty)
            ptrval = ty.get_pointer(val)
            return builder.inttoptr(self.get_constant(types.intp, ptrval),
                                    ptrty)
        else:
            return self.get_constant(ty, val)

    def get_constant_struct(self, builder, ty, val):
        """Return a LLVM constant for composite (struct-modelled) *ty*."""
        assert self.is_struct_type(ty)
        if ty in types.complex_domain:
            if ty == types.complex64:
                innertype = types.float32
            elif ty == types.complex128:
                innertype = types.float64
            else:
                raise Exception("unreachable")

            real = self.get_constant(innertype, val.real)
            imag = self.get_constant(innertype, val.imag)
            const = Constant.struct([real, imag])
            return const

        elif isinstance(ty, (types.Tuple, types.NamedTuple)):
            consts = [self.get_constant_generic(builder, ty.types[i], v)
                      for i, v in enumerate(val)]
            return Constant.struct(consts)

        elif isinstance(ty, types.Record):
            # Record data is stored as a plain byte array.
            consts = [self.get_constant(types.int8, b)
                      for b in bytearray(val.tostring())]
            return Constant.array(consts[0].type, consts)

        else:
            raise NotImplementedError("%s as constant unsupported" % ty)

    def get_constant(self, ty, val):
        """Return a LLVM constant for scalar (non-struct) Numba type *ty*."""
        assert not self.is_struct_type(ty)
        lty = self.get_value_type(ty)

        if ty == types.none:
            assert val is None
            return self.get_dummy_value()

        elif ty == types.boolean:
            return Constant.int(Type.int(1), int(val))

        elif ty in types.signed_domain:
            return Constant.int_signextend(lty, val)

        elif ty in types.unsigned_domain:
            return Constant.int(lty, val)

        elif ty in types.real_domain:
            return Constant.real(lty, val)

        elif isinstance(ty, (types.NPDatetime, types.NPTimedelta)):
            # NOTE(review): datetime/timedelta values are converted to
            # int64 yet passed to Constant.real -- looks suspect; confirm
            # against upstream whether Constant.int was intended.
            return Constant.real(lty, val.astype(numpy.int64))

        elif isinstance(ty, (types.UniTuple, types.NamedUniTuple)):
            consts = [self.get_constant(ty.dtype, v) for v in val]
            return Constant.array(consts[0].type, consts)

        raise NotImplementedError("cannot lower constant of type '%s'" % (ty,))

    def get_constant_undef(self, ty):
        # LLVM 'undef' of the value type for *ty*.
        lty = self.get_value_type(ty)
        return Constant.undef(lty)

    def get_constant_null(self, ty):
        # Zero-initialized constant of the value type for *ty*.
        lty = self.get_value_type(ty)
        return Constant.null(lty)
    def get_setattr(self, attr, sig):
        """Return an implementation for setting attribute *attr*, where
        the receiver type is ``sig.args[0]`` and the assigned value type
        is ``sig.args[1]``.

        Only Record receivers are handled here; any other type falls off
        the end (implicitly returning None).
        """
        typ = sig.args[0]
        if isinstance(typ, types.Record):
            self.sentry_record_alignment(typ, attr)
            offset = typ.offset(attr)
            elemty = typ.typeof(attr)

            def imp(context, builder, sig, args):
                valty = sig.args[1]
                [target, val] = args
                # Pointer to the record member's raw storage.
                dptr = cgutils.get_record_member(builder, target, offset,
                                                 self.get_data_type(elemty))
                val = context.cast(builder, val, valty, elemty)
                # Unaligned store when the record layout is packed.
                align = None if typ.aligned else 1
                self.pack_value(builder, elemty, val, dptr, align=align)

            return _wrap_impl(imp, self, sig)
    def get_function(self, fn, sig):
        """
        Return the implementation of function *fn* for signature *sig*.
        The return value is a callable with the signature (builder, args).
        """
        if isinstance(fn, (types.Function)):
            key = fn.template.key

            if isinstance(key, MethodType):
                # Bound method: look up by the underlying function.
                # (im_func is the Python 2 bound-method attribute.)
                overloads = self.defns[key.im_func]
            elif sig.recvr:
                # Fold the receiver type into the signature before lookup.
                sig = typing.signature(sig.return_type,
                                       *((sig.recvr,) + sig.args))
                overloads = self.defns[key]
            else:
                overloads = self.defns[key]

        elif isinstance(fn, types.Dispatcher):
            key = fn.overloaded.get_overload(sig.args)
            overloads = self.defns[key]
        else:
            key = fn
            overloads = self.defns[key]
        try:
            return _wrap_impl(overloads.find(sig), self, sig)
        except NotImplementedError:
            pass
        if isinstance(fn, types.Type):
            # It's a type instance => try to find a definition for the type class
            return self.get_function(type(fn), sig)
        raise NotImplementedError("No definition for lowering %s%s" % (key, sig))
    def get_generator_desc(self, genty):
        """Return the descriptor registered for generator type *genty*."""
        return self.generators[genty][0]

    def get_generator_impl(self, genty):
        """Return the implementation registered for generator type *genty*."""
        return self.generators[genty][1]

    def get_bound_function(self, builder, obj, ty):
        # Default behaviour: the bound function value is the receiver itself.
        return obj

    def get_attribute(self, val, typ, attr):
        """Return an implementation of ``getattr(typ, attr)``, or None
        for Module dummies that are dealt with later.  Raises when no
        implementation can be found.
        """
        if isinstance(typ, types.Record):
            # Implement get attribute for records
            self.sentry_record_alignment(typ, attr)

            offset = typ.offset(attr)
            elemty = typ.typeof(attr)

            if isinstance(elemty, types.NestedArray):
                # Inside a structured type only the array data is stored, so we
                # create an array structure to point to that data.
                aryty = arrayobj.make_array(elemty)

                @impl_attribute(typ, attr, elemty)
                def imp(context, builder, typ, val):
                    ary = aryty(context, builder)
                    dtype = elemty.dtype
                    newshape = [self.get_constant(types.intp, s) for s in
                                elemty.shape]
                    newstrides = [self.get_constant(types.intp, s) for s in
                                  elemty.strides]
                    newdata = cgutils.get_record_member(builder, val, offset,
                                                        self.get_data_type(dtype))
                    arrayobj.populate_array(
                        ary,
                        data=newdata,
                        shape=cgutils.pack_array(builder, newshape),
                        strides=cgutils.pack_array(builder, newstrides),
                        itemsize=context.get_constant(types.intp, elemty.size),
                        meminfo=None,
                        parent=None,
                    )
                    res = ary._getvalue()
                    return impl_ret_borrowed(context, builder, typ, res)
            else:
                @impl_attribute(typ, attr, elemty)
                def imp(context, builder, typ, val):
                    # Load the member through its raw data pointer,
                    # unaligned if the record layout is packed.
                    dptr = cgutils.get_record_member(builder, val, offset,
                                                    context.get_data_type(elemty))
                    align = None if typ.aligned else 1
                    res = self.unpack_value(builder, elemty, dptr, align)
                    return impl_ret_borrowed(context, builder, typ, res)
            return imp

        if isinstance(typ, types.Module):
            # Implement getattr for module-level globals.
            # We are treating them as constants.
            # XXX We shouldn't have to retype this
            attrty = self.typing_context.resolve_module_constants(typ, attr)
            if attrty is not None and not isinstance(attrty, types.Dummy):
                pyval = getattr(typ.pymod, attr)
                llval = self.get_constant(attrty, pyval)

                @impl_attribute(typ, attr, attrty)
                def imp(context, builder, typ, val):
                    return impl_ret_borrowed(context, builder, attrty, llval)
                return imp
            # No implementation required for dummies (functions, modules...),
            # which are dealt with later
            return None

        # Lookup specific attribute implementation for this type
        overloads = self.attrs[attr]
        try:
            return overloads.find(typing.signature(types.Any, typ))
        except NotImplementedError:
            pass
        # Lookup generic getattr implementation for this type
        overloads = self.attrs[None]
        try:
            return overloads.find(typing.signature(types.Any, typ))
        except NotImplementedError:
            raise Exception("No definition for lowering %s.%s" % (typ, attr))
    def get_argument_value(self, builder, ty, val):
        """
        Argument representation to local value representation
        """
        return self.data_model_manager[ty].from_argument(builder, val)

    def get_returned_value(self, builder, ty, val):
        """
        Return value representation to local value representation
        """
        return self.data_model_manager[ty].from_return(builder, val)

    def get_return_value(self, builder, ty, val):
        """
        Local value representation to return type representation
        """
        return self.data_model_manager[ty].as_return(builder, val)

    def get_value_as_argument(self, builder, ty, val):
        """Prepare local value representation as argument type representation
        """
        return self.data_model_manager[ty].as_argument(builder, val)

    def get_value_as_data(self, builder, ty, val):
        # Local value -> storable data representation.
        return self.data_model_manager[ty].as_data(builder, val)

    def get_data_as_value(self, builder, ty, val):
        # Storable data representation -> local value.
        return self.data_model_manager[ty].from_data(builder, val)

    def pair_first(self, builder, val, ty):
        """
        Extract the first element of a heterogeneous pair.
        """
        paircls = self.make_pair(ty.first_type, ty.second_type)
        pair = paircls(self, builder, value=val)
        return pair.first

    def pair_second(self, builder, val, ty):
        """
        Extract the second element of a heterogeneous pair.
        """
        paircls = self.make_pair(ty.first_type, ty.second_type)
        pair = paircls(self, builder, value=val)
        return pair.second
    def cast(self, builder, val, fromty, toty):
        """Cast LLVM value *val* from Numba type *fromty* to *toty* and
        return the converted value.

        Raises NotImplementedError when no conversion is defined.
        """
        if fromty == toty or toty == types.Any or isinstance(toty, types.Kind):
            # Identity cast, or target accepts anything.
            return val

        elif isinstance(fromty, types.Integer) and isinstance(toty, types.Integer):
            # Integer <-> integer: truncate, or sign-/zero-extend.
            if toty.bitwidth == fromty.bitwidth:
                # Just a change of signedness
                return val
            elif toty.bitwidth < fromty.bitwidth:
                # Downcast
                return builder.trunc(val, self.get_value_type(toty))
            elif fromty.signed:
                # Signed upcast
                return builder.sext(val, self.get_value_type(toty))
            else:
                # Unsigned upcast
                return builder.zext(val, self.get_value_type(toty))

        elif fromty in types.real_domain and toty in types.real_domain:
            # Float <-> float precision change.
            lty = self.get_value_type(toty)
            if fromty == types.float32 and toty == types.float64:
                return builder.fpext(val, lty)
            elif fromty == types.float64 and toty == types.float32:
                return builder.fptrunc(val, lty)

        elif fromty in types.real_domain and toty in types.complex_domain:
            # Real -> complex: convert the real part, zero the imaginary.
            if fromty == types.float32:
                if toty == types.complex128:
                    real = self.cast(builder, val, fromty, types.float64)
                else:
                    real = val
            elif fromty == types.float64:
                if toty == types.complex64:
                    real = self.cast(builder, val, fromty, types.float32)
                else:
                    real = val

            if toty == types.complex128:
                imag = self.get_constant(types.float64, 0)
            elif toty == types.complex64:
                imag = self.get_constant(types.float32, 0)
            else:
                raise Exception("unreachable")

            cmplx = self.make_complex(toty)(self, builder)
            cmplx.real = real
            cmplx.imag = imag
            return cmplx._getvalue()

        elif fromty in types.integer_domain and toty in types.real_domain:
            lty = self.get_value_type(toty)
            if fromty in types.signed_domain:
                return builder.sitofp(val, lty)
            else:
                return builder.uitofp(val, lty)

        elif toty in types.integer_domain and fromty in types.real_domain:
            lty = self.get_value_type(toty)
            if toty in types.signed_domain:
                return builder.fptosi(val, lty)
            else:
                return builder.fptoui(val, lty)

        elif fromty in types.integer_domain and toty in types.complex_domain:
            # Integer -> complex: via the complex type's float component.
            cmplxcls, flty = builtins.get_complex_info(toty)
            cmpl = cmplxcls(self, builder)
            cmpl.real = self.cast(builder, val, fromty, flty)
            cmpl.imag = self.get_constant(flty, 0)
            return cmpl._getvalue()

        elif fromty in types.complex_domain and toty in types.complex_domain:
            # Complex -> complex: cast both components.
            srccls, srcty = builtins.get_complex_info(fromty)
            dstcls, dstty = builtins.get_complex_info(toty)

            src = srccls(self, builder, value=val)
            dst = dstcls(self, builder)
            dst.real = self.cast(builder, src.real, srcty, dstty)
            dst.imag = self.cast(builder, src.imag, srcty, dstty)
            return dst._getvalue()

        elif (isinstance(fromty, (types.UniTuple, types.Tuple)) and
              isinstance(toty, (types.UniTuple, types.Tuple)) and
              len(toty) == len(fromty)):
            # Tuple -> tuple of same length: cast element-wise.
            olditems = cgutils.unpack_tuple(builder, val, len(fromty))
            items = [self.cast(builder, i, f, t)
                     for i, f, t in zip(olditems, fromty, toty)]
            return cgutils.make_anonymous_struct(builder, items)

        elif toty == types.boolean:
            return self.is_true(builder, fromty, val)

        elif fromty == types.boolean:
            # first promote to int32
            asint = builder.zext(val, Type.int())
            # then promote to number
            return self.cast(builder, asint, types.int32, toty)

        elif fromty == types.none and isinstance(toty, types.Optional):
            return self.make_optional_none(builder, toty.type)

        elif isinstance(toty, types.Optional):
            casted = self.cast(builder, val, fromty, toty.type)
            return self.make_optional_value(builder, toty.type, casted)

        elif isinstance(fromty, types.Optional):
            # Unwrapping an Optional raises TypeError at runtime when the
            # value is None.
            optty = self.make_optional(fromty)
            optval = optty(self, builder, value=val)
            validbit = cgutils.as_bool_bit(builder, optval.valid)
            with cgutils.if_unlikely(builder, builder.not_(validbit)):
                msg = "expected %s, got None" % (fromty.type,)
                self.call_conv.return_user_exc(builder, TypeError, (msg,))
            return optval.data

        elif (isinstance(fromty, types.Array) and
              isinstance(toty, types.Array)):
            # Type inference should have prevented illegal array casting.
            assert toty.layout == 'A'
            return val

        elif (isinstance(fromty, types.List) and
              isinstance(toty, types.List)):
            # Casting from non-reflected to reflected
            assert fromty.dtype == toty.dtype
            return val

        elif (isinstance(fromty, types.RangeType) and
              isinstance(toty, types.RangeType)):
            # Range structs hold (start, stop, step).
            olditems = cgutils.unpack_tuple(builder, val, 3)
            items = [self.cast(builder, v, fromty.dtype, toty.dtype)
                     for v in olditems]
            return cgutils.make_anonymous_struct(builder, items)

        elif fromty in types.integer_domain and toty == types.voidptr:
            return builder.inttoptr(val, self.get_value_type(toty))

        raise NotImplementedError("cast", val, fromty, toty)
def generic_compare(self, builder, key, argtypes, args):
"""
Compare the given LLVM values of the given Numba types using
the comparison *key* (e.g. '=='). The values are first cast to
a common safe conversion type.
"""
at, bt = argtypes
av, bv = args
ty = self.typing_context.unify_types(at, bt)
cav = self.cast(builder, av, at, ty)
cbv = self.cast(builder, bv, bt, ty)
cmpsig = typing.signature(types.boolean, ty, ty)
cmpfunc = self.get_function(key, cmpsig)
return cmpfunc(builder, (cav, cbv))
def make_optional(self, optionaltype):
return optional.make_optional(optionaltype.type)
def make_optional_none(self, builder, valtype):
optcls = optional.make_optional(valtype)
optval = optcls(self, builder)
optval.valid = cgutils.false_bit
return optval._getvalue()
def make_optional_value(self, builder, valtype, value):
optcls = optional.make_optional(valtype)
optval = optcls(self, builder)
optval.valid = cgutils.true_bit
optval.data = value
return optval._getvalue()
def is_true(self, builder, typ, val):
"""
Return the truth value of a value of the given Numba type.
"""
impl = self.get_function(bool, typing.signature(types.boolean, typ))
return impl(builder, (val,))
def get_c_value(self, builder, typ, name):
"""
Get a global value through its C-accessible *name*, with the given
LLVM type.
"""
module = builder.function.module
try:
gv = module.get_global_variable_named(name)
except LLVMException:
gv = module.add_global_variable(typ, name)
return gv
def call_external_function(self, builder, callee, argtys, args):
args = [self.get_value_as_argument(builder, ty, arg)
for ty, arg in zip(argtys, args)]
retval = builder.call(callee, args)
return retval
def get_function_pointer_type(self, typ):
return self.data_model_manager[typ].get_data_type()
def call_function_pointer(self, builder, funcptr, args, cconv=None):
return builder.call(funcptr, args, cconv=cconv)
def print_string(self, builder, text):
mod = builder.basic_block.function.module
cstring = GENERIC_POINTER
fnty = Type.function(Type.int(), [cstring])
puts = mod.get_or_insert_function(fnty, "puts")
return builder.call(puts, [text])
def debug_print(self, builder, text):
mod = builder.module
cstr = self.insert_const_string(mod, str(text))
self.print_string(builder, cstr)
def get_struct_type(self, struct):
"""
Get the LLVM struct type for the given Structure class *struct*.
"""
fields = [self.get_value_type(v) for _, v in struct._fields]
return Type.struct(fields)
def get_dummy_value(self):
return Constant.null(self.get_dummy_type())
    def get_dummy_type(self):
        # Opaque/phantom values are represented as a generic pointer.
        return GENERIC_POINTER
    def compile_only_no_cache(self, builder, impl, sig, locals={}):
        """Invoke the compiler to compile a function to be used inside a
        nopython function, but without generating code to call that
        function.

        *impl* is compiled for signature *sig*; the CompileResult is
        returned and its library is linked in so callers may inline it.
        Note: the mutable default for *locals* is never mutated here,
        only forwarded to the compiler.
        """
        # Compile
        from numba import compiler
        codegen = self.codegen()
        library = codegen.create_library(impl.__name__)
        flags = compiler.Flags()
        # The function is only ever called from jitted code, so skip
        # standalone compilation and the CPython wrapper.
        flags.set('no_compile')
        flags.set('no_cpython_wrapper')
        cres = compiler.compile_internal(self.typing_context, self,
                                         library,
                                         impl, sig.args,
                                         sig.return_type, flags,
                                         locals=locals)
        # Allow inlining the function inside callers.
        codegen.add_linking_library(cres.library)
        return cres
    def compile_subroutine(self, builder, impl, sig, locals={}):
        """
        Compile the function *impl* for the given *sig* (in nopython mode).
        Return a placeholder object that's callable from another Numba
        function.

        Results are memoized on (code object, signature) -- plus closure
        cell values, if any -- so repeated requests reuse the already
        compiled function.
        """
        cache_key = (impl.__code__, sig)
        if impl.__closure__:
            # XXX This obviously won't work if a cell's value is
            # unhashable.
            cache_key += tuple(c.cell_contents for c in impl.__closure__)
        ty = self.cached_internal_func.get(cache_key)
        if ty is None:
            # Cache miss: compile and remember the resulting function type.
            cres = self.compile_only_no_cache(builder, impl, sig,
                                              locals=locals)
            ty = types.NumbaFunction(cres.fndesc, sig)
            self.cached_internal_func[cache_key] = ty
        return ty
def compile_internal(self, builder, impl, sig, args, locals={}):
"""
Like compile_subroutine(), but also call the function with the given
*args*.
"""
ty = self.compile_subroutine(builder, impl, sig, locals)
return self.call_internal(builder, ty.fndesc, sig, args)
def call_internal(self, builder, fndesc, sig, args):
"""Given the function descriptor of an internally compiled function,
emit a call to that function with the given arguments.
"""
# Add call to the generated function
llvm_mod = builder.module
fn = self.declare_function(llvm_mod, fndesc)
status, res = self.call_conv.call_function(builder, fn, sig.return_type,
sig.args, args)
with cgutils.if_unlikely(builder, status.is_error):
self.call_conv.return_status_propagate(builder, status)
return res
    def get_executable(self, func, fndesc):
        # Abstract: concrete target contexts must provide this.
        raise NotImplementedError
def get_python_api(self, builder):
return PythonAPI(self, builder)
def sentry_record_alignment(self, rectyp, attr):
"""
Assumes offset starts from a properly aligned location
"""
if self.strict_alignment:
offset = rectyp.offset(attr)
elemty = rectyp.typeof(attr)
align = self.get_abi_alignment(self.get_data_type(elemty))
if offset % align:
msg = "{rec}.{attr} of type {type} is not aligned".format(
rec=rectyp, attr=attr, type=elemty)
raise TypeError(msg)
def make_array(self, typ):
return arrayobj.make_array(typ)
def populate_array(self, arr, **kwargs):
"""
Populate array structure.
"""
return arrayobj.populate_array(arr, **kwargs)
def make_complex(self, typ):
cls, _ = builtins.get_complex_info(typ)
return cls
def make_pair(self, first_type, second_type):
"""
Create a heterogenous pair class parametered for the given types.
"""
return builtins.make_pair(first_type, second_type)
def make_tuple(self, builder, typ, values):
"""
Create a tuple of the given *typ* containing the *values*.
"""
tup = self.get_constant_undef(typ)
for i, val in enumerate(values):
tup = builder.insert_value(tup, val, i)
return tup
    def make_constant_array(self, builder, typ, ary):
        """
        Embed the numpy array *ary* as an LLVM global constant and return
        an array structure value of Numba array type *typ* pointing at it.
        """
        assert typ.layout == 'C' # assumed in typeinfer.py
        ary = numpy.ascontiguousarray(ary)
        flat = ary.flatten()
        # Handle data
        if self.is_struct_type(typ.dtype):
            values = [self.get_constant_struct(builder, typ.dtype, flat[i])
                      for i in range(flat.size)]
        else:
            values = [self.get_constant(typ.dtype, flat[i])
                      for i in range(flat.size)]
        lldtype = values[0].type
        consts = Constant.array(lldtype, values)
        data = cgutils.global_constant(builder, ".const.array.data", consts)
        # Handle shape
        llintp = self.get_value_type(types.intp)
        shapevals = [self.get_constant(types.intp, s) for s in ary.shape]
        cshape = Constant.array(llintp, shapevals)
        # Handle strides
        stridevals = [self.get_constant(types.intp, s) for s in ary.strides]
        cstrides = Constant.array(llintp, stridevals)
        # Create array structure
        cary = self.make_array(typ)(self, builder)
        # Record the Python object's address as the parent pointer.
        # NOTE(review): this relies on *ary* staying alive (and CPython's
        # id() being its address) for the lifetime of the generated code.
        rt_addr = self.get_constant(types.uintp, id(ary)).inttoptr(
            self.get_value_type(types.pyobject))
        intp_itemsize = self.get_constant(types.intp, ary.dtype.itemsize)
        self.populate_array(cary,
                            data=builder.bitcast(data, cary.data.type),
                            shape=cshape,
                            strides=cstrides,
                            itemsize=intp_itemsize,
                            parent=rt_addr,
                            meminfo=None)
        return cary._getvalue()
    def get_abi_sizeof(self, ty):
        """
        Get the ABI size of LLVM type *ty*.
        """
        if isinstance(ty, llvmir.Type):
            return ty.get_abi_size(self.target_data)
        # Fallback path for non-llvmlite type objects — presumably
        # old-style (llvmpy) types; confirm before removing.
        # XXX this one unused?
        return self.target_data.get_abi_size(ty)
def get_abi_alignment(self, ty):
"""
Get the ABI alignment of LLVM type *ty*.
"""
assert isinstance(ty, llvmir.Type), "Expected LLVM type"
return ty.get_abi_alignment(self.target_data)
    def post_lowering(self, mod, library):
        """Run target specific post-lowering transformation here.
        """
        # Default: no transformation; target subclasses may override.
def create_module(self, name):
"""Create a LLVM module
"""
return lc.Module.new(name)
def nrt_meminfo_alloc(self, builder, size):
"""
Allocate a new MemInfo with a data payload of `size` bytes.
A pointer to the MemInfo is returned.
"""
if not self.enable_nrt:
raise Exception("Require NRT")
mod = builder.module
fnty = llvmir.FunctionType(void_ptr,
[self.get_value_type(types.intp)])
fn = mod.get_or_insert_function(fnty, name="NRT_MemInfo_alloc_safe")
fn.return_value.add_attribute("noalias")
return builder.call(fn, [size])
def nrt_meminfo_alloc_aligned(self, builder, size, align):
"""
Allocate a new MemInfo with an aligned data payload of `size` bytes.
The data pointer is aligned to `align` bytes. `align` can be either
a Python int or a LLVM uint32 value.
A pointer to the MemInfo is returned.
"""
if not self.enable_nrt:
raise Exception("Require NRT")
mod = builder.module
intp = self.get_value_type(types.intp)
u32 = self.get_value_type(types.uint32)
fnty = llvmir.FunctionType(void_ptr, [intp, u32])
fn = mod.get_or_insert_function(fnty,
name="NRT_MemInfo_alloc_safe_aligned")
fn.return_value.add_attribute("noalias")
if isinstance(align, int):
align = self.get_constant(types.uint32, align)
else:
assert align.type == u32, "align must be a uint32"
return builder.call(fn, [size, align])
def nrt_meminfo_varsize_alloc(self, builder, size):
"""
Allocate a MemInfo pointing to a variable-sized data area. The area
is separately allocated (i.e. two allocations are made) so that
re-allocating it doesn't change the MemInfo's address.
A pointer to the MemInfo is returned.
"""
if not self.enable_nrt:
raise Exception("Require NRT")
mod = builder.module
fnty = llvmir.FunctionType(void_ptr,
[self.get_value_type(types.intp)])
fn = mod.get_or_insert_function(fnty, name="NRT_MemInfo_varsize_alloc")
fn.return_value.add_attribute("noalias")
return builder.call(fn, [size])
def nrt_meminfo_varsize_realloc(self, builder, meminfo, size):
"""
Reallocate a data area allocated by nrt_meminfo_varsize_alloc().
The new data pointer is returned, for convenience.
"""
if not self.enable_nrt:
raise Exception("Require NRT")
mod = builder.module
fnty = llvmir.FunctionType(void_ptr,
[void_ptr, self.get_value_type(types.intp)])
fn = mod.get_or_insert_function(fnty, name="NRT_MemInfo_varsize_realloc")
fn.return_value.add_attribute("noalias")
return builder.call(fn, [meminfo, size])
def nrt_meminfo_data(self, builder, meminfo):
"""
Given a MemInfo pointer, return a pointer to the allocated data
managed by it. This works for MemInfos allocated with all the
above methods.
"""
if not self.enable_nrt:
raise Exception("Require NRT")
from numba.runtime.atomicops import meminfo_data_ty
mod = builder.module
fn = mod.get_or_insert_function(meminfo_data_ty, name="NRT_MemInfo_data")
return builder.call(fn, [meminfo])
    def _call_nrt_incref_decref(self, builder, root_type, typ, value, funcname):
        """
        Emit a call to *funcname* ("NRT_incref" or "NRT_decref") on the
        MemInfo of *value*, after recursing into its data-model members.
        *root_type* is only used for error reporting.
        """
        if not self.enable_nrt:
            raise Exception("Require NRT")
        from numba.runtime.atomicops import incref_decref_ty
        data_model = self.data_model_manager[typ]
        # Recurse into members first so nested meminfos are handled.
        members = data_model.traverse(builder, value)
        for mt, mv in members:
            self._call_nrt_incref_decref(builder, root_type, mt, mv, funcname)
        try:
            meminfo = data_model.get_nrt_meminfo(builder, value)
        except NotImplementedError as e:
            # Re-raise with the root type for a more useful message.
            raise NotImplementedError("%s: %s" % (root_type, str(e)))
        if meminfo:
            mod = builder.module
            fn = mod.get_or_insert_function(incref_decref_ty, name=funcname)
            # XXX "nonnull" causes a crash in test_dyn_array: can this
            # function be called with a NULL pointer?
            fn.args[0].add_attribute("noalias")
            fn.args[0].add_attribute("nocapture")
            builder.call(fn, [meminfo])
    def nrt_incref(self, builder, typ, value):
        """
        Recursively incref the given *value* and its members.
        """
        # root_type == typ at the top of the recursion.
        self._call_nrt_incref_decref(builder, typ, typ, value, "NRT_incref")
    def nrt_decref(self, builder, typ, value):
        """
        Recursively decref the given *value* and its members.
        """
        # root_type == typ at the top of the recursion.
        self._call_nrt_incref_decref(builder, typ, typ, value, "NRT_decref")
class _wrap_impl(object):
    """
    Adapt an implementation function taking (context, builder, sig, args)
    into a callable taking (builder, args), with the context and
    signature captured at construction time.  Unknown attribute access
    is forwarded to the wrapped function.
    """

    def __init__(self, imp, context, sig):
        self._imp = imp
        self._context = context
        self._sig = sig

    def __call__(self, builder, args):
        impl_args = (self._context, builder, self._sig, args)
        return self._imp(*impl_args)

    def __getattr__(self, item):
        # Forward anything we don't define to the wrapped function.
        return getattr(self._imp, item)

    def __repr__(self):
        return "<wrapped %s>" % (self._imp,)
| {
"content_hash": "f789087f91929ae454bb6d9814d35b1c",
"timestamp": "",
"source": "github",
"line_count": 1137,
"max_line_length": 85,
"avg_line_length": 36.666666666666664,
"alnum_prop": 0.5814343967378268,
"repo_name": "ssarangi/numba",
"id": "a5d9c8e9dc4b185d9ac2c78a887e1517ea161b2d",
"size": "41690",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "numba/targets/base.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "2212"
},
{
"name": "C",
"bytes": "230630"
},
{
"name": "C++",
"bytes": "18847"
},
{
"name": "Cuda",
"bytes": "214"
},
{
"name": "HTML",
"bytes": "98846"
},
{
"name": "PowerShell",
"bytes": "3153"
},
{
"name": "Python",
"bytes": "3191202"
},
{
"name": "Shell",
"bytes": "120"
}
],
"symlink_target": ""
} |
"""
modified from seq2seq.py in tensorflow
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from six.moves import xrange # pylint: disable=redefined-builtin
from six.moves import zip # pylint: disable=redefined-builtin
import tensorflow as tf
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import embedding_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import rnn
from tensorflow.python.ops import rnn_cell
from tensorflow.python.ops import variable_scope
def sentence_embedding_rnn(_encoder_inputs, vocab_size, cell,
    embedding_size, mask=None, dtype=dtypes.float32, scope=None, reuse_scop=None):
  """Embed token inputs and run an RNN over one or more sentence segments.

  Args:
    _encoder_inputs: list of input tensors (token ids), one per timestep.
    vocab_size: size of the embedding vocabulary.
    cell: the base RNN cell to wrap with an embedding layer.
    embedding_size: dimensionality of the learned embeddings.
    mask: optional list of end indices; when given, `_encoder_inputs` is
      split into one segment per mask entry (segment i ends at timestep
      mask[i], inclusive).
    dtype: dtype for the RNN state.
    scope: unused (kept for signature compatibility).
    reuse_scop: passed as `reuse` to the variable scope.

  Returns:
    List with the final RNN state of each segment; each segment is run
    with the previous segment's final state as its initial state.

  Fixes over the original version:
    * `encoder_cell` was never defined (its construction was commented
      out), causing a NameError -- the EmbeddingWrapper is now built.
    * `encoder_state == []` was always False (it is initialized to
      None), so the first-segment branch never ran -- now `is None`.
    * the previous state was passed as the *third positional* argument
      of `rnn.dynamic_rnn`, which is `sequence_length`, not
      `initial_state` -- now passed by keyword.
  """
  with variable_scope.variable_scope("embedding_rnn", reuse=reuse_scop):
    # Wrap the cell so integer token ids are embedded before the RNN.
    encoder_cell = rnn_cell.EmbeddingWrapper(
        cell, embedding_classes=vocab_size,
        embedding_size=embedding_size)
    # Divide encoder_inputs by the given input mask.
    if mask is not None:
      encoder_inputs = [[] for _ in mask]
      _mask = 0
      for num in range(len(_encoder_inputs)):
        encoder_inputs[_mask].append(_encoder_inputs[num])
        if num == mask[_mask]:
          _mask += 1
    else:
      # No mask: treat the whole input as a single segment.
      encoder_inputs = [_encoder_inputs]
    encoder_state = None
    encoder_states = []
    for encoder_input in encoder_inputs:
      if encoder_state is None:
        _, encoder_state = rnn.dynamic_rnn(encoder_cell, encoder_input,
                                           dtype=dtype)
      else:
        # Chain segments: start from the previous segment's final state.
        _, encoder_state = rnn.dynamic_rnn(encoder_cell, encoder_input,
                                           initial_state=encoder_state,
                                           dtype=dtype)
      encoder_states.append(encoder_state)
    return encoder_states
# def def_feedforward_nn(input_size, l1_size, l2_size):
# with tf.variable_scope("episodic"):
# l1_weights = tf.get_variable("l1_weights", [input_size, l1_size])
# l1_biases = tf.get_variable("l1_biases", [l1_size])
# l2_weights = tf.get_variable("l2_weights", [l1_size, l2_size])
# l2_biases = tf.get_variable("l2_biases", [l2_size])
#def feedforward_nn(l1_input, input_size, l1_size, l2_size):
# with tf.variable_scope("episodic"):
# l1_weights = tf.get_variable("l1_weights", [input_size, l1_size])
# l1_biases = tf.get_variable("l1_biases", [l1_size])
# l2_weights = tf.get_variable("l2_weights", [l1_size, l2_size])
# l2_biases = tf.get_variable("l2_biases", [l2_size])
# l2_input = tf.tanh(tf.matmul(l1_input , l1_weights) + l1_biases)
# gate_prediction = tf.matmul(l2_input , l2_weights) + l2_biases
# return gate_prediction
| {
"content_hash": "8c184c29bc4ab777be2ebc25d35e56c3",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 95,
"avg_line_length": 36.45205479452055,
"alnum_prop": 0.7147688838782412,
"repo_name": "sufengniu/DMN-tensorflow",
"id": "e53af917b028e534f97f645b44ab9a31ffebb1c8",
"size": "2661",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "models/seq2seq.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "54459"
},
{
"name": "Shell",
"bytes": "320"
}
],
"symlink_target": ""
} |
from opentelemetry.propagators.aws.aws_xray_propagator import AwsXRayPropagator
# Public API of this package: re-export only the X-Ray propagator.
__all__ = ["AwsXRayPropagator"]
| {
"content_hash": "4c22da53a58e4d5a58488d0ec9345124",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 79,
"avg_line_length": 37.666666666666664,
"alnum_prop": 0.8053097345132744,
"repo_name": "open-telemetry/opentelemetry-python-contrib",
"id": "f28f1c8b1527a25e2a827ea0727d262b2c918fb0",
"size": "698",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "propagator/opentelemetry-propagator-aws-xray/src/opentelemetry/propagators/aws/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "211"
},
{
"name": "HTML",
"bytes": "118"
},
{
"name": "Python",
"bytes": "1960979"
},
{
"name": "Shell",
"bytes": "7256"
}
],
"symlink_target": ""
} |
from collections import deque
class ValuesXY:
    """A pair of bounded FIFO buffers holding matched x/y samples.

    Once *buffersize* samples have been stored, appending a new sample
    silently evicts the oldest one (deque ``maxlen`` semantics).
    """

    def __init__(self, buffersize):
        # Bounded deques: oldest samples fall off automatically.
        self.x = deque(maxlen=buffersize)
        self.y = deque(maxlen=buffersize)

    def append(self, x, y):
        """Store one (x, y) sample, evicting the oldest pair if full."""
        self.x.append(x)
        self.y.append(y)
| {
"content_hash": "812a6b19a5644aa813d688936bce22d0",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 41,
"avg_line_length": 24.3,
"alnum_prop": 0.6213991769547325,
"repo_name": "Overdrivr/DistantIO",
"id": "1721161c45c759a3b2dd36cb6d9f3d58bc45ab0a",
"size": "243",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "distantio/Utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "18262"
},
{
"name": "Python",
"bytes": "68238"
}
],
"symlink_target": ""
} |
"""Copyright 2008 Orbitz WorldWide
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License."""
# Django settings for graphite project.
# DO NOT MODIFY THIS FILE DIRECTLY - use local_settings.py instead
from __future__ import print_function
import os
import sys
from os.path import abspath, dirname, join
from warnings import warn
from importlib import import_module
from django import VERSION as DJANGO_VERSION
try:
from django.urls import reverse_lazy
except ImportError: # Django < 1.10
from django.core.urlresolvers import reverse_lazy
# Default values below; local_settings.py (imported further down) overrides them.
# Flag flipped by graphite.app_settings when the Django app settings load.
GRAPHITE_WEB_APP_SETTINGS_LOADED = False
WEBAPP_VERSION = '1.2.0-dev'
DEBUG = False
JAVASCRIPT_DEBUG = False
DATE_FORMAT = '%m/%d'
# Filesystem layout
WEB_DIR = dirname( abspath(__file__) )
WEBAPP_DIR = dirname(WEB_DIR)
GRAPHITE_ROOT = dirname(WEBAPP_DIR)
# Initialize additional path variables
# Defaults for these are set after local_settings is imported
STATIC_ROOT = ''
STATIC_URL = '/static/'
URL_PREFIX = ''
CONF_DIR = ''
DASHBOARD_CONF = ''
GRAPHTEMPLATES_CONF = ''
STORAGE_DIR = ''
WHITELIST_FILE = ''
INDEX_FILE = ''
LOG_DIR = ''
CERES_DIR = ''
WHISPER_DIR = ''
RRD_DIR = ''
STANDARD_DIRS = []
# Timeout settings
FIND_TIMEOUT = None # default 3.0 see below
FETCH_TIMEOUT = None # default 6.0 see below
# Cluster settings
CLUSTER_SERVERS = []
# Worker Pool
USE_WORKER_POOL = True
POOL_MAX_WORKERS = 10
# This settings control whether https is used to communicate between cluster members
INTRACLUSTER_HTTPS = False
REMOTE_FIND_TIMEOUT = None # Replaced by FIND_TIMEOUT
REMOTE_FETCH_TIMEOUT = None # Replaced by FETCH_TIMEOUT
REMOTE_RETRY_DELAY = 60.0
REMOTE_EXCLUDE_LOCAL = False
STORE_FAIL_ON_ERROR = False
REMOTE_STORE_MERGE_RESULTS = True
REMOTE_STORE_FORWARD_HEADERS = []
REMOTE_STORE_USE_POST = False
REMOTE_BUFFER_SIZE = 1024 * 1024 # Set to 0 to prevent streaming deserialization
# Carbonlink settings
CARBON_METRIC_PREFIX='carbon'
CARBONLINK_HOSTS = ["127.0.0.1:7002"]
CARBONLINK_TIMEOUT = 1.0
CARBONLINK_HASHING_KEYFUNC = None
CARBONLINK_HASHING_TYPE = 'carbon_ch'
CARBONLINK_RETRY_DELAY = 15
REPLICATION_FACTOR = 1
# Cache settings.
MEMCACHE_HOSTS = []
MEMCACHE_KEY_PREFIX = ''
MEMCACHE_OPTIONS = {}
CACHES={}
FIND_CACHE_DURATION = 300
FIND_TOLERANCE = 2 * FIND_CACHE_DURATION
DEFAULT_CACHE_DURATION = 60 #metric data and graphs are cached for one minute by default
DEFAULT_CACHE_POLICY = []
# this setting controls the default xFilesFactor used for query-time aggregration
DEFAULT_XFILES_FACTOR = 0
# Logging settings.
# These can also be configured using:
# https://docs.djangoproject.com/en/1.11/topics/logging/
LOG_RENDERING_PERFORMANCE = False
LOG_CACHE_PERFORMANCE = False
LOG_ROTATION = True
LOG_ROTATION_COUNT = 1
LOG_FILE_INFO = 'info.log'
LOG_FILE_EXCEPTION = 'exception.log'
LOG_FILE_CACHE = 'cache.log'
LOG_FILE_RENDERING = 'rendering.log'
MAX_FETCH_RETRIES = 2
# This settings limit metrics find to prevent from too large query
METRICS_FIND_WARNING_THRESHOLD = float('Inf') # Print a warning if more than X metrics are returned
METRICS_FIND_FAILURE_THRESHOLD = float('Inf') # Fail if more than X metrics are returned
#Remote rendering settings
REMOTE_RENDERING = False #if True, rendering is delegated to RENDERING_HOSTS
RENDERING_HOSTS = []
REMOTE_RENDER_CONNECT_TIMEOUT = 1.0
#Miscellaneous settings
DOCUMENTATION_VERSION = 'latest' if 'dev' in WEBAPP_VERSION else WEBAPP_VERSION
DOCUMENTATION_URL = 'https://graphite.readthedocs.io/en/{}/'.format(DOCUMENTATION_VERSION)
ALLOW_ANONYMOUS_CLI = True
LEGEND_MAX_ITEMS = 10
RRD_CF = 'AVERAGE'
STORAGE_FINDERS = (
    'graphite.finders.remote.RemoteFinder',
    'graphite.finders.standard.StandardFinder',
)
# TagDB settings
TAGDB = 'graphite.tags.localdatabase.LocalDatabaseTagDB'
TAGDB_CACHE_DURATION = 60
TAGDB_AUTOCOMPLETE_LIMIT = 100
TAGDB_REDIS_HOST = 'localhost'
TAGDB_REDIS_PORT = 6379
TAGDB_REDIS_DB = 0
TAGDB_REDIS_PASSWORD = ''
TAGDB_HTTP_URL = ''
TAGDB_HTTP_USER = ''
TAGDB_HTTP_PASSWORD = ''
TAGDB_HTTP_AUTOCOMPLETE = False
# Function plugins
FUNCTION_PLUGINS = []
MIDDLEWARE = ()
if DJANGO_VERSION < (1, 10):
    # Django < 1.10 uses the legacy MIDDLEWARE_CLASSES name.
    MIDDLEWARE_CLASSES = MIDDLEWARE
MAX_TAG_LENGTH = 50
AUTO_REFRESH_INTERVAL = 60
#Authentication settings
USE_LDAP_AUTH = False
LDAP_SERVER = "" # "ldapserver.mydomain.com"
LDAP_PORT = 389
LDAP_USE_TLS = False
LDAP_SEARCH_BASE = "" # "OU=users,DC=mydomain,DC=com"
LDAP_BASE_USER = "" # "CN=some_readonly_account,DC=mydomain,DC=com"
LDAP_BASE_PASS = "" # "my_password"
LDAP_USER_QUERY = "" # "(username=%s)" For Active Directory use "(sAMAccountName=%s)"
LDAP_URI = None
LDAP_USER_DN_TEMPLATE = None
#Set this to True to delegate authentication to the web server
USE_REMOTE_USER_AUTHENTICATION = False
REMOTE_USER_BACKEND = "" # Provide an alternate or subclassed backend
REMOTE_USER_MIDDLEWARE = "" # Provide an alternate or subclassed middleware
AUTHENTICATION_BACKENDS=[]
# Django 1.5 requires this so we set a default but warn the user
SECRET_KEY = 'UNSAFE_DEFAULT'
# Input validation
# - When False we still validate the received input parameters, but if validation
# detects an issue it only logs an error and doesn't directly reject the request
# - When True we reject requests of which the input validation detected an issue with the
# provided arguments and return an error message to the user
ENFORCE_INPUT_VALIDATION = False
# Django 1.5 requires this to be set. Here we default to prior behavior and allow all
ALLOWED_HOSTS = [ '*' ]
# Override to link a different URL for login (e.g. for django_openid_auth)
LOGIN_URL = reverse_lazy('account_login')
# Set the default timezone to UTC
TIME_ZONE = 'UTC'
# Set to True to require authentication to save or delete dashboards
DASHBOARD_REQUIRE_AUTHENTICATION = False
# Require Django change/delete permissions to save or delete dashboards.
# NOTE: Requires DASHBOARD_REQUIRE_AUTHENTICATION to be set
DASHBOARD_REQUIRE_PERMISSIONS = False
# Name of a group to which the user must belong to save or delete dashboards. Alternative to
# DASHBOARD_REQUIRE_PERMISSIONS, particularly useful when using only LDAP (without Admin app)
# NOTE: Requires DASHBOARD_REQUIRE_AUTHENTICATION to be set
DASHBOARD_REQUIRE_EDIT_GROUP = None
DATABASES = None
# If using rrdcached, set to the address or socket of the daemon
FLUSHRRDCACHED = ''
## Load our local_settings
SETTINGS_MODULE = os.environ.get('GRAPHITE_SETTINGS_MODULE', 'graphite.local_settings')
try:
    globals().update(import_module(SETTINGS_MODULE).__dict__)
except ImportError:
    # NOTE(review): this also swallows ImportErrors raised *inside* the
    # settings module itself -- confirm that is intended.
    print("Could not import {0}, using defaults!".format(SETTINGS_MODULE), file=sys.stderr)
## Load Django settings if they werent picked up in local_settings
if not GRAPHITE_WEB_APP_SETTINGS_LOADED:
    from graphite.app_settings import * # noqa
STATICFILES_DIRS = (
    join(WEBAPP_DIR, 'content'),
)
# Handle renamed timeout settings
FIND_TIMEOUT = FIND_TIMEOUT or REMOTE_FIND_TIMEOUT or 3.0
FETCH_TIMEOUT = FETCH_TIMEOUT or REMOTE_FETCH_TIMEOUT or 6.0
## Set config dependent on flags set in local_settings
# Path configuration
if not STATIC_ROOT:
    STATIC_ROOT = join(GRAPHITE_ROOT, 'static')
if not CONF_DIR:
    CONF_DIR = os.environ.get('GRAPHITE_CONF_DIR', join(GRAPHITE_ROOT, 'conf'))
if not DASHBOARD_CONF:
    DASHBOARD_CONF = join(CONF_DIR, 'dashboard.conf')
if not GRAPHTEMPLATES_CONF:
    GRAPHTEMPLATES_CONF = join(CONF_DIR, 'graphTemplates.conf')
if not STORAGE_DIR:
    STORAGE_DIR = os.environ.get('GRAPHITE_STORAGE_DIR', join(GRAPHITE_ROOT, 'storage'))
if not WHITELIST_FILE:
    WHITELIST_FILE = join(STORAGE_DIR, 'lists', 'whitelist')
if not INDEX_FILE:
    INDEX_FILE = join(STORAGE_DIR, 'index')
if not LOG_DIR:
    LOG_DIR = join(STORAGE_DIR, 'log', 'webapp')
if not WHISPER_DIR:
    WHISPER_DIR = join(STORAGE_DIR, 'whisper/')
if not CERES_DIR:
    CERES_DIR = join(STORAGE_DIR, 'ceres/')
if not RRD_DIR:
    RRD_DIR = join(STORAGE_DIR, 'rrd/')
# Detect available storage backends by importability + directory presence.
if not STANDARD_DIRS:
    try:
        import whisper # noqa
        if os.path.exists(WHISPER_DIR):
            STANDARD_DIRS.append(WHISPER_DIR)
    except ImportError:
        print("WARNING: whisper module could not be loaded, whisper support disabled", file=sys.stderr)
    try:
        import ceres # noqa
        if os.path.exists(CERES_DIR):
            STANDARD_DIRS.append(CERES_DIR)
    except ImportError:
        pass
    try:
        import rrdtool # noqa
        if os.path.exists(RRD_DIR):
            STANDARD_DIRS.append(RRD_DIR)
    except ImportError:
        pass
# Default database: sqlite file under STORAGE_DIR (local_settings may override).
if DATABASES is None:
    DATABASES = {
        'default': {
            'NAME': join(STORAGE_DIR, 'graphite.db'),
            'ENGINE': 'django.db.backends.sqlite3',
            'USER': '',
            'PASSWORD': '',
            'HOST': '',
            'PORT': '',
        },
    }
# Handle URL prefix in static files handling
if URL_PREFIX and not STATIC_URL.startswith(URL_PREFIX):
    STATIC_URL = '/{0}{1}'.format(URL_PREFIX.strip('/'), STATIC_URL)
# Default sqlite db file
# This is set here so that a user-set STORAGE_DIR is available
if 'sqlite3' in DATABASES.get('default',{}).get('ENGINE','') \
        and not DATABASES.get('default',{}).get('NAME'):
    DATABASES['default']['NAME'] = join(STORAGE_DIR, 'graphite.db')
# Caching shortcuts
if MEMCACHE_HOSTS:
    CACHES['default'] = {
        'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
        'LOCATION': MEMCACHE_HOSTS,
        'TIMEOUT': DEFAULT_CACHE_DURATION,
        'KEY_PREFIX': MEMCACHE_KEY_PREFIX,
        'OPTIONS': MEMCACHE_OPTIONS,
    }
# Fall back to a no-op cache when nothing else was configured.
if not CACHES:
    CACHES = {
        'default': {
            'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
        },
    }
# Authentication shortcuts
if USE_LDAP_AUTH and LDAP_URI is None:
    LDAP_URI = "ldap://%s:%d/" % (LDAP_SERVER, LDAP_PORT)
if USE_REMOTE_USER_AUTHENTICATION or REMOTE_USER_BACKEND:
    if REMOTE_USER_MIDDLEWARE:
        MIDDLEWARE += (REMOTE_USER_MIDDLEWARE,)
    else:
        MIDDLEWARE += ('django.contrib.auth.middleware.RemoteUserMiddleware',)
    if DJANGO_VERSION < (1, 10):
        MIDDLEWARE_CLASSES = MIDDLEWARE
    if REMOTE_USER_BACKEND:
        AUTHENTICATION_BACKENDS.insert(0,REMOTE_USER_BACKEND)
    else:
        AUTHENTICATION_BACKENDS.insert(0,'django.contrib.auth.backends.RemoteUserBackend')
if USE_LDAP_AUTH:
    AUTHENTICATION_BACKENDS.insert(0,'graphite.account.ldapBackend.LDAPBackend')
if SECRET_KEY == 'UNSAFE_DEFAULT':
    warn('SECRET_KEY is set to an unsafe default. This should be set in local_settings.py for better security')
USE_TZ = True
| {
"content_hash": "ecf791e01e523faeff37f822d0043baa",
"timestamp": "",
"source": "github",
"line_count": 343,
"max_line_length": 109,
"avg_line_length": 31.411078717201168,
"alnum_prop": 0.7299053276406163,
"repo_name": "obfuscurity/graphite-web",
"id": "1c5ba338d785d0d04338a3598d57822ab6d396b4",
"size": "10774",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "webapp/graphite/settings.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "150191"
},
{
"name": "HTML",
"bytes": "21474"
},
{
"name": "JavaScript",
"bytes": "1691086"
},
{
"name": "Perl",
"bytes": "857"
},
{
"name": "Python",
"bytes": "1261981"
},
{
"name": "Ruby",
"bytes": "1950"
},
{
"name": "Shell",
"bytes": "1113"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial migration: creates the Location table."""

    # First migration of the app: no prior dependencies.
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Location',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=300)),
                ('description', models.TextField(null=True, blank=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
            ],
        ),
    ]
| {
"content_hash": "30214a19119a8a31f11d3f2e3ff5f079",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 114,
"avg_line_length": 28.952380952380953,
"alnum_prop": 0.5740131578947368,
"repo_name": "mjt145/coffeedapp",
"id": "54194f3d07d1c1f3e7bc717f85223ca1061248ea",
"size": "632",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "core/migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "20185"
},
{
"name": "HTML",
"bytes": "12176"
},
{
"name": "Python",
"bytes": "9343"
}
],
"symlink_target": ""
} |
import os
from io import BytesIO
import mock
import pytest
from twitter.common.contextutil import temporary_dir
from apache.aurora.client import config
from apache.aurora.client.config import get_config as get_aurora_config
from apache.aurora.client.config import PRODUCTION_DEPRECATED_WARNING
from apache.aurora.config import AuroraConfig
from apache.aurora.config.loader import AuroraConfigLoader
from apache.aurora.config.schema.base import (
MB,
Announcer,
HealthCheckConfig,
Job,
Resources,
Task,
UpdateConfig
)
from apache.thermos.config.schema_base import Process
MESOS_CONFIG_BASE = """
HELLO_WORLD = Job(
name = 'hello_world',
role = 'john_doe',
cluster = 'test-cluster',
environment = 'test',
%(announce)s
task = Task(
name = 'main',
processes = [Process(name = 'hello_world', cmdline = '%(cmdline)s')],
resources = Resources(cpu = 0.1, ram = 64 * MB, disk = 64 * MB),
)
)
jobs = [HELLO_WORLD]
"""
MESOS_CONFIG_WITH_INCLUDE = """
%s
include(%s)
"""
MESOS_CONFIG_WITH_ANNOUNCE_1 = MESOS_CONFIG_BASE % {
'cmdline': 'echo {{thermos.ports[http]}}',
'announce': 'announce = Announcer(primary_port="http"),'}
MESOS_CONFIG_WITH_ANNOUNCE_2 = MESOS_CONFIG_BASE % {
'cmdline': 'echo {{thermos.ports[http]}}',
'announce': '''announce = Announcer(
primary_port = "http",
portmap = {"aurora": "http"}),
'''}
MESOS_CONFIG_WITH_INVALID_STATS = MESOS_CONFIG_BASE % {
'cmdline': 'echo {{thermos.ports[http]}}',
'announce': 'announce = Announcer(primary_port="http", stats_port="blah"),'}
MESOS_CONFIG_WITHOUT_ANNOUNCE = MESOS_CONFIG_BASE % {
'cmdline': 'echo {{thermos.ports[http]}}',
'announce': ''
}
def test_get_config_announces():
  """Every announce variant (and no announce) parses into a valid job."""
  good_configs = (
      MESOS_CONFIG_WITH_ANNOUNCE_1,
      MESOS_CONFIG_WITH_ANNOUNCE_2,
      MESOS_CONFIG_WITHOUT_ANNOUNCE,
  )
  for cfg in good_configs:
    get_aurora_config('hello_world', BytesIO(cfg)).job()
def test_get_config_with_broken_subscopes():
  """Nested (unbound) mustache references are rejected as invalid config."""
  bad_config = MESOS_CONFIG_BASE % {
      'cmdline': 'echo {{hello[{{thermos.ports[http]}}]}}',
      'announce': '',
  }
  with pytest.raises(AuroraConfig.InvalidConfig) as cm:
    get_aurora_config('hello_world', BytesIO(bad_config)).job()
  assert 'Unexpected unbound refs' in str(cm.value.message)
def test_get_config_select():
  """Selection succeeds for matching env/role/cluster, raises otherwise."""
  config_stream = BytesIO(MESOS_CONFIG_WITHOUT_ANNOUNCE)
  # Matching selectors resolve the job.
  get_aurora_config(
      'hello_world',
      config_stream,
      select_env='test',
      select_role='john_doe',
      select_cluster='test-cluster').job()
  # Rewind and retry with selectors that match nothing.
  config_stream.seek(0)
  with pytest.raises(ValueError) as cm:
    get_aurora_config(
        'hello_world',
        config_stream,
        select_env='staging42',
        select_role='moua',
        select_cluster='test-cluster').job()
  # The error names the available job key.
  assert 'test-cluster/john_doe/test/hello_world' in str(cm.value.message)
def test_include():
  """Config include()s resolve relative to the including file's location.

  Loading the including config by *path* succeeds; re-loading it through an
  already-open file object fails because the loader then has no directory to
  resolve the relative include against.
  """
  with temporary_dir() as tmp_dir:  # renamed from `dir`, which shadowed the builtin
    hello_mesos_fname = "hello_world.mesos"
    hello_mesos_path = os.path.join(tmp_dir, hello_mesos_fname)
    # BUG FIX: was open(os.path.join(dir, hello_mesos_path), ...) — joining the
    # directory onto an already-joined path (a confusing no-op, since
    # os.path.join returns the second argument when it is absolute).
    with open(hello_mesos_path, "wb") as hello_world_mesos:
      hello_world_mesos.write(MESOS_CONFIG_WITHOUT_ANNOUNCE)
      hello_world_mesos.flush()

      hello_include_fname_path = os.path.join(tmp_dir, "hello_include_fname.mesos")
      with open(hello_include_fname_path, "wb+") as hello_include_fname_fp:
        # Reference the included file by bare name, i.e. a relative path.
        hello_include_fname_fp.write(MESOS_CONFIG_WITH_INCLUDE %
            ("", """'%s'""" % hello_mesos_fname))
        hello_include_fname_fp.flush()

        # Loading by path works: the loader knows the including file's dir.
        get_aurora_config('hello_world', hello_include_fname_path)

        # Loading from the open file object cannot resolve the include.
        hello_include_fname_fp.seek(0)
        with pytest.raises(AuroraConfigLoader.InvalidConfigError):
          get_aurora_config('hello_world', hello_include_fname_fp)
def test_dedicated_portmap():
  """Announcer portmaps are only legal on jobs with a `dedicated` constraint."""
  base_job = Job(
      name='hello_world', role='john_doe', cluster='test-cluster',
      task=Task(name='main', processes=[],
                resources=Resources(cpu=0.1, ram=64 * MB, disk=64 * MB)))

  dedicated = {'dedicated': 'mesos-team'}
  portmap_announcer = Announcer(portmap={'http': 80})

  # Accepted: no announcer, dedicated job, dedicated job with a portmap.
  config._validate_announce_configuration(AuroraConfig(base_job))
  config._validate_announce_configuration(
      AuroraConfig(base_job(constraints=dedicated)))
  config._validate_announce_configuration(
      AuroraConfig(base_job(constraints=dedicated, announce=portmap_announcer)))

  # Rejected: a portmap without a dedicated constraint.
  with pytest.raises(ValueError):
    config._validate_announce_configuration(
        AuroraConfig(base_job(announce=portmap_announcer)))
  with pytest.raises(ValueError):
    config._validate_announce_configuration(
        AuroraConfig(base_job(announce=portmap_announcer,
                              constraints={'foo': 'bar'})))
def test_update_config_passes_with_default_values():
  """A job with stock update/health-check settings passes validation."""
  main_task = Task(name='main', processes=[],
                   resources=Resources(cpu=0.1, ram=64 * MB, disk=64 * MB))
  job = Job(
      name='hello_world', role='john_doe', cluster='test-cluster',
      task=main_task)
  config._validate_update_config(AuroraConfig(job))
def test_update_config_passes_with_max_consecutive_failures_zero():
  """max_consecutive_failures == 0 (never fail out) is a legal setting."""
  main_task = Task(name='main', processes=[],
                   resources=Resources(cpu=0.1, ram=64 * MB, disk=64 * MB))
  job = Job(
      name='hello_world', role='john_doe', cluster='test-cluster',
      health_check_config=HealthCheckConfig(max_consecutive_failures=0),
      task=main_task)
  config._validate_update_config(AuroraConfig(job))
def test_update_config_fails_with_max_consecutive_failures_negative():
  """A negative max_consecutive_failures aborts validation with SystemExit."""
  main_task = Task(name='main', processes=[],
                   resources=Resources(cpu=0.1, ram=64 * MB, disk=64 * MB))
  job = Job(
      name='hello_world', role='john_doe', cluster='test-cluster',
      update_config=UpdateConfig(watch_secs=26),
      health_check_config=HealthCheckConfig(max_consecutive_failures=-1),
      task=main_task)
  with pytest.raises(SystemExit):
    config._validate_update_config(AuroraConfig(job))
def test_update_config_passes_with_min_consecutive_successes_zero():
  """min_consecutive_successes == 0 is a legal setting."""
  main_task = Task(name='main', processes=[],
                   resources=Resources(cpu=0.1, ram=64 * MB, disk=64 * MB))
  job = Job(
      name='hello_world', role='john_doe', cluster='test-cluster',
      health_check_config=HealthCheckConfig(min_consecutive_successes=0),
      task=main_task)
  config._validate_update_config(AuroraConfig(job))
def test_update_config_fails_with_min_consecutive_successes_negative():
  """A negative min_consecutive_successes aborts validation with SystemExit."""
  main_task = Task(name='main', processes=[],
                   resources=Resources(cpu=0.1, ram=64 * MB, disk=64 * MB))
  job = Job(
      name='hello_world', role='john_doe', cluster='test-cluster',
      health_check_config=HealthCheckConfig(min_consecutive_successes=-1),
      task=main_task)
  with pytest.raises(SystemExit):
    config._validate_update_config(AuroraConfig(job))
def test_update_config_passes_with_watch_secs_zero():
  """watch_secs == 0 (no watch period) is accepted."""
  main_task = Task(name='main', processes=[],
                   resources=Resources(cpu=0.1, ram=64 * MB, disk=64 * MB))
  job = Job(
      name='hello_world', role='john_doe', cluster='test-cluster',
      update_config=UpdateConfig(watch_secs=0),
      task=main_task)
  config._validate_update_config(AuroraConfig(job))
def test_update_config_fails_watch_secs_negative():
  """A negative watch_secs aborts validation with SystemExit."""
  main_task = Task(name='main', processes=[],
                   resources=Resources(cpu=0.1, ram=64 * MB, disk=64 * MB))
  job = Job(
      name='hello_world', role='john_doe', cluster='test-cluster',
      update_config=UpdateConfig(watch_secs=-1),
      task=main_task)
  with pytest.raises(SystemExit):
    config._validate_update_config(AuroraConfig(job))
def test_validate_deprecated_config_adds_warning_for_production():
  """Setting production without a tier triggers the deprecation warning."""
  job = Job(name='hello_world', role='john_doe', cluster='test-cluster', environment='test',
            task=Task(name='main', processes=[Process(cmdline='echo {{_unbound_}}', name='eco')],
                      resources=Resources(cpu=0.1, ram=64 * MB, disk=64 * MB)),
            production='true')
  with mock.patch('apache.aurora.client.config.deprecation_warning') as warning:
    config._validate_deprecated_config(AuroraConfig(job))
    warning.assert_called_once_with(PRODUCTION_DEPRECATED_WARNING)
def test_validate_deprecated_config_adds_no_warning_when_tier_is_set():
  """No deprecation warning is emitted when a tier accompanies production."""
  job = Job(name='hello_world', role='john_doe', cluster='test-cluster', environment='test',
            task=Task(name='main', processes=[Process(cmdline='echo {{_unbound_}}', name='eco')],
                      resources=Resources(cpu=0.1, ram=64 * MB, disk=64 * MB)),
            production='true', tier='preferred')
  with mock.patch('apache.aurora.client.config.deprecation_warning') as warning:
    config._validate_deprecated_config(AuroraConfig(job))
    assert warning.call_count == 0
| {
"content_hash": "42412384f1f4ea5a8d0cc609cb56f008",
"timestamp": "",
"source": "github",
"line_count": 242,
"max_line_length": 92,
"avg_line_length": 35.30578512396694,
"alnum_prop": 0.674625468164794,
"repo_name": "crashlytics/aurora",
"id": "3d5289adcb2d53506644604380797ff64227fecd",
"size": "9093",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/test/python/apache/aurora/client/test_config.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "24108"
},
{
"name": "Groovy",
"bytes": "7856"
},
{
"name": "HTML",
"bytes": "13576"
},
{
"name": "Java",
"bytes": "3537654"
},
{
"name": "JavaScript",
"bytes": "202454"
},
{
"name": "Makefile",
"bytes": "2292"
},
{
"name": "Python",
"bytes": "1560260"
},
{
"name": "Ruby",
"bytes": "4315"
},
{
"name": "Shell",
"bytes": "86531"
},
{
"name": "Smalltalk",
"bytes": "79"
},
{
"name": "Smarty",
"bytes": "25233"
},
{
"name": "Thrift",
"bytes": "54490"
}
],
"symlink_target": ""
} |
from .common import *
class IfStatementTest(TestCase):
    """Exercise interpretation of generated if/elif/else trees.

    ``create_if_statement(n)`` builds a statement with ``n`` branches and
    returns the root node plus a mapping of variable definitions: the
    ``*Condition`` variables gate each branch and the ``*Enter`` variables
    record which branch body actually ran.
    """

    # MODERNIZATION: the deprecated failUnless/failIf aliases (removed in
    # Python 3.12) are replaced with assertTrue/assertFalse throughout.

    def _run_statement(self, branch_count, true_condition=None):
        """Build a statement, optionally arm one condition, interpret it.

        Returns (context, var_defs) for assertions.
        """
        root, var_defs = create_if_statement(branch_count)
        context = Context()
        if true_condition is not None:
            context.set_variable(var_defs[true_condition], True)
        root.interpret(context)
        return context, var_defs

    def test_interpret_if(self):
        context, var_defs = self._run_statement(2, 'IfCondition')
        self.assertTrue(context.get_variable(var_defs['IfCondition']))
        self.assertTrue(context.get_variable(var_defs['IfEnter']))
        self.assertFalse(context.get_variable(var_defs['ElseEnter']))

    def test_interpret_if_else(self):
        # No condition armed: only the else branch runs.
        context, var_defs = self._run_statement(3)
        self.assertFalse(context.get_variable(var_defs['IfCondition']))
        self.assertFalse(context.get_variable(var_defs['IfEnter']))
        self.assertTrue(context.get_variable(var_defs['ElseEnter']))

    def test_interpret_elif(self):
        context, var_defs = self._run_statement(4, 'ElseIfCondition1')
        self.assertFalse(context.get_variable(var_defs['IfCondition']))
        self.assertFalse(context.get_variable(var_defs['IfEnter']))
        self.assertFalse(context.get_variable(var_defs['ElseEnter']))
        self.assertTrue(context.get_variable(var_defs['ElseIfEnter1']))

    def test_interpret_elif_2(self):
        context, var_defs = self._run_statement(6, 'ElseIfCondition2')
        self.assertFalse(context.get_variable(var_defs['IfCondition']))
        self.assertFalse(context.get_variable(var_defs['IfEnter']))
        self.assertFalse(context.get_variable(var_defs['ElseEnter']))
        self.assertFalse(context.get_variable(var_defs['ElseIfEnter1']))
        self.assertTrue(context.get_variable(var_defs['ElseIfEnter2']))
| {
"content_hash": "e2cfef23ebf9651fb4e7e1bd83ca1303",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 71,
"avg_line_length": 43.57142857142857,
"alnum_prop": 0.6622950819672131,
"repo_name": "vlfedotov/django-business-logic",
"id": "d5a1516cf04fa32343ca5e2e49d19050aa817c7b",
"size": "1857",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "tests/test_ifstatement.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "496"
},
{
"name": "HTML",
"bytes": "8659"
},
{
"name": "JavaScript",
"bytes": "45562"
},
{
"name": "Python",
"bytes": "247550"
},
{
"name": "Shell",
"bytes": "606"
},
{
"name": "TypeScript",
"bytes": "235120"
}
],
"symlink_target": ""
} |
"""
Test that debug symbols have the correct order as specified by the order file.
"""
import re
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class OrderFileTestCase(TestBase):
    """Check that the linker order file reorders debug symbols as requested."""

    mydir = TestBase.compute_mydir(__file__)

    @skipUnlessDarwin
    def test(self):
        """Test debug symbols follow the correct order by the order file."""
        self.build()
        exe = self.getBuildArtifact("a.out")
        self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET)

        # Dump the symbol table sorted by address; the order file should have
        # placed f3 ahead of f1.
        self.runCmd("image dump symtab -s address %s" % exe)
        symtab = self.res.GetOutput()
        match_f3 = re.search("Code +.+f3", symtab)
        match_f1 = re.search("Code +.+f1", symtab)

        # Both functions must appear, with f3 at a lower address than f1.
        ordered = (bool(match_f3) and bool(match_f1)
                   and match_f3.start() < match_f1.start())
        self.assertTrue(ordered, "Symbols have correct order by the order file")

        self.runCmd("run", RUN_COMPLETED)
| {
"content_hash": "b0be45ff4d615d2d903a78a66410d7f3",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 78,
"avg_line_length": 31.86111111111111,
"alnum_prop": 0.6442894507410637,
"repo_name": "endlessm/chromium-browser",
"id": "778d06ddaca90316c93dfcef4885edd636360b41",
"size": "1147",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "third_party/llvm/lldb/test/API/macosx/order/TestOrderFile.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
""" Python library for interacting with Project Vote Smart API.
Project Vote Smart's API (http://www.votesmart.org/services_api.php)
provides rich biographical data, including data on votes, committee
assignments, and much more.
"""
__author__ = "James Turk <jturk@sunlightfoundation.com>"
__version__ = "0.3.3"
__copyright__ = "Copyright (c) 2012 Sunlight Labs"
__license__ = "BSD"
import urllib
# The following added to support Python 3
try:
from urllib.request import urlopen
except ImportError:
import urllib2
# The following added to support Python 3
try:
from urllib import urlencode
except ImportError:
from urllib.parse import urlencode
try:
import json
except ImportError:
import simplejson as json
import sys
class VotesmartApiError(Exception):
    """ Exception raised for Project Vote Smart API failures: a missing api
    key, a transport error, an unparseable response, or an error message
    embedded in the API's JSON reply. """
class VotesmartApiObject(object):
    """Generic API record: adopts a response dict as its attribute namespace."""

    def __init__(self, d):
        # The response dict becomes the instance __dict__ directly, so every
        # JSON key is readable as an attribute.
        self.__dict__ = d

    def __repr__(self):
        cls_name = type(self).__name__
        return '{0}({1!r})'.format(cls_name, self.__dict__)
class Address(object):
    """An office/campaign address record, flattened from three sections."""

    def __init__(self, d):
        # Merge the nested sections into one flat attribute namespace, in the
        # same precedence order as the API response: address, phone, notes.
        for section in ('address', 'phone', 'notes'):
            self.__dict__.update(d[section])

    def __repr__(self):
        return '{0}({1!r})'.format(type(self).__name__, self.__dict__)
# Thin typed wrappers over API response sections.  Each subclass only picks a
# human-readable __str__; attributes come verbatim from the JSON response via
# VotesmartApiObject.__init__.
class WebAddress(VotesmartApiObject):
    """A single web or email address; str() yields the address itself."""
    def __str__(self):
        return self.webAddress

class Bio(object):
    """Candidate biography; flattens only the 'candidate' section."""
    def __init__(self, d):
        #self.__dict__.update(d['election'])
        #self.__dict__.update(d['office'])
        self.__dict__.update(d['candidate'])
    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, self.__dict__)

class AddlBio(VotesmartApiObject):
    """An additional biography item, shown as 'name: data'."""
    def __str__(self):
        return ': '.join((self.name, self.data))

class Candidate(VotesmartApiObject):
    """A candidate, shown as 'first last'."""
    def __str__(self):
        return ' '.join((self.firstName, self.lastName))

class CommitteeType(VotesmartApiObject):
    def __str__(self):
        return self.name

class Committee(VotesmartApiObject):
    def __str__(self):
        return self.name

class CommitteeDetail(VotesmartApiObject):
    def __str__(self):
        return self.name

class CommitteeMember(VotesmartApiObject):
    """A committee member, shown as 'title first last'."""
    def __str__(self):
        return ' '.join((self.title, self.firstName, self.lastName))

class District(VotesmartApiObject):
    def __str__(self):
        return self.name
class Election(VotesmartApiObject):
    """An election record; exposes any 'stage' section as ElectionStage list."""

    def __init__(self, d):
        # 'stage' is optional; when present it becomes the .stages attribute.
        raw_stages = d.pop('stage', None)
        self.__dict__ = d
        if raw_stages:
            self.stages = _result_to_obj(ElectionStage, raw_stages)

    def __str__(self):
        return self.name
# More thin response wrappers: each class only chooses a readable __str__ for
# one API record type; attributes come verbatim from the JSON response.
class ElectionStage(VotesmartApiObject):
    """A stage within an election, shown as 'name (date)'."""
    def __str__(self):
        return '%s (%s)' % (self.name, self.electionDate)

class Official(VotesmartApiObject):
    """A sitting officeholder, shown as 'title first last'."""
    def __str__(self):
        return ' '.join((self.title, self.firstName, self.lastName))

class LeadershipPosition(VotesmartApiObject):
    def __str__(self):
        return self.name

class Locality(VotesmartApiObject):
    def __str__(self):
        return self.name

class Measure(VotesmartApiObject):
    def __str__(self):
        return self.title

class MeasureDetail(VotesmartApiObject):
    def __str__(self):
        return self.title

class OfficeType(VotesmartApiObject):
    def __str__(self):
        return ': '.join((self.officeTypeId, self.name))

class OfficeBranch(VotesmartApiObject):
    def __str__(self):
        return ': '.join((self.officeBranchId, self.name))

class OfficeLevel(VotesmartApiObject):
    def __str__(self):
        return ': '.join((self.officeLevelId, self.name))

class Office(VotesmartApiObject):
    def __str__(self):
        return self.name

class Category(VotesmartApiObject):
    def __str__(self):
        return ': '.join((self.categoryId, self.name))

class Sig(VotesmartApiObject):
    # "Sig" = special interest group.
    def __str__(self):
        return ': '.join((self.sigId, self.name))

class SigDetail(VotesmartApiObject):
    def __str__(self):
        return self.name

class Rating(VotesmartApiObject):
    def __str__(self):
        return self.ratingText

class RatingOneCandidate(VotesmartApiObject):
    def __str__(self):
        return ': '.join((self.candidateId, self.rating))

class Ratings(VotesmartApiObject):
    def __str__(self):
        return ': '.join((self.ratingId, self.ratingName))

class State(VotesmartApiObject):
    def __str__(self):
        return ' '.join((self.stateId, self.name))

class StateDetail(VotesmartApiObject):
    def __str__(self):
        return ' '.join((self.stateId, self.name))

class BillSponsor(VotesmartApiObject):
    def __str__(self):
        return self.name

class BillAction(VotesmartApiObject):
    def __str__(self):
        return ' - '.join((self.statusDate, self.stage))

class BillAmendment(VotesmartApiObject):
    def __str__(self):
        return self.title
class BillDetail(VotesmartApiObject):
    """Detailed bill record with sponsors, actions and (optional) amendments.

    The API sometimes returns a falsy placeholder for an empty section;
    the sponsor and action sections are normalized so construction never
    crashes on them.
    """

    def __init__(self, d):
        sponsors = d.pop('sponsors')
        actions = d.pop('actions')
        amendments = d.pop('amendments') # ammendments -- sic
        if not sponsors:
            sponsors = {'sponsor': []}
        # BUG FIX: 'actions' can come back falsy just like 'sponsors', which
        # previously crashed on actions['action']; treat it as empty instead.
        if not actions:
            actions = {'action': []}
        self.sponsors = _result_to_obj(BillSponsor, sponsors['sponsor'])
        self.actions = _result_to_obj(BillAction, actions['action'])
        # NOTE(review): .amendments is only set when the section is present;
        # preserved as-is for backward compatibility — callers should use
        # getattr(bill, 'amendments', []).
        if amendments:
            self.amendments = _result_to_obj(BillAmendment, amendments['amendment'])
        # Remaining top-level keys become plain attributes.
        self.__dict__.update(d)
# Final group of thin response wrappers (see VotesmartApiObject).
class BillActionDetail(VotesmartApiObject):
    def __str__(self):
        return self.officialTitle

class Bill(VotesmartApiObject):
    def __str__(self):
        return ' '.join((self.billNumber, self.title))

class Vote(VotesmartApiObject):
    # One official's vote on a bill action.
    def __str__(self):
        return ': '.join((self.candidateName, self.action))

class Veto(VotesmartApiObject):
    def __str__(self):
        return ' '.join((self.billNumber, self.billTitle))
def _result_to_obj(cls, result):
if isinstance(result, dict):
return [cls(result)]
else:
# the if o predicate is important, sometimes they return empty strings
return [cls(o) for o in result if o]
class votesmart(object):
    """Namespace class holding static wrappers for the Vote Smart REST API.

    Assign ``votesmart.apikey`` before calling any of the nested query
    classes' methods.
    """

    # Module-wide API key; must be set by the caller before use.
    apikey = None

    @staticmethod
    def _apicall(func, params):
        """Perform one GET request against the API and return decoded JSON.

        Falsy parameter values are dropped before building the query string.
        Raises VotesmartApiError for a missing key, HTTP failure, non-JSON
        response, or an API-reported error message.
        """
        if votesmart.apikey is None:
            raise VotesmartApiError('Missing Project Vote Smart apikey')
        if sys.version_info > (3, 0):
            # Python 3 code in this block
            params = dict([(k,v) for (k,v) in params.items() if v])
            url = 'http://api.votesmart.org/%s?o=JSON&key=%s&%s' % (func, votesmart.apikey, urlencode(params))
            try:
                response = urlopen(url).read().decode('utf-8')
                obj = json.loads(response)
                if 'error' in obj:
                    # API signalled a logical error inside the JSON body.
                    raise VotesmartApiError(obj['error']['errorMessage'])
                else:
                    return obj
            except urllib.error.HTTPError as e:
                raise VotesmartApiError(e)
            except ValueError:
                # Body was not valid JSON.
                raise VotesmartApiError('Invalid Response')
        else:
            # Python 2 code in this block
            params = dict([(k,v) for (k,v) in params.iteritems() if v])
            url = 'http://api.votesmart.org/%s?o=JSON&key=%s&%s' % (func, votesmart.apikey, urllib.urlencode(params))
            try:
                response = urllib2.urlopen(url).read()
                obj = json.loads(response)
                if 'error' in obj:
                    raise VotesmartApiError(obj['error']['errorMessage'])
                else:
                    return obj
            except:
                # NOTE(review): this bare except also swallows the
                # VotesmartApiError raised just above (losing the API's error
                # message) and any real bug; it should catch
                # (urllib2.URLError, ValueError) instead — left as-is here.
                raise VotesmartApiError('Invalid Response') # TODO I was getting error trying to run in 2.7
class address(object):
@staticmethod
def getCampaign(candidateId):
params = {'candidateId': candidateId}
result = votesmart._apicall('Address.getCampaign', params)
return _result_to_obj(Address, result['address']['office'])
@staticmethod
def getCampaignWebAddress(candidateId):
params = {'candidateId': candidateId}
result = votesmart._apicall('Address.getCampaignWebAddress', params)
return _result_to_obj(WebAddress, result['webaddress']['address'])
@staticmethod
def getCampaignByElection(electionId):
params = {'electionId': electionId}
result = votesmart._apicall('Address.getCampaignByElection', params)
return _result_to_obj(Address, result['address']['office'])
@staticmethod
def getOffice(candidateId):
params = {'candidateId': candidateId}
result = votesmart._apicall('Address.getOffice', params)
return _result_to_obj(Address, result['address']['office'])
@staticmethod
def getOfficeWebAddress(candidateId):
params = {'candidateId': candidateId}
result = votesmart._apicall('Address.getOfficeWebAddress', params)
return _result_to_obj(WebAddress, result['webaddress']['address'])
#@staticmethod
#def getOfficeByOfficeState(officeId, stateId=None):
# params = {'officeId': officeId, 'stateId': stateId}
# result = votesmart._apicall('Address.getOfficeByOfficeState', params)
# return _result_to_obj(Address, result['address']['office'])
class candidatebio(object):
@staticmethod
def getBio(candidateId):
params = {'candidateId': candidateId}
result = votesmart._apicall('CandidateBio.getBio', params)
return Bio(result['bio'])
@staticmethod
def getAddlBio(candidateId):
params = {'candidateId': candidateId}
result = votesmart._apicall('CandidateBio.getAddlBio', params)
return _result_to_obj(AddlBio,
result['addlBio']['additional']['item'])
class candidates(object):
@staticmethod
def getByOfficeState(officeId, stateId=None, electionYear=None):
params = {'officeId': officeId, 'stateId':stateId, 'electionYear': electionYear}
result = votesmart._apicall('Candidates.getByOfficeState', params)
return _result_to_obj(Candidate, result['candidateList']['candidate'])
@staticmethod
def getByOfficeTypeState(officeTypeId, stateId=None, electionYear=None):
params = {'officeTypeId': officeTypeId, 'stateId':stateId, 'electionYear': electionYear}
result = votesmart._apicall('Candidates.getByOfficeTypeState', params)
return _result_to_obj(Candidate, result['candidateList']['candidate'])
@staticmethod
def getByLastname(lastName, electionYear=None):
params = {'lastName': lastName, 'electionYear':electionYear}
result = votesmart._apicall('Candidates.getByLastname', params)
return _result_to_obj(Candidate, result['candidateList']['candidate'])
@staticmethod
def getByLevenstein(lastName, electionYear=None):
params = {'lastName': lastName, 'electionYear':electionYear}
result = votesmart._apicall('Candidates.getByLevenstein', params)
return _result_to_obj(Candidate, result['candidateList']['candidate'])
@staticmethod
def getByElection(electionId):
params = {'electionId': electionId}
result = votesmart._apicall('Candidates.getByElection', params)
return _result_to_obj(Candidate, result['candidateList']['candidate'])
@staticmethod
def getByDistrict(districtId, electionYear=None):
params = {'districtId': districtId, 'electionYear':electionYear}
result = votesmart._apicall('Candidates.getByDistrict', params)
return _result_to_obj(Candidate, result['candidateList']['candidate'])
@staticmethod
def getByZip(zip5, zip4=None):
params = {'zip4': zip4, 'zip5': zip5}
result = votesmart._apicall('Candidates.getByZip', params)
return _result_to_obj(Candidate, result['candidateList']['candidate'])
class committee(object):
@staticmethod
def getTypes():
result = votesmart._apicall('Committee.getTypes', {})
return _result_to_obj(CommitteeType, result['committeeTypes']['type'])
@staticmethod
def getCommitteesByTypeState(typeId=None, stateId=None):
params = {'typeId':typeId, 'stateId':stateId}
result = votesmart._apicall('Committee.getCommitteesByTypeState', params)
return _result_to_obj(Committee, result['committees']['committee'])
@staticmethod
def getCommittee(committeeId):
params = {'committeeId' : committeeId}
result = votesmart._apicall('Committee.getCommittee', params)
return CommitteeDetail(result['committee'])
@staticmethod
def getCommitteeMembers(committeeId):
params = {'committeeId' : committeeId}
result = votesmart._apicall('Committee.getCommitteeMembers', params)
return _result_to_obj(CommitteeMember, result['committeeMembers']['member'])
class district(object):
@staticmethod
def getByOfficeState(officeId, stateId, districtName=None):
params = {'officeId':officeId, 'stateId': stateId, 'districtName': districtName}
result = votesmart._apicall('District.getByOfficeState', params)
return _result_to_obj(District, result['districtList']['district'])
@staticmethod
def getByZip(zip5, zip4=None):
params = {'zip5': zip5, 'zip4': zip4}
result = votesmart._apicall('District.getByZip', params)
return _result_to_obj(District, result['districtList']['district'])
class election(object):
@staticmethod
def getElection(electionId):
params = {'electionId':electionId}
result = votesmart._apicall('Election.getElection', params)
return Election(result['elections']['election'])
@staticmethod
def getElectionByYearState(year, stateId=None):
params = {'year':year, 'stateId':stateId}
result = votesmart._apicall('Election.getElectionByYearState', params)
return _result_to_obj(Election, result['elections']['election'])
@staticmethod
def getElectionByZip(zip5, zip4=None, year=None):
params = {'zip5': zip5, 'zip4': zip4, 'year': year}
result = votesmart._apicall('Election.getElectionByZip', params)
return _result_to_obj(Election, result['elections']['election'])
@staticmethod
def getStageCandidates(electionId, stageId, party=None,
districtId=None, stateId=None):
params = {'electionId':electionId, 'stageId':stageId,
'party':party, 'districtId':districtId, 'stateId':stateId}
result = votesmart._apicall('Election.getStageCandidates', params)
return _result_to_obj(Candidate, result['stageCandidates']['candidate'])
class leadership(object):
@staticmethod
def getPositions(stateId=None, officeId=None):
params = {'stateId':stateId, 'officeId':officeId}
result = votesmart._apicall('Leadership.getPositions', params)
return _result_to_obj(LeadershipPosition, result['leadership']['position'])
#@staticmethod
#def getCandidates(leadershipId, stateId=None):
# params = {'leadershipId':leadershipId, 'stateId':stateId}
# result = votesmart._apicall('Leadership.getCandidates', params)
# return result['leaders']['leader']
class local(object):
@staticmethod
def getCounties(stateId):
params = {'stateId': stateId}
result = votesmart._apicall('Local.getCounties', params)
return _result_to_obj(Locality, result['counties']['county'])
@staticmethod
def getCities(stateId):
params = {'stateId': stateId}
result = votesmart._apicall('Local.getCities', params)
return _result_to_obj(Locality, result['cities']['city'])
@staticmethod
def getOfficials(localId):
params = {'localId': localId}
result = votesmart._apicall('Local.getOfficials', params)
return _result_to_obj(Official, result['candidateList']['candidate'])
class measure(object):
@staticmethod
def getMeasuresByYearState(year, stateId):
params = {'year':year, 'stateId':stateId}
result = votesmart._apicall('Measure.getMeasuresByYearState', params)
return _result_to_obj(Measure, result['measures']['measure'])
@staticmethod
def getMeasure(measureId):
params = {'measureId':measureId}
result = votesmart._apicall('Measure.getMeasure', params)
return MeasureDetail(result['measure'])
class npat(object):
@staticmethod
def getNpat(candidateId):
params = {'candidateId':candidateId}
result = votesmart._apicall('Npat.getNpat', params)
return result['npat']
class office(object):
@staticmethod
def getTypes():
result = votesmart._apicall('Office.getTypes', {})
return _result_to_obj(OfficeType, result['officeTypes']['type'])
@staticmethod
def getBranches():
result = votesmart._apicall('Office.getBranches', {})
return _result_to_obj(OfficeBranch, result['branches']['branch'])
@staticmethod
def getLevels():
result = votesmart._apicall('Office.getLevels', {})
return _result_to_obj(OfficeLevel, result['levels']['level'])
@staticmethod
def getOfficesByType(typeId):
params = {'officeTypeId':typeId}
result = votesmart._apicall('Office.getOfficesByType', params)
return _result_to_obj(Office, result['offices']['office'])
@staticmethod
def getOfficesByLevel(levelId):
params = {'levelId':levelId}
result = votesmart._apicall('Office.getOfficesByLevel', params)
return _result_to_obj(Office, result['offices']['office'])
@staticmethod
def getOfficesByTypeLevel(typeId, levelId):
params = {'typeId':typeId, 'levelId':levelId}
result = votesmart._apicall('Office.getOfficesByTypeLevel', params)
return _result_to_obj(Office, result['offices']['office'])
@staticmethod
def getOfficesByBranchLevel(branchId, levelId):
params = {'branchId':branchId, 'levelId':levelId}
result = votesmart._apicall('Office.getOfficesByBranchLevel', params)
return _result_to_obj(Office, result['offices']['office'])
class officials(object):
@staticmethod
def getStatewide(stateId=None):
params = {'stateId': stateId}
result = votesmart._apicall('Officials.getStatewide', params)
return _result_to_obj(Official, result['candidateList']['candidate'])
@staticmethod
def getByOfficeState(officeId, stateId=None):
params = {'officeId':officeId, 'stateId': stateId}
result = votesmart._apicall('Officials.getByOfficeState', params)
return _result_to_obj(Official, result['candidateList']['candidate'])
@staticmethod
def getByLastname(lastName):
params = {'lastName':lastName}
result = votesmart._apicall('Officials.getByLastname', params)
return _result_to_obj(Official, result['candidateList']['candidate'])
@staticmethod
def getByLevenstein(lastName):
params = {'lastName':lastName}
result = votesmart._apicall('Officials.getByLevenstein', params)
return _result_to_obj(Official, result['candidateList']['candidate'])
@staticmethod
def getByElection(electionId):
params = {'electionId':electionId}
result = votesmart._apicall('Officials.getByElection', params)
return _result_to_obj(Official, result['candidateList']['candidate'])
@staticmethod
def getByDistrict(districtId):
params = {'districtId':districtId}
result = votesmart._apicall('Officials.getByDistrict', params)
return _result_to_obj(Official, result['candidateList']['candidate'])
@staticmethod
def getByZip(zip5, zip4=None):
params = {'zip4': zip4, 'zip5': zip5}
result = votesmart._apicall('Officials.getByZip', params)
return _result_to_obj(Official, result['candidateList']['candidate'])
class rating(object):
@staticmethod
def getCategories(stateId=None):
params = {'stateId':stateId}
result = votesmart._apicall('Rating.getCategories', params)
return _result_to_obj(Category, result['categories']['category'])
@staticmethod
def getSigList(categoryId, stateId=None):
params = {'categoryId':categoryId, 'stateId':stateId}
result = votesmart._apicall('Rating.getSigList', params)
return _result_to_obj(Sig, result['sigs']['sig'])
@staticmethod
def getSig(sigId):
params = {'sigId':sigId}
result = votesmart._apicall('Rating.getSig', params)
return SigDetail(result['sig'])
@staticmethod
def getCandidateRating(candidateId, sigId=None):
params = {'candidateId':candidateId, 'sigId':sigId}
result = votesmart._apicall('Rating.getCandidateRating', params)
return _result_to_obj(Rating, result['candidateRating']['rating'])
@staticmethod
def getRating(ratingId):
params = {'ratingId':ratingId}
result = votesmart._apicall('Rating.getRating', params)
return _result_to_obj(RatingOneCandidate, result['rating']['candidateRating'])
@staticmethod
def getSigRatings(sigId):
params = {'sigId':sigId}
result = votesmart._apicall('Rating.getSigRatings', params)
return _result_to_obj(Ratings, result['sigRatings']['rating'])
class state(object):
@staticmethod
def getStateIDs():
result = votesmart._apicall('State.getStateIDs', {})
return _result_to_obj(State, result['stateList']['list']['state'])
@staticmethod
def getState(stateId):
params = {'stateId':stateId}
result = votesmart._apicall('State.getState', params)
return StateDetail(result['state']['details'])
class votes(object):
@staticmethod
def getCategories(year, stateId=None):
params = {'year':year, 'stateId':stateId}
result = votesmart._apicall('Votes.getCategories', params)
return _result_to_obj(Category, result['categories']['category'])
@staticmethod
def getBill(billId):
params = {'billId':billId}
result = votesmart._apicall('Votes.getBill', params)
return BillDetail(result['bill'])
@staticmethod
def getBillAction(actionId):
params = {'actionId':actionId}
result = votesmart._apicall('Votes.getBillAction', params)
return BillActionDetail(result['action'])
@staticmethod
def getBillActionVotes(actionId):
params = {'actionId':actionId}
result = votesmart._apicall('Votes.getBillActionVotes', params)
return _result_to_obj(Vote, result['votes']['vote'])
@staticmethod
def getBillActionVoteByOfficial(actionId, candidateId):
params = {'actionId':actionId, 'candidateId':candidateId}
result = votesmart._apicall('Votes.getBillActionVoteByOfficial', params)
return Vote(result['votes']['vote'])
@staticmethod
def getByBillNumber(billNumber):
params = {'billNumber': billNumber}
result = votesmart._apicall('Votes.getByBillNumber', params)
return _result_to_obj(Bill, result['bills']['bill'])
@staticmethod
def getBillsByCategoryYearState(categoryId, year, stateId=None):
params = {'categoryId':categoryId, 'year':year, 'stateId':stateId}
result = votesmart._apicall('Votes.getBillsByCategoryYearState', params)
return _result_to_obj(Bill, result['bills']['bill'])
@staticmethod
def getBillsByYearState(year, stateId=None):
params = {'year':year, 'stateId':stateId}
result = votesmart._apicall('Votes.getBillsByYearState', params)
return _result_to_obj(Bill, result['bills']['bill'])
@staticmethod
def getBillsByOfficialYearOffice(candidateId, year, officeId=None):
params = {'candidateId':candidateId, 'year':year, 'officeId':officeId}
result = votesmart._apicall('Votes.getBillsByOfficialYearOffice', params)
return _result_to_obj(Bill, result['bills']['bill'])
@staticmethod
def getBillsByOfficial(candidateId, year, officeId=None, categoryId=None):
params = {'candidateId':candidateId, 'year':year, 'officeId':officeId, 'categoryId':categoryId}
result = votesmart._apicall('Votes.getBillsByOfficial', params)
return _result_to_obj(Bill, result['bills']['bill'])
@staticmethod
def getBillsByOfficialCategoryOffice(candidateId, categoryId, officeId=None):
params = {'candidateId':candidateId, 'categoryId':categoryId, 'officeId':officeId}
result = votesmart._apicall('Votes.getBillsByOfficialCategoryOffice', params)
return _result_to_obj(Bill, result['bills']['bill'])
@staticmethod
def getBillsBySponsorYear(candidateId, year):
params = {'candidateId':candidateId, 'year':year}
result = votesmart._apicall('Votes.getBillsBySponsorYear', params)
return _result_to_obj(Bill, result['bills']['bill'])
@staticmethod
def getBillsBySponsorCategory(candidateId, categoryId):
params = {'candidateId':candidateId, 'categoryId':categoryId}
result = votesmart._apicall('Votes.getBillsBySponsorCategory', params)
return _result_to_obj(Bill, result['bills']['bill'])
@staticmethod
def getBillsByStateRecent(stateId=None, amount=None):
params = {'stateId':stateId, 'amount':amount}
result = votesmart._apicall('Votes.getBillsByStateRecent', params)
return _result_to_obj(Bill, result['bills']['bill'])
@staticmethod
def getVetoes(candidateId):
params = {'candidateId': candidateId}
result = votesmart._apicall('Votes.getVetoes', params)
return _result_to_obj(Veto, result['vetoes']['veto']) | {
"content_hash": "1ef48556e391d7f4919714471688c576",
"timestamp": "",
"source": "github",
"line_count": 690,
"max_line_length": 117,
"avg_line_length": 39.256521739130434,
"alnum_prop": 0.6185254919333998,
"repo_name": "jainanisha90/WeVoteServer",
"id": "406fe6cf5b9317bbbdcd266bd5783737bdd68170",
"size": "27087",
"binary": false,
"copies": "3",
"ref": "refs/heads/develop",
"path": "import_export_vote_smart/votesmart_local.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3612"
},
{
"name": "HTML",
"bytes": "1003027"
},
{
"name": "Python",
"bytes": "7489854"
},
{
"name": "Shell",
"bytes": "611"
}
],
"symlink_target": ""
} |
import contextlib
import copy
from debtcollector import moves
from debtcollector import removals
from neutron_lib import exceptions
from oslo_config import cfg
from oslo_db import api as oslo_db_api
from oslo_db import exception as db_exc
from oslo_db.sqlalchemy import enginefacade
from oslo_log import log as logging
from oslo_utils import excutils
import osprofiler.sqlalchemy
from pecan import util as p_util
import six
import sqlalchemy
from sqlalchemy.orm import exc
import traceback
from neutron._i18n import _LE
from neutron.common import profiler # noqa
def set_hook(engine):
    """Attach osprofiler SQL tracing to *engine* when profiling is enabled."""
    profiler_conf = cfg.CONF.profiler
    if profiler_conf.enabled and profiler_conf.trace_sqlalchemy:
        osprofiler.sqlalchemy.add_tracing(sqlalchemy, engine, 'neutron.db')
# Module-wide enginefacade transaction context; all sessions/engines in this
# module are created through it.  SQLite foreign-key enforcement is turned on,
# and every newly created engine gets the osprofiler hook installed.
context_manager = enginefacade.transaction_context()
context_manager.configure(sqlite_fk=True)
context_manager.append_on_engine_create(set_hook)

# Maximum retry attempts used by the wrap_db_retry decorator below.
MAX_RETRIES = 10
LOG = logging.getLogger(__name__)
def is_retriable(e):
    """Return True if exception *e* (or a nested inner exception) is one the
    DB layer should retry.

    Exceptions already tagged with ``_RETRY_EXCEEDED`` are never retried
    again, regardless of their type.
    """
    if getattr(e, '_RETRY_EXCEEDED', False):
        return False
    retriable_types = (db_exc.DBDeadlock, exc.StaleDataError,
                       db_exc.DBConnectionError,
                       db_exc.DBDuplicateEntry, db_exc.RetryRequest)
    if _is_nested_instance(e, retriable_types):
        return True
    # looking savepoints mangled by deadlocks. see bug/1590298 for details.
    if not _is_nested_instance(e, db_exc.DBError):
        return False
    return '1305' in str(e)
# Deprecated alias kept for backward compatibility; emits a deprecation
# warning directing callers to is_retriable.
is_deadlock = moves.moved_function(is_retriable, 'is_deadlock', __name__,
                                   message='use "is_retriable" instead',
                                   version='newton', removal_version='ocata')

# Base retry decorator: retries anything is_retriable approves, with an
# increasing interval starting at 0.1s, up to MAX_RETRIES attempts.
_retry_db_errors = oslo_db_api.wrap_db_retry(
    max_retries=MAX_RETRIES,
    retry_interval=0.1,
    inc_retry_interval=True,
    exception_checker=is_retriable
)
def _tag_retriables_as_unretriable(f):
    """Puts a flag on retriable exceptions so is_retriable returns False.

    This decorator can be used outside of a retry decorator to prevent
    decorators higher up from retrying again.
    """
    @six.wraps(f)
    def wrapped(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except Exception as err:
            # save_and_reraise_exception re-raises the original exception
            # once the with-block exits; we only attach the marker flag.
            with excutils.save_and_reraise_exception():
                if is_retriable(err):
                    err._RETRY_EXCEEDED = True
    return wrapped
def _copy_if_lds(item):
"""Deepcopy lists/dicts/sets, leave everything else alone."""
return copy.deepcopy(item) if isinstance(item, (list, dict, set)) else item
def retry_db_errors(f):
    """Nesting-safe retry decorator with auto-arg-copy and logging.

    Retry decorator for all functions which do not accept a context as an
    argument. If the function accepts a context, use
    'retry_if_session_inactive' below.

    If retriable errors are retried and exceed the count, they will be tagged
    with a flag so is_retriable will no longer recognize them as retriable.
    This prevents multiple applications of this decorator (and/or the one
    below) from retrying the same exception.
    """
    # Decorator order matters: the tagger is outermost so the flag is set
    # only after _retry_db_errors has exhausted its attempts.
    @_tag_retriables_as_unretriable
    @_retry_db_errors
    @six.wraps(f)
    def wrapped(*args, **kwargs):
        try:
            # copy mutable args and kwargs to make retries safe. this doesn't
            # prevent mutations of complex objects like the context or 'self'
            dup_args = [_copy_if_lds(a) for a in args]
            dup_kwargs = {k: _copy_if_lds(v) for k, v in kwargs.items()}
            return f(*dup_args, **dup_kwargs)
        except Exception as e:
            with excutils.save_and_reraise_exception():
                if is_retriable(e):
                    LOG.debug("Retry wrapper got retriable exception: %s",
                              traceback.format_exc())
    return wrapped
def retry_if_session_inactive(context_var_name='context'):
    """Retries only if the session in the context is inactive.

    Calls a retry_db_errors wrapped version of the function if the context's
    session passed in is inactive, otherwise it just calls the function
    directly. This is useful to avoid retrying things inside of a transaction
    which is ineffective for DB races/errors.

    This should be used in all cases where retries are desired and the method
    accepts a context.

    :param context_var_name: name of the parameter carrying the context;
        raises RuntimeError at decoration time if the wrapped function has
        no such parameter.
    """
    def decorator(f):
        try:
            # NOTE(kevinbenton): we use pecan's util function here because it
            # deals with the horrors of finding args of already decorated
            # functions
            ctx_arg_index = p_util.getargspec(f).args.index(context_var_name)
        except ValueError:
            raise RuntimeError(_LE("Could not find position of var %s")
                               % context_var_name)
        f_with_retry = retry_db_errors(f)

        @six.wraps(f)
        def wrapped(*args, **kwargs):
            # only use retry wrapper if we aren't nested in an active
            # transaction
            if context_var_name in kwargs:
                context = kwargs[context_var_name]
            else:
                context = args[ctx_arg_index]
            method = f if context.session.is_active else f_with_retry
            return method(*args, **kwargs)
        return wrapped
    return decorator
def reraise_as_retryrequest(f):
    """Packs retriable exceptions into a RetryRequest.

    Non-retriable exceptions propagate unchanged; retriable ones are wrapped
    in oslo.db's RetryRequest so an outer wrap_db_retry can handle them.
    """
    @six.wraps(f)
    def wrapped(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except Exception as e:
            with excutils.save_and_reraise_exception() as ctx:
                if is_retriable(e):
                    # suppress the original re-raise and substitute the
                    # RetryRequest wrapper instead
                    ctx.reraise = False
                    raise db_exc.RetryRequest(e)
    return wrapped
def _is_nested_instance(e, etypes):
    """Check if exception or its inner excepts are an instance of etypes."""
    # NOTE: 'and' binds tighter than 'or' here — the MultipleExceptions
    # check and the recursive any() form one clause.
    return (isinstance(e, etypes) or
            isinstance(e, exceptions.MultipleExceptions) and
            any(_is_nested_instance(i, etypes) for i in e.inner_exceptions))
@contextlib.contextmanager
def exc_to_retry(etypes):
    """Context manager that converts exceptions of *etypes* (possibly nested
    inside MultipleExceptions) into a db_exc.RetryRequest; everything else
    is re-raised unchanged."""
    try:
        yield
    except Exception as e:
        with excutils.save_and_reraise_exception() as ctx:
            if _is_nested_instance(e, etypes):
                ctx.reraise = False
                raise db_exc.RetryRequest(e)
@removals.remove(version='Newton', removal_version='Ocata')
def get_engine():
    """Helper method to grab engine."""
    # Deprecated: goes through the legacy enginefacade facade.
    return context_manager.get_legacy_facade().get_engine()
@removals.remove(version='newton', removal_version='Ocata')
def dispose():
    """Dispose the module-wide engine connection pool (deprecated)."""
    context_manager.dispose_pool()
#TODO(akamyshnikova): when all places in the code, which use sessions/
# connections will be updated, this won't be needed
def get_session(autocommit=True, expire_on_commit=False, use_slave=False):
    """Helper method to grab session."""
    # Legacy-facade session; kept until callers migrate to enginefacade.
    return context_manager.get_legacy_facade().get_session(
        autocommit=autocommit, expire_on_commit=expire_on_commit,
        use_slave=use_slave)
@contextlib.contextmanager
def autonested_transaction(sess):
    """This is a convenience method to not bother with 'nested' parameter."""
    # A SAVEPOINT (nested) transaction is used when already inside an active
    # transaction; otherwise a regular subtransaction is begun.
    if sess.is_active:
        session_context = sess.begin(nested=True)
    else:
        session_context = sess.begin(subtransactions=True)
    with session_context as tx:
        yield tx
| {
"content_hash": "9a2c8b8eeb7031b5b73f64079948bc63",
"timestamp": "",
"source": "github",
"line_count": 210,
"max_line_length": 79,
"avg_line_length": 34.628571428571426,
"alnum_prop": 0.6526402640264026,
"repo_name": "sebrandon1/neutron",
"id": "7612bf85091a1ea20fbda40c780cba77310c9baa",
"size": "7900",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "neutron/db/api.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1047"
},
{
"name": "Python",
"bytes": "9903006"
},
{
"name": "Shell",
"bytes": "14339"
}
],
"symlink_target": ""
} |
from django import template
from django.template import defaultfilters
from django.utils.translation import ungettext
register = template.Library()
# A tuple of standard large number to their converters
# Each entry is (power-of-ten exponent, translated-phrase factory); the
# factory's result is later %-formatted with {'value': ...} by intword.
intword_converters = (
    (0, lambda x: x),
    (3, lambda x: ungettext('%(value)s thousand', '%(value)s thousands', x)),
    (6, lambda x: ungettext('%(value)s million', '%(value)s millions', x)),
    (9, lambda x: ungettext('%(value)s billion', '%(value)s billions', x)),
    (12, lambda x: ungettext('%(value)s trillion', '%(value)s trillions', x)),
)
@register.filter(is_safe=False)
def intword(value):
    """
    Converts a large integer amount of cents to a friendly text
    representation in euros: the input is divided by 100 first, so
    100000000 (cents) becomes '1.0 million' (euros).

    None is treated as 0; values that cannot be parsed as a number are
    returned unchanged.
    """
    try:
        value = float(value)
    except TypeError:
        # value is None
        value = 0
    except ValueError:
        # not translated to number
        return value

    value /= 100.  # prices are in cents, we translate them to euros.
    for exponent, converter in intword_converters:
        large_number = 10 ** exponent
        if value < large_number * 1000:
            new_value = value / large_number
            new_value = defaultfilters.floatformat(new_value, 1)
            return converter(new_value) % {'value': new_value}
    # use the highest available
    exponent, converter = intword_converters[-1]
    large_number = 10 ** exponent
    new_value = value / float(large_number)
    new_value = defaultfilters.floatformat(new_value, 1)
    return converter(new_value) % {'value': new_value}
| {
"content_hash": "bbb3c1441fb913df6224d05d2a5907af",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 78,
"avg_line_length": 35.02127659574468,
"alnum_prop": 0.6506682867557716,
"repo_name": "jorgecarleitao/public-contracts",
"id": "8927f66d301bbaac844e03dfba5a07ea979c98b7",
"size": "1646",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "contracts/templatetags/contracts/humanize.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "858"
},
{
"name": "HTML",
"bytes": "144711"
},
{
"name": "Python",
"bytes": "263813"
},
{
"name": "Shell",
"bytes": "617"
}
],
"symlink_target": ""
} |
import os
import unittest
from pprint import pprint
import numpy as np
from keras.callbacks import ModelCheckpoint
import anago
from anago.utils import load_data_and_labels, load_glove
def get_path(path):
    """Resolve *path* relative to this test module's directory.

    Replaces the previous lambda assignment (PEP 8 E731) with a proper def.
    """
    return os.path.join(os.path.dirname(__file__), path)


DATA_ROOT = get_path('../data/conll2003/en/ner')
SAVE_ROOT = get_path('models')  # trained model
LOG_ROOT = get_path('logs')  # checkpoint, tensorboard
EMBEDDING_PATH = get_path('../data/glove.6B/glove.6B.100d.txt')
class TestWrapper(unittest.TestCase):
    """Integration tests for the anago.Sequence NER wrapper.

    NOTE(review): these tests train real models on CoNLL-2003 data and
    GloVe embeddings — they require the datasets on disk and are slow.
    """

    @classmethod
    def setUpClass(cls):
        # Prepare output directories and load train/valid/test splits once
        # for all tests; train and valid are concatenated for training.
        if not os.path.exists(LOG_ROOT):
            os.mkdir(LOG_ROOT)
        if not os.path.exists(SAVE_ROOT):
            os.mkdir(SAVE_ROOT)
        train_path = os.path.join(DATA_ROOT, 'train.txt')
        valid_path = os.path.join(DATA_ROOT, 'valid.txt')
        test_path = os.path.join(DATA_ROOT, 'test.txt')
        x_train, y_train = load_data_and_labels(train_path)
        x_valid, y_valid = load_data_and_labels(valid_path)
        cls.x_test, cls.y_test = load_data_and_labels(test_path)
        cls.x_train = np.r_[x_train, x_valid]
        cls.y_train = np.r_[y_train, y_valid]
        cls.embeddings = load_glove(EMBEDDING_PATH)
        cls.text = 'President Obama is speaking at the White House.'
        cls.dir_path = 'models'

    def test_train_without_pretrained_embedding(self):
        # Smoke test: training must run end to end with random embeddings.
        model = anago.Sequence()
        model.fit(self.x_train, self.y_train, self.x_test, self.y_test)

    def test_train_with_pretrained_embedding(self):
        # Smoke test: training with pre-loaded GloVe embeddings.
        model = anago.Sequence(embeddings=self.embeddings)
        model.fit(self.x_train, self.y_train, self.x_test, self.y_test)

    def test_score(self):
        model = anago.Sequence()
        model.fit(self.x_train, self.y_train)
        score = model.score(self.x_test, self.y_test)
        self.assertIsInstance(score, float)

    def test_analyze(self):
        # analyze() must return a dict with 'words' and 'entities' keys.
        model = anago.Sequence()
        model.fit(self.x_train, self.y_train)
        res = model.analyze(self.text)
        pprint(res)
        self.assertIn('words', res)
        self.assertIn('entities', res)

    def test_save_and_load(self):
        # A round-tripped model must score identically to the original.
        weights_file = os.path.join(SAVE_ROOT, 'weights.h5')
        params_file = os.path.join(SAVE_ROOT, 'params.json')
        preprocessor_file = os.path.join(SAVE_ROOT, 'preprocessor.pickle')
        model = anago.Sequence()
        model.fit(self.x_train, self.y_train)
        model.save(weights_file, params_file, preprocessor_file)
        score1 = model.score(self.x_test, self.y_test)
        self.assertTrue(weights_file)
        self.assertTrue(params_file)
        self.assertTrue(preprocessor_file)
        model = anago.Sequence.load(weights_file, params_file, preprocessor_file)
        score2 = model.score(self.x_test, self.y_test)
        self.assertEqual(score1, score2)

    def test_train_vocab_init(self):
        # Seed the model vocabulary with every word seen in the data.
        vocab = set()
        for words in np.r_[self.x_train, self.x_test, self.x_test]:
            for word in words:
                vocab.add(word)
        model = anago.Sequence(initial_vocab=vocab, embeddings=self.embeddings)
        model.fit(self.x_train, self.y_train, self.x_test, self.y_test)

    def test_load(self):
        # Assumes test_save_and_load already produced the artifacts on disk.
        weights_file = os.path.join(SAVE_ROOT, 'weights.h5')
        params_file = os.path.join(SAVE_ROOT, 'params.json')
        preprocessor_file = os.path.join(SAVE_ROOT, 'preprocessor.pickle')
        model = anago.Sequence.load(weights_file, params_file, preprocessor_file)
        score = model.score(self.x_test, self.y_test)
        print(score)

    def test_train_callbacks(self):
        # Train with a ModelCheckpoint callback that snapshots weights per
        # epoch, keyed on the 'f1' metric.
        weights_file = os.path.join(SAVE_ROOT, 'weights.h5')
        params_file = os.path.join(SAVE_ROOT, 'params.json')
        preprocessor_file = os.path.join(SAVE_ROOT, 'preprocessor.pickle')
        log_dir = os.path.join(os.path.dirname(__file__), 'logs')
        file_name = '_'.join(['weights', '{epoch:02d}', '{f1:2.4f}']) + '.h5'
        callback = ModelCheckpoint(os.path.join(log_dir, file_name),
                                   monitor='f1',
                                   save_weights_only=True)
        vocab = set()
        for words in np.r_[self.x_train, self.x_test, self.x_test]:
            for word in words:
                vocab.add(word)
        model = anago.Sequence(initial_vocab=vocab, embeddings=self.embeddings)
        model.fit(self.x_train, self.y_train, self.x_test, self.y_test,
                  epochs=100, callbacks=[callback])
        model.save(weights_file, params_file, preprocessor_file)
| {
"content_hash": "505f4c196dd39c33b9af964c32fb2916",
"timestamp": "",
"source": "github",
"line_count": 117,
"max_line_length": 81,
"avg_line_length": 38.76923076923077,
"alnum_prop": 0.6234567901234568,
"repo_name": "Hironsan/anago",
"id": "5e522c8105fee76ddf5c03de7758f92477dea2df",
"size": "4536",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_wrapper.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "86680"
}
],
"symlink_target": ""
} |
# Copyright 2012. Jurko Gospodnetic
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
# This tests the SPLIT_BY_CHARACTERS rule.
import BoostBuild
def test_invalid(params, expected_error_line):
    """Run SPLIT_BY_CHARACTERS with bad arguments and expect a build failure
    whose output contains *expected_error_line*."""
    tester = BoostBuild.Tester(pass_toolset=0)
    tester.write("file.jam", "SPLIT_BY_CHARACTERS %s ;" % params)
    tester.run_build_system(["-ffile.jam"], status=1)
    tester.expect_output_lines("[*] %s" % expected_error_line)
    tester.cleanup()
def test_valid():
    """Exercise SPLIT_BY_CHARACTERS with valid inputs via jam asserts."""
    t = BoostBuild.Tester(pass_toolset=0)
    # The jamroot below enumerates expected splits; the build fails if any
    # assert.result does not match.
    t.write("jamroot.jam", """\
import assert ;
assert.result FooBarBaz : SPLIT_BY_CHARACTERS FooBarBaz : "" ;
assert.result FooBarBaz : SPLIT_BY_CHARACTERS FooBarBaz : x ;
assert.result FooBa Baz : SPLIT_BY_CHARACTERS FooBarBaz : r ;
assert.result FooBa Baz : SPLIT_BY_CHARACTERS FooBarBaz : rr ;
assert.result FooBa Baz : SPLIT_BY_CHARACTERS FooBarBaz : rrr ;
assert.result FooB rB z : SPLIT_BY_CHARACTERS FooBarBaz : a ;
assert.result FooB B z : SPLIT_BY_CHARACTERS FooBarBaz : ar ;
assert.result ooBarBaz : SPLIT_BY_CHARACTERS FooBarBaz : F ;
assert.result FooBarBa : SPLIT_BY_CHARACTERS FooBarBaz : z ;
assert.result ooBarBa : SPLIT_BY_CHARACTERS FooBarBaz : Fz ;
assert.result F B rB z : SPLIT_BY_CHARACTERS FooBarBaz : oa ;
assert.result Alib b : SPLIT_BY_CHARACTERS Alibaba : oa ;
assert.result libaba : SPLIT_BY_CHARACTERS Alibaba : oA ;
assert.result : SPLIT_BY_CHARACTERS FooBarBaz : FooBarBaz ;
assert.result : SPLIT_BY_CHARACTERS FooBarBaz : FoBarz ;
# Questionable results - should they return an empty string or an empty list?
assert.result : SPLIT_BY_CHARACTERS "" : "" ;
assert.result : SPLIT_BY_CHARACTERS "" : x ;
assert.result : SPLIT_BY_CHARACTERS "" : r ;
assert.result : SPLIT_BY_CHARACTERS "" : rr ;
assert.result : SPLIT_BY_CHARACTERS "" : rrr ;
assert.result : SPLIT_BY_CHARACTERS "" : oa ;
""")
    t.run_build_system()
    t.cleanup()
# Error-reporting cases: wrong arity or missing arguments must fail with
# the expected diagnostic.
test_invalid("", "missing argument string")
test_invalid("Foo", "missing argument delimiters")
test_invalid(": Bar", "missing argument string")
test_invalid("a : b : c", "extra argument c")
test_invalid("a b : c", "extra argument b")
test_invalid("a : b c", "extra argument c")
# Happy-path behavior.
test_valid()
| {
"content_hash": "000f81140cfe1d0903827d3fa38eae32",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 77,
"avg_line_length": 41.089285714285715,
"alnum_prop": 0.6949152542372882,
"repo_name": "ycsoft/FatCat-Server",
"id": "cdf5b04f9e3bd2af0ece7bcbe2012e54497122a9",
"size": "2320",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "LIBS/boost_1_58_0/tools/build/test/builtin_split_by_characters.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "195345"
},
{
"name": "Batchfile",
"bytes": "32367"
},
{
"name": "C",
"bytes": "9529739"
},
{
"name": "C#",
"bytes": "41850"
},
{
"name": "C++",
"bytes": "175536080"
},
{
"name": "CMake",
"bytes": "14812"
},
{
"name": "CSS",
"bytes": "282447"
},
{
"name": "Cuda",
"bytes": "26521"
},
{
"name": "FORTRAN",
"bytes": "1856"
},
{
"name": "Groff",
"bytes": "6163"
},
{
"name": "HTML",
"bytes": "148956564"
},
{
"name": "JavaScript",
"bytes": "174868"
},
{
"name": "Lex",
"bytes": "1290"
},
{
"name": "Makefile",
"bytes": "1045258"
},
{
"name": "Max",
"bytes": "37424"
},
{
"name": "Objective-C",
"bytes": "34644"
},
{
"name": "Objective-C++",
"bytes": "246"
},
{
"name": "PHP",
"bytes": "60249"
},
{
"name": "Perl",
"bytes": "37297"
},
{
"name": "Perl6",
"bytes": "2130"
},
{
"name": "Python",
"bytes": "1717781"
},
{
"name": "QML",
"bytes": "613"
},
{
"name": "QMake",
"bytes": "9450"
},
{
"name": "Rebol",
"bytes": "372"
},
{
"name": "Shell",
"bytes": "372652"
},
{
"name": "Tcl",
"bytes": "1205"
},
{
"name": "TeX",
"bytes": "13819"
},
{
"name": "XSLT",
"bytes": "564356"
},
{
"name": "Yacc",
"bytes": "19612"
}
],
"symlink_target": ""
} |
import sys
from setuptools import setup
# attempt automatic python2 to python3 conversion if using python3
# NOTE(review): use_2to3 was removed in setuptools >= 58, and the
# 'distribute' package is long obsolete — this setup.py assumes a
# historical setuptools; TODO confirm before reuse.
extra = {}
if sys.version_info >= (3,):
    extra['use_2to3'] = True

setup( name='fido',
       version='1.1.1',
       install_requires=['distribute'],
       description='Format Identification for Digital Objects (FIDO)',
       packages=['fido'],
       package_data={'fido':['*.*', 'conf/*.*']},
       entry_points={'console_scripts':['fido = fido.fido:main']},
       **extra )
| {
"content_hash": "50f9dae8fc46024757f1c7965e52a103",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 70,
"avg_line_length": 30.8125,
"alnum_prop": 0.6186612576064908,
"repo_name": "opf-attic/ref",
"id": "517a8d0f5c291392a87312af928e17f88c7530ab",
"size": "493",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/fido/1.1.1/setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PHP",
"bytes": "137454"
},
{
"name": "Shell",
"bytes": "13228"
}
],
"symlink_target": ""
} |
import datetime
from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._diagnostics_operations import build_execute_site_analysis_request, build_execute_site_analysis_slot_request, build_execute_site_detector_request, build_execute_site_detector_slot_request, build_get_hosting_environment_detector_response_request, build_get_site_analysis_request, build_get_site_analysis_slot_request, build_get_site_detector_request, build_get_site_detector_response_request, build_get_site_detector_response_slot_request, build_get_site_detector_slot_request, build_get_site_diagnostic_category_request, build_get_site_diagnostic_category_slot_request, build_list_hosting_environment_detector_responses_request, build_list_site_analyses_request, build_list_site_analyses_slot_request, build_list_site_detector_responses_request, build_list_site_detector_responses_slot_request, build_list_site_detectors_request, build_list_site_detectors_slot_request, build_list_site_diagnostic_categories_request, build_list_site_diagnostic_categories_slot_request
from .._vendor import MixinABC
# Generic return type for operations, and the signature of the optional
# 'cls' response-transform callback accepted by every operation.
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class DiagnosticsOperations: # pylint: disable=too-many-public-methods
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.web.v2020_12_01.aio.WebSiteManagementClient`'s
:attr:`diagnostics` attribute.
"""
models = _models
def __init__(self, *args, **kwargs) -> None:
    """Bind the client, config, serializer and deserializer, accepting them
    either positionally (in that order) or as keyword arguments."""
    remaining = list(args)

    def _take(key):
        # Prefer the next positional argument; fall back to the keyword.
        if remaining:
            return remaining.pop(0)
        return kwargs.pop(key)

    self._client = _take("client")
    self._config = _take("config")
    self._serialize = _take("serializer")
    self._deserialize = _take("deserializer")
@distributed_trace
def list_hosting_environment_detector_responses(
    self,
    resource_group_name: str,
    name: str,
    **kwargs: Any
) -> AsyncIterable[_models.DetectorResponseCollection]:
    """List Hosting Environment Detector Responses.

    List Hosting Environment Detector Responses.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param name: Site Name.
    :type name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either DetectorResponseCollection or the result of
     cls(response)
    :rtype:
     ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2020_12_01.models.DetectorResponseCollection]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-12-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.DetectorResponseCollection]

    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    def prepare_request(next_link=None):
        # First page uses the templated operation URL; subsequent pages
        # follow the service-provided next_link verbatim.
        if not next_link:
            request = build_list_hosting_environment_detector_responses_request(
                resource_group_name=resource_group_name,
                name=name,
                subscription_id=self._config.subscription_id,
                api_version=api_version,
                template_url=self.list_hosting_environment_detector_responses.metadata['url'],
                headers=_headers,
                params=_params,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)  # type: ignore
        else:
            request = build_list_hosting_environment_detector_responses_request(
                resource_group_name=resource_group_name,
                name=name,
                subscription_id=self._config.subscription_id,
                api_version=api_version,
                template_url=next_link,
                headers=_headers,
                params=_params,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)  # type: ignore
            request.method = "GET"
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page and hand back (next_link, items).
        deserialized = self._deserialize("DetectorResponseCollection", pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        request = prepare_request(next_link)

        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
            request,
            stream=False,
            **kwargs
        )
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        return pipeline_response


    return AsyncItemPaged(
        get_next, extract_data
    )
list_hosting_environment_detector_responses.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/hostingEnvironments/{name}/detectors"}  # type: ignore
@distributed_trace_async
async def get_hosting_environment_detector_response(
    self,
    resource_group_name: str,
    name: str,
    detector_name: str,
    start_time: Optional[datetime.datetime] = None,
    end_time: Optional[datetime.datetime] = None,
    time_grain: Optional[str] = None,
    **kwargs: Any
) -> _models.DetectorResponse:
    """Get Hosting Environment Detector Response.

    Get Hosting Environment Detector Response.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param name: App Service Environment Name.
    :type name: str
    :param detector_name: Detector Resource Name.
    :type detector_name: str
    :param start_time: Start Time. Default value is None.
    :type start_time: ~datetime.datetime
    :param end_time: End Time. Default value is None.
    :type end_time: ~datetime.datetime
    :param time_grain: Time Grain. Default value is None.
    :type time_grain: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: DetectorResponse, or the result of cls(response)
    :rtype: ~azure.mgmt.web.v2020_12_01.models.DetectorResponse
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-12-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.DetectorResponse]

    # Build the HTTP request from the operation's URL template and filters.
    request = build_get_hosting_environment_detector_response_request(
        resource_group_name=resource_group_name,
        name=name,
        detector_name=detector_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        start_time=start_time,
        end_time=end_time,
        time_grain=time_grain,
        template_url=self.get_hosting_environment_detector_response.metadata['url'],
        headers=_headers,
        params=_params,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)  # type: ignore

    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        request,
        stream=False,
        **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('DetectorResponse', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized

get_hosting_environment_detector_response.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/hostingEnvironments/{name}/detectors/{detectorName}"}  # type: ignore
@distributed_trace
def list_site_detector_responses(
    self,
    resource_group_name: str,
    site_name: str,
    **kwargs: Any
) -> AsyncIterable[_models.DetectorResponseCollection]:
    """List Site Detector Responses.

    List Site Detector Responses.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param site_name: Site Name.
    :type site_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either DetectorResponseCollection or the result of
     cls(response)
    :rtype:
     ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2020_12_01.models.DetectorResponseCollection]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-12-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.DetectorResponseCollection]

    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    def prepare_request(next_link=None):
        # First page uses the templated operation URL; later pages follow
        # the service-provided next_link verbatim.
        if not next_link:
            request = build_list_site_detector_responses_request(
                resource_group_name=resource_group_name,
                site_name=site_name,
                subscription_id=self._config.subscription_id,
                api_version=api_version,
                template_url=self.list_site_detector_responses.metadata['url'],
                headers=_headers,
                params=_params,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)  # type: ignore
        else:
            request = build_list_site_detector_responses_request(
                resource_group_name=resource_group_name,
                site_name=site_name,
                subscription_id=self._config.subscription_id,
                api_version=api_version,
                template_url=next_link,
                headers=_headers,
                params=_params,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)  # type: ignore
            request.method = "GET"
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page and hand back (next_link, items).
        deserialized = self._deserialize("DetectorResponseCollection", pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        request = prepare_request(next_link)

        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
            request,
            stream=False,
            **kwargs
        )
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        return pipeline_response


    return AsyncItemPaged(
        get_next, extract_data
    )
list_site_detector_responses.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/detectors"}  # type: ignore
@distributed_trace_async
async def get_site_detector_response(
    self,
    resource_group_name: str,
    site_name: str,
    detector_name: str,
    start_time: Optional[datetime.datetime] = None,
    end_time: Optional[datetime.datetime] = None,
    time_grain: Optional[str] = None,
    **kwargs: Any
) -> _models.DetectorResponse:
    """Get site detector response.

    Get site detector response.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param site_name: Site Name.
    :type site_name: str
    :param detector_name: Detector Resource Name.
    :type detector_name: str
    :param start_time: Start Time. Default value is None.
    :type start_time: ~datetime.datetime
    :param end_time: End Time. Default value is None.
    :type end_time: ~datetime.datetime
    :param time_grain: Time Grain. Default value is None.
    :type time_grain: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: DetectorResponse, or the result of cls(response)
    :rtype: ~azure.mgmt.web.v2020_12_01.models.DetectorResponse
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-12-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.DetectorResponse]

    # Build the HTTP request from the operation's URL template and filters.
    request = build_get_site_detector_response_request(
        resource_group_name=resource_group_name,
        site_name=site_name,
        detector_name=detector_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        start_time=start_time,
        end_time=end_time,
        time_grain=time_grain,
        template_url=self.get_site_detector_response.metadata['url'],
        headers=_headers,
        params=_params,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)  # type: ignore

    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        request,
        stream=False,
        **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('DetectorResponse', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized

get_site_detector_response.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/detectors/{detectorName}"}  # type: ignore
@distributed_trace
def list_site_diagnostic_categories(
    self,
    resource_group_name: str,
    site_name: str,
    **kwargs: Any
) -> AsyncIterable[_models.DiagnosticCategoryCollection]:
    """Get Diagnostics Categories.

    Get Diagnostics Categories.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param site_name: Site Name.
    :type site_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either DiagnosticCategoryCollection or the result of
     cls(response)
    :rtype:
     ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2020_12_01.models.DiagnosticCategoryCollection]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-12-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.DiagnosticCategoryCollection]

    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    def prepare_request(next_link=None):
        # First page targets the operation's URL template; later pages follow
        # the service-supplied next_link, which is always fetched with GET.
        request = build_list_site_diagnostic_categories_request(
            resource_group_name=resource_group_name,
            site_name=site_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=next_link or self.list_site_diagnostic_categories.metadata['url'],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore
        if next_link:
            request.method = "GET"
        return request

    async def extract_data(pipeline_response):
        # Produce (link-to-next-page-or-None, async iterable of this page's items).
        deserialized = self._deserialize("DiagnosticCategoryCollection", pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        request = prepare_request(next_link)

        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=False, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code != 200:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        return pipeline_response

    return AsyncItemPaged(get_next, extract_data)

list_site_diagnostic_categories.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/diagnostics"}  # type: ignore
@distributed_trace_async
async def get_site_diagnostic_category(
    self,
    resource_group_name: str,
    site_name: str,
    diagnostic_category: str,
    **kwargs: Any
) -> _models.DiagnosticCategory:
    """Get Diagnostics Category.

    Get Diagnostics Category.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param site_name: Site Name.
    :type site_name: str
    :param diagnostic_category: Diagnostic Category.
    :type diagnostic_category: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: DiagnosticCategory, or the result of cls(response)
    :rtype: ~azure.mgmt.web.v2020_12_01.models.DiagnosticCategory
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Default status -> exception mapping; a caller-supplied error_map overrides it.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-12-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.DiagnosticCategory]

    request = build_get_site_diagnostic_category_request(
        resource_group_name=resource_group_name,
        site_name=site_name,
        diagnostic_category=diagnostic_category,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        template_url=self.get_site_diagnostic_category.metadata['url'],
        headers=_headers,
        params=_params,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)  # type: ignore

    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        request, stream=False, **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('DiagnosticCategory', pipeline_response)
    return cls(pipeline_response, deserialized, {}) if cls else deserialized

get_site_diagnostic_category.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/diagnostics/{diagnosticCategory}"}  # type: ignore
@distributed_trace
def list_site_analyses(
    self,
    resource_group_name: str,
    site_name: str,
    diagnostic_category: str,
    **kwargs: Any
) -> AsyncIterable[_models.DiagnosticAnalysisCollection]:
    """Get Site Analyses.

    Get Site Analyses.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param site_name: Site Name.
    :type site_name: str
    :param diagnostic_category: Diagnostic Category.
    :type diagnostic_category: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either DiagnosticAnalysisCollection or the result of
     cls(response)
    :rtype:
     ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2020_12_01.models.DiagnosticAnalysisCollection]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-12-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.DiagnosticAnalysisCollection]

    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    def prepare_request(next_link=None):
        # First page targets the operation's URL template; later pages follow
        # the service-supplied next_link, which is always fetched with GET.
        request = build_list_site_analyses_request(
            resource_group_name=resource_group_name,
            site_name=site_name,
            diagnostic_category=diagnostic_category,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=next_link or self.list_site_analyses.metadata['url'],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore
        if next_link:
            request.method = "GET"
        return request

    async def extract_data(pipeline_response):
        # Produce (link-to-next-page-or-None, async iterable of this page's items).
        deserialized = self._deserialize("DiagnosticAnalysisCollection", pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        request = prepare_request(next_link)

        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=False, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code != 200:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        return pipeline_response

    return AsyncItemPaged(get_next, extract_data)

list_site_analyses.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/diagnostics/{diagnosticCategory}/analyses"}  # type: ignore
@distributed_trace_async
async def get_site_analysis(
    self,
    resource_group_name: str,
    site_name: str,
    diagnostic_category: str,
    analysis_name: str,
    **kwargs: Any
) -> _models.AnalysisDefinition:
    """Get Site Analysis.

    Get Site Analysis.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param site_name: Site Name.
    :type site_name: str
    :param diagnostic_category: Diagnostic Category.
    :type diagnostic_category: str
    :param analysis_name: Analysis Name.
    :type analysis_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: AnalysisDefinition, or the result of cls(response)
    :rtype: ~azure.mgmt.web.v2020_12_01.models.AnalysisDefinition
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Default status -> exception mapping; a caller-supplied error_map overrides it.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-12-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.AnalysisDefinition]

    request = build_get_site_analysis_request(
        resource_group_name=resource_group_name,
        site_name=site_name,
        diagnostic_category=diagnostic_category,
        analysis_name=analysis_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        template_url=self.get_site_analysis.metadata['url'],
        headers=_headers,
        params=_params,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)  # type: ignore

    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        request, stream=False, **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('AnalysisDefinition', pipeline_response)
    return cls(pipeline_response, deserialized, {}) if cls else deserialized

get_site_analysis.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/diagnostics/{diagnosticCategory}/analyses/{analysisName}"}  # type: ignore
@distributed_trace_async
async def execute_site_analysis(
    self,
    resource_group_name: str,
    site_name: str,
    diagnostic_category: str,
    analysis_name: str,
    start_time: Optional[datetime.datetime] = None,
    end_time: Optional[datetime.datetime] = None,
    time_grain: Optional[str] = None,
    **kwargs: Any
) -> _models.DiagnosticAnalysis:
    """Execute Analysis.

    Execute Analysis.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param site_name: Site Name.
    :type site_name: str
    :param diagnostic_category: Category Name.
    :type diagnostic_category: str
    :param analysis_name: Analysis Resource Name.
    :type analysis_name: str
    :param start_time: Start Time. Default value is None.
    :type start_time: ~datetime.datetime
    :param end_time: End Time. Default value is None.
    :type end_time: ~datetime.datetime
    :param time_grain: Time Grain. Default value is None.
    :type time_grain: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: DiagnosticAnalysis, or the result of cls(response)
    :rtype: ~azure.mgmt.web.v2020_12_01.models.DiagnosticAnalysis
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Default status -> exception mapping; a caller-supplied error_map overrides it.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-12-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.DiagnosticAnalysis]

    request = build_execute_site_analysis_request(
        resource_group_name=resource_group_name,
        site_name=site_name,
        diagnostic_category=diagnostic_category,
        analysis_name=analysis_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        start_time=start_time,
        end_time=end_time,
        time_grain=time_grain,
        template_url=self.execute_site_analysis.metadata['url'],
        headers=_headers,
        params=_params,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)  # type: ignore

    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        request, stream=False, **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('DiagnosticAnalysis', pipeline_response)
    return cls(pipeline_response, deserialized, {}) if cls else deserialized

execute_site_analysis.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/diagnostics/{diagnosticCategory}/analyses/{analysisName}/execute"}  # type: ignore
@distributed_trace
def list_site_detectors(
    self,
    resource_group_name: str,
    site_name: str,
    diagnostic_category: str,
    **kwargs: Any
) -> AsyncIterable[_models.DiagnosticDetectorCollection]:
    """Get Detectors.

    Get Detectors.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param site_name: Site Name.
    :type site_name: str
    :param diagnostic_category: Diagnostic Category.
    :type diagnostic_category: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either DiagnosticDetectorCollection or the result of
     cls(response)
    :rtype:
     ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2020_12_01.models.DiagnosticDetectorCollection]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-12-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.DiagnosticDetectorCollection]

    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    def prepare_request(next_link=None):
        # First page targets the operation's URL template; later pages follow
        # the service-supplied next_link, which is always fetched with GET.
        request = build_list_site_detectors_request(
            resource_group_name=resource_group_name,
            site_name=site_name,
            diagnostic_category=diagnostic_category,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=next_link or self.list_site_detectors.metadata['url'],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore
        if next_link:
            request.method = "GET"
        return request

    async def extract_data(pipeline_response):
        # Produce (link-to-next-page-or-None, async iterable of this page's items).
        deserialized = self._deserialize("DiagnosticDetectorCollection", pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        request = prepare_request(next_link)

        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=False, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code != 200:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        return pipeline_response

    return AsyncItemPaged(get_next, extract_data)

list_site_detectors.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/diagnostics/{diagnosticCategory}/detectors"}  # type: ignore
@distributed_trace_async
async def get_site_detector(
    self,
    resource_group_name: str,
    site_name: str,
    diagnostic_category: str,
    detector_name: str,
    **kwargs: Any
) -> _models.DetectorDefinition:
    """Get Detector.

    Get Detector.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param site_name: Site Name.
    :type site_name: str
    :param diagnostic_category: Diagnostic Category.
    :type diagnostic_category: str
    :param detector_name: Detector Name.
    :type detector_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: DetectorDefinition, or the result of cls(response)
    :rtype: ~azure.mgmt.web.v2020_12_01.models.DetectorDefinition
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Default status -> exception mapping; a caller-supplied error_map overrides it.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-12-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.DetectorDefinition]

    request = build_get_site_detector_request(
        resource_group_name=resource_group_name,
        site_name=site_name,
        diagnostic_category=diagnostic_category,
        detector_name=detector_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        template_url=self.get_site_detector.metadata['url'],
        headers=_headers,
        params=_params,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)  # type: ignore

    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        request, stream=False, **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('DetectorDefinition', pipeline_response)
    return cls(pipeline_response, deserialized, {}) if cls else deserialized

get_site_detector.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/diagnostics/{diagnosticCategory}/detectors/{detectorName}"}  # type: ignore
@distributed_trace_async
async def execute_site_detector(
    self,
    resource_group_name: str,
    site_name: str,
    detector_name: str,
    diagnostic_category: str,
    start_time: Optional[datetime.datetime] = None,
    end_time: Optional[datetime.datetime] = None,
    time_grain: Optional[str] = None,
    **kwargs: Any
) -> _models.DiagnosticDetectorResponse:
    """Execute Detector.

    Execute Detector.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param site_name: Site Name.
    :type site_name: str
    :param detector_name: Detector Resource Name.
    :type detector_name: str
    :param diagnostic_category: Category Name.
    :type diagnostic_category: str
    :param start_time: Start Time. Default value is None.
    :type start_time: ~datetime.datetime
    :param end_time: End Time. Default value is None.
    :type end_time: ~datetime.datetime
    :param time_grain: Time Grain. Default value is None.
    :type time_grain: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: DiagnosticDetectorResponse, or the result of cls(response)
    :rtype: ~azure.mgmt.web.v2020_12_01.models.DiagnosticDetectorResponse
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Default status -> exception mapping; a caller-supplied error_map overrides it.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-12-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.DiagnosticDetectorResponse]

    request = build_execute_site_detector_request(
        resource_group_name=resource_group_name,
        site_name=site_name,
        detector_name=detector_name,
        diagnostic_category=diagnostic_category,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        start_time=start_time,
        end_time=end_time,
        time_grain=time_grain,
        template_url=self.execute_site_detector.metadata['url'],
        headers=_headers,
        params=_params,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)  # type: ignore

    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        request, stream=False, **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('DiagnosticDetectorResponse', pipeline_response)
    return cls(pipeline_response, deserialized, {}) if cls else deserialized

execute_site_detector.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/diagnostics/{diagnosticCategory}/detectors/{detectorName}/execute"}  # type: ignore
@distributed_trace
def list_site_detector_responses_slot(
    self,
    resource_group_name: str,
    site_name: str,
    slot: str,
    **kwargs: Any
) -> AsyncIterable[_models.DetectorResponseCollection]:
    """List Site Detector Responses.

    List Site Detector Responses.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param site_name: Site Name.
    :type site_name: str
    :param slot: Slot Name.
    :type slot: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either DetectorResponseCollection or the result of
     cls(response)
    :rtype:
     ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2020_12_01.models.DetectorResponseCollection]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-12-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.DetectorResponseCollection]

    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    def prepare_request(next_link=None):
        # First page targets the operation's URL template; later pages follow
        # the service-supplied next_link, which is always fetched with GET.
        request = build_list_site_detector_responses_slot_request(
            resource_group_name=resource_group_name,
            site_name=site_name,
            slot=slot,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=next_link or self.list_site_detector_responses_slot.metadata['url'],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore
        if next_link:
            request.method = "GET"
        return request

    async def extract_data(pipeline_response):
        # Produce (link-to-next-page-or-None, async iterable of this page's items).
        deserialized = self._deserialize("DetectorResponseCollection", pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        request = prepare_request(next_link)

        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=False, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code != 200:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        return pipeline_response

    return AsyncItemPaged(get_next, extract_data)

list_site_detector_responses_slot.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/slots/{slot}/detectors"}  # type: ignore
@distributed_trace_async
async def get_site_detector_response_slot(
    self,
    resource_group_name: str,
    site_name: str,
    detector_name: str,
    slot: str,
    start_time: Optional[datetime.datetime] = None,
    end_time: Optional[datetime.datetime] = None,
    time_grain: Optional[str] = None,
    **kwargs: Any
) -> _models.DetectorResponse:
    """Get site detector response.

    Get site detector response.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param site_name: Site Name.
    :type site_name: str
    :param detector_name: Detector Resource Name.
    :type detector_name: str
    :param slot: Slot Name.
    :type slot: str
    :param start_time: Start Time. Default value is None.
    :type start_time: ~datetime.datetime
    :param end_time: End Time. Default value is None.
    :type end_time: ~datetime.datetime
    :param time_grain: Time Grain. Default value is None.
    :type time_grain: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: DetectorResponse, or the result of cls(response)
    :rtype: ~azure.mgmt.web.v2020_12_01.models.DetectorResponse
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Default status -> exception mapping; a caller-supplied error_map overrides it.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-12-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.DetectorResponse]

    request = build_get_site_detector_response_slot_request(
        resource_group_name=resource_group_name,
        site_name=site_name,
        detector_name=detector_name,
        slot=slot,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        start_time=start_time,
        end_time=end_time,
        time_grain=time_grain,
        template_url=self.get_site_detector_response_slot.metadata['url'],
        headers=_headers,
        params=_params,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)  # type: ignore

    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        request, stream=False, **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('DetectorResponse', pipeline_response)
    return cls(pipeline_response, deserialized, {}) if cls else deserialized

get_site_detector_response_slot.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/slots/{slot}/detectors/{detectorName}"}  # type: ignore
@distributed_trace
def list_site_diagnostic_categories_slot(
    self,
    resource_group_name: str,
    site_name: str,
    slot: str,
    **kwargs: Any
) -> AsyncIterable[_models.DiagnosticCategoryCollection]:
    """Get Diagnostics Categories.

    Get Diagnostics Categories.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param site_name: Site Name.
    :type site_name: str
    :param slot: Slot Name.
    :type slot: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either DiagnosticCategoryCollection or the result of
     cls(response)
    :rtype:
     ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2020_12_01.models.DiagnosticCategoryCollection]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-12-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.DiagnosticCategoryCollection]

    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    def prepare_request(next_link=None):
        # First page targets the operation's URL template; later pages follow
        # the service-supplied next_link, which is always fetched with GET.
        request = build_list_site_diagnostic_categories_slot_request(
            resource_group_name=resource_group_name,
            site_name=site_name,
            slot=slot,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=next_link or self.list_site_diagnostic_categories_slot.metadata['url'],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore
        if next_link:
            request.method = "GET"
        return request

    async def extract_data(pipeline_response):
        # Produce (link-to-next-page-or-None, async iterable of this page's items).
        deserialized = self._deserialize("DiagnosticCategoryCollection", pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        request = prepare_request(next_link)

        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=False, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code != 200:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        return pipeline_response

    return AsyncItemPaged(get_next, extract_data)

list_site_diagnostic_categories_slot.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/slots/{slot}/diagnostics"}  # type: ignore
@distributed_trace_async
async def get_site_diagnostic_category_slot(
    self,
    resource_group_name: str,
    site_name: str,
    diagnostic_category: str,
    slot: str,
    **kwargs: Any
) -> _models.DiagnosticCategory:
    """Get Diagnostics Category.

    Get Diagnostics Category.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param site_name: Site Name.
    :type site_name: str
    :param diagnostic_category: Diagnostic Category.
    :type diagnostic_category: str
    :param slot: Slot Name.
    :type slot: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: DiagnosticCategory, or the result of cls(response)
    :rtype: ~azure.mgmt.web.v2020_12_01.models.DiagnosticCategory
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    # Map well-known HTTP statuses to typed exceptions; callers may extend the map.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    # Per-call API-version override: kwargs wins, then an explicit query param.
    api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-12-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.DiagnosticCategory]

    request = build_get_site_diagnostic_category_slot_request(
        resource_group_name=resource_group_name,
        site_name=site_name,
        diagnostic_category=diagnostic_category,
        slot=slot,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        template_url=self.get_site_diagnostic_category_slot.metadata['url'],
        headers=_headers,
        params=_params,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)  # type: ignore

    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        request,
        stream=False,
        **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('DiagnosticCategory', pipeline_response)

    # `cls` lets the caller intercept the raw pipeline response.
    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized

get_site_diagnostic_category_slot.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/slots/{slot}/diagnostics/{diagnosticCategory}"}  # type: ignore
@distributed_trace
def list_site_analyses_slot(
    self,
    resource_group_name: str,
    site_name: str,
    diagnostic_category: str,
    slot: str,
    **kwargs: Any
) -> AsyncIterable[_models.DiagnosticAnalysisCollection]:
    """Get Site Analyses.

    Get Site Analyses.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param site_name: Site Name.
    :type site_name: str
    :param diagnostic_category: Diagnostic Category.
    :type diagnostic_category: str
    :param slot: Slot Name.
    :type slot: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either DiagnosticAnalysisCollection or the result of
     cls(response)
    :rtype:
     ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2020_12_01.models.DiagnosticAnalysisCollection]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    # Per-call API-version override: kwargs wins, then an explicit query param.
    api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-12-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.DiagnosticAnalysisCollection]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    def prepare_request(next_link=None):
        # First page from the URL template; later pages from next_link.
        if not next_link:
            request = build_list_site_analyses_slot_request(
                resource_group_name=resource_group_name,
                site_name=site_name,
                diagnostic_category=diagnostic_category,
                slot=slot,
                subscription_id=self._config.subscription_id,
                api_version=api_version,
                template_url=self.list_site_analyses_slot.metadata['url'],
                headers=_headers,
                params=_params,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)  # type: ignore

        else:
            request = build_list_site_analyses_slot_request(
                resource_group_name=resource_group_name,
                site_name=site_name,
                diagnostic_category=diagnostic_category,
                slot=slot,
                subscription_id=self._config.subscription_id,
                api_version=api_version,
                template_url=next_link,
                headers=_headers,
                params=_params,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)  # type: ignore
            request.method = "GET"
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page and return (link to next page or None, items).
        deserialized = self._deserialize("DiagnosticAnalysisCollection", pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        request = prepare_request(next_link)

        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
            request,
            stream=False,
            **kwargs
        )
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
list_site_analyses_slot.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/slots/{slot}/diagnostics/{diagnosticCategory}/analyses"}  # type: ignore
@distributed_trace_async
async def get_site_analysis_slot(
    self,
    resource_group_name: str,
    site_name: str,
    diagnostic_category: str,
    analysis_name: str,
    slot: str,
    **kwargs: Any
) -> _models.AnalysisDefinition:
    """Get Site Analysis.

    Get Site Analysis.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param site_name: Site Name.
    :type site_name: str
    :param diagnostic_category: Diagnostic Category.
    :type diagnostic_category: str
    :param analysis_name: Analysis Name.
    :type analysis_name: str
    :param slot: Slot - optional.
    :type slot: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: AnalysisDefinition, or the result of cls(response)
    :rtype: ~azure.mgmt.web.v2020_12_01.models.AnalysisDefinition
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    # Per-call API-version override: kwargs wins, then an explicit query param.
    api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-12-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.AnalysisDefinition]

    request = build_get_site_analysis_slot_request(
        resource_group_name=resource_group_name,
        site_name=site_name,
        diagnostic_category=diagnostic_category,
        analysis_name=analysis_name,
        slot=slot,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        template_url=self.get_site_analysis_slot.metadata['url'],
        headers=_headers,
        params=_params,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)  # type: ignore

    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        request,
        stream=False,
        **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('AnalysisDefinition', pipeline_response)

    # `cls` lets the caller intercept the raw pipeline response.
    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized

get_site_analysis_slot.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/slots/{slot}/diagnostics/{diagnosticCategory}/analyses/{analysisName}"}  # type: ignore
@distributed_trace_async
async def execute_site_analysis_slot(
    self,
    resource_group_name: str,
    site_name: str,
    diagnostic_category: str,
    analysis_name: str,
    slot: str,
    start_time: Optional[datetime.datetime] = None,
    end_time: Optional[datetime.datetime] = None,
    time_grain: Optional[str] = None,
    **kwargs: Any
) -> _models.DiagnosticAnalysis:
    """Execute Analysis.

    Execute Analysis.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param site_name: Site Name.
    :type site_name: str
    :param diagnostic_category: Category Name.
    :type diagnostic_category: str
    :param analysis_name: Analysis Resource Name.
    :type analysis_name: str
    :param slot: Slot Name.
    :type slot: str
    :param start_time: Start Time. Default value is None.
    :type start_time: ~datetime.datetime
    :param end_time: End Time. Default value is None.
    :type end_time: ~datetime.datetime
    :param time_grain: Time Grain. Default value is None.
    :type time_grain: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: DiagnosticAnalysis, or the result of cls(response)
    :rtype: ~azure.mgmt.web.v2020_12_01.models.DiagnosticAnalysis
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    # Per-call API-version override: kwargs wins, then an explicit query param.
    api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-12-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.DiagnosticAnalysis]

    # The optional time window/grain are forwarded to the request builder,
    # which is responsible for serializing them as query parameters.
    request = build_execute_site_analysis_slot_request(
        resource_group_name=resource_group_name,
        site_name=site_name,
        diagnostic_category=diagnostic_category,
        analysis_name=analysis_name,
        slot=slot,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        start_time=start_time,
        end_time=end_time,
        time_grain=time_grain,
        template_url=self.execute_site_analysis_slot.metadata['url'],
        headers=_headers,
        params=_params,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)  # type: ignore

    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        request,
        stream=False,
        **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('DiagnosticAnalysis', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized

execute_site_analysis_slot.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/slots/{slot}/diagnostics/{diagnosticCategory}/analyses/{analysisName}/execute"}  # type: ignore
@distributed_trace
def list_site_detectors_slot(
    self,
    resource_group_name: str,
    site_name: str,
    diagnostic_category: str,
    slot: str,
    **kwargs: Any
) -> AsyncIterable[_models.DiagnosticDetectorCollection]:
    """Get Detectors.

    Get Detectors.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param site_name: Site Name.
    :type site_name: str
    :param diagnostic_category: Diagnostic Category.
    :type diagnostic_category: str
    :param slot: Slot Name.
    :type slot: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either DiagnosticDetectorCollection or the result of
     cls(response)
    :rtype:
     ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2020_12_01.models.DiagnosticDetectorCollection]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    # Per-call API-version override: kwargs wins, then an explicit query param.
    api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-12-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.DiagnosticDetectorCollection]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    def prepare_request(next_link=None):
        # First page from the URL template; later pages from next_link.
        if not next_link:
            request = build_list_site_detectors_slot_request(
                resource_group_name=resource_group_name,
                site_name=site_name,
                diagnostic_category=diagnostic_category,
                slot=slot,
                subscription_id=self._config.subscription_id,
                api_version=api_version,
                template_url=self.list_site_detectors_slot.metadata['url'],
                headers=_headers,
                params=_params,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)  # type: ignore

        else:
            request = build_list_site_detectors_slot_request(
                resource_group_name=resource_group_name,
                site_name=site_name,
                diagnostic_category=diagnostic_category,
                slot=slot,
                subscription_id=self._config.subscription_id,
                api_version=api_version,
                template_url=next_link,
                headers=_headers,
                params=_params,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)  # type: ignore
            request.method = "GET"
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page and return (link to next page or None, items).
        deserialized = self._deserialize("DiagnosticDetectorCollection", pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        request = prepare_request(next_link)

        pipeline_response = await self._client._pipeline.run(  # pylint: disable=protected-access
            request,
            stream=False,
            **kwargs
        )
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
list_site_detectors_slot.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/slots/{slot}/diagnostics/{diagnosticCategory}/detectors"}  # type: ignore
@distributed_trace_async
async def get_site_detector_slot(
    self,
    resource_group_name: str,
    site_name: str,
    diagnostic_category: str,
    detector_name: str,
    slot: str,
    **kwargs: Any
) -> _models.DetectorDefinition:
    """Get Detector.

    Get Detector.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param site_name: Site Name.
    :type site_name: str
    :param diagnostic_category: Diagnostic Category.
    :type diagnostic_category: str
    :param detector_name: Detector Name.
    :type detector_name: str
    :param slot: Slot Name.
    :type slot: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: DetectorDefinition, or the result of cls(response)
    :rtype: ~azure.mgmt.web.v2020_12_01.models.DetectorDefinition
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    # Per-call API-version override: kwargs wins, then an explicit query param.
    api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-12-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.DetectorDefinition]

    request = build_get_site_detector_slot_request(
        resource_group_name=resource_group_name,
        site_name=site_name,
        diagnostic_category=diagnostic_category,
        detector_name=detector_name,
        slot=slot,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        template_url=self.get_site_detector_slot.metadata['url'],
        headers=_headers,
        params=_params,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)  # type: ignore

    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        request,
        stream=False,
        **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('DetectorDefinition', pipeline_response)

    # `cls` lets the caller intercept the raw pipeline response.
    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized

get_site_detector_slot.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/slots/{slot}/diagnostics/{diagnosticCategory}/detectors/{detectorName}"}  # type: ignore
@distributed_trace_async
async def execute_site_detector_slot(
    self,
    resource_group_name: str,
    site_name: str,
    detector_name: str,
    diagnostic_category: str,
    slot: str,
    start_time: Optional[datetime.datetime] = None,
    end_time: Optional[datetime.datetime] = None,
    time_grain: Optional[str] = None,
    **kwargs: Any
) -> _models.DiagnosticDetectorResponse:
    """Execute Detector.

    Execute Detector.

    :param resource_group_name: Name of the resource group to which the resource belongs.
    :type resource_group_name: str
    :param site_name: Site Name.
    :type site_name: str
    :param detector_name: Detector Resource Name.
    :type detector_name: str
    :param diagnostic_category: Category Name.
    :type diagnostic_category: str
    :param slot: Slot Name.
    :type slot: str
    :param start_time: Start Time. Default value is None.
    :type start_time: ~datetime.datetime
    :param end_time: End Time. Default value is None.
    :type end_time: ~datetime.datetime
    :param time_grain: Time Grain. Default value is None.
    :type time_grain: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: DiagnosticDetectorResponse, or the result of cls(response)
    :rtype: ~azure.mgmt.web.v2020_12_01.models.DiagnosticDetectorResponse
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}) or {})

    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    # Per-call API-version override: kwargs wins, then an explicit query param.
    api_version = kwargs.pop('api_version', _params.pop('api-version', "2020-12-01"))  # type: str
    cls = kwargs.pop('cls', None)  # type: ClsType[_models.DiagnosticDetectorResponse]

    # NOTE: in this operation detector_name precedes diagnostic_category in
    # the signature, unlike the sibling analysis operations.
    request = build_execute_site_detector_slot_request(
        resource_group_name=resource_group_name,
        site_name=site_name,
        detector_name=detector_name,
        diagnostic_category=diagnostic_category,
        slot=slot,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        start_time=start_time,
        end_time=end_time,
        time_grain=time_grain,
        template_url=self.execute_site_detector_slot.metadata['url'],
        headers=_headers,
        params=_params,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)  # type: ignore

    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        request,
        stream=False,
        **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('DiagnosticDetectorResponse', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized

execute_site_detector_slot.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/slots/{slot}/diagnostics/{diagnosticCategory}/detectors/{detectorName}/execute"}  # type: ignore
| {
"content_hash": "a6bee51ffebc72d921a32d290a8b1799",
"timestamp": "",
"source": "github",
"line_count": 1966,
"max_line_length": 986,
"avg_line_length": 43.61342828077314,
"alnum_prop": 0.6180840641910804,
"repo_name": "Azure/azure-sdk-for-python",
"id": "99838f3b94ef92c0ea7ecc818bd08af8be0fd22e",
"size": "86244",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/appservice/azure-mgmt-web/azure/mgmt/web/v2020_12_01/aio/operations/_diagnostics_operations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
import json
import logging
import subprocess
import sys
import gflags as flags

FLAGS = flags.FLAGS

# Command-line interface for the netperf driver: how many parallel netperf
# processes to launch, the (templated) command to run for each, and the first
# port of the contiguous range used for control/data connections.
flags.DEFINE_integer('num_streams', 1, 'Number of netperf processes to run')
flags.DEFINE_string('netperf_cmd', None,
                    'netperf command to run')
flags.DEFINE_integer('port_start', None,
                     'Starting port for netperf command and data ports')
def Main(argv=sys.argv):
    """Run FLAGS.num_streams netperf processes in parallel.

    Each stream gets a dedicated (command_port, data_port) pair starting at
    FLAGS.port_start.  When all processes finish, dumps a JSON triple
    ``(stdouts, stderrs, return_codes)`` to stdout for the caller to parse.

    Args:
      argv: command-line arguments, parsed through gflags.

    Returns:
      None; exits with status 1 on invalid flags.
    """
    # Parse command-line flags
    try:
        argv = FLAGS(argv)
    except flags.FlagsError as e:
        logging.error('%s\nUsage: %s ARGS\n%s', e, sys.argv[0], FLAGS)
        sys.exit(1)

    netperf_cmd = FLAGS.netperf_cmd
    num_streams = FLAGS.num_streams
    port_start = FLAGS.port_start

    # Validate explicitly instead of assert: asserts are stripped under
    # `python -O`, which would silently skip this validation.
    if not netperf_cmd or num_streams < 1 or port_start is None:
        logging.error('--netperf_cmd, --num_streams >= 1 and --port_start '
                      'are all required.')
        sys.exit(1)

    # Start all of the netperf processes.  shell=True is intentional here:
    # netperf_cmd is an operator-supplied shell command template.
    processes = []
    for i in range(num_streams):
        command_port = port_start + i * 2
        data_port = port_start + i * 2 + 1
        cmd = netperf_cmd.format(command_port=command_port, data_port=data_port)
        processes.append(subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                          stderr=subprocess.PIPE, shell=True))

    # Wait for every process and collect its output and return code.
    stdouts = []
    stderrs = []
    return_codes = []
    for process in processes:
        out, err = process.communicate()
        stdouts.append(out)
        stderrs.append(err)
        return_codes.append(process.returncode)

    # Dump the stdouts, stderrs, and return_codes to stdout in json form.
    print(json.dumps((stdouts, stderrs, return_codes)))


if __name__ == '__main__':
    sys.exit(Main())
| {
"content_hash": "85707ad74114babe55cc4fd0e647eef3",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 79,
"avg_line_length": 30.40740740740741,
"alnum_prop": 0.666869671132765,
"repo_name": "meteorfox/PerfKitBenchmarker",
"id": "fb5705b15c0461bd020e6354fc7717ac40a263d2",
"size": "2276",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "perfkitbenchmarker/scripts/netperf_test_scripts/netperf_test.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Lua",
"bytes": "1547"
},
{
"name": "Python",
"bytes": "1843285"
},
{
"name": "Shell",
"bytes": "23474"
}
],
"symlink_target": ""
} |
from pathlib import Path
import numpy as np
import pandas as pd
def load_srim_data(directory_path):
    """Load every SRIM output file in *directory_path* into one DataFrame.

    Assumes files were saved as ``energy-{value}eV_depth-{value}A.txt``.
    Each file becomes one row with ``energy_eV`` and ``depth_A`` columns
    followed by the file's data values in ascending order.
    """
    directory_path = Path(directory_path)
    df = pd.DataFrame()
    for i, filename in enumerate(directory_path.iterdir()):
        energy_eV, depth_A = get_info_from_filename(filename)
        data = load_data(filename)
        # One column per file: [energy, depth, *sorted data values].
        # NOTE(review): assumes every file yields the same number of values,
        # otherwise the column assignment raises — TODO confirm.
        df[str(i)] = [energy_eV, depth_A, *sorted(data)]
    # Transpose so each file is a row; relabel the first two positions and
    # sort rows by (energy, depth) for deterministic ordering.
    return (df.T
            .rename(columns={0: 'energy_eV', 1: 'depth_A'})
            .sort_values(by=['energy_eV', 'depth_A'], ascending=True))
def get_info_from_filename(filename):
    """Extract ``(energy_eV, depth_A)`` from a SRIM data filename.

    Expects names of the form ``energy-{E}eV_depth-{D}A.txt`` (the file
    extension, whatever it is, is ignored).

    :param filename: path to the data file (``pathlib.Path`` or str).
    :returns: tuple of floats ``(energy_eV, depth_A)``.
    :raises ValueError: if the name does not match the expected pattern.
    """
    # Use the stem so the extension never leaks into the depth token.
    # (The previous ``filename.parts[-1]`` kept ".txt" attached, so the
    # depth token was e.g. "5A.txt" and float() raised ValueError.)
    energy, depth = Path(filename).stem.split('_')
    energy_eV = float(energy.split('-')[1][:-2])  # drop trailing "eV"
    depth_A = float(depth.split('-')[1][:-1])     # drop trailing "A"
    return energy_eV, depth_A
def load_data(filename):
    """Read the data column from a SRIM output file at *filename* (a Path).

    NOTE(review): this rewrites the file IN PLACE, stripping every 'T'
    character so ``np.loadtxt`` can parse it — destructive the first time,
    idempotent afterwards.  Confirm this side effect is acceptable.
    """
    # need to preprocess so that np.loadtxt works
    with filename.open('r') as f:
        data = f.read().replace('T', '')
    with filename.open('w') as f:
        f.write(data)
    # Skip the 12-line header; keep only the third column.
    # NOTE(review): column meaning not visible here — presumably the SRIM
    # distribution values; verify against the file format.
    _, _, data, *_ = np.loadtxt(filename, skiprows=12, unpack=True)
    return data
| {
"content_hash": "064b476838e3c4617084a203c2e3afd0",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 73,
"avg_line_length": 32.705882352941174,
"alnum_prop": 0.6160071942446043,
"repo_name": "mmoran0032/pyne",
"id": "a9060bacde05e65acf42492f7fea72bdb1266fbd",
"size": "1114",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tree/srim_data.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "28073"
}
],
"symlink_target": ""
} |
import threading,xbmc,xbmcplugin,xbmcgui,re,os,xbmcaddon,sys
import shutil
import glob
import zipfile
import urlparse
import xbmcgui
import time
import extract
# Addon handle and shared UI helpers used throughout this module.
ADDON=xbmcaddon.Addon(id='plugin.video.stargate-streams')
dialog = xbmcgui.Dialog()
dialogprocess = xbmcgui.DialogProgress()

# Well-known Kodi paths, resolved once at import time.
log_path = xbmc.translatePath('special://logpath/')
USERDATA = xbmc.translatePath(os.path.join('special://home/userdata',''))
GUI = xbmc.translatePath(os.path.join(USERDATA,'guisettings.xml'))
GUIFIX = xbmc.translatePath(os.path.join(USERDATA,'guifix.xml'))
INSTALL = xbmc.translatePath(os.path.join(USERDATA,'install.xml'))
FAVS = xbmc.translatePath(os.path.join(USERDATA,'favourites.xml'))
SOURCE = xbmc.translatePath(os.path.join(USERDATA,'sources.xml'))
ADVANCED = xbmc.translatePath(os.path.join(USERDATA,'advancedsettings.xml'))
PROFILES = xbmc.translatePath(os.path.join(USERDATA,'profiles.xml'))
RSS = xbmc.translatePath(os.path.join(USERDATA,'RssFeeds.xml'))
KEYMAPS = xbmc.translatePath(os.path.join(USERDATA,'keymaps','keyboard.xml'))
HOME = xbmc.translatePath('special://home/')

# NOTE(review): `zip` shadows the builtin; renaming would touch call sites,
# so it is only flagged here.
zip = ADDON.getSetting('zip')
USB = xbmc.translatePath(os.path.join(zip))
skin = xbmc.getSkinDir()

# Prefer the stdlib sqlite3 driver; fall back to pysqlite2 on old builds.
try:
    from sqlite3 import dbapi2 as database
except:
    from pysqlite2 import dbapi2 as database
def Add_Directory_Item(handle, url, listitem, isFolder):
    """Thin pass-through wrapper around xbmcplugin.addDirectoryItem
    (keeps call sites in this addon short and greppable)."""
    xbmcplugin.addDirectoryItem(handle, url, listitem, isFolder)
def Fix_Special(url):
    """Rewrite absolute install paths to 'special://home/' in XML files.

    Walks the tree under *url* and, for every ``.xml`` file, replaces the
    physical HOME path with the portable ``special://`` form so a backup
    restores correctly on any box.

    :param url: root directory to process.
    """
    dialogprocess.create("Stargate", "Renaming paths...", '', 'Please Wait')
    try:
        # Search all xml files and replace physical with special.
        for root, dirs, files in os.walk(url):
            for file in files:
                if file.endswith(".xml"):
                    dialogprocess.update(0, "Fixing", file, 'Please Wait')
                    path = os.path.join(root, file)
                    # Context managers close the handles even on error
                    # (the original leaked the read handle).
                    with open(path) as f:
                        contents = f.read()
                    with open(path, mode='w') as f:
                        f.write(contents.replace(HOME, 'special://home/'))
    finally:
        # Always dismiss the busy dialog (previously left open on return).
        dialogprocess.close()
def Read_Zip(url):
    """Restore backed-up settings from the zip archive at *url*.

    Skin settings found in guisettings.xml are re-applied live via the
    ``Skin.Set*`` builtins; the other known userdata files are written back
    to their standard locations.

    :param url: path to the backup zip file.
    """
    # Zip member name fragment -> destination path in userdata.
    targets = {
        'favourites.xml': FAVS,
        'sources.xml': SOURCE,
        'advancedsettings.xml': ADVANCED,
        'RssFeeds.xml': RSS,
        'keyboard.xml': KEYMAPS,
    }
    zf = zipfile.ZipFile(url, "r")
    try:
        for member in zf.namelist():
            if 'guisettings.xml' in member:
                xml = zf.read(member)
                # Re-apply every setting scoped to the currently active skin.
                pattern = '<setting type="(.+?)" name="%s.(.+?)">(.+?)</setting>' % skin
                for setting_type, setting_name, value in re.findall(pattern, xml):
                    # Strip quotes and unescape the XML '&' entity.
                    # (The original's replace('&','&') was a no-op bug and
                    # never unescaped anything.)
                    value = value.replace('"', '').replace('&amp;', '&')
                    xbmc.executebuiltin("Skin.Set%s(%s,%s)" % (setting_type.title(), setting_name, value))
            for fragment, dest in targets.items():
                if fragment in member:
                    # `with` closes the destination even if the write fails.
                    with open(dest, mode='w') as out:
                        out.write(zf.read(member))
    finally:
        # The zip handle was previously never closed.
        zf.close()
def Archive_File(sourcefile, destfile):
    """Zip the tree under *sourcefile* into *destfile* with a progress dialog.

    Files with a modification time before 1980-01-01 are skipped because the
    zip format cannot store pre-1980 timestamps.

    :param sourcefile: root directory to archive.
    :param destfile: path of the zip file to create.
    """
    zipobj = zipfile.ZipFile(destfile, 'w', zipfile.ZIP_DEFLATED)
    rootlen = len(sourcefile)
    # Compare real mtimes against the 1980-01-01 epoch.  The original
    # compared 'dd/mm/yyyy' strings lexically, which orders by day-of-month
    # first and is simply wrong as a date comparison.
    force_epoch = time.mktime((1980, 1, 1, 0, 0, 0, 0, 0, 0))
    dialogprocess.create("Stargate", "Archiving...", '', 'Please Wait')
    try:
        # First pass: count files so the progress bar has a denominator.
        total = 0
        for base, dirs, files in os.walk(sourcefile):
            total += len(files)
        done = 0
        for base, dirs, files in os.walk(sourcefile):
            # Prune unwanted directories so they are never descended into.
            # (The original tested `'temp' in dirs` per file but never
            # actually skipped anything.)
            dirs[:] = [d for d in dirs
                       if d not in ('temp', 'plugin.program.TardisWizard')]
            for file in files:
                done += 1
                progress = done / float(total) * 100
                dialogprocess.update(int(progress), "Backing Up",
                                     '[COLOR yellow]%s[/COLOR]' % file, 'Please Wait')
                fn = os.path.join(base, file)
                if os.path.getmtime(fn) > force_epoch:
                    zipobj.write(fn, fn[rootlen:])
    finally:
        zipobj.close()
        dialogprocess.close()
def Delete_Packages():
    """Remove everything under addons/packages (cached addon install zips)."""
    print 'DELETING PACKAGES'
    packages_cache_path = xbmc.translatePath(os.path.join('special://home/addons/packages', ''))
    for root, dirs, files in os.walk(packages_cache_path):
        file_count = 0
        file_count += len(files)
        # Count files and give option to delete
        if file_count > 0:
            for f in files:
                os.unlink(os.path.join(root, f))
            for d in dirs:
                # NOTE(review): rmtree here removes subdirectories that
                # os.walk would visit later; walk tolerates the missing dirs,
                # and their contents are removed by rmtree instead of unlink.
                shutil.rmtree(os.path.join(root, d))
def Delete_Logs():
for infile in glob.glob(os.path.join(log_path, 'xbmc_crashlog*.*')):
File=infile
print infile
os.remove(infile)
dialog = xbmcgui.Dialog()
dialog.ok("Crash Logs Deleted", "Your old crash logs have now been deleted.")
def Delete_Userdata():
    """Wipe ALL per-addon settings: delete every file and directory under
    userdata/addon_data.  Irreversible — callers should confirm first."""
    print '############################################################ DELETING USERDATA ###############################################################'
    addon_data_path = xbmc.translatePath(os.path.join('special://home/userdata/addon_data', ''))
    for root, dirs, files in os.walk(addon_data_path):
        file_count = 0
        file_count += len(files)
        # Count files and give option to delete
        # NOTE(review): `>= 0` is always true, so deletion is unconditional
        # here (unlike Delete_Packages, which requires at least one file).
        if file_count >= 0:
            for f in files:
                os.unlink(os.path.join(root, f))
            for d in dirs:
                shutil.rmtree(os.path.join(root, d))
def Wipe_Cache():
    """Clear the Kodi cache plus the private caches of several known addons.

    Walks each known cache directory and deletes its contents, then drops the
    Genesis addon's cached list tables from its cache.db. Failures while
    clearing the main Kodi cache are swallowed (as before); failures in the
    other directories propagate (as before).
    """
    def purge(path, ignore_errors=False):
        # Delete every file and subdirectory below `path`; `path` itself is
        # kept. Walking a nonexistent path is a harmless no-op. Unlike the old
        # copy-pasted loops, empty subdirectories at the top level are removed
        # too (acceptable for a cache wipe).
        for root, dirs, files in os.walk(path):
            for f in files:
                try:
                    os.unlink(os.path.join(root, f))
                except Exception:
                    if not ignore_errors:
                        raise
            for d in dirs:
                try:
                    shutil.rmtree(os.path.join(root, d))
                except Exception:
                    if not ignore_errors:
                        raise

    xbmc_cache_path = os.path.join(xbmc.translatePath('special://home'), 'cache')
    if os.path.exists(xbmc_cache_path):
        purge(xbmc_cache_path, ignore_errors=True)

    # Apple TV 2 keeps video caches outside the Kodi profile.
    if xbmc.getCondVisibility('system.platform.ATV2'):
        purge(os.path.join('/private/var/mobile/Library/Caches/AppleTV/Video/', 'Other'))
        purge(os.path.join('/private/var/mobile/Library/Caches/AppleTV/Video/', 'LocalAndRental'))

    # Per-addon cache directories, cleared only when present.
    addon_caches = [
        os.path.join(xbmc.translatePath('special://profile/addon_data/script.module.simple.downloader'), ''),
        os.path.join(xbmc.translatePath('special://profile/addon_data/script.image.music.slideshow/cache'), ''),
        os.path.join(xbmc.translatePath('special://profile/addon_data/plugin.video.iplayer/iplayer_http_cache'), ''),
        os.path.join(xbmc.translatePath('special://profile/addon_data/plugin.video.itv/Images'), ''),
        os.path.join(xbmc.translatePath('special://profile/addon_data/script.navi-x/cache'), ''),
        os.path.join(xbmc.translatePath('special://profile/addon_data/plugin.video.phstreams/Cache'), ''),
        os.path.join(xbmc.translatePath('special://profile/addon_data/plugin.audio.ramfm/cache'), ''),
        os.path.join(xbmc.translatePath('special://profile/addon_data/plugin.video.whatthefurk/cache'), ''),
    ]
    for cache_path in addon_caches:
        if os.path.exists(cache_path):
            purge(cache_path)

    # Drop Genesis' cached lists; best effort -- a missing or locked DB is
    # ignored (narrowed from a bare except so Ctrl-C still works).
    try:
        genesisCache = os.path.join(xbmc.translatePath('special://profile/addon_data/plugin.video.genesis'), 'cache.db')
        dbcon = database.connect(genesisCache)
        dbcur = dbcon.cursor()
        dbcur.execute("DROP TABLE IF EXISTS rel_list")
        dbcur.execute("VACUUM")
        dbcon.commit()
        dbcur.execute("DROP TABLE IF EXISTS rel_lib")
        dbcur.execute("VACUUM")
        dbcon.commit()
    except Exception:
        pass
def Destroy_Path(path):
    """Recursively delete *path*, showing a busy dialog; deletion errors are ignored.

    NOTE(review): the dialog is created but never closed here -- presumably a
    caller closes it; confirm against call sites.
    """
    dialogprocess.create("Stargate ","Cleaning...",'', 'Please Wait')
    shutil.rmtree(path, ignore_errors=True)
def Remove_Textures():
    """Empty the Kodi Textures13.db by dropping and recreating its tables.

    Any database error (e.g. missing file) is silently ignored, matching the
    previous best-effort behaviour.
    """
    textures = xbmc.translatePath('special://home/userdata/Database/Textures13.db')
    try:
        dbcon = database.connect(textures)
        dbcur = dbcon.cursor()
        # Drop each table, vacuum, commit -- same statement sequence as before.
        for table in ('path', 'sizes', 'texture'):
            dbcur.execute("DROP TABLE IF EXISTS %s" % table)
            dbcur.execute("VACUUM")
            dbcon.commit()
        # Recreate the empty schema, committing after each table.
        for ddl in (
            """CREATE TABLE path (id integer, url text, type text, texture text, primary key(id))""",
            """CREATE TABLE sizes (idtexture integer,size integer, width integer, height integer, usecount integer, lastusetime text)""",
            """CREATE TABLE texture (id integer, url text, cachedurl text, imagehash text, lasthashcheck text, PRIMARY KEY(id))""",
        ):
            dbcur.execute(ddl)
            dbcon.commit()
    except:
        pass
def Read_Zip(url):
    """Restore settings files from a backup zip at *url*.

    Applies skin settings found in guisettings.xml via Skin.Set* builtins,
    and copies favourites/sources/advancedsettings/RssFeeds/keyboard XML
    members to their configured destination paths (module-level globals).
    """
    import zipfile
    # Member-name fragment -> destination path for straight file copies,
    # checked in the same order as the original if-chain.
    targets = (
        ('favourites.xml', FAVS),
        ('sources.xml', SOURCE),
        ('advancedsettings.xml', ADVANCED),
        ('RssFeeds.xml', RSS),
        ('keyboard.xml', KEYMAPS),
    )
    z = zipfile.ZipFile(url, "r")
    try:
        for filename in z.namelist():
            if 'guisettings.xml' in filename:
                a = z.read(filename)
                r = '<setting type="(.+?)" name="%s.(.+?)">(.+?)</setting>' % skin
                match = re.compile(r).findall(a)
                print(match)
                for type, string, setting in match:
                    # NOTE(review): .replace('&','&') is a no-op -- probably
                    # meant to unescape '&amp;'; kept as-is to preserve behavior.
                    setting = setting.replace('"', '').replace('&', '&')
                    xbmc.executebuiltin("Skin.Set%s(%s,%s)" % (type.title(), string, setting))
            for fragment, dest in targets:
                if fragment in filename:
                    f = open(dest, mode='w')
                    f.write(z.read(filename))
                    f.close()
    finally:
        # The ZipFile handle was previously leaked; always close it.
        z.close()
def Check_Path():
    """If no storage path is configured, offer to open settings; then print the zip directory contents."""
    # Short-circuit keeps the yes/no prompt behind the empty-path check,
    # exactly as the nested ifs did.
    if zip == '' and dialog.yesno("Tardis Wizard", "You Have Not Set Your Storage Path", 'Set The Storage Path Now ?', ''):
        ADDON.openSettings()
    print('######### ZIP DIRECTORY #########')
    for entry in os.listdir(USB):
        print(entry)
def RestoreIt():
    """Restore a full backup.zip from the configured zip folder into special://home."""
    import time
    dialog = xbmcgui.Dialog()
    if zip == '':
        # NOTE(review): after opening settings we still fall through and try
        # to restore -- kept as-is to preserve the original flow.
        dialog.ok('Tardis Wizard','You have not set your ZIP Folder.\nPlease update the addon settings and try again.','','')
        ADDON.openSettings(sys.argv[0])
    dialogprocess.create("Tardis Wizard","Restoring",'', 'Please Wait')
    lib = xbmc.translatePath(os.path.join(zip,'backup.zip'))
    Read_Zip(lib)
    dialogprocess.create("Tardis Wizard","Checking ",'', 'Please Wait')
    HOME = xbmc.translatePath(os.path.join('special://','home'))
    dialogprocess.update(0,"", "Extracting Zip Please Wait")
    extract.all(lib, HOME, dialogprocess)
    time.sleep(1)
    xbmc.executebuiltin('UpdateLocalAddons ')
    xbmc.executebuiltin("UpdateAddonRepos")
    time.sleep(1)
    xbmc.executebuiltin('UnloadSkin()')
    xbmc.executebuiltin('ReloadSkin()')
    # Fixed: the builtin name had been corrupted by a bad "dp" ->
    # "dialogprocess" find/replace ("Loadialogprocessrofile"); the intended
    # Kodi builtin is LoadProfile.
    xbmc.executebuiltin("LoadProfile(Master user)")
    dialogprocess.close()
    dialog.ok("Tardis Wizard", "All Done, DONT PRESS OK", "Wait a 5 minutes and pull the Power","")
def Backupzip():
    """Zip up special://home into USB/backup.zip, showing a progress dialog."""
    # Imports hoisted out of the walk loop (time was re-imported per file).
    import time
    import zipfile
    if zip == '':
        dialog.ok('Tardis Wizard','You have not set your ZIP Folder.\nPlease update the addon settings and try again.','','')
        ADDON.openSettings(sys.argv[0])
    to_backup = xbmc.translatePath(os.path.join('special://','home'))
    # NOTE(review): the backup is written under USB while RestoreIt reads from
    # `zip` -- confirm the two settings point at the same folder.
    backup_zip = xbmc.translatePath(os.path.join(USB,'backup.zip'))
    Delete_Packages()
    dialogprocess.create("Tardis Wizard","Backing Up",'', 'Please Wait')
    zipobj = zipfile.ZipFile(backup_zip , 'w', zipfile.ZIP_DEFLATED)
    rootlen = len(to_backup)
    # First pass: count files so progress can be shown as a percentage.
    total = 0
    for base, dirs, files in os.walk(to_backup):
        total += len(files)
    done = 0
    for base, dirs, files in os.walk(to_backup):
        for file in files:
            done += 1
            # Guarded division: an empty tree previously raised ZeroDivisionError.
            progress = done / float(total) * 100 if total else 100
            dialogprocess.update(int(progress),"Backing Up",'[COLOR yellow]%s[/COLOR]'%file, 'Please Wait')
            fn = os.path.join(base, file)
            # NOTE(review): these tests check whether the *current* directory
            # contains a 'temp'/wizard subdir, not whether we are inside one;
            # preserved as-is.
            if 'temp' not in dirs:
                if 'plugin.program.TardisWizard' not in dirs:
                    # NOTE(review): lexicographic dd/mm/yyyy comparison against
                    # 01/01/1980 is effectively always true; kept to preserve
                    # behaviour.
                    Spaf = '01/01/1980'
                    FILE_DATE = time.strftime('%d/%m/%Y', time.gmtime(os.path.getmtime(fn)))
                    if FILE_DATE > Spaf:
                        zipobj.write(fn, fn[rootlen:])
    zipobj.close()
    dialogprocess.close()
    dialog.ok("Tardis Wizard", "You Are Now Backed Up", '','')
| {
"content_hash": "d9e1b3f08b3f0d4f39e1bb18851913e9",
"timestamp": "",
"source": "github",
"line_count": 400,
"max_line_length": 172,
"avg_line_length": 44.1575,
"alnum_prop": 0.5572666025024061,
"repo_name": "TheWardoctor/Wardoctors-repo",
"id": "61b8f2602cce0ddf10d3509dbca36ccbb1a60c11",
"size": "17663",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "plugin.video.stargate-streams/GoDev.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "3208"
},
{
"name": "JavaScript",
"bytes": "115722"
},
{
"name": "Python",
"bytes": "34405207"
},
{
"name": "Shell",
"bytes": "914"
}
],
"symlink_target": ""
} |
"""
Turbogo has to call cosmoprep. Cosmoprep is a huge mess of pexpect calls and
expectations, it is broken out here for isolation purposes and to track
differences between changes to the turbogo file and adjustments to cosmo case
handling.
Calls expect a job. Not a standalone script. Called only from turbogo.py
"""
import pexpect # pragma: no cover
import logging
import os
import turbogo_helpers
# Turbomole installation root and system/architecture name, taken from the
# environment the Turbomole config scripts set up.
TURBODIR=os.getenv('TURBODIR')
TURBOSYS=os.getenv('TURBOMOLE_SYSNAME')
if not TURBOSYS:
    # Default architecture string for 64-bit Linux Turbomole builds.
    TURBOSYS = 'em64t-unknown-linux-gnu'
if TURBODIR:
    # Directory holding the Turbomole interactive scripts (e.g. cosmoprep).
    TURBOSCRIPT = os.path.join(TURBODIR, 'bin', TURBOSYS)
else:
    # Left empty when the Turbomole environment is not loaded.
    TURBOSCRIPT = ''
class Error(Exception):
    """Base class for exceptions raised by this module."""
class CosmoError(Error):
    """General exception for cosmoprep errors.

    Attributes:
        value = exception value passed through; also forwarded to the
            Exception base so str(exc) and exc.args behave normally.
    """
    def __init__(self, value):
        # Previously the Exception base was never initialised, so str(exc)
        # and exc.args were empty; forward the message while keeping the
        # .value attribute for existing callers.
        super(CosmoError, self).__init__(value)
        self.value = value
class Cosmo():
    """Drive an interactive ``cosmoprep`` session through pexpect.

    Typical use (from turbogo.py): setup_cosmo(job), start_cosmo(),
    run_cosmo().
    """
    def __init__(self, timeout=60):
        """
        Start a cosmoprep object for specified job with optional timeout
        modification (in seconds)
        """
        self.timeout = timeout
        logging.debug("Cosmoprep instance initiated")

    def setup_cosmo(self, job):
        """Set up the parameters for a cosmoprep job"""
        self.make_parameters(job)

    def start_cosmo(self):
        """Spawn a cosmoprep instance with logfile tracking.

        Raises:
            CosmoError: if cosmoprep cannot be started from PATH or from the
                Turbomole script directory.
        """
        try:
            self.cosmo = pexpect.spawn("cosmoprep")
        except Exception:
            # Not on PATH: retry from the explicit Turbomole bin directory.
            try:
                self.cosmo = pexpect.spawn(os.path.join(TURBOSCRIPT, 'cosmoprep'))
            except Exception as e:
                raise CosmoError(
                    "Error starting cosmoprep: {} Check the environment is set up".format(
                        str(e)))
            else:
                logging.debug("Environment not loaded. cosmoprep loaded manually.")
        else:
            logging.debug("Cosmoprep instance spawned and active.")
        self.cosmo.timeout = self.timeout
        # open() instead of the Python-2-only file() builtin.
        # NOTE(review): the handle is deliberately left open for the lifetime
        # of the spawned process; pexpect writes the session transcript to it.
        fout = open('cosmolog.txt', 'w')
        self.cosmo.logfile = fout

    def make_parameters(self, job):
        """Convert job parameters into cosmoprep parameters.

        Uses job.cosmo as the dielectric constant when it is a positive
        float; otherwise falls back to cosmoprep's default (infinite epsilon).
        """
        try:
            if turbogo_helpers.is_positive_float(job.cosmo):
                self.epsilon = job.cosmo
            else:
                #cosmo can be called with infinite epsilon (default)
                self.epsilon = ''
            logging.debug('parameters made')
        except Exception as e:
            self.epsilon = ''
            # logging.warn is a deprecated alias of logging.warning
            logging.warning("Error in make_parameters: {}".format(e))

    def run_cosmo(self):
        """Walk cosmoprep's interactive prompts, accepting defaults.

        Returns:
            The exit/signal status from _end_cosmo.
        Raises:
            CosmoError: on any unexpected prompt or timeout.
        """
        try:
            out = self.cosmo.expect([
                'Keyword $cosmo already exists',
                'epsilon'
            ])
            if out == 0:
                self.cosmo.sendline('d')
                logging.debug('Cleared old cosmo data')
            if self.epsilon and turbogo_helpers.is_positive_float(self.epsilon):
                self.cosmo.sendline(self.epsilon)
                logging.debug('Epsilon of {} set'.format(self.epsilon))
            else:
                self.cosmo.sendline('')
                logging.debug('Default epsilon set')
            #lots of opportunity to expand cosmo interaction here
            # Accept the default for each remaining prompt, in the fixed
            # order cosmoprep asks them (same strings, same sequence as the
            # old expect/sendline ladder).
            for prompt in ('refind', 'LR terms on', 'COSMO RF equil. is not set',
                           'nppa', 'nspa', 'disex', 'rsolv', 'routf', 'cavity',
                           'amat'):
                self.cosmo.expect(prompt)
                self.cosmo.sendline('')
            self.cosmo.expect('if radius is in Bohr units append b')
            self.cosmo.sendline('r all b')
            self.cosmo.sendline('*')
            self.cosmo.expect('COSMO output file')
            self.cosmo.sendline('')
            self.cosmo.expect('y/n, default = n')
            self.cosmo.sendline('')
        except Exception as e:
            logging.warning('Cosmo Error: {}'.format(e))
            raise CosmoError('Error in running cosmoprep. Error: {}'.format(e))
        exitcode = self._end_cosmo()
        return exitcode

    def _end_cosmo(self):
        """Close cosmoprep: wait, then close, then force-close.

        Returns the signal status if the process was signalled, else a
        nonzero exit status, or -99 if the status cannot be read. Returns
        None implicitly on a clean 0 exit (pre-existing behaviour callers
        may rely on). Bare excepts narrowed to Exception so Ctrl-C still
        propagates.
        """
        try:
            self.cosmo.wait()
            logging.debug('Cosmoprep ended successfully.')
        except Exception:
            try:
                self.cosmo.close()
            except Exception:
                logging.warning("Cosmoprep isn't closing correctly.")
                try:
                    self.cosmo.close(force=True)
                except Exception:
                    logging.critical("Cosmoprep isn't closing with force.")
        try:
            if self.cosmo.signalstatus:
                logging.debug('signalstatus: {}'.format(self.cosmo.signalstatus))
                return self.cosmo.signalstatus
            if self.cosmo.exitstatus:
                logging.debug('exitstatus: {}'.format(self.cosmo.exitstatus))
                return self.cosmo.exitstatus
        except Exception:
            return -99
if __name__ == "__main__":
print "Not a callable script. Please run Turbogo or TurboControl."
exit()
| {
"content_hash": "8a41bfc984ead8e0d04e221e1458725d",
"timestamp": "",
"source": "github",
"line_count": 168,
"max_line_length": 90,
"avg_line_length": 34,
"alnum_prop": 0.5667016806722689,
"repo_name": "pbulsink/turbocontrol",
"id": "9b8c9baa65765254609d665b40213353d8b9b798",
"size": "5734",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "turbocontrol/cosmo_op.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "168532"
}
],
"symlink_target": ""
} |
# Generated by the protocol buffer compiler from google/protobuf/struct.proto.
# Do not hand-edit; regenerate with protoc instead.
import sys
# _b: identity on Python 2 (str is already bytes); latin1-encode on Python 3
# so the serialized descriptor literal below becomes bytes either way.
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


# File descriptor carrying the full serialized struct.proto definition.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='google/protobuf/struct.proto',
  package='google.protobuf',
  syntax='proto3',
  serialized_pb=_b('\n\x1cgoogle/protobuf/struct.proto\x12\x0fgoogle.protobuf\"\x84\x01\n\x06Struct\x12\x33\n\x06\x66ields\x18\x01 \x03(\x0b\x32#.google.protobuf.Struct.FieldsEntry\x1a\x45\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.Value:\x02\x38\x01\"\xea\x01\n\x05Value\x12\x30\n\nnull_value\x18\x01 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x16\n\x0cnumber_value\x18\x02 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x03 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x04 \x01(\x08H\x00\x12/\n\x0cstruct_value\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12\x30\n\nlist_value\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.ListValueH\x00\x42\x06\n\x04kind\"3\n\tListValue\x12&\n\x06values\x18\x01 \x03(\x0b\x32\x16.google.protobuf.Value*\x1b\n\tNullValue\x12\x0e\n\nNULL_VALUE\x10\x00\x42N\n\x13\x63om.google.protobufB\x0bStructProtoP\x01\xa0\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

# google.protobuf.NullValue enum descriptor (single value NULL_VALUE = 0).
_NULLVALUE = _descriptor.EnumDescriptor(
  name='NullValue',
  full_name='google.protobuf.NullValue',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='NULL_VALUE', index=0, number=0,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=474,
  serialized_end=501,
)
_sym_db.RegisterEnumDescriptor(_NULLVALUE)

# Public enum wrapper and its single module-level constant.
NullValue = enum_type_wrapper.EnumTypeWrapper(_NULLVALUE)
NULL_VALUE = 0
# Descriptor for the synthesized map-entry message behind Struct.fields
# (the trailing '8\001' MessageOptions marks it as a map entry).
_STRUCT_FIELDSENTRY = _descriptor.Descriptor(
  name='FieldsEntry',
  full_name='google.protobuf.Struct.FieldsEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='google.protobuf.Struct.FieldsEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='google.protobuf.Struct.FieldsEntry.value', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=113,
  serialized_end=182,
)

# Descriptor for google.protobuf.Struct (a map<string, Value>).
_STRUCT = _descriptor.Descriptor(
  name='Struct',
  full_name='google.protobuf.Struct',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='fields', full_name='google.protobuf.Struct.fields', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[_STRUCT_FIELDSENTRY, ],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=50,
  serialized_end=182,
)

# Descriptor for google.protobuf.Value: a oneof 'kind' over six variants.
_VALUE = _descriptor.Descriptor(
  name='Value',
  full_name='google.protobuf.Value',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='null_value', full_name='google.protobuf.Value.null_value', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='number_value', full_name='google.protobuf.Value.number_value', index=1,
      number=2, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='string_value', full_name='google.protobuf.Value.string_value', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='bool_value', full_name='google.protobuf.Value.bool_value', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='struct_value', full_name='google.protobuf.Value.struct_value', index=4,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='list_value', full_name='google.protobuf.Value.list_value', index=5,
      number=6, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='kind', full_name='google.protobuf.Value.kind',
      index=0, containing_type=None, fields=[]),
  ],
  serialized_start=185,
  serialized_end=419,
)

# Descriptor for google.protobuf.ListValue (repeated Value).
_LISTVALUE = _descriptor.Descriptor(
  name='ListValue',
  full_name='google.protobuf.ListValue',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='values', full_name='google.protobuf.ListValue.values', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=421,
  serialized_end=472,
)
# Cross-link the descriptors (message/enum field types and oneof membership),
# then register the messages. Generated wiring -- do not hand-edit.
_STRUCT_FIELDSENTRY.fields_by_name['value'].message_type = _VALUE
_STRUCT_FIELDSENTRY.containing_type = _STRUCT
_STRUCT.fields_by_name['fields'].message_type = _STRUCT_FIELDSENTRY
_VALUE.fields_by_name['null_value'].enum_type = _NULLVALUE
_VALUE.fields_by_name['struct_value'].message_type = _STRUCT
_VALUE.fields_by_name['list_value'].message_type = _LISTVALUE
# Attach each variant field of Value to the 'kind' oneof.
_VALUE.oneofs_by_name['kind'].fields.append(
  _VALUE.fields_by_name['null_value'])
_VALUE.fields_by_name['null_value'].containing_oneof = _VALUE.oneofs_by_name['kind']
_VALUE.oneofs_by_name['kind'].fields.append(
  _VALUE.fields_by_name['number_value'])
_VALUE.fields_by_name['number_value'].containing_oneof = _VALUE.oneofs_by_name['kind']
_VALUE.oneofs_by_name['kind'].fields.append(
  _VALUE.fields_by_name['string_value'])
_VALUE.fields_by_name['string_value'].containing_oneof = _VALUE.oneofs_by_name['kind']
_VALUE.oneofs_by_name['kind'].fields.append(
  _VALUE.fields_by_name['bool_value'])
_VALUE.fields_by_name['bool_value'].containing_oneof = _VALUE.oneofs_by_name['kind']
_VALUE.oneofs_by_name['kind'].fields.append(
  _VALUE.fields_by_name['struct_value'])
_VALUE.fields_by_name['struct_value'].containing_oneof = _VALUE.oneofs_by_name['kind']
_VALUE.oneofs_by_name['kind'].fields.append(
  _VALUE.fields_by_name['list_value'])
_VALUE.fields_by_name['list_value'].containing_oneof = _VALUE.oneofs_by_name['kind']
_LISTVALUE.fields_by_name['values'].message_type = _VALUE
DESCRIPTOR.message_types_by_name['Struct'] = _STRUCT
DESCRIPTOR.message_types_by_name['Value'] = _VALUE
DESCRIPTOR.message_types_by_name['ListValue'] = _LISTVALUE
DESCRIPTOR.enum_types_by_name['NullValue'] = _NULLVALUE

# Concrete message classes built from the descriptors via the reflection
# metaclass; FieldsEntry is nested inside Struct.
Struct = _reflection.GeneratedProtocolMessageType('Struct', (_message.Message,), dict(

  FieldsEntry = _reflection.GeneratedProtocolMessageType('FieldsEntry', (_message.Message,), dict(
    DESCRIPTOR = _STRUCT_FIELDSENTRY,
    __module__ = 'google.protobuf.struct_pb2'
    # @@protoc_insertion_point(class_scope:google.protobuf.Struct.FieldsEntry)
    ))
  ,
  DESCRIPTOR = _STRUCT,
  __module__ = 'google.protobuf.struct_pb2'
  # @@protoc_insertion_point(class_scope:google.protobuf.Struct)
  ))
_sym_db.RegisterMessage(Struct)
_sym_db.RegisterMessage(Struct.FieldsEntry)

Value = _reflection.GeneratedProtocolMessageType('Value', (_message.Message,), dict(
  DESCRIPTOR = _VALUE,
  __module__ = 'google.protobuf.struct_pb2'
  # @@protoc_insertion_point(class_scope:google.protobuf.Value)
  ))
_sym_db.RegisterMessage(Value)

ListValue = _reflection.GeneratedProtocolMessageType('ListValue', (_message.Message,), dict(
  DESCRIPTOR = _LISTVALUE,
  __module__ = 'google.protobuf.struct_pb2'
  # @@protoc_insertion_point(class_scope:google.protobuf.ListValue)
  ))
_sym_db.RegisterMessage(ListValue)

# Serialized file- and map-entry options (java package, C# namespace, etc.).
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.google.protobufB\013StructProtoP\001\240\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'))
_STRUCT_FIELDSENTRY.has_options = True
_STRUCT_FIELDSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
# @@protoc_insertion_point(module_scope)
| {
"content_hash": "6b8e56b953fbb798f737ff0021284654",
"timestamp": "",
"source": "github",
"line_count": 278,
"max_line_length": 998,
"avg_line_length": 37.82374100719424,
"alnum_prop": 0.7111745126010461,
"repo_name": "huangkuan/hack",
"id": "922b663ce603d7cdf2199c25dba7a9224b10ad41",
"size": "10614",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "lib/google/protobuf/struct_pb2.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "2573"
},
{
"name": "Makefile",
"bytes": "888"
},
{
"name": "Protocol Buffer",
"bytes": "90433"
},
{
"name": "Python",
"bytes": "5447434"
}
],
"symlink_target": ""
} |
from .fetchers import NUPermissionsFetcher
from .fetchers import NUMetadatasFetcher
from .fetchers import NUGlobalMetadatasFetcher
from .fetchers import NUOSPFAreasFetcher
from bambou import NURESTObject
class NUOSPFInstance(NURESTObject):
""" Represents a OSPFInstance in the VSD
Notes:
The OSPF instance is the highest hierarchical OSPF configuration object in a domain. The OSPF instance allows you to assign global import and export routing policies for OSPF traffic in the domain.
"""
__rest_name__ = "ospfinstance"
__resource_name__ = "ospfinstances"
## Constants
CONST_IP_TYPE_IPV4 = "IPV4"
CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"
CONST_OSPF_TYPE_OSPFV2 = "OSPFv2"
CONST_OSPF_TYPE_OSPFV3 = "OSPFv3"
CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"
def __init__(self, **kwargs):
""" Initializes a OSPFInstance instance
Notes:
You can specify all parameters while calling this methods.
A special argument named `data` will enable you to load the
object from a Python dictionary
Examples:
>>> ospfinstance = NUOSPFInstance(id=u'xxxx-xxx-xxx-xxx', name=u'OSPFInstance')
>>> ospfinstance = NUOSPFInstance(data=my_dict)
"""
super(NUOSPFInstance, self).__init__()
# Read/Write Attributes
self._ip_type = None
self._ospf_type = None
self._name = None
self._last_updated_by = None
self._last_updated_date = None
self._description = None
self._embedded_metadata = None
self._entity_scope = None
self._creation_date = None
self._preference = None
self._associated_export_routing_policy_id = None
self._associated_import_routing_policy_id = None
self._super_backbone_enabled = None
self._owner = None
self._export_limit = None
self._export_to_overlay = None
self._external_id = None
self._external_preference = None
self.expose_attribute(local_name="ip_type", remote_name="IPType", attribute_type=str, is_required=False, is_unique=False, choices=[u'IPV4'])
self.expose_attribute(local_name="ospf_type", remote_name="OSPFType", attribute_type=str, is_required=False, is_unique=True, choices=[u'OSPFv2', u'OSPFv3'])
self.expose_attribute(local_name="name", remote_name="name", attribute_type=str, is_required=True, is_unique=False)
self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="last_updated_date", remote_name="lastUpdatedDate", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="description", remote_name="description", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="embedded_metadata", remote_name="embeddedMetadata", attribute_type=list, is_required=False, is_unique=False)
self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
self.expose_attribute(local_name="creation_date", remote_name="creationDate", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="preference", remote_name="preference", attribute_type=int, is_required=False, is_unique=False)
self.expose_attribute(local_name="associated_export_routing_policy_id", remote_name="associatedExportRoutingPolicyID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="associated_import_routing_policy_id", remote_name="associatedImportRoutingPolicyID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="super_backbone_enabled", remote_name="superBackboneEnabled", attribute_type=bool, is_required=False, is_unique=False)
self.expose_attribute(local_name="owner", remote_name="owner", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="export_limit", remote_name="exportLimit", attribute_type=int, is_required=False, is_unique=False)
self.expose_attribute(local_name="export_to_overlay", remote_name="exportToOverlay", attribute_type=bool, is_required=False, is_unique=False)
self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)
self.expose_attribute(local_name="external_preference", remote_name="externalPreference", attribute_type=int, is_required=False, is_unique=False)
# Fetchers
self.permissions = NUPermissionsFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.ospf_areas = NUOSPFAreasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self._compute_args(**kwargs)
# Properties
@property
def ip_type(self):
""" Get ip_type value.
Notes:
The IP Type of the OSPF Instance, currently only IPv4 is supported.
This attribute is named `IPType` in VSD API.
"""
return self._ip_type
@ip_type.setter
def ip_type(self, value):
""" Set ip_type value.
Notes:
The IP Type of the OSPF Instance, currently only IPv4 is supported.
This attribute is named `IPType` in VSD API.
"""
self._ip_type = value
@property
def ospf_type(self):
""" Get ospf_type value.
Notes:
Type of the OSPF protocol, possible values are OSPFv2 and OSPFv3.
This attribute is named `OSPFType` in VSD API.
"""
return self._ospf_type
@ospf_type.setter
def ospf_type(self, value):
""" Set ospf_type value.
Notes:
Type of the OSPF protocol, possible values are OSPFv2 and OSPFv3.
This attribute is named `OSPFType` in VSD API.
"""
self._ospf_type = value
@property
def name(self):
""" Get name value.
Notes:
Name of OSPF Instance
"""
return self._name
@name.setter
def name(self, value):
""" Set name value.
Notes:
Name of OSPF Instance
"""
self._name = value
@property
def last_updated_by(self):
""" Get last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
return self._last_updated_by
@last_updated_by.setter
def last_updated_by(self, value):
""" Set last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
self._last_updated_by = value
@property
def last_updated_date(self):
""" Get last_updated_date value.
Notes:
Time stamp when this object was last updated.
This attribute is named `lastUpdatedDate` in VSD API.
"""
return self._last_updated_date
@last_updated_date.setter
def last_updated_date(self, value):
""" Set last_updated_date value.
Notes:
Time stamp when this object was last updated.
This attribute is named `lastUpdatedDate` in VSD API.
"""
self._last_updated_date = value
@property
def description(self):
""" Get description value.
Notes:
Description of OSPF Instance
"""
return self._description
@description.setter
def description(self, value):
""" Set description value.
Notes:
Description of OSPF Instance
"""
self._description = value
@property
def embedded_metadata(self):
    """Metadata objects embedded with this entity (`embeddedMetadata` in the VSD API).

    Notes:
        Populated only when the API request is made with the special flag
        enabling the embedded-metadata feature; capped at a maximum count
        set in the system configuration.
    """
    return self._embedded_metadata

@embedded_metadata.setter
def embedded_metadata(self, value):
    """Set the embedded metadata list."""
    self._embedded_metadata = value

@property
def entity_scope(self):
    """Scope of the entity, Data center or Enterprise level (`entityScope` in the VSD API)."""
    return self._entity_scope

@entity_scope.setter
def entity_scope(self, value):
    """Set the entity scope."""
    self._entity_scope = value

@property
def creation_date(self):
    """Timestamp when this object was created (`creationDate` in the VSD API)."""
    return self._creation_date

@creation_date.setter
def creation_date(self, value):
    """Set the creation timestamp."""
    self._creation_date = value
@property
def preference(self):
    """Preference value for OSPF internal routes."""
    return self._preference

@preference.setter
def preference(self, value):
    """Set the internal-route preference."""
    self._preference = value

@property
def associated_export_routing_policy_id(self):
    """Export OSPF routing policy ID (`associatedExportRoutingPolicyID` in the VSD API)."""
    return self._associated_export_routing_policy_id

@associated_export_routing_policy_id.setter
def associated_export_routing_policy_id(self, value):
    """Set the export routing policy ID."""
    self._associated_export_routing_policy_id = value

@property
def associated_import_routing_policy_id(self):
    """Import OSPF routing policy ID (`associatedImportRoutingPolicyID` in the VSD API)."""
    return self._associated_import_routing_policy_id

@associated_import_routing_policy_id.setter
def associated_import_routing_policy_id(self, value):
    """Set the import routing policy ID."""
    self._associated_import_routing_policy_id = value
@property
def super_backbone_enabled(self):
    """Whether SuperBackbone is enabled (`superBackboneEnabled` in the VSD API)."""
    return self._super_backbone_enabled

@super_backbone_enabled.setter
def super_backbone_enabled(self, value):
    """Enable or disable SuperBackbone."""
    self._super_backbone_enabled = value

@property
def owner(self):
    """Identifier of the user that created this object."""
    return self._owner

@owner.setter
def owner(self, value):
    """Set the creating user's identifier."""
    self._owner = value

@property
def export_limit(self):
    """Maximum number of routes (prefixes) exportable into OSPF from the route table (`exportLimit` in the VSD API)."""
    return self._export_limit

@export_limit.setter
def export_limit(self, value):
    """Set the route export limit."""
    self._export_limit = value
@property
def export_to_overlay(self):
    """Whether routes learnt through BGP and OSPF are exported to VSC (`exportToOverlay` in the VSD API).

    Notes:
        The same flag also exists on the NSGRoutingPolicyBinding entity,
        which takes precedence over this one.
    """
    return self._export_to_overlay

@export_to_overlay.setter
def export_to_overlay(self, value):
    """Set the export-to-overlay flag."""
    self._export_to_overlay = value

@property
def external_id(self):
    """External object ID for third-party integration (`externalID` in the VSD API)."""
    return self._external_id

@external_id.setter
def external_id(self, value):
    """Set the external object ID."""
    self._external_id = value

@property
def external_preference(self):
    """Preference value for OSPF external routes (`externalPreference` in the VSD API)."""
    return self._external_preference

@external_preference.setter
def external_preference(self, value):
    """Set the external-route preference."""
    self._external_preference = value
| {
"content_hash": "54e29f2bdd237657bdf1063349040f4d",
"timestamp": "",
"source": "github",
"line_count": 586,
"max_line_length": 296,
"avg_line_length": 30.358361774744026,
"alnum_prop": 0.5749859471613266,
"repo_name": "nuagenetworks/vspk-python",
"id": "17e1d09bf110e24206b41ecb4d0489c6f029ac6d",
"size": "19403",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vspk/v6/nuospfinstance.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "12909327"
}
],
"symlink_target": ""
} |
import os


class BaseConfig(object):
    """Shared defaults inherited by every runtime configuration."""
    DEBUG = False
    TESTING = False
    SECRET_KEY = "DEFINE SECRET_KEY ENV VAR IN PRODUCTION"
    ELASTICSEARCH_URL = os.getenv("ELASTICSEARCH_URL")
    # flask-restful tweaks: no 404 help text, no default error messages
    ERROR_404_HELP = False
    ERROR_INCLUDE_MESSAGE = False


class DevelopmentConfig(BaseConfig):
    """Local development: debug enabled."""
    DEBUG = True


class TestConfig(BaseConfig):
    """Test runs: debug + testing mode; SECRET_KEY overridable via env."""
    DEBUG = True
    TESTING = True
    SECRET_KEY = os.getenv("SECRET_KEY", default=BaseConfig.SECRET_KEY)


class ProductionConfig(BaseConfig):
    """Production: SECRET_KEY is expected from the environment."""
    DEBUG = False
    SECRET_KEY = os.getenv("SECRET_KEY")
"content_hash": "f15dd93dc7cb47cf7f02c950d4f6c48b",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 71,
"avg_line_length": 25.666666666666668,
"alnum_prop": 0.7031539888682746,
"repo_name": "willrogerpereira/willbuyer",
"id": "5379340b2eababff559b5d39040ab868ba1bef21",
"size": "539",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "willstores/config.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4847"
},
{
"name": "HTML",
"bytes": "15697"
},
{
"name": "JavaScript",
"bytes": "38943"
},
{
"name": "Python",
"bytes": "83645"
}
],
"symlink_target": ""
} |
def test_1():
    """Smoke test: importing the Pymacs `setup` module must not raise."""
    import setup
"content_hash": "374e247674f03e422f8458a8b32d1e68",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 16,
"avg_line_length": 15.5,
"alnum_prop": 0.6129032258064516,
"repo_name": "ojcastillo/emacs_config",
"id": "8677697a84479eb5339bffcc3d2660d4327a9dde",
"size": "88",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "extensions/pymacs/tests/t10_pyfile_loads.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Emacs Lisp",
"bytes": "33117"
},
{
"name": "Python",
"bytes": "97756"
}
],
"symlink_target": ""
} |
from swgpy.object import *


def create(kernel):
    """Build the Creature template for the dressed Garyn thief (Zabrak male)."""
    result = Creature()

    result.template = "object/mobile/shared_dressed_garyn_theif_zabrak_male_01.iff"
    result.attribute_template_id = 9
    result.stfName("npc_name", "zabrak_base_male")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return result
"content_hash": "36d37b154a35699a53a95b1558bc7860",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 80,
"avg_line_length": 24.23076923076923,
"alnum_prop": 0.6984126984126984,
"repo_name": "obi-two/Rebelion",
"id": "00754062754d7b3ecb36dac912e3b19ef01725ee",
"size": "460",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "data/scripts/templates/object/mobile/shared_dressed_garyn_theif_zabrak_male_01.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11818"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2293610"
},
{
"name": "CMake",
"bytes": "39727"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7499185"
},
{
"name": "SQLPL",
"bytes": "41864"
}
],
"symlink_target": ""
} |
import sys

# Make sibling packages importable when launched from this directory.
sys.path.append('.')
sys.path.append('..')

from tasks import execute_home_task

if __name__ == '__main__':
    # Kick off the initial home-timeline crawl task.
    execute_home_task()
| {
"content_hash": "078db496c6f8751b0621523b20760ac4",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 35,
"avg_line_length": 16,
"alnum_prop": 0.625,
"repo_name": "yzsz/weibospider",
"id": "3c74145c40d192d75ee9e0c315db40cca8278814",
"size": "144",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "first_task_execution/home_first.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "1401"
},
{
"name": "Python",
"bytes": "187164"
},
{
"name": "Shell",
"bytes": "623"
}
],
"symlink_target": ""
} |
"""Test the ZMQ API."""
import configparser
import os
import struct
from test_framework.test_framework import PlanbcoinTestFramework, SkipTest
from test_framework.util import (assert_equal,
bytes_to_hex_str,
)
class ZMQTest (PlanbcoinTestFramework):
    """Functional test for the -zmqpubhashtx / -zmqpubhashblock notification interface."""

    def __init__(self):
        super().__init__()
        # Node 0 publishes ZMQ notifications; node 1 is a plain peer.
        self.num_nodes = 2

    def setup_nodes(self):
        """Start the nodes with ZMQ publishing enabled on node 0 only."""
        # Try to import python3-zmq. Skip this test if the import fails.
        try:
            import zmq
        except ImportError:
            raise SkipTest("python3-zmq module not available.")

        # Check that planbcoin has been built with ZMQ enabled
        config = configparser.ConfigParser()
        if not self.options.configfile:
            self.options.configfile = os.path.dirname(__file__) + "/../config.ini"
        config.read_file(open(self.options.configfile))

        if not config["components"].getboolean("ENABLE_ZMQ"):
            raise SkipTest("planbcoind has not been built with zmq enabled.")

        # Subscribe to both topics before the nodes start so that no
        # notification can be missed; fail after 60s instead of hanging.
        self.zmqContext = zmq.Context()
        self.zmqSubSocket = self.zmqContext.socket(zmq.SUB)
        self.zmqSubSocket.set(zmq.RCVTIMEO, 60000)
        self.zmqSubSocket.setsockopt(zmq.SUBSCRIBE, b"hashblock")
        self.zmqSubSocket.setsockopt(zmq.SUBSCRIBE, b"hashtx")
        ip_address = "tcp://127.0.0.1:29067"
        self.zmqSubSocket.connect(ip_address)
        # Only node 0 gets the -zmqpub* arguments.
        extra_args = [['-zmqpubhashtx=%s' % ip_address, '-zmqpubhashblock=%s' % ip_address], []]
        self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir, extra_args)

    def run_test(self):
        """Run the ZMQ checks, always tearing the zmq context down afterwards."""
        try:
            self._zmq_test()
        finally:
            # Destroy the zmq context
            self.log.debug("Destroying zmq context")
            self.zmqContext.destroy(linger=None)

    def _zmq_test(self):
        """Check topic, payload and per-topic sequence numbers of notifications."""
        # One block on node 0: expect one hashtx and one hashblock message,
        # each topic starting its own sequence numbering at 0.
        genhashes = self.nodes[0].generate(1)
        self.sync_all()

        self.log.info("Wait for tx")
        msg = self.zmqSubSocket.recv_multipart()
        topic = msg[0]
        assert_equal(topic, b"hashtx")
        body = msg[1]
        # Final multipart frame carries a little-endian uint32 sequence number.
        msgSequence = struct.unpack('<I', msg[-1])[-1]
        assert_equal(msgSequence, 0)  # must be sequence 0 on hashtx

        self.log.info("Wait for block")
        msg = self.zmqSubSocket.recv_multipart()
        topic = msg[0]
        body = msg[1]
        msgSequence = struct.unpack('<I', msg[-1])[-1]
        assert_equal(msgSequence, 0)  # must be sequence 0 on hashblock
        blkhash = bytes_to_hex_str(body)
        assert_equal(genhashes[0], blkhash)  # blockhash from generate must be equal to the hash received over zmq

        self.log.info("Generate 10 blocks (and 10 coinbase txes)")
        n = 10
        genhashes = self.nodes[1].generate(n)
        self.sync_all()

        # 2 messages per block (hashtx + hashblock); collect only the block
        # hashes and verify the hashblock sequence keeps incrementing.
        zmqHashes = []
        blockcount = 0
        for x in range(n * 2):
            msg = self.zmqSubSocket.recv_multipart()
            topic = msg[0]
            body = msg[1]
            if topic == b"hashblock":
                zmqHashes.append(bytes_to_hex_str(body))
                msgSequence = struct.unpack('<I', msg[-1])[-1]
                assert_equal(msgSequence, blockcount + 1)
                blockcount += 1

        for x in range(n):
            assert_equal(genhashes[x], zmqHashes[x])  # blockhash from generate must be equal to the hash received over zmq

        self.log.info("Wait for tx from second node")
        # test tx from a second node
        hashRPC = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1.0)
        self.sync_all()

        # now we should receive a zmq msg because the tx was broadcast
        msg = self.zmqSubSocket.recv_multipart()
        topic = msg[0]
        body = msg[1]
        assert_equal(topic, b"hashtx")
        hashZMQ = bytes_to_hex_str(body)
        msgSequence = struct.unpack('<I', msg[-1])[-1]
        assert_equal(msgSequence, blockcount + 1)

        assert_equal(hashRPC, hashZMQ)  # txid from sendtoaddress must be equal to the hash received over zmq
if __name__ == '__main__':
    # Entry point: run the ZMQ functional test.
    ZMQTest().main()
| {
"content_hash": "fabfa058c923e61f79dc72b9bf05cc79",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 123,
"avg_line_length": 37.19090909090909,
"alnum_prop": 0.5939868002933268,
"repo_name": "planbcoin/planbcoin",
"id": "b436015895a4f2a65d1ee3d833b504d36d1f4108",
"size": "4302",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/functional/zmq_test.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28452"
},
{
"name": "C",
"bytes": "692639"
},
{
"name": "C++",
"bytes": "5272684"
},
{
"name": "HTML",
"bytes": "21860"
},
{
"name": "Java",
"bytes": "30306"
},
{
"name": "M4",
"bytes": "191906"
},
{
"name": "Makefile",
"bytes": "112818"
},
{
"name": "Objective-C",
"bytes": "3767"
},
{
"name": "Objective-C++",
"bytes": "7235"
},
{
"name": "Protocol Buffer",
"bytes": "2336"
},
{
"name": "Python",
"bytes": "1197315"
},
{
"name": "QMake",
"bytes": "758"
},
{
"name": "Shell",
"bytes": "57700"
}
],
"symlink_target": ""
} |
"""
Command and control daemon that runs on each node in the metadata service.
It is responsible for managing metadata servers on its node.
"""
import sys
import os
import subprocess
import socket
import time
import random
import signal
import traceback
import StringIO
import threading
import shutil
sys.path.append("/usr/share/SMDS")
from SMDS.xmlrpc_ssl import *
from SMDS.faults import *
from SMDS.config import Config
import SMDS.logger as logger
# global configuration
conf = None
# track available ports for MS volumes
ms_portset = None
# track volume IDs
vol_id_counter = None
# track UIDs
uid_counter = None
# Path helpers: each derives a well-known file or directory location from the
# daemon configuration and/or a volume's control root.

def VOLUME_CTL_ROOT( c, mdserver_dict ):
    """Control directory for one volume: <MD_CTL_DIR>/volumes/<NAME>."""
    return os.path.join( c['MD_CTL_DIR'], "volumes", str( mdserver_dict['NAME'] ) )

def VOLUME_CONF_PATH( ctl_root ):
    """Metadata-server config file inside a control directory."""
    return os.path.join( ctl_root, "md_server.conf" )

def LOGFILE_PATH( ctl_root ):
    """Metadata-server log file inside a control directory."""
    return os.path.join( ctl_root, "md_logfile.txt" )

def PIDFILE_PATH( ctl_root ):
    """Metadata-server PID file inside a control directory."""
    return os.path.join( ctl_root, "md_pid" )

def SECRETS_PATH( ctl_root ):
    """Secrets file inside a control directory."""
    return os.path.join( ctl_root, "md_secrets.txt" )

def VOLUME_NAMES( c ):
    """Names of every volume that has a control directory on disk."""
    return os.listdir( os.path.join( c['MD_CTL_DIR'], "volumes" ) )
class PortSet:
    """
    Tracks which TCP ports in [low, high] have been handed out to MS
    processes; all mutations are serialized on an internal lock.
    """

    def __init__( self, low, high ):
        self.low = low
        self.high = high
        self.allotted_ports = []
        self.allotted_ports_lock = threading.Lock()

    def load( self, c ):
        """
        Seed the set from disk: read every existing volume's config and
        register the port it claims.  Broken configs are skipped.
        """
        for volume in VOLUME_NAMES( c ):
            # control root and config path for this volume
            ctl_root = VOLUME_CTL_ROOT( c, {'NAME': volume} )
            vol_conf_path = VOLUME_CONF_PATH( ctl_root )
            try:
                vol_conf = read_config( vol_conf_path, ['PORTNUM'] )
                if vol_conf:
                    self.register_ports( [ int(vol_conf['PORTNUM']) ] )
            except:
                # best-effort: ignore unreadable/incomplete configs
                pass

    def register_ports( self, portlist ):
        """
        Atomically claim every port in portlist.
        Returns True if none overlapped; on any overlap nothing is
        claimed and False is returned.
        """
        ok = True
        with self.allotted_ports_lock:
            for port in portlist:
                if port in self.allotted_ports:
                    ok = False
                    break
            if ok:
                self.allotted_ports += portlist
        return ok

    def unregister_ports( self, portlist ):
        """Release the given ports; ports not in the set are ignored."""
        with self.allotted_ports_lock:
            for port in portlist:
                try:
                    self.allotted_ports.remove( port )
                except:
                    pass

    def unavailable_ports( self ):
        """Return a copy of the currently claimed port list."""
        return list( self.allotted_ports )
class AtomicCounter:
    """
    Thread-safe counter, optionally persisted to a file.

    The backing file (if given) holds the decimal value; it is kept open
    for the counter's lifetime and rewritten on every mutation.
    """

    def __init__(self, start=0, save=None):
        self.value = start
        self.lock = threading.Lock()
        self.fd = None
        if save != None:
            # Restore a previously persisted value if the file exists and
            # holds a parseable integer; otherwise start fresh at `start`.
            if os.path.isfile( save ):
                self.fd = open(save, "r+")
                data = self.fd.read()
                try:
                    self.value = int(data)
                except Exception as e:
                    # corrupt contents: truncate and keep the start value
                    self.fd.close()
                    self.fd = open(save, "w")
            else:
                self.fd = open(save, "w")

    def save_nolock( self ):
        # Persist the current value.  Caller must hold self.lock.
        if self.fd != None:
            self.fd.seek(0)
            self.fd.write( str(self.value) )
            # BUG FIX: truncate after the write -- previously a shorter
            # value left stale trailing digits from a longer one behind
            # (e.g. writing "5" over "1000" persisted "5000").
            self.fd.truncate()
            self.fd.flush()

    def save( self ):
        """Persist the current value under the lock."""
        if self.fd != None:
            self.lock.acquire()
            self.save_nolock()
            self.lock.release()

    def add( self, c ):
        """Atomically add c (may be negative) and persist."""
        self.lock.acquire()
        self.value += c
        self.save_nolock()
        self.lock.release()

    def next( self ):
        """Atomically return the current value and post-increment it."""
        self.lock.acquire()
        ret = self.value
        self.value += 1
        self.save_nolock()
        self.lock.release()
        return ret

    def get( self ):
        # unlocked read: callers tolerate a slightly stale value
        return self.value
# make sure the required attributes are present
def conf_required( c, required_attrs ):
    """True iff every attribute in required_attrs is a key of c."""
    return all( attr in c for attr in required_attrs )

# make sure only the given attributes are present
def conf_forbidden( c, allowed_attrs ):
    """True iff c contains any key outside allowed_attrs."""
    return any( key not in allowed_attrs for key in c.keys() )
def read_config( config_file, required_attrs=None ):
    """
    Read the configuration, and assert that every required attribute
    is present.

    Lines are KEY=value; '#' lines and blank lines are skipped.  A key
    that appears multiple times collects its values into a list.  Values
    are quote-stripped but NOT whitespace-stripped (existing behavior).

    Returns the config dict, or None if the file cannot be opened or any
    line is malformed.  Raises MDInternalError when a required attribute
    is missing.
    """
    try:
        fd = open( config_file, "r" )
    except Exception as e:
        logger.exception( e, "Could not open %s" % config_file )
        return None

    c = {}
    valid = True

    while True:
        line = fd.readline()
        if len(line) == 0:
            break

        line = line.strip()
        if len(line) == 0:
            continue
        if line[0] == '#':
            continue

        parts = line.split('=')
        if len(parts) < 2:
            logger.error( "Invalid config line %s" % config_file )
            valid = False

        # once a bad line is seen, remaining lines are read but not stored
        if valid:
            varname = parts[0].strip()

            # collate values (strip surrounding quotes only)
            values = [ value.strip('"') for value in parts[1:] ]
            if len(values) == 1:
                values = values[0]   # this is a scalar

            # keep list of values if variable occurs multiple times
            if varname in c:
                if not isinstance( c[varname], list ):
                    c[varname] = [ c[varname] ]
                # BUG FIX: the second occurrence of a key was silently
                # dropped -- the append only ran from the third one on
                c[varname].append( values )
            else:
                c[varname] = values

    fd.close()

    if not valid:
        return None

    # verify required attributes (NOTE: assert is stripped under -O)
    attr = None
    try:
        if required_attrs:
            for rc in required_attrs:
                attr = rc
                assert rc in c
    except AssertionError:
        raise MDInternalError( "read_config assert %s failed" % attr )

    return c
def make_secret_entry( user_dict ):
    """
    Render one secrets-file line ("uid:username:password") for a user dict.
    Raises MDInternalError if any required field is missing.
    """
    try:
        for field in ('uid', 'username', 'password'):
            assert field in user_dict
    except:
        raise MDInternalError( "make_secret_entry assertion failed" )

    return "%s:%s:%s" % (user_dict['uid'], user_dict['username'], user_dict['password'])
def cleanup_ctl_dir( ctl_dir ):
    """
    Best-effort removal of a control directory (or stray file) and all
    of its contents.  Never raises.

    SECURITY FIX: previously shelled out via os.system("/bin/rm -rf %s"),
    which allowed shell injection through ctl_dir; shutil.rmtree removes
    the tree without a shell.
    """
    try:
        if os.path.isdir( ctl_dir ):
            shutil.rmtree( ctl_dir, ignore_errors=True )
        elif os.path.lexists( ctl_dir ):
            os.unlink( ctl_dir )
    except:
        pass
def generate_config_file( defaults_file, template_file, extra_config=None ):
    """
    Generate a configuration file from a template file, default values file, and extra configuration data

    The template is a Python script executed with the merged config dict
    as its global namespace; it emits the final config text by calling
    OUT() (and error text via ERR()).

    Returns (config_data, config_err) as two strings.
    Raises MDMethodFailed if the template cannot be read or executed.
    """

    # read the default configuration file and add in the extras
    conf_vars = {}
    try:
        conf_vars = read_config( defaults_file )
    except:
        # best-effort: a missing/unreadable defaults file leaves conf_vars empty
        pass

    if extra_config and isinstance( extra_config, dict):
        conf_vars.update( extra_config )

    stdout = StringIO.StringIO()
    stderr = StringIO.StringIO()

    # rendering function
    def OUT( s ):
        stdout.write( s )
        stdout.write("\n")

    def ERR( s ):
        stderr.write( s )
        stderr.write("\n")

    # evaluate our config-generating script
    # SECURITY NOTE: exec of an on-disk template runs arbitrary code as
    # this daemon -- the template file must only be writable by trusted users.
    try:
        template_fd = open(template_file, "r")
        template_code = template_fd.read()
        template_fd.close()

        conf_vars['OUT'] = OUT
        conf_vars['ERR'] = ERR

        exec template_code in conf_vars

        config_data = stdout.getvalue()
        config_err = stderr.getvalue()
    except Exception, e:
        raise MDMethodFailed( "generate_config_file", e )

    return (config_data, config_err)
def write_config_file( output_file, data ):
    """
    Write out a config file.

    FIX: use a context manager so the handle is closed even if the
    write fails partway (it previously leaked on error).
    Raises MDMethodFailed on any I/O failure.
    """
    try:
        with open( output_file, "w" ) as f:
            f.write( data )
    except Exception as e:
        raise MDMethodFailed( "write_config_file", e )

    return
def read_secrets_file( secrets_file ):
    """
    Read a secrets file and return a list of user dicts.

    Each non-empty line is "uid:username:password"; malformed lines are
    logged and skipped.  Raises MDMethodFailed if the file cannot be
    opened.

    FIXES: the malformed-line warning referenced the undefined name
    `secret_line`, raising NameError on any bad line; the file handle
    was also never closed.
    """
    try:
        sf = open( secrets_file, "r" )
    except Exception as e:
        raise MDMethodFailed( "read_secrets_file", e )

    users = []
    lineno = 0
    try:
        while True:
            line = sf.readline()
            lineno += 1
            if len(line) == 0:
                break

            line = line.strip()
            if len(line) == 0:
                continue

            parts = line.split(":")
            if len(parts) != 3:
                # BUG FIX: previously formatted with undefined 'secret_line'
                logger.warn("Invalid line %s in %s" % (lineno, secrets_file))
                continue

            users.append( user_entry( int(parts[0]), parts[1], parts[2] ) )
    finally:
        sf.close()

    return users
def write_secrets_file( user_list, secrets_file ):
    """
    Generate a secrets file from a list of user dictionaries.

    One "uid:username:password" line per user (see make_secret_entry).
    Raises MDMethodFailed on open or write failure.

    FIX: the handle is now closed even when a write fails (it previously
    leaked on the error path).
    """
    # create the secrets file
    try:
        sf = open( secrets_file, "w" )
    except Exception as e:
        raise MDMethodFailed( "write_secrets_file", e )

    try:
        for user in user_list:
            secret_line = make_secret_entry( user )
            try:
                sf.write( secret_line + "\n" )
            except Exception as e:
                raise MDMethodFailed( "write_secrets_file", e )
    finally:
        sf.close()

    return
def user_entry( uid, username, password_hash ):
    """Build the dict record for an ordinary user."""
    return dict(uid=uid, username=username, password=password_hash)

def va_entry( username, password_hash ):
    """Build the dict record for a volume admin -- always uid 0."""
    return dict(uid=0, username=username, password=password_hash)
def get_open_ports( ctl_dir, port_low, port_high, num_ports ):
    """
    Get a list of available port numbers, using netstat

    Picks num_ports ports at random from [port_low, port_high] that are
    neither bound on the host (per netstat) nor already claimed in the
    global ms_portset; the picks are registered in ms_portset before
    returning.  Raises MDMethodFailed if netstat cannot be run, or
    MDInternalError if too few free ports remain.
    """
    global ms_portset

    # get a list of open ports
    try:
        open_ports_sh = "netstat -tuan --numeric-hosts | tail -n +3 | awk '{n=split($4,a,\":\"); print a[n]}'"
        proc = subprocess.Popen( open_ports_sh, stdout=subprocess.PIPE, shell=True )
        ports_str, _ = proc.communicate()
        proc.wait()
    except Exception, e:
        raise MDMethodFailed( "get_open_ports", e )

    used_ports = [int(x) for x in ports_str.split()]

    # sorted "fence" of unusable ports, with sentinels just outside the
    # range so every free port lies strictly between two fence entries
    unavailable_ports = [port_low - 1] + filter( lambda x: x >= port_low and x < port_high, used_ports ) + ms_portset.unavailable_ports() + [port_high + 1]
    unavailable_ports.sort()

    if port_high - port_low - len(unavailable_ports) < num_ports:
        raise MDInternalError( "get_open_ports: not enough open ports" )

    ret = []
    for i in xrange(0, num_ports):
        # pick a random rank c among the free ports, then walk the gaps
        # between consecutive fence entries to map that rank to a port
        c = random.randint( 0, port_high - port_low - (len(unavailable_ports) - 2) )
        p = 0      # free ports skipped so far
        h = 0      # left fence entry of the current gap
        s = 0      # selected port number
        for k in xrange(0,len(unavailable_ports)-1):
            h = unavailable_ports[k]
            ran = unavailable_ports[k+1] - unavailable_ports[k] - 1  # free ports in this gap
            if p + ran < c:
                p += ran
                continue
            else:
                s = h + 1 + c - p
                break

        ret.append( s )
        # treat the pick as a fence entry so it cannot be chosen again
        unavailable_ports.append( s )
        unavailable_ports.sort()

    # claim the picks globally; fails if another thread raced us to them
    rc = ms_portset.register_ports( ret )
    if not rc:
        raise MDInternalError( "get_open_ports: not enough open ports" )

    return ret
def install_volume_config( mdserver_dict, users_list, extra_params ):
    """
    Install the configuration files for a volume.

    mdserver_dict: volume attributes (NAME is used for path lookup)
    users_list:    user dicts for the secrets file, or None to skip it
    extra_params:  extra KEY=value pairs merged over mdserver_dict in the
                   generated config; mutated to gain SECRETS_FILE when a
                   secrets file is written

    Returns 0 on success; propagates exceptions from secrets/config writes.
    """
    global conf

    ctl_root = VOLUME_CTL_ROOT( conf, mdserver_dict )

    # create the secrets file, if we have secrets data
    if users_list != None:
        secrets_file = SECRETS_PATH( ctl_root )
        try:
            write_secrets_file( users_list, secrets_file )
            # point the generated config at the secrets file just written
            extra_params['SECRETS_FILE'] = secrets_file
        except Exception, e:
            raise e

    # create a config file
    if mdserver_dict != None or extra_params != None:
        config_file = VOLUME_CONF_PATH( ctl_root )
        try:
            # merge volume attributes and extras (extras win on conflict)
            all_config = {}
            if mdserver_dict != None:
                all_config.update( mdserver_dict )
            if extra_params != None:
                all_config.update( extra_params )

            out, err = generate_config_file( conf['MD_CONFIG_DEFAULTS'], conf['MD_CONFIG_TEMPLATE'], all_config )
            write_config_file( config_file, out )
        except Exception, e:
            raise e

    return 0
def get_volume_pid( md_pidfile ):
    """
    Return the PID stored in md_pidfile as an int.
    Returns None if the file cannot be opened; raises MDInternalError
    if its contents are not a valid integer.
    """
    try:
        handle = open(md_pidfile, "r")
    except:
        return None

    try:
        contents = handle.read()
        pid = int( contents )
        handle.close()
        return pid
    except:
        raise MDInternalError( "corrupt PID file" )
def is_volume_running( md_pidfile ):
    """
    Determine whether the metadata server recorded in md_pidfile is
    currently running, using kill(pid, 0) as an existence probe.
    """
    global conf

    pid = get_volume_pid( md_pidfile )
    if pid is None:
        return False

    try:
        os.kill( pid, 0 )
        return True
    except:
        # no such PID
        return False
def get_io_urls( md_conf ):
    """
    Build the (read_url, write_url) pair for a volume from its config.
    https is used when both SSL_PKEY and SSL_CERT are configured; the
    write URL is None unless AUTH_OPERATIONS permits writes.
    """
    scheme = 'http://'
    if 'SSL_PKEY' in md_conf and 'SSL_CERT' in md_conf:
        scheme = "https://"

    base = scheme + socket.gethostname() + ":" + str(md_conf['PORTNUM']) + "/"

    read_url = base
    write_url = None
    if md_conf['AUTH_OPERATIONS'] in ("readwrite", "write"):
        write_url = base

    return (read_url, write_url)
def reload_volume( mdserver_name ):
    """
    Reload a volume's configuration--send it a message to reload.
    Raise an exception if the volume isn't running or can't be reached

    Returns the exit code of the '<MD_BINARY> -k <pid>' invocation
    (0 on success).
    """
    global conf

    ctl_root = VOLUME_CTL_ROOT( conf, {'NAME': mdserver_name} )
    pidfile_path = PIDFILE_PATH( ctl_root )

    # extract the pid
    pid = get_volume_pid( pidfile_path )
    if pid == None:
        raise MDMethodFailed( "reload_volume", "Could not get volume PID")

    # reload
    # NOTE: the prints below are debugging output on this daemon's stdout
    print "reload_volume: about to reload %s (pid = %s)" % (mdserver_name, pid)
    os.system("ps aux | grep mdserverd")
    print "command: %s -k %s" % (conf['MD_BINARY'], str(pid))

    # '-k <pid>' asks the server binary to signal the running instance
    # to re-read its configuration
    md_proc = subprocess.Popen( [conf['MD_BINARY'], '-k', str(pid)], close_fds = True )
    rc = md_proc.wait()

    # give the server a moment to pick the new config up before callers poke it
    time.sleep(1.0)

    print "reload_volume: reloaded, rc = %s" % rc
    os.system("ps aux | grep mdserverd")

    return rc
def create_volume( mdserver_name, mdserver_dict, va_username, va_pwhash ):
    """
    Given a dictionary containing the fields of a metadata server
    and a list of dictionaries describing each user to run this server,
    set up a metadata server (but don't start it)

    va_username / va_pwhash: credentials of the initial volume admin.
    NOTE(review): paths derive from mdserver_dict['NAME'], which is
    assumed to match mdserver_name -- confirm at call sites.
    Returns 1 on success; raises MDInvalidArgument / MDMethodFailed.
    """
    global conf
    global vol_id_counter

    required_attrs = [
        'AUTH_OPERATIONS',
        'BLOCKING_FACTOR',
    ]

    # for now, fill in defaults
    if 'AUTH_OPERATIONS' not in mdserver_dict.keys():
        mdserver_dict['AUTH_OPERATIONS'] = "readwrite"

    if 'BLOCKING_FACTOR' not in mdserver_dict.keys():
        mdserver_dict['BLOCKING_FACTOR'] = 102400

    if not conf_required( mdserver_dict, required_attrs ):
        raise MDInvalidArgument( "Missing attributes. Required: %s" % (', '.join(required_attrs)), 'create_volume' )

    # create the directory to store information on
    ctl_root = VOLUME_CTL_ROOT( conf, mdserver_dict )
    if os.path.isdir( ctl_root ):
        raise MDMethodFailed( "create_volume", "Volume '%s' already exists" % mdserver_name )

    # start from a clean slate; on any later failure the directory is torn down
    cleanup_ctl_dir( ctl_root )
    try:
        os.makedirs( ctl_root )
    except Exception, e:
        cleanup_ctl_dir( ctl_root )
        raise MDMethodFailed( "create_volume setup", e )

    # create the master copy
    mc_root = os.path.join( ctl_root, "master_copy" )
    try:
        os.makedirs( mc_root )
    except Exception, e:
        cleanup_ctl_dir( ctl_root )
        raise MDMethodFailed( "create_volume mcroot", e )

    # create the PID file path (for the config)
    md_pidfile = PIDFILE_PATH( ctl_root )

    # get an HTTP and query port
    md_portnums = get_open_ports( conf['MD_CTL_DIR'], int(conf['MD_CTL_PORT_LOW']), int(conf['MD_CTL_PORT_HIGH']), 2 )
    http_portnum = md_portnums[0]
    query_portnum = md_portnums[1]

    # allocate a persistent, unique volume ID
    volume_id = vol_id_counter.next()

    try:
        # install this volume's configuration
        params = {
            'MDROOT': mc_root,
            'PORTNUM': str(http_portnum),
            'QUERY_PORTNUM': str(query_portnum),
            'PIDFILE': md_pidfile,
            'AUTH_OPERATIONS':mdserver_dict['AUTH_OPERATIONS'],
            'BLOCKING_FACTOR':mdserver_dict['BLOCKING_FACTOR'],
            'SSL_PKEY': conf['MD_SSL_PKEY'],
            'SSL_CERT': conf['MD_SSL_CERT'],
            'VOLUME_ID': volume_id
        }

        read_url, write_url = get_io_urls( params )
        if read_url:
            params[ 'METADATA_READ_URL' ] = read_url
        if write_url:
            params[ 'METADATA_WRITE_URL' ] = write_url

        # the volume admin is the only user at creation time
        user_list = [va_entry( va_username, va_pwhash )]

        install_volume_config( mdserver_dict, user_list, params )
    except Exception, e:
        cleanup_ctl_dir( ctl_root )
        raise e

    return 1
# lock to prevent concurrent create_VACE operations
VACE_lock = threading.Lock()

def create_VACE( mdserver_name, username, pwhash, role ):
    """
    Associate a given user with a role in the context of a given volume. Return 1 on success.

    role "VA" or "SA" (volume/syndicate admin) maps to uid 0; any other
    role allocates a fresh uid from the global uid_counter.
    Raises MDInvalidArgument if the username already exists, or
    MDInternalError if the running server cannot be told to reload.
    """
    global conf
    global uid_counter

    ctl_root = VOLUME_CTL_ROOT( conf, {'NAME': mdserver_name} )
    secrets_file = SECRETS_PATH( ctl_root )

    # serialize all secrets-file read/modify/write cycles
    VACE_lock.acquire()

    users = []
    try:
        # is this user represented?
        users = read_secrets_file( secrets_file )
    except Exception, e:
        VACE_lock.release()
        raise e

    for user in users:
        if user['username'] == username:
            VACE_lock.release()
            raise MDInvalidArgument( "User '%s' already exists\n" % username )

    new_user = None
    if role == "VA" or role == "SA":
        # volume or syndicate admin--they get all rights
        new_user = va_entry( username, pwhash )
    else:
        uid = uid_counter.next()
        new_user = user_entry( uid, username, pwhash )

    users.append( new_user )

    try:
        write_secrets_file( users, secrets_file )
    except Exception, e:
        VACE_lock.release()
        raise e

    VACE_lock.release()

    # a running server must re-read the secrets file to see the new user
    if is_volume_running( PIDFILE_PATH( ctl_root ) ):
        rc = reload_volume( mdserver_name )
        if rc != 0:
            # NOTE(review): message passed as two args rather than
            # %-formatted -- confirm MDInternalError accepts this
            raise MDInternalError( "Failed to reload '%s'\n", mdserver_name )

    return 1
def delete_VACE( mdserver_name, username_or_id ):
    """
    Delete the user from a volume. Return 1 on success.

    username_or_id is matched against both the 'username' and 'uid'
    fields of each secrets entry.  Raises MDInvalidArgument if no such
    user exists; MDInternalError if a running server fails to reload.
    """
    global conf

    ctl_root = VOLUME_CTL_ROOT( conf, {'NAME': mdserver_name} )
    secrets_file = SECRETS_PATH( ctl_root )

    # serialize against concurrent secrets-file updates
    VACE_lock.acquire()

    try:
        users = read_secrets_file( secrets_file )
    except Exception, e:
        VACE_lock.release()
        raise e

    # locate the user to remove
    found_idx = -1
    for i in xrange(0,len(users)):
        user = users[i]
        if user['username'] == username_or_id or user['uid'] == username_or_id:
            found_idx = i
            break

    if found_idx == -1:
        # not found
        VACE_lock.release()
        raise MDInvalidArgument( "No such user '%s'" % username_or_id )

    users.remove( users[found_idx] )

    try:
        write_secrets_file( users, secrets_file )
    except Exception, e:
        VACE_lock.release()
        raise e

    VACE_lock.release()

    # a running server must re-read the secrets file to drop the user
    if is_volume_running( PIDFILE_PATH( ctl_root ) ):
        rc = reload_volume( mdserver_name )
        if rc != 0:
            # NOTE(review): message passed as two args rather than
            # %-formatted -- confirm MDInternalError accepts this
            raise MDInternalError( "Failed to reload '%s'\n", mdserver_name )

    return 1
def start_volume( mdserver_name ):
    """
    Start up an existing metadata server for a volume. Return 1 on success.

    Idempotent: returns 1 immediately if the server is already running.
    Raises MDInternalError if the volume is not fully set up or a stale
    PID file exists, MDMethodFailed if the binary fails to start.
    """
    global conf

    ctl_root = VOLUME_CTL_ROOT( conf, {'NAME': mdserver_name} )
    config_file = VOLUME_CONF_PATH( ctl_root )
    md_logfile = LOGFILE_PATH( ctl_root )
    md_pidfile = PIDFILE_PATH( ctl_root )

    # Get this server's configuration file
    try:
        md_conf = read_config( config_file )
    except Exception, e:
        raise MDMethodFailed( 'start_volume', "read config exception = '%s'" % e )

    # make sure we're not running...
    if is_volume_running( md_pidfile ):
        return 1

    # sanity-check the on-disk volume layout before launching
    try:
        assert os.path.isdir( ctl_root ), "Control directory does not exist"
        assert os.path.isfile( config_file ), "Config file does not exist"
        assert os.path.isdir( md_conf['MDROOT'] ), "Master copy '%s' does not exist" % (md_conf['MDROOT'])
    except AssertionError, e:
        raise MDInternalError( "Server is not fully set up: %s" % str(e) )

    # a leftover PID file (with no live process) means an unclean state
    try:
        assert not os.path.isfile( md_pidfile )
    except AssertionError, e:
        raise MDInternalError( "Server is already running" )

    # fire up the binary
    # NOTE(review): wait() returning while the server keeps running implies
    # the binary daemonizes itself and writes md_pidfile -- confirm
    md_proc = subprocess.Popen( [conf['MD_BINARY'], '-c', config_file, '-l', md_logfile ], close_fds = True )
    rc = md_proc.wait()

    if rc != 0:
        # could not start the server
        # make sure we remove runtime files, just in case
        try:
            os.unlink( md_pidfile )
        except:
            pass

        raise MDMethodFailed( "start_volume", "rc = %s when starting metadata server" % rc )

    return 1
def stop_volume( mdserver_name ):
"""
Stop a running metadata server.
Return 1 on success
Return 0 if the server isn't running
"""
global conf
ctl_root = VOLUME_CTL_ROOT( conf, {'NAME': mdserver_name} )
md_pidfile = PIDFILE_PATH( ctl_root )
md_logfile = LOGFILE_PATH( ctl_root )
# if we're not running, then do nothing
if not is_volume_running( md_pidfile ):
# make sure these files don't exist
try:
os.unlink( md_pidfile )
except:
pass
return 0
try:
assert os.path.isdir( ctl_root ), "Control directory does not exist"
except AssertionError, e:
raise MDInternalError( "Server is not correctly set up: %s" % str(e) )
pid = get_volume_pid( md_pidfile )
if pid == None:
raise MDMethodFailed( "stop_volume", "could not read PID file: %s" + str(e))
else:
# send SIGTERM to this metadata server
os.kill( pid, signal.SIGTERM )
dead = False
# wait until the process dies...
t_start = time.time()
while time.time() - t_start < 10:
try:
os.kill( pid, 0 )
time.sleep(0.1)
except:
dead = True
break # process is dead
if not dead:
# the process still runs, or it crashed
# kill -9 either way.
os.kill( pid, signal.SIGKILL )
try:
os.unlink( md_pidfile )
except:
pass
try:
os.rename( md_logfile, md_logfile + "." + str(int(time.time())) )
except:
pass
return 1
def delete_volume( mdserver_name ):
    """
    Destroy a volume: stop its server (best-effort), give its control ports
    back to the pool (best-effort), and remove its control directory.
    Return 1.
    """
    global conf
    global ms_portset
    volume_root = VOLUME_CTL_ROOT( conf, {'NAME': mdserver_name} )
    # the server may not be running at all; ignore failures here
    try:
        stop_volume( mdserver_name )
    except:
        pass
    # release ports
    try:
        vol_conf = read_config( VOLUME_CONF_PATH( volume_root ), ['PORTNUM', 'QUERY_PORTNUM'] )
        released = [vol_conf['PORTNUM'], vol_conf['QUERY_PORTNUM']]
        ms_portset.unregister_ports( released )
    except:
        pass
    cleanup_ctl_dir( volume_root )
    return 1
def read_volume( mdserver_name, fields ):
"""
Read configuration fields from a volume, given its name
"""
global conf
ctl_root = VOLUME_CTL_ROOT( conf, {'NAME': mdserver_name} )
conf_path = VOLUME_CONF_PATH( ctl_root )
try:
vol_conf = read_config( conf_path, fields )
except Exception, e:
raise MDMethodFailed( "read_volume", "could not read config, exception = '%s'" % e )
ret = {}
for f in fields:
ret[f] = vol_conf[f]
return ret
def list_volumes( fields ):
    """
    Get every volume's requested fields; each returned entry also carries
    the volume's NAME.
    """
    global conf
    listing = []
    for volume_name in VOLUME_NAMES( conf ):
        entry = read_volume( volume_name, fields )
        entry['NAME'] = volume_name
        listing.append( entry )
    return listing
def update_volume( mdserver_name, mdserver_dict ):
"""
Update this volume's configuration
"""
global conf
allowed_attrs = [
'REPLICA_URL',
'AUTH_OPERATIONS',
'NAME'
]
if conf_forbidden( mdserver_dict, allowed_attrs ):
raise MDInvalidArgument( "Invalid attributes. Allowed: %s" % (', '.join(allowed_attrs)), "update_volume" )
ctl_root = VOLUME_CTL_ROOT( conf, {'NAME': mdserver_name} )
conf_file = VOLUME_CONF_PATH( ctl_root )
try:
assert os.path.isdir( ctl_root ), "Control directory does not exist"
assert os.path.isfile( conf_file ), "Control directory does not have a server config"
except AssertionError, e:
raise MDInvalidArgument( "Cannot use new name %s: path exists" % new_name )
restart = True # restart after we regenerate the secrets and config
try:
# make a config that uses the old name of this volume
rc = stop_volume( mdserver_name )
if rc == 0:
# wasn't running in the first place
restart = False
except Exception, e:
raise MDMethodFailed( "Could not stop server", e )
# regenerate the config and secrets
# get the old config
old_conf = {}
try:
old_conf = read_config( conf_file )
except Exception, e:
raise MDMethodFailed( "Could not read old config", e )
# migrate data to new name, if need be
if mdserver_dict.has_key( 'NAME' ) and mdserver_name != mdserver_dict['NAME']:
new_ctl_root = VOLUME_CTL_ROOT( conf, mdserver_dict )
shutil.move( ctl_root, new_ctl_root )
ctl_root = new_ctl_root
mdserver_dict['NAME'] = mdserver_name
# regenerate the config
try:
install_volume_config( mdserver_dict, None, old_conf )
except Exception, e:
raise MDMethodFailed( "update_volume", "Could not generate config file, exception = %s" % e )
except Exception, e:
cleanup_ctl_dir( ctl_root )
raise e
# restart the server, if it was running before
if restart:
try:
start_volume( mdserver_name )
except Exception, e:
raise MDMethodFailed( "Could not start server", e )
return 1
def init():
    """
    One-time service initialization: set up logging, load the global
    service config, ensure the volumes control directory exists, and load
    the shared port set and persistent ID counters.
    """
    global conf
    global ms_portset
    global vol_id_counter
    global uid_counter
    logger.init()
    conf = read_config( "/etc/syndicate/syndicate-metadata-service.conf" )
    ctl_root = conf['MD_CTL_DIR']
    # all per-volume control directories live under <MD_CTL_DIR>/volumes
    if not os.path.isdir( os.path.join(ctl_root, "volumes") ):
        os.makedirs( os.path.join(ctl_root, "volumes") )
    ms_portset = PortSet( conf['MD_CTL_PORT_LOW'], conf['MD_CTL_PORT_HIGH'] )
    ms_portset.load( conf )
    # counters persist across restarts via their backing files
    vol_id_counter = AtomicCounter( 1, os.path.join( ctl_root, "volid" ) )
    uid_counter = AtomicCounter( 10000, os.path.join( ctl_root, "uid" ) )
def test():
    """
    Manual smoke-test driver: exercises create/read/start/update/delete
    against a local volume named "test", shelling out between steps to show
    the server process and its control directory.  Not run by default
    (commented out under __main__).
    """
    mdserver_dict = {
        'NAME': "test",
        'AUTH_OPERATIONS': 'readwrite',
        'BLOCKING_FACTOR': 61440
    }
    print "create_volume"
    rc = create_volume( "test", mdserver_dict, "jcnelson", "0123456789abcdef" )
    print "create_volume rc = %s" % rc
    print ""
    print "create_VACE"
    rc = create_VACE( "test", "llp", "abcdef0123456789", "VP" )
    print "create_VACE rc = %s" % rc
    print ""
    print "read_volume"
    fields = ["METADATA_READ_URL", "METADATA_WRITE_URL"]
    rc = read_volume( "test", fields )
    print "read_volume(%s) rc = %s" % (','.join(fields), rc)
    print ""
    print "start_volume"
    rc = start_volume( "test" )
    print "start_volume rc = %s" % rc
    print ""
    os.system("ps aux | grep mdserverd; ls /tmp/syndicate_md/volumes/test/")
    print "create_VACE"
    rc = create_VACE( "test", "foo", "asdfasdfasdf", "VP" )
    print "create_VACE rc = %s" % rc
    # give the server a moment to reload before inspecting it
    time.sleep(1.0)
    os.system("ps aux | grep mdserverd; ls /tmp/syndicate_md/volumes/test/")
    print "update_volume"
    rc = update_volume( "test", {"REPLICA_URL": ["http://www.foo.com:12345/", "http://www.bar.com:23456/"]} )
    print "update_volume rc = %s" % rc
    print ""
    time.sleep(1.0)
    os.system("ps aux | grep mdserverd; ls /tmp/syndicate_md/volumes/test/")
    print "delete_VACE"
    rc = delete_VACE( "test", "foo" )
    print "delete_VACE rc = %s" % rc
    print ""
    time.sleep(1.0)
    os.system("ps aux | grep mdserverd; ls /tmp/syndicate_md/volumes/test/")
    #print "delete_VACE"
    #rc = delete_VACE( "test", "llp" )
    #print "delete_VACE(llp) rc = %s" % rc
    print ""
    #print "stop_volume"
    #rc = stop_volume( "test" )
    #print "stop_volume rc = %s" % rc
if __name__ == "__main__":
    # start up a server...
    init()
    logfile = open(conf['MD_LOGFILE'], "a")
    #test()
    #sys.exit(0)
    # XML-RPC-over-SSL control server; clients must present a certificate
    server = MD_XMLRPC_SSL_Server( ("", int(conf['MD_CTL_RPC_PORT'])), MD_XMLRPC_RequestHandler, conf['MD_SSL_PKEY'], conf['MD_SSL_CERT'], logfile, True, cacerts=conf['MD_SSL_CACERT'], client_cert=True )
    server.register_multicall_functions()
    # NOTE(review): create_VACE/delete_VACE are not exposed over RPC here --
    # confirm that is intentional.
    server.register_function( create_volume )
    server.register_function( delete_volume )
    server.register_function( read_volume )
    server.register_function( list_volumes )
    server.register_function( update_volume )
    server.register_function( start_volume )
    server.register_function( stop_volume )
    try:
        server.serve_forever()
    except Exception, e:
        # record the fatal error before exiting
        logfile.flush()
        logfile.write( str(e) + "\n" + traceback.format_exc() + "\n" )
        logfile.close()
| {
"content_hash": "0fd99330b83b756bcff1c921858ada33",
"timestamp": "",
"source": "github",
"line_count": 1184,
"max_line_length": 202,
"avg_line_length": 25.18412162162162,
"alnum_prop": 0.5878663894292039,
"repo_name": "jcnelson/syndicate",
"id": "5bd12c502ffc65c3b676ece456dd6d42bca3be6c",
"size": "29837",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "old/md-service/SMDS/mdctl.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "343449"
},
{
"name": "C++",
"bytes": "3136667"
},
{
"name": "CSS",
"bytes": "321366"
},
{
"name": "Gnuplot",
"bytes": "3596"
},
{
"name": "HTML",
"bytes": "172638"
},
{
"name": "JavaScript",
"bytes": "55112"
},
{
"name": "Makefile",
"bytes": "43170"
},
{
"name": "Perl",
"bytes": "8025"
},
{
"name": "Protocol Buffer",
"bytes": "20793"
},
{
"name": "Python",
"bytes": "3273669"
},
{
"name": "Ruby",
"bytes": "13015"
},
{
"name": "Shell",
"bytes": "63133"
},
{
"name": "TeX",
"bytes": "605910"
},
{
"name": "Thrift",
"bytes": "2996"
}
],
"symlink_target": ""
} |
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponse
from fiware_cloto.cloto.manager import InfoManager
from django.core.exceptions import ObjectDoesNotExist
import json
@csrf_exempt
def info(request):
    """Return server information as pretty-printed JSON.

    Responds 200 with the InfoManager variables, or 500 with a JSON error
    body when the database holds no server information.
    """
    try:
        # renamed local (was 'info') to stop shadowing this view function
        server_info = InfoManager.InfoManager().get_information()
        return HttpResponse(json.dumps(server_info.getVars(), indent=4))
    except ObjectDoesNotExist:
        return HttpResponse(json.dumps({"badRequest": {"code": 500, "message":
            "Server Database does not contain information server"}}, indent=4), status=500)
def fail(request, reason="csrf fails"):
    """CSRF failure view: respond 400 with the failure reason.

    BUG FIX: the original ignored ``reason`` and always returned the
    literal "csrf fails"; the default value keeps the old response text,
    so default behavior is unchanged.
    """
    return HttpResponse(reason, status=400)
| {
"content_hash": "35b8844c4e2591fb58842272edba1fb1",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 107,
"avg_line_length": 36.05263157894737,
"alnum_prop": 0.7182481751824817,
"repo_name": "Fiware/cloud.Cloto",
"id": "f0369b8991f5525fa847d38e24ba7f4d0ba51758",
"size": "1504",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "fiware_cloto/cloto/views.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "API Blueprint",
"bytes": "50363"
},
{
"name": "Gherkin",
"bytes": "66746"
},
{
"name": "Python",
"bytes": "324830"
},
{
"name": "Shell",
"bytes": "9496"
}
],
"symlink_target": ""
} |
"""Generates the appropriate build.json data for all the end2end tests."""
import yaml
import collections
import hashlib
# Per-fixture build/test options: whether it is a full-stack transport,
# proxies, resolves DNS, needs the security library, which platforms/CI
# targets it supports, whether it traces, and build configs to exclude.
FixtureOptions = collections.namedtuple(
    'FixtureOptions',
    'fullstack includes_proxy dns_resolver secure platforms ci_mac tracing exclude_configs')
default_unsecure_fixture_options = FixtureOptions(
    True, False, True, False, ['windows', 'linux', 'mac', 'posix'], True, False, [])
# socketpair fixtures bypass the full stack and DNS entirely
socketpair_unsecure_fixture_options = default_unsecure_fixture_options._replace(fullstack=False, dns_resolver=False)
default_secure_fixture_options = default_unsecure_fixture_options._replace(secure=True)
# unix-domain-socket fixtures only exist on POSIX-like platforms
uds_fixture_options = default_unsecure_fixture_options._replace(dns_resolver=False, platforms=['linux', 'mac', 'posix'])
# maps fixture name to whether it requires the security library
END2END_FIXTURES = {
    'h2_compress': default_unsecure_fixture_options,
    'h2_census': default_unsecure_fixture_options,
    'h2_fakesec': default_secure_fixture_options._replace(ci_mac=False),
    'h2_full': default_unsecure_fixture_options,
    'h2_full+pipe': default_unsecure_fixture_options._replace(
        platforms=['linux']),
    'h2_full+trace': default_unsecure_fixture_options._replace(tracing=True),
    'h2_oauth2': default_secure_fixture_options._replace(ci_mac=False),
    'h2_proxy': default_unsecure_fixture_options._replace(includes_proxy=True,
                                                          ci_mac=False),
    'h2_sockpair_1byte': socketpair_unsecure_fixture_options._replace(
        ci_mac=False, exclude_configs=['msan']),
    'h2_sockpair': socketpair_unsecure_fixture_options._replace(ci_mac=False),
    'h2_sockpair+trace': socketpair_unsecure_fixture_options._replace(
        ci_mac=False, tracing=True),
    'h2_ssl': default_secure_fixture_options,
    'h2_ssl_cert': default_secure_fixture_options,
    'h2_ssl_proxy': default_secure_fixture_options._replace(includes_proxy=True,
                                                            ci_mac=False),
    'h2_uds': uds_fixture_options,
}
# Per-test options: stack/DNS requirements, proxy/tracing compatibility,
# security-library dependence, and a relative CPU-cost weight.
TestOptions = collections.namedtuple(
    'TestOptions', 'needs_fullstack needs_dns proxyable secure traceable cpu_cost')
default_test_options = TestOptions(False, False, True, False, True, 1.0)
connectivity_test_options = default_test_options._replace(needs_fullstack=True)
# weight assigned to tests that barely use the CPU
LOWCPU = 0.1
# maps test names to options
END2END_TESTS = {
    'bad_hostname': default_test_options,
    'binary_metadata': default_test_options,
    'call_creds': default_test_options._replace(secure=True),
    'cancel_after_accept': default_test_options._replace(cpu_cost=LOWCPU),
    'cancel_after_client_done': default_test_options._replace(cpu_cost=LOWCPU),
    'cancel_after_invoke': default_test_options._replace(cpu_cost=LOWCPU),
    'cancel_before_invoke': default_test_options._replace(cpu_cost=LOWCPU),
    'cancel_in_a_vacuum': default_test_options._replace(cpu_cost=LOWCPU),
    'cancel_with_status': default_test_options._replace(cpu_cost=LOWCPU),
    'compressed_payload': default_test_options._replace(proxyable=False, cpu_cost=LOWCPU),
    'connectivity': connectivity_test_options._replace(proxyable=False, cpu_cost=LOWCPU),
    'default_host': default_test_options._replace(needs_fullstack=True,
                                                  needs_dns=True),
    'disappearing_server': connectivity_test_options,
    'empty_batch': default_test_options,
    'filter_causes_close': default_test_options,
    'graceful_server_shutdown': default_test_options._replace(cpu_cost=LOWCPU),
    'hpack_size': default_test_options._replace(proxyable=False,
                                                traceable=False),
    'high_initial_seqno': default_test_options,
    'idempotent_request': default_test_options,
    'invoke_large_request': default_test_options,
    'large_metadata': default_test_options,
    'max_concurrent_streams': default_test_options._replace(proxyable=False),
    'max_message_length': default_test_options._replace(cpu_cost=LOWCPU),
    'negative_deadline': default_test_options,
    'no_op': default_test_options,
    'payload': default_test_options._replace(cpu_cost=LOWCPU),
    'ping_pong_streaming': default_test_options,
    'ping': connectivity_test_options._replace(proxyable=False),
    'registered_call': default_test_options,
    'request_with_flags': default_test_options._replace(proxyable=False),
    'request_with_payload': default_test_options,
    'server_finishes_request': default_test_options,
    'shutdown_finishes_calls': default_test_options,
    'shutdown_finishes_tags': default_test_options,
    'simple_delayed_request': connectivity_test_options._replace(cpu_cost=LOWCPU),
    'simple_metadata': default_test_options,
    'simple_request': default_test_options,
    'trailing_metadata': default_test_options,
}
def compatible(f, t):
    """Return True when test *t* can run on fixture *f*."""
    test_opts = END2END_TESTS[t]
    fixture_opts = END2END_FIXTURES[f]
    if test_opts.needs_fullstack and not fixture_opts.fullstack:
        return False
    if test_opts.needs_dns and not fixture_opts.dns_resolver:
        return False
    if not test_opts.proxyable and fixture_opts.includes_proxy:
        return False
    if not test_opts.traceable and fixture_opts.tracing:
        return False
    return True
def without(l, e):
    """Return a copy of list *l* with the first occurrence of *e* removed."""
    trimmed = list(l)
    trimmed.remove(e)
    return trimmed
def main():
    """Build the end2end test description and dump it as YAML on stdout."""
    # dependency sets for secure vs. unsecure builds
    sec_deps = [
        'grpc_test_util',
        'grpc',
        'gpr_test_util',
        'gpr'
    ]
    unsec_deps = [
        'grpc_test_util_unsecure',
        'grpc_unsecure',
        'gpr_test_util',
        'gpr'
    ]
    # NOTE: the local name 'json' is the build-description dict, not the
    # json module (which this file does not import).
    json = {
        '#': 'generated with test/end2end/gen_build_json.py',
        'libs': [
            {
                'name': 'end2end_tests',
                'build': 'private',
                'language': 'c',
                'secure': True,
                'src': ['test/core/end2end/end2end_tests.c'] + [
                    'test/core/end2end/tests/%s.c' % t
                    for t in sorted(END2END_TESTS.keys())],
                'headers': ['test/core/end2end/tests/cancel_test_helpers.h',
                            'test/core/end2end/end2end_tests.h'],
                'deps': sec_deps,
                'vs_proj_dir': 'test/end2end/tests',
            }
        ] + [
            {
                'name': 'end2end_nosec_tests',
                'build': 'private',
                'language': 'c',
                'secure': False,
                # the nosec library only contains tests that don't need security
                'src': ['test/core/end2end/end2end_nosec_tests.c'] + [
                    'test/core/end2end/tests/%s.c' % t
                    for t in sorted(END2END_TESTS.keys())
                    if not END2END_TESTS[t].secure],
                'headers': ['test/core/end2end/tests/cancel_test_helpers.h',
                            'test/core/end2end/end2end_tests.h'],
                'deps': unsec_deps,
                'vs_proj_dir': 'test/end2end/tests',
            }
        ],
        # one build target per fixture (secure and, where possible, nosec)
        'targets': [
            {
                'name': '%s_test' % f,
                'build': 'test',
                'language': 'c',
                'run': False,
                'src': ['test/core/end2end/fixtures/%s.c' % f],
                'platforms': END2END_FIXTURES[f].platforms,
                'ci_platforms': (END2END_FIXTURES[f].platforms
                                 if END2END_FIXTURES[f].ci_mac else without(
                                     END2END_FIXTURES[f].platforms, 'mac')),
                'deps': [
                    'end2end_tests'
                ] + sec_deps,
                'vs_proj_dir': 'test/end2end/fixtures',
            }
            for f in sorted(END2END_FIXTURES.keys())
        ] + [
            {
                'name': '%s_nosec_test' % f,
                'build': 'test',
                'language': 'c',
                'secure': 'no',
                'src': ['test/core/end2end/fixtures/%s.c' % f],
                'run': False,
                'platforms': END2END_FIXTURES[f].platforms,
                'ci_platforms': (END2END_FIXTURES[f].platforms
                                 if END2END_FIXTURES[f].ci_mac else without(
                                     END2END_FIXTURES[f].platforms, 'mac')),
                'deps': [
                    'end2end_nosec_tests'
                ] + unsec_deps,
                'vs_proj_dir': 'test/end2end/fixtures',
            }
            for f in sorted(END2END_FIXTURES.keys())
            if not END2END_FIXTURES[f].secure
        ],
        # one test entry per compatible (fixture, test) pair
        'tests': [
            {
                'name': '%s_test' % f,
                'args': [t],
                'exclude_configs': [],
                'platforms': END2END_FIXTURES[f].platforms,
                'ci_platforms': (END2END_FIXTURES[f].platforms
                                 if END2END_FIXTURES[f].ci_mac else without(
                                     END2END_FIXTURES[f].platforms, 'mac')),
                'flaky': False,
                'language': 'c',
                'cpu_cost': END2END_TESTS[t].cpu_cost,
            }
            for f in sorted(END2END_FIXTURES.keys())
            for t in sorted(END2END_TESTS.keys()) if compatible(f, t)
        ] + [
            {
                'name': '%s_nosec_test' % f,
                'args': [t],
                'exclude_configs': END2END_FIXTURES[f].exclude_configs,
                'platforms': END2END_FIXTURES[f].platforms,
                'ci_platforms': (END2END_FIXTURES[f].platforms
                                 if END2END_FIXTURES[f].ci_mac else without(
                                     END2END_FIXTURES[f].platforms, 'mac')),
                'flaky': False,
                'language': 'c',
                'cpu_cost': END2END_TESTS[t].cpu_cost,
            }
            for f in sorted(END2END_FIXTURES.keys())
            if not END2END_FIXTURES[f].secure
            for t in sorted(END2END_TESTS.keys())
            if compatible(f, t) and not END2END_TESTS[t].secure
        ],
        'core_end2end_tests': dict(
            (t, END2END_TESTS[t].secure)
            for t in END2END_TESTS.keys()
        )
    }
    print yaml.dump(json)


if __name__ == '__main__':
    main()
| {
"content_hash": "7b59e2d02c929b74938f92cb1543706e",
"timestamp": "",
"source": "github",
"line_count": 241,
"max_line_length": 120,
"avg_line_length": 40.89626556016597,
"alnum_prop": 0.5797483766233766,
"repo_name": "shishaochen/TensorFlow-0.8-Win",
"id": "3e10ad580fc7a6584017f504f510d274814523ba",
"size": "11411",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "third_party/grpc/test/core/end2end/gen_build_yaml.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "33878"
},
{
"name": "C",
"bytes": "1390259"
},
{
"name": "C#",
"bytes": "1900628"
},
{
"name": "C++",
"bytes": "28129535"
},
{
"name": "CMake",
"bytes": "417657"
},
{
"name": "CSS",
"bytes": "1297"
},
{
"name": "Emacs Lisp",
"bytes": "7809"
},
{
"name": "GCC Machine Description",
"bytes": "1"
},
{
"name": "Go",
"bytes": "8549"
},
{
"name": "Groff",
"bytes": "1272396"
},
{
"name": "HTML",
"bytes": "849000"
},
{
"name": "Java",
"bytes": "3139664"
},
{
"name": "JavaScript",
"bytes": "417956"
},
{
"name": "Jupyter Notebook",
"bytes": "1772913"
},
{
"name": "M4",
"bytes": "78386"
},
{
"name": "Makefile",
"bytes": "1177180"
},
{
"name": "Objective-C",
"bytes": "2580186"
},
{
"name": "Objective-C++",
"bytes": "2897"
},
{
"name": "PHP",
"bytes": "342"
},
{
"name": "Protocol Buffer",
"bytes": "924786"
},
{
"name": "Python",
"bytes": "8241830"
},
{
"name": "Ruby",
"bytes": "82233"
},
{
"name": "Shell",
"bytes": "1875702"
},
{
"name": "Swift",
"bytes": "20550"
},
{
"name": "TypeScript",
"bytes": "395532"
},
{
"name": "VimL",
"bytes": "3759"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
import fluent_contents.utils.validators
class Migration(migrations.Migration):
    # Initial schema for the Google Docs viewer plugin: one content item
    # that embeds an online document (e.g. PDF or DOCX) by URL.
    # NOTE: migrations are frozen history -- do not edit field definitions.

    dependencies = [
        ('fluent_contents', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='GoogleDocsViewerItem',
            fields=[
                ('contentitem_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='fluent_contents.ContentItem')),
                ('url', models.URLField(help_text='Specify the URL of an online document, for example a PDF or DOCX file.', verbose_name='File URL')),
                # width/height are CharFields so values like '100%' are allowed;
                # validate_html_size enforces the px/% format
                ('width', models.CharField(default=b'100%', help_text='Specify the size in pixels, or a percentage of the container area size.', max_length=10, verbose_name='Width', validators=[fluent_contents.utils.validators.validate_html_size])),
                ('height', models.CharField(default=b'600', help_text='Specify the size in pixels.', max_length=10, verbose_name='Height', validators=[fluent_contents.utils.validators.validate_html_size])),
            ],
            options={
                'db_table': 'contentitem_googledocsviewer_googledocsvieweritem',
                'verbose_name': 'Embedded document',
                'verbose_name_plural': 'Embedded document',
            },
            bases=('fluent_contents.contentitem',),
        ),
    ]
| {
"content_hash": "c926f73ff207cf6cc2ac97d5a289a575",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 249,
"avg_line_length": 49.58620689655172,
"alnum_prop": 0.6397774687065368,
"repo_name": "ixc/django-fluent-contents",
"id": "9253f8e2cb3e3477b91d3e02fb5ccee2c8ca3565",
"size": "1462",
"binary": false,
"copies": "2",
"ref": "refs/heads/ixc",
"path": "fluent_contents/plugins/googledocsviewer/migrations/0001_initial.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "13003"
},
{
"name": "HTML",
"bytes": "33138"
},
{
"name": "JavaScript",
"bytes": "81000"
},
{
"name": "Python",
"bytes": "449106"
}
],
"symlink_target": ""
} |
__all__ = ['TException']


class TException(Exception):
    '''turboengine exception'''

    def __str__(self):
        # NOTE(review): relies on the Python 2-only ``Exception.message``
        # attribute (removed in Python 3); ``__unicode__`` below is likewise
        # Python 2-only. Confirm before running under Python 3.
        return 'turboengine: %s'%(self.message)

    def __unicode__(self):
        return u'turboengine: %s'%(self.message)
"content_hash": "2dccab1c3d9460df6c1192bc1ba3e2ef",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 48,
"avg_line_length": 24.3,
"alnum_prop": 0.5761316872427984,
"repo_name": "carlitux/turboengine",
"id": "e8e8ba34f90b9d62b9e392bf48a30f9b29261880",
"size": "1564",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/turboengine/errors/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "25752"
}
],
"symlink_target": ""
} |
import collections
import collections.abc

from uqbar.containers import UniqueTreeList

from supriya.realtime.Node import Node
class Group(Node, UniqueTreeList):
    """
    A group.

    ::

        >>> import supriya.realtime
        >>> server = supriya.Server.default()
        >>> server.boot()
        <Server: udp://127.0.0.1:57751, 8i8o>

    ::

        >>> group = supriya.realtime.Group()
        >>> group.allocate()
        <+ Group: 1000>

    ::

        >>> group.free()
        <- Group: ???>

    ::

        >>> server.quit()
        <Server: offline>

    """

    ### CLASS VARIABLES ###

    __documentation_section__ = "Main Classes"

    __slots__ = ("_children", "_control_interface", "_named_children")

    ### INITIALIZER ###

    def __init__(self, children=None, name=None, node_id_is_permanent=False):
        import supriya.realtime

        self._control_interface = supriya.realtime.GroupInterface(client=self)
        Node.__init__(self, name=name, node_id_is_permanent=node_id_is_permanent)
        UniqueTreeList.__init__(self, children=children, name=name)

    ### SPECIAL METHODS ###

    def __graph__(self):
        # Build a Graphviz graph of this group and (recursively) its children.
        graph = super().__graph__()
        parent_node = graph[self._get_graphviz_name()]
        for child in self:
            graph.extend(child.__graph__())
            child_node = graph[child._get_graphviz_name()]
            parent_node.attach(child_node)
        return graph

    def __setitem__(self, i, expr):
        """
        Sets `expr` in self at index `i`.

        ::

            >>> group_one = supriya.realtime.Group()
            >>> group_two = supriya.realtime.Group()
            >>> group_one.append(group_two)

        """
        # TODO: lean on uqbar's __setitem__ more.
        self._validate(expr)
        if isinstance(i, slice):
            # BUG FIX: ``collections.Sequence`` was removed in Python 3.10;
            # the ABC lives in ``collections.abc`` (since Python 3.3).
            assert isinstance(expr, collections.abc.Sequence)
        if isinstance(i, str):
            i = self.index(self._named_children[i])
        if isinstance(i, int):
            if i < 0:
                i = len(self) + i
            i = slice(i, i + 1)
        if (
            i.start == i.stop
            and i.start is not None
            and i.stop is not None
            and i.start <= -len(self)
        ):
            start, stop = 0, 0
        else:
            start, stop, stride = i.indices(len(self))
        if not isinstance(expr, collections.abc.Sequence):
            expr = [expr]
        if self.is_allocated:
            self._set_allocated(expr, start, stop)
        else:
            self._set_unallocated(expr, start, stop)

    def __str__(self):
        # Render this group's node tree, one line per node, children indented.
        result = []
        node_id = self.node_id
        if node_id is None:
            node_id = "???"
        if self.name:
            string = f"{node_id} group ({self.name})"
        else:
            string = f"{node_id} group"
        result.append(string)
        for child in self:
            assert child.parent is self
            lines = str(child).splitlines()
            for line in lines:
                result.append(f"    {line}")
        return "\n".join(result)

    ### PRIVATE METHODS ###

    def _as_graphviz_node(self):
        node = super()._as_graphviz_node()
        node.attributes["fillcolor"] = "lightsteelblue2"
        return node

    @staticmethod
    def _iterate_setitem_expr(group, expr, start=0):
        # Yield (node, target_node, add_action) triples describing where each
        # node in ``expr`` should be placed relative to ``group``.
        import supriya.realtime

        if not start or not group:
            outer_target_node = group
        else:
            outer_target_node = group[start - 1]
        for outer_node in expr:
            if outer_target_node is group:
                outer_add_action = supriya.AddAction.ADD_TO_HEAD
            else:
                outer_add_action = supriya.AddAction.ADD_AFTER
            outer_node_was_allocated = outer_node.is_allocated
            yield outer_node, outer_target_node, outer_add_action
            outer_target_node = outer_node
            if (
                isinstance(outer_node, supriya.realtime.Group)
                and not outer_node_was_allocated
            ):
                # recurse into unallocated subgroups so their children are
                # placed too
                for (
                    inner_node,
                    inner_target_node,
                    inner_add_action,
                ) in Group._iterate_setitem_expr(outer_node, outer_node):
                    yield inner_node, inner_target_node, inner_add_action

    def _collect_requests_and_synthdefs(self, expr, server, start=0):
        # Translate the placement plan for ``expr`` into server requests,
        # collecting paused nodes and synthdefs that still need to be sent.
        import supriya.commands
        import supriya.realtime

        nodes = set()
        paused_nodes = set()
        synthdefs = set()
        requests = []
        iterator = Group._iterate_setitem_expr(self, expr, start)
        for node, target_node, add_action in iterator:
            nodes.add(node)
            if node.is_allocated:
                if add_action == supriya.AddAction.ADD_TO_HEAD:
                    request = supriya.commands.GroupHeadRequest(
                        node_id_pairs=[(node, target_node)]
                    )
                else:
                    request = supriya.commands.NodeAfterRequest(
                        node_id_pairs=[(node, target_node)]
                    )
                requests.append(request)
            else:
                if isinstance(node, supriya.realtime.Group):
                    request = supriya.commands.GroupNewRequest(
                        items=[
                            supriya.commands.GroupNewRequest.Item(
                                add_action=add_action,
                                node_id=node,
                                target_node_id=target_node,
                            )
                        ]
                    )
                    requests.append(request)
                else:
                    if node.synthdef not in server:
                        synthdefs.add(node.synthdef)
                    (settings, map_requests) = node.controls._make_synth_new_settings()
                    request = supriya.commands.SynthNewRequest(
                        add_action=add_action,
                        node_id=node,
                        synthdef=node.synthdef,
                        target_node_id=target_node,
                        **settings,
                    )
                    requests.append(request)
                    requests.extend(map_requests)
            if node.is_paused:
                paused_nodes.add(node)
        return nodes, paused_nodes, requests, synthdefs

    def _set_allocated(self, expr, start, stop):
        # TODO: Consolidate this with Group.allocate()
        # TODO: Perform tree mutations via command apply methods, not here
        import supriya.commands
        import supriya.realtime

        old_nodes = self._children[start:stop]
        self._children.__delitem__(slice(start, stop))
        for old_node in old_nodes:
            old_node._set_parent(None)
        for child in expr:
            if child in self and self.index(child) < start:
                start -= 1
            child._set_parent(self)
        self._children.__setitem__(slice(start, start), expr)

        new_nodes, paused_nodes, requests, synthdefs = self._collect_requests_and_synthdefs(
            expr, self.server, start=start
        )

        nodes_to_free = [_ for _ in old_nodes if _ not in new_nodes]
        if nodes_to_free:
            requests.append(
                supriya.commands.NodeFreeRequest(
                    node_ids=sorted(nodes_to_free, key=lambda x: x.node_id)
                )
            )
        return self._allocate(paused_nodes, requests, self.server, synthdefs)

    def _set_unallocated(self, expr, start, stop):
        # Offline mutation: free incoming nodes and splice them into place.
        for node in expr:
            node.free()
        for old_child in tuple(self[start:stop]):
            old_child._set_parent(None)
        self._children[start:stop] = expr
        for new_child in expr:
            new_child._set_parent(self)

    def _unregister_with_local_server(self):
        for child in self:
            child._unregister_with_local_server()
        return Node._unregister_with_local_server(self)

    def _validate(self, expr):
        import supriya.realtime

        assert all(isinstance(_, supriya.realtime.Node) for _ in expr)
        parentage = self.parentage
        for x in expr:
            assert isinstance(x, supriya.realtime.Node)
            if isinstance(x, supriya.realtime.Group):
                # adding an ancestor as a child would create a cycle
                assert x not in parentage

    ### PUBLIC METHODS ###

    def allocate(
        self, add_action=None, node_id_is_permanent=False, sync=False, target_node=None
    ):
        # TODO: Consolidate this with Group.allocate()
        import supriya.commands
        import supriya.realtime

        if self.is_allocated:
            return
        self._node_id_is_permanent = bool(node_id_is_permanent)
        target_node = Node.expr_as_target(target_node)
        server = target_node.server
        group_new_request = supriya.commands.GroupNewRequest(
            items=[
                supriya.commands.GroupNewRequest.Item(
                    add_action=add_action,
                    node_id=self,
                    target_node_id=target_node.node_id,
                )
            ]
        )
        (
            nodes,
            paused_nodes,
            requests,
            synthdefs,
        ) = self._collect_requests_and_synthdefs(self, server)
        requests = [group_new_request, *requests]
        if self.is_paused:
            paused_nodes.add(self)
        return self._allocate(paused_nodes, requests, server, synthdefs)

    def free(self):
        for node in self:
            node._unregister_with_local_server()
        Node.free(self)
        return self

    @property
    def controls(self):
        # Per-group control interface (set via GroupInterface in __init__).
        return self._control_interface
| {
"content_hash": "7a13827659aadfec3957ea18ed8520a7",
"timestamp": "",
"source": "github",
"line_count": 294,
"max_line_length": 92,
"avg_line_length": 33.07823129251701,
"alnum_prop": 0.5233933161953728,
"repo_name": "Pulgama/supriya",
"id": "c2e8a415308540c71a87972b8a2375367e608eb5",
"size": "9725",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "supriya/realtime/Group.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "6712"
},
{
"name": "CSS",
"bytes": "446"
},
{
"name": "HTML",
"bytes": "1083"
},
{
"name": "JavaScript",
"bytes": "6163"
},
{
"name": "Makefile",
"bytes": "6775"
},
{
"name": "Python",
"bytes": "2790612"
},
{
"name": "Shell",
"bytes": "569"
}
],
"symlink_target": ""
} |
from unittest import mock
import numpy as np
import pytest
from numpy.linalg import LinAlgError
from scipy.stats import norm
from pycalphad import Database, variables as v
from espei.error_functions.context import setup_context
from espei.optimizers.opt_mcmc import EmceeOptimizer
from espei.error_functions import get_zpf_data, get_thermochemical_data
from espei.error_functions.zpf_error import ZPFResidual
from espei.priors import rv_zero
from .fixtures import datasets_db
from .testing_data import *
def test_lnprob_calculates_multi_phase_probability_for_success(datasets_db):
    """lnprob() successfully calculates the probability for equilibrium """
    dbf = Database.from_string(CU_MG_TDB, fmt='tdb')
    datasets_db.insert(CU_MG_DATASET_ZPF_WORKING)
    # (removed unused locals: comps, phases, orig_val, initial_params)
    param = 'VV0001'
    residual_objs = [
        ZPFResidual(dbf, datasets_db, None, [param])
    ]
    opt = EmceeOptimizer(dbf)
    res = opt.predict([10], prior_rvs=[rv_zero()], symbols_to_fit=[param], residual_objs=residual_objs)
    assert np.isreal(res)
    assert not np.isinf(res)
    assert np.isclose(res, -31.309645520830344, rtol=1e-6)

    # The purpose of this part is to test that the driving forces (and probability)
    # are different than the case of VV0001 = 10.
    res_2 = opt.predict([-10000000], prior_rvs=[rv_zero()], symbols_to_fit=[param], residual_objs=residual_objs)
    assert np.isreal(res_2)
    assert not np.isinf(res_2)
    # Accept a large rtol becuase the results should be _very_ different
    assert not np.isclose(res_2, -31.309645520830344, rtol=1e-2)
def test_lnprob_calculates_single_phase_probability_for_success(datasets_db):
    """lnprob() succesfully calculates the probability from single phase data"""
    dbf = Database.from_string(CU_MG_TDB_FCC_ONLY, fmt='tdb')
    datasets_db.insert(CU_MG_HM_MIX_SINGLE_FCC_A1)
    # (removed unused locals: comps, phases)
    param = 'VV0003'
    orig_val = -14.0865
    opt = EmceeOptimizer(dbf)
    ctx = setup_context(dbf, datasets_db, symbols_to_fit=[param])
    # probability at the database's original parameter value
    res_orig = opt.predict([orig_val], prior_rvs=[rv_zero()], **ctx)
    assert np.isreal(res_orig)
    assert np.isclose(res_orig, -9.119484935312146, rtol=1e-6)

    res_10 = opt.predict([10.0], prior_rvs=[rv_zero()], **ctx)
    assert np.isreal(res_10)
    assert np.isclose(res_10, -9.143559131626864, rtol=1e-6)

    # a wildly wrong parameter should be strongly penalized
    res_1e5 = opt.predict([1e5], prior_rvs=[rv_zero()], **ctx)
    assert np.isreal(res_1e5)
    assert np.isclose(res_1e5, -1359.1335466316268, rtol=1e-6)
def test_optimizer_computes_probability_with_activity_data(datasets_db):
    """EmceeOptimizer correctly computed probability with activity data

    This test is mathematically redundant with test_error_functions.test_activity_error, but aims to test the functionality of using the Optimizer / ResidualFunction API
    """
    dbf = Database(CU_MG_TDB)
    datasets_db.insert(CU_MG_EXP_ACTIVITY)
    optimizer = EmceeOptimizer(dbf)
    # Having no degrees of freedom isn't currently allowed by setup_context
    # we use VV0000 and the current value in the database
    context = setup_context(dbf, datasets_db, symbols_to_fit=["VV0000"])
    lnprob = optimizer.predict(np.array([-32429.6]), **context)
    assert np.isclose(lnprob, -257.41020886970756, rtol=1e-6)
def _eq_LinAlgError(*args, **kwargs):
raise LinAlgError()
def _eq_ValueError(*args, **kwargs):
raise ValueError()
@mock.patch('espei.error_functions.zpf_error.equilibrium', _eq_LinAlgError)
@pytest.mark.xfail
def test_lnprob_does_not_raise_on_LinAlgError(datasets_db):
    """lnprob() should catch LinAlgError raised by equilibrium and return -np.inf"""
    dbf = Database.from_string(CU_MG_TDB, fmt='tdb')
    datasets_db.insert(CU_MG_DATASET_ZPF_WORKING)
    opt = EmceeOptimizer(dbf)
    residual_objs = [
        ZPFResidual(dbf, datasets_db, None, ["VV0001"])
    ]
    # equilibrium is patched to raise LinAlgError; predict must degrade to -inf
    # rather than propagate the exception.
    res = opt.predict([10], prior_rvs=[rv_zero()], symbols_to_fit=['VV0001'], residual_objs=residual_objs)
    assert np.isneginf(res)
@mock.patch('espei.error_functions.zpf_error.equilibrium', _eq_ValueError)
@pytest.mark.xfail
def test_lnprob_does_not_raise_on_ValueError(datasets_db):
    """lnprob() should catch ValueError raised by equilibrium and return -np.inf"""
    dbf = Database.from_string(CU_MG_TDB, fmt='tdb')
    datasets_db.insert(CU_MG_DATASET_ZPF_WORKING)
    # Note: the optimizer was previously constructed twice; once is enough.
    opt = EmceeOptimizer(dbf)
    residual_objs = [
        ZPFResidual(dbf, datasets_db, None, ["VV0001"])
    ]
    # equilibrium is patched to raise ValueError; predict must degrade to -inf
    # rather than propagate the exception.
    res = opt.predict([10], prior_rvs=[rv_zero()], symbols_to_fit=['VV0001'], residual_objs=residual_objs)
    assert np.isneginf(res)
def test_parameter_initialization():
    """Deterministically generated starting parameters should match."""
    start = np.array([1, 10, 100, 1000])
    optimizer = EmceeOptimizer(Database())
    chains = optimizer.initialize_new_chains(start, 1, 0.10, deterministic=True)
    expected = np.array([
        [9.81708401e-01, 9.39027722e+00, 1.08016748e+02, 9.13512881e+02],
        [1.03116874, 9.01412995, 112.79594345, 916.44725799],
        [1.00664662e+00, 1.07178898e+01, 9.63696718e+01, 1.36872292e+03],
        [1.07642366e+00, 1.16413520e+01, 8.71742457e+01, 9.61836382e+02]])
    assert np.allclose(chains, expected)
def test_emcee_opitmizer_can_restart(datasets_db):
    """A restart trace can be passed to the Emcee optimizer."""
    database = Database.from_string(CU_MG_TDB, fmt='tdb')
    datasets_db.insert(CU_MG_DATASET_ZPF_WORKING)
    optimizer = EmceeOptimizer(database)
    # 2 chains x 10 iterations x 1 parameter, every walker starting at -4.
    trace = -4 * np.ones((2, 10, 1))
    optimizer.fit(['VV0001'], datasets_db, iterations=1, chains_per_parameter=2, restart_trace=trace)
    assert optimizer.sampler.chain.shape == (2, 1, 1)
def test_equilibrium_thermochemical_correct_probability(datasets_db):
    """Integration test for equilibrium thermochemical error."""
    dbf = Database(CU_MG_TDB)
    opt = EmceeOptimizer(dbf)
    datasets_db.insert(CU_MG_EQ_HMR_LIQUID)
    ctx = setup_context(dbf, datasets_db, ['VV0017'])
    ctx.update(opt.get_priors(None, ['VV0017'], [0]))
    prob = opt.predict(np.array([-31626.6]), **ctx)
    expected_prob = norm(loc=0, scale=500).logpdf([-31626.6*0.5*0.5]).sum()
    assert np.isclose(prob, expected_prob)
    # change to -40000
    # Fix: np.float_ was removed in NumPy 2.0; np.float64 is the same dtype.
    prob = opt.predict(np.array([-40000], dtype=np.float64), **ctx)
    expected_prob = norm(loc=0, scale=500).logpdf([-40000*0.5*0.5]).sum()
    assert np.isclose(prob, expected_prob)
def test_lnprob_calculates_associate_tdb(datasets_db):
    """lnprob() successfully calculates the probability for equilibrium.

    Uses an associate-model TDB and checks that two very different values of
    VV0001 produce finite but clearly distinct probabilities.
    """
    dbf = Database.from_string(CU_MG_TDB_ASSOC, fmt='tdb')
    datasets_db.insert(CU_MG_DATASET_ZPF_WORKING)
    param = 'VV0001'
    residual_objs = [
        ZPFResidual(dbf, datasets_db, None, [param])
    ]
    opt = EmceeOptimizer(dbf)
    res = opt.predict([10], prior_rvs=[rv_zero()], symbols_to_fit=[param], residual_objs=residual_objs)
    assert np.isreal(res)
    assert not np.isinf(res)
    assert np.isclose(res, -31.309645520830344, rtol=1e-6)
    # The purpose of this part is to test that the driving forces (and probability)
    # are different than the case of VV0001 = 10.
    res_2 = opt.predict([-10000000], prior_rvs=[rv_zero()], symbols_to_fit=[param], residual_objs=residual_objs)
    assert np.isreal(res_2)
    assert not np.isinf(res_2)
    # Accept a large rtol because the results should be _very_ different
    assert not np.isclose(res_2, -31.309645520830344, rtol=1e-2)
| {
"content_hash": "6d871070fcd71f55b8d8ab5bfbba585c",
"timestamp": "",
"source": "github",
"line_count": 195,
"max_line_length": 169,
"avg_line_length": 41.53333333333333,
"alnum_prop": 0.6831707618224472,
"repo_name": "PhasesResearchLab/ESPEI",
"id": "6837c67c239da686a5eefc70bd1e47baf32c4b3b",
"size": "8099",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_mcmc.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "587917"
}
],
"symlink_target": ""
} |
import os
from core import colors
import traceback
import sys
from prettytable import PrettyTable
from core import getpath
from core import moddbparser
from xml.etree import ElementTree
from xml.dom import minidom
from core.messages import *
def count():
    """Count the module files in the modules directory.

    Side effects: stores the count in the global ``module_count`` and, as a
    string, on the function attribute ``count.mod`` for callers that display it.
    """
    global module_count
    directory = getpath.modules()
    entries = [name for name in os.listdir(directory)
               if os.path.isfile(os.path.join(directory, name))]
    # One file in the directory is not a loadable module, so exclude it
    # from the total (presumably __init__.py -- TODO confirm).
    module_count = len(entries) - 1
    count.mod = str(module_count)
def printoptions(modadd):
    """Print a module's options (name, current value, description) as a table.

    ``modadd`` is a loaded module object exposing a ``variables`` dict of
    ``{option: (value, description)}`` and, optionally, ``option_notes``.
    """
    try:
        print(" ")
        t = PrettyTable([colors.red +'Option', 'Value', 'Description'+colors.end])
        t.add_row(["------","------","-----------"])
        t.align = 'l'
        # Fix: this was assigned to the misspelled attribute 't.valing',
        # which PrettyTable silently ignored; 'valign' is the real option.
        t.valign = 'm'
        t.border = False
        for key, val in modadd.variables.items():
            t.add_row([key, val[0], val[1]])
        print (t,'\n')
        try:
            print(modadd.option_notes,'\n')
        except(AttributeError):
            # option_notes is optional on modules
            pass
    except Exception:
        # Any other failure means the module's metadata is malformed.
        print(colors.red+"error: module is corrupted\n")
        traceback.print_exc(file=sys.stdout)
        print(colors.end)
def writedb(root):
    """Pretty-print the module XML tree and write it to core/module_database.xml."""
    # Strip existing formatting whitespace first so minidom's toprettyxml does
    # not accumulate blank lines across successive rewrites.
    # NOTE(review): the bare " " replacements also remove spaces inside text
    # nodes (e.g. multi-word short descriptions) -- confirm this is intended.
    rough_string = ElementTree.tostring(root, 'utf-8').decode("utf-8").replace("\n", "").replace("\t", "").replace(" ", "").replace(" ", "").encode("utf-8")
    reparsed = minidom.parseString(rough_string)
    clean = reparsed.toprettyxml(indent="\t")
    # Fix: use a context manager so the handle is closed even if write() fails.
    with open(getpath.core()+"module_database.xml", "w") as db_file:
        db_file.write(clean)
def addtodb(modadd):
    # Add or update a module entry in core/module_database.xml.
    # If the module already exists its <shortdesc> is refreshed; otherwise the
    # user is prompted (interactively) for a category key, and a brand-new
    # category is created when the key does not match an existing one.
    xml = moddbparser.parsemoddb()
    root = xml[0]   # ElementTree root element
    tree = xml[1]   # ElementTree tree (used for the in-place update path)
    new = True      # stays True if the module was not found in the database
    newcat = True   # stays True if the chosen category key does not exist
    # Pass 1: look for an existing entry and update its short description.
    for category in root:
        if category.tag == "category":
            for item in category:
                if item.tag == "module" and item.attrib["name"] == modadd.conf["name"]:
                    for info in item:
                        if info.tag == "shortdesc":
                            info.text = modadd.conf["shortdesc"]
                    new = False
                    tree.write(getpath.core()+"module_database.xml")
                    print_success("database updated")
                    return
    # Pass 2: module not found -- ask the user which category to file it under.
    if new == True:
        print_info(modadd.conf["name"]+" doesn't exist in database\n", start="\n")
        print("available categories keys:"+colors.yellow)
        for category in root:
            if category.tag == "category":
                print(category.attrib["key"])
        print(colors.end, end="")
        catkey = input("\ngive new or exist key? ")
        # Try to append the module to an existing category with that key.
        for category in root:
            if category.tag == "category" and category.attrib["key"] == catkey:
                module = ElementTree.Element("module")
                shortdesc = ElementTree.Element("shortdesc")
                shortdesc.text = modadd.conf["shortdesc"]
                module.set("name", modadd.conf["name"])
                module.append(shortdesc)
                category.append(module)
                writedb(root)
                newcat = False
                print_success("module added to "+category.attrib["name"])
                break
        # Key did not match any category: create a new category around the module.
        if newcat == True:
            print_info("category not found")
            print_info("going to add new category")
            catname = input("give new category name: ")
            newcat = ElementTree.Element("category")
            newcat.set("name", catname)
            newcat.set("key", catkey)
            module = ElementTree.Element("module")
            shortdesc = ElementTree.Element("shortdesc")
            shortdesc.text = modadd.conf["shortdesc"]
            module.set("name", modadd.conf["name"])
            module.append(shortdesc)
            newcat.append(module)
            root.append(newcat)
            writedb(root)
            print_success("new category created")
            print_success("module added to "+newcat.attrib["name"])
| {
"content_hash": "21294e00dec9dcd2f4c9551d665e0338",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 160,
"avg_line_length": 35.89090909090909,
"alnum_prop": 0.5539513677811551,
"repo_name": "4shadoww/hakkuframework",
"id": "58389a2b8ff80fa0ba29a5c59bdb83f982a2ba20",
"size": "4007",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "core/moduleop.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7992059"
}
],
"symlink_target": ""
} |
# Fix: distutils.core.setup() ignores install_requires (and distutils was
# removed from the standard library in Python 3.12); setuptools honors it.
from setuptools import setup

setup(
    name="pinax.checkers",
    version="1.1",
    author="Pinax",
    author_email="development@eldarion.com",
    url="https://github.com/pinax/pinax-checkers",
    description="Style checker for Pinax and Eldarion OSS",
    license="BSD",
    packages=[
        "pinax",
        "pinax.checkers",
    ],
    install_requires=["pylint>=0.25.0"],
)
| {
"content_hash": "b8a2c0ff1da07dde1d30465ef5ae8501",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 59,
"avg_line_length": 23,
"alnum_prop": 0.6240409207161125,
"repo_name": "pinax-archives/pinax-checkers",
"id": "b620d8969e4730d50174b6465d082d814f4dab24",
"size": "391",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "4153"
}
],
"symlink_target": ""
} |
"""
test HTTP exceptions
"""
from zunzuncito import tools
class APIResource(object):
    """Example resource demonstrating zunzuncito HTTP exceptions."""

    @tools.allow_methods('get')
    def dispatch(self, request, response):
        """Handle GET requests; the first path segment must be 'foo'.

        Raises HTTPException(400) when no path segment is present and
        HTTPException(406) when the segment is anything other than 'foo'.
        """
        request.log.debug(tools.log_json({
            'API': request.version,
            'Method': request.method,
            'URI': request.URI,
            'vroot': request.vroot
        }, True))
        try:
            name = request.path[0]
        # Fix: was a bare ``except:``, which also swallowed SystemExit and
        # KeyboardInterrupt; only a missing/invalid path should yield a 400.
        except (IndexError, TypeError):
            raise tools.HTTPException(400)
        if name != 'foo':
            raise tools.HTTPException(
                406,
                title='exeption example',
                description='name must be foo',
                code='my-custom-code',
                display=True)
        return __name__
| {
"content_hash": "ed702d9852b3521e78f334e49dd5323e",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 47,
"avg_line_length": 23.28125,
"alnum_prop": 0.508724832214765,
"repo_name": "nbari/zunzuncito",
"id": "e14106268b54ee1a0e54669ed0d8744f92b7a47a",
"size": "745",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "my_api/default/v0/zun_exception/zun_exception.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "4811"
},
{
"name": "Python",
"bytes": "126204"
},
{
"name": "Shell",
"bytes": "586"
}
],
"symlink_target": ""
} |
from ..calendars import REECalendar
CALENDAR = REECalendar()
def get_holidays(year):
    # Return the set of holidays for *year* according to the shared
    # REECalendar instance (see module-level CALENDAR above).
    return CALENDAR.holidays_set(year)
| {
"content_hash": "7213ed0608eb41f261907356fc9b9195",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 38,
"avg_line_length": 18.142857142857142,
"alnum_prop": 0.7559055118110236,
"repo_name": "gisce/enerdata",
"id": "257862ba396d2f07586c4af4f069a9992442f3af",
"size": "127",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "enerdata/datetime/holidays.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "417818"
}
],
"symlink_target": ""
} |
'''
OpenShiftCLI class that wraps the oc commands in a subprocess
'''
# pylint: disable=too-many-lines
from __future__ import print_function
import atexit
import copy
import fcntl
import json
import time
import os
import re
import shutil
import subprocess
import tempfile
# pylint: disable=import-error
try:
import ruamel.yaml as yaml
except ImportError:
import yaml
from ansible.module_utils.basic import AnsibleModule
# -*- -*- -*- End included fragment: lib/import.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: doc/route -*- -*- -*-
DOCUMENTATION = '''
---
module: oc_route
short_description: Create, modify, and idempotently manage openshift routes.
description:
- Manage openshift route objects programmatically.
options:
state:
description:
- State represents whether to create, modify, delete, or list
required: true
default: present
choices: ["present", "absent", "list"]
aliases: []
kubeconfig:
description:
- The path for the kubeconfig file to use for authentication
required: false
default: /etc/origin/master/admin.kubeconfig
aliases: []
debug:
description:
- Turn on debug output.
required: false
default: False
aliases: []
name:
description:
- Name of the object that is being queried.
required: false
default: None
aliases: []
namespace:
description:
- The namespace where the object lives.
required: false
default: str
aliases: []
labels:
description:
- The labels to apply on the route
required: false
default: None
aliases: []
tls_termination:
description:
- The options for termination. e.g. reencrypt
required: false
default: None
aliases: []
dest_cacert_path:
description:
- The path to the dest_cacert
required: false
default: None
aliases: []
cacert_path:
description:
- The path to the cacert
required: false
default: None
aliases: []
cert_path:
description:
- The path to the cert
required: false
default: None
aliases: []
key_path:
description:
- The path to the key
required: false
default: None
aliases: []
dest_cacert_content:
description:
- The dest_cacert content
required: false
default: None
aliases: []
cacert_content:
description:
- The cacert content
required: false
default: None
aliases: []
cert_content:
description:
- The cert content
required: false
default: None
aliases: []
service_name:
description:
- The name of the service that this route points to.
required: false
default: None
aliases: []
host:
description:
- The host that the route will use. e.g. myapp.x.y.z
required: false
default: None
aliases: []
port:
description:
- The Name of the service port or number of the container port the route will route traffic to
required: false
default: None
aliases: []
author:
- "Kenny Woodson <kwoodson@redhat.com>"
extends_documentation_fragment: []
'''
EXAMPLES = '''
- name: Configure certificates for reencrypt route
oc_route:
name: myapproute
namespace: awesomeapp
    cert_path: "/etc/origin/master/named_certificates/myapp_cert"
    key_path: "/etc/origin/master/named_certificates/myapp_key"
    cacert_path: "/etc/origin/master/named_certificates/myapp_cacert"
dest_cacert_content: "{{ dest_cacert_content }}"
service_name: myapp_php
host: myapp.awesomeapp.openshift.com
tls_termination: reencrypt
run_once: true
'''
# -*- -*- -*- End included fragment: doc/route -*- -*- -*-
# -*- -*- -*- Begin included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*-
class YeditException(Exception):  # pragma: no cover
    """Raised for any Yedit-specific failure (bad key path, write error, ...)."""
# pylint: disable=too-many-public-methods
class Yedit(object):  # pragma: no cover
    ''' Class to modify yaml files '''
    # NOTE(review): re_valid_key embeds a literal '%s' but is expanded with
    # str.format() in valid_key/parse_key below, so the extra separators are
    # never actually substituted into this pattern -- confirm upstream intent.
    re_valid_key = r"(((\[-?\d+\])|([0-9a-zA-Z%s/_-]+)).?)+$"
    re_key = r"(?:\[(-?\d+)\])|([0-9a-zA-Z{}/_-]+)"
    com_sep = set(['.', '#', '|', ':'])

    # pylint: disable=too-many-arguments
    def __init__(self,
                 filename=None,
                 content=None,
                 content_type='yaml',
                 separator='.',
                 backup=False):
        self.content = content
        self._separator = separator
        self.filename = filename
        self.__yaml_dict = content
        self.content_type = content_type
        self.backup = backup
        self.load(content_type=self.content_type)
        if self.__yaml_dict is None:
            self.__yaml_dict = {}

    @property
    def separator(self):
        ''' getter method for separator '''
        return self._separator

    @separator.setter
    def separator(self, inc_sep):
        ''' setter method for separator '''
        self._separator = inc_sep

    @property
    def yaml_dict(self):
        ''' getter method for yaml_dict '''
        return self.__yaml_dict

    @yaml_dict.setter
    def yaml_dict(self, value):
        ''' setter method for yaml_dict '''
        self.__yaml_dict = value

    @staticmethod
    def parse_key(key, sep='.'):
        '''parse the key allowing the appropriate separator'''
        common_separators = list(Yedit.com_sep - set([sep]))
        return re.findall(Yedit.re_key.format(''.join(common_separators)), key)

    @staticmethod
    def valid_key(key, sep='.'):
        '''validate the incoming key'''
        common_separators = list(Yedit.com_sep - set([sep]))
        if not re.match(Yedit.re_valid_key.format(''.join(common_separators)), key):
            return False

        return True

    # pylint: disable=too-many-return-statements,too-many-branches
    @staticmethod
    def remove_entry(data, key, index=None, value=None, sep='.'):
        ''' remove data at location key '''
        # Empty key addresses the whole document: clear the dict/list in place.
        if key == '' and isinstance(data, dict):
            if value is not None:
                data.pop(value)
            elif index is not None:
                raise YeditException("remove_entry for a dictionary does not have an index {}".format(index))
            else:
                data.clear()

            return True

        elif key == '' and isinstance(data, list):
            ind = None
            if value is not None:
                try:
                    ind = data.index(value)
                except ValueError:
                    return False
            elif index is not None:
                ind = index
            else:
                del data[:]

            if ind is not None:
                data.pop(ind)

            return True

        if not (key and Yedit.valid_key(key, sep)) and \
           isinstance(data, (list, dict)):
            return None

        # Walk all but the last key component to find the parent container.
        key_indexes = Yedit.parse_key(key, sep)
        for arr_ind, dict_key in key_indexes[:-1]:
            if dict_key and isinstance(data, dict):
                data = data.get(dict_key)
            elif (arr_ind and isinstance(data, list) and
                  int(arr_ind) <= len(data) - 1):
                data = data[int(arr_ind)]
            else:
                return None

        # process last index for remove
        # expected list entry
        if key_indexes[-1][0]:
            if isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:  # noqa: E501
                del data[int(key_indexes[-1][0])]
                return True

        # expected dict entry
        elif key_indexes[-1][1]:
            if isinstance(data, dict):
                del data[key_indexes[-1][1]]
                return True

    @staticmethod
    def add_entry(data, key, item=None, sep='.'):
        ''' Get an item from a dictionary with key notation a.b.c
            d = {'a': {'b': 'c'}}}
            key = a#b
            return c
        '''
        if key == '':
            pass
        elif (not (key and Yedit.valid_key(key, sep)) and
              isinstance(data, (list, dict))):
            return None

        # Walk the key path, creating intermediate dicts as needed.
        key_indexes = Yedit.parse_key(key, sep)
        for arr_ind, dict_key in key_indexes[:-1]:
            if dict_key:
                if isinstance(data, dict) and dict_key in data and data[dict_key]:  # noqa: E501
                    data = data[dict_key]
                    continue

                elif data and not isinstance(data, dict):
                    raise YeditException("Unexpected item type found while going through key " +
                                         "path: {} (at key: {})".format(key, dict_key))

                data[dict_key] = {}
                data = data[dict_key]

            elif (arr_ind and isinstance(data, list) and
                  int(arr_ind) <= len(data) - 1):
                data = data[int(arr_ind)]
            else:
                raise YeditException("Unexpected item type found while going through key path: {}".format(key))

        if key == '':
            data = item

        # process last index for add
        # expected list entry
        elif key_indexes[-1][0] and isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:  # noqa: E501
            data[int(key_indexes[-1][0])] = item

        # expected dict entry
        elif key_indexes[-1][1] and isinstance(data, dict):
            data[key_indexes[-1][1]] = item

        # didn't add/update to an existing list, nor add/update key to a dict
        # so we must have been provided some syntax like a.b.c[<int>] = "data" for a
        # non-existent array
        else:
            raise YeditException("Error adding to object at path: {}".format(key))

        return data

    @staticmethod
    def get_entry(data, key, sep='.'):
        ''' Get an item from a dictionary with key notation a.b.c
            d = {'a': {'b': 'c'}}}
            key = a.b
            return c
        '''
        if key == '':
            pass
        elif (not (key and Yedit.valid_key(key, sep)) and
              isinstance(data, (list, dict))):
            return None

        key_indexes = Yedit.parse_key(key, sep)
        for arr_ind, dict_key in key_indexes:
            if dict_key and isinstance(data, dict):
                data = data.get(dict_key)
            elif (arr_ind and isinstance(data, list) and
                  int(arr_ind) <= len(data) - 1):
                data = data[int(arr_ind)]
            else:
                return None

        return data

    @staticmethod
    def _write(filename, contents):
        ''' Actually write the file contents to disk. This helps with mocking. '''
        # Write to a sibling temp file under an exclusive lock, then rename,
        # so concurrent readers never observe a partially-written file.
        tmp_filename = filename + '.yedit'

        with open(tmp_filename, 'w') as yfd:
            fcntl.flock(yfd, fcntl.LOCK_EX | fcntl.LOCK_NB)
            yfd.write(contents)
            fcntl.flock(yfd, fcntl.LOCK_UN)

        os.rename(tmp_filename, filename)

    def write(self):
        ''' write to file '''
        if not self.filename:
            raise YeditException('Please specify a filename.')

        if self.backup and self.file_exists():
            shutil.copy(self.filename, '{}.{}'.format(self.filename, time.strftime("%Y%m%dT%H%M%S")))

        # Try to set format attributes if supported
        try:
            self.yaml_dict.fa.set_block_style()
        except AttributeError:
            pass

        # Try to use RoundTripDumper if supported.
        if self.content_type == 'yaml':
            try:
                Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
            except AttributeError:
                Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False))
        elif self.content_type == 'json':
            Yedit._write(self.filename, json.dumps(self.yaml_dict, indent=4, sort_keys=True))
        else:
            raise YeditException('Unsupported content_type: {}.'.format(self.content_type) +
                                 'Please specify a content_type of yaml or json.')

        return (True, self.yaml_dict)

    def read(self):
        ''' read from file '''
        # check if it exists
        if self.filename is None or not self.file_exists():
            return None

        contents = None
        with open(self.filename) as yfd:
            contents = yfd.read()

        return contents

    def file_exists(self):
        ''' return whether file exists '''
        if os.path.exists(self.filename):
            return True

        return False

    def load(self, content_type='yaml'):
        ''' return yaml file '''
        contents = self.read()

        if not contents and not self.content:
            return None

        if self.content:
            if isinstance(self.content, dict):
                self.yaml_dict = self.content
                return self.yaml_dict
            elif isinstance(self.content, str):
                contents = self.content

        # check if it is yaml
        try:
            if content_type == 'yaml' and contents:
                # Try to set format attributes if supported
                try:
                    self.yaml_dict.fa.set_block_style()
                except AttributeError:
                    pass

                # Try to use RoundTripLoader if supported.
                try:
                    self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
                except AttributeError:
                    self.yaml_dict = yaml.safe_load(contents)

                # Try to set format attributes if supported
                try:
                    self.yaml_dict.fa.set_block_style()
                except AttributeError:
                    pass

            elif content_type == 'json' and contents:
                self.yaml_dict = json.loads(contents)
        except yaml.YAMLError as err:
            # Error loading yaml or json
            raise YeditException('Problem with loading yaml file. {}'.format(err))

        return self.yaml_dict

    def get(self, key):
        ''' get a specified key'''
        try:
            entry = Yedit.get_entry(self.yaml_dict, key, self.separator)
        except KeyError:
            entry = None

        return entry

    def pop(self, path, key_or_item):
        ''' remove a key, value pair from a dict or an item for a list'''
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError:
            entry = None

        if entry is None:
            return (False, self.yaml_dict)

        if isinstance(entry, dict):
            # AUDIT:maybe-no-member makes sense due to fuzzy types
            # pylint: disable=maybe-no-member
            if key_or_item in entry:
                entry.pop(key_or_item)
                return (True, self.yaml_dict)
            return (False, self.yaml_dict)

        elif isinstance(entry, list):
            # AUDIT:maybe-no-member makes sense due to fuzzy types
            # pylint: disable=maybe-no-member
            ind = None
            try:
                ind = entry.index(key_or_item)
            except ValueError:
                return (False, self.yaml_dict)

            entry.pop(ind)
            return (True, self.yaml_dict)

        return (False, self.yaml_dict)

    def delete(self, path, index=None, value=None):
        ''' remove path from a dict'''
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError:
            entry = None

        if entry is None:
            return (False, self.yaml_dict)

        result = Yedit.remove_entry(self.yaml_dict, path, index, value, self.separator)
        if not result:
            return (False, self.yaml_dict)

        return (True, self.yaml_dict)

    def exists(self, path, value):
        ''' check if value exists at path'''
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError:
            entry = None

        if isinstance(entry, list):
            if value in entry:
                return True
            return False

        elif isinstance(entry, dict):
            if isinstance(value, dict):
                rval = False
                for key, val in value.items():
                    if entry[key] != val:
                        rval = False
                        break
                else:
                    rval = True
                return rval

            return value in entry

        return entry == value

    def append(self, path, value):
        '''append value to a list'''
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError:
            entry = None

        if entry is None:
            self.put(path, [])
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        if not isinstance(entry, list):
            return (False, self.yaml_dict)

        # AUDIT:maybe-no-member makes sense due to loading data from
        # a serialized format.
        # pylint: disable=maybe-no-member
        entry.append(value)
        return (True, self.yaml_dict)

    # pylint: disable=too-many-arguments
    def update(self, path, value, index=None, curr_value=None):
        ''' put path, value into a dict '''
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError:
            entry = None

        if isinstance(entry, dict):
            # AUDIT:maybe-no-member makes sense due to fuzzy types
            # pylint: disable=maybe-no-member
            if not isinstance(value, dict):
                raise YeditException('Cannot replace key, value entry in dict with non-dict type. ' +
                                     'value=[{}] type=[{}]'.format(value, type(value)))

            entry.update(value)
            return (True, self.yaml_dict)

        elif isinstance(entry, list):
            # AUDIT:maybe-no-member makes sense due to fuzzy types
            # pylint: disable=maybe-no-member
            ind = None
            if curr_value:
                try:
                    ind = entry.index(curr_value)
                except ValueError:
                    return (False, self.yaml_dict)

            elif index is not None:
                ind = index

            if ind is not None and entry[ind] != value:
                entry[ind] = value
                return (True, self.yaml_dict)

            # see if it exists in the list
            try:
                ind = entry.index(value)
            except ValueError:
                # doesn't exist, append it
                entry.append(value)
                return (True, self.yaml_dict)

            # already exists, return
            if ind is not None:
                return (False, self.yaml_dict)
        return (False, self.yaml_dict)

    def put(self, path, value):
        ''' put path, value into a dict '''
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError:
            entry = None

        if entry == value:
            return (False, self.yaml_dict)

        # deepcopy didn't work
        # Try to use ruamel.yaml and fallback to pyyaml
        try:
            tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
                                                      default_flow_style=False),
                                 yaml.RoundTripLoader)
        except AttributeError:
            tmp_copy = copy.deepcopy(self.yaml_dict)

        # set the format attributes if available
        try:
            tmp_copy.fa.set_block_style()
        except AttributeError:
            pass

        result = Yedit.add_entry(tmp_copy, path, value, self.separator)
        if result is None:
            return (False, self.yaml_dict)

        # When path equals "" it is a special case.
        # "" refers to the root of the document
        # Only update the root path (entire document) when its a list or dict
        if path == '':
            if isinstance(result, list) or isinstance(result, dict):
                self.yaml_dict = result
                return (True, self.yaml_dict)

            return (False, self.yaml_dict)

        self.yaml_dict = tmp_copy

        return (True, self.yaml_dict)

    def create(self, path, value):
        ''' create a yaml file '''
        if not self.file_exists():
            # deepcopy didn't work
            # Try to use ruamel.yaml and fallback to pyyaml
            try:
                tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict,
                                                          default_flow_style=False),
                                     yaml.RoundTripLoader)
            except AttributeError:
                tmp_copy = copy.deepcopy(self.yaml_dict)

            # set the format attributes if available
            try:
                tmp_copy.fa.set_block_style()
            except AttributeError:
                pass

            result = Yedit.add_entry(tmp_copy, path, value, self.separator)
            if result is not None:
                self.yaml_dict = tmp_copy
                return (True, self.yaml_dict)

        return (False, self.yaml_dict)

    @staticmethod
    def get_curr_value(invalue, val_type):
        '''return the current value'''
        if invalue is None:
            return None

        curr_value = invalue
        if val_type == 'yaml':
            try:
                # AUDIT:maybe-no-member makes sense due to different yaml libraries
                # pylint: disable=maybe-no-member
                # NOTE(review): pyyaml's safe_load does not accept a Loader
                # kwarg; this path presumably targets ruamel.yaml's API and
                # falls back below on AttributeError -- confirm.
                curr_value = yaml.safe_load(invalue, Loader=yaml.RoundTripLoader)
            except AttributeError:
                curr_value = yaml.safe_load(invalue)
        elif val_type == 'json':
            curr_value = json.loads(invalue)

        return curr_value

    @staticmethod
    def parse_value(inc_value, vtype=''):
        '''determine value type passed'''
        true_bools = ['y', 'Y', 'yes', 'Yes', 'YES', 'true', 'True', 'TRUE',
                      'on', 'On', 'ON', ]
        false_bools = ['n', 'N', 'no', 'No', 'NO', 'false', 'False', 'FALSE',
                       'off', 'Off', 'OFF']

        # It came in as a string but you didn't specify value_type as string
        # we will convert to bool if it matches any of the above cases
        if isinstance(inc_value, str) and 'bool' in vtype:
            if inc_value not in true_bools and inc_value not in false_bools:
                raise YeditException('Not a boolean type. str=[{}] vtype=[{}]'.format(inc_value, vtype))
        elif isinstance(inc_value, bool) and 'str' in vtype:
            inc_value = str(inc_value)

        # There is a special case where '' will turn into None after yaml loading it so skip
        if isinstance(inc_value, str) and inc_value == '':
            pass
        # If vtype is not str then go ahead and attempt to yaml load it.
        elif isinstance(inc_value, str) and 'str' not in vtype:
            try:
                inc_value = yaml.safe_load(inc_value)
            except Exception:
                raise YeditException('Could not determine type of incoming value. ' +
                                     'value=[{}] vtype=[{}]'.format(type(inc_value), vtype))

        return inc_value

    @staticmethod
    def process_edits(edits, yamlfile):
        '''run through a list of edits and process them one-by-one'''
        results = []
        for edit in edits:
            value = Yedit.parse_value(edit['value'], edit.get('value_type', ''))
            if edit.get('action') == 'update':
                # pylint: disable=line-too-long
                curr_value = Yedit.get_curr_value(
                    Yedit.parse_value(edit.get('curr_value')),
                    edit.get('curr_value_format'))

                rval = yamlfile.update(edit['key'],
                                       value,
                                       edit.get('index'),
                                       curr_value)

            elif edit.get('action') == 'append':
                rval = yamlfile.append(edit['key'], value)

            else:
                rval = yamlfile.put(edit['key'], value)

            if rval[0]:
                results.append({'key': edit['key'], 'edit': rval[1]})

        return {'changed': len(results) > 0, 'results': results}

    # pylint: disable=too-many-return-statements,too-many-branches
    @staticmethod
    def run_ansible(params):
        '''perform the idempotent crud operations'''
        yamlfile = Yedit(filename=params['src'],
                         backup=params['backup'],
                         content_type=params['content_type'],
                         separator=params['separator'])

        state = params['state']

        if params['src']:
            rval = yamlfile.load()

            if yamlfile.yaml_dict is None and state != 'present':
                return {'failed': True,
                        'msg': 'Error opening file [{}].  Verify that the '.format(params['src']) +
                               'file exists, that it is has correct permissions, and is valid yaml.'}

        if state == 'list':
            if params['content']:
                content = Yedit.parse_value(params['content'], params['content_type'])
                yamlfile.yaml_dict = content

            if params['key']:
                rval = yamlfile.get(params['key'])

            return {'changed': False, 'result': rval, 'state': state}

        elif state == 'absent':
            if params['content']:
                content = Yedit.parse_value(params['content'], params['content_type'])
                yamlfile.yaml_dict = content

            if params['update']:
                rval = yamlfile.pop(params['key'], params['value'])
            else:
                rval = yamlfile.delete(params['key'], params['index'], params['value'])

            if rval[0] and params['src']:
                yamlfile.write()

            return {'changed': rval[0], 'result': rval[1], 'state': state}

        elif state == 'present':
            # check if content is different than what is in the file
            if params['content']:
                content = Yedit.parse_value(params['content'], params['content_type'])

                # We had no edits to make and the contents are the same
                if yamlfile.yaml_dict == content and \
                   params['value'] is None:
                    return {'changed': False, 'result': yamlfile.yaml_dict, 'state': state}

                yamlfile.yaml_dict = content

            # If we were passed a key, value then
            # we enapsulate it in a list and process it
            # Key, Value passed to the module : Converted to Edits list #
            edits = []
            _edit = {}
            if params['value'] is not None:
                _edit['value'] = params['value']
                _edit['value_type'] = params['value_type']
                _edit['key'] = params['key']

                if params['update']:
                    _edit['action'] = 'update'
                    _edit['curr_value'] = params['curr_value']
                    _edit['curr_value_format'] = params['curr_value_format']
                    _edit['index'] = params['index']

                elif params['append']:
                    _edit['action'] = 'append'

                edits.append(_edit)

            elif params['edits'] is not None:
                edits = params['edits']

            if edits:
                results = Yedit.process_edits(edits, yamlfile)

                # if there were changes and a src provided to us we need to write
                if results['changed'] and params['src']:
                    yamlfile.write()

                return {'changed': results['changed'], 'result': results['results'], 'state': state}

            # no edits to make
            if params['src']:
                # pylint: disable=redefined-variable-type
                rval = yamlfile.write()
                return {'changed': rval[0],
                        'result': rval[1],
                        'state': state}

            # We were passed content but no src, key or value, or edits.  Return contents in memory
            return {'changed': False, 'result': yamlfile.yaml_dict, 'state': state}
        return {'failed': True, 'msg': 'Unkown state passed'}
# -*- -*- -*- End included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: lib/base.py -*- -*- -*-
# pylint: disable=too-many-lines
# noqa: E301,E302,E303,T001
class OpenShiftCLIError(Exception):
    '''Raised for errors encountered while driving the oc/oadm command line.'''
ADDITIONAL_PATH_LOOKUPS = ['/usr/local/bin', os.path.expanduser('~/bin')]


def locate_oc_binary():
    ''' Find and return oc binary file '''
    # https://github.com/openshift/openshift-ansible/issues/3410
    # oc can be in /usr/local/bin in some cases, but that may not
    # be in $PATH due to ansible/sudo
    search_paths = os.environ.get("PATH", os.defpath).split(os.pathsep) + ADDITIONAL_PATH_LOOKUPS
    binary_name = 'oc'

    # shutil.which exists on Python 3; on Python 2 fall back to a
    # naive existence scan of the candidate directories.
    try:
        located = shutil.which(binary_name, path=os.pathsep.join(search_paths))
    except AttributeError:
        located = next(
            (os.path.join(directory, binary_name)
             for directory in search_paths
             if os.path.exists(os.path.join(directory, binary_name))),
            None)

    # When nothing was found, return the bare name and let the OS resolve it.
    return located if located is not None else binary_name
# pylint: disable=too-few-public-methods
# pylint: disable=too-few-public-methods
class OpenShiftCLI(object):
    ''' Class to wrap the command line tools.

    Builds `oc` / `oc adm` invocations, runs them with a private copy of the
    kubeconfig, and normalizes their output into result dictionaries.
    '''
    def __init__(self,
                 namespace,
                 kubeconfig='/etc/origin/master/admin.kubeconfig',
                 verbose=False,
                 all_namespaces=False):
        ''' Constructor for OpenShiftCLI.

        namespace: namespace passed to oc via -n (sentinel strings
                   'none'/'empty' suppress the flag; see openshift_cmd)
        kubeconfig: path to the kubeconfig; a private temp copy is used
        verbose: echo commands and their output to stdout
        all_namespaces: pass --all-namespaces instead of -n
        '''
        self.namespace = namespace
        self.verbose = verbose
        # Work from a private copy so concurrent module runs cannot race on
        # the shared admin kubeconfig file.
        self.kubeconfig = Utils.create_tmpfile_copy(kubeconfig)
        self.all_namespaces = all_namespaces
        self.oc_binary = locate_oc_binary()

    # Pylint allows only 5 arguments to be passed.
    # pylint: disable=too-many-arguments
    def _replace_content(self, resource, rname, content, edits=None, force=False, sep='.'):
        ''' replace the current object with the content

        Fetches the live object, applies `content` (key/value puts) or
        `edits` (Yedit edit list) to it, and calls `oc replace` only when
        something actually changed.
        '''
        res = self._get(resource, rname)
        if not res['results']:
            # Nothing to replace; bubble up the (empty) get result.
            return res

        fname = Utils.create_tmpfile(rname + '-')
        yed = Yedit(fname, res['results'][0], separator=sep)
        updated = False

        if content is not None:
            changes = []
            for key, value in content.items():
                changes.append(yed.put(key, value))

            # yed.put returns (changed, result); any change triggers replace.
            if any([change[0] for change in changes]):
                updated = True

        elif edits is not None:
            results = Yedit.process_edits(edits, yed)

            if results['changed']:
                updated = True

        if updated:
            yed.write()
            atexit.register(Utils.cleanup, [fname])
            return self._replace(fname, force)

        return {'returncode': 0, 'updated': False}

    def _replace(self, fname, force=False):
        '''replace the current object with oc replace'''
        # We are removing the 'resourceVersion' to handle
        # a race condition when modifying oc objects
        yed = Yedit(fname)
        results = yed.delete('metadata.resourceVersion')
        if results[0]:
            yed.write()

        cmd = ['replace', '-f', fname]
        if force:
            cmd.append('--force')
        return self.openshift_cmd(cmd)

    def _create_from_content(self, rname, content):
        '''create a temporary file and then call oc create on it'''
        fname = Utils.create_tmpfile(rname + '-')
        yed = Yedit(fname, content=content)
        yed.write()

        atexit.register(Utils.cleanup, [fname])

        return self._create(fname)

    def _create(self, fname):
        '''call oc create on a filename'''
        return self.openshift_cmd(['create', '-f', fname])

    def _delete(self, resource, name=None, selector=None):
        '''call oc delete on a resource

        Exactly one of `name` or `selector` must be provided; `selector`
        wins when both are given.
        '''
        cmd = ['delete', resource]
        if selector is not None:
            cmd.append('--selector={}'.format(selector))
        elif name is not None:
            cmd.append(name)
        else:
            raise OpenShiftCLIError('Either name or selector is required when calling delete.')

        return self.openshift_cmd(cmd)

    def _process(self, template_name, create=False, params=None, template_data=None):  # noqa: E501
        '''process a template

        template_name: the name of the template to process
        create: whether to send to oc create after processing
        params: the parameters for the template
        template_data: the incoming template's data; instead of a file
        '''
        cmd = ['process']
        if template_data:
            # '-f -' makes oc read the template from stdin (input_data).
            cmd.extend(['-f', '-'])
        else:
            cmd.append(template_name)
        if params:
            param_str = ["{}={}".format(key, str(value).replace("'", r'"')) for key, value in params.items()]
            cmd.append('-p')
            cmd.extend(param_str)

        results = self.openshift_cmd(cmd, output=True, input_data=template_data)

        if results['returncode'] != 0 or not create:
            return results

        fname = Utils.create_tmpfile(template_name + '-')
        yed = Yedit(fname, results['results'])
        yed.write()

        atexit.register(Utils.cleanup, [fname])

        return self.openshift_cmd(['create', '-f', fname])

    def _get(self, resource, name=None, selector=None, field_selector=None):
        '''return a resource by name '''
        cmd = ['get', resource]

        if selector is not None:
            cmd.append('--selector={}'.format(selector))

        if field_selector is not None:
            cmd.append('--field-selector={}'.format(field_selector))

        # Name cannot be used with selector or field_selector.
        if selector is None and field_selector is None and name is not None:
            cmd.append(name)

        cmd.extend(['-o', 'json'])

        rval = self.openshift_cmd(cmd, output=True)

        # Ensure results are returned in an array
        if 'items' in rval:
            rval['results'] = rval['items']
        elif not isinstance(rval['results'], list):
            rval['results'] = [rval['results']]

        return rval

    def _schedulable(self, node=None, selector=None, schedulable=True):
        ''' perform oadm manage-node scheduable '''
        cmd = ['manage-node']
        if node:
            cmd.extend(node)
        else:
            cmd.append('--selector={}'.format(selector))

        cmd.append('--schedulable={}'.format(schedulable))

        return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')  # noqa: E501

    def _list_pods(self, node=None, selector=None, pod_selector=None):
        ''' perform oadm list pods

            node: the node in which to list pods
            selector: the label selector filter if provided
            pod_selector: the pod selector filter if provided
        '''
        cmd = ['manage-node']
        if node:
            cmd.extend(node)
        else:
            cmd.append('--selector={}'.format(selector))

        if pod_selector:
            cmd.append('--pod-selector={}'.format(pod_selector))

        cmd.extend(['--list-pods', '-o', 'json'])

        return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')

    # pylint: disable=too-many-arguments
    def _evacuate(self, node=None, selector=None, pod_selector=None, dry_run=False, grace_period=None, force=False):
        ''' perform oadm manage-node evacuate '''
        cmd = ['manage-node']
        if node:
            cmd.extend(node)
        else:
            cmd.append('--selector={}'.format(selector))

        if dry_run:
            cmd.append('--dry-run')

        if pod_selector:
            cmd.append('--pod-selector={}'.format(pod_selector))

        if grace_period:
            cmd.append('--grace-period={}'.format(int(grace_period)))

        if force:
            cmd.append('--force')

        cmd.append('--evacuate')

        return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')

    def _version(self):
        ''' return the openshift version'''
        return self.openshift_cmd(['version'], output=True, output_type='raw')

    def _import_image(self, url=None, name=None, tag=None):
        ''' perform image import '''
        cmd = ['import-image']

        image = '{0}'.format(name)
        if tag:
            image += ':{0}'.format(tag)

        cmd.append(image)

        if url:
            cmd.append('--from={0}/{1}'.format(url, image))

        cmd.append('-n{0}'.format(self.namespace))

        cmd.append('--confirm')
        return self.openshift_cmd(cmd)

    def _run(self, cmds, input_data):
        ''' Actually executes the command. This makes mocking easier. '''
        curr_env = os.environ.copy()
        curr_env.update({'KUBECONFIG': self.kubeconfig})
        proc = subprocess.Popen(cmds,
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                env=curr_env)

        # The pipes are binary; encode str input so Python 3 does not raise
        # TypeError when template data is piped to stdin.
        if input_data is not None and not isinstance(input_data, bytes):
            input_data = input_data.encode('utf-8')
        stdout, stderr = proc.communicate(input_data)

        return proc.returncode, stdout.decode('utf-8'), stderr.decode('utf-8')

    # pylint: disable=too-many-arguments,too-many-branches
    def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
        '''Base command for oc.

        cmd: argument list appended after the oc binary (and 'adm')
        oadm: run 'oc adm' instead of plain 'oc'
        output: capture stdout into rval['results']
        output_type: 'json' to parse stdout, 'raw' to return it verbatim
        input_data: optional data piped to the child's stdin
        '''
        cmds = [self.oc_binary]

        if oadm:
            cmds.append('adm')

        cmds.extend(cmd)

        if self.all_namespaces:
            cmds.extend(['--all-namespaces'])
        # 'none'/'empty' namespaces are sentinels meaning "do not pass -n".
        # 'emtpy' is a historical typo kept for backward compatibility.
        elif self.namespace is not None and self.namespace.lower() not in ['none', 'empty', 'emtpy']:  # E501
            cmds.extend(['-n', self.namespace])

        if self.verbose:
            print(' '.join(cmds))

        try:
            returncode, stdout, stderr = self._run(cmds, input_data)
        except OSError as ex:
            returncode, stdout, stderr = 1, '', 'Failed to execute {}: {}'.format(subprocess.list2cmdline(cmds), ex)

        rval = {"returncode": returncode,
                "cmd": ' '.join(cmds)}

        if output_type == 'json':
            rval['results'] = {}
            if output and stdout:
                try:
                    rval['results'] = json.loads(stdout)
                except ValueError as verr:
                    # The decode-failure message differs between Python 2 and
                    # 3, so record the error unconditionally instead of
                    # matching on its text (the old check never fired on py3).
                    rval['err'] = verr.args
        elif output_type == 'raw':
            rval['results'] = stdout if output else ''

        if self.verbose:
            print("STDOUT: {0}".format(stdout))
            print("STDERR: {0}".format(stderr))

        if 'err' in rval or returncode != 0:
            rval.update({"stderr": stderr,
                         "stdout": stdout})

        return rval
class Utils(object):  # pragma: no cover
    ''' utilities for openshiftcli modules '''
    @staticmethod
    def _write(filename, contents):
        ''' Actually write the file contents to disk. This helps with mocking. '''
        with open(filename, 'w') as sfd:
            sfd.write(str(contents))

    @staticmethod
    def create_tmp_file_from_contents(rname, data, ftype='yaml'):
        ''' create a file in tmp with name and contents

        ftype selects the serialization: 'yaml' (default), 'json',
        or anything else to write the data verbatim.
        '''
        tmp = Utils.create_tmpfile(prefix=rname)

        if ftype == 'yaml':
            # AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
            # pylint: disable=no-member
            if hasattr(yaml, 'RoundTripDumper'):
                # ruamel.yaml is installed; it round-trips formatting.
                Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper))
            else:
                Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False))

        elif ftype == 'json':
            Utils._write(tmp, json.dumps(data))
        else:
            Utils._write(tmp, data)

        # Register cleanup when module is done
        atexit.register(Utils.cleanup, [tmp])
        return tmp

    @staticmethod
    def create_tmpfile_copy(inc_file):
        '''create a temporary copy of a file'''
        tmpfile = Utils.create_tmpfile('lib_openshift-')
        # Use a context manager so the source handle is closed promptly
        # instead of leaking until garbage collection.
        with open(inc_file) as fhandle:
            Utils._write(tmpfile, fhandle.read())

        # Cleanup the tmpfile
        atexit.register(Utils.cleanup, [tmpfile])
        return tmpfile

    @staticmethod
    def create_tmpfile(prefix='tmp'):
        ''' Generates and returns a temporary file name '''
        # delete=False: only the name is wanted; the file persists for the
        # caller to write and is removed later via Utils.cleanup/atexit.
        with tempfile.NamedTemporaryFile(prefix=prefix, delete=False) as tmp:
            return tmp.name

    @staticmethod
    def create_tmp_files_from_contents(content, content_type=None):
        '''Turn an array of dict: filename, content into a files array'''
        if not isinstance(content, list):
            content = [content]
        files = []
        for item in content:
            path = Utils.create_tmp_file_from_contents(item['path'] + '-',
                                                       item['data'],
                                                       ftype=content_type)
            files.append({'name': os.path.basename(item['path']),
                          'path': path})
        return files

    @staticmethod
    def cleanup(files):
        '''Clean up on exit '''
        for sfile in files:
            if os.path.exists(sfile):
                if os.path.isdir(sfile):
                    shutil.rmtree(sfile)
                elif os.path.isfile(sfile):
                    os.remove(sfile)

    @staticmethod
    def exists(results, _name):
        ''' Check to see if the results include the name '''
        if not results:
            return False

        if Utils.find_result(results, _name):
            return True

        return False

    @staticmethod
    def find_result(results, _name):
        ''' Find the specified result by name'''
        rval = None
        for result in results:
            if 'metadata' in result and result['metadata']['name'] == _name:
                rval = result
                break

        return rval

    @staticmethod
    def get_resource_file(sfile, sfile_type='yaml'):
        ''' return the service file '''
        contents = None
        with open(sfile) as sfd:
            contents = sfd.read()

        if sfile_type == 'yaml':
            # AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage
            # pylint: disable=no-member
            if hasattr(yaml, 'RoundTripLoader'):
                contents = yaml.load(contents, yaml.RoundTripLoader)
            else:
                contents = yaml.safe_load(contents)
        elif sfile_type == 'json':
            contents = json.loads(contents)

        return contents

    @staticmethod
    def filter_versions(stdout):
        ''' filter the oc version output '''
        version_dict = {}
        version_search = ['oc', 'openshift', 'kubernetes']

        for line in stdout.strip().split('\n'):
            for term in version_search:
                if not line:
                    continue
                if line.startswith(term):
                    version_dict[term] = line.split()[-1]

        # horrible hack to get openshift version in Openshift 3.2
        # By default "oc version in 3.2 does not return an "openshift" version
        # NOTE: assumes an 'oc' line was present; otherwise this raises KeyError.
        if "openshift" not in version_dict:
            version_dict["openshift"] = version_dict["oc"]

        return version_dict

    @staticmethod
    def add_custom_versions(versions):
        ''' create custom versions strings '''
        versions_dict = {}

        for tech, version in versions.items():
            # clean up "-" from version
            if "-" in version:
                version = version.split("-")[0]

            if version.startswith('v'):
                version = version[1:]  # Remove the 'v' prefix
                versions_dict[tech + '_numeric'] = version.split('+')[0]
                # "3.3.0.33" is what we have, we want "3.3"
                versions_dict[tech + '_short'] = "{}.{}".format(*version.split('.'))

        return versions_dict

    @staticmethod
    def openshift_installed():
        ''' check if openshift is installed '''
        import rpm
        transaction_set = rpm.TransactionSet()
        rpmquery = transaction_set.dbMatch("name", "atomic-openshift")

        return rpmquery.count() > 0

    # Disabling too-many-branches. This is a yaml dictionary comparison function
    # pylint: disable=too-many-branches,too-many-return-statements,too-many-statements
    @staticmethod
    def check_def_equal(user_def, result_def, skip_keys=None, debug=False):
        ''' Given a user defined definition, compare it with the results given back by our query. '''
        # Currently these values are autogenerated and we do not need to check them
        skip = ['metadata', 'status']
        if skip_keys:
            skip.extend(skip_keys)

        # The comparison is driven by result_def's keys; user_def may have
        # extra keys at this level without failing (dict sub-levels do check
        # key sets exactly).
        for key, value in result_def.items():
            if key in skip:
                continue

            # Both are lists
            if isinstance(value, list):
                if key not in user_def:
                    if debug:
                        print('User data does not have key [%s]' % key)
                        print('User data: %s' % user_def)
                    return False

                if not isinstance(user_def[key], list):
                    if debug:
                        print('user_def[key] is not a list key=[%s] user_def[key]=%s' % (key, user_def[key]))
                    return False

                if len(user_def[key]) != len(value):
                    if debug:
                        print("List lengths are not equal.")
                        print("key=[%s]: user_def[%s] != value[%s]" % (key, len(user_def[key]), len(value)))
                        print("user_def: %s" % user_def[key])
                        print("value: %s" % value)
                    return False

                for values in zip(user_def[key], value):
                    if isinstance(values[0], dict) and isinstance(values[1], dict):
                        if debug:
                            print('sending list - list')
                            print(type(values[0]))
                            print(type(values[1]))
                        result = Utils.check_def_equal(values[0], values[1], skip_keys=skip_keys, debug=debug)
                        if not result:
                            # Gate this diagnostic on debug like every other
                            # print in this function (was unconditional).
                            if debug:
                                print('list compare returned false')
                            return False

                    elif value != user_def[key]:
                        if debug:
                            print('value should be identical')
                            print(user_def[key])
                            print(value)
                        return False

            # recurse on a dictionary
            elif isinstance(value, dict):
                if key not in user_def:
                    if debug:
                        print("user_def does not have key [%s]" % key)
                    return False
                if not isinstance(user_def[key], dict):
                    if debug:
                        print("dict returned false: not instance of dict")
                    return False

                # before passing ensure keys match
                api_values = set(value.keys()) - set(skip)
                user_values = set(user_def[key].keys()) - set(skip)
                if api_values != user_values:
                    if debug:
                        print("keys are not equal in dict")
                        print(user_values)
                        print(api_values)
                    return False

                result = Utils.check_def_equal(user_def[key], value, skip_keys=skip_keys, debug=debug)
                if not result:
                    if debug:
                        print("dict returned false")
                        print(result)
                    return False

            # Verify each key, value pair is the same
            else:
                if key not in user_def or value != user_def[key]:
                    if debug:
                        print("value not equal; user_def does not have key")
                        print(key)
                        print(value)
                        if key in user_def:
                            print(user_def[key])
                    return False

        if debug:
            print('returning true')
        return True
class OpenShiftCLIConfig(object):
    '''Generic Config'''
    def __init__(self, rname, namespace, kubeconfig, options):
        self.kubeconfig = kubeconfig
        self.name = rname
        self.namespace = namespace
        self._options = options

    @property
    def config_options(self):
        ''' return config options '''
        return self._options

    def to_option_list(self, ascommalist=''):
        '''return all options as a string
           if ascommalist is set to the name of a key, and
           the value of that key is a dict, format the dict
           as a list of comma delimited key=value pairs'''
        return self.stringify(ascommalist)

    def stringify(self, ascommalist=''):
        ''' return the options hash as cli params in a string
            if ascommalist is set to the name of a key, and
            the value of that key is a dict, format the dict
            as a list of comma delimited key=value pairs '''
        params = []
        for option_key in sorted(self.config_options):
            entry = self.config_options[option_key]
            # Skip entries that are excluded or carry no value at all
            # (an explicit 0/False integer still counts as a value).
            if not entry['include'] or \
                    (entry['value'] is None and not isinstance(entry['value'], int)):
                continue
            if option_key == ascommalist:
                rendered = ','.join('{}={}'.format(k, v)
                                    for k, v in sorted(entry['value'].items()))
            else:
                rendered = entry['value']
            params.append('--{}={}'.format(option_key.replace('_', '-'), rendered))
        return params
# -*- -*- -*- End included fragment: lib/base.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: lib/route.py -*- -*- -*-
# noqa: E302,E301
# pylint: disable=too-many-instance-attributes
class RouteConfig(object):
    ''' Handle route options '''
    # pylint: disable=too-many-arguments
    def __init__(self,
                 sname,
                 namespace,
                 kubeconfig,
                 labels=None,
                 destcacert=None,
                 cacert=None,
                 cert=None,
                 key=None,
                 host=None,
                 tls_termination=None,
                 service_name=None,
                 wildcard_policy=None,
                 weight=None,
                 port=None):
        ''' constructor for handling route options '''
        self.kubeconfig = kubeconfig
        self.name = sname
        self.namespace = namespace
        self.labels = labels
        self.host = host
        self.tls_termination = tls_termination
        self.destcacert = destcacert
        self.cacert = cacert
        self.cert = cert
        self.key = key
        self.service_name = service_name
        self.port = port
        self.data = {}
        # The API expects the literal string 'None', not a null value.
        self.wildcard_policy = 'None' if wildcard_policy is None else wildcard_policy
        # Route weight defaults to 100 when unspecified.
        self.weight = 100 if weight is None else weight
        self.create_dict()

    def create_dict(self):
        ''' return a service as a dict '''
        metadata = {'name': self.name, 'namespace': self.namespace}
        if self.labels:
            metadata['labels'] = self.labels

        spec = {'host': self.host}
        if self.tls_termination:
            tls = {'termination': self.tls_termination}
            # passthrough routes never carry certificate material.
            if self.tls_termination != 'passthrough':
                tls['key'] = self.key
                tls['caCertificate'] = self.cacert
                tls['certificate'] = self.cert
                if self.tls_termination == 'reencrypt':
                    tls['destinationCACertificate'] = self.destcacert
            spec['tls'] = tls

        spec['to'] = {'kind': 'Service',
                      'name': self.service_name,
                      'weight': self.weight}
        spec['wildcardPolicy'] = self.wildcard_policy
        if self.port:
            spec['port'] = {'targetPort': self.port}

        self.data['apiVersion'] = 'v1'
        self.data['kind'] = 'Route'
        self.data['metadata'] = metadata
        self.data['spec'] = spec
# pylint: disable=too-many-instance-attributes,too-many-public-methods
class Route(Yedit):
    ''' Typed accessors over a route resource document (wraps Yedit). '''
    wildcard_policy = "spec.wildcardPolicy"
    host_path = "spec.host"
    port_path = "spec.port.targetPort"
    service_path = "spec.to.name"
    weight_path = "spec.to.weight"
    cert_path = "spec.tls.certificate"
    cacert_path = "spec.tls.caCertificate"
    destcacert_path = "spec.tls.destinationCACertificate"
    termination_path = "spec.tls.termination"
    key_path = "spec.tls.key"
    kind = 'route'

    def __init__(self, content):
        '''Route constructor'''
        super(Route, self).__init__(content=content)

    def _fetch(self, dotted_path):
        ''' Resolve a dotted path against the underlying document. '''
        return self.get(dotted_path)

    def get_destcacert(self):
        ''' return the destination CA certificate '''
        return self._fetch(Route.destcacert_path)

    def get_cert(self):
        ''' return the serving certificate '''
        return self._fetch(Route.cert_path)

    def get_key(self):
        ''' return the private key '''
        return self._fetch(Route.key_path)

    def get_cacert(self):
        ''' return the CA certificate '''
        return self._fetch(Route.cacert_path)

    def get_service(self):
        ''' return the backing service name '''
        return self._fetch(Route.service_path)

    def get_weight(self):
        ''' return the backing service weight '''
        return self._fetch(Route.weight_path)

    def get_termination(self):
        ''' return the tls termination mode '''
        return self._fetch(Route.termination_path)

    def get_host(self):
        ''' return the route host '''
        return self._fetch(Route.host_path)

    def get_port(self):
        ''' return the target port '''
        return self._fetch(Route.port_path)

    def get_wildcard_policy(self):
        ''' return the wildcardPolicy '''
        return self._fetch(Route.wildcard_policy)
# -*- -*- -*- End included fragment: lib/route.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: class/oc_route.py -*- -*- -*-
# pylint: disable=too-many-instance-attributes
class OCRoute(OpenShiftCLI):
    ''' Class to wrap the oc command line tools '''
    kind = 'route'

    def __init__(self,
                 config,
                 verbose=False):
        ''' Constructor for OCRoute.

        config: a RouteConfig describing the desired route
        verbose: echo oc commands as they run
        '''
        super(OCRoute, self).__init__(config.namespace, kubeconfig=config.kubeconfig, verbose=verbose)
        self.config = config
        self._route = None

    @property
    def route(self):
        ''' property function for route; lazily fetched on first access '''
        if not self._route:
            self.get()
        return self._route

    @route.setter
    def route(self, data):
        ''' setter function for route '''
        self._route = data

    def exists(self):
        ''' return whether a route exists '''
        if self.route:
            return True

        return False

    def get(self):
        '''return route information '''
        result = self._get(self.kind, self.config.name)
        if result['returncode'] == 0:
            self.route = Route(content=result['results'][0])
        elif 'routes \"%s\" not found' % self.config.name in result['stderr']:
            # A missing route is not an error here; normalize to a
            # successful, empty result.
            result['returncode'] = 0
            result['results'] = [{}]
        elif 'namespaces \"%s\" not found' % self.config.namespace in result['stderr']:
            # Same normalization when the whole namespace is absent.
            result['returncode'] = 0
            result['results'] = [{}]

        return result

    def delete(self):
        '''delete the object'''
        return self._delete(self.kind, self.config.name)

    def create(self):
        '''create the object'''
        return self._create_from_content(self.config.name, self.config.data)

    def update(self):
        '''update the object'''
        # Force the replace when the host changes: oc cannot mutate a
        # route's host in place.
        return self._replace_content(self.kind,
                                     self.config.name,
                                     self.config.data,
                                     force=(self.config.host != self.route.get_host()))

    def needs_update(self):
        ''' verify an update is needed '''
        skip = []
        return not Utils.check_def_equal(self.config.data, self.route.yaml_dict, skip_keys=skip, debug=self.verbose)

    @staticmethod
    def get_cert_data(path, content):
        '''get the data for a particular value

        Prefers the readable file at `path`; falls back to the inline
        `content`. Returns None when neither is usable.
        '''
        rval = None
        if path and os.path.exists(path) and os.access(path, os.R_OK):
            rval = open(path).read()
        elif content:
            rval = content

        return rval

    # pylint: disable=too-many-return-statements,too-many-branches
    @staticmethod
    def run_ansible(params, check_mode=False):
        ''' run the idempotent asnible code

            params comes from the ansible portion for this module
            files: a dictionary for the certificates
                   {'cert': {'path': '',
                             'content': '',
                             'value': ''
                            }
                   }
            check_mode: does the module support check mode. (module.check_mode)
        '''
        # Gather each certificate's source (file path or inline content);
        # 'value' is filled in below.
        files = {'destcacert': {'path': params['dest_cacert_path'],
                                'content': params['dest_cacert_content'],
                                'value': None, },
                 'cacert': {'path': params['cacert_path'],
                            'content': params['cacert_content'],
                            'value': None, },
                 'cert': {'path': params['cert_path'],
                          'content': params['cert_content'],
                          'value': None, },
                 'key': {'path': params['key_path'],
                         'content': params['key_content'],
                         'value': None, }, }

        # passthrough termination carries no certificate material, so only
        # resolve cert values for the other termination modes.
        if params['tls_termination'] and params['tls_termination'].lower() != 'passthrough':  # E501
            for key, option in files.items():
                if not option['path'] and not option['content']:
                    continue

                option['value'] = OCRoute.get_cert_data(option['path'], option['content'])  # E501

                if not option['value']:
                    return {'failed': True,
                            'msg': 'Verify that you pass a correct value for %s' % key}

        rconfig = RouteConfig(params['name'],
                              params['namespace'],
                              params['kubeconfig'],
                              params['labels'],
                              files['destcacert']['value'],
                              files['cacert']['value'],
                              files['cert']['value'],
                              files['key']['value'],
                              params['host'],
                              params['tls_termination'],
                              params['service_name'],
                              params['wildcard_policy'],
                              params['weight'],
                              params['port'])

        oc_route = OCRoute(rconfig, verbose=params['debug'])

        state = params['state']

        api_rval = oc_route.get()

        #####
        # Get
        #####
        if state == 'list':
            return {'changed': False,
                    'results': api_rval['results'],
                    'state': 'list'}

        ########
        # Delete
        ########
        if state == 'absent':
            if oc_route.exists():

                if check_mode:
                    return {'changed': False, 'msg': 'CHECK_MODE: Would have performed a delete.'}  # noqa: E501

                api_rval = oc_route.delete()

                return {'changed': True, 'results': api_rval, 'state': "absent"}  # noqa: E501
            return {'changed': False, 'state': 'absent'}

        if state == 'present':
            ########
            # Create
            ########
            if not oc_route.exists():

                if check_mode:
                    return {'changed': True, 'msg': 'CHECK_MODE: Would have performed a create.'}  # noqa: E501

                # Create it here
                api_rval = oc_route.create()

                if api_rval['returncode'] != 0:
                    return {'failed': True, 'msg': api_rval, 'state': "present"}  # noqa: E501

                # return the created object
                api_rval = oc_route.get()

                if api_rval['returncode'] != 0:
                    return {'failed': True, 'msg': api_rval, 'state': "present"}  # noqa: E501

                return {'changed': True, 'results': api_rval, 'state': "present"}  # noqa: E501

            ########
            # Update
            ########
            if oc_route.needs_update():

                if check_mode:
                    return {'changed': True, 'msg': 'CHECK_MODE: Would have performed an update.'}  # noqa: E501

                api_rval = oc_route.update()

                if api_rval['returncode'] != 0:
                    return {'failed': True, 'msg': api_rval, 'state': "present"}  # noqa: E501

                # return the created object
                api_rval = oc_route.get()

                if api_rval['returncode'] != 0:
                    return {'failed': True, 'msg': api_rval, 'state': "present"}  # noqa: E501

                return {'changed': True, 'results': api_rval, 'state': "present"}  # noqa: E501

            # Already in the desired state: report unchanged.
            return {'changed': False, 'results': api_rval, 'state': "present"}

        # catch all
        return {'failed': True, 'msg': "Unknown State passed"}
# -*- -*- -*- End included fragment: class/oc_route.py -*- -*- -*-
# -*- -*- -*- Begin included fragment: ansible/oc_route.py -*- -*- -*-
# pylint: disable=too-many-branches
def main():
    '''
    ansible oc module for route
    '''
    # Declarative spec for the module's accepted parameters; each cert may
    # be given either as a file path or as inline content, never both.
    route_argument_spec = dict(
        kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
        state=dict(default='present', type='str',
                   choices=['present', 'absent', 'list']),
        debug=dict(default=False, type='bool'),
        labels=dict(default=None, type='dict'),
        name=dict(default=None, required=True, type='str'),
        namespace=dict(default=None, required=True, type='str'),
        tls_termination=dict(default=None, type='str'),
        dest_cacert_path=dict(default=None, type='str'),
        cacert_path=dict(default=None, type='str'),
        cert_path=dict(default=None, type='str'),
        key_path=dict(default=None, type='str'),
        dest_cacert_content=dict(default=None, type='str'),
        cacert_content=dict(default=None, type='str'),
        cert_content=dict(default=None, type='str'),
        key_content=dict(default=None, type='str'),
        service_name=dict(default=None, type='str'),
        host=dict(default=None, type='str'),
        wildcard_policy=dict(default=None, type='str'),
        weight=dict(default=None, type='int'),
        port=dict(default=None, type='int'),
    )
    path_or_content_pairs = [('dest_cacert_path', 'dest_cacert_content'),
                             ('cacert_path', 'cacert_content'),
                             ('cert_path', 'cert_content'),
                             ('key_path', 'key_content'), ]

    module = AnsibleModule(
        argument_spec=route_argument_spec,
        mutually_exclusive=path_or_content_pairs,
        supports_check_mode=True,
    )

    results = OCRoute.run_ansible(module.params, module.check_mode)

    if 'failed' in results:
        module.fail_json(**results)

    module.exit_json(**results)


if __name__ == '__main__':
    main()
# -*- -*- -*- End included fragment: ansible/oc_route.py -*- -*- -*-
| {
"content_hash": "911ade439176aa562ca36fec6ccff58a",
"timestamp": "",
"source": "github",
"line_count": 1899,
"max_line_length": 118,
"avg_line_length": 34.0173775671406,
"alnum_prop": 0.5284601928822428,
"repo_name": "openshift/openshift-tools",
"id": "5cc09460ccd7dec4f8c2153ad97b9a50836eb07c",
"size": "65761",
"binary": false,
"copies": "4",
"ref": "refs/heads/prod",
"path": "openshift/installer/vendored/openshift-ansible-3.9.40/roles/lib_openshift/library/oc_route.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "24919"
},
{
"name": "Dockerfile",
"bytes": "10248"
},
{
"name": "Go",
"bytes": "127388"
},
{
"name": "Groovy",
"bytes": "6322"
},
{
"name": "HTML",
"bytes": "67678"
},
{
"name": "JavaScript",
"bytes": "9573"
},
{
"name": "Makefile",
"bytes": "1108"
},
{
"name": "PHP",
"bytes": "30017"
},
{
"name": "Python",
"bytes": "19774421"
},
{
"name": "Shell",
"bytes": "553874"
}
],
"symlink_target": ""
} |
from factory import Sequence, PostGenerationMethodCall
from factory.alchemy import SQLAlchemyModelFactory
from placebo_gov.user.models import User
from placebo_gov.database import db
class BaseFactory(SQLAlchemyModelFactory):
    """Abstract base for the test suite's SQLAlchemy model factories."""

    class Meta:
        # Abstract: factory_boy must not build this class directly.
        abstract = True
        # All subclasses persist objects through the app's scoped session.
        sqlalchemy_session = db.session
class UserFactory(BaseFactory):
    """Factory producing User instances with unique usernames and emails."""

    # Sequence gives each instance a distinct suffix: user0, user1, ...
    username = Sequence(lambda n: "user{0}".format(n))
    email = Sequence(lambda n: "user{0}@example.com".format(n))
    # Calls User.set_password('example') on the freshly built instance.
    password = PostGenerationMethodCall('set_password', 'example')
    active = True

    class Meta:
        model = User
| {
"content_hash": "33e95c70fac98b9dbf68027fc940a0b7",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 66,
"avg_line_length": 26.636363636363637,
"alnum_prop": 0.726962457337884,
"repo_name": "Zarkantho/placebo.gov",
"id": "63baa104ac843447cdc0a3e9daaabf27424d8fc7",
"size": "610",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/factories.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1170"
},
{
"name": "HTML",
"bytes": "8685"
},
{
"name": "JavaScript",
"bytes": "240856"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "31555"
}
],
"symlink_target": ""
} |
"""Sensor platform for frigate."""
from __future__ import annotations
import logging
from typing import Any
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_URL, TEMP_CELSIUS
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity import DeviceInfo, EntityCategory
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import (
FrigateDataUpdateCoordinator,
FrigateEntity,
FrigateMQTTEntity,
ReceiveMessage,
get_cameras_zones_and_objects,
get_friendly_name,
get_frigate_device_identifier,
get_frigate_entity_unique_id,
get_zones,
)
from .const import ATTR_CONFIG, ATTR_COORDINATOR, DOMAIN, FPS, MS, NAME
from .icons import ICON_CORAL, ICON_SERVER, ICON_SPEEDOMETER, get_icon_from_type
_LOGGER: logging.Logger = logging.getLogger(__name__)
CAMERA_FPS_TYPES = ["camera", "detection", "process", "skipped"]


async def async_setup_entry(
    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
    """Sensor entry setup."""
    coordinator = hass.data[DOMAIN][entry.entry_id][ATTR_COORDINATOR]

    entities = []
    for stats_key, stats_value in coordinator.data.items():
        if stats_key == "detection_fps":
            entities.append(FrigateFpsSensor(coordinator, entry))
        elif stats_key == "detectors":
            entities.extend(
                DetectorSpeedSensor(coordinator, entry, detector_name)
                for detector_name in stats_value.keys()
            )
        elif stats_key == "service":
            # Temperature is only supported on PCIe Coral.
            entities.extend(
                DeviceTempSensor(coordinator, entry, temp_name)
                for temp_name in stats_value.get("temperatures", {})
            )
        else:
            # Every remaining key is a camera stats block: one fps sensor
            # per tracked fps type.
            for fps_type in CAMERA_FPS_TYPES:
                entities.append(CameraFpsSensor(coordinator, entry, stats_key, fps_type))

    frigate_config = hass.data[DOMAIN][entry.entry_id][ATTR_CONFIG]
    for cam_name, obj_name in get_cameras_zones_and_objects(frigate_config):
        entities.append(FrigateObjectCountSensor(entry, frigate_config, cam_name, obj_name))

    entities.append(FrigateStatusSensor(coordinator, entry))
    async_add_entities(entities)
class FrigateFpsSensor(FrigateEntity, CoordinatorEntity):  # type: ignore[misc]
    """Sensor exposing Frigate's overall detection frame rate."""

    _attr_entity_category = EntityCategory.DIAGNOSTIC
    _attr_name = "Detection fps"

    def __init__(
        self, coordinator: FrigateDataUpdateCoordinator, config_entry: ConfigEntry
    ) -> None:
        """Construct a FrigateFpsSensor."""
        FrigateEntity.__init__(self, config_entry)
        CoordinatorEntity.__init__(self, coordinator)
        self._attr_entity_registry_enabled_default = False

    @property
    def unique_id(self) -> str:
        """Return a unique ID to use for this entity."""
        return get_frigate_entity_unique_id(
            self._config_entry.entry_id, "sensor_fps", "detection"
        )

    @property
    def device_info(self) -> DeviceInfo:
        """Get device information."""
        device: DeviceInfo = {
            "identifiers": {get_frigate_device_identifier(self._config_entry)},
            "name": NAME,
            "manufacturer": NAME,
            "model": self._get_model(),
            "configuration_url": self._config_entry.data.get(CONF_URL),
        }
        return device

    @property
    def state(self) -> int | None:
        """Return the state of the sensor."""
        stats = self.coordinator.data
        if not stats:
            return None
        raw_fps = stats.get("detection_fps")
        if raw_fps is None:
            return None
        try:
            return round(float(raw_fps))
        except ValueError:
            # Non-numeric payload; report unknown instead of raising.
            return None

    @property
    def unit_of_measurement(self) -> str:
        """Return the unit of measurement of the sensor."""
        return FPS

    @property
    def icon(self) -> str:
        """Return the icon of the sensor."""
        return ICON_SPEEDOMETER
class FrigateStatusSensor(FrigateEntity, CoordinatorEntity):  # type: ignore[misc]
    """Diagnostic sensor exposing the Frigate server status string."""

    _attr_entity_category = EntityCategory.DIAGNOSTIC
    _attr_name = "Status"

    def __init__(
        self, coordinator: FrigateDataUpdateCoordinator, config_entry: ConfigEntry
    ) -> None:
        """Construct a FrigateStatusSensor."""
        FrigateEntity.__init__(self, config_entry)
        CoordinatorEntity.__init__(self, coordinator)
        self._attr_entity_registry_enabled_default = False

    @property
    def unique_id(self) -> str:
        """Return a unique ID to use for this entity."""
        return get_frigate_entity_unique_id(
            self._config_entry.entry_id, "sensor_status", "frigate"
        )

    @property
    def device_info(self) -> DeviceInfo:
        """Get device information."""
        entry = self._config_entry
        return {
            "identifiers": {get_frigate_device_identifier(entry)},
            "name": NAME,
            "model": self._get_model(),
            "configuration_url": entry.data.get(CONF_URL),
            "manufacturer": NAME,
        }

    @property
    def state(self) -> str:
        """Return the server status reported by the coordinator."""
        return str(self.coordinator.server_status)

    @property
    def icon(self) -> str:
        """Return the icon of the sensor."""
        return ICON_SERVER
class DetectorSpeedSensor(FrigateEntity, CoordinatorEntity):  # type: ignore[misc]
    """Diagnostic sensor reporting a detector's inference speed."""

    _attr_entity_category = EntityCategory.DIAGNOSTIC

    def __init__(
        self,
        coordinator: FrigateDataUpdateCoordinator,
        config_entry: ConfigEntry,
        detector_name: str,
    ) -> None:
        """Construct a DetectorSpeedSensor."""
        FrigateEntity.__init__(self, config_entry)
        CoordinatorEntity.__init__(self, coordinator)
        self._detector_name = detector_name
        # Disabled by default; users opt in via the entity registry.
        self._attr_entity_registry_enabled_default = False

    @property
    def unique_id(self) -> str:
        """Return a unique ID to use for this entity."""
        return get_frigate_entity_unique_id(
            self._config_entry.entry_id, "sensor_detector_speed", self._detector_name
        )

    @property
    def device_info(self) -> DeviceInfo:
        """Get device information."""
        return {
            "identifiers": {get_frigate_device_identifier(self._config_entry)},
            "name": NAME,
            "model": self._get_model(),
            "configuration_url": self._config_entry.data.get(CONF_URL),
            "manufacturer": NAME,
        }

    @property
    def name(self) -> str:
        """Return the name of the sensor."""
        return f"{get_friendly_name(self._detector_name)} inference speed"

    @property
    def state(self) -> int | None:
        """Return the inference speed rounded to the nearest int, or None."""
        if self.coordinator.data:
            data = (
                self.coordinator.data.get("detectors", {})
                .get(self._detector_name, {})
                .get("inference_speed")
            )
            if data is not None:
                try:
                    return round(float(data))
                # BUG FIX: float() raises TypeError (not just ValueError) for
                # non-numeric, non-string payloads; catch both, consistent
                # with DeviceTempSensor in this module.
                except (TypeError, ValueError):
                    pass
        return None

    @property
    def unit_of_measurement(self) -> str:
        """Return the unit of measurement of the sensor."""
        return MS

    @property
    def icon(self) -> str:
        """Return the icon of the sensor."""
        return ICON_SPEEDOMETER
class CameraFpsSensor(FrigateEntity, CoordinatorEntity):  # type: ignore[misc]
    """Diagnostic sensor reporting one fps figure for one camera."""

    _attr_entity_category = EntityCategory.DIAGNOSTIC

    def __init__(
        self,
        coordinator: FrigateDataUpdateCoordinator,
        config_entry: ConfigEntry,
        cam_name: str,
        fps_type: str,
    ) -> None:
        """Construct a CameraFpsSensor."""
        FrigateEntity.__init__(self, config_entry)
        CoordinatorEntity.__init__(self, coordinator)
        self._cam_name = cam_name
        self._fps_type = fps_type
        # Disabled by default; users opt in via the entity registry.
        self._attr_entity_registry_enabled_default = False

    @property
    def unique_id(self) -> str:
        """Return a unique ID to use for this entity."""
        return get_frigate_entity_unique_id(
            self._config_entry.entry_id,
            "sensor_fps",
            f"{self._cam_name}_{self._fps_type}",
        )

    @property
    def device_info(self) -> DeviceInfo:
        """Get device information."""
        return {
            "identifiers": {
                get_frigate_device_identifier(self._config_entry, self._cam_name)
            },
            "via_device": get_frigate_device_identifier(self._config_entry),
            "name": get_friendly_name(self._cam_name),
            "model": self._get_model(),
            "configuration_url": f"{self._config_entry.data.get(CONF_URL)}/cameras/{self._cam_name}",
            "manufacturer": NAME,
        }

    @property
    def name(self) -> str:
        """Return the name of the sensor."""
        return f"{self._fps_type} fps"

    @property
    def unit_of_measurement(self) -> str:
        """Return the unit of measurement of the sensor."""
        return FPS

    @property
    def state(self) -> int | None:
        """Return the camera fps rounded to the nearest int, or None."""
        if self.coordinator.data:
            data = self.coordinator.data.get(self._cam_name, {}).get(
                f"{self._fps_type}_fps"
            )
            if data is not None:
                try:
                    return round(float(data))
                # BUG FIX: float() raises TypeError (not just ValueError) for
                # non-numeric, non-string payloads; catch both, consistent
                # with DeviceTempSensor in this module.
                except (TypeError, ValueError):
                    pass
        return None

    @property
    def icon(self) -> str:
        """Return the icon of the sensor."""
        return ICON_SPEEDOMETER
class FrigateObjectCountSensor(FrigateMQTTEntity):
    """MQTT-driven sensor counting detected objects of one type for a camera/zone."""

    def __init__(
        self,
        config_entry: ConfigEntry,
        frigate_config: dict[str, Any],
        cam_name: str,
        obj_name: str,
    ) -> None:
        """Construct a FrigateObjectCountSensor."""
        self._cam_name = cam_name
        self._obj_name = obj_name
        # Last integer count received over MQTT; starts at 0 until a
        # message arrives.
        self._state = 0
        self._frigate_config = frigate_config
        self._icon = get_icon_from_type(self._obj_name)
        # Subscribe to "<topic_prefix>/<camera>/<object>"; attributes used in
        # the topic must be set before super().__init__ builds the subscription.
        super().__init__(
            config_entry,
            frigate_config,
            {
                "state_topic": {
                    "msg_callback": self._state_message_received,
                    "qos": 0,
                    "topic": (
                        f"{self._frigate_config['mqtt']['topic_prefix']}"
                        f"/{self._cam_name}/{self._obj_name}"
                    ),
                    "encoding": None,
                },
            },
        )

    @callback  # type: ignore[misc]
    def _state_message_received(self, msg: ReceiveMessage) -> None:
        """Handle a new received MQTT state message."""
        try:
            self._state = int(msg.payload)
            self.async_write_ha_state()
        except ValueError:
            # Non-integer payloads are ignored; the previous count is kept.
            pass

    @property
    def unique_id(self) -> str:
        """Return a unique ID to use for this entity."""
        return get_frigate_entity_unique_id(
            self._config_entry.entry_id,
            "sensor_object_count",
            f"{self._cam_name}_{self._obj_name}",
        )

    @property
    def device_info(self) -> DeviceInfo:
        """Get device information."""
        return {
            "identifiers": {
                get_frigate_device_identifier(self._config_entry, self._cam_name)
            },
            "via_device": get_frigate_device_identifier(self._config_entry),
            "name": get_friendly_name(self._cam_name),
            "model": self._get_model(),
            # Zones have no camera page of their own, so link to the site root.
            "configuration_url": f"{self._config_entry.data.get(CONF_URL)}/cameras/{self._cam_name if self._cam_name not in get_zones(self._frigate_config) else ''}",
            "manufacturer": NAME,
        }

    @property
    def name(self) -> str:
        """Return the name of the sensor."""
        return f"{self._obj_name} count"

    @property
    def state(self) -> int:
        """Return the current object count."""
        return self._state

    @property
    def unit_of_measurement(self) -> str:
        """Return the unit of measurement of the sensor."""
        return "objects"

    @property
    def icon(self) -> str:
        """Return the icon of the sensor."""
        return self._icon
class DeviceTempSensor(FrigateEntity, CoordinatorEntity):  # type: ignore[misc]
    """Frigate Coral Temperature Sensor class."""

    _attr_entity_category = EntityCategory.DIAGNOSTIC

    def __init__(
        self,
        coordinator: FrigateDataUpdateCoordinator,
        config_entry: ConfigEntry,
        name: str,
    ) -> None:
        """Construct a CoralTempSensor."""
        self._name = name
        FrigateEntity.__init__(self, config_entry)
        CoordinatorEntity.__init__(self, coordinator)
        self._attr_entity_registry_enabled_default = False

    @property
    def unique_id(self) -> str:
        """Return a unique ID to use for this entity."""
        return get_frigate_entity_unique_id(
            self._config_entry.entry_id, "sensor_temp", self._name
        )

    @property
    def device_info(self) -> DeviceInfo:
        """Get device information."""
        return {
            "identifiers": {get_frigate_device_identifier(self._config_entry)},
            "name": NAME,
            "model": self._get_model(),
            "configuration_url": self._config_entry.data.get(CONF_URL),
            "manufacturer": NAME,
        }

    @property
    def name(self) -> str:
        """Return the name of the sensor."""
        return f"{get_friendly_name(self._name)} temperature"

    @property
    def state(self) -> float | None:
        """Return the last reported temperature, or None when unavailable."""
        if not self.coordinator.data:
            return None
        temps = self.coordinator.data.get("service", {}).get("temperatures", {})
        raw = temps.get(self._name, 0.0)
        try:
            return float(raw)
        except (TypeError, ValueError):
            return None

    @property
    def unit_of_measurement(self) -> Any:
        """Return the unit of measurement of the sensor."""
        return TEMP_CELSIUS

    @property
    def icon(self) -> str:
        """Return the icon of the sensor."""
        return ICON_CORAL
| {
"content_hash": "349591a752f65a26f9e444c3387b6362",
"timestamp": "",
"source": "github",
"line_count": 453,
"max_line_length": 166,
"avg_line_length": 32.136865342163354,
"alnum_prop": 0.5776205522736639,
"repo_name": "jnewland/ha-config",
"id": "8c73886b3062444db2de82b79de28dca06178af6",
"size": "14558",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "custom_components/frigate/sensor.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "896"
},
{
"name": "JavaScript",
"bytes": "160044"
},
{
"name": "Python",
"bytes": "510412"
},
{
"name": "Shell",
"bytes": "23976"
}
],
"symlink_target": ""
} |
"""
Plugins go in directories on your PYTHONPATH named piped/plugins:
this is the only place where an __init__.py is necessary, thanks to
the __path__ variable.
"""
from twisted.plugin import pluginPackagePaths
__path__.extend(pluginPackagePaths(__name__))
__all__ = []#nothing to see here, move along, move along | {
"content_hash": "5f1a473ec12d0b649138aa597503fa2a",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 67,
"avg_line_length": 34.888888888888886,
"alnum_prop": 0.732484076433121,
"repo_name": "alexbrasetvik/Piped",
"id": "ea6122cd5c80966490741b2cd8e5f181d177aae3",
"size": "314",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "piped/plugins/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1144292"
},
{
"name": "Shell",
"bytes": "4509"
}
],
"symlink_target": ""
} |
"""
Fluentd queue check for v3
"""
import argparse
import time
import subprocess
import math
from dateutil import parser
from datetime import datetime
from openshift_tools.monitoring.ocutil import OCUtil
from openshift_tools.monitoring.metric_sender import MetricSender
import logging
logging.basicConfig(
format='%(asctime)s - %(relativeCreated)6d - %(levelname)-8s - %(message)s',
)
logger = logging.getLogger()
logger.setLevel(logging.INFO)
ocutil = OCUtil()
class OpenshiftFluentdQueueCheck(object):
    """Report the age of the oldest fluentd buffer file as a metric."""

    def __init__(self):
        """ Initialize OpenshiftFluentdQueueCheck class """
        self.metric_sender = None
        self.oc = None
        self.args = None
        self.fluentd_pods = []

    def parse_args(self):
        """ Parse arguments passed to the script """
        # Named ``arg_parser`` so we do not shadow the module-level
        # dateutil ``parser`` import used in check_fluentd_queues().
        arg_parser = argparse.ArgumentParser(description='OpenShift Fluentd Queue Checker')
        arg_parser.add_argument('-v', '--verbose', action='store_true', default=None, help='Verbose output')
        arg_parser.add_argument('--debug', action='store_true', default=None, help='Debug?')
        self.args = arg_parser.parse_args()

    def send_metrics(self, oldest_buffer):
        """ Send the oldest-buffer age (in seconds) to MetricSender """
        logger.debug("send_metrics()")
        ms_time = time.time()
        # BUG FIX: reuse the sender configured in run() (it carries the
        # verbose/debug flags) instead of always constructing a fresh,
        # unconfigured MetricSender.
        ms = self.metric_sender or MetricSender()
        logger.info("Sending data to MetricSender...")
        logger.debug({'openshift.logging.fluentd.queue.oldest' : oldest_buffer})
        ms.add_metric({'openshift.logging.fluentd.queue.oldest' : oldest_buffer})
        ms.send_metrics()
        logger.info("Data sent to Zagg in %s seconds", str(time.time() - ms_time))

    def get_pods(self):
        """ Get all pods and filter them in one pass """
        pods = self.oc.get_pods()
        for pod in pods['items']:
            if 'component' in pod['metadata']['labels']:
                # Get Fluentd pods
                if pod['metadata']['labels']['component'] == 'fluentd':
                    self.fluentd_pods.append(pod)

    def check_fluentd_queues(self):
        """ Return the age in seconds of the oldest buffer file across pods """
        # Get timestamps of files in /var/lib/fluentd from each pod
        buffer_list = []
        for pod in self.fluentd_pods:
            pod_name = pod['metadata']['name']
            find_ts = "exec " + pod_name + " -- find /var/lib/fluentd -type f -name \*.log ! -name '*output_ops_tag*' -printf '%T+\n'"
            buffer_ts = self.oc.run_user_cmd(find_ts)
            timestamps = buffer_ts.split("\n")
            timestamps.pop() # Removes empty newline
            timestamps.sort()
            if len(timestamps) > 0:
                # Oldest file first after the sort.
                buffer_list.append(timestamps[0])
                logger.info("Found files in fluentd queue on " + pod_name + " with timestamp(s): %s", str(timestamps))
            else:
                logger.info("No files found in fluentd queue on " + pod_name)
        # Convert timestamps to age in seconds
        age_list = []
        for ts in buffer_list:
            # find's %T+ format joins date and time with '+'.
            if "+" in ts:
                ts = ts.replace("+", " ")
            ts = parser.parse(ts)
            ts = ts.replace(tzinfo=None)
            buffer_age = (datetime.now() - ts).total_seconds()
            age_list.append(buffer_age)
        # Default to 0 when no pod had any queued files.
        oldest_age = int(math.ceil(max(age_list or [0])))
        logger.info("Oldest fluentd queue file is %s seconds old.", str(oldest_age))
        return oldest_age

    def get_logging_namespace(self):
        """ Determine which logging namespace is in use """
        # Assume the correct namespace is 'openshift-logging' and fall back to 'logging'
        # if that assumption ends up being wrong.
        oc_client = OCUtil(namespace='openshift-logging', config_file='/tmp/admin.kubeconfig', verbose=self.args.verbose)
        logger.info("Determining which namespace is in use...")
        try:
            oc_client.get_dc('logging-kibana')
            # If the previous call didn't throw an exception, logging is deployed in this namespace.
            logger.info("Using namespace: openshift-logging")
            return 'openshift-logging'
        except subprocess.CalledProcessError:
            logger.info("Using namespace: logging")
            return 'logging'

    def run(self):
        """ Main function that runs the check """
        self.parse_args()
        self.metric_sender = MetricSender(verbose=self.args.verbose, debug=self.args.debug)
        self.oc = OCUtil(namespace=self.get_logging_namespace(), config_file='/tmp/admin.kubeconfig', verbose=self.args.verbose)
        self.get_pods()
        oldest_buffer = self.check_fluentd_queues()
        self.send_metrics(oldest_buffer)
if __name__ == '__main__':
    # Script entry point: build the checker and run it once.
    OpenshiftFluentdQueueCheck().run()
| {
"content_hash": "5838170b9323921874d907c38741cf20",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 134,
"avg_line_length": 39.231404958677686,
"alnum_prop": 0.6096481988624395,
"repo_name": "drewandersonnz/openshift-tools",
"id": "e30d9d200104907a3adac3af1fb8103b332e6163",
"size": "4769",
"binary": false,
"copies": "3",
"ref": "refs/heads/prod",
"path": "scripts/monitoring/cron-send-fluentd-depth.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "24919"
},
{
"name": "Dockerfile",
"bytes": "10248"
},
{
"name": "Go",
"bytes": "127388"
},
{
"name": "Groovy",
"bytes": "6322"
},
{
"name": "HTML",
"bytes": "67678"
},
{
"name": "JavaScript",
"bytes": "9573"
},
{
"name": "Makefile",
"bytes": "1108"
},
{
"name": "PHP",
"bytes": "30017"
},
{
"name": "Python",
"bytes": "19774421"
},
{
"name": "Shell",
"bytes": "553874"
}
],
"symlink_target": ""
} |
import sys
sys.path.append('../API')
import solr

# Wipe every document from the local Solr fingerprint ("fp") core:
# the query "track_id:[* TO *]" matches all documents with a track_id field.
fp_solr = solr.SolrConnection("http://localhost:8502/solr/fp")
fp_solr.delete_query("track_id:[* TO *]")
fp_solr.commit()
| {
"content_hash": "93e12a19325e53bc10d28827678282b4",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 62,
"avg_line_length": 24.571428571428573,
"alnum_prop": 0.6976744186046512,
"repo_name": "alexonea/3rdyrp",
"id": "25aff1ba18fd51a43d28aa31018554ce055e387d",
"size": "213",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server/solr/util/wipe_codes.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "921"
},
{
"name": "C",
"bytes": "11274"
},
{
"name": "C++",
"bytes": "32460"
},
{
"name": "CSS",
"bytes": "403"
},
{
"name": "HTML",
"bytes": "2484"
},
{
"name": "JavaScript",
"bytes": "33207"
},
{
"name": "Makefile",
"bytes": "262"
},
{
"name": "Matlab",
"bytes": "18206"
},
{
"name": "Python",
"bytes": "137450"
},
{
"name": "Shell",
"bytes": "139"
},
{
"name": "XSLT",
"bytes": "22416"
}
],
"symlink_target": ""
} |
'''
Created on 13.07.2015
@author: Aaron Klein
'''
class BaseMaximizer(object):
    """Abstract interface for optimizers that maximize an acquisition function."""

    def __init__(self, objective_function, X_lower, X_upper):
        """
        Parameters
        ----------
        objective_function: acquisition function
            The acquisition function which will be maximized
        X_lower: np.ndarray (D)
            Lower bounds of the input space
        X_upper: np.ndarray (D)
            Upper bounds of the input space
        """
        self.objective_func = objective_function
        self.X_lower = X_lower
        self.X_upper = X_upper

    def maximize(self):
        """Maximize the objective function; implemented by subclasses."""
        pass
| {
"content_hash": "1a8da04010e0e8dddfa846d306e0378b",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 61,
"avg_line_length": 24.24137931034483,
"alnum_prop": 0.5860597439544808,
"repo_name": "aaronkl/RoBO",
"id": "97290254da796eefcfab5b1ab489e76a42e7333d",
"size": "703",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "robo/maximizers/base_maximizer.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Matlab",
"bytes": "2054"
},
{
"name": "Python",
"bytes": "402978"
},
{
"name": "Shell",
"bytes": "318"
}
],
"symlink_target": ""
} |
with break continue del exec return pass print raise global assert yield
for while if elif else import from as try except finally and in is not or
lambda: a + 1
lambda x, y: x + y
yield from
def functionname
class Classname
def функция
class Класс
await
async def Test
async with
async for
# Type annotations
def myfunc(a: str, something_other,
b: Callable[[str, str], int],
c: mypkg.MyType) -> 'runtime_resolved_type':
myval: float
mygood: Optional[int, Any] = b('wow', 'oops')
myarr: Sequence[int] = origarr[aa:bb] + (lambda: x)()
mykey = a
wow = {
mykey: this_should_not_be_type_anno[Any],
'b': some_data,
}
call_with_dict(a={
'a': asdf,
'b': 'zxcb',
mykey: this_should_not_be_type_anno[Any],
}, b=mydata['a'])
vanilla_lambda = lambda x, y: myval + 1.0
call_with_lambda(lambda x, y: myval + 1.0)
call_with_slice(mydata[range_start:range_end])
# Builtin objects.
True False Ellipsis None NotImplemented
# Builtin function and types.
__import__() abs() all() any() apply() basestring() bool() buffer() callable() chr() classmethod()
cmp() coerce() compile() complex() delattr() dict() dir() divmod() enumerate() eval() execfile() file()
filter() float() frozenset() getattr() globals() hasattr() hash() help() hex() id() input() int()
intern() isinstance() issubclass() iter() len() list() locals() long() map() max() min() object() oct()
open() ord() pow() property() range() raw_input() reduce() reload() repr() reversed() round() set()
setattr() slice() sorted() staticmethod() str() sum() super() tuple() type() unichr() unicode() vars()
xrange() zip()
when_we_dont_call = a.float
float = when_we_dont_call
when_we_call = float(x)
when_we_call = min(a, b)
# Builtin exceptions and warnings.
BaseException Exception StandardError ArithmeticError LookupError
EnvironmentError
AssertionError AttributeError EOFError FloatingPointError GeneratorExit IOError
ImportError IndexError KeyError KeyboardInterrupt MemoryError NameError
NotImplementedError OSError OverflowError ReferenceError RuntimeError
StopIteration SyntaxError IndentationError TabError SystemError SystemExit
TypeError UnboundLocalError UnicodeError UnicodeEncodeError UnicodeDecodeError
UnicodeTranslateError ValueError WindowsError ZeroDivisionError
Warning UserWarning DeprecationWarning PendingDepricationWarning SyntaxWarning
RuntimeWarning FutureWarning ImportWarning UnicodeWarning
# Decorators.
@ decoratorname
@ object.__init__(arg1, arg2)
@ декоратор
@ декоратор.décorateur
# Numbers
0 1 2 9 10 0x1f .3 12.34 0j 124j 34.2E-3 0b10 0o77 1023434 0x0
1_1 1_1.2_2 1_2j 0x_1f 0x1_f 34_56e-3 34_56e+3_1 0o7_7
# Erroneous numbers
077 100L 0xfffffffL 0L 08 0xk 0x 0b102 0o78 0o123LaB
0_ 0_1 0_x1f 0x1f_ 0_b77 0b77_ .2_ 1_j
# Strings
" test " ' test '
"""
test
"""
'''
test
'''
" \a\b\c\"\'\n\r \x34\077 \08 \xag"
r" \" \' "
"testтест"
b"test"
b"test\r\n\xffff"
b"тестtest"
br"test"
br"\a\b\n\r"
# Formattings
" %f "
b" %f "
"{0.name!r:b} {0[n]} {name!s: } {{test}} {{}} {} {.__len__:s}"
b"{0.name!r:b} {0[n]} {name!s: } {{test}} {{}} {} {.__len__:s}"
"${test} ${test ${test}aname $$$ $test+nope"
b"${test} ${test ${test}aname $$$ $test+nope"
f"{var}...{arr[123]} normal {var['{'] // 0xff} \"xzcb\" 'xzcb' {var['}'] + 1} text"
f"{expr1 if True or False else expr2} wow {','.join(c.lower() for c in 'asdf')}"
f"hello {expr:.2f} yes {(lambda: 0b1)():#03x} lol {var!r}"
# Doctests.
"""
Test:
>>> a = 5
>>> a
5
Test
"""
'''
Test:
>>> a = 5
>>> a
5
Test
'''
# Erroneous symbols or bad variable names.
$ ? 6xav
&& || ===
# Indentation errors.
break
# Trailing space errors.
break
"""
test
"""
| {
"content_hash": "71c76f4dfa36c529cafd92d69b6a7c3f",
"timestamp": "",
"source": "github",
"line_count": 171,
"max_line_length": 103,
"avg_line_length": 22.22222222222222,
"alnum_prop": 0.645,
"repo_name": "achimnol/python-syntax",
"id": "d19a6f716db676602f914fc0bef4e41fd190b62f",
"size": "3982",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3982"
},
{
"name": "Vim script",
"bytes": "33705"
}
],
"symlink_target": ""
} |
import _plotly_utils.basevalidators
class ArrayminusValidator(_plotly_utils.basevalidators.DataArrayValidator):
    """Validator for the ``bar.error_y.arrayminus`` data-array property."""

    def __init__(self, plotly_name="arrayminus", parent_name="bar.error_y", **kwargs):
        # Default the edit type to "calc" unless the caller overrides it.
        edit_type = kwargs.pop("edit_type", "calc")
        super(ArrayminusValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=edit_type,
            **kwargs,
        )
| {
"content_hash": "13ddde3bddbec1f73b378e69bac3b66f",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 86,
"avg_line_length": 37.54545454545455,
"alnum_prop": 0.6319612590799032,
"repo_name": "plotly/plotly.py",
"id": "3b6605bda833fda5b9f21373dff0428bc9b78e94",
"size": "413",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/bar/error_y/_arrayminus.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "545"
},
{
"name": "JavaScript",
"bytes": "2074"
},
{
"name": "PostScript",
"bytes": "565328"
},
{
"name": "Python",
"bytes": "31506317"
},
{
"name": "TypeScript",
"bytes": "71337"
}
],
"symlink_target": ""
} |
import os,socket, sys
import json
from fabric.state import _AttributeDict, env
from fabric.operations import run, sudo
from fabric.context_managers import cd, settings
from fabric.contrib.files import append, contains, exists
from fabric.decorators import runs_once
from woven.decorators import run_once_per_version
from woven.deployment import deploy_files, mkdirs, upload_template
from woven.environment import deployment_root, version_state, _root_domain, get_packages
from woven.linux import add_user
def _activate_sites(path, filenames):
    # Disable any currently enabled project sites that are not in ``filenames``.
    enabled_sites = _ls_sites(path)
    for site in enabled_sites:
        if env.verbosity:
            print env.host,'Disabling', site
        if site not in filenames:
            sudo("rm %s/%s"% (path,site))
        sudo("chmod 644 %s" % site)
    # NOTE(review): ``filename``, ``self.deploy_root`` and ``self.enabled_path``
    # are undefined here (this is a module-level function, not a method), so
    # reaching these lines raises NameError. Looks like leftover code pasted
    # from a class method -- confirm intent and remove or repair.
    if not exists('/etc/apache2/sites-enabled'+ filename):
        sudo("ln -s %s%s %s%s"% (self.deploy_root,filename,self.enabled_path,filename))
def _deploy_webconf(remote_dir,template):
    # Render and upload one webserver conf per domain, creating a dedicated
    # ``site_<id>`` user for each site if it does not already exist.
    # MEDIA_URL/STATIC_URL are only passed into the template when they are
    # local paths; full http:// urls are blanked out.
    if not 'http:' in env.MEDIA_URL: media_url = env.MEDIA_URL
    else: media_url = ''
    if not 'http:' in env.STATIC_URL: static_url = env.STATIC_URL
    else: static_url = ''
    if not static_url: static_url = env.ADMIN_MEDIA_PREFIX
    # NOTE(review): ``log_dir`` is computed but never used in this function.
    log_dir = '/'.join([deployment_root(),'log'])
    deployed = []
    users_added = []
    domains = domain_sites()
    for d in domains:
        # e.g. example.com -> example_com, used in the conf filename.
        u_domain = d.name.replace('.','_')
        wsgi_filename = d.settings.replace('.py','.wsgi')
        site_user = ''.join(['site_',str(d.site_id)])
        filename = ''.join([remote_dir,'/',u_domain,'-',env.project_version,'.conf'])
        context = {"project_name": env.project_name,
                   "deployment_root":deployment_root(),
                   "u_domain":u_domain,
                   "domain":d.name,
                   "root_domain":env.root_domain,
                   "user":env.user,
                   "site_user":site_user,
                   "SITE_ID":d.site_id,
                   "host_ip":socket.gethostbyname(env.host),
                   "wsgi_filename":wsgi_filename,
                   "MEDIA_URL":media_url,
                   "STATIC_URL":static_url,
                   }
        upload_template('/'.join(['woven',template]),
                        filename,
                        context,
                        use_sudo=True)
        if env.verbosity:
            print " * uploaded", filename
        #add site users if necessary
        site_users = _site_users()
        if site_user not in users_added and site_user not in site_users:
            add_user(username=site_user,group='www-data',site_user=True)
            users_added.append(site_user)
            if env.verbosity:
                print " * useradded",site_user
    # NOTE(review): ``deployed`` is returned but nothing is ever appended to
    # it, so callers always receive an empty list; presumably the uploaded
    # conf filenames were meant to be collected -- confirm.
    return deployed
def _site_users():
    """Return the usernames of all ``site_<n>`` users present on the host."""
    passwd_lines = sudo("cat /etc/passwd | awk '/site/'").split('\n')
    return [line.split(':')[0] for line in passwd_lines if 'site_' in line]
def _ls_sites(path):
    """
    List only sites in the domain_sites() to ensure we co-exist with other projects
    """
    with cd(path):
        listed = run('ls').split('\n')
        known_domains = [d.name for d in domain_sites()]
        matching = []
        for entry in listed:
            # Conf files are named "<domain_with_underscores>-<version>.conf".
            entry_domain = entry.split('-')[0].replace('_', '.')
            if entry_domain in known_domains and entry not in matching:
                matching.append(entry)
    return matching
def _sitesettings_files():
    """
    Get a list of sitesettings files

    settings.py can be prefixed with a subdomain and underscore so with example.com site:
    sitesettings/settings.py would be the example.com settings file and
    sitesettings/admin_settings.py would be the admin.example.com settings file
    """
    path = os.path.join(env.project_package_name, 'sitesettings')
    if not os.path.exists(path):
        return []

    def _is_sitesettings(name):
        # Either the plain settings.py, or a prefixed <subdomain>_settings.py
        # (length check excludes a file literally named '_settings.py').
        return name == 'settings.py' or (len(name) > 12 and name[-12:] == '_settings.py')

    return [name for name in os.listdir(path) if _is_sitesettings(name)]
def _get_django_sites():
    """
    Get a list of sites as dictionaries {site_id:'domain.name'}

    The result is cached on ``env.sites``; the remote dumpdata is only run
    when the sites framework is installed and the project has been deployed.
    """
    deployed = version_state('deploy_project')
    if not env.sites and 'django.contrib.sites' in env.INSTALLED_APPS and deployed:
        with cd('/'.join([deployment_root(),'env',env.project_fullname,'project',env.project_package_name,'sitesettings'])):
            venv = '/'.join([deployment_root(),'env',env.project_fullname,'bin','activate'])
            #since this is the first time we run ./manage.py on the server it can be
            #a point of failure for installations
            with settings(warn_only=True):
                output = run(' '.join(['source',venv,'&&',"django-admin.py dumpdata sites --settings=%s.sitesettings.settings"% env.project_package_name]))
                if output.failed:
                    print "ERROR: There was an error running ./manage.py on the node"
                    print "See the troubleshooting docs for hints on how to diagnose deployment issues"
                    if hasattr(output, 'stderr'):
                        print output.stderr
                    sys.exit(1)
            output = output.split('\n')[-1] #ignore any lines prior to the data being dumped
            sites = json.loads(output)
            # Cache {pk: domain} on the fabric env for later calls.
            env.sites = {}
            for s in sites:
                env.sites[s['pk']] = s['fields']['domain']
    return env.sites
def domain_sites():
    """
    Get a list of domains

    Each domain is an attribute dict with name, site_id and settings.
    The result is cached on ``env.domains`` after the first call, and the
    first domain's name is recorded as ``env.root_domain``.
    """
    if not hasattr(env,'domains'):
        sites = _get_django_sites()
        site_ids = sites.keys()
        site_ids.sort()
        domains = []
        for id in site_ids:
            # One entry per sitesettings file: plain settings.py maps to the
            # bare domain, <subdomain>_settings.py maps to a subdomain of it.
            for file in _sitesettings_files():
                domain = _AttributeDict({})
                if file == 'settings.py':
                    domain.name = sites[id]
                else: #prefix indicates subdomain
                    subdomain = file[:-12].replace('_','.')
                    domain.name = ''.join([subdomain,sites[id]])
                domain.settings = file
                domain.site_id = id
                domains.append(domain)
        env.domains = domains
        if env.domains: env.root_domain = env.domains[0].name
        else:
            # BUG FIX: this branch previously reused ``domain`` from the loops
            # above, which never ran when we get here (env.domains is empty
            # exactly when no iteration happened) -- raising NameError.
            # Build a fresh fallback entry instead.
            domain = _AttributeDict({})
            domain.name = _root_domain(); domain.site_id = 1; domain.settings='settings.py'
            env.domains = [domain]
    return env.domains
@run_once_per_version
def deploy_webconf():
    """ Deploy nginx and other wsgi server site configurations to the host """
    deployed = []
    log_dir = '/'.join([deployment_root(),'log'])
    #TODO - incorrect - check for actual package to confirm installation
    if webserver_list():
        if env.verbosity:
            print env.host,"DEPLOYING webconf:"
        # Symlink the deployment log directory to the system log dir.
        if not exists(log_dir):
            run('ln -s /var/log log')
        #deploys confs for each domain based on sites app
        if 'apache2' in get_packages():
            # Apache behind an nginx frontend: both confs are needed.
            deployed += _deploy_webconf('/etc/apache2/sites-available','django-apache-template.txt')
            deployed += _deploy_webconf('/etc/nginx/sites-available','nginx-template.txt')
        elif 'gunicorn' in get_packages():
            deployed += _deploy_webconf('/etc/nginx/sites-available','nginx-gunicorn-template.txt')
        # Maintenance page served by nginx when the site is down.
        if not exists('/var/www/nginx-default'):
            sudo('mkdir /var/www/nginx-default')
        upload_template('woven/maintenance.html','/var/www/nginx-default/maintenance.html',use_sudo=True)
        sudo('chmod ugo+r /var/www/nginx-default/maintenance.html')
    else:
        print env.host,"""WARNING: Apache or Nginx not installed"""
    return deployed
@run_once_per_version
def deploy_wsgi():
    """
    deploy python wsgi file(s)
    """
    # Pick the wsgi flavour from the installed packages.
    # NOTE(review): if neither libapache2-mod-wsgi nor gunicorn is installed,
    # ``remote_dir`` and ``wsgi`` are never bound and the code below raises
    # NameError -- confirm callers guarantee one of the two is present.
    if 'libapache2-mod-wsgi' in get_packages():
        remote_dir = '/'.join([deployment_root(),'env',env.project_fullname,'wsgi'])
        wsgi = 'apache2'
    elif 'gunicorn' in get_packages():
        remote_dir = '/etc/init'
        wsgi = 'gunicorn'
    deployed = []
    #ensure project apps path is also added to environment variables as well as wsgi
    if env.PROJECT_APPS_PATH:
        pap = '/'.join([deployment_root(),'env',
                        env.project_name,'project',env.project_package_name,env.PROJECT_APPS_PATH])
        pap = ''.join(['export PYTHONPATH=$PYTHONPATH:',pap])
        postactivate = '/'.join([deployment_root(),'env','postactivate'])
        if not exists(postactivate):
            append('#!/bin/bash', postactivate)
            run('chmod +x %s'% postactivate)
        if not contains('PYTHONPATH',postactivate):
            append(pap,postactivate)
    if env.verbosity:
        print env.host,"DEPLOYING wsgi", wsgi, remote_dir
    # One wsgi/conf file per sitesettings file. (``file`` shadows the
    # builtin -- kept as-is to preserve the code unchanged.)
    for file in _sitesettings_files():
        deployed += mkdirs(remote_dir)
        with cd(remote_dir):
            settings_module = file.replace('.py','')
            context = {"deployment_root":deployment_root(),
                       "user": env.user,
                       "project_name": env.project_name,
                       "project_package_name": env.project_package_name,
                       "project_apps_path":env.PROJECT_APPS_PATH,
                       "settings": settings_module,
                       }
            if wsgi == 'apache2':
                filename = file.replace('.py','.wsgi')
                upload_template('/'.join(['woven','django-wsgi-template.txt']),
                                filename,
                                context,
                                )
            elif wsgi == 'gunicorn':
                filename = 'gunicorn-%s.conf'% env.project_name
                upload_template('/'.join(['woven','gunicorn.conf']),
                                filename,
                                context,
                                backup=False,
                                use_sudo=True
                                )
            if env.verbosity:
                print " * uploaded", filename
            #finally set the ownership/permissions
            #We'll use the group to allow www-data execute
            if wsgi == 'apache2':
                sudo("chown %s:www-data %s"% (env.user,filename))
                run("chmod ug+xr %s"% filename)
            elif wsgi == 'gunicorn':
                sudo("chown root:root %s"% filename)
                sudo("chmod go+r %s"% filename)
    return deployed
def webserver_list():
    """Return the webserver packages that are installed on the host."""
    # Intersect the installed packages with the servers woven knows about.
    return list(set(get_packages()) & set(['apache2', 'gunicorn', 'uwsgi', 'nginx']))
def reload_webservers():
"""
Reload apache2 and nginx
"""
if env.verbosity:
print env.host, "RELOADING apache2"
with settings(warn_only=True):
a = sudo("/etc/init.d/apache2 reload")
if env.verbosity:
print '',a
if env.verbosity:
#Reload used to fail on Ubuntu but at least in 10.04 it works
print env.host,"RELOADING nginx"
with settings(warn_only=True):
s = run("/etc/init.d/nginx status")
if 'running' in s:
n = sudo("/etc/init.d/nginx reload")
else:
n = sudo("/etc/init.d/nginx start")
if env.verbosity:
print ' *',n
return True
def stop_webserver(server):
"""
Stop server
"""
#TODO - distinguish between a warning and a error on apache
if server == 'apache2':
with settings(warn_only=True):
if env.verbosity:
print env.host,"STOPPING apache2"
a = sudo("/etc/init.d/apache2 stop")
if env.verbosity:
print '',a
elif server == 'gunicorn':
with settings(warn_only=True):
if env.verbosity:
print env.host,"STOPPING","%s-%s"% (server,env.project_name)
a = sudo("stop %s-%s"% (server,env.project_name))
if env.verbosity and a.strip():
print '',a
return True
def start_webserver(server):
    """
    Start server
    """
    if server == 'apache2':
        with settings(warn_only=True):
            if env.verbosity:
                print env.host,"STARTING apache2"
            #some issues with pty=True getting apache to start on ec2
            a = sudo("/etc/init.d/apache2 start", pty=False)
            if env.verbosity:
                print '',a
            if a.failed:
                # Apache failing to start is treated as fatal for the deployment.
                print "ERROR: /etc/init.d/apache2 start failed"
                print env.host, a
                sys.exit(1)
    elif server == 'nginx':
        if env.verbosity:
            #Reload used to fail on Ubuntu but at least in 10.04 it works
            print env.host,"RELOADING nginx"
        with settings(warn_only=True):
            # 'reload' only works when nginx is already running; otherwise start it.
            s = run("/etc/init.d/nginx status")
            if 'running' in s:
                n = sudo("/etc/init.d/nginx reload")
            else:
                n = sudo("/etc/init.d/nginx start")
            if env.verbosity:
                print ' *',n
    else:
        # Anything else is assumed to be an upstart job named <server>-<project_name>.
        if env.verbosity:
            print env.host, "STARTING","%s-%s"% (server,env.project_name)
        with settings(warn_only=True):
            n = sudo('start %s-%s'% (server,env.project_name))
            if env.verbosity and n.strip():
                print ' *', n
    return True
| {
"content_hash": "f39fd0f43c83596b7673b004445d37e5",
"timestamp": "",
"source": "github",
"line_count": 369,
"max_line_length": 155,
"avg_line_length": 37.3739837398374,
"alnum_prop": 0.5459357552026685,
"repo_name": "bretth/woven",
"id": "996b4322eb7543b302e67bc972de908d83aca924",
"size": "13813",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "woven/webservers.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "153614"
}
],
"symlink_target": ""
} |
from test_framework.blocktools import (
create_coinbase,
create_block,
add_witness_commitment,
MAX_BLOCK_SIGOPS_WEIGHT,
NORMAL_GBT_REQUEST_PARAMS,
WITNESS_SCALE_FACTOR,
)
from test_framework.messages import (
COutPoint,
CTransaction,
CTxIn,
CTxInWitness,
CTxOut,
ToHex,
)
from test_framework.script import (
ANNEX_TAG,
CScript,
CScriptNum,
CScriptOp,
LEAF_VERSION_TAPSCRIPT,
LegacySignatureHash,
LOCKTIME_THRESHOLD,
MAX_SCRIPT_ELEMENT_SIZE,
OP_0,
OP_1,
OP_2,
OP_3,
OP_4,
OP_5,
OP_6,
OP_7,
OP_8,
OP_9,
OP_10,
OP_11,
OP_12,
OP_16,
OP_2DROP,
OP_2DUP,
OP_CHECKMULTISIG,
OP_CHECKMULTISIGVERIFY,
OP_CHECKSIG,
OP_CHECKSIGADD,
OP_CHECKSIGVERIFY,
OP_CODESEPARATOR,
OP_DROP,
OP_DUP,
OP_ELSE,
OP_ENDIF,
OP_EQUAL,
OP_EQUALVERIFY,
OP_HASH160,
OP_IF,
OP_NOP,
OP_NOT,
OP_NOTIF,
OP_PUSHDATA1,
OP_RETURN,
OP_SWAP,
OP_VERIFY,
SIGHASH_DEFAULT,
SIGHASH_ALL,
SIGHASH_NONE,
SIGHASH_SINGLE,
SIGHASH_ANYONECANPAY,
SegwitV0SignatureHash,
TaprootSignatureHash,
is_op_success,
taproot_construct,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_raises_rpc_error, assert_equal
from test_framework.key import generate_privkey, compute_xonly_pubkey, sign_schnorr, tweak_add_privkey, ECKey
from test_framework.address import (
hash160,
sha256,
)
from collections import OrderedDict, namedtuple
from io import BytesIO
import json
import hashlib
import os
import random
# === Framework for building spending transactions. ===
#
# The computation is represented as a "context" dict, whose entries store potentially-unevaluated expressions that
# refer to lower-level ones. By overwriting these expressions, many aspects - both high and low level - of the signing
# process can be overridden.
#
# Specifically, a context object is a dict that maps names to compositions of:
# - values
# - lists of values
# - callables which, when fed the context object as argument, produce any of these
#
# The DEFAULT_CONTEXT object specifies a standard signing process, with many overridable knobs.
#
# The get(ctx, name) function can evaluate a name, and cache its result in the context.
# getter(name) can be used to construct a callable that evaluates name. For example:
#
#   ctx1 = {**DEFAULT_CONTEXT, "inputs": [getter("sign"), b'\x01']}
#
# creates a context where the script inputs are a signature plus the bytes 0x01.
#
# override(expr, name1=expr1, name2=expr2, ...) can be used to cause an expression to be evaluated in a selectively
# modified context. For example:
#
#   ctx2 = {**DEFAULT_CONTEXT, "sighash": override(default_sighash, hashtype=SIGHASH_DEFAULT)}
#
# creates a context ctx2 where the sighash is modified to use hashtype=SIGHASH_DEFAULT. This differs from
#
#   ctx3 = {**DEFAULT_CONTEXT, "hashtype": SIGHASH_DEFAULT}
#
# in that ctx3 will globally use hashtype=SIGHASH_DEFAULT (including in the hashtype byte appended to the signature)
# while ctx2 only uses the modified hashtype inside the sighash calculation.
def deep_eval(ctx, expr):
    """Fully evaluate expr against ctx: repeatedly call any callables, and recurse into lists."""
    result = expr
    # A callable may itself return another callable; keep applying until a plain value remains.
    while callable(result):
        result = result(ctx)
    if isinstance(result, list):
        result = [deep_eval(ctx, item) for item in result]
    return result
# Data type to represent fully-evaluated expressions in a context dict (so we can avoid reevaluating them).
# get() wraps each computed result in Final and stores it back into the context;
# entries already wrapped are returned directly instead of being re-evaluated.
Final = namedtuple("Final", "value")
def get(ctx, name):
    """Look up name in context ctx, evaluating it (and caching the result) on first access."""
    assert name in ctx, "Missing '%s' in context" % name
    entry = ctx[name]
    if isinstance(entry, Final):
        # Already evaluated on a previous call; reuse the cached value.
        return entry.value
    entry = Final(deep_eval(ctx, entry))
    ctx[name] = entry
    return entry.value
def getter(name):
    """Return a callable that evaluates name in whatever context it is later given."""
    def fn(ctx):
        return get(ctx, name)
    return fn
def override(expr, **kwargs):
    """Return a callable that evaluates expr with the kwargs entries layered over the context."""
    def fn(ctx):
        modified = {**ctx, **kwargs}
        return deep_eval(modified, expr)
    return fn
# === Implementations for the various default expressions in DEFAULT_CONTEXT ===
def default_hashtype(ctx):
    """Default expression for "hashtype": SIGHASH_DEFAULT for taproot, SIGHASH_ALL otherwise."""
    if get(ctx, "mode") == "taproot":
        return SIGHASH_DEFAULT
    return SIGHASH_ALL
def default_tapleaf(ctx):
    """Default expression for "tapleaf": look up the selected leaf name in tap.leaves."""
    leaves = get(ctx, "tap").leaves
    return leaves[get(ctx, "leaf")]
def default_script_taproot(ctx):
    """Default expression for "script_taproot": the script of the selected tapleaf."""
    leaf_info = get(ctx, "tapleaf")
    return leaf_info.script
def default_leafversion(ctx):
    """Default expression for "leafversion": the version of the selected tapleaf."""
    leaf_info = get(ctx, "tapleaf")
    return leaf_info.version
def default_negflag(ctx):
    """Default expression for "negflag": the negation flag from the taproot info."""
    tap_info = get(ctx, "tap")
    return tap_info.negflag
def default_pubkey_inner(ctx):
    """Default expression for "pubkey_inner": the inner pubkey from the taproot info."""
    tap_info = get(ctx, "tap")
    return tap_info.inner_pubkey
def default_merklebranch(ctx):
    """Default expression for "merklebranch": the Merkle branch of the selected tapleaf."""
    leaf_info = get(ctx, "tapleaf")
    return leaf_info.merklebranch
def default_controlblock(ctx):
    """Default expression for "controlblock": (leafversion + negflag) byte, then inner pubkey, then Merkle branch."""
    first_byte = get(ctx, "leafversion") + get(ctx, "negflag")
    return bytes([first_byte]) + get(ctx, "pubkey_inner") + get(ctx, "merklebranch")
def default_sighash(ctx):
    """Default expression for "sighash": depending on mode, compute BIP341, BIP143, or legacy sighash."""
    tx = get(ctx, "tx")
    idx = get(ctx, "idx")
    hashtype = get(ctx, "hashtype_actual")
    mode = get(ctx, "mode")
    if mode == "taproot":
        # BIP341 signature hash
        utxos = get(ctx, "utxos")
        annex = get(ctx, "annex")
        if get(ctx, "leaf") is not None:
            # Script path spend: also commit to the executed script, its leaf
            # version, and the position of the last executed OP_CODESEPARATOR.
            codeseppos = get(ctx, "codeseppos")
            leaf_ver = get(ctx, "leafversion")
            script = get(ctx, "script_taproot")
            return TaprootSignatureHash(tx, utxos, hashtype, idx, scriptpath=True, script=script, leaf_ver=leaf_ver, codeseparator_pos=codeseppos, annex=annex)
        else:
            # Key path spend.
            return TaprootSignatureHash(tx, utxos, hashtype, idx, scriptpath=False, annex=annex)
    elif mode == "witv0":
        # BIP143 signature hash (commits to the spent output's amount)
        scriptcode = get(ctx, "scriptcode")
        utxos = get(ctx, "utxos")
        return SegwitV0SignatureHash(scriptcode, tx, idx, hashtype, utxos[idx].nValue)
    else:
        # Pre-segwit signature hash
        scriptcode = get(ctx, "scriptcode")
        return LegacySignatureHash(scriptcode, tx, idx, hashtype)[0]
def default_tweak(ctx):
    """Default expression for "tweak": the taproot tweak for key path spends, None for script path spends."""
    if get(ctx, "leaf") is not None:
        # Script path spend: the signing key is used untweaked.
        return None
    return get(ctx, "tap").tweak
def default_key_tweaked(ctx):
    """Default expression for "key_tweaked": the key, with the tweak applied when one is set."""
    key = get(ctx, "key")
    tweak = get(ctx, "tweak")
    if tweak is None:
        return key
    return tweak_add_privkey(key, tweak)
def default_signature(ctx):
    """Default expression for "signature": Schnorr (taproot) or ECDSA signature over the sighash."""
    sighash = get(ctx, "sighash")
    if get(ctx, "mode") != "taproot":
        # ECDSA signature with the untweaked key.
        return get(ctx, "key").sign_ecdsa(sighash)
    # BIP340 Schnorr signature, optionally corrupted via the flip flags.
    key = get(ctx, "key_tweaked")
    flip_r = get(ctx, "flag_flip_r")
    flip_p = get(ctx, "flag_flip_p")
    return sign_schnorr(key, sighash, flip_r=flip_r, flip_p=flip_p)
def default_hashtype_actual(ctx):
    """Default expression for "hashtype_actual": hashtype, with SIGHASH_SINGLE swapped for NONE when taproot has no matching output."""
    requested = get(ctx, "hashtype")
    if get(ctx, "mode") != "taproot":
        return requested
    idx = get(ctx, "idx")
    tx = get(ctx, "tx")
    no_matching_output = idx >= len(tx.vout)
    if (requested & 3) == SIGHASH_SINGLE and no_matching_output:
        return (requested & ~3) | SIGHASH_NONE
    return requested
def default_bytes_hashtype(ctx):
    """Default expression for "bytes_hashtype": the hashtype as a single byte, or b"" when it is 0."""
    hashtype = get(ctx, "hashtype_actual")
    if hashtype != 0:
        return bytes([hashtype])
    return b""
def default_sign(ctx):
    """Default expression for "sign": the signature with the hashtype byte (if any) appended."""
    signature = get(ctx, "signature")
    suffix = get(ctx, "bytes_hashtype")
    return signature + suffix
def default_inputs_keypath(ctx):
    """Default expression for "inputs_keypath": just the full signature."""
    sig = get(ctx, "sign")
    return [sig]
def default_witness_taproot(ctx):
    """Default expression for "witness_taproot": inputs, plus script/control block for script path, plus annex."""
    annex = get(ctx, "annex")
    suffix = [annex] if annex is not None else []
    if get(ctx, "leaf") is None:
        # Key path spend: just the key path inputs (plus the annex, when present).
        return get(ctx, "inputs_keypath") + suffix
    # Script path spend: script inputs, then the executed script and its control block.
    stack = get(ctx, "inputs") + [bytes(get(ctx, "script_taproot")), get(ctx, "controlblock")]
    return stack + suffix
def default_witness_witv0(ctx):
    """Default expression for "witness_witv0": inputs, with the witness script appended for P2WSH."""
    script = get(ctx, "script_witv0")
    inputs = get(ctx, "inputs")
    if script is None:
        # P2WPKH: no witness script on the stack.
        return inputs
    return inputs + [script]
def default_witness(ctx):
    """Default expression for "witness": delegate by mode; empty for legacy spends."""
    mode = get(ctx, "mode")
    if mode == "taproot":
        return get(ctx, "witness_taproot")
    if mode == "witv0":
        return get(ctx, "witness_witv0")
    return []
def default_scriptsig(ctx):
    """Default expression for "scriptsig": the inputs (legacy only), plus the P2SH redeemscript when set."""
    elements = []
    if get(ctx, "mode") == "legacy":
        elements = get(ctx, "inputs")
    redeemscript = get(ctx, "script_p2sh")
    if redeemscript is not None:
        elements += [bytes(redeemscript)]
    return elements
# The default context object.
# Entries are evaluated lazily through get()/deep_eval(); spend() layers its
# **kwargs over this dict, so any entry can be overridden per test case.
DEFAULT_CONTEXT = {
    # == The main expressions to evaluate. Only override these for unusual or invalid spends. ==
    # The overall witness stack, as a list of bytes objects.
    "witness": default_witness,
    # The overall scriptsig, as a list of CScript objects (to be concatenated) and bytes objects (to be pushed)
    "scriptsig": default_scriptsig,
    # == Expressions you'll generally only override for intentionally invalid spends. ==
    # The witness stack for spending a taproot output.
    "witness_taproot": default_witness_taproot,
    # The witness stack for spending a P2WPKH/P2WSH output.
    "witness_witv0": default_witness_witv0,
    # The script inputs for a taproot key path spend.
    "inputs_keypath": default_inputs_keypath,
    # The actual hashtype to use (usually equal to hashtype, but in taproot SIGHASH_SINGLE is not always allowed).
    "hashtype_actual": default_hashtype_actual,
    # The bytes object for a full signature (including hashtype byte, if needed).
    "bytes_hashtype": default_bytes_hashtype,
    # A full script signature (bytes including hashtype, if needed)
    "sign": default_sign,
    # An ECDSA or Schnorr signature (excluding hashtype byte).
    "signature": default_signature,
    # The 32-byte tweaked key (equal to key for script path spends, or key+tweak for key path spends).
    "key_tweaked": default_key_tweaked,
    # The tweak to use (None for script path spends, the actual tweak for key path spends).
    "tweak": default_tweak,
    # The sighash value (32 bytes)
    "sighash": default_sighash,
    # The information about the chosen script path spend (TaprootLeafInfo object).
    "tapleaf": default_tapleaf,
    # The script to push, and include in the sighash, for a taproot script path spend.
    "script_taproot": default_script_taproot,
    # The inner pubkey for a taproot script path spend (32 bytes).
    "pubkey_inner": default_pubkey_inner,
    # The negation flag of the inner pubkey for a taproot script path spend.
    "negflag": default_negflag,
    # The leaf version to include in the sighash (this does not affect the one in the control block).
    "leafversion": default_leafversion,
    # The Merkle path to include in the control block for a script path spend.
    "merklebranch": default_merklebranch,
    # The control block to push for a taproot script path spend.
    "controlblock": default_controlblock,
    # Whether to produce signatures with invalid P sign (Schnorr signatures only).
    "flag_flip_p": False,
    # Whether to produce signatures with invalid R sign (Schnorr signatures only).
    "flag_flip_r": False,
    # == Parameters that can be changed without invalidating, but do have a default: ==
    # The hashtype (as an integer).
    "hashtype": default_hashtype,
    # The annex (only when mode=="taproot").
    "annex": None,
    # The codeseparator position (only when mode=="taproot").
    "codeseppos": -1,
    # The redeemscript to add to the scriptSig (if P2SH; None implies not P2SH).
    "script_p2sh": None,
    # The script to add to the witness in (if P2WSH; None implies P2WPKH)
    "script_witv0": None,
    # The leaf to use in taproot spends (if script path spend; None implies key path spend).
    "leaf": None,
    # The input arguments to provide to the executed script
    "inputs": [],
    # == Parameters to be set before evaluation: ==
    # - mode: what spending style to use ("taproot", "witv0", or "legacy").
    # - key: the (untweaked) private key to sign with (ECKey object for ECDSA, 32 bytes for Schnorr).
    # - tap: the TaprootInfo object (see taproot_construct; needed in mode=="taproot").
    # - tx: the transaction to sign.
    # - utxos: the UTXOs being spent (needed in mode=="witv0" and mode=="taproot").
    # - idx: the input position being signed.
    # - scriptcode: the scriptcode to include in legacy and witv0 sighashes.
}
def flatten(lst):
    """Recursively flatten nested lists into a single flat list (non-list elements kept as-is)."""
    out = []
    for item in lst:
        if isinstance(item, list):
            out.extend(flatten(item))
        else:
            out.append(item)
    return out
def spend(tx, idx, utxos, **kwargs):
    """Sign transaction input idx of tx, provided utxos is the list of outputs being spent.

    Additional arguments may be provided that override any aspect of the signing process.
    See DEFAULT_CONTEXT above for what can be overridden, and what must be provided.
    """
    ctx = {**DEFAULT_CONTEXT, "tx": tx, "idx": idx, "utxos": utxos, **kwargs}

    def as_script(elem):
        """Return elem unchanged if it is a CScript; otherwise a CScript pushing it."""
        return elem if isinstance(elem, CScript) else CScript([elem])

    sig_elements = flatten(get(ctx, "scriptsig"))
    scriptsig = CScript(b"".join(bytes(as_script(elem)) for elem in sig_elements))
    witness_stack = flatten(get(ctx, "witness"))
    return (scriptsig, witness_stack)
# === Spender objects ===
#
# Each spender is a tuple of:
# - A scriptPubKey which is to be spent from (CScript)
# - A comment describing the test (string)
# - Whether the spending (on itself) is expected to be standard (bool)
# - A tx-signing lambda returning (scriptsig, witness_stack), taking as inputs:
#   - A transaction to sign (CTransaction)
#   - An input position (int)
#   - The spent UTXOs by this transaction (list of CTxOut)
#   - Whether to produce a valid spend (bool)
# - A string with an expected error message for failure case if known
# - The (pre-taproot) sigops weight consumed by a successful spend
# - Whether this spend cannot fail (no_fail; set when no failure overrides exist)
# - Whether this test demands being placed in a txin with no corresponding txout (for testing SIGHASH_SINGLE behavior)
Spender = namedtuple("Spender", "script,comment,is_standard,sat_function,err_msg,sigops_weight,no_fail,need_vin_vout_mismatch")
def make_spender(comment, *, tap=None, witv0=False, script=None, pkh=None, p2sh=False, spk_mutate_pre_p2sh=None, failure=None, standard=True, err_msg=None, sigops_weight=0, need_vin_vout_mismatch=False, **kwargs):
    """Helper for constructing Spender objects using the context signing framework.
    * tap: a TaprootInfo object (see taproot_construct), for Taproot spends (cannot be combined with pkh, witv0, or script)
    * witv0: boolean indicating the use of witness v0 spending (needs one of script or pkh)
    * script: the actual script executed (for bare/P2WSH/P2SH spending)
    * pkh: the public key for P2PKH or P2WPKH spending
    * p2sh: whether the output is P2SH wrapper (this is supported even for Taproot, where it makes the output unencumbered)
    * spk_mutate_pre_p2sh: a callable to be applied to the script (before potentially P2SH-wrapping it)
    * failure: a dict of entries to override in the context when intentionally failing to spend (if None, no_fail will be set)
    * standard: whether the (valid version of) spending is expected to be standard
    * err_msg: a string with an expected error message for failure (or None, if not cared about)
    * sigops_weight: the pre-taproot sigops weight consumed by a successful spend
    * need_vin_vout_mismatch: whether this test requires being tested in a transaction input that has no corresponding
    transaction output.
    """
    conf = dict()
    # Compute scriptPubKey and set useful defaults based on the inputs.
    if witv0:
        assert tap is None
        conf["mode"] = "witv0"
        if pkh is not None:
            # P2WPKH
            assert script is None
            pubkeyhash = hash160(pkh)
            spk = CScript([OP_0, pubkeyhash])
            conf["scriptcode"] = CScript([OP_DUP, OP_HASH160, pubkeyhash, OP_EQUALVERIFY, OP_CHECKSIG])
            conf["script_witv0"] = None
            conf["inputs"] = [getter("sign"), pkh]
        elif script is not None:
            # P2WSH
            spk = CScript([OP_0, sha256(script)])
            conf["scriptcode"] = script
            conf["script_witv0"] = script
        else:
            assert False
    elif tap is None:
        conf["mode"] = "legacy"
        if pkh is not None:
            # P2PKH
            assert script is None
            pubkeyhash = hash160(pkh)
            spk = CScript([OP_DUP, OP_HASH160, pubkeyhash, OP_EQUALVERIFY, OP_CHECKSIG])
            conf["scriptcode"] = spk
            conf["inputs"] = [getter("sign"), pkh]
        elif script is not None:
            # bare
            spk = script
            conf["scriptcode"] = script
        else:
            assert False
    else:
        assert script is None
        conf["mode"] = "taproot"
        conf["tap"] = tap
        spk = tap.scriptPubKey
    if spk_mutate_pre_p2sh is not None:
        spk = spk_mutate_pre_p2sh(spk)
    if p2sh:
        # P2SH wrapper can be combined with anything else
        conf["script_p2sh"] = spk
        spk = CScript([OP_HASH160, hash160(spk), OP_EQUAL])
    # Remaining keyword arguments become direct context overrides for spend().
    conf = {**conf, **kwargs}
    def sat_fn(tx, idx, utxos, valid):
        # Produce a (scriptsig, witness_stack) satisfaction; when an invalid
        # spend is requested, layer the failure overrides over the context.
        if valid:
            return spend(tx, idx, utxos, **conf)
        else:
            assert failure is not None
            return spend(tx, idx, utxos, **{**conf, **failure})
    return Spender(script=spk, comment=comment, is_standard=standard, sat_function=sat_fn, err_msg=err_msg, sigops_weight=sigops_weight, no_fail=failure is None, need_vin_vout_mismatch=need_vin_vout_mismatch)
def add_spender(spenders, *args, **kwargs):
    """Construct a Spender via make_spender and append it to the spenders list."""
    spender = make_spender(*args, **kwargs)
    spenders.append(spender)
# === Helpers for the test ===
def random_checksig_style(pubkey):
    """Creates a random CHECKSIG* tapscript that would succeed with only the valid signature on witness stack.

    Picks one of OP_CHECKSIG, OP_CHECKSIGVERIFY, or OP_CHECKSIGADD at random and
    wraps it so the script leaves a true value on the stack on success.
    """
    # Bug fix: a stray unconditional `return bytes(CScript([pubkey, OP_CHECKSIG]))`
    # at the top of this function made everything below unreachable, so the
    # randomized opcode selection never ran. It has been removed.
    opcode = random.choice([OP_CHECKSIG, OP_CHECKSIGVERIFY, OP_CHECKSIGADD])
    if (opcode == OP_CHECKSIGVERIFY):
        # CHECKSIGVERIFY consumes its result; push OP_1 so the script ends true.
        ret = CScript([pubkey, opcode, OP_1])
    elif (opcode == OP_CHECKSIGADD):
        # CHECKSIGADD adds 1 to num on a valid signature; compare against num + 1.
        num = random.choice([0, 0x7fffffff, -0x7fffffff])
        ret = CScript([num, pubkey, opcode, num + 1, OP_EQUAL])
    else:
        ret = CScript([pubkey, opcode])
    return bytes(ret)
def random_bytes(n):
    """Return a random bytes object of length n."""
    # One getrandbits(8) call per byte, in order, so the random stream usage
    # matches a straightforward per-byte draw.
    return bytes([random.getrandbits(8) for _ in range(n)])
def bitflipper(expr):
    """Return a callable that evaluates expr and returns it with one random bit flipped."""
    def fn(ctx):
        value = deep_eval(ctx, expr)
        assert isinstance(value, bytes)
        nbits = len(value) * 8
        flipped = int.from_bytes(value, 'little') ^ (1 << random.randrange(nbits))
        return flipped.to_bytes(len(value), 'little')
    return fn
def zero_appender(expr):
    """Return a callable that evaluates expr and returns it with a zero byte appended."""
    def fn(ctx):
        return deep_eval(ctx, expr) + b"\x00"
    return fn
def byte_popper(expr):
    """Return a callable that evaluates expr and returns it with its last byte removed."""
    def fn(ctx):
        return deep_eval(ctx, expr)[:-1]
    return fn
# Expected error strings
#
# Each dict has the shape {"err_msg": ...} so it can be **-expanded into an
# add_spender() call to declare the error expected from the failing variant.
ERR_SIG_SIZE = {"err_msg": "Invalid Schnorr signature size"}
ERR_SIG_HASHTYPE = {"err_msg": "Invalid Schnorr signature hash type"}
ERR_SIG_SCHNORR = {"err_msg": "Invalid Schnorr signature"}
ERR_OP_RETURN = {"err_msg": "OP_RETURN was encountered"}
ERR_CONTROLBLOCK_SIZE = {"err_msg": "Invalid Taproot control block size"}
ERR_WITNESS_PROGRAM_MISMATCH = {"err_msg": "Witness program hash mismatch"}
ERR_PUSH_LIMIT = {"err_msg": "Push value size limit exceeded"}
ERR_DISABLED_OPCODE = {"err_msg": "Attempted to use a disabled opcode"}
ERR_TAPSCRIPT_CHECKMULTISIG = {"err_msg": "OP_CHECKMULTISIG(VERIFY) is not available in tapscript"}
ERR_MINIMALIF = {"err_msg": "OP_IF/NOTIF argument must be minimal in tapscript"}
ERR_UNKNOWN_PUBKEY = {"err_msg": "Public key is neither compressed or uncompressed"}
ERR_STACK_SIZE = {"err_msg": "Stack size limit exceeded"}
ERR_CLEANSTACK = {"err_msg": "Stack size must be exactly one after execution"}
ERR_STACK_EMPTY = {"err_msg": "Operation not valid with the current stack size"}
ERR_SIGOPS_RATIO = {"err_msg": "Too much signature validation relative to witness weight"}
ERR_UNDECODABLE = {"err_msg": "Opcode missing or not understood"}
ERR_NO_SUCCESS = {"err_msg": "Script evaluated without error but finished with a false/empty top stack element"}
ERR_EMPTY_WITNESS = {"err_msg": "Witness program was passed an empty witness"}
ERR_CHECKSIGVERIFY = {"err_msg": "Script failed an OP_CHECKSIGVERIFY operation"}
# Hashtype combinations valid for ECDSA (legacy/witv0) signatures.
VALID_SIGHASHES_ECDSA = [
    SIGHASH_ALL,
    SIGHASH_NONE,
    SIGHASH_SINGLE,
    SIGHASH_ANYONECANPAY + SIGHASH_ALL,
    SIGHASH_ANYONECANPAY + SIGHASH_NONE,
    SIGHASH_ANYONECANPAY + SIGHASH_SINGLE
]
# Taproot additionally permits hashtype 0 (SIGHASH_DEFAULT).
VALID_SIGHASHES_TAPROOT = [SIGHASH_DEFAULT] + VALID_SIGHASHES_ECDSA
VALID_SIGHASHES_TAPROOT_SINGLE = [
    SIGHASH_SINGLE,
    SIGHASH_ANYONECANPAY + SIGHASH_SINGLE
]
VALID_SIGHASHES_TAPROOT_NO_SINGLE = [h for h in VALID_SIGHASHES_TAPROOT if h not in VALID_SIGHASHES_TAPROOT_SINGLE]
# Reusable context-override fragments, **-expanded into add_spender() calls.
SIGHASH_BITFLIP = {"failure": {"sighash": bitflipper(default_sighash)}}
SIG_POP_BYTE = {"failure": {"sign": byte_popper(default_sign)}}
SINGLE_SIG = {"inputs": [getter("sign")]}
SIG_ADD_ZERO = {"failure": {"sign": zero_appender(default_sign)}}
# Amounts in satoshis used when constructing test transactions.
# NOTE(review): presumably chosen to clear dust/relay-fee thresholds — confirm against callers.
DUST_LIMIT = 600
MIN_FEE = 50000
# === Actual test cases ===
def spenders_taproot_active():
"""Return a list of Spenders for testing post-Taproot activation behavior."""
secs = [generate_privkey() for _ in range(8)]
pubs = [compute_xonly_pubkey(sec)[0] for sec in secs]
spenders = []
# == Tests for BIP340 signature validation. ==
# These are primarily tested through the test vectors implemented in libsecp256k1, and in src/tests/key_tests.cpp.
# Some things are tested programmatically as well here.
tap = taproot_construct(pubs[0])
# Test with key with bit flipped.
add_spender(spenders, "sig/key", tap=tap, key=secs[0], failure={"key_tweaked": bitflipper(default_key_tweaked)}, **ERR_SIG_SCHNORR)
# Test with sighash with bit flipped.
add_spender(spenders, "sig/sighash", tap=tap, key=secs[0], failure={"sighash": bitflipper(default_sighash)}, **ERR_SIG_SCHNORR)
# Test with invalid R sign.
add_spender(spenders, "sig/flip_r", tap=tap, key=secs[0], failure={"flag_flip_r": True}, **ERR_SIG_SCHNORR)
# Test with invalid P sign.
add_spender(spenders, "sig/flip_p", tap=tap, key=secs[0], failure={"flag_flip_p": True}, **ERR_SIG_SCHNORR)
# Test with signature with bit flipped.
add_spender(spenders, "sig/bitflip", tap=tap, key=secs[0], failure={"signature": bitflipper(default_signature)}, **ERR_SIG_SCHNORR)
# == Tests for signature hashing ==
# Run all tests once with no annex, and once with a valid random annex.
for annex in [None, lambda _: bytes([ANNEX_TAG]) + random_bytes(random.randrange(0, 250))]:
# Non-empty annex is non-standard
no_annex = annex is None
# Sighash mutation tests (test all sighash combinations)
for hashtype in VALID_SIGHASHES_TAPROOT:
common = {"annex": annex, "hashtype": hashtype, "standard": no_annex}
# Pure pubkey
tap = taproot_construct(pubs[0])
add_spender(spenders, "sighash/purepk", tap=tap, key=secs[0], **common, **SIGHASH_BITFLIP, **ERR_SIG_SCHNORR)
# Pubkey/P2PK script combination
scripts = [("s0", CScript(random_checksig_style(pubs[1])))]
tap = taproot_construct(pubs[0], scripts)
add_spender(spenders, "sighash/keypath_hashtype_%x" % hashtype, tap=tap, key=secs[0], **common, **SIGHASH_BITFLIP, **ERR_SIG_SCHNORR)
add_spender(spenders, "sighash/scriptpath_hashtype_%x" % hashtype, tap=tap, leaf="s0", key=secs[1], **common, **SINGLE_SIG, **SIGHASH_BITFLIP, **ERR_SIG_SCHNORR)
# Test SIGHASH_SINGLE behavior in combination with mismatching outputs
if hashtype in VALID_SIGHASHES_TAPROOT_SINGLE:
add_spender(spenders, "sighash/keypath_hashtype_mis_%x" % hashtype, tap=tap, key=secs[0], annex=annex, standard=no_annex, hashtype_actual=random.choice(VALID_SIGHASHES_TAPROOT_NO_SINGLE), failure={"hashtype_actual": hashtype}, **ERR_SIG_HASHTYPE, need_vin_vout_mismatch=True)
add_spender(spenders, "sighash/scriptpath_hashtype_mis_%x" % hashtype, tap=tap, leaf="s0", key=secs[1], annex=annex, standard=no_annex, hashtype_actual=random.choice(VALID_SIGHASHES_TAPROOT_NO_SINGLE), **SINGLE_SIG, failure={"hashtype_actual": hashtype}, **ERR_SIG_HASHTYPE, need_vin_vout_mismatch=True)
# Test OP_CODESEPARATOR impact on sighashing.
hashtype = lambda _: random.choice(VALID_SIGHASHES_TAPROOT)
common = {"annex": annex, "hashtype": hashtype, "standard": no_annex}
scripts = [
("pk_codesep", CScript(random_checksig_style(pubs[1]) + bytes([OP_CODESEPARATOR]))), # codesep after checksig
("codesep_pk", CScript(bytes([OP_CODESEPARATOR]) + random_checksig_style(pubs[1]))), # codesep before checksig
("branched_codesep", CScript([random_bytes(random.randrange(511)), OP_DROP, OP_IF, OP_CODESEPARATOR, pubs[0], OP_ELSE, OP_CODESEPARATOR, pubs[1], OP_ENDIF, OP_CHECKSIG])), # branch dependent codesep
]
random.shuffle(scripts)
tap = taproot_construct(pubs[0], scripts)
add_spender(spenders, "sighash/pk_codesep", tap=tap, leaf="pk_codesep", key=secs[1], **common, **SINGLE_SIG, **SIGHASH_BITFLIP, **ERR_SIG_SCHNORR)
add_spender(spenders, "sighash/codesep_pk", tap=tap, leaf="codesep_pk", key=secs[1], codeseppos=0, **common, **SINGLE_SIG, **SIGHASH_BITFLIP, **ERR_SIG_SCHNORR)
add_spender(spenders, "sighash/branched_codesep/left", tap=tap, leaf="branched_codesep", key=secs[0], codeseppos=3, **common, inputs=[getter("sign"), b'\x01'], **SIGHASH_BITFLIP, **ERR_SIG_SCHNORR)
add_spender(spenders, "sighash/branched_codesep/right", tap=tap, leaf="branched_codesep", key=secs[1], codeseppos=6, **common, inputs=[getter("sign"), b''], **SIGHASH_BITFLIP, **ERR_SIG_SCHNORR)
# Reusing the scripts above, test that various features affect the sighash.
add_spender(spenders, "sighash/annex", tap=tap, leaf="pk_codesep", key=secs[1], hashtype=hashtype, standard=False, **SINGLE_SIG, annex=bytes([ANNEX_TAG]), failure={"sighash": override(default_sighash, annex=None)}, **ERR_SIG_SCHNORR)
add_spender(spenders, "sighash/script", tap=tap, leaf="pk_codesep", key=secs[1], **common, **SINGLE_SIG, failure={"sighash": override(default_sighash, script_taproot=tap.leaves["codesep_pk"].script)}, **ERR_SIG_SCHNORR)
add_spender(spenders, "sighash/leafver", tap=tap, leaf="pk_codesep", key=secs[1], **common, **SINGLE_SIG, failure={"sighash": override(default_sighash, leafversion=random.choice([x & 0xFE for x in range(0x100) if x & 0xFE != 0xC0]))}, **ERR_SIG_SCHNORR)
add_spender(spenders, "sighash/scriptpath", tap=tap, leaf="pk_codesep", key=secs[1], **common, **SINGLE_SIG, failure={"sighash": override(default_sighash, leaf=None)}, **ERR_SIG_SCHNORR)
add_spender(spenders, "sighash/keypath", tap=tap, key=secs[0], **common, failure={"sighash": override(default_sighash, leaf="pk_codesep")}, **ERR_SIG_SCHNORR)
# Test that invalid hashtypes don't work, both in key path and script path spends
hashtype = lambda _: random.choice(VALID_SIGHASHES_TAPROOT)
for invalid_hashtype in [x for x in range(0x100) if x not in VALID_SIGHASHES_TAPROOT]:
add_spender(spenders, "sighash/keypath_unk_hashtype_%x" % invalid_hashtype, tap=tap, key=secs[0], hashtype=hashtype, failure={"hashtype": invalid_hashtype}, **ERR_SIG_HASHTYPE)
add_spender(spenders, "sighash/scriptpath_unk_hashtype_%x" % invalid_hashtype, tap=tap, leaf="pk_codesep", key=secs[1], **SINGLE_SIG, hashtype=hashtype, failure={"hashtype": invalid_hashtype}, **ERR_SIG_HASHTYPE)
# Test that hashtype 0 cannot have a hashtype byte, and 1 must have one.
add_spender(spenders, "sighash/hashtype0_byte_keypath", tap=tap, key=secs[0], hashtype=SIGHASH_DEFAULT, failure={"bytes_hashtype": bytes([SIGHASH_DEFAULT])}, **ERR_SIG_HASHTYPE)
add_spender(spenders, "sighash/hashtype0_byte_scriptpath", tap=tap, leaf="pk_codesep", key=secs[1], **SINGLE_SIG, hashtype=SIGHASH_DEFAULT, failure={"bytes_hashtype": bytes([SIGHASH_DEFAULT])}, **ERR_SIG_HASHTYPE)
add_spender(spenders, "sighash/hashtype1_byte_keypath", tap=tap, key=secs[0], hashtype=SIGHASH_ALL, failure={"bytes_hashtype": b''}, **ERR_SIG_SCHNORR)
add_spender(spenders, "sighash/hashtype1_byte_scriptpath", tap=tap, leaf="pk_codesep", key=secs[1], **SINGLE_SIG, hashtype=SIGHASH_ALL, failure={"bytes_hashtype": b''}, **ERR_SIG_SCHNORR)
# Test that hashtype 0 and hashtype 1 cannot be transmuted into each other.
add_spender(spenders, "sighash/hashtype0to1_keypath", tap=tap, key=secs[0], hashtype=SIGHASH_DEFAULT, failure={"bytes_hashtype": bytes([SIGHASH_ALL])}, **ERR_SIG_SCHNORR)
add_spender(spenders, "sighash/hashtype0to1_scriptpath", tap=tap, leaf="pk_codesep", key=secs[1], **SINGLE_SIG, hashtype=SIGHASH_DEFAULT, failure={"bytes_hashtype": bytes([SIGHASH_ALL])}, **ERR_SIG_SCHNORR)
add_spender(spenders, "sighash/hashtype1to0_keypath", tap=tap, key=secs[0], hashtype=SIGHASH_ALL, failure={"bytes_hashtype": b''}, **ERR_SIG_SCHNORR)
add_spender(spenders, "sighash/hashtype1to0_scriptpath", tap=tap, leaf="pk_codesep", key=secs[1], **SINGLE_SIG, hashtype=SIGHASH_ALL, failure={"bytes_hashtype": b''}, **ERR_SIG_SCHNORR)
# Test aspects of signatures with unusual lengths
for hashtype in [SIGHASH_DEFAULT, random.choice(VALID_SIGHASHES_TAPROOT)]:
scripts = [
("csv", CScript([pubs[2], OP_CHECKSIGVERIFY, OP_1])),
("cs_pos", CScript([pubs[2], OP_CHECKSIG])),
("csa_pos", CScript([OP_0, pubs[2], OP_CHECKSIGADD, OP_1, OP_EQUAL])),
("cs_neg", CScript([pubs[2], OP_CHECKSIG, OP_NOT])),
("csa_neg", CScript([OP_2, pubs[2], OP_CHECKSIGADD, OP_2, OP_EQUAL]))
]
random.shuffle(scripts)
tap = taproot_construct(pubs[3], scripts)
# Empty signatures
add_spender(spenders, "siglen/empty_keypath", tap=tap, key=secs[3], hashtype=hashtype, failure={"sign": b""}, **ERR_SIG_SIZE)
add_spender(spenders, "siglen/empty_csv", tap=tap, key=secs[2], leaf="csv", hashtype=hashtype, **SINGLE_SIG, failure={"sign": b""}, **ERR_CHECKSIGVERIFY)
add_spender(spenders, "siglen/empty_cs", tap=tap, key=secs[2], leaf="cs_pos", hashtype=hashtype, **SINGLE_SIG, failure={"sign": b""}, **ERR_NO_SUCCESS)
add_spender(spenders, "siglen/empty_csa", tap=tap, key=secs[2], leaf="csa_pos", hashtype=hashtype, **SINGLE_SIG, failure={"sign": b""}, **ERR_NO_SUCCESS)
add_spender(spenders, "siglen/empty_cs_neg", tap=tap, key=secs[2], leaf="cs_neg", hashtype=hashtype, **SINGLE_SIG, sign=b"", failure={"sign": lambda _: random_bytes(random.randrange(1, 63))}, **ERR_SIG_SIZE)
add_spender(spenders, "siglen/empty_csa_neg", tap=tap, key=secs[2], leaf="csa_neg", hashtype=hashtype, **SINGLE_SIG, sign=b"", failure={"sign": lambda _: random_bytes(random.randrange(66, 100))}, **ERR_SIG_SIZE)
# Appending a zero byte to signatures invalidates them
add_spender(spenders, "siglen/padzero_keypath", tap=tap, key=secs[3], hashtype=hashtype, **SIG_ADD_ZERO, **(ERR_SIG_HASHTYPE if hashtype == SIGHASH_DEFAULT else ERR_SIG_SIZE))
add_spender(spenders, "siglen/padzero_csv", tap=tap, key=secs[2], leaf="csv", hashtype=hashtype, **SINGLE_SIG, **SIG_ADD_ZERO, **(ERR_SIG_HASHTYPE if hashtype == SIGHASH_DEFAULT else ERR_SIG_SIZE))
add_spender(spenders, "siglen/padzero_cs", tap=tap, key=secs[2], leaf="cs_pos", hashtype=hashtype, **SINGLE_SIG, **SIG_ADD_ZERO, **(ERR_SIG_HASHTYPE if hashtype == SIGHASH_DEFAULT else ERR_SIG_SIZE))
add_spender(spenders, "siglen/padzero_csa", tap=tap, key=secs[2], leaf="csa_pos", hashtype=hashtype, **SINGLE_SIG, **SIG_ADD_ZERO, **(ERR_SIG_HASHTYPE if hashtype == SIGHASH_DEFAULT else ERR_SIG_SIZE))
add_spender(spenders, "siglen/padzero_cs_neg", tap=tap, key=secs[2], leaf="cs_neg", hashtype=hashtype, **SINGLE_SIG, sign=b"", **SIG_ADD_ZERO, **(ERR_SIG_HASHTYPE if hashtype == SIGHASH_DEFAULT else ERR_SIG_SIZE))
add_spender(spenders, "siglen/padzero_csa_neg", tap=tap, key=secs[2], leaf="csa_neg", hashtype=hashtype, **SINGLE_SIG, sign=b"", **SIG_ADD_ZERO, **(ERR_SIG_HASHTYPE if hashtype == SIGHASH_DEFAULT else ERR_SIG_SIZE))
# Removing the last byte from signatures invalidates them
add_spender(spenders, "siglen/popbyte_keypath", tap=tap, key=secs[3], hashtype=hashtype, **SIG_POP_BYTE, **(ERR_SIG_SIZE if hashtype == SIGHASH_DEFAULT else ERR_SIG_SCHNORR))
add_spender(spenders, "siglen/popbyte_csv", tap=tap, key=secs[2], leaf="csv", hashtype=hashtype, **SINGLE_SIG, **SIG_POP_BYTE, **(ERR_SIG_SIZE if hashtype == SIGHASH_DEFAULT else ERR_SIG_SCHNORR))
add_spender(spenders, "siglen/popbyte_cs", tap=tap, key=secs[2], leaf="cs_pos", hashtype=hashtype, **SINGLE_SIG, **SIG_POP_BYTE, **(ERR_SIG_SIZE if hashtype == SIGHASH_DEFAULT else ERR_SIG_SCHNORR))
add_spender(spenders, "siglen/popbyte_csa", tap=tap, key=secs[2], leaf="csa_pos", hashtype=hashtype, **SINGLE_SIG, **SIG_POP_BYTE, **(ERR_SIG_SIZE if hashtype == SIGHASH_DEFAULT else ERR_SIG_SCHNORR))
add_spender(spenders, "siglen/popbyte_cs_neg", tap=tap, key=secs[2], leaf="cs_neg", hashtype=hashtype, **SINGLE_SIG, sign=b"", **SIG_POP_BYTE, **(ERR_SIG_SIZE if hashtype == SIGHASH_DEFAULT else ERR_SIG_SCHNORR))
add_spender(spenders, "siglen/popbyte_csa_neg", tap=tap, key=secs[2], leaf="csa_neg", hashtype=hashtype, **SINGLE_SIG, sign=b"", **SIG_POP_BYTE, **(ERR_SIG_SIZE if hashtype == SIGHASH_DEFAULT else ERR_SIG_SCHNORR))
# Verify that an invalid signature is not allowed, not even when the CHECKSIG* is expected to fail.
add_spender(spenders, "siglen/invalid_cs_neg", tap=tap, key=secs[2], leaf="cs_neg", hashtype=hashtype, **SINGLE_SIG, sign=b"", failure={"sign": default_sign, "sighash": bitflipper(default_sighash)}, **ERR_SIG_SCHNORR)
add_spender(spenders, "siglen/invalid_csa_neg", tap=tap, key=secs[2], leaf="csa_neg", hashtype=hashtype, **SINGLE_SIG, sign=b"", failure={"sign": default_sign, "sighash": bitflipper(default_sighash)}, **ERR_SIG_SCHNORR)
# == Test that BIP341 spending only applies to witness version 1, program length 32, no P2SH ==
for p2sh in [False, True]:
for witver in range(1, 17):
for witlen in [20, 31, 32, 33]:
def mutate(spk):
    """Rewrite a taproot scriptPubKey into witness version `witver` with a `witlen`-byte program.

    `spk` is expected to hold a 32-byte witness program after its 2-byte prefix
    (asserted below); the program is truncated or zero-padded to `witlen` bytes.
    `witver` and `witlen` are taken from the enclosing loops.
    """
    prog = spk[2:]
    assert len(prog) == 32
    if witlen < 32:
        # Truncate the program to witlen bytes.
        prog = prog[0:witlen]
    elif witlen > 32:
        # Zero-pad the program up to witlen bytes.
        prog += bytes([0 for _ in range(witlen - 32)])
    return CScript([CScriptOp.encode_op_n(witver), prog])
scripts = [("s0", CScript([pubs[0], OP_CHECKSIG])), ("dummy", CScript([OP_RETURN]))]
tap = taproot_construct(pubs[1], scripts)
if not p2sh and witver == 1 and witlen == 32:
add_spender(spenders, "applic/keypath", p2sh=p2sh, spk_mutate_pre_p2sh=mutate, tap=tap, key=secs[1], **SIGHASH_BITFLIP, **ERR_SIG_SCHNORR)
add_spender(spenders, "applic/scriptpath", p2sh=p2sh, leaf="s0", spk_mutate_pre_p2sh=mutate, tap=tap, key=secs[0], **SINGLE_SIG, failure={"leaf": "dummy"}, **ERR_OP_RETURN)
else:
add_spender(spenders, "applic/keypath", p2sh=p2sh, spk_mutate_pre_p2sh=mutate, tap=tap, key=secs[1], standard=False)
add_spender(spenders, "applic/scriptpath", p2sh=p2sh, leaf="s0", spk_mutate_pre_p2sh=mutate, tap=tap, key=secs[0], **SINGLE_SIG, standard=False)
# == Test various aspects of BIP341 spending paths ==
# A set of functions that compute the hashing partner in a Merkle tree, designed to exercise
# edge cases. This relies on the taproot_construct feature that a lambda can be passed in
# instead of a subtree, to compute the partner to be hashed with.
PARTNER_MERKLE_FN = [
# Combine with itself
lambda h: h,
# Combine with hash 0
lambda h: bytes([0 for _ in range(32)]),
# Combine with hash 2^256-1
lambda h: bytes([0xff for _ in range(32)]),
# Combine with itself-1 (BE)
lambda h: (int.from_bytes(h, 'big') - 1).to_bytes(32, 'big'),
# Combine with itself+1 (BE)
lambda h: (int.from_bytes(h, 'big') + 1).to_bytes(32, 'big'),
# Combine with itself-1 (LE)
lambda h: (int.from_bytes(h, 'little') - 1).to_bytes(32, 'big'),
# Combine with itself+1 (LE)
lambda h: (int.from_bytes(h, 'little') + 1).to_bytes(32, 'little'),
# Combine with random bitflipped version of self.
lambda h: (int.from_bytes(h, 'little') ^ (1 << random.randrange(256))).to_bytes(32, 'little')
]
# Start with a tree of that has depth 1 for "128deep" and depth 2 for "129deep".
scripts = [("128deep", CScript([pubs[0], OP_CHECKSIG])), [("129deep", CScript([pubs[0], OP_CHECKSIG])), random.choice(PARTNER_MERKLE_FN)]]
# Add 127 nodes on top of that tree, so that "128deep" and "129deep" end up at their designated depths.
for _ in range(127):
scripts = [scripts, random.choice(PARTNER_MERKLE_FN)]
tap = taproot_construct(pubs[0], scripts)
# Test that spends with a depth of 128 work, but 129 doesn't (even with a tree with weird Merkle branches in it).
add_spender(spenders, "spendpath/merklelimit", tap=tap, leaf="128deep", **SINGLE_SIG, key=secs[0], failure={"leaf": "129deep"}, **ERR_CONTROLBLOCK_SIZE)
# Test that flipping the negation bit invalidates spends.
add_spender(spenders, "spendpath/negflag", tap=tap, leaf="128deep", **SINGLE_SIG, key=secs[0], failure={"negflag": lambda ctx: 1 - default_negflag(ctx)}, **ERR_WITNESS_PROGRAM_MISMATCH)
# Test that bitflips in the Merkle branch invalidate it.
add_spender(spenders, "spendpath/bitflipmerkle", tap=tap, leaf="128deep", **SINGLE_SIG, key=secs[0], failure={"merklebranch": bitflipper(default_merklebranch)}, **ERR_WITNESS_PROGRAM_MISMATCH)
# Test that bitflips in the inner pubkey invalidate it.
add_spender(spenders, "spendpath/bitflippubkey", tap=tap, leaf="128deep", **SINGLE_SIG, key=secs[0], failure={"pubkey_inner": bitflipper(default_pubkey_inner)}, **ERR_WITNESS_PROGRAM_MISMATCH)
# Test that empty witnesses are invalid.
add_spender(spenders, "spendpath/emptywit", tap=tap, leaf="128deep", **SINGLE_SIG, key=secs[0], failure={"witness": []}, **ERR_EMPTY_WITNESS)
# Test that adding garbage to the control block invalidates it.
add_spender(spenders, "spendpath/padlongcontrol", tap=tap, leaf="128deep", **SINGLE_SIG, key=secs[0], failure={"controlblock": lambda ctx: default_controlblock(ctx) + random_bytes(random.randrange(1, 32))}, **ERR_CONTROLBLOCK_SIZE)
# Test that truncating the control block invalidates it.
add_spender(spenders, "spendpath/trunclongcontrol", tap=tap, leaf="128deep", **SINGLE_SIG, key=secs[0], failure={"controlblock": lambda ctx: default_merklebranch(ctx)[0:random.randrange(1, 32)]}, **ERR_CONTROLBLOCK_SIZE)
scripts = [("s", CScript([pubs[0], OP_CHECKSIG]))]
tap = taproot_construct(pubs[1], scripts)
# Test that adding garbage to the control block invalidates it.
add_spender(spenders, "spendpath/padshortcontrol", tap=tap, leaf="s", **SINGLE_SIG, key=secs[0], failure={"controlblock": lambda ctx: default_controlblock(ctx) + random_bytes(random.randrange(1, 32))}, **ERR_CONTROLBLOCK_SIZE)
# Test that truncating the control block invalidates it.
add_spender(spenders, "spendpath/truncshortcontrol", tap=tap, leaf="s", **SINGLE_SIG, key=secs[0], failure={"controlblock": lambda ctx: default_merklebranch(ctx)[0:random.randrange(1, 32)]}, **ERR_CONTROLBLOCK_SIZE)
# Test that truncating the control block to 1 byte ("-1 Merkle length") invalidates it
add_spender(spenders, "spendpath/trunc1shortcontrol", tap=tap, leaf="s", **SINGLE_SIG, key=secs[0], failure={"controlblock": lambda ctx: default_merklebranch(ctx)[0:1]}, **ERR_CONTROLBLOCK_SIZE)
# == Test BIP342 edge cases ==
csa_low_val = random.randrange(0, 17) # Within range for OP_n
csa_low_result = csa_low_val + 1
csa_high_val = random.randrange(17, 100) if random.getrandbits(1) else random.randrange(-100, -1) # Outside OP_n range
csa_high_result = csa_high_val + 1
OVERSIZE_NUMBER = 2**31
assert_equal(len(CScriptNum.encode(CScriptNum(OVERSIZE_NUMBER))), 6)
assert_equal(len(CScriptNum.encode(CScriptNum(OVERSIZE_NUMBER-1))), 5)
big_choices = []
big_scriptops = []
for i in range(1000):
r = random.randrange(len(pubs))
big_choices.append(r)
big_scriptops += [pubs[r], OP_CHECKSIGVERIFY]
def big_spend_inputs(ctx):
    """Helper function to construct the script input for t33/t34 below."""
    # Instead of signing 999 times, precompute signatures for every (key, hashtype) combination
    sigs = {}
    for ht in VALID_SIGHASHES_TAPROOT:
        for k in range(len(pubs)):
            sigs[(k, ht)] = override(default_sign, hashtype=ht, key=secs[k])(ctx)
    # "num" controls how many of the precomputed signatures are placed on the stack.
    num = get(ctx, "num")
    # Emit signatures in reverse index order, matching the pubkey order consumed by the script.
    return [sigs[(big_choices[i], random.choice(VALID_SIGHASHES_TAPROOT))] for i in range(num - 1, -1, -1)]
# Various BIP342 features
scripts = [
# 0) drop stack element and OP_CHECKSIG
("t0", CScript([OP_DROP, pubs[1], OP_CHECKSIG])),
# 1) normal OP_CHECKSIG
("t1", CScript([pubs[1], OP_CHECKSIG])),
# 2) normal OP_CHECKSIGVERIFY
("t2", CScript([pubs[1], OP_CHECKSIGVERIFY, OP_1])),
# 3) Hypothetical OP_CHECKMULTISIG script that takes a single sig as input
("t3", CScript([OP_0, OP_SWAP, OP_1, pubs[1], OP_1, OP_CHECKMULTISIG])),
# 4) Hypothetical OP_CHECKMULTISIGVERIFY script that takes a single sig as input
("t4", CScript([OP_0, OP_SWAP, OP_1, pubs[1], OP_1, OP_CHECKMULTISIGVERIFY, OP_1])),
# 5) OP_IF script that needs a true input
("t5", CScript([OP_IF, pubs[1], OP_CHECKSIG, OP_ELSE, OP_RETURN, OP_ENDIF])),
# 6) OP_NOTIF script that needs a true input
("t6", CScript([OP_NOTIF, OP_RETURN, OP_ELSE, pubs[1], OP_CHECKSIG, OP_ENDIF])),
# 7) OP_CHECKSIG with an empty key
("t7", CScript([OP_0, OP_CHECKSIG])),
# 8) OP_CHECKSIGVERIFY with an empty key
("t8", CScript([OP_0, OP_CHECKSIGVERIFY, OP_1])),
# 9) normal OP_CHECKSIGADD that also ensures return value is correct
("t9", CScript([csa_low_val, pubs[1], OP_CHECKSIGADD, csa_low_result, OP_EQUAL])),
# 10) OP_CHECKSIGADD with empty key
("t10", CScript([csa_low_val, OP_0, OP_CHECKSIGADD, csa_low_result, OP_EQUAL])),
# 11) OP_CHECKSIGADD with missing counter stack element
("t11", CScript([pubs[1], OP_CHECKSIGADD, OP_1, OP_EQUAL])),
# 12) OP_CHECKSIG that needs invalid signature
("t12", CScript([pubs[1], OP_CHECKSIGVERIFY, pubs[0], OP_CHECKSIG, OP_NOT])),
# 13) OP_CHECKSIG with empty key that needs invalid signature
("t13", CScript([pubs[1], OP_CHECKSIGVERIFY, OP_0, OP_CHECKSIG, OP_NOT])),
# 14) OP_CHECKSIGADD that needs invalid signature
("t14", CScript([pubs[1], OP_CHECKSIGVERIFY, OP_0, pubs[0], OP_CHECKSIGADD, OP_NOT])),
# 15) OP_CHECKSIGADD with empty key that needs invalid signature
("t15", CScript([pubs[1], OP_CHECKSIGVERIFY, OP_0, OP_0, OP_CHECKSIGADD, OP_NOT])),
# 16) OP_CHECKSIG with unknown pubkey type
("t16", CScript([OP_1, OP_CHECKSIG])),
# 17) OP_CHECKSIGADD with unknown pubkey type
("t17", CScript([OP_0, OP_1, OP_CHECKSIGADD])),
# 18) OP_CHECKSIGVERIFY with unknown pubkey type
("t18", CScript([OP_1, OP_CHECKSIGVERIFY, OP_1])),
# 19) script longer than 10000 bytes and over 201 non-push opcodes
("t19", CScript([OP_0, OP_0, OP_2DROP] * 10001 + [pubs[1], OP_CHECKSIG])),
# 20) OP_CHECKSIGVERIFY with empty key
("t20", CScript([pubs[1], OP_CHECKSIGVERIFY, OP_0, OP_0, OP_CHECKSIGVERIFY, OP_1])),
# 21) Script that grows the stack to 1000 elements
("t21", CScript([pubs[1], OP_CHECKSIGVERIFY, OP_1] + [OP_DUP] * 999 + [OP_DROP] * 999)),
# 22) Script that grows the stack to 1001 elements
("t22", CScript([pubs[1], OP_CHECKSIGVERIFY, OP_1] + [OP_DUP] * 1000 + [OP_DROP] * 1000)),
# 23) Script that expects an input stack of 1000 elements
("t23", CScript([OP_DROP] * 999 + [pubs[1], OP_CHECKSIG])),
# 24) Script that expects an input stack of 1001 elements
("t24", CScript([OP_DROP] * 1000 + [pubs[1], OP_CHECKSIG])),
# 25) Script that pushes a MAX_SCRIPT_ELEMENT_SIZE-bytes element
("t25", CScript([random_bytes(MAX_SCRIPT_ELEMENT_SIZE), OP_DROP, pubs[1], OP_CHECKSIG])),
# 26) Script that pushes a (MAX_SCRIPT_ELEMENT_SIZE+1)-bytes element
("t26", CScript([random_bytes(MAX_SCRIPT_ELEMENT_SIZE+1), OP_DROP, pubs[1], OP_CHECKSIG])),
# 27) CHECKSIGADD that must fail because numeric argument number is >4 bytes
("t27", CScript([CScriptNum(OVERSIZE_NUMBER), pubs[1], OP_CHECKSIGADD])),
# 28) Pushes random CScriptNum value, checks OP_CHECKSIGADD result
("t28", CScript([csa_high_val, pubs[1], OP_CHECKSIGADD, csa_high_result, OP_EQUAL])),
# 29) CHECKSIGADD that succeeds with proper sig because numeric argument number is <=4 bytes
("t29", CScript([CScriptNum(OVERSIZE_NUMBER-1), pubs[1], OP_CHECKSIGADD])),
# 30) Variant of t1 with "normal" 33-byte pubkey
("t30", CScript([b'\x03' + pubs[1], OP_CHECKSIG])),
# 31) Variant of t2 with "normal" 33-byte pubkey
("t31", CScript([b'\x02' + pubs[1], OP_CHECKSIGVERIFY, OP_1])),
# 32) Variant of t28 with "normal" 33-byte pubkey
("t32", CScript([csa_high_val, b'\x03' + pubs[1], OP_CHECKSIGADD, csa_high_result, OP_EQUAL])),
# 33) 999-of-999 multisig
("t33", CScript(big_scriptops[:1998] + [OP_1])),
# 34) 1000-of-1000 multisig
("t34", CScript(big_scriptops[:2000] + [OP_1])),
# 35) Variant of t9 that uses a non-minimally encoded input arg
("t35", CScript([bytes([csa_low_val]), pubs[1], OP_CHECKSIGADD, csa_low_result, OP_EQUAL])),
# 36) Empty script
("t36", CScript([])),
]
# Add many dummies to test huge trees
for j in range(100000):
scripts.append((None, CScript([OP_RETURN, random.randrange(100000)])))
random.shuffle(scripts)
tap = taproot_construct(pubs[0], scripts)
common = {
"hashtype": hashtype,
"key": secs[1],
"tap": tap,
}
# Test that MAX_SCRIPT_ELEMENT_SIZE byte stack element inputs are valid, but not one more (and 80 bytes is standard but 81 is not).
add_spender(spenders, "tapscript/inputmaxlimit", leaf="t0", **common, standard=False, inputs=[getter("sign"), random_bytes(MAX_SCRIPT_ELEMENT_SIZE)], failure={"inputs": [getter("sign"), random_bytes(MAX_SCRIPT_ELEMENT_SIZE+1)]}, **ERR_PUSH_LIMIT)
add_spender(spenders, "tapscript/input80limit", leaf="t0", **common, inputs=[getter("sign"), random_bytes(80)])
add_spender(spenders, "tapscript/input81limit", leaf="t0", **common, standard=False, inputs=[getter("sign"), random_bytes(81)])
# Test that OP_CHECKMULTISIG and OP_CHECKMULTISIGVERIFY cause failure, but OP_CHECKSIG and OP_CHECKSIGVERIFY work.
add_spender(spenders, "tapscript/disabled_checkmultisig", leaf="t1", **common, **SINGLE_SIG, failure={"leaf": "t3"}, **ERR_TAPSCRIPT_CHECKMULTISIG)
add_spender(spenders, "tapscript/disabled_checkmultisigverify", leaf="t2", **common, **SINGLE_SIG, failure={"leaf": "t4"}, **ERR_TAPSCRIPT_CHECKMULTISIG)
# Test that OP_IF and OP_NOTIF do not accept non-0x01 as truth value (the MINIMALIF rule is consensus in Tapscript)
add_spender(spenders, "tapscript/minimalif", leaf="t5", **common, inputs=[getter("sign"), b'\x01'], failure={"inputs": [getter("sign"), b'\x02']}, **ERR_MINIMALIF)
add_spender(spenders, "tapscript/minimalnotif", leaf="t6", **common, inputs=[getter("sign"), b'\x01'], failure={"inputs": [getter("sign"), b'\x03']}, **ERR_MINIMALIF)
add_spender(spenders, "tapscript/minimalif", leaf="t5", **common, inputs=[getter("sign"), b'\x01'], failure={"inputs": [getter("sign"), b'\x0001']}, **ERR_MINIMALIF)
add_spender(spenders, "tapscript/minimalnotif", leaf="t6", **common, inputs=[getter("sign"), b'\x01'], failure={"inputs": [getter("sign"), b'\x0100']}, **ERR_MINIMALIF)
# Test that 1-byte public keys (which are unknown) are acceptable but nonstandard with unrelated signatures, but 0-byte public keys are not valid.
add_spender(spenders, "tapscript/unkpk/checksig", leaf="t16", standard=False, **common, **SINGLE_SIG, failure={"leaf": "t7"}, **ERR_UNKNOWN_PUBKEY)
add_spender(spenders, "tapscript/unkpk/checksigadd", leaf="t17", standard=False, **common, **SINGLE_SIG, failure={"leaf": "t10"}, **ERR_UNKNOWN_PUBKEY)
add_spender(spenders, "tapscript/unkpk/checksigverify", leaf="t18", standard=False, **common, **SINGLE_SIG, failure={"leaf": "t8"}, **ERR_UNKNOWN_PUBKEY)
# Test that 33-byte public keys (which are unknown) are acceptable but nonstandard with valid signatures, but normal pubkeys are not valid in that case.
add_spender(spenders, "tapscript/oldpk/checksig", leaf="t30", standard=False, **common, **SINGLE_SIG, sighash=bitflipper(default_sighash), failure={"leaf": "t1"}, **ERR_SIG_SCHNORR)
add_spender(spenders, "tapscript/oldpk/checksigadd", leaf="t31", standard=False, **common, **SINGLE_SIG, sighash=bitflipper(default_sighash), failure={"leaf": "t2"}, **ERR_SIG_SCHNORR)
add_spender(spenders, "tapscript/oldpk/checksigverify", leaf="t32", standard=False, **common, **SINGLE_SIG, sighash=bitflipper(default_sighash), failure={"leaf": "t28"}, **ERR_SIG_SCHNORR)
# Test that 0-byte public keys are not acceptable.
add_spender(spenders, "tapscript/emptypk/checksig", leaf="t1", **SINGLE_SIG, **common, failure={"leaf": "t7"}, **ERR_UNKNOWN_PUBKEY)
add_spender(spenders, "tapscript/emptypk/checksigverify", leaf="t2", **SINGLE_SIG, **common, failure={"leaf": "t8"}, **ERR_UNKNOWN_PUBKEY)
add_spender(spenders, "tapscript/emptypk/checksigadd", leaf="t9", **SINGLE_SIG, **common, failure={"leaf": "t10"}, **ERR_UNKNOWN_PUBKEY)
add_spender(spenders, "tapscript/emptypk/checksigadd", leaf="t35", standard=False, **SINGLE_SIG, **common, failure={"leaf": "t10"}, **ERR_UNKNOWN_PUBKEY)
# Test that OP_CHECKSIGADD results are as expected
add_spender(spenders, "tapscript/checksigaddresults", leaf="t28", **SINGLE_SIG, **common, failure={"leaf": "t27"}, err_msg="unknown error")
add_spender(spenders, "tapscript/checksigaddoversize", leaf="t29", **SINGLE_SIG, **common, failure={"leaf": "t27"}, err_msg="unknown error")
# Test that OP_CHECKSIGADD requires 3 stack elements.
add_spender(spenders, "tapscript/checksigadd3args", leaf="t9", **SINGLE_SIG, **common, failure={"leaf": "t11"}, **ERR_STACK_EMPTY)
# Test that empty signatures do not cause script failure in OP_CHECKSIG and OP_CHECKSIGADD (but do fail with empty pubkey, and do fail OP_CHECKSIGVERIFY)
add_spender(spenders, "tapscript/emptysigs/checksig", leaf="t12", **common, inputs=[b'', getter("sign")], failure={"leaf": "t13"}, **ERR_UNKNOWN_PUBKEY)
add_spender(spenders, "tapscript/emptysigs/nochecksigverify", leaf="t12", **common, inputs=[b'', getter("sign")], failure={"leaf": "t20"}, **ERR_UNKNOWN_PUBKEY)
add_spender(spenders, "tapscript/emptysigs/checksigadd", leaf="t14", **common, inputs=[b'', getter("sign")], failure={"leaf": "t15"}, **ERR_UNKNOWN_PUBKEY)
# Test that scripts over 10000 bytes (and over 201 non-push ops) are acceptable.
add_spender(spenders, "tapscript/no10000limit", leaf="t19", **SINGLE_SIG, **common)
# Test that a stack size of 1000 elements is permitted, but 1001 isn't.
add_spender(spenders, "tapscript/1000stack", leaf="t21", **SINGLE_SIG, **common, failure={"leaf": "t22"}, **ERR_STACK_SIZE)
# Test that an input stack size of 1000 elements is permitted, but 1001 isn't.
add_spender(spenders, "tapscript/1000inputs", leaf="t23", **common, inputs=[getter("sign")] + [b'' for _ in range(999)], failure={"leaf": "t24", "inputs": [getter("sign")] + [b'' for _ in range(1000)]}, **ERR_STACK_SIZE)
# Test that pushing a MAX_SCRIPT_ELEMENT_SIZE byte stack element is valid, but one longer is not.
add_spender(spenders, "tapscript/pushmaxlimit", leaf="t25", **common, **SINGLE_SIG, failure={"leaf": "t26"}, **ERR_PUSH_LIMIT)
# Test that 999-of-999 multisig works (but 1000-of-1000 triggers stack size limits)
add_spender(spenders, "tapscript/bigmulti", leaf="t33", **common, inputs=big_spend_inputs, num=999, failure={"leaf": "t34", "num": 1000}, **ERR_STACK_SIZE)
# Test that the CLEANSTACK rule is consensus critical in tapscript
add_spender(spenders, "tapscript/cleanstack", leaf="t36", tap=tap, inputs=[b'\x01'], failure={"inputs": [b'\x01', b'\x01']}, **ERR_CLEANSTACK)
# == Test for sigops ratio limit ==
# Given a number n, and a public key pk, functions that produce a (CScript, sigops). Each script takes as
# input a valid signature with the passed pk followed by a dummy push of bytes that are to be dropped, and
# will execute sigops signature checks.
SIGOPS_RATIO_SCRIPTS = [
# n OP_CHECKSIGVERFIYs and 1 OP_CHECKSIG.
lambda n, pk: (CScript([OP_DROP, pk] + [OP_2DUP, OP_CHECKSIGVERIFY] * n + [OP_CHECKSIG]), n + 1),
# n OP_CHECKSIGVERIFYs and 1 OP_CHECKSIGADD, but also one unexecuted OP_CHECKSIGVERIFY.
lambda n, pk: (CScript([OP_DROP, pk, OP_0, OP_IF, OP_2DUP, OP_CHECKSIGVERIFY, OP_ENDIF] + [OP_2DUP, OP_CHECKSIGVERIFY] * n + [OP_2, OP_SWAP, OP_CHECKSIGADD, OP_3, OP_EQUAL]), n + 1),
# n OP_CHECKSIGVERIFYs and 1 OP_CHECKSIGADD, but also one unexecuted OP_CHECKSIG.
lambda n, pk: (CScript([random_bytes(220), OP_2DROP, pk, OP_1, OP_NOTIF, OP_2DUP, OP_CHECKSIG, OP_VERIFY, OP_ENDIF] + [OP_2DUP, OP_CHECKSIGVERIFY] * n + [OP_4, OP_SWAP, OP_CHECKSIGADD, OP_5, OP_EQUAL]), n + 1),
# n OP_CHECKSIGVERFIYs and 1 OP_CHECKSIGADD, but also one unexecuted OP_CHECKSIGADD.
lambda n, pk: (CScript([OP_DROP, pk, OP_1, OP_IF, OP_ELSE, OP_2DUP, OP_6, OP_SWAP, OP_CHECKSIGADD, OP_7, OP_EQUALVERIFY, OP_ENDIF] + [OP_2DUP, OP_CHECKSIGVERIFY] * n + [OP_8, OP_SWAP, OP_CHECKSIGADD, OP_9, OP_EQUAL]), n + 1),
# n+1 OP_CHECKSIGs, but also one OP_CHECKSIG with an empty signature.
lambda n, pk: (CScript([OP_DROP, OP_0, pk, OP_CHECKSIG, OP_NOT, OP_VERIFY, pk] + [OP_2DUP, OP_CHECKSIG, OP_VERIFY] * n + [OP_CHECKSIG]), n + 1),
# n OP_CHECKSIGADDs and 1 OP_CHECKSIG, but also an OP_CHECKSIGADD with an empty signature.
lambda n, pk: (CScript([OP_DROP, OP_0, OP_10, pk, OP_CHECKSIGADD, OP_10, OP_EQUALVERIFY, pk] + [OP_2DUP, OP_16, OP_SWAP, OP_CHECKSIGADD, b'\x11', OP_EQUALVERIFY] * n + [OP_CHECKSIG]), n + 1),
]
for annex in [None, bytes([ANNEX_TAG]) + random_bytes(random.randrange(1000))]:
for hashtype in [SIGHASH_DEFAULT, SIGHASH_ALL]:
for pubkey in [pubs[1], random_bytes(random.choice([x for x in range(2, 81) if x != 32]))]:
for fn_num, fn in enumerate(SIGOPS_RATIO_SCRIPTS):
merkledepth = random.randrange(129)
def predict_sigops_ratio(n, dummy_size):
    """Predict whether spending fn(n, pubkey) with dummy_size will pass the ratio test.

    Returns True when the estimated witness size (plus the 50-byte allowance)
    covers 50 weight units per signature-check operation.
    """
    script, sigops = fn(n, pubkey)
    # Predict the size of the witness for a given choice of n
    stacklen_size = 1
    # 64-byte signature, plus one extra byte for an explicit (non-default) hashtype.
    sig_size = 64 + (hashtype != SIGHASH_DEFAULT)
    siglen_size = 1
    # Length prefixes grow by 2 bytes once the payload reaches 253 bytes.
    dummylen_size = 1 + 2 * (dummy_size >= 253)
    script_size = len(script)
    scriptlen_size = 1 + 2 * (script_size >= 253)
    # Control block: 33 bytes plus 32 bytes per Merkle tree level.
    control_size = 33 + 32 * merkledepth
    controllen_size = 1 + 2 * (control_size >= 253)
    annex_size = 0 if annex is None else len(annex)
    annexlen_size = 0 if annex is None else 1 + 2 * (annex_size >= 253)
    witsize = stacklen_size + sig_size + siglen_size + dummy_size + dummylen_size + script_size + scriptlen_size + control_size + controllen_size + annex_size + annexlen_size
    # sigops ratio test
    return witsize + 50 >= 50 * sigops
# Make sure n is high enough that with empty dummy, the script is not valid
n = 0
while predict_sigops_ratio(n, 0):
n += 1
# But allow picking a bit higher still
n += random.randrange(5)
# Now pick dummy size *just* large enough that the overall construction passes
dummylen = 0
while not predict_sigops_ratio(n, dummylen):
dummylen += 1
scripts = [("s", fn(n, pubkey)[0])]
for _ in range(merkledepth):
scripts = [scripts, random.choice(PARTNER_MERKLE_FN)]
tap = taproot_construct(pubs[0], scripts)
standard = annex is None and dummylen <= 80 and len(pubkey) == 32
add_spender(spenders, "tapscript/sigopsratio_%i" % fn_num, tap=tap, leaf="s", annex=annex, hashtype=hashtype, key=secs[1], inputs=[getter("sign"), random_bytes(dummylen)], standard=standard, failure={"inputs": [getter("sign"), random_bytes(dummylen - 1)]}, **ERR_SIGOPS_RATIO)
# Future leaf versions
for leafver in range(0, 0x100, 2):
if leafver == LEAF_VERSION_TAPSCRIPT or leafver == ANNEX_TAG:
# Skip the defined LEAF_VERSION_TAPSCRIPT, and the ANNEX_TAG which is not usable as leaf version
continue
scripts = [
("bare_c0", CScript([OP_NOP])),
("bare_unkver", CScript([OP_NOP]), leafver),
("return_c0", CScript([OP_RETURN])),
("return_unkver", CScript([OP_RETURN]), leafver),
("undecodable_c0", CScript([OP_PUSHDATA1])),
("undecodable_unkver", CScript([OP_PUSHDATA1]), leafver),
("bigpush_c0", CScript([random_bytes(MAX_SCRIPT_ELEMENT_SIZE+1), OP_DROP])),
("bigpush_unkver", CScript([random_bytes(MAX_SCRIPT_ELEMENT_SIZE+1), OP_DROP]), leafver),
("1001push_c0", CScript([OP_0] * 1001)),
("1001push_unkver", CScript([OP_0] * 1001), leafver),
]
random.shuffle(scripts)
tap = taproot_construct(pubs[0], scripts)
add_spender(spenders, "unkver/bare", standard=False, tap=tap, leaf="bare_unkver", failure={"leaf": "bare_c0"}, **ERR_CLEANSTACK)
add_spender(spenders, "unkver/return", standard=False, tap=tap, leaf="return_unkver", failure={"leaf": "return_c0"}, **ERR_OP_RETURN)
add_spender(spenders, "unkver/undecodable", standard=False, tap=tap, leaf="undecodable_unkver", failure={"leaf": "undecodable_c0"}, **ERR_UNDECODABLE)
add_spender(spenders, "unkver/bigpush", standard=False, tap=tap, leaf="bigpush_unkver", failure={"leaf": "bigpush_c0"}, **ERR_PUSH_LIMIT)
add_spender(spenders, "unkver/1001push", standard=False, tap=tap, leaf="1001push_unkver", failure={"leaf": "1001push_c0"}, **ERR_STACK_SIZE)
add_spender(spenders, "unkver/1001inputs", standard=False, tap=tap, leaf="bare_unkver", inputs=[b'']*1001, failure={"leaf": "bare_c0"}, **ERR_STACK_SIZE)
# OP_SUCCESSx tests.
hashtype = lambda _: random.choice(VALID_SIGHASHES_TAPROOT)
for opval in range(76, 0x100):
opcode = CScriptOp(opval)
if not is_op_success(opcode):
continue
scripts = [
("bare_success", CScript([opcode])),
("bare_nop", CScript([OP_NOP])),
("unexecif_success", CScript([OP_0, OP_IF, opcode, OP_ENDIF])),
("unexecif_nop", CScript([OP_0, OP_IF, OP_NOP, OP_ENDIF])),
("return_success", CScript([OP_RETURN, opcode])),
("return_nop", CScript([OP_RETURN, OP_NOP])),
("undecodable_success", CScript([opcode, OP_PUSHDATA1])),
("undecodable_nop", CScript([OP_NOP, OP_PUSHDATA1])),
("undecodable_bypassed_success", CScript([OP_PUSHDATA1, OP_2, opcode])),
("bigpush_success", CScript([random_bytes(MAX_SCRIPT_ELEMENT_SIZE+1), OP_DROP, opcode])),
("bigpush_nop", CScript([random_bytes(MAX_SCRIPT_ELEMENT_SIZE+1), OP_DROP, OP_NOP])),
("1001push_success", CScript([OP_0] * 1001 + [opcode])),
("1001push_nop", CScript([OP_0] * 1001 + [OP_NOP])),
]
random.shuffle(scripts)
tap = taproot_construct(pubs[0], scripts)
add_spender(spenders, "opsuccess/bare", standard=False, tap=tap, leaf="bare_success", failure={"leaf": "bare_nop"}, **ERR_CLEANSTACK)
add_spender(spenders, "opsuccess/unexecif", standard=False, tap=tap, leaf="unexecif_success", failure={"leaf": "unexecif_nop"}, **ERR_CLEANSTACK)
add_spender(spenders, "opsuccess/return", standard=False, tap=tap, leaf="return_success", failure={"leaf": "return_nop"}, **ERR_OP_RETURN)
add_spender(spenders, "opsuccess/undecodable", standard=False, tap=tap, leaf="undecodable_success", failure={"leaf": "undecodable_nop"}, **ERR_UNDECODABLE)
add_spender(spenders, "opsuccess/undecodable_bypass", standard=False, tap=tap, leaf="undecodable_success", failure={"leaf": "undecodable_bypassed_success"}, **ERR_UNDECODABLE)
add_spender(spenders, "opsuccess/bigpush", standard=False, tap=tap, leaf="bigpush_success", failure={"leaf": "bigpush_nop"}, **ERR_PUSH_LIMIT)
add_spender(spenders, "opsuccess/1001push", standard=False, tap=tap, leaf="1001push_success", failure={"leaf": "1001push_nop"}, **ERR_STACK_SIZE)
add_spender(spenders, "opsuccess/1001inputs", standard=False, tap=tap, leaf="bare_success", inputs=[b'']*1001, failure={"leaf": "bare_nop"}, **ERR_STACK_SIZE)
# Non-OP_SUCCESSx (verify that those aren't accidentally treated as OP_SUCCESSx)
for opval in range(0, 0x100):
opcode = CScriptOp(opval)
if is_op_success(opcode):
continue
scripts = [
("normal", CScript([OP_RETURN, opcode] + [OP_NOP] * 75)),
("op_success", CScript([OP_RETURN, CScriptOp(0x50)]))
]
tap = taproot_construct(pubs[0], scripts)
add_spender(spenders, "alwaysvalid/notsuccessx", tap=tap, leaf="op_success", inputs=[], standard=False, failure={"leaf": "normal"}) # err_msg differs based on opcode
# == Legacy tests ==
# Also add a few legacy spends into the mix, so that transactions which combine taproot and pre-taproot spends get tested too.
for compressed in [False, True]:
eckey1 = ECKey()
eckey1.set(generate_privkey(), compressed)
pubkey1 = eckey1.get_pubkey().get_bytes()
eckey2 = ECKey()
eckey2.set(generate_privkey(), compressed)
for p2sh in [False, True]:
for witv0 in [False, True]:
for hashtype in VALID_SIGHASHES_ECDSA + [random.randrange(0x04, 0x80), random.randrange(0x84, 0x100)]:
standard = (hashtype in VALID_SIGHASHES_ECDSA) and (compressed or not witv0)
add_spender(spenders, "legacy/pk-wrongkey", hashtype=hashtype, p2sh=p2sh, witv0=witv0, standard=standard, script=CScript([pubkey1, OP_CHECKSIG]), **SINGLE_SIG, key=eckey1, failure={"key": eckey2}, sigops_weight=4-3*witv0, **ERR_NO_SUCCESS)
add_spender(spenders, "legacy/pkh-sighashflip", hashtype=hashtype, p2sh=p2sh, witv0=witv0, standard=standard, pkh=pubkey1, key=eckey1, **SIGHASH_BITFLIP, sigops_weight=4-3*witv0, **ERR_NO_SUCCESS)
# Verify that OP_CHECKSIGADD wasn't accidentally added to pre-taproot validation logic.
for p2sh in [False, True]:
for witv0 in [False, True]:
for hashtype in VALID_SIGHASHES_ECDSA + [random.randrange(0x04, 0x80), random.randrange(0x84, 0x100)]:
standard = hashtype in VALID_SIGHASHES_ECDSA and (p2sh or witv0)
add_spender(spenders, "compat/nocsa", hashtype=hashtype, p2sh=p2sh, witv0=witv0, standard=standard, script=CScript([OP_IF, OP_11, pubkey1, OP_CHECKSIGADD, OP_12, OP_EQUAL, OP_ELSE, pubkey1, OP_CHECKSIG, OP_ENDIF]), key=eckey1, sigops_weight=4-3*witv0, inputs=[getter("sign"), b''], failure={"inputs": [getter("sign"), b'\x01']}, **ERR_UNDECODABLE)
return spenders
def spenders_taproot_inactive():
    """Spenders for testing that pre-activation Taproot rules don't apply.

    Builds a small taproot tree (a plain pk leaf, a future leaf version, and a
    leaf containing an unexecuted OP_SUCCESSx opcode) and returns spenders that
    must all be accepted by consensus — but be non-standard — while Taproot is
    inactive, regardless of signature/control-block validity.
    """
    spenders = []
    sec = generate_privkey()
    pub, _ = compute_xonly_pubkey(sec)
    scripts = [
        ("pk", CScript([pub, OP_CHECKSIG])),
        ("future_leaf", CScript([pub, OP_CHECKSIG]), 0xc2),
        ("op_success", CScript([pub, OP_CHECKSIG, OP_0, OP_IF, CScriptOp(0x50), OP_ENDIF])),
    ]
    tap = taproot_construct(pub, scripts)

    # Test that keypath spending is valid & non-standard, regardless of validity.
    add_spender(spenders, "inactive/keypath_valid", key=sec, tap=tap, standard=False)
    add_spender(spenders, "inactive/keypath_invalidsig", key=sec, tap=tap, standard=False, sighash=bitflipper(default_sighash))
    add_spender(spenders, "inactive/keypath_empty", key=sec, tap=tap, standard=False, witness=[])

    # Same for scriptpath spending (and features like annex, leaf versions, or OP_SUCCESS don't change this)
    add_spender(spenders, "inactive/scriptpath_valid", key=sec, tap=tap, leaf="pk", standard=False, inputs=[getter("sign")])
    add_spender(spenders, "inactive/scriptpath_invalidsig", key=sec, tap=tap, leaf="pk", standard=False, inputs=[getter("sign")], sighash=bitflipper(default_sighash))
    add_spender(spenders, "inactive/scriptpath_invalidcb", key=sec, tap=tap, leaf="pk", standard=False, inputs=[getter("sign")], controlblock=bitflipper(default_controlblock))
    add_spender(spenders, "inactive/scriptpath_valid_unkleaf", key=sec, tap=tap, leaf="future_leaf", standard=False, inputs=[getter("sign")])
    add_spender(spenders, "inactive/scriptpath_invalid_unkleaf", key=sec, tap=tap, leaf="future_leaf", standard=False, inputs=[getter("sign")], sighash=bitflipper(default_sighash))
    add_spender(spenders, "inactive/scriptpath_valid_opsuccess", key=sec, tap=tap, leaf="op_success", standard=False, inputs=[getter("sign")])
    # BUGFIX: this spender was mislabeled "inactive/scriptpath_valid_opsuccess", duplicating the
    # label of the previous (valid) spender. It uses a bitflipped sighash, so — following the
    # valid/invalid naming pattern used for every other pair above — it is the invalid variant.
    add_spender(spenders, "inactive/scriptpath_invalid_opsuccess", key=sec, tap=tap, leaf="op_success", standard=False, inputs=[getter("sign")], sighash=bitflipper(default_sighash))

    return spenders
# Consensus validation flags to use in dumps for tests with "legacy/" or "inactive/" prefix
# (pre-Taproot validation rules only).
LEGACY_FLAGS = "P2SH,DERSIG,CHECKLOCKTIMEVERIFY,CHECKSEQUENCEVERIFY,WITNESS,NULLDUMMY"
# Consensus validation flags to use in dumps for all other tests
# (identical to LEGACY_FLAGS with TAPROOT added).
TAPROOT_FLAGS = "P2SH,DERSIG,CHECKLOCKTIMEVERIFY,CHECKSEQUENCEVERIFY,WITNESS,NULLDUMMY,TAPROOT"
def dump_json_test(tx, input_utxos, idx, success, failure):
    """Dump one JSON test vector for input `idx` of `tx` under $TEST_DUMP_DIR.

    `success`/`failure` are optional (scriptSig, witness stack) pairs for the
    expected-valid and expected-invalid spends; either may be None.
    """
    spender = input_utxos[idx].spender

    # Tests with a "legacy/" or "inactive/" comment prefix are validated without the TAPROOT flag.
    uses_legacy_rules = spender.comment.startswith("legacy/") or spender.comment.startswith("inactive/")
    flags = LEGACY_FLAGS if uses_legacy_rules else TAPROOT_FLAGS

    def witness_fields(wit):
        """Encode a (scriptSig, witness stack) pair as an ordered JSON object."""
        return OrderedDict([
            ("scriptSig", wit[0].hex()),
            ("witness", [elem.hex() for elem in wit[1]]),
        ])

    fields = [
        ("tx", tx.serialize().hex()),
        ("prevouts", [utxo.output.serialize().hex() for utxo in input_utxos]),
        ("index", idx),
        ("flags", flags),
        ("comment", spender.comment),
    ]
    # The "final" field indicates that a spend should be always valid, even with more validation
    # flags enabled than the listed ones. Use standardness as a proxy for this (which gives a
    # conservative underestimate).
    if spender.is_standard:
        fields.append(("final", True))
    if success is not None:
        fields.append(("success", witness_fields(success)))
    if failure is not None:
        fields.append(("failure", witness_fields(failure)))

    # Write the dump to $TEST_DUMP_DIR/x/xyz... where x,y,z,... are the SHA1 sum of the dump
    # (which makes the file naming scheme compatible with fuzzing infrastructure).
    dump = json.dumps(OrderedDict(fields)) + ",\n"
    sha1 = hashlib.sha1(dump.encode("utf-8")).hexdigest()
    dirname = os.environ.get("TEST_DUMP_DIR", ".") + ("/%s" % sha1[0])
    os.makedirs(dirname, exist_ok=True)
    with open(dirname + ("/%s" % sha1), 'w', encoding="utf8") as f:
        f.write(dump)
# Data type to keep track of UTXOs, where they were created, and how to spend them.
# Fields: outpoint (location of the UTXO), output (the created output; has a serialize() method,
# see dump_json_test), spender (the Spender object that knows how to spend it).
UTXOData = namedtuple('UTXOData', 'outpoint,output,spender')
class TaprootTest(BitcoinTestFramework):
def add_options(self, parser):
    """Register the --dumptests command-line option on the framework's parser."""
    parser.add_argument(
        "--dumptests",
        dest="dump_tests",
        default=False,
        action="store_true",
        help="Dump generated test cases to directory set by TEST_DUMP_DIR environment variable",
    )
def skip_test_if_missing_module(self):
    """Skip this test when the build has no wallet support."""
    self.skip_if_no_wallet()
def set_test_params(self):
    """Configure a fresh two-node chain with single-threaded script validation."""
    self.num_nodes = 2
    self.setup_clean_chain = True
    # Node 0 has Taproot inactive, Node 1 active.
    # (-vbparams=taproot:1:1 forces the deployment to never activate; -par=1 gives
    # deterministic single-threaded validation.)
    self.extra_args = [["-par=1", "-vbparams=taproot:1:1"], ["-par=1"]]
def block_submit(self, node, txs, msg, err_msg, cb_pubkey=None, fees=0, sigops_weight=0, witness=False, accept=False):
# Deplete block of any non-tapscript sigops using a single additional 0-value coinbase output.
# It is not impossible to fit enough tapscript sigops to hit the old 80k limit without
# busting txin-level limits. We simply have to account for the p2pk outputs in all
# transactions.
extra_output_script = CScript([OP_CHECKSIG]*((MAX_BLOCK_SIGOPS_WEIGHT - sigops_weight) // WITNESS_SCALE_FACTOR))
block = create_block(self.tip, create_coinbase(self.lastblockheight + 1, pubkey=cb_pubkey, extra_output_script=extra_output_script, fees=fees), self.lastblocktime + 1)
block.nVersion = 4
for tx in txs:
tx.rehash()
block.vtx.append(tx)
block.hashMerkleRoot = block.calc_merkle_root()
witness and add_witness_commitment(block)
block.rehash()
block.solve()
block_response = node.submitblock(block.serialize().hex())
if err_msg is not None:
assert block_response is not None and err_msg in block_response, "Missing error message '%s' from block response '%s': %s" % (err_msg, "(None)" if block_response is None else block_response, msg)
if (accept):
assert node.getbestblockhash() == block.hash, "Failed to accept: %s (response: %s)" % (msg, block_response)
self.tip = block.sha256
self.lastblockhash = block.hash
self.lastblocktime += 1
self.lastblockheight += 1
else:
assert node.getbestblockhash() == self.lastblockhash, "Failed to reject: " + msg
def test_spenders(self, node, spenders, input_counts):
"""Run randomized tests with a number of "spenders".
Steps:
1) Generate an appropriate UTXO for each spender to test spend conditions
2) Generate 100 random addresses of all wallet types: pkh/sh_wpkh/wpkh
3) Select random number of inputs from (1)
4) Select random number of addresses from (2) as outputs
Each spender embodies a test; in a large randomized test, it is verified
that toggling the valid argument to each lambda toggles the validity of
the transaction. This is accomplished by constructing transactions consisting
of all valid inputs, except one invalid one.
"""
# Construct a bunch of sPKs that send coins back to the host wallet
self.log.info("- Constructing addresses for returning coins")
host_spks = []
host_pubkeys = []
for i in range(16):
addr = node.getnewaddress(address_type=random.choice(["legacy", "p2sh-segwit", "bech32"]))
info = node.getaddressinfo(addr)
spk = bytes.fromhex(info['scriptPubKey'])
host_spks.append(spk)
host_pubkeys.append(bytes.fromhex(info['pubkey']))
# Initialize variables used by block_submit().
self.lastblockhash = node.getbestblockhash()
self.tip = int(self.lastblockhash, 16)
block = node.getblock(self.lastblockhash)
self.lastblockheight = block['height']
self.lastblocktime = block['time']
# Create transactions spending up to 50 of the wallet's inputs, with one output for each spender, and
# one change output at the end. The transaction is constructed on the Python side to enable
# having multiple outputs to the same address and outputs with no assigned address. The wallet
# is then asked to sign it through signrawtransactionwithwallet, and then added to a block on the
# Python side (to bypass standardness rules).
self.log.info("- Creating test UTXOs...")
random.shuffle(spenders)
normal_utxos = []
mismatching_utxos = [] # UTXOs with input that requires mismatching output position
done = 0
while done < len(spenders):
# Compute how many UTXOs to create with this transaction
count_this_tx = min(len(spenders) - done, (len(spenders) + 4) // 5, 10000)
fund_tx = CTransaction()
# Add the 50 highest-value inputs
unspents = node.listunspent()
random.shuffle(unspents)
unspents.sort(key=lambda x: int(x["amount"] * 100000000), reverse=True)
if len(unspents) > 50:
unspents = unspents[:50]
random.shuffle(unspents)
balance = 0
for unspent in unspents:
balance += int(unspent["amount"] * 100000000)
txid = int(unspent["txid"], 16)
fund_tx.vin.append(CTxIn(COutPoint(txid, int(unspent["vout"])), CScript()))
# Add outputs
cur_progress = done / len(spenders)
next_progress = (done + count_this_tx) / len(spenders)
change_goal = (1.0 - 0.6 * next_progress) / (1.0 - 0.6 * cur_progress) * balance
self.log.debug("Create %i UTXOs in a transaction spending %i inputs worth %.8f (sending ~%.8f to change)" % (count_this_tx, len(unspents), balance * 0.00000001, change_goal * 0.00000001))
for i in range(count_this_tx):
avg = (balance - change_goal) / (count_this_tx - i)
amount = int(random.randrange(int(avg*0.85 + 0.5), int(avg*1.15 + 0.5)) + 0.5)
balance -= amount
fund_tx.vout.append(CTxOut(amount, spenders[done + i].script))
# Add change
fund_tx.vout.append(CTxOut(balance - 10000, random.choice(host_spks)))
# Ask the wallet to sign
ss = BytesIO(bytes.fromhex(node.signrawtransactionwithwallet(ToHex(fund_tx))["hex"]))
fund_tx.deserialize(ss)
# Construct UTXOData entries
fund_tx.rehash()
for i in range(count_this_tx):
utxodata = UTXOData(outpoint=COutPoint(fund_tx.sha256, i), output=fund_tx.vout[i], spender=spenders[done])
if utxodata.spender.need_vin_vout_mismatch:
mismatching_utxos.append(utxodata)
else:
normal_utxos.append(utxodata)
done += 1
# Mine into a block
self.block_submit(node, [fund_tx], "Funding tx", None, random.choice(host_pubkeys), 10000, MAX_BLOCK_SIGOPS_WEIGHT, True, True)
# Consume groups of choice(input_coins) from utxos in a tx, testing the spenders.
self.log.info("- Running %i spending tests" % done)
random.shuffle(normal_utxos)
random.shuffle(mismatching_utxos)
assert done == len(normal_utxos) + len(mismatching_utxos)
left = done
while left:
# Construct CTransaction with random nVersion, nLocktime
tx = CTransaction()
tx.nVersion = random.choice([1, 2, random.randint(-0x80000000, 0x7fffffff)])
min_sequence = (tx.nVersion != 1 and tx.nVersion != 0) * 0x80000000 # The minimum sequence number to disable relative locktime
if random.choice([True, False]):
tx.nLockTime = random.randrange(LOCKTIME_THRESHOLD, self.lastblocktime - 7200) # all absolute locktimes in the past
else:
tx.nLockTime = random.randrange(self.lastblockheight + 1) # all block heights in the past
# Decide how many UTXOs to test with.
acceptable = [n for n in input_counts if n <= left and (left - n > max(input_counts) or (left - n) in [0] + input_counts)]
num_inputs = random.choice(acceptable)
# If we have UTXOs that require mismatching inputs/outputs left, include exactly one of those
# unless there is only one normal UTXO left (as tests with mismatching UTXOs require at least one
# normal UTXO to go in the first position), and we don't want to run out of normal UTXOs.
input_utxos = []
while len(mismatching_utxos) and (len(input_utxos) == 0 or len(normal_utxos) == 1):
input_utxos.append(mismatching_utxos.pop())
left -= 1
# Top up until we hit num_inputs (but include at least one normal UTXO always).
for _ in range(max(1, num_inputs - len(input_utxos))):
input_utxos.append(normal_utxos.pop())
left -= 1
# The first input cannot require a mismatching output (as there is at least one output).
while True:
random.shuffle(input_utxos)
if not input_utxos[0].spender.need_vin_vout_mismatch:
break
first_mismatch_input = None
for i in range(len(input_utxos)):
if input_utxos[i].spender.need_vin_vout_mismatch:
first_mismatch_input = i
assert first_mismatch_input is None or first_mismatch_input > 0
# Decide fee, and add CTxIns to tx.
amount = sum(utxo.output.nValue for utxo in input_utxos)
fee = min(random.randrange(MIN_FEE * 2, MIN_FEE * 4), amount - DUST_LIMIT) # 10000-20000 sat fee
in_value = amount - fee
tx.vin = [CTxIn(outpoint=utxo.outpoint, nSequence=random.randint(min_sequence, 0xffffffff)) for utxo in input_utxos]
tx.wit.vtxinwit = [CTxInWitness() for _ in range(len(input_utxos))]
sigops_weight = sum(utxo.spender.sigops_weight for utxo in input_utxos)
self.log.debug("Test: %s" % (", ".join(utxo.spender.comment for utxo in input_utxos)))
# Add 1 to 4 random outputs (but constrained by inputs that require mismatching outputs)
num_outputs = random.choice(range(1, 1 + min(4, 4 if first_mismatch_input is None else first_mismatch_input)))
assert in_value >= 0 and fee - num_outputs * DUST_LIMIT >= MIN_FEE
for i in range(num_outputs):
tx.vout.append(CTxOut())
if in_value <= DUST_LIMIT:
tx.vout[-1].nValue = DUST_LIMIT
elif i < num_outputs - 1:
tx.vout[-1].nValue = in_value
else:
tx.vout[-1].nValue = random.randint(DUST_LIMIT, in_value)
in_value -= tx.vout[-1].nValue
tx.vout[-1].scriptPubKey = random.choice(host_spks)
sigops_weight += CScript(tx.vout[-1].scriptPubKey).GetSigOpCount(False) * WITNESS_SCALE_FACTOR
fee += in_value
assert fee >= 0
# Select coinbase pubkey
cb_pubkey = random.choice(host_pubkeys)
sigops_weight += 1 * WITNESS_SCALE_FACTOR
# Precompute one satisfying and one failing scriptSig/witness for each input.
input_data = []
for i in range(len(input_utxos)):
fn = input_utxos[i].spender.sat_function
fail = None
success = fn(tx, i, [utxo.output for utxo in input_utxos], True)
if not input_utxos[i].spender.no_fail:
fail = fn(tx, i, [utxo.output for utxo in input_utxos], False)
input_data.append((fail, success))
if self.options.dump_tests:
dump_json_test(tx, input_utxos, i, success, fail)
# Sign each input incorrectly once on each complete signing pass, except the very last.
for fail_input in list(range(len(input_utxos))) + [None]:
# Skip trying to fail at spending something that can't be made to fail.
if fail_input is not None and input_utxos[fail_input].spender.no_fail:
continue
# Expected message with each input failure, may be None(which is ignored)
expected_fail_msg = None if fail_input is None else input_utxos[fail_input].spender.err_msg
# Fill inputs/witnesses
for i in range(len(input_utxos)):
tx.vin[i].scriptSig = input_data[i][i != fail_input][0]
tx.wit.vtxinwit[i].scriptWitness.stack = input_data[i][i != fail_input][1]
# Submit to mempool to check standardness
is_standard_tx = fail_input is None and all(utxo.spender.is_standard for utxo in input_utxos) and tx.nVersion >= 1 and tx.nVersion <= 2
tx.rehash()
msg = ','.join(utxo.spender.comment + ("*" if n == fail_input else "") for n, utxo in enumerate(input_utxos))
if is_standard_tx:
node.sendrawtransaction(tx.serialize().hex(), 0)
assert node.getmempoolentry(tx.hash) is not None, "Failed to accept into mempool: " + msg
else:
assert_raises_rpc_error(-26, None, node.sendrawtransaction, tx.serialize().hex(), 0)
# Submit in a block
self.block_submit(node, [tx], msg, witness=True, accept=fail_input is None, cb_pubkey=cb_pubkey, fees=fee, sigops_weight=sigops_weight, err_msg=expected_fail_msg)
if (len(spenders) - left) // 200 > (len(spenders) - left - len(input_utxos)) // 200:
self.log.info(" - %i tests done" % (len(spenders) - left))
assert left == 0
assert len(normal_utxos) == 0
assert len(mismatching_utxos) == 0
self.log.info(" - Done")
def run_test(self):
# Post-taproot activation tests go first (pre-taproot tests' blocks are invalid post-taproot).
self.log.info("Post-activation tests...")
self.nodes[1].generate(101)
self.test_spenders(self.nodes[1], spenders_taproot_active(), input_counts=[1, 2, 2, 2, 2, 3])
# Re-connect nodes in case they have been disconnected
self.disconnect_nodes(0, 1)
self.connect_nodes(0, 1)
# Transfer value of the largest 500 coins to pre-taproot node.
addr = self.nodes[0].getnewaddress()
unsp = self.nodes[1].listunspent()
unsp = sorted(unsp, key=lambda i: i['amount'], reverse=True)
unsp = unsp[:500]
rawtx = self.nodes[1].createrawtransaction(
inputs=[{
'txid': i['txid'],
'vout': i['vout']
} for i in unsp],
outputs={addr: sum(i['amount'] for i in unsp)}
)
rawtx = self.nodes[1].signrawtransactionwithwallet(rawtx)['hex']
# Mine a block with the transaction
block = create_block(tmpl=self.nodes[1].getblocktemplate(NORMAL_GBT_REQUEST_PARAMS), txlist=[rawtx])
add_witness_commitment(block)
block.rehash()
block.solve()
assert_equal(None, self.nodes[1].submitblock(block.serialize().hex()))
self.sync_blocks()
# Pre-taproot activation tests.
self.log.info("Pre-activation tests...")
# Run each test twice; once in isolation, and once combined with others. Testing in isolation
# means that the standardness is verified in every test (as combined transactions are only standard
# when all their inputs are standard).
self.test_spenders(self.nodes[0], spenders_taproot_inactive(), input_counts=[1])
self.test_spenders(self.nodes[0], spenders_taproot_inactive(), input_counts=[2, 3])
# Script entry point: run the functional test when invoked directly.
if __name__ == '__main__':
    TaprootTest().main()
| {
"content_hash": "29a605d91b0a41cf53a661be7814d173",
"timestamp": "",
"source": "github",
"line_count": 1479,
"max_line_length": 363,
"avg_line_length": 58.98242055442867,
"alnum_prop": 0.6469307044190978,
"repo_name": "jnewbery/bitcoin",
"id": "6ee2b72c114b351c7cef1faefa1048df73a87db2",
"size": "87489",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "test/functional/feature_taproot.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28173"
},
{
"name": "C",
"bytes": "959143"
},
{
"name": "C++",
"bytes": "8134257"
},
{
"name": "CMake",
"bytes": "29132"
},
{
"name": "HTML",
"bytes": "21833"
},
{
"name": "M4",
"bytes": "218255"
},
{
"name": "Makefile",
"bytes": "124030"
},
{
"name": "Objective-C",
"bytes": "113876"
},
{
"name": "Objective-C++",
"bytes": "5497"
},
{
"name": "Python",
"bytes": "2246986"
},
{
"name": "QMake",
"bytes": "798"
},
{
"name": "Sage",
"bytes": "35184"
},
{
"name": "Scheme",
"bytes": "9339"
},
{
"name": "Shell",
"bytes": "166312"
}
],
"symlink_target": ""
} |
# Run the shared behavior-set helper script first; presumably this provides
# common helpers such as createRetargetInstance/reachSetup used below -- verify.
scene.run("BehaviorSetCommon.py")
def setupBehaviorSet():
    """Load and joint-map the mocap reaching behavior set (Python 2 / SmartBody).

    Ensures the ChrGarza.sk skeleton is loaded and zebra2-mapped, loads all
    right-arm reach/grasp motions, and mirrors each one to produce the
    corresponding left-arm motion.
    """
    print("Setting up behavior set for gestures...")
    scene.addAssetPath("script", "behaviorsets/MocapReaching/scripts")
    assetManager = scene.getAssetManager()
    motionPath = "behaviorsets/MocapReaching/motions/"
    # Load the skeleton assets only if ChrGarza.sk is not already present.
    skel = scene.getSkeleton("ChrGarza.sk")
    if skel is None:
        scene.loadAssetsFromPath("behaviorsets/MocapReaching/skeletons")
    # Map the zebra2 skeleton onto ChrGarza.sk.
    scene.run("zebra2-map.py")
    zebra2Map = scene.getJointMapManager().getJointMap("zebra2")
    garzaSkeleton = scene.getSkeleton("ChrGarza.sk")
    zebra2Map.applySkeleton(garzaSkeleton)
    # Reach motion name suffixes shared by the right- and left-arm sets; each
    # left-arm name is the same suffix with an extra "L" prefix. Keeping one
    # list guarantees the mirror loop below pairs R[i] with its L[i] twin.
    reachSuffixes = [
        "ReachBackFloor01", "ReachBackHigh01", "ReachBackLow01",
        "ReachBackMediumFar01", "ReachBackMediumMid01", "ReachBackMediumNear01",
        "ReachForwardFloor01", "ReachForwardHigh01", "ReachForwardLow01",
        "ReachForwardMediumFar01", "ReachForwardMediumMid01",
        "ReachLeft30Floor01", "ReachLeft30High01", "ReachLeft30Low01",
        "ReachLeft30MediumFar01", "ReachLeft30MediumMid01", "ReachLeft30MediumNear01",
        "ReachLeft60Floor01", "ReachLeft60High01", "ReachLeft60Low01",
        "ReachLeft60MediumFar01", "ReachLeft60MediumMid01", "ReachLeft60MediumNear01",
        "ReachRight120Floor01", "ReachRight120High01", "ReachRight120Low01",
        "ReachRight120MediumFar01", "ReachRight120MediumMid01", "ReachRight120MediumNear01",
        "ReachRight30Floor01", "ReachRight30High01", "ReachRight30Low01",
        "ReachRight30MediumFar01", "ReachRight30MediumMid01", "ReachRight30MediumNear01",
    ]
    mocapRReachMotions = StringVec()
    mocapLReachMotions = StringVec()
    for suffix in reachSuffixes:
        mocapRReachMotions.append("ChrGarza@IdleStand01_" + suffix)
        mocapLReachMotions.append("ChrGarza@IdleStand01_L" + suffix)
    # Grasp hand-shape motions and the "you" gesture, in matching R/L order.
    for phase in ["Grasp", "Reach", "Release"]:
        mocapRReachMotions.append("ChrHarmony_Relax001_HandGraspSmSphere_" + phase)
        mocapLReachMotions.append("ChrHarmony_Relax001_LHandGraspSmSphere_" + phase)
    mocapRReachMotions.append("HandsAtSide_RArm_GestureYou")
    mocapLReachMotions.append("HandsAtSide_LArm_GestureYou")
    # Mirror the right hand motions to the left hand side.
    for i in range(0, len(mocapRReachMotions)):
        motion = scene.getMotion(mocapRReachMotions[i])
        if motion is None:
            # Lazily load the .skm asset, then fetch the motion again.
            assetManager.loadAsset(motionPath + mocapRReachMotions[i] + '.skm')
            motion = scene.getMotion(mocapRReachMotions[i])
        if motion is not None:
            motion.setMotionSkeletonName("ChrGarza.sk")
            zebra2Map.applyMotion(motion)
            mirrorMotion = scene.getMotion(mocapRReachMotions[i])
            mirrorMotion.mirror(mocapLReachMotions[i], "ChrGarza.sk")
    # The base idle motion is not mirrored, only joint-mapped.
    zebra2Map.applyMotion(scene.getMotion("ChrGarza@IdleStand01"))
def retargetBehaviorSet(charName):
    """Retarget the mocap reaching behavior set onto character charName.

    Tags the Garza reach motions and the Harmony grasp motions with their
    source skeletons, creates an online retarget instance to the character's
    own skeleton, and runs the mocap reach setup for the character.
    """
    # Reach motion name suffixes; the left-arm variants are the same suffixes
    # with an extra "L" prefix (see setupBehaviorSet for the same convention).
    reachSuffixes = [
        "ReachBackFloor01", "ReachBackHigh01", "ReachBackLow01",
        "ReachBackMediumFar01", "ReachBackMediumMid01", "ReachBackMediumNear01",
        "ReachForwardFloor01", "ReachForwardHigh01", "ReachForwardLow01",
        "ReachForwardMediumFar01", "ReachForwardMediumMid01",
        "ReachLeft30Floor01", "ReachLeft30High01", "ReachLeft30Low01",
        "ReachLeft30MediumFar01", "ReachLeft30MediumMid01", "ReachLeft30MediumNear01",
        "ReachLeft60Floor01", "ReachLeft60High01", "ReachLeft60Low01",
        "ReachLeft60MediumFar01", "ReachLeft60MediumMid01", "ReachLeft60MediumNear01",
        "ReachRight120Floor01", "ReachRight120High01", "ReachRight120Low01",
        "ReachRight120MediumFar01", "ReachRight120MediumMid01", "ReachRight120MediumNear01",
        "ReachRight30Floor01", "ReachRight30High01", "ReachRight30Low01",
        "ReachRight30MediumFar01", "ReachRight30MediumMid01", "ReachRight30MediumNear01",
    ]
    mocapReachMotions = StringVec()
    mocapReachMotions.append("ChrGarza@IdleStand01")
    for suffix in reachSuffixes:
        mocapReachMotions.append("ChrGarza@IdleStand01_" + suffix)
    for suffix in reachSuffixes:
        mocapReachMotions.append("ChrGarza@IdleStand01_L" + suffix)
    # Grasp hand-shape motions (common.sk skeleton) plus the "you" gestures.
    grabMotions = StringVec()
    for phase in ["Grasp", "Reach", "Release"]:
        grabMotions.append("ChrHarmony_Relax001_HandGraspSmSphere_" + phase)
        grabMotions.append("ChrHarmony_Relax001_LHandGraspSmSphere_" + phase)
    grabMotions.append("HandsAtSide_LArm_GestureYou")
    grabMotions.append("HandsAtSide_RArm_GestureYou")
    sbChar = scene.getCharacter(charName)
    if sbChar is None:
        return
    skelName = sbChar.getSkeleton().getName()
    # Online retargeting from the Garza skeleton to the character's skeleton
    # (replaces the old offline per-motion retargeting pass).
    createRetargetInstance('ChrGarza.sk', skelName)
    assetManager = scene.getAssetManager()
    for i in range(0, len(mocapReachMotions)):
        sbMotion = assetManager.getMotion(mocapReachMotions[i])
        if sbMotion is not None:
            sbMotion.setMotionSkeletonName('ChrGarza.sk')
    for i in range(0, len(grabMotions)):
        sbMotion = assetManager.getMotion(grabMotions[i])
        if sbMotion is not None:
            sbMotion.setMotionSkeletonName('common.sk')
    scene.run("init-example-reach-mocap.py")
    reachSetup(charName, "KNN", 'ChrGarza.sk', '')
| {
"content_hash": "5179b9aa492a94c918d6fcfdaf0567f3",
"timestamp": "",
"source": "github",
"line_count": 247,
"max_line_length": 104,
"avg_line_length": 57.31578947368421,
"alnum_prop": 0.8311082856537402,
"repo_name": "gsi-upm/SmartSim",
"id": "1b1df0aa4131707564558a918f84b4f5a07ef349",
"size": "14157",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "smartbody/data/behaviorsets/BehaviorSetMocapReaching.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "11708"
},
{
"name": "C",
"bytes": "941473"
},
{
"name": "C#",
"bytes": "733730"
},
{
"name": "C++",
"bytes": "16389947"
},
{
"name": "CMake",
"bytes": "114424"
},
{
"name": "D",
"bytes": "175403"
},
{
"name": "GLSL",
"bytes": "45459"
},
{
"name": "Groff",
"bytes": "2619"
},
{
"name": "HTML",
"bytes": "1128698"
},
{
"name": "Inno Setup",
"bytes": "8592"
},
{
"name": "Java",
"bytes": "371478"
},
{
"name": "M4",
"bytes": "16806"
},
{
"name": "Makefile",
"bytes": "240549"
},
{
"name": "Objective-C",
"bytes": "4511"
},
{
"name": "Objective-C++",
"bytes": "29141"
},
{
"name": "Pascal",
"bytes": "13551"
},
{
"name": "Protocol Buffer",
"bytes": "3178"
},
{
"name": "Python",
"bytes": "989019"
},
{
"name": "Rust",
"bytes": "105"
},
{
"name": "Shell",
"bytes": "248995"
},
{
"name": "Smalltalk",
"bytes": "1540"
},
{
"name": "Smarty",
"bytes": "179"
},
{
"name": "XSLT",
"bytes": "3925"
}
],
"symlink_target": ""
} |
import time
from dtest import Tester
from tools.decorators import since
@since("1.2")
class TestCQL(Tester):
    """CQL prepared-statement regression tests."""

    def prepare(self):
        """Spin up a one-node cluster and return a session on keyspace 'ks'."""
        self.cluster.populate(1).start()
        node = self.cluster.nodelist()[0]
        # Brief pause, presumably to let the node finish starting -- confirm.
        time.sleep(0.2)
        session = self.patient_cql_connection(node)
        self.create_ks(session, 'ks', 1)
        return session

    def batch_preparation_test(self):
        """ Test preparation of batch statement (#4202) """
        session = self.prepare()
        session.execute("""
            CREATE TABLE cf (
                k varchar PRIMARY KEY,
                c int,
            )
        """)
        prepared = session.prepare("BEGIN BATCH INSERT INTO cf (k, c) VALUES (?, ?); APPLY BATCH")
        session.execute(prepared, ['foo', 4])
| {
"content_hash": "b2e49700725468427238a3492c1bdb97",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 78,
"avg_line_length": 23.457142857142856,
"alnum_prop": 0.5566382460414129,
"repo_name": "carlyeks/cassandra-dtest",
"id": "2deab8328166801c3d34fa58cbaadb03ce4a7294",
"size": "821",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cql_prepared_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2204314"
},
{
"name": "Shell",
"bytes": "2035"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.