#!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for invite related views.
"""
from soc.logic.exceptions import BadRequest
from soc.modules.gci.models.request import GCIRequest
from tests.profile_utils import GCIProfileHelper
from tests.test_utils import GCIDjangoTestCase
from tests.utils.invite_utils import GCIInviteHelper
class BaseInviteTest(GCIDjangoTestCase):
"""Base class for invite tests.
"""
def _invitee(self):
invitee_data = GCIProfileHelper(self.gci, self.dev_test)
invitee_data.createOtherUser('invitee@example.com')
invitee_data.createProfile()
invitee_data.notificationSettings(new_invites=True)
return invitee_data.profile
class InviteViewTest(BaseInviteTest):
"""Tests user invite views.
"""
def setUp(self):
super(InviteViewTest, self).setUp()
self.init()
def assertInviteTemplatesUsed(self, response):
"""Asserts that all the templates are used.
"""
self.assertGCITemplatesUsed(response)
self.assertTemplateUsed(response, 'v2/modules/gci/invite/base.html')
def testLoggedInCannotInvite(self):
url = self._inviteMentorUrl()
response = self.get(url)
self.assertResponseForbidden(response)
url = self._inviteOrgAdminUrl()
response = self.get(url)
self.assertResponseForbidden(response)
def testUserCannotInvite(self):
self.data.createUser()
url = self._inviteMentorUrl()
response = self.get(url)
self.assertResponseForbidden(response)
url = self._inviteOrgAdminUrl()
response = self.get(url)
self.assertResponseForbidden(response)
def testMentorCannotInvite(self):
self.data.createMentor(self.org)
url = self._inviteMentorUrl()
response = self.get(url)
self.assertResponseForbidden(response)
url = self._inviteOrgAdminUrl()
response = self.get(url)
self.assertResponseForbidden(response)
def testOrgAdminCanInvite(self):
self.data.createOrgAdmin(self.org)
url = self._inviteMentorUrl()
response = self.get(url)
self.assertInviteTemplatesUsed(response)
url = self._inviteOrgAdminUrl()
response = self.get(url)
self.assertInviteTemplatesUsed(response)
def testInviteOrgAdmin(self):
self.data.createOrgAdmin(self.org)
invitee = self._invitee()
post_data = {
'identifiers': invitee.user.link_id,
}
response = self.post(self._inviteOrgAdminUrl(), post_data)
self.assertResponseRedirect(response,
'/gci/dashboard/%s' % self.gci.key().name())
invite = GCIRequest.all().get()
self.assertPropertiesEqual(self._defaultOrgAdminInviteProperties(), invite)
def testInviteMentor(self):
self.data.createOrgAdmin(self.org)
invitee = self._invitee()
post_data = {
'identifiers': invitee.user.link_id,
}
response = self.post(self._inviteMentorUrl(), post_data)
self.assertResponseRedirect(response,
'/gci/dashboard/%s' % self.gci.key().name())
invite = GCIRequest.all().get()
self.assertPropertiesEqual(self._defaultMentorInviteProperties(), invite)
def testSecondInviteForbidden(self):
self.data.createOrgAdmin(self.org)
invitee = self._invitee()
GCIInviteHelper().createMentorInvite(self.org, invitee.user)
post_data = {
'identifiers': invitee.user.link_id,
}
response = self.post(self._inviteMentorUrl(), post_data)
self.assertResponseOK(response)
self._assertFormValidationError(response, 'identifiers')
def testOrgAdminInviteAfterMentorInvite(self):
self.data.createOrgAdmin(self.org)
invitee = self._invitee()
GCIInviteHelper().createMentorInvite(self.org, invitee.user)
post_data = {
'identifiers': invitee.user.link_id,
}
response = self.post(self._inviteOrgAdminUrl(), post_data)
self.assertResponseRedirect(response,
'/gci/dashboard/%s' % self.gci.key().name())
invite = GCIRequest.all().filter('role =', 'org_admin').get()
self.assertPropertiesEqual(self._defaultOrgAdminInviteProperties(), invite)
def testMentorInviteAfterOrgAdminInvite(self):
# TODO(dhans): this test should fail in the future:
# an existing mentor invite should be extended to become an org_admin one
self.data.createOrgAdmin(self.org)
invitee = self._invitee()
GCIInviteHelper().createOrgAdminInvite(self.org, invitee.user)
post_data = {
'identifiers': invitee.user.link_id,
}
response = self.post(self._inviteMentorUrl(), post_data)
self.assertResponseRedirect(response,
'/gci/dashboard/%s' % self.gci.key().name())
invite = GCIRequest.all().filter('role =', 'mentor').get()
self.assertPropertiesEqual(self._defaultMentorInviteProperties(), invite)
def testInviteByEmailAddress(self):
self.data.createOrgAdmin(self.org)
self._invitee()
post_data = {
'identifiers': 'invitee@example.com'
}
response = self.post(self._inviteMentorUrl(), post_data)
self.assertResponseRedirect(response,
'/gci/dashboard/%s' % self.gci.key().name())
invite = GCIRequest.all().get()
self.assertPropertiesEqual(self._defaultMentorInviteProperties(), invite)
def testInviteByEmailAddressNotExistingUserForbidden(self):
self.data.createOrgAdmin(self.org)
post_data = {
'identifiers': 'invitee@example.com'
}
response = self.post(self._inviteMentorUrl(), post_data)
self.assertResponseOK(response)
self._assertFormValidationError(response, 'identifiers')
def testInviteByEmailAndUsername(self):
# TODO(dhans): in a perfect world, only one invite should be sent
self.data.createOrgAdmin(self.org)
invitee = self._invitee()
post_data = {
'identifiers': 'invitee@example.com, %s' % invitee.user.link_id
}
response = self.post(self._inviteMentorUrl(), post_data)
self.assertResponseRedirect(response,
'/gci/dashboard/%s' % self.gci.key().name())
invite = GCIRequest.all().fetch(10)
self.assertEqual(len(invite), 2)
# we would prefer self.assertEqual(len(invite), 1)
def testInviteByInvalidEmailAddressForbidden(self):
self.data.createOrgAdmin(self.org)
self._testInviteInvalidEmailAddress('@example.com')
self._testInviteInvalidEmailAddress('John Smith @example.com')
self._testInviteInvalidEmailAddress('test@example')
self._testInviteInvalidEmailAddress('test@example.com>')
def _testInviteInvalidEmailAddress(self, email):
post_data = {
'identifiers': email
}
response = self.post(self._inviteMentorUrl(), post_data)
self.assertResponseOK(response)
self._assertFormValidationError(response, 'identifiers')
def _invitee(self):
invitee_data = GCIProfileHelper(self.gci, self.dev_test)
invitee_data.createOtherUser('invitee@example.com')
invitee_data.createProfile()
invitee_data.notificationSettings(new_invites=True)
return invitee_data.profile
def _inviteOrgAdminUrl(self):
return '/gci/invite/org_admin/%s' % self.org.key().name()
def _inviteMentorUrl(self):
return '/gci/invite/mentor/%s' % self.org.key().name()
def _defaultMentorInviteProperties(self):
properties = self._defaultInviteProperties()
properties['role'] = 'mentor'
return properties
def _defaultOrgAdminInviteProperties(self):
properties = self._defaultInviteProperties()
properties['role'] = 'org_admin'
return properties
def _defaultInviteProperties(self):
return {
'org': self.org,
'type': 'Invitation',
'status': 'pending',
}
def _assertFormValidationError(self, response, error_field):
assert response.context['form'].errors.get(error_field) is not None
class ManageInviteTest(BaseInviteTest):
"""Tests for Manage Invite views.
"""
def setUp(self):
super(ManageInviteTest, self).setUp()
self.init()
self.invitee = self._invitee()
self.invite = GCIInviteHelper().createOrgAdminInvite(
self.org, self.invitee.user)
def assertInviteTemplatesUsed(self, response):
"""Asserts that all the templates are used.
"""
self.assertGCITemplatesUsed(response)
self.assertTemplateUsed(response, 'v2/modules/gci/invite/base.html')
def testLoggedInCannotManageInvite(self):
url = self._manageInviteUrl(self.invite)
response = self.get(url)
self.assertResponseForbidden(response)
def testUserCannotManageInvite(self):
self.data.createUser()
url = self._manageInviteUrl(self.invite)
response = self.get(url)
self.assertResponseForbidden(response)
def testMentorCannotManageInvite(self):
self.data.createMentor(self.org)
url = self._manageInviteUrl(self.invite)
response = self.get(url)
self.assertResponseForbidden(response)
def testOrgAdminCanInvite(self):
self.data.createOrgAdmin(self.org)
url = self._manageInviteUrl(self.invite)
response = self.get(url)
self.assertInviteTemplatesUsed(response)
def testInvalidPostDataForbidden(self):
self.data.createOrgAdmin(self.org)
# empty post data
post_data = {}
response = self.post(self._manageInviteUrl(self.invite), post_data)
self.assertResponseCode(response, BadRequest.status)
# only invalid data
post_data = {
'invalid_field': ''
}
response = self.post(self._manageInviteUrl(self.invite), post_data)
self.assertResponseCode(response, BadRequest.status)
def testWithdrawInvite(self):
self.data.createOrgAdmin(self.org)
post_data = {
'withdraw': ''
}
response = self.post(self._manageInviteUrl(self.invite), post_data)
self.assertResponseRedirect(response, self._manageInviteUrl(self.invite))
new_invite = GCIRequest.all().get()
self.assertTrue(new_invite.status == 'withdrawn')
def testPendingInviteCannotBeResubmitted(self):
self.data.createOrgAdmin(self.org)
post_data = {
'resubmit': ''
}
response = self.post(self._manageInviteUrl(self.invite), post_data)
self.assertResponseForbidden(response)
def testResubmitInvite(self):
self.data.createOrgAdmin(self.org)
self.invite.status = 'withdrawn'
self.invite.put()
post_data = {
'resubmit': ''
}
response = self.post(self._manageInviteUrl(self.invite), post_data)
self.assertResponseRedirect(response, self._manageInviteUrl(self.invite))
new_invite = GCIRequest.all().get()
self.assertTrue(new_invite.status == 'pending')
def testAcceptedInviteCannotBeManaged(self):
self.data.createOrgAdmin(self.org)
self.invite.status = 'accepted'
self.invite.put()
post_data = {
'resubmit': ''
}
response = self.post(self._manageInviteUrl(self.invite), post_data)
self.assertResponseForbidden(response)
post_data = {
'withdraw': ''
}
response = self.post(self._manageInviteUrl(self.invite), post_data)
self.assertResponseForbidden(response)
def _manageInviteUrl(self, invite):
return '/gci/invite/manage/%s/%s' % (
self.invite.org.scope.key().name(), self.invite.key().id())
import gffutils
import tempfile
import os
import random
import gzip
from bcbio.utils import file_exists
from bcbio.distributed.transaction import file_transaction
from bcbio.provenance import do
from bcbio.log import logger
def guess_infer_extent(gtf_file):
"""
guess whether we need to use the gene extent option when making a gffutils
database by making a tiny database from the first 1000 lines of the original
GTF and checking which feature types are present
"""
_, ext = os.path.splitext(gtf_file)
tmp_out = tempfile.NamedTemporaryFile(suffix=".gtf", delete=False).name
with open(tmp_out, "w") as out_handle:
count = 0
in_handle = open(gtf_file) if ext != ".gz" else gzip.open(gtf_file)
for line in in_handle:
if count > 1000:
break
out_handle.write(line)
count += 1
in_handle.close()
db = gffutils.create_db(tmp_out, dbfn=":memory:", infer_gene_extent=False)
os.remove(tmp_out)
features = [x for x in db.featuretypes()]
if "gene" in features and "transcript" in features:
return False
else:
return True
def get_gtf_db(gtf, in_memory=False):
"""
create a gffutils DB
"""
db_file = gtf + ".db"
if file_exists(db_file):
return gffutils.FeatureDB(db_file)
db_file = ":memory:" if in_memory else db_file
if in_memory or not file_exists(db_file):
infer_extent = guess_infer_extent(gtf)
db = gffutils.create_db(gtf, dbfn=db_file,
infer_gene_extent=infer_extent)
if in_memory:
return db
else:
return gffutils.FeatureDB(db_file)
def gtf_to_bed(gtf, alt_out_dir=None):
"""
create a BED file of transcript-level features with attached gene name
or gene ids
"""
out_file = os.path.splitext(gtf)[0] + ".bed"
if file_exists(out_file):
return out_file
if not os.access(os.path.dirname(out_file), os.W_OK | os.X_OK):
if not alt_out_dir:
raise IOError("Cannot write transcript BED output file %s" % out_file)
else:
out_file = os.path.join(alt_out_dir, os.path.basename(out_file))
with open(out_file, "w") as out_handle:
db = get_gtf_db(gtf)
for feature in db.features_of_type('transcript', order_by=("seqid", "start", "end")):
chrom = feature.chrom
start = feature.start
end = feature.end
attributes = feature.attributes.keys()
strand = feature.strand
name = (feature['gene_name'][0] if 'gene_name' in attributes else
feature['gene_id'][0])
line = "\t".join([str(x) for x in [chrom, start, end, name, ".",
strand]])
out_handle.write(line + "\n")
return out_file
def complete_features(db):
"""
iterator returning features which are complete (have a 'gene_id' and a
'transcript_id')
"""
for feature in db.all_features():
gene_id = feature.attributes.get('gene_id', [None])[0]
transcript_id = feature.attributes.get('transcript_id', [None])[0]
if gene_id and transcript_id and feature.featuretype != "transcript":
yield feature
def gtf_to_fasta(gtf_file, ref_fasta, cds=False, out_file=None):
"""
convert a GTF to FASTA format. if cds=True, use the start/stop codons
to output only the CDS.
handles malformed FASTA files where a single transcript is repeated multiple
times by just using the first copy
"""
if out_file and file_exists(out_file):
return out_file
if not out_file:
out_file = tempfile.NamedTemporaryFile(delete=False, suffix=".fa").name
tmp_file = out_file + ".tmp"
if cds:
cmd = "gffread -g {ref_fasta} -x {tx_tmp_file} {gtf_file}"
else:
cmd = "gffread -g {ref_fasta} -w {tx_tmp_file} {gtf_file}"
message = "Converting %s to FASTA format." % gtf_file
with file_transaction(tmp_file) as tx_tmp_file:
do.run(cmd.format(**locals()), message)
transcript = ""
skipping = False
with file_transaction(out_file) as tx_out_file:
with open(tmp_file) as in_handle, open(tx_out_file, "w") as out_handle:
for line in in_handle:
if line.startswith(">"):
cur_transcript = line.split(" ")[0][1:]
if transcript == cur_transcript:
logger.info("Transcript %s has already been seen, skipping this "
"version." % cur_transcript)
skipping = True
else:
transcript = cur_transcript
skipping = False
line = ">" + transcript + "\n"
if not skipping:
out_handle.write(line)
os.remove(tmp_file)
return out_file
def partition_gtf(gtf, coding=False, out_file=False):
"""
return a GTF file of either all coding or all non-coding transcripts. the GTF must
be annotated with gene_biotype = "protein_coding" or have the source column set to
the biotype for all coding transcripts. set coding to
True to get only the coding transcripts, False to get only the non-coding ones
"""
if out_file and file_exists(out_file):
return out_file
if not out_file:
out_file = tempfile.NamedTemporaryFile(delete=False,
suffix=".gtf").name
if coding:
pred = lambda biotype: biotype and biotype == "protein_coding"
else:
pred = lambda biotype: biotype and biotype != "protein_coding"
biotype_lookup = _biotype_lookup_fn(gtf)
db = get_gtf_db(gtf)
with file_transaction(out_file) as tx_out_file:
with open(tx_out_file, "w") as out_handle:
for feature in db.all_features():
biotype = biotype_lookup(feature)
if pred(biotype):
out_handle.write(str(feature) + "\n")
return out_file
def split_gtf(gtf, sample_size=None, out_dir=None):
"""
split a GTF file into two equal parts, randomly selecting genes.
sample_size will select up to sample_size genes in total
"""
if out_dir:
part1_fn = os.path.basename(os.path.splitext(gtf)[0]) + ".part1.gtf"
part2_fn = os.path.basename(os.path.splitext(gtf)[0]) + ".part2.gtf"
part1 = os.path.join(out_dir, part1_fn)
part2 = os.path.join(out_dir, part2_fn)
if file_exists(part1) and file_exists(part2):
return part1, part2
else:
part1 = tempfile.NamedTemporaryFile(delete=False, suffix=".part1.gtf").name
part2 = tempfile.NamedTemporaryFile(delete=False, suffix=".part2.gtf").name
db = get_gtf_db(gtf)
gene_ids = set([x['gene_id'][0] for x in db.all_features()])
if not sample_size or (sample_size and sample_size > len(gene_ids)):
sample_size = len(gene_ids)
gene_ids = set(random.sample(gene_ids, sample_size))
part1_ids = set(random.sample(gene_ids, sample_size / 2))
part2_ids = gene_ids.difference(part1_ids)
with open(part1, "w") as part1_handle:
for gene in part1_ids:
for feature in db.children(gene):
part1_handle.write(str(feature) + "\n")
with open(part2, "w") as part2_handle:
for gene in part2_ids:
for feature in db.children(gene):
part2_handle.write(str(feature) + "\n")
return part1, part2
def get_coding_noncoding_transcript_ids(gtf):
"""
return a set of coding and non-coding transcript_ids from a GTF
"""
coding_gtf = partition_gtf(gtf, coding=True)
coding_db = get_gtf_db(coding_gtf)
coding_ids = set([x['transcript_id'][0] for x in coding_db.all_features()
if 'transcript_id' in x.attributes])
noncoding_gtf = partition_gtf(gtf)
noncoding_db = get_gtf_db(noncoding_gtf)
noncoding_ids = set([x['transcript_id'][0] for x in noncoding_db.all_features()
if 'transcript_id' in x.attributes])
return coding_ids, noncoding_ids
def get_gene_source_set(gtf):
"""
get a dictionary of the set of all sources for a gene
"""
gene_to_source = {}
db = get_gtf_db(gtf)
for feature in complete_features(db):
gene_id = feature['gene_id'][0]
sources = gene_to_source.get(gene_id, set([])).union(set([feature.source]))
gene_to_source[gene_id] = sources
return gene_to_source
def get_transcript_source_set(gtf):
"""
get a dictionary of the set of all sources of the gene for a given
transcript
"""
gene_to_source = get_gene_source_set(gtf)
transcript_to_source = {}
db = get_gtf_db(gtf)
for feature in complete_features(db):
gene_id = feature['gene_id'][0]
transcript_to_source[feature['transcript_id'][0]] = gene_to_source[gene_id]
return transcript_to_source
def get_rRNA(gtf):
"""
extract rRNA genes and transcripts from a gtf file
"""
rRNA_biotypes = ["rRNA", "Mt_rRNA", "tRNA", "MT_tRNA"]
db = get_gtf_db(gtf)
biotype_lookup = _biotype_lookup_fn(gtf)
features = []
if not biotype_lookup:
return None
for feature in db.features_of_type("transcript"):
biotype = biotype_lookup(feature)
if biotype in rRNA_biotypes:
features.append((feature['gene_id'][0], feature['transcript_id'][0]))
return features
def _biotype_lookup_fn(gtf):
"""
return a function that will look up the biotype of a feature.
this checks for either gene_biotype or biotype being set, or for the source
column to carry the biotype information
"""
db = get_gtf_db(gtf)
sources = set([feature.source for feature in db.all_features()])
gene_biotypes = set([feature.attributes.get("gene_biotype", [None])[0]
for feature in db.all_features()])
biotypes = set([feature.attributes.get("biotype", [None])[0]
for feature in db.all_features()])
if "protein_coding" in sources:
return lambda feature: feature.source
elif "protein_coding" in biotypes:
return lambda feature: feature.attributes.get("biotype", [None])[0]
elif "protein_coding" in gene_biotypes:
return lambda feature: feature.attributes.get("gene_biotype", [None])[0]
else:
return None
def tx2genefile(gtf, out_file=None):
"""
write out a file of transcript->gene mappings.
use the installed tx2gene.csv if it exists, else write a new one out
"""
installed_tx2gene = os.path.join(os.path.dirname(gtf), "tx2gene.csv")
if file_exists(installed_tx2gene):
return installed_tx2gene
if file_exists(out_file):
return out_file
with file_transaction(out_file) as tx_out_file:
with open(tx_out_file, "w") as out_handle:
for k, v in transcript_to_gene(gtf).iteritems():
out_handle.write(",".join([k, v]) + "\n")
return out_file
def transcript_to_gene(gtf):
"""
return a dictionary keyed by transcript_id of the associated gene_id
"""
gene_lookup = {}
for feature in complete_features(get_gtf_db(gtf)):
gene_id = feature.attributes.get('gene_id', [None])[0]
transcript_id = feature.attributes.get('transcript_id', [None])[0]
gene_lookup[transcript_id] = gene_id
return gene_lookup
def is_qualimap_compatible(gtf):
"""
Qualimap needs a very specific GTF format or it fails, so skip it if
the GTF is not in that format
"""
if not gtf:
return False
db = get_gtf_db(gtf)
def qualimap_compatible(feature):
gene_id = feature.attributes.get('gene_id', [None])[0]
transcript_id = feature.attributes.get('transcript_id', [None])[0]
exon_number = feature.attributes.get('exon_number', [None])[0]
gene_biotype = feature.attributes.get('gene_biotype', [None])[0]
return gene_id and transcript_id and exon_number and gene_biotype
for feature in db.all_features():
if qualimap_compatible(feature):
return True
return False
def canonical_transcripts(gtf, out_file):
"""
given a GTF file, produce a new GTF file with only the longest transcript
for each gene.
function lifted from:
https://pythonhosted.org/gffutils/_modules/gffutils/helpers.html
"""
if file_exists(out_file):
return out_file
db = get_gtf_db(gtf)
with file_transaction(out_file) as tx_out_file:
with open(tx_out_file, "w") as out_handle:
for gene in db.features_of_type('gene'):
exon_list = []
for ti, transcript in enumerate(db.children(gene, level=1)):
cds_len = 0
total_len = 0
exons = list(db.children(transcript, level=1))
for exon in exons:
exon_length = len(exon)
if exon.featuretype == 'CDS':
cds_len += exon_length
total_len += exon_length
exon_list.append((cds_len, total_len, transcript, exons))
# If we have CDS, then use the longest coding transcript
if max(i[0] for i in exon_list) > 0:
best = max(exon_list, key=lambda x: (x[0], x[1]))
# Otherwise, just choose the longest
else:
best = max(exon_list, key=lambda x: x[1])
for exon in db.children(best[2], level=1):
out_handle.write(str(exon) + "\n")
return out_file
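# A minimal usage sketch, not part of the original module: it assumes a local
# "annotation.gtf" file exists (hypothetical path) and shows how the helpers
# above might be chained to build the gffutils database, write a transcript
# BED file, and produce a transcript->gene lookup.
if __name__ == "__main__":
    example_gtf = "annotation.gtf"  # hypothetical input GTF
    example_db = get_gtf_db(example_gtf, in_memory=True)
    print("feature types: %s" % list(example_db.featuretypes()))
    example_bed = gtf_to_bed(example_gtf)
    print("transcript BED written to %s" % example_bed)
    tx2gene = transcript_to_gene(example_gtf)
    print("transcripts mapped to genes: %d" % len(tx2gene))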
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
if __name__ == '__main__':
sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), '../../..'))
import unittest
from grit.format.policy_templates import policy_template_generator
from grit.format.policy_templates.writers import mock_writer
from grit.format.policy_templates.writers import template_writer
class PolicyTemplateGeneratorUnittest(unittest.TestCase):
'''Unit tests for policy_template_generator.py.'''
def do_test(self, policy_data, writer):
'''Executes a test case.
Creates and invokes an instance of PolicyTemplateGenerator with
the given arguments.
Notice: Plain comments are used in test methods instead of docstrings,
so that method names do not get overridden by the docstrings in the
test output.
Args:
policy_data: The list of policies and groups as it would be
loaded from policy_templates.json.
writer: A writer used for this test. It is usually derived from
mock_writer.MockWriter.
'''
writer.tester = self
config = {
'app_name': '_app_name',
'frame_name': '_frame_name',
'os_name': '_os_name',
}
if not 'messages' in policy_data:
policy_data['messages'] = {}
if not 'placeholders' in policy_data:
policy_data['placeholders'] = []
if not 'policy_definitions' in policy_data:
policy_data['policy_definitions'] = []
policy_generator = policy_template_generator.PolicyTemplateGenerator(
config,
policy_data)
res = policy_generator.GetTemplateText(writer)
writer.Test()
return res
def testSequence(self):
# Test the sequence of invoking the basic PolicyWriter operations,
# in case of empty input data structures.
class LocalMockWriter(mock_writer.MockWriter):
def __init__(self):
self.log = 'init;'
def Init(self):
self.log += 'prepare;'
def BeginTemplate(self):
self.log += 'begin;'
def EndTemplate(self):
self.log += 'end;'
def GetTemplateText(self):
self.log += 'get_text;'
return 'writer_result_string'
def Test(self):
self.tester.assertEquals(self.log,
'init;prepare;begin;end;get_text;')
result = self.do_test({}, LocalMockWriter())
self.assertEquals(result, 'writer_result_string')
def testEmptyGroups(self):
# Test that empty policy groups are not passed to the writer.
policies_mock = {
'policy_definitions': [
{'name': 'Group1', 'type': 'group', 'policies': [],
'desc': '', 'caption': ''},
{'name': 'Group2', 'type': 'group', 'policies': [],
'desc': '', 'caption': ''},
{'name': 'Group3', 'type': 'group', 'policies': [],
'desc': '', 'caption': ''},
]
}
class LocalMockWriter(mock_writer.MockWriter):
def __init__(self):
self.log = ''
def BeginPolicyGroup(self, group):
self.log += '['
def EndPolicyGroup(self):
self.log += ']'
def Test(self):
self.tester.assertEquals(self.log, '')
self.do_test(policies_mock, LocalMockWriter())
def testGroups(self):
# Test that policy groups are passed to the writer in the correct order.
policies_mock = {
'policy_definitions': [
{
'name': 'Group1', 'type': 'group',
'caption': '', 'desc': '',
'policies': [{'name': 'TAG1', 'type': 'mock', 'supported_on': [],
'caption': '', 'desc': ''}]
},
{
'name': 'Group2', 'type': 'group',
'caption': '', 'desc': '',
'policies': [{'name': 'TAG2', 'type': 'mock', 'supported_on': [],
'caption': '', 'desc': ''}]
},
{
'name': 'Group3', 'type': 'group',
'caption': '', 'desc': '',
'policies': [{'name': 'TAG3', 'type': 'mock', 'supported_on': [],
'caption': '', 'desc': ''}]
},
]
}
class LocalMockWriter(mock_writer.MockWriter):
def __init__(self):
self.log = ''
def BeginPolicyGroup(self, group):
self.log += '[' + group['policies'][0]['name']
def EndPolicyGroup(self):
self.log += ']'
def Test(self):
self.tester.assertEquals(self.log, '[TAG1][TAG2][TAG3]')
self.do_test(policies_mock, LocalMockWriter())
def testPolicies(self):
# Test that policies are passed to the writer in the correct order.
policy_defs_mock = {
'policy_definitions': [
{
'name': 'Group1',
'type': 'group',
'caption': '',
'desc': '',
'policies': [
{'name': 'Group1Policy1', 'type': 'string', 'supported_on': [],
'caption': '', 'desc': ''},
{'name': 'Group1Policy2', 'type': 'string', 'supported_on': [],
'caption': '', 'desc': ''},
]
},
{
'name': 'Group2',
'type': 'group',
'caption': '',
'desc': '',
'policies': [
{'name': 'Group2Policy3', 'type': 'string', 'supported_on': [],
'caption': '', 'desc': ''},
]
}
]
}
class LocalMockWriter(mock_writer.MockWriter):
def __init__(self):
self.policy_name = None
self.policy_list = []
def BeginPolicyGroup(self, group):
self.group = group
def EndPolicyGroup(self):
self.group = None
def WritePolicy(self, policy):
self.tester.assertEquals(policy['name'][0:6], self.group['name'])
self.policy_list.append(policy['name'])
def Test(self):
self.tester.assertEquals(
self.policy_list,
['Group1Policy1', 'Group1Policy2', 'Group2Policy3'])
self.do_test( policy_defs_mock, LocalMockWriter())
def testPolicyTexts(self):
# Test that GUI messages of policies all get placeholders replaced.
policy_data_mock = {
'policy_definitions': [
{
'name': 'Group1',
'type': 'group',
'desc': '',
'caption': '',
'policies': [
{
'name': 'Policy1',
'caption': '1. app_name -- $1',
'label': '2. os_name -- $2',
'desc': '3. frame_name -- $3',
'type': 'string',
'supported_on': []
},
]
}
]
}
class LocalMockWriter(mock_writer.MockWriter):
def WritePolicy(self, policy):
if policy['name'] == 'Policy1':
self.tester.assertEquals(policy['caption'],
'1. app_name -- _app_name')
self.tester.assertEquals(policy['label'],
'2. os_name -- _os_name')
self.tester.assertEquals(policy['desc'],
'3. frame_name -- _frame_name')
elif policy['name'] == 'Group1':
pass
else:
self.tester.fail()
self.do_test(policy_data_mock, LocalMockWriter())
def testIntEnumTexts(self):
# Test that GUI messages are assigned correctly to int-enums
# (aka dropdown menus).
policy_defs_mock = {
'policy_definitions': [{
'name': 'Policy1',
'type': 'int-enum',
'caption': '', 'desc': '',
'supported_on': [],
'items': [
{'name': 'item1', 'value': 0, 'caption': 'string1', 'desc': ''},
{'name': 'item2', 'value': 1, 'caption': 'string2', 'desc': ''},
{'name': 'item3', 'value': 3, 'caption': 'string3', 'desc': ''},
]
}]
}
class LocalMockWriter(mock_writer.MockWriter):
def WritePolicy(self, policy):
self.tester.assertEquals(policy['items'][0]['caption'], 'string1')
self.tester.assertEquals(policy['items'][1]['caption'], 'string2')
self.tester.assertEquals(policy['items'][2]['caption'], 'string3')
self.do_test(policy_defs_mock, LocalMockWriter())
def testStringEnumTexts(self):
# Test that GUI messages are assigned correctly to string-enums
# (aka dropdown menus).
policy_data_mock = {
'policy_definitions': [{
'name': 'Policy1',
'type': 'string-enum',
'caption': '', 'desc': '',
'supported_on': [],
'items': [
{'name': 'item1', 'value': 'one', 'caption': 'string1', 'desc': ''},
{'name': 'item2', 'value': 'two', 'caption': 'string2', 'desc': ''},
{'name': 'item3', 'value': 'three', 'caption': 'string3', 'desc': ''},
]
}]
}
class LocalMockWriter(mock_writer.MockWriter):
def WritePolicy(self, policy):
self.tester.assertEquals(policy['items'][0]['caption'], 'string1')
self.tester.assertEquals(policy['items'][1]['caption'], 'string2')
self.tester.assertEquals(policy['items'][2]['caption'], 'string3')
self.do_test(policy_data_mock, LocalMockWriter())
def testPolicyFiltering(self):
# Test that policies are filtered correctly based on their annotations.
policy_data_mock = {
'policy_definitions': [
{
'name': 'Group1',
'type': 'group',
'caption': '',
'desc': '',
'policies': [
{
'name': 'Group1Policy1',
'type': 'string',
'caption': '',
'desc': '',
'supported_on': [
'chrome.aaa:8-', 'chrome.bbb:8-', 'chrome.ccc:8-'
]
},
{
'name': 'Group1Policy2',
'type': 'string',
'caption': '',
'desc': '',
'supported_on': ['chrome.ddd:8-']
},
]
}, {
'name': 'Group2',
'type': 'group',
'caption': '',
'desc': '',
'policies': [
{
'name': 'Group2Policy3',
'type': 'string',
'caption': '',
'desc': '',
'supported_on': ['chrome.eee:8-']
},
]
}, {
'name': 'SinglePolicy',
'type': 'int',
'caption': '',
'desc': '',
'supported_on': ['chrome.eee:8-']
}
]
}
# This writer accumulates the list of policies it is asked to write.
# This list is stored in the result_list member variable and can
# be used later for assertions.
class LocalMockWriter(mock_writer.MockWriter):
def __init__(self, platforms):
self.platforms = platforms
self.policy_name = None
self.result_list = []
def BeginPolicyGroup(self, group):
self.group = group
self.result_list.append('begin_' + group['name'])
def EndPolicyGroup(self):
self.result_list.append('end_group')
self.group = None
def WritePolicy(self, policy):
self.result_list.append(policy['name'])
def IsPolicySupported(self, policy):
# Call the original (non-mock) implementation of this method.
return template_writer.TemplateWriter.IsPolicySupported(self, policy)
local_mock_writer = LocalMockWriter(['eee'])
self.do_test(policy_data_mock, local_mock_writer)
# Test that only policies of platform 'eee' were written:
self.assertEquals(
local_mock_writer.result_list,
['begin_Group2', 'Group2Policy3', 'end_group', 'SinglePolicy'])
local_mock_writer = LocalMockWriter(['ddd', 'bbb'])
self.do_test(policy_data_mock, local_mock_writer)
# Test that only policies of platforms 'ddd' and 'bbb' were written:
self.assertEquals(
local_mock_writer.result_list,
['begin_Group1', 'Group1Policy1', 'Group1Policy2', 'end_group'])
def testSortingInvoked(self):
# Tests that policy-sorting happens before passing policies to the writer.
policy_data = {
'policy_definitions': [
{'name': 'zp', 'type': 'string', 'supported_on': [],
'caption': '', 'desc': ''},
{'name': 'ap', 'type': 'string', 'supported_on': [],
'caption': '', 'desc': ''},
]
}
class LocalMockWriter(mock_writer.MockWriter):
def __init__(self):
self.result_list = []
def WritePolicy(self, policy):
self.result_list.append(policy['name'])
def Test(self):
self.tester.assertEquals(
self.result_list,
['ap', 'zp'])
self.do_test(policy_data, LocalMockWriter())
if __name__ == '__main__':
unittest.main()
"""
This module contains a class that does the tough work of chunking / unchunking html.
You may look at :py:class:`.boner.HtmlBoner` if you are looking for a more usable class.
"""
import copy
from collections import namedtuple
from lxml import html
#: a sentence, a list of semantic elts to insert inside it, the original parent, and whether it comes from the parent's tail
TextChunk = namedtuple("TextChunk", "text elts parent istail")
#: a SkipChunk is a chunk of html that does not contain sentences
SkipChunk = namedtuple("SkipChunk", "parent")
#: a semantic elt to insert in a sentence at a specific position
Elt = namedtuple("Elt", "elt start end")
def _s(s):
"""return an empty sentence for None
"""
return "" if s is None else s
class HtmlChunker:
"""This utility chunk an html in text chunk, retaining the tags.
You can then re-inject the text in html, while eg. adding a specific class.
"""
#: tags that do not split text. Instead we will retain their position for later construction
#: (cf. https://developer.mozilla.org/en-US/docs/Web/HTML/Element#Inline_text_semantics)
#: but we removed br, as it generally means a different sentence.
TEXT_SEMANTICS = {
"a", "abbr", "b", "bdi", "bdo", "cite", "code", "data", "dfn", "em", "i", "kbd",
"mark", "q", "rp", "rt", "rtc", "ruby", "s", "samp", "small", "span", "strong", "sub",
"sup", "time", "u", "var", "wbr"}
#: we won't search text inside those tags
SKIPPED_TAGS = {
"head", "script", "noscript", "style", "canvas"}
def is_semantic(self, element):
""":return bool: true if element is a "semantic element" (does not split text).
"""
return (
element.tag in self.TEXT_SEMANTICS and
# verify children are also semantics
all(self.is_semantic(e) for e in element))
def shall_skip(self, element):
"""compute if element shall be skipped
"""
return (
element.tag in self.SKIPPED_TAGS or
isinstance(element, html.HtmlComment) or
not bool(element.text_content().strip()))
def _process_semantic_child(self, element, start):
"""process children of elemets which are of type semantic.
This use recursivity.
:param element: the parent html element
:param int start: the actual position in text
:return tuple: the retrieved text
and a list of Elt, semantic sub element with their points of insertion
"""
text = _s(element.text)
elts = []
for child in element:
subtext, subelts = self._process_semantic_child(child, start + len(text))
text += subtext
elts.extend(subelts)
elts = [Elt(element, start, start + len(text))] + elts
text += _s(element.tail)
return text, elts
def chunk_tree(self, tree, tail=None):
"""harvest sentences in html keeping its structure
use recursivity.
:param tree: the html element being processed
:param tail: a list of semantic elements that are part of the tail of this element
:return list: a list of :py:class:`TextChunk`
"""
if tail is None:
tail = []
chunks = []
if self.shall_skip(tree):
chunks.append(SkipChunk(tree))
else:
# get text and children with their additional tail
text = _s(tree.text)
semantics = []
children = []
tails = []
for child in tree:
if self.is_semantic(child):
if not children:
# text before the first non-semantic child, so it's like element text
sub_text, sub_elts = self._process_semantic_child(child, len(text))
text += sub_text
semantics.extend(sub_elts)
else:
# add to the last seen child's tail
tails[-1].append(child)
else:
# process later
children.append(child)
tails.append([])
# make my text
chunks.append(TextChunk(text, semantics, tree, False))
# add children sentences
for child, child_tail in zip(children, tails):
chunks.extend(self.chunk_tree(child, child_tail))
# process my tail
if tree.tail or tail:
text = _s(tree.tail)
semantics = []
for child in tail:
sub_text, sub_elts = self._process_semantic_child(child, len(text))
text += sub_text
semantics.extend(sub_elts)
chunks.append(TextChunk(text, semantics, tree, True))
return chunks
def _crossing_semantics(self, start, end, semantics):
"""from a list of html semantic elements,
return the one that are included between start and end
:param int start: start of interval to consider
:param int end: end of interval
:param semantics: list of Elt
:return: list of Elt that are to consider for interval, shifted from start
"""
for s in semantics:
elt = None
if s.start >= start and s.end <= end:
elt = s # included
elif s.start <= start and s.end > start:
elt = Elt(s.elt, start, min(end, s.end)) # crossing lower
elif s.end >= end and s.start < end:
elt = Elt(s.elt, max(start, s.start), end) # crossing upper
if elt is not None:
# shift to have start, end relative to actual start
yield Elt(elt.elt, elt.start - start, elt.end - start)
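# Illustration for _crossing_semantics (not part of the original module): with
# semantics [Elt(<b>, 6, 11)] and the sentence interval start=0, end=17, the
# element is fully included and is yielded unchanged as Elt(<b>, 6, 11); for a
# later sentence starting at start=12, an element spanning (10, 15) would be
# clipped to (12, 15) and yielded as Elt(elt, 0, 3) relative to that sentence.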
def _introduce_semantics(self, sentence, semantics):
"""utility to unchunk, given a sentence and the list of semantics elements to insert
return an intro text and etree children to rebuild original sentence.
"""
if not semantics:
return sentence, []
intro = sentence[: semantics[0].start]
children = []
while semantics:
child = semantics.pop(0)
included = []
while semantics and semantics[0].end <= child.end:
s = semantics.pop(0)
included.append(Elt(s.elt, s.start - child.start, s.end - child.start))
# recursive
child_intro, child_children = self._introduce_semantics(
sentence[child.start:child.end], included)
html_child = html.Element(
child.elt.tag, attrib=child.elt.attrib)
html_child.text = child_intro
for c in child_children:
html_child.append(c)
# tail is text up to next element
tail_end = semantics[0].start if semantics else None
html_child.tail = sentence[child.end: tail_end]
children.append(html_child)
return intro, children
def _rebuild_text(self, chunk, parent, sentence_classes,
span_id_prefix, no_class_no_span, chunk_num):
"""adding text to parent, sentence by sentence
"""
# sentence by sentence
sentence_num = -1
sentence_start = 0
last_element = parent
grand_parent = parent.getparent()
for sentence, classes in sentence_classes:
if sentence is None:
continue
sentence_num += 1
sentence_end = sentence_start + len(sentence)
# reintroduced semantics elts
crossing = list(
self._crossing_semantics(sentence_start, sentence_end, chunk.elts))
text, children = self._introduce_semantics(sentence, crossing)
if classes or not no_class_no_span:
if isinstance(classes, str):
classes = [classes]
# we enclose text in a span of right class
attrib = {}
if span_id_prefix is not None:
attrib["id"] = "%s%d-%d" % (span_id_prefix, chunk_num, sentence_num)
if classes:
attrib["class"] = " ".join(classes)
span = html.Element("span", attrib=attrib)
span.text = text
for child in children:
span.append(child)
if chunk.istail:
# by chance we are processing in an ordered manner, so we can append
grand_parent.append(span)
else:
parent.append(span)
last_element = span
else:
# we introduce our text
if last_element != parent or chunk.istail:
last_element.tail = _s(last_element.tail) + text
else:
last_element.text = _s(last_element.text) + text
# and children
if children:
_parent = grand_parent if chunk.istail else parent
for child in children:
_parent.append(child)
last_element = children[-1]
sentence_start = sentence_end
def unchunk(self, tree, chunks, chunk_classes, span_id_prefix="chunk-", no_class_no_span=True):
"""given a tree, it's chunks and classifications for sentences in chunks,
rebuild the html with span and classes inside.
:param tree: the tree the chunk where generated from
:param chunks: a list of TextChunk/SkipChunk
:param chunk_classes: a list of list of classes to apply to each text in each chunk
:param span_id_prefix: a prefix for span added to html
:return: a new tree
"""
old_to_new = {} # track elements between old and new tree
for chunk_num, (chunk, sentence_classes) in enumerate(zip(chunks, chunk_classes)):
if isinstance(chunk, SkipChunk):
# just clone
if chunk.parent is not None and chunk.parent.getparent() is not None:
new_parent = old_to_new[chunk.parent.getparent()]
else:
# append to root
new_parent = tree
new = copy.deepcopy(chunk.parent)
# remove tail, as it was processed
new.tail = ""
new_parent.append(new)
old_to_new[chunk.parent] = new
elif not chunk.istail:
old_parent = chunk.parent
# make new parent
new_parent = html.Element(
old_parent.tag, attrib=old_parent.attrib, nsmap=old_parent.nsmap)
# map new and old
old_to_new[old_parent] = new_parent
# add to corresponding node, order is conserved by chunks so it works well
old_ancestor = old_parent.getparent()
if old_ancestor is not None:
new_ancestor = old_to_new[old_ancestor]
new_ancestor.append(new_parent)
else:
# this is tail so append text to parent
if chunk.parent is not None:
new_parent = old_to_new[chunk.parent]
else:
# append to root
new_parent = tree
if not isinstance(chunk, SkipChunk):
self._rebuild_text(
chunk, new_parent, sentence_classes,
span_id_prefix, no_class_no_span, chunk_num)
return old_to_new[tree]
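# A minimal usage sketch, not part of the original module: it chunks a small
# document, wraps every non-empty chunk in a span with a hypothetical
# "highlight" class, and rebuilds the tree. Real callers would split each
# chunk's text into sentences and classify them individually.
if __name__ == "__main__":
    doc = html.fromstring("<div><p>Hello <b>world</b>. Bye.</p></div>")
    chunker = HtmlChunker()
    chunks = chunker.chunk_tree(doc)
    chunk_classes = []
    for chunk in chunks:
        if isinstance(chunk, TextChunk) and chunk.text.strip():
            # one (sentence, classes) pair covering the whole chunk text
            chunk_classes.append([(chunk.text, ["highlight"])])
        else:
            chunk_classes.append([])
    new_tree = chunker.unchunk(doc, chunks, chunk_classes)
    print(html.tostring(new_tree, pretty_print=True).decode())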
import random
import boto3
import pytest
import sure # noqa # pylint: disable=unused-import
from botocore.exceptions import ClientError
from moto import mock_ec2, settings
from tests import EXAMPLE_AMI_ID
from uuid import uuid4
from unittest import SkipTest
@mock_ec2
def test_subnets_boto3():
ec2 = boto3.resource("ec2", region_name="us-east-1")
client = boto3.client("ec2", region_name="us-east-1")
vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16")
subnet = ec2.create_subnet(VpcId=vpc.id, CidrBlock="10.0.0.0/18")
ours = client.describe_subnets(SubnetIds=[subnet.id])["Subnets"]
ours.should.have.length_of(1)
client.delete_subnet(SubnetId=subnet.id)
with pytest.raises(ClientError) as ex:
client.describe_subnets(SubnetIds=[subnet.id])
err = ex.value.response["Error"]
err["Code"].should.equal("InvalidSubnetID.NotFound")
with pytest.raises(ClientError) as ex:
client.delete_subnet(SubnetId=subnet.id)
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["ResponseMetadata"].should.have.key("RequestId")
ex.value.response["Error"]["Code"].should.equal("InvalidSubnetID.NotFound")
@mock_ec2
def test_subnet_create_vpc_validation_boto3():
ec2 = boto3.resource("ec2", region_name="us-east-1")
with pytest.raises(ClientError) as ex:
ec2.create_subnet(VpcId="vpc-abcd1234", CidrBlock="10.0.0.0/18")
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["ResponseMetadata"].should.have.key("RequestId")
ex.value.response["Error"]["Code"].should.equal("InvalidVpcID.NotFound")
@mock_ec2
def test_subnet_tagging_boto3():
ec2 = boto3.resource("ec2", region_name="us-east-1")
client = boto3.client("ec2", region_name="us-east-1")
vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16")
subnet = ec2.create_subnet(VpcId=vpc.id, CidrBlock="10.0.0.0/18")
subnet.create_tags(Tags=[{"Key": "a key", "Value": "some value"}])
tag = client.describe_tags(
Filters=[{"Name": "resource-id", "Values": [subnet.id]}]
)["Tags"][0]
tag["Key"].should.equal("a key")
tag["Value"].should.equal("some value")
# Refresh the subnet
subnet = client.describe_subnets(SubnetIds=[subnet.id])["Subnets"][0]
subnet["Tags"].should.equal([{"Key": "a key", "Value": "some value"}])
@mock_ec2
def test_subnet_should_have_proper_availability_zone_set_boto3():
ec2 = boto3.resource("ec2", region_name="us-west-1")
vpcA = ec2.create_vpc(CidrBlock="10.0.0.0/16")
subnetA = ec2.create_subnet(
VpcId=vpcA.id, CidrBlock="10.0.0.0/24", AvailabilityZone="us-west-1b"
)
subnetA.availability_zone.should.equal("us-west-1b")
@mock_ec2
def test_availability_zone_in_create_subnet():
ec2 = boto3.resource("ec2", region_name="us-west-1")
vpc = ec2.create_vpc(CidrBlock="172.31.0.0/16")
subnet = ec2.create_subnet(
VpcId=vpc.id, CidrBlock="172.31.48.0/20", AvailabilityZoneId="use1-az6"
)
subnet.availability_zone_id.should.equal("use1-az6")
@mock_ec2
def test_default_subnet():
if settings.TEST_SERVER_MODE:
raise SkipTest("ServerMode will have conflicting CidrBlocks")
ec2 = boto3.resource("ec2", region_name="us-west-1")
default_vpc = list(ec2.vpcs.all())[0]
default_vpc.cidr_block.should.equal("172.31.0.0/16")
default_vpc.reload()
default_vpc.is_default.should.be.ok
subnet = ec2.create_subnet(
VpcId=default_vpc.id, CidrBlock="172.31.48.0/20", AvailabilityZone="us-west-1a"
)
subnet.reload()
subnet.map_public_ip_on_launch.shouldnt.be.ok
@mock_ec2
def test_boto3_non_default_subnet():
ec2 = boto3.resource("ec2", region_name="us-west-1")
# Create the non default VPC
vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16")
vpc.reload()
vpc.is_default.shouldnt.be.ok
subnet = ec2.create_subnet(
VpcId=vpc.id, CidrBlock="10.0.0.0/24", AvailabilityZone="us-west-1a"
)
subnet.reload()
subnet.map_public_ip_on_launch.shouldnt.be.ok
@mock_ec2
def test_modify_subnet_attribute_public_ip_on_launch():
ec2 = boto3.resource("ec2", region_name="us-west-1")
client = boto3.client("ec2", region_name="us-west-1")
random_ip = ".".join(map(str, (random.randint(0, 99) for _ in range(4))))
vpc = ec2.create_vpc(CidrBlock=f"{random_ip}/16")
random_subnet_cidr = f"{random_ip}/20" # Same block as the VPC
subnet = ec2.create_subnet(
VpcId=vpc.id, CidrBlock=random_subnet_cidr, AvailabilityZone="us-west-1a"
)
# 'map_public_ip_on_launch' is set when calling 'DescribeSubnets' action
subnet.reload()
# For non default subnet, attribute value should be 'False'
subnet.map_public_ip_on_launch.shouldnt.be.ok
client.modify_subnet_attribute(
SubnetId=subnet.id, MapPublicIpOnLaunch={"Value": False}
)
subnet.reload()
subnet.map_public_ip_on_launch.shouldnt.be.ok
client.modify_subnet_attribute(
SubnetId=subnet.id, MapPublicIpOnLaunch={"Value": True}
)
subnet.reload()
subnet.map_public_ip_on_launch.should.be.ok
@mock_ec2
def test_modify_subnet_attribute_assign_ipv6_address_on_creation():
ec2 = boto3.resource("ec2", region_name="us-west-1")
client = boto3.client("ec2", region_name="us-west-1")
random_ip = ".".join(map(str, (random.randint(0, 99) for _ in range(4))))
vpc = ec2.create_vpc(CidrBlock=f"{random_ip}/16")
random_subnet_cidr = f"{random_ip}/20" # Same block as the VPC
subnet = ec2.create_subnet(
VpcId=vpc.id, CidrBlock=random_subnet_cidr, AvailabilityZone="us-west-1a"
)
# 'map_public_ip_on_launch' is set when calling 'DescribeSubnets' action
subnet.reload()
client.describe_subnets()
# For non default subnet, attribute value should be 'False'
subnet.assign_ipv6_address_on_creation.shouldnt.be.ok
client.modify_subnet_attribute(
SubnetId=subnet.id, AssignIpv6AddressOnCreation={"Value": False}
)
subnet.reload()
subnet.assign_ipv6_address_on_creation.shouldnt.be.ok
client.modify_subnet_attribute(
SubnetId=subnet.id, AssignIpv6AddressOnCreation={"Value": True}
)
subnet.reload()
subnet.assign_ipv6_address_on_creation.should.be.ok
@mock_ec2
def test_modify_subnet_attribute_validation():
# TODO: implement some actual logic
ec2 = boto3.resource("ec2", region_name="us-west-1")
vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16")
ec2.create_subnet(
VpcId=vpc.id, CidrBlock="10.0.0.0/24", AvailabilityZone="us-west-1a"
)
@mock_ec2
def test_subnet_get_by_id_boto3():
ec2 = boto3.resource("ec2", region_name="us-west-1")
client = boto3.client("ec2", region_name="us-west-1")
vpcA = ec2.create_vpc(CidrBlock="10.0.0.0/16")
subnetA = ec2.create_subnet(
VpcId=vpcA.id, CidrBlock="10.0.0.0/24", AvailabilityZone="us-west-1a"
)
vpcB = ec2.create_vpc(CidrBlock="10.0.0.0/16")
subnetB1 = ec2.create_subnet(
VpcId=vpcB.id, CidrBlock="10.0.0.0/24", AvailabilityZone="us-west-1a"
)
ec2.create_subnet(
VpcId=vpcB.id, CidrBlock="10.0.1.0/24", AvailabilityZone="us-west-1b"
)
subnets_by_id = client.describe_subnets(SubnetIds=[subnetA.id, subnetB1.id])[
"Subnets"
]
subnets_by_id.should.have.length_of(2)
subnets_by_id = tuple(map(lambda s: s["SubnetId"], subnets_by_id))
subnetA.id.should.be.within(subnets_by_id)
subnetB1.id.should.be.within(subnets_by_id)
with pytest.raises(ClientError) as ex:
client.describe_subnets(SubnetIds=["subnet-does_not_exist"])
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(400)
ex.value.response["ResponseMetadata"].should.have.key("RequestId")
ex.value.response["Error"]["Code"].should.equal("InvalidSubnetID.NotFound")
@mock_ec2
def test_get_subnets_filtering_boto3():
ec2 = boto3.resource("ec2", region_name="us-west-1")
client = boto3.client("ec2", region_name="us-west-1")
vpcA = ec2.create_vpc(CidrBlock="10.0.0.0/16")
subnetA = ec2.create_subnet(
VpcId=vpcA.id, CidrBlock="10.0.0.0/24", AvailabilityZone="us-west-1a"
)
vpcB = ec2.create_vpc(CidrBlock="10.0.0.0/16")
subnetB1 = ec2.create_subnet(
VpcId=vpcB.id, CidrBlock="10.0.0.0/24", AvailabilityZone="us-west-1a"
)
subnetB2 = ec2.create_subnet(
VpcId=vpcB.id, CidrBlock="10.0.1.0/24", AvailabilityZone="us-west-1b"
)
nr_of_a_zones = len(client.describe_availability_zones()["AvailabilityZones"])
all_subnets = client.describe_subnets()["Subnets"]
if settings.TEST_SERVER_MODE:
# ServerMode may have other tests running that are creating subnets
all_subnet_ids = [s["SubnetId"] for s in all_subnets]
all_subnet_ids.should.contain(subnetA.id)
all_subnet_ids.should.contain(subnetB1.id)
all_subnet_ids.should.contain(subnetB2.id)
else:
all_subnets.should.have.length_of(3 + nr_of_a_zones)
# Filter by VPC ID
subnets_by_vpc = client.describe_subnets(
Filters=[{"Name": "vpc-id", "Values": [vpcB.id]}]
)["Subnets"]
subnets_by_vpc.should.have.length_of(2)
set([subnet["SubnetId"] for subnet in subnets_by_vpc]).should.equal(
set([subnetB1.id, subnetB2.id])
)
# Filter by CIDR variations
subnets_by_cidr1 = client.describe_subnets(
Filters=[{"Name": "cidr", "Values": ["10.0.0.0/24"]}]
)["Subnets"]
subnets_by_cidr1 = [s["SubnetId"] for s in subnets_by_cidr1]
subnets_by_cidr1.should.contain(subnetA.id)
subnets_by_cidr1.should.contain(subnetB1.id)
subnets_by_cidr1.shouldnt.contain(subnetB2.id)
subnets_by_cidr2 = client.describe_subnets(
Filters=[{"Name": "cidr-block", "Values": ["10.0.0.0/24"]}]
)["Subnets"]
subnets_by_cidr2 = [s["SubnetId"] for s in subnets_by_cidr2]
subnets_by_cidr2.should.contain(subnetA.id)
subnets_by_cidr2.should.contain(subnetB1.id)
subnets_by_cidr2.shouldnt.contain(subnetB2.id)
subnets_by_cidr3 = client.describe_subnets(
Filters=[{"Name": "cidrBlock", "Values": ["10.0.0.0/24"]}]
)["Subnets"]
subnets_by_cidr3 = [s["SubnetId"] for s in subnets_by_cidr3]
subnets_by_cidr3.should.contain(subnetA.id)
subnets_by_cidr3.should.contain(subnetB1.id)
subnets_by_cidr3.shouldnt.contain(subnetB2.id)
# Filter by VPC ID and CIDR
subnets_by_vpc_and_cidr = client.describe_subnets(
Filters=[
{"Name": "vpc-id", "Values": [vpcB.id]},
{"Name": "cidr", "Values": ["10.0.0.0/24"]},
]
)["Subnets"]
subnets_by_vpc_and_cidr.should.have.length_of(1)
subnets_by_vpc_and_cidr[0]["SubnetId"].should.equal(subnetB1.id)
# Filter by subnet ID
subnets_by_id = client.describe_subnets(
Filters=[{"Name": "subnet-id", "Values": [subnetA.id]}]
)["Subnets"]
subnets_by_id.should.have.length_of(1)
subnets_by_id[0]["SubnetId"].should.equal(subnetA.id)
# Filter by availabilityZone
subnets_by_az = client.describe_subnets(
Filters=[
{"Name": "availabilityZone", "Values": ["us-west-1a"]},
{"Name": "vpc-id", "Values": [vpcB.id]},
]
)["Subnets"]
subnets_by_az.should.have.length_of(1)
subnets_by_az[0]["SubnetId"].should.equal(subnetB1.id)
if not settings.TEST_SERVER_MODE:
# Filter by defaultForAz
subnets_by_az = client.describe_subnets(
Filters=[{"Name": "defaultForAz", "Values": ["true"]}]
)["Subnets"]
subnets_by_az.should.have.length_of(nr_of_a_zones)
# Unsupported filter
filters = [{"Name": "not-implemented-filter", "Values": ["foobar"]}]
client.describe_subnets.when.called_with(Filters=filters).should.throw(
NotImplementedError
)
@mock_ec2
def test_create_subnet_response_fields():
ec2 = boto3.resource("ec2", region_name="us-west-1")
client = boto3.client("ec2", region_name="us-west-1")
vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16")
subnet = client.create_subnet(
VpcId=vpc.id, CidrBlock="10.0.0.0/24", AvailabilityZone="us-west-1a"
)["Subnet"]
subnet.should.have.key("AvailabilityZone")
subnet.should.have.key("AvailabilityZoneId")
subnet.should.have.key("AvailableIpAddressCount")
subnet.should.have.key("CidrBlock")
subnet.should.have.key("State")
subnet.should.have.key("SubnetId")
subnet.should.have.key("VpcId")
subnet.should.have.key("Tags")
subnet.should.have.key("DefaultForAz").which.should.equal(False)
subnet.should.have.key("MapPublicIpOnLaunch").which.should.equal(False)
subnet.should.have.key("OwnerId")
subnet.should.have.key("AssignIpv6AddressOnCreation").which.should.equal(False)
subnet_arn = "arn:aws:ec2:{region}:{owner_id}:subnet/{subnet_id}".format(
region=subnet["AvailabilityZone"][0:-1],
owner_id=subnet["OwnerId"],
subnet_id=subnet["SubnetId"],
)
subnet.should.have.key("SubnetArn").which.should.equal(subnet_arn)
subnet.should.have.key("Ipv6CidrBlockAssociationSet").which.should.equal([])
@mock_ec2
def test_describe_subnet_response_fields():
ec2 = boto3.resource("ec2", region_name="us-west-1")
client = boto3.client("ec2", region_name="us-west-1")
vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16")
subnet_object = ec2.create_subnet(
VpcId=vpc.id, CidrBlock="10.0.0.0/24", AvailabilityZone="us-west-1a"
)
subnets = client.describe_subnets(SubnetIds=[subnet_object.id])["Subnets"]
subnets.should.have.length_of(1)
subnet = subnets[0]
subnet.should.have.key("AvailabilityZone")
subnet.should.have.key("AvailabilityZoneId")
subnet.should.have.key("AvailableIpAddressCount")
subnet.should.have.key("CidrBlock")
subnet.should.have.key("State")
subnet.should.have.key("SubnetId")
subnet.should.have.key("VpcId")
subnet.shouldnt.have.key("Tags")
subnet.should.have.key("DefaultForAz").which.should.equal(False)
subnet.should.have.key("MapPublicIpOnLaunch").which.should.equal(False)
subnet.should.have.key("OwnerId")
subnet.should.have.key("AssignIpv6AddressOnCreation").which.should.equal(False)
subnet_arn = "arn:aws:ec2:{region}:{owner_id}:subnet/{subnet_id}".format(
region=subnet["AvailabilityZone"][0:-1],
owner_id=subnet["OwnerId"],
subnet_id=subnet["SubnetId"],
)
subnet.should.have.key("SubnetArn").which.should.equal(subnet_arn)
subnet.should.have.key("Ipv6CidrBlockAssociationSet").which.should.equal([])
@mock_ec2
def test_create_subnet_with_invalid_availability_zone():
ec2 = boto3.resource("ec2", region_name="us-west-1")
client = boto3.client("ec2", region_name="us-west-1")
vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16")
subnet_availability_zone = "asfasfas"
with pytest.raises(ClientError) as ex:
client.create_subnet(
VpcId=vpc.id,
CidrBlock="10.0.0.0/24",
AvailabilityZone=subnet_availability_zone,
)
assert str(ex.value).startswith(
"An error occurred (InvalidParameterValue) when calling the CreateSubnet "
"operation: Value ({}) for parameter availabilityZone is invalid. Subnets can currently only be created in the following availability zones: ".format(
subnet_availability_zone
)
)
@mock_ec2
def test_create_subnet_with_invalid_cidr_range():
ec2 = boto3.resource("ec2", region_name="us-west-1")
vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16")
vpc.reload()
vpc.is_default.shouldnt.be.ok
subnet_cidr_block = "10.1.0.0/20"
with pytest.raises(ClientError) as ex:
ec2.create_subnet(VpcId=vpc.id, CidrBlock=subnet_cidr_block)
str(ex.value).should.equal(
"An error occurred (InvalidSubnet.Range) when calling the CreateSubnet "
"operation: The CIDR '{}' is invalid.".format(subnet_cidr_block)
)
@mock_ec2
def test_create_subnet_with_invalid_cidr_range_multiple_vpc_cidr_blocks():
ec2 = boto3.resource("ec2", region_name="us-west-1")
vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16")
ec2.meta.client.associate_vpc_cidr_block(CidrBlock="10.1.0.0/16", VpcId=vpc.id)
vpc.reload()
vpc.is_default.shouldnt.be.ok
subnet_cidr_block = "10.2.0.0/20"
with pytest.raises(ClientError) as ex:
ec2.create_subnet(VpcId=vpc.id, CidrBlock=subnet_cidr_block)
str(ex.value).should.equal(
"An error occurred (InvalidSubnet.Range) when calling the CreateSubnet "
"operation: The CIDR '{}' is invalid.".format(subnet_cidr_block)
)
@mock_ec2
def test_create_subnet_with_invalid_cidr_block_parameter():
ec2 = boto3.resource("ec2", region_name="us-west-1")
vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16")
vpc.reload()
vpc.is_default.shouldnt.be.ok
subnet_cidr_block = "1000.1.0.0/20"
with pytest.raises(ClientError) as ex:
ec2.create_subnet(VpcId=vpc.id, CidrBlock=subnet_cidr_block)
str(ex.value).should.equal(
"An error occurred (InvalidParameterValue) when calling the CreateSubnet "
"operation: Value ({}) for parameter cidrBlock is invalid. This is not a valid CIDR block.".format(
subnet_cidr_block
)
)
@mock_ec2
def test_create_subnets_with_multiple_vpc_cidr_blocks():
ec2 = boto3.resource("ec2", region_name="us-west-1")
client = boto3.client("ec2", region_name="us-west-1")
vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16")
ec2.meta.client.associate_vpc_cidr_block(CidrBlock="10.1.0.0/16", VpcId=vpc.id)
vpc.reload()
vpc.is_default.shouldnt.be.ok
subnet_cidr_block_primary = "10.0.0.0/24"
subnet_primary = ec2.create_subnet(
VpcId=vpc.id, CidrBlock=subnet_cidr_block_primary
)
subnet_cidr_block_secondary = "10.1.0.0/24"
subnet_secondary = ec2.create_subnet(
VpcId=vpc.id, CidrBlock=subnet_cidr_block_secondary
)
subnets = client.describe_subnets(
SubnetIds=[subnet_primary.id, subnet_secondary.id]
)["Subnets"]
subnets.should.have.length_of(2)
for subnet in subnets:
subnet.should.have.key("AvailabilityZone")
subnet.should.have.key("AvailabilityZoneId")
subnet.should.have.key("AvailableIpAddressCount")
subnet.should.have.key("CidrBlock")
subnet.should.have.key("State")
subnet.should.have.key("SubnetId")
subnet.should.have.key("VpcId")
subnet.shouldnt.have.key("Tags")
subnet.should.have.key("DefaultForAz").which.should.equal(False)
subnet.should.have.key("MapPublicIpOnLaunch").which.should.equal(False)
subnet.should.have.key("OwnerId")
subnet.should.have.key("AssignIpv6AddressOnCreation").which.should.equal(False)
@mock_ec2
def test_create_subnets_with_overlapping_cidr_blocks():
ec2 = boto3.resource("ec2", region_name="us-west-1")
vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16")
vpc.reload()
vpc.is_default.shouldnt.be.ok
subnet_cidr_block = "10.0.0.0/24"
with pytest.raises(ClientError) as ex:
ec2.create_subnet(VpcId=vpc.id, CidrBlock=subnet_cidr_block)
ec2.create_subnet(VpcId=vpc.id, CidrBlock=subnet_cidr_block)
str(ex.value).should.equal(
"An error occurred (InvalidSubnet.Conflict) when calling the CreateSubnet "
"operation: The CIDR '{}' conflicts with another subnet".format(
subnet_cidr_block
)
)
@mock_ec2
def test_create_subnet_with_tags():
ec2 = boto3.resource("ec2", region_name="us-west-1")
vpc = ec2.create_vpc(CidrBlock="172.31.0.0/16")
random_ip = "172.31." + ".".join(
map(str, (random.randint(10, 40) for _ in range(2)))
)
random_cidr = f"{random_ip}/20"
subnet = ec2.create_subnet(
VpcId=vpc.id,
CidrBlock=random_cidr,
AvailabilityZoneId="use1-az6",
TagSpecifications=[
{"ResourceType": "subnet", "Tags": [{"Key": "name", "Value": "some-vpc"}]}
],
)
assert subnet.tags == [{"Key": "name", "Value": "some-vpc"}]
@mock_ec2
def test_available_ip_addresses_in_subnet():
if settings.TEST_SERVER_MODE:
raise SkipTest(
"ServerMode is not guaranteed to be empty - other subnets will affect the count"
)
ec2 = boto3.resource("ec2", region_name="us-west-1")
client = boto3.client("ec2", region_name="us-west-1")
vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16")
cidr_range_addresses = [
("10.0.0.0/16", 65531),
("10.0.0.0/17", 32763),
("10.0.0.0/18", 16379),
("10.0.0.0/19", 8187),
("10.0.0.0/20", 4091),
("10.0.0.0/21", 2043),
("10.0.0.0/22", 1019),
("10.0.0.0/23", 507),
("10.0.0.0/24", 251),
("10.0.0.0/25", 123),
("10.0.0.0/26", 59),
("10.0.0.0/27", 27),
("10.0.0.0/28", 11),
]
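    # AWS reserves the first four addresses and the last address of every subnet
    # CIDR block, so the expected counts above are 2**(32 - prefix_length) - 5.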
for (cidr, expected_count) in cidr_range_addresses:
validate_subnet_details(client, vpc, cidr, expected_count)
@mock_ec2
def test_available_ip_addresses_in_subnet_with_enis():
if settings.TEST_SERVER_MODE:
raise SkipTest(
"ServerMode is not guaranteed to be empty - other ENI's will affect the count"
)
ec2 = boto3.resource("ec2", region_name="us-west-1")
client = boto3.client("ec2", region_name="us-west-1")
vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16")
# Verify behaviour for various CIDR ranges (...)
# Don't try to assign ENIs to /27 and /28, as there are not a lot of IP addresses to go around
cidr_range_addresses = [
("10.0.0.0/16", 65531),
("10.0.0.0/17", 32763),
("10.0.0.0/18", 16379),
("10.0.0.0/19", 8187),
("10.0.0.0/20", 4091),
("10.0.0.0/21", 2043),
("10.0.0.0/22", 1019),
("10.0.0.0/23", 507),
("10.0.0.0/24", 251),
("10.0.0.0/25", 123),
("10.0.0.0/26", 59),
]
for (cidr, expected_count) in cidr_range_addresses:
validate_subnet_details_after_creating_eni(client, vpc, cidr, expected_count)
def validate_subnet_details(client, vpc, cidr, expected_ip_address_count):
subnet = client.create_subnet(
VpcId=vpc.id, CidrBlock=cidr, AvailabilityZone="us-west-1b"
)["Subnet"]
subnet["AvailableIpAddressCount"].should.equal(expected_ip_address_count)
client.delete_subnet(SubnetId=subnet["SubnetId"])
def validate_subnet_details_after_creating_eni(
client, vpc, cidr, expected_ip_address_count
):
subnet = client.create_subnet(
VpcId=vpc.id, CidrBlock=cidr, AvailabilityZone="us-west-1b"
)["Subnet"]
# Create a random number of Elastic Network Interfaces
nr_of_eni_to_create = random.randint(0, 5)
ip_addresses_assigned = 0
enis_created = []
for _ in range(0, nr_of_eni_to_create):
# Create a random number of IP addresses per ENI
nr_of_ip_addresses = random.randint(1, 5)
if nr_of_ip_addresses == 1:
# Pick the first available IP address (First 4 are reserved by AWS)
private_address = "10.0.0." + str(ip_addresses_assigned + 4)
eni = client.create_network_interface(
SubnetId=subnet["SubnetId"], PrivateIpAddress=private_address
)["NetworkInterface"]
enis_created.append(eni)
ip_addresses_assigned = ip_addresses_assigned + 1
else:
# Assign a list of IP addresses
private_addresses = [
"10.0.0." + str(4 + ip_addresses_assigned + i)
for i in range(0, nr_of_ip_addresses)
]
eni = client.create_network_interface(
SubnetId=subnet["SubnetId"],
PrivateIpAddresses=[
{"PrivateIpAddress": address} for address in private_addresses
],
)["NetworkInterface"]
enis_created.append(eni)
            ip_addresses_assigned = ip_addresses_assigned + nr_of_ip_addresses
# Verify that the nr of available IP addresses takes these ENIs into account
updated_subnet = client.describe_subnets(SubnetIds=[subnet["SubnetId"]])["Subnets"][
0
]
private_addresses = []
for eni in enis_created:
private_addresses.extend(
[address["PrivateIpAddress"] for address in eni["PrivateIpAddresses"]]
)
error_msg = (
"Nr of IP addresses for Subnet with CIDR {0} is incorrect. Expected: {1}, Actual: {2}. "
"Addresses: {3}"
)
with sure.ensure(
error_msg,
cidr,
str(expected_ip_address_count),
updated_subnet["AvailableIpAddressCount"],
str(private_addresses),
):
updated_subnet["AvailableIpAddressCount"].should.equal(
expected_ip_address_count - ip_addresses_assigned
)
# Clean up, as we have to create a few more subnets that shouldn't interfere with each other
for eni in enis_created:
client.delete_network_interface(NetworkInterfaceId=eni["NetworkInterfaceId"])
client.delete_subnet(SubnetId=subnet["SubnetId"])
@mock_ec2
def test_run_instances_should_attach_to_default_subnet():
# https://github.com/spulec/moto/issues/2877
ec2 = boto3.resource("ec2", region_name="sa-east-1")
client = boto3.client("ec2", region_name="sa-east-1")
sec_group_name = str(uuid4())[0:6]
ec2.create_security_group(
GroupName=sec_group_name, Description="Test security group sg01"
)
# run_instances
instances = client.run_instances(
ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1, SecurityGroups=[sec_group_name],
)
# Assert subnet is created appropriately
subnets = client.describe_subnets(
Filters=[{"Name": "defaultForAz", "Values": ["true"]}]
)["Subnets"]
default_subnet_id = subnets[0]["SubnetId"]
    # Fall back to the first default subnet so the assertion below is well defined
    # even when the region exposes only one default subnet.
    default_subnet_id1 = default_subnet_id
    if len(subnets) > 1:
        default_subnet_id1 = subnets[1]["SubnetId"]
assert (
instances["Instances"][0]["NetworkInterfaces"][0]["SubnetId"]
== default_subnet_id
or instances["Instances"][0]["NetworkInterfaces"][0]["SubnetId"]
== default_subnet_id1
)
if not settings.TEST_SERVER_MODE:
# Available IP addresses will depend on other resources that might be created in parallel
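        # A default /20 subnet starts with 4091 available addresses (4096 minus the
        # 5 AWS reserves); the instance launched above consumes one more, leaving 4090.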
        assert any(
            subnet["AvailableIpAddressCount"] == 4090 for subnet in subnets
        )
@mock_ec2
def test_describe_subnets_by_vpc_id():
ec2 = boto3.resource("ec2", region_name="us-west-1")
client = boto3.client("ec2", region_name="us-west-1")
vpc1 = ec2.create_vpc(CidrBlock="10.0.0.0/16")
subnet1 = ec2.create_subnet(
VpcId=vpc1.id, CidrBlock="10.0.0.0/24", AvailabilityZone="us-west-1a"
)
vpc2 = ec2.create_vpc(CidrBlock="172.31.0.0/16")
subnet2 = ec2.create_subnet(
VpcId=vpc2.id, CidrBlock="172.31.48.0/20", AvailabilityZone="us-west-1b"
)
subnets = client.describe_subnets(
Filters=[{"Name": "vpc-id", "Values": [vpc1.id]}]
).get("Subnets", [])
subnets.should.have.length_of(1)
subnets[0]["SubnetId"].should.equal(subnet1.id)
subnets = client.describe_subnets(
Filters=[{"Name": "vpc-id", "Values": [vpc2.id]}]
).get("Subnets", [])
subnets.should.have.length_of(1)
subnets[0]["SubnetId"].should.equal(subnet2.id)
# Specify multiple VPCs in Filter.
subnets = client.describe_subnets(
Filters=[{"Name": "vpc-id", "Values": [vpc1.id, vpc2.id]}]
).get("Subnets", [])
subnets.should.have.length_of(2)
# Specify mismatched SubnetIds/Filters.
subnets = client.describe_subnets(
SubnetIds=[subnet1.id], Filters=[{"Name": "vpc-id", "Values": [vpc2.id]}]
).get("Subnets", [])
subnets.should.have.length_of(0)
@mock_ec2
def test_describe_subnets_by_state():
ec2 = boto3.resource("ec2", region_name="us-west-1")
client = boto3.client("ec2", region_name="us-west-1")
vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16")
ec2.create_subnet(
VpcId=vpc.id, CidrBlock="10.0.0.0/24", AvailabilityZone="us-west-1a"
)
subnets = client.describe_subnets(
Filters=[{"Name": "state", "Values": ["available"]}]
).get("Subnets", [])
for subnet in subnets:
subnet["State"].should.equal("available")
@mock_ec2
def test_associate_subnet_cidr_block():
ec2 = boto3.resource("ec2", region_name="us-west-1")
client = boto3.client("ec2", region_name="us-west-1")
vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16")
subnet_object = ec2.create_subnet(
VpcId=vpc.id, CidrBlock="10.0.0.0/24", AvailabilityZone="us-west-1a"
)
subnets = client.describe_subnets(SubnetIds=[subnet_object.id])["Subnets"]
association_set = subnets[0]["Ipv6CidrBlockAssociationSet"]
association_set.should.equal([])
res = client.associate_subnet_cidr_block(
Ipv6CidrBlock="1080::1:200C:417A/112", SubnetId=subnet_object.id
)
res.should.have.key("Ipv6CidrBlockAssociation")
association = res["Ipv6CidrBlockAssociation"]
association.should.have.key("AssociationId").match("subnet-cidr-assoc-[a-z0-9]+")
association.should.have.key("Ipv6CidrBlock").equals("1080::1:200C:417A/112")
association.should.have.key("Ipv6CidrBlockState").equals({"State": "associated"})
subnets = client.describe_subnets(SubnetIds=[subnet_object.id])["Subnets"]
association_set = subnets[0]["Ipv6CidrBlockAssociationSet"]
association_set.should.have.length_of(1)
association_set[0].should.have.key("AssociationId").equal(
association["AssociationId"]
)
association_set[0].should.have.key("Ipv6CidrBlock").equals("1080::1:200C:417A/112")
@mock_ec2
def test_disassociate_subnet_cidr_block():
ec2 = boto3.resource("ec2", region_name="us-west-1")
client = boto3.client("ec2", region_name="us-west-1")
vpc = ec2.create_vpc(CidrBlock="10.0.0.0/16")
subnet_object = ec2.create_subnet(
VpcId=vpc.id, CidrBlock="10.0.0.0/24", AvailabilityZone="us-west-1a"
)
client.associate_subnet_cidr_block(
Ipv6CidrBlock="1080::1:200C:417A/111", SubnetId=subnet_object.id
)
association_id = client.associate_subnet_cidr_block(
Ipv6CidrBlock="1080::1:200C:417A/999", SubnetId=subnet_object.id
)["Ipv6CidrBlockAssociation"]["AssociationId"]
subnets = client.describe_subnets(SubnetIds=[subnet_object.id])["Subnets"]
association_set = subnets[0]["Ipv6CidrBlockAssociationSet"]
association_set.should.have.length_of(2)
client.disassociate_subnet_cidr_block(AssociationId=association_id)
subnets = client.describe_subnets(SubnetIds=[subnet_object.id])["Subnets"]
association_set = subnets[0]["Ipv6CidrBlockAssociationSet"]
association_set.should.have.length_of(1)
association_set[0]["Ipv6CidrBlock"].should.equal("1080::1:200C:417A/111")
@mock_ec2
def test_describe_subnets_dryrun():
client = boto3.client("ec2", region_name="us-east-1")
with pytest.raises(ClientError) as ex:
client.describe_subnets(DryRun=True)
ex.value.response["ResponseMetadata"]["HTTPStatusCode"].should.equal(412)
ex.value.response["Error"]["Code"].should.equal("DryRunOperation")
ex.value.response["Error"]["Message"].should.equal(
"An error occurred (DryRunOperation) when calling the DescribeSubnets operation: Request would have succeeded, but DryRun flag is set"
)
|
|
# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import logging
import threading
from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import Any, Callable, Iterable, Sequence, Tuple
from pants.base.specs import Specs
from pants.engine.addresses import Addresses
from pants.engine.fs import Digest, DigestContents, Snapshot
from pants.engine.internals import native_engine
from pants.engine.internals.scheduler import SchedulerSession, Workunit
from pants.engine.internals.selectors import Params
from pants.engine.rules import Get, MultiGet, QueryRule, collect_rules, rule
from pants.engine.target import Targets
from pants.engine.unions import UnionMembership, union
from pants.goal.run_tracker import RunTracker
from pants.option.options_bootstrapper import OptionsBootstrapper
from pants.util.logging import LogLevel
logger = logging.getLogger(__name__)
# -----------------------------------------------------------------------------------------------
# Streaming workunits plugin API
# -----------------------------------------------------------------------------------------------
@dataclass(frozen=True)
class TargetInfo:
filename: str
@dataclass(frozen=True)
class ExpandedSpecs:
targets: dict[str, list[TargetInfo]]
@dataclass(frozen=True)
class StreamingWorkunitContext:
_scheduler: SchedulerSession
_run_tracker: RunTracker
_specs: Specs
_options_bootstrapper: OptionsBootstrapper
@property
def run_tracker(self) -> RunTracker:
"""Returns the RunTracker for the current run of Pants."""
return self._run_tracker
def single_file_digests_to_bytes(self, digests: Sequence[Digest]) -> tuple[bytes, ...]:
"""Given a list of Digest objects, each representing the contents of a single file, return a
list of the bytes corresponding to each of those Digests in sequence."""
return self._scheduler.single_file_digests_to_bytes(digests)
def snapshots_to_file_contents(
self, snapshots: Sequence[Snapshot]
) -> Tuple[DigestContents, ...]:
"""Given a sequence of Snapshot objects, return a tuple of DigestContents representing the
files contained in those `Snapshot`s in sequence."""
return self._scheduler.snapshots_to_file_contents(snapshots)
def ensure_remote_has_recursive(self, digests: Sequence[Digest]) -> None:
"""Invoke the internal ensure_remote_has_recursive function, which ensures that a remote
ByteStore, if it exists, has a copy of the files fingerprinted by each Digest."""
return self._scheduler.ensure_remote_has_recursive(digests)
def get_observation_histograms(self):
"""Invoke the internal get_observation_histograms function, which serializes histograms
generated from Pants-internal observation metrics observed during the current run of Pants.
These metrics are useful for debugging Pants internals.
"""
return self._scheduler.get_observation_histograms()
def get_expanded_specs(self) -> ExpandedSpecs:
"""Return a dict containing the canonicalized addresses of the specs for this run, and what
files they expand to."""
(unexpanded_addresses,) = self._scheduler.product_request(
Addresses, [Params(self._specs, self._options_bootstrapper)]
)
expanded_targets = self._scheduler.product_request(
Targets, [Params(Addresses([addr])) for addr in unexpanded_addresses]
)
targets_dict: dict[str, list[TargetInfo]] = {}
for addr, targets in zip(unexpanded_addresses, expanded_targets):
targets_dict[addr.spec] = [
TargetInfo(
filename=(
tgt.address.filename if tgt.address.is_file_target else str(tgt.address)
)
)
for tgt in targets
]
return ExpandedSpecs(targets=targets_dict)
class WorkunitsCallback(ABC):
@abstractmethod
def __call__(
self,
*,
started_workunits: tuple[Workunit, ...],
completed_workunits: tuple[Workunit, ...],
finished: bool,
context: StreamingWorkunitContext,
) -> None:
"""
:started_workunits: Workunits that have started but not completed.
:completed_workunits: Workunits that have completed.
:finished: True when the last chunk of workunit data is reported to the callback.
:context: A context providing access to functionality relevant to the run.
"""
@property
def can_finish_async(self) -> bool:
"""Can this callback finish its work in the background after the Pants run has already
completed?
The main reason to `return False` is if your callback logs in its final call, when
`finished=True`, as it may end up logging to `.pantsd.d/pants.log` instead of the console,
which is harder for users to find. Otherwise, most callbacks should return `True` to avoid
slowing down Pants from finishing the run.
"""
return False
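# A minimal illustrative callback (the name and behaviour are examples, not part
# of the plugin API): it counts completed workunits and logs the total when the
# final chunk of workunit data is reported.
class _ExampleWorkunitsCallback(WorkunitsCallback):
    def __init__(self) -> None:
        self._completed = 0
    @property
    def can_finish_async(self) -> bool:
        # This callback logs in its final call, so it opts out of finishing in the
        # background (see the guidance on `can_finish_async` above).
        return False
    def __call__(
        self,
        *,
        started_workunits: tuple[Workunit, ...],
        completed_workunits: tuple[Workunit, ...],
        finished: bool,
        context: StreamingWorkunitContext,
    ) -> None:
        self._completed += len(completed_workunits)
        if finished:
            logger.info("Observed %d completed workunits.", self._completed)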
@dataclass(frozen=True)
class WorkunitsCallbackFactory:
"""A wrapper around a callable that constructs WorkunitsCallbacks.
NB: This extra wrapping is because subtyping is not supported in the return position of a
rule. See #11354 for discussion of that limitation.
"""
callback_factory: Callable[[], WorkunitsCallback]
class WorkunitsCallbackFactories(Tuple[WorkunitsCallbackFactory, ...]):
"""A list of registered factories for WorkunitsCallback instances."""
@union
class WorkunitsCallbackFactoryRequest:
"""A request for a particular WorkunitsCallbackFactory."""
@rule
async def construct_workunits_callback_factories(
union_membership: UnionMembership,
) -> WorkunitsCallbackFactories:
request_types = union_membership.get(WorkunitsCallbackFactoryRequest)
workunit_callback_factories = await MultiGet(
Get(WorkunitsCallbackFactory, WorkunitsCallbackFactoryRequest, request_type())
for request_type in request_types
)
return WorkunitsCallbackFactories(workunit_callback_factories)
# -----------------------------------------------------------------------------------------------
# Streaming workunits handler
# -----------------------------------------------------------------------------------------------
class StreamingWorkunitHandler:
"""Periodically calls each registered WorkunitsCallback in a dedicated thread.
This class should be used as a context manager.
"""
def __init__(
self,
scheduler: SchedulerSession,
run_tracker: RunTracker,
callbacks: Iterable[WorkunitsCallback],
options_bootstrapper: OptionsBootstrapper,
specs: Specs,
report_interval_seconds: float,
pantsd: bool,
max_workunit_verbosity: LogLevel = LogLevel.TRACE,
) -> None:
self.callbacks = callbacks
self.context = StreamingWorkunitContext(
_scheduler=scheduler,
_run_tracker=run_tracker,
_specs=specs,
_options_bootstrapper=options_bootstrapper,
)
self.thread_runner = (
_InnerHandler(
scheduler=scheduler,
context=self.context,
callbacks=self.callbacks,
report_interval=report_interval_seconds,
# TODO(10092) The max verbosity should be a per-client setting, rather than a global
# setting.
max_workunit_verbosity=max_workunit_verbosity,
pantsd=pantsd,
)
if callbacks
else None
)
def __enter__(self) -> None:
if not self.thread_runner:
return
self.thread_runner.start()
def __exit__(self, exc_type, exc_value, traceback) -> None:
if not self.thread_runner:
return
self.thread_runner.end()
if exc_type is not None:
self.thread_runner.join()
class _InnerHandler(threading.Thread):
def __init__(
self,
scheduler: Any,
context: StreamingWorkunitContext,
callbacks: Iterable[WorkunitsCallback],
report_interval: float,
max_workunit_verbosity: LogLevel,
pantsd: bool,
) -> None:
super().__init__(daemon=True)
self.scheduler = scheduler
self.context = context
self.stop_request = threading.Event()
self.report_interval = report_interval
self.callbacks = callbacks
self.max_workunit_verbosity = max_workunit_verbosity
# TODO: Have a thread per callback so that some callbacks can always finish async even
# if others must be finished synchronously.
self.block_until_complete = not pantsd or any(
callback.can_finish_async is False for callback in self.callbacks
)
# Get the parent thread's logging destination. Note that this thread has not yet started
# as we are only in the constructor.
self.logging_destination = native_engine.stdio_thread_get_destination()
def poll_workunits(self, *, finished: bool) -> None:
workunits = self.scheduler.poll_workunits(self.max_workunit_verbosity)
for callback in self.callbacks:
callback(
started_workunits=workunits["started"],
completed_workunits=workunits["completed"],
finished=finished,
context=self.context,
)
def run(self) -> None:
# First, set the thread's logging destination to the parent thread's, meaning the console.
native_engine.stdio_thread_set_destination(self.logging_destination)
while not self.stop_request.isSet(): # type: ignore[attr-defined]
self.poll_workunits(finished=False)
self.stop_request.wait(timeout=self.report_interval)
else:
# Make one final call. Note that this may run after the Pants run has already
# completed, depending on whether the thread was joined or not.
self.poll_workunits(finished=True)
def end(self) -> None:
self.stop_request.set()
if self.block_until_complete:
super().join()
def rules():
return [
QueryRule(WorkunitsCallbackFactories, (UnionMembership,)),
QueryRule(Targets, (Addresses,)),
QueryRule(Addresses, (Specs, OptionsBootstrapper)),
*collect_rules(),
]
|
|
#!/usr/bin/env python
# encoding: utf-8
# File : Config.py
# Author : Ben Wu
# Contact : benwu@fnal.gov
# Date : 2015 Jul 28
#
# Description :
import ROOT
#============================================================================#
#-------------------------- Drawing Config --------------------------#
#============================================================================#
PyColors = [
ROOT.kBlack,
ROOT.kRed,
ROOT.kBlue,
ROOT.kGreen-2,
ROOT.kOrange,
ROOT.kCyan,
ROOT.kMagenta,
ROOT.kYellow,
ROOT.kGreen,
ROOT.kGray,
ROOT.kSpring,
ROOT.kTeal,
ROOT.kAzure,
ROOT.kViolet,
ROOT.kPink
]
# (1001, 228,26,28),
# (1002, 55,126,184),
# (1003, 77,175,74),
# (1004, 152,78,163),
# (1005, 255,127,0),
# (1006, 255,255,51),
# (1007, 166,86,40),
# (1008, 247,129,191),
# (1009, 153,153,153)
PyOutPut = ["png", "root"]
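# Integrated luminosity used for event-weight scaling; presumably 3 fb^-1 expressed in pb^-1.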
Lumi = 3 * 1000
#============================================================================#
#--------------------------- Processes Map --------------------------#
#============================================================================#
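# Each entry maps a process name to the input ROOT files ("file"), a legend
# label ("label"), whether it is drawn as "Signal" or "Background" ("type"),
# and the ROOT line/marker/fill styles used when plotting it.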
Prodmap = {
"T2tt_425_325" : { "file" : ["Signal_T2tt_mStop425_mLSP325.root"],
"label" : "T2tt(425, 325)",
"type" : "Signal",
"linecolor" : ROOT.kRed-2,
"linestyle" : 2,
"markercolor" : ROOT.kRed-2,
"markerstyle" : 1,
"fillcolor" : ROOT.kRed-2,
"fillstyle" : 1001,
},
"T2tt_500_325" : { "file" : ["Signal_T2tt_mStop500_mLSP325.root"],
"label" : "T2tt(500, 325)",
"type" : "Signal",
"linecolor" : ROOT.kRed,
"linestyle" : 1,
"markercolor" : ROOT.kRed,
"markerstyle" : 1,
"fillcolor" : ROOT.kRed,
"fillstyle" : 1001,
},
"T2tt_650_325" : { "file" : ["Signal_T2tt_mStop650_mLSP325.root"],
"label" : "T2tt(650, 325)",
"type" : "Signal",
"linecolor" : ROOT.kRed+2,
"linestyle" : 1,
"markercolor" : ROOT.kRed+2,
"markerstyle" : 1,
"fillcolor" : ROOT.kRed+2,
"fillstyle" : 1001,
},
"T2tt_850_100" : { "file" : ["Signal_T2tt_mStop850_mLSP100.root"],
"type" : "Signal",
"label" : "T2tt(850, 100)",
"linecolor" : ROOT.kRed+4,
"linestyle" : 1,
"markercolor" : ROOT.kRed+4,
"markerstyle" : 1,
"fillcolor" : ROOT.kRed+4,
"fillstyle" : 1001,
},
"T2bb_600_580" : { "file" : ["Signal_T2bb_mSbottom600_mLSP580.root"],
"type" : "Signal",
"label" : "T2bb(600, 580)",
"linecolor" : ROOT.kRed,
"linestyle" : 1,
"markercolor" : ROOT.kRed,
"markerstyle" : 1,
"fillcolor" : ROOT.kRed,
"fillstyle" : 1001,
},
"T2bb_900_100" : { "file" : ["Signal_T2bb_mSbottom900_mLSP100.root"],
"type" : "Signal",
"label" : "T2bb(900, 100)",
"linecolor" : ROOT.kRed+4,
"linestyle" : 1,
"markercolor" : ROOT.kRed+4,
"markerstyle" : 1,
"fillcolor" : ROOT.kRed+4,
"fillstyle" : 1001,
},
"TTbarDiLep" : { "file" : ["TTbarDiLep.root"],
"type" : "Background",
"label" : "t#bar{t}(ll)",
"linecolor" : ROOT.kGreen,
"linestyle" : 1,
"markercolor" : ROOT.kGreen,
"markerstyle" : 1,
"fillcolor" : ROOT.kGreen,
"fillstyle" : 1001,
},
"TTbar" : { "file" : ["TTbarInc.root"],
"type" : "Background",
"label" : "t#bar{t}",
"linecolor" : ROOT.kGreen,
"linestyle" : 1,
"markercolor" : ROOT.kGreen,
"markerstyle" : 1,
"fillcolor" : ROOT.kGreen,
"fillstyle" : 1001,
},
"WJetsToLNu" : { "file" : [
"WJetsToLNu_HT_100to200.root",
"WJetsToLNu_HT_200to400.root",
"WJetsToLNu_HT_400to600.root",
"WJetsToLNu_HT_600to800.root",
"WJetsToLNu_HT_800to1200.root",
"WJetsToLNu_HT_1200to2500.root",
# "WJetsToLNu_HT_2500toInf.root",
# "WJetsToLNu_HT_600toInf.root",
],
"label" : "W(l#nu)",
"type" : "Background",
"linecolor" : ROOT.kMagenta+1,
"linestyle" : 1,
"markercolor" : ROOT.kMagenta+1,
"markerstyle" : 1,
"fillcolor" : ROOT.kMagenta+1,
"fillstyle" : 1001,
},
"DYJetsToLL" : { "file" : [
"DYJetsToLL_HT_100to200.root",
"DYJetsToLL_HT_200to400.root",
"DYJetsToLL_HT_400to600.root",
"DYJetsToLL_HT_600toInf.root"],
"label" : "DY(ll)",
"type" : "Background",
"linecolor" : ROOT.kRed,
# "linecolor" : ROOT.kYellow-7,
"linestyle" : 1,
"markercolor" : ROOT.kYellow-7,
"markerstyle" : 1,
"fillcolor" : ROOT.kYellow-7,
"fillstyle" : 1001,
},
"ZJetsToNuNu" : { "file" : [ "ZJetsToNuNu_HT_100to200.root", "ZJetsToNuNu_HT_200to400.root",
"ZJetsToNuNu_HT_400to600.root", "ZJetsToNuNu_HT_600toInf.root" ],
"label" : "Z(#nu#nu)",
"type" : "Background",
"linecolor" : ROOT.kTeal+4,
"linestyle" : 1,
"markercolor" : ROOT.kTeal+4,
"markerstyle" : 1,
"fillcolor" : ROOT.kTeal+4,
"fillstyle" : 1001,
},
"QCD" : { "file" : ["QCD_HT1000to1500.root", "QCD_HT1500to2000.root", "QCD_HT2000toInf.root",
"QCD_HT200to300.root", "QCD_HT300to500.root",
"QCD_HT500to700.root", "QCD_HT700to1000.root"],
"label" : "QCD",
"type" : "Background",
"linecolor" : ROOT.kBlue,
"linestyle" : 1,
"markercolor" : ROOT.kBlue,
"markerstyle" : 1,
"fillcolor" : ROOT.kBlue,
"fillstyle" : 1001,
},
"TTZ" : { "file" : [ "TTZToLLNuNu.root", "TTZToQQ.root"],
"label" : "t#bar{t}Z",
# "type" : "Signal",
"type" : "Background",
"linecolor" : ROOT.kOrange+2,
"linestyle" : 1,
"markercolor" : ROOT.kOrange+2,
"markerstyle" : 24,
"fillcolor" : ROOT.kOrange+2,
"fillstyle" : 1001,
},
"TTW" : { "file" : [ "TTWJetsToQQ.root", "TTWJetsToLNu.root"],
"label" : "t#bar{t}W",
"type" : "Background",
"linecolor" : ROOT.kSpring+2,
"linestyle" : 1,
"markercolor" : ROOT.kSpring+2,
"markerstyle" : 1,
"fillcolor" : ROOT.kSpring +2,
"fillstyle" : 1001,
},
"TW" : { "file" : [ "tW_antitop.root", "tW_top.root"],
"label" : "tW",
"type" : "Background",
"linecolor" : ROOT.kYellow,
"linestyle" : 1,
"markercolor" : ROOT.kYellow,
"markerstyle" : 1,
"fillcolor" : ROOT.kYellow,
"fillstyle" : 1001,
},
}
#============================================================================#
#-------------------------- User Functions --------------------------#
#============================================================================#
def PrintHist(hist):
    # ROOT bins run from 1 to GetNbinsX() inclusive (0 and N+1 are the under/overflow bins).
    for bins in range(1, hist.GetNbinsX() + 1):
        print(bins, hist.GetBinCenter(bins), hist.GetBinContent(bins))
#============================================================================#
#----------------------- Histogram properties -----------------------#
#============================================================================#
HIST_Proper = {
"MissTopTagPT": {
"Rebinx": 5,
"HistLabel": "----",
"Logy": False,
"Logx": False,
"YScale": 1.2,
"YMax": 0,
# "DrawOpt": "P text",
# "legloc" : "None",
# "HistFun": PrintHist
}
}
DIR_Proper = {
"Top_G3Top30": {"DirLabel": "TT"}
}
|
|
from nose.tools import *
from mongoalchemy.document import Document, Index, DocumentField, MissingValueException, DocumentException, DictDoc, document_type_registry
from mongoalchemy.fields import *
from test.util import known_failure, get_session
# Document Types used in some tests
class TestDoc(Document):
int1 = IntField()
def __repr__(self):
return 'TestDoc(int1=%d)' % self.int1
# Document Types used in some tests
class TestDoc2(Document):
sfield = StringField()
def __repr__(self):
        return 'TestDoc2(sfield=%s)' % self.sfield
class T(Document, DictDoc):
i = IntField()
j = IntField(required=False)
s = StringField(required=False)
l = ListField(IntField(), required=False)
a = IntField(required=False, db_field='aa')
index = Index().ascending('i')
class DocA(Document):
test_doc = DocumentField(TestDoc, required=False)
test_doc2 = DocumentField(TestDoc2, required=False)
def __eq__(self, other):
if self.__class__ != other.__class__:
return False
return self.test_doc.int1 == other.test_doc.int1
def __repr__(self):
return 'DocA()'
# Tests
def test_setup():
document_type_registry.clear()
def test_basic():
class Doc(Document):
count = IntField()
s = get_session()
d = Doc(count=0)
s.insert(d)
assert d.mongo_id
def test_basic2():
class Doc(Document):
config_collection_name = 'DocCol'
count = IntField()
assert Doc.class_name() == 'Doc', Doc.class_name()
assert Doc.get_collection_name() == 'DocCol'
def test_update_ops():
td = TestDoc(int1=1)
doca = DocA(test_doc=td)
assert doca.get_dirty_ops() == {
'$set' : { 'test_doc.int1' : 1 }
}, doca.get_dirty_ops()
class DocB(Document):
a = DocumentField(DocA)
b = IntField()
assert DocB(a=DocA()).get_dirty_ops() == {}
def test_delete_field():
class Doc(Document):
a = IntField()
b = IntField()
d = Doc()
d.a = 5
assert d.a == 5
del d.a
try:
b = d.a
assert False, 'delete attribute a failed'
except AttributeError:
pass
try:
del d.b
assert False, 'delete attribute b failed'
except AttributeError:
pass
@raises(DocumentException)
def bad_extra_fields_param_test():
class BadDoc(Document):
config_extra_fields = 'blah'
def extra_fields_test():
class BadDoc(Document):
config_extra_fields = 'ignore'
doc_with_extra = {'foo' : [1]}
unwrapped = BadDoc.unwrap(doc_with_extra)
assert unwrapped.get_extra_fields() == doc_with_extra
assert BadDoc.wrap(unwrapped) == doc_with_extra
@raises(MissingValueException)
def test_required_fields():
class Doc(Document):
i = IntField()
Doc().wrap()
@raises(AttributeError)
def test_missing_fields():
class Doc(Document):
i = IntField(required=False)
Doc().i
def test_non_existant_field():
class Doc(Document):
i = IntField(required=False)
Doc().j = 5
def test_default_value():
class Doc(Document):
i = IntField(required=False, default=1)
assert Doc().i == 1
@raises(Exception)
def bad_field_test():
s = get_session()
s.clear_collection(TestDoc)
t = TestDoc(int1=1, str4='sdasa')
def loading_test():
s = get_session()
s.clear_collection(TestDoc)
t = TestDoc(int1=123431)
s.insert(t)
for td in s.query(TestDoc):
break
assert td.int1 == t.int1
def docfield_test():
class SuperDoc(Document):
int1 = IntField()
sub = DocumentField(TestDoc)
s = get_session()
s.clear_collection(TestDoc, SuperDoc)
doc = TestDoc(int1=3)
sup = SuperDoc(int1=4, sub=doc)
s.insert(sup)
for sd in s.query(SuperDoc):
break
assert sd.int1 == sup.int1
assert sd.sub.int1 == doc.int1
def test_non_ma_property_attribute_error():
    ''' At the time of writing, it was possible for a normal class field to be
treated as a MA one. If the instance's field raised an attribute
error we would try to access the "required" attribute of the class
level field. This attribute only exists on MA Field instances,
though '''
class Doc(Document):
i = IntField()
@property
def foo(self):
raise AttributeError()
x = Doc(i=2)
assert Doc.unwrap(x.wrap()).i == x.i
def test_doc_field_with_alternate_name():
class Doc(Document):
i = IntField(db_field='ii')
def __eq__(self, other):
return self.i == other.i
d = Doc(i=3)
wrapped = d.wrap()
assert wrapped == {'ii' : 3}
assert d == Doc.unwrap({'ii' : 3})
def test_doc_field():
sd = TestDoc(int1=0)
doca = DocA(test_doc=sd)
wrapped = doca.wrap()
unwrapped = DocA.unwrap(wrapped)
assert unwrapped == doca
@raises(BadValueException)
def wrong_wrap_type_test():
doc1 = TestDoc(int1=0)
doc2 = TestDoc2(sfield='a')
doca = DocA(test_doc=doc2)
doca.wrap()
@raises(BadValueException)
def wrong_wrap_type_test2():
doc2 = TestDoc2(sfield=1) # this is an invalid value
doca = DocA(test_doc2=doc2)
doca.wrap()
def is_valid_unwrap_test_true():
assert DocA.test_doc.is_valid_unwrap({ 'int1' : 1 }) == True
def is_valid_unwrap_test_false():
assert DocA.test_doc2.is_valid_unwrap({ 'int1' : 1 }) == False
@raises(BadValueException)
def wrong_unwrap_type_test():
DocA.unwrap({ 'test_doc2' : { 'int1' : 1 } })
@raises(MissingValueException)
def test_upsert_with_required():
class D(Document):
a = IntField()
c = IntField()
b = IntField(required=False)
s = get_session()
s.clear_collection(D)
s.update(D(b=4, c=4), id_expression=D.b == 4, upsert=True)
def test_upsert_with_no_changes():
class D(Document):
a = IntField()
c = IntField()
b = IntField(required=False)
b_index = Index().ascending('b')
s = get_session(safe=True)
s.clear_collection(D)
# D.b_index.ensure(s.db[D.get_collection_name()])
s.update(D(a=1, b=4, c=4), id_expression=D.b == 4, upsert=True)
d = s.query(D).one()
s.update(d, upsert=True)
def test_unwrapped_is_not_dirty():
class D(Document):
a = IntField()
s = get_session()
s.clear_collection(D)
s.insert(D(a=1))
d = s.query(D).one()
assert len(d.get_dirty_ops()) == 0, len(d.get_dirty_ops())
def test_update_with_unset():
class D(Document, DictDoc):
a = IntField()
c = IntField()
b = IntField(required=False)
b_index = Index().ascending('b')
s = get_session(safe=True)
s.clear_collection(D)
# D.b_index.ensure(s.db[D.get_collection_name()])
s.update(D(a=1, b=4, c=4), id_expression=D.b == 4, upsert=True)
d = s.query(D).one()
del d.c
s.update(d)
d = s.query(D).one()
assert 'c' not in d
@with_setup(test_setup)
def test_self_reference():
class D(Document):
d = DocumentField('D', required=False)
a = IntField()
d = D(d=D(a=5), a=4)
assert d.wrap() == { 'd' : { 'a' : 5 }, 'a' : 4 }
s = get_session()
s.clear_collection(D)
s.insert(d)
d_from_db = s.query(D).one()
assert d_from_db.d.a == d.d.a
@with_setup(test_setup)
def test_config_full_name():
class E(Document):
d = DocumentField('ma.D')
class D(Document):
config_full_name = 'ma.D'
a = IntField()
e = E(d=D(a=4))
assert e.d.a == 4
@with_setup(test_setup)
def test_config_in_list():
class E(Document):
d = ListField(ListField(DocumentField('ma.D')))
class D(Document):
config_full_name = 'ma.D'
a = IntField()
e = E(d=[[D(a=4)]])
assert e.wrap() == { 'd' : [[{'a':4}]] }
@with_setup(test_setup)
@raises(BadFieldSpecification)
def test_bad_string_doc():
class D(Document):
e = DocumentField('E')
D(e=5).wrap()
@with_setup(test_setup)
@raises(BadFieldSpecification)
def test_namespaces_disabled():
class D(Document):
config_namespace = None
e = DocumentField('E')
D(e=5).wrap()
@with_setup(test_setup)
def test_set_parent_on_subtypes():
class D(Document):
a = IntField()
class ParentDoc(Document):
t = TupleField(DocumentField('D'))
e = EnumField(DocumentField('D'), D(a=1))
d = DictField(DocumentField('D'))
# test DictDoc
def test_dictdoc_contains():
t = T(i=1, retrieved_fields=[T.i, T.j])
assert 'i' in t
assert 'j' not in t
assert 's' not in t
assert 'noexist' not in t
assert t['i'] == 1
def test_dictdoc_set():
t = T(i=1, retrieved_fields=[T.i, T.j])
assert 'i' in t
t['i'] = 4
assert t.i == 4
def test_dictdoc_setdefault():
t = T(i=1, retrieved_fields=[T.i, T.j])
assert t.setdefault('i', 4) == 1
assert t.setdefault('j', 3) == 3
def test_set_dict_field():
class TestDict(Document):
data = DictField(AnythingField())
s = get_session()
s.clear_collection(TestDict)
td = TestDict()
td.data = {"foo": "bar", "baz": "qux"}
s.insert(td)
td = s.query(TestDict).one()
td.data = {"foo": "bar"}
s.insert(td)
td = s.query(TestDict).one()
assert td.data == {'foo':'bar'}, td.data
@raises(TypeError)
def test_multiple_id_fields():
class TestMultipleId(Document):
a = IntField(db_field='_id')
b = IntField(db_field='_id')
|
|
# Copyright 2014 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains a class to encapsulate ADB messages.
See the following in the Android source for more details:
https://android.googlesource.com/platform/system/core/+/master/adb/protocol.txt
The ADB transport layer deals in "messages", which consist of a 24 byte header
followed (optionally) by a payload. The header consists of 6 32 bit words which
are sent across the wire in little endian format:
struct message {
unsigned command; /* command identifier constant */
unsigned arg0; /* first argument */
unsigned arg1; /* second argument */
unsigned data_length; /* length of payload (0 is allowed) */
unsigned data_crc32; /* crc32 of data payload */
unsigned magic; /* command ^ 0xffffffff */
};
Receipt of an invalid message header, corrupt message payload, or an
unrecognized command MUST result in the closing of the remote connection. The
protocol depends on shared state and any break in the message stream will
result in state getting out of sync.
This class does not keep any of this state, but rather represents a single
message entity. See adb_protocol.py for the stateful components.
"""
# pytype: skip-file
import collections
import logging
import string
import struct
import threading
from openhtf.plugs.usb import usb_exceptions
from openhtf.util import timeouts
import six
_LOG = logging.getLogger(__name__)
def make_wire_commands(*ids):
"""Assemble the commands."""
cmd_to_wire = {
cmd: sum(ord(c) << (i * 8) for i, c in enumerate(cmd)) for cmd in ids
}
wire_to_cmd = {wire: cmd for cmd, wire in six.iteritems(cmd_to_wire)}
return cmd_to_wire, wire_to_cmd
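# For example, 'CNXN' encodes little-endian to 0x4E584E43 under this scheme, the
# wire value the ADB protocol documentation lists for the CNXN command.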
class RawAdbMessage(
collections.namedtuple(
'RawAdbMessage',
['cmd', 'arg0', 'arg1', 'data_length', 'data_checksum', 'magic'])):
"""Helper class for handling the struct -> AdbMessage mapping."""
def to_adb_message(self, data):
"""Turn the data into an ADB message."""
message = AdbMessage(
AdbMessage.WIRE_TO_CMD.get(self.cmd), self.arg0, self.arg1, data)
if (len(data) != self.data_length or
message.data_crc32 != self.data_checksum):
raise usb_exceptions.AdbDataIntegrityError(
'%s (%s) received invalid data: %s' % (message, self, repr(data)))
return message
class AdbTransportAdapter(object):
"""Transport adapter for reading/writing AdbMessages to another transport.
This class handles the over-the-wire sending/receiving of AdbMessages, and
has a utility method for ignoring messages we don't care about. This is
a 'transport' in that it has Read/Write methods, but those methods only
work on AdbMessage instances, not arbitrary data, so it can't quite be
dropped in anywhere a Transport is used.
This class maintains its own Lock so that multiple threads can read
AdbMessages from the same underlying transport without stealing each
other's headers/data.
"""
def __init__(self, transport):
"""Create an AdbTransportAdapter that writes to/reads from 'transport'."""
self._transport = transport
self._reader_lock = threading.Lock()
self._writer_lock = threading.Lock()
def __str__(self):
trans = str(self._transport)
return '<%s: (%s)' % (type(self).__name__, trans[1:])
def close(self):
"""Close the connection."""
self._transport.close()
def write_message(self, message, timeout):
"""Send the given message over this transport.
Args:
message: The AdbMessage to send.
timeout: Use this timeout for the entire write operation, it should be an
instance of timeouts.PolledTimeout.
"""
with self._writer_lock:
self._transport.write(message.header, timeout.remaining_ms)
# Use any remaining time to send the data. Note that if we get this far,
# we always at least try to send the data (with a minimum of 10ms timeout)
# because we don't want the remote end to get out of sync because we sent
# a header but no data.
if timeout.has_expired():
_LOG.warning('Timed out between AdbMessage header and data, sending '
'data anyway with 10ms timeout')
timeout = timeouts.PolledTimeout.from_millis(10)
self._transport.write(message.data, timeout.remaining_ms)
def read_message(self, timeout):
"""Read an AdbMessage from this transport.
Args:
timeout: Timeout for the entire read operation, in the form of a
timeouts.PolledTimeout instance. Note that for packets with a data
payload, two USB reads are performed.
Returns:
The ADB message read from the device.
Raises:
UsbReadFailedError: There's an error during read, including timeout.
AdbProtocolError: A message is incorrectly formatted.
AdbTimeoutError: timeout is already expired, or expires before we read the
entire message, specifically between reading header and data packets.
"""
with self._reader_lock:
raw_header = self._transport.read(
struct.calcsize(AdbMessage.HEADER_STRUCT_FORMAT),
timeout.remaining_ms)
if not raw_header:
raise usb_exceptions.AdbProtocolError('Adb connection lost')
try:
raw_message = RawAdbMessage(
*struct.unpack(AdbMessage.HEADER_STRUCT_FORMAT, raw_header))
except struct.error as exception:
raise usb_exceptions.AdbProtocolError(
'Unable to unpack ADB command (%s): %s (%s)' %
(AdbMessage.HEADER_STRUCT_FORMAT, raw_header, exception))
if raw_message.data_length > 0:
if timeout.has_expired():
_LOG.warning('Timed out between AdbMessage header and data, reading '
'data anyway with 10ms timeout')
timeout = timeouts.PolledTimeout.from_millis(10)
data = self._transport.read(raw_message.data_length,
timeout.remaining_ms)
else:
data = ''
return raw_message.to_adb_message(data)
def read_until(self, expected_commands, timeout):
"""Read AdbMessages from this transport until we get an expected command.
The ADB protocol specifies that before a successful CNXN handshake, any
other packets must be ignored, so this method provides the ability to
ignore unwanted commands. It's primarily used during the initial
connection to the device. See Read() for more details, including more
exceptions that may be raised.
Args:
expected_commands: Iterable of expected command responses, like ('CNXN',
'AUTH').
timeout: timeouts.PolledTimeout object to use for timeout.
Returns:
The ADB message received that matched one of expected_commands.
Raises:
AdbProtocolError: If timeout expires between reads, this can happen
if we are getting spammed with unexpected commands.
"""
msg = timeouts.loop_until_timeout_or_valid(
timeout, lambda: self.read_message(timeout),
lambda m: m.command in expected_commands, 0)
if msg.command not in expected_commands:
raise usb_exceptions.AdbTimeoutError(
'Timed out establishing connection, waiting for: %s' %
expected_commands)
return msg
class DebugAdbTransportAdapter(AdbTransportAdapter):
"""Debug transport adapter that logs messages read/written."""
def __init__(self, transport):
super(DebugAdbTransportAdapter, self).__init__(transport)
self.messages = []
_LOG.debug('%s logging messages', self)
def close(self):
_LOG.debug('%s logged messages:', self)
for message in self.messages:
_LOG.debug(message)
super(DebugAdbTransportAdapter, self).close()
def read_message(self, timeout):
message = super(DebugAdbTransportAdapter, self).read_message(timeout)
self.messages.append('READING: %s' % message)
return message
def write_message(self, message, timeout):
self.messages.append('WRITING: %s' % message)
super(DebugAdbTransportAdapter, self).write_message(message, timeout)
class AdbMessage(object):
"""ADB Protocol message class.
This class encapsulates all host<->device communication for ADB. The
attributes of this class correspond roughly to the ADB message struct. The
'command' attribute of this class is a stringified version of the unsigned
command struct value, and is one of the values in ids ('SYNC', 'CNXN', 'AUTH',
etc). The arg0 and arg1 attributes have different meanings depending on
the command (see adb_protocol.py for more info).
This class stores the 'data' associated with a message, but some messages
have no data (data will default to ''). Additionally, reading/writing
messages to the wire results in two reads/writes because the data is actually
sent in a second USB transaction. This may have implications to transport
layers that aren't direct libusb reads/writes to a local device (ie, over
a network).
The 'header' attribute returns the over-the-wire format of the header for
this message. To send a message over the header, send its header, followed
by its data if it has any.
  Attributes:
    header
    command
    arg0
    arg1
    data
    magic
"""
PRINTABLE_DATA = set(string.printable) - set(string.whitespace)
CMD_TO_WIRE, WIRE_TO_CMD = make_wire_commands('SYNC', 'CNXN', 'AUTH', 'OPEN',
'OKAY', 'CLSE', 'WRTE')
# An ADB message is 6 words in little-endian.
HEADER_STRUCT_FORMAT = '<6I'
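  # struct.calcsize(HEADER_STRUCT_FORMAT) is 24 bytes, matching the header layout
  # described in the module docstring.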
def __init__(self, command, arg0=0, arg1=0, data=''):
if command not in self.CMD_TO_WIRE:
raise usb_exceptions.AdbProtocolError('Unrecognized ADB command: %s' %
command)
self._command = self.CMD_TO_WIRE[command]
self.arg0 = arg0
self.arg1 = arg1
self.data = data
self.magic = self._command ^ 0xFFFFFFFF
@property
def header(self):
"""The message header."""
return struct.pack(self.HEADER_STRUCT_FORMAT, self._command, self.arg0,
self.arg1, len(self.data), self.data_crc32, self.magic)
@property
def command(self):
"""The ADB command."""
return self.WIRE_TO_CMD[self._command]
def __str__(self):
return '<%s: %s(%s, %s): %s (%s bytes)>' % (
type(self).__name__, self.command, self.arg0, self.arg1, ''.join(
char if char in self.PRINTABLE_DATA else '.'
for char in self.data[:64]), len(self.data))
__repr__ = __str__
@property
def data_crc32(self):
"""Returns the sum of all the data bytes.
The "crc32" used by ADB is actually just a sum of all the bytes, but we
name this data_crc32 to be consistent with ADB.
"""
return sum([ord(x) for x in self.data]) & 0xFFFFFFFF
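# A minimal usage sketch (illustrative only; real callers go through
# AdbTransportAdapter above). The payload is a typical CNXN banner, used here
# purely as an example value:
#
#   msg = AdbMessage('CNXN', arg0=0x01000000, arg1=4096, data='host::\x00')
#   assert len(msg.header) == 24
#   assert msg.command == 'CNXN'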
|
|
import os, time, datetime, requests, warnings, configparser
import pandas as pd
import numpy as np
import quandl
import concurrent.futures
from tqdm import tqdm
def getSingleStock(symbol, from_date, till_date):
repeat_times = 3
message = ""
for _ in range(repeat_times):
try:
data = quandl.get("HKEX/"+symbol, start_date=from_date, end_date=till_date)
data.index = pd.to_datetime(data.index)
return data, ""
except Exception as e:
message = ", fetch exception: " + str(e)
            # Back off briefly before the next attempt.
            time.sleep(0.1)
return '', message
def getStockPublishDay(dir, symbol):
filename = dir + 'StockPublishDay.csv'
if os.path.exists(filename):
df = pd.read_csv(filename)
publishDay = df[df['Code'] == symbol]
if len(publishDay) == 1:
return publishDay['Date'].values[0]
return ''
def saveStockPublishDay(dir, symbol, date):
filename = dir + 'StockPublishDay.csv'
if os.path.exists(filename):
df = pd.read_csv(filename, index_col=["index"])
publishDate = df[df['Code'] == symbol]
if publishDate.empty:
df.loc[len(df)] = [symbol, date]
else:
df = pd.DataFrame(columns = ['Code', 'Date'])
df.index.name = 'index'
df.loc[len(df)] = [symbol, date]
df.to_csv(filename)
def judgeOpenDaysInRange(from_date, to_date):
holidays=["2017-01-01", "2017-01-02",
"2017-01-27", "2017-01-28", "2017-01-29", "2017-01-30", "2017-01-31", "2017-02-01", "2017-02-02",
"2017-04-02", "2017-04-03", "2017-04-04",
"2017-05-01",
"2017-05-28", "2017-05-29", "2017-05-30",
"2017-10-01", "2017-10-02", "2017-10-03", "2017-10-04", "2017-10-05","2017-10-06","2017-10-07","2017-10-08"]
#holidays = cal.holidays(from_date, to_date)
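    # pd.bdate_range already excludes weekends, so only the explicit holiday list
    # above needs to be filtered out of the business-day range.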
duedays = pd.bdate_range(from_date, to_date)
df = pd.DataFrame()
df['Date'] = duedays
df['Holiday'] = duedays.isin(holidays)
opendays = df[df['Holiday'] == False]
return opendays
def judgeNeedPreDownload(dir, symbol, from_date, to_date):
dateList = judgeOpenDaysInRange(from_date, to_date)
if len(dateList) > 0:
publishDay = pd.Timestamp(getStockPublishDay(dir, symbol))
        lastDay = pd.Timestamp(dateList['Date'].iloc[-1])
if pd.isnull(publishDay) or lastDay > publishDay:
return True
return False
def judgeNeedPostDownload(from_date, to_date):
today = pd.Timestamp(datetime.datetime.now().strftime("%Y-%m-%d"))
start_date = pd.Timestamp(from_date)
end_date = pd.Timestamp(to_date)
if start_date >= today:
return False
if end_date > today:
to_date = today.strftime("%Y-%m-%d")
dateList = judgeOpenDaysInRange(from_date, to_date)
if len(dateList) > 0:
return True
return False
def updateSingleStockData(dir, symbol, from_date, till_date, force_check):
startTime = time.time()
message = ""
if len(symbol) == 0:
return startTime, message
now_date = pd.Timestamp((datetime.datetime.now()).strftime("%Y-%m-%d"))
end_date = pd.Timestamp(till_date)
filename = dir + symbol + '.csv'
try:
stockData = pd.read_csv(filename, index_col=["Date"])
except Exception as e:
#print(symbol, " read csv exception, ", e)
if str(e) == 'Index Date invalid':
stockData = pd.read_csv(filename,index_col=0)
stockData.index.name = 'Date'
stockData.sort_index(ascending=True, inplace=True)
stockData.to_csv(filename)
else:
stockData, message = getSingleStock(symbol, from_date, till_date)
if len(stockData) > 0:
stockData['last_update'] = now_date.strftime("%Y-%m-%d")
stockData.to_csv(filename)
message = message + ", database updated"
return startTime, message
modified = False
first_date = pd.Timestamp(stockData.index[0])
last_date = pd.Timestamp(stockData.index[-1])
if 'last_update' in stockData:
lastUpdateTime = pd.Timestamp(stockData['last_update'].iloc[0])
else:
lastUpdateTime = pd.Timestamp('1970-01-01')
updateOnce = now_date > lastUpdateTime
if end_date > last_date and (updateOnce or force_check):
to_date = (last_date + datetime.timedelta(days=1)).strftime("%Y-%m-%d")
if judgeNeedPostDownload(to_date, till_date):
message = message + ", download post data from " + to_date + " to " + till_date
moreStockData, tempMessage = getSingleStock(symbol, to_date, till_date)
message = message + tempMessage
if len(moreStockData) > 0:
modified = True
stockData = pd.concat([stockData, moreStockData])
                stockData.index.name = 'Date'
if modified:
stockData['last_update'] = now_date.strftime("%Y-%m-%d")
stockData = stockData[~stockData.index.duplicated(keep='first')]
stockData.sort_index(ascending=True, inplace=True)
stockData.to_csv(filename)
elif updateOnce:
stockData['last_update'] = now_date.strftime("%Y-%m-%d")
stockData = stockData[~stockData.index.duplicated(keep='first')]
stockData.sort_index(ascending=True, inplace=True)
stockData.to_csv(filename)
message = message + ", nothing updated"
else:
message = ""
return startTime, message
def getStocksList():
# https://www.quandl.com/api/v3/databases/HKEX/codes?api_key=X1xf_1YJLkT9mdxDj13v
# down the zip file above and unzip to the _share folder
# rename the stock code name from "HKEX/XXXX" to "XXXX"
Config = configparser.ConfigParser()
Config.read("../../config.ini")
dir = Config.get('Paths', 'STOCK_HK')
if os.path.exists(dir) == False:
os.makedirs(dir)
share_dir = dir + Config.get('Paths', 'CSV_SHARE')
if os.path.exists(share_dir) == False:
os.makedirs(share_dir)
filename = share_dir + 'HKEX-datasets-codes.csv'
if os.path.exists(filename):
listData = pd.read_csv(filename)
listData = listData[listData['Code'].astype(str).str.isdigit()]
return listData['Code'].values.tolist()
return []
def updateStockData_HK(symbols, from_date, till_date, force_check = False):
Config = configparser.ConfigParser()
Config.read("../../config.ini")
dir = Config.get('Paths', 'STOCK_HK')
quandl.ApiConfig.api_key = Config.get('Quandl', 'KEY')
if os.path.exists(dir) == False:
os.makedirs(dir)
stocklist = getStocksList()
for symbol in symbols:
if symbol not in stocklist:
stocklist.append(symbol)
symbols = stocklist
pbar = tqdm(total=len(symbols))
log_errors = []
log_update = []
# debug only
for symbol in symbols:
startTime, message = updateSingleStockData(dir, symbol, from_date, till_date, force_check)
outMessage = '%-*s fetched in: %.4s seconds' % (6, symbol, (time.time() - startTime))
pbar.set_description(outMessage)
pbar.update(1)
# with concurrent.futures.ThreadPoolExecutor(max_workers=4) as executor:
# # Start the load operations and mark each future with its URL
# future_to_stock = {executor.submit(updateSingleStockData, dir, symbol, from_date, till_date, force_check): symbol for symbol in symbols}
# for future in concurrent.futures.as_completed(future_to_stock):
# stock = future_to_stock[future]
# try:
# startTime, message = future.result()
# except Exception as exc:
# log_errors.append('%r generated an exception: %s' % (stock, exc))
# else:
# if len(message) > 0: log_update.append(message)
# outMessage = '%-*s fetched in: %.4s seconds' % (6, stock, (time.time() - startTime))
# pbar.set_description(outMessage)
# pbar.update(1)
pbar.close()
if len(log_errors) > 0:
print(log_errors)
return symbols
if __name__ == "__main__":
pd.set_option('precision', 3)
pd.set_option('display.width',1000)
warnings.filterwarnings('ignore', category=pd.io.pytables.PerformanceWarning)
now = datetime.datetime.now().strftime("%Y-%m-%d")
updateStockData_HK([], "1990-01-01", now, True)
|
|
"""
:mod:`psphere.client` - A client for communicating with a vSphere server
========================================================================
.. module:: client
The main module for accessing a vSphere server.
.. moduleauthor:: Jonathan Kinred <jonathan.kinred@gmail.com>
"""
# Copyright 2010 Jonathan Kinred
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import os
import suds
import time
from urllib2 import URLError
from suds.plugin import MessagePlugin
from suds.transport import TransportError
from psphere import soap, ManagedObject
from psphere.config import _config_value
from psphere.errors import (ConfigError, ObjectNotFoundError, TaskFailedError,
NotLoggedInError)
from psphere.managedobjects import ServiceInstance, Task, classmapper
logger = logging.getLogger(__name__)
class Client(suds.client.Client):
"""A client for communicating with a VirtualCenter/ESX/ESXi server
>>> from psphere.client import Client
    >>> client = Client(server="esx.foo.com", username="me", password="pass")
    :param server: The hostname or IP address of the vSphere server, e.g. esx.foo.com
:type server: str
:param username: The username to connect with
:type username: str
:param password: The password to connect with
:type password: str
:param wsdl_location: Whether to use the provided WSDL or load the server WSDL
:type wsdl_location: The string "local" (default) or "remote"
:param timeout: The timeout to use when connecting to the server
:type timeout: int (default=30)
    :param plugins: The plugin classes that will be used to process messages
        before sending them to the web service
:type plugins: list of classes
"""
def __init__(self, server=None, username=None, password=None,
wsdl_location="local", timeout=30, plugins=[]):
self._logged_in = False
if server is None:
server = _config_value("general", "server")
if username is None:
username = _config_value("general", "username")
if password is None:
password = _config_value("general", "password")
if server is None:
raise ConfigError("server must be set in config file or Client()")
if username is None:
raise ConfigError("username must be set in config file or Client()")
if password is None:
raise ConfigError("password must be set in config file or Client()")
self.server = server
self.username = username
self.password = password
url = "https://%s/sdk" % self.server
if wsdl_location == "local":
current_path = os.path.abspath(os.path.dirname(__file__))
current_path = current_path.replace('\\', '/')
if not current_path.startswith('/') :
current_path = '/' + current_path
if current_path.endswith('/') :
current_path = current_path[:-1]
wsdl_uri = ("file://%s/wsdl/vimService.wsdl" % current_path)
elif wsdl_location == "remote":
wsdl_uri = url + "/vimService.wsdl"
else:
raise ValueError("wsdl_location must be \"local\" or \"remote\"")
# Init the base class
try:
# Add ExtraConfigPlugin to the plugins
plugins.append(ExtraConfigPlugin())
suds.client.Client.__init__(self, wsdl_uri, plugins=plugins)
except URLError:
logger.critical("Failed to connect to %s", self.server)
raise
except IOError:
logger.critical("Failed to load the local WSDL from %s", wsdl_uri)
raise
except TransportError:
logger.critical("Failed to load the remote WSDL from %s", wsdl_uri)
raise
self.options.transport.options.timeout = timeout
self.set_options(location=url)
mo_ref = soap.ManagedObjectReference("ServiceInstance",
"ServiceInstance")
self.si = ServiceInstance(mo_ref, self)
try:
self.sc = self.si.RetrieveServiceContent()
except URLError, e:
logger.critical("Failed to connect to %s" % self.server)
logger.critical("urllib2 said: %s" % e.reason)
raise
if self._logged_in is False:
self.login(self.username, self.password)
def login(self, username=None, password=None):
"""Login to a vSphere server.
>>> client.login(username='Administrator', password='strongpass')
:param username: The username to authenticate as.
:type username: str
:param password: The password to authenticate with.
:type password: str
"""
if username is None:
username = self.username
if password is None:
password = self.password
logger.debug("Logging into server")
self.sc.sessionManager.Login(userName=username, password=password)
self._logged_in = True
def logout(self):
"""Logout of a vSphere server."""
if self._logged_in is True:
self.si.flush_cache()
self.sc.sessionManager.Logout()
self._logged_in = False
def invoke(self, method, _this, **kwargs):
"""Invoke a method on the server.
>>> client.invoke('CurrentTime', client.si)
:param method: The method to invoke, as found in the SDK.
:type method: str
:param _this: The managed object reference against which to invoke \
the method.
:type _this: ManagedObject
:param kwargs: The arguments to pass to the method, as \
found in the SDK.
:type kwargs: TODO
"""
if (self._logged_in is False and
method not in ["Login", "RetrieveServiceContent"]):
logger.critical("Cannot exec %s unless logged in", method)
raise NotLoggedInError("Cannot exec %s unless logged in" % method)
for kwarg in kwargs:
kwargs[kwarg] = self._marshal(kwargs[kwarg])
result = getattr(self.service, method)(_this=_this, **kwargs)
if hasattr(result, '__iter__') is False:
logger.debug("Returning non-iterable result")
return result
# We must traverse the result and convert any ManagedObjectReference
# to a psphere class, this will then be lazy initialised on use
logger.debug(result.__class__)
logger.debug("Result: %s", result)
logger.debug("Length: %s", len(result))
if type(result) == list:
new_result = []
for item in result:
new_result.append(self._unmarshal(item))
else:
new_result = self._unmarshal(result)
logger.debug("Finished in invoke.")
#property = self.find_and_destroy(property)
#print result
# Return the modified result to the caller
return new_result
def _mor_to_pobject(self, mo_ref):
"""Converts a MOR to a psphere object."""
kls = classmapper(mo_ref._type)
new_object = kls(mo_ref, self)
return new_object
def _marshal(self, obj):
"""Walks an object and marshals any psphere object into MORs."""
logger.debug("Checking if %s needs to be marshalled", obj)
if isinstance(obj, ManagedObject):
logger.debug("obj is a psphere object, converting to MOR")
return obj._mo_ref
if isinstance(obj, list):
logger.debug("obj is a list, recursing it")
new_list = []
for item in obj:
new_list.append(self._marshal(item))
return new_list
if not isinstance(obj, suds.sudsobject.Object):
logger.debug("%s is not a sudsobject subclass, skipping", obj)
return obj
if hasattr(obj, '__iter__'):
logger.debug("obj is iterable, recursing it")
for (name, value) in obj:
setattr(obj, name, self._marshal(value))
return obj
# The obj has nothing that we want to marshal or traverse, return it
logger.debug("obj doesn't need to be marshalled")
return obj
def _unmarshal(self, obj):
"""Walks an object and unmarshals any MORs into psphere objects."""
if isinstance(obj, suds.sudsobject.Object) is False:
logger.debug("%s is not a suds instance, skipping", obj)
return obj
logger.debug("Processing:")
logger.debug(obj)
logger.debug("...with keylist:")
logger.debug(obj.__keylist__)
# If the obj that we're looking at has a _type key
# then create a class of that type and return it immediately
if "_type" in obj.__keylist__:
logger.debug("obj is a MOR, converting to psphere class")
return self._mor_to_pobject(obj)
new_object = obj.__class__()
for sub_obj in obj:
logger.debug("Looking at %s of type %s", sub_obj, type(sub_obj))
if isinstance(sub_obj[1], list):
new_embedded_objs = []
for emb_obj in sub_obj[1]:
new_emb_obj = self._unmarshal(emb_obj)
new_embedded_objs.append(new_emb_obj)
setattr(new_object, sub_obj[0], new_embedded_objs)
continue
if not issubclass(sub_obj[1].__class__, suds.sudsobject.Object):
logger.debug("%s is not a sudsobject subclass, skipping",
sub_obj[1].__class__)
setattr(new_object, sub_obj[0], sub_obj[1])
continue
logger.debug("Obj keylist: %s", sub_obj[1].__keylist__)
if "_type" in sub_obj[1].__keylist__:
logger.debug("Converting nested MOR to psphere class:")
logger.debug(sub_obj[1])
kls = classmapper(sub_obj[1]._type)
logger.debug("Setting %s.%s to %s",
new_object.__class__.__name__,
sub_obj[0],
sub_obj[1])
setattr(new_object, sub_obj[0], kls(sub_obj[1], self))
else:
logger.debug("Didn't find _type in:")
logger.debug(sub_obj[1])
setattr(new_object, sub_obj[0], self._unmarshal(sub_obj[1]))
return new_object
def create(self, type_, **kwargs):
"""Create a SOAP object of the requested type.
>>> client.create('VirtualE1000')
:param type_: The type of SOAP object to create.
:type type_: str
:param kwargs: TODO
:type kwargs: TODO
"""
obj = self.factory.create("ns0:%s" % type_)
for key, value in kwargs.items():
setattr(obj, key, value)
return obj
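    # For example (an illustrative sketch; the property type and value below
    # are common vSphere SDK names, not something defined in this module):
    #     backing = client.create('VirtualEthernetCardNetworkBackingInfo',
    #                             deviceName='VM Network')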
# Notes
# -----
# A view is a local, static representation of a managed object in
# the inventory. The view is not automatically synchronised with
# the server-side object and can therefore be out of date a moment
# after it is retrieved.
#
# Retrieval of only the properties you intend to use -- through
# the use of the properties parameter -- is considered best
    # practice as the properties of some managed objects can be
# costly to retrieve.
def get_view(self, mo_ref, properties=None):
"""Get a view of a vSphere managed object.
:param mo_ref: The MOR to get a view of
:type mo_ref: ManagedObjectReference
:param properties: A list of properties to retrieve from the \
server
:type properties: list
:returns: A view representing the ManagedObjectReference.
:rtype: ManagedObject
"""
# This maps the mo_ref into a psphere class and then instantiates it
kls = classmapper(mo_ref._type)
view = kls(mo_ref, self)
# Update the requested properties of the instance
#view.update_view_data(properties=properties)
return view
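    # For example (an illustrative sketch; "mo_ref" is assumed to be a
    # ManagedObjectReference obtained from an earlier call):
    #     host = client.get_view(mo_ref, properties=["name", "summary"])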
def get_views(self, mo_refs, properties=None):
"""Get a list of local view's for multiple managed objects.
:param mo_refs: The list of ManagedObjectReference's that views are \
to be created for.
:type mo_refs: ManagedObjectReference
:param properties: The properties to retrieve in the views.
:type properties: list
:returns: A list of local instances representing the server-side \
managed objects.
:rtype: list of ManagedObject's
"""
property_specs = []
for mo_ref in mo_refs:
property_spec = self.create('PropertySpec')
property_spec.type = str(mo_ref._type)
if properties is None:
properties = []
else:
# Only retrieve the requested properties
if properties == "all":
property_spec.all = True
else:
property_spec.all = False
property_spec.pathSet = properties
property_specs.append(property_spec)
object_specs = []
for mo_ref in mo_refs:
object_spec = self.create('ObjectSpec')
object_spec.obj = mo_ref
object_specs.append(object_spec)
pfs = self.create('PropertyFilterSpec')
pfs.propSet = property_specs
pfs.objectSet = object_specs
object_contents = self.sc.propertyCollector.RetrieveProperties(
specSet=pfs)
views = []
for object_content in object_contents:
# Update the instance with the data in object_content
object_content.obj._set_view_data(object_content=object_content)
views.append(object_content.obj)
return views
def get_search_filter_spec(self, begin_entity, property_spec):
"""Build a PropertyFilterSpec capable of full inventory traversal.
By specifying all valid traversal specs we are creating a PFS that
can recursively select any object under the given entity.
:param begin_entity: The place in the MOB to start the search.
:type begin_entity: ManagedEntity
:param property_spec: TODO
:type property_spec: TODO
:returns: A PropertyFilterSpec, suitable for recursively searching \
under the given ManagedEntity.
:rtype: PropertyFilterSpec
"""
# The selection spec for additional objects we want to filter
ss_strings = ['resource_pool_traversal_spec',
'resource_pool_vm_traversal_spec',
'folder_traversal_spec',
'datacenter_host_traversal_spec',
'datacenter_vm_traversal_spec',
'compute_resource_rp_traversal_spec',
'compute_resource_host_traversal_spec',
'host_vm_traversal_spec']
# Create a selection spec for each of the strings specified above
selection_specs = [
self.create('SelectionSpec', name=ss_string)
for ss_string in ss_strings
]
        # A traversal spec for selecting the child ResourcePools of a ResourcePool
rpts = self.create('TraversalSpec')
rpts.name = 'resource_pool_traversal_spec'
rpts.type = 'ResourcePool'
rpts.path = 'resourcePool'
rpts.selectSet = [selection_specs[0], selection_specs[1]]
        # A traversal spec for selecting the VMs held by a ResourcePool
rpvts = self.create('TraversalSpec')
rpvts.name = 'resource_pool_vm_traversal_spec'
rpvts.type = 'ResourcePool'
rpvts.path = 'vm'
crrts = self.create('TraversalSpec')
crrts.name = 'compute_resource_rp_traversal_spec'
crrts.type = 'ComputeResource'
crrts.path = 'resourcePool'
crrts.selectSet = [selection_specs[0], selection_specs[1]]
crhts = self.create('TraversalSpec')
crhts.name = 'compute_resource_host_traversal_spec'
crhts.type = 'ComputeResource'
crhts.path = 'host'
dhts = self.create('TraversalSpec')
dhts.name = 'datacenter_host_traversal_spec'
dhts.type = 'Datacenter'
dhts.path = 'hostFolder'
dhts.selectSet = [selection_specs[2]]
dvts = self.create('TraversalSpec')
dvts.name = 'datacenter_vm_traversal_spec'
dvts.type = 'Datacenter'
dvts.path = 'vmFolder'
dvts.selectSet = [selection_specs[2]]
hvts = self.create('TraversalSpec')
hvts.name = 'host_vm_traversal_spec'
hvts.type = 'HostSystem'
hvts.path = 'vm'
hvts.selectSet = [selection_specs[2]]
fts = self.create('TraversalSpec')
fts.name = 'folder_traversal_spec'
fts.type = 'Folder'
fts.path = 'childEntity'
fts.selectSet = [selection_specs[2], selection_specs[3],
selection_specs[4], selection_specs[5],
selection_specs[6], selection_specs[7],
selection_specs[1]]
obj_spec = self.create('ObjectSpec')
obj_spec.obj = begin_entity
obj_spec.selectSet = [fts, dvts, dhts, crhts, crrts,
rpts, hvts, rpvts]
pfs = self.create('PropertyFilterSpec')
pfs.propSet = [property_spec]
pfs.objectSet = [obj_spec]
return pfs
def invoke_task(self, method, **kwargs):
"""Execute a \*_Task method and wait for it to complete.
:param method: The \*_Task method to invoke.
:type method: str
:param kwargs: The arguments to pass to the method.
:type kwargs: TODO
"""
# Don't execute methods which don't return a Task object
if not method.endswith('_Task'):
logger.error('invoke_task can only be used for methods which '
'return a ManagedObjectReference to a Task.')
return None
task_mo_ref = self.invoke(method=method, **kwargs)
task = Task(task_mo_ref, self)
task.update_view_data(properties=['info'])
# TODO: This returns true when there is an error
while True:
if task.info.state == 'success':
return task
elif task.info.state == 'error':
# TODO: Handle error checking properly
raise TaskFailedError(task.info.error.localizedMessage)
# TODO: Implement progresscallbackfunc
# Sleep two seconds and then refresh the data from the server
time.sleep(2)
task.update_view_data(properties=['info'])
def find_entity_views(self, view_type, begin_entity=None, properties=None):
"""Find all ManagedEntity's of the requested type.
:param view_type: The type of ManagedEntity's to find.
:type view_type: str
:param begin_entity: The MOR to start searching for the entity. \
The default is to start the search at the root folder.
:type begin_entity: ManagedObjectReference or None
:returns: A list of ManagedEntity's
:rtype: list
"""
if properties is None:
properties = []
# Start the search at the root folder if no begin_entity was given
if not begin_entity:
begin_entity = self.sc.rootFolder._mo_ref
property_spec = self.create('PropertySpec')
property_spec.type = view_type
property_spec.all = False
property_spec.pathSet = properties
pfs = self.get_search_filter_spec(begin_entity, property_spec)
# Retrieve properties from server and update entity
obj_contents = self.sc.propertyCollector.RetrieveProperties(specSet=pfs)
views = []
for obj_content in obj_contents:
logger.debug("In find_entity_view with object of type %s",
obj_content.obj.__class__.__name__)
obj_content.obj.update_view_data(properties=properties)
views.append(obj_content.obj)
return views
def find_entity_view(self, view_type, begin_entity=None, filter={},
properties=None):
"""Find a ManagedEntity of the requested type.
Traverses the MOB looking for an entity matching the filter.
:param view_type: The type of ManagedEntity to find.
:type view_type: str
:param begin_entity: The MOR to start searching for the entity. \
The default is to start the search at the root folder.
:type begin_entity: ManagedObjectReference or None
:param filter: Key/value pairs to filter the results. The key is \
a valid parameter of the ManagedEntity type. The value is what \
that parameter should match.
:type filter: dict
:returns: If an entity is found, a ManagedEntity matching the search.
:rtype: ManagedEntity
"""
if properties is None:
properties = []
kls = classmapper(view_type)
# Start the search at the root folder if no begin_entity was given
if not begin_entity:
begin_entity = self.sc.rootFolder._mo_ref
logger.debug("Using %s", self.sc.rootFolder._mo_ref)
property_spec = self.create('PropertySpec')
property_spec.type = view_type
property_spec.all = False
property_spec.pathSet = filter.keys()
pfs = self.get_search_filter_spec(begin_entity, property_spec)
# Retrieve properties from server and update entity
#obj_contents = self.propertyCollector.RetrieveProperties(specSet=pfs)
obj_contents = self.sc.propertyCollector.RetrieveProperties(specSet=pfs)
# TODO: Implement filtering
if not filter:
logger.warning('No filter specified, returning first match.')
# If no filter is specified we just return the first item
# in the list of returned objects
logger.debug("Creating class in find_entity_view (filter)")
view = kls(obj_contents[0].obj, self)
logger.debug("Completed creating class in find_entity_view (filter)")
#view.update_view_data(properties)
return view
matched = False
# Iterate through obj_contents retrieved
for obj_content in obj_contents:
            # If there is no propSet, skip this one
if not obj_content.propSet:
continue
matches = 0
# Iterate through each property in the set
for prop in obj_content.propSet:
for key in filter.keys():
# If the property name is in the defined filter
if prop.name == key:
# ...and it matches the value specified
# TODO: Regex this?
if prop.val == filter[prop.name]:
# We've found a match
matches += 1
else:
break
else:
continue
if matches == len(filter):
filtered_obj_content = obj_content
matched = True
break
else:
continue
if matched is not True:
# There were no matches
raise ObjectNotFoundError("No matching objects for filter")
logger.debug("Creating class in find_entity_view")
view = kls(filtered_obj_content.obj._mo_ref, self)
logger.debug("Completed creating class in find_entity_view")
#view.update_view_data(properties=properties)
return view
class ExtraConfigPlugin(MessagePlugin):
def addAttributeForValue(self, node):
if node.parent.name == 'extraConfig' and node.name == 'value':
node.set('xsi:type', 'xsd:string')
def marshalled(self, context):
context.envelope.walk(self.addAttributeForValue)
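# --- Usage sketch (added for illustration, not part of the original module).
# The server name, credentials and VM name below are placeholders.  The block
# only runs when this file is executed directly, so importing it is unaffected.
if __name__ == "__main__":
    client = Client(server="esx.example.com", username="admin",
                    password="secret")
    try:
        # Look up a single VirtualMachine by name, starting at the root folder
        vm = client.find_entity_view("VirtualMachine",
                                     filter={"name": "my-test-vm"})
        print "Found VM: %s" % vm
    finally:
        client.logout()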
|
|
#!/usr/bin/env python
# Copyright 2014-2020 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
# Jun Yang <junyang4711@gmail.com>
#
import time
import numpy
from pyscf import lib
from pyscf.lib import logger
#einsum = numpy.einsum
einsum = lib.einsum
def _gamma1_intermediates(mycc, t1, t2, l1, l2):
doo =-einsum('ie,je->ij', l1, t1)
doo -= einsum('imef,jmef->ij', l2, t2) * .5
dvv = einsum('ma,mb->ab', t1, l1)
dvv += einsum('mnea,mneb->ab', t2, l2) * .5
xt1 = einsum('mnef,inef->mi', l2, t2) * .5
xt2 = einsum('mnfa,mnfe->ae', t2, l2) * .5
xt2 += einsum('ma,me->ae', t1, l1)
dvo = einsum('imae,me->ai', t2, l1)
dvo -= einsum('mi,ma->ai', xt1, t1)
dvo -= einsum('ie,ae->ai', t1, xt2)
dvo += t1.T
dov = l1
return doo, dov, dvo, dvv
# gamma2 intermediates in Chemist's notation
# When computing intermediates, the convention
# dm2[q,p,s,r] = <p^\dagger r^\dagger s q> is assumed in this function.
# It changes to dm2[p,q,r,s] = <p^\dagger r^\dagger s q> in _make_rdm2
def _gamma2_intermediates(mycc, t1, t2, l1, l2):
tau = t2 + einsum('ia,jb->ijab', t1, t1) * 2
miajb = einsum('ikac,kjcb->iajb', l2, t2)
goovv = 0.25 * (l2.conj() + tau)
tmp = einsum('kc,kica->ia', l1, t2)
goovv += einsum('ia,jb->ijab', tmp, t1)
tmp = einsum('kc,kb->cb', l1, t1)
goovv += einsum('cb,ijca->ijab', tmp, t2) * .5
tmp = einsum('kc,jc->kj', l1, t1)
goovv += einsum('kiab,kj->ijab', tau, tmp) * .5
tmp = numpy.einsum('ldjd->lj', miajb)
goovv -= einsum('lj,liba->ijab', tmp, tau) * .25
tmp = numpy.einsum('ldlb->db', miajb)
goovv -= einsum('db,jida->ijab', tmp, tau) * .25
goovv -= einsum('ldia,ljbd->ijab', miajb, tau) * .5
tmp = einsum('klcd,ijcd->ijkl', l2, tau) * .25**2
goovv += einsum('ijkl,klab->ijab', tmp, tau)
goovv = goovv.conj()
gvvvv = einsum('ijab,ijcd->abcd', tau, l2) * 0.125
goooo = einsum('klab,ijab->klij', l2, tau) * 0.125
gooov = einsum('jkba,ib->jkia', tau, l1) * -0.25
gooov += einsum('iljk,la->jkia', goooo, t1)
tmp = numpy.einsum('icjc->ij', miajb) * .25
gooov -= einsum('ij,ka->jkia', tmp, t1)
gooov += einsum('icja,kc->jkia', miajb, t1) * .5
gooov = gooov.conj()
gooov += einsum('jkab,ib->jkia', l2, t1) * .25
govvo = einsum('ia,jb->ibaj', l1, t1)
govvo += numpy.einsum('iajb->ibaj', miajb)
govvo -= einsum('ikac,jc,kb->ibaj', l2, t1, t1)
govvv = einsum('ja,ijcb->iacb', l1, tau) * .25
govvv += einsum('bcad,id->iabc', gvvvv, t1)
tmp = numpy.einsum('kakb->ab', miajb) * .25
govvv += einsum('ab,ic->iacb', tmp, t1)
govvv += einsum('kaib,kc->iabc', miajb, t1) * .5
govvv = govvv.conj()
govvv += einsum('ijbc,ja->iabc', l2, t1) * .25
dovov = goovv.transpose(0,2,1,3) - goovv.transpose(0,3,1,2)
dvvvv = gvvvv.transpose(0,2,1,3) - gvvvv.transpose(0,3,1,2)
doooo = goooo.transpose(0,2,1,3) - goooo.transpose(0,3,1,2)
dovvv = govvv.transpose(0,2,1,3) - govvv.transpose(0,3,1,2)
dooov = gooov.transpose(0,2,1,3) - gooov.transpose(1,2,0,3)
dovvo = govvo.transpose(0,2,1,3)
dovov =(dovov + dovov.transpose(2,3,0,1)) * .5
dvvvv = dvvvv + dvvvv.transpose(1,0,3,2).conj()
doooo = doooo + doooo.transpose(1,0,3,2).conj()
dovvo =(dovvo + dovvo.transpose(3,2,1,0).conj()) * .5
doovv = None # = -dovvo.transpose(0,3,2,1)
dvvov = None
return (dovov, dvvvv, doooo, doovv, dovvo, dvvov, dovvv, dooov)
def make_rdm1(mycc, t1, t2, l1, l2, ao_repr=False):
r'''
One-particle density matrix in the molecular spin-orbital representation
(the occupied-virtual blocks from the orbital response contribution are
not included).
dm1[p,q] = <q^\dagger p> (p,q are spin-orbitals)
The convention of 1-pdm is based on McWeeney's book, Eq (5.4.20).
The contraction between 1-particle Hamiltonian and rdm1 is
E = einsum('pq,qp', h1, rdm1)
'''
d1 = _gamma1_intermediates(mycc, t1, t2, l1, l2)
return _make_rdm1(mycc, d1, with_frozen=True, ao_repr=ao_repr)
def make_rdm2(mycc, t1, t2, l1, l2, ao_repr=False):
r'''
Two-particle density matrix in the molecular spin-orbital representation
dm2[p,q,r,s] = <p^\dagger r^\dagger s q>
where p,q,r,s are spin-orbitals. p,q correspond to one particle and r,s
correspond to another particle. The contraction between ERIs (in
Chemist's notation) and rdm2 is
E = einsum('pqrs,pqrs', eri, rdm2)
'''
d1 = _gamma1_intermediates(mycc, t1, t2, l1, l2)
d2 = _gamma2_intermediates(mycc, t1, t2, l1, l2)
return _make_rdm2(mycc, d1, d2, with_dm1=True, with_frozen=True,
ao_repr=ao_repr)
def _make_rdm1(mycc, d1, with_frozen=True, ao_repr=False):
r'''
One-particle density matrix in the molecular spin-orbital representation
(the occupied-virtual blocks from the orbital response contribution are
not included).
dm1[p,q] = <q^\dagger p> (p,q are spin-orbitals)
The convention of 1-pdm is based on McWeeney's book, Eq (5.4.20).
The contraction between 1-particle Hamiltonian and rdm1 is
E = einsum('pq,qp', h1, rdm1)
'''
doo, dov, dvo, dvv = d1
nocc, nvir = dov.shape
nmo = nocc + nvir
dm1 = numpy.empty((nmo,nmo), dtype=doo.dtype)
dm1[:nocc,:nocc] = doo + doo.conj().T
dm1[:nocc,nocc:] = dov + dvo.conj().T
dm1[nocc:,:nocc] = dm1[:nocc,nocc:].conj().T
dm1[nocc:,nocc:] = dvv + dvv.conj().T
dm1 *= .5
dm1[numpy.diag_indices(nocc)] += 1
if with_frozen and mycc.frozen is not None:
nmo = mycc.mo_occ.size
nocc = numpy.count_nonzero(mycc.mo_occ > 0)
rdm1 = numpy.zeros((nmo,nmo), dtype=dm1.dtype)
rdm1[numpy.diag_indices(nocc)] = 1
moidx = numpy.where(mycc.get_frozen_mask())[0]
rdm1[moidx[:,None],moidx] = dm1
dm1 = rdm1
if ao_repr:
mo = mycc.mo_coeff
dm1 = lib.einsum('pi,ij,qj->pq', mo, dm1, mo.conj())
return dm1
def _make_rdm2(mycc, d1, d2, with_dm1=True, with_frozen=True, ao_repr=False):
r'''
dm2[p,q,r,s] = <p^\dagger r^\dagger s q>
Note the contraction between ERIs (in Chemist's notation) and rdm2 is
E = einsum('pqrs,pqrs', eri, rdm2)
'''
dovov, dvvvv, doooo, doovv, dovvo, dvvov, dovvv, dooov = d2
nocc, nvir = dovov.shape[:2]
nmo = nocc + nvir
dm2 = numpy.empty((nmo,nmo,nmo,nmo), dtype=doooo.dtype)
dovov = numpy.asarray(dovov)
dm2[:nocc,nocc:,:nocc,nocc:] = dovov
dm2[nocc:,:nocc,nocc:,:nocc] = dm2[:nocc,nocc:,:nocc,nocc:].transpose(1,0,3,2).conj()
dovov = None
dovvo = numpy.asarray(dovvo)
dm2[:nocc,:nocc,nocc:,nocc:] =-dovvo.transpose(0,3,2,1)
dm2[nocc:,nocc:,:nocc,:nocc] =-dovvo.transpose(2,1,0,3)
dm2[:nocc,nocc:,nocc:,:nocc] = dovvo
dm2[nocc:,:nocc,:nocc,nocc:] = dovvo.transpose(1,0,3,2).conj()
dovvo = None
dm2[nocc:,nocc:,nocc:,nocc:] = dvvvv
dm2[:nocc,:nocc,:nocc,:nocc] = doooo
dovvv = numpy.asarray(dovvv)
dm2[:nocc,nocc:,nocc:,nocc:] = dovvv
dm2[nocc:,nocc:,:nocc,nocc:] = dovvv.transpose(2,3,0,1)
dm2[nocc:,nocc:,nocc:,:nocc] = dovvv.transpose(3,2,1,0).conj()
dm2[nocc:,:nocc,nocc:,nocc:] = dovvv.transpose(1,0,3,2).conj()
dovvv = None
dooov = numpy.asarray(dooov)
dm2[:nocc,:nocc,:nocc,nocc:] = dooov
dm2[:nocc,nocc:,:nocc,:nocc] = dooov.transpose(2,3,0,1)
dm2[:nocc,:nocc,nocc:,:nocc] = dooov.transpose(1,0,3,2).conj()
dm2[nocc:,:nocc,:nocc,:nocc] = dooov.transpose(3,2,1,0).conj()
if with_frozen and mycc.frozen is not None:
nmo, nmo0 = mycc.mo_occ.size, nmo
nocc = numpy.count_nonzero(mycc.mo_occ > 0)
rdm2 = numpy.zeros((nmo,nmo,nmo,nmo), dtype=dm2.dtype)
moidx = numpy.where(mycc.get_frozen_mask())[0]
idx = (moidx.reshape(-1,1) * nmo + moidx).ravel()
lib.takebak_2d(rdm2.reshape(nmo**2,nmo**2),
dm2.reshape(nmo0**2,nmo0**2), idx, idx)
dm2 = rdm2
if with_dm1:
dm1 = _make_rdm1(mycc, d1, with_frozen)
dm1[numpy.diag_indices(nocc)] -= 1
for i in range(nocc):
# Be careful with the convention of dm1 and the transpose of dm2 at the end
dm2[i,i,:,:] += dm1
dm2[:,:,i,i] += dm1
dm2[:,i,i,:] -= dm1
dm2[i,:,:,i] -= dm1.T
for i in range(nocc):
for j in range(nocc):
dm2[i,i,j,j] += 1
dm2[i,j,j,i] -= 1
# dm2 was computed as dm2[p,q,r,s] = < p^\dagger r^\dagger s q > in the
# above. Transposing it so that it be contracted with ERIs (in Chemist's
# notation):
# E = einsum('pqrs,pqrs', eri, rdm2)
dm2 = dm2.transpose(1,0,3,2)
if ao_repr:
from pyscf.cc import ccsd_rdm
dm2 = ccsd_rdm._rdm2_mo2ao(dm2, mycc.mo_coeff)
return dm2
if __name__ == '__main__':
from functools import reduce
from pyscf import gto
from pyscf import scf
from pyscf import ao2mo
from pyscf.cc import gccsd
from pyscf.cc import addons
mol = gto.Mole()
mol.atom = [
[8 , (0. , 0. , 0.)],
[1 , (0. , -0.757 , 0.587)],
[1 , (0. , 0.757 , 0.587)]]
mol.basis = '631g'
mol.spin = 2
mol.build()
mf = scf.UHF(mol).run(conv_tol=1.)
mf = scf.addons.convert_to_ghf(mf)
mycc = gccsd.GCCSD(mf)
ecc, t1, t2 = mycc.kernel()
l1, l2 = mycc.solve_lambda()
dm1 = make_rdm1(mycc, t1, t2, l1, l2)
dm2 = make_rdm2(mycc, t1, t2, l1, l2)
nao = mol.nao_nr()
mo_a = mf.mo_coeff[:nao]
mo_b = mf.mo_coeff[nao:]
nmo = mo_a.shape[1]
eri = ao2mo.kernel(mf._eri, mo_a+mo_b, compact=False).reshape([nmo]*4)
orbspin = mf.mo_coeff.orbspin
sym_forbid = (orbspin[:,None] != orbspin)
eri[sym_forbid,:,:] = 0
eri[:,:,sym_forbid] = 0
hcore = scf.RHF(mol).get_hcore()
h1 = reduce(numpy.dot, (mo_a.T.conj(), hcore, mo_a))
h1+= reduce(numpy.dot, (mo_b.T.conj(), hcore, mo_b))
e1 = numpy.einsum('ij,ji', h1, dm1)
e1+= numpy.einsum('ijkl,ijkl', eri, dm2) * .5
e1+= mol.energy_nuc()
print(e1 - mycc.e_tot)
#TODO: test 1pdm, 2pdm against FCI
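    # Extra sanity check (an added sketch, not part of the original test):
    # in the spin-orbital MO basis the trace of the 1-RDM should equal the
    # total number of electrons in the molecule.
    print(abs(numpy.trace(dm1) - mol.nelectron))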
|
|
# Copyright (c) 2018 Intel, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import os_resource_classes as orc
import os_traits as ost
from nova import conf
from nova.db import constants as db_const
from nova import test
from nova.tests.functional.libvirt import integrated_helpers
from nova.tests.unit.virt.libvirt import fakelibvirt
from nova.virt.libvirt.host import SEV_KERNEL_PARAM_FILE
CONF = conf.CONF
class LibvirtReportTraitsTestBase(
integrated_helpers.LibvirtProviderUsageBaseTestCase):
pass
def assertMemEncryptionSlotsEqual(self, slots):
inventory = self._get_provider_inventory(self.host_uuid)
if slots == 0:
self.assertNotIn(orc.MEM_ENCRYPTION_CONTEXT, inventory)
else:
self.assertEqual(
inventory[orc.MEM_ENCRYPTION_CONTEXT],
{
'total': slots,
'min_unit': 1,
'max_unit': 1,
'step_size': 1,
'allocation_ratio': 1.0,
'reserved': 0,
}
)
class LibvirtReportTraitsTests(LibvirtReportTraitsTestBase):
# These must match the capabilities in
# nova.virt.libvirt.driver.LibvirtDriver.capabilities
expected_libvirt_driver_capability_traits = set([
trait for trait in [
ost.COMPUTE_ACCELERATORS,
ost.COMPUTE_DEVICE_TAGGING,
ost.COMPUTE_NET_ATTACH_INTERFACE,
ost.COMPUTE_NET_ATTACH_INTERFACE_WITH_TAG,
ost.COMPUTE_VOLUME_ATTACH_WITH_TAG,
ost.COMPUTE_VOLUME_EXTEND,
ost.COMPUTE_VOLUME_MULTI_ATTACH,
ost.COMPUTE_TRUSTED_CERTS,
ost.COMPUTE_IMAGE_TYPE_AKI,
ost.COMPUTE_IMAGE_TYPE_AMI,
ost.COMPUTE_IMAGE_TYPE_ARI,
ost.COMPUTE_IMAGE_TYPE_ISO,
ost.COMPUTE_IMAGE_TYPE_QCOW2,
ost.COMPUTE_IMAGE_TYPE_RAW,
ost.COMPUTE_RESCUE_BFV,
]
])
def test_report_cpu_traits(self):
self.assertEqual([], self._get_all_providers())
self.start_compute()
# Test CPU traits reported on initial node startup, these specific
# trait values are coming from fakelibvirt's baselineCPU result.
# COMPUTE_NODE is always set on the compute node provider.
traits = self._get_provider_traits(self.host_uuid)
for trait in (
'HW_CPU_X86_VMX', 'HW_CPU_X86_INTEL_VMX', 'HW_CPU_X86_AESNI',
'COMPUTE_NODE',
):
self.assertIn(trait, traits)
self._create_trait('CUSTOM_TRAITS')
new_traits = ['CUSTOM_TRAITS', 'HW_CPU_X86_AVX']
self._set_provider_traits(self.host_uuid, new_traits)
# The above is an out-of-band placement operation, as if the operator
# used the CLI. So now we have to "SIGHUP the compute process" to clear
# the report client cache so the subsequent update picks up the change.
self.compute.manager.reset()
self._run_periodics()
# HW_CPU_X86_AVX is filtered out because nova-compute owns CPU traits
# and it's not in the baseline for the host.
traits = set(self._get_provider_traits(self.host_uuid))
expected_traits = self.expected_libvirt_driver_capability_traits.union(
[
'HW_CPU_X86_VMX',
'HW_CPU_X86_INTEL_VMX',
'HW_CPU_X86_AESNI',
'CUSTOM_TRAITS',
# The periodic restored the COMPUTE_NODE trait.
'COMPUTE_NODE',
])
for trait in expected_traits:
self.assertIn(trait, traits)
class LibvirtReportNoSevTraitsTests(LibvirtReportTraitsTestBase):
STUB_INIT_HOST = False
@test.patch_exists(SEV_KERNEL_PARAM_FILE, False)
def setUp(self):
super(LibvirtReportNoSevTraitsTests, self).setUp()
self.start_compute()
def test_sev_trait_off_on(self):
"""Test that the compute service reports the SEV trait in the list of
global traits, but doesn't immediately register it on the
compute host resource provider in the placement API, due to
the kvm-amd kernel module's sev parameter file being (mocked
as) absent.
Then test that if the SEV capability appears (again via
mocking), after a restart of the compute service, the trait
gets registered on the compute host.
Also test that on both occasions, the inventory of the
MEM_ENCRYPTION_CONTEXT resource class on the compute host
corresponds to the absence or presence of the SEV capability.
"""
self.assertFalse(self.compute.driver._host.supports_amd_sev)
sev_trait = ost.HW_CPU_X86_AMD_SEV
global_traits = self._get_all_traits()
self.assertIn(sev_trait, global_traits)
traits = self._get_provider_traits(self.host_uuid)
self.assertNotIn(sev_trait, traits)
self.assertMemEncryptionSlotsEqual(0)
# Now simulate the host gaining SEV functionality. Here we
# simulate a kernel update or reconfiguration which causes the
# kvm-amd kernel module's "sev" parameter to become available
# and set to 1, however it could also happen via a libvirt
# upgrade, for instance.
sev_features = \
fakelibvirt.virConnect._domain_capability_features_with_SEV
with test.nested(
self.patch_exists(SEV_KERNEL_PARAM_FILE, True),
self.patch_open(SEV_KERNEL_PARAM_FILE, "1\n"),
mock.patch.object(fakelibvirt.virConnect,
'_domain_capability_features',
new=sev_features)
) as (mock_exists, mock_open, mock_features):
# Retrigger the detection code. In the real world this
# would be a restart of the compute service.
# As we are changing the domain caps we need to clear the
# cache in the host object.
self.compute.driver._host._domain_caps = None
self.compute.driver._host._set_amd_sev_support()
self.assertTrue(self.compute.driver._host.supports_amd_sev)
mock_exists.assert_has_calls([mock.call(SEV_KERNEL_PARAM_FILE)])
mock_open.assert_has_calls([mock.call(SEV_KERNEL_PARAM_FILE)])
            # However the new trait won't show up in the provider tree and get
            # synced back to placement until we force a reinventory:
self.compute.manager.reset()
# reset cached traits so they are recalculated.
self.compute.driver._static_traits = None
self._run_periodics()
traits = self._get_provider_traits(self.host_uuid)
self.assertIn(sev_trait, traits)
# Sanity check that we've still got the trait globally.
self.assertIn(sev_trait, self._get_all_traits())
self.assertMemEncryptionSlotsEqual(db_const.MAX_INT)
class LibvirtReportSevTraitsTests(LibvirtReportTraitsTestBase):
STUB_INIT_HOST = False
@test.patch_exists(SEV_KERNEL_PARAM_FILE, True)
@test.patch_open(SEV_KERNEL_PARAM_FILE, "1\n")
@mock.patch.object(
fakelibvirt.virConnect, '_domain_capability_features',
new=fakelibvirt.virConnect._domain_capability_features_with_SEV)
def setUp(self):
super(LibvirtReportSevTraitsTests, self).setUp()
self.flags(num_memory_encrypted_guests=16, group='libvirt')
self.start_compute()
def test_sev_trait_on_off(self):
"""Test that the compute service reports the SEV trait in the list of
global traits, and immediately registers it on the compute
host resource provider in the placement API, due to the SEV
capability being (mocked as) present.
Then test that if the SEV capability disappears (again via
mocking), after a restart of the compute service, the trait
gets removed from the compute host.
Also test that on both occasions, the inventory of the
MEM_ENCRYPTION_CONTEXT resource class on the compute host
corresponds to the absence or presence of the SEV capability.
"""
self.assertTrue(self.compute.driver._host.supports_amd_sev)
sev_trait = ost.HW_CPU_X86_AMD_SEV
global_traits = self._get_all_traits()
self.assertIn(sev_trait, global_traits)
traits = self._get_provider_traits(self.host_uuid)
self.assertIn(sev_trait, traits)
self.assertMemEncryptionSlotsEqual(16)
# Now simulate the host losing SEV functionality. Here we
# simulate a kernel downgrade or reconfiguration which causes
# the kvm-amd kernel module's "sev" parameter to become
# unavailable, however it could also happen via a libvirt
# downgrade, for instance.
with self.patch_exists(SEV_KERNEL_PARAM_FILE, False) as mock_exists:
# Retrigger the detection code. In the real world this
# would be a restart of the compute service.
self.compute.driver._host._domain_caps = None
self.compute.driver._host._set_amd_sev_support()
self.assertFalse(self.compute.driver._host.supports_amd_sev)
mock_exists.assert_has_calls([mock.call(SEV_KERNEL_PARAM_FILE)])
# However it won't disappear in the provider tree and get synced
# back to placement until we force a reinventory:
self.compute.manager.reset()
# reset cached traits so they are recalculated.
self.compute.driver._static_traits = None
self._run_periodics()
traits = self._get_provider_traits(self.host_uuid)
self.assertNotIn(sev_trait, traits)
# Sanity check that we've still got the trait globally.
self.assertIn(sev_trait, self._get_all_traits())
self.assertMemEncryptionSlotsEqual(0)
|
|
import numpy as np
import os
def XMLtoCSV(XMLinput):
"""
This function takes as an input the XML file that comes out of the electronic structure calculations and transforms
it into 2 CSV files. The first one is the 'X part' of the data. It contains a sample per line. Each line has a format:
atom label (string), coordinate x (float), coordinate y (float), coordinate z (float), ... for each atom in the system.
The second file contains the 'Y part' of the data. It has a sample per line with the energy of each sample (float).
:XMLinput: an XML file obtained from grid electronic structure calculations
"""
# These are the output files
fileX = open('X.csv', 'w')
fileY = open('Y.csv', 'w')
# This is the input file
inputFile = open(XMLinput, 'r')
# The information of the molecule is contained in the block <cml:molecule>...<cml:molecule>.
# The atom xyz coordinates, the labels and the energy have to be retrieved
# Each configuration corresponds to one line in the CSV files
for line in inputFile:
data = []
if "<cml:molecule>" in line:
for i in range(3):
line = inputFile.next()
while "</cml:atomArray>" not in line:
indexLab = line.find("elementType=")
indexX = line.find("x3=")
indexY = line.find("y3=")
indexYend = line.find("\n")
indexZ = line.find("z3=")
indexZend = line.find("/>")
if indexLab >= 0:
data.append(line[indexLab + 13])
data.append(line[indexX + 4: indexY - 2])
data.append(line[indexY + 4: indexYend - 1])
if indexZ >= 0:
data.append(line[indexZ + 4: indexZend - 1])
line = inputFile.next()
for i in range(len(data)):
fileX.write(data[i])
fileX.write(",")
fileX.write("\n")
if '<property name="Energy"' in line:
line = inputFile.next()
indexEn1 = line.find("value")
indexEn2 = line.find("/>")
energy = float(line[indexEn1 + 7:indexEn2 - 1])
fileY.write(str(energy) + "\n")
return None
def XYZtoCSV(XYZinput):
"""
This function takes as an input the XYZ file that comes out of VR and transforms it into 2 CSV files. The first one
is the 'X part' of the data. It contains a sample per line. Each line has a format:
atom label (string), coordinate x (float), coordinate y (float), coordinate z (float), ... for each atom in the system.
The second file contains the 'Y part' of the data. It has a sample per line with the energy of each sample (float).
Note: This is specific to a file containing C, H, N as the atoms.
    :XYZinput: an XYZ file obtained from VR
"""
# These are the output files
fileX = open('X.csv', 'w')
fileY = open('Y.csv', 'w')
# This is the input file
inputFile = open(XYZinput, 'r')
isFirstLine = True
n_atoms = 0
for line in inputFile:
if isFirstLine:
n_atoms = int(line)
isFirstLine = False
index1 = line.find("Energy")
if index1 >= 0:
index2 = line.find("(hartree)")
energy = float(line[index1+8:index2-1])
fileY.write(str(energy))
fileY.write("\n")
if line[0] == "C" or line[0] == "H":
line = line.replace("\n", "")
line = line.replace("\t",",")
fileX.write(line)
fileX.write(",")
if line[0] == "N":
line = line.replace("\n", "")
line = line.replace("\t", ",")
fileX.write(line)
fileX.write("\n")
def extractMolpro(MolproInput):
"""
This function takes one Molpro .out file and returns the geometry, the energy and the partial charges on the atoms.
:MolproInput: the molpro .out file (string)
:return:
:rawData: List of strings with atom label and atom coordinates - example ['C', '0.1, '0.1', '0.1', ...]
:ene: Value of the energy (string)
:partialCh: List of strings with atom label and its partial charge - example ['C', '6.36', 'H', ...]
"""
# This is the input file
inputFile = open(MolproInput, 'r')
# This will contain the data
rawData = []
ene = "0"
partialCh = []
for line in inputFile:
# The geometry is found on the line after the keyword "geometry={"
if "geometry={" in line:
for i in range(7):
line = inputFile.next()
line = line.strip()
lineSplit = line.split(" ")
for j in range(len(lineSplit)):
rawData.append(lineSplit[j])
# The energy is found two lines after the keyword "Final beta occupancy:"
elif "Final beta occupancy:" in line:
line = inputFile.next()
line = inputFile.next()
line = line.strip()
ene = line[len("!RKS STATE 1.1 Energy"):].strip()
elif "Total charge composition:" in line:
line = inputFile.next()
line = inputFile.next()
for i in range(7):
line = inputFile.next()
lineSplit = line.rstrip().split(" ")
lineSplit = filter(None, lineSplit)
partialCh.append(lineSplit[1])
partialCh.append(lineSplit[-2])
return rawData, ene, partialCh
def list_files(dir, key):
"""
This function walks through a directory and makes a list of the files that have a name containing a particular string
:dir: path to the directory to explore
:key: string to look for in file names
:return: list of files containing "key" in their filename
"""
r = [] # List of files to be joined together
subdirs = [x[0] for x in os.walk(dir)]
for subdir in subdirs:
files = os.walk(subdir).next()[2]
for file in files:
isTrajectory = file.find(key)
if isTrajectory >= 0:
r.append(subdir + "/" + file)
return r
def MolproToCSV(directory, key):
"""
This function extracts all the geometries and energies from Molpro .out files contained in a particular directory.
Only the files that have a particular string in their filename will be read. The geometries are then written to X.csv
where each line is a different geometry. The energies are written to Y.csv where each line is the energy of a
different geometry. The partial charges are written to Q.csv
:directory: path to the directory containing the Molpro .out files (string)
:key: string to look for in the file names (string)
"""
# These are the output files
fileX = open('X.csv', 'w')
fileY = open('Y.csv', 'w')
fileZ = open('Q.csv', 'w')
# Obtaining the list of files to mine
fileList = list_files(directory, key)
# Iterating over all the files
for item in fileList:
# Extracting the geometry and the energy from a Molpro out file
geom, ene, partialCh = extractMolpro(item)
if len(geom) != 28 or ene == "0" or len(partialCh) != 14:
print "The following file couldn't be read properly:"
print item + "\n"
continue
for i in range(len(geom)):
fileX.write(geom[i])
fileX.write(",")
fileX.write("\n")
fileY.write(ene + "\n")
for i in range(len(partialCh)):
fileZ.write(partialCh[i])
fileZ.write(",")
fileZ.write("\n")
def loadX(fileX):
"""
This function takes a .csv file that contains on each line a different configuration of the system in the format
"C,0.1,0.1,0.1,H,0.2,0.2,0.2..." and returns a list of lists with the configurations of the system.
The following functions generate .csv files in the correct format:
1. XMLtoCSV
    2. XYZtoCSV
3. MolproToCSV
The function returns a list of lists where each element is a different configuration for a molecule. For example,
for a sample with 3 hydrogen atoms the matrix returned will be:
``[['H',-0.5,0.0,0.0,'H',0.5,0.0,0.0], ['H',-0.3,0.0,0.0,'H',0.3,0.0,0.0], ['H',-0.7,0.0,0.0,'H',0.7,0.0,0.0]]``
:fileX: The .csv file containing the geometries of the system (string)
:return: a list of lists with characters and floats.
"""
if fileX[-4:] != ".csv":
print "Error: the file extension is not .csv"
quit()
inputFile = open(fileX, 'r')
# Creating an empty matrix of the right size
matrixX = []
for line in inputFile:
line = line.replace(",\n","")
listLine = line.split(",")
# converting the numbers to float
for i in range(0,len(listLine)-1,4):
for j in range(3):
listLine[i+j+1] = float(listLine[i+j+1])
matrixX.append(listLine)
inputFile.close()
return matrixX
def loadY(fileY):
"""
This function takes a .csv file containing the energies of a system and returns an array with the energies contained
in the file.
:fileY: the .csv file containing the energies of the system (string)
:return: numpy array of shape (n_samples, 1)
"""
# Checking that the input file has the correct .csv extension
if fileY[-4:] != ".csv":
print "Error: the file extension is not .csv"
quit()
inputFile = open(fileY, 'r')
y_list = []
for line in inputFile:
y_list.append(float(line))
matrixY = np.asarray(y_list).reshape((len(y_list), 1))
inputFile.close()
return matrixY
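def loadXY_example(fileX="X.csv", fileY="Y.csv"):
    """
    A minimal usage sketch (added for illustration): loads the geometries and
    energies written by XMLtoCSV, XYZtoCSV or MolproToCSV above. The default
    file names are placeholders and the files are assumed to already exist.
    :return: the list of configurations and the (n_samples, 1) energy array
    """
    matrixX = loadX(fileX)
    matrixY = loadY(fileY)
    print "Loaded %d configurations and %d energies" % (len(matrixX), matrixY.shape[0])
    return matrixX, matrixY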
def loadPd(fileName):
"""
This function takes a .csv file generated after processing the original CSV files with the package PANDAS.
The new csv file contains on each line a different configuration of the system in the format
"C,0.1,0.1,0.1,H,0.2,0.2,0.2..." and at the end of each line there are two values of the energies. The energies are
calculated at 2 different levels of theory and the worse of the two is first.
It returns a list of lists with the configurations of the system and a numpy array of size (N_samples, 1) with the
difference of the two values of the energies.
For example, for a sample with 3 hydrogen atoms the list of lists returned will be:
``[['H',-0.5,0.0,0.0,'H',0.5,0.0,0.0], ['H',-0.3,0.0,0.0,'H',0.3,0.0,0.0], ['H',-0.7,0.0,0.0,'H',0.7,0.0,0.0]]``
:fileX: The .csv file containing the geometries and the energies at 2 levels of theory for the system
:return:
:matrixX: a list of lists with characters and floats.
    :matrixY: a numpy array of energy differences of size (n_samples,)
"""
if fileName[-4:] != ".csv":
print "Error: the file extension is not .csv"
quit()
inputFile = open(fileName, 'r')
# Creating a matrix with the raw data:
rawData = []
matrixX = []
matrixY = []
isFirstLine = True
for line in inputFile:
if isFirstLine == True:
line = inputFile.next()
isFirstLine = False
line = line.replace("\n","")
listLine = line.split(",")
ene = listLine[-2:]
geom = listLine[1:-2]
for i in range(len(ene)):
ene[i] = float(ene[i])
eneDiff = ene[1] - ene[0]
matrixY.append(eneDiff)
for i in range(0,len(geom)-1,4):
for j in range(3):
geom[i+j+1] = float(geom[i+j+1])
matrixX.append(geom)
matrixY = np.asarray(matrixY)
inputFile.close()
return matrixX, matrixY
def loadPd_q(fileName):
"""
This function takes a .csv file generated after processing the original CSV files with the package PANDAS.
    The data is arranged with the geometries first, in a 'clean dataset' arrangement. This means that the headers tell
the atom label for each coordinate. For example, for a molecule with 3 hydrogens, the first two lines of the csv
file for the geometries look like:
``H1x, H1y, H1z, H2x, H2y, H2z, H3x, H3y, H3z
0,1.350508,0.7790238,0.6630868,1.825709,1.257877,-0.1891705,1.848891,1.089646``
Then there are the partial charges and then 2 values of the energies (all in similar format to the geometries).
**Note**: This is specific to the CH4CN system!
:fileName: .csv file (string)
:return:
:matrixX: a list of lists with characters and floats.
:matrixY: a numpy array of energy differences (floats) of size (n_samples,)
:matrixQ: a list of numpy arrays of the partial charges - size (n_samples, n_atoms)
"""
if fileName[-4:] != ".csv":
print "Error: the file extension is not .csv"
quit()
inputFile = open(fileName, 'r')
isFirstLine = True
# Lists that will contain the data
matrixX = []
matrixY = []
matrixQ = []
# Reading the file
for line in inputFile:
if isFirstLine:
line = inputFile.next()
isFirstLine = False
line = line.replace("\n", "")
listLine = line.split(",")
geom = extractGeom(listLine)
eneDiff = extractEneDiff(listLine)
partQ = extractQ(listLine)
matrixX.append(geom)
matrixY.append(eneDiff)
matrixQ.append(partQ)
matrixY = np.asarray(matrixY)
return matrixX, matrixY, matrixQ
def extractGeom(lineList):
"""
Function used by loadPd_q to extract the geometries.
:lineList: line with geometries in clean format, partial charges and energies
:return: list of geometry in format [['H',-0.5,0.0,0.0,'H',0.5,0.0,0.0], ['H',-0.3,0.0,0.0,'H',0.3,0.0,0.0]...
"""
geomPart = lineList[1:22]
atomLab = ["C","H","H","H","H","C","N"]
finalGeom = []
for i in range(len(atomLab)):
finalGeom.append(atomLab[i])
for j in range(3):
finalGeom.append(float(geomPart[3*i+j]))
return finalGeom
def extractEneDiff(lineList):
"""
This function is used by loadPd_q to extract the energy from a line of the clean data set.
:param lineList: line with geometries in clean format, partial charges and energies
:return: energy difference (float)
"""
enePart = lineList[-2:]
eneDiff = float(enePart[1]) - float(enePart[0])
return eneDiff
def extractQ(lineList):
"""
This function is used by loadPd_q to extract the partial charges from a line of the clean data set.
:lineList: line with geometries in clean format, partial charges and energies
:return: numpy array of partial charges of size (n_atoms)
"""
qPart = lineList[22:-2]
for i in range(len(qPart)):
qPart[i] = float(qPart[i])
qPart = np.asarray(qPart)
return qPart
def CSVtoTew(CSVfile):
"""
This function takes a .csv file generated after processing the original CSV files with the package PANDAS where
    the data is arranged with the geometries first, in a 'clean dataset' arrangement. This means that the headers tell
the atom label for each coordinate. For example, for a molecule with 3 hydrogens, the first two lines of the csv
file for the geometries look like:
``H1x, H1y, H1z, H2x, H2y, H2z, H3x, H3y, H3z
0,1.350508,0.7790238,0.6630868,1.825709,1.257877,-0.1891705,1.848891,1.089646``
Then there are the partial charges and then 2 values of the energies (all in similar format to the geometries).
    This function turns it into a monolithic file that can be used to train the Tew method.
:CSVfile: the CSV file with the data
:return: None
"""
inputFile = open(CSVfile, 'r')
outputFile = open("/Users/walfits/Repositories/trainingdata/TewDescriptor/monolithic.dat", "w")
isFirstLine = True
for line in inputFile:
if isFirstLine:
line = inputFile.next()
isFirstLine = False
line = line.strip()
lineSplit = line.split(",")
writeToMono(outputFile, lineSplit)
inputFile.close()
outputFile.close()
def writeToMono(outFile, data):
"""
    Function used by CSVtoTew to turn a line of the CSV file into the format of a monolithic trajectory file. It then
writes it to the output file.
:outFile: The monolithic trajectory file
:data: a line of the original CSV file
:return: None
"""
ene = float(data[-2]) - float(data[-1])
xyz = data[1:22]
outFile.write("energy xyz\n")
outFile.write(str(ene) + "\n")
    # Each of the 7 atoms has 3 coordinates, so step through xyz in blocks of 3
    for i in range(7):
        for j in range(3):
            outFile.write("\t" + str(xyz[3 * i + j]))
outFile.write("\n")
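# End-to-end usage sketch (added for illustration; the directory path and the
# filename key below are placeholders):
#
#     MolproToCSV("/path/to/molpro/outputs", "traj")
#     X = loadX("X.csv")
#     Y = loadY("Y.csv")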
|
|
#! /usr/bin/env python
"""Classes to handle Unix style, MMDF style, and MH style mailboxes."""
import rfc822
import os
__all__ = ["UnixMailbox","MmdfMailbox","MHMailbox","Maildir","BabylMailbox"]
class _Mailbox:
def __init__(self, fp, factory=rfc822.Message):
self.fp = fp
self.seekp = 0
self.factory = factory
def next(self):
while 1:
self.fp.seek(self.seekp)
try:
self._search_start()
except EOFError:
self.seekp = self.fp.tell()
return None
start = self.fp.tell()
self._search_end()
self.seekp = stop = self.fp.tell()
if start != stop:
break
return self.factory(_Subfile(self.fp, start, stop))
class _Subfile:
def __init__(self, fp, start, stop):
self.fp = fp
self.start = start
self.stop = stop
self.pos = self.start
def read(self, length = None):
if self.pos >= self.stop:
return ''
remaining = self.stop - self.pos
if length is None or length < 0:
length = remaining
elif length > remaining:
length = remaining
self.fp.seek(self.pos)
data = self.fp.read(length)
self.pos = self.fp.tell()
return data
def readline(self, length = None):
if self.pos >= self.stop:
return ''
if length is None:
length = self.stop - self.pos
self.fp.seek(self.pos)
data = self.fp.readline(length)
self.pos = self.fp.tell()
return data
def readlines(self, sizehint = -1):
lines = []
while 1:
line = self.readline()
if not line:
break
lines.append(line)
if sizehint >= 0:
sizehint = sizehint - len(line)
if sizehint <= 0:
break
return lines
def tell(self):
return self.pos - self.start
def seek(self, pos, whence=0):
if whence == 0:
self.pos = self.start + pos
elif whence == 1:
self.pos = self.pos + pos
elif whence == 2:
self.pos = self.stop + pos
def close(self):
del self.fp
class UnixMailbox(_Mailbox):
def _search_start(self):
while 1:
pos = self.fp.tell()
line = self.fp.readline()
if not line:
raise EOFError
if line[:5] == 'From ' and self._isrealfromline(line):
self.fp.seek(pos)
return
def _search_end(self):
self.fp.readline() # Throw away header line
while 1:
pos = self.fp.tell()
line = self.fp.readline()
if not line:
return
if line[:5] == 'From ' and self._isrealfromline(line):
self.fp.seek(pos)
return
# An overridable mechanism to test for From-line-ness. You can either
# specify a different regular expression or define a whole new
# _isrealfromline() method. Note that this only gets called for lines
# starting with the 5 characters "From ".
#
# BAW: According to
#http://home.netscape.com/eng/mozilla/2.0/relnotes/demo/content-length.html
# the only portable, reliable way to find message delimiters in a BSD (i.e
# Unix mailbox) style folder is to search for "\n\nFrom .*\n", or at the
# beginning of the file, "^From .*\n". While _fromlinepattern below seems
# like a good idea, in practice, there are too many variations for more
# strict parsing of the line to be completely accurate.
#
# _strict_isrealfromline() is the old version which tries to do stricter
# parsing of the From_ line. _portable_isrealfromline() simply returns
# true, since it's never called if the line doesn't already start with
# "From ".
#
# This algorithm, and the way it interacts with _search_start() and
# _search_end() may not be completely correct, because it doesn't check
# that the two characters preceding "From " are \n\n or the beginning of
# the file. Fixing this would require a more extensive rewrite than is
# necessary. For convenience, we've added a StrictUnixMailbox class which
# uses the older, more strict _fromlinepattern regular expression.
_fromlinepattern = r"From \s*[^\s]+\s+\w\w\w\s+\w\w\w\s+\d?\d\s+" \
r"\d?\d:\d\d(:\d\d)?(\s+[^\s]+)?\s+\d\d\d\d\s*$"
_regexp = None
def _strict_isrealfromline(self, line):
if not self._regexp:
import re
self._regexp = re.compile(self._fromlinepattern)
return self._regexp.match(line)
def _portable_isrealfromline(self, line):
return 1
_isrealfromline = _strict_isrealfromline
class PortableUnixMailbox(UnixMailbox):
_isrealfromline = UnixMailbox._portable_isrealfromline
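# Usage sketch (added for illustration; the mailbox path is a placeholder):
#
#     fp = open('/var/mail/username')
#     mbox = PortableUnixMailbox(fp)
#     while 1:
#         msg = mbox.next()
#         if msg is None:
#             break
#         print msg.getheader('subject')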
class MmdfMailbox(_Mailbox):
def _search_start(self):
while 1:
line = self.fp.readline()
if not line:
raise EOFError
if line[:5] == '\001\001\001\001\n':
return
def _search_end(self):
while 1:
pos = self.fp.tell()
line = self.fp.readline()
if not line:
return
if line == '\001\001\001\001\n':
self.fp.seek(pos)
return
class MHMailbox:
def __init__(self, dirname, factory=rfc822.Message):
import re
pat = re.compile('^[1-9][0-9]*$')
self.dirname = dirname
# the three following lines could be combined into:
# list = map(long, filter(pat.match, os.listdir(self.dirname)))
list = os.listdir(self.dirname)
list = filter(pat.match, list)
list = map(long, list)
list.sort()
# This only works in Python 1.6 or later;
# before that str() added 'L':
self.boxes = map(str, list)
self.factory = factory
def next(self):
if not self.boxes:
return None
fn = self.boxes[0]
del self.boxes[0]
fp = open(os.path.join(self.dirname, fn))
return self.factory(fp)
class Maildir:
# Qmail directory mailbox
def __init__(self, dirname, factory=rfc822.Message):
self.dirname = dirname
self.factory = factory
# check for new mail
newdir = os.path.join(self.dirname, 'new')
boxes = [os.path.join(newdir, f)
for f in os.listdir(newdir) if f[0] != '.']
# Now check for current mail in this maildir
curdir = os.path.join(self.dirname, 'cur')
boxes += [os.path.join(curdir, f)
for f in os.listdir(curdir) if f[0] != '.']
self.boxes = boxes
def next(self):
if not self.boxes:
return None
fn = self.boxes[0]
del self.boxes[0]
fp = open(fn)
return self.factory(fp)
class BabylMailbox(_Mailbox):
def _search_start(self):
while 1:
line = self.fp.readline()
if not line:
raise EOFError
if line == '*** EOOH ***\n':
return
def _search_end(self):
while 1:
pos = self.fp.tell()
line = self.fp.readline()
if not line:
return
if line == '\037\014\n':
self.fp.seek(pos)
return
def _test():
import time
import sys
import os
args = sys.argv[1:]
if not args:
for key in 'MAILDIR', 'MAIL', 'LOGNAME', 'USER':
if os.environ.has_key(key):
mbox = os.environ[key]
break
else:
print "$MAIL, $LOGNAME nor $USER set -- who are you?"
return
else:
mbox = args[0]
if mbox[:1] == '+':
mbox = os.environ['HOME'] + '/Mail/' + mbox[1:]
elif not '/' in mbox:
mbox = '/usr/mail/' + mbox
if os.path.isdir(mbox):
if os.path.isdir(os.path.join(mbox, 'cur')):
mb = Maildir(mbox)
else:
mb = MHMailbox(mbox)
else:
fp = open(mbox, 'r')
mb = UnixMailbox(fp)
msgs = []
while 1:
msg = mb.next()
if msg is None:
break
msgs.append(msg)
if len(args) <= 1:
msg.fp = None
if len(args) > 1:
num = int(args[1])
print 'Message %d body:'%num
msg = msgs[num-1]
msg.rewindbody()
sys.stdout.write(msg.fp.read())
else:
print 'Mailbox',mbox,'has',len(msgs),'messages:'
for msg in msgs:
f = msg.getheader('from') or ""
s = msg.getheader('subject') or ""
d = msg.getheader('date') or ""
print '-%20.20s %20.20s %-30.30s'%(f, d[5:], s)
if __name__ == '__main__':
_test()
|
|
import unittest
from test import test_support
from contextlib import closing
import gc
import pickle
import select
import signal
import subprocess
import traceback
import sys, os, time, errno
if sys.platform in ('os2', 'riscos'):
raise unittest.SkipTest("Can't test signal on %s" % sys.platform)
class HandlerBCalled(Exception):
pass
def exit_subprocess():
"""Use os._exit(0) to exit the current subprocess.
Otherwise, the test catches the SystemExit and continues executing
in parallel with the original test, so you wind up with an
exponential number of tests running concurrently.
"""
os._exit(0)
def ignoring_eintr(__func, *args, **kwargs):
try:
return __func(*args, **kwargs)
except EnvironmentError as e:
if e.errno != errno.EINTR:
raise
return None
@unittest.skipIf(sys.platform == "win32", "Not valid on Windows")
class InterProcessSignalTests(unittest.TestCase):
MAX_DURATION = 20 # Entire test should last at most 20 sec.
def setUp(self):
self.using_gc = gc.isenabled()
gc.disable()
def tearDown(self):
if self.using_gc:
gc.enable()
def format_frame(self, frame, limit=None):
return ''.join(traceback.format_stack(frame, limit=limit))
def handlerA(self, signum, frame):
self.a_called = True
if test_support.verbose:
print "handlerA invoked from signal %s at:\n%s" % (
signum, self.format_frame(frame, limit=1))
def handlerB(self, signum, frame):
self.b_called = True
if test_support.verbose:
print "handlerB invoked from signal %s at:\n%s" % (
signum, self.format_frame(frame, limit=1))
raise HandlerBCalled(signum, self.format_frame(frame))
def wait(self, child):
"""Wait for child to finish, ignoring EINTR."""
while True:
try:
child.wait()
return
except OSError as e:
if e.errno != errno.EINTR:
raise
def run_test(self):
# Install handlers. This function runs in a sub-process, so we
# don't worry about re-setting the default handlers.
signal.signal(signal.SIGHUP, self.handlerA)
signal.signal(signal.SIGUSR1, self.handlerB)
signal.signal(signal.SIGUSR2, signal.SIG_IGN)
signal.signal(signal.SIGALRM, signal.default_int_handler)
# Variables the signals will modify:
self.a_called = False
self.b_called = False
# Let the sub-processes know who to send signals to.
pid = os.getpid()
if test_support.verbose:
print "test runner's pid is", pid
child = ignoring_eintr(subprocess.Popen, ['kill', '-HUP', str(pid)])
if child:
self.wait(child)
if not self.a_called:
time.sleep(1) # Give the signal time to be delivered.
self.assertTrue(self.a_called)
self.assertFalse(self.b_called)
self.a_called = False
# Make sure the signal isn't delivered while the previous
# Popen object is being destroyed, because __del__ swallows
# exceptions.
del child
try:
child = subprocess.Popen(['kill', '-USR1', str(pid)])
# This wait should be interrupted by the signal's exception.
self.wait(child)
time.sleep(1) # Give the signal time to be delivered.
self.fail('HandlerBCalled exception not raised')
except HandlerBCalled:
self.assertTrue(self.b_called)
self.assertFalse(self.a_called)
if test_support.verbose:
print "HandlerBCalled exception caught"
child = ignoring_eintr(subprocess.Popen, ['kill', '-USR2', str(pid)])
if child:
self.wait(child) # Nothing should happen.
try:
signal.alarm(1)
# The race condition in pause doesn't matter in this case,
            # since alarm is going to raise a KeyboardInterrupt, which
# will skip the call.
signal.pause()
# But if another signal arrives before the alarm, pause
# may return early.
time.sleep(1)
except KeyboardInterrupt:
if test_support.verbose:
print "KeyboardInterrupt (the alarm() went off)"
except:
self.fail("Some other exception woke us from pause: %s" %
traceback.format_exc())
else:
self.fail("pause returned of its own accord, and the signal"
" didn't arrive after another second.")
finally:
signal.alarm(0)
# Issue 3864. Unknown if this affects earlier versions of freebsd also.
@unittest.skipIf(sys.platform=='freebsd6',
'inter process signals not reliable (do not mix well with threading) '
'on freebsd6')
def test_main(self):
# This function spawns a child process to insulate the main
# test-running process from all the signals. It then
# communicates with that child process over a pipe and
# re-raises information about any exceptions the child
# raises. The real work happens in self.run_test().
os_done_r, os_done_w = os.pipe()
with closing(os.fdopen(os_done_r)) as done_r, \
closing(os.fdopen(os_done_w, 'w')) as done_w:
child = os.fork()
if child == 0:
# In the child process; run the test and report results
# through the pipe.
try:
done_r.close()
# Have to close done_w again here because
# exit_subprocess() will skip the enclosing with block.
with closing(done_w):
try:
self.run_test()
except:
pickle.dump(traceback.format_exc(), done_w)
else:
pickle.dump(None, done_w)
except:
print 'Uh oh, raised from pickle.'
traceback.print_exc()
finally:
exit_subprocess()
done_w.close()
# Block for up to MAX_DURATION seconds for the test to finish.
r, w, x = select.select([done_r], [], [], self.MAX_DURATION)
if done_r in r:
tb = pickle.load(done_r)
if tb:
self.fail(tb)
else:
os.kill(child, signal.SIGKILL)
self.fail('Test deadlocked after %d seconds.' %
self.MAX_DURATION)
# read the exit status to not leak a zombie process
os.waitpid(child, 0)
@unittest.skipIf(sys.platform == "win32", "Not valid on Windows")
class BasicSignalTests(unittest.TestCase):
def trivial_signal_handler(self, *args):
pass
def test_out_of_range_signal_number_raises_error(self):
self.assertRaises(ValueError, signal.getsignal, 4242)
self.assertRaises(ValueError, signal.signal, 4242,
self.trivial_signal_handler)
def test_setting_signal_handler_to_none_raises_error(self):
self.assertRaises(TypeError, signal.signal,
signal.SIGUSR1, None)
def test_getsignal(self):
hup = signal.signal(signal.SIGHUP, self.trivial_signal_handler)
self.assertEqual(signal.getsignal(signal.SIGHUP),
self.trivial_signal_handler)
signal.signal(signal.SIGHUP, hup)
self.assertEqual(signal.getsignal(signal.SIGHUP), hup)
@unittest.skipUnless(sys.platform == "win32", "Windows specific")
class WindowsSignalTests(unittest.TestCase):
def test_issue9324(self):
# Updated for issue #10003, adding SIGBREAK
handler = lambda x, y: None
for sig in (signal.SIGABRT, signal.SIGBREAK, signal.SIGFPE,
signal.SIGILL, signal.SIGINT, signal.SIGSEGV,
signal.SIGTERM):
# Set and then reset a handler for signals that work on Windows
signal.signal(sig, signal.signal(sig, handler))
with self.assertRaises(ValueError):
signal.signal(-1, handler)
with self.assertRaises(ValueError):
signal.signal(7, handler)
class WakeupFDTests(unittest.TestCase):
def test_invalid_fd(self):
fd = test_support.make_bad_fd()
self.assertRaises(ValueError, signal.set_wakeup_fd, fd)
@unittest.skipIf(sys.platform == "win32", "Not valid on Windows")
class WakeupSignalTests(unittest.TestCase):
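    # These tests rely on signal.set_wakeup_fd(): when a signal arrives, the
    # interpreter writes a byte to the registered fd, so a select() on the
    # read end of the pipe created in setUp() returns promptly even though
    # the SIGALRM handler itself does nothing.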
TIMEOUT_FULL = 10
TIMEOUT_HALF = 5
def test_wakeup_fd_early(self):
import select
signal.alarm(1)
try:
before_time = time.time()
# We attempt to get a signal during the sleep,
# before select is called
time.sleep(self.TIMEOUT_FULL)
mid_time = time.time()
finally:
signal.alarm(0)
self.assertTrue(mid_time - before_time < self.TIMEOUT_HALF)
select.select([self.read], [], [], self.TIMEOUT_FULL)
after_time = time.time()
self.assertTrue(after_time - mid_time < self.TIMEOUT_HALF)
def test_wakeup_fd_during(self):
import select
signal.alarm(1)
try:
before_time = time.time()
# We attempt to get a signal during the select call
self.assertRaises(select.error, select.select,
[self.read], [], [], self.TIMEOUT_FULL)
after_time = time.time()
finally:
signal.alarm(0)
self.assertTrue(after_time - before_time < self.TIMEOUT_HALF)
def setUp(self):
import fcntl
self.alrm = signal.signal(signal.SIGALRM, lambda x,y:None)
self.read, self.write = os.pipe()
flags = fcntl.fcntl(self.write, fcntl.F_GETFL, 0)
flags = flags | os.O_NONBLOCK
fcntl.fcntl(self.write, fcntl.F_SETFL, flags)
self.old_wakeup = signal.set_wakeup_fd(self.write)
def tearDown(self):
signal.set_wakeup_fd(self.old_wakeup)
os.close(self.read)
os.close(self.write)
signal.signal(signal.SIGALRM, self.alrm)
@unittest.skipIf(sys.platform == "win32", "Not valid on Windows")
class SiginterruptTest(unittest.TestCase):
def setUp(self):
"""Install a no-op signal handler that can be set to allow
interrupts or not, and arrange for the original signal handler to be
re-installed when the test is finished.
"""
self.signum = signal.SIGUSR1
oldhandler = signal.signal(self.signum, lambda x,y: None)
self.addCleanup(signal.signal, self.signum, oldhandler)
def readpipe_interrupted(self):
"""Perform a read during which a signal will arrive. Return True if the
read is interrupted by the signal and raises an exception. Return False
if it returns normally.
"""
# Create a pipe that can be used for the read. Also clean it up
# when the test is over, since nothing else will (but see below for
# the write end).
r, w = os.pipe()
self.addCleanup(os.close, r)
# Create another process which can send a signal to this one to try
# to interrupt the read.
ppid = os.getpid()
pid = os.fork()
if pid == 0:
# Child code: sleep to give the parent enough time to enter the
# read() call (there's a race here, but it's really tricky to
# eliminate it); then signal the parent process. Also, sleep
# again to make it likely that the signal is delivered to the
# parent process before the child exits. If the child exits
# first, the write end of the pipe will be closed and the test
# is invalid.
try:
time.sleep(0.2)
os.kill(ppid, self.signum)
time.sleep(0.2)
finally:
# No matter what, just exit as fast as possible now.
exit_subprocess()
else:
# Parent code.
# Make sure the child is eventually reaped, else it'll be a
# zombie for the rest of the test suite run.
self.addCleanup(os.waitpid, pid, 0)
# Close the write end of the pipe. The child has a copy, so
# it's not really closed until the child exits. We need it to
# close when the child exits so that in the non-interrupt case
# the read eventually completes, otherwise we could just close
# it *after* the test.
os.close(w)
# Try the read and report whether it is interrupted or not to
# the caller.
try:
d = os.read(r, 1)
return False
except OSError, err:
if err.errno != errno.EINTR:
raise
return True
def test_without_siginterrupt(self):
"""If a signal handler is installed and siginterrupt is not called
at all, when that signal arrives, it interrupts a syscall that's in
progress.
"""
i = self.readpipe_interrupted()
self.assertTrue(i)
# Arrival of the signal shouldn't have changed anything.
i = self.readpipe_interrupted()
self.assertTrue(i)
def test_siginterrupt_on(self):
"""If a signal handler is installed and siginterrupt is called with
a true value for the second argument, when that signal arrives, it
interrupts a syscall that's in progress.
"""
signal.siginterrupt(self.signum, 1)
i = self.readpipe_interrupted()
self.assertTrue(i)
# Arrival of the signal shouldn't have changed anything.
i = self.readpipe_interrupted()
self.assertTrue(i)
def test_siginterrupt_off(self):
"""If a signal handler is installed and siginterrupt is called with
a false value for the second argument, when that signal arrives, it
does not interrupt a syscall that's in progress.
"""
signal.siginterrupt(self.signum, 0)
i = self.readpipe_interrupted()
self.assertFalse(i)
# Arrival of the signal shouldn't have changed anything.
i = self.readpipe_interrupted()
self.assertFalse(i)
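# Illustrative sketch (not part of the test suite): a self-contained
# demonstration of the siginterrupt() behaviour verified by SiginterruptTest
# above.  It uses SIGALRM instead of a helper process for brevity; POSIX only.
def _siginterrupt_sketch():
    """Return True if a blocking read() is interrupted by SIGALRM."""
    old_handler = signal.signal(signal.SIGALRM, lambda signum, frame: None)
    signal.siginterrupt(signal.SIGALRM, 1)  # 1 means: do interrupt slow syscalls
    r, w = os.pipe()
    signal.alarm(1)  # SIGALRM arrives while read() is blocked below
    try:
        os.read(r, 1)  # nothing is ever written, so only a signal can end this
        return False
    except OSError as e:
        return e.errno == errno.EINTR  # interrupted, as in test_siginterrupt_on
    finally:
        signal.alarm(0)
        signal.signal(signal.SIGALRM, old_handler)
        os.close(r)
        os.close(w)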
@unittest.skipIf(sys.platform == "win32", "Not valid on Windows")
class ItimerTest(unittest.TestCase):
def setUp(self):
self.hndl_called = False
self.hndl_count = 0
self.itimer = None
self.old_alarm = signal.signal(signal.SIGALRM, self.sig_alrm)
def tearDown(self):
signal.signal(signal.SIGALRM, self.old_alarm)
if self.itimer is not None: # test_itimer_exc doesn't change this attr
# just ensure that itimer is stopped
signal.setitimer(self.itimer, 0)
def sig_alrm(self, *args):
self.hndl_called = True
if test_support.verbose:
print("SIGALRM handler invoked", args)
def sig_vtalrm(self, *args):
self.hndl_called = True
if self.hndl_count > 3:
# it shouldn't be here, because it should have been disabled.
raise signal.ItimerError("setitimer didn't disable ITIMER_VIRTUAL "
"timer.")
elif self.hndl_count == 3:
# disable ITIMER_VIRTUAL, this function shouldn't be called anymore
signal.setitimer(signal.ITIMER_VIRTUAL, 0)
if test_support.verbose:
print("last SIGVTALRM handler call")
self.hndl_count += 1
if test_support.verbose:
print("SIGVTALRM handler invoked", args)
def sig_prof(self, *args):
self.hndl_called = True
signal.setitimer(signal.ITIMER_PROF, 0)
if test_support.verbose:
print("SIGPROF handler invoked", args)
def test_itimer_exc(self):
# XXX I'm assuming -1 is an invalid itimer, but maybe some platform
# defines it ?
self.assertRaises(signal.ItimerError, signal.setitimer, -1, 0)
# Negative times are treated as zero on some platforms.
if 0:
self.assertRaises(signal.ItimerError,
signal.setitimer, signal.ITIMER_REAL, -1)
def test_itimer_real(self):
self.itimer = signal.ITIMER_REAL
signal.setitimer(self.itimer, 1.0)
if test_support.verbose:
print("\ncall pause()...")
signal.pause()
self.assertEqual(self.hndl_called, True)
# Issue 3864. Unknown if this affects earlier versions of freebsd also.
@unittest.skipIf(sys.platform in ('freebsd6', 'netbsd5'),
'itimer not reliable (does not mix well with threading) on some BSDs.')
def test_itimer_virtual(self):
self.itimer = signal.ITIMER_VIRTUAL
signal.signal(signal.SIGVTALRM, self.sig_vtalrm)
signal.setitimer(self.itimer, 0.3, 0.2)
start_time = time.time()
while time.time() - start_time < 60.0:
# use up some virtual time by doing real work
_ = pow(12345, 67890, 10000019)
if signal.getitimer(self.itimer) == (0.0, 0.0):
break # sig_vtalrm handler stopped this itimer
else: # Issue 8424
self.skipTest("timeout: likely cause: machine too slow or load too "
"high")
# virtual itimer should be (0.0, 0.0) now
self.assertEqual(signal.getitimer(self.itimer), (0.0, 0.0))
# and the handler should have been called
self.assertEqual(self.hndl_called, True)
# Issue 3864. Unknown if this affects earlier versions of freebsd also.
@unittest.skipIf(sys.platform=='freebsd6',
'itimer not reliable (does not mix well with threading) on freebsd6')
def test_itimer_prof(self):
self.itimer = signal.ITIMER_PROF
signal.signal(signal.SIGPROF, self.sig_prof)
signal.setitimer(self.itimer, 0.2, 0.2)
start_time = time.time()
while time.time() - start_time < 60.0:
# do some work
_ = pow(12345, 67890, 10000019)
if signal.getitimer(self.itimer) == (0.0, 0.0):
break # sig_prof handler stopped this itimer
else: # Issue 8424
self.skipTest("timeout: likely cause: machine too slow or load too "
"high")
# profiling itimer should be (0.0, 0.0) now
self.assertEqual(signal.getitimer(self.itimer), (0.0, 0.0))
# and the handler should have been called
self.assertEqual(self.hndl_called, True)
def test_setitimer_tiny(self):
# bpo-30807: C setitimer() takes a microsecond-resolution interval.
# Check that float -> timeval conversion doesn't round
# the interval down to zero, which would disable the timer.
self.itimer = signal.ITIMER_REAL
signal.setitimer(self.itimer, 1e-6)
time.sleep(1)
self.assertEqual(self.hndl_called, True)
def test_main():
test_support.run_unittest(BasicSignalTests, InterProcessSignalTests,
WakeupFDTests, WakeupSignalTests,
SiginterruptTest, ItimerTest,
WindowsSignalTests)
if __name__ == "__main__":
test_main()
|
|
# -*- coding: utf-8 -*-
"""Common interacts."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import numpy as np
from vispy.gloo import Texture2D
from .base import BaseInteract
from .transform import Scale, Range, Subplot, Clip, NDC
from .utils import _get_texture, _get_boxes, _get_box_pos_size
from .visuals import LineVisual
#------------------------------------------------------------------------------
# Grid interact
#------------------------------------------------------------------------------
class Grid(BaseInteract):
"""Grid interact.
NOTE: to be used in a Grid interact, a visual must define `a_box_index`
(by default) or another GLSL variable specified in `box_var`.
Parameters
----------
shape : tuple or str
Number of rows, cols in the grid.
box_var : str
Name of the GLSL variable with the box index.
"""
margin = .075
def __init__(self, shape=(1, 1), shape_var='u_grid_shape', box_var=None):
# Name of the variable with the box index.
self.box_var = box_var or 'a_box_index'
self.shape_var = shape_var
self._shape = shape
ms = 1 - self.margin
mc = 1 - self.margin
self._transforms = [Scale((ms, ms)),
Clip([-mc, -mc, +mc, +mc]),
Subplot(self.shape_var, self.box_var),
]
def attach(self, canvas):
super(Grid, self).attach(canvas)
canvas.transforms.add_on_gpu(self._transforms)
canvas.inserter.insert_vert("""
attribute vec2 {};
uniform vec2 {};
""".format(self.box_var, self.shape_var),
'header')
def map(self, arr, box=None):
assert box is not None
assert len(box) == 2
arr = self._transforms[0].apply(arr)
arr = Subplot(self.shape, box).apply(arr)
return arr
def imap(self, arr, box=None):
assert box is not None
arr = Subplot(self.shape, box).inverse().apply(arr)
arr = self._transforms[0].inverse().apply(arr)
return arr
def add_boxes(self, canvas, shape=None):
shape = shape or self.shape
assert isinstance(shape, tuple)
n, m = shape
n_boxes = n * m
a = 1 + .05
pos = np.array([[-a, -a, +a, -a],
[+a, -a, +a, +a],
[+a, +a, -a, +a],
[-a, +a, -a, -a],
])
pos = np.tile(pos, (n_boxes, 1))
box_index = []
for i in range(n):
for j in range(m):
box_index.append([i, j])
box_index = np.vstack(box_index)
box_index = np.repeat(box_index, 8, axis=0)
boxes = LineVisual()
@boxes.set_canvas_transforms_filter
def _remove_clip(tc):
return tc.remove('Clip')
canvas.add_visual(boxes)
boxes.set_data(pos=pos)
boxes.program['a_box_index'] = box_index.astype(np.float32)
def get_closest_box(self, pos):
x, y = pos
rows, cols = self.shape
j = np.clip(int(cols * (1. + x) / 2.), 0, cols - 1)
i = np.clip(int(rows * (1. - y) / 2.), 0, rows - 1)
return i, j
def update_program(self, program):
program[self.shape_var] = self._shape
# Only set the default box index if necessary.
try:
program[self.box_var]
except KeyError:
program[self.box_var] = (0, 0)
@property
def shape(self):
return self._shape
@shape.setter
def shape(self, value):
self._shape = value
self.update()
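#------------------------------------------------------------------------------
# Usage sketch (illustrative only, not used by the library)
#------------------------------------------------------------------------------
def _grid_usage_sketch():  # pragma: no cover
    """Minimal sketch of the Grid interact, with no canvas attached.

    Only the CPU-side helper is exercised: with a (2, 3) grid, the NDC
    position (0.5, -0.5) falls in row 1, column 2.
    """
    grid = Grid(shape=(2, 3))
    row, col = grid.get_closest_box((0.5, -0.5))
    assert (row, col) == (1, 2)
    return grid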
#------------------------------------------------------------------------------
# Boxed interact
#------------------------------------------------------------------------------
class Boxed(BaseInteract):
"""Boxed interact.
NOTE: to be used in a Boxed interact, a visual must define `a_box_index`
(by default) or another GLSL variable specified in `box_var`.
Parameters
----------
box_bounds : array-like
A (n, 4) array where each row contains the `(xmin, ymin, xmax, ymax)`
bounds of every box, in normalized device coordinates.
NOTE: the box bounds need to be contained within [-1, 1] at all times,
otherwise an error will be raised. This is to prevent silent clipping
of the values when they are passed to a gloo Texture2D.
box_var : str
Name of the GLSL variable with the box index.
"""
margin = 0
def __init__(self,
box_bounds=None,
box_pos=None,
box_size=None,
box_var=None,
keep_aspect_ratio=True,
):
self._key_pressed = None
self.keep_aspect_ratio = keep_aspect_ratio
# Name of the variable with the box index.
self.box_var = box_var or 'a_box_index'
# Find the box bounds if only the box positions are passed.
if box_bounds is None:
assert box_pos is not None
# This will find a good box size automatically if it is not
# specified.
box_bounds = _get_boxes(box_pos, size=box_size,
keep_aspect_ratio=self.keep_aspect_ratio,
margin=self.margin,
)
self._box_bounds = np.atleast_2d(box_bounds)
assert self._box_bounds.shape[1] == 4
self.n_boxes = len(self._box_bounds)
self._transforms = [Range(NDC, 'box_bounds')]
def attach(self, canvas):
super(Boxed, self).attach(canvas)
canvas.transforms.add_on_gpu(self._transforms)
canvas.inserter.insert_vert("""
#include "utils.glsl"
attribute float {};
uniform sampler2D u_box_bounds;
uniform float n_boxes;""".format(self.box_var), 'header')
canvas.inserter.insert_vert("""
// Fetch the box bounds for the current box (`box_var`).
vec4 box_bounds = fetch_texture({},
u_box_bounds,
n_boxes);
box_bounds = (2 * box_bounds - 1); // See hack in Python.
""".format(self.box_var), 'before_transforms')
def map(self, arr, box=None):
assert box is not None
assert 0 <= box < len(self.box_bounds)
return Range(NDC, self.box_bounds[box]).apply(arr)
def imap(self, arr, box=None):
assert 0 <= box < len(self.box_bounds)
return Range(NDC, self.box_bounds[box]).inverse().apply(arr)
def update_program(self, program):
# Signal bounds (positions).
box_bounds = _get_texture(self._box_bounds, NDC, self.n_boxes, [-1, 1])
box_bounds = box_bounds.astype(np.float32)
# TODO OPTIM: set the texture at initialization and update the data
program['u_box_bounds'] = Texture2D(box_bounds,
internalformat='rgba32f')
program['n_boxes'] = self.n_boxes
# Change the box bounds, positions, or size
#--------------------------------------------------------------------------
@property
def box_bounds(self):
return self._box_bounds
@box_bounds.setter
def box_bounds(self, val):
self._box_bounds = np.atleast_2d(val)
assert self._box_bounds.shape[1] == 4
self.n_boxes = self._box_bounds.shape[0]
self.update()
@property
def box_pos(self):
box_pos, _ = _get_box_pos_size(self._box_bounds)
return box_pos
@box_pos.setter
def box_pos(self, val):
self.box_bounds = _get_boxes(val, size=self.box_size,
margin=self.margin,
keep_aspect_ratio=self.keep_aspect_ratio)
@property
def box_size(self):
_, box_size = _get_box_pos_size(self._box_bounds)
return box_size
@box_size.setter
def box_size(self, val):
assert len(val) == 2
self.box_bounds = _get_boxes(self.box_pos, size=val,
margin=self.margin,
keep_aspect_ratio=self.keep_aspect_ratio)
def get_closest_box(self, pos):
"""Get the box closest to some position."""
pos = np.atleast_2d(pos)
d = np.sum((np.array(self.box_pos) - pos) ** 2, axis=1)
idx = np.argmin(d)
return idx
def update_boxes(self, box_pos, box_size):
"""Set the box bounds from specified box positions and sizes."""
assert box_pos.shape == (self.n_boxes, 2)
assert len(box_size) == 2
self.box_bounds = _get_boxes(box_pos,
size=box_size,
keep_aspect_ratio=self.keep_aspect_ratio,
)
class Stacked(Boxed):
"""Stacked interact.
NOTE: to be used in a Stacked interact, a visual must define `a_box_index`
(by default) or another GLSL variable specified in `box_var`.
Parameters
----------
n_boxes : int
Number of boxes to stack vertically.
margin : int (0 by default)
The margin between the stacked subplots. Can be negative. Must be
between -1 and 1. The unit is relative to each box's size.
box_var : str
Name of the GLSL variable with the box index.
"""
def __init__(self, n_boxes, margin=0, box_var=None, origin=None):
# The margin must be in [-1, 1]
margin = np.clip(margin, -1, 1)
# Normalize the margin.
margin = 2. * margin / float(n_boxes)
# Signal bounds.
b = np.zeros((n_boxes, 4))
b[:, 0] = -1
b[:, 1] = np.linspace(-1, 1 - 2. / n_boxes + margin, n_boxes)
b[:, 2] = 1
b[:, 3] = np.linspace(-1 + 2. / n_boxes - margin, 1., n_boxes)
if origin == 'upper':
b = b[::-1, :]
super(Stacked, self).__init__(b, box_var=box_var,
keep_aspect_ratio=False,
)
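#------------------------------------------------------------------------------
# Usage sketch (illustrative only, not used by the library)
#------------------------------------------------------------------------------
def _stacked_usage_sketch():  # pragma: no cover
    """Minimal sketch of the Stacked (and hence Boxed) interact.

    With ``n_boxes=2`` and ``margin=0``, the vertical NDC range is split in
    two: the box bounds are (-1, -1, +1, 0) and (-1, 0, +1, +1).
    """
    stacked = Stacked(2, margin=0)
    assert stacked.n_boxes == 2
    assert np.allclose(stacked.box_bounds, [[-1, -1, 1, 0],
                                            [-1, 0, 1, 1]])
    return stacked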
|
|
# coding: utf-8
"""
Onshape REST API
The Onshape REST API consumed by all clients. # noqa: E501
The version of the OpenAPI document: 1.113
Contact: api-support@onshape.zendesk.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
import sys # noqa: F401
import six # noqa: F401
import nulltype # noqa: F401
from onshape_client.oas.model_utils import ( # noqa: F401
ModelComposed,
ModelNormal,
ModelSimple,
date,
datetime,
file_type,
int,
none_type,
str,
validate_get_composed_info,
)
try:
from onshape_client.oas.models import btp_annotation231
except ImportError:
btp_annotation231 = sys.modules["onshape_client.oas.models.btp_annotation231"]
try:
from onshape_client.oas.models import btp_space10
except ImportError:
btp_space10 = sys.modules["onshape_client.oas.models.btp_space10"]
try:
from onshape_client.oas.models import btp_statement269
except ImportError:
btp_statement269 = sys.modules["onshape_client.oas.models.btp_statement269"]
try:
from onshape_client.oas.models import btp_statement_block271_all_of
except ImportError:
btp_statement_block271_all_of = sys.modules[
"onshape_client.oas.models.btp_statement_block271_all_of"
]
class BTPStatementBlock271(ModelComposed):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
("documentation_type",): {
"FUNCTION": "FUNCTION",
"PREDICATE": "PREDICATE",
"CONSTANT": "CONSTANT",
"ENUM": "ENUM",
"USER_TYPE": "USER_TYPE",
"FEATURE_DEFINITION": "FEATURE_DEFINITION",
"FILE_HEADER": "FILE_HEADER",
"UNDOCUMENTABLE": "UNDOCUMENTABLE",
"UNKNOWN": "UNKNOWN",
},
}
validations = {}
additional_properties_type = None
@staticmethod
def openapi_types():
"""
This must be a class method so a model may have properties that are
of type self; this ensures that we don't create a cyclic import.
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
"bt_type": (str,), # noqa: E501
"space_after_open": (btp_space10.BTPSpace10,), # noqa: E501
"atomic": (bool,), # noqa: E501
"documentation_type": (str,), # noqa: E501
"end_source_location": (int,), # noqa: E501
"node_id": (str,), # noqa: E501
"short_descriptor": (str,), # noqa: E501
"space_after": (btp_space10.BTPSpace10,), # noqa: E501
"space_before": (btp_space10.BTPSpace10,), # noqa: E501
"space_default": (bool,), # noqa: E501
"start_source_location": (int,), # noqa: E501
"annotation": (btp_annotation231.BTPAnnotation231,), # noqa: E501
}
@staticmethod
def discriminator():
return None
attribute_map = {
"bt_type": "btType", # noqa: E501
"space_after_open": "spaceAfterOpen", # noqa: E501
"atomic": "atomic", # noqa: E501
"documentation_type": "documentationType", # noqa: E501
"end_source_location": "endSourceLocation", # noqa: E501
"node_id": "nodeId", # noqa: E501
"short_descriptor": "shortDescriptor", # noqa: E501
"space_after": "spaceAfter", # noqa: E501
"space_before": "spaceBefore", # noqa: E501
"space_default": "spaceDefault", # noqa: E501
"start_source_location": "startSourceLocation", # noqa: E501
"annotation": "annotation", # noqa: E501
}
required_properties = set(
[
"_data_store",
"_check_type",
"_from_server",
"_path_to_item",
"_configuration",
"_composed_instances",
"_var_name_to_model_instances",
"_additional_properties_model_instances",
]
)
def __init__(
self,
_check_type=True,
_from_server=False,
_path_to_item=(),
_configuration=None,
**kwargs
): # noqa: E501
"""btp_statement_block271.BTPStatementBlock271 - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_from_server (bool): True if the data is from the server
False if the data is from the client (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
bt_type (str): [optional] # noqa: E501
space_after_open (btp_space10.BTPSpace10): [optional] # noqa: E501
atomic (bool): [optional] # noqa: E501
documentation_type (str): [optional] # noqa: E501
end_source_location (int): [optional] # noqa: E501
node_id (str): [optional] # noqa: E501
short_descriptor (str): [optional] # noqa: E501
space_after (btp_space10.BTPSpace10): [optional] # noqa: E501
space_before (btp_space10.BTPSpace10): [optional] # noqa: E501
space_default (bool): [optional] # noqa: E501
start_source_location (int): [optional] # noqa: E501
annotation (btp_annotation231.BTPAnnotation231): [optional] # noqa: E501
"""
self._data_store = {}
self._check_type = _check_type
self._from_server = _from_server
self._path_to_item = _path_to_item
self._configuration = _configuration
constant_args = {
"_check_type": _check_type,
"_path_to_item": _path_to_item,
"_from_server": _from_server,
"_configuration": _configuration,
}
required_args = {}
# remove args whose value is Null because they are unset
required_arg_names = list(required_args.keys())
for required_arg_name in required_arg_names:
if required_args[required_arg_name] is nulltype.Null:
del required_args[required_arg_name]
model_args = {}
model_args.update(required_args)
model_args.update(kwargs)
composed_info = validate_get_composed_info(constant_args, model_args, self)
self._composed_instances = composed_info[0]
self._var_name_to_model_instances = composed_info[1]
self._additional_properties_model_instances = composed_info[2]
unused_args = composed_info[3]
for var_name, var_value in required_args.items():
setattr(self, var_name, var_value)
for var_name, var_value in six.iteritems(kwargs):
if (
var_name in unused_args
and self._configuration is not None
and self._configuration.discard_unknown_keys
and not self._additional_properties_model_instances
):
# discard variable.
continue
setattr(self, var_name, var_value)
@staticmethod
def _composed_schemas():
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return {
"anyOf": [],
"allOf": [
btp_statement269.BTPStatement269,
btp_statement_block271_all_of.BTPStatementBlock271AllOf,
],
"oneOf": [],
}
|
|
"""Application base class.
"""
import argparse
import codecs
import inspect
import locale
import logging
import logging.handlers
import os
import sys
import operator
from .complete import CompleteCommand
from .help import HelpAction, HelpCommand
from .utils import damerau_levenshtein, COST
# Make sure the cliff library has a logging handler
# in case the app developer doesn't set up logging.
# For py26 compat, create a NullHandler
if hasattr(logging, 'NullHandler'):
NullHandler = logging.NullHandler
else:
class NullHandler(logging.Handler):
def handle(self, record):
pass
def emit(self, record):
pass
def createLock(self):
self.lock = None
logging.getLogger('cliff').addHandler(NullHandler())
class App(object):
"""Application base class.
:param description: one-liner explaining the program purpose
:paramtype description: str
:param version: application version number
:paramtype version: str
:param command_manager: plugin loader
:paramtype command_manager: cliff.commandmanager.CommandManager
:param stdin: Standard input stream
:paramtype stdin: readable I/O stream
:param stdout: Standard output stream
:paramtype stdout: writable I/O stream
:param stderr: Standard error output stream
:paramtype stderr: writable I/O stream
:param interactive_app_factory: callable to create an
interactive application
:paramtype interactive_app_factory: cliff.interactive.InteractiveApp
:param deferred_help: True to defer processing of the -h/--help option
until after initialize_app() has been called
:paramtype deferred_help: bool
"""
NAME = os.path.splitext(os.path.basename(sys.argv[0]))[0]
LOG = logging.getLogger(NAME)
CONSOLE_MESSAGE_FORMAT = '%(message)s'
LOG_FILE_MESSAGE_FORMAT = \
'[%(asctime)s] %(levelname)-8s %(name)s %(message)s'
DEFAULT_VERBOSE_LEVEL = 1
DEFAULT_OUTPUT_ENCODING = 'utf-8'
def __init__(self, description, version, command_manager,
stdin=None, stdout=None, stderr=None,
interactive_app_factory=None,
deferred_help=False):
"""Initialize the application.
"""
self.command_manager = command_manager
self.command_manager.add_command('help', HelpCommand)
self.command_manager.add_command('complete', CompleteCommand)
self._set_streams(stdin, stdout, stderr)
self.interactive_app_factory = interactive_app_factory
self.deferred_help = deferred_help
self.parser = self.build_option_parser(description, version)
self.interactive_mode = False
self.interpreter = None
def _set_streams(self, stdin, stdout, stderr):
try:
locale.setlocale(locale.LC_ALL, '')
except locale.Error:
pass
if sys.version_info[:2] == (2, 6):
# Configure the input and output streams. If a stream is
# provided, it must be configured correctly by the
# caller. If not, make sure the versions of the standard
# streams used by default are wrapped with encodings. This
# works around a problem with Python 2.6 fixed in 2.7 and
# later (http://hg.python.org/cpython/rev/e60ef17561dc/).
lang, encoding = locale.getdefaultlocale()
encoding = (getattr(sys.stdout, 'encoding', None) or
encoding or
self.DEFAULT_OUTPUT_ENCODING)
self.stdin = stdin or codecs.getreader(encoding)(sys.stdin)
self.stdout = stdout or codecs.getwriter(encoding)(sys.stdout)
self.stderr = stderr or codecs.getwriter(encoding)(sys.stderr)
else:
self.stdin = stdin or sys.stdin
self.stdout = stdout or sys.stdout
self.stderr = stderr or sys.stderr
def build_option_parser(self, description, version,
argparse_kwargs=None):
"""Return an argparse option parser for this application.
Subclasses may override this method to extend
the parser with more global options.
:param description: full description of the application
:paramtype description: str
:param version: version number for the application
:paramtype version: str
:param argparse_kwargs: extra keyword argument passed to the
ArgumentParser constructor
:paramtype argparse_kwargs: dict
"""
argparse_kwargs = argparse_kwargs or {}
parser = argparse.ArgumentParser(
description=description,
add_help=False,
**argparse_kwargs
)
parser.add_argument(
'--version',
action='version',
version='%(prog)s {0}'.format(version),
)
parser.add_argument(
'-v', '--verbose',
action='count',
dest='verbose_level',
default=self.DEFAULT_VERBOSE_LEVEL,
help='Increase verbosity of output. Can be repeated.',
)
parser.add_argument(
'--log-file',
action='store',
default=None,
help='Specify a file to log output. Disabled by default.',
)
parser.add_argument(
'-q', '--quiet',
action='store_const',
dest='verbose_level',
const=0,
help='Suppress output except warnings and errors.',
)
if self.deferred_help:
parser.add_argument(
'-h', '--help',
dest='deferred_help',
action='store_true',
help="Show help message and exit.",
)
else:
parser.add_argument(
'-h', '--help',
action=HelpAction,
nargs=0,
default=self, # tricky
help="Show this help message and exit.",
)
parser.add_argument(
'--debug',
default=False,
action='store_true',
help='Show tracebacks on errors.',
)
return parser
def configure_logging(self):
"""Create logging handlers for any log output.
"""
root_logger = logging.getLogger('')
root_logger.setLevel(logging.DEBUG)
# Set up logging to a file
if self.options.log_file:
file_handler = logging.FileHandler(
filename=self.options.log_file,
)
formatter = logging.Formatter(self.LOG_FILE_MESSAGE_FORMAT)
file_handler.setFormatter(formatter)
root_logger.addHandler(file_handler)
# Always send higher-level messages to the console via stderr
console = logging.StreamHandler(self.stderr)
console_level = {0: logging.WARNING,
1: logging.INFO,
2: logging.DEBUG,
}.get(self.options.verbose_level, logging.DEBUG)
console.setLevel(console_level)
formatter = logging.Formatter(self.CONSOLE_MESSAGE_FORMAT)
console.setFormatter(formatter)
root_logger.addHandler(console)
return
def print_help_if_requested(self):
"""Print help and exits if deferred help is enabled and requested.
'--help' shows the help message and exits:
* without calling initialize_app if not self.deferred_help (default),
* after initialize_app call if self.deferred_help,
* during initialize_app call if self.deferred_help and subclass calls
explicitly this method in initialize_app.
"""
if self.deferred_help and self.options.deferred_help:
action = HelpAction(None, None, default=self)
action(self.parser, self.options, None, None)
def run(self, argv):
"""Equivalent to the main program for the application.
:param argv: input arguments and options
:paramtype argv: list of str
"""
try:
self.options, remainder = self.parser.parse_known_args(argv)
self.configure_logging()
self.interactive_mode = not remainder
if self.deferred_help and self.options.deferred_help and remainder:
# When help is requested and `remainder` has any values disable
# `deferred_help` and instead allow the help subcommand to
# handle the request during run_subcommand(). This turns
# "app foo bar --help" into "app help foo bar". However, when
# `remainder` is empty use print_help_if_requested() to allow
# for an early exit.
# Disabling `deferred_help` here also ensures that
# print_help_if_requested will not fire if called by a subclass
# during its initialize_app().
self.options.deferred_help = False
remainder.insert(0, "help")
self.initialize_app(remainder)
self.print_help_if_requested()
except Exception as err:
if hasattr(self, 'options'):
debug = self.options.debug
else:
debug = True
if debug:
self.LOG.exception(err)
raise
else:
self.LOG.error(err)
return 1
result = 1
if self.interactive_mode:
result = self.interact()
else:
result = self.run_subcommand(remainder)
return result
# FIXME(dhellmann): Consider moving these command handling methods
# to a separate class.
def initialize_app(self, argv):
"""Hook for subclasses to take global initialization action
after the arguments are parsed but before a command is run.
Invoked only once, even in interactive mode.
:param argv: List of arguments, including the subcommand to run.
Empty for interactive mode.
"""
return
def prepare_to_run_command(self, cmd):
"""Perform any preliminary work needed to run a command.
:param cmd: command processor being invoked
:paramtype cmd: cliff.command.Command
"""
return
def clean_up(self, cmd, result, err):
"""Hook run after a command is done to shutdown the app.
:param cmd: command processor being invoked
:paramtype cmd: cliff.command.Command
:param result: return value of cmd
:paramtype result: int
:param err: exception or None
:paramtype err: Exception
"""
return
def interact(self):
# Defer importing .interactive as cmd2 is a slow import
from .interactive import InteractiveApp
if self.interactive_app_factory is None:
self.interactive_app_factory = InteractiveApp
self.interpreter = self.interactive_app_factory(self,
self.command_manager,
self.stdin,
self.stdout,
)
self.interpreter.cmdloop()
return 0
def get_fuzzy_matches(self, cmd):
"""return fuzzy matches of unknown command
"""
sep = '_'
if self.command_manager.convert_underscores:
sep = ' '
all_cmds = [k[0] for k in self.command_manager]
dist = []
for candidate in sorted(all_cmds):
prefix = candidate.split(sep)[0]
# Give prefix match a very good score
if candidate.startswith(cmd):
dist.append((candidate, 0))
continue
# Damerau-Levenshtein distance between cmd and the candidate's first word
dist.append((candidate, damerau_levenshtein(cmd, prefix, COST)+1))
dist = sorted(dist, key=operator.itemgetter(1, 0))
matches = []
i = 0
# Find the best similarity
while (not dist[i][1]):
matches.append(dist[i][0])
i += 1
best_similarity = dist[i][1]
while (dist[i][1] == best_similarity):
matches.append(dist[i][0])
i += 1
return matches
def run_subcommand(self, argv):
try:
subcommand = self.command_manager.find_command(argv)
except ValueError as err:
# If there was no exact match, try to find a fuzzy match
the_cmd = argv[0]
fuzzy_matches = self.get_fuzzy_matches(the_cmd)
if fuzzy_matches:
article = 'a'
if self.NAME[0] in 'aeiou':
article = 'an'
self.stdout.write('%s: \'%s\' is not %s %s command. '
'See \'%s --help\'.\n'
% (self.NAME, the_cmd, article,
self.NAME, self.NAME))
self.stdout.write('Did you mean one of these?\n')
for match in fuzzy_matches:
self.stdout.write(' %s\n' % match)
else:
if self.options.debug:
raise
else:
self.LOG.error(err)
return 2
cmd_factory, cmd_name, sub_argv = subcommand
kwargs = {}
if 'cmd_name' in inspect.getargspec(cmd_factory.__init__).args:
kwargs['cmd_name'] = cmd_name
cmd = cmd_factory(self, self.options, **kwargs)
err = None
result = 1
try:
self.prepare_to_run_command(cmd)
full_name = (cmd_name
if self.interactive_mode
else ' '.join([self.NAME, cmd_name])
)
cmd_parser = cmd.get_parser(full_name)
parsed_args = cmd_parser.parse_args(sub_argv)
result = cmd.run(parsed_args)
except Exception as err:
if self.options.debug:
self.LOG.exception(err)
else:
self.LOG.error(err)
try:
self.clean_up(cmd, result, err)
except Exception as err2:
if self.options.debug:
self.LOG.exception(err2)
else:
self.LOG.error('Could not clean up: %s', err2)
if self.options.debug:
raise
else:
try:
self.clean_up(cmd, result, None)
except Exception as err3:
if self.options.debug:
self.LOG.exception(err3)
else:
self.LOG.error('Could not clean up: %s', err3)
return result
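# A minimal usage sketch (illustrative only, not used by cliff itself).
# The 'demo.commands' entry-point namespace is hypothetical; a real
# application would load commands from its own namespace.
def _demo_app_sketch():
    """Build (but do not run) a tiny cliff application."""
    # Imported lazily so this sketch has no effect on normal module import.
    from .commandmanager import CommandManager

    class DemoApp(App):
        def __init__(self):
            super(DemoApp, self).__init__(
                description='cliff demo application',
                version='0.1',
                command_manager=CommandManager('demo.commands'),
            )

        def build_option_parser(self, description, version):
            # Subclasses may extend the parser with extra global options,
            # as noted in App.build_option_parser().
            parser = super(DemoApp, self).build_option_parser(
                description, version)
            parser.add_argument('--color', action='store_true',
                                help='Colorize output (demo option).')
            return parser

    # A real entry point would then call DemoApp().run(sys.argv[1:]).
    return DemoApp()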
|
|
#!/usr/bin/env python
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Exports data from Google Maps Coordinate to a CSV file.
Usage: python gmc_export.py <team-id> <output-file.csv>
If the output-file argument is omitted, CSV data will be written to stdout.
If authentication is needed, your browser will open a page asking you to
grant permission; click "Accept" to proceed. This script should then
write out a CSV file containing all the job data for your team.
You will need a Google Maps Coordinate Team ID and a client secrets file
in order to use this script.
To find your Team ID:
- Log into coordinate.google.com.
- Select a team.
- Copy the Team ID from the URL (it's the part after the last slash,
and it looks like "M7jknfvZrnTTqIE2cd6k5g").
To get a client secrets file:
- Go to https://console.developers.google.com/ and create a project there.
- Go to APIs & Auth > APIs, scroll down to "Google Maps Coordinate API",
and click the "OFF" button to turn on the API.
- Go to APIs & Auth > Consent screen, and fill in the form.
- Go to APIs & Auth > Credentials, click "Create new Client ID", select
"Installed application", leave the application type as "Other", and
click "Create Client ID".
- Click "Download JSON" to get a client secrets file. Copy this file to
"client_secrets.json" in the same directory as this script.
"""
__author__ = 'jlivni@google.com (Josh Livni), kpy@google.com (Ka-Ping Yee)'
import argparse
import csv
import datetime
import httplib2
import logging
import os
import pprint
import sys
# These modules are part of the Google APIs Client Library for Python, which
# you can install with: sudo pip install --upgrade google-api-python-client
import apiclient.discovery
import oauth2client.client
import oauth2client.file  # provides oauth2client.file.Storage used in authorize()
import oauth2client.tools
def authorize(flags, scope, client_secrets_path, credentials_path):
"""Authorizes an HTTP object with the user's credentials.
Args:
flags: Command-line flags from argparse.ArgumentParser.parse_args().
scope: OAuth scope URL.
client_secrets_path: Path to an existing client_secrets.json file.
credentials_path: Path where the user's credentials are stored; if
this file doesn't exist yet, the user will be taken through the
consent flow and then the credentials will be saved here.
"""
storage = oauth2client.file.Storage(credentials_path)
credentials = storage.get()
if not credentials or credentials.invalid:
flow = oauth2client.client.flow_from_clientsecrets(
client_secrets_path, scope=scope,
message=oauth2client.tools.message_if_missing(client_secrets_path))
credentials = oauth2client.tools.run_flow(flow, storage, flags)
return credentials.authorize(httplib2.Http())
def get_service(flags, name, version):
"""Sets up access to a Google API.
Args:
flags: Command-line flags from argparse.ArgumentParser.parse_args().
name: The name of the API, e.g. 'coordinate'.
version: The version of the API, e.g. 'v1'.
"""
return apiclient.discovery.build(name, version, http=authorize(
flags,
'https://www.googleapis.com/auth/' + name,
os.path.join(os.path.dirname(__file__), 'client_secrets.json'),
'user_credentials.json'))
class Team:
def __init__(self, flags, team_id):
"""Data accessor for a Google Maps Coordinate team."""
self.service = get_service(flags, 'coordinate', 'v1')
self.team_id = team_id
def get_custom_fields(self):
"""Returns a dictionary mapping custom field IDs to field names."""
items = self.service.customFieldDef().list(
teamId=self.team_id).execute()['items']
return {int(item['id']): item['name'] for item in items}
def get_all_jobs(self):
"""Yields a sequence of job dictionaries, including the state and the
jobChange timestamps but omitting the rest of the jobChange items."""
jobs = self.service.jobs()
request = jobs.list(
teamId=self.team_id,
fields='items(id,state,jobChange(timestamp)),nextPageToken',
maxResults=100
)
while request:
response = request.execute() # fetch one page
if 'items' not in response:
break
for item in response['items']:
yield item
request = jobs.list_next(request, response) # advance to next page
def export_to_csv(team, out, verbose=False):
"""Exports all the jobs for a team to a CSV output stream."""
if verbose:
sys.stderr.write('.')
custom_fields = team.get_custom_fields()
custom_field_ids, custom_field_names = zip(*sorted(custom_fields.items()))
writer = csv.writer(out)
# Write the first row of column headings.
writer.writerow([
'Job ID',
'Last update',
'Title',
'Assignee',
'Progress',
'Address',
'Lat',
'Lon',
'Contact name',
'Contact phone',
'Notes',
] + list(custom_field_names))
if verbose:
sys.stderr.write('.')
count = 0
for job in team.get_all_jobs():
last_change = max(int(c.get('timestamp') or 0)
for c in job.get('jobChange') or [{}])
dt = datetime.datetime.utcfromtimestamp(last_change/1000)
timestamp = last_change and dt.strftime('%Y-%m-%d %H:%M:%S UTC') or ''
state = job.get('state')
location = state.get('location')
fields = (state.get('customFields') or {}).get('customField') or {}
custom = {int(field.get('customFieldId') or 0): field.get('value', '')
for field in fields}
# Write the field values in the same order as the header row.
writer.writerow([
job['id'],
timestamp,
state.get('title', ''),
state.get('assignee', ''),
state.get('progress', ''),
' / '.join(location.get('addressLine', '')),
location.get('lat'),
location.get('lng'),
state.get('customerName'),
state.get('customerPhoneNumber'),
' / '.join(state.get('note') or []),  # guard against jobs with no notes
] + [custom.get(id, '') for id in custom_field_ids])
count += 1
if verbose and count % 10 == 0:
sys.stderr.write('.')
return count
def main(argv):
# Get the team ID and other oauth2client flags from the command line.
argparser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter,
parents=[oauth2client.tools.argparser])
argparser.add_argument('team_id', help='Google Maps Coordinate Team ID')
argparser.add_argument('out_file', help='Output CSV file name', nargs='?',
type=argparse.FileType('w'), default=sys.stdout)
flags = argparser.parse_args(argv[1:])
logging.basicConfig() # silence logging warnings
# Export all the data in CSV format to the given file.
sys.stderr.write('Working...')
team = Team(flags, flags.team_id)
count = export_to_csv(team, flags.out_file, verbose=True)
flags.out_file.close()
print >>sys.stderr, '\n%d job%s written to %s.' % (
count, count != 1 and 's' or '', flags.out_file.name)
if __name__ == '__main__':
main(sys.argv)
|
|
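# 64-bit S-box lookup tables; the constants below appear to be those of the
# Tiger hash function's reference implementation.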
t1 = [ 0x02AAB17CF7E90C5E , 0xAC424B03E243A8EC ,
0x72CD5BE30DD5FCD3 , 0x6D019B93F6F97F3A ,
0xCD9978FFD21F9193 , 0x7573A1C9708029E2 ,
0xB164326B922A83C3 , 0x46883EEE04915870 ,
0xEAACE3057103ECE6 , 0xC54169B808A3535C ,
0x4CE754918DDEC47C , 0x0AA2F4DFDC0DF40C ,
0x10B76F18A74DBEFA , 0xC6CCB6235AD1AB6A ,
0x13726121572FE2FF , 0x1A488C6F199D921E ,
0x4BC9F9F4DA0007CA , 0x26F5E6F6E85241C7 ,
0x859079DBEA5947B6 , 0x4F1885C5C99E8C92 ,
0xD78E761EA96F864B , 0x8E36428C52B5C17D ,
0x69CF6827373063C1 , 0xB607C93D9BB4C56E ,
0x7D820E760E76B5EA , 0x645C9CC6F07FDC42 ,
0xBF38A078243342E0 , 0x5F6B343C9D2E7D04 ,
0xF2C28AEB600B0EC6 , 0x6C0ED85F7254BCAC ,
0x71592281A4DB4FE5 , 0x1967FA69CE0FED9F ,
0xFD5293F8B96545DB , 0xC879E9D7F2A7600B ,
0x860248920193194E , 0xA4F9533B2D9CC0B3 ,
0x9053836C15957613 , 0xDB6DCF8AFC357BF1 ,
0x18BEEA7A7A370F57 , 0x037117CA50B99066 ,
0x6AB30A9774424A35 , 0xF4E92F02E325249B ,
0x7739DB07061CCAE1 , 0xD8F3B49CECA42A05 ,
0xBD56BE3F51382F73 , 0x45FAED5843B0BB28 ,
0x1C813D5C11BF1F83 , 0x8AF0E4B6D75FA169 ,
0x33EE18A487AD9999 , 0x3C26E8EAB1C94410 ,
0xB510102BC0A822F9 , 0x141EEF310CE6123B ,
0xFC65B90059DDB154 , 0xE0158640C5E0E607 ,
0x884E079826C3A3CF , 0x930D0D9523C535FD ,
0x35638D754E9A2B00 , 0x4085FCCF40469DD5 ,
0xC4B17AD28BE23A4C , 0xCAB2F0FC6A3E6A2E ,
0x2860971A6B943FCD , 0x3DDE6EE212E30446 ,
0x6222F32AE01765AE , 0x5D550BB5478308FE ,
0xA9EFA98DA0EDA22A , 0xC351A71686C40DA7 ,
0x1105586D9C867C84 , 0xDCFFEE85FDA22853 ,
0xCCFBD0262C5EEF76 , 0xBAF294CB8990D201 ,
0xE69464F52AFAD975 , 0x94B013AFDF133E14 ,
0x06A7D1A32823C958 , 0x6F95FE5130F61119 ,
0xD92AB34E462C06C0 , 0xED7BDE33887C71D2 ,
0x79746D6E6518393E , 0x5BA419385D713329 ,
0x7C1BA6B948A97564 , 0x31987C197BFDAC67 ,
0xDE6C23C44B053D02 , 0x581C49FED002D64D ,
0xDD474D6338261571 , 0xAA4546C3E473D062 ,
0x928FCE349455F860 , 0x48161BBACAAB94D9 ,
0x63912430770E6F68 , 0x6EC8A5E602C6641C ,
0x87282515337DDD2B , 0x2CDA6B42034B701B ,
0xB03D37C181CB096D , 0xE108438266C71C6F ,
0x2B3180C7EB51B255 , 0xDF92B82F96C08BBC ,
0x5C68C8C0A632F3BA , 0x5504CC861C3D0556 ,
0xABBFA4E55FB26B8F , 0x41848B0AB3BACEB4 ,
0xB334A273AA445D32 , 0xBCA696F0A85AD881 ,
0x24F6EC65B528D56C , 0x0CE1512E90F4524A ,
0x4E9DD79D5506D35A , 0x258905FAC6CE9779 ,
0x2019295B3E109B33 , 0xF8A9478B73A054CC ,
0x2924F2F934417EB0 , 0x3993357D536D1BC4 ,
0x38A81AC21DB6FF8B , 0x47C4FBF17D6016BF ,
0x1E0FAADD7667E3F5 , 0x7ABCFF62938BEB96 ,
0xA78DAD948FC179C9 , 0x8F1F98B72911E50D ,
0x61E48EAE27121A91 , 0x4D62F7AD31859808 ,
0xECEBA345EF5CEAEB , 0xF5CEB25EBC9684CE ,
0xF633E20CB7F76221 , 0xA32CDF06AB8293E4 ,
0x985A202CA5EE2CA4 , 0xCF0B8447CC8A8FB1 ,
0x9F765244979859A3 , 0xA8D516B1A1240017 ,
0x0BD7BA3EBB5DC726 , 0xE54BCA55B86ADB39 ,
0x1D7A3AFD6C478063 , 0x519EC608E7669EDD ,
0x0E5715A2D149AA23 , 0x177D4571848FF194 ,
0xEEB55F3241014C22 , 0x0F5E5CA13A6E2EC2 ,
0x8029927B75F5C361 , 0xAD139FABC3D6E436 ,
0x0D5DF1A94CCF402F , 0x3E8BD948BEA5DFC8 ,
0xA5A0D357BD3FF77E , 0xA2D12E251F74F645 ,
0x66FD9E525E81A082 , 0x2E0C90CE7F687A49 ,
0xC2E8BCBEBA973BC5 , 0x000001BCE509745F ,
0x423777BBE6DAB3D6 , 0xD1661C7EAEF06EB5 ,
0xA1781F354DAACFD8 , 0x2D11284A2B16AFFC ,
0xF1FC4F67FA891D1F , 0x73ECC25DCB920ADA ,
0xAE610C22C2A12651 , 0x96E0A810D356B78A ,
0x5A9A381F2FE7870F , 0xD5AD62EDE94E5530 ,
0xD225E5E8368D1427 , 0x65977B70C7AF4631 ,
0x99F889B2DE39D74F , 0x233F30BF54E1D143 ,
0x9A9675D3D9A63C97 , 0x5470554FF334F9A8 ,
0x166ACB744A4F5688 , 0x70C74CAAB2E4AEAD ,
0xF0D091646F294D12 , 0x57B82A89684031D1 ,
0xEFD95A5A61BE0B6B , 0x2FBD12E969F2F29A ,
0x9BD37013FEFF9FE8 , 0x3F9B0404D6085A06 ,
0x4940C1F3166CFE15 , 0x09542C4DCDF3DEFB ,
0xB4C5218385CD5CE3 , 0xC935B7DC4462A641 ,
0x3417F8A68ED3B63F , 0xB80959295B215B40 ,
0xF99CDAEF3B8C8572 , 0x018C0614F8FCB95D ,
0x1B14ACCD1A3ACDF3 , 0x84D471F200BB732D ,
0xC1A3110E95E8DA16 , 0x430A7220BF1A82B8 ,
0xB77E090D39DF210E , 0x5EF4BD9F3CD05E9D ,
0x9D4FF6DA7E57A444 , 0xDA1D60E183D4A5F8 ,
0xB287C38417998E47 , 0xFE3EDC121BB31886 ,
0xC7FE3CCC980CCBEF , 0xE46FB590189BFD03 ,
0x3732FD469A4C57DC , 0x7EF700A07CF1AD65 ,
0x59C64468A31D8859 , 0x762FB0B4D45B61F6 ,
0x155BAED099047718 , 0x68755E4C3D50BAA6 ,
0xE9214E7F22D8B4DF , 0x2ADDBF532EAC95F4 ,
0x32AE3909B4BD0109 , 0x834DF537B08E3450 ,
0xFA209DA84220728D , 0x9E691D9B9EFE23F7 ,
0x0446D288C4AE8D7F , 0x7B4CC524E169785B ,
0x21D87F0135CA1385 , 0xCEBB400F137B8AA5 ,
0x272E2B66580796BE , 0x3612264125C2B0DE ,
0x057702BDAD1EFBB2 , 0xD4BABB8EACF84BE9 ,
0x91583139641BC67B , 0x8BDC2DE08036E024 ,
0x603C8156F49F68ED , 0xF7D236F7DBEF5111 ,
0x9727C4598AD21E80 , 0xA08A0896670A5FD7 ,
0xCB4A8F4309EBA9CB , 0x81AF564B0F7036A1 ,
0xC0B99AA778199ABD , 0x959F1EC83FC8E952 ,
0x8C505077794A81B9 , 0x3ACAAF8F056338F0 ,
0x07B43F50627A6778 , 0x4A44AB49F5ECCC77 ,
0x3BC3D6E4B679EE98 , 0x9CC0D4D1CF14108C ,
0x4406C00B206BC8A0 , 0x82A18854C8D72D89 ,
0x67E366B35C3C432C , 0xB923DD61102B37F2 ,
0x56AB2779D884271D , 0xBE83E1B0FF1525AF ,
0xFB7C65D4217E49A9 , 0x6BDBE0E76D48E7D4 ,
0x08DF828745D9179E , 0x22EA6A9ADD53BD34 ,
0xE36E141C5622200A , 0x7F805D1B8CB750EE ,
0xAFE5C7A59F58E837 , 0xE27F996A4FB1C23C ,
0xD3867DFB0775F0D0 , 0xD0E673DE6E88891A ,
0x123AEB9EAFB86C25 , 0x30F1D5D5C145B895 ,
0xBB434A2DEE7269E7 , 0x78CB67ECF931FA38 ,
0xF33B0372323BBF9C , 0x52D66336FB279C74 ,
0x505F33AC0AFB4EAA , 0xE8A5CD99A2CCE187 ,
0x534974801E2D30BB , 0x8D2D5711D5876D90 ,
0x1F1A412891BC038E , 0xD6E2E71D82E56648 ,
0x74036C3A497732B7 , 0x89B67ED96361F5AB ,
0xFFED95D8F1EA02A2 , 0xE72B3BD61464D43D ,
0xA6300F170BDC4820 , 0xEBC18760ED78A77A ]
t2 = [ 0xE6A6BE5A05A12138 , 0xB5A122A5B4F87C98 ,
0x563C6089140B6990 , 0x4C46CB2E391F5DD5 ,
0xD932ADDBC9B79434 , 0x08EA70E42015AFF5 ,
0xD765A6673E478CF1 , 0xC4FB757EAB278D99 ,
0xDF11C6862D6E0692 , 0xDDEB84F10D7F3B16 ,
0x6F2EF604A665EA04 , 0x4A8E0F0FF0E0DFB3 ,
0xA5EDEEF83DBCBA51 , 0xFC4F0A2A0EA4371E ,
0xE83E1DA85CB38429 , 0xDC8FF882BA1B1CE2 ,
0xCD45505E8353E80D , 0x18D19A00D4DB0717 ,
0x34A0CFEDA5F38101 , 0x0BE77E518887CAF2 ,
0x1E341438B3C45136 , 0xE05797F49089CCF9 ,
0xFFD23F9DF2591D14 , 0x543DDA228595C5CD ,
0x661F81FD99052A33 , 0x8736E641DB0F7B76 ,
0x15227725418E5307 , 0xE25F7F46162EB2FA ,
0x48A8B2126C13D9FE , 0xAFDC541792E76EEA ,
0x03D912BFC6D1898F , 0x31B1AAFA1B83F51B ,
0xF1AC2796E42AB7D9 , 0x40A3A7D7FCD2EBAC ,
0x1056136D0AFBBCC5 , 0x7889E1DD9A6D0C85 ,
0xD33525782A7974AA , 0xA7E25D09078AC09B ,
0xBD4138B3EAC6EDD0 , 0x920ABFBE71EB9E70 ,
0xA2A5D0F54FC2625C , 0xC054E36B0B1290A3 ,
0xF6DD59FF62FE932B , 0x3537354511A8AC7D ,
0xCA845E9172FADCD4 , 0x84F82B60329D20DC ,
0x79C62CE1CD672F18 , 0x8B09A2ADD124642C ,
0xD0C1E96A19D9E726 , 0x5A786A9B4BA9500C ,
0x0E020336634C43F3 , 0xC17B474AEB66D822 ,
0x6A731AE3EC9BAAC2 , 0x8226667AE0840258 ,
0x67D4567691CAECA5 , 0x1D94155C4875ADB5 ,
0x6D00FD985B813FDF , 0x51286EFCB774CD06 ,
0x5E8834471FA744AF , 0xF72CA0AEE761AE2E ,
0xBE40E4CDAEE8E09A , 0xE9970BBB5118F665 ,
0x726E4BEB33DF1964 , 0x703B000729199762 ,
0x4631D816F5EF30A7 , 0xB880B5B51504A6BE ,
0x641793C37ED84B6C , 0x7B21ED77F6E97D96 ,
0x776306312EF96B73 , 0xAE528948E86FF3F4 ,
0x53DBD7F286A3F8F8 , 0x16CADCE74CFC1063 ,
0x005C19BDFA52C6DD , 0x68868F5D64D46AD3 ,
0x3A9D512CCF1E186A , 0x367E62C2385660AE ,
0xE359E7EA77DCB1D7 , 0x526C0773749ABE6E ,
0x735AE5F9D09F734B , 0x493FC7CC8A558BA8 ,
0xB0B9C1533041AB45 , 0x321958BA470A59BD ,
0x852DB00B5F46C393 , 0x91209B2BD336B0E5 ,
0x6E604F7D659EF19F , 0xB99A8AE2782CCB24 ,
0xCCF52AB6C814C4C7 , 0x4727D9AFBE11727B ,
0x7E950D0C0121B34D , 0x756F435670AD471F ,
0xF5ADD442615A6849 , 0x4E87E09980B9957A ,
0x2ACFA1DF50AEE355 , 0xD898263AFD2FD556 ,
0xC8F4924DD80C8FD6 , 0xCF99CA3D754A173A ,
0xFE477BACAF91BF3C , 0xED5371F6D690C12D ,
0x831A5C285E687094 , 0xC5D3C90A3708A0A4 ,
0x0F7F903717D06580 , 0x19F9BB13B8FDF27F ,
0xB1BD6F1B4D502843 , 0x1C761BA38FFF4012 ,
0x0D1530C4E2E21F3B , 0x8943CE69A7372C8A ,
0xE5184E11FEB5CE66 , 0x618BDB80BD736621 ,
0x7D29BAD68B574D0B , 0x81BB613E25E6FE5B ,
0x071C9C10BC07913F , 0xC7BEEB7909AC2D97 ,
0xC3E58D353BC5D757 , 0xEB017892F38F61E8 ,
0xD4EFFB9C9B1CC21A , 0x99727D26F494F7AB ,
0xA3E063A2956B3E03 , 0x9D4A8B9A4AA09C30 ,
0x3F6AB7D500090FB4 , 0x9CC0F2A057268AC0 ,
0x3DEE9D2DEDBF42D1 , 0x330F49C87960A972 ,
0xC6B2720287421B41 , 0x0AC59EC07C00369C ,
0xEF4EAC49CB353425 , 0xF450244EEF0129D8 ,
0x8ACC46E5CAF4DEB6 , 0x2FFEAB63989263F7 ,
0x8F7CB9FE5D7A4578 , 0x5BD8F7644E634635 ,
0x427A7315BF2DC900 , 0x17D0C4AA2125261C ,
0x3992486C93518E50 , 0xB4CBFEE0A2D7D4C3 ,
0x7C75D6202C5DDD8D , 0xDBC295D8E35B6C61 ,
0x60B369D302032B19 , 0xCE42685FDCE44132 ,
0x06F3DDB9DDF65610 , 0x8EA4D21DB5E148F0 ,
0x20B0FCE62FCD496F , 0x2C1B912358B0EE31 ,
0xB28317B818F5A308 , 0xA89C1E189CA6D2CF ,
0x0C6B18576AAADBC8 , 0xB65DEAA91299FAE3 ,
0xFB2B794B7F1027E7 , 0x04E4317F443B5BEB ,
0x4B852D325939D0A6 , 0xD5AE6BEEFB207FFC ,
0x309682B281C7D374 , 0xBAE309A194C3B475 ,
0x8CC3F97B13B49F05 , 0x98A9422FF8293967 ,
0x244B16B01076FF7C , 0xF8BF571C663D67EE ,
0x1F0D6758EEE30DA1 , 0xC9B611D97ADEB9B7 ,
0xB7AFD5887B6C57A2 , 0x6290AE846B984FE1 ,
0x94DF4CDEACC1A5FD , 0x058A5BD1C5483AFF ,
0x63166CC142BA3C37 , 0x8DB8526EB2F76F40 ,
0xE10880036F0D6D4E , 0x9E0523C9971D311D ,
0x45EC2824CC7CD691 , 0x575B8359E62382C9 ,
0xFA9E400DC4889995 , 0xD1823ECB45721568 ,
0xDAFD983B8206082F , 0xAA7D29082386A8CB ,
0x269FCD4403B87588 , 0x1B91F5F728BDD1E0 ,
0xE4669F39040201F6 , 0x7A1D7C218CF04ADE ,
0x65623C29D79CE5CE , 0x2368449096C00BB1 ,
0xAB9BF1879DA503BA , 0xBC23ECB1A458058E ,
0x9A58DF01BB401ECC , 0xA070E868A85F143D ,
0x4FF188307DF2239E , 0x14D565B41A641183 ,
0xEE13337452701602 , 0x950E3DCF3F285E09 ,
0x59930254B9C80953 , 0x3BF299408930DA6D ,
0xA955943F53691387 , 0xA15EDECAA9CB8784 ,
0x29142127352BE9A0 , 0x76F0371FFF4E7AFB ,
0x0239F450274F2228 , 0xBB073AF01D5E868B ,
0xBFC80571C10E96C1 , 0xD267088568222E23 ,
0x9671A3D48E80B5B0 , 0x55B5D38AE193BB81 ,
0x693AE2D0A18B04B8 , 0x5C48B4ECADD5335F ,
0xFD743B194916A1CA , 0x2577018134BE98C4 ,
0xE77987E83C54A4AD , 0x28E11014DA33E1B9 ,
0x270CC59E226AA213 , 0x71495F756D1A5F60 ,
0x9BE853FB60AFEF77 , 0xADC786A7F7443DBF ,
0x0904456173B29A82 , 0x58BC7A66C232BD5E ,
0xF306558C673AC8B2 , 0x41F639C6B6C9772A ,
0x216DEFE99FDA35DA , 0x11640CC71C7BE615 ,
0x93C43694565C5527 , 0xEA038E6246777839 ,
0xF9ABF3CE5A3E2469 , 0x741E768D0FD312D2 ,
0x0144B883CED652C6 , 0xC20B5A5BA33F8552 ,
0x1AE69633C3435A9D , 0x97A28CA4088CFDEC ,
0x8824A43C1E96F420 , 0x37612FA66EEEA746 ,
0x6B4CB165F9CF0E5A , 0x43AA1C06A0ABFB4A ,
0x7F4DC26FF162796B , 0x6CBACC8E54ED9B0F ,
0xA6B7FFEFD2BB253E , 0x2E25BC95B0A29D4F ,
0x86D6A58BDEF1388C , 0xDED74AC576B6F054 ,
0x8030BDBC2B45805D , 0x3C81AF70E94D9289 ,
0x3EFF6DDA9E3100DB , 0xB38DC39FDFCC8847 ,
0x123885528D17B87E , 0xF2DA0ED240B1B642 ,
0x44CEFADCD54BF9A9 , 0x1312200E433C7EE6 ,
0x9FFCC84F3A78C748 , 0xF0CD1F72248576BB ,
0xEC6974053638CFE4 , 0x2BA7B67C0CEC4E4C ,
0xAC2F4DF3E5CE32ED , 0xCB33D14326EA4C11 ,
0xA4E9044CC77E58BC , 0x5F513293D934FCEF ,
0x5DC9645506E55444 , 0x50DE418F317DE40A ,
0x388CB31A69DDE259 , 0x2DB4A83455820A86 ,
0x9010A91E84711AE9 , 0x4DF7F0B7B1498371 ,
0xD62A2EABC0977179 , 0x22FAC097AA8D5C0E ]
t3 = [ 0xF49FCC2FF1DAF39B , 0x487FD5C66FF29281 ,
0xE8A30667FCDCA83F , 0x2C9B4BE3D2FCCE63 ,
0xDA3FF74B93FBBBC2 , 0x2FA165D2FE70BA66 ,
0xA103E279970E93D4 , 0xBECDEC77B0E45E71 ,
0xCFB41E723985E497 , 0xB70AAA025EF75017 ,
0xD42309F03840B8E0 , 0x8EFC1AD035898579 ,
0x96C6920BE2B2ABC5 , 0x66AF4163375A9172 ,
0x2174ABDCCA7127FB , 0xB33CCEA64A72FF41 ,
0xF04A4933083066A5 , 0x8D970ACDD7289AF5 ,
0x8F96E8E031C8C25E , 0xF3FEC02276875D47 ,
0xEC7BF310056190DD , 0xF5ADB0AEBB0F1491 ,
0x9B50F8850FD58892 , 0x4975488358B74DE8 ,
0xA3354FF691531C61 , 0x0702BBE481D2C6EE ,
0x89FB24057DEDED98 , 0xAC3075138596E902 ,
0x1D2D3580172772ED , 0xEB738FC28E6BC30D ,
0x5854EF8F63044326 , 0x9E5C52325ADD3BBE ,
0x90AA53CF325C4623 , 0xC1D24D51349DD067 ,
0x2051CFEEA69EA624 , 0x13220F0A862E7E4F ,
0xCE39399404E04864 , 0xD9C42CA47086FCB7 ,
0x685AD2238A03E7CC , 0x066484B2AB2FF1DB ,
0xFE9D5D70EFBF79EC , 0x5B13B9DD9C481854 ,
0x15F0D475ED1509AD , 0x0BEBCD060EC79851 ,
0xD58C6791183AB7F8 , 0xD1187C5052F3EEE4 ,
0xC95D1192E54E82FF , 0x86EEA14CB9AC6CA2 ,
0x3485BEB153677D5D , 0xDD191D781F8C492A ,
0xF60866BAA784EBF9 , 0x518F643BA2D08C74 ,
0x8852E956E1087C22 , 0xA768CB8DC410AE8D ,
0x38047726BFEC8E1A , 0xA67738B4CD3B45AA ,
0xAD16691CEC0DDE19 , 0xC6D4319380462E07 ,
0xC5A5876D0BA61938 , 0x16B9FA1FA58FD840 ,
0x188AB1173CA74F18 , 0xABDA2F98C99C021F ,
0x3E0580AB134AE816 , 0x5F3B05B773645ABB ,
0x2501A2BE5575F2F6 , 0x1B2F74004E7E8BA9 ,
0x1CD7580371E8D953 , 0x7F6ED89562764E30 ,
0xB15926FF596F003D , 0x9F65293DA8C5D6B9 ,
0x6ECEF04DD690F84C , 0x4782275FFF33AF88 ,
0xE41433083F820801 , 0xFD0DFE409A1AF9B5 ,
0x4325A3342CDB396B , 0x8AE77E62B301B252 ,
0xC36F9E9F6655615A , 0x85455A2D92D32C09 ,
0xF2C7DEA949477485 , 0x63CFB4C133A39EBA ,
0x83B040CC6EBC5462 , 0x3B9454C8FDB326B0 ,
0x56F56A9E87FFD78C , 0x2DC2940D99F42BC6 ,
0x98F7DF096B096E2D , 0x19A6E01E3AD852BF ,
0x42A99CCBDBD4B40B , 0xA59998AF45E9C559 ,
0x366295E807D93186 , 0x6B48181BFAA1F773 ,
0x1FEC57E2157A0A1D , 0x4667446AF6201AD5 ,
0xE615EBCACFB0F075 , 0xB8F31F4F68290778 ,
0x22713ED6CE22D11E , 0x3057C1A72EC3C93B ,
0xCB46ACC37C3F1F2F , 0xDBB893FD02AAF50E ,
0x331FD92E600B9FCF , 0xA498F96148EA3AD6 ,
0xA8D8426E8B6A83EA , 0xA089B274B7735CDC ,
0x87F6B3731E524A11 , 0x118808E5CBC96749 ,
0x9906E4C7B19BD394 , 0xAFED7F7E9B24A20C ,
0x6509EADEEB3644A7 , 0x6C1EF1D3E8EF0EDE ,
0xB9C97D43E9798FB4 , 0xA2F2D784740C28A3 ,
0x7B8496476197566F , 0x7A5BE3E6B65F069D ,
0xF96330ED78BE6F10 , 0xEEE60DE77A076A15 ,
0x2B4BEE4AA08B9BD0 , 0x6A56A63EC7B8894E ,
0x02121359BA34FEF4 , 0x4CBF99F8283703FC ,
0x398071350CAF30C8 , 0xD0A77A89F017687A ,
0xF1C1A9EB9E423569 , 0x8C7976282DEE8199 ,
0x5D1737A5DD1F7ABD , 0x4F53433C09A9FA80 ,
0xFA8B0C53DF7CA1D9 , 0x3FD9DCBC886CCB77 ,
0xC040917CA91B4720 , 0x7DD00142F9D1DCDF ,
0x8476FC1D4F387B58 , 0x23F8E7C5F3316503 ,
0x032A2244E7E37339 , 0x5C87A5D750F5A74B ,
0x082B4CC43698992E , 0xDF917BECB858F63C ,
0x3270B8FC5BF86DDA , 0x10AE72BB29B5DD76 ,
0x576AC94E7700362B , 0x1AD112DAC61EFB8F ,
0x691BC30EC5FAA427 , 0xFF246311CC327143 ,
0x3142368E30E53206 , 0x71380E31E02CA396 ,
0x958D5C960AAD76F1 , 0xF8D6F430C16DA536 ,
0xC8FFD13F1BE7E1D2 , 0x7578AE66004DDBE1 ,
0x05833F01067BE646 , 0xBB34B5AD3BFE586D ,
0x095F34C9A12B97F0 , 0x247AB64525D60CA8 ,
0xDCDBC6F3017477D1 , 0x4A2E14D4DECAD24D ,
0xBDB5E6D9BE0A1EEB , 0x2A7E70F7794301AB ,
0xDEF42D8A270540FD , 0x01078EC0A34C22C1 ,
0xE5DE511AF4C16387 , 0x7EBB3A52BD9A330A ,
0x77697857AA7D6435 , 0x004E831603AE4C32 ,
0xE7A21020AD78E312 , 0x9D41A70C6AB420F2 ,
0x28E06C18EA1141E6 , 0xD2B28CBD984F6B28 ,
0x26B75F6C446E9D83 , 0xBA47568C4D418D7F ,
0xD80BADBFE6183D8E , 0x0E206D7F5F166044 ,
0xE258A43911CBCA3E , 0x723A1746B21DC0BC ,
0xC7CAA854F5D7CDD3 , 0x7CAC32883D261D9C ,
0x7690C26423BA942C , 0x17E55524478042B8 ,
0xE0BE477656A2389F , 0x4D289B5E67AB2DA0 ,
0x44862B9C8FBBFD31 , 0xB47CC8049D141365 ,
0x822C1B362B91C793 , 0x4EB14655FB13DFD8 ,
0x1ECBBA0714E2A97B , 0x6143459D5CDE5F14 ,
0x53A8FBF1D5F0AC89 , 0x97EA04D81C5E5B00 ,
0x622181A8D4FDB3F3 , 0xE9BCD341572A1208 ,
0x1411258643CCE58A , 0x9144C5FEA4C6E0A4 ,
0x0D33D06565CF620F , 0x54A48D489F219CA1 ,
0xC43E5EAC6D63C821 , 0xA9728B3A72770DAF ,
0xD7934E7B20DF87EF , 0xE35503B61A3E86E5 ,
0xCAE321FBC819D504 , 0x129A50B3AC60BFA6 ,
0xCD5E68EA7E9FB6C3 , 0xB01C90199483B1C7 ,
0x3DE93CD5C295376C , 0xAED52EDF2AB9AD13 ,
0x2E60F512C0A07884 , 0xBC3D86A3E36210C9 ,
0x35269D9B163951CE , 0x0C7D6E2AD0CDB5FA ,
0x59E86297D87F5733 , 0x298EF221898DB0E7 ,
0x55000029D1A5AA7E , 0x8BC08AE1B5061B45 ,
0xC2C31C2B6C92703A , 0x94CC596BAF25EF42 ,
0x0A1D73DB22540456 , 0x04B6A0F9D9C4179A ,
0xEFFDAFA2AE3D3C60 , 0xF7C8075BB49496C4 ,
0x9CC5C7141D1CD4E3 , 0x78BD1638218E5534 ,
0xB2F11568F850246A , 0xEDFABCFA9502BC29 ,
0x796CE5F2DA23051B , 0xAAE128B0DC93537C ,
0x3A493DA0EE4B29AE , 0xB5DF6B2C416895D7 ,
0xFCABBD25122D7F37 , 0x70810B58105DC4B1 ,
0xE10FDD37F7882A90 , 0x524DCAB5518A3F5C ,
0x3C9E85878451255B , 0x4029828119BD34E2 ,
0x74A05B6F5D3CECCB , 0xB610021542E13ECA ,
0x0FF979D12F59E2AC , 0x6037DA27E4F9CC50 ,
0x5E92975A0DF1847D , 0xD66DE190D3E623FE ,
0x5032D6B87B568048 , 0x9A36B7CE8235216E ,
0x80272A7A24F64B4A , 0x93EFED8B8C6916F7 ,
0x37DDBFF44CCE1555 , 0x4B95DB5D4B99BD25 ,
0x92D3FDA169812FC0 , 0xFB1A4A9A90660BB6 ,
0x730C196946A4B9B2 , 0x81E289AA7F49DA68 ,
0x64669A0F83B1A05F , 0x27B3FF7D9644F48B ,
0xCC6B615C8DB675B3 , 0x674F20B9BCEBBE95 ,
0x6F31238275655982 , 0x5AE488713E45CF05 ,
0xBF619F9954C21157 , 0xEABAC46040A8EAE9 ,
0x454C6FE9F2C0C1CD , 0x419CF6496412691C ,
0xD3DC3BEF265B0F70 , 0x6D0E60F5C3578A9E ]
t4 = [ 0x5B0E608526323C55 , 0x1A46C1A9FA1B59F5 ,
0xA9E245A17C4C8FFA , 0x65CA5159DB2955D7 ,
0x05DB0A76CE35AFC2 , 0x81EAC77EA9113D45 ,
0x528EF88AB6AC0A0D , 0xA09EA253597BE3FF ,
0x430DDFB3AC48CD56 , 0xC4B3A67AF45CE46F ,
0x4ECECFD8FBE2D05E , 0x3EF56F10B39935F0 ,
0x0B22D6829CD619C6 , 0x17FD460A74DF2069 ,
0x6CF8CC8E8510ED40 , 0xD6C824BF3A6ECAA7 ,
0x61243D581A817049 , 0x048BACB6BBC163A2 ,
0xD9A38AC27D44CC32 , 0x7FDDFF5BAAF410AB ,
0xAD6D495AA804824B , 0xE1A6A74F2D8C9F94 ,
0xD4F7851235DEE8E3 , 0xFD4B7F886540D893 ,
0x247C20042AA4BFDA , 0x096EA1C517D1327C ,
0xD56966B4361A6685 , 0x277DA5C31221057D ,
0x94D59893A43ACFF7 , 0x64F0C51CCDC02281 ,
0x3D33BCC4FF6189DB , 0xE005CB184CE66AF1 ,
0xFF5CCD1D1DB99BEA , 0xB0B854A7FE42980F ,
0x7BD46A6A718D4B9F , 0xD10FA8CC22A5FD8C ,
0xD31484952BE4BD31 , 0xC7FA975FCB243847 ,
0x4886ED1E5846C407 , 0x28CDDB791EB70B04 ,
0xC2B00BE2F573417F , 0x5C9590452180F877 ,
0x7A6BDDFFF370EB00 , 0xCE509E38D6D9D6A4 ,
0xEBEB0F00647FA702 , 0x1DCC06CF76606F06 ,
0xE4D9F28BA286FF0A , 0xD85A305DC918C262 ,
0x475B1D8732225F54 , 0x2D4FB51668CCB5FE ,
0xA679B9D9D72BBA20 , 0x53841C0D912D43A5 ,
0x3B7EAA48BF12A4E8 , 0x781E0E47F22F1DDF ,
0xEFF20CE60AB50973 , 0x20D261D19DFFB742 ,
0x16A12B03062A2E39 , 0x1960EB2239650495 ,
0x251C16FED50EB8B8 , 0x9AC0C330F826016E ,
0xED152665953E7671 , 0x02D63194A6369570 ,
0x5074F08394B1C987 , 0x70BA598C90B25CE1 ,
0x794A15810B9742F6 , 0x0D5925E9FCAF8C6C ,
0x3067716CD868744E , 0x910AB077E8D7731B ,
0x6A61BBDB5AC42F61 , 0x93513EFBF0851567 ,
0xF494724B9E83E9D5 , 0xE887E1985C09648D ,
0x34B1D3C675370CFD , 0xDC35E433BC0D255D ,
0xD0AAB84234131BE0 , 0x08042A50B48B7EAF ,
0x9997C4EE44A3AB35 , 0x829A7B49201799D0 ,
0x263B8307B7C54441 , 0x752F95F4FD6A6CA6 ,
0x927217402C08C6E5 , 0x2A8AB754A795D9EE ,
0xA442F7552F72943D , 0x2C31334E19781208 ,
0x4FA98D7CEAEE6291 , 0x55C3862F665DB309 ,
0xBD0610175D53B1F3 , 0x46FE6CB840413F27 ,
0x3FE03792DF0CFA59 , 0xCFE700372EB85E8F ,
0xA7BE29E7ADBCE118 , 0xE544EE5CDE8431DD ,
0x8A781B1B41F1873E , 0xA5C94C78A0D2F0E7 ,
0x39412E2877B60728 , 0xA1265EF3AFC9A62C ,
0xBCC2770C6A2506C5 , 0x3AB66DD5DCE1CE12 ,
0xE65499D04A675B37 , 0x7D8F523481BFD216 ,
0x0F6F64FCEC15F389 , 0x74EFBE618B5B13C8 ,
0xACDC82B714273E1D , 0xDD40BFE003199D17 ,
0x37E99257E7E061F8 , 0xFA52626904775AAA ,
0x8BBBF63A463D56F9 , 0xF0013F1543A26E64 ,
0xA8307E9F879EC898 , 0xCC4C27A4150177CC ,
0x1B432F2CCA1D3348 , 0xDE1D1F8F9F6FA013 ,
0x606602A047A7DDD6 , 0xD237AB64CC1CB2C7 ,
0x9B938E7225FCD1D3 , 0xEC4E03708E0FF476 ,
0xFEB2FBDA3D03C12D , 0xAE0BCED2EE43889A ,
0x22CB8923EBFB4F43 , 0x69360D013CF7396D ,
0x855E3602D2D4E022 , 0x073805BAD01F784C ,
0x33E17A133852F546 , 0xDF4874058AC7B638 ,
0xBA92B29C678AA14A , 0x0CE89FC76CFAADCD ,
0x5F9D4E0908339E34 , 0xF1AFE9291F5923B9 ,
0x6E3480F60F4A265F , 0xEEBF3A2AB29B841C ,
0xE21938A88F91B4AD , 0x57DFEFF845C6D3C3 ,
0x2F006B0BF62CAAF2 , 0x62F479EF6F75EE78 ,
0x11A55AD41C8916A9 , 0xF229D29084FED453 ,
0x42F1C27B16B000E6 , 0x2B1F76749823C074 ,
0x4B76ECA3C2745360 , 0x8C98F463B91691BD ,
0x14BCC93CF1ADE66A , 0x8885213E6D458397 ,
0x8E177DF0274D4711 , 0xB49B73B5503F2951 ,
0x10168168C3F96B6B , 0x0E3D963B63CAB0AE ,
0x8DFC4B5655A1DB14 , 0xF789F1356E14DE5C ,
0x683E68AF4E51DAC1 , 0xC9A84F9D8D4B0FD9 ,
0x3691E03F52A0F9D1 , 0x5ED86E46E1878E80 ,
0x3C711A0E99D07150 , 0x5A0865B20C4E9310 ,
0x56FBFC1FE4F0682E , 0xEA8D5DE3105EDF9B ,
0x71ABFDB12379187A , 0x2EB99DE1BEE77B9C ,
0x21ECC0EA33CF4523 , 0x59A4D7521805C7A1 ,
0x3896F5EB56AE7C72 , 0xAA638F3DB18F75DC ,
0x9F39358DABE9808E , 0xB7DEFA91C00B72AC ,
0x6B5541FD62492D92 , 0x6DC6DEE8F92E4D5B ,
0x353F57ABC4BEEA7E , 0x735769D6DA5690CE ,
0x0A234AA642391484 , 0xF6F9508028F80D9D ,
0xB8E319A27AB3F215 , 0x31AD9C1151341A4D ,
0x773C22A57BEF5805 , 0x45C7561A07968633 ,
0xF913DA9E249DBE36 , 0xDA652D9B78A64C68 ,
0x4C27A97F3BC334EF , 0x76621220E66B17F4 ,
0x967743899ACD7D0B , 0xF3EE5BCAE0ED6782 ,
0x409F753600C879FC , 0x06D09A39B5926DB6 ,
0x6F83AEB0317AC588 , 0x01E6CA4A86381F21 ,
0x66FF3462D19F3025 , 0x72207C24DDFD3BFB ,
0x4AF6B6D3E2ECE2EB , 0x9C994DBEC7EA08DE ,
0x49ACE597B09A8BC4 , 0xB38C4766CF0797BA ,
0x131B9373C57C2A75 , 0xB1822CCE61931E58 ,
0x9D7555B909BA1C0C , 0x127FAFDD937D11D2 ,
0x29DA3BADC66D92E4 , 0xA2C1D57154C2ECBC ,
0x58C5134D82F6FE24 , 0x1C3AE3515B62274F ,
0xE907C82E01CB8126 , 0xF8ED091913E37FCB ,
0x3249D8F9C80046C9 , 0x80CF9BEDE388FB63 ,
0x1881539A116CF19E , 0x5103F3F76BD52457 ,
0x15B7E6F5AE47F7A8 , 0xDBD7C6DED47E9CCF ,
0x44E55C410228BB1A , 0xB647D4255EDB4E99 ,
0x5D11882BB8AAFC30 , 0xF5098BBB29D3212A ,
0x8FB5EA14E90296B3 , 0x677B942157DD025A ,
0xFB58E7C0A390ACB5 , 0x89D3674C83BD4A01 ,
0x9E2DA4DF4BF3B93B , 0xFCC41E328CAB4829 ,
0x03F38C96BA582C52 , 0xCAD1BDBD7FD85DB2 ,
0xBBB442C16082AE83 , 0xB95FE86BA5DA9AB0 ,
0xB22E04673771A93F , 0x845358C9493152D8 ,
0xBE2A488697B4541E , 0x95A2DC2DD38E6966 ,
0xC02C11AC923C852B , 0x2388B1990DF2A87B ,
0x7C8008FA1B4F37BE , 0x1F70D0C84D54E503 ,
0x5490ADEC7ECE57D4 , 0x002B3C27D9063A3A ,
0x7EAEA3848030A2BF , 0xC602326DED2003C0 ,
0x83A7287D69A94086 , 0xC57A5FCB30F57A8A ,
0xB56844E479EBE779 , 0xA373B40F05DCBCE9 ,
0xD71A786E88570EE2 , 0x879CBACDBDE8F6A0 ,
0x976AD1BCC164A32F , 0xAB21E25E9666D78B ,
0x901063AAE5E5C33C , 0x9818B34448698D90 ,
0xE36487AE3E1E8ABB , 0xAFBDF931893BDCB4 ,
0x6345A0DC5FBBD519 , 0x8628FE269B9465CA ,
0x1E5D01603F9C51EC , 0x4DE44006A15049B7 ,
0xBF6C70E5F776CBB1 , 0x411218F2EF552BED ,
0xCB0C0708705A36A3 , 0xE74D14754F986044 ,
0xCD56D9430EA8280E , 0xC12591D7535F5065 ,
0xC83223F1720AEF96 , 0xC3A0396F7363A51F ]
|
|
#!/usr/bin/env python
# -*-coding: utf8 -*-
'''
GitHub API Python SDK. (Python >= 2.5)
Michael Liao (askxuefeng@gmail.com)
Usage:
>>> gh = GitHub(username='githubpy', password='test-githubpy-1234')
>>> L = gh.users('githubpy').followers.get()
>>> L[0].id
470058
>>> L[0].login
u'michaelliao'
>>> x_ratelimit_remaining = gh.x_ratelimit_remaining
>>> x_ratelimit_limit = gh.x_ratelimit_limit
>>> L = gh.users('githubpy').following.get()
>>> L[0]._links.self.href
u'https://api.github.com/users/michaelliao'
>>> L = gh.repos('githubpy')('testgithubpy').issues.get(state='closed', sort='created')
>>> L[0].title
u'sample issue for test'
>>> L[0].number
1
>>> I = gh.repos('githubpy')('testgithubpy').issues(1).get()
>>> I.url
u'https://api.github.com/repos/githubpy/testgithubpy/issues/1'
>>> gh = GitHub(username='githubpy', password='test-githubpy-1234')
>>> r = gh.repos('githubpy')('testgithubpy').issues.post(title='test create issue', body='just a test')
>>> r.title
u'test create issue'
>>> r.state
u'open'
>>> gh.repos.thisisabadurl.get()
Traceback (most recent call last):
...
ApiNotFoundError: https://api.github.com/repos/thisisabadurl
>>> gh.users('github-not-exist-user').followers.get()
Traceback (most recent call last):
...
ApiNotFoundError: https://api.github.com/users/github-not-exist-user/followers
'''
import re, os, sha, time, hmac, base64, hashlib, urllib, urllib2, mimetypes
try:
import json
except ImportError:
import simplejson as json
from collections import Iterable
from datetime import datetime, timedelta, tzinfo
from StringIO import StringIO
_URL = 'https://api.github.com'
_METHOD_MAP = dict(
GET=lambda: 'GET',
PUT=lambda: 'PUT',
POST=lambda: 'POST',
PATCH=lambda: 'PATCH',
DELETE=lambda: 'DELETE')
DEFAULT_SCOPE = None
RW_SCOPE = 'user,public_repo,repo,repo:status,gist'
class GitHub(object):
'''
GitHub client.
'''
def __init__(self, username=None, password=None, access_token=None, client_id=None, client_secret=None, redirect_uri=None, scope=None):
self.x_ratelimit_remaining = (-1)
self.x_ratelimit_limit = (-1)
self._authorization = None
if username and password:
self._authorization = 'Basic %s' % base64.b64encode('%s:%s' % (username, password))
elif access_token:
self._authorization = 'token %s' % access_token
self._client_id = client_id
self._client_secret = client_secret
self._redirect_uri = redirect_uri
self._scope = scope
def authorize_url(self, state=None):
'''
Generate authorize_url.
>>> GitHub(client_id='3ebf94c5776d565bcf75').authorize_url()
'https://github.com/login/oauth/authorize?client_id=3ebf94c5776d565bcf75'
'''
if not self._client_id:
raise ApiAuthError('No client id.')
kw = dict(client_id=self._client_id)
if self._redirect_uri:
kw['redirect_uri'] = self._redirect_uri
if self._scope:
kw['scope'] = self._scope
if state:
kw['state'] = state
return 'https://github.com/login/oauth/authorize?%s' % _encode_params(kw)
def get_access_token(self, code, state=None):
'''
In callback url: http://host/callback?code=123&state=xyz
use code and state to get an access token.
'''
kw = dict(client_id=self._client_id, client_secret=self._client_secret, code=code)
if self._redirect_uri:
kw['redirect_uri'] = self._redirect_uri
if state:
kw['state'] = state
opener = urllib2.build_opener(urllib2.HTTPSHandler)
request = urllib2.Request('https://github.com/login/oauth/access_token', data=_encode_params(kw))
request.get_method = _METHOD_MAP['POST']
request.add_header('Accept', 'application/json')
try:
response = opener.open(request)
r = _parse_json(response.read())
if 'error' in r:
raise ApiAuthError(str(r.error))
return str(r.access_token)
except urllib2.HTTPError, e:
            raise ApiAuthError('HTTPError when getting access token')
def __getattr__(self, attr):
return _Callable(self, '/%s' % attr)
def _http(self, method, path, **kw):
data = None
params = None
if method=='GET' and kw:
path = '%s?%s' % (path, _encode_params(kw))
if method=='POST':
data = _encode_json(kw)
url = '%s%s' % (_URL, path)
opener = urllib2.build_opener(urllib2.HTTPSHandler)
request = urllib2.Request(url, data=data)
request.get_method = _METHOD_MAP[method]
if self._authorization:
request.add_header('Authorization', self._authorization)
if method=='POST':
request.add_header('Content-Type', 'application/x-www-form-urlencoded')
try:
response = opener.open(request)
is_json = self._process_resp(response.headers)
if is_json:
return _parse_json(response.read())
except urllib2.HTTPError, e:
is_json = self._process_resp(e.headers)
if is_json:
json = _parse_json(e.read())
req = JsonObject(method=method, url=url)
resp = JsonObject(code=e.code, json=json)
if resp.code==404:
raise ApiNotFoundError(url, req, resp)
raise ApiError(url, req, resp)
def _process_resp(self, headers):
is_json = False
if headers:
for k in headers:
h = k.lower()
if h=='x-ratelimit-remaining':
self.x_ratelimit_remaining = int(headers[k])
elif h=='x-ratelimit-limit':
self.x_ratelimit_limit = int(headers[k])
elif h=='content-type':
is_json = headers[k].startswith('application/json')
return is_json
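# A minimal sketch (not from the original module) of the OAuth web flow that
# authorize_url() and get_access_token() implement; the client id/secret and
# the "code" value below are made-up placeholders:
#
#   gh = GitHub(client_id='my-client-id', client_secret='my-client-secret',
#               redirect_uri='https://example.com/callback', scope=RW_SCOPE)
#   url = gh.authorize_url(state='random-state')    # send the user here
#   # GitHub redirects back to https://example.com/callback?code=...&state=...
#   token = gh.get_access_token('code-from-callback', state='random-state')
#   gh = GitHub(access_token=token)                 # authenticated client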
class _Executable(object):
def __init__(self, gh, method, path):
self._gh = gh
self._method = method
self._path = path
def __call__(self, **kw):
return self._gh._http(self._method, self._path, **kw)
def __str__(self):
return '_Executable (%s %s)' % (self._method, self._path)
__repr__ = __str__
class _Callable(object):
def __init__(self, gh, name):
self._gh = gh
self._name = name
def __call__(self, *args):
if len(args)==0:
return self
name = '%s/%s' % (self._name, '/'.join([str(arg) for arg in args]))
return _Callable(self._gh, name)
def __getattr__(self, attr):
if attr=='get':
return _Executable(self._gh, 'GET', self._name)
if attr=='put':
return _Executable(self._gh, 'PUT', self._name)
if attr=='post':
return _Executable(self._gh, 'POST', self._name)
if attr=='delete':
return _Executable(self._gh, 'DELETE', self._name)
name = '%s/%s' % (self._name, attr)
return _Callable(self._gh, name)
def __str__(self):
return '_Callable (%s)' % self._name
__repr__ = __str__
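# A brief illustration (not part of the original code) of how _Callable and
# _Executable cooperate: each attribute access or call appends a path segment,
# and .get/.put/.post/.delete terminates the chain with the matching HTTP verb
# ("thisisarepo" below is a made-up repository name):
#
#   gh.repos('githubpy')('thisisarepo').issues.get(state='closed')
#   # -> GET https://api.github.com/repos/githubpy/thisisarepo/issues?state=closed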
def _encode_params(kw):
'''
Encode parameters.
'''
args = []
for k, v in kw.iteritems():
qv = v.encode('utf-8') if isinstance(v, unicode) else str(v)
args.append('%s=%s' % (k, urllib.quote(qv)))
return '&'.join(args)
def _encode_json(obj):
'''
Encode object as json str.
'''
def _dump_obj(obj):
if isinstance(obj, dict):
return obj
d = dict()
for k in dir(obj):
if not k.startswith('_'):
d[k] = getattr(obj, k)
return d
return json.dumps(obj, default=_dump_obj)
def _parse_json(jsonstr):
def _obj_hook(pairs):
o = JsonObject()
for k, v in pairs.iteritems():
o[str(k)] = v
return o
return json.loads(jsonstr, object_hook=_obj_hook)
class ApiError(BaseException):
def __init__(self, url, request, response):
super(ApiError, self).__init__(url)
self.request = request
self.response = response
class ApiAuthError(ApiError):
def __init__(self, msg):
super(ApiAuthError, self).__init__(msg, None, None)
class ApiNotFoundError(ApiError):
pass
class JsonObject(dict):
'''
general json object that can bind any fields but also act as a dict.
'''
def __getattr__(self, attr):
return self[attr]
def __setattr__(self, attr, value):
self[attr] = value
def __getstate__(self):
return self.copy()
def __setstate__(self, state):
self.update(state)
if __name__ == '__main__':
import doctest
doctest.testmod()
|
|
#!/usr/bin/env python
#Copyright (c) 2011 Walter Bender
#Copyright (c) 2010 Jamie Boisture
#Copyright (c) 2011 Collabora Ltd. <http://www.collabora.co.uk/>
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#The above copyright notice and this permission notice shall be included in
#all copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#THE SOFTWARE.
try:
import pycurl
import xmlrpclib
_UPLOAD_AVAILABLE = True
except ImportError, e:
print "Import Error: %s. Project upload is disabled." % (e)
_UPLOAD_AVAILABLE = False
import os
import gtk
from plugin import Plugin
from util.menubuilder import MenuBuilder, MENUBAR
from gettext import gettext as _
class Uploader_plugin(Plugin):
MAX_FILE_SIZE = 950000
UPLOAD_SERVER = 'http://turtleartsite.appspot.com'
def __init__(self, parent, upload_server=None, max_file_size=None):
self._parent = parent
self.uploading = False
        if upload_server is None:
            self._upload_server = self.UPLOAD_SERVER
        else:
            self._upload_server = upload_server
if max_file_size is None:
self._max_file_size = self.MAX_FILE_SIZE
else:
self._max_file_size = max_file_size
def set_tw(self, turtleart_window):
self.tw = turtleart_window
def get_menu(self):
if _('Upload') in MENUBAR:
menu, upload_menu = MENUBAR[_('Upload')]
else:
upload_menu = None
menu = gtk.Menu()
MenuBuilder.make_menu_item(menu, _('Upload to Web'),
self.do_upload_to_web)
if upload_menu is not None:
return None # We don't have to add it since it already exists
else:
upload_menu = MenuBuilder.make_sub_menu(menu, _('Upload'))
return upload_menu
def enabled(self):
return _UPLOAD_AVAILABLE
def do_upload_to_web(self, widget=None):
if self.uploading:
return
self.uploading = False
self.pop_up = gtk.Window()
self.pop_up.set_default_size(600, 400)
self.pop_up.connect('delete_event', self._stop_uploading)
table = gtk.Table(8, 1, False)
self.pop_up.add(table)
login_label = gtk.Label(_('You must have an account at \
http://turtleartsite.sugarlabs.org to upload your project.'))
table.attach(login_label, 0, 1, 0, 1)
self.login_message = gtk.Label('')
table.attach(self.login_message, 0, 1, 1, 2)
self.Hbox1 = gtk.HBox()
table.attach(self.Hbox1, 0, 1, 2, 3, xpadding=5, ypadding=3)
self.username_entry = gtk.Entry()
username_label = gtk.Label(_('Username:') + ' ')
username_label.set_size_request(150, 25)
username_label.set_alignment(1.0, 0.5)
self.username_entry.set_size_request(450, 25)
self.Hbox1.add(username_label)
self.Hbox1.add(self.username_entry)
self.Hbox2 = gtk.HBox()
table.attach(self.Hbox2, 0, 1, 3, 4, xpadding=5, ypadding=3)
self.password_entry = gtk.Entry()
password_label = gtk.Label(_('Password:') + ' ')
self.password_entry.set_visibility(False)
password_label.set_size_request(150, 25)
password_label.set_alignment(1.0, 0.5)
self.password_entry.set_size_request(450, 25)
self.Hbox2.add(password_label)
self.Hbox2.add(self.password_entry)
self.Hbox3 = gtk.HBox()
table.attach(self.Hbox3, 0, 1, 4, 5, xpadding=5, ypadding=3)
self.title_entry = gtk.Entry()
title_label = gtk.Label(_('Title:') + ' ')
title_label.set_size_request(150, 25)
title_label.set_alignment(1.0, 0.5)
self.title_entry.set_size_request(450, 25)
self.Hbox3.add(title_label)
self.Hbox3.add(self.title_entry)
self.Hbox4 = gtk.HBox()
table.attach(self.Hbox4, 0, 1, 5, 6, xpadding=5, ypadding=3)
self.description_entry = gtk.TextView()
description_label = gtk.Label(_('Description:') + ' ')
description_label.set_size_request(150, 25)
description_label.set_alignment(1.0, 0.5)
self.description_entry.set_wrap_mode(gtk.WRAP_WORD)
self.description_entry.set_size_request(450, 50)
self.Hbox4.add(description_label)
self.Hbox4.add(self.description_entry)
self.Hbox5 = gtk.HBox()
table.attach(self.Hbox5, 0, 1, 6, 7, xpadding=5, ypadding=3)
self.submit_button = gtk.Button(_('Submit to Web'))
self.submit_button.set_size_request(300, 25)
self.submit_button.connect('pressed', self._do_remote_logon)
self.Hbox5.add(self.submit_button)
self.cancel_button = gtk.Button(_('Cancel'))
self.cancel_button.set_size_request(300, 25)
self.cancel_button.connect('pressed', self._stop_uploading)
self.Hbox5.add(self.cancel_button)
self.pop_up.show_all()
def _stop_uploading(self, widget, event=None):
""" Hide the popup when the upload is complte """
self.uploading = False
self.pop_up.hide()
def _do_remote_logon(self, widget):
""" Log into the upload server """
username = self.username_entry.get_text()
password = self.password_entry.get_text()
server = xmlrpclib.ServerProxy(self._upload_server + '/call/xmlrpc')
logged_in = server.login_remote(username, password)
if logged_in:
upload_key = logged_in
self._do_submit_to_web(upload_key)
else:
self.login_message.set_text(_('Login failed'))
def _do_submit_to_web(self, key):
""" Submit project to the server """
title = self.title_entry.get_text()
description = self.description_entry.get_buffer().get_text(
*self.description_entry.get_buffer().get_bounds())
tafile, imagefile = self.tw.save_for_upload(title)
# Set a maximum file size for image to be uploaded.
if int(os.path.getsize(imagefile)) > self._max_file_size:
import Image
while int(os.path.getsize(imagefile)) > self._max_file_size:
big_file = Image.open(imagefile)
                smaller_file = big_file.resize(
                    (int(0.9 * big_file.size[0]),
                     int(0.9 * big_file.size[1])),
                    Image.ANTIALIAS)
smaller_file.save(imagefile, quality=100)
c = pycurl.Curl()
c.setopt(c.POST, 1)
c.setopt(c.FOLLOWLOCATION, 1)
c.setopt(c.URL, self._upload_server + '/upload')
c.setopt(c.HTTPHEADER, ["Expect:"])
c.setopt(c.HTTPPOST, [('file', (c.FORM_FILE, tafile)),
('newimage', (c.FORM_FILE, imagefile)),
('small_image', (c.FORM_FILE, imagefile)),
('title', title),
('description', description),
('upload_key', key), ('_formname',
'image_create')])
c.perform()
error_code = c.getinfo(c.HTTP_CODE)
        c.close()
os.remove(imagefile)
os.remove(tafile)
if error_code == 400:
self.login_message.set_text(_('Failed to upload!'))
else:
self.pop_up.hide()
self.uploading = False
if __name__ == "__main__":
# TODO: create test data...
u = Uploader_plugin(None)
if u.enabled():
print "Uploader is enabled... trying to upload"
u.do_upload_to_web()
gtk.main()
else:
print "Uploader is not enabled... exiting"
|
|
#!/usr/bin/python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import csv
import os
import random
NOT_AVAILABLE_STRING = 'not available'
class MediaTestMatrix:
"""This class reads video test matrix file and stores them.
Video test matrix contains different video titles as column and different
video codec (such as "webm" as columns. The matrix entry contains video
specification such as "320x240 50 fps VP8 / no audio 1 MBit/s PSNR 36.70"
or "not available". It also provides methods to generate information based
on the client's needs. For example, "video" tag or "audio" tag is returned
based on what kinds of media it is. This matrix is for managing different
kinds of media files in media testing.
TODO(imasaki@chromium.org): remove the code duplication in Generate methods.
"""
# Video extensions such as "webm".
_exts = []
# Video specification dictionary of list of video descriptions
# (the key is video title).
_specs = {}
# Video titles such as "bear".
_video_titles = []
# Tag dictionary (the key is extension).
_tags = {}
def ReadData(self, csv_file):
"""Reads CSV file and stores in list specs and extensions.
CSV file should have the following format:
"ext","tag","title1","title2",....
"0.webm", "video","description1","descrption2", "not available"
(when it is not available).
Args:
      csv_file: name of the CSV file that contains the video matrix
          information described above.
Raises:
csv.Error if the number of columns is not the same as the number of
          titles.
"""
# Clean up all data.
self._exts = []
self._specs = {}
self._video_titles = []
self._tags = {}
file = open(csv_file, 'rb')
reader = csv.reader(file)
for counter, row in enumerate(reader):
if counter == 0:
# First row is comment. So, skip it.
pass
elif counter == 1:
# Second row is for header (video titles).
for title in row[1:]:
self._video_titles.append(title)
self._specs[title] = []
else:
# Error checking is done here based on the number of titles
if len(self._video_titles) != len(row) - 1:
print "Row %d should have %d columns but has %d columns" % (counter,
len(self._video_titles), len(row))
raise csv.Error
# First column should contain extension.
self._exts.append(row[0])
# Second column should contain tag (audio or video).
self._tags[row[0]] = row[1]
for i in range(len(row) - 2):
self._specs[self._video_titles[i]].append(row[i + 2])
file.close()
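  # A hypothetical illustration (not from the original test data), following
  # the format described in the docstring above:
  #
  #   some comment row, skipped by the parser
  #   "ext","tag","title1","title2"
  #   "0.webm","video","320x240 30 fps VP8","not available"
  #   "0.ogv","video","not available","640x360 25 fps Theora"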
def _GenerateMediaInfo(self, sub_type, name, media_test_matrix_home_url):
"""Generate media information from matrix generated by CSV file.
Args:
sub_type: index of the extensions (rows in the CSV file).
name: the name of the video file (column name in the CSV file).
media_test_matrix_home_url: url of the matrix home that used
to generate link.
Returns:
a tuple of (info, url, link, tag, is_video, nickname).
info: description of the video (an entry in the matrix).
url: URL of the video or audio.
link: href tag for video or audio.
tag: a tag string can be used to display video or audio.
is_video: True if the file is video; False otherwise.
nickname: nickname of the video for presentation.
"""
    if name == 'none':
return
cur_video = name
is_video = self._tags[self._exts[sub_type]] == 'video'
file = os.path.join(cur_video, cur_video + self._exts[sub_type])
# Specs were generated from CSV file in readFile().
info = self._specs[cur_video][sub_type]
url = media_test_matrix_home_url + file
link = '<b><a href="%s">%s</a> %s</b>' % (url, file, info)
type = ['audio', 'video'][is_video]
tag_attr = 'id="v" controls autoplay playbackRate=1 loop valign=top'
tag = '<%s %s src="%s">' % (type, tag_attr, file)
nickname = cur_video + self._exts[sub_type]
return (info, url, link, tag, is_video, nickname)
def GenerateRandomMediaInfo(self, number_of_tries=10,
video_matrix_home_url=''):
"""Generate random video info that can be used for playing this media.
Args:
      number_of_tries: number of attempts to find an available video/audio
          before giving up.
video_matrix_home_url: url of the matrix home that is used
to generate link.
Returns:
a list of a tuples (info, url, link, tag, is_video, nickname).
info: description of the video (an entry in the matrix).
url: URL of the video/audio.
link: href tag for video or audio.
tag: a tag string can be used to display video or audio.
is_video: True if the file is video; False otherwise.
nickname: nickname of the video for presentation.
"""
# Try number_of_tries times to find available video/audio.
for i in range(number_of_tries):
      sub_type = random.randint(0, len(self._exts) - 1)
name = self._video_titles[random.randint(0, len(self._video_titles)-1)]
(info, url, link, tag, is_video, nickname) = (
self._GenerateMediaInfo(sub_type, name, video_matrix_home_url))
if NOT_AVAILABLE_STRING not in info:
return (info, url, link, tag, is_video, nickname)
# Gives up after that (very small chance).
return None
def GenerateAllMediaInfos(self, video_only, media_matrix_home_url=''):
"""Generate all video infos that can be used for playing this media.
Args:
video_only: True if generate random video only (not audio).
media_matrix_home_url: url of the matrix home that is used
to generate link.
Returns:
a list of a tuples (info, url, link, tag, is_video, nickname).
info: description of the video (an entry in the matrix).
url: URL of the video/audio.
link: href tag for video or audio.
tag: a tag string can be used to display video or audio.
is_video: True if the file is video; False otherwise.
nickname: nickname of the video for presentation (such as bear.webm).
"""
media_infos = []
for i in range(0, len(self._video_titles)-1):
name = self._video_titles[i]
for sub_type in range(len(self._exts)):
(info, url, link, tag, is_video, nickname) = (
self._GenerateMediaInfo(sub_type, name, media_matrix_home_url))
if ((NOT_AVAILABLE_STRING not in info) and
((not video_only) or (video_only and is_video))):
media_infos.append([info, url, link, tag, is_video, nickname])
return media_infos
def GenerateAllMediaInfosInCompactForm(self, video_only,
media_matrix_home_url=''):
"""Generate all media information in compact form.
Compact form contains only url, nickname and tag.
Args:
video_only: True if generate random video only.
media_matrix_home_url: url of the matrix home that is used
to generate link.
Returns:
a list of a tuples (url, nickname, tag).
url: URL of the video/audio.
nickname: nickname of the video/audio for presentation
(such as bear.webm).
tag: HTML5 tag for the video/audio.
"""
media_infos = []
for i in range(0, len(self._video_titles)-1):
name = self._video_titles[i]
for sub_type in range(len(self._exts)):
(info, url, link, tag, is_video, nickname) = (
self._GenerateMediaInfo(sub_type, name, media_matrix_home_url))
tag = ['audio', 'video'][is_video]
if ((not NOT_AVAILABLE_STRING in info) and
((not video_only) or (video_only and is_video))):
media_infos.append([url, nickname, tag])
return media_infos
@staticmethod
def LookForMediaInfoInCompactFormByNickName(compact_list, target):
"""Look for video by its nickname in the compact_list.
Args:
compact_list: a list generated by GenerateAllMediaInfosInCompactForm.
target: a target nickname string to look for.
Returns:
      A tuple (tag, url, nickname) where nickname matches the target string.
url: URL of the video/audio.
nickname: nickname of the video/audio for presentation
(such as bear.webm).
tag: HTML5 tag for the video/audio.
"""
for url, nickname, tag in compact_list:
if target == nickname:
return (tag, url, nickname)
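# A brief usage sketch (not part of the original module); the CSV file name
# and the matrix home URL below are made-up placeholders:
#
#   matrix = MediaTestMatrix()
#   matrix.ReadData('media_test_matrix.csv')
#   compact = matrix.GenerateAllMediaInfosInCompactForm(
#       video_only=True, media_matrix_home_url='http://example.com/media/')
#   entry = MediaTestMatrix.LookForMediaInfoInCompactFormByNickName(
#       compact, 'bear.webm')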
|
|
#!/usr/bin/env python
# Google Code Jam
# Google Code Jam 2017
# Round 1A 2017
# Problem C. Play the Dragon
from __future__ import print_function, division
import math
import sys
import random
class Game(object):
def __init__(self, hd, ad, hk, ak, b, d):
self.hd = hd
self.ad = ad
self.hk = hk
self.ak = ak
self.b = b
self.d = d
self.dragon = Dragon(hd, ad, b, d)
self.knight = Knight(hk, ak)
self.turn = 0
self.actions = []
def buff(self):
self.dragon.ad += self.dragon.b
self.actions.append('B')
self.turn += 1
def debuff(self):
self.knight.ak += -self.dragon.d
self.knight.ak = max(self.knight.ak, 0)
self.actions.append('D')
self.turn += 1
def cure(self):
self.dragon.hd = self.dragon.max_hd
self.actions.append('C')
self.turn += 1
def d_attack(self):
self.knight.hk += -self.dragon.ad
self.actions.append('A')
self.turn += 1
def k_attack(self):
self.dragon.hd += -self.knight.ak
def is_dragon_alive(self):
return self.dragon.hd > 0
def is_knight_alive(self):
return self.knight.hk > 0
def reset(self):
self.dragon = Dragon(self.hd, self.ad, self.b, self.d)
self.knight = Knight(self.hk, self.ak)
self.turn = 0
self.actions = []
def calc_optimal_buff(self):
if self.dragon.b == 0:
return 0
min_buff_plus_attack_turn = sys.maxint
optimal_buff_turn = sys.maxint
buff_turn = 0
while True:
buff_plus_attack = self.dragon.ad + buff_turn * self.dragon.b
buff_plus_attack_turn = math.ceil(self.knight.hk / buff_plus_attack) + buff_turn
# print(buff_plus_attack_turn)
if buff_plus_attack_turn <= min_buff_plus_attack_turn:
min_buff_plus_attack_turn = buff_plus_attack_turn
optimal_buff_turn = buff_turn
if min_buff_plus_attack_turn <= 1:
break
buff_turn += 1
else:
break
return optimal_buff_turn
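    # A worked illustration (not from the original solution) of the trade-off
    # searched above: with knight hp hk=16, dragon attack ad=5 and buff b=2,
    # buffing 0 times costs ceil(16/5)=4 turns, buffing once costs
    # 1+ceil(16/7)=4 turns, buffing twice costs 2+ceil(16/9)=4 turns, and
    # buffing three times costs 3+ceil(16/11)=5 turns; the loop keeps the
    # last buff count that still ties the minimum (2 in this example).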
def calc_debuff_thesholds(self):
if self.dragon.d == 0:
return [0]
turn_to_kill_dragon = math.ceil(self.dragon.max_hd / self.knight.ak)
debuff_thesholds = [0]
while True:
knight_debuffed_attack = int(math.ceil(self.dragon.max_hd / turn_to_kill_dragon - 1))
debuff_theshold = int(math.ceil(max(self.knight.ak - knight_debuffed_attack, 0) / self.dragon.d))
debuff_thesholds.append(debuff_theshold)
if knight_debuffed_attack <= 0:
break
turn_to_kill_dragon += 1
return sorted(list(set(debuff_thesholds)))
# return list(range(100))
def play(self):
min_turn = sys.maxint
max_turn = 10000
actions = []
last_action = ''
optimal_buff = self.calc_optimal_buff()
print('b', optimal_buff)
debuff_thesholds = self.calc_debuff_thesholds()
print('d', debuff_thesholds)
for debuff_theshold in debuff_thesholds:
for _ in range(max_turn):
if self.is_dragon_alive() and self.is_knight_alive():
turn_to_kill_knight_now = math.ceil(self.knight.hk / self.dragon.ad)
turn_to_kill_knight_buff = math.ceil(self.knight.hk / (self.dragon.ad + self.dragon.b)) + 1
try:
turn_to_kill_dragon_now = math.ceil(self.dragon.hd / self.knight.ak)
except ZeroDivisionError:
turn_to_kill_dragon_now = sys.maxint
try:
turn_to_kill_dragon_debuff = math.ceil(self.dragon.hd / max(self.knight.ak - self.dragon.d, 0))
except ZeroDivisionError:
turn_to_kill_dragon_debuff = sys.maxint
# print('turn', self.turn, 'A')
# print(self.dragon.ad, self.knight.hk, math.ceil(self.dragon.ad / self.knight.hk))
# print('turn_to_kill_knight_now', turn_to_kill_knight_now)
# print('turn_to_kill_dragon_now', turn_to_kill_dragon_now)
# print('turn_to_kill_knight_buff', turn_to_kill_knight_buff)
# print('turn_to_kill_dragon_debuff', turn_to_kill_dragon_debuff)
if turn_to_kill_knight_now <= 1:
# print('ATTACK')
self.d_attack()
elif turn_to_kill_dragon_debuff > 1 and turn_to_kill_dragon_now == 1:
# print('DEBUFF')
self.debuff()
elif turn_to_kill_dragon_now == 1:
# print('CURE')
self.cure()
elif self.actions.count('D') < debuff_theshold:
# print('DEBUFF')
self.debuff()
elif self.actions.count('B') < optimal_buff:
# print('BUFF')
self.buff()
else:
# print('ATTACK')
self.d_attack()
if last_action == 'C' and self.actions[-1] == 'C':
self.turn = sys.maxint
break
last_action = self.actions[-1]
else:
break
if self.is_knight_alive():
self.k_attack()
else:
break
# print('turn', self.turn, 'B')
# print('dragon', self.dragon.hd, self.dragon.ad, self.knight.hk, self.knight.ak)
# print()
if not self.is_dragon_alive():
self.turn = sys.maxint
break
print(debuff_theshold, self.turn)
if self.turn <= min_turn:
min_turn = self.turn
actions = self.actions
self.reset()
print(actions)
return min_turn
class Dragon(object):
def __init__(self, hd, ad, b, d):
self.max_hd = hd
self.hd = hd
self.ad = ad
self.b = b
self.d = d
class Knight(object):
def __init__(self, hk, ak):
self.hk = hk
self.ak = ak
if __name__ == '__main__':
import os
samples = [
(11, 5, 16, 5, 0, 0),
(3, 1, 3, 2, 2, 0),
(3, 1, 3, 2, 1, 0),
(2, 1, 5, 1, 1, 1)
]
# for sample in samples:
# min_turn = sys.maxint
# for _ in range(1):
# game = Game(*sample)
# turn = game.play()
# min_turn = min(turn, min_turn)
# if min_turn == sys.maxint:
# print('IMPOSSIBLE')
# else:
# print(min_turn)
data_files = ['C-small-practice',]
# 'B-large-practice']
for f in data_files:
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)),
'{0}.in'.format(f)), 'r') as input_file:
lines = input_file.readlines()
input_count = int(lines[0].replace('\n' ,''))
inputs = [line.replace('\n', '') for line in lines[1:]]
i = 1
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)),
'{0}.out'.format(f)), 'w') as output_file:
for in_ in inputs:
print('Case #{0}'.format(i))
test_case = tuple([int(_) for _ in in_.split(' ')])
print(test_case)
min_turn = sys.maxint
for _ in range(1):
game = Game(*test_case)
turn = game.play()
min_turn = min(turn, min_turn)
if min_turn == sys.maxint:
output_file.write('Case #{0}: IMPOSSIBLE\n'.format(i))
print('IMPOSSIBLE')
else:
output_file.write('Case #{0}: {1}\n'.format(i, min_turn))
print(min_turn)
print()
i += 1
|
|
import logging
from logging import makeLogRecord
from cee_syslog_handler import NamedCeeLogger, RegexFilter, RegexRedactFilter
_DUMMY_HOST = ("localhost", 1337)
_DUMMY_PROTOCOL = 2
class CollectingNamedCeeLogger(NamedCeeLogger):
def __init__(self, *args, **kwargs):
super(CollectingNamedCeeLogger, self).__init__(*args, **kwargs)
self.emitted_records = []
def emit(self, record):
self.emitted_records.append(record)
return super(CollectingNamedCeeLogger, self).emit(record)
def test_filter():
filter_regex = r"/health|/metrics"
handler = CollectingNamedCeeLogger(_DUMMY_HOST, _DUMMY_PROTOCOL, "myname")
handler.addFilter(RegexFilter(filter_regex))
handler.handle(makeLogRecord({"msg": "POST /important/endpoint 200 OK"}))
assert len(handler.emitted_records) == 1 # record emitted
handler.handle(makeLogRecord({"msg": "GET /health 200 OK"}))
assert len(handler.emitted_records) == 1 # no new record emitted
handler.handle(makeLogRecord({"msg": "GET /metrics 404"}))
assert len(handler.emitted_records) == 1 # no new record emitted
handler.handle(
makeLogRecord(
{"msg": "Eating vegetables is healthy and improves blood metrics"}
)
)
assert len(handler.emitted_records) == 2 # new record emitted
def test_redacting_filter():
regex = r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}"
replace_string = "-#sensitive#-"
string_to_be_redacted = "172.24.41.42"
record = makeLogRecord(
{"name": "my.package.logger", "msg": "Connect by IP 172.24.41.42"}
)
handler = CollectingNamedCeeLogger(_DUMMY_HOST, _DUMMY_PROTOCOL, "myname")
handler.addFilter(
RegexRedactFilter(filter_regex=regex, replace_string=replace_string)
)
handler.handle(record)
assert len(handler.emitted_records) == 1
message = handler.emitted_records[0].getMessage()
assert string_to_be_redacted not in message
assert replace_string in message
def test_redact_injected_variables():
regex = r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}"
replace_string = "-#sensitive#-"
string_to_be_redacted = "172.24.41.42"
arbitrary_object = SomeClass("some 172.24.41.42 arbitrary object")
record = makeLogRecord(
{
"name": "my.package.logger",
"msg": "Connect %d by IP %s %s",
"args": (42, "172.24.41.42", arbitrary_object),
}
)
handler = CollectingNamedCeeLogger(_DUMMY_HOST, _DUMMY_PROTOCOL, "myname")
handler.addFilter(
RegexRedactFilter(filter_regex=regex, replace_string=replace_string)
)
handler.handle(record)
assert len(handler.emitted_records) == 1
message = handler.emitted_records[0].getMessage()
assert string_to_be_redacted not in message
assert replace_string in message
def test_redact_exception():
regex = r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}"
replace_string = "-#sensitive#-"
string_to_be_redacted = "172.24.41.42"
logger = logging.getLogger(__name__)
handler = CollectingNamedCeeLogger(_DUMMY_HOST, _DUMMY_PROTOCOL, "myname")
handler.addFilter(
RegexRedactFilter(filter_regex=regex, replace_string=replace_string)
)
logger.addHandler(handler)
arbitrary_object = SomeClass("some 172.24.41.42 arbitrary object")
try:
raise MemoryError("something bad: ", string_to_be_redacted, arbitrary_object)
except MemoryError as e:
logger.exception(e)
assert len(handler.emitted_records) == 1
message = handler.emitted_records[0].getMessage()
assert string_to_be_redacted not in message
assert replace_string in message
def test_redact_objects():
regex = r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}"
replace_string = "-#sensitive#-"
string_to_be_redacted = "172.24.41.42"
logger = logging.getLogger(__name__)
handler = CollectingNamedCeeLogger(_DUMMY_HOST, _DUMMY_PROTOCOL, "myname")
handler.addFilter(
RegexRedactFilter(filter_regex=regex, replace_string=replace_string)
)
logger.addHandler(handler)
arbitrary = SomeClass("some message containing" + string_to_be_redacted)
logger.error(arbitrary)
assert len(handler.emitted_records) == 1
message = handler.emitted_records[0].getMessage()
assert string_to_be_redacted not in message
assert replace_string in message
def test_exception_not_in_message():
regex = r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}"
replace_string = "-#sensitive#-"
string_to_be_redacted = "172.24.41.42"
logger = logging.getLogger(__name__)
handler = CollectingNamedCeeLogger(_DUMMY_HOST, _DUMMY_PROTOCOL, "myname")
handler.addFilter(
RegexRedactFilter(filter_regex=regex, replace_string=replace_string)
)
logger.addHandler(handler)
try:
raise MemoryError("something bad: ", string_to_be_redacted)
except MemoryError:
logger.exception("Failed to do something")
assert len(handler.emitted_records) == 1
message = handler.emitted_records[0].getMessage()
assert string_to_be_redacted not in message
assert replace_string in message
def test_exc_text_redaction():
regex = r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}"
replace_string = "-#sensitive#-"
string_to_be_redacted = "172.24.41.42"
record = makeLogRecord(
{
"name": "my.package.logger",
"msg": "Connect by IP 172.24.41.42",
"exc_text": "something 192.168.0.1 something",
}
)
handler = CollectingNamedCeeLogger(_DUMMY_HOST, _DUMMY_PROTOCOL, "myname")
handler.addFilter(
RegexRedactFilter(filter_regex=regex, replace_string=replace_string)
)
handler.handle(record)
assert len(handler.emitted_records) == 1
exc_text = handler.emitted_records[0].exc_text
assert string_to_be_redacted not in exc_text
assert replace_string in exc_text
def test_filter_bypass():
regex = r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}"
record = makeLogRecord(
{
"name": "my.package.logger",
"msg": "Attempted connect to %s failed %d times",
"args": ("web.service", 42),
}
)
handler = CollectingNamedCeeLogger(_DUMMY_HOST, _DUMMY_PROTOCOL, "myname")
handler.addFilter(RegexRedactFilter(filter_regex=regex))
handler.handle(record)
assert len(handler.emitted_records) == 1
log_record = handler.emitted_records[0]
assert "Attempted connect to web.service failed 42 times" == log_record.getMessage()
assert ("web.service", 42) == log_record.args
class SomeClass:
"""
generic helper class
"""
def __init__(self, message):
self.message = message
def __str__(self):
return self.message
def __repr__(self):
return self.message
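# A minimal wiring sketch (not part of the test module) of how application code
# might attach the redacting filter; the syslog host, port and handler name are
# made-up placeholders:
#
#   handler = NamedCeeLogger(("syslog.example.com", 514), 2, "my-service")
#   handler.addFilter(RegexRedactFilter(
#       filter_regex=r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}",
#       replace_string="-#redacted#-"))
#   logging.getLogger("my.package").addHandler(handler)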
|
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Tests for MobileNet v1."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from nets import mobilenet_v1
slim = tf.contrib.slim
class MobilenetV1Test(tf.test.TestCase):
def testBuildClassificationNetwork(self):
batch_size = 5
height, width = 224, 224
num_classes = 1000
inputs = tf.random_uniform((batch_size, height, width, 3))
logits, end_points = mobilenet_v1.mobilenet_v1(inputs, num_classes)
self.assertTrue(logits.op.name.startswith('MobilenetV1/Logits'))
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, num_classes])
self.assertTrue('Predictions' in end_points)
self.assertListEqual(end_points['Predictions'].get_shape().as_list(),
[batch_size, num_classes])
def testBuildBaseNetwork(self):
batch_size = 5
height, width = 224, 224
inputs = tf.random_uniform((batch_size, height, width, 3))
net, end_points = mobilenet_v1.mobilenet_v1_base(inputs)
self.assertTrue(net.op.name.startswith('MobilenetV1/Conv2d_13'))
self.assertListEqual(net.get_shape().as_list(),
[batch_size, 7, 7, 1024])
expected_endpoints = ['Conv2d_0',
'Conv2d_1_depthwise', 'Conv2d_1_pointwise',
'Conv2d_2_depthwise', 'Conv2d_2_pointwise',
'Conv2d_3_depthwise', 'Conv2d_3_pointwise',
'Conv2d_4_depthwise', 'Conv2d_4_pointwise',
'Conv2d_5_depthwise', 'Conv2d_5_pointwise',
'Conv2d_6_depthwise', 'Conv2d_6_pointwise',
'Conv2d_7_depthwise', 'Conv2d_7_pointwise',
'Conv2d_8_depthwise', 'Conv2d_8_pointwise',
'Conv2d_9_depthwise', 'Conv2d_9_pointwise',
'Conv2d_10_depthwise', 'Conv2d_10_pointwise',
'Conv2d_11_depthwise', 'Conv2d_11_pointwise',
'Conv2d_12_depthwise', 'Conv2d_12_pointwise',
'Conv2d_13_depthwise', 'Conv2d_13_pointwise']
self.assertItemsEqual(end_points.keys(), expected_endpoints)
def testBuildOnlyUptoFinalEndpoint(self):
batch_size = 5
height, width = 224, 224
endpoints = ['Conv2d_0',
'Conv2d_1_depthwise', 'Conv2d_1_pointwise',
'Conv2d_2_depthwise', 'Conv2d_2_pointwise',
'Conv2d_3_depthwise', 'Conv2d_3_pointwise',
'Conv2d_4_depthwise', 'Conv2d_4_pointwise',
'Conv2d_5_depthwise', 'Conv2d_5_pointwise',
'Conv2d_6_depthwise', 'Conv2d_6_pointwise',
'Conv2d_7_depthwise', 'Conv2d_7_pointwise',
'Conv2d_8_depthwise', 'Conv2d_8_pointwise',
'Conv2d_9_depthwise', 'Conv2d_9_pointwise',
'Conv2d_10_depthwise', 'Conv2d_10_pointwise',
'Conv2d_11_depthwise', 'Conv2d_11_pointwise',
'Conv2d_12_depthwise', 'Conv2d_12_pointwise',
'Conv2d_13_depthwise', 'Conv2d_13_pointwise']
for index, endpoint in enumerate(endpoints):
with tf.Graph().as_default():
inputs = tf.random_uniform((batch_size, height, width, 3))
out_tensor, end_points = mobilenet_v1.mobilenet_v1_base(
inputs, final_endpoint=endpoint)
self.assertTrue(out_tensor.op.name.startswith(
'MobilenetV1/' + endpoint))
self.assertItemsEqual(endpoints[:index+1], end_points)
def testBuildCustomNetworkUsingConvDefs(self):
batch_size = 5
height, width = 224, 224
conv_defs = [
mobilenet_v1.Conv(kernel=[3, 3], stride=2, depth=32),
mobilenet_v1.DepthSepConv(kernel=[3, 3], stride=1, depth=64),
mobilenet_v1.DepthSepConv(kernel=[3, 3], stride=2, depth=128),
mobilenet_v1.DepthSepConv(kernel=[3, 3], stride=1, depth=512)
]
inputs = tf.random_uniform((batch_size, height, width, 3))
net, end_points = mobilenet_v1.mobilenet_v1_base(
inputs, final_endpoint='Conv2d_3_pointwise', conv_defs=conv_defs)
self.assertTrue(net.op.name.startswith('MobilenetV1/Conv2d_3'))
self.assertListEqual(net.get_shape().as_list(),
[batch_size, 56, 56, 512])
expected_endpoints = ['Conv2d_0',
'Conv2d_1_depthwise', 'Conv2d_1_pointwise',
'Conv2d_2_depthwise', 'Conv2d_2_pointwise',
'Conv2d_3_depthwise', 'Conv2d_3_pointwise']
self.assertItemsEqual(end_points.keys(), expected_endpoints)
def testBuildAndCheckAllEndPointsUptoConv2d_13(self):
batch_size = 5
height, width = 224, 224
inputs = tf.random_uniform((batch_size, height, width, 3))
with slim.arg_scope([slim.conv2d, slim.separable_conv2d],
normalizer_fn=slim.batch_norm):
_, end_points = mobilenet_v1.mobilenet_v1_base(
inputs, final_endpoint='Conv2d_13_pointwise')
endpoints_shapes = {'Conv2d_0': [batch_size, 112, 112, 32],
'Conv2d_1_depthwise': [batch_size, 112, 112, 32],
'Conv2d_1_pointwise': [batch_size, 112, 112, 64],
'Conv2d_2_depthwise': [batch_size, 56, 56, 64],
'Conv2d_2_pointwise': [batch_size, 56, 56, 128],
'Conv2d_3_depthwise': [batch_size, 56, 56, 128],
'Conv2d_3_pointwise': [batch_size, 56, 56, 128],
'Conv2d_4_depthwise': [batch_size, 28, 28, 128],
'Conv2d_4_pointwise': [batch_size, 28, 28, 256],
'Conv2d_5_depthwise': [batch_size, 28, 28, 256],
'Conv2d_5_pointwise': [batch_size, 28, 28, 256],
'Conv2d_6_depthwise': [batch_size, 14, 14, 256],
'Conv2d_6_pointwise': [batch_size, 14, 14, 512],
'Conv2d_7_depthwise': [batch_size, 14, 14, 512],
'Conv2d_7_pointwise': [batch_size, 14, 14, 512],
'Conv2d_8_depthwise': [batch_size, 14, 14, 512],
'Conv2d_8_pointwise': [batch_size, 14, 14, 512],
'Conv2d_9_depthwise': [batch_size, 14, 14, 512],
'Conv2d_9_pointwise': [batch_size, 14, 14, 512],
'Conv2d_10_depthwise': [batch_size, 14, 14, 512],
'Conv2d_10_pointwise': [batch_size, 14, 14, 512],
'Conv2d_11_depthwise': [batch_size, 14, 14, 512],
'Conv2d_11_pointwise': [batch_size, 14, 14, 512],
'Conv2d_12_depthwise': [batch_size, 7, 7, 512],
'Conv2d_12_pointwise': [batch_size, 7, 7, 1024],
'Conv2d_13_depthwise': [batch_size, 7, 7, 1024],
'Conv2d_13_pointwise': [batch_size, 7, 7, 1024]}
self.assertItemsEqual(endpoints_shapes.keys(), end_points.keys())
for endpoint_name, expected_shape in endpoints_shapes.iteritems():
self.assertTrue(endpoint_name in end_points)
self.assertListEqual(end_points[endpoint_name].get_shape().as_list(),
expected_shape)
def testOutputStride16BuildAndCheckAllEndPointsUptoConv2d_13(self):
batch_size = 5
height, width = 224, 224
output_stride = 16
inputs = tf.random_uniform((batch_size, height, width, 3))
with slim.arg_scope([slim.conv2d, slim.separable_conv2d],
normalizer_fn=slim.batch_norm):
_, end_points = mobilenet_v1.mobilenet_v1_base(
inputs, output_stride=output_stride,
final_endpoint='Conv2d_13_pointwise')
endpoints_shapes = {'Conv2d_0': [batch_size, 112, 112, 32],
'Conv2d_1_depthwise': [batch_size, 112, 112, 32],
'Conv2d_1_pointwise': [batch_size, 112, 112, 64],
'Conv2d_2_depthwise': [batch_size, 56, 56, 64],
'Conv2d_2_pointwise': [batch_size, 56, 56, 128],
'Conv2d_3_depthwise': [batch_size, 56, 56, 128],
'Conv2d_3_pointwise': [batch_size, 56, 56, 128],
'Conv2d_4_depthwise': [batch_size, 28, 28, 128],
'Conv2d_4_pointwise': [batch_size, 28, 28, 256],
'Conv2d_5_depthwise': [batch_size, 28, 28, 256],
'Conv2d_5_pointwise': [batch_size, 28, 28, 256],
'Conv2d_6_depthwise': [batch_size, 14, 14, 256],
'Conv2d_6_pointwise': [batch_size, 14, 14, 512],
'Conv2d_7_depthwise': [batch_size, 14, 14, 512],
'Conv2d_7_pointwise': [batch_size, 14, 14, 512],
'Conv2d_8_depthwise': [batch_size, 14, 14, 512],
'Conv2d_8_pointwise': [batch_size, 14, 14, 512],
'Conv2d_9_depthwise': [batch_size, 14, 14, 512],
'Conv2d_9_pointwise': [batch_size, 14, 14, 512],
'Conv2d_10_depthwise': [batch_size, 14, 14, 512],
'Conv2d_10_pointwise': [batch_size, 14, 14, 512],
'Conv2d_11_depthwise': [batch_size, 14, 14, 512],
'Conv2d_11_pointwise': [batch_size, 14, 14, 512],
'Conv2d_12_depthwise': [batch_size, 14, 14, 512],
'Conv2d_12_pointwise': [batch_size, 14, 14, 1024],
'Conv2d_13_depthwise': [batch_size, 14, 14, 1024],
'Conv2d_13_pointwise': [batch_size, 14, 14, 1024]}
self.assertItemsEqual(endpoints_shapes.keys(), end_points.keys())
for endpoint_name, expected_shape in endpoints_shapes.iteritems():
self.assertTrue(endpoint_name in end_points)
self.assertListEqual(end_points[endpoint_name].get_shape().as_list(),
expected_shape)
def testOutputStride8BuildAndCheckAllEndPointsUptoConv2d_13(self):
batch_size = 5
height, width = 224, 224
output_stride = 8
inputs = tf.random_uniform((batch_size, height, width, 3))
with slim.arg_scope([slim.conv2d, slim.separable_conv2d],
normalizer_fn=slim.batch_norm):
_, end_points = mobilenet_v1.mobilenet_v1_base(
inputs, output_stride=output_stride,
final_endpoint='Conv2d_13_pointwise')
endpoints_shapes = {'Conv2d_0': [batch_size, 112, 112, 32],
'Conv2d_1_depthwise': [batch_size, 112, 112, 32],
'Conv2d_1_pointwise': [batch_size, 112, 112, 64],
'Conv2d_2_depthwise': [batch_size, 56, 56, 64],
'Conv2d_2_pointwise': [batch_size, 56, 56, 128],
'Conv2d_3_depthwise': [batch_size, 56, 56, 128],
'Conv2d_3_pointwise': [batch_size, 56, 56, 128],
'Conv2d_4_depthwise': [batch_size, 28, 28, 128],
'Conv2d_4_pointwise': [batch_size, 28, 28, 256],
'Conv2d_5_depthwise': [batch_size, 28, 28, 256],
'Conv2d_5_pointwise': [batch_size, 28, 28, 256],
'Conv2d_6_depthwise': [batch_size, 28, 28, 256],
'Conv2d_6_pointwise': [batch_size, 28, 28, 512],
'Conv2d_7_depthwise': [batch_size, 28, 28, 512],
'Conv2d_7_pointwise': [batch_size, 28, 28, 512],
'Conv2d_8_depthwise': [batch_size, 28, 28, 512],
'Conv2d_8_pointwise': [batch_size, 28, 28, 512],
'Conv2d_9_depthwise': [batch_size, 28, 28, 512],
'Conv2d_9_pointwise': [batch_size, 28, 28, 512],
'Conv2d_10_depthwise': [batch_size, 28, 28, 512],
'Conv2d_10_pointwise': [batch_size, 28, 28, 512],
'Conv2d_11_depthwise': [batch_size, 28, 28, 512],
'Conv2d_11_pointwise': [batch_size, 28, 28, 512],
'Conv2d_12_depthwise': [batch_size, 28, 28, 512],
'Conv2d_12_pointwise': [batch_size, 28, 28, 1024],
'Conv2d_13_depthwise': [batch_size, 28, 28, 1024],
'Conv2d_13_pointwise': [batch_size, 28, 28, 1024]}
self.assertItemsEqual(endpoints_shapes.keys(), end_points.keys())
for endpoint_name, expected_shape in endpoints_shapes.iteritems():
self.assertTrue(endpoint_name in end_points)
self.assertListEqual(end_points[endpoint_name].get_shape().as_list(),
expected_shape)
def testBuildAndCheckAllEndPointsApproximateFaceNet(self):
batch_size = 5
height, width = 128, 128
inputs = tf.random_uniform((batch_size, height, width, 3))
with slim.arg_scope([slim.conv2d, slim.separable_conv2d],
normalizer_fn=slim.batch_norm):
_, end_points = mobilenet_v1.mobilenet_v1_base(
inputs, final_endpoint='Conv2d_13_pointwise', depth_multiplier=0.75)
# For the Conv2d_0 layer FaceNet has depth=16
endpoints_shapes = {'Conv2d_0': [batch_size, 64, 64, 24],
'Conv2d_1_depthwise': [batch_size, 64, 64, 24],
'Conv2d_1_pointwise': [batch_size, 64, 64, 48],
'Conv2d_2_depthwise': [batch_size, 32, 32, 48],
'Conv2d_2_pointwise': [batch_size, 32, 32, 96],
'Conv2d_3_depthwise': [batch_size, 32, 32, 96],
'Conv2d_3_pointwise': [batch_size, 32, 32, 96],
'Conv2d_4_depthwise': [batch_size, 16, 16, 96],
'Conv2d_4_pointwise': [batch_size, 16, 16, 192],
'Conv2d_5_depthwise': [batch_size, 16, 16, 192],
'Conv2d_5_pointwise': [batch_size, 16, 16, 192],
'Conv2d_6_depthwise': [batch_size, 8, 8, 192],
'Conv2d_6_pointwise': [batch_size, 8, 8, 384],
'Conv2d_7_depthwise': [batch_size, 8, 8, 384],
'Conv2d_7_pointwise': [batch_size, 8, 8, 384],
'Conv2d_8_depthwise': [batch_size, 8, 8, 384],
'Conv2d_8_pointwise': [batch_size, 8, 8, 384],
'Conv2d_9_depthwise': [batch_size, 8, 8, 384],
'Conv2d_9_pointwise': [batch_size, 8, 8, 384],
'Conv2d_10_depthwise': [batch_size, 8, 8, 384],
'Conv2d_10_pointwise': [batch_size, 8, 8, 384],
'Conv2d_11_depthwise': [batch_size, 8, 8, 384],
'Conv2d_11_pointwise': [batch_size, 8, 8, 384],
'Conv2d_12_depthwise': [batch_size, 4, 4, 384],
'Conv2d_12_pointwise': [batch_size, 4, 4, 768],
'Conv2d_13_depthwise': [batch_size, 4, 4, 768],
'Conv2d_13_pointwise': [batch_size, 4, 4, 768]}
self.assertItemsEqual(endpoints_shapes.keys(), end_points.keys())
for endpoint_name, expected_shape in endpoints_shapes.iteritems():
self.assertTrue(endpoint_name in end_points)
self.assertListEqual(end_points[endpoint_name].get_shape().as_list(),
expected_shape)
def testModelHasExpectedNumberOfParameters(self):
batch_size = 5
height, width = 224, 224
inputs = tf.random_uniform((batch_size, height, width, 3))
with slim.arg_scope([slim.conv2d, slim.separable_conv2d],
normalizer_fn=slim.batch_norm):
mobilenet_v1.mobilenet_v1_base(inputs)
total_params, _ = slim.model_analyzer.analyze_vars(
slim.get_model_variables())
self.assertAlmostEqual(3217920L, total_params)
def testBuildEndPointsWithDepthMultiplierLessThanOne(self):
batch_size = 5
height, width = 224, 224
num_classes = 1000
inputs = tf.random_uniform((batch_size, height, width, 3))
_, end_points = mobilenet_v1.mobilenet_v1(inputs, num_classes)
endpoint_keys = [key for key in end_points.keys() if key.startswith('Conv')]
_, end_points_with_multiplier = mobilenet_v1.mobilenet_v1(
inputs, num_classes, scope='depth_multiplied_net',
depth_multiplier=0.5)
for key in endpoint_keys:
original_depth = end_points[key].get_shape().as_list()[3]
new_depth = end_points_with_multiplier[key].get_shape().as_list()[3]
self.assertEqual(0.5 * original_depth, new_depth)
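  # Illustrative note (not from the original test): depth_multiplier scales the
  # channel count of every layer, e.g. with depth_multiplier=0.5 the 64-channel
  # Conv2d_1_pointwise endpoint shrinks to 32 channels, which is what the
  # per-endpoint assertion above verifies.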
def testBuildEndPointsWithDepthMultiplierGreaterThanOne(self):
batch_size = 5
height, width = 224, 224
num_classes = 1000
inputs = tf.random_uniform((batch_size, height, width, 3))
_, end_points = mobilenet_v1.mobilenet_v1(inputs, num_classes)
endpoint_keys = [key for key in end_points.keys()
if key.startswith('Mixed') or key.startswith('Conv')]
_, end_points_with_multiplier = mobilenet_v1.mobilenet_v1(
inputs, num_classes, scope='depth_multiplied_net',
depth_multiplier=2.0)
for key in endpoint_keys:
original_depth = end_points[key].get_shape().as_list()[3]
new_depth = end_points_with_multiplier[key].get_shape().as_list()[3]
self.assertEqual(2.0 * original_depth, new_depth)
def testRaiseValueErrorWithInvalidDepthMultiplier(self):
batch_size = 5
height, width = 224, 224
num_classes = 1000
inputs = tf.random_uniform((batch_size, height, width, 3))
with self.assertRaises(ValueError):
_ = mobilenet_v1.mobilenet_v1(
inputs, num_classes, depth_multiplier=-0.1)
with self.assertRaises(ValueError):
_ = mobilenet_v1.mobilenet_v1(
inputs, num_classes, depth_multiplier=0.0)
def testHalfSizeImages(self):
batch_size = 5
height, width = 112, 112
num_classes = 1000
inputs = tf.random_uniform((batch_size, height, width, 3))
logits, end_points = mobilenet_v1.mobilenet_v1(inputs, num_classes)
self.assertTrue(logits.op.name.startswith('MobilenetV1/Logits'))
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, num_classes])
pre_pool = end_points['Conv2d_13_pointwise']
self.assertListEqual(pre_pool.get_shape().as_list(),
[batch_size, 4, 4, 1024])
def testUnknownImageShape(self):
tf.reset_default_graph()
batch_size = 2
height, width = 224, 224
num_classes = 1000
input_np = np.random.uniform(0, 1, (batch_size, height, width, 3))
with self.test_session() as sess:
inputs = tf.placeholder(tf.float32, shape=(batch_size, None, None, 3))
logits, end_points = mobilenet_v1.mobilenet_v1(inputs, num_classes)
self.assertTrue(logits.op.name.startswith('MobilenetV1/Logits'))
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, num_classes])
pre_pool = end_points['Conv2d_13_pointwise']
feed_dict = {inputs: input_np}
tf.global_variables_initializer().run()
pre_pool_out = sess.run(pre_pool, feed_dict=feed_dict)
self.assertListEqual(list(pre_pool_out.shape), [batch_size, 7, 7, 1024])
  def testUnknownBatchSize(self):
batch_size = 1
height, width = 224, 224
num_classes = 1000
inputs = tf.placeholder(tf.float32, (None, height, width, 3))
logits, _ = mobilenet_v1.mobilenet_v1(inputs, num_classes)
self.assertTrue(logits.op.name.startswith('MobilenetV1/Logits'))
self.assertListEqual(logits.get_shape().as_list(),
[None, num_classes])
images = tf.random_uniform((batch_size, height, width, 3))
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
output = sess.run(logits, {inputs: images.eval()})
self.assertEquals(output.shape, (batch_size, num_classes))
def testEvaluation(self):
batch_size = 2
height, width = 224, 224
num_classes = 1000
eval_inputs = tf.random_uniform((batch_size, height, width, 3))
logits, _ = mobilenet_v1.mobilenet_v1(eval_inputs, num_classes,
is_training=False)
predictions = tf.argmax(logits, 1)
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
output = sess.run(predictions)
self.assertEquals(output.shape, (batch_size,))
def testTrainEvalWithReuse(self):
train_batch_size = 5
eval_batch_size = 2
height, width = 150, 150
num_classes = 1000
train_inputs = tf.random_uniform((train_batch_size, height, width, 3))
mobilenet_v1.mobilenet_v1(train_inputs, num_classes)
eval_inputs = tf.random_uniform((eval_batch_size, height, width, 3))
logits, _ = mobilenet_v1.mobilenet_v1(eval_inputs, num_classes,
reuse=True)
predictions = tf.argmax(logits, 1)
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
output = sess.run(predictions)
self.assertEquals(output.shape, (eval_batch_size,))
def testLogitsNotSqueezed(self):
num_classes = 25
images = tf.random_uniform([1, 224, 224, 3])
logits, _ = mobilenet_v1.mobilenet_v1(images,
num_classes=num_classes,
spatial_squeeze=False)
with self.test_session() as sess:
tf.global_variables_initializer().run()
logits_out = sess.run(logits)
self.assertListEqual(list(logits_out.shape), [1, 1, 1, num_classes])
if __name__ == '__main__':
tf.test.main()
|
|
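# Lookup table (assumed to be used by Xcode project tooling) mapping Xcode
# build-setting names to the kind of value each setting holds: 'String',
# 'StringList', 'Path', 'PathList', 'Boolean', 'Integer', or 'Enumeration'.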
kENVIRONMENT_LOOKUP = {
'ACTION': 'String',
'ADDITIONAL_SDKS': 'String',
'ALTERNATE_GROUP': 'String',
'ALTERNATE_MODE': 'String',
'ALTERNATE_OWNER': 'String',
'ALTERNATE_PERMISSIONS_FILES': 'StringList',
'ALWAYS_SEARCH_USER_PATHS': 'Boolean',
'APPLE_INTERNAL_DEVELOPER_DIR': 'Path',
'APPLE_INTERNAL_DIR': 'Path',
'APPLE_INTERNAL_DOCUMENTATION_DIR': 'Path',
'APPLE_INTERNAL_LIBRARY_DIR': 'Path',
'APPLE_INTERNAL_TOOLS': 'Path',
'APPLICATION_EXTENSION_API_ONLY': 'Boolean',
'APPLY_RULES_IN_COPY_FILES': 'Boolean',
'ARCHS': 'String',
'ARCHS_STANDARD': 'StringList',
'ARCHS_STANDARD_32_64_BIT': 'StringList',
'ARCHS_STANDARD_32_BIT': 'StringList',
'ARCHS_STANDARD_64_BIT': 'StringList',
'ARCHS_STANDARD_INCLUDING_64_BIT': 'StringList',
'ARCHS_UNIVERSAL_IPHONE_OS': 'StringList',
'ASSETCATALOG_COMPILER_APPICON_NAME': 'String',
'ASSETCATALOG_COMPILER_LAUNCHIMAGE_NAME': 'String',
'ASSETCATALOG_NOTICES': 'Boolean',
'ASSETCATALOG_OTHER_FLAGS': 'StringList',
'ASSETCATALOG_WARNINGS': 'Boolean',
'AVAILABLE_PLATFORMS': 'String',
'BUILD_COMPONENTS': 'String',
'BUILD_DIR': 'Path',
'BUILD_ROOT': 'Path',
'BUILD_STYLE': 'String',
'BUILD_VARIANTS': 'String',
'BUILT_PRODUCTS_DIR': 'Path',
'BUNDLE_LOADER': 'String',
'CACHE_ROOT': 'Path',
'CCHROOT': 'Path',
'CHMOD': 'Path',
'CHOWN': 'Path',
'CLANG_ALLOW_NON_MODULAR_INCLUDES_IN_FRAMEWORK_MODULES': 'Boolean',
'CLANG_ANALYZER_DEADCODE_DEADSTORES': 'Boolean',
'CLANG_ANALYZER_GCD': 'Boolean',
'CLANG_ANALYZER_MALLOC': 'Boolean',
'CLANG_ANALYZER_MEMORY_MANAGEMENT': 'Boolean',
'CLANG_ANALYZER_OBJC_ATSYNC': 'Boolean',
'CLANG_ANALYZER_OBJC_COLLECTIONS': 'Boolean',
'CLANG_ANALYZER_OBJC_INCOMP_METHOD_TYPES': 'Boolean',
'CLANG_ANALYZER_OBJC_NSCFERROR': 'Boolean',
'CLANG_ANALYZER_OBJC_RETAIN_COUNT': 'Boolean',
'CLANG_ANALYZER_OBJC_SELF_INIT': 'Boolean',
'CLANG_ANALYZER_OBJC_UNUSED_IVARS': 'Boolean',
'CLANG_ANALYZER_SECURITY_FLOATLOOPCOUNTER': 'Boolean',
'CLANG_ANALYZER_SECURITY_INSECUREAPI_GETPW_GETS': 'Boolean',
'CLANG_ANALYZER_SECURITY_INSECUREAPI_MKSTEMP': 'Boolean',
'CLANG_ANALYZER_SECURITY_INSECUREAPI_RAND': 'Boolean',
'CLANG_ANALYZER_SECURITY_INSECUREAPI_STRCPY': 'Boolean',
'CLANG_ANALYZER_SECURITY_INSECUREAPI_UNCHECKEDRETURN': 'Boolean',
'CLANG_ANALYZER_SECURITY_INSECUREAPI_VFORK': 'Boolean',
'CLANG_ANALYZER_SECURITY_KEYCHAIN_API': 'Boolean',
'CLANG_ARC_MIGRATE_DIR': 'Path',
'CLANG_ARC_MIGRATE_EMIT_ERROR': 'Boolean',
'CLANG_ARC_MIGRATE_PRECHECK': 'Enumeration',
'CLANG_ARC_MIGRATE_REPORT_OUTPUT': 'Path',
'CLANG_COLOR_DIAGNOSTICS': 'Boolean',
'CLANG_CXX_LANGUAGE_STANDARD': 'Enumeration',
'CLANG_CXX_LIBRARY': 'Enumeration',
'CLANG_DEBUG_INFORMATION_LEVEL': 'Enumeration',
'CLANG_ENABLE_APP_EXTENSION': 'Boolean',
'CLANG_ENABLE_MODULES': 'Boolean',
'CLANG_ENABLE_MODULE_IMPLEMENTATION_OF': 'Boolean',
'CLANG_ENABLE_OBJC_ARC': 'Boolean',
'CLANG_INSTRUMENT_FOR_OPTIMIZATION_PROFILING': 'Boolean',
'CLANG_LINK_OBJC_RUNTIME': 'Boolean',
'CLANG_MACRO_BACKTRACE_LIMIT': 'Integer',
'CLANG_MODULES_AUTOLINK': 'Boolean',
'CLANG_MODULES_IGNORE_MACROS': 'StringList',
'CLANG_MODULES_VALIDATE_SYSTEM_HEADERS': 'Boolean',
'CLANG_MODULES_VALIDATION_TIMESTAMP': 'String',
'CLANG_MODULE_CACHE_PATH': 'Path',
'CLANG_OBJC_MIGRATE_DIR': 'Path',
'CLANG_OPTIMIZATION_PROFILE_FILE': 'Path',
'CLANG_RETAIN_COMMENTS_FROM_SYSTEM_HEADERS': 'Boolean',
'CLANG_STATIC_ANALYZER_MODE': 'Enumeration',
'CLANG_STATIC_ANALYZER_MODE_ON_ANALYZE_ACTION': 'Enumeration',
'CLANG_USE_OPTIMIZATION_PROFILE': 'Boolean',
'CLANG_WARN_ASSIGN_ENUM': 'Boolean',
'CLANG_WARN_BOOL_CONVERSION': 'Boolean',
'CLANG_WARN_CONSTANT_CONVERSION': 'Boolean',
'CLANG_WARN_CXX0X_EXTENSIONS': 'Boolean',
'CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS': 'Boolean',
'CLANG_WARN_DIRECT_OBJC_ISA_USAGE': 'Enumeration',
'CLANG_WARN_DOCUMENTATION_COMMENTS': 'Boolean',
'CLANG_WARN_EMPTY_BODY': 'Boolean',
'CLANG_WARN_ENUM_CONVERSION': 'Boolean',
'CLANG_WARN_IMPLICIT_SIGN_CONVERSION': 'Boolean',
'CLANG_WARN_INT_CONVERSION': 'Boolean',
'CLANG_WARN_OBJC_EXPLICIT_OWNERSHIP_TYPE': 'Boolean',
'CLANG_WARN_OBJC_IMPLICIT_ATOMIC_PROPERTIES': 'Boolean',
'CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF': 'Boolean',
'CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS': 'Boolean',
'CLANG_WARN_OBJC_RECEIVER_WEAK': 'Boolean',
'CLANG_WARN_OBJC_REPEATED_USE_OF_WEAK': 'Boolean',
'CLANG_WARN_OBJC_ROOT_CLASS': 'Enumeration',
'CLANG_WARN_SUSPICIOUS_IMPLICIT_CONVERSION': 'Boolean',
'CLANG_WARN_UNREACHABLE_CODE': 'Boolean',
'CLANG_WARN__ARC_BRIDGE_CAST_NONARC': 'Boolean',
'CLANG_WARN__DUPLICATE_METHOD_MATCH': 'Boolean',
'CLANG_WARN__EXIT_TIME_DESTRUCTORS': 'Boolean',
'CLANG_X86_VECTOR_INSTRUCTIONS': 'Enumeration',
'CODE_SIGN_ENTITLEMENTS': 'String',
'CODE_SIGN_IDENTITY': 'String',
'CODE_SIGN_RESOURCE_RULES_PATH': 'Path',
'COLOR_DIAGNOSTICS': 'Boolean',
'COMBINE_HIDPI_IMAGES': 'Boolean',
'COMPOSITE_SDK_DIRS': 'PathList',
'COMPRESS_PNG_FILES': 'Boolean',
'CONFIGURATION': 'String',
'CONFIGURATION_BUILD_DIR': 'Path',
'CONFIGURATION_TEMP_DIR': 'Path',
'COPYING_PRESERVES_HFS_DATA': 'Boolean',
'COPY_PHASE_STRIP': 'Boolean',
'CP': 'Path',
'CPP_HEADERMAP_FILE': 'Path',
'CPP_HEADERMAP_FILE_FOR_ALL_NON_FRAMEWORK_TARGET_HEADERS': 'Path',
'CPP_HEADERMAP_FILE_FOR_ALL_TARGET_HEADERS': 'Path',
'CPP_HEADERMAP_FILE_FOR_GENERATED_FILES': 'Path',
'CPP_HEADERMAP_FILE_FOR_OWN_TARGET_HEADERS': 'Path',
'CPP_HEADERMAP_FILE_FOR_PROJECT_FILES': 'Path',
'CPP_HEADERMAP_PRODUCT_HEADERS_VFS_FILE': 'Path',
'CPP_HEADER_SYMLINKS_DIR': 'Path',
'CREATE_INFOPLIST_SECTION_IN_BINARY': 'Boolean',
'CURRENT_ARCH': 'String',
'CURRENT_PROJECT_VERSION': 'String',
'CURRENT_VARIANT': 'String',
'DEAD_CODE_STRIPPING': 'Boolean',
'DEBUG_INFORMATION_FORMAT': 'String',
'DEFAULT_COMPILER': 'String',
'DEFAULT_KEXT_INSTALL_PATH': 'String',
'DEFAULT_SSE_LEVEL_3_NO': 'String',
'DEFAULT_SSE_LEVEL_3_YES': 'String',
'DEFAULT_SSE_LEVEL_3_SUPPLEMENTAL_NO': 'String',
'DEFAULT_SSE_LEVEL_3_SUPPLEMENTAL_YES': 'String',
'DEFAULT_SSE_LEVEL_4_1_NO': 'String',
'DEFAULT_SSE_LEVEL_4_1_YES': 'String',
'DEFAULT_SSE_LEVEL_4_2_NO': 'String',
'DEFAULT_SSE_LEVEL_4_2_YES': 'String',
'DEFINES_MODULE': 'Boolean',
'DEPLOYMENT_LOCATION': 'Boolean',
'DEPLOYMENT_POSTPROCESSING': 'Boolean',
'DERIVED_FILE_DIR': 'Path',
'DERIVED_FILES_DIR': 'Path',
'DERIVED_SOURCES_DIR': 'Path',
'DEVELOPER_APPLICATIONS_DIR': 'Path',
'DEVELOPER_BIN_DIR': 'Path',
'DEVELOPER_DIR': 'Path',
'DEVELOPER_FRAMEWORKS_DIR': 'Path',
'DEVELOPER_FRAMEWORKS_DIR_QUOTED': 'Path',
'DEVELOPER_LIBRARY_DIR': 'Path',
'DEVELOPER_SDK_DIR': 'Path',
'DEVELOPER_TOOLS_DIR': 'Path',
'DEVELOPER_USR_DIR': 'Path',
'DSTROOT': 'Path',
'DT_TOOLCHAIN_DIR': 'Path',
'DYLIB_COMPATIBILITY_VERSION': 'String',
'DYLIB_CURRENT_VERSION': 'String',
'DYLIB_INSTALL_NAME_BASE': 'StringList',
'EFFECTIVE_PLATFORM_NAME': 'String',
'EMBEDDED_CONTENT_CONTAINS_SWIFT': 'Boolean',
'EMBEDDED_PROFILE_NAME': 'String',
'ENABLE_APPLE_KEXT_CODE_GENERATION': 'Boolean',
'ENABLE_HEADER_DEPENDENCIES': 'Boolean',
'ENABLE_NS_ASSERTIONS': 'Boolean',
'ENABLE_STRICT_OBJC_MSGSEND': 'Boolean',
'EXCLUDED_INSTALLSRC_SUBDIRECTORY_PATTERNS': 'StringList',
'EXCLUDED_RECURSIVE_SEARCH_PATH_SUBDIRECTORIES': 'StringList',
'EXECUTABLE_EXTENSION': 'String',
'EXECUTABLE_PREFIX': 'String',
'EXECUTABLE_SUFFIX': 'String',
'EXECUTABLE_VARIANT_SUFFIX': 'String',
'EXPORTED_SYMBOLS_FILE': 'Path',
'FILE_LIST': 'Path',
'FRAMEWORK_SEARCH_PATHS': 'PathList',
'FRAMEWORK_VERSION': 'String',
'GCC3_VERSION': 'String',
'GCC_CHAR_IS_UNSIGNED_CHAR': 'Boolean',
'GCC_CW_ASM_SYNTAX': 'Boolean',
'GCC_C_LANGUAGE_STANDARD': 'Enumeration',
'GCC_DEBUG_INFORMATION_FORMAT': 'Enumeration',
'GCC_DYNAMIC_NO_PIC': 'Boolean',
'GCC_ENABLE_ASM_KEYWORD': 'Boolean',
'GCC_ENABLE_BUILTIN_FUNCTIONS': 'Boolean',
'GCC_ENABLE_CPP_EXCEPTIONS': 'Boolean',
'GCC_ENABLE_CPP_RTTI': 'Boolean',
'GCC_ENABLE_EXCEPTIONS': 'Boolean',
'GCC_ENABLE_FLOATING_POINT_LIBRARY_CALLS': 'Boolean',
'GCC_ENABLE_KERNEL_DEVELOPMENT': 'Boolean',
'GCC_ENABLE_OBJC_EXCEPTIONS': 'Boolean',
'GCC_ENABLE_OBJC_GC': 'Enumeration',
'GCC_ENABLE_PASCAL_STRINGS': 'Boolean',
'GCC_ENABLE_SSE3_EXTENSIONS': 'Boolean',
'GCC_ENABLE_SSE41_EXTENSIONS': 'Boolean',
'GCC_ENABLE_SSE42_EXTENSIONS': 'Boolean',
'GCC_ENABLE_SUPPLEMENTAL_SSE3_INSTRUCTIONS': 'Boolean',
'GCC_ENABLE_TRIGRAPHS': 'Boolean',
'GCC_FAST_MATH': 'Boolean',
'GCC_GENERATE_DEBUGGING_SYMBOLS': 'Boolean',
'GCC_GENERATE_TEST_COVERAGE_FILES': 'Boolean',
'GCC_INCREASE_PRECOMPILED_HEADER_SHARING': 'Boolean',
'GCC_INLINES_ARE_PRIVATE_EXTERN': 'Boolean',
'GCC_INPUT_FILETYPE': 'Enumeration',
'GCC_INSTRUMENT_PROGRAM_FLOW_ARCS': 'Boolean',
'GCC_LINK_WITH_DYNAMIC_LIBRARIES': 'Boolean',
'GCC_MACOSX_VERSION_MIN': 'String',
'GCC_NO_COMMON_BLOCKS': 'Boolean',
'GCC_OBJC_ABI_VERSION': 'Enumeration',
'GCC_OBJC_LEGACY_DISPATCH': 'Boolean',
'GCC_OPERATION': 'Enumeration',
'GCC_OPTIMIZATION_LEVEL': 'Enumeration',
'GCC_PFE_FILE_C_DIALECTS': 'StringList',
'GCC_PRECOMPILE_PREFIX_HEADER': 'Boolean',
'GCC_PREFIX_HEADER': 'String',
'GCC_PREPROCESSOR_DEFINITIONS': 'StringList',
'GCC_PREPROCESSOR_DEFINITIONS_NOT_USED_IN_PRECOMPS': 'StringList',
'GCC_PRODUCT_TYPE_PREPROCESSOR_DEFINITIONS': 'StringList',
'GCC_REUSE_STRINGS': 'Boolean',
'GCC_SHORT_ENUMS': 'Boolean',
'GCC_STRICT_ALIASING': 'Boolean',
'GCC_SYMBOLS_PRIVATE_EXTERN': 'Boolean',
'GCC_THREADSAFE_STATICS': 'Boolean',
'GCC_TREAT_IMPLICIT_FUNCTION_DECLARATIONS_AS_ERRORS': 'Boolean',
'GCC_TREAT_INCOMPATIBLE_POINTER_TYPE_WARNINGS_AS_ERRORS': 'Boolean',
'GCC_TREAT_WARNINGS_AS_ERRORS': 'Boolean',
'GCC_UNROLL_LOOPS': 'Boolean',
'GCC_USE_GCC3_PFE_SUPPORT': 'Boolean',
'GCC_USE_STANDARD_INCLUDE_SEARCHING': 'Boolean',
'GCC_VERSION': 'String',
'GCC_WARN_64_TO_32_BIT_CONVERSION': 'Boolean',
'GCC_WARN_ABOUT_DEPRECATED_FUNCTIONS': 'Boolean',
'GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO': 'Boolean',
'GCC_WARN_ABOUT_MISSING_FIELD_INITIALIZERS': 'Boolean',
'GCC_WARN_ABOUT_MISSING_NEWLINE': 'Boolean',
'GCC_WARN_ABOUT_MISSING_PROTOTYPES': 'Boolean',
'GCC_WARN_ABOUT_POINTER_SIGNEDNESS': 'Boolean',
'GCC_WARN_ABOUT_RETURN_TYPE': 'Enumeration',
'GCC_WARN_ALLOW_INCOMPLETE_PROTOCOL': 'Boolean',
'GCC_WARN_CHECK_SWITCH_STATEMENTS': 'Boolean',
'GCC_WARN_FOUR_CHARACTER_CONSTANTS': 'Boolean',
'GCC_WARN_HIDDEN_VIRTUAL_FUNCTIONS': 'Boolean',
'GCC_WARN_INHIBIT_ALL_WARNINGS': 'Boolean',
'GCC_WARN_INITIALIZER_NOT_FULLY_BRACKETED': 'Boolean',
'GCC_WARN_MISSING_PARENTHESES': 'Boolean',
'GCC_WARN_MULTIPLE_DEFINITION_TYPES_FOR_SELECTOR': 'Boolean',
'GCC_WARN_NON_VIRTUAL_DESTRUCTOR': 'Boolean',
'GCC_WARN_PEDANTIC': 'Boolean',
'GCC_WARN_SHADOW': 'Boolean',
'GCC_WARN_SIGN_COMPARE': 'Boolean',
'GCC_WARN_STRICT_SELECTOR_MATCH': 'Boolean',
'GCC_WARN_TYPECHECK_CALLS_TO_PRINTF': 'Boolean',
'GCC_WARN_UNDECLARED_SELECTOR': 'Boolean',
'GCC_WARN_UNINITIALIZED_AUTOS': 'Enumeration',
'GCC_WARN_UNKNOWN_PRAGMAS': 'Boolean',
'GCC_WARN_UNUSED_FUNCTION': 'Boolean',
'GCC_WARN_UNUSED_LABEL': 'Boolean',
'GCC_WARN_UNUSED_PARAMETER': 'Boolean',
'GCC_WARN_UNUSED_VALUE': 'Boolean',
'GCC_WARN_UNUSED_VARIABLE': 'Boolean',
'GENERATE_MASTER_OBJECT_FILE': 'Boolean',
'GENERATE_PKGINFO_FILE': 'Boolean',
'GENERATE_PROFILING_CODE': 'Boolean',
'GID': 'String',
'GROUP': 'String',
'HEADERMAP_FILE_FORMAT': 'Enumeration',
'HEADERMAP_INCLUDES_FLAT_ENTRIES_FOR_TARGET_BEING_BUILT': 'Boolean',
'HEADERMAP_INCLUDES_FRAMEWORK_ENTRIES_FOR_ALL_PRODUCT_TYPES': 'Boolean',
'HEADERMAP_INCLUDES_NONPUBLIC_NONPRIVATE_HEADERS': 'Boolean',
'HEADERMAP_INCLUDES_PROJECT_HEADERS': 'Boolean',
'HEADERMAP_USES_FRAMEWORK_PREFIX_ENTRIES': 'Boolean',
'HEADERMAP_USES_VFS': 'Boolean',
'HEADER_SEARCH_PATHS': 'PathList',
'IBC_COMPILER_AUTO_ACTIVATE_CUSTOM_FONTS': 'Boolean',
'IBC_ERRORS': 'Boolean',
'IBC_FLATTEN_NIBS': 'Boolean',
'IBC_NOTICES': 'Boolean',
'IBC_OTHER_FLAGS': 'StringList',
'IBC_WARNINGS': 'Boolean',
'ICONV': 'Path',
'INCLUDED_RECURSIVE_SEARCH_PATH_SUBDIRECTORIES': 'StringList',
'INFOPLIST_EXPAND_BUILD_SETTINGS': 'Boolean',
'INFOPLIST_FILE': 'Path',
'INFOPLIST_OTHER_PREPROCESSOR_FLAGS': 'StringList',
'INFOPLIST_OUTPUT_FORMAT': 'Enumeration',
'INFOPLIST_PREFIX_HEADER': 'String',
'INFOPLIST_PREPROCESS': 'Boolean',
'INFOPLIST_PREPROCESSOR_DEFINITIONS': 'StringList',
'INIT_ROUTINE': 'String',
'INSTALL_DIR': 'Path',
'INSTALL_GROUP': 'String',
'INSTALL_MODE_FLAG': 'String',
'INSTALL_OWNER': 'String',
'INSTALL_PATH': 'Path',
'INSTALL_ROOT': 'Path',
'IPHONEOS_DEPLOYMENT_TARGET': 'String',
'JAVAC_DEFAULT_FLAGS': 'String',
'JAVA_APP_STUB': 'Path',
'JAVA_ARCHIVE_CLASSES': 'Boolean',
'JAVA_ARCHIVE_TYPE': 'Enumeration',
'JAVA_COMPILER': 'Path',
'JAVA_FRAMEWORK_RESOURCES_DIRS': 'PathList',
'JAVA_JAR_FLAGS': 'StringList',
'JAVA_SOURCE_SUBDIR': 'Path',
'JAVA_USE_DEPENDENCIES': 'Boolean',
'JAVA_ZIP_FLAGS': 'StringList',
'KEEP_PRIVATE_EXTERNS': 'Boolean',
'LD_DEPENDENCY_INFO_FILE': 'Path',
'LD_DYLIB_INSTALL_NAME': 'Path',
'LD_GENERATE_MAP_FILE': 'Boolean',
'LD_MAP_FILE_PATH': 'Path',
'LD_NO_PIE': 'Boolean',
'LD_QUOTE_LINKER_ARGUMENTS_FOR_COMPILER_DRIVER': 'Boolean',
'LD_RUNPATH_SEARCH_PATHS': 'StringList',
'LEGACY_DEVELOPER_DIR': 'Path',
'LEX': 'Path',
'LEXFLAGS': 'StringList',
'LIBRARY_FLAG_NOSPACE': 'Boolean',
'LIBRARY_FLAG_PREFIX': 'String',
'LIBRARY_KEXT_INSTALL_PATH': 'Path',
'LIBRARY_SEARCH_PATHS': 'PathList',
'LINKER_DISPLAYS_MANGLED_NAMES': 'Boolean',
'LINK_WITH_STANDARD_LIBRARIES': 'Boolean',
'LLVM_IMPLICIT_AGGRESSIVE_OPTIMIZATIONS': 'Boolean',
'LLVM_LTO': 'Boolean',
'LLVM_OPTIMIZATION_LEVEL_VAL_0': 'Boolean',
'LLVM_OPTIMIZATION_LEVEL_VAL_1': 'Boolean',
'LLVM_OPTIMIZATION_LEVEL_VAL_2': 'Boolean',
'LLVM_OPTIMIZATION_LEVEL_VAL_3': 'Boolean',
'LLVM_OPTIMIZATION_LEVEL_VAL_fast': 'Boolean',
'LLVM_OPTIMIZATION_LEVEL_VAL_s': 'Boolean',
'LOCAL_ADMIN_APPS_DIR': 'Path',
'LOCAL_APPS_DIR': 'Path',
'LOCAL_DEVELOPER_DIR': 'Path',
'LOCAL_LIBRARY_DIR': 'Path',
'MACH_O_TYPE': 'Enumeration',
'MACOSX_DEPLOYMENT_TARGET': 'Enumeration',
'MODULEMAP_FILE': 'String',
'MODULEMAP_PRIVATE_FILE': 'String',
'MODULE_CACHE_DIR': 'Path',
'MODULE_NAME': 'String',
'MODULE_START': 'String',
'MODULE_STOP': 'String',
'MODULE_VERSION': 'String',
'MTL_ENABLE_DEBUG_INFO': 'Boolean',
'NATIVE_ARCH': 'String',
'OBJC_ABI_VERSION': 'String',
'OBJECT_FILE_DIR': 'Path',
'OBJECT_FILE_DIR_': 'Path',
'OBJROOT': 'Path',
'ONLY_ACTIVE_ARCH': 'Boolean',
'ORDER_FILE': 'String',
'OS': 'String',
'OSAC': 'Path',
'OSACOMPILE_EXECUTE_ONLY': 'Boolean',
'OTHER_CFLAGS': 'StringList',
'OTHER_CODE_SIGN_FLAGS': 'StringList',
'OTHER_CPLUSPLUSFLAGS': 'StringList',
'OTHER_LDFLAGS': 'StringList',
'OTHER_LIBTOOLFLAGS': 'StringList',
'OTHER_OSACOMPILEFLAGS': 'String',
'PATH_PREFIXES_EXCLUDED_FROM_HEADER_DEPENDENCIES': 'PathList',
'PLATFORM_DEVELOPER_APPLICATIONS_DIR': 'Path',
'PLATFORM_DEVELOPER_BIN_DIR': 'Path',
'PLATFORM_DEVELOPER_LIBRARY_DIR': 'Path',
'PLATFORM_DEVELOPER_SDK_DIR': 'Path',
'PLATFORM_DEVELOPER_TOOLS_DIR': 'String',
'PLATFORM_DEVELOPER_USR_DIR': 'Path',
'PLATFORM_DIR': 'Path',
'PLATFORM_NAME': 'String',
'PLATFORM_PREFERRED_ARCH': 'String',
'PLATFORM_PRODUCT_BUILD_VERSION': 'String',
'PLIST_FILE_OUTPUT_FORMAT': 'Enumeration',
'PRECOMPILE_PREFIX_HEADER': 'Boolean',
'PRECOMPS_INCLUDE_HEADERS_FROM_BUILT_PRODUCTS_DIR': 'Boolean',
'PREFIX_HEADER': 'Path',
'PRELINK_FLAGS': 'StringList',
'PRELINK_LIBS': 'StringList',
'PRESERVE_DEAD_CODE_INITS_AND_TERMS': 'Boolean',
'PRIVATE_HEADERS_FOLDER_PATH': 'Path',
'PRODUCT_DEFINITION_PLIST': 'String',
'PRODUCT_MODULE_NAME': 'String',
'PRODUCT_NAME': 'String',
'PROJECT': 'String',
'PROJECT_DERIVED_FILE_DIR': 'Path',
'PROJECT_DIR': 'Path',
'PROJECT_FILE_PATH': 'Path',
'PROJECT_NAME': 'String',
'PROJECT_TEMP_DIR': 'Path',
'PROJECT_TEMP_ROOT': 'Path',
'PROVISIONING_PROFILE': 'String',
'PUBLIC_HEADERS_FOLDER_PATH': 'Path',
'REMOVE_CVS_FROM_RESOURCES': 'Boolean',
'REMOVE_GIT_FROM_RESOURCES': 'Boolean',
'REMOVE_HEADERS_FROM_EMBEDDED_BUNDLES': 'Boolean',
'REMOVE_HG_FROM_RESOURCES': 'Boolean',
'REMOVE_SVN_FROM_RESOURCES': 'Boolean',
'RETAIN_RAW_BINARIES': 'Boolean',
'REZ_COLLECTOR_DIR': 'Path',
'REZ_OBJECTS_DIR': 'Path',
'REZ_SEARCH_PATHS': 'String',
'RUN_CLANG_STATIC_ANALYZER': 'Boolean',
'SCAN_ALL_SOURCE_FILES_FOR_INCLUDES': 'Boolean',
'SDKROOT': 'String',
'SDK_DIR': 'Path',
'SDK_NAME': 'String',
'SDK_PRODUCT_BUILD_VERSION': 'String',
'SECTORDER_FLAGS': 'StringList',
'SED': 'Path',
'SEPARATE_STRIP': 'Boolean',
'SEPARATE_SYMBOL_EDIT': 'Boolean',
'SHARED_PRECOMPS_DIR': 'Path',
'SKIP_INSTALL': 'Boolean',
'SOURCE_ROOT': 'Path',
'SRCROOT': 'Path',
'STRINGS_FILE_OUTPUT_ENCODING': 'String',
'STRIPFLAGS': 'Boolean',
'STRIP_INSTALLED_PRODUCT': 'Boolean',
'STRIP_STYLE': 'Enumeration',
'SUPPORTED_PLATFORMS': 'StringList',
'SWIFT_OPTIMIZATION_LEVEL': 'Enumeration',
'SYMROOT': 'Path',
'SYSTEM_ADMIN_APPS_DIR': 'Path',
'SYSTEM_APPS_DIR': 'Path',
'SYSTEM_CORE_SERVICES_DIR': 'Path',
'SYSTEM_DEMOS_DIR': 'Path',
'SYSTEM_DEVELOPER_APPS_DIR': 'Path',
'SYSTEM_DEVELOPER_BIN_DIR': 'Path',
'SYSTEM_DEVELOPER_DEMOS_DIR': 'Path',
'SYSTEM_DEVELOPER_DIR': 'Path',
'SYSTEM_DEVELOPER_DOC_DIR': 'Path',
'SYSTEM_DEVELOPER_GRAPHICS_TOOLS_DIR': 'Path',
'SYSTEM_DEVELOPER_JAVA_TOOLS_DIR': 'Path',
'SYSTEM_DEVELOPER_PERFORMANCE_TOOLS_DIR': 'Path',
'SYSTEM_DEVELOPER_RELEASENOTES_DIR': 'Path',
'SYSTEM_DEVELOPER_TOOLS': 'Path',
'SYSTEM_DEVELOPER_TOOLS_DOC_DIR': 'Path',
'SYSTEM_DEVELOPER_TOOLS_RELEASENOTES_DIR': 'Path',
'SYSTEM_DEVELOPER_USR_DIR': 'Path',
'SYSTEM_DEVELOPER_UTILITIES_DIR': 'Path',
'SYSTEM_DOCUMENTATION_DIR': 'Path',
'SYSTEM_KEXT_INSTALL_PATH': 'Path',
'SYSTEM_LIBRARY_DIR': 'Path',
'TARGETNAME': 'String',
'TARGET_BUILD_DIR': 'Path',
'TARGET_NAME': 'String',
'TARGET_TEMP_DIR': 'Path',
'TARGETED_DEVICE_FAMILY': 'String',
'TEMP_DIR': 'Path',
'TEMP_FILES_DIR': 'Path',
'TEMP_FILE_DIR': 'Path',
'TEMP_ROOT': 'Path',
'TEST_HOST': 'String',
'TREAT_MISSING_BASELINES_AS_TEST_FAILURES': 'Boolean',
'UID': 'String',
'UNEXPORTED_SYMBOLS_FILE': 'String',
'UNSTRIPPED_PRODUCT': 'Boolean',
'USER': 'String',
'USER_APPS_DIR': 'Path',
'USER_HEADER_SEARCH_PATHS': 'PathList',
'USER_LIBRARY_DIR': 'Path',
'USE_HEADERMAP': 'Boolean',
'USE_HEADER_SYMLINKS': 'Boolean',
'VALIDATE_PRODUCT': 'Boolean',
'VALID_ARCHS': 'StringList',
'VERSIONING_SYSTEM': 'String',
'VERSION_INFO_BUILDER': 'String',
'VERSION_INFO_EXPORT_DECL': 'String',
'VERSION_INFO_FILE': 'String',
'VERSION_INFO_PREFIX': 'String',
'VERSION_INFO_SUFFIX': 'String',
'WARNING_CFLAGS': 'StringList',
'WARNING_LDFLAGS': 'StringList',
'WRAPPER_EXTENSION': 'String',
'XCODE_APP_SUPPORT_DIR': 'Path',
'XCODE_PRODUCT_BUILD_VERSION': 'String',
'XCODE_VERSION_ACTUAL': 'String',
'XCODE_VERSION_MAJOR': 'String',
'XCODE_VERSION_MINOR': 'String',
'YACC': 'Path',
'YACCFLAGS': 'StringList',
}
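
# Minimal usage sketch (hypothetical; coerce_setting and the sample values are
# not part of the table above): look up a build setting's declared kind and
# coerce a raw string value accordingly.
def coerce_setting(name, raw_value):
    kind = kENVIRONMENT_LOOKUP.get(name, 'String')
    if kind == 'Boolean':
        return raw_value.upper() in ('YES', 'TRUE', '1')
    if kind == 'Integer':
        return int(raw_value)
    if kind in ('StringList', 'PathList'):
        return raw_value.split()
    # 'String', 'Path', and 'Enumeration' values stay as plain strings.
    return raw_value

# Example: coerce_setting('ONLY_ACTIVE_ARCH', 'YES') returns True.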
|
|
# -*- coding: utf-8 -*-
""" Test for USA address parser """
import re
import pytest
from pyap import utils
from pyap.packages import six
import pyap.source_US.data as data_us
def execute_matching_test(input, expected, pattern):
match = utils.match(pattern, input, re.VERBOSE)
is_found = match is not None
if expected:
assert is_found == expected and match.group(0) == input
else:
"""we check that:
        - the input should not match our regex, or
        - if the regex does match something, the match is only partial
"""
assert (is_found == expected) or (match.group(0) != input)
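# Example: execute_matching_test("Street ", True, data_us.street_type) (as in the
# parametrized cases below) asserts that the pattern matches the entire input;
# with expected=False it asserts that the pattern either does not match at all
# or matches only part of the input.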
@pytest.mark.parametrize("input,expected", [
# positive assertions
("ZERO ", True),
("one ", True),
("two ", True),
("Three ", True),
("FoUr ", True),
("FivE ", True),
("six ", True),
("SEvEn ", True),
("Eight ", True),
("Nine ", True),
# negative assertions
("Nidnes", False),
("One", False),
("two", False),
("onetwothree ", False),
])
def test_zero_to_nine(input, expected):
''' test string match for zero_to_nine '''
execute_matching_test(input, expected, data_us.zero_to_nine)
@pytest.mark.parametrize("input,expected", [
# positive assertions
("tEN ", True),
("TWENTY ", True),
("tHirtY ", True),
("FOUrty ", True),
("fifty ", True),
("sixty ", True),
("seventy ", True),
("eighty ", True),
("NINety ", True),
# negative assertions
("ten", False),
("twenTY", False),
("sixtysixsty ", False),
("one twenty ", False),
])
def test_ten_to_ninety(input, expected):
''' test string match for ten_to_ninety '''
execute_matching_test(input, expected, data_us.ten_to_ninety)
@pytest.mark.parametrize("input,expected", [
# positive assertions
("Hundred ", True),
("HuNdred ", True),
# negative assertions
("HuNDdred", False),
("HuNDdred hundred ", False),
])
def test_hundred(input, expected):
''' tests string match for a hundred '''
execute_matching_test(input, expected, data_us.hundred)
@pytest.mark.parametrize("input,expected", [
# positive assertions
("Thousand ", True),
("thOUSAnd ", True),
# negative assertions
("thousand", False),
("THoussand ", False),
("THoussand", False),
("THOUssand THoussand ", False),
])
def test_thousand(input, expected):
''' tests string match for a thousand '''
execute_matching_test(input, expected, data_us.thousand)
@pytest.mark.parametrize("input,expected", [
# positive assertions (words)
("One Thousand And Fifty Nine ", True),
("Two hundred and fifty ", True),
("Three hundred four ", True),
("Thirty seven ", True),
("FIFTY One ", True),
("Three hundred Ten ", True),
# positive assertions (numbers)
("1 ", True),
("15 ", True),
("44 ", True),
("256 ", True),
("256 ", True),
("1256 ", True),
("32457 ", True),
# negative assertions (words)
("ONE THousszz22and FIFTY and four onde", False),
("ONE one oNe and onE Three", False),
# negative assertions (numbers)
("536233", False),
("111111", False),
("1111ss11", False),
("123 456", False),
])
def test_street_number(input, expected):
''' tests string match for a street number '''
execute_matching_test(input, expected, data_us.street_number)
@pytest.mark.parametrize("input,expected", [
# positive assertions
("Northeast Kentucky Industrial ", True),
("One ", True),
("First ", True),
("Ave 123 ", True),
("Northeast 5 ", True),
# negative assertions
("Northeast Kentucky Industrial Maple ", False),
("a", False),
("ab", False),
])
def test_street_name(input, expected):
    ''' tests string match for a street name '''
execute_matching_test(input, expected, data_us.street_name)
@pytest.mark.parametrize("input,expected", [
# positive assertions
("N. ", True),
("N ", True),
("S ", True),
("West ", True),
("eASt ", True),
("NW ", True),
("SE ", True),
# negative assertions
("NW.", False),
("NW. ", False),
("NS ", False),
("EW ", False),
])
def test_post_direction(input, expected):
''' tests string match for a post_direction '''
execute_matching_test(input, expected, data_us.post_direction)
@pytest.mark.parametrize("input,expected", [
# positive assertions
("Street ", True),
("St. ", True),
("St.", True),
("Blvd.", True),
("Blvd. ", True),
("LN ", True),
("RD", True),
("Cir", True),
("Highway ", True),
("Hwy ", True),
("Ct", True),
("Sq.", True),
("LP. ", True),
("LP. (Route A1 )", True),
("Street route 5 ", True),
("blvd", True),
("Estate", True),
("Manor", True),
# negative assertions
# TODO
])
def test_street_type(input, expected):
    ''' tests string match for a street type '''
execute_matching_test(input, expected, data_us.street_type)
@pytest.mark.parametrize("input,expected", [
# positive assertions
("floor 3 ", True),
("floor 11 ", True),
("floor 15 ", True),
("1st floor ", True),
("2nd floor ", True),
("15th floor ", True),
("16th. floor ", True),
# negative assertions
("16th.floor ", False),
("1stfloor ", False),
])
def test_floor(input, expected):
''' tests string match for a floor '''
execute_matching_test(input, expected, data_us.floor)
@pytest.mark.parametrize("input,expected", [
# positive assertions
("bldg m ", True),
("Building F ", True),
("bldg 2 ", True),
("building 3 ", True),
("building 100 ", True),
("building 1000 ", True),
("Building ", True),
("building one ", True),
("Building three ", True),
# negative assertions
("bldg", False),
("bldgm", False),
("bldg100 ", False),
("building 10000 ", False),
])
def test_building(input, expected):
''' tests string match for a building '''
execute_matching_test(input, expected, data_us.building)
@pytest.mark.parametrize("input,expected", [
# positive assertions
("suite 900 ", True),
("Suite #2 ", True),
("suite #218 ", True),
("suite J7 ", True),
("suite 102A ", True),
("suite a&b ", True),
("Suite J#200 ", True),
("suite 710-327 ", True),
("Suite A ", True),
("ste A ", True),
("Ste 101 ", True),
("ste 502b ", True),
("ste 14-15 ", True),
("ste E ", True),
("ste 9E ", True),
("Suite 1800 ", True),
("Apt 1B ", True),
("Rm. 52 ", True),
("#2b ", True),
    # negative assertions
("suite900 ", False),
("Suite#2", False),
("suite218 ", False),
])
def test_occupancy(input, expected):
    ''' tests string match for an occupancy (suite, apartment, room, etc.) '''
execute_matching_test(input, expected, data_us.occupancy)
@pytest.mark.parametrize("input,expected", [
# positive assertions
("po box 108", True),
("Po Box 53485", True),
("P.O. box 119", True),
("PO box 1070", True),
# negative assertions
("po box108 ", False),
("PoBox53485 ", False),
("P.O. box119", False),
("POb ox1070 ", False),
])
def test_po_box_positive(input, expected):
''' tests exact string match for a po box '''
execute_matching_test(input, expected, data_us.po_box)
@pytest.mark.parametrize("input,expected", [
# positive assertions
("9652 Loiret Boulevard", True),
("101 MacIntosh Boulevard", True),
("1 West Hegeler Lane", True),
("1270 Leeds Avenue", True),
("85-1190 Ranchview Rd. NW ", True),
("62 Portland Road (Route 1)", True),
("200 N. Pine Avenue Suite 514", True),
("200 S. Alloy Drive", True),
("Two Hundred S. Alloy Drive", True),
("Two Hundred South Alloy Drive", True),
("Two Hundred South Alloy Dr.", True),
("11001 Fondren Rd,", True),
("9606 North Mopac Expressway Suite 500", True),
("9692 East Arapahoe Road,", True),
("9 Grand Avenue, Suite 2", True),
("9 Grand Avenue Building 2, Suite 2", True),
("9 Grand Avenue Building 2, Suite 2A", True),
("233 Richmond Highway Suite 1800", True),
("354 Eisenhower Parkway P.O. Box 472", True),
("6645 N Ensign St", True),
("1200 Old Fairhaven Pkwy Apt 106", True),
("1659 Scott Blvd Ste 26", True),
("377 Fisher Rd Ste C", True),
("1833 Stearman Ave", True),
("1737 S Lumpkin St Ste B", True),
("101 N Court Sq Ste 16", True),
("1790 Yardley Langhorne Rd, Suite #205", True),
("280 West Main Street", True),
("701 Tennessee Walk", True),
("7457 Harwin Dr", True),
("700 Davis Avenue", True),
("1 W 47th St", True),
("832 Seward St", True),
("2740 Timber Ridge Lane", True),
("810 E Western Ave", True),
("6223 Richmond Ave Ste 105", True),
("400 Middle Street", True),
("81 N Main St", True),
("3705 West Memorial Road", True),
("4911 Matterhorn Dr", True),
("5830 Yahl Street, #2b", True),
("9400 Doliver Dr Apt 13", True),
("10701 Stirling Road", True),
("1865 Corporate Dr Ste 225", True),
("80 Beaman Rd", True),
("9691 Spratley Ave", True),
("10835 New Haven Rd NW ", True),
("320 W Broussard Rd", True),
("9001 Any Old Way", True),
("8967 Market St.", True),
("3724 Oxford Blvd.", True),
("901 Rainier Ave S ", True),
])
def test_full_street_positive(input, expected):
''' tests exact string match for a full street '''
execute_matching_test(input, expected, data_us.full_street)
@pytest.mark.parametrize("input,expected", [
# positive assertions
("0 OLD MILL RD, Maynard, MA 01754", True),
("103 Morgan Lane, Suite 102 Plainsboro, NJ 08536", True),
("3409 16th St Metairie, LA 70002", True),
("1505 NW 14th Street Miami, FL 33125", True),
("01 Main Rd. Newfield, NJ", True),
("28 Gorgo Lane Newfield, NJ", True),
("1720 HARDING HWY NEWFIELD, NJ", True),
("4409 N DELSEA DR NEWFIELD, NJ", True),
("742 FORSYTHIA DR NEWFIELD, NJ", True),
("9 N EAST BLVD NEWFIELD, NJ 10000", True),
("1640 Harding Hwy Newfield, NJ", True),
("1720 Harding Highway NEWFIELD, NJ", True),
("1014 CATAWBA AVE NEWFIELD, NJ", True),
("11 ARCH AVE NEWFIELD, NJ", True),
("133 TAYLOR RD NEWFIELD, NJ", True),
("4409 N Delsea Drive Newfield, NJ", True),
("8 TAYLOR RD NEWFIELD, NJ", True),
("28 GORGO LN NEWFIELD, NJ", True),
("900 COLUMBIA AVE. NEWFIELD, NJ", True),
("3201 MAIN RD NEWFIELD, NJ", True),
("4421 N DELSEA DR NEWFIELD, NJ", True),
("742 Forsythia Drive Newfield, NJ", True),
("1450 E. Chestnut Avenue, Vineland NJ,", True),
("50 Harry S Truman Parkway Annapolis, MD 21401", True),
("420 Crompton Street Charlotte , North Carolina 28273", True),
("204 East 3rd Ave Cheyenne, WY 82001", True),
("1806 Dominion Way Ste B Colorado Spgs, CO 80918-8409", True),
("2600 South Shore Blvd Ste. 300 League City, TX 77573", True),
("2675 Antler Drive Carson City, NV 89701-1451", True),
("3719 Lockwood Dr., Houston, TX 77026", True),
("154 Grand Street New York, NY 10013", True),
("3655 Torrance Blvd Suite 230 Torrance CA 90503", True),
("800 Sixth Ave #31A New York, NY 10001", True),
("8861 Research Drive, Ste. 200, Irvine, CA 92618", True),
("317 N. Mission St. Ste. 200 Wenatchee, WA 98801", True),
("2709 Bickford Avenue, Suite A Snohomish, WA 98290", True),
("7307 N. Division Street, Suite 102 Spokane, WA 99208", True),
("1530 South Union Avenue, Suite 7 Tacoma, WA 98405", True),
("3131 Smokey Point Drive, Suite 14 A Arlington, WA 98223", True),
("1603 Grove Street Marysville, WA 98270", True),
("15701 E. Sprague Avenue, Suite F Spokane Valley, WA 99037", True),
("18204 Bothell Everett Hwy, Suite E Bothell, WA 98012", True),
("3505 188th Street SW Lynnwood, WA 98037", True),
("3218 NE 12th Street, Suite B Renton, WA 98056", True),
("22035 SE Wax Road, Suite 5 Maple Valley, WA 98038", True),
("8861 Research Drive, Ste. 200 Irvine, CA 92618", True),
("4031 University Drive Suite 200 Fairfax, Virginia 22030", True),
("586 W. 207 St. New York, NY 10034", True),
("85 Newbury St, Boston, MA 02116", True),
("1827 Union St, San Francisco, CA 94123", True),
("1636 Main St Sarasota, FL 34236", True),
("1015 South Western Avenue, Chicago, IL 60649", True),
("510 W 7th St. Los Angeles, CA 90014", True),
("225 North Larchmont Blvd Los Angeles, CA 90004", True),
("3760 E. Tremont Ave. Throgsneck, NY 10465", True),
("8126 S. Stony Island Ave Chicago, IL 60617", True),
("68116 HEM 908 B WEST 12th St. Austin, TX 78703", True),
("546 West Colorado Street Glendale CA 91204", True),
("2210 N Halsted St, Chicago, IL 60614", True),
("4090 Westown Pkwy Ste B2 Chicago, IL 60614", True),
("7000 Peachtree Dunwoody Rd NE Bldg 7, Miami, FL, USA", True),
("98-025 Hekaha St Ste 221A, Cityville, Arizona", True),
("225 E. John Carpenter Freeway, Suite 1500 Irving, Texas 75062 U.S.A.", True),
("643 Lincoln Rd. Miami Beach, FL 33139", True),
("300 Market St. Harrisburg, PA 17101", True),
("2 Kings Hwy Shreveport, LA 71104", True),
("1500 Westlake Avenue North Suite 108 Seattle, WA 98109", True),
("840 Garrison Brooks Suite 985, New Sarah, OH 38255", True),
("840 Garrison Brooks Suite 985 New Sarah, OH 38255", True),
# negative assertions
("85 STEEL REGULAR SHAFT - NE", False),
("3 STRUCTURE WITH PE", False),
("2013 Courtesy of DONNA LUPI, PR", False),
("44 sq. ft. 000 Columbia Ave. See Remarks, Newfield, NJ 08344", False),
("7901 SILVER CONDUCTIVE HOLE FILL MA", False),
("3 THIRD PARTY LIST IN", False),
("9 STORAGE OF INDIVIDUAL IN", False),
("4 BODY WAVE MODEL MO", False),
("4060 AUTOMATIC STRAPPING MACHINE KZB-II STRAPPING MA", False),
("130 AUTOMATIC STRAPPING MACHINE CO", False),
("6060 AUTOMATIC STRAPPING MACHINE SK", False),
("500 AUTO BLISTER PACKING SEALING MA", False),
("23 ELECTRICAL COLOURED-TAPE PR", False),
("1900 TRANSISTOR ELECTROMAGNETIC INDUCTION AL", False),
("3131 DR. MATTHEW WI", False),
("ONE FOR ANY DIRECT, INDIRECT, IN", False),
("2 TRACTOR HEAD Actros MP", False),
("00 Straight Fit Jean, USA", False),
])
def test_full_address(input, expected):
''' tests exact string match for a full address '''
execute_matching_test(input, expected, data_us.full_address)
@pytest.mark.parametrize("input,expected", [
# positive assertions
("75062", True),
("15032", True),
("95130-6482", True),
# negative assertions
("1", False),
("23", False),
("456", False),
("4567", False),
("750621", False),
("95130-642", False),
("95130-64212", False),
])
def test_postal_code(input, expected):
''' test exact string match for postal code '''
execute_matching_test(input, expected, data_us.postal_code)
@pytest.mark.parametrize("input,expected", [
# positive assertions
("Montana", True),
("Nebraska", True),
("NJ", True),
("DC", True),
("PuErTO RIco", True),
("oregon", True),
])
def test_region1(input, expected):
    ''' test exact string match for a region (US state or territory) '''
execute_matching_test(input, expected, data_us.region1)
@pytest.mark.parametrize("input,expected", [
# positive assertions
("USA", True),
("U.S.A", True),
("United States", True),
])
def test_country(input, expected):
''' test exact string match for country '''
execute_matching_test(input, expected, data_us.country)
|
|
#!/usr/bin/env python
"""
The JS Shell Test Harness.
See the adjacent README.txt for more details.
"""
import os, sys, textwrap
from copy import copy
from subprocess import list2cmdline, call
from lib.results import NullTestOutput
from lib.tests import TestCase
from lib.results import ResultsSink
from lib.progressbar import ProgressBar
if (sys.platform.startswith('linux') or
sys.platform.startswith('darwin')
):
from lib.tasks_unix import run_all_tests
else:
from lib.tasks_win import run_all_tests
def run_tests(options, tests, results):
"""Run the given tests, sending raw results to the given results accumulator."""
try:
completed = run_all_tests(tests, results, options)
except KeyboardInterrupt:
completed = False
results.finish(completed)
def get_cpu_count():
"""
Guess at a reasonable parallelism count to set as the default for the
current machine and run.
"""
# Python 2.6+
try:
import multiprocessing
return multiprocessing.cpu_count()
except (ImportError,NotImplementedError):
pass
# POSIX
try:
res = int(os.sysconf('SC_NPROCESSORS_ONLN'))
if res > 0:
return res
except (AttributeError,ValueError):
pass
# Windows
try:
res = int(os.environ['NUMBER_OF_PROCESSORS'])
if res > 0:
return res
except (KeyError, ValueError):
pass
return 1
def parse_args():
"""
Parse command line arguments.
    Returns a tuple of: (options, requested_paths, excluded_paths)
        options :object: The raw OptionParser output; options.js_shell holds
            the absolute location of the shell to test with.
requested_paths :set<str>: Test paths specially requested on the CLI.
excluded_paths :set<str>: Test paths specifically excluded by the CLI.
"""
from optparse import OptionParser, OptionGroup
op = OptionParser(usage=textwrap.dedent("""
%prog [OPTIONS] JS_SHELL [TESTS]
Shell output format: [ pass | fail | timeout | skip ] progress | time
""").strip())
op.add_option('--xul-info', dest='xul_info_src',
help='config data for xulRuntime (avoids search for config/autoconf.mk)')
harness_og = OptionGroup(op, "Harness Controls", "Control how tests are run.")
harness_og.add_option('-j', '--worker-count', type=int, default=max(1, get_cpu_count()),
help='Number of tests to run in parallel (default %default)')
harness_og.add_option('-t', '--timeout', type=float, default=150.0,
                          help='Set the maximum time a test is allowed to run (in seconds).')
harness_og.add_option('-a', '--args', dest='shell_args', default='',
help='Extra args to pass to the JS shell.')
harness_og.add_option('--jitflags', default='',
help='Example: --jitflags=m,amd to run each test with -m, -a -m -d [default=%default]')
harness_og.add_option('-g', '--debug', action='store_true', help='Run a test in debugger.')
harness_og.add_option('--debugger', default='gdb -q --args', help='Debugger command.')
harness_og.add_option('--valgrind', action='store_true', help='Run tests in valgrind.')
harness_og.add_option('--valgrind-args', default='', help='Extra args to pass to valgrind.')
op.add_option_group(harness_og)
input_og = OptionGroup(op, "Inputs", "Change what tests are run.")
input_og.add_option('-f', '--file', dest='test_file', action='append',
help='Get tests from the given file.')
input_og.add_option('-x', '--exclude-file', action='append',
help='Exclude tests from the given file.')
input_og.add_option('-d', '--exclude-random', dest='random', action='store_false',
help='Exclude tests marked as "random."')
input_og.add_option('--run-skipped', action='store_true', help='Run tests marked as "skip."')
input_og.add_option('--run-only-skipped', action='store_true', help='Run only tests marked as "skip."')
input_og.add_option('--run-slow-tests', action='store_true',
help='Do not skip tests marked as "slow."')
input_og.add_option('--no-extensions', action='store_true',
help='Run only tests conforming to the ECMAScript 5 standard.')
op.add_option_group(input_og)
output_og = OptionGroup(op, "Output", "Modify the harness and tests output.")
output_og.add_option('-s', '--show-cmd', action='store_true',
help='Show exact commandline used to run each test.')
output_og.add_option('-o', '--show-output', action='store_true',
help="Print each test's output to the file given by --output-file.")
output_og.add_option('-F', '--failed-only', action='store_true',
help="If a --show-* option is given, only print output for failed tests.")
output_og.add_option('-O', '--output-file',
help='Write all output to the given file (default: stdout).')
output_og.add_option('--failure-file',
help='Write all not-passed tests to the given file.')
output_og.add_option('--no-progress', dest='hide_progress', action='store_true',
help='Do not show the progress bar.')
output_og.add_option('--tinderbox', action='store_true',
help='Use tinderbox-parseable output format.')
op.add_option_group(output_og)
special_og = OptionGroup(op, "Special", "Special modes that do not run tests.")
special_og.add_option('--make-manifests', metavar='BASE_TEST_PATH',
help='Generate reftest manifest files.')
op.add_option_group(special_og)
options, args = op.parse_args()
# Acquire the JS shell given on the command line.
options.js_shell = None
requested_paths = set()
if len(args) > 0:
options.js_shell = os.path.abspath(args[0])
requested_paths |= set(args[1:])
# If we do not have a shell, we must be in a special mode.
if options.js_shell is None and not options.make_manifests:
op.error('missing JS_SHELL argument')
# Valgrind and gdb are mutually exclusive.
if options.valgrind and options.debug:
op.error("--valgrind and --debug are mutually exclusive.")
# Fill the debugger field, as needed.
prefix = options.debugger.split() if options.debug else []
if options.valgrind:
prefix = ['valgrind'] + options.valgrind_args.split()
if os.uname()[0] == 'Darwin':
prefix.append('--dsymutil=yes')
options.show_output = True
TestCase.set_js_cmd_prefix(options.js_shell, options.shell_args.split(), prefix)
# If files with lists of tests to run were specified, add them to the
# requested tests set.
if options.test_file:
for test_file in options.test_file:
requested_paths |= set([line.strip() for line in open(test_file).readlines()])
# If files with lists of tests to exclude were specified, add them to the
# excluded tests set.
excluded_paths = set()
if options.exclude_file:
        for filename in options.exclude_file:
            fp = open(filename, 'r')
            try:
                for line in fp:
                    if line.startswith('#'): continue
                    line = line.strip()
                    if not line: continue
                    excluded_paths |= set((line,))
            finally:
                fp.close()
# Handle output redirection, if requested and relevant.
options.output_fp = sys.stdout
if options.output_file:
if not options.show_cmd:
options.show_output = True
try:
options.output_fp = open(options.output_file, 'w')
except IOError, ex:
raise SystemExit("Failed to open output file: " + str(ex))
options.show = options.show_cmd or options.show_output
# Hide the progress bar if it will get in the way of other output.
    options.hide_progress = ((options.show and
                              options.output_fp == sys.stdout) or
                             options.tinderbox or
                             not ProgressBar.conservative_isatty() or
                             options.hide_progress)
return (options, requested_paths, excluded_paths)
def parse_jitflags(op_jitflags):
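    """
    Turn the comma-separated --jitflags value (e.g. 'm,amd') into a list of
    flag lists (e.g. [['-m'], ['-a', '-m', '-d']]); each test is later run
    once with every flag list.
    """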
jitflags = [ [ '-' + flag for flag in flags ]
for flags in op_jitflags.split(',') ]
for flags in jitflags:
for flag in flags:
if flag not in ('-m', '-a', '-p', '-d', '-n'):
print('Invalid jit flag: "%s"'%flag)
sys.exit(1)
return jitflags
def load_tests(options, requested_paths, excluded_paths):
"""
    Returns a tuple: (skip_list, test_list)
skip_list: [iterable<Test>] Tests found but skipped.
test_list: [iterable<Test>] Tests found that should be run.
"""
import lib.manifest as manifest
if options.js_shell is None:
xul_tester = manifest.NullXULInfoTester()
else:
if options.xul_info_src is None:
xul_info = manifest.XULInfo.create(options.js_shell)
else:
xul_abi, xul_os, xul_debug = options.xul_info_src.split(r':')
            xul_debug = xul_debug.lower() == 'true'
xul_info = manifest.XULInfo(xul_abi, xul_os, xul_debug)
xul_tester = manifest.XULInfoTester(xul_info, options.js_shell)
test_dir = os.path.dirname(os.path.abspath(__file__))
test_list = manifest.load(test_dir, xul_tester)
skip_list = []
if options.make_manifests:
manifest.make_manifests(options.make_manifests, test_list)
sys.exit()
# Create a new test list. Apply each JIT configuration to every test.
if options.jitflags:
new_test_list = []
jitflags_list = parse_jitflags(options.jitflags)
for test in test_list:
for jitflags in jitflags_list:
tmp_test = copy(test)
tmp_test.options = copy(test.options)
tmp_test.options.extend(jitflags)
new_test_list.append(tmp_test)
test_list = new_test_list
if options.test_file:
paths = set()
for test_file in options.test_file:
paths |= set([ line.strip() for line in open(test_file).readlines()])
test_list = [ _ for _ in test_list if _.path in paths ]
if requested_paths:
def p(path):
for arg in requested_paths:
if path.find(arg) != -1:
return True
return False
test_list = [ _ for _ in test_list if p(_.path) ]
if options.exclude_file:
test_list = [_ for _ in test_list if _.path not in excluded_paths]
if options.no_extensions:
pattern = os.sep + 'extensions' + os.sep
test_list = [_ for _ in test_list if pattern not in _.path]
if not options.random:
test_list = [ _ for _ in test_list if not _.random ]
if options.run_only_skipped:
options.run_skipped = True
test_list = [ _ for _ in test_list if not _.enable ]
if not options.run_slow_tests:
test_list = [ _ for _ in test_list if not _.slow ]
if not options.run_skipped:
skip_list = [ _ for _ in test_list if not _.enable ]
test_list = [ _ for _ in test_list if _.enable ]
return skip_list, test_list
def main():
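    """Entry point: parse arguments, load the test list, then run or debug the tests."""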
options, requested_paths, excluded_paths = parse_args()
skip_list, test_list = load_tests(options, requested_paths, excluded_paths)
if not test_list:
print 'no tests selected'
return 1
test_dir = os.path.dirname(os.path.abspath(__file__))
if options.debug:
if len(test_list) > 1:
print('Multiple tests match command line arguments, debugger can only run one')
for tc in test_list:
print(' %s'%tc.path)
return 2
cmd = test_list[0].get_command(TestCase.js_cmd_prefix)
if options.show_cmd:
print list2cmdline(cmd)
if test_dir not in ('', '.'):
os.chdir(test_dir)
call(cmd)
return 0
curdir = os.getcwd()
if test_dir not in ('', '.'):
os.chdir(test_dir)
results = None
try:
results = ResultsSink(options, len(skip_list) + len(test_list))
for t in skip_list:
results.push(NullTestOutput(t))
run_tests(options, test_list, results)
finally:
os.chdir(curdir)
if results is None or not results.all_passed():
return 1
return 0
if __name__ == '__main__':
sys.exit(main())
|
|
# Uses the HID API introduced in Mac OS X version 10.5
# http://developer.apple.com/library/mac/#technotes/tn2007/tn2187.html
from ctypes import *
from ctypes import util
# Load frameworks
iokit = cdll.LoadLibrary(util.find_library('IOKit'))
cf = cdll.LoadLibrary(util.find_library('CoreFoundation'))
# Core Foundation constants
kCFStringEncodingASCII = 0x0600
kCFStringEncodingUnicode = 0x0100
kCFStringEncodingUTF8 = 0x08000100
kCFNumberIntType = 9
kCFRunLoopDefaultMode = c_void_p.in_dll(iokit, 'kCFRunLoopDefaultMode')
# IOKit constants from
# /System/Library/Frameworks/IOKit.framework/Headers/hid/IOHIDKeys.h
kIOHIDOptionsTypeNone = 0x00
kIOHIDOptionsTypeSeizeDevice = 0x01
kIOHIDElementTypeInput_Misc = 1
kIOHIDElementTypeInput_Button = 2
kIOHIDElementTypeInput_Axis = 3
kIOHIDElementTypeInput_ScanCodes = 4
kIOHIDElementTypeOutput = 129
kIOHIDElementTypeFeature = 257
kIOHIDElementTypeCollection = 513
# /System/Library/Frameworks/IOKit.framework/Headers/hid/IOHIDUsageTables.h
kHIDPage_GenericDesktop = 0x01
kHIDPage_Consumer = 0x0C
kHIDUsage_GD_SystemSleep = 0x82
kHIDUsage_GD_SystemWakeUp = 0x83
kHIDUsage_GD_SystemAppMenu = 0x86
kHIDUsage_GD_SystemMenu = 0x89
kHIDUsage_GD_SystemMenuRight = 0x8A
kHIDUsage_GD_SystemMenuLeft = 0x8B
kHIDUsage_GD_SystemMenuUp = 0x8C
kHIDUsage_GD_SystemMenuDown = 0x8D
kHIDUsage_Csmr_Menu = 0x40
kHIDUsage_Csmr_FastForward = 0xB3
kHIDUsage_Csmr_Rewind = 0xB4
kHIDUsage_Csmr_Eject = 0xB8
kHIDUsage_Csmr_Mute = 0xE2
kHIDUsage_Csmr_VolumeIncrement = 0xE9
kHIDUsage_Csmr_VolumeDecrement = 0xEA
# Setup return types for functions that return pointers.
# (Otherwise ctypes returns 32-bit int which breaks on 64-bit systems.)
# Note that you must also wrap the return value with c_void_p before
# you use it as an argument to another function, otherwise ctypes will
# automatically convert it back to a 32-bit int again.
cf.CFStringCreateWithCString.restype = c_void_p
cf.CFArrayGetValueAtIndex.restype = c_void_p
cf.CFRunLoopGetCurrent.restype = c_void_p
cf.CFRunLoopGetMain.restype = c_void_p
iokit.IOHIDDeviceGetProperty.restype = c_void_p
iokit.IOHIDDeviceCopyMatchingElements.restype = c_void_p
iokit.IOHIDValueGetElement.restype = c_void_p
iokit.IOHIDElementGetName.restype = c_void_p
iokit.IOHIDManagerCreate.restype = c_void_p
iokit.IOHIDManagerCopyDevices.restype = c_void_p
# Callback function types
HIDManagerCallback = CFUNCTYPE(None, c_void_p, c_int, c_void_p, c_void_p)
HIDDeviceCallback = CFUNCTYPE(None, c_void_p, c_int, c_void_p)
HIDDeviceValueCallback = CFUNCTYPE(None, c_void_p, c_int, c_void_p, c_void_p)
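# Note: CFUNCTYPE callback instances must stay referenced from Python for as
# long as IOKit may invoke them (the classes below keep them alive by storing
# them on self); otherwise the ctypes trampoline could be garbage collected.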
######################################################################
# Core Foundation type to Python type conversion functions
def CFSTR(text):
return c_void_p(cf.CFStringCreateWithCString(None, text.encode('utf8'), kCFStringEncodingUTF8))
def cfstring_to_string(cfstring):
length = cf.CFStringGetLength(cfstring)
size = cf.CFStringGetMaximumSizeForEncoding(length, kCFStringEncodingUTF8)
buffer = c_buffer(size + 1)
result = cf.CFStringGetCString(cfstring, buffer, len(buffer), kCFStringEncodingUTF8)
if result:
return buffer.value
def cfnumber_to_int(cfnumber):
result = c_int()
if cf.CFNumberGetValue(cfnumber, kCFNumberIntType, byref(result)):
return result.value
def cfset_to_set(cfset):
count = cf.CFSetGetCount(cfset)
buffer = (c_void_p * count)()
cf.CFSetGetValues(cfset, byref(buffer))
return set([ cftype_to_value(c_void_p(buffer[i])) for i in range(count) ])
def cfarray_to_list(cfarray):
count = cf.CFArrayGetCount(cfarray)
return [ cftype_to_value(c_void_p(cf.CFArrayGetValueAtIndex(cfarray, i)))
for i in range(count) ]
def cftype_to_value(cftype):
if not cftype:
return None
typeID = cf.CFGetTypeID(cftype)
if typeID == cf.CFStringGetTypeID():
return cfstring_to_string(cftype)
elif typeID == cf.CFNumberGetTypeID():
return cfnumber_to_int(cftype)
elif typeID == iokit.IOHIDDeviceGetTypeID():
return HIDDevice.get_device(cftype)
elif typeID == iokit.IOHIDElementGetTypeID():
return HIDDeviceElement.get_element(cftype)
else:
return cftype
######################################################################
# HID Class Wrappers
# Lookup tables cache python objects for the devices and elements so that
# we can avoid creating multiple wrapper objects for the same device.
_device_lookup = {} # IOHIDDeviceRef to python HIDDevice object
_element_lookup = {} # IOHIDElementRef to python HIDDeviceElement object
class HIDValue:
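    """Wrapper around an IOHIDValueRef: captures the timestamp, the integer
    value, and the element that produced the value."""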
def __init__(self, valueRef):
# Check that this is a valid IOHIDValue.
assert(valueRef)
assert(cf.CFGetTypeID(valueRef) == iokit.IOHIDValueGetTypeID())
self.valueRef = valueRef
self.timestamp = iokit.IOHIDValueGetTimeStamp(valueRef)
self.intvalue = iokit.IOHIDValueGetIntegerValue(valueRef)
elementRef = c_void_p(iokit.IOHIDValueGetElement(valueRef))
self.element = HIDDeviceElement.get_element(elementRef)
class HIDDevice:
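    """Wrapper around an IOHIDDeviceRef. Instances are cached in _device_lookup
    so each underlying device maps to a single Python object; device properties
    are exposed as attributes, and value/removal events are forwarded to
    registered observers."""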
@classmethod
def get_device(cls, deviceRef):
# deviceRef is a c_void_p pointing to an IOHIDDeviceRef
if deviceRef.value in _device_lookup:
return _device_lookup[deviceRef.value]
else:
device = HIDDevice(deviceRef)
return device
def __init__(self, deviceRef):
# Check that we've got a valid IOHIDDevice.
assert(deviceRef)
assert(cf.CFGetTypeID(deviceRef) == iokit.IOHIDDeviceGetTypeID())
_device_lookup[deviceRef.value] = self
self.deviceRef = deviceRef
# Set attributes from device properties.
self.transport = self.get_property("Transport")
self.vendorID = self.get_property("VendorID")
self.vendorIDSource = self.get_property("VendorIDSource")
self.productID = self.get_property("ProductID")
self.versionNumber = self.get_property("VersionNumber")
self.manufacturer = self.get_property("Manufacturer")
self.product = self.get_property("Product")
self.serialNumber = self.get_property("SerialNumber") # always returns None; apple bug?
self.locationID = self.get_property("LocationID")
self.primaryUsage = self.get_property("PrimaryUsage")
self.primaryUsagePage = self.get_property("PrimaryUsagePage")
# Populate self.elements with our device elements.
self.get_elements()
# Set up callback functions.
self.value_observers = set()
self.removal_observers = set()
self.register_removal_callback()
self.register_input_value_callback()
def dump_info(self):
for x in ('manufacturer', 'product', 'transport', 'vendorID', 'vendorIDSource', 'productID',
'versionNumber', 'serialNumber', 'locationID', 'primaryUsage', 'primaryUsagePage'):
value = getattr(self, x)
print x + ":", value
def unique_identifier(self):
# Since we can't rely on the serial number, create our own identifier.
# Can use this to find devices when they are plugged back in.
return (self.manufacturer, self.product, self.vendorID, self.productID,
self.versionNumber, self.primaryUsage, self.primaryUsagePage)
def get_property(self, name):
cfvalue = c_void_p(iokit.IOHIDDeviceGetProperty(self.deviceRef, CFSTR(name)))
return cftype_to_value(cfvalue)
def open(self, exclusive_mode=False):
if exclusive_mode: options = kIOHIDOptionsTypeSeizeDevice
else: options = kIOHIDOptionsTypeNone
return bool(iokit.IOHIDDeviceOpen(self.deviceRef, options))
def close(self):
return bool(iokit.IOHIDDeviceClose(self.deviceRef, kIOHIDOptionsTypeNone))
def schedule_with_run_loop(self):
iokit.IOHIDDeviceScheduleWithRunLoop(
self.deviceRef,
c_void_p(cf.CFRunLoopGetCurrent()),
kCFRunLoopDefaultMode)
def unschedule_from_run_loop(self):
iokit.IOHIDDeviceUnscheduleFromRunLoop(
self.deviceRef,
c_void_p(cf.CFRunLoopGetCurrent()),
kCFRunLoopDefaultMode)
def get_elements(self):
cfarray = c_void_p(iokit.IOHIDDeviceCopyMatchingElements(self.deviceRef, None, 0))
self.elements = cfarray_to_list(cfarray)
cf.CFRelease(cfarray)
# Page and usage IDs are from the HID usage tables located at
# http://www.usb.org/developers/devclass_docs/Hut1_12.pdf
def conforms_to(self, page, usage):
return bool(iokit.IOHIDDeviceConformsTo(self.deviceRef, page, usage))
def is_pointer(self): return self.conforms_to(0x01, 0x01)
def is_mouse(self): return self.conforms_to(0x01, 0x02)
def is_joystick(self): return self.conforms_to(0x01, 0x04)
def is_gamepad(self): return self.conforms_to(0x01, 0x05)
def is_keyboard(self): return self.conforms_to(0x01, 0x06)
def is_keypad(self): return self.conforms_to(0x01, 0x07)
def is_multi_axis(self): return self.conforms_to(0x01, 0x08)
def py_removal_callback(self, context, result, sender):
self = _device_lookup[sender] # avoid wonky python context issues
# Dispatch removal message to all observers.
for x in self.removal_observers:
if hasattr(x, 'device_removed'):
x.device_removed(self)
# Remove self from device lookup table.
del _device_lookup[sender]
# Remove device elements from lookup table.
for key, value in _element_lookup.items():
if value in self.elements:
del _element_lookup[key]
def register_removal_callback(self):
self.removal_callback = HIDDeviceCallback(self.py_removal_callback)
iokit.IOHIDDeviceRegisterRemovalCallback(
self.deviceRef,
self.removal_callback,
None)
def add_removal_observer(self, observer):
self.removal_observers.add(observer)
def py_value_callback(self, context, result, sender, value):
v = HIDValue(c_void_p(value))
# Dispatch value changed message to all observers.
for x in self.value_observers:
if hasattr(x, 'device_value_changed'):
x.device_value_changed(self, v)
def register_input_value_callback(self):
self.value_callback = HIDDeviceValueCallback(self.py_value_callback)
iokit.IOHIDDeviceRegisterInputValueCallback(
self.deviceRef,
self.value_callback,
None)
def add_value_observer(self, observer):
self.value_observers.add(observer)
class HIDDeviceElement:
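    """Wrapper around an IOHIDElementRef (a single control such as an axis or
    button, or a collection of controls). Instances are cached in
    _element_lookup."""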
@classmethod
def get_element(cls, elementRef):
# elementRef is a c_void_p pointing to an IOHIDDeviceElementRef
if elementRef.value in _element_lookup:
return _element_lookup[elementRef.value]
else:
element = HIDDeviceElement(elementRef)
return element
def __init__(self, elementRef):
# Check that we've been passed a valid IOHIDElement.
assert(elementRef)
assert(cf.CFGetTypeID(elementRef) == iokit.IOHIDElementGetTypeID())
_element_lookup[elementRef.value] = self
self.elementRef = elementRef
# Set element properties as attributes.
self.cookie = iokit.IOHIDElementGetCookie(elementRef)
self.type = iokit.IOHIDElementGetType(elementRef)
if self.type == kIOHIDElementTypeCollection:
self.collectionType = iokit.IOHIDElementGetCollectionType(elementRef)
else:
self.collectionType = None
self.usagePage = iokit.IOHIDElementGetUsagePage(elementRef)
self.usage = iokit.IOHIDElementGetUsage(elementRef)
self.isVirtual = bool(iokit.IOHIDElementIsVirtual(elementRef))
self.isRelative = bool(iokit.IOHIDElementIsRelative(elementRef))
self.isWrapping = bool(iokit.IOHIDElementIsWrapping(elementRef))
self.isArray = bool(iokit.IOHIDElementIsArray(elementRef))
self.isNonLinear = bool(iokit.IOHIDElementIsNonLinear(elementRef))
self.hasPreferredState = bool(iokit.IOHIDElementHasPreferredState(elementRef))
self.hasNullState = bool(iokit.IOHIDElementHasNullState(elementRef))
self.name = cftype_to_value(iokit.IOHIDElementGetName(elementRef))
self.reportID = iokit.IOHIDElementGetReportID(elementRef)
self.reportSize = iokit.IOHIDElementGetReportSize(elementRef)
self.reportCount = iokit.IOHIDElementGetReportCount(elementRef)
self.unit = iokit.IOHIDElementGetUnit(elementRef)
self.unitExponent = iokit.IOHIDElementGetUnitExponent(elementRef)
self.logicalMin = iokit.IOHIDElementGetLogicalMin(elementRef)
self.logicalMax = iokit.IOHIDElementGetLogicalMax(elementRef)
self.physicalMin = iokit.IOHIDElementGetPhysicalMin(elementRef)
self.physicalMax = iokit.IOHIDElementGetPhysicalMax(elementRef)
class HIDManager:
def __init__(self):
# Create the HID Manager.
self.managerRef = c_void_p(iokit.IOHIDManagerCreate(None, kIOHIDOptionsTypeNone))
assert(self.managerRef)
assert cf.CFGetTypeID(self.managerRef) == iokit.IOHIDManagerGetTypeID()
self.schedule_with_run_loop()
self.matching_observers = set()
self.register_matching_callback()
self.get_devices()
def get_devices(self):
# Tell manager that we are willing to match *any* device.
# (Alternatively, we could restrict by device usage, or usage page.)
iokit.IOHIDManagerSetDeviceMatching(self.managerRef, None)
# Copy the device set and convert it to python.
cfset = c_void_p(iokit.IOHIDManagerCopyDevices(self.managerRef))
self.devices = cfset_to_set(cfset)
cf.CFRelease(cfset)
def open(self):
iokit.IOHIDManagerOpen(self.managerRef, kIOHIDOptionsTypeNone)
def close(self):
iokit.IOHIDManagerClose(self.managerRef, kIOHIDOptionsTypeNone)
def schedule_with_run_loop(self):
iokit.IOHIDManagerScheduleWithRunLoop(
self.managerRef,
c_void_p(cf.CFRunLoopGetCurrent()),
kCFRunLoopDefaultMode)
def unschedule_from_run_loop(self):
iokit.IOHIDManagerUnscheduleFromRunLoop(
self.managerRef,
c_void_p(cf.CFRunLoopGetCurrent()),
kCFRunLoopDefaultMode)
def py_matching_callback(self, context, result, sender, device):
d = HIDDevice.get_device(c_void_p(device))
if d not in self.devices:
self.devices.add(d)
for x in self.matching_observers:
if hasattr(x, 'device_discovered'):
x.device_discovered(d)
def register_matching_callback(self):
self.matching_callback = HIDManagerCallback(self.py_matching_callback)
iokit.IOHIDManagerRegisterDeviceMatchingCallback(
self.managerRef,
self.matching_callback,
None)
######################################################################
# Pyglet interface to HID
from base import Device, Control, AbsoluteAxis, RelativeAxis, Button
from base import Joystick, AppleRemote
from base import DeviceExclusiveException
_axis_names = {
(0x01, 0x30): 'x',
(0x01, 0x31): 'y',
(0x01, 0x32): 'z',
(0x01, 0x33): 'rx',
(0x01, 0x34): 'ry',
(0x01, 0x35): 'rz',
(0x01, 0x38): 'wheel',
(0x01, 0x39): 'hat',
}
_button_names = {
(kHIDPage_GenericDesktop, kHIDUsage_GD_SystemSleep): 'sleep',
(kHIDPage_GenericDesktop, kHIDUsage_GD_SystemWakeUp): 'wakeup',
(kHIDPage_GenericDesktop, kHIDUsage_GD_SystemAppMenu): 'menu',
(kHIDPage_GenericDesktop, kHIDUsage_GD_SystemMenu): 'select',
(kHIDPage_GenericDesktop, kHIDUsage_GD_SystemMenuRight): 'right',
(kHIDPage_GenericDesktop, kHIDUsage_GD_SystemMenuLeft): 'left',
(kHIDPage_GenericDesktop, kHIDUsage_GD_SystemMenuUp): 'up',
(kHIDPage_GenericDesktop, kHIDUsage_GD_SystemMenuDown): 'down',
(kHIDPage_Consumer, kHIDUsage_Csmr_FastForward): 'right_hold',
(kHIDPage_Consumer, kHIDUsage_Csmr_Rewind): 'left_hold',
(kHIDPage_Consumer, kHIDUsage_Csmr_Menu): 'menu_hold',
(0xff01, 0x23): 'select_hold',
(kHIDPage_Consumer, kHIDUsage_Csmr_Eject): 'eject',
(kHIDPage_Consumer, kHIDUsage_Csmr_Mute): 'mute',
(kHIDPage_Consumer, kHIDUsage_Csmr_VolumeIncrement): 'volume_up',
(kHIDPage_Consumer, kHIDUsage_Csmr_VolumeDecrement): 'volume_down'
}
class PygletDevice(Device):
def __init__(self, display, device, manager):
super(PygletDevice, self).__init__(display, device.product)
self.device = device
self.device_identifier = self.device.unique_identifier()
self.device.add_value_observer(self)
self.device.add_removal_observer(self)
manager.matching_observers.add(self)
self._create_controls()
self._is_open = False
self._is_exclusive = False
def open(self, window=None, exclusive=False):
super(PygletDevice, self).open(window, exclusive)
self.device.open(exclusive)
self.device.schedule_with_run_loop()
self._is_open = True
self._is_exclusive = exclusive
def close(self):
super(PygletDevice, self).close()
self.device.close()
self._is_open = False
def get_controls(self):
return self._controls.values()
def device_removed(self, hid_device):
# Called by device when it is unplugged.
# Set device to None, but keep self._controls around
# in case the device is plugged back in.
self.device = None
def device_discovered(self, hid_device):
# Called by HID manager when new device is found.
# If our device was disconnected, reconnect when it is plugged back in.
if not self.device and self.device_identifier == hid_device.unique_identifier():
self.device = hid_device
self.device.add_value_observer(self)
self.device.add_removal_observer(self)
# Don't need to recreate controls since this is same device.
# They are indexed by cookie, which is constant.
if self._is_open:
self.device.open(self._is_exclusive)
self.device.schedule_with_run_loop()
def device_value_changed(self, hid_device, hid_value):
# Called by device when input value changes.
control = self._controls[hid_value.element.cookie]
control._set_value(hid_value.intvalue)
def _create_controls(self):
self._controls = {}
for element in self.device.elements:
raw_name = element.name or '0x%x:%x' % (element.usagePage, element.usage)
if element.type in (kIOHIDElementTypeInput_Misc, kIOHIDElementTypeInput_Axis):
name = _axis_names.get((element.usagePage, element.usage))
if element.isRelative:
control = RelativeAxis(name, raw_name)
else:
control = AbsoluteAxis(name, element.logicalMin, element.logicalMax, raw_name)
elif element.type == kIOHIDElementTypeInput_Button:
name = _button_names.get((element.usagePage, element.usage))
control = Button(name, raw_name)
else:
continue
control._cookie = element.cookie
self._controls[control._cookie] = control
######################################################################
_manager = HIDManager()
def get_devices(display=None):
return [ PygletDevice(display, device, _manager) for device in _manager.devices ]
def get_joysticks(display=None):
return [ Joystick(PygletDevice(display, device, _manager)) for device in _manager.devices
if device.is_joystick() or device.is_gamepad() or device.is_multi_axis() ]
def get_apple_remote(display=None):
for device in _manager.devices:
if device.product == 'Apple IR':
return AppleRemote(PygletDevice(display, device, _manager))
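# Illustrative usage sketch (an assumption, not part of this module): this file is normally
# reached through pyglet's platform dispatch, e.g.
#
#     import pyglet
#     joysticks = pyglet.input.get_joysticks()
#     if joysticks:
#         stick = joysticks[0]
#         stick.open()        # opens the underlying HIDDevice and schedules it on the run loop
#         pyglet.app.run()    # value callbacks fire while the event loop runs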
|
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class SecurityPartnerProvidersOperations(object):
"""SecurityPartnerProvidersOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_05_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _delete_initial(
self,
resource_group_name, # type: str
security_partner_provider_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-05-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'securityPartnerProviderName': self._serialize.url("security_partner_provider_name", security_partner_provider_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/securityPartnerProviders/{securityPartnerProviderName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
security_partner_provider_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified Security Partner Provider.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param security_partner_provider_name: The name of the Security Partner Provider.
:type security_partner_provider_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
security_partner_provider_name=security_partner_provider_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'securityPartnerProviderName': self._serialize.url("security_partner_provider_name", security_partner_provider_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/securityPartnerProviders/{securityPartnerProviderName}'} # type: ignore
def get(
self,
resource_group_name, # type: str
security_partner_provider_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.SecurityPartnerProvider"
"""Gets the specified Security Partner Provider.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param security_partner_provider_name: The name of the Security Partner Provider.
:type security_partner_provider_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SecurityPartnerProvider, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_05_01.models.SecurityPartnerProvider
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecurityPartnerProvider"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-05-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'securityPartnerProviderName': self._serialize.url("security_partner_provider_name", security_partner_provider_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('SecurityPartnerProvider', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/securityPartnerProviders/{securityPartnerProviderName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name, # type: str
security_partner_provider_name, # type: str
parameters, # type: "_models.SecurityPartnerProvider"
**kwargs # type: Any
):
# type: (...) -> "_models.SecurityPartnerProvider"
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecurityPartnerProvider"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-05-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'securityPartnerProviderName': self._serialize.url("security_partner_provider_name", security_partner_provider_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'SecurityPartnerProvider')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('SecurityPartnerProvider', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('SecurityPartnerProvider', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/securityPartnerProviders/{securityPartnerProviderName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
security_partner_provider_name, # type: str
parameters, # type: "_models.SecurityPartnerProvider"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.SecurityPartnerProvider"]
"""Creates or updates the specified Security Partner Provider.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param security_partner_provider_name: The name of the Security Partner Provider.
:type security_partner_provider_name: str
:param parameters: Parameters supplied to the create or update Security Partner Provider
operation.
:type parameters: ~azure.mgmt.network.v2020_05_01.models.SecurityPartnerProvider
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either SecurityPartnerProvider or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_05_01.models.SecurityPartnerProvider]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecurityPartnerProvider"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
security_partner_provider_name=security_partner_provider_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('SecurityPartnerProvider', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'securityPartnerProviderName': self._serialize.url("security_partner_provider_name", security_partner_provider_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/securityPartnerProviders/{securityPartnerProviderName}'} # type: ignore
def update_tags(
self,
resource_group_name, # type: str
security_partner_provider_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> "_models.SecurityPartnerProvider"
"""Updates tags of a Security Partner Provider resource.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param security_partner_provider_name: The name of the Security Partner Provider.
:type security_partner_provider_name: str
:param parameters: Parameters supplied to update Security Partner Provider tags.
:type parameters: ~azure.mgmt.network.v2020_05_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SecurityPartnerProvider, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_05_01.models.SecurityPartnerProvider
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecurityPartnerProvider"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-05-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_tags.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'securityPartnerProviderName': self._serialize.url("security_partner_provider_name", security_partner_provider_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('SecurityPartnerProvider', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/securityPartnerProviders/{securityPartnerProviderName}'} # type: ignore
def list_by_resource_group(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.SecurityPartnerProviderListResult"]
"""Lists all Security Partner Providers in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SecurityPartnerProviderListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_05_01.models.SecurityPartnerProviderListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecurityPartnerProviderListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-05-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('SecurityPartnerProviderListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/securityPartnerProviders'} # type: ignore
def list(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.SecurityPartnerProviderListResult"]
"""Gets all the Security Partner Providers in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SecurityPartnerProviderListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_05_01.models.SecurityPartnerProviderListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecurityPartnerProviderListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-05-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('SecurityPartnerProviderListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/securityPartnerProviders'} # type: ignore
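# Illustrative usage sketch (assumption: azure-identity and the versioned NetworkManagementClient
# are available; this operation group is reached as an attribute of that client):
#
#     from azure.identity import DefaultAzureCredential
#     from azure.mgmt.network import NetworkManagementClient
#
#     client = NetworkManagementClient(DefaultAzureCredential(), subscription_id)
#     poller = client.security_partner_providers.begin_delete(
#         resource_group_name="my-rg",
#         security_partner_provider_name="my-spp",
#     )
#     poller.result()  # block until the long-running delete completes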
|
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import sys
# We will use all operators inside the NDArray module.
# If you want to run benchmarks for all operators in a different namespace,
# for example mxnet.numpy.op, update here. All operators for benchmarking
# will be picked up from this module.
MX_OP_MODULE = sys.modules["mxnet.ndarray.op"]
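# e.g. (illustrative sketch) to pick up the namespace named above instead, it must already
# be imported so that the sys.modules lookup succeeds:
# MX_OP_MODULE = sys.modules["mxnet.numpy.op"]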
"""Default Input Tensor shapes to use for benchmarking"""
# For operators like concat, ElementWiseSum, squeeze, stack
# argument data is passed as variable arg (*args)
DEFAULT_ARGS = [(1024, 1024)]
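# Illustrative sketch (assumes `from mxnet import nd`): such operators take a variable
# number of positional NDArrays, e.g.
# nd.concat(nd.ones(DEFAULT_ARGS[0]), nd.ones(DEFAULT_ARGS[0]), dim=0)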
# For Unary operators like abs, arccos, arcsin etc..
DEFAULT_DATA = [(1024, 1024), (10000, 1), (10000, 100)]
DEFAULT_DTYPE = ['float32', 'int32', 'float32'] # required parameter for amp_cast, cast
DEFAULT_DTYPE_INT = ['int32', 'int64', 'int32'] # randint works for int* types only
DEFAULT_DTYPE_FLOAT = ['float16', 'float32', 'float64'] # random_exp works for float* types only
DEFAULT_DATA_LARGE_TENSOR = [(2**16, 2**16)]
# For Binary miscellaneous operators like choose_element0_index
# argument data must be indexed via an NDArray.
# NOTE: Data used is DEFAULT_DATA
DEFAULT_INDEX = [(1, 1024), (1, 1), (1, 100)]
DEFAULT_INDEX_LARGE_TENSOR = [(1, 2**16)]
# For Binary broadcast operators like - broadcast_add/sub/mod/logical_and etc..
DEFAULT_LHS = [(1024, 1024), (10000, 10), (10000, 1)]
DEFAULT_RHS = [(1024, 1024), (10000, 10), (10000, 1)]
DEFAULT_LHS_LARGE_TENSOR = [(2**16, 2**16), (2**28, 2**4), (2**32, 1)]
DEFAULT_RHS_LARGE_TENSOR = [(2**16, 2**16), (2**28, 2**4), (2**32, 1)]
# For operators like - random_uniform, random_normal etc..
DEFAULT_SHAPE = [(1024, 1024), (10000, 1), (10000, 100)]
DEFAULT_SAMPLE = [(2,)]
DEFAULT_LOW = [0]
DEFAULT_HIGH = [5]
DEFAULT_K = [1]
DEFAULT_P = [1]
DEFAULT_SHAPE_LARGE_TENSOR = [(2**16, 2**16)]#, (2**32, 1), (2**25, 2**7)]
DEFAULT_SAMPLE_LARGE_TENSOR = [(2**32,)]
DEFAULT_DATA_RPD_LARGE_TENSOR = [(2**32 + 1, 5)]
DEFAULT_ALPHA_RPD_LARGE_TENSOR = [(2**32,)]
DEFAULT_SAMPLE_RPE_LARGE_TENSOR = [(1, 2**32)]
DEFAULT_LAM_RPE_LARGE_TENSOR = [(1,)]
DEFAULT_SAMPLE_RPG_LARGE_TENSOR = [(1, 2**32 + 1)]
DEFAULT_ALPHA_RPG_LARGE_TENSOR = [(1,)]
# For operators like - sample_uniform, sample_normal etc..
# NOTE: There are many overlapping operators in random_* and sample_*,
# e.g. random_uniform and sample_uniform. Parameter names are the same, but for
# random_* operators they are float/int and for sample_* operators they are NDArrays.
# Hence, below we append ND to mark the difference.
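# Illustrative sketch (assumes `from mxnet import nd`):
# nd.random_uniform(low=0, high=5, shape=(1024, 1024))                                  # scalar params
# nd.sample_uniform(low=nd.array([0.0, 2.5]), high=nd.array([1.0, 3.7]), shape=(2,))    # NDArray params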
DEFAULT_LOW_ND = [[0.0, 2.5]]
DEFAULT_HIGH_ND = [[1.0, 3.7]]
DEFAULT_MU_ND = [[2.0, 2.5]]
DEFAULT_SIGMA = [[1.0, 3.7]]
DEFAULT_ALPHA_ND = [[0.0, 2.5]]
DEFAULT_BETA_ND = [[1.0, 0.7]]
DEFAULT_LAM = [[1.0, 8.5]]
DEFAULT_K_ND = [[20, 49]]
DEFAULT_P_ND = [[0.4, 0.77]]
DEFAULT_GRID = [(32, 2, 256, 256)]
DEFAULT_DATA_BILINEAR = [(32, 2, 256, 256)]
DEFAULT_TRANSFORM_TYPE = ['warp', 'affine']
DEFAULT_DATA_GRIDGEN = [(32, 2, 256, 256), (256, 6)]
DEFAULT_TARGET_SHAPE = [(256, 6)]
DEFAULT_DATA_SM = [(32, 32), (64, 64)]
DEFAULT_LOW_ND_LARGE_TENSOR = [[0.0] * 2**16 + [2.5] * 2**16]
DEFAULT_HIGH_ND_LARGE_TENSOR = [[1.0] * 2**16 + [3.7] * 2**16]
DEFAULT_MU_ND_LARGE_TENSOR = [[2.0] * 2**16 + [2.5] * 2**16]
DEFAULT_SIGMA_LARGE_TENSOR = [[1.0] * 2**16 + [3.7] * 2**16]
DEFAULT_ALPHA_ND_LARGE_TENSOR = [[0.0] * 2**16 + [2.5] * 2**16]
DEFAULT_BETA_ND_LARGE_TENSOR = [[1.0] * 2**16 + [0.7] * 2**16]
DEFAULT_LAM_ND_LARGE_TENSOR = [[1.0] * 2**16 + [8.5] * 2**16]
DEFAULT_K_ND_LARGE_TENSOR = [[20] * 2**16 + [49] * 2**16]
DEFAULT_P_ND_LARGE_TENSOR = [[0.4] * 2**16 + [0.77] * 2**16]
DEFAULT_DATA_BILINEAR_LARGE_TENSOR = [(2**32, 1, 1, 1)]
DEFAULT_GRID_LARGE_TENSOR = [(2**32, 2, 1, 1)]
DEFAULT_DATA_GRIDGEN_LARGE_TENSOR = [(2**31, 2, 1, 1), (1, 6)]
DEFAULT_TARGET_SHAPE_LARGE_TENSOR = [(1, 6)]
DEFAULT_DATA_SM_LARGE_TENSOR = [(2**32,)]
DEFAULT_SHAPE_SE_LARGE_TENSOR = [(1,)]
DEFAULT_LAM_SE_LARGE_TENSOR = [(2**32 + 1,)]
DEFAULT_SHAPE_SU_LARGE_TENSOR = [(2**32,)]
# For sorting and searching operators
# NOTE: Data used is DEFAULT_DATA
DEFAULT_AXIS = [0]
# For NN basic operators
# General
DEFAULT_DATA_NN_BASIC = [(32, 3, 256, 256), (32, 3, 10000, 10)]
DEFAULT_NUM_HIDDEN = [64]
DEFAULT_BIAS = [(64,)]
DEFAULT_FLATTEN = [True, False]
DEFAULT_GAMMA = [(3,)]
DEFAULT_BETA = [(3,)]
DEFAULT_MOVING_MEAN = [(3,)]
DEFAULT_MOVING_VAR = [(3,)]
DEFAULT_LABEL_REG = [(32, 3, 256, 256), (32, 3, 10000, 10)]
DEFAULT_GRAD_SCALE = [.5]
DEFAULT_NORMALIZATION = ["batch"]
DEFAULT_MARGIN = [.5]
DEFAULT_REG_COEFF = [.5]
DEFAULT_INPUT_DIM = [3, 16]
DEFAULT_OUTPUT_DIM = [4, 9]
DEFAULT_SPARSE_GRAD = [False]
DEFAULT_KERNEL_SIZE = [3]
DEFAULT_MAX_DISPLACEMENT = [2]
DEFAULT_STRIDE_1 = [2]
DEFAULT_STRIDE_2 = [2]
DEFAULT_ALPHA = [.001]
DEFAULT_NSIZE = [3]
DEFAULT_PARAMETERS = [(7,), (104,)]
DEFAULT_STATE = [(1, 4, 1), (2, 10000, 4)]
DEFAULT_STATE_SIZE = [1, 4]
DEFAULT_NUM_LAYERS = [1, 2]
DEFAULT_NUM_GROUPS = [1, 10]
DEFAULT_TRANSFORM = ["affine"]
DEFAULT_SAMPLER = ["bilinear"]
DEFAULT_DILATE = [(1,), (1, 1)]
DEFAULT_PAD = [(1,), (1, 1)]
DEFAULT_OUTPUT_SIZE = [(64, 16, 1), (32, 8, 1)]
DEFAULT_KERNEL = [(1, 1, 1), (1, 1, 1)]
DEFAULT_STRIDE = [(2, 2, 2), (1, 1, 1)]
DEFAULT_DATA_NN_BASIC_LARGE_TENSOR = [(2**32 + 1, 1)]
DEFAULT_NUM_HIDDEN_LARGE_TENSOR = [(1,)]
DEFAULT_BIAS_LARGE_TENSOR = [(1,)]
DEFAULT_FLATTEN_LARGE_TENSOR = [False]
DEFAULT_GAMMA_LARGE_TENSOR = [(1,)]
DEFAULT_BETA_LARGE_TENSOR = [(1,)]
DEFAULT_MOVING_MEAN_LARGE_TENSOR = [(2**32 + 1,)]
DEFAULT_MOVING_VAR_LARGE_TENSOR = [(2**32 + 1,)]
DEFAULT_INPUT_DIM_LARGE_TENSOR = [2**32]
DEFAULT_OUTPUT_DIM_LARGE_TENSOR = [1]
DEFAULT_KERNEL_SIZE_LARGE_TENSOR = [1]
DEFAULT_MAX_DISPLACEMENT_LARGE_TENSOR = [1]
DEFAULT_STRIDE_1_LARGE_TENSOR = [1]
DEFAULT_STRIDE_2_LARGE_TENSOR = [1]
DEFAULT_DILATE_LARGE_TENSOR = [[]]
DEFAULT_PAD_LARGE_TENSOR = [[]]
DEFAULT_OUTPUT_SIZE_LARGE_TENSOR = [(2, 2, 1)]
DEFAULT_KERNEL_LARGE_TENSOR = [(1, 1, 1)]
DEFAULT_STRIDE_LARGE_TENSOR = [[]]
DEFAULT_PARAMETERS_LARGE_TENSOR = [(7,)]
DEFAULT_STATE_LARGE_TENSOR = [(1, 4, 1)]
DEFAULT_STATE_SIZE_LARGE_TENSOR = [1]
DEFAULT_NUM_LAYERS_LARGE_TENSOR = [1]
# BatchNorm
DEFAULT_AXIS_BN = [1]
# LayerNorm
DEFAULT_GAMMA_LN = [(32,), (32,)]
DEFAULT_BETA_LN = [(32,), (32,)]
# L2Normalization
DEFAULT_MODE_L2 = ['channel', 'instance', 'spatial']
# SVMOutput
DEFAULT_LABEL_SVM = [(32, 3, 256), (32, 3, 10000)]
DEFAULT_DATA_SVM_LARGE_TENSOR = [(2**29, 2, 2, 2)]
DEFAULT_LABEL_SVM_LARGE_TENSOR = [(2**29, 2, 2)]
# SoftmaxOutput
DEFAULT_LABEL_SM = [(32, 3, 256), (32, 3, 10000)]
DEFAULT_DATA_SO_LARGE_TENSOR = [(2**29, 2, 2, 2)]
DEFAULT_LABEL_SO_LARGE_TENSOR = [(2**29, 2, 2)]
# FullyConnected
DEFAULT_WEIGHT_FC = [(64, 3 * 256 * 256), (64, 10)]
DEFAULT_DATA_FC_LARGE_TENSOR = [(2**32, 1)]
DEFAULT_WEIGHT_FC_LARGE_TENSOR = [(1, 1)]
DEFAULT_NUM_HIDDEN_FC_LARGE_TENSOR = [1]
# Embedding
DEFAULT_WEIGHT_EMBEDDING = [(3, 4), (16, 9)]
DEFAULT_WEIGHT_EMBEDDING_LARGE_TENSOR = [(2**32, 1)]
# GroupNorm
DEFAULT_DATA_GN = [(32, 3, 256, 256), (32, 10, 10000, 10)]
DEFAULT_BETA_GAMMA_GN = [(1,), (10,)]
DEFAULT_DATA_GN_LARGE_TENSOR = [(2**27, 4, 4, 2)]
DEFAULT_BETA_GAMMA_GN_LARGE_TENSOR = [(1,)]
# Dropout
DEFAULT_DATA_DROPOUT = [(32, 3, 256, 256), (10000, 10)]
DEFAULT_MODE_DROPOUT = ["always"]
DEFAULT_DATA_DROPOUT_LARGE_TENSOR = [(2**32 + 1,)]
DEFAULT_P_DROPOUT_LARGE_TENSOR = [.5]
DEFAULT_AXES_DROPOUT_LARGE_TENSOR = [[]]
# SpatialTransformer
DEFAULT_DATA_ST = [(32, 3, 256, 6), (256, 3, 10000, 6)]
DEFAULT_LOC_TAR_ST = [(32, 6), (256, 6)]
DEFAULT_DATA_ST_LARGE_TENSOR = [(2, 2**29, 1, 6)]
DEFAULT_LOC_TAR_ST_LARGE_TENSOR = [(2, 6)]
# im2col
DEFAULT_KERNEL_I2C = [(3,), (3, 3)]
DEFAULT_STRIDE_I2C = [(1,), (1, 1)]
DEFAULT_DATA_I2C_LARGE_TENSOR = [(2**29, 2, 2, 6)]
DEFAULT_KERNEL_I2C_LARGE_TENSOR = [(1,)]
DEFAULT_STRIDE_I2C_LARGE_TENSOR = [[]]
# col2im
DEFAULT_DATA_C2I = [(32, 64, 256), (32, 64, 256)]
DEFAULT_DATA_C2I_LARGE_TENSOR = [(1, 2**30, 4)]
# LRN
DEFAULT_BETA_LRN = [.2]
DEFAULT_DATA_LRN_LARGE_TENSOR = [(2**27, 4, 4, 2)]
# Correlation
DEFAULT_DATA1_LARGE_TENSOR = [(2**23, 8, 8, 8)]
DEFAULT_DATA2_LARGE_TENSOR = [(2**23, 8, 8, 8)]
# For regression operators
DEFAULT_DATA_REG_LARGE_TENSOR = [(2**29, 2, 2, 2)]
DEFAULT_LABEL_REG_LARGE_TENSOR = [(2**29, 2, 2, 2)]
# For normalization operators
DEFAULT_DATA_NORM_LARGE_TENSOR = [(2**29, 2, 2, 2)]
DEFAULT_GAMMA_NORM_LARGE_TENSOR = [(2,)]
DEFAULT_BETA_NORM_LARGE_TENSOR = [(2,)]
DEFAULT_AXIS_LARGE_TENSOR = [-1]
# For optimizer operators
DEFAULT_WEIGHT = [(1024, 1024), (10000, 1), (10000, 100)]
DEFAULT_GRAD = [(1024, 1024), (10000, 1), (10000, 100)]
DEFAULT_MOM = [(1024, 1024), (10000, 1), (10000, 100)]
DEFAULT_MEAN = [(1024, 1024), (10000, 1), (10000, 100)]
DEFAULT_VAR = [(1024, 1024), (10000, 1), (10000, 100)]
DEFAULT_N = [(1024, 1024), (10000, 1), (10000, 100)]
DEFAULT_D = [(1024, 1024), (10000, 1), (10000, 100)]
DEFAULT_V = [(1024, 1024), (10000, 1), (10000, 100)]
DEFAULT_Z = [(1024, 1024), (10000, 1), (10000, 100)]
DEFAULT_G = [(1024, 1024), (10000, 1), (10000, 100)]
DEFAULT_R1 = [(1, 1024), (1, 1), (1, 100)]
DEFAULT_R2 = [(1, 1024), (1, 1), (1, 100)]
DEFAULT_DELTA = [(1024, 1024), (10000, 1), (10000, 100)]
DEFAULT_LRS = [(0.1,0.1)]
DEFAULT_LR = [0.1, 0.5, 0.9]
DEFAULT_WD = [0.1, 0.5, 0.9]
DEFAULT_RHO = [0.1, 0.5, 0.9]
DEFAULT_MOMENTUM = [0.1, 0.5, 0.9]
DEFAULT_EPSILON = [1e-05]
DEFAULT_BETA_1 = [0.1, 0.5, 0.9]
DEFAULT_BETA_2 = [0.1, 0.5, 0.9]
DEFAULT_T = [1, 5]
DEFAULT_RESCALE_GRAD = [0.4, 0.77]
DEFAULT_CLIP_GRADIENT = [-1.0, 0.8]
DEFAULT_CLIP_WEIGHTS = [-1.0, 0.8]
DEFAULT_LAZY_UPDATE = [0, 1]
DEFAULT_WEIGHT_LARGE_TENSOR = [(2**16, 2**16), (2**32, 1), (2**25, 2**7)]
DEFAULT_GRAD_LARGE_TENSOR = [(2**16, 2**16), (2**32, 1), (2**25, 2**7)]
DEFAULT_MOM_LARGE_TENSOR = [(2**16, 2**16), (2**32, 1), (2**25, 2**7)]
DEFAULT_MEAN_LARGE_TENSOR = [(2**16, 2**16), (2**32, 1), (2**25, 2**7)]
DEFAULT_VAR_LARGE_TENSOR = [(2**16, 2**16), (2**32, 1), (2**25, 2**7)]
DEFAULT_N_LARGE_TENSOR = [(2**16, 2**16), (2**32, 1), (2**25, 2**7)]
DEFAULT_D_LARGE_TENSOR = [(2**16, 2**16), (2**32, 1), (2**25, 2**7)]
DEFAULT_V_LARGE_TENSOR = [(2**16, 2**16), (2**32, 1), (2**25, 2**7)]
DEFAULT_Z_LARGE_TENSOR = [(2**16, 2**16), (2**32, 1), (2**25, 2**7)]
DEFAULT_G_LARGE_TENSOR = [(2**16, 2**16), (2**32, 1), (2**25, 2**7)]
DEFAULT_R1_LARGE_TENSOR = [(1,)]
DEFAULT_R2_LARGE_TENSOR = [(1,)]
DEFAULT_DELTA_LARGE_TENSOR = [(2**16, 2**16), (2**32, 1), (2**25, 2**7)]
# For array manipulation operators
# NOTE: Data needs to be a 4D tensor for operators like space_to_depth, depth_to_space etc.
# Hence below we append 4d to mark the difference.
# For depth_to_space, dimension 1 (depth) must be a multiple of `block^2`;
# for space_to_depth, dimensions 2 and 3 must be multiples of 'block'.
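# e.g. depth_to_space on data of shape (1, 4, 2, 4) with block=2 yields shape (1, 1, 4, 8)
# (illustrative arithmetic: depth 4 / 2**2 = 1, height 2 * 2 = 4, width 4 * 2 = 8).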
DEFAULT_DATA_4d = [(1, 4, 2, 4), (10, 25, 10, 100)]
DEFAULT_BLOCK_SIZE = [2, 5]
DEFAULT_NUM_OUTPUTS = [1]
DEFAULT_PAD_WIDTH_4d = [(0, 0, 0, 0, 1, 1, 1, 1)]
DEFAULT_MODE_4d = ["constant"]
DEFAULT_REPEATS = [2]
# broadcast_axis needs an input array with at least 1 dim of size 1;
# since axis is 0 (default), size(dim0) = 1
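# Illustrative sketch (assumes `from mxnet import nd`):
# nd.broadcast_axis(nd.ones((1, 1024)), axis=0, size=2)  # -> shape (2, 1024)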
DEFAULT_DATA_DIM1 = [(1, 1024), (1, 1), (1, 100)]
DEFAULT_SIZE = [2]
DEFAULT_DATA_4d_LARGE_TENSOR = [(1, 4, 2, 2**29), (1,2**4,2**4,2**24)]
DEFAULT_BLOCK_SIZE_LARGE_TENSOR = [2, 4]
# For miscellaneous operators
DEFAULT_DATA_SQUEEZE = [(1, 1024, 1024), (32, 1, 256, 256)]
DEFAULT_AXIS_SQUEEZE = [0, 1]
DEFAULT_A_MIN = [0.1]
DEFAULT_A_MAX = [0.9]
DEFAULT_LRS = [(1024, 1024), (10000, 1), (10000, 100)]
DEFAULT_WSS = [(1024, 1024), (10000, 1), (10000, 100)]
DEFAULT_GSS = [(1024, 1024), (10000, 1), (10000, 100)]
DEFAULT_WDS = [(1024, 1024), (10000, 1), (10000, 100)]
DEFAULT_ETA = [.5]
DEFAULT_STYPE = ['default', 'csr', 'row_sparse']
DEFAULT_A = [(1024, 1024), (10000, 1), (10000, 100)]
DEFAULT_LHS_FEI = [(1024, 1024), (10000, 1), (10000, 100)]
DEFAULT_MHS = [(1024,), (10000,), (10000,)]
DEFAULT_RHS_FEI = [(1024,), (10000,), (10000,)]
DEFAULT_DATA_SQUEEZE_LARGE_TENSOR = [(2**32, 1)]
DEFAULT_AXIS_SQUEEZE_LARGE_TENSOR = [1]
DEFAULT_WSS_LARGE_TENSOR = [(2**32, 1)]
DEFAULT_GSS_LARGE_TENSOR = [(2**32, 1)]
DEFAULT_WDS_LARGE_TENSOR = [(2**32, 1)]
DEFAULT_LHS_FEI_LARGE_TENSOR = [(2, 2**32 + 1)]
DEFAULT_RHS_FEI_LARGE_TENSOR = [(2,)]
DEFAULT_MHS_LARGE_TENSOR = [(2,)]
# For swapaxis operator
DEFAULT_DIM_1 = [0]
DEFAULT_DIM_2 = [1]
# For indexing routines
DEFAULT_INDEX = [(1,1024), (1,1), (1,100)]
DEFAULT_INDICES = [(1, 1)]
DEFAULT_BEGIN = [0] # slice_axis expects int, slice can have tuple/int
DEFAULT_END = [1] # same as above
DEFAULT_SHAPE_LIKE = [(100, 100), (10, 1), (100, 10)]
DEFAULT_X = [(1024, 1024), (10000, 1), (10000, 100)]
DEFAULT_Y = [(1024, 1024), (10000, 1), (10000, 100)]
DEFAULT_COND = [(1024,), (10000,), (10000,)]
DEFAULT_DEPTH = [0]
# For the ravel_multi_index op, ndim(shape) = 2; hence the data NDArray's first dim = 2.
# The first dimension of the ravel operator's input must match the dimension of the shape parameter.
# DEFAULT_SHAPE is reused for the ravel_multi_index op.
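# Illustrative sketch (assumes `from mxnet import nd`): each *column* of the data is one multi-index,
# nd.ravel_multi_index(nd.array([[2, 3], [10, 50]]), shape=(1024, 1024))
# returns the flat indices [2*1024 + 10, 3*1024 + 50] = [2058, 3122].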
RAVEL_DATA = [(2, 1024)]
RAVEL_DATA_LARGE_TENSOR = [(2, 2**32)]
DEFAULT_X_LARGE_TENSOR = [(2**32, 1)]
# For loss operators
DEFAULT_DATA_3d = [(1024, 100, 100)]
DEFAULT_LABEL = [(100,100)]
DEFAULT_DATA_SMCE = [(1024, 1024)]
DEFAULT_LABEL_SMCE = [(1024,)]
DEFAULT_LABEL_LARGE_TENSOR = [(1, 1)]
DEFAULT_DATA_CTCLOSS = [(2**32, 1, 1)]
DEFAULT_DATA_SMCE_LARGE_TENSOR = [(2**32 + 1, 1)]
DEFAULT_LABEL_SMCE_LARGE_TENSOR = [(2**32 + 1,)]
# For NN operators
DEFAULT_ACT_TYPE_LR = ['leaky', 'elu', 'selu', 'gelu']
DEFAULT_ACT_TYPE_ACTIVATION = ['relu', 'sigmoid', 'softrelu', 'softsign', 'tanh']
DEFAULT_LABEL_SOFTMAX = [(1024, 1024), (10000, 1), (10000, 100)]
DEFAULT_LABEL_SOFTMAX_LARGE_TENSOR = [(2**32, 1)]
# For linalg operators
DEFAULT_A = [(1024, 1024)]
DEFAULT_B = [(1024, 1024)]
DEFAULT_C = [(1024, 1024)]
DEFAULT_A_MT = [(1024, 1035)]
DEFAULT_AXES = [[0, 1]]
DEFAULT_A_LARGE_TENSOR = [(2**16, 2**16)]
DEFAULT_B_LARGE_TENSOR = [(2**16, 2**16)]
DEFAULT_C_LARGE_TENSOR = [(2**16, 2**16)]
DEFAULT_A_MT_LARGE_TENSOR = [(2**32 + 1, 1)]
# Default Inputs. MXNet Op Param Name to Default Input mapping
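# Illustrative note (an assumption about the runner, not defined in this file): the benchmark
# utilities look up each operator's parameter names in this dict, so an op with signature
# broadcast_add(lhs, rhs) would be fed NDArrays of shapes DEFAULT_LHS[i] / DEFAULT_RHS[i].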
DEFAULTS_INPUTS = {"data": DEFAULT_DATA,
"dtype": DEFAULT_DTYPE,
"dtype_int": DEFAULT_DTYPE_INT,
"dtype_float": DEFAULT_DTYPE_FLOAT,
"sample": DEFAULT_SAMPLE,
"lhs": DEFAULT_LHS,
"rhs": DEFAULT_RHS,
"shape": DEFAULT_SHAPE,
"low": DEFAULT_LOW,
"high": DEFAULT_HIGH,
"low_nd": DEFAULT_LOW_ND,
"high_nd": DEFAULT_HIGH_ND,
"mu_nd": DEFAULT_MU_ND,
"sigma": DEFAULT_SIGMA,
"alpha_nd": DEFAULT_ALPHA_ND,
"beta_nd": DEFAULT_BETA_ND,
"lam_nd": DEFAULT_LAM,
"k": DEFAULT_K,
"p": DEFAULT_P,
"k_nd": DEFAULT_K_ND,
"p_nd": DEFAULT_P_ND,
"axis": DEFAULT_AXIS,
"weight" : DEFAULT_WEIGHT,
"weight32" : DEFAULT_WEIGHT,
"grad" : DEFAULT_GRAD,
"mean" : DEFAULT_MEAN,
"var" : DEFAULT_VAR,
"mom" : DEFAULT_MOM,
"r1" : DEFAULT_R1,
"r2" : DEFAULT_R2,
"n" : DEFAULT_N,
"d" : DEFAULT_D,
"v" : DEFAULT_V,
"z" : DEFAULT_Z,
"g" : DEFAULT_G,
"delta" : DEFAULT_DELTA,
"lr" : DEFAULT_LR,
"lrs" : DEFAULT_LRS,
"wd" : DEFAULT_WD,
"rho" : DEFAULT_RHO,
"momentum" : DEFAULT_MOMENTUM,
"epsilon" : DEFAULT_EPSILON,
"beta1" : DEFAULT_BETA_1,
"beta2" : DEFAULT_BETA_2,
"t" : DEFAULT_T,
"rescale_grad" : DEFAULT_RESCALE_GRAD,
"clip_grad" : DEFAULT_CLIP_GRADIENT,
"lazy_update" : DEFAULT_LAZY_UPDATE,
"data_4d": DEFAULT_DATA_4d,
"dim1": DEFAULT_DIM_1,
"dim2": DEFAULT_DIM_2,
"block_size": DEFAULT_BLOCK_SIZE,
"args": DEFAULT_ARGS,
"a": DEFAULT_DATA,
"index": DEFAULT_INDEX,
"indices": DEFAULT_INDICES,
"begin": DEFAULT_BEGIN,
"end": DEFAULT_END,
"shape_like": DEFAULT_SHAPE_LIKE,
"x": DEFAULT_X,
"y": DEFAULT_Y,
"condition": DEFAULT_COND,
"depth": DEFAULT_DEPTH,
"ravel_data": RAVEL_DATA,
"data_smce": DEFAULT_DATA_SMCE,
"data_3d": DEFAULT_DATA_3d,
"label_smce": DEFAULT_LABEL_SMCE,
"label": DEFAULT_LABEL,
"num_outputs": DEFAULT_NUM_OUTPUTS,
"data_dim1": DEFAULT_DATA_DIM1,
"size": DEFAULT_SIZE,
"mode_4d": DEFAULT_MODE_4d,
"pad_width_4d": DEFAULT_PAD_WIDTH_4d,
"repeats": DEFAULT_REPEATS,
"reps": DEFAULT_REPEATS,
"grid": DEFAULT_GRID,
"data_bilinearsampler": DEFAULT_DATA_BILINEAR,
"transform_type": DEFAULT_TRANSFORM_TYPE,
"data_gridgenerator": DEFAULT_DATA_GRIDGEN,
"target_shape_gridgenerator": DEFAULT_TARGET_SHAPE,
"data_sample_multinomial": DEFAULT_DATA_SM,
"A": DEFAULT_A,
"B": DEFAULT_B,
"C": DEFAULT_C,
"A_linalg_maketrian": DEFAULT_A_MT,
"axes": DEFAULT_AXES,
"act_type_leakyrelu": DEFAULT_ACT_TYPE_LR,
"label_softmax": DEFAULT_LABEL_SOFTMAX,
"act_type_activation": DEFAULT_ACT_TYPE_ACTIVATION,
"data_squeeze": DEFAULT_DATA_SQUEEZE,
"axis_squeeze": DEFAULT_AXIS_SQUEEZE,
"a_min": DEFAULT_A_MIN,
"a_max": DEFAULT_A_MAX,
"lrs": DEFAULT_LRS,
"weights_sum_sq": DEFAULT_WSS,
"grads_sum_sq": DEFAULT_GSS,
"wds": DEFAULT_WDS,
"eta": DEFAULT_ETA,
"eps": DEFAULT_EPSILON,
"stype": DEFAULT_STYPE,
"a": DEFAULT_A,
"lhs_fill_element_0index": DEFAULT_LHS_FEI,
"rhs_fill_element_0index": DEFAULT_RHS_FEI,
"mhs": DEFAULT_MHS,
"data_spatialtransformer": DEFAULT_DATA_ST,
"loc_spatialtransformer": DEFAULT_LOC_TAR_ST,
"target_shape": DEFAULT_LOC_TAR_ST,
"transform_type_spatialtransformer": DEFAULT_TRANSFORM,
"sampler_type": DEFAULT_SAMPLER,
"data_col2im": DEFAULT_DATA_C2I,
"output_size": DEFAULT_OUTPUT_SIZE,
"kernel_col2im": DEFAULT_KERNEL,
"stride_col2im": DEFAULT_STRIDE,
"parameters": DEFAULT_PARAMETERS,
"state": DEFAULT_STATE,
"state_size": DEFAULT_STATE_SIZE,
"num_layers": DEFAULT_NUM_LAYERS,
"data_groupnorm": DEFAULT_DATA_GN,
"gamma_groupnorm": DEFAULT_BETA_GAMMA_GN,
"beta_groupnorm": DEFAULT_BETA_GAMMA_GN,
"num_groups": DEFAULT_NUM_GROUPS,
"data_dropout": DEFAULT_DATA_DROPOUT,
"mode_dropout": DEFAULT_MODE_DROPOUT,
"p_dropout": DEFAULT_P,
"data_nn_basic": DEFAULT_DATA_NN_BASIC,
"num_hidden": DEFAULT_NUM_HIDDEN,
"data_fullyconnected": DEFAULT_DATA_NN_BASIC,
"weight_fullyconnected": DEFAULT_WEIGHT_FC,
"weight_embedding": DEFAULT_WEIGHT_EMBEDDING,
"bias": DEFAULT_BIAS,
"flatten": DEFAULT_FLATTEN,
"data_batchnorm": DEFAULT_DATA_NN_BASIC,
"gamma_batchnorm": DEFAULT_GAMMA,
"beta_batchnorm": DEFAULT_BETA,
"moving_mean_batchnorm": DEFAULT_MOVING_MEAN,
"moving_var_batchnorm": DEFAULT_MOVING_VAR,
"axis_batchnorm": DEFAULT_AXIS_BN,
"data_softmaxoutput": DEFAULT_DATA_NN_BASIC,
"label_softmaxoutput": DEFAULT_LABEL_SM,
"data_maeregressionoutput": DEFAULT_DATA_NN_BASIC,
"label_maeregressionoutput": DEFAULT_LABEL_REG,
"data_logisticregressionoutput": DEFAULT_DATA_NN_BASIC,
"label_logisticregressionoutput": DEFAULT_LABEL_REG,
"data_linearregressionoutput": DEFAULT_DATA_NN_BASIC,
"label_linearregressionoutput": DEFAULT_LABEL_REG,
"data_svmoutput": DEFAULT_DATA_NN_BASIC,
"label_svmoutput": DEFAULT_LABEL_SVM,
"grad_scale": DEFAULT_GRAD_SCALE,
"normalization": DEFAULT_NORMALIZATION,
"margin": DEFAULT_MARGIN,
"regularization_coefficient": DEFAULT_REG_COEFF,
"data_l2normalization": DEFAULT_DATA_NN_BASIC,
"mode_l2normalization": DEFAULT_MODE_L2,
"gamma_layernorm": DEFAULT_GAMMA_LN,
"beta_layernorm": DEFAULT_BETA_LN,
"data_instancenorm": DEFAULT_DATA_NN_BASIC,
"gamma_instancenorm": DEFAULT_GAMMA,
"beta_instancenorm": DEFAULT_BETA,
"input_dim": DEFAULT_INPUT_DIM,
"output_dim": DEFAULT_OUTPUT_DIM,
"sparse_grad": DEFAULT_SPARSE_GRAD,
"data1": DEFAULT_DATA_NN_BASIC,
"data2": DEFAULT_DATA_NN_BASIC,
"kernel_size": DEFAULT_KERNEL_SIZE,
"max_displacement": DEFAULT_MAX_DISPLACEMENT,
"stride1": DEFAULT_STRIDE_1,
"stride2": DEFAULT_STRIDE_2,
"data_im2col": DEFAULT_DATA_NN_BASIC,
"kernel_im2col": DEFAULT_KERNEL_I2C,
"stride_im2col": DEFAULT_STRIDE_I2C,
"dilate_im2col": DEFAULT_DILATE,
"pad_im2col": DEFAULT_PAD,
"data_lrn": DEFAULT_DATA_NN_BASIC,
"alpha_lrn": DEFAULT_ALPHA,
"beta_lrn": DEFAULT_BETA_LRN,
"nsize": DEFAULT_NSIZE,
"data_layernorm": DEFAULT_DATA_NN_BASIC,
"axis_layernorm": DEFAULT_AXIS}
# Default Inputs for Large Tensor. MXNet Op Param Name to Default Input mapping
DEFAULTS_INPUTS_LARGE_TENSOR = {"data": DEFAULT_DATA_LARGE_TENSOR,
"dtype": DEFAULT_DTYPE,
"dtype_int": DEFAULT_DTYPE_INT,
"dtype_float": DEFAULT_DTYPE_FLOAT,
"sample": DEFAULT_SAMPLE_LARGE_TENSOR,
"lhs": DEFAULT_LHS_LARGE_TENSOR,
"rhs": DEFAULT_RHS_LARGE_TENSOR,
"shape": DEFAULT_SHAPE_LARGE_TENSOR,
"low": DEFAULT_LOW,
"high": DEFAULT_HIGH,
"low_nd": DEFAULT_LOW_ND_LARGE_TENSOR,
"high_nd": DEFAULT_HIGH_ND_LARGE_TENSOR,
"mu_nd": DEFAULT_MU_ND_LARGE_TENSOR,
"sigma": DEFAULT_SIGMA_LARGE_TENSOR,
"alpha_nd": DEFAULT_ALPHA_ND_LARGE_TENSOR,
"beta_nd": DEFAULT_BETA_ND_LARGE_TENSOR,
"lam_nd": DEFAULT_LAM_ND_LARGE_TENSOR,
"lam_random_pdf_exponential": DEFAULT_LAM_RPE_LARGE_TENSOR,
"sample_random_pdf_exponential": DEFAULT_SAMPLE_RPE_LARGE_TENSOR,
"k": DEFAULT_K,
"p": DEFAULT_P,
"k_nd": DEFAULT_K_ND_LARGE_TENSOR,
"p_nd": DEFAULT_P_ND_LARGE_TENSOR,
"axis": DEFAULT_AXIS,
"weight" : DEFAULT_WEIGHT_LARGE_TENSOR,
"weight32" : DEFAULT_WEIGHT_LARGE_TENSOR,
"grad" : DEFAULT_GRAD_LARGE_TENSOR,
"mean" : DEFAULT_MEAN_LARGE_TENSOR,
"var" : DEFAULT_VAR_LARGE_TENSOR,
"mom" : DEFAULT_MOM_LARGE_TENSOR,
"r1": DEFAULT_R1_LARGE_TENSOR,
"r2": DEFAULT_R2_LARGE_TENSOR,
"n" : DEFAULT_N_LARGE_TENSOR,
"d" : DEFAULT_D_LARGE_TENSOR,
"v" : DEFAULT_V_LARGE_TENSOR,
"z" : DEFAULT_Z_LARGE_TENSOR,
"g" : DEFAULT_G_LARGE_TENSOR,
"delta" : DEFAULT_DELTA_LARGE_TENSOR,
"lr" : DEFAULT_LR,
"lrs" : DEFAULT_LRS,
"wd": DEFAULT_WD,
"rho" : DEFAULT_RHO,
"momentum" : DEFAULT_MOMENTUM,
"epsilon" : DEFAULT_EPSILON,
"beta1" : DEFAULT_BETA_1,
"beta2" : DEFAULT_BETA_2,
"t" : DEFAULT_T,
"rescale_grad" : DEFAULT_RESCALE_GRAD,
"clip_grad" : DEFAULT_CLIP_GRADIENT,
"lazy_update" : DEFAULT_LAZY_UPDATE,
"data_4d": DEFAULT_DATA_4d_LARGE_TENSOR,
"dim1": DEFAULT_DIM_1,
"dim2": DEFAULT_DIM_2,
"block_size": DEFAULT_BLOCK_SIZE_LARGE_TENSOR,
"args": DEFAULT_ARGS,
"index": DEFAULT_INDEX_LARGE_TENSOR,
"data_smce": DEFAULT_DATA_SMCE_LARGE_TENSOR,
"label_smce": DEFAULT_LABEL_SMCE_LARGE_TENSOR,
"grid": DEFAULT_GRID_LARGE_TENSOR,
"data_bilinearsampler": DEFAULT_DATA_BILINEAR_LARGE_TENSOR,
"transform_type": DEFAULT_TRANSFORM_TYPE,
"data_gridgenerator": DEFAULT_DATA_GRIDGEN_LARGE_TENSOR,
"target_shape_gridgenerator": DEFAULT_TARGET_SHAPE_LARGE_TENSOR,
"data_sample_multinomial": DEFAULT_DATA_SM_LARGE_TENSOR,
"data_random_pdf_dirichlet": DEFAULT_DATA_RPD_LARGE_TENSOR,
"alpha_random_pdf_dirichlet": DEFAULT_ALPHA_RPD_LARGE_TENSOR,
"sample_random_pdf_gamma": DEFAULT_SAMPLE_RPG_LARGE_TENSOR,
"alpha_random_pdf_gamma": DEFAULT_ALPHA_RPG_LARGE_TENSOR,
"beta_random_pdf_gamma": DEFAULT_BETA_LARGE_TENSOR,
"sample_random_pdf_generalized_negative_binomial": DEFAULT_SAMPLE_RPG_LARGE_TENSOR,
"mu_random_pdf_generalized_negative_binomial": DEFAULT_ALPHA_RPG_LARGE_TENSOR,
"alpha_random_pdf_generalized_negative_binomial": DEFAULT_ALPHA_RPG_LARGE_TENSOR,
"sample_random_pdf_negative_binomial": DEFAULT_SAMPLE_RPG_LARGE_TENSOR,
"k_random_pdf_negative_binomial": DEFAULT_ALPHA_RPG_LARGE_TENSOR,
"p_random_pdf_negative_binomial": DEFAULT_ALPHA_RPG_LARGE_TENSOR,
"sample_random_pdf_normal": DEFAULT_SAMPLE_RPG_LARGE_TENSOR,
"mu_random_pdf_normal": DEFAULT_ALPHA_RPG_LARGE_TENSOR,
"sigma_random_pdf_normal": DEFAULT_ALPHA_RPG_LARGE_TENSOR,
"sample_random_pdf_poisson": DEFAULT_SAMPLE_RPG_LARGE_TENSOR,
"lam_random_pdf_poisson": DEFAULT_ALPHA_RPG_LARGE_TENSOR,
"sample_random_pdf_uniform": DEFAULT_SAMPLE_RPG_LARGE_TENSOR,
"low_random_pdf_uniform": DEFAULT_ALPHA_RPG_LARGE_TENSOR,
"high_random_pdf_uniform": DEFAULT_ALPHA_RPG_LARGE_TENSOR,
"shape_sample_exponential": DEFAULT_SHAPE_SE_LARGE_TENSOR,
"lam_sample_exponential": DEFAULT_LAM_SE_LARGE_TENSOR,
"mu_sample_normal": DEFAULT_LAM_SE_LARGE_TENSOR,
"sigma_sample_normal": DEFAULT_LAM_SE_LARGE_TENSOR,
"shape_sample_poisson": DEFAULT_LAM_SE_LARGE_TENSOR,
"lam_sample_poisson": DEFAULT_SHAPE_SE_LARGE_TENSOR,
"shape_sample_uniform": DEFAULT_SHAPE_SU_LARGE_TENSOR,
"low_sample_uniform": DEFAULT_LAM_SE_LARGE_TENSOR,
"high_sample_uniform": DEFAULT_LAM_SE_LARGE_TENSOR,
"alpha_sample_gamma": DEFAULT_SHAPE_SU_LARGE_TENSOR,
"beta_sample_gamma": DEFAULT_SHAPE_SU_LARGE_TENSOR,
"mu_sample_generalized_negative_binomial": DEFAULT_SHAPE_SU_LARGE_TENSOR,
"shape_sample_generalized_negative_binomial": DEFAULT_SHAPE_SU_LARGE_TENSOR,
"alpha_sample_generalized_negative_binomial": DEFAULT_SHAPE_SU_LARGE_TENSOR,
"shape_sample_negative_binomial": DEFAULT_SHAPE_SU_LARGE_TENSOR,
"k_sample_negative_binomial": DEFAULT_SHAPE_SU_LARGE_TENSOR,
"p_sample_negative_binomial": DEFAULT_SHAPE_SU_LARGE_TENSOR,
"A": DEFAULT_A_LARGE_TENSOR,
"B": DEFAULT_B_LARGE_TENSOR,
"C": DEFAULT_C_LARGE_TENSOR,
"A_linalg_maketrian": DEFAULT_A_MT_LARGE_TENSOR,
"axes": DEFAULT_AXES,
"act_type_leakyrelu": DEFAULT_ACT_TYPE_LR,
"label_softmax": DEFAULT_LABEL_SOFTMAX_LARGE_TENSOR,
"act_type_activation": DEFAULT_ACT_TYPE_ACTIVATION,
"data_squeeze": DEFAULT_DATA_SQUEEZE_LARGE_TENSOR,
"axis_squeeze": DEFAULT_AXIS_SQUEEZE_LARGE_TENSOR,
"a_min": DEFAULT_A_MIN,
"a_max": DEFAULT_A_MAX,
"weights_sum_sq": DEFAULT_WSS_LARGE_TENSOR,
"grads_sum_sq": DEFAULT_GSS_LARGE_TENSOR,
"wds": DEFAULT_WDS_LARGE_TENSOR,
"eta": DEFAULT_ETA,
"eps": DEFAULT_EPSILON,
"stype": DEFAULT_STYPE,
"indices": DEFAULT_INDICES,
"begin": DEFAULT_BEGIN,
"end": DEFAULT_END,
"shape_like": DEFAULT_DATA_LARGE_TENSOR,
"depth": DEFAULT_DEPTH,
"condition": DEFAULT_X_LARGE_TENSOR,
"x": DEFAULT_X_LARGE_TENSOR,
"y": DEFAULT_X_LARGE_TENSOR,
"ravel_data": RAVEL_DATA_LARGE_TENSOR,
"a": DEFAULT_A_LARGE_TENSOR,
"lhs_fill_element_0index": DEFAULT_LHS_FEI_LARGE_TENSOR,
"rhs_fill_element_0index": DEFAULT_RHS_FEI_LARGE_TENSOR,
"mhs": DEFAULT_MHS_LARGE_TENSOR,
"lrs_multi_lars": DEFAULT_WSS_LARGE_TENSOR,
"data_softmax": DEFAULT_LABEL_SOFTMAX_LARGE_TENSOR,
"data_spatialtransformer": DEFAULT_DATA_ST_LARGE_TENSOR,
"loc_spatialtransformer": DEFAULT_LOC_TAR_ST_LARGE_TENSOR,
"target_shape": DEFAULT_LOC_TAR_ST_LARGE_TENSOR,
"transform_type_spatialtransformer": DEFAULT_TRANSFORM,
"sampler_type": DEFAULT_SAMPLER,
"data_col2im": DEFAULT_DATA_C2I_LARGE_TENSOR,
"output_size": DEFAULT_OUTPUT_SIZE_LARGE_TENSOR,
"kernel_col2im": DEFAULT_KERNEL_LARGE_TENSOR,
"stride_col2im": DEFAULT_STRIDE_LARGE_TENSOR,
"data_ctcloss": DEFAULT_DATA_CTCLOSS,
"label_ctcloss": DEFAULT_LABEL_LARGE_TENSOR,
"data_ctc_loss": DEFAULT_DATA_CTCLOSS,
"label_ctc_loss": DEFAULT_LABEL_LARGE_TENSOR,
"parameters": DEFAULT_PARAMETERS_LARGE_TENSOR,
"state": DEFAULT_STATE_LARGE_TENSOR,
"state_size": DEFAULT_STATE_SIZE_LARGE_TENSOR,
"num_layers": DEFAULT_NUM_LAYERS_LARGE_TENSOR,
"data_groupnorm": DEFAULT_DATA_GN_LARGE_TENSOR,
"gamma_groupnorm": DEFAULT_BETA_GAMMA_GN_LARGE_TENSOR,
"beta_groupnorm": DEFAULT_BETA_GAMMA_GN_LARGE_TENSOR,
"data_dropout": DEFAULT_DATA_DROPOUT_LARGE_TENSOR,
"mode_dropout": DEFAULT_MODE_DROPOUT,
"p_dropout": DEFAULT_P_DROPOUT_LARGE_TENSOR,
"axes_dropout": DEFAULT_AXES_DROPOUT_LARGE_TENSOR,
"data_nn_basic": DEFAULT_DATA_NN_BASIC_LARGE_TENSOR,
"num_hidden": DEFAULT_NUM_HIDDEN_LARGE_TENSOR,
"data_fullyconnected": DEFAULT_DATA_FC_LARGE_TENSOR,
"weight_fullyconnected": DEFAULT_WEIGHT_FC_LARGE_TENSOR,
"num_hidden_fullyconnected": DEFAULT_NUM_HIDDEN_FC_LARGE_TENSOR,
"weight_embedding": DEFAULT_WEIGHT_EMBEDDING_LARGE_TENSOR,
"bias": DEFAULT_BIAS_LARGE_TENSOR,
"flatten": DEFAULT_FLATTEN_LARGE_TENSOR,
"data_batchnorm": DEFAULT_DATA_NN_BASIC_LARGE_TENSOR,
"gamma_batchnorm": DEFAULT_GAMMA_LARGE_TENSOR,
"beta_batchnorm": DEFAULT_BETA_LARGE_TENSOR,
"moving_mean_batchnorm": DEFAULT_MOVING_MEAN_LARGE_TENSOR,
"moving_var_batchnorm": DEFAULT_MOVING_VAR_LARGE_TENSOR,
"axis_batchnorm": DEFAULT_AXIS_BN,
"data_softmaxoutput": DEFAULT_DATA_SO_LARGE_TENSOR,
"label_softmaxoutput": DEFAULT_LABEL_SO_LARGE_TENSOR,
"data_maeregressionoutput": DEFAULT_DATA_REG_LARGE_TENSOR,
"label_maeregressionoutput": DEFAULT_LABEL_REG_LARGE_TENSOR,
"data_logisticregressionoutput": DEFAULT_DATA_REG_LARGE_TENSOR,
"label_logisticregressionoutput": DEFAULT_LABEL_REG_LARGE_TENSOR,
"data_linearregressionoutput": DEFAULT_DATA_REG_LARGE_TENSOR,
"label_linearregressionoutput": DEFAULT_LABEL_REG_LARGE_TENSOR,
"data_svmoutput": DEFAULT_DATA_SVM_LARGE_TENSOR,
"label_svmoutput": DEFAULT_LABEL_SVM_LARGE_TENSOR,
"grad_scale": DEFAULT_GRAD_SCALE,
"normalization": DEFAULT_NORMALIZATION,
"margin": DEFAULT_MARGIN,
"regularization_coefficient": DEFAULT_REG_COEFF,
"data_l2normalization": DEFAULT_DATA_NORM_LARGE_TENSOR,
"mode_l2normalization": DEFAULT_MODE_L2,
"gamma_layernorm": DEFAULT_GAMMA_NORM_LARGE_TENSOR,
"beta_layernorm": DEFAULT_BETA_NORM_LARGE_TENSOR,
"data_instancenorm": DEFAULT_DATA_NORM_LARGE_TENSOR,
"gamma_instancenorm": DEFAULT_GAMMA_NORM_LARGE_TENSOR,
"beta_instancenorm": DEFAULT_GAMMA_NORM_LARGE_TENSOR,
"input_dim": DEFAULT_INPUT_DIM_LARGE_TENSOR,
"output_dim": DEFAULT_OUTPUT_DIM_LARGE_TENSOR,
"sparse_grad": DEFAULT_SPARSE_GRAD,
"data1": DEFAULT_DATA1_LARGE_TENSOR,
"data2": DEFAULT_DATA2_LARGE_TENSOR,
"kernel_size": DEFAULT_KERNEL_SIZE_LARGE_TENSOR,
"max_displacement": DEFAULT_MAX_DISPLACEMENT_LARGE_TENSOR,
"stride1": DEFAULT_STRIDE_1_LARGE_TENSOR,
"stride2": DEFAULT_STRIDE_2_LARGE_TENSOR,
"data_im2col": DEFAULT_DATA_I2C_LARGE_TENSOR,
"kernel_im2col": DEFAULT_KERNEL_I2C_LARGE_TENSOR,
"stride_im2col": DEFAULT_STRIDE_I2C_LARGE_TENSOR,
"dilate_im2col": DEFAULT_DILATE_LARGE_TENSOR,
"pad_im2col": DEFAULT_PAD_LARGE_TENSOR,
"data_lrn": DEFAULT_DATA_LRN_LARGE_TENSOR,
"alpha_lrn": DEFAULT_ALPHA,
"beta_lrn": DEFAULT_BETA_LRN,
"nsize": DEFAULT_NSIZE,
"data_layernorm": DEFAULT_DATA_NORM_LARGE_TENSOR,
"axis_layernorm": DEFAULT_AXIS_LARGE_TENSOR}
# These are names of MXNet operator parameters that are of type NDArray.
# We maintain this list to automatically recognize that these parameters are to be
# given as NDArray, and to translate user inputs such as a shape tuple, NumPy array or
# a list into an MXNet NDArray. This is a convenience so benchmark utility users
# can simply specify the shape of a tensor and we create the tensors automatically.
PARAMS_OF_TYPE_NDARRAY = ["lhs", "rhs", "data", "base", "exp", "sample",
"mu", "sigma", "lam", "alpha", "beta", "gamma", "k", "p",
"low", "high", "weight", "bias", "moving_mean", "moving_var",
"weight", "weight32", "grad", "mean", "var", "mom", "n", "d",
"v", "z", "g", "delta", "args", "indices", "shape_like", "y",
"x", "condition", "a", "index", "raveL_data", "label", "grid",
"A", "B", "C", "r1", "r2", "rois", "lrs", "wds", "weights_sum_sq",
"grads_sum_sq", "mhs", "data1", "data2", "loc", "parameters", "state",
"state_cell"]
|
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
import pkg_resources
import google.auth # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.api_core import operations_v1
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.aiplatform_v1beta1.types import study
from google.cloud.aiplatform_v1beta1.types import study as gca_study
from google.cloud.aiplatform_v1beta1.types import vizier_service
from google.longrunning import operations_pb2 # type: ignore
from google.protobuf import empty_pb2 # type: ignore
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-aiplatform",
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
class VizierServiceTransport(abc.ABC):
"""Abstract transport class for VizierService."""
AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
DEFAULT_HOST: str = "aiplatform.googleapis.com"
def __init__(
self,
*,
host: str = DEFAULT_HOST,
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
**kwargs,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
host += ":443"
self._host = host
scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
# Save the scopes.
self._scopes = scopes
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials and credentials_file:
raise core_exceptions.DuplicateCredentialArgs(
"'credentials_file' and 'credentials' are mutually exclusive"
)
if credentials_file is not None:
credentials, _ = google.auth.load_credentials_from_file(
credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
)
elif credentials is None:
credentials, _ = google.auth.default(
**scopes_kwargs, quota_project_id=quota_project_id
)
# If the credentials are service account credentials, then always try to use self signed JWT.
if (
always_use_jwt_access
and isinstance(credentials, service_account.Credentials)
and hasattr(service_account.Credentials, "with_always_use_jwt_access")
):
credentials = credentials.with_always_use_jwt_access(True)
# Save the credentials.
self._credentials = credentials
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
self.create_study: gapic_v1.method.wrap_method(
self.create_study, default_timeout=5.0, client_info=client_info,
),
self.get_study: gapic_v1.method.wrap_method(
self.get_study, default_timeout=5.0, client_info=client_info,
),
self.list_studies: gapic_v1.method.wrap_method(
self.list_studies, default_timeout=5.0, client_info=client_info,
),
self.delete_study: gapic_v1.method.wrap_method(
self.delete_study, default_timeout=5.0, client_info=client_info,
),
self.lookup_study: gapic_v1.method.wrap_method(
self.lookup_study, default_timeout=5.0, client_info=client_info,
),
self.suggest_trials: gapic_v1.method.wrap_method(
self.suggest_trials, default_timeout=5.0, client_info=client_info,
),
self.create_trial: gapic_v1.method.wrap_method(
self.create_trial, default_timeout=5.0, client_info=client_info,
),
self.get_trial: gapic_v1.method.wrap_method(
self.get_trial, default_timeout=5.0, client_info=client_info,
),
self.list_trials: gapic_v1.method.wrap_method(
self.list_trials, default_timeout=5.0, client_info=client_info,
),
self.add_trial_measurement: gapic_v1.method.wrap_method(
self.add_trial_measurement,
default_timeout=5.0,
client_info=client_info,
),
self.complete_trial: gapic_v1.method.wrap_method(
self.complete_trial, default_timeout=5.0, client_info=client_info,
),
self.delete_trial: gapic_v1.method.wrap_method(
self.delete_trial, default_timeout=5.0, client_info=client_info,
),
self.check_trial_early_stopping_state: gapic_v1.method.wrap_method(
self.check_trial_early_stopping_state,
default_timeout=5.0,
client_info=client_info,
),
self.stop_trial: gapic_v1.method.wrap_method(
self.stop_trial, default_timeout=5.0, client_info=client_info,
),
self.list_optimal_trials: gapic_v1.method.wrap_method(
self.list_optimal_trials, default_timeout=5.0, client_info=client_info,
),
}
def close(self):
"""Closes resources associated with the transport.
.. warning::
Only call this method if the transport is NOT shared
with other clients - this may cause errors in other clients!
"""
raise NotImplementedError()
@property
def operations_client(self):
"""Return the client designed to process long-running operations."""
raise NotImplementedError()
@property
def create_study(
self,
) -> Callable[
[vizier_service.CreateStudyRequest],
Union[gca_study.Study, Awaitable[gca_study.Study]],
]:
raise NotImplementedError()
@property
def get_study(
self,
) -> Callable[
[vizier_service.GetStudyRequest], Union[study.Study, Awaitable[study.Study]]
]:
raise NotImplementedError()
@property
def list_studies(
self,
) -> Callable[
[vizier_service.ListStudiesRequest],
Union[
vizier_service.ListStudiesResponse,
Awaitable[vizier_service.ListStudiesResponse],
],
]:
raise NotImplementedError()
@property
def delete_study(
self,
) -> Callable[
[vizier_service.DeleteStudyRequest],
Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
]:
raise NotImplementedError()
@property
def lookup_study(
self,
) -> Callable[
[vizier_service.LookupStudyRequest], Union[study.Study, Awaitable[study.Study]]
]:
raise NotImplementedError()
@property
def suggest_trials(
self,
) -> Callable[
[vizier_service.SuggestTrialsRequest],
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def create_trial(
self,
) -> Callable[
[vizier_service.CreateTrialRequest], Union[study.Trial, Awaitable[study.Trial]]
]:
raise NotImplementedError()
@property
def get_trial(
self,
) -> Callable[
[vizier_service.GetTrialRequest], Union[study.Trial, Awaitable[study.Trial]]
]:
raise NotImplementedError()
@property
def list_trials(
self,
) -> Callable[
[vizier_service.ListTrialsRequest],
Union[
vizier_service.ListTrialsResponse,
Awaitable[vizier_service.ListTrialsResponse],
],
]:
raise NotImplementedError()
@property
def add_trial_measurement(
self,
) -> Callable[
[vizier_service.AddTrialMeasurementRequest],
Union[study.Trial, Awaitable[study.Trial]],
]:
raise NotImplementedError()
@property
def complete_trial(
self,
) -> Callable[
[vizier_service.CompleteTrialRequest],
Union[study.Trial, Awaitable[study.Trial]],
]:
raise NotImplementedError()
@property
def delete_trial(
self,
) -> Callable[
[vizier_service.DeleteTrialRequest],
Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
]:
raise NotImplementedError()
@property
def check_trial_early_stopping_state(
self,
) -> Callable[
[vizier_service.CheckTrialEarlyStoppingStateRequest],
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def stop_trial(
self,
) -> Callable[
[vizier_service.StopTrialRequest], Union[study.Trial, Awaitable[study.Trial]]
]:
raise NotImplementedError()
@property
def list_optimal_trials(
self,
) -> Callable[
[vizier_service.ListOptimalTrialsRequest],
Union[
vizier_service.ListOptimalTrialsResponse,
Awaitable[vizier_service.ListOptimalTrialsResponse],
],
]:
raise NotImplementedError()
__all__ = ("VizierServiceTransport",)
|
|
#!/usr/bin/env python3
# Copyright (c) 2014-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the wallet backup features.
Test case is:
4 nodes. 1 2 and 3 send transactions between each other,
fourth node is a miner.
1 2 3 each mine a block to start, then
Miner creates 100 blocks so 1 2 3 each have 250 mature
coins to spend.
Then 5 iterations of 1/2/3 sending coins amongst
themselves to get transactions in the wallets,
and the miner mining one block.
Wallets are backed up using dumpwallet/backupwallet.
Then 5 more iterations of transactions and mining a block.
Miner then generates 101 more blocks, so any
transaction fees paid mature.
Sanity check:
Sum(1,2,3,4 balances) == 114*250
1/2/3 are shutdown, and their wallets erased.
Then restore using wallet.dat backup. And
confirm 1/2/3/4 balances are same as before.
Shutdown again, restore using importwallet,
and confirm again balances are correct.
"""
from random import randint
import shutil
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class WalletBackupTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 4
self.setup_clean_chain = True
        # nodes 1, 2 and 3 are spenders, let's give them a keypool=100
self.extra_args = [["-keypool=100"], ["-keypool=100"], ["-keypool=100"], []]
def setup_network(self, split=False):
self.setup_nodes()
connect_nodes(self.nodes[0], 3)
connect_nodes(self.nodes[1], 3)
connect_nodes(self.nodes[2], 3)
connect_nodes(self.nodes[2], 0)
self.sync_all()
def one_send(self, from_node, to_address):
if (randint(1,2) == 1):
amount = Decimal(randint(1,10)) / Decimal(10)
self.nodes[from_node].sendtoaddress(to_address, float(amount))
def do_one_round(self):
a0 = self.nodes[0].getnewaddress()
a1 = self.nodes[1].getnewaddress()
a2 = self.nodes[2].getnewaddress()
self.one_send(0, a1)
self.one_send(0, a2)
self.one_send(1, a0)
self.one_send(1, a2)
self.one_send(2, a0)
self.one_send(2, a1)
# Have the miner (node3) mine a block.
# Must sync mempools before mining.
sync_mempools(self.nodes)
self.nodes[3].generate(1)
sync_blocks(self.nodes)
# As above, this mirrors the original bash test.
def start_three(self):
self.start_node(0)
self.start_node(1)
self.start_node(2)
connect_nodes(self.nodes[0], 3)
connect_nodes(self.nodes[1], 3)
connect_nodes(self.nodes[2], 3)
connect_nodes(self.nodes[2], 0)
def stop_three(self):
self.stop_node(0)
self.stop_node(1)
self.stop_node(2)
def erase_three(self):
os.remove(self.options.tmpdir + "/node0/regtest/wallet.dat")
os.remove(self.options.tmpdir + "/node1/regtest/wallet.dat")
os.remove(self.options.tmpdir + "/node2/regtest/wallet.dat")
def run_test(self):
self.log.info("Generating initial blockchain")
self.nodes[0].generate(1)
sync_blocks(self.nodes)
self.nodes[1].generate(1)
sync_blocks(self.nodes)
self.nodes[2].generate(1)
sync_blocks(self.nodes)
self.nodes[3].generate(100)
sync_blocks(self.nodes)
assert_equal(self.nodes[0].getbalance(), 250)
assert_equal(self.nodes[1].getbalance(), 250)
assert_equal(self.nodes[2].getbalance(), 250)
assert_equal(self.nodes[3].getbalance(), 0)
self.log.info("Creating transactions")
# Five rounds of sending each other transactions.
for i in range(5):
self.do_one_round()
self.log.info("Backing up")
tmpdir = self.options.tmpdir
self.nodes[0].backupwallet(tmpdir + "/node0/wallet.bak")
self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.dump")
self.nodes[1].backupwallet(tmpdir + "/node1/wallet.bak")
self.nodes[1].dumpwallet(tmpdir + "/node1/wallet.dump")
self.nodes[2].backupwallet(tmpdir + "/node2/wallet.bak")
self.nodes[2].dumpwallet(tmpdir + "/node2/wallet.dump")
self.log.info("More transactions")
for i in range(5):
self.do_one_round()
# Generate 101 more blocks, so any fees paid mature
self.nodes[3].generate(101)
self.sync_all()
balance0 = self.nodes[0].getbalance()
balance1 = self.nodes[1].getbalance()
balance2 = self.nodes[2].getbalance()
balance3 = self.nodes[3].getbalance()
total = balance0 + balance1 + balance2 + balance3
        # At this point, there are 214 blocks (103 for setup, then 10 rounds, then 101).
# 114 are mature, so the sum of all wallets should be 114 * 250 = 28500.
assert_equal(total, 28500)
##
# Test restoring spender wallets from backups
##
self.log.info("Restoring using wallet.dat")
self.stop_three()
self.erase_three()
# Start node2 with no chain
shutil.rmtree(self.options.tmpdir + "/node2/regtest/blocks")
shutil.rmtree(self.options.tmpdir + "/node2/regtest/chainstate")
# Restore wallets from backup
shutil.copyfile(tmpdir + "/node0/wallet.bak", tmpdir + "/node0/regtest/wallet.dat")
shutil.copyfile(tmpdir + "/node1/wallet.bak", tmpdir + "/node1/regtest/wallet.dat")
shutil.copyfile(tmpdir + "/node2/wallet.bak", tmpdir + "/node2/regtest/wallet.dat")
self.log.info("Re-starting nodes")
self.start_three()
sync_blocks(self.nodes)
assert_equal(self.nodes[0].getbalance(), balance0)
assert_equal(self.nodes[1].getbalance(), balance1)
assert_equal(self.nodes[2].getbalance(), balance2)
self.log.info("Restoring using dumped wallet")
self.stop_three()
self.erase_three()
        # Start node2 with no chain
shutil.rmtree(self.options.tmpdir + "/node2/regtest/blocks")
shutil.rmtree(self.options.tmpdir + "/node2/regtest/chainstate")
self.start_three()
assert_equal(self.nodes[0].getbalance(), 0)
assert_equal(self.nodes[1].getbalance(), 0)
assert_equal(self.nodes[2].getbalance(), 0)
self.nodes[0].importwallet(tmpdir + "/node0/wallet.dump")
self.nodes[1].importwallet(tmpdir + "/node1/wallet.dump")
self.nodes[2].importwallet(tmpdir + "/node2/wallet.dump")
sync_blocks(self.nodes)
assert_equal(self.nodes[0].getbalance(), balance0)
assert_equal(self.nodes[1].getbalance(), balance1)
assert_equal(self.nodes[2].getbalance(), balance2)
# Backup to source wallet file must fail
sourcePaths = [
tmpdir + "/node0/regtest/wallet.dat",
tmpdir + "/node0/./regtest/wallet.dat",
tmpdir + "/node0/regtest/",
tmpdir + "/node0/regtest"]
for sourcePath in sourcePaths:
assert_raises_rpc_error(-4, "backup failed", self.nodes[0].backupwallet, sourcePath)
if __name__ == '__main__':
WalletBackupTest().main()
|
|
# Copyright 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import tabs
from horizon.utils import memoized
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.networks.ports \
import forms as project_forms
from openstack_dashboard.dashboards.project.networks.ports \
import tables as project_tables
from openstack_dashboard.dashboards.project.networks.ports \
import tabs as project_tabs
STATE_DICT = dict(project_tables.DISPLAY_CHOICES)
STATUS_DICT = dict(project_tables.STATUS_DISPLAY_CHOICES)
VNIC_TYPES = dict(project_forms.VNIC_TYPES)
class CreateView(forms.ModalFormView):
form_class = project_forms.CreatePort
form_id = "create_port_form"
modal_header = _("Create Port")
submit_label = _("Create Port")
submit_url = "horizon:project:networks:addport"
page_title = _("Create Port")
template_name = 'project/networks/ports/create.html'
url = 'horizon:project:networks:detail'
def get_success_url(self):
return reverse(self.url,
args=(self.kwargs['network_id'],))
@memoized.memoized_method
def get_network(self):
try:
network_id = self.kwargs["network_id"]
return api.neutron.network_get(self.request, network_id)
except Exception:
redirect = reverse(self.url,
args=(self.kwargs['network_id'],))
msg = _("Unable to retrieve network.")
exceptions.handle(self.request, msg, redirect=redirect)
def get_context_data(self, **kwargs):
context = super(CreateView, self).get_context_data(**kwargs)
context['network'] = self.get_network()
args = (self.kwargs['network_id'],)
context['submit_url'] = reverse(self.submit_url, args=args)
context['cancel_url'] = reverse(self.url, args=args)
return context
def get_initial(self):
network = self.get_network()
return {"network_id": self.kwargs['network_id'],
"network_name": network.name}
class DetailView(tabs.TabbedTableView):
tab_group_class = project_tabs.PortDetailTabs
template_name = 'horizon/common/_detail.html'
page_title = "{{ port.name|default:port.id }}"
@memoized.memoized_method
def get_data(self):
port_id = self.kwargs['port_id']
try:
port = api.neutron.port_get(self.request, port_id)
port.admin_state_label = STATE_DICT.get(port.admin_state,
port.admin_state)
port.status_label = STATUS_DICT.get(port.status,
port.status)
if port.get('binding__vnic_type'):
port.binding__vnic_type = VNIC_TYPES.get(
port.binding__vnic_type, port.binding__vnic_type)
except Exception:
port = []
redirect = self.get_redirect_url()
msg = _('Unable to retrieve port details.')
exceptions.handle(self.request, msg, redirect=redirect)
if (api.neutron.is_extension_supported(self.request, 'mac-learning')
and not hasattr(port, 'mac_state')):
port.mac_state = api.neutron.OFF_STATE
return port
@memoized.memoized_method
def get_network(self, network_id):
try:
network = api.neutron.network_get(self.request, network_id)
except Exception:
network = {}
msg = _('Unable to retrieve network details.')
exceptions.handle(self.request, msg)
return network
def get_context_data(self, **kwargs):
context = super(DetailView, self).get_context_data(**kwargs)
port = self.get_data()
network_url = "horizon:project:networks:detail"
subnet_url = "horizon:project:networks:subnets:detail"
network = self.get_network(port.network_id)
port.network_name = network.get('name')
port.network_url = reverse(network_url, args=[port.network_id])
for ip in port.fixed_ips:
ip['subnet_url'] = reverse(subnet_url, args=[ip['subnet_id']])
table = project_tables.PortsTable(self.request,
network_id=port.network_id)
# TODO(robcresswell) Add URL for "Ports" crumb after bug/1416838
breadcrumb = [
((port.network_name or port.network_id), port.network_url),
(_("Ports"), None)
]
context["custom_breadcrumb"] = breadcrumb
context["port"] = port
context["url"] = self.get_redirect_url()
context["actions"] = table.render_row_actions(port)
return context
def get_tabs(self, request, *args, **kwargs):
port = self.get_data()
return self.tab_group_class(request, port=port, **kwargs)
@staticmethod
def get_redirect_url():
return reverse('horizon:project:networks:index')
class UpdateView(forms.ModalFormView):
form_class = project_forms.UpdatePort
form_id = "update_port_form"
template_name = 'project/networks/ports/update.html'
context_object_name = 'port'
submit_label = _("Save Changes")
submit_url = "horizon:project:networks:editport"
success_url = 'horizon:project:networks:detail'
page_title = _("Update Port")
def get_success_url(self):
return reverse(self.success_url,
args=(self.kwargs['network_id'],))
@memoized.memoized_method
def _get_object(self, *args, **kwargs):
port_id = self.kwargs['port_id']
try:
return api.neutron.port_get(self.request, port_id)
except Exception:
redirect = self.get_success_url()
msg = _('Unable to retrieve port details')
exceptions.handle(self.request, msg, redirect=redirect)
def get_context_data(self, **kwargs):
context = super(UpdateView, self).get_context_data(**kwargs)
port = self._get_object()
context['port_id'] = port['id']
context['network_id'] = port['network_id']
args = (self.kwargs['network_id'], self.kwargs['port_id'],)
context['submit_url'] = reverse(self.submit_url, args=args)
context['cancel_url'] = reverse(self.success_url,
args=(self.kwargs['network_id'],))
return context
def get_initial(self):
port = self._get_object()
initial = {'port_id': port['id'],
'network_id': port['network_id'],
'tenant_id': port['tenant_id'],
'name': port['name'],
'admin_state': port['admin_state_up'],
'mac_address': port['mac_address']}
if port.get('binding__vnic_type'):
initial['binding__vnic_type'] = port['binding__vnic_type']
try:
initial['mac_state'] = port['mac_learning_enabled']
except Exception:
# MAC Learning is not set
pass
if 'port_security_enabled' in port:
initial['port_security_enabled'] = port['port_security_enabled']
return initial
|
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Abhijeet Kasurde <akasurde@redhat.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = r'''
---
module: vmware_drs_rule_info
short_description: Gathers info about DRS rule on the given cluster
description:
- 'This module can be used to gather information about DRS VM-VM and VM-HOST rules from the given cluster.'
version_added: '2.9'
author:
- Abhijeet Kasurde (@Akasurde)
notes:
- Tested on vSphere 6.5
requirements:
- python >= 2.6
- PyVmomi
options:
cluster_name:
description:
- Name of the cluster.
- DRS information for the given cluster will be returned.
    - This is a required parameter if the C(datacenter) parameter is not provided.
type: str
datacenter:
description:
- Name of the datacenter.
- DRS information for all the clusters from the given datacenter will be returned.
    - This is a required parameter if the C(cluster_name) parameter is not provided.
type: str
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = r'''
- name: Gather DRS info about given Cluster
vmware_drs_rule_info:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
cluster_name: '{{ cluster_name }}'
delegate_to: localhost
register: cluster_drs_info
- name: Gather DRS info about all Clusters in given datacenter
vmware_drs_rule_info:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
datacenter: '{{ datacenter_name }}'
delegate_to: localhost
register: datacenter_drs_info
'''
RETURN = r'''
drs_rule_info:
description: metadata about DRS rule from given cluster / datacenter
returned: always
type: dict
sample: {
"DC0_C0": [
{
"rule_affinity": false,
"rule_enabled": true,
"rule_key": 1,
"rule_mandatory": true,
"rule_name": "drs_rule_0001",
"rule_type": "vm_vm_rule",
"rule_uuid": "52be5061-665a-68dc-3d25-85cd2d37e114",
"rule_vms": [
"VM_65",
"VM_146"
]
},
],
"DC1_C1": [
{
"rule_affine_host_group_name": "host_group_1",
"rule_affine_hosts": [
"10.76.33.204"
],
"rule_anti_affine_host_group_name": null,
"rule_anti_affine_hosts": [],
"rule_enabled": true,
"rule_key": 1,
"rule_mandatory": false,
"rule_name": "vm_host_rule_0001",
"rule_type": "vm_host_rule",
"rule_uuid": "52687108-4d3a-76f2-d29c-b708c40dbe40",
"rule_vm_group_name": "test_vm_group_1",
"rule_vms": [
"VM_8916",
"VM_4010"
]
}
],
}
'''
try:
from pyVmomi import vim
except ImportError:
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vmware import vmware_argument_spec, PyVmomi, find_datacenter_by_name, get_all_objs
class VmwareDrsInfoManager(PyVmomi):
def __init__(self, module):
super(VmwareDrsInfoManager, self).__init__(module)
datacenter_name = self.params.get('datacenter', None)
if datacenter_name:
datacenter_obj = find_datacenter_by_name(self.content, datacenter_name=datacenter_name)
self.cluster_obj_list = []
if datacenter_obj:
folder = datacenter_obj.hostFolder
self.cluster_obj_list = get_all_objs(self.content, [vim.ClusterComputeResource], folder)
else:
self.module.fail_json(changed=False, msg="Datacenter '%s' not found" % datacenter_name)
cluster_name = self.params.get('cluster_name', None)
if cluster_name:
cluster_obj = self.find_cluster_by_name(cluster_name=cluster_name)
if cluster_obj is None:
self.module.fail_json(changed=False, msg="Cluster '%s' not found" % cluster_name)
else:
self.cluster_obj_list = [cluster_obj]
def get_all_from_group(self, group_name=None, cluster_obj=None, hostgroup=False):
"""
Return all VM / Host names using given group name
Args:
            group_name: Name of the DRS group to look up
            cluster_obj: Cluster managed object
            hostgroup: True if we want only host names from the group
        Returns: List of VM / Host names belonging to the given group object
"""
obj_name_list = []
if not all([group_name, cluster_obj]):
return obj_name_list
for group in cluster_obj.configurationEx.group:
if group.name == group_name:
if not hostgroup and isinstance(group, vim.cluster.VmGroup):
obj_name_list = [vm.name for vm in group.vm]
break
elif hostgroup and isinstance(group, vim.cluster.HostGroup):
obj_name_list = [host.name for host in group.host]
break
return obj_name_list
@staticmethod
def normalize_vm_vm_rule_spec(rule_obj=None):
"""
Return human readable rule spec
Args:
rule_obj: Rule managed object
Returns: Dictionary with DRS VM VM Rule info
"""
if rule_obj is None:
return {}
return dict(rule_key=rule_obj.key,
rule_enabled=rule_obj.enabled,
rule_name=rule_obj.name,
rule_mandatory=rule_obj.mandatory,
rule_uuid=rule_obj.ruleUuid,
rule_vms=[vm.name for vm in rule_obj.vm],
rule_type="vm_vm_rule",
rule_affinity=True if isinstance(rule_obj, vim.cluster.AffinityRuleSpec) else False,
)
def normalize_vm_host_rule_spec(self, rule_obj=None, cluster_obj=None):
"""
Return human readable rule spec
Args:
rule_obj: Rule managed object
cluster_obj: Cluster managed object
Returns: Dictionary with DRS VM HOST Rule info
"""
if not all([rule_obj, cluster_obj]):
return {}
return dict(rule_key=rule_obj.key,
rule_enabled=rule_obj.enabled,
rule_name=rule_obj.name,
rule_mandatory=rule_obj.mandatory,
rule_uuid=rule_obj.ruleUuid,
rule_vm_group_name=rule_obj.vmGroupName,
rule_affine_host_group_name=rule_obj.affineHostGroupName,
rule_anti_affine_host_group_name=rule_obj.antiAffineHostGroupName,
rule_vms=self.get_all_from_group(group_name=rule_obj.vmGroupName,
cluster_obj=cluster_obj),
rule_affine_hosts=self.get_all_from_group(group_name=rule_obj.affineHostGroupName,
cluster_obj=cluster_obj,
hostgroup=True),
rule_anti_affine_hosts=self.get_all_from_group(group_name=rule_obj.antiAffineHostGroupName,
cluster_obj=cluster_obj,
hostgroup=True),
rule_type="vm_host_rule",
)
def gather_drs_rule_info(self):
"""
Gather DRS rule information about given cluster
Returns: Dictionary of clusters with DRS information
"""
cluster_rule_info = dict()
for cluster_obj in self.cluster_obj_list:
cluster_rule_info[cluster_obj.name] = []
for drs_rule in cluster_obj.configuration.rule:
if isinstance(drs_rule, vim.cluster.VmHostRuleInfo):
cluster_rule_info[cluster_obj.name].append(self.normalize_vm_host_rule_spec(
rule_obj=drs_rule,
cluster_obj=cluster_obj))
else:
cluster_rule_info[cluster_obj.name].append(self.normalize_vm_vm_rule_spec(rule_obj=drs_rule))
return cluster_rule_info
def main():
argument_spec = vmware_argument_spec()
argument_spec.update(
datacenter=dict(type='str', required=False),
cluster_name=dict(type='str', required=False),
)
module = AnsibleModule(
argument_spec=argument_spec,
required_one_of=[
['cluster_name', 'datacenter'],
],
supports_check_mode=True,
)
vmware_drs_info = VmwareDrsInfoManager(module)
module.exit_json(changed=False, drs_rule_info=vmware_drs_info.gather_drs_rule_info())
if __name__ == "__main__":
main()
|
|
#!/usr/bin/env python
"""
Analyze docstrings to detect errors.
If no argument is provided, it does a quick check of docstrings and returns
a csv with all API functions and results of basic checks.
If a function or method is provided in the form 'pandas.function',
'pandas.module.class.method', etc., a list of all errors in the docstring for
the specified function or method is reported.
Usage::
$ ./validate_docstrings.py
$ ./validate_docstrings.py pandas.DataFrame.head
"""
import argparse
import ast
import collections
import doctest
import functools
import glob
import importlib
import inspect
from io import StringIO
import json
import os
import pydoc
import re
import string
import sys
import tempfile
import textwrap
import flake8.main.application
import matplotlib
import numpy
from numpydoc.docscrape import NumpyDocString
import pandas
from pandas.io.formats.printing import pprint_thing
import statsmodels.api
# A non-interactive backend keeps matplotlib from plotting anything. This is
# useful to avoid plot windows being opened by the doctests while running the
# script. It must be set before pyplot is first imported.
# We don't warn about the number of open figures, as none are actually shown.
matplotlib.use("agg")
matplotlib.rc("figure", max_open_warning=10000)
BASE_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, os.path.join(BASE_PATH))
# TODO: Single-line ignore GL01, GL02
# TODO: Complete import location list
# TODO: Recurse through module to find classes
# TODO: Separate data from code
NO_RETURN_WHITELIST = {
"bse",
"aic",
"bic",
"params",
"tvalues",
"pvalues",
"fvalue",
"fittedvalues",
"mse_model",
"mse_resid",
"mse_total",
"ssr",
"nobs",
"rsquared_adj",
"rsquared",
"resid",
"prsquared",
"zvalues",
"uncentered_tss",
"llf",
"hqic",
"ess",
"centered_tss",
"wresid",
"f_pvalue",
"eigenvals",
"condition_number",
"scale",
"llr",
"llr_pvalue",
"llnull",
"resid_response",
"resid_pearson",
"resid_generalized",
"resid_dev",
"save",
}
MODULES = (
"api",
"tsa.api",
"discrete.conditional_models",
"discrete.count_model",
"discrete.discrete_model",
"duration.hazard_regression",
"regression.linear_model",
"regression.mixed_linear_model",
"regression.process_regression",
"regression.quantile_regression",
"regression.recursive_ls",
"robust.robust_linear_model",
"robust.norms",
"stats.anova",
"stats.mediation",
"tsa.seasonal",
)
ALLOW_SINGLE_LINE_DOCSTRINGS = True
members = []
for mod in MODULES:
imp = importlib.import_module("statsmodels." + mod)
members += inspect.getmembers(imp)
API_CLASSES = []
for member in members:
if not member[0]:
continue
if member[0][0] in string.ascii_uppercase:
if not type(member[1]) is type:
continue
name = str(member[1]).split("'")[1]
if not name.startswith("statsmodels."):
continue
API_CLASSES.append((name, member[1]))
API_CLASSES = sorted(set(API_CLASSES), key=lambda v: v[0])
RUN_DOCTESTS = False
sys.path.insert(1, os.path.join(BASE_PATH, "doc", "sphinxext"))
PRIVATE_CLASSES = []
DIRECTIVES = ["versionadded", "versionchanged", "deprecated"]
ALLOWED_SECTIONS = [
"Parameters",
"Attributes",
"Methods",
"Returns",
"Yields",
"Other Parameters",
"Raises",
"Warns",
"See Also",
"Notes",
"References",
"Examples",
]
ERROR_MSGS = {
"GL01": "Docstring text (summary) should start in the line immediately "
"after the opening quotes (not in the same line, or leaving a "
"blank line in between)",
"GL02": "Closing quotes should be placed in the line after the last text "
"in the docstring (do not close the quotes in the same line as "
"the text, or leave a blank line between the last text and the "
"quotes)",
"GL03": "Double line break found; please use only one blank line to "
"separate sections or paragraphs, and do not leave blank lines "
"at the end of docstrings",
"GL04": "Private classes ({mentioned_private_classes}) should not be "
"mentioned in public docstrings",
"GL05": 'Tabs found at the start of line "{line_with_tabs}", please use '
"whitespace only",
"GL06": 'Found unknown section "{section}". Allowed sections are: '
"{allowed_sections}",
"GL07": "Sections are in the wrong order. "
"Correct order is: {correct_sections}",
"GL08": "The object does not have a docstring",
"GL09": "Deprecation warning should precede extended summary",
"SS01": "No summary found (a short summary in a single line should be "
"present at the beginning of the docstring)",
"SS02": "Summary does not start with a capital letter",
"SS03": "Summary does not end with a period",
"SS04": "Summary contains heading whitespaces",
"SS05": "Summary must start with infinitive verb, not third person "
'(e.g. use "Generate" instead of "Generates")',
"SS06": "Summary should fit in a single line",
"ES01": "No extended summary found",
"PR01": "Parameters {missing_params} not documented",
"PR02": "Unknown parameters {unknown_params}",
"PR03": "Wrong parameters order. Actual: {actual_params}. "
"Documented: {documented_params}",
"PR04": 'Parameter "{param_name}" has no type',
"PR05": 'Parameter "{param_name}" type should not finish with "."',
"PR06": 'Parameter "{param_name}" type should use "{right_type}" instead '
'of "{wrong_type}"',
"PR07": 'Parameter "{param_name}" has no description',
"PR08": 'Parameter "{param_name}" description should start with a '
"capital letter",
"PR09": 'Parameter "{param_name}" description should finish with "."',
"PR10": 'Parameter "{param_name}" requires a space before the colon '
"separating the parameter name and type",
"RT01": "No Returns section found",
"RT02": "The first line of the Returns section should contain only the "
"type, unless multiple values are being returned",
"RT03": "Return value has no description",
"RT04": "Return value description should start with a capital letter",
"RT05": 'Return value description should finish with "."',
"YD01": "No Yields section found",
"SA01": "See Also section not found",
"SA02": "Missing period at end of description for See Also "
'"{reference_name}" reference',
"SA03": "Description should be capitalized for See Also "
'"{reference_name}" reference',
"SA04": 'Missing description for See Also "{reference_name}" reference',
"SA05": "{reference_name} in `See Also` section does not need `pandas` "
"prefix, use {right_reference} instead.",
"EX01": "No examples section found",
"EX02": "Examples do not pass tests:\n{doctest_log}",
"EX03": "flake8 error: {error_code} {error_message}{times_happening}",
"EX04": "Do not import {imported_library}, as it is imported "
"automatically for the examples (numpy as np, pandas as pd)",
}
def error(code, **kwargs):
"""
Return a tuple with the error code and the message with variables replaced.
This is syntactic sugar so instead of:
- `('EX02', ERROR_MSGS['EX02'].format(doctest_log=log))`
We can simply use:
- `error('EX02', doctest_log=log)`
Parameters
----------
code : str
Error code.
**kwargs
Values for the variables in the error messages
Returns
-------
code : str
Error code.
message : str
Error message with variables replaced.
"""
return (code, ERROR_MSGS[code].format(**kwargs))
def get_api_items(api_doc_fd):
"""
Yield information about all public API items.
Parse api.rst file from the documentation, and extract all the functions,
methods, classes, attributes... This should include all pandas public API.
Parameters
----------
api_doc_fd : file descriptor
A file descriptor of the API documentation page, containing the table
of contents with all the public API.
Yields
------
name : str
        The name of the object (e.g. 'pandas.Series.str.upper').
func : function
The object itself. In most cases this will be a function or method,
but it can also be classes, properties, cython objects...
section : str
The name of the section in the API page where the object item is
located.
subsection : str
The name of the subsection in the API page where the object item is
located.
"""
current_module = "statsmodels"
previous_line = current_section = current_subsection = ""
position = None
for line in api_doc_fd:
line = line.strip()
if len(line) == len(previous_line):
if set(line) == set("-"):
current_section = previous_line
continue
if set(line) == set("~"):
current_subsection = previous_line
continue
if line.startswith(".. currentmodule::"):
current_module = line.replace(".. currentmodule::", "").strip()
continue
if line == ".. autosummary::":
position = "autosummary"
continue
if position == "autosummary":
if line == "":
position = "items"
continue
if position == "items":
if line == "":
position = None
continue
item = line.strip()
if item.startswith("~statsmodels."):
path = item.replace("~", "").split(".")
item = path[-1]
current_module = ".".join(path[:-1])
func = importlib.import_module(current_module)
for part in item.split("."):
func = getattr(func, part)
yield (
".".join([current_module, item]),
func,
current_section,
current_subsection,
)
previous_line = line
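# Illustrative example (hypothetical input, not taken from the statsmodels docs):
# given an api.rst fragment such as
#
#     .. currentmodule:: statsmodels.regression.linear_model
#
#     .. autosummary::
#
#        OLS
#
# get_api_items would yield
# ("statsmodels.regression.linear_model.OLS", <class OLS>, section, subsection),
# where section and subsection come from the preceding "-"/"~" underlined titles.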
class Docstring(object):
def __init__(self, name):
self.name = name
obj = self._load_obj(name)
self.obj = obj
self.code_obj = self._to_original_callable(obj)
self.raw_doc = obj.__doc__ or ""
self.clean_doc = pydoc.getdoc(obj)
self.doc = NumpyDocString(self.clean_doc)
def __len__(self):
return len(self.raw_doc)
@staticmethod
def _load_obj(name):
"""
Import Python object from its name as string.
Parameters
----------
name : str
Object name to import (e.g. pandas.Series.str.upper)
Returns
-------
object
Python object that can be a class, method, function...
Examples
--------
>>> Docstring._load_obj('pandas.Series')
<class 'pandas.core.series.Series'>
"""
for maxsplit in range(1, name.count(".") + 1):
func_name_split = name.rsplit(".", maxsplit)
module, *func_parts = func_name_split
try:
obj = importlib.import_module(module)
except ImportError:
pass
else:
continue
if "obj" not in locals():
raise ImportError(
"No module can be imported " 'from "{}"'.format(name)
)
for part in func_parts:
obj = getattr(obj, part)
return obj
@staticmethod
def _to_original_callable(obj):
"""
Find the Python object that contains the source code of the object.
This is useful to find the place in the source code (file and line
number) where a docstring is defined. It does not currently work for
all cases, but it should help find some (properties...).
"""
while True:
if inspect.isfunction(obj) or inspect.isclass(obj):
f = inspect.getfile(obj)
if f.startswith("<") and f.endswith(">"):
return None
return obj
if inspect.ismethod(obj):
obj = obj.__func__
elif isinstance(obj, functools.partial):
obj = obj.func
elif isinstance(obj, property):
obj = obj.fget
else:
return None
@property
def type(self):
return type(self.obj).__name__
@property
def is_function_or_method(self):
return inspect.isfunction(self.obj) or inspect.ismethod(self.obj)
@property
def source_file_name(self):
"""
File name where the object is implemented (e.g. pandas/core/frame.py).
"""
try:
fname = inspect.getsourcefile(self.code_obj)
except TypeError:
# In some cases the object is something complex like a cython
            # object that can't be easily introspected, and it's better to
            # return the source code file of the object as None than to crash.
pass
else:
if fname:
fname = os.path.relpath(fname, BASE_PATH)
return fname
@property
def source_file_def_line(self):
"""
Number of line where the object is defined in its file.
"""
try:
return inspect.getsourcelines(self.code_obj)[-1]
except (OSError, TypeError):
# In some cases the object is something complex like a cython
            # object that can't be easily introspected, and it's better to
            # return the line number as None than to crash.
pass
@property
def github_url(self):
url = "https://github.com/statsmodels/statsmodels/main/"
url += "{}#L{}".format(
self.source_file_name, self.source_file_def_line
)
return url
@property
def start_blank_lines(self):
i = None
if self.raw_doc:
for i, row in enumerate(self.raw_doc.split("\n")):
if row.strip():
break
return i
@property
def end_blank_lines(self):
i = None
if self.raw_doc:
for i, row in enumerate(reversed(self.raw_doc.split("\n"))):
if row.strip():
break
return i
@property
def single_line_docstring(self):
return (
self.raw_doc
and "\n" not in self.raw_doc
and ALLOW_SINGLE_LINE_DOCSTRINGS
)
@property
def double_blank_lines(self):
prev = True
for row in self.raw_doc.split("\n"):
if not prev and not row.strip():
return True
prev = row.strip()
return False
@property
def section_titles(self):
sections = []
self.doc._doc.reset()
while not self.doc._doc.eof():
content = self.doc._read_to_next_section()
if (
len(content) > 1
and len(content[0]) == len(content[1])
and set(content[1]) == {"-"}
):
sections.append(content[0])
return sections
@property
def summary(self):
return " ".join(self.doc["Summary"])
@property
def num_summary_lines(self):
return len(self.doc["Summary"])
@property
def extended_summary(self):
if not self.doc["Extended Summary"] and len(self.doc["Summary"]) > 1:
return " ".join(self.doc["Summary"])
return " ".join(self.doc["Extended Summary"])
@property
def needs_summary(self):
return not (bool(self.summary) and bool(self.extended_summary))
@property
def doc_parameters(self):
return collections.OrderedDict(
(name, (type_, "".join(desc)))
for name, type_, desc in self.doc["Parameters"]
)
@property
def signature_parameters(self):
if inspect.isclass(self.obj):
if hasattr(self.obj, "_accessors") and (
self.name.split(".")[-1] in self.obj._accessors
):
# accessor classes have a signature but don't want to show this
return tuple()
try:
sig = inspect.signature(self.obj)
except (TypeError, ValueError):
# Some objects, mainly in C extensions do not support introspection
# of the signature
try:
from numpydoc.docscrape import FunctionDoc
doc = FunctionDoc(self.obj)
if "Signature" in doc:
sig = doc["Signature"].split("(")[-1].split(")")[0]
sig = sig.split(",")
params = []
for param in sig:
if param.strip() in ("*", "/"):
continue
param = param.split("=")[0]
params.append(param)
return tuple([param.name for param in doc["Parameters"]])
except Exception as exc:
print("!! numpydoc failed on {0}!!".format(str(self.obj)))
print(exc)
return tuple()
params = list(sig.parameters.keys())
out_params = params[:]
kind = inspect.Parameter.VAR_POSITIONAL
varargs = [key for key in params if sig.parameters[key].kind == kind]
if varargs:
out_params = [
"*" + param if param == varargs[0] else param
for param in out_params
]
kind = inspect.Parameter.VAR_KEYWORD
varkw = [key for key in params if sig.parameters[key].kind == kind]
if varkw:
out_params = [
"**" + param if param == varkw[0] else param
for param in out_params
]
params = tuple(out_params)
if params and params[0] in ("self", "cls"):
return params[1:]
return params
@property
def parameter_mismatches(self):
errs = []
signature_params = self.signature_parameters
doc_params = tuple(self.doc_parameters)
missing = set(signature_params) - set(doc_params)
if missing:
errs.append(error("PR01", missing_params=pprint_thing(missing)))
extra = set(doc_params) - set(signature_params)
if extra:
errs.append(error("PR02", unknown_params=pprint_thing(extra)))
if (
not missing
and not extra
and signature_params != doc_params
and not (not signature_params and not doc_params)
):
errs.append(
error(
"PR03",
actual_params=signature_params,
documented_params=doc_params,
)
)
return errs
@property
def correct_parameters(self):
return not bool(self.parameter_mismatches)
def parameter_type(self, param):
return self.doc_parameters[param][0]
def parameter_desc(self, param):
desc = self.doc_parameters[param][1]
# Find and strip out any sphinx directives
for directive in DIRECTIVES:
full_directive = ".. {}".format(directive)
if full_directive in desc:
# Only retain any description before the directive
desc = desc[: desc.index(full_directive)]
return desc
@property
def see_also(self):
result = collections.OrderedDict()
for funcs, desc in self.doc["See Also"]:
for func, _ in funcs:
result[func] = "".join(desc)
return result
@property
def examples(self):
return self.doc["Examples"]
@property
def returns(self):
return self.doc["Returns"]
@property
def yields(self):
return self.doc["Yields"]
@property
def method_source(self):
try:
source = inspect.getsource(self.obj)
except TypeError:
return ""
return textwrap.dedent(source)
@property
def method_returns_something(self):
"""
        Check if the method documented by this docstring can return something.
        Bare returns, returns valued None, and returns from nested functions
        are disregarded.
Returns
-------
bool
            Whether the method documented by this docstring can return something.
"""
def get_returns_not_on_nested_functions(node):
returns = [node] if isinstance(node, ast.Return) else []
for child in ast.iter_child_nodes(node):
# Ignore nested functions and its subtrees.
if not isinstance(child, ast.FunctionDef):
child_returns = get_returns_not_on_nested_functions(child)
returns.extend(child_returns)
return returns
tree = ast.parse(self.method_source).body
if tree:
returns = get_returns_not_on_nested_functions(tree[0])
return_values = [r.value for r in returns]
            # Replace NameConstant nodes whose value is None with plain None.
for i, v in enumerate(return_values):
if isinstance(v, ast.NameConstant) and v.value is None:
return_values[i] = None
return any(return_values)
else:
return False
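    # Illustrative behaviour (hypothetical methods, not from the code base):
    # a method whose source contains `return self.params * 2` is treated as
    # returning something, while a method containing only `return`,
    # `return None`, or returns inside nested helper functions is not,
    # because those cases are filtered out above.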
@property
def no_return_whitelisted(self):
method = self.name.split(".")[-1]
return "Results" in self.name and method in NO_RETURN_WHITELIST
@property
def first_line_ends_in_dot(self):
if self.doc:
return self.doc.split("\n")[0][-1] == "."
@property
def deprecated(self):
return ".. deprecated:: " in (self.summary + self.extended_summary)
@property
def mentioned_private_classes(self):
return [klass for klass in PRIVATE_CLASSES if klass in self.raw_doc]
@property
def examples_errors(self):
flags = doctest.NORMALIZE_WHITESPACE | doctest.IGNORE_EXCEPTION_DETAIL
finder = doctest.DocTestFinder()
runner = doctest.DocTestRunner(optionflags=flags)
context = {"np": numpy, "pd": pandas, "sm": statsmodels.api}
error_msgs = ""
for test in finder.find(self.raw_doc, self.name, globs=context):
f = StringIO()
runner.run(test, out=f.write)
error_msgs += f.getvalue()
return error_msgs
@property
def examples_source_code(self):
lines = doctest.DocTestParser().get_examples(self.raw_doc)
return [line.source for line in lines]
def validate_pep8(self):
if not self.examples:
return
# F401 is needed to not generate flake8 errors in examples
        # that do not use numpy or pandas
content = "".join(
(
"import numpy as np # noqa: F401\n",
"import pandas as pd # noqa: F401\n",
"import statsmodels.api as sm # noqa: F401\n",
*self.examples_source_code,
)
)
application = flake8.main.application.Application()
application.initialize(["--quiet"])
with tempfile.NamedTemporaryFile(mode="w", encoding="utf-8") as file:
file.write(content)
file.flush()
application.run_checks([file.name])
# We need this to avoid flake8 printing the names of the files to
# the standard output
application.formatter.write = lambda line, source: None
application.report()
yield from application.guide.stats.statistics_for("")
def get_validation_data(doc):
"""
Validate the docstring.
Parameters
----------
doc : Docstring
A Docstring object with the given function name.
Returns
-------
tuple
errors : list of tuple
Errors occurred during validation.
warnings : list of tuple
Warnings occurred during validation.
examples_errs : str
            Output from running the examples, displayed along with the error;
            otherwise an empty string.
Notes
-----
    The error codes are defined as:
- First two characters: Section where the error happens:
* GL: Global (no section, like section ordering errors)
* SS: Short summary
* ES: Extended summary
* PR: Parameters
* RT: Returns
* YD: Yields
* RS: Raises
* WN: Warns
* SA: See Also
* NT: Notes
* RF: References
* EX: Examples
- Last two characters: Numeric error code inside the section
For example, EX02 is the second codified error in the Examples section
(which in this case is assigned to examples that do not pass the tests).
    The error codes, their corresponding error messages, and the details on how
    they are validated are only documented in the source code of this
    function.
"""
errs = []
wrns = []
if not doc.raw_doc:
errs.append(error("GL08"))
return errs, wrns, ""
if doc.start_blank_lines != 1 and not doc.single_line_docstring:
errs.append(error("GL01"))
if doc.end_blank_lines != 1 and not doc.single_line_docstring:
errs.append(error("GL02"))
if doc.double_blank_lines:
errs.append(error("GL03"))
mentioned_errs = doc.mentioned_private_classes
if mentioned_errs:
errs.append(
error("GL04", mentioned_private_classes=", ".join(mentioned_errs))
)
for line in doc.raw_doc.splitlines():
if re.match("^ *\t", line):
errs.append(error("GL05", line_with_tabs=line.lstrip()))
unexpected_sections = [
section
for section in doc.section_titles
if section not in ALLOWED_SECTIONS
]
for section in unexpected_sections:
errs.append(
error(
"GL06",
section=section,
allowed_sections=", ".join(ALLOWED_SECTIONS),
)
)
correct_order = [
section
for section in ALLOWED_SECTIONS
if section in doc.section_titles
]
if correct_order != doc.section_titles:
errs.append(error("GL07", correct_sections=", ".join(correct_order)))
if doc.deprecated and not doc.extended_summary.startswith(
".. deprecated:: "
):
errs.append(error("GL09"))
if not doc.summary:
errs.append(error("SS01"))
else:
if not doc.summary[0].isupper():
errs.append(error("SS02"))
if (
doc.summary[-1] != "."
and not doc.single_line_docstring
and ALLOW_SINGLE_LINE_DOCSTRINGS
):
errs.append(error("SS03"))
if doc.summary != doc.summary.lstrip():
errs.append(error("SS04"))
elif (
doc.is_function_or_method and doc.summary.split(" ")[0][-1] == "s"
):
errs.append(error("SS05"))
if doc.num_summary_lines > 1:
errs.append(error("SS06"))
if not doc.extended_summary:
wrns.append(("ES01", "No extended summary found"))
# PR01: Parameters not documented
# PR02: Unknown parameters
# PR03: Wrong parameters order
errs += doc.parameter_mismatches
for param in doc.doc_parameters:
if not param.startswith("*"): # Check can ignore var / kwargs
if not doc.parameter_type(param):
if ":" in param:
errs.append(error("PR10", param_name=param.split(":")[0]))
else:
errs.append(error("PR04", param_name=param))
else:
if doc.parameter_type(param)[-1] == ".":
errs.append(error("PR05", param_name=param))
common_type_errors = [
("integer", "int"),
("boolean", "bool"),
("string", "str"),
]
for wrong_type, right_type in common_type_errors:
if wrong_type in doc.parameter_type(param):
errs.append(
error(
"PR06",
param_name=param,
right_type=right_type,
wrong_type=wrong_type,
)
)
if not doc.parameter_desc(param):
errs.append(error("PR07", param_name=param))
else:
if not doc.parameter_desc(param)[0].isupper():
errs.append(error("PR08", param_name=param))
if doc.parameter_desc(param)[-1] != ".":
errs.append(error("PR09", param_name=param))
if doc.is_function_or_method:
if not doc.returns:
if doc.method_returns_something and not doc.no_return_whitelisted:
errs.append(error("RT01"))
else:
if len(doc.returns) == 1 and doc.returns[0].name:
errs.append(error("RT02"))
for name_or_type, type_, desc in doc.returns:
if not desc:
errs.append(error("RT03"))
else:
desc = " ".join(desc)
if not desc[0].isupper():
errs.append(error("RT04"))
if not desc.endswith("."):
errs.append(error("RT05"))
if not doc.yields and "yield" in doc.method_source:
errs.append(error("YD01"))
if not doc.see_also:
wrns.append(error("SA01"))
else:
for rel_name, rel_desc in doc.see_also.items():
if rel_desc:
if not rel_desc.endswith("."):
errs.append(error("SA02", reference_name=rel_name))
if not rel_desc[0].isupper():
errs.append(error("SA03", reference_name=rel_name))
else:
errs.append(error("SA04", reference_name=rel_name))
# TODO: Change to statsmodels
if rel_name.startswith("pandas."):
errs.append(
error(
"SA05",
reference_name=rel_name,
right_reference=rel_name[len("pandas.") :],
)
)
examples_errs = ""
if not doc.examples:
wrns.append(error("EX01"))
elif RUN_DOCTESTS:
examples_errs = doc.examples_errors
if examples_errs:
errs.append(error("EX02", doctest_log=examples_errs))
for err in doc.validate_pep8():
errs.append(
error(
"EX03",
error_code=err.error_code,
error_message=err.message,
times_happening=" ({} times)".format(err.count)
if err.count > 1
else "",
)
)
examples_source_code = "".join(doc.examples_source_code)
for wrong_import in ("numpy", "pandas"):
if "import {}".format(wrong_import) in examples_source_code:
errs.append(error("EX04", imported_library=wrong_import))
return errs, wrns, examples_errs
def validate_one(func_name):
"""
Validate the docstring for the given func_name
Parameters
----------
func_name : function
Function whose docstring will be evaluated (e.g. pandas.read_csv).
Returns
-------
dict
A dictionary containing all the information obtained from validating
the docstring.
"""
doc = Docstring(func_name)
errs, wrns, examples_errs = get_validation_data(doc)
return {
"type": doc.type,
"docstring": doc.clean_doc,
"deprecated": doc.deprecated,
"file": doc.source_file_name,
"file_line": doc.source_file_def_line,
"github_link": doc.github_url,
"errors": errs,
"warnings": wrns,
"examples_errors": examples_errs,
}
def validate_all(prefix, ignore_deprecated=False):
"""
Execute the validation of all docstrings, and return a dict with the
results.
Parameters
----------
prefix : str or None
If provided, only the docstrings that start with this pattern will be
validated. If None, all docstrings will be validated.
ignore_deprecated: bool, default False
If True, deprecated objects are ignored when validating docstrings.
Returns
-------
dict
A dictionary with an item for every function/method... containing
all the validation information.
"""
result = {}
seen = {}
# functions from the API docs
api_doc_fnames = os.path.join(BASE_PATH, "docs", "source", "*.rst")
api_items = []
for api_doc_fname in glob.glob(api_doc_fnames):
if "sandbox" in api_doc_fname:
continue
with open(api_doc_fname, encoding="utf8") as f:
api_items += list(get_api_items(f))
for func_name, func_obj, section, subsection in api_items:
if prefix and not func_name.startswith(prefix):
continue
doc_info = validate_one(func_name)
if ignore_deprecated and doc_info["deprecated"]:
continue
result[func_name] = doc_info
shared_code_key = doc_info["file"], doc_info["file_line"]
shared_code = seen.get(shared_code_key, "")
result[func_name].update(
{
"in_api": True,
"section": section,
"subsection": subsection,
"shared_code_with": shared_code,
}
)
seen[shared_code_key] = func_name
# functions from introspecting Series and DataFrame
api_item_names = set(list(zip(*api_items))[0])
for class_name, class_ in API_CLASSES:
for member in inspect.getmembers(class_):
func_name = class_name + "." + member[0]
if (
not member[0].startswith("_")
and func_name not in api_item_names
):
if prefix and not func_name.startswith(prefix):
continue
doc_info = validate_one(func_name)
if ignore_deprecated and doc_info["deprecated"]:
continue
result[func_name] = doc_info
result[func_name]["in_api"] = False
return result
def main(func_name, prefix, errors, output_format, ignore_deprecated):
def header(title, width=80, char="#"):
full_line = char * width
side_len = (width - len(title) - 2) // 2
adj = "" if len(title) % 2 == 0 else " "
title_line = "{side} {title}{adj} {side}".format(
side=char * side_len, title=title, adj=adj
)
return "\n{full_line}\n{title_line}\n{full_line}\n\n".format(
full_line=full_line, title_line=title_line
)
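# Illustrative output of the helper above: header("Validation", width=20)
# returns (leading newline omitted):
# ####################
# #### Validation ####
# ####################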
exit_status = 0
if func_name is None:
result = validate_all(prefix, ignore_deprecated)
if output_format == "json":
output = json.dumps(result)
else:
if output_format == "default":
output_format = "{text}\n"
elif output_format == "azure":
output_format = (
"##vso[task.logissue type=error;"
"sourcepath={path};"
"linenumber={row};"
"code={code};"
"]{text}\n"
)
else:
raise ValueError(
'Unknown output_format "{}"'.format(output_format)
)
output = []
for name, res in result.items():
for err_code, err_desc in res["errors"]:
# The script would be faster if, instead of filtering the
# errors after validating them, it skipped validating them in
# the first place. But that would complicate the code too much.
if errors and err_code not in errors:
continue
exit_status += 1
output.append(
output_format.format(
name=name,
path=res["file"],
row=res["file_line"],
code=err_code,
text="{}: {}".format(name, err_desc),
)
)
output = "".join(sorted(output))
sys.stdout.write(output)
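# Illustrative line emitted with --format=azure (path, line number and
# message text are hypothetical):
# ##vso[task.logissue type=error;sourcepath=pandas/core/frame.py;linenumber=42;code=PR09;]pandas.DataFrame.head: Parameter "n" description should finish with "."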
else:
result = validate_one(func_name)
sys.stderr.write(header("Docstring ({})".format(func_name)))
sys.stderr.write("{}\n".format(result["docstring"]))
sys.stderr.write(header("Validation"))
if result["errors"]:
sys.stderr.write(
"{} Errors found:\n".format(len(result["errors"]))
)
for err_code, err_desc in result["errors"]:
# Failing examples are printed at the end
if err_code == "EX02":
sys.stderr.write("\tExamples do not pass tests\n")
continue
sys.stderr.write("\t{}\n".format(err_desc))
if result["warnings"]:
sys.stderr.write(
"{} Warnings found:\n".format(len(result["warnings"]))
)
for wrn_code, wrn_desc in result["warnings"]:
sys.stderr.write("\t{}\n".format(wrn_desc))
if not result["errors"]:
sys.stderr.write(
'Docstring for "{}" correct. :)\n'.format(func_name)
)
if result["examples_errors"]:
sys.stderr.write(header("Doctests"))
sys.stderr.write(result["examples_errors"])
return exit_status
if __name__ == "__main__":
format_opts = "default", "json", "azure"
func_help = (
"function or method to validate (e.g. pandas.DataFrame.head) "
"if not provided, all docstrings are validated and returned "
"as JSON"
)
argparser = argparse.ArgumentParser(
description="validate pandas docstrings"
)
argparser.add_argument("function", nargs="?", default=None, help=func_help)
argparser.add_argument(
"--format",
default="default",
choices=format_opts,
help="format of the output when validating "
"multiple docstrings (ignored when validating one)."
"It can be {}".format(str(format_opts)[1:-1]),
)
argparser.add_argument(
"--prefix",
default=None,
help="pattern for the "
"docstring names, in order to decide which ones "
'will be validated. A prefix "pandas.Series.str."'
"will make the script validate all the docstrings"
"of methods starting by this pattern. It is "
"ignored if parameter function is provided",
)
argparser.add_argument(
"--errors",
default=None,
help="comma separated "
"list of error codes to validate. By default it "
"validates all errors (ignored when validating "
"a single docstring)",
)
argparser.add_argument(
"--ignore_deprecated",
default=False,
action="store_true",
help="if this flag is set, "
"deprecated objects are ignored when validating "
"all docstrings",
)
args = argparser.parse_args()
sys.exit(
main(
args.function,
args.prefix,
args.errors.split(",") if args.errors else None,
args.format,
args.ignore_deprecated,
)
)
|
|
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Controllers for the library page."""
import json
import logging
import string
from core.controllers import base
from core.domain import collection_services
from core.domain import exp_services
from core.domain import summary_services
from core.domain import user_services
from core.platform import models
import feconf
import utils
(base_models, exp_models,) = models.Registry.import_models([
models.NAMES.base_model, models.NAMES.exploration])
current_user_services = models.Registry.import_current_user_services()
def get_matching_activity_dicts(query_string, search_cursor):
"""Given a query string and a search cursor, returns a list of activity
dicts that satisfy the search query.
"""
collection_ids, search_cursor = (
collection_services.get_collection_ids_matching_query(
query_string, cursor=search_cursor))
exp_ids, new_search_cursor = (
exp_services.get_exploration_ids_matching_query(
query_string, cursor=search_cursor))
activity_list = []
activity_list = (
summary_services.get_displayable_collection_summary_dicts_matching_ids(
collection_ids))
activity_list += (
summary_services.get_displayable_exp_summary_dicts_matching_ids(
exp_ids))
if len(activity_list) == feconf.DEFAULT_QUERY_LIMIT:
logging.error(
'%s activities were fetched to load the library page. '
'You may be running up against the default query limits.'
% feconf.DEFAULT_QUERY_LIMIT)
return activity_list, new_search_cursor
class LibraryPage(base.BaseHandler):
"""The main library page. Used for both the default list of categories and
for search results.
"""
PAGE_NAME_FOR_CSRF = 'library'
def get(self):
"""Handles GET requests."""
search_mode = 'search' in self.request.url
self.values.update({
'meta_description': (
feconf.SEARCH_PAGE_DESCRIPTION if search_mode
else feconf.LIBRARY_PAGE_DESCRIPTION),
'nav_mode': feconf.NAV_MODE_LIBRARY,
'has_fully_registered': bool(
self.user_id and
user_services.has_fully_registered(self.user_id)),
'LANGUAGE_CODES_AND_NAMES': (
utils.get_all_language_codes_and_names()),
'search_mode': search_mode,
'SEARCH_DROPDOWN_CATEGORIES': feconf.SEARCH_DROPDOWN_CATEGORIES,
})
self.render_template('library/library.html')
class LibraryIndexHandler(base.BaseHandler):
"""Provides data for the default library index page."""
def get(self):
"""Handles GET requests."""
# TODO(sll): Support index pages for other language codes.
summary_dicts_by_category = summary_services.get_library_groups([
feconf.DEFAULT_LANGUAGE_CODE])
recently_published_summary_dicts = (
summary_services.get_recently_published_exploration_summary_dicts())
top_rated_activity_summary_dicts = (
summary_services.get_top_rated_exploration_summary_dicts(
[feconf.DEFAULT_LANGUAGE_CODE]))
featured_activity_summary_dicts = (
summary_services.get_featured_exploration_summary_dicts(
[feconf.DEFAULT_LANGUAGE_CODE]))
preferred_language_codes = [feconf.DEFAULT_LANGUAGE_CODE]
if self.user_id:
user_settings = user_services.get_user_settings(self.user_id)
preferred_language_codes = user_settings.preferred_language_codes
if recently_published_summary_dicts:
summary_dicts_by_category.insert(0, {
'activity_summary_dicts': recently_published_summary_dicts,
'categories': [],
'header': feconf.LIBRARY_CATEGORY_RECENTLY_PUBLISHED,
})
if top_rated_activity_summary_dicts:
summary_dicts_by_category.insert(0, {
'activity_summary_dicts': top_rated_activity_summary_dicts,
'categories': [],
'header': feconf.LIBRARY_CATEGORY_TOP_RATED_EXPLORATIONS,
})
if featured_activity_summary_dicts:
summary_dicts_by_category.insert(0, {
'activity_summary_dicts': featured_activity_summary_dicts,
'categories': [],
'header': feconf.LIBRARY_CATEGORY_FEATURED_EXPLORATIONS,
})
self.values.update({
'activity_summary_dicts_by_category': (
summary_dicts_by_category),
'preferred_language_codes': preferred_language_codes,
})
self.render_json(self.values)
class SearchHandler(base.BaseHandler):
"""Provides data for activity search results."""
def get(self):
"""Handles GET requests."""
query_string = utils.unescape_encoded_uri_component(
self.request.get('q'))
# Remove all punctuation from the query string (the translation map
# sends each punctuation character to None, i.e. deletes it).
# See http://stackoverflow.com/a/266162 and
# http://stackoverflow.com/a/11693937
remove_punctuation_map = dict(
(ord(char), None) for char in string.punctuation)
query_string = query_string.translate(remove_punctuation_map)
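# Illustrative effect of the mapping above:
# u'fractions: basics!' -> u'fractions basics'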
if self.request.get('category'):
query_string += ' category=%s' % self.request.get('category')
if self.request.get('language_code'):
query_string += ' language_code=%s' % self.request.get(
'language_code')
search_cursor = self.request.get('cursor', None)
activity_list, new_search_cursor = get_matching_activity_dicts(
query_string, search_cursor)
self.values.update({
'activity_list': activity_list,
'search_cursor': new_search_cursor,
})
self.render_json(self.values)
class LibraryRedirectPage(base.BaseHandler):
"""An old 'gallery' page that should redirect to the library index page."""
def get(self):
"""Handles GET requests."""
self.redirect('/library')
class ExplorationSummariesHandler(base.BaseHandler):
"""Returns summaries corresponding to ids of public explorations. This
controller supports returning private explorations for the given user.
"""
def get(self):
"""Handles GET requests."""
try:
exp_ids = json.loads(self.request.get('stringified_exp_ids'))
except Exception:
raise self.PageNotFoundException
include_private_exps_str = self.request.get(
'include_private_explorations')
include_private_exps = (
include_private_exps_str.lower() == 'true'
if include_private_exps_str else False)
editor_user_id = self.user_id if include_private_exps else None
if not editor_user_id:
include_private_exps = False
if (not isinstance(exp_ids, list) or not all([
isinstance(exp_id, basestring) for exp_id in exp_ids])):
raise self.PageNotFoundException
if include_private_exps:
summaries = (
summary_services.get_displayable_exp_summary_dicts_matching_ids(
exp_ids,
editor_user_id=editor_user_id))
else:
summaries = (
summary_services.get_displayable_exp_summary_dicts_matching_ids(
exp_ids))
self.values.update({
'summaries': summaries
})
self.render_json(self.values)
|
|
#!/usr/bin/env python
#
# Use the raw transactions API to spend zapcards received on particular addresses,
# and send any change back to that same address.
#
# Example usage:
# spendfrom.py # Lists available funds
# spendfrom.py --from=ADDRESS --to=ADDRESS --amount=11.00
#
# Assumes it will talk to a zapcardd or Zapcard-Qt running
# on localhost.
#
# Depends on jsonrpc
#
from decimal import *
import getpass
import math
import os
import os.path
import platform
import sys
import time
from jsonrpc import ServiceProxy, json
BASE_FEE=Decimal("0.001")
def check_json_precision():
"""Make sure json library being used does not lose precision converting ZAP values"""
n = Decimal("20000000.00000003")
satoshis = int(json.loads(json.dumps(float(n)))*1.0e8)
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
def determine_db_dir():
"""Return the default location of the zapcard data directory"""
if platform.system() == "Darwin":
return os.path.expanduser("~/Library/Application Support/Zapcard/")
elif platform.system() == "Windows":
return os.path.join(os.environ['APPDATA'], "Zapcard")
return os.path.expanduser("~/.zapcard")
def read_zapcard_config(dbdir):
"""Read the zapcard.conf file from dbdir, returns dictionary of settings"""
from ConfigParser import SafeConfigParser
class FakeSecHead(object):
def __init__(self, fp):
self.fp = fp
self.sechead = '[all]\n'
def readline(self):
if self.sechead:
try: return self.sechead
finally: self.sechead = None
else:
s = self.fp.readline()
if s.find('#') != -1:
s = s[0:s.find('#')].strip() +"\n"
return s
config_parser = SafeConfigParser()
config_parser.readfp(FakeSecHead(open(os.path.join(dbdir, "zapcard.conf"))))
return dict(config_parser.items("all"))
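# Illustrative zapcard.conf understood by the parser above (the FakeSecHead
# wrapper injects the [all] section header and strips '#' comments):
# rpcuser=someuser
# rpcpassword=somepassword
# rpcport=9332
# testnet=0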
def connect_JSON(config):
"""Connect to a zapcard JSON-RPC server"""
testnet = config.get('testnet', '0')
testnet = (int(testnet) > 0) # 0/1 in config file, convert to True/False
if not 'rpcport' in config:
config['rpcport'] = 19332 if testnet else 9332
connect = "http://%s:%s@127.0.0.1:%s"%(config['rpcuser'], config['rpcpassword'], config['rpcport'])
try:
result = ServiceProxy(connect)
# ServiceProxy is lazy-connect, so send an RPC command mostly to catch connection errors,
# but also make sure the zapcardd we're talking to is/isn't testnet:
if result.getmininginfo()['testnet'] != testnet:
sys.stderr.write("RPC server at "+connect+" testnet setting mismatch\n")
sys.exit(1)
return result
except:
sys.stderr.write("Error connecting to RPC server at "+connect+"\n")
sys.exit(1)
def unlock_wallet(zapcardd):
info = zapcardd.getinfo()
if 'unlocked_until' not in info:
return True # wallet is not encrypted
t = int(info['unlocked_until'])
if t <= time.time():
try:
passphrase = getpass.getpass("Wallet is locked; enter passphrase: ")
zapcardd.walletpassphrase(passphrase, 5)
except:
sys.stderr.write("Wrong passphrase\n")
info = zapcardd.getinfo()
return int(info['unlocked_until']) > time.time()
def list_available(zapcardd):
address_summary = dict()
address_to_account = dict()
for info in zapcardd.listreceivedbyaddress(0):
address_to_account[info["address"]] = info["account"]
unspent = zapcardd.listunspent(0)
for output in unspent:
# listunspent doesn't give addresses, so:
rawtx = zapcardd.getrawtransaction(output['txid'], 1)
vout = rawtx["vout"][output['vout']]
pk = vout["scriptPubKey"]
# This code only deals with ordinary pay-to-zapcard-address
# or pay-to-script-hash outputs right now; anything exotic is ignored.
if pk["type"] != "pubkeyhash" and pk["type"] != "scripthash":
continue
address = pk["addresses"][0]
if address in address_summary:
address_summary[address]["total"] += vout["value"]
address_summary[address]["outputs"].append(output)
else:
address_summary[address] = {
"total" : vout["value"],
"outputs" : [output],
"account" : address_to_account.get(address, "")
}
return address_summary
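# Illustrative shape of the mapping returned above (values hypothetical):
# {'Zabc...': {'total': Decimal('1.5'),
#              'outputs': [<listunspent entries>],
#              'account': 'savings'}}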
def select_coins(needed, inputs):
# Feel free to improve this, this is good enough for my simple needs:
outputs = []
have = Decimal("0.0")
n = 0
while have < needed and n < len(inputs):
outputs.append({ "txid":inputs[n]["txid"], "vout":inputs[n]["vout"]})
have += inputs[n]["amount"]
n += 1
return (outputs, have-needed)
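# Illustrative run of select_coins (amounts hypothetical): needed=5 with
# input amounts [2, 2, 3] selects all three outputs and returns change of 2.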
def create_tx(zapcardd, fromaddresses, toaddress, amount, fee):
all_coins = list_available(zapcardd)
total_available = Decimal("0.0")
needed = amount+fee
potential_inputs = []
for addr in fromaddresses:
if addr not in all_coins:
continue
potential_inputs.extend(all_coins[addr]["outputs"])
total_available += all_coins[addr]["total"]
if total_available < needed:
sys.stderr.write("Error, only %f ZAP available, need %f\n"%(total_available, needed));
sys.exit(1)
#
# Note:
# Python's json/jsonrpc modules have inconsistent support for Decimal numbers.
# Instead of wrestling with getting json.dumps() (used by jsonrpc) to encode
# Decimals, I'm casting amounts to float before sending them to zapcardd.
#
outputs = { toaddress : float(amount) }
(inputs, change_amount) = select_coins(needed, potential_inputs)
if change_amount > BASE_FEE: # don't bother with zero or tiny change
change_address = fromaddresses[-1]
if change_address in outputs:
outputs[change_address] += float(change_amount)
else:
outputs[change_address] = float(change_amount)
rawtx = zapcardd.createrawtransaction(inputs, outputs)
signed_rawtx = zapcardd.signrawtransaction(rawtx)
if not signed_rawtx["complete"]:
sys.stderr.write("signrawtransaction failed\n")
sys.exit(1)
txdata = signed_rawtx["hex"]
return txdata
def compute_amount_in(zapcardd, txinfo):
result = Decimal("0.0")
for vin in txinfo['vin']:
in_info = zapcardd.getrawtransaction(vin['txid'], 1)
vout = in_info['vout'][vin['vout']]
result = result + vout['value']
return result
def compute_amount_out(txinfo):
result = Decimal("0.0")
for vout in txinfo['vout']:
result = result + vout['value']
return result
def sanity_test_fee(zapcardd, txdata_hex, max_fee):
class FeeError(RuntimeError):
pass
try:
txinfo = zapcardd.decoderawtransaction(txdata_hex)
total_in = compute_amount_in(zapcardd, txinfo)
total_out = compute_amount_out(txinfo)
if total_in-total_out > max_fee:
raise FeeError("Rejecting transaction, unreasonable fee of "+str(total_in-total_out))
tx_size = len(txdata_hex)/2
kb = tx_size/1000 # integer division rounds down
fee = total_in - total_out # fee actually paid by this transaction
if kb > 1 and fee < BASE_FEE:
raise FeeError("Rejecting no-fee transaction, larger than 1000 bytes")
if total_in < 0.01 and fee < BASE_FEE:
raise FeeError("Rejecting no-fee, tiny-amount transaction")
# Exercise for the reader: compute transaction priority, and
# warn if this is a very-low-priority transaction
except FeeError as err:
sys.stderr.write((str(err)+"\n"))
sys.exit(1)
def main():
import optparse
parser = optparse.OptionParser(usage="%prog [options]")
parser.add_option("--from", dest="fromaddresses", default=None,
help="addresses to get zapcards from")
parser.add_option("--to", dest="to", default=None,
help="address to get send zapcards to")
parser.add_option("--amount", dest="amount", default=None,
help="amount to send")
parser.add_option("--fee", dest="fee", default="0.0",
help="fee to include")
parser.add_option("--datadir", dest="datadir", default=determine_db_dir(),
help="location of zapcard.conf file with RPC username/password (default: %default)")
parser.add_option("--testnet", dest="testnet", default=False, action="store_true",
help="Use the test network")
parser.add_option("--dry_run", dest="dry_run", default=False, action="store_true",
help="Don't broadcast the transaction, just create and print the transaction data")
(options, args) = parser.parse_args()
check_json_precision()
config = read_zapcard_config(options.datadir)
if options.testnet: config['testnet'] = True
zapcardd = connect_JSON(config)
if options.amount is None:
address_summary = list_available(zapcardd)
for address,info in address_summary.iteritems():
n_transactions = len(info['outputs'])
if n_transactions > 1:
print("%s %.8f %s (%d transactions)"%(address, info['total'], info['account'], n_transactions))
else:
print("%s %.8f %s"%(address, info['total'], info['account']))
else:
fee = Decimal(options.fee)
amount = Decimal(options.amount)
while unlock_wallet(zapcardd) == False:
pass # Keep asking for passphrase until they get it right
txdata = create_tx(zapcardd, options.fromaddresses.split(","), options.to, amount, fee)
sanity_test_fee(zapcardd, txdata, amount*Decimal("0.01"))
if options.dry_run:
print(txdata)
else:
txid = zapcardd.sendrawtransaction(txdata)
print(txid)
if __name__ == '__main__':
main()
|
|
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""AppEngine request handlers."""
import logging
import os
import jinja2
from oauth2client.anyjson import simplejson as json
from oauth2client.appengine import OAuth2Decorator
from oauth2client.client import AccessTokenRefreshError
import webapp2
from compute_engine_controller import ComputeEngineController
from load_info import LoadInfo
from google.appengine.ext import db
jinja_environment = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
decorator = OAuth2Decorator(
client_id = '{{{{ client_id }}}}',
client_secret = '{{{{ client_secret }}}}',
scope=ComputeEngineController.SCOPE,
user_agent='load-balance-compute-engine/1.0')
class IpAddressRequestLog(db.Model):
"""Datastore schema for game server IP address retrieval log."""
client_ip = db.StringProperty()
server_ip = db.StringProperty()
timestamp = db.DateTimeProperty(auto_now=True)
class FrontendHandler(webapp2.RequestHandler):
"""URL handler class for IP address request."""
def get(self):
"""Returns an available server's IP address in JSON format."""
ip = LoadInfo.GetIdleInstanceIpAddress()
if not ip:
ip = ''
self.response.out.write(json.dumps({'ipaddress': ip}))
IpAddressRequestLog(client_ip=self.request.remote_addr,
server_ip=ip).put()
class AdminUiHandler(webapp2.RequestHandler):
"""URL handler class for admin UI page."""
@decorator.oauth_required
def get(self):
"""Returns admin UI of game server cluster."""
try:
# This handler returns stats.html as is. We still need handler here
# to take care of OAuth2.
html_path = os.path.join(os.path.dirname(__file__),
'static_files', 'stats.html')
self.response.out.write(open(html_path).read())
except AccessTokenRefreshError:
self.redirect(decorator.authorize_url())
class StatsJsonHandler(webapp2.RequestHandler):
"""URL handler class for stats list of the cluster."""
@decorator.oauth_required
def get(self):
"""Returns stats of managed Compute Engine instances for Admin UI."""
load_entries = []
instance_list = ComputeEngineController(
decorator.credentials).ListInstances()
all_load_info = LoadInfo.GetAll()
# First, list managed instances whose Compute Engine status is found.
for instance in instance_list:
instance_name = instance['name']
if instance_name in all_load_info:
info = all_load_info[instance_name]
load_entries.append({
'host': instance_name,
'ipaddress': info.get(LoadInfo.IP_ADDRESS, ''),
'status': instance['status'],
'load': info.get(LoadInfo.LOAD, 0),
'force_set': info.get(LoadInfo.FORCE, False),
})
del all_load_info[instance_name]
# Then, list managed instances without Compute Engine status.
for name, info in all_load_info.items():
load_entries.append({
'host': name,
'ipaddress': info.get(LoadInfo.IP_ADDRESS, ''),
'status': 'NOT FOUND',
'load': info.get(LoadInfo.LOAD, 0),
'force_set': info.get(LoadInfo.FORCE, False),
})
self.response.out.write(json.dumps(load_entries))
class StatsUserJsonHandler(webapp2.RequestHandler):
"""URL handler class for game server list of the cluster."""
def get(self):
"""Returns stats of game instances for non logged-in users."""
load_entries = []
all_load_info = LoadInfo.GetAll()
for name, info in all_load_info.items():
load_entries.append({
'host': name,
'ipaddress': info.get(LoadInfo.IP_ADDRESS, ''),
'load': info.get(LoadInfo.LOAD, 0),
})
self.response.out.write(json.dumps(load_entries))
class StartUpHandler(webapp2.RequestHandler):
"""URL handler class for cluster start up."""
@decorator.oauth_required
def get(self):
"""Starts up initial Compute Engine cluster."""
ComputeEngineController(decorator.credentials).StartUpCluster()
class TearDownHandler(webapp2.RequestHandler):
"""URL handler class for cluster shut down."""
@decorator.oauth_required
def get(self):
"""Deletes Compute Engine cluster."""
ComputeEngineController(decorator.credentials).TearDownCluster()
class RegisterHandler(webapp2.RequestHandler):
"""URL handler class for IP address registration of the instance."""
def post(self):
"""Adds the new instance to managed cluster by registering IP address."""
# TODO(user): Secure this URL by using Cloud Endpoints.
name = self.request.get('name')
instance = ComputeEngineController().GetInstanceInfo(name)
if not instance:
return
logging.info('Instance created: %s', str(instance))
external_ip = instance['networkInterfaces'][0][
'accessConfigs'][0]['natIP']
LoadInfo.RegisterInstanceIpAddress(name, external_ip)
class LoadMonitorHandler(webapp2.RequestHandler):
"""URL handler class to receive load report from instances."""
def post(self):
"""Receives request from instance and updates load information."""
# TODO(user): Secure this URL by using Cloud Endpoints.
name = self.request.get('name')
load = self.request.get('load')
if not load:
load = 0
force = self.request.get('force')
force_set = None
if force:
if int(force):
force_set = True
else:
force_set = False
LoadInfo.UpdateLoadInfo(name, int(load), force_set)
self.response.headers['Content-Type'] = 'text/plain'
self.response.out.write('ok')
class LoadCheckerHandler(webapp2.RequestHandler):
"""URL handler class to perform cron task."""
UPPER_THRESHOLD = 80
LOWER_THRESHOLD = 40
MIN_CLUSTER_SIZE = 5
def get(self):
"""Checks average load level and adjusts cluster size if necessary.
If the average load level of the instances is above the upper threshold,
increase the number of instances by 20% of the original size. If the
average load level is below the lower threshold and the current cluster
size is larger than the minimum size, decrease the number of instances by
10%. Since a shortage of servers is more harmful than an excess of
instances, we increase more rapidly than we decrease.
However, shutting down an instance is more complicated than adding one,
depending on the game server. If the client cannot reconnect
automatically, the game server must be drained before the instance is
shut down. In this example, decreasing the number of instances is not
implemented.
"""
cluster_size, average_load = LoadInfo.GetAverageLoad()
if cluster_size:
if average_load > self.UPPER_THRESHOLD:
ComputeEngineController().IncreaseEngine(cluster_size / 5 + 1)
elif (average_load < self.LOWER_THRESHOLD and
cluster_size > self.MIN_CLUSTER_SIZE):
ComputeEngineController().DecreaseEngine(cluster_size / 10 + 1)
class GameSetupHandler(webapp2.RequestHandler):
"""URL handler class to send script to set up game server."""
def get(self):
"""Returns script to set up game server."""
template = jinja_environment.get_template(
os.path.join('worker', 'setup_and_start_game.sh'))
self.response.out.write(template.render({
'cloud_storage': ComputeEngineController.CLOUD_STORAGE_DIR,
'ip_address': self.request.remote_addr
}))
app = webapp2.WSGIApplication(
[
('/getip.json', FrontendHandler),
('/stats', AdminUiHandler),
('/stats.json', StatsJsonHandler),
('/stats-user.json', StatsUserJsonHandler),
('/startup', StartUpHandler),
('/teardown', TearDownHandler),
('/register', RegisterHandler),
('/load', LoadMonitorHandler),
('/check-load', LoadCheckerHandler),
('/setup-and-start-game', GameSetupHandler),
(decorator.callback_path, decorator.callback_handler()),
],
debug=True)
|
|
import pytest
import numpy as np
from numpy.testing import assert_allclose
from quimb import (
ham_heis,
expec,
plus,
is_eigenvector,
eigh,
heisenberg_energy,
)
from quimb.tensor import (
MPS_rand_state,
MPS_product_state,
MPS_computational_state,
MPO_ham_ising,
MPO_ham_XY,
MPO_ham_heis,
MPO_ham_mbl,
MovingEnvironment,
DMRG1,
DMRG2,
DMRGX,
SpinHam,
)
np.random.seed(42)
class TestMovingEnvironment:
def test_bsz1_start_left(self):
tn = MPS_rand_state(6, bond_dim=7)
env = MovingEnvironment(tn, begin='left', bsz=1)
assert env.pos == 0
assert len(env().tensors) == 3
env.move_right()
assert env.pos == 1
assert len(env().tensors) == 3
env.move_right()
assert env.pos == 2
assert len(env().tensors) == 3
env.move_to(5)
assert env.pos == 5
assert len(env().tensors) == 3
def test_bsz1_start_right(self):
tn = MPS_rand_state(6, bond_dim=7)
env = MovingEnvironment(tn, begin='right', bsz=1)
assert env.pos == 5
assert len(env().tensors) == 3
env.move_left()
assert env.pos == 4
assert len(env().tensors) == 3
env.move_left()
assert env.pos == 3
assert len(env().tensors) == 3
env.move_to(0)
assert env.pos == 0
assert len(env().tensors) == 3
def test_bsz2_start_left(self):
tn = MPS_rand_state(6, bond_dim=7)
env = MovingEnvironment(tn, begin='left', bsz=2)
assert len(env().tensors) == 4
env.move_right()
assert len(env().tensors) == 4
env.move_right()
assert len(env().tensors) == 4
with pytest.raises(ValueError):
env.move_to(5)
env.move_to(4)
assert env.pos == 4
assert len(env().tensors) == 4
def test_bsz2_start_right(self):
tn = MPS_rand_state(6, bond_dim=7)
env = MovingEnvironment(tn, begin='right', bsz=2)
assert env.pos == 4
assert len(env().tensors) == 4
env.move_left()
assert env.pos == 3
assert len(env().tensors) == 4
env.move_left()
assert env.pos == 2
assert len(env().tensors) == 4
with pytest.raises(ValueError):
env.move_to(-1)
env.move_to(0)
assert env.pos == 0
assert len(env().tensors) == 4
@pytest.mark.parametrize("n", [20, 19])
@pytest.mark.parametrize("bsz", [1, 2])
@pytest.mark.parametrize("ssz", [1 / 2, 1.0])
def test_cyclic_moving_env_init_left(self, n, bsz, ssz):
nenv = 2
p = MPS_rand_state(n, 4, cyclic=True)
norm = p.H & p
mes = MovingEnvironment(norm, begin='left', bsz=bsz,
cyclic=True, ssz=ssz)
assert len(mes.envs) == n // 2 + n % 2
assert mes.pos == 0
assert len(mes.envs[0].tensors) == 2 * bsz + nenv
assert len(mes.envs[n // 2 - 1].tensors) == 2 * bsz + 1
assert n // 2 + n % 2 not in mes.envs
assert n - 1 not in mes.envs
for i in range(1, 2 * n):
mes.move_right()
assert mes.pos == i % n
cur_env = mes()
assert len(cur_env.tensors) == 2 * bsz + nenv
assert (cur_env ^ all) == pytest.approx(1.0)
@pytest.mark.parametrize("n", [20, 19])
@pytest.mark.parametrize("bsz", [1, 2])
@pytest.mark.parametrize("ssz", [1 / 2, 1.0])
def test_cyclic_moving_env_init_right(self, n, bsz, ssz):
p = MPS_rand_state(n, 4, cyclic=True)
norm = p.H | p
mes = MovingEnvironment(norm, begin='right', bsz=bsz,
cyclic=True, ssz=ssz)
assert len(mes.envs) == n // 2 + n % 2
assert mes.pos == n - 1
assert len(mes.envs[n - 1].tensors) == 2 * bsz + 2
assert len(mes.envs[n - n // 2].tensors) == 2 * bsz + 1
assert 0 not in mes.envs
assert n // 2 - 1 not in mes.envs
for i in reversed(range(-n, n - 1)):
mes.move_left()
assert mes.pos == i % n
cur_env = mes()
assert len(cur_env.tensors) == 2 * bsz + 2
assert (cur_env ^ all) == pytest.approx(1.0)
class TestDMRG1:
def test_single_explicit_sweep(self):
h = MPO_ham_heis(5)
dmrg = DMRG1(h, bond_dims=3)
assert dmrg._k[0].dtype == float
energy_tn = (dmrg._b | dmrg.ham | dmrg._k)
e0 = energy_tn ^ ...
assert abs(e0.imag) < 1e-13
de1 = dmrg.sweep_right()
e1 = energy_tn ^ ...
assert_allclose(de1, e1)
assert abs(e1.imag) < 1e-13
de2 = dmrg.sweep_right()
e2 = energy_tn ^ ...
assert_allclose(de2, e2)
assert abs(e2.imag) < 1e-13
# state is already left canonized after right sweep
de3 = dmrg.sweep_left(canonize=False)
e3 = energy_tn ^ ...
assert_allclose(de3, e3)
assert abs(e3.imag) < 1e-13
de4 = dmrg.sweep_left()
e4 = energy_tn ^ ...
assert_allclose(de4, e4)
assert abs(e4.imag) < 1e-13
# test still normalized
assert dmrg._k[0].dtype == float
dmrg._k.align_(dmrg._b)
assert_allclose(abs(dmrg._b @ dmrg._k), 1)
assert e1.real < e0.real
assert e2.real < e1.real
assert e3.real < e2.real
assert e4.real < e3.real
@pytest.mark.parametrize("dense", [False, True])
@pytest.mark.parametrize("MPO_ham", [MPO_ham_XY, MPO_ham_heis])
@pytest.mark.parametrize("cyclic", [False, True])
def test_ground_state_matches(self, dense, MPO_ham, cyclic):
n = 10
tol = 3e-2 if cyclic else 1e-4
h = MPO_ham(n, cyclic=cyclic)
dmrg = DMRG1(h, bond_dims=[4, 8, 12])
dmrg.opts['local_eig_ham_dense'] = dense
dmrg.opts['periodic_segment_size'] = 1.0
dmrg.opts['periodic_nullspace_fudge_factor'] = 1e-6
assert dmrg.solve(tol=tol / 10, verbosity=1)
assert dmrg.state.cyclic == cyclic
eff_e, mps_gs = dmrg.energy, dmrg.state
mps_gs_dense = mps_gs.to_dense()
assert_allclose(mps_gs_dense.H @ mps_gs_dense, 1.0, rtol=tol)
h_dense = h.to_dense()
# check against dense form
actual_e, gs = eigh(h_dense, k=1)
assert_allclose(actual_e, eff_e, rtol=tol)
assert_allclose(abs(expec(mps_gs_dense, gs)), 1.0, rtol=tol)
# check against actual MPO_ham
if MPO_ham is MPO_ham_XY:
ham_dense = ham_heis(n, cyclic=cyclic,
j=(1.0, 1.0, 0.0), sparse=True)
elif MPO_ham is MPO_ham_heis:
ham_dense = ham_heis(n, cyclic=cyclic, sparse=True)
actual_e, gs = eigh(ham_dense, k=1)
assert_allclose(actual_e, eff_e, rtol=tol)
assert_allclose(abs(expec(mps_gs_dense, gs)), 1.0, rtol=tol)
def test_ising_and_MPS_product_state(self):
h = MPO_ham_ising(6, bx=2.0, j=0.1)
dmrg = DMRG1(h, bond_dims=8)
assert dmrg.solve(verbosity=1)
eff_e, mps_gs = dmrg.energy, dmrg.state
mps_gs_dense = mps_gs.to_dense()
assert_allclose(mps_gs_dense.H @ mps_gs_dense, 1.0)
# check against dense
h_dense = h.to_dense()
actual_e, gs = eigh(h_dense, k=1)
assert_allclose(actual_e, eff_e)
assert_allclose(abs(expec(mps_gs_dense, gs)), 1.0)
exp_gs = MPS_product_state([plus()] * 6)
assert_allclose(abs(exp_gs.H @ mps_gs), 1.0, rtol=1e-3)
class TestDMRG2:
@pytest.mark.parametrize("dense", [False, True])
@pytest.mark.parametrize("MPO_ham", [MPO_ham_XY, MPO_ham_heis])
@pytest.mark.parametrize("cyclic", [False, True])
def test_matches_exact(self, dense, MPO_ham, cyclic):
n = 6
h = MPO_ham(n, cyclic=cyclic)
tol = 3e-2 if cyclic else 1e-4
dmrg = DMRG2(h, bond_dims=[4, 8, 12])
assert dmrg._k[0].dtype == float
dmrg.opts['local_eig_ham_dense'] = dense
dmrg.opts['periodic_segment_size'] = 1.0
dmrg.opts['periodic_nullspace_fudge_factor'] = 1e-6
assert dmrg.solve(tol=tol / 10, verbosity=1)
# XXX: need to dispatch SLEPc eigh on real input
# assert dmrg._k[0].dtype == float
eff_e, mps_gs = dmrg.energy, dmrg.state
mps_gs_dense = mps_gs.to_dense()
assert_allclose(expec(mps_gs_dense, mps_gs_dense), 1.0, rtol=tol)
h_dense = h.to_dense()
# check against dense form
actual_e, gs = eigh(h_dense, k=1)
assert_allclose(actual_e, eff_e, rtol=tol)
assert_allclose(abs(expec(mps_gs_dense, gs)), 1.0, rtol=tol)
# check against actual MPO_ham
if MPO_ham is MPO_ham_XY:
ham_dense = ham_heis(n, cyclic=cyclic, j=(1.0, 1.0, 0.0))
elif MPO_ham is MPO_ham_heis:
ham_dense = ham_heis(n, cyclic=cyclic)
actual_e, gs = eigh(ham_dense, k=1)
assert_allclose(actual_e, eff_e, rtol=tol)
assert_allclose(abs(expec(mps_gs_dense, gs)), 1.0, rtol=tol)
def test_cyclic_solve_big_with_segmenting(self):
n = 150
ham = MPO_ham_heis(n, cyclic=True)
dmrg = DMRG2(ham, bond_dims=range(10, 30, 2))
dmrg.opts['periodic_segment_size'] = 1 / 3
assert dmrg.solve(tol=1, verbosity=2)
assert dmrg.energy == pytest.approx(heisenberg_energy(n), 1e-3)
@pytest.mark.parametrize("dtype", [np.float32, np.float64,
np.complex64, np.complex128])
def test_dtypes(self, dtype):
H = MPO_ham_heis(8).astype(dtype)
dmrg = DMRG2(H)
dmrg.opts['local_eig_backend'] = 'scipy'
dmrg.solve(max_sweeps=3)
res_dtype, = {t.dtype for t in dmrg.state}
assert res_dtype == dtype
def test_total_size_2(self):
N = 2
builder = SpinHam(1 / 2)
for i in range(N - 1):
builder[i, i + 1] += 1.0, 'Z', 'Z'
H = builder.build_mpo(N)
dmrg = DMRG2(H)
dmrg.solve(verbosity=1)
assert dmrg.energy == pytest.approx(-1 / 4)
def test_variable_bond_ham(self):
import quimb as qu
HB = SpinHam(1 / 2)
HB[0, 1] += 0.6, 'Z', 'Z'
HB[1, 2] += 0.7, 'Z', 'Z'
HB[1, 2] += 0.8, 'X', 'X'
HB[2, 3] += 0.9, 'Y', 'Y'
H_mpo = HB.build_mpo(4)
H_sps = HB.build_sparse(4)
assert H_mpo.bond_sizes() == [3, 4, 3]
Sx, Sy, Sz = map(qu.spin_operator, 'xyz')
H_explicit = (
qu.ikron(0.6 * Sz & Sz, [2, 2, 2, 2], [0, 1]) +
qu.ikron(0.7 * Sz & Sz, [2, 2, 2, 2], [1, 2]) +
qu.ikron(0.8 * Sx & Sx, [2, 2, 2, 2], [1, 2]) +
qu.ikron(0.9 * Sy & Sy, [2, 2, 2, 2], [2, 3])
)
assert_allclose(H_explicit, H_mpo.to_dense())
assert_allclose(H_explicit, H_sps.A)
class TestDMRGX:
def test_explicit_sweeps(self):
n = 8
chi = 16
ham = MPO_ham_mbl(n, dh=4, seed=42)
p0 = MPS_rand_state(n, 2).expand_bond_dimension(chi)
b0 = p0.H
p0.align_(ham, b0)
en0 = (p0 & ham & b0) ^ ...
dmrgx = DMRGX(ham, p0, chi)
dmrgx.sweep_right()
en1 = dmrgx.sweep_left(canonize=False)
assert en0 != en1
dmrgx.sweep_right(canonize=False)
en = dmrgx.sweep_right(canonize=True)
# check normalized
assert_allclose(dmrgx._k.H @ dmrgx._k, 1.0)
k = dmrgx._k.to_dense()
h = ham.to_dense()
el, ev = eigh(h)
# check variance very low
assert np.abs((k.H @ h @ h @ k) - (k.H @ h @ k)**2) < 1e-12
# check exactly one eigenvalue matched well
assert np.sum(np.abs(el - en) < 1e-12) == 1
# check exactly one eigenvector is matched with high fidelity
ovlps = (ev.H @ k).A**2
big_ovlps = ovlps[ovlps > 1e-12]
assert_allclose(big_ovlps, [1])
# check fully
assert is_eigenvector(k, h, tol=1e-10)
def test_solve_bigger(self):
n = 14
chi = 16
ham = MPO_ham_mbl(n, dh=8, seed=42)
p0 = MPS_computational_state('00110111000101')
dmrgx = DMRGX(ham, p0, chi)
assert dmrgx.solve(tol=1e-5, sweep_sequence='R')
assert dmrgx.state[0].dtype == float
|
|
# Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import nova.conf
from nova.tests.functional.api_sample_tests import api_sample_base
from nova.tests.unit.image import fake
CONF = nova.conf.CONF
class ServersSampleBase(api_sample_base.ApiSampleTestBaseV21):
extra_extensions_to_load = ["os-access-ips"]
microversion = None
def _post_server(self, use_common_server_api_samples=True):
# param use_common_server_api_samples: Boolean to set whether tests use
# common sample files for server post request and response.
# Default is True which means _get_sample_path method will fetch the
# common server sample files from 'servers' directory.
# Set False if tests need to use extension specific sample files
subs = {
'image_id': fake.get_valid_image_id(),
'host': self._get_host(),
'compute_endpoint': self._get_compute_endpoint(),
'versioned_compute_endpoint': self._get_vers_compute_endpoint(),
'glance_host': self._get_glance_host(),
'access_ip_v4': '1.2.3.4',
'access_ip_v6': '80fe::'
}
# TODO(gmann): Remove this hack once all tests using this common
# _post_server method are enabled with all extension.
# This is added to avoid all tests updates together.
post_req_template = 'server-post-req'
post_resp_template = 'server-post-resp'
if self.all_extensions and use_common_server_api_samples:
post_req_template = 'server-create-req'
post_resp_template = 'server-create-resp'
orig_value = self.__class__._use_common_server_api_samples
orig_sample_dir = self.__class__.sample_dir
try:
self.__class__._use_common_server_api_samples = (
use_common_server_api_samples)
response = self._do_post('servers', post_req_template, subs)
status = self._verify_response(post_resp_template, subs,
response, 202)
return status
finally:
self.__class__._use_common_server_api_samples = orig_value
self.__class__.sample_dir = orig_sample_dir
def setUp(self):
super(ServersSampleBase, self).setUp()
self.api.microversion = self.microversion
class ServersSampleJsonTest(ServersSampleBase):
sample_dir = 'servers'
microversion = None
def _get_flags(self):
f = super(ServersSampleBase, self)._get_flags()
f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
f['osapi_compute_extension'].append(
'nova.api.openstack.compute.contrib.keypairs.Keypairs')
f['osapi_compute_extension'].append(
'nova.api.openstack.compute.contrib.extended_ips.Extended_ips')
f['osapi_compute_extension'].append(
'nova.api.openstack.compute.contrib.extended_ips_mac.'
'Extended_ips_mac')
return f
def test_servers_post(self):
return self._post_server()
def test_servers_get(self):
uuid = self.test_servers_post()
response = self._do_get('servers/%s' % uuid)
subs = {}
subs['hostid'] = '[a-f0-9]+'
subs['id'] = uuid
subs['hypervisor_hostname'] = r'[\w\.\-]+'
subs['mac_addr'] = '(?:[a-f0-9]{2}:){5}[a-f0-9]{2}'
subs['access_ip_v4'] = '1.2.3.4'
subs['access_ip_v6'] = '80fe::'
self._verify_response('server-get-resp', subs, response, 200)
def test_servers_list(self):
uuid = self._post_server()
response = self._do_get('servers')
subs = {'id': uuid}
self._verify_response('servers-list-resp', subs, response, 200)
def test_servers_details(self):
uuid = self.test_servers_post()
response = self._do_get('servers/detail')
subs = {}
subs['hostid'] = '[a-f0-9]+'
subs['id'] = uuid
subs['hypervisor_hostname'] = r'[\w\.\-]+'
subs['mac_addr'] = '(?:[a-f0-9]{2}:){5}[a-f0-9]{2}'
subs['access_ip_v4'] = '1.2.3.4'
subs['access_ip_v6'] = '80fe::'
self._verify_response('servers-details-resp', subs, response, 200)
class ServersSampleJson29Test(ServersSampleJsonTest):
microversion = '2.9'
# NOTE(gmann): microversion tests do not need to run for v2 API
# so defining scenarios only for v2.9 which will run the original tests
# by appending '(v2_9)' in test_id.
scenarios = [('v2_9', {'api_major_version': 'v2.1'})]
class ServersSampleJson219Test(ServersSampleJsonTest):
microversion = '2.19'
sample_dir = 'servers'
scenarios = [('v2_19', {'api_major_version': 'v2.1'})]
def test_servers_post(self):
return self._post_server(False)
def test_servers_put(self):
uuid = self.test_servers_post()
response = self._do_put('servers/%s' % uuid, 'server-put-req', {})
subs = {
'image_id': fake.get_valid_image_id(),
'hostid': '[a-f0-9]+',
'glance_host': self._get_glance_host(),
'access_ip_v4': '1.2.3.4',
'access_ip_v6': '80fe::'
}
self._verify_response('server-put-resp', subs, response, 200)
class ServersUpdateSampleJsonTest(ServersSampleBase):
sample_dir = 'servers'
# TODO(gmann): This will be removed once all API tests runs for
# all extension enable.
all_extensions = True
def test_update_server(self):
uuid = self._post_server()
subs = {}
subs['hostid'] = '[a-f0-9]+'
subs['access_ip_v4'] = '1.2.3.4'
subs['access_ip_v6'] = '80fe::'
response = self._do_put('servers/%s' % uuid,
'server-update-req', subs)
self._verify_response('server-update-resp', subs, response, 200)
class ServerSortKeysJsonTests(ServersSampleBase):
sample_dir = 'servers-sort'
def _get_flags(self):
f = super(ServerSortKeysJsonTests, self)._get_flags()
f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
f['osapi_compute_extension'].append(
'nova.api.openstack.compute.contrib.server_sort_keys.'
'Server_sort_keys')
return f
def test_servers_list(self):
self._post_server()
response = self._do_get('servers?sort_key=display_name&sort_dir=asc')
self._verify_response('server-sort-keys-list-resp', {}, response,
200)
class ServersSampleAllExtensionJsonTest(ServersSampleJsonTest):
all_extensions = True
sample_dir = None
class ServersActionsJsonTest(ServersSampleBase):
sample_dir = 'servers'
def _test_server_action(self, uuid, action, req_tpl,
subs=None, resp_tpl=None, code=202):
subs = subs or {}
subs.update({'action': action,
'glance_host': self._get_glance_host()})
response = self._do_post('servers/%s/action' % uuid,
req_tpl,
subs)
if resp_tpl:
self._verify_response(resp_tpl, subs, response, code)
else:
self.assertEqual(code, response.status_code)
self.assertEqual("", response.content)
def test_server_reboot_hard(self):
uuid = self._post_server()
self._test_server_action(uuid, "reboot",
'server-action-reboot',
{"type": "HARD"})
def test_server_reboot_soft(self):
uuid = self._post_server()
self._test_server_action(uuid, "reboot",
'server-action-reboot',
{"type": "SOFT"})
def test_server_rebuild(self):
uuid = self._post_server()
image = fake.get_valid_image_id()
params = {
'uuid': image,
'name': 'foobar',
'pass': 'seekr3t',
'hostid': '[a-f0-9]+',
'access_ip_v4': '1.2.3.4',
'access_ip_v6': '80fe::',
}
resp = self._do_post('servers/%s/action' % uuid,
'server-action-rebuild', params)
subs = params.copy()
del subs['uuid']
self._verify_response('server-action-rebuild-resp', subs, resp, 202)
def test_server_resize(self):
self.flags(allow_resize_to_same_host=True)
uuid = self._post_server()
self._test_server_action(uuid, "resize",
'server-action-resize',
{"id": '2',
"host": self._get_host()})
return uuid
def test_server_revert_resize(self):
uuid = self.test_server_resize()
self._test_server_action(uuid, "revertResize",
'server-action-revert-resize')
def test_server_confirm_resize(self):
uuid = self.test_server_resize()
self._test_server_action(uuid, "confirmResize",
'server-action-confirm-resize',
code=204)
def test_server_create_image(self):
uuid = self._post_server()
self._test_server_action(uuid, 'createImage',
'server-action-create-image',
{'name': 'foo-image'})
class ServersActionsJson219Test(ServersSampleBase):
microversion = '2.19'
sample_dir = 'servers'
scenarios = [('v2_19', {'api_major_version': 'v2.1'})]
def test_server_rebuild(self):
uuid = self._post_server()
image = fake.get_valid_image_id()
params = {
'uuid': image,
'name': 'foobar',
'description': 'description of foobar',
'pass': 'seekr3t',
'hostid': '[a-f0-9]+',
'access_ip_v4': '1.2.3.4',
'access_ip_v6': '80fe::',
}
resp = self._do_post('servers/%s/action' % uuid,
'server-action-rebuild', params)
subs = params.copy()
del subs['uuid']
self._verify_response('server-action-rebuild-resp', subs, resp, 202)
class ServersActionsAllJsonTest(ServersActionsJsonTest):
all_extensions = True
sample_dir = None
class ServerStartStopJsonTest(ServersSampleBase):
sample_dir = 'servers'
def _get_flags(self):
f = super(ServerStartStopJsonTest, self)._get_flags()
f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
f['osapi_compute_extension'].append(
'nova.api.openstack.compute.contrib.server_start_stop.'
'Server_start_stop')
return f
def _test_server_action(self, uuid, action, req_tpl):
response = self._do_post('servers/%s/action' % uuid,
req_tpl,
{'action': action})
self.assertEqual(202, response.status_code)
self.assertEqual("", response.content)
def test_server_start(self):
uuid = self._post_server()
self._test_server_action(uuid, 'os-stop', 'server-action-stop')
self._test_server_action(uuid, 'os-start', 'server-action-start')
def test_server_stop(self):
uuid = self._post_server()
self._test_server_action(uuid, 'os-stop', 'server-action-stop')
class ServersSampleMultiStatusJsonTest(ServersSampleBase):
sample_dir = 'servers'
extra_extensions_to_load = ["os-access-ips"]
def _get_flags(self):
f = super(ServersSampleMultiStatusJsonTest, self)._get_flags()
f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
f['osapi_compute_extension'].append(
'nova.api.openstack.compute.legacy_v2.contrib.'
'server_list_multi_status.Server_list_multi_status')
return f
def test_servers_list(self):
uuid = self._post_server()
response = self._do_get('servers?status=active&status=error')
subs = {'id': uuid}
self._verify_response('servers-list-resp', subs, response, 200)
class ServerTriggerCrashDumpJsonTest(ServersSampleBase):
sample_dir = 'servers'
microversion = '2.17'
scenarios = [('v2_17', {'api_major_version': 'v2.1'})]
def test_trigger_crash_dump(self):
uuid = self._post_server()
response = self._do_post('servers/%s/action' % uuid,
'server-action-trigger-crash-dump',
{})
self.assertEqual(response.status_code, 202)
self.assertEqual(response.content, "")
|
|
"""
Copyright 2015 Jatinshravan
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from abc import ABCMeta, abstractmethod
import os
import socket
import subprocess
import json
import sys
class Hypervisor(object):
"""
Hypervisor class from which all sub hypervisor classes inherit their
methods.
"""
__metaclass__ = ABCMeta
@abstractmethod
def __init__(self, hyp_params):
#Method to initialize the hypervisor
pass
@abstractmethod
def start_networks(self, connections, br_type, br_name):
#Method to start the network bridges specified in the arguments.
pass
@abstractmethod
def create_vm(self, vm, work_dir, iso_dir):
#Method to start vm from the VM object in the arguments.
pass
@abstractmethod
def destroy_networks(self, bridge):
#Method to destroy networks created. Uses a resources file for this.
pass
@abstractmethod
def destroy_vms(self, vms):
#Method to kill all VMs created. Uses a resources file for this.
pass
def graphical_console(self, vm_name):
#Method to connect to the graphical console of a VM
pass
@abstractmethod
def restart_stop_vms(self, vm_name, stop):
pass
"""
Class that handles the ESXI hypervisor.
"""
class ESXI(Hypervisor):
def __init__(self, hyp_params):
if ((hyp_params != None) and ("datastore" in hyp_params)):
if (os.path.isdir(hyp_params["datastore"])):
self.datast = hyp_params["datastore"]
else:
print ('not able to access data store ',hyp_params["datastore"])
sys.exit(1)
else:
print ("Required parameter - datastore missing for ESXI hypervisor")
sys.exit(1)
print ("In ESXI INit", self.datast)
def gen_base_vmx(self, fname):
""" Opens a file with the name as fname, and puts the base
statements of a .vmx file into this
"""
fhdl = open(fname, 'w')
fhdl.write('.encoding = "UTF-8"\n')
fhdl.write('config.version = "8"\n')
fhdl.write('virtualHW.version = "7"\n')
fhdl.write('pciBridge0.present = "TRUE"\n')
fhdl.write('svga.present = "TRUE"\n')
fhdl.write('pciBridge4.present = "TRUE"\n')
fhdl.write('pciBridge4.virtualDev = "pcieRootPort"\n')
fhdl.write('pciBridge4.functions = "8"\n')
fhdl.write('pciBridge5.present = "TRUE"\n')
fhdl.write('pciBridge5.virtualDev = "pcieRootPort"\n')
fhdl.write('pciBridge5.functions = "8"\n')
fhdl.write('pciBridge6.present = "TRUE"\n')
fhdl.write('pciBridge6.virtualDev = "pcieRootPort"\n')
fhdl.write('pciBridge6.functions = "8"\n')
fhdl.write('pciBridge7.present = "TRUE"\n')
fhdl.write('pciBridge7.virtualDev = "pcieRootPort"\n')
fhdl.write('pciBridge7.functions = "8"\n')
fhdl.write('virtualHW.productCompatibility = "hosted"\n')
fhdl.write('memSize = "1108"\n')
fhdl.write('sched.cpu.units = "mhz"\n')
fhdl.write('powerType.powerOff = "soft"\n')
fhdl.write('powerType.suspend = "hard"\n')
fhdl.write('powerType.reset = "soft"\n')
fhdl.write('guestOS = "other-64"\n')
fhdl.write('toolScripts.afterPowerOn = "TRUE"\n')
fhdl.write('toolScripts.afterResume = "TRUE"\n')
fhdl.write('toolScripts.beforeSuspend = "TRUE"\n')
fhdl.write('toolScripts.beforePowerOff = "TRUE"\n')
fhdl.write('chipset.onlineStandby = "FALSE"\n')
fhdl.write('sched.cpu.min = "0"\n')
fhdl.write('sched.cpu.shares = "normal"\n')
fhdl.write('sched.mem.min = "0"\n')
fhdl.write('sched.mem.minSize = "0"\n')
fhdl.write('sched.mem.shares = "normal"\n')
fhdl.write('floppy0.present = "FALSE"\n')
fhdl.write('replay.supported = "FALSE"\n')
fhdl.write('replay.filename = ""\n')
fhdl.write('ide0:0.redo = ""\n')
fhdl.write('pciBridge0.pciSlotNumber = "17"\n')
fhdl.write('pciBridge4.pciSlotNumber = "21"\n')
fhdl.write('pciBridge5.pciSlotNumber = "22"\n')
fhdl.write('pciBridge6.pciSlotNumber = "23"\n')
fhdl.write('pciBridge7.pciSlotNumber = "24"\n')
fhdl.write('vmci0.pciSlotNumber = "33"\n')
fhdl.write('vmci0.present = "TRUE"\n')
fhdl.write('vmci0.id = "158687753"\n')
fhdl.write('evcCompatibilityMode = "FALSE"\n')
fhdl.write('vmotion.checkpointFBSize = "4194304"\n')
fhdl.write('cleanShutdown = "FALSE"\n')
fhdl.write('softPowerOff = "FALSE"\n')
return (fhdl)
def destroy_networks(self, bridge):
# For ESXI the bridge class does all the cleanup, nothing to do here
pass
def start_networks(self, connections, br_type, br_name):
# For ESXI the bridge class does all the setup. Nothing to do here
pass
def destroy_restart_stop_vms(self, vms, action):
for vm in vms:
# find the vmid of this vm
vm_name = vm.keys()[0]
vm_id = vm[vm.keys()[0]]
vm_datast = os.path.join(self.datast, vm_name)
subprocess.call(['vim-cmd', 'vmsvc/power.off', vm_id])
if (action == 'restart'):
subprocess.call(['vim-cmd', 'vmsvc/power.on', vm_id])
elif (action == 'clean'):
subprocess.call(['vim-cmd', 'vmsvc/unregister', vm_id])
for i in os.listdir(vm_datast):
os.remove(os.path.join(vm_datast, i))
os.rmdir(vm_datast)
elif (action == 'stop'):
pass
else:
print ("Illegal argument to destroy_restart_stop")
print ("In ESXI destroy VM")
def create_vm(self, vm, tmp_dir, iso_dir):
# Create the vm directory under the datastore
vm_dir = os.path.join(self.datast, vm.name)
subprocess.call(['mkdir', vm_dir])
#Create the base .vmx file
vmx_fil = os.path.join(vm_dir, vm.name + '.vmx')
fhdl = self.gen_base_vmx(vmx_fil)
fhdl.write('nvram = "%s.nvram"\n' %(vm.name))
fhdl.write('displayName = "%s"\n' %(vm.name))
#if there is a .iso file, then enable the cdrom option
fname = os.path.join(iso_dir, vm.version+'.iso')
if (os.path.isfile(fname)):
fhdl.write('ide0:1.deviceType = "cdrom-image"\n')
fhdl.write('ide0:1.fileName = "%s"\n' %(fname))
fhdl.write('ide0:1.present = "TRUE"\n')
#If there is a .vmdk file, then use that or create a new one
fname = os.path.join(iso_dir, vm.version + '.vmdk')
vmdk_fl = os.path.join(vm_dir, vm.name + '.vmdk' )
print ("Creating the disk, may take several minutes")
if (os.path.isfile(fname)):
subprocess.call(['cp', fname, vmdk_fl])
else:
subprocess.call(['vmkfstools', '-c', '16g', '-d' 'thin', '-a', \
'ide', vmdk_fl])
fhdl.write('ide0:0.fileName = "%s"\n' %(vmdk_fl))
fhdl.write('ide0:0.mode = "independent-persistent"\n')
fhdl.write('ide0:0.present = "TRUE"\n')
# Fill up the network list
intf_no = 0
for i in vm.connections:
fhdl.write('ethernet%d.virtualDev = "e1000"\n' %(intf_no))
fhdl.write('ethernet%d.networkName = "%s"\n' %(intf_no, i))
fhdl.write('ethernet%d.addressType = "generated"\n' %(intf_no))
fhdl.write('ethernet%d.present = "TRUE"\n' %(intf_no))
intf_no += 1
# Now create the VM
fhdl.close()
proc = subprocess.Popen(['vim-cmd', 'solo/registervm', vmx_fil],
stdout = subprocess.PIPE)
vm_id = proc.communicate()[0]
subprocess.call(['vim-cmd', 'vmsvc/power.on', vm_id])
name_to_port = {}
name_to_port[vm.name] = vm_id.rstrip()
return name_to_port
def graphical_console(self, vm_name):
#Please define this method
pass
def restart_stop_vms(self, vm_name, stop):
#Please define this method
pass
"""
Class that starts networks and VMs and also destroys them in KVM
"""
class KVM(Hypervisor):
telnet_start_port = 20000 #Default port to start using for console
def __init__(self, hyp_params):
pass
"""
Function to generate the file that is used by virsh net commands to
start the networks
"""
def gen_net_xml(self, sw_name, br_typ):
f_obj = open('conf/net.xml', 'w')
f_obj.write('<network>\n')
f_obj.write("\t<name>%s</name>\n" % sw_name)
f_obj.write("\t<forward mode = 'bridge'/>\n")
f_obj.write("\t<bridge name = '%s'/>\n" % sw_name)
#Linux Bridge does not require virtualport type to be written in
if br_typ == 'OVSBridge':
f_obj.write("\t<virtualport type = 'openvswitch'/>\n")
f_obj.write("</network>\n")
f_obj.close()
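# For example, for an OVS bridge named 'sw1' the generated conf/net.xml is:
# <network>
#     <name>sw1</name>
#     <forward mode = 'bridge'/>
#     <bridge name = 'sw1'/>
#     <virtualport type = 'openvswitch'/>
# </network>
# (the virtualport element is omitted for a plain Linux bridge)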
"""
Function that calls the virsh net commands after creating XML files for
each network.
"""
def start_networks(self, connections, br_type, br_names):
"""
Bridges list can be used to create the bridges using
subprocess.call()
"""
bridges = []
bridges.extend([br_names['mgmt'], br_names['dummy']])
bridges.extend(connections.keys())
for i in xrange(len(bridges)):
com0 = 'virsh'
com1 = '--connect'
com2 = 'qemu:///system'
self.gen_net_xml(bridges[i], br_type)
subprocess.call([com0,com1,com2,'net-define', 'conf/net.xml'])
subprocess.call([com0,com1,com2,'net-autostart', bridges[i]])
subprocess.call([com0,com1,com2,'net-start', bridges[i]])
os.remove('conf/net.xml')
"""
Function to get an empty TCP port in case the VM is to be connected via
telnet. To connect to the VM's console via telnet, the port returned
by this function is used
"""
def get_port(self):
start_port = KVM.telnet_start_port
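# Probe candidate ports by trying to bind to them: on failure skip ahead
# by 10 and retry; on success remember the next port so that subsequent
# VMs are not handed the same one.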
while True:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
sock.bind(('', start_port))
except socket.error as msg:
start_port += 10
sock.close()
continue
sock.close()
KVM.telnet_start_port = start_port + 1
return start_port
"""
Function to add the --network set of commands to the virt install
commands
"""
def add_network(self, cmd_list, vm):
for i in vm.connections:
cmd_list.append('--network')
cmd_list.append('network=%s,model=e1000' \
% i)
return cmd_list
"""
Function to start the VMs using virt-install commands
"""
def create_vm(self, vm, work_dir, iso_dir):
name_to_port = {}
form = ''
extra = []
cmd_list = [
'virt-install',
'--name=%s' % vm.name,
'--ram=1024',
'--watchdog','i6300esb,action=none']
# If .iso is present add the cdrom option
iso_fil = os.path.join(iso_dir, vm.version + '.iso')
if (os.path.exists(iso_fil)):
cmd_list.append('--cdrom=' + iso_fil)
#If vmdk file is present add it as the disk
print ("Copying or creating disk - May take several minutes")
vmdk_fil = os.path.join(iso_dir, vm.version + '.vmdk')
tgt_fil = os.path.join(work_dir, vm.name + '.img')
if (os.path.exists(vmdk_fil)):
subprocess.call(['qemu-img', 'convert', '-f', 'vmdk', '-O', \
'qcow2', vmdk_fil, tgt_fil])
if (not(os.path.exists(iso_fil))):
cmd_list.append('--boot=hd')
else:
subprocess.call(['qemu-img', 'create', '-f', 'qcow2', tgt_fil, '16G'])
cmd_list.append('--disk=%s,format=qcow2,device=disk,bus=ide'
%(tgt_fil))
if 'boot_device' in vm.extra_commands.keys():
if vm.extra_commands['boot_device'] == 'cdrom':
cmd_list.append('--livecd')
if 'machine_type' in vm.extra_commands.keys():
cmd_list.append('--machine='+vm.extra_commands['machine_type'])
if 'cpu_type' in vm.extra_commands.keys():
cmd_list.extend(['--cpu', 'qemu64,disable=svm'])
if vm.console == "telnet":
tcp_port = self.get_port()
cmd_list.extend(['--graphics', 'none', '--serial',\
'tcp,host=:%d,mode=bind,protocol=telnet' % tcp_port\
,'--noautoconsole'])
name_to_port[vm.name] = "%d" % tcp_port
else:
cmd_list.extend(['--graphics', 'vnc', '--noautoconsole'])
name_to_port[vm.name] = "vnc"
cmd_list = self.add_network(cmd_list, vm)
subprocess.call(cmd_list)
return name_to_port
def destroy_networks(self,bridge):
com0 = 'virsh'
com1 = '--connect'
com2 = 'qemu:///system'
subprocess.call([com0,com1,com2,'net-destroy', bridge])
subprocess.call([com0,com1,com2,'net-undefine',bridge])
def destroy_vms(self, vms):
for vm in vms:
subprocess.call(['virsh','destroy', vm.keys()[0]])
subprocess.call(['virsh','undefine',vm.keys()[0]])
"""
elif action == 'restart':
subprocess.call(['virsh', 'start', vm.keys()[0]])
elif action == 'stop':
pass
else:
print ("Illegal argument to destroy_restart_stop_vms. Should be\
'clean', 'restart' or 'stop'")
"""
def restart_stop_vms(self, vm_name, stop):
subprocess.call(['virsh', 'destroy', vm_name])
if not stop:
subprocess.call(['virsh', 'start', vm_name])
def graphical_console(self, vm_name):
subprocess.call(['virt-viewer', vm_name])
|
|
"""Utilities for implementing `nbio_interface.AbstractIOServices` for
pika connection adapters.
"""
import collections
import errno
import functools
import logging
import numbers
import os
import socket
import ssl
import sys
import traceback
from rez.vendor.pika.adapters.utils.nbio_interface import (AbstractIOReference,
AbstractStreamTransport)
import rez.vendor.pika.compat
import rez.vendor.pika.diagnostic_utils
# "Try again" error codes for non-blocking socket I/O - send()/recv().
# NOTE: POSIX.1 allows either error to be returned for this case and doesn't require
# them to have the same value.
_TRY_IO_AGAIN_SOCK_ERROR_CODES = (
errno.EAGAIN,
errno.EWOULDBLOCK,
)
# "Connection establishment pending" error codes for non-blocking socket
# connect() call.
# NOTE: EINPROGRESS for Posix and EWOULDBLOCK for Windows
_CONNECTION_IN_PROGRESS_SOCK_ERROR_CODES = (
errno.EINPROGRESS,
errno.EWOULDBLOCK,
)
_LOGGER = logging.getLogger(__name__)
# Decorator that logs exceptions escaping from the decorated function
_log_exceptions = rez.vendor.pika.diagnostic_utils.create_log_exception_decorator(_LOGGER) # pylint: disable=C0103
def check_callback_arg(callback, name):
"""Raise TypeError if callback is not callable
:param callback: callback to check
:param name: Name to include in exception text
:raises TypeError:
"""
if not callable(callback):
raise TypeError('{} must be callable, but got {!r}'.format(
name, callback))
def check_fd_arg(fd):
"""Raise TypeError if file descriptor is not an integer
:param fd: file descriptor
:raises TypeError:
"""
if not isinstance(fd, numbers.Integral):
raise TypeError(
'Parameter must be a file descriptor, but got {!r}'.format(fd))
def _retry_on_sigint(func):
"""Function decorator for retrying on SIGINT.
"""
@functools.wraps(func)
def retry_sigint_wrap(*args, **kwargs):
"""Wrapper for decorated function"""
while True:
try:
return func(*args, **kwargs)
except rez.vendor.pika.compat.SOCKET_ERROR as error:
if error.errno == errno.EINTR:
continue
else:
raise
return retry_sigint_wrap
class SocketConnectionMixin(object):
"""Implements
`pika.adapters.utils.nbio_interface.AbstractIOServices.connect_socket()`
on top of
`pika.adapters.utils.nbio_interface.AbstractFileDescriptorServices` and
basic `pika.adapters.utils.nbio_interface.AbstractIOServices`.
"""
def connect_socket(self, sock, resolved_addr, on_done):
"""Implement
:py:meth:`.nbio_interface.AbstractIOServices.connect_socket()`.
"""
return _AsyncSocketConnector(
nbio=self, sock=sock, resolved_addr=resolved_addr,
on_done=on_done).start()
class StreamingConnectionMixin(object):
"""Implements
`.nbio_interface.AbstractIOServices.create_streaming_connection()` on
top of `.nbio_interface.AbstractFileDescriptorServices` and basic
`nbio_interface.AbstractIOServices` services.
"""
def create_streaming_connection(self,
protocol_factory,
sock,
on_done,
ssl_context=None,
server_hostname=None):
"""Implement
:py:meth:`.nbio_interface.AbstractIOServices.create_streaming_connection()`.
"""
try:
return _AsyncStreamConnector(
nbio=self,
protocol_factory=protocol_factory,
sock=sock,
ssl_context=ssl_context,
server_hostname=server_hostname,
on_done=on_done).start()
except Exception as error:
_LOGGER.error('create_streaming_connection(%s) failed: %r', sock,
error)
# Close the socket since this function takes ownership
try:
sock.close()
except Exception as error: # pylint: disable=W0703
# We log and suppress the exception from sock.close() so that
# the original error from _AsyncStreamConnector constructor will
# percolate
_LOGGER.error('%s.close() failed: %r', sock, error)
raise
class _AsyncServiceAsyncHandle(AbstractIOReference):
"""This module's adaptation of `.nbio_interface.AbstractIOReference`
"""
def __init__(self, subject):
"""
:param subject: subject of the reference containing a `cancel()` method
"""
self._cancel = subject.cancel
def cancel(self):
"""Cancel pending operation
:returns: False if was already done or cancelled; True otherwise
:rtype: bool
"""
return self._cancel()
class _AsyncSocketConnector(object):
"""Connects the given non-blocking socket asynchronously using
`.nbio_interface.AbstractFileDescriptorServices` and basic
`.nbio_interface.AbstractIOServices`. Used for implementing
`.nbio_interface.AbstractIOServices.connect_socket()`.
"""
_STATE_NOT_STARTED = 0 # start() not called yet
_STATE_ACTIVE = 1 # workflow started
_STATE_CANCELED = 2 # workflow aborted by user's cancel() call
_STATE_COMPLETED = 3 # workflow completed: succeeded or failed
def __init__(self, nbio, sock, resolved_addr, on_done):
"""
:param AbstractIOServices | AbstractFileDescriptorServices nbio:
:param socket.socket sock: non-blocking socket that needs to be
connected via `socket.socket.connect()`
:param tuple resolved_addr: resolved destination address/port two-tuple
which is compatible with the given socket's address family
:param callable on_done: user callback that takes None upon successful
completion or exception upon error (check for `BaseException`) as
its only arg. It will not be called if the operation was cancelled.
:raises ValueError: if host portion of `resolved_addr` is not an IP
address or is inconsistent with the socket's address family as
validated via `socket.inet_pton()`
"""
check_callback_arg(on_done, 'on_done')
try:
socket.inet_pton(sock.family, resolved_addr[0])
except Exception as error: # pylint: disable=W0703
if not hasattr(socket, 'inet_pton'):
_LOGGER.debug(
'Unable to check resolved address: no socket.inet_pton().')
else:
msg = ('Invalid or unresolved IP address '
'{!r} for socket {}: {!r}').format(
resolved_addr, sock, error)
_LOGGER.error(msg)
raise ValueError(msg)
self._nbio = nbio
self._sock = sock
self._addr = resolved_addr
self._on_done = on_done
self._state = self._STATE_NOT_STARTED
self._watching_socket_events = False
@_log_exceptions
def _cleanup(self):
"""Remove socket watcher, if any
"""
if self._watching_socket_events:
self._watching_socket_events = False
self._nbio.remove_writer(self._sock.fileno())
def start(self):
"""Start asynchronous connection establishment.
:rtype: AbstractIOReference
"""
assert self._state == self._STATE_NOT_STARTED, (
'_AsyncSocketConnector.start(): expected _STATE_NOT_STARTED',
self._state)
self._state = self._STATE_ACTIVE
# Continue the rest of the operation on the I/O loop to avoid calling
# user's completion callback from the scope of user's call
self._nbio.add_callback_threadsafe(self._start_async)
return _AsyncServiceAsyncHandle(self)
def cancel(self):
"""Cancel pending connection request without calling user's completion
callback.
:returns: False if was already done or cancelled; True otherwise
:rtype: bool
"""
if self._state == self._STATE_ACTIVE:
self._state = self._STATE_CANCELED
_LOGGER.debug('User canceled connection request for %s to %s',
self._sock, self._addr)
self._cleanup()
return True
_LOGGER.debug(
'_AsyncSocketConnector cancel requested when not ACTIVE: '
'state=%s; %s', self._state, self._sock)
return False
@_log_exceptions
def _report_completion(self, result):
"""Advance to COMPLETED state, remove socket watcher, and invoke user's
completion callback.
:param BaseException | None result: value to pass in user's callback
"""
_LOGGER.debug('_AsyncSocketConnector._report_completion(%r); %s',
result, self._sock)
assert isinstance(result, (BaseException, type(None))), (
'_AsyncSocketConnector._report_completion() expected exception or '
'None as result.', result)
assert self._state == self._STATE_ACTIVE, (
'_AsyncSocketConnector._report_completion() expected '
'_STATE_ACTIVE', self._state)
self._state = self._STATE_COMPLETED
self._cleanup()
self._on_done(result)
@_log_exceptions
def _start_async(self):
"""Called as callback from I/O loop to kick-start the workflow, so it's
safe to call user's completion callback from here, if needed
"""
if self._state != self._STATE_ACTIVE:
# Must have been canceled by user before we were called
_LOGGER.debug(
'Abandoning sock=%s connection establishment to %s '
'due to inactive state=%s', self._sock, self._addr, self._state)
return
try:
self._sock.connect(self._addr)
except (Exception, rez.vendor.pika.compat.SOCKET_ERROR) as error: # pylint: disable=W0703
if (isinstance(error, rez.vendor.pika.compat.SOCKET_ERROR) and
error.errno in _CONNECTION_IN_PROGRESS_SOCK_ERROR_CODES):
# Connection establishment is pending
pass
else:
_LOGGER.error('%s.connect(%s) failed: %r', self._sock,
self._addr, error)
self._report_completion(error)
return
# Get notified when the socket becomes writable
try:
self._nbio.set_writer(self._sock.fileno(), self._on_writable)
except Exception as error: # pylint: disable=W0703
_LOGGER.exception('async.set_writer(%s) failed: %r', self._sock,
error)
self._report_completion(error)
return
else:
self._watching_socket_events = True
_LOGGER.debug('Connection-establishment is in progress for %s.',
self._sock)
@_log_exceptions
def _on_writable(self):
"""Called when socket connects or fails to. Check for predicament and
invoke user's completion callback.
"""
if self._state != self._STATE_ACTIVE:
# This should never happen since we remove the watcher upon
# `cancel()`
_LOGGER.error(
'Socket connection-establishment event watcher '
'called in inactive state (ignoring): %s; state=%s', self._sock,
self._state)
return
# The moment of truth...
error_code = self._sock.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
if not error_code:
_LOGGER.info('Socket connected: %s', self._sock)
result = None
else:
error_msg = os.strerror(error_code)
_LOGGER.error('Socket failed to connect: %s; error=%s (%s)',
self._sock, error_code, error_msg)
result = rez.vendor.pika.compat.SOCKET_ERROR(error_code, error_msg)
self._report_completion(result)
class _AsyncStreamConnector(object):
"""Performs asynchronous SSL session establishment, if requested, on the
already-connected socket and links the streaming transport to protocol.
Used for implementing
`.nbio_interface.AbstractIOServices.create_streaming_connection()`.
"""
_STATE_NOT_STARTED = 0 # start() not called yet
_STATE_ACTIVE = 1 # start() called and kicked off the workflow
_STATE_CANCELED = 2 # workflow terminated by cancel() request
_STATE_COMPLETED = 3 # workflow terminated by success or failure
def __init__(self, nbio, protocol_factory, sock, ssl_context,
server_hostname, on_done):
"""
NOTE: We take ownership of the given socket upon successful completion
of the constructor.
See `AbstractIOServices.create_streaming_connection()` for detailed
documentation of the corresponding args.
:param AbstractIOServices | AbstractFileDescriptorServices nbio:
:param callable protocol_factory:
:param socket.socket sock:
:param ssl.SSLContext | None ssl_context:
:param str | None server_hostname:
:param callable on_done:
"""
check_callback_arg(protocol_factory, 'protocol_factory')
check_callback_arg(on_done, 'on_done')
if not isinstance(ssl_context, (type(None), ssl.SSLContext)):
raise ValueError('Expected ssl_context=None | ssl.SSLContext, but '
'got {!r}'.format(ssl_context))
if server_hostname is not None and ssl_context is None:
raise ValueError('Non-None server_hostname must not be passed '
'without ssl context')
# Check that the socket connection establishment had completed in order
# to avoid stalling while waiting for the socket to become readable
# and/or writable.
try:
sock.getpeername()
except Exception as error:
raise ValueError(
'Expected connected socket, but getpeername() failed: '
'error={!r}; {}; '.format(error, sock))
self._nbio = nbio
self._protocol_factory = protocol_factory
self._sock = sock
self._ssl_context = ssl_context
self._server_hostname = server_hostname
self._on_done = on_done
self._state = self._STATE_NOT_STARTED
self._watching_socket = False
@_log_exceptions
def _cleanup(self, close):
"""Cancel pending async operations, if any
:param bool close: close the socket if true
"""
_LOGGER.debug('_AsyncStreamConnector._cleanup(%r)', close)
if self._watching_socket:
_LOGGER.debug(
'_AsyncStreamConnector._cleanup(%r): removing RdWr; %s', close,
self._sock)
self._watching_socket = False
self._nbio.remove_reader(self._sock.fileno())
self._nbio.remove_writer(self._sock.fileno())
try:
if close:
_LOGGER.debug(
'_AsyncStreamConnector._cleanup(%r): closing socket; %s',
close, self._sock)
try:
self._sock.close()
except Exception as error: # pylint: disable=W0703
_LOGGER.exception('_sock.close() failed: error=%r; %s',
error, self._sock)
raise
finally:
self._sock = None
self._nbio = None
self._protocol_factory = None
self._ssl_context = None
self._server_hostname = None
self._on_done = None
def start(self):
"""Kick off the workflow
:rtype: AbstractIOReference
"""
_LOGGER.debug('_AsyncStreamConnector.start(); %s', self._sock)
assert self._state == self._STATE_NOT_STARTED, (
'_AsyncStreamConnector.start() expected '
'_STATE_NOT_STARTED', self._state)
self._state = self._STATE_ACTIVE
# Request callback from I/O loop to start processing so that we don't
# end up making callbacks from the caller's scope
self._nbio.add_callback_threadsafe(self._start_async)
return _AsyncServiceAsyncHandle(self)
def cancel(self):
"""Cancel pending connection request without calling user's completion
callback.
:returns: False if was already done or cancelled; True otherwise
:rtype: bool
"""
if self._state == self._STATE_ACTIVE:
self._state = self._STATE_CANCELED
_LOGGER.debug('User canceled streaming linkup for %s', self._sock)
# Close the socket, since we took ownership
self._cleanup(close=True)
return True
_LOGGER.debug(
'_AsyncStreamConnector cancel requested when not ACTIVE: '
'state=%s; %s', self._state, self._sock)
return False
@_log_exceptions
def _report_completion(self, result):
"""Advance to COMPLETED state, cancel async operation(s), and invoke
user's completion callback.
:param BaseException | tuple result: value to pass in user's callback.
`tuple(transport, protocol)` on success, exception on error
"""
_LOGGER.debug('_AsyncStreamConnector._report_completion(%r); %s',
result, self._sock)
assert isinstance(result, (BaseException, tuple)), (
'_AsyncStreamConnector._report_completion() expected exception or '
'tuple as result.', result, self._state)
assert self._state == self._STATE_ACTIVE, (
'_AsyncStreamConnector._report_completion() expected '
'_STATE_ACTIVE', self._state)
self._state = self._STATE_COMPLETED
# Notify user
try:
self._on_done(result)
except Exception:
_LOGGER.exception('%r: _on_done(%r) failed.',
self._report_completion, result)
raise
finally:
# NOTE: Close the socket on error, since we took ownership of it
self._cleanup(close=isinstance(result, BaseException))
@_log_exceptions
def _start_async(self):
"""Called as callback from I/O loop to kick-start the workflow, so it's
safe to call user's completion callback from here if needed
"""
_LOGGER.debug('_AsyncStreamConnector._start_async(); %s', self._sock)
if self._state != self._STATE_ACTIVE:
# Must have been canceled by user before we were called
_LOGGER.debug(
'Abandoning streaming linkup due to inactive state '
'transition; state=%s; %s; .', self._state, self._sock)
return
# Link up protocol and transport if this is a plaintext linkup;
# otherwise kick-off SSL workflow first
if self._ssl_context is None:
self._linkup()
else:
_LOGGER.debug('Starting SSL handshake on %s', self._sock)
# Wrap our plain socket in ssl socket
try:
self._sock = self._ssl_context.wrap_socket(
self._sock,
server_side=False,
do_handshake_on_connect=False,
suppress_ragged_eofs=False, # False = error on incoming EOF
server_hostname=self._server_hostname)
except Exception as error: # pylint: disable=W0703
_LOGGER.exception('SSL wrap_socket(%s) failed: %r', self._sock,
error)
self._report_completion(error)
return
self._do_ssl_handshake()
@_log_exceptions
def _linkup(self):
"""Connection is ready: instantiate and link up transport and protocol,
and invoke user's completion callback.
"""
_LOGGER.debug('_AsyncStreamConnector._linkup()')
transport = None
try:
# Create the protocol
try:
protocol = self._protocol_factory()
except Exception as error:
_LOGGER.exception('protocol_factory() failed: error=%r; %s',
error, self._sock)
raise
if self._ssl_context is None:
# Create plaintext streaming transport
try:
transport = _AsyncPlaintextTransport(
self._sock, protocol, self._nbio)
except Exception as error:
_LOGGER.exception('PlainTransport() failed: error=%r; %s',
error, self._sock)
raise
else:
# Create SSL streaming transport
try:
transport = _AsyncSSLTransport(self._sock, protocol,
self._nbio)
except Exception as error:
_LOGGER.exception('SSLTransport() failed: error=%r; %s',
error, self._sock)
raise
_LOGGER.debug('_linkup(): created transport %r', transport)
# Acquaint protocol with its transport
try:
protocol.connection_made(transport)
except Exception as error:
_LOGGER.exception(
'protocol.connection_made(%r) failed: error=%r; %s',
transport, error, self._sock)
raise
_LOGGER.debug('_linkup(): introduced transport to protocol %r; %r',
transport, protocol)
except Exception as error: # pylint: disable=W0703
result = error
else:
result = (transport, protocol)
self._report_completion(result)
@_log_exceptions
def _do_ssl_handshake(self):
"""Perform asynchronous SSL handshake on the already wrapped socket
"""
_LOGGER.debug('_AsyncStreamConnector._do_ssl_handshake()')
if self._state != self._STATE_ACTIVE:
_LOGGER.debug(
'_do_ssl_handshake: Abandoning streaming linkup due '
'to inactive state transition; state=%s; %s; .', self._state,
self._sock)
return
done = False
try:
try:
self._sock.do_handshake()
except ssl.SSLError as error:
if error.errno == ssl.SSL_ERROR_WANT_READ:
_LOGGER.debug('SSL handshake wants read; %s.', self._sock)
self._watching_socket = True
self._nbio.set_reader(self._sock.fileno(),
self._do_ssl_handshake)
self._nbio.remove_writer(self._sock.fileno())
elif error.errno == ssl.SSL_ERROR_WANT_WRITE:
_LOGGER.debug('SSL handshake wants write. %s', self._sock)
self._watching_socket = True
self._nbio.set_writer(self._sock.fileno(),
self._do_ssl_handshake)
self._nbio.remove_reader(self._sock.fileno())
else:
# Outer catch will report it
raise
else:
done = True
_LOGGER.info('SSL handshake completed successfully: %s',
self._sock)
except Exception as error: # pylint: disable=W0703
_LOGGER.exception('SSL do_handshake failed: error=%r; %s', error,
self._sock)
self._report_completion(error)
return
if done:
# Suspend I/O and link up transport with protocol
_LOGGER.debug(
'_do_ssl_handshake: removing watchers ahead of linkup: %s',
self._sock)
self._nbio.remove_reader(self._sock.fileno())
self._nbio.remove_writer(self._sock.fileno())
# So that our `_cleanup()` won't interfere with the transport's
# socket watcher configuration.
self._watching_socket = False
_LOGGER.debug(
'_do_ssl_handshake: pre-linkup removal of watchers is done; %s',
self._sock)
self._linkup()
class _AsyncTransportBase( # pylint: disable=W0223
AbstractStreamTransport):
"""Base class for `_AsyncPlaintextTransport` and `_AsyncSSLTransport`.
"""
_STATE_ACTIVE = 1
_STATE_FAILED = 2 # connection failed
_STATE_ABORTED_BY_USER = 3 # cancel() called
_STATE_COMPLETED = 4 # done with connection
_MAX_RECV_BYTES = 4096 # per socket.recv() documentation recommendation
# Max per consume call to prevent event starvation
_MAX_CONSUME_BYTES = 1024 * 100
class RxEndOfFile(OSError):
"""We raise this internally when EOF (empty read) is detected on input.
"""
def __init__(self):
super(_AsyncTransportBase.RxEndOfFile, self).__init__(
-1, 'End of input stream (EOF)')
def __init__(self, sock, protocol, nbio):
"""
:param socket.socket | ssl.SSLSocket sock: connected socket
:param pika.adapters.utils.nbio_interface.AbstractStreamProtocol protocol:
corresponding protocol in this transport/protocol pairing; the
protocol already had its `connection_made()` method called.
:param AbstractIOServices | AbstractFileDescriptorServices nbio:
"""
_LOGGER.debug('_AsyncTransportBase.__init__: %s', sock)
self._sock = sock
self._protocol = protocol
self._nbio = nbio
self._state = self._STATE_ACTIVE
self._tx_buffers = collections.deque()
self._tx_buffered_byte_count = 0
def abort(self):
"""Close connection abruptly without waiting for pending I/O to
complete. Will invoke the corresponding protocol's `connection_lost()`
method asynchronously (not in context of the abort() call).
:raises Exception: Exception-based exception on error
"""
_LOGGER.info('Aborting transport connection: state=%s; %s', self._state,
self._sock)
self._initiate_abort(None)
def get_protocol(self):
"""Return the protocol linked to this transport.
:rtype: pika.adapters.utils.nbio_interface.AbstractStreamProtocol
"""
return self._protocol
def get_write_buffer_size(self):
"""
:returns: Current size of output data buffered by the transport
:rtype: int
"""
return self._tx_buffered_byte_count
def _buffer_tx_data(self, data):
"""Buffer the given data until it can be sent asynchronously.
:param bytes data:
:raises ValueError: if called with empty data
"""
if not data:
_LOGGER.error('write() called with empty data: state=%s; %s',
self._state, self._sock)
raise ValueError('write() called with empty data {!r}'.format(data))
if self._state != self._STATE_ACTIVE:
_LOGGER.debug(
'Ignoring write() called during inactive state: '
'state=%s; %s', self._state, self._sock)
return
self._tx_buffers.append(data)
self._tx_buffered_byte_count += len(data)
def _consume(self):
"""Utility method for use by subclasses to ingest data from socket and
dispatch it to protocol's `data_received()` method until a socket-specific
"try again" exception occurs, the per-event data consumption limit is
reached, the transport becomes inactive, or a fatal failure occurs.
Consumes up to `self._MAX_CONSUME_BYTES` to prevent event starvation or
until state becomes inactive (e.g., `protocol.data_received()` callback
aborts the transport)
:raises: Whatever the corresponding `sock.recv()` raises except the
socket error with errno.EINTR
:raises: Whatever the `protocol.data_received()` callback raises
:raises _AsyncTransportBase.RxEndOfFile: upon shutdown of input stream
"""
bytes_consumed = 0
while (self._state == self._STATE_ACTIVE and
bytes_consumed < self._MAX_CONSUME_BYTES):
data = self._sigint_safe_recv(self._sock, self._MAX_RECV_BYTES)
bytes_consumed += len(data)
# Empty data, should disconnect
if not data:
_LOGGER.error('Socket EOF; %s', self._sock)
raise self.RxEndOfFile()
# Pass the data to the protocol
try:
self._protocol.data_received(data)
except Exception as error:
_LOGGER.exception(
'protocol.data_received() failed: error=%r; %s', error,
self._sock)
raise
def _produce(self):
"""Utility method for use by subclasses to emit data from tx_buffers.
This method sends chunks from `tx_buffers` until all chunks are
exhausted or sending is interrupted by an exception. Maintains integrity
of `self.tx_buffers`.
:raises: whatever the corresponding `sock.send()` raises except the
socket error with errno.EINTR
"""
while self._tx_buffers:
num_bytes_sent = self._sigint_safe_send(self._sock,
self._tx_buffers[0])
chunk = self._tx_buffers.popleft()
if num_bytes_sent < len(chunk):
_LOGGER.debug('Partial send, requeuing remaining data; %s of %s',
num_bytes_sent, len(chunk))
self._tx_buffers.appendleft(chunk[num_bytes_sent:])
self._tx_buffered_byte_count -= num_bytes_sent
assert self._tx_buffered_byte_count >= 0, (
'_AsyncTransportBase._produce() tx buffer size underflow',
self._tx_buffered_byte_count, self._state)
@staticmethod
@_retry_on_sigint
def _sigint_safe_recv(sock, max_bytes):
"""Receive data from socket, retrying on SIGINT.
:param sock: stream or SSL socket
:param max_bytes: maximum number of bytes to receive
:returns: received data or empty bytes upon end of file
:rtype: bytes
:raises: whatever the corresponding `sock.recv()` raises except socket
error with errno.EINTR
"""
return sock.recv(max_bytes)
@staticmethod
@_retry_on_sigint
def _sigint_safe_send(sock, data):
"""Send data to socket, retrying on SIGINT.
:param sock: stream or SSL socket
:param data: data bytes to send
:returns: number of bytes actually sent
:rtype: int
:raises: whatever the corresponding `sock.send()` raises except socket
error with errno.EINTR
"""
return sock.send(data)
@_log_exceptions
def _deactivate(self):
"""Unregister the transport from I/O events
"""
if self._state == self._STATE_ACTIVE:
_LOGGER.info('Deactivating transport: state=%s; %s', self._state,
self._sock)
self._nbio.remove_reader(self._sock.fileno())
self._nbio.remove_writer(self._sock.fileno())
self._tx_buffers.clear()
@_log_exceptions
def _close_and_finalize(self):
"""Close the transport's socket and unlink the transport it from
references to other assets (protocol, etc.)
"""
if self._state != self._STATE_COMPLETED:
_LOGGER.info('Closing transport socket and unlinking: state=%s; %s',
self._state, self._sock)
try:
self._sock.shutdown(socket.SHUT_RDWR)
except rez.vendor.pika.compat.SOCKET_ERROR:
pass
self._sock.close()
self._sock = None
self._protocol = None
self._nbio = None
self._state = self._STATE_COMPLETED
@_log_exceptions
def _initiate_abort(self, error):
"""Initiate asynchronous abort of the transport that concludes with a
call to the protocol's `connection_lost()` method. No flushing of
output buffers will take place.
:param BaseException | None error: None if being canceled by user,
including via a falsy return value from protocol.eof_received();
otherwise the exception corresponding to the failed connection.
"""
_LOGGER.info(
'_AsyncTransportBase._initiate_abort(): Initiating abrupt '
'asynchronous transport shutdown: state=%s; error=%r; %s',
self._state, error, self._sock)
assert self._state != self._STATE_COMPLETED, (
'_AsyncTransportBase._initiate_abort() expected '
'non-_STATE_COMPLETED', self._state)
if self._state == self._STATE_COMPLETED:
return
self._deactivate()
# Update state
if error is None:
# Being aborted by user
if self._state == self._STATE_ABORTED_BY_USER:
# Abort by user already pending
_LOGGER.debug('_AsyncTransportBase._initiate_abort(): '
'ignoring - user-abort already pending.')
return
# Notification priority is given to user-initiated abort over
# failed connection
self._state = self._STATE_ABORTED_BY_USER
else:
# Connection failed
if self._state != self._STATE_ACTIVE:
assert self._state == self._STATE_ABORTED_BY_USER, (
'_AsyncTransportBase._initiate_abort() expected '
'_STATE_ABORTED_BY_USER', self._state)
return
self._state = self._STATE_FAILED
# Schedule callback from I/O loop to avoid potential reentry into user
# code
self._nbio.add_callback_threadsafe(
functools.partial(self._connection_lost_notify_async, error))
@_log_exceptions
def _connection_lost_notify_async(self, error):
"""Handle aborting of transport either due to socket error or user-
initiated `abort()` call. Must be called from an I/O loop callback owned
by us in order to avoid reentry into user code from user's API call into
the transport.
:param BaseException | None error: None if being canceled by user;
otherwise the exception corresponding to the failed connection.
"""
_LOGGER.debug('Concluding transport shutdown: state=%s; error=%r',
self._state, error)
if self._state == self._STATE_COMPLETED:
return
if error is not None and self._state != self._STATE_FAILED:
# Priority is given to user-initiated abort notification
assert self._state == self._STATE_ABORTED_BY_USER, (
'_AsyncTransportBase._connection_lost_notify_async() '
'expected _STATE_ABORTED_BY_USER', self._state)
return
# Inform protocol
try:
self._protocol.connection_lost(error)
except Exception as exc: # pylint: disable=W0703
_LOGGER.exception('protocol.connection_lost(%r) failed: exc=%r; %s',
error, exc, self._sock)
# Re-raise, since we've exhausted our normal failure notification
# mechanism (i.e., connection_lost())
raise
finally:
self._close_and_finalize()
class _AsyncPlaintextTransport(_AsyncTransportBase):
"""Implementation of `nbio_interface.AbstractStreamTransport` for a
plaintext connection.
"""
def __init__(self, sock, protocol, nbio):
"""
:param socket.socket sock: non-blocking connected socket
:param pika.adapters.utils.nbio_interface.AbstractStreamProtocol protocol:
corresponding protocol in this transport/protocol pairing; the
protocol already had its `connection_made()` method called.
:param AbstractIOServices | AbstractFileDescriptorServices nbio:
"""
super(_AsyncPlaintextTransport, self).__init__(sock, protocol, nbio)
# Request to be notified of incoming data; we'll watch for writability
# only when our write buffer is non-empty
self._nbio.set_reader(self._sock.fileno(), self._on_socket_readable)
def write(self, data):
"""Buffer the given data until it can be sent asynchronously.
:param bytes data:
:raises ValueError: if called with empty data
"""
if self._state != self._STATE_ACTIVE:
_LOGGER.debug(
'Ignoring write() called during inactive state: '
'state=%s; %s', self._state, self._sock)
return
assert data, ('_AsyncPlaintextTransport.write(): empty data from user.',
data, self._state)
# pika/pika#1286
# NOTE: Modify code to write data to buffer before setting writer.
# Otherwise a race condition can occur where ioloop executes writer
# while buffer is still empty.
tx_buffer_was_empty = self.get_write_buffer_size() == 0
self._buffer_tx_data(data)
if tx_buffer_was_empty:
self._nbio.set_writer(self._sock.fileno(), self._on_socket_writable)
_LOGGER.debug('Turned on writability watcher: %s', self._sock)
@_log_exceptions
def _on_socket_readable(self):
"""Ingest data from socket and dispatch it to protocol until exception
occurs (typically EAGAIN or EWOULDBLOCK), per-event data consumption
limit is reached, transport becomes inactive, or failure.
"""
if self._state != self._STATE_ACTIVE:
_LOGGER.debug(
'Ignoring readability notification due to inactive '
'state: state=%s; %s', self._state, self._sock)
return
try:
self._consume()
except self.RxEndOfFile:
try:
keep_open = self._protocol.eof_received()
except Exception as error: # pylint: disable=W0703
_LOGGER.exception(
'protocol.eof_received() failed: error=%r; %s', error,
self._sock)
self._initiate_abort(error)
else:
if keep_open:
_LOGGER.info(
'protocol.eof_received() elected to keep open: %s',
self._sock)
self._nbio.remove_reader(self._sock.fileno())
else:
_LOGGER.info('protocol.eof_received() elected to close: %s',
self._sock)
self._initiate_abort(None)
except (Exception, rez.vendor.pika.compat.SOCKET_ERROR) as error: # pylint: disable=W0703
if (isinstance(error, rez.vendor.pika.compat.SOCKET_ERROR) and
error.errno in _TRY_IO_AGAIN_SOCK_ERROR_CODES):
_LOGGER.debug('Recv would block on %s', self._sock)
else:
_LOGGER.exception(
'_AsyncTransportBase._consume() failed, aborting '
'connection: error=%r; sock=%s; Caller\'s stack:\n%s',
error, self._sock, ''.join(
traceback.format_exception(*sys.exc_info())))
self._initiate_abort(error)
else:
if self._state != self._STATE_ACTIVE:
# Most likely our protocol's `data_received()` aborted the
# transport
_LOGGER.debug(
'Leaving Plaintext consumer due to inactive '
'state: state=%s; %s', self._state, self._sock)
@_log_exceptions
def _on_socket_writable(self):
"""Handle writable socket notification
"""
if self._state != self._STATE_ACTIVE:
_LOGGER.debug(
'Ignoring writability notification due to inactive '
'state: state=%s; %s', self._state, self._sock)
return
# We shouldn't be getting called with empty tx buffers
assert self._tx_buffers, (
'_AsyncPlaintextTransport._on_socket_writable() called, '
'but _tx_buffers is empty.', self._state)
try:
# Transmit buffered data to remote socket
self._produce()
except (Exception, rez.vendor.pika.compat.SOCKET_ERROR) as error: # pylint: disable=W0703
if (isinstance(error, rez.vendor.pika.compat.SOCKET_ERROR) and
error.errno in _TRY_IO_AGAIN_SOCK_ERROR_CODES):
_LOGGER.debug('Send would block on %s', self._sock)
else:
_LOGGER.exception(
'_AsyncTransportBase._produce() failed, aborting '
'connection: error=%r; sock=%s; Caller\'s stack:\n%s',
error, self._sock, ''.join(
traceback.format_exception(*sys.exc_info())))
self._initiate_abort(error)
else:
if not self._tx_buffers:
self._nbio.remove_writer(self._sock.fileno())
_LOGGER.debug('Turned off writability watcher: %s', self._sock)
class _AsyncSSLTransport(_AsyncTransportBase):
"""Implementation of `.nbio_interface.AbstractStreamTransport` for an SSL
connection.
"""
def __init__(self, sock, protocol, nbio):
"""
:param ssl.SSLSocket sock: non-blocking connected socket
:param pika.adapters.utils.nbio_interface.AbstractStreamProtocol protocol:
corresponding protocol in this transport/protocol pairing; the
protocol already had its `connection_made()` method called.
:param AbstractIOServices | AbstractFileDescriptorServices nbio:
"""
super(_AsyncSSLTransport, self).__init__(sock, protocol, nbio)
self._ssl_readable_action = self._consume
self._ssl_writable_action = None
# Bootstrap consumer; we'll take care of producer once data is buffered
self._nbio.set_reader(self._sock.fileno(), self._on_socket_readable)
# Try reading asap just in case read-ahead already buffered some data
self._nbio.add_callback_threadsafe(self._on_socket_readable)
def write(self, data):
"""Buffer the given data until it can be sent asynchronously.
:param bytes data:
:raises ValueError: if called with empty data
"""
if self._state != self._STATE_ACTIVE:
_LOGGER.debug(
'Ignoring write() called during inactive state: '
'state=%s; %s', self._state, self._sock)
return
assert data, ('_AsyncSSLTransport.write(): empty data from user.',
data, self._state)
# pika/pika#1286
# NOTE: Modify code to write data to buffer before setting writer.
# Otherwise a race condition can occur where ioloop executes writer
# while buffer is still empty.
tx_buffer_was_empty = self.get_write_buffer_size() == 0
self._buffer_tx_data(data)
if tx_buffer_was_empty and self._ssl_writable_action is None:
self._ssl_writable_action = self._produce
self._nbio.set_writer(self._sock.fileno(), self._on_socket_writable)
_LOGGER.debug('Turned on writability watcher: %s', self._sock)
@_log_exceptions
def _on_socket_readable(self):
"""Handle readable socket indication
"""
if self._state != self._STATE_ACTIVE:
_LOGGER.debug(
'Ignoring readability notification due to inactive '
'state: state=%s; %s', self._state, self._sock)
return
if self._ssl_readable_action:
try:
self._ssl_readable_action()
except Exception as error: # pylint: disable=W0703
self._initiate_abort(error)
else:
_LOGGER.debug(
'SSL readable action was suppressed: '
'ssl_writable_action=%r; %s', self._ssl_writable_action,
self._sock)
@_log_exceptions
def _on_socket_writable(self):
"""Handle writable socket notification
"""
if self._state != self._STATE_ACTIVE:
_LOGGER.debug(
'Ignoring writability notification due to inactive '
'state: state=%s; %s', self._state, self._sock)
return
if self._ssl_writable_action:
try:
self._ssl_writable_action()
except Exception as error: # pylint: disable=W0703
self._initiate_abort(error)
else:
_LOGGER.debug(
'SSL writable action was suppressed: '
'ssl_readable_action=%r; %s', self._ssl_readable_action,
self._sock)
@_log_exceptions
def _consume(self):
"""[override] Ingest data from socket and dispatch it to protocol until
exception occurs (typically ssl.SSLError with
SSL_ERROR_WANT_READ/WRITE), per-event data consumption limit is reached,
transport becomes inactive, or failure.
Update consumer/producer registration.
:raises Exception: error that signals that connection needs to be
aborted
"""
next_consume_on_readable = True
try:
super(_AsyncSSLTransport, self)._consume()
except ssl.SSLError as error:
if error.errno == ssl.SSL_ERROR_WANT_READ:
_LOGGER.debug('SSL ingester wants read: %s', self._sock)
elif error.errno == ssl.SSL_ERROR_WANT_WRITE:
# Looks like SSL re-negotiation
_LOGGER.debug('SSL ingester wants write: %s', self._sock)
next_consume_on_readable = False
else:
_LOGGER.exception(
'_AsyncTransportBase._consume() failed, aborting '
'connection: error=%r; sock=%s; Caller\'s stack:\n%s',
error, self._sock, ''.join(
traceback.format_exception(*sys.exc_info())))
raise # let outer catch block abort the transport
else:
if self._state != self._STATE_ACTIVE:
# Most likely our protocol's `data_received()` aborted the
# transport
_LOGGER.debug(
'Leaving SSL consumer due to inactive '
'state: state=%s; %s', self._state, self._sock)
return
# Consumer exited without exception; there may still be more,
# possibly unprocessed, data records in SSL input buffers that
# can be read without waiting for socket to become readable.
# In case buffered input SSL data records still remain
self._nbio.add_callback_threadsafe(self._on_socket_readable)
# Update consumer registration
if next_consume_on_readable:
if not self._ssl_readable_action:
self._nbio.set_reader(self._sock.fileno(),
self._on_socket_readable)
self._ssl_readable_action = self._consume
# NOTE: can't use identity check, it fails for instance methods
if self._ssl_writable_action == self._consume: # pylint: disable=W0143
self._nbio.remove_writer(self._sock.fileno())
self._ssl_writable_action = None
else:
# WANT_WRITE
if not self._ssl_writable_action:
self._nbio.set_writer(self._sock.fileno(),
self._on_socket_writable)
self._ssl_writable_action = self._consume
if self._ssl_readable_action:
self._nbio.remove_reader(self._sock.fileno())
self._ssl_readable_action = None
# Update producer registration
if self._tx_buffers and not self._ssl_writable_action:
self._ssl_writable_action = self._produce
self._nbio.set_writer(self._sock.fileno(), self._on_socket_writable)
@_log_exceptions
def _produce(self):
"""[override] Emit data from tx_buffers all chunks are exhausted or
sending is interrupted by an exception (typically ssl.SSLError with
SSL_ERROR_WANT_READ/WRITE).
Update consumer/producer registration.
:raises Exception: error that signals that connection needs to be
aborted
"""
next_produce_on_writable = None # None means no need to produce
try:
super(_AsyncSSLTransport, self)._produce()
except ssl.SSLError as error:
if error.errno == ssl.SSL_ERROR_WANT_READ:
# Looks like SSL re-negotiation
_LOGGER.debug('SSL emitter wants read: %s', self._sock)
next_produce_on_writable = False
elif error.errno == ssl.SSL_ERROR_WANT_WRITE:
_LOGGER.debug('SSL emitter wants write: %s', self._sock)
next_produce_on_writable = True
else:
_LOGGER.exception(
'_AsyncTransportBase._produce() failed, aborting '
'connection: error=%r; sock=%s; Caller\'s stack:\n%s',
error, self._sock, ''.join(
traceback.format_exception(*sys.exc_info())))
raise # let outer catch block abort the transport
else:
# No exception, so everything must have been written to the socket
assert not self._tx_buffers, (
'_AsyncSSLTransport._produce(): no exception from parent '
'class, but data remains in _tx_buffers.', len(
self._tx_buffers))
# Update producer registration
if self._tx_buffers:
assert next_produce_on_writable is not None, (
'_AsyncSSLTransport._produce(): next_produce_on_writable is '
'still None', self._state)
if next_produce_on_writable:
if not self._ssl_writable_action:
self._nbio.set_writer(self._sock.fileno(),
self._on_socket_writable)
self._ssl_writable_action = self._produce
# NOTE: can't use identity check, it fails for instance methods
if self._ssl_readable_action == self._produce: # pylint: disable=W0143
self._nbio.remove_reader(self._sock.fileno())
self._ssl_readable_action = None
else:
# WANT_READ
if not self._ssl_readable_action:
self._nbio.set_reader(self._sock.fileno(),
self._on_socket_readable)
self._ssl_readable_action = self._produce
if self._ssl_writable_action:
self._nbio.remove_writer(self._sock.fileno())
self._ssl_writable_action = None
else:
# NOTE: can't use identity check, it fails for instance methods
if self._ssl_readable_action == self._produce: # pylint: disable=W0143
self._nbio.remove_reader(self._sock.fileno())
self._ssl_readable_action = None
assert self._ssl_writable_action != self._produce, ( # pylint: disable=W0143
'_AsyncSSLTransport._produce(): with empty tx_buffers, '
'writable_action cannot be _produce when readable is '
'_produce', self._state)
else:
# NOTE: can't use identity check, it fails for instance methods
assert self._ssl_writable_action == self._produce, ( # pylint: disable=W0143
'_AsyncSSLTransport._produce(): with empty tx_buffers, '
'expected writable_action as _produce when readable_action '
'is not _produce', 'writable_action:',
self._ssl_writable_action, 'readable_action:',
self._ssl_readable_action, 'state:', self._state)
self._ssl_writable_action = None
self._nbio.remove_writer(self._sock.fileno())
# Update consumer registration
if not self._ssl_readable_action:
self._ssl_readable_action = self._consume
self._nbio.set_reader(self._sock.fileno(), self._on_socket_readable)
# In case input SSL data records have been buffered
self._nbio.add_callback_threadsafe(self._on_socket_readable)
elif self._sock.pending():
self._nbio.add_callback_threadsafe(self._on_socket_readable)
|
|
#!/usr/bin/env python
#------------------------------------------------------------------------------
#
# Generate EO O&M (EOP) XML metadata from ENVISAT product
#
# Project: XML Metadata Handling
# Authors: Martin Paces <martin.paces@eox.at>
#
#-------------------------------------------------------------------------------
# Copyright (C) 2014 EOX IT Services GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies of this Software or works derived from this Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#-------------------------------------------------------------------------------
import traceback
import sys
import os.path
import json
from lxml import etree
from envisat.eheader import ProductHeader
import ns_opt20
import ns_sar20
import ns_xsi
XML_OPTS = {'pretty_print': True, 'xml_declaration': True, 'encoding': 'utf-8'}
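# Placeholder: footprint/center extraction is not implemented yet, so this
# returns an empty list and the featureOfInterest elements below are left
# empty (the ns_eop.getFootprint() calls are commented out).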
def get_footprint_and_center(header):
return []
def eop_asar(header, platform='ENVISAT:ASAR', ns_sar=None):
ns_sar = ns_sar or ns_sar20
ns_eop = ns_sar.ns_eop
ns_gml = ns_sar.ns_gml
ns_om = ns_sar.ns_om
SAR = ns_sar.E
EOP = ns_eop.E
OM = ns_om.E
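# The SPH polarisation fields are three characters with a separator in the
# middle (e.g. 'H/V'); drop the separator to get the channel code ('HV').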
def _polar(pol):
return pol[0]+pol[2]
time_acq_start = header.sph['FIRST_LINE_TIME'].isoformat().replace("+00:00", "Z")
time_acq_stop = header.sph['LAST_LINE_TIME'].isoformat().replace("+00:00", "Z")
time_prod = header.mph['PROC_TIME'].isoformat().replace("+00:00", "Z")
if platform == "ENVISAT:ASAR":
_platform = EOP.Platform(
EOP.shortName("Envisat"),
EOP.orbitType("LEO"),
)
_instrument = "ASAR"
_opmode = header.mph['PRODUCT'][4:6]
_polmode = "D" if _opmode == "AP" else "S"
if _opmode in ("IM", "AP"):
_wrs_code_space = "ENVISAT ASAR IM/AP"
elif _opmode in ("WS", "GM"):
_wrs_code_space = "ENVISAT ASAR WS"
else:
_wrs_code_space = "ENVISAT ASAR"
elif platform == "ERS1:AMI-SAR":
_platform = EOP.Platform(
EOP.shortName("ERS"),
EOP.serialIdentifier("1"),
EOP.orbitType("LEO"),
)
_instrument = "AMI/SAR"
_opmode = "Image"
_polmode = "S"
_wrs_code_space = "ERS SAR"
elif platform == "ERS2:AMI-SAR":
_platform = EOP.Platform(
EOP.shortName("ERS"),
EOP.serialIdentifier("2"),
EOP.orbitType("LEO"),
)
_instrument = "AMI/SAR"
_opmode = "Image"
_polmode = "S"
_wrs_code_space = "ERS SAR"
_polchannels = [_polar(header.sph['MDS1_TX_RX_POLAR'])]
if header.sph['MDS2_TX_RX_POLAR'] != ' ':
_polchannels.append(_polar(header.sph['MDS2_TX_RX_POLAR']))
if len(_polchannels) > 1:
if _polchannels[1] == "VV":
_polchannels = (_polchannels[1], _polchannels[0])
if _polchannels[1] == "HH":
_polchannels = (_polchannels[1], _polchannels[0])
_polchannels = ", ".join(_polchannels)
eo_equipment = EOP.EarthObservationEquipment(
ns_gml.getRandomId(),
EOP.platform(_platform),
EOP.instrument(EOP.Instrument(
EOP.shortName(_instrument)
),
),
EOP.sensor(EOP.Sensor(
EOP.sensorType("RADAR"),
EOP.operationalMode(_opmode),
EOP.swathIdentifier(header.sph['SWATH']),
)),
EOP.acquisitionParameters(SAR.Acquisition(
EOP.orbitNumber("%d"%header.mph['ABS_ORBIT']),
EOP.orbitDirection(header.sph['PASS'].strip()),
EOP.wrsLongitudeGrid("%d"%header.mph['REL_ORBIT'],
**{'codeSpace': _wrs_code_space}),
SAR.polarisationMode(_polmode),
SAR.polarisationChannels(_polchannels),
)),
)
metadata = EOP.EarthObservationMetaData(
EOP.identifier(header.mph['PRODUCT'][:-3]),
EOP.parentIdentifier("ENVISAT:%s"%(header.mph['PRODUCT'][:9])),
EOP.acquisitionType("NOMINAL"),
EOP.productType(header.mph['PRODUCT'][:10]),
EOP.status("ARCHIVED"),
EOP.downlinkedTo(
EOP.DownlinkInformation(
EOP.acquisitionStation(header.mph['ACQUISITION_STATION'].strip()),
),
),
EOP.processing(
EOP.ProcessingInformation(
EOP.processingCenter(header.mph['PROC_CENTER'].strip()),
EOP.processingDate(time_prod),
EOP.processorVersion(header.mph['SOFTWARE_VER'].strip()),
),
),
)
xml_eop = SAR.EarthObservation(
ns_gml.getRandomId(),
ns_eop.getSchemaLocation("SAR"),
OM.phenomenonTime(ns_gml.getTimePeriod(time_acq_start, time_acq_stop)),
#OM.resultQuality(), #optional
OM.resultTime(ns_gml.getTimeInstant(time_acq_stop)),
#OM.validTime(), # optional
OM.procedure(eo_equipment),
OM.observedProperty({ns_xsi.nil: "true", "nilReason": "inapplicable"}),
OM.featureOfInterest(
#ns_eop.getFootprint(*get_footprint_and_center(header))
),
OM.result(EOP.EarthObservationResult(ns_gml.getRandomId())),
EOP.metaDataProperty(metadata),
)
xml_eop = etree.ElementTree(xml_eop)
#xml_eop.getroot().addprevious(ns_eop.getSchematronPI())
return xml_eop
def eop_meris(header, ns_opt=None):
ns_opt = ns_opt or ns_opt20
ns_eop = ns_opt.ns_eop
ns_gml = ns_opt.ns_gml
ns_om = ns_opt.ns_om
OPT = ns_opt.E
EOP = ns_eop.E
OM = ns_om.E
time_acq_start = header.sph['FIRST_LINE_TIME'].isoformat().replace("+00:00", "Z")
time_acq_stop = header.sph['LAST_LINE_TIME'].isoformat().replace("+00:00", "Z")
time_prod = header.mph['PROC_TIME'].isoformat().replace("+00:00", "Z")
if header.sph['FIRST_MID_LAT'] > header.sph['LAST_MID_LAT']:
_pass = "DESCENDING"
else:
_pass = "ASCENDING"
eo_equipment = EOP.EarthObservationEquipment(
ns_gml.getRandomId(),
EOP.platform(EOP.Platform(
EOP.shortName("Envisat"),
EOP.orbitType("LEO"),
)),
EOP.instrument(EOP.Instrument(
EOP.shortName("MERIS")
),
),
EOP.sensor(EOP.Sensor(
EOP.sensorType("OPTICAL"),
EOP.operationalMode(header.mph['PRODUCT'][4:6]),
)),
EOP.acquisitionParameters(EOP.Acquisition(
EOP.orbitNumber("%d"%header.mph['ABS_ORBIT']),
EOP.orbitDirection(_pass),
EOP.wrsLongitudeGrid("%d"%header.mph['REL_ORBIT'],
**{'codeSpace': "ENVISAT MERIS"}),
)),
)
metadata = EOP.EarthObservationMetaData(
EOP.identifier(header.mph['PRODUCT'][:-3]),
EOP.parentIdentifier("ENVISAT:%s"%(header.mph['PRODUCT'][:9])),
EOP.acquisitionType("NOMINAL"),
EOP.productType(header.mph['PRODUCT'][:10]),
EOP.status("ARCHIVED"),
EOP.downlinkedTo(
EOP.DownlinkInformation(
EOP.acquisitionStation(header.mph['ACQUISITION_STATION'].strip()),
),
),
EOP.processing(
EOP.ProcessingInformation(
EOP.processingCenter(header.mph['PROC_CENTER'].strip()),
EOP.processingDate(time_prod),
EOP.processorVersion(header.mph['SOFTWARE_VER'].strip()),
),
),
)
xml_eop = OPT.EarthObservation(
ns_gml.getRandomId(),
ns_eop.getSchemaLocation("OPT"),
#EOP.parameter(), #optional
OM.phenomenonTime(ns_gml.getTimePeriod(time_acq_start, time_acq_stop)),
#OM.resultQuality(), #optional
OM.resultTime(ns_gml.getTimeInstant(time_prod)),
#OM.validTime(), # optional
OM.procedure(eo_equipment),
OM.observedProperty({ns_xsi.nil: "true", "nilReason": "inapplicable"}),
OM.featureOfInterest(
#ns_eop.getFootprint(*get_footprint_and_center(header))
),
OM.result(OPT.EarthObservationResult(ns_gml.getRandomId())),
EOP.metaDataProperty(metadata),
)
xml_eop = etree.ElementTree(xml_eop)
#xml_eop.getroot().addprevious(ns_eop.getSchematronPI())
return xml_eop
def main(fname):
with open(fname) as fid:
header = ProductHeader(fid)
#print header.mph
#print "----------------------"
#print header.sph
pid = header.mph["PRODUCT"]
if pid.startswith("MER_") and pid[8] == '1':
eop = eop_meris(header)
elif pid.startswith("MER_") and pid[8] == '2':
eop = eop_meris(header)
elif pid.startswith("ASA_") and pid[6] in ('P', 'M', 'G') and pid[8] == '1':
eop = eop_asar(header)
elif pid.startswith("SAR_") and pid[6] in ('P', 'M', 'G') and pid[8] == '1':
eop = eop_asar(header, platform='ERS'+pid[-1]+":AMI-SAR")
else:
raise ValueError("Unsupported product type %s"%pid[:9])
print etree.tostring(eop, **XML_OPTS)
if __name__ == "__main__":
EXENAME = os.path.basename(sys.argv[0])
DEBUG = False
try:
INPUT = sys.argv[1]
for arg in sys.argv[2:]:
if arg == "DEBUG":
DEBUG = True # dump debugging output
except IndexError:
print >>sys.stderr, "ERROR: %s: Not enough input arguments!"%EXENAME
print >>sys.stderr
print >>sys.stderr, "Extract EOxServer range-type (JSON) from N1 file."
print >>sys.stderr
print >>sys.stderr, "USAGE: %s <input-n1> [DEBUG]"%EXENAME
sys.exit(1)
if DEBUG:
print >>sys.stderr, "input-n1: ", INPUT
try:
main(INPUT)
except Exception as exc:
print >>sys.stderr, "ERROR: %s: %s "%(EXENAME, exc)
if DEBUG:
print >>sys.stderr, traceback.format_exc()
sys.exit(1)
|
|
# Copyright 2014-2015 The Alive authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from z3 import *
gbl_unique_id = 0
def mk_unique_id():
global gbl_unique_id
id = str(gbl_unique_id)
gbl_unique_id += 1
return id
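# Fold a list of (condition, value) pairs into a nested z3 If expression;
# the last pair's value acts as the default and its condition is ignored.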
def fold_ite_list(l):
if len(l) == 0:
return None
cond, val = l[0]
if len(l) == 1:
return val
return If(cond, val, fold_ite_list(l[1:]))
def freshBV(prefix, size):
return BitVec('%s_%s' % (prefix, mk_unique_id()), size)
def mk_and(l):
l = [e for e in l if not is_true(e)]
if len(l) == 0:
return BoolVal(True)
if len(l) == 1:
return l[0]
return And(l)
def mk_or(l):
l = [e for e in l if not is_false(e)]
if len(l) == 0:
return BoolVal(False)
if len(l) == 1:
return l[0]
return Or(l)
def mk_not(e):
if is_false(e):
return BoolVal(True)
if is_true(e):
return BoolVal(False)
return Not(e)
def mk_distinct(l):
if len(l) < 2:
return BoolVal(True)
return Distinct(l)
def mk_if(c, a, b):
if is_true(c):
return a
if is_false(c):
return b
return If(c, a, b)
def mk_implies(a, b):
if is_true(a):
return b
if is_false(a) or is_true(b):
return BoolVal(True)
if is_false(b):
return Not(a)
return Implies(a, b)
def mk_concat(l):
if len(l) == 1:
return l[0]
return Concat(l)
def mk_forall(l, f):
if l == []:
return f
return ForAll(l, f)
def mk_exists(l, f):
if l == []:
return f
return Exists(l, f)
def toBV(b):
return If(b, BitVecVal(1, 1), BitVecVal(0, 1))
def truncateOrZExt(src, tgt):
srcb = src.size()
if isinstance(tgt, int):
tgtb = tgt
else:
tgtb = tgt.size()
if srcb == tgtb:
return src
if srcb > tgtb:
return Extract(tgtb - 1, 0, src)
return ZeroExt(tgtb - srcb, src)
def truncateOrSExt(src, tgt):
srcb = src.size()
tgtb = tgt.size()
if srcb == tgtb:
return src
if srcb > tgtb:
return Extract(tgtb - 1, 0, src)
return SignExt(tgtb - srcb, src)
def truncateOrPad(src, tgt):
srcb = src.size()
tgtb = tgt.size()
if srcb == tgtb:
return src
if srcb > tgtb:
return Extract(srcb - 1, srcb - tgtb, src)
return Concat(src, BitVecVal(0, tgtb - srcb))
"""
def no_overflow_smul(a, b):
size = a.size()
assert b.size() == size
m = SignExt(size, a) * SignExt(size, b)
min = BitVecVal(-(1 << (size-1)), 2*size)
max = BitVecVal((1 << (size-1)) -1, 2*size)
return And(m >= min, m <= max)
"""
def no_overflow_smul(a, b):
size = a.size()
assert b.size() == size
m = SignExt(size, a) * SignExt(size, b)
return m == SignExt(size, a * b)
def no_overflow_umul(a, b):
size = a.size()
assert b.size() == size
m = Extract(2*size-1, size, ZeroExt(size, a) * ZeroExt(size, b))
return m == BitVecVal(0, size)
def isShiftedMask(a):
v = (a - 1) | a
return [v != 0, ((v + 1) & v) == 0]
def bv_log2(v, bitwidth):
def rec(h, l):
if h <= l:
return BitVecVal(l, bitwidth)
mid = l+int((h-l)/2)
return If(Extract(h,mid+1,v) != 0, rec(h, mid+1), rec(mid, l))
return rec(v.size()-1, 0)
"""
linear version of log2
def bv_log2(v, bitwidth):
def rec(i):
if i == 0:
return BitVecVal(0, bitwidth)
return If(Extract(i,i,v) == BitVecVal(1,1), BitVecVal(i,bitwidth), rec(i-1))
return rec(v.size()-1)
"""
def ctlz(v, output_width):
size = v.size()
def rec(i):
if i < 0:
return BitVecVal(size, output_width)
return If(Extract(i,i,v) == BitVecVal(1, 1),
BitVecVal(size-1-i, output_width),
rec(i-1))
return rec(size-1)
def cttz(v, output_width):
size = v.size()
def rec(i):
if i == size:
return BitVecVal(size, output_width)
return If(Extract(i,i,v) == BitVecVal(1, 1),
BitVecVal(i, output_width),
rec(i+1))
return rec(0)
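# Illustrative note (added): for the 4-bit constant 0b0100, ctlz(...) above
# simplifies to 1 (one leading zero) and cttz(...) to 2 (two trailing zeros),
# each returned as a bit-vector of the requested output width.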
def ComputeNumSignBits(v, bitwidth):
size = v.size()
size1 = size - 1
sign = Extract(size1, size1, v)
def rec(i):
if i < 0:
return BitVecVal(size, bitwidth)
return If(Extract(i,i,v) == sign,
rec(i-1),
BitVecVal(size1-i, bitwidth))
return rec(size - 2)
##########################
# Type inference utilities
def register_pick_one_type(v):
global gbl_one_type_only
gbl_one_type_only.add(str(v))
def unregister_pick_one_type(vs):
global gbl_one_type_only
for v in vs.iterkeys():
gbl_one_type_only.discard(v)
def reset_pick_one_type():
global gbl_one_type_only
gbl_one_type_only = set([])
def get_pick_one_type():
return gbl_one_type_only
##########################
# number of users of an instruction
def get_users_var(name):
return BitVec('u_' + name, 8)
def get_flag_var(flag, inst):
dst = 'src' if gbl_is_source else 'tgt'
return BitVec('f_%s_%s_%s' % (flag, inst, dst), 1)
def set_smt_is_source(s):
global gbl_is_source
gbl_is_source = s
gbl_infer_flags = False
def set_infer_flags(f):
global gbl_infer_flags
gbl_infer_flags = f
def do_infer_flags():
return gbl_infer_flags
gbl_use_array_theory = False
def set_use_array_theory(f):
global gbl_use_array_theory
gbl_use_array_theory = f
def use_array_theory():
return gbl_use_array_theory
gbl_new_semantics = False
def set_use_new_semantics(f):
global gbl_new_semantics
gbl_new_semantics = f
def use_new_semantics():
return gbl_new_semantics
gbl_ptr_size = 32
def set_ptr_size(m):
global gbl_ptr_size
sz = m.get_interp(Int('ptrsize'))
if sz is not None:
gbl_ptr_size = sz.as_long()
def get_ptr_size():
return gbl_ptr_size
##########################
# Error handling
class AliveError(Exception):
pass
class ParseError():
def __init__(self, msgs, token = None):
if isinstance(msgs, list):
self.msgs = msgs
else:
self.msgs = [msgs]
self.token = token
def __repr__(self):
lineno = get_lineno()
line = get_line(lineno)
col = get_column(line, self.token)
return exception2str("\n".join(self.msgs), line, lineno, col)
gbl_line_offset = 0
def exception2str(msg, line, lineno, col, line_offset = None):
if line_offset is None:
line_offset = gbl_line_offset
s = "ERROR: %s (line: %d)\n" % (msg, line_offset + lineno)
s += line + '\n'
s += ' ' * col + '^'
return s
def get_lineno():
return gbl_parse_str.count('\n', 0, gbl_parse_loc) + 1
def get_line(lineno):
return gbl_parse_str.split('\n')[lineno-1]
def get_column(s, tok):
col = gbl_parse_loc - (gbl_parse_str.rfind('\n', 0, gbl_parse_loc)+1)
if not tok:
return col
token_col = s.find(tok, col)
return token_col if token_col >= 0 else col
def save_parse_str(s, line):
global gbl_parse_str, gbl_line_offset
gbl_parse_str = s
gbl_line_offset = line-1
def save_loc(loc):
global gbl_parse_loc
gbl_parse_loc = loc
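# A minimal, illustrative smoke test (not part of the original module). It only
# uses names defined above plus the z3 API imported at the top of this file,
# and runs only when the module is executed directly.
if __name__ == '__main__':
    x = Bool('x')
    # The mk_* constructors fold away neutral elements instead of building
    # trivial z3 terms.
    assert mk_and([BoolVal(True), x]).eq(x)
    assert mk_or([BoolVal(False), x]).eq(x)
    # bv_log2 returns the index of the most significant set bit; for the
    # constant 0b0100 at width 4 that index is 2.
    assert simplify(bv_log2(BitVecVal(4, 4), 32)).as_long() == 2
    print('z3 helper smoke test passed')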
|
|
import sys, string
import pythoncom
import win32api
from win32com.adsi import *
verbose_level = 0
server = '' # Must have trailing /
local_name = win32api.GetComputerName()
def DumpRoot():
"Dumps the root DSE"
path = "LDAP://%srootDSE" % server
rootdse = ADsGetObject(path)
for item in rootdse.Get("SupportedLDAPVersion"):
print "%s supports ldap version %s" % (path, item)
attributes = ["CurrentTime", "defaultNamingContext"]
for attr in attributes:
val = rootdse.Get(attr)
print " %s=%s" % (attr, val)
###############################################
#
# Code taken from article titled:
# Reading attributeSchema and classSchema Objects
def _DumpClass(child):
attrs = "Abstract lDAPDisplayName schemaIDGUID schemaNamingContext attributeSyntax oMSyntax"
_DumpTheseAttributes(child, string.split(attrs))
def _DumpAttribute(child):
attrs = "lDAPDisplayName schemaIDGUID adminDescription adminDisplayName rDNAttID defaultHidingValue defaultObjectCategory systemOnly defaultSecurityDescriptor"
_DumpTheseAttributes(child, string.split(attrs))
def _DumpTheseAttributes(child, attrs):
for attr in attrs:
try:
val = child.Get(attr)
except pythoncom.com_error, details:
            # Attribute is missing or unreadable on this object - skip it.
            # Note: the statements below are unreachable as written; they show
            # how the COM error could be reported as the value instead.
            continue
            # ###
            (hr, msg, exc, arg) = details
            if exc and exc[2]: msg = exc[2]
            val = "<Error: %s>" % (msg,)
if verbose_level >= 2:
print " %s: %s=%s" % (child.Class, attr, val)
def DumpSchema():
"Dumps the default DSE schema"
# Bind to rootDSE to get the schemaNamingContext property.
path = "LDAP://%srootDSE" % server
rootdse = ADsGetObject(path)
name = rootdse.Get("schemaNamingContext")
# Bind to the actual schema container.
path= "LDAP://" + server + name
print "Binding to", path
ob = ADsGetObject(path)
nclasses = nattr = nsub = nunk = 0
# Enumerate the attribute and class objects in the schema container.
for child in ob:
# Find out if this is a class, attribute, or subSchema object.
class_name = child.Class
if class_name == "classSchema":
_DumpClass(child)
nclasses = nclasses + 1
elif class_name == "attributeSchema":
_DumpAttribute(child)
nattr = nattr + 1
elif class_name == "subSchema":
nsub = nsub + 1
else:
print "Unknown class:", class_name
nunk = nunk + 1
if verbose_level:
print "Processed", nclasses, "classes"
print "Processed", nattr, "attributes"
print "Processed", nsub, "sub-schema's"
print "Processed", nunk, "unknown types"
def _DumpObject(ob, level = 0):
prefix = " " * level
print "%s%s object: %s" % (prefix, ob.Class, ob.Name)
# Do the directory object thing
try:
dir_ob = ADsGetObject(ob.ADsPath, IID_IDirectoryObject)
except pythoncom.com_error:
dir_ob = None
if dir_ob is not None:
info = dir_ob.GetObjectInformation()
print "%s RDN='%s', ObjectDN='%s'" % (prefix, info.RDN, info.ObjectDN)
# Create a list of names to fetch
names = ["distinguishedName"]
attrs = dir_ob.GetObjectAttributes(names)
for attr in attrs:
for val, typ in attr.Values:
print "%s Attribute '%s' = %s" % (prefix, attr.AttrName, val)
for child in ob:
_DumpObject(child, level+1)
def DumpAllObjects():
"Recursively dump the entire directory!"
path = "LDAP://%srootDSE" % server
rootdse = ADsGetObject(path)
name = rootdse.Get("defaultNamingContext")
# Bind to the actual schema container.
path= "LDAP://" + server + name
print "Binding to", path
ob = ADsGetObject(path)
# Enumerate the attribute and class objects in the schema container.
_DumpObject(ob)
##########################################################
#
# Code taken from article:
# Example Code for Enumerating Schema Classes, Attributes, and Syntaxes
# Fill a map with VT_ datatypes, to give us better names:
vt_map = {}
for name, val in pythoncom.__dict__.items():
if name[:3] == "VT_":
vt_map[val] = name
def DumpSchema2():
"Dumps the schema using an alternative technique"
path = "LDAP://%sschema" % (server,)
schema = ADsGetObject(path, IID_IADsContainer)
nclass = nprop = nsyntax = 0
for item in schema:
item_class = string.lower(item.Class)
if item_class == "class":
items = []
if item.Abstract: items.append("Abstract")
if item.Auxiliary: items.append("Auxiliary")
# if item.Structural: items.append("Structural")
desc = string.join(items, ", ")
import win32com.util
iid_name = win32com.util.IIDToInterfaceName(item.PrimaryInterface)
if verbose_level >= 2:
print "Class: Name=%s, Flags=%s, Primary Interface=%s" % (item.Name, desc, iid_name)
nclass = nclass + 1
elif item_class == "property":
if item.MultiValued:
val_type = "Multi-Valued"
else:
val_type = "Single-Valued"
if verbose_level >= 2:
print "Property: Name=%s, %s" % (item.Name, val_type)
nprop = nprop + 1
elif item_class == "syntax":
data_type = vt_map.get(item.OleAutoDataType, "<unknown type>")
if verbose_level >= 2:
print "Syntax: Name=%s, Datatype = %s" % (item.Name, data_type)
nsyntax = nsyntax + 1
if verbose_level >= 1:
print "Processed", nclass, "classes"
print "Processed", nprop, "properties"
print "Processed", nsyntax, "syntax items"
def DumpGC():
"Dumps the GC: object (whatever that is!)"
ob = ADsGetObject("GC:", IID_IADsContainer)
for sub_ob in ob:
print "GC ob: %s (%s)" % (sub_ob.Name, sub_ob.ADsPath)
def DumpLocalUsers():
"Dumps the local machine users"
path = "WinNT://%s,computer" % (local_name,)
ob = ADsGetObject(path, IID_IADsContainer)
ob.put_Filter(["User", "Group"])
for sub_ob in ob:
print "User/Group: %s (%s)" % (sub_ob.Name, sub_ob.ADsPath)
def DumpLocalGroups():
"Dumps the local machine groups"
path = "WinNT://%s,computer" % (local_name,)
ob = ADsGetObject(path, IID_IADsContainer)
ob.put_Filter(["Group"])
for sub_ob in ob:
print "Group: %s (%s)" % (sub_ob.Name, sub_ob.ADsPath)
# get the members
members = sub_ob.Members()
for member in members:
print " Group member: %s (%s)" % (member.Name, member.ADsPath)
def usage(tests):
import os
print "Usage: %s [-s server ] [-v] [Test ...]" % os.path.basename(sys.argv[0])
print " -v : Verbose - print more information"
print " -s : server - execute the tests against the named server"
print "where Test is one of:"
for t in tests:
print t.__name__,":", t.__doc__
print
print "If not tests are specified, all tests are run"
sys.exit(1)
def main():
import getopt, traceback
tests = []
for ob in globals().values():
if type(ob)==type(main) and ob.__doc__:
tests.append(ob)
opts, args = getopt.getopt(sys.argv[1:], "s:hv")
for opt, val in opts:
if opt=="-s":
if val[-1] not in "\\/":
val = val + "/"
global server
server = val
if opt=="-h":
usage(tests)
if opt=="-v":
global verbose_level
verbose_level = verbose_level + 1
if len(args)==0:
print "Running all tests - use '-h' to see command-line options..."
dotests = tests
else:
dotests = []
for arg in args:
for t in tests:
if t.__name__==arg:
dotests.append(t)
break
else:
print "Test '%s' unknown - skipping" % arg
if not len(dotests):
print "Nothing to do!"
usage(tests)
for test in dotests:
try:
test()
except:
print "Test %s failed" % test.__name__
traceback.print_exc()
if __name__=='__main__':
main()
|
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import abc
import sys
import re
import functools
from collections import defaultdict
import lucidity.error
# Type of a RegexObject for isinstance check.
_RegexType = type(re.compile(''))
class Template(object):
'''A template.'''
_STRIP_EXPRESSION_REGEX = re.compile(r'{(.+?)(:(\\}|.)+?)}')
_PLAIN_PLACEHOLDER_REGEX = re.compile(r'{(.+?)}')
_TEMPLATE_REFERENCE_REGEX = re.compile(r'{@(?P<reference>.+?)}')
ANCHOR_START, ANCHOR_END, ANCHOR_BOTH = (1, 2, 3)
RELAXED, STRICT = (1, 2)
def __init__(self, name, pattern, anchor=ANCHOR_START,
default_placeholder_expression='[\w_.\-]+',
duplicate_placeholder_mode=RELAXED,
template_resolver=None):
'''Initialise with *name* and *pattern*.
*anchor* determines how the pattern is anchored during a parse. A
value of :attr:`~Template.ANCHOR_START` (the default) will match the
pattern against the start of a path. :attr:`~Template.ANCHOR_END` will
match against the end of a path. To anchor at both the start and end
(a full path match) use :attr:`~Template.ANCHOR_BOTH`. Finally,
``None`` will try to match the pattern once anywhere in the path.
*duplicate_placeholder_mode* determines how duplicate placeholders will
be handled during parsing. :attr:`~Template.RELAXED` mode extracts the
last matching value without checking the other values.
:attr:`~Template.STRICT` mode ensures that all duplicate placeholders
extract the same value and raises :exc:`~lucidity.error.ParseError` if
they do not.
If *template_resolver* is supplied, use it to resolve any template
references in the *pattern* during operations. It should conform to the
:class:`Resolver` interface. It can be changed at any time on the
instance to affect future operations.
'''
super(Template, self).__init__()
self.duplicate_placeholder_mode = duplicate_placeholder_mode
self.template_resolver = template_resolver
self._default_placeholder_expression = default_placeholder_expression
self._period_code = '_LPD_'
self._at_code = '_WXV_'
self._name = name
self._pattern = pattern
self._anchor = anchor
# Check that supplied pattern is valid and able to be compiled.
self._construct_regular_expression(self.pattern)
def __repr__(self):
'''Return unambiguous representation of template.'''
return '{0}(name={1!r}, pattern={2!r})'.format(
self.__class__.__name__, self.name, self.pattern
)
@property
def name(self):
'''Return name of template.'''
return self._name
@property
def pattern(self):
'''Return template pattern.'''
return self._pattern
def expanded_pattern(self):
'''Return pattern with all referenced templates expanded recursively.
Raise :exc:`lucidity.error.ResolveError` if pattern contains a reference
that cannot be resolved by currently set template_resolver.
'''
return self._TEMPLATE_REFERENCE_REGEX.sub(
self._expand_reference, self.pattern
)
def _expand_reference(self, match):
'''Expand reference represented by *match*.'''
reference = match.group('reference')
if self.template_resolver is None:
raise lucidity.error.ResolveError(
'Failed to resolve reference {0!r} as no template resolver set.'
.format(reference)
)
template = self.template_resolver.get(reference)
if template is None:
raise lucidity.error.ResolveError(
'Failed to resolve reference {0!r} using template resolver.'
.format(reference)
)
return template.expanded_pattern()
def parse(self, path):
'''Return dictionary of data extracted from *path* using this template.
Raise :py:class:`~lucidity.error.ParseError` if *path* is not
parsable by this template.
'''
# Construct regular expression for expanded pattern.
regex = self._construct_regular_expression(self.expanded_pattern())
# Parse.
parsed = {}
match = regex.search(path)
if match:
data = {}
for key, value in sorted(match.groupdict().items()):
# Strip number that was added to make group name unique.
key = key[:-3]
# If strict mode enabled for duplicate placeholders, ensure that
# all duplicate placeholders extract the same value.
if self.duplicate_placeholder_mode == self.STRICT:
if key in parsed:
if parsed[key] != value:
raise lucidity.error.ParseError(
'Different extracted values for placeholder '
'{0!r} detected. Values were {1!r} and {2!r}.'
.format(key, parsed[key], value)
)
else:
parsed[key] = value
# Expand dot notation keys into nested dictionaries.
target = data
parts = key.split(self._period_code)
for part in parts[:-1]:
target = target.setdefault(part, {})
target[parts[-1]] = value
return data
else:
raise lucidity.error.ParseError(
'Path {0!r} did not match template pattern.'.format(path)
)
def format(self, data):
'''Return a path formatted by applying *data* to this template.
Raise :py:class:`~lucidity.error.FormatError` if *data* does not
supply enough information to fill the template fields.
'''
format_specification = self._construct_format_specification(
self.expanded_pattern()
)
return self._PLAIN_PLACEHOLDER_REGEX.sub(
functools.partial(self._format, data=data),
format_specification
)
def _format(self, match, data):
'''Return value from data for *match*.'''
placeholder = match.group(1)
parts = placeholder.split('.')
try:
value = data
for part in parts:
value = value[part]
except (TypeError, KeyError):
raise lucidity.error.FormatError(
'Could not format data {0!r} due to missing key {1!r}.'
.format(data, placeholder)
)
else:
return value
def keys(self):
'''Return unique set of placeholders in pattern.'''
format_specification = self._construct_format_specification(
self.expanded_pattern()
)
return set(self._PLAIN_PLACEHOLDER_REGEX.findall(format_specification))
def references(self):
'''Return unique set of referenced templates in pattern.'''
format_specification = self._construct_format_specification(
self.pattern
)
return set(self._TEMPLATE_REFERENCE_REGEX.findall(format_specification))
def _construct_format_specification(self, pattern):
'''Return format specification from *pattern*.'''
return self._STRIP_EXPRESSION_REGEX.sub('{\g<1>}', pattern)
def _construct_regular_expression(self, pattern):
'''Return a regular expression to represent *pattern*.'''
# Escape non-placeholder components.
expression = re.sub(
r'(?P<placeholder>{(.+?)(:(\\}|.)+?)?})|(?P<other>.+?)',
self._escape,
pattern
)
# Replace placeholders with regex pattern.
expression = re.sub(
r'{(?P<placeholder>.+?)(:(?P<expression>(\\}|.)+?))?}',
functools.partial(
self._convert, placeholder_count=defaultdict(int)
),
expression
)
if self._anchor is not None:
if bool(self._anchor & self.ANCHOR_START):
expression = '^{0}'.format(expression)
if bool(self._anchor & self.ANCHOR_END):
expression = '{0}$'.format(expression)
# Compile expression.
try:
compiled = re.compile(expression)
except re.error as error:
if any([
'bad group name' in str(error),
'bad character in group name' in str(error)
]):
raise ValueError('Placeholder name contains invalid '
'characters.')
else:
_, value, traceback = sys.exc_info()
message = 'Invalid pattern: {0}'.format(value)
raise ValueError, message, traceback #@IgnorePep8
return compiled
def _convert(self, match, placeholder_count):
'''Return a regular expression to represent *match*.
*placeholder_count* should be a `defaultdict(int)` that will be used to
store counts of unique placeholder names.
'''
placeholder_name = match.group('placeholder')
# Support at symbol (@) as referenced template indicator. Currently,
# this symbol not a valid character for a group name in the standard
# Python regex library. Rather than rewrite or monkey patch the library
# work around the restriction with a unique identifier.
placeholder_name = placeholder_name.replace('@', self._at_code)
# Support period (.) as nested key indicator. Currently, a period is
# not a valid character for a group name in the standard Python regex
# library. Rather than rewrite or monkey patch the library work around
# the restriction with a unique identifier.
placeholder_name = placeholder_name.replace('.', self._period_code)
# The re module does not support duplicate group names. To support
# duplicate placeholder names in templates add a unique count to the
# regular expression group name and strip it later during parse.
placeholder_count[placeholder_name] += 1
placeholder_name += '{0:03d}'.format(
placeholder_count[placeholder_name]
)
expression = match.group('expression')
if expression is None:
expression = self._default_placeholder_expression
# Un-escape potentially escaped characters in expression.
expression = expression.replace('\{', '{').replace('\}', '}')
return r'(?P<{0}>{1})'.format(placeholder_name, expression)
def _escape(self, match):
'''Escape matched 'other' group value.'''
groups = match.groupdict()
if groups['other'] is not None:
return re.escape(groups['other'])
return groups['placeholder']
class Resolver(object):
'''Template resolver interface.'''
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def get(self, template_name, default=None):
'''Return template that matches *template_name*.
If no template matches then return *default*.
'''
return default
@classmethod
def __subclasshook__(cls, subclass):
'''Return whether *subclass* fulfils this interface.'''
if cls is Resolver:
return callable(getattr(subclass, 'get', None))
return NotImplemented
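# Brief usage sketch (added for illustration; not part of the original module).
# The pattern and data below are made up, and this only runs when the file is
# executed directly with the lucidity package importable.
if __name__ == '__main__':
    template = Template('shot', '/jobs/{job}/shots/{shot.code}')
    # parse() expands dot-notation placeholders into nested dictionaries,
    # e.g. {'job': 'monty', 'shot': {'code': 's010'}} for the path below.
    print(template.parse('/jobs/monty/shots/s010'))
    # format() is the inverse operation for the same nested data structure.
    print(template.format({'job': 'monty', 'shot': {'code': 's010'}}))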
|
|
"""
Django settings for stagecraft project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
import json
import os
import sys
from os.path import abspath, dirname, join as pjoin
from .rediss import *
try:
from urllib.parse import urlparse # Python 3
except ImportError:
from urlparse import urlparse # Python 2
BASE_DIR = abspath(pjoin(dirname(__file__), '..', '..'))
sys.path.append(pjoin(BASE_DIR, 'apps'))
sys.path.append(pjoin(BASE_DIR, 'libs'))
import djcelery
from celery.schedules import crontab
djcelery.setup_loader()
# Defined here for safety; they should also be defined in each environment.
DEBUG = False
ALLOWED_HOSTS = []
USE_X_FORWARDED_HOST = True
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
STATSD_HOST = 'localhost'
STATSD_PORT = 8125
STATSD_PREFIX = 'pp.apps.stagecraft'
STATSD_MAXUDPSIZE = 512
STATSD_CLIENT = 'django_statsd.clients.normal'
def load_paas_settings():
paas = {}
if 'VCAP_SERVICES' in os.environ:
vcap = json.loads(os.environ['VCAP_SERVICES'])
for service in vcap['postgres']:
if service['name'] == 'gds-performance-platform-pg-service':
paas['DATABASE_URL'] = service['credentials']['uri']
for service in vcap['redis']:
if service['name'] == 'redis':
database_number = os.environ['REDIS_DATABASE_NUMBER']
url = service['credentials']['uri']
url += '/' + database_number
paas['REDIS_URL'] = url
return paas
def load_databases_from_environment():
# eg postgres://user3123:pass123@database.foo.com:6212/db982398
DATABASE_URL = urlparse(os.environ['DATABASE_URL'])
return {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': DATABASE_URL.path[1:],
'USER': DATABASE_URL.username,
'PASSWORD': DATABASE_URL.password,
'HOST': DATABASE_URL.hostname,
'PORT': DATABASE_URL.port,
}
}
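# Worked example (added note): for the sample URL in the comment above,
# urlparse() yields NAME='db982398', USER='user3123', PASSWORD='pass123',
# HOST='database.foo.com' and PORT=6212 in the resulting settings dict.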
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'djcelery',
'reversion',
'stagecraft.apps.collectors',
'stagecraft.apps.datasets',
'stagecraft.apps.dashboards',
'stagecraft.apps.organisation',
'stagecraft.apps.transforms',
'stagecraft.apps.users',
)
MIDDLEWARE_CLASSES = (
'dogslow.WatchdogMiddleware',
'django_statsd.middleware.GraphiteRequestTimingMiddleware',
'stagecraft.libs.request_logger.middleware.RequestLoggerMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'reversion.middleware.RevisionMiddleware',
)
STATSD_PATCHES = (
'django_statsd.patches.db',
)
ROOT_URLCONF = 'stagecraft.urls'
WSGI_APPLICATION = 'stagecraft.wsgi.application'
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-gb'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
NOSE_ARGS = ['-s', '--exclude-dir=stagecraft/settings']
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = abspath(pjoin(BASE_DIR, 'assets'))
DOGSLOW_LOG_TO_FILE = False
DOGSLOW_TIMER = 1
DOGSLOW_LOGGER = 'stagecraft.apps'
DOGSLOW_LOG_LEVEL = 'INFO'
#: Only add pickle to this list if your broker is secured
#: from unwanted access (see userguide/security.html)
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_WORKER_MAX_TASKS_PER_CHILD = 64
CELERY_WORKER_MAX_MEMORY_PER_CHILD = 64000
CELERY_RESULT_BACKEND = 'djcelery.backends.database.DatabaseBackend'
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
CELERYBEAT_SCHEDULE = {
'realtime': {
'task': 'stagecraft.apps.collectors.tasks.run_collectors_by_type',
'schedule': crontab(minute='*/5'),
'args': ('ga-realtime', 'piwik-realtime')
},
'hourly': {
'task': 'stagecraft.apps.collectors.tasks.run_collectors_by_type',
'schedule': crontab(minute=0),
'args': ('pingdom',)
},
'daily': {
'task': 'stagecraft.apps.collectors.tasks.run_collectors_by_type',
'schedule': crontab(minute=0, hour=0),
'args': (
'ga',
'ga-contrib-content-table',
'ga-trending',
'gcloud',
'piwik-core',
'webtrends-keymetrics',
'webtrends-reports'
)
},
}
ROLES = [
{
"role": "collector",
"permissions": {
"DataSet": ["get"],
"Provider": ["get", "post", "put", "delete"],
"DataSource": ["get", "post", "put", "delete"],
"CollectorType": ["get", "post", "put", "delete"],
"Collector": ["get", "post", "put", "delete"]
}
},
{
"role": "collector-view",
"permissions": {
"CollectorType": ["get"],
"Collector": ["get"]
}
},
{
"role": "dashboard",
"permissions": {
"Dashboard": ["get", "post", "put", "delete"],
"Module": ["get", "post", "put", "delete"],
"ModuleType": ["get", "post", "put", "delete"],
},
},
{
"role": "dashboard-editor",
"permissions": {
"Dashboard": ["get", "post", "put"],
"Module": ["get", "post", "put"],
"ModuleType": ["get"],
"DataGroup": ["get", "post"],
"DataSet": ["get", "post"],
"Node": ["get"],
"NodeType": ["get"],
"Transform": ["get", "post"],
"TransformType": ["get"],
},
},
{
"role": "admin",
"permissions": {
"Dashboard": ["get", "post", "put", "delete"],
"Module": ["get", "post", "put", "delete"],
"ModuleType": ["get", "post", "put", "delete"],
"DataGroup": ["get", "post", "put", "delete"],
"DataSet": ["get", "post", "put", "delete"],
"Node": ["get", "post", "put", "delete"],
"NodeType": ["get", "post", "put", "delete"],
"Transform": ["get", "post", "put", "delete"],
"TransformType": ["get", "post", "put", "delete"],
"User": ["get", "post", "put", "delete"],
"Provider": ["get", "post", "put", "delete"],
"DataSource": ["get", "post", "put", "delete"],
"CollectorType": ["get", "post", "put", "delete"],
"Collector": ["get", "post", "put", "delete"],
},
},
{
"role": "omniscient",
"permissions": {
},
},
{
"role": "signin",
"permissions": {
"DataGroup": ["get", "post", "put", "delete"],
"DataSet": ["get", "post", "put", "delete"],
},
},
{
"role": "organisation",
"permissions": {
"Node": ["get", "post", "put", "delete"],
"NodeType": ["get", "post", "put", "delete"],
},
},
{
"role": "transforms",
"permissions": {
"Transform": ["get", "post", "put", "delete"],
"TransformType": ["get", "post", "put", "delete"],
},
},
{
"role": "user",
"permissions": {
"User": ["get", "post", "put", "delete"],
},
},
{
"role": "anon",
"permissions": {
"Dashboard": ["get"],
"Module": ["get"],
"ModuleType": ["get"],
"Node": ["get"],
"NodeType": ["get"],
"Transform": ["get"],
"TransformType": ["get"],
},
},
]
DISABLE_COLLECTORS = False
TEMPLATES = [{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': ['templates'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
}]
|
|
"""distutils.msvccompiler
Contains MSVCCompiler, an implementation of the abstract CCompiler class
for the Microsoft Visual Studio.
"""
# Written by Perry Stoll
# hacked by Robin Becker and Thomas Heller to do a better job of
# finding DevStudio (through the registry)
import sys, os
from distutils.errors import \
DistutilsExecError, DistutilsPlatformError, \
CompileError, LibError, LinkError
from distutils.ccompiler import \
CCompiler, gen_lib_options
from distutils import log
_can_read_reg = False
try:
import winreg
_can_read_reg = True
hkey_mod = winreg
RegOpenKeyEx = winreg.OpenKeyEx
RegEnumKey = winreg.EnumKey
RegEnumValue = winreg.EnumValue
RegError = winreg.error
except ImportError:
try:
import win32api
import win32con
_can_read_reg = True
hkey_mod = win32con
RegOpenKeyEx = win32api.RegOpenKeyEx
RegEnumKey = win32api.RegEnumKey
RegEnumValue = win32api.RegEnumValue
RegError = win32api.error
except ImportError:
log.info("Warning: Can't read registry to find the "
"necessary compiler setting\n"
"Make sure that Python modules winreg, "
"win32api or win32con are installed.")
pass
if _can_read_reg:
HKEYS = (hkey_mod.HKEY_USERS,
hkey_mod.HKEY_CURRENT_USER,
hkey_mod.HKEY_LOCAL_MACHINE,
hkey_mod.HKEY_CLASSES_ROOT)
def read_keys(base, key):
"""Return list of registry keys."""
try:
handle = RegOpenKeyEx(base, key)
except RegError:
return None
L = []
i = 0
while True:
try:
k = RegEnumKey(handle, i)
except RegError:
break
L.append(k)
i += 1
return L
def read_values(base, key):
"""Return dict of registry keys and values.
All names are converted to lowercase.
"""
try:
handle = RegOpenKeyEx(base, key)
except RegError:
return None
d = {}
i = 0
while True:
try:
name, value, type = RegEnumValue(handle, i)
except RegError:
break
name = name.lower()
d[convert_mbcs(name)] = convert_mbcs(value)
i += 1
return d
def convert_mbcs(s):
dec = getattr(s, "decode", None)
if dec is not None:
try:
s = dec("mbcs")
except UnicodeError:
pass
return s
class MacroExpander:
def __init__(self, version):
self.macros = {}
self.load_macros(version)
def set_macro(self, macro, path, key):
for base in HKEYS:
d = read_values(base, path)
if d:
self.macros["$(%s)" % macro] = d[key]
break
def load_macros(self, version):
vsbase = r"Software\Microsoft\VisualStudio\%0.1f" % version
self.set_macro("VCInstallDir", vsbase + r"\Setup\VC", "productdir")
self.set_macro("VSInstallDir", vsbase + r"\Setup\VS", "productdir")
net = r"Software\Microsoft\.NETFramework"
self.set_macro("FrameworkDir", net, "installroot")
try:
if version > 7.0:
self.set_macro("FrameworkSDKDir", net, "sdkinstallrootv1.1")
else:
self.set_macro("FrameworkSDKDir", net, "sdkinstallroot")
except KeyError as exc: #
raise DistutilsPlatformError(
"""Python was built with Visual Studio 2003;
extensions must be built with a compiler that can generate compatible binaries.
Visual Studio 2003 was not found on this system. If you have Cygwin installed,
you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""")
p = r"Software\Microsoft\NET Framework Setup\Product"
for base in HKEYS:
try:
h = RegOpenKeyEx(base, p)
except RegError:
continue
key = RegEnumKey(h, 0)
d = read_values(base, r"%s\%s" % (p, key))
self.macros["$(FrameworkVersion)"] = d["version"]
def sub(self, s):
for k, v in self.macros.items():
s = s.replace(k, v)
return s
def get_build_version():
"""Return the version of MSVC that was used to build Python.
For Python 2.3 and up, the version number is included in
sys.version. For earlier versions, assume the compiler is MSVC 6.
"""
prefix = "MSC v."
i = sys.version.find(prefix)
if i == -1:
return 6
i = i + len(prefix)
s, rest = sys.version[i:].split(" ", 1)
majorVersion = int(s[:-2]) - 6
if majorVersion >= 13:
# v13 was skipped and should be v14
majorVersion += 1
minorVersion = int(s[2:3]) / 10.0
# I don't think paths are affected by minor version in version 6
if majorVersion == 6:
minorVersion = 0
if majorVersion >= 6:
return majorVersion + minorVersion
# else we don't know what version of the compiler this is
return None
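# Worked example (added note): a sys.version containing "[MSC v.1500 64 bit (AMD64)]"
# gives s = "1500", so majorVersion = 15 - 6 = 9 and minorVersion = 0.0, and
# get_build_version() returns 9.0 (the MSVC version shipped with VS 2008).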
def get_build_architecture():
"""Return the processor architecture.
Possible results are "Intel" or "AMD64".
"""
prefix = " bit ("
i = sys.version.find(prefix)
if i == -1:
return "Intel"
j = sys.version.find(")", i)
return sys.version[i+len(prefix):j]
def normalize_and_reduce_paths(paths):
"""Return a list of normalized paths with duplicates removed.
The current order of paths is maintained.
"""
# Paths are normalized so things like: /a and /a/ aren't both preserved.
reduced_paths = []
for p in paths:
np = os.path.normpath(p)
# XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set.
if np not in reduced_paths:
reduced_paths.append(np)
return reduced_paths
class MSVCCompiler(CCompiler) :
"""Concrete class that implements an interface to Microsoft Visual C++,
as defined by the CCompiler abstract class."""
compiler_type = 'msvc'
# Just set this so CCompiler's constructor doesn't barf. We currently
# don't use the 'set_executables()' bureaucracy provided by CCompiler,
# as it really isn't necessary for this sort of single-compiler class.
# Would be nice to have a consistent interface with UnixCCompiler,
# though, so it's worth thinking about.
executables = {}
# Private class data (need to distinguish C from C++ source for compiler)
_c_extensions = ['.c']
_cpp_extensions = ['.cc', '.cpp', '.cxx']
_rc_extensions = ['.rc']
_mc_extensions = ['.mc']
# Needed for the filename generation methods provided by the
# base class, CCompiler.
src_extensions = (_c_extensions + _cpp_extensions +
_rc_extensions + _mc_extensions)
res_extension = '.res'
obj_extension = '.obj'
static_lib_extension = '.lib'
shared_lib_extension = '.dll'
static_lib_format = shared_lib_format = '%s%s'
exe_extension = '.exe'
def __init__(self, verbose=0, dry_run=0, force=0):
CCompiler.__init__ (self, verbose, dry_run, force)
self.__version = get_build_version()
self.__arch = get_build_architecture()
if self.__arch == "Intel":
# x86
if self.__version >= 7:
self.__root = r"Software\Microsoft\VisualStudio"
self.__macros = MacroExpander(self.__version)
else:
self.__root = r"Software\Microsoft\Devstudio"
self.__product = "Visual Studio version %s" % self.__version
else:
# Win64. Assume this was built with the platform SDK
self.__product = "Microsoft SDK compiler %s" % (self.__version + 6)
self.initialized = False
def initialize(self):
self.__paths = []
if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"):
# Assume that the SDK set up everything alright; don't try to be
# smarter
self.cc = "cl.exe"
self.linker = "link.exe"
self.lib = "lib.exe"
self.rc = "rc.exe"
self.mc = "mc.exe"
else:
self.__paths = self.get_msvc_paths("path")
if len(self.__paths) == 0:
raise DistutilsPlatformError("Python was built with %s, "
"and extensions need to be built with the same "
"version of the compiler, but it isn't installed."
% self.__product)
self.cc = self.find_exe("cl.exe")
self.linker = self.find_exe("link.exe")
self.lib = self.find_exe("lib.exe")
self.rc = self.find_exe("rc.exe") # resource compiler
self.mc = self.find_exe("mc.exe") # message compiler
self.set_path_env_var('lib')
self.set_path_env_var('include')
# extend the MSVC path with the current path
try:
for p in os.environ['path'].split(';'):
self.__paths.append(p)
except KeyError:
pass
self.__paths = normalize_and_reduce_paths(self.__paths)
os.environ['path'] = ";".join(self.__paths)
self.preprocess_options = None
if self.__arch == "Intel":
self.compile_options = [ '/nologo', '/O2', '/MD', '/W3', '/GX' ,
'/DNDEBUG']
self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GX',
'/Z7', '/D_DEBUG']
else:
# Win64
self.compile_options = [ '/nologo', '/O2', '/MD', '/W3', '/GS-' ,
'/DNDEBUG']
self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-',
'/Z7', '/D_DEBUG']
self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
if self.__version >= 7:
self.ldflags_shared_debug = [
'/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG'
]
else:
self.ldflags_shared_debug = [
'/DLL', '/nologo', '/INCREMENTAL:no', '/pdb:None', '/DEBUG'
]
self.ldflags_static = [ '/nologo']
self.initialized = True
# -- Worker methods ------------------------------------------------
def object_filenames(self,
source_filenames,
strip_dir=0,
output_dir=''):
# Copied from ccompiler.py, extended to return .res as 'object'-file
# for .rc input file
if output_dir is None: output_dir = ''
obj_names = []
for src_name in source_filenames:
(base, ext) = os.path.splitext (src_name)
base = os.path.splitdrive(base)[1] # Chop off the drive
base = base[os.path.isabs(base):] # If abs, chop off leading /
if ext not in self.src_extensions:
# Better to raise an exception instead of silently continuing
# and later complain about sources and targets having
# different lengths
raise CompileError ("Don't know how to compile %s" % src_name)
if strip_dir:
base = os.path.basename (base)
if ext in self._rc_extensions:
obj_names.append (os.path.join (output_dir,
base + self.res_extension))
elif ext in self._mc_extensions:
obj_names.append (os.path.join (output_dir,
base + self.res_extension))
else:
obj_names.append (os.path.join (output_dir,
base + self.obj_extension))
return obj_names
def compile(self, sources,
output_dir=None, macros=None, include_dirs=None, debug=0,
extra_preargs=None, extra_postargs=None, depends=None):
if not self.initialized:
self.initialize()
compile_info = self._setup_compile(output_dir, macros, include_dirs,
sources, depends, extra_postargs)
macros, objects, extra_postargs, pp_opts, build = compile_info
compile_opts = extra_preargs or []
compile_opts.append ('/c')
if debug:
compile_opts.extend(self.compile_options_debug)
else:
compile_opts.extend(self.compile_options)
for obj in objects:
try:
src, ext = build[obj]
except KeyError:
continue
if debug:
# pass the full pathname to MSVC in debug mode,
# this allows the debugger to find the source file
# without asking the user to browse for it
src = os.path.abspath(src)
if ext in self._c_extensions:
input_opt = "/Tc" + src
elif ext in self._cpp_extensions:
input_opt = "/Tp" + src
elif ext in self._rc_extensions:
# compile .RC to .RES file
input_opt = src
output_opt = "/fo" + obj
try:
self.spawn([self.rc] + pp_opts +
[output_opt] + [input_opt])
except DistutilsExecError as msg:
raise CompileError(msg)
continue
elif ext in self._mc_extensions:
# Compile .MC to .RC file to .RES file.
# * '-h dir' specifies the directory for the
# generated include file
# * '-r dir' specifies the target directory of the
# generated RC file and the binary message resource
# it includes
#
# For now (since there are no options to change this),
# we use the source-directory for the include file and
# the build directory for the RC file and message
# resources. This works at least for win32all.
h_dir = os.path.dirname(src)
rc_dir = os.path.dirname(obj)
try:
# first compile .MC to .RC and .H file
self.spawn([self.mc] +
['-h', h_dir, '-r', rc_dir] + [src])
base, _ = os.path.splitext (os.path.basename (src))
rc_file = os.path.join (rc_dir, base + '.rc')
# then compile .RC to .RES file
self.spawn([self.rc] +
["/fo" + obj] + [rc_file])
except DistutilsExecError as msg:
raise CompileError(msg)
continue
else:
# how to handle this file?
raise CompileError("Don't know how to compile %s to %s"
% (src, obj))
output_opt = "/Fo" + obj
try:
self.spawn([self.cc] + compile_opts + pp_opts +
[input_opt, output_opt] +
extra_postargs)
except DistutilsExecError as msg:
raise CompileError(msg)
return objects
def create_static_lib(self,
objects,
output_libname,
output_dir=None,
debug=0,
target_lang=None):
if not self.initialized:
self.initialize()
(objects, output_dir) = self._fix_object_args(objects, output_dir)
output_filename = self.library_filename(output_libname,
output_dir=output_dir)
if self._need_link(objects, output_filename):
lib_args = objects + ['/OUT:' + output_filename]
if debug:
pass # XXX what goes here?
try:
self.spawn([self.lib] + lib_args)
except DistutilsExecError as msg:
raise LibError(msg)
else:
log.debug("skipping %s (up-to-date)", output_filename)
def link(self,
target_desc,
objects,
output_filename,
output_dir=None,
libraries=None,
library_dirs=None,
runtime_library_dirs=None,
export_symbols=None,
debug=0,
extra_preargs=None,
extra_postargs=None,
build_temp=None,
target_lang=None):
if not self.initialized:
self.initialize()
(objects, output_dir) = self._fix_object_args(objects, output_dir)
fixed_args = self._fix_lib_args(libraries, library_dirs,
runtime_library_dirs)
(libraries, library_dirs, runtime_library_dirs) = fixed_args
if runtime_library_dirs:
self.warn ("I don't know what to do with 'runtime_library_dirs': "
+ str (runtime_library_dirs))
lib_opts = gen_lib_options(self,
library_dirs, runtime_library_dirs,
libraries)
if output_dir is not None:
output_filename = os.path.join(output_dir, output_filename)
if self._need_link(objects, output_filename):
if target_desc == CCompiler.EXECUTABLE:
if debug:
ldflags = self.ldflags_shared_debug[1:]
else:
ldflags = self.ldflags_shared[1:]
else:
if debug:
ldflags = self.ldflags_shared_debug
else:
ldflags = self.ldflags_shared
export_opts = []
for sym in (export_symbols or []):
export_opts.append("/EXPORT:" + sym)
ld_args = (ldflags + lib_opts + export_opts +
objects + ['/OUT:' + output_filename])
# The MSVC linker generates .lib and .exp files, which cannot be
# suppressed by any linker switches. The .lib files may even be
# needed! Make sure they are generated in the temporary build
# directory. Since they have different names for debug and release
# builds, they can go into the same directory.
if export_symbols is not None:
(dll_name, dll_ext) = os.path.splitext(
os.path.basename(output_filename))
implib_file = os.path.join(
os.path.dirname(objects[0]),
self.library_filename(dll_name))
ld_args.append ('/IMPLIB:' + implib_file)
if extra_preargs:
ld_args[:0] = extra_preargs
if extra_postargs:
ld_args.extend(extra_postargs)
self.mkpath(os.path.dirname(output_filename))
try:
self.spawn([self.linker] + ld_args)
except DistutilsExecError as msg:
raise LinkError(msg)
else:
log.debug("skipping %s (up-to-date)", output_filename)
# -- Miscellaneous methods -----------------------------------------
    # These are all used by the 'gen_lib_options()' function, in
# ccompiler.py.
def library_dir_option(self, dir):
return "/LIBPATH:" + dir
def runtime_library_dir_option(self, dir):
raise DistutilsPlatformError(
"don't know how to set runtime library search path for MSVC++")
def library_option(self, lib):
return self.library_filename(lib)
def find_library_file(self, dirs, lib, debug=0):
# Prefer a debugging library if found (and requested), but deal
# with it if we don't have one.
if debug:
try_names = [lib + "_d", lib]
else:
try_names = [lib]
for dir in dirs:
for name in try_names:
libfile = os.path.join(dir, self.library_filename (name))
if os.path.exists(libfile):
return libfile
else:
# Oops, didn't find it in *any* of 'dirs'
return None
# Helper methods for using the MSVC registry settings
def find_exe(self, exe):
"""Return path to an MSVC executable program.
Tries to find the program in several places: first, one of the
MSVC program search paths from the registry; next, the directories
in the PATH environment variable. If any of those work, return an
absolute path that is known to exist. If none of them work, just
return the original program name, 'exe'.
"""
for p in self.__paths:
fn = os.path.join(os.path.abspath(p), exe)
if os.path.isfile(fn):
return fn
# didn't find it; try existing path
for p in os.environ['Path'].split(';'):
fn = os.path.join(os.path.abspath(p),exe)
if os.path.isfile(fn):
return fn
return exe
def get_msvc_paths(self, path, platform='x86'):
"""Get a list of devstudio directories (include, lib or path).
Return a list of strings. The list will be empty if unable to
access the registry or appropriate registry keys not found.
"""
if not _can_read_reg:
return []
path = path + " dirs"
if self.__version >= 7:
key = (r"%s\%0.1f\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories"
% (self.__root, self.__version))
else:
key = (r"%s\6.0\Build System\Components\Platforms"
r"\Win32 (%s)\Directories" % (self.__root, platform))
for base in HKEYS:
d = read_values(base, key)
if d:
if self.__version >= 7:
return self.__macros.sub(d[path]).split(";")
else:
return d[path].split(";")
# MSVC 6 seems to create the registry entries we need only when
# the GUI is run.
if self.__version == 6:
for base in HKEYS:
if read_values(base, r"%s\6.0" % self.__root) is not None:
self.warn("It seems you have Visual Studio 6 installed, "
"but the expected registry settings are not present.\n"
"You must at least run the Visual Studio GUI once "
"so that these entries are created.")
break
return []
def set_path_env_var(self, name):
"""Set environment variable 'name' to an MSVC path type value.
This is equivalent to a SET command prior to execution of spawned
commands.
"""
if name == "lib":
p = self.get_msvc_paths("library")
else:
p = self.get_msvc_paths(name)
if p:
os.environ[name] = ';'.join(p)
if get_build_version() >= 8.0:
log.debug("Importing new compiler from distutils.msvc9compiler")
OldMSVCCompiler = MSVCCompiler
from distutils.msvc9compiler import MSVCCompiler
    # get_build_architecture is not really relevant now that we support cross-compile
from distutils.msvc9compiler import MacroExpander
|
|
#!/usr/bin/env python
# encoding: utf-8
################################################################################
#
# RMG - Reaction Mechanism Generator
#
# Copyright (c) 2002-2017 Prof. William H. Green (whgreen@mit.edu),
# Prof. Richard H. West (r.west@neu.edu) and the RMG Team (rmg_dev@mit.edu)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the 'Software'),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
################################################################################
"""
This script contains unit tests of the :mod:`rmgpy.statmech.rotation` module.
"""
import unittest
import numpy
from rmgpy.statmech.rotation import LinearRotor, NonlinearRotor, KRotor, SphericalTopRotor
import rmgpy.constants as constants
################################################################################
class TestLinearRotor(unittest.TestCase):
"""
Contains unit tests of the LinearRotor class.
"""
def setUp(self):
"""
A function run before each unit test in this class.
"""
self.inertia = 11.75
self.symmetry = 2
self.quantum = False
self.mode = LinearRotor(
inertia = (self.inertia,"amu*angstrom^2"),
symmetry = self.symmetry,
quantum = self.quantum,
)
def test_getRotationalConstant(self):
"""
Test getting the LinearRotor.rotationalConstant property.
"""
Bexp = 1.434692
Bact = self.mode.rotationalConstant.value_si
self.assertAlmostEqual(Bexp, Bact, 4)
def test_setRotationalConstant(self):
"""
Test setting the LinearRotor.rotationalConstant property.
"""
B = self.mode.rotationalConstant
B.value_si *= 2
self.mode.rotationalConstant = B
Iexp = 0.5 * self.inertia
Iact = self.mode.inertia.value_si * constants.Na * 1e23
self.assertAlmostEqual(Iexp, Iact, 4)
def test_getLevelEnergy(self):
"""
Test the LinearRotor.getLevelEnergy() method.
"""
B = self.mode.rotationalConstant.value_si * constants.h * constants.c * 100.
B *= constants.Na
for J in range(0, 100):
Eexp = B * J * (J + 1)
Eact = self.mode.getLevelEnergy(J)
if J == 0:
self.assertEqual(Eact, 0)
else:
self.assertAlmostEqual(Eexp, Eact, delta=1e-4*Eexp)
def test_getLevelDegeneracy(self):
"""
Test the LinearRotor.getLevelDegeneracy() method.
"""
for J in range(0, 100):
gexp = 2 * J + 1
gact = self.mode.getLevelDegeneracy(J)
self.assertEqual(gexp, gact)
def test_getPartitionFunction_classical(self):
"""
Test the LinearRotor.getPartitionFunction() method for a classical
rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Qexplist = numpy.array([72.6691, 121.115, 242.230, 363.346, 484.461])
for T, Qexp in zip(Tlist, Qexplist):
Qact = self.mode.getPartitionFunction(T)
self.assertAlmostEqual(Qexp, Qact, delta=1e-4*Qexp)
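    # Cross-check (added note): classically Q = k_B*T/(sigma*h*c*B); with
    # B = 1.434692 cm^-1 and sigma = 2, T = 300 K gives
    # (0.695035 cm^-1/K * 300 K) / (2 * 1.434692 cm^-1) ~= 72.67, matching the
    # first expected value above.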
def test_getPartitionFunction_quantum(self):
"""
Test the LinearRotor.getPartitionFunction() method for a quantum
rotor.
"""
self.mode.quantum = True
Tlist = numpy.array([300,500,1000,1500,2000])
Qexplist = numpy.array([72.8360, 121.282, 242.391, 363.512, 484.627])
for T, Qexp in zip(Tlist, Qexplist):
Qact = self.mode.getPartitionFunction(T)
self.assertAlmostEqual(Qexp, Qact, delta=1e-4*Qexp)
def test_getHeatCapacity_classical(self):
"""
Test the LinearRotor.getHeatCapacity() method using a classical rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Cvexplist = numpy.array([1, 1, 1, 1, 1]) * constants.R
for T, Cvexp in zip(Tlist, Cvexplist):
Cvact = self.mode.getHeatCapacity(T)
self.assertAlmostEqual(Cvexp, Cvact, delta=1e-4*Cvexp)
def test_getHeatCapacity_quantum(self):
"""
Test the LinearRotor.getHeatCapacity() method using a quantum rotor.
"""
self.mode.quantum = True
Tlist = numpy.array([300,500,1000,1500,2000])
Cvexplist = numpy.array([1, 1, 1, 1, 1]) * constants.R
for T, Cvexp in zip(Tlist, Cvexplist):
Cvact = self.mode.getHeatCapacity(T)
self.assertAlmostEqual(Cvexp, Cvact, delta=1e-4*Cvexp)
def test_getEnthalpy_classical(self):
"""
Test the LinearRotor.getEnthalpy() method using a classical rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Hexplist = numpy.array([1, 1, 1, 1, 1]) * constants.R * Tlist
for T, Hexp in zip(Tlist, Hexplist):
Hact = self.mode.getEnthalpy(T)
self.assertAlmostEqual(Hexp, Hact, delta=1e-4*Hexp)
def test_getEnthalpy_quantum(self):
"""
Test the LinearRotor.getEnthalpy() method using a quantum rotor.
"""
self.mode.quantum = True
Tlist = numpy.array([300,500,1000,1500,2000])
Hexplist = numpy.array([0.997705, 0.998624, 0.999312, 0.999541, 0.999656]) * constants.R * Tlist
for T, Hexp in zip(Tlist, Hexplist):
Hact = self.mode.getEnthalpy(T)
self.assertAlmostEqual(Hexp, Hact, delta=1e-4*Hexp)
def test_getEntropy_classical(self):
"""
Test the LinearRotor.getEntropy() method using a classical rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Sexplist = numpy.array([5.28592, 5.79674, 6.48989, 6.89535, 7.18304]) * constants.R
for T, Sexp in zip(Tlist, Sexplist):
Sact = self.mode.getEntropy(T)
self.assertAlmostEqual(Sexp, Sact, delta=1e-4*Sexp)
def test_getEntropy_quantum(self):
"""
Test the LinearRotor.getEntropy() method using a quantum rotor.
"""
self.mode.quantum = True
Tlist = numpy.array([300,500,1000,1500,2000])
Sexplist = numpy.array([5.28592, 5.79674, 6.48989, 6.89535, 7.18304]) * constants.R
for T, Sexp in zip(Tlist, Sexplist):
Sact = self.mode.getEntropy(T)
self.assertAlmostEqual(Sexp, Sact, delta=1e-4*Sexp)
def test_getSumOfStates_classical(self):
"""
Test the LinearRotor.getSumOfStates() method using a classical rotor.
"""
self.mode.quantum = False
Elist = numpy.arange(0, 2000*11.96, 1.0*11.96)
densStates = self.mode.getDensityOfStates(Elist)
sumStates = self.mode.getSumOfStates(Elist)
for n in range(1, len(Elist)):
self.assertAlmostEqual(numpy.sum(densStates[0:n]) / sumStates[n], 1.0, 3)
def test_getSumOfStates_quantum(self):
"""
Test the LinearRotor.getSumOfStates() method using a quantum rotor.
"""
self.mode.quantum = True
Elist = numpy.arange(0, 4000.*11.96, 2.0*11.96)
densStates = self.mode.getDensityOfStates(Elist)
sumStates = self.mode.getSumOfStates(Elist)
for n in range(1, len(Elist)):
self.assertAlmostEqual(numpy.sum(densStates[0:n+1]) / sumStates[n], 1.0, 3)
def test_getDensityOfStates_classical(self):
"""
Test the LinearRotor.getDensityOfStates() method using a classical
rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,400,500])
Elist = numpy.arange(0, 4000.*11.96, 1.0*11.96)
for T in Tlist:
densStates = self.mode.getDensityOfStates(Elist)
Qact = numpy.sum(densStates * numpy.exp(-Elist / constants.R / T))
Qexp = self.mode.getPartitionFunction(T)
self.assertAlmostEqual(Qexp, Qact, delta=1e-2*Qexp)
def test_getDensityOfStates_quantum(self):
"""
Test the LinearRotor.getDensityOfStates() method using a quantum rotor.
"""
self.mode.quantum = True
Tlist = numpy.array([300,400,500])
Elist = numpy.arange(0, 4000.*11.96, 2.0*11.96)
for T in Tlist:
densStates = self.mode.getDensityOfStates(Elist)
Qact = numpy.sum(densStates * numpy.exp(-Elist / constants.R / T))
Qexp = self.mode.getPartitionFunction(T)
self.assertAlmostEqual(Qexp, Qact, delta=1e-2*Qexp)
def test_repr(self):
"""
Test that a LinearRotor object can be reconstructed from its repr()
output with no loss of information.
"""
mode = None
exec('mode = {0!r}'.format(self.mode))
self.assertAlmostEqual(self.mode.inertia.value, mode.inertia.value, 6)
self.assertEqual(self.mode.inertia.units, mode.inertia.units)
self.assertEqual(self.mode.symmetry, mode.symmetry)
self.assertEqual(self.mode.quantum, mode.quantum)
def test_pickle(self):
"""
Test that a LinearRotor object can be pickled and unpickled with no
loss of information.
"""
import cPickle
mode = cPickle.loads(cPickle.dumps(self.mode,-1))
self.assertAlmostEqual(self.mode.inertia.value, mode.inertia.value, 6)
self.assertEqual(self.mode.inertia.units, mode.inertia.units)
self.assertEqual(self.mode.symmetry, mode.symmetry)
self.assertEqual(self.mode.quantum, mode.quantum)
################################################################################
class TestNonlinearRotor(unittest.TestCase):
"""
Contains unit tests of the NonlinearRotor class.
"""
def setUp(self):
"""
A function run before each unit test in this class.
"""
self.inertia = numpy.array([3.415, 16.65, 20.07])
self.symmetry = 4
self.quantum = False
self.mode = NonlinearRotor(
inertia = (self.inertia,"amu*angstrom^2"),
symmetry = self.symmetry,
quantum = self.quantum,
)
def test_getRotationalConstant(self):
"""
Test getting the NonlinearRotor.rotationalConstant property.
"""
Bexp = numpy.array([4.93635, 1.0125, 0.839942])
Bact = self.mode.rotationalConstant.value_si
for B0, B in zip(Bexp, Bact):
self.assertAlmostEqual(B0, B, 4)
def test_setRotationalConstant(self):
"""
Test setting the NonlinearRotor.rotationalConstant property.
"""
B = self.mode.rotationalConstant
B.value_si *= 2
self.mode.rotationalConstant = B
Iexp = 0.5 * self.inertia
Iact = self.mode.inertia.value_si * constants.Na * 1e23
for I0, I in zip(Iexp, Iact):
self.assertAlmostEqual(I0, I, 4)
def test_getPartitionFunction_classical(self):
"""
Test the NonlinearRotor.getPartitionFunction() method for a classical
rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Qexplist = numpy.array([651.162, 1401.08, 3962.84, 7280.21, 11208.6])
for T, Qexp in zip(Tlist, Qexplist):
Qact = self.mode.getPartitionFunction(T)
self.assertAlmostEqual(Qexp, Qact, delta=1e-4*Qexp)
def test_getHeatCapacity_classical(self):
"""
Test the NonlinearRotor.getHeatCapacity() method using a classical
rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Cvexplist = numpy.array([1.5, 1.5, 1.5, 1.5, 1.5]) * constants.R
for T, Cvexp in zip(Tlist, Cvexplist):
Cvact = self.mode.getHeatCapacity(T)
self.assertAlmostEqual(Cvexp, Cvact, delta=1e-4*Cvexp)
def test_getEnthalpy_classical(self):
"""
Test the NonlinearRotor.getEnthalpy() method using a classical rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Hexplist = numpy.array([1.5, 1.5, 1.5, 1.5, 1.5]) * constants.R * Tlist
for T, Hexp in zip(Tlist, Hexplist):
Hact = self.mode.getEnthalpy(T)
self.assertAlmostEqual(Hexp, Hact, delta=1e-4*Hexp)
def test_getEntropy_classical(self):
"""
Test the NonlinearRotor.getEntropy() method using a classical rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Sexplist = numpy.array([7.97876, 8.74500, 9.78472, 10.3929, 10.8244]) * constants.R
for T, Sexp in zip(Tlist, Sexplist):
Sact = self.mode.getEntropy(T)
self.assertAlmostEqual(Sexp, Sact, delta=1e-4*Sexp)
def test_getSumOfStates_classical(self):
"""
Test the NonlinearRotor.getSumOfStates() method using a classical rotor.
"""
self.mode.quantum = False
Elist = numpy.arange(0, 1000*11.96, 1*11.96)
sumStates = self.mode.getSumOfStates(Elist)
densStates = self.mode.getDensityOfStates(Elist)
for n in range(10, len(Elist)):
self.assertTrue(0.8 < numpy.sum(densStates[0:n]) / sumStates[n] < 1.25, '{0} != {1}'.format(numpy.sum(densStates[0:n]), sumStates[n]))
def test_getDensityOfStates_classical(self):
"""
Test the NonlinearRotor.getDensityOfStates() method using a classical
rotor.
"""
self.mode.quantum = False
Elist = numpy.arange(0, 1000*11.96, 1*11.96)
densStates = self.mode.getDensityOfStates(Elist)
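        # Boltzmann-weighting and summing the density of states should reproduce the partition function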
T = 100
Qact = numpy.sum(densStates * numpy.exp(-Elist / constants.R / T))
Qexp = self.mode.getPartitionFunction(T)
self.assertAlmostEqual(Qexp, Qact, delta=1e-2*Qexp)
def test_repr(self):
"""
Test that a NonlinearRotor object can be reconstructed from its
repr() output with no loss of information.
"""
mode = None
exec('mode = {0!r}'.format(self.mode))
self.assertEqual(self.mode.inertia.value.shape, mode.inertia.value.shape)
for I0, I in zip(self.mode.inertia.value, mode.inertia.value):
self.assertAlmostEqual(I0, I, 6)
self.assertEqual(self.mode.inertia.units, mode.inertia.units)
self.assertEqual(self.mode.symmetry, mode.symmetry)
self.assertEqual(self.mode.quantum, mode.quantum)
def test_pickle(self):
"""
Test that a NonlinearRotor object can be pickled and unpickled with
no loss of information.
"""
import cPickle
mode = cPickle.loads(cPickle.dumps(self.mode,-1))
self.assertEqual(self.mode.inertia.value.shape, mode.inertia.value.shape)
for I0, I in zip(self.mode.inertia.value, mode.inertia.value):
self.assertAlmostEqual(I0, I, 6)
self.assertEqual(self.mode.inertia.units, mode.inertia.units)
self.assertEqual(self.mode.symmetry, mode.symmetry)
self.assertEqual(self.mode.quantum, mode.quantum)
################################################################################
class TestKRotor(unittest.TestCase):
"""
Contains unit tests of the KRotor class.
"""
def setUp(self):
"""
A function run before each unit test in this class.
"""
self.inertia = 11.75
self.symmetry = 2
self.quantum = False
self.mode = KRotor(
inertia = (self.inertia,"amu*angstrom^2"),
symmetry = self.symmetry,
quantum = self.quantum,
)
def test_getRotationalConstant(self):
"""
Test getting the KRotor.rotationalConstant property.
"""
Bexp = 1.434692
Bact = self.mode.rotationalConstant.value_si
self.assertAlmostEqual(Bexp, Bact, 4)
def test_setRotationalConstant(self):
"""
Test setting the KRotor.rotationalConstant property.
"""
B = self.mode.rotationalConstant
B.value_si *= 2
self.mode.rotationalConstant = B
Iexp = 0.5 * self.inertia
Iact = self.mode.inertia.value_si * constants.Na * 1e23
self.assertAlmostEqual(Iexp, Iact, 4)
def test_getLevelEnergy(self):
"""
Test the KRotor.getLevelEnergy() method.
"""
B = self.mode.rotationalConstant.value_si * constants.h * constants.c * 100.
B *= constants.Na
for J in range(0, 100):
Eexp = float(B * J * J)
Eact = float(self.mode.getLevelEnergy(J))
if J == 0:
self.assertEqual(Eact, 0)
else:
self.assertAlmostEqual(Eexp, Eact, delta=1e-4*Eexp)
def test_getLevelDegeneracy(self):
"""
Test the KRotor.getLevelDegeneracy() method.
"""
for J in range(0, 100):
gexp = 1 if J == 0 else 2
gact = self.mode.getLevelDegeneracy(J)
self.assertEqual(gexp, gact, '{0} != {1}'.format(gact, gexp))
def test_getPartitionFunction_classical(self):
"""
Test the KRotor.getPartitionFunction() method for a classical
rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Qexplist = numpy.array([10.6839, 13.7929, 19.5060, 23.8899, 27.5857])
for T, Qexp in zip(Tlist, Qexplist):
Qact = self.mode.getPartitionFunction(T)
self.assertAlmostEqual(Qexp, Qact, delta=1e-4*Qexp)
def test_getPartitionFunction_quantum(self):
"""
Test the KRotor.getPartitionFunction() method for a quantum
rotor.
"""
self.mode.quantum = True
Tlist = numpy.array([300,500,1000,1500,2000])
Qexplist = numpy.array([10.6839, 13.7929, 19.5060, 23.8899, 27.5857])
for T, Qexp in zip(Tlist, Qexplist):
Qact = self.mode.getPartitionFunction(T)
self.assertAlmostEqual(Qexp, Qact, delta=1e-4*Qexp)
def test_getHeatCapacity_classical(self):
"""
Test the KRotor.getHeatCapacity() method using a classical rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Cvexplist = numpy.array([0.5, 0.5, 0.5, 0.5, 0.5]) * constants.R
for T, Cvexp in zip(Tlist, Cvexplist):
Cvact = self.mode.getHeatCapacity(T)
self.assertAlmostEqual(Cvexp, Cvact, delta=1e-4*Cvexp)
def test_getHeatCapacity_quantum(self):
"""
Test the KRotor.getHeatCapacity() method using a quantum rotor.
"""
self.mode.quantum = True
Tlist = numpy.array([300,500,1000,1500,2000])
Cvexplist = numpy.array([0.5, 0.5, 0.5, 0.5, 0.5]) * constants.R
for T, Cvexp in zip(Tlist, Cvexplist):
Cvact = self.mode.getHeatCapacity(T)
self.assertAlmostEqual(Cvexp, Cvact, delta=1e-4*Cvexp)
def test_getEnthalpy_classical(self):
"""
Test the KRotor.getEnthalpy() method using a classical rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Hexplist = numpy.array([0.5, 0.5, 0.5, 0.5, 0.5]) * constants.R * Tlist
for T, Hexp in zip(Tlist, Hexplist):
Hact = self.mode.getEnthalpy(T)
self.assertAlmostEqual(Hexp, Hact, delta=1e-4*Hexp)
def test_getEnthalpy_quantum(self):
"""
Test the KRotor.getEnthalpy() method using a quantum rotor.
"""
self.mode.quantum = True
Tlist = numpy.array([300,500,1000,1500,2000])
Hexplist = numpy.array([0.5, 0.5, 0.5, 0.5, 0.5]) * constants.R * Tlist
for T, Hexp in zip(Tlist, Hexplist):
Hact = self.mode.getEnthalpy(T)
self.assertAlmostEqual(Hexp, Hact, delta=1e-4*Hexp)
def test_getEntropy_classical(self):
"""
Test the KRotor.getEntropy() method using a classical rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Sexplist = numpy.array([2.86874, 3.12415, 3.47072, 3.67346, 3.81730]) * constants.R
for T, Sexp in zip(Tlist, Sexplist):
Sact = self.mode.getEntropy(T)
self.assertAlmostEqual(Sexp, Sact, delta=1e-4*Sexp)
def test_getEntropy_quantum(self):
"""
Test the KRotor.getEntropy() method using a quantum rotor.
"""
self.mode.quantum = True
Tlist = numpy.array([300,500,1000,1500,2000])
Sexplist = numpy.array([2.86874, 3.12415, 3.47072, 3.67346, 3.81730]) * constants.R
for T, Sexp in zip(Tlist, Sexplist):
Sact = self.mode.getEntropy(T)
self.assertAlmostEqual(Sexp, Sact, delta=1e-4*Sexp)
def test_getSumOfStates_classical(self):
"""
Test the KRotor.getSumOfStates() method using a classical rotor.
"""
self.mode.quantum = False
Elist = numpy.arange(0, 1000*11.96, 1*11.96)
sumStates = self.mode.getSumOfStates(Elist)
densStates = self.mode.getDensityOfStates(Elist)
for n in range(10, len(Elist)):
self.assertTrue(0.75 < numpy.sum(densStates[0:n+1]) / sumStates[n] < 1.3333, '{0} != {1}'.format(numpy.sum(densStates[0:n+1]), sumStates[n]))
def test_getSumOfStates_quantum(self):
"""
Test the KRotor.getSumOfStates() method using a quantum rotor.
"""
self.mode.quantum = True
Elist = numpy.arange(0, 1000*11.96, 1*11.96)
sumStates = self.mode.getSumOfStates(Elist)
densStates = self.mode.getDensityOfStates(Elist)
for n in range(10, len(Elist)):
self.assertTrue(0.8 < numpy.sum(densStates[0:n+1]) / sumStates[n] < 1.25, '{0} != {1}'.format(numpy.sum(densStates[0:n+1]), sumStates[n]))
def test_getDensityOfStates_classical(self):
"""
Test the KRotor.getDensityOfStates() method using a classical
rotor.
"""
self.mode.quantum = False
Elist = numpy.arange(0, 3000*11.96, 0.05*11.96)
densStates = self.mode.getDensityOfStates(Elist)
T = 500
Qact = numpy.sum(densStates * numpy.exp(-Elist / constants.R / T))
Qexp = self.mode.getPartitionFunction(T)
self.assertAlmostEqual(Qexp, Qact, delta=1e-2*Qexp)
def test_getDensityOfStates_quantum(self):
"""
Test the KRotor.getDensityOfStates() method using a quantum rotor.
"""
self.mode.quantum = True
Elist = numpy.arange(0, 4000*11.96, 2*11.96)
densStates = self.mode.getDensityOfStates(Elist)
T = 500
Qact = numpy.sum(densStates * numpy.exp(-Elist / constants.R / T))
Qexp = self.mode.getPartitionFunction(T)
self.assertAlmostEqual(Qexp, Qact, delta=1e-2*Qexp)
def test_repr(self):
"""
Test that a KRotor object can be reconstructed from its repr() output
with no loss of information.
"""
mode = None
exec('mode = {0!r}'.format(self.mode))
self.assertAlmostEqual(self.mode.inertia.value, mode.inertia.value, 6)
self.assertEqual(self.mode.inertia.units, mode.inertia.units)
self.assertEqual(self.mode.symmetry, mode.symmetry)
self.assertEqual(self.mode.quantum, mode.quantum)
def test_pickle(self):
"""
Test that a KRotor object can be pickled and unpickled with no loss
of information.
"""
import cPickle
mode = cPickle.loads(cPickle.dumps(self.mode,-1))
self.assertAlmostEqual(self.mode.inertia.value, mode.inertia.value, 6)
self.assertEqual(self.mode.inertia.units, mode.inertia.units)
self.assertEqual(self.mode.symmetry, mode.symmetry)
self.assertEqual(self.mode.quantum, mode.quantum)
################################################################################
class TestSphericalTopRotor(unittest.TestCase):
"""
Contains unit tests of the SphericalTopRotor class.
"""
def setUp(self):
"""
A function run before each unit test in this class.
"""
self.inertia = 11.75
self.symmetry = 2
self.quantum = False
self.mode = SphericalTopRotor(
inertia = (self.inertia,"amu*angstrom^2"),
symmetry = self.symmetry,
quantum = self.quantum,
)
def test_getRotationalConstant(self):
"""
Test getting the SphericalTopRotor.rotationalConstant property.
"""
Bexp = 1.434692
Bact = self.mode.rotationalConstant.value_si
self.assertAlmostEqual(Bexp, Bact, 4)
def test_setRotationalConstant(self):
"""
Test setting the SphericalTopRotor.rotationalConstant property.
"""
B = self.mode.rotationalConstant
B.value_si *= 2
self.mode.rotationalConstant = B
Iexp = 0.5 * self.inertia
Iact = self.mode.inertia.value_si * constants.Na * 1e23
self.assertAlmostEqual(Iexp, Iact, 4)
def test_getLevelEnergy(self):
"""
Test the SphericalTopRotor.getLevelEnergy() method.
"""
B = self.mode.rotationalConstant.value_si * constants.h * constants.c * 100.
B *= constants.Na
for J in range(0, 100):
Eexp = B * J * (J + 1)
Eact = self.mode.getLevelEnergy(J)
if J == 0:
self.assertEqual(Eact, 0)
else:
self.assertAlmostEqual(Eexp, Eact, delta=1e-4*Eexp)
def test_getLevelDegeneracy(self):
"""
Test the SphericalTopRotor.getLevelDegeneracy() method.
"""
for J in range(0, 100):
gexp = (2 * J + 1)**2
gact = self.mode.getLevelDegeneracy(J)
self.assertEqual(gexp, gact, '{0} != {1}'.format(gact, gexp))
def test_getPartitionFunction_classical(self):
"""
Test the SphericalTopRotor.getPartitionFunction() method for a classical
rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Qexplist = numpy.array([1552.74, 3340.97, 9449.69, 17360.2, 26727.8])
for T, Qexp in zip(Tlist, Qexplist):
Qact = self.mode.getPartitionFunction(T)
self.assertAlmostEqual(Qexp, Qact, delta=1e-4*Qexp)
def test_getPartitionFunction_quantum(self):
"""
Test the SphericalTopRotor.getPartitionFunction() method for a quantum
rotor.
"""
self.mode.quantum = True
Tlist = numpy.array([300,500,1000,1500,2000])
Qexplist = numpy.array([1555.42, 3344.42, 9454.57, 17366.2, 26734.7])
for T, Qexp in zip(Tlist, Qexplist):
Qact = self.mode.getPartitionFunction(T)
self.assertAlmostEqual(Qexp, Qact, delta=1e-4*Qexp)
def test_getHeatCapacity_classical(self):
"""
Test the SphericalTopRotor.getHeatCapacity() method using a classical rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Cvexplist = numpy.array([1.5, 1.5, 1.5, 1.5, 1.5]) * constants.R
for T, Cvexp in zip(Tlist, Cvexplist):
Cvact = self.mode.getHeatCapacity(T)
self.assertAlmostEqual(Cvexp, Cvact, delta=1e-4*Cvexp)
def test_getHeatCapacity_quantum(self):
"""
Test the SphericalTopRotor.getHeatCapacity() method using a quantum rotor.
"""
self.mode.quantum = True
Tlist = numpy.array([300,500,1000,1500,2000])
Cvexplist = numpy.array([1.5, 1.5, 1.5, 1.5, 1.5]) * constants.R
for T, Cvexp in zip(Tlist, Cvexplist):
Cvact = self.mode.getHeatCapacity(T)
self.assertAlmostEqual(Cvexp, Cvact, delta=1e-4*Cvexp)
def test_getEnthalpy_classical(self):
"""
Test the SphericalTopRotor.getEnthalpy() method using a classical rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Hexplist = numpy.array([1.5, 1.5, 1.5, 1.5, 1.5]) * constants.R * Tlist
for T, Hexp in zip(Tlist, Hexplist):
Hact = self.mode.getEnthalpy(T)
self.assertAlmostEqual(Hexp, Hact, delta=1e-4*Hexp)
def test_getEnthalpy_quantum(self):
"""
Test the SphericalTopRotor.getEnthalpy() method using a quantum rotor.
"""
self.mode.quantum = True
Tlist = numpy.array([300,500,1000,1500,2000])
Hexplist = numpy.array([1.49828, 1.49897, 1.49948, 1.49966, 1.49974]) * constants.R * Tlist
for T, Hexp in zip(Tlist, Hexplist):
Hact = self.mode.getEnthalpy(T)
self.assertAlmostEqual(Hexp, Hact, delta=1e-4*Hexp)
def test_getEntropy_classical(self):
"""
Test the SphericalTopRotor.getEntropy() method using a classical rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,500,1000,1500,2000])
Sexplist = numpy.array([8.84778, 9.61402, 10.6537, 11.2619, 11.6935]) * constants.R
for T, Sexp in zip(Tlist, Sexplist):
Sact = self.mode.getEntropy(T)
self.assertAlmostEqual(Sexp, Sact, delta=1e-4*Sexp)
def test_getEntropy_quantum(self):
"""
Test the SphericalTopRotor.getEntropy() method using a quantum rotor.
"""
self.mode.quantum = True
Tlist = numpy.array([300,500,1000,1500,2000])
Sexplist = numpy.array([8.84778, 9.61402, 10.6537, 11.2619, 11.6935]) * constants.R
for T, Sexp in zip(Tlist, Sexplist):
Sact = self.mode.getEntropy(T)
self.assertAlmostEqual(Sexp, Sact, delta=1e-4*Sexp)
def test_getSumOfStates_classical(self):
"""
Test the SphericalTopRotor.getSumOfStates() method using a classical rotor.
"""
self.mode.quantum = False
Elist = numpy.arange(0, 2000*11.96, 1.0*11.96)
densStates = self.mode.getDensityOfStates(Elist)
sumStates = self.mode.getSumOfStates(Elist)
for n in range(20, len(Elist)):
self.assertAlmostEqual(numpy.sum(densStates[0:n+1]) / sumStates[n], 1.0, 1)
def test_getSumOfStates_quantum(self):
"""
Test the SphericalTopRotor.getSumOfStates() method using a quantum rotor.
"""
self.mode.quantum = True
Elist = numpy.arange(0, 2000*11.96, 1.0*11.96)
densStates = self.mode.getDensityOfStates(Elist)
sumStates = self.mode.getSumOfStates(Elist)
for n in range(1, len(Elist)):
self.assertAlmostEqual(numpy.sum(densStates[0:n+1]) / sumStates[n], 1.0, 3)
def test_getDensityOfStates_classical(self):
"""
Test the SphericalTopRotor.getDensityOfStates() method using a classical
rotor.
"""
self.mode.quantum = False
Tlist = numpy.array([300,400,500])
Elist = numpy.arange(0, 2000*11.96, 1.0*11.96)
for T in Tlist:
densStates = self.mode.getDensityOfStates(Elist)
Qact = numpy.sum(densStates * numpy.exp(-Elist / constants.R / T))
Qexp = self.mode.getPartitionFunction(T)
self.assertAlmostEqual(Qexp, Qact, delta=1e-2*Qexp)
def test_getDensityOfStates_quantum(self):
"""
Test the SphericalTopRotor.getDensityOfStates() method using a quantum rotor.
"""
self.mode.quantum = True
Tlist = numpy.array([300,400,500])
Elist = numpy.arange(0, 4000*11.96, 2.0*11.96)
for T in Tlist:
densStates = self.mode.getDensityOfStates(Elist)
Qact = numpy.sum(densStates * numpy.exp(-Elist / constants.R / T))
Qexp = self.mode.getPartitionFunction(T)
self.assertAlmostEqual(Qexp, Qact, delta=1e-2*Qexp)
def test_repr(self):
"""
Test that a SphericalTopRotor object can be reconstructed from its
repr() output with no loss of information.
"""
mode = None
exec('mode = {0!r}'.format(self.mode))
self.assertAlmostEqual(self.mode.inertia.value, mode.inertia.value, 6)
self.assertEqual(self.mode.inertia.units, mode.inertia.units)
self.assertEqual(self.mode.symmetry, mode.symmetry)
self.assertEqual(self.mode.quantum, mode.quantum)
def test_pickle(self):
"""
Test that a SphericalTopRotor object can be pickled and unpickled
with no loss of information.
"""
import cPickle
mode = cPickle.loads(cPickle.dumps(self.mode,-1))
self.assertAlmostEqual(self.mode.inertia.value, mode.inertia.value, 6)
self.assertEqual(self.mode.inertia.units, mode.inertia.units)
self.assertEqual(self.mode.symmetry, mode.symmetry)
self.assertEqual(self.mode.quantum, mode.quantum)
|
|
import sys
sys.path.append("..")
from myLib.camera import CameraClient as CamSpectro
from flask import Flask, jsonify, abort, make_response, request
import json,time,logging
import subprocess,os,sys
from astropy.wcs import WCS
from astropy.coordinates import SkyCoord
import astropy.io.fits
import numpy as np
app = Flask(__name__)
def solveAstro(filename,scale):
print("debut resolution astrometrique ",scale,"arcsec per pixel, file=",filename)
name, extension = os.path.splitext(filename)
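    # Constrain solve-field to within +/-20% of the expected plate scale (arcsec per pixel)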
scale_low = str(scale*80.0/100.0)
scale_high = str(scale*120.0/100.0)
#solve-field --downsample 2 --tweak-order 2 --overwrite finderAutoSolver-Astro.fits
subprocess.call(["/usr/bin/solve-field","--cpulimit","12","--downsample", "2", "--tweak-order", "2", "--scale-units", "arcsecperpix", "--scale-low", scale_low, "--scale-high", scale_high, "--no-plots", "--overwrite", filename])
if os.path.isfile(name+'.solved'):
print("succes resolution astrometrique, get wcs data")
wcs = astropy.wcs.WCS(astropy.io.fits.open(name+'.wcs')[0].header)
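        # Clean up the auxiliary files written by solve-field (the .wcs solution is kept)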
try:
os.remove(name+'-indx.xyls')
os.remove(name+'.axy')
os.remove(name+'.corr')
os.remove(name+'.match')
os.remove(name+'.new')
os.remove(name+'.rdls')
os.remove(name+'.solved')
#os.remove(name+'.wcs')
except:
print(" Some file was not here.")
return wcs
else:
print("echec resolution astrometrique")
raise
def connectCam(config):
    # Acquire pictures through the INDI server
camSpectro=CamSpectro(config["camera"]["name"],config["camera"]["indiServerAddress"],config["camera"]["indiServerPort"])
print("Connecting to indiserver")
if (not(camSpectro.connectServer())):
print(f"Fail to connect to indi Server {camSpectro.getHost()}:{camSpectro.getPort()}")
print("Try to run:")
print(" indiserver indi_simulator_ccd")
abort(500,description="Fail to connect to indi Server")
print("connecting to camera")
if (not(camSpectro.waitCameraConnected())):
print("Fail to connect to camera")
camSpectro.disconnectServer()
abort(500,description="Fail to connect to camera")
#set binning
camSpectro.setBinning({'X':config["camera"]["binning"],'Y':config["camera"]["binning"]})
return camSpectro
def stackSerie(sourcePath,nbImage):
print("stackSerie()")
fileNamePath=os.path.split(sourcePath)[0]
fileNameRoot=os.path.split(sourcePath)[1].split('.')[0]
fileNameImage=fileNamePath+'/'+fileNameRoot+"-1.fits"
print(f" fileNamePath = {fileNamePath}")
print(f" fileNameRoot = {fileNameRoot}")
print(f" fileNameImage = {fileNameImage}")
hdul = astropy.io.fits.open(fileNameImage)
hdu = hdul[0]
imgStack = np.float64(hdu.data)
print(f" open {fileNameImage} EXPTIME = {hdu.header['EXPTIME']}")
myHeader = hdu.header
expTime = hdu.header['EXPTIME']
hdul.close()
for i in range(1,nbImage):
fileNameImage=fileNamePath+'/'+fileNameRoot+"-"+str(i+1)+".fits"
hdul = astropy.io.fits.open(fileNameImage)
hdu = hdul[0]
expTime = expTime + hdu.header['EXPTIME']
print(f" open {fileNameImage} EXPTIME = {hdu.header['EXPTIME']}")
imgStack = imgStack + hdu.data
hdul.close()
imgStack = imgStack / nbImage
hdu = astropy.io.fits.PrimaryHDU(imgStack)
hdu.header = myHeader
myHeader['EXPTIME'] = expTime
fileNameImage = fileNamePath+'/'+fileNameRoot+".fits"
print(f" Save staked image to {fileNameImage} total EXPTIME = {expTime}")
try:
os.remove(fileNameImage)
except:
print(f" no previous stack file {fileNameImage}")
hdu.writeto(fileNameImage)
def substrackFits(fileName1,fileName2,fileNameDest):
print("subStrackFits()")
print(f" Write ({fileNameDest}) with ({fileName1})-({fileName2})")
hdul1 = astropy.io.fits.open(fileName1)
hdu1 = hdul1[0]
img1 = hdu1.data
hdul2 = astropy.io.fits.open(fileName2)
hdu2 = hdul2[0]
img2 = hdu2.data
destData= img1 - img2
hdu = astropy.io.fits.PrimaryHDU(destData)
hdu.header = hdu1.header
try:
os.remove(fileNameDest)
except:
print(f" no previous dest File {fileNameDest}")
hdu.writeto(fileNameDest)
hdul1.close()
hdul2.close()
# called by API
def doFinderCalib(config):
print("doFinderCalib()")
fileNamePath=os.path.split(config["path"]["offset"])[0]
print(f" fileNamePath = {fileNamePath}")
fileNameRoot=os.path.split(config["path"]["offset"])[1].split('.')[0]
print(f" fileNameRoot = {fileNameRoot}")
exposureTime = 0
camSpectro = connectCam(config)
camSpectro.newAcquSerie(fileNamePath,fileNameRoot+"-",config["calib"]["nbExposure"],config["calib"]["exposureTime"])
camSpectro.waitEndAcqSerie()
print(" acquisition finished")
camSpectro.disconnectServer()
stackSerie(config["path"]["offset"],config["calib"]["nbExposure"])
return {"protoVersion":"1.00", "calib":"done"}
# called by API
def doFinderSolveAstro(config):
print("doFinderSolveAstro()")
### Picture acquisition with Finder Scope
if config['testWithoutSky']:
        # Use a dummy test image instead of acquiring one from the sky
fileNameAstro=config["imageTest"]
print(f" Test without sky, use {fileNameAstro}")
time.sleep(2)
else:
#Real image on the sky
fileNamePath=os.path.split(config["path"]["image"])[0]
print(f" fileNamePath = {fileNamePath}")
fileNameRoot=os.path.split(config["path"]["image"])[1].split('.')[0]
print(f" fileNameRoot = {fileNameRoot}")
#acquisition
print(" run acquisition")
camSpectro = connectCam(config)
camSpectro.newAcquSerie(fileNamePath,fileNameRoot+"-",config["acquisition"]["nbExposure"],config["acquisition"]["exposureTime"])
camSpectro.waitEndAcqSerie()
print(" acquisition finished")
camSpectro.disconnectServer()
fileNameImage=config["path"]["image"]
stackSerie(fileNameImage,config["acquisition"]["nbExposure"])
fileNameAstro = fileNameImage.replace(".fits","-Astro.fits")
print(f" fileNameAstro = {fileNameAstro}")
substrackFits(fileNameImage,config["path"]["offset"],fileNameAstro)
# return {"debug":"true"}
### Plate Solving
print("Plate Solving")
fenteXpix=config["centerX"]
fenteYpix=config["centerY"]
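    # Plate scale [arcsec/pixel] = (pixel size [um] -> mm, times binning) / focal length [mm],
    # i.e. radians per pixel converted to arcseconds (x 360*3600 / 2*pi, with 2*pi ~ 6.28)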
scaleArcPerPixelFinder=((config["camera"]["pixelSize"]*0.001*config["camera"]["binning"])/config["FocalLength"]) /6.28 * 360 * 60 *60
print(f" Calculated Scale is {scaleArcPerPixelFinder:0.2f} ArcSecond per pixel")
try:
w=solveAstro(fileNameAstro,scaleArcPerPixelFinder)
wx, wy = w.wcs_pix2world(fenteXpix, fenteYpix,1)
except:
print("error during plate solving...")
abort(500,description='plate solving')
return { 'error':'error' }
### Store & Display Result
print(" fente X=",fenteXpix," ,Y=",fenteYpix)
print(' RA={0}deg DEC={1}deg '.format(wx, wy))
pi = 3.141592653589793
result= { "protoVersion":"1.00", "coord": {"RA": wx/180.0*pi, "DEC":wy/180.0*pi , "unit":"RADIAN"}}
return result
# called by API
def doFinderSetCenter(config,coords):
print ("Enter dofinderSetCenter()")
alpha, delta = coords.split('&')
print (f" The real optical center is at: alpha = {alpha} delta = {delta}")
fileNameAstro = config["path"]["image"].replace(".fits","-Astro.fits")
name, extension = os.path.splitext(fileNameAstro)
print(f" reload astrometry data from {name}")
wcs = astropy.wcs.WCS(astropy.io.fits.open(name+'.wcs')[0].header)
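    # Convert the requested ICRS sky coordinates to pixel coordinates using the stored WCS solution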
sky = SkyCoord(alpha,delta,frame = 'icrs')
fenteXpix, fenteYpix =(sky.to_pixel(wcs))
fenteXpix = float(fenteXpix)
fenteYpix = float(fenteYpix)
deltaXpix , deltaYpix = fenteXpix-config['centerX'] , fenteYpix-config['centerY']
config['centerX']= fenteXpix
config['centerY']= fenteYpix
#write to config file here ??
result = { 'newCenter' : {'x': fenteXpix , 'y':fenteYpix} , 'deltaCenter': {'x':deltaXpix, 'y':deltaYpix} }
print(f" result={result}")
return result
############ main ###############
@app.route('/api/finder/setCenter', defaults = {'coords': 'none'})
@app.route('/api/finder/setCenter/<coords>', methods=['GET'])
def setCenter(coords):
global config
if coords == 'none':
return "you must use a argument for this API\n" + "example: http://localhost:5000/api/finder/setCenter/14h25m45.6s&+65d11m"
    if '&' not in coords:
        return f"argument must contain '&' to separate alpha&delta\n argument is {coords}"
return jsonify(doFinderSetCenter(config,coords))
@app.route('/api/finder/calib', methods=['GET'])
def calib_finder():
global config
return jsonify(doFinderCalib(config))
@app.route('/api/finder', methods=['GET'])
def get_finder():
global config
return jsonify(doFinderSolveAstro(config))
@app.errorhandler(500)
def internal_error(e):
return jsonify(error=str(e)), 500
if __name__ == '__main__':
if len(sys.argv)!=2:
print("Invalid number of argument")
print("correct syntax is")
print(" python3 apiFinder.py configAutoSolver.json")
exit()
else:
print(f"Confirguration file is {sys.argv[1]}")
config=json.loads(open(sys.argv[1]).read())
logging.basicConfig(filename=config['path']['logFile'],level=logging.DEBUG,format='%(asctime)s %(message)s')
app.run(host='0.0.0.0',port=5000,debug=True)
|
|
# Copyright 2022 The mT5 Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Add Tasks to registry."""
import functools
from multilingual_t5 import preprocessors
from multilingual_t5 import utils
from multilingual_t5.evaluation import metrics as mt5_metrics
import seqio
import t5.data
import t5.data.tasks
from t5.evaluation import metrics
import tensorflow_datasets as tfds
DEFAULT_SPM_PATH = "gs://t5-data/vocabs/mc4.250000.100extra/sentencepiece.model"
DEFAULT_TEMPERATURE = 1.0 / 0.3
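# With rate_num_examples, a temperature of 1/0.3 samples each language roughly in
# proportion to num_examples**0.3 (the alpha=0.3 exponent used for mT5 pretraining).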
DEFAULT_MIX_RATE = functools.partial(
t5.data.rate_num_examples, temperature=DEFAULT_TEMPERATURE)
DEFAULT_VOCAB = t5.data.SentencePieceVocabulary(DEFAULT_SPM_PATH)
DEFAULT_OUTPUT_FEATURES = {
"inputs": t5.data.Feature(
vocabulary=DEFAULT_VOCAB, add_eos=True, required=False),
"targets": t5.data.Feature(
vocabulary=DEFAULT_VOCAB, add_eos=True)
}
MC4_LANGS = tfds.text.c4.MC4_LANGUAGES
# Multilingual BERT was trained on 104 languages. We include 103 of these
# languages, as tfds.wikipedia doesn't distinguish between simplified and
# traditional Chinese, and only contains "zh" (which is a mix of simplified
# and traditional).
# https://github.com/google-research/bert/blob/master/multilingual.md
WIKI_LANGS = [
"af", "an", "ar", "ast", "az", "azb", "ba", "bar", "be", "bg", "bn", "bpy",
"br", "bs", "ca", "ce", "ceb", "cs", "cv", "cy", "da", "de", "el", "en",
"es", "et", "eu", "fa", "fi", "fr", "fy", "ga", "gl", "gu", "he", "hi",
"hr", "ht", "hu", "hy", "id", "io", "is", "it", "ja", "jv", "ka", "kk",
"kn", "ko", "ky", "la", "lb", "lmo", "lt", "lv", "mg", "min", "mk", "ml",
"mn", "mr", "ms", "my", "nds-nl", "ne", "new", "nl", "nn", "no", "oc",
"pa", "pl", "pms", "pnb", "pt", "ro", "ru", "scn", "sco", "sh", "sk", "sl",
"sq", "sr", "su", "sv", "sw", "ta", "te", "tg", "th", "tl", "tr", "tt",
"uk", "ur", "uz", "vi", "vo", "war", "yo", "zh"
]
# =========================== Pretraining Tasks/Mixtures =======================
# mC4
for lang in MC4_LANGS:
seqio.TaskRegistry.add(
"mc4.{}".format(lang.replace("-", "_")),
source=seqio.TfdsDataSource(
tfds_name="c4/multilingual:3.0.1",
splits={
"train": lang,
"validation": f"{lang}-validation"
}),
preprocessors=[
functools.partial(
t5.data.preprocessors.rekey,
key_map={
"inputs": None,
"targets": "text"
}),
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
t5.data.preprocessors.span_corruption,
seqio.preprocessors.append_eos_after_trim,
],
output_features=DEFAULT_OUTPUT_FEATURES,
metric_fns=[])
mc4 = ["mc4.{}".format(lang.replace("-", "_")) for lang in MC4_LANGS]
seqio.MixtureRegistry.add("mc4", mc4, default_rate=DEFAULT_MIX_RATE)
# Wikipedia
for lang in WIKI_LANGS:
seqio.TaskRegistry.add(
"mt5_wiki.{}".format(lang.replace("-", "_")),
source=seqio.TfdsDataSource(
tfds_name="wikipedia/20200301.{}:1.0.0".format(lang)),
preprocessors=[
functools.partial(
t5.data.preprocessors.rekey,
key_map={
"inputs": None,
"targets": "text"
}),
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
t5.data.preprocessors.span_corruption,
seqio.preprocessors.append_eos_after_trim,
],
output_features=DEFAULT_OUTPUT_FEATURES,
metric_fns=[])
wiki = ["mt5_wiki.{}".format(lang.replace("-", "_")) for lang in WIKI_LANGS]
seqio.MixtureRegistry.add("mt5_wiki", wiki, default_rate=DEFAULT_MIX_RATE)
# Mixture of mC4 and WIKI
seqio.MixtureRegistry.add("mc4_wiki", mc4 + wiki, default_rate=DEFAULT_MIX_RATE)
# =========================== Fine-tuning Tasks/Mixtures =======================
# ----- XNLI -----
# XNLI zero-shot task. This fine-tunes on English MNLI training data and then
# evaluates on multilingual XNLI dev/test data.
XNLI_LANGS = [
"ar", "bg", "de", "el", "en", "es", "fr", "hi", "ru", "sw", "th", "tr",
"ur", "vi", "zh"
]
def create_xnli_tasks_and_mixtures(task_prefix, task_suffix, output_features):
"""Helper function to create XNLI tasks and mixtures."""
if task_suffix:
task_suffix = "_" + task_suffix
seqio.TaskRegistry.add(
f"{task_prefix}xnli_train{task_suffix}",
source=seqio.TfdsDataSource(
tfds_name="multi_nli:1.1.0", splits=["train"]),
preprocessors=[
preprocessors.process_mnli,
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
seqio.preprocessors.append_eos_after_trim,
],
output_features=output_features,
metric_fns=[metrics.accuracy])
for xnli_lang in XNLI_LANGS:
seqio.TaskRegistry.add(
f"{task_prefix}xnli_dev_test{task_suffix}.{xnli_lang}",
source=seqio.TfdsDataSource(
tfds_name="xnli:1.1.0", splits=["validation", "test"]),
preprocessors=[
functools.partial(
preprocessors.process_xnli, target_languages=[xnli_lang]),
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
seqio.preprocessors.append_eos_after_trim,
],
output_features=output_features,
metric_fns=[metrics.accuracy])
if xnli_lang == "en":
continue
seqio.TaskRegistry.add(
f"{task_prefix}xnli_translate_train{task_suffix}.{xnli_lang}",
source=seqio.TfdsDataSource(
tfds_name="xtreme_xnli:1.1.0", splits=["train"]),
preprocessors=[
functools.partial(
preprocessors.process_xnli, target_languages=[xnli_lang]),
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
seqio.preprocessors.append_eos_after_trim,
],
output_features=output_features,
metric_fns=[metrics.accuracy])
seqio.TaskRegistry.add(
f"{task_prefix}xnli_dev_test{task_suffix}.all_langs",
source=seqio.TfdsDataSource(
tfds_name="xnli:1.1.0", splits=["validation", "test"]),
preprocessors=[
functools.partial(
preprocessors.process_xnli, target_languages=XNLI_LANGS),
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
seqio.preprocessors.append_eos_after_trim,
],
output_features=output_features,
metric_fns=[metrics.accuracy])
xnli_zeroshot = ([
f"{task_prefix}xnli_train{task_suffix}",
f"{task_prefix}xnli_dev_test{task_suffix}.all_langs"
] + [
f"{task_prefix}xnli_dev_test{task_suffix}.{lang}" for lang in XNLI_LANGS
])
seqio.MixtureRegistry.add(
f"{task_prefix}xnli_zeroshot{task_suffix}",
xnli_zeroshot,
default_rate=1.0)
xnli_translate_train = xnli_zeroshot + [
f"{task_prefix}xnli_translate_train{task_suffix}.{lang}"
for lang in XNLI_LANGS
if lang != "en"
]
seqio.MixtureRegistry.add(
f"{task_prefix}xnli_translate_train{task_suffix}",
xnli_translate_train,
default_rate=1.0)
create_xnli_tasks_and_mixtures(
task_prefix="mt5_", task_suffix="", output_features=DEFAULT_OUTPUT_FEATURES)
# ----- PAWS -----
label_names = ["different_meaning", "paraphrase"]
text_preprocessor = functools.partial(
t5.data.preprocessors.glue,
benchmark_name="paws",
label_names=label_names,
feature_names=["sentence1", "sentence2"],
id_key=None)
postprocess_fn = functools.partial(
t5.data.postprocessors.string_label_to_class_id,
label_classes=label_names)
seqio.TaskRegistry.add(
"mt5_paws",
source=seqio.TfdsDataSource(
tfds_name="paws_x_wiki/en:1.0.0", splits=["train"]),
preprocessors=[
text_preprocessor,
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
seqio.preprocessors.append_eos_after_trim,
],
output_features=DEFAULT_OUTPUT_FEATURES,
postprocess_fn=postprocess_fn,
metric_fns=[metrics.accuracy])
for lang in utils.PAWSX_LANGS:
seqio.TaskRegistry.add(
"mt5_pawsx_dev_test.{}".format(lang),
source=seqio.TfdsDataSource(
tfds_name="paws_x_wiki/{}:1.0.0".format(lang),
splits=["validation", "test"]),
preprocessors=[
text_preprocessor,
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
seqio.preprocessors.append_eos_after_trim,
],
output_features=DEFAULT_OUTPUT_FEATURES,
postprocess_fn=postprocess_fn,
metric_fns=[metrics.accuracy])
# This uses machine translations provided by the PAWS-X paper.
seqio.TaskRegistry.add(
"mt5_pawsx_translate_train_original.{}".format(lang),
source=seqio.TfdsDataSource(
tfds_name="paws_x_wiki/{}:1.0.0".format(lang), splits=["train"]),
preprocessors=[
text_preprocessor,
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
seqio.preprocessors.append_eos_after_trim,
],
output_features=DEFAULT_OUTPUT_FEATURES,
postprocess_fn=postprocess_fn,
metric_fns=[metrics.accuracy])
if lang != "en":
# This uses machine translations provided by the XTREME paper.
seqio.TaskRegistry.add(
"mt5_pawsx_translate_train.{}".format(lang),
source=seqio.TfdsDataSource(
tfds_name="xtreme_pawsx/{}:1.0.0".format(lang), splits=["train"]),
preprocessors=[
text_preprocessor,
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
seqio.preprocessors.append_eos_after_trim,
],
output_features=DEFAULT_OUTPUT_FEATURES,
postprocess_fn=postprocess_fn,
metric_fns=[metrics.accuracy])
seqio.TaskRegistry.add(
"mt5_pawsx_dev_test.all_langs",
source=seqio.FunctionDataSource(
dataset_fn=utils.pawsx_all_langs_dataset_fn,
splits=["validation", "test"]),
preprocessors=[
text_preprocessor,
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
seqio.preprocessors.append_eos_after_trim,
],
output_features=DEFAULT_OUTPUT_FEATURES,
postprocess_fn=postprocess_fn,
metric_fns=[metrics.accuracy])
# PAWSX Zero-Shot
pawsx_eval = [
"mt5_pawsx_dev_test.{}".format(lang) for lang in utils.PAWSX_LANGS
] + ["mt5_pawsx_dev_test.all_langs"]
pawsx = ["mt5_paws"] + pawsx_eval
seqio.MixtureRegistry.add("mt5_pawsx_zeroshot", pawsx, default_rate=1.0)
pawsx_translate_train = ["mt5_paws"] + [
"mt5_pawsx_translate_train.{}".format(lang)
for lang in utils.PAWSX_LANGS
if lang != "en"
] + pawsx_eval
seqio.MixtureRegistry.add(
"mt5_pawsx_translate_train", pawsx_translate_train, default_rate=1.0)
pawsx_translate_train_original = [
"mt5_pawsx_translate_train_original.{}".format(lang)
for lang in utils.PAWSX_LANGS
] + pawsx_eval
seqio.MixtureRegistry.add(
"mt5_pawsx_translate_train_original",
    pawsx_translate_train_original,
default_rate=1.0)
# ----- TyDiQA GoldP-----
# The "validation" split contains all the validation examples for all the
# individual languages together.
TYDIQA_LANGS = ["ar", "bn", "en", "fi", "id", "ko", "ru", "sw", "te"]
seqio.TaskRegistry.add(
"mt5_tydiqa_train_dev",
source=seqio.TfdsDataSource(
tfds_name="tydi_qa/goldp:2.1.0", splits=["train", "validation"]),
preprocessors=[
preprocessors.xquad,
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
seqio.preprocessors.append_eos_after_trim,
],
postprocess_fn=t5.data.postprocessors.qa,
output_features=DEFAULT_OUTPUT_FEATURES,
metric_fns=[metrics.squad])
for lang in TYDIQA_LANGS:
seqio.TaskRegistry.add(
"mt5_tydiqa_dev.{}".format(lang),
source=seqio.TfdsDataSource(
tfds_name="tydi_qa/goldp:2.1.0",
splits={"validation": "validation-{}".format(lang)}),
preprocessors=[
preprocessors.xquad,
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
seqio.preprocessors.append_eos_after_trim,
],
postprocess_fn=t5.data.postprocessors.qa,
output_features=DEFAULT_OUTPUT_FEATURES,
metric_fns=[metrics.squad])
tydiqa = (["mt5_tydiqa_train_dev"] +
["mt5_tydiqa_dev.{}".format(lang) for lang in TYDIQA_LANGS])
seqio.MixtureRegistry.add("mt5_tydiqa", tydiqa, default_rate=1.0)
# ----- TyDiQA GoldP Zero-Shot-----
# This Zero-Shot setting matches the XTREME setup, where training is done on
# the English data of TyDiQA. In the TyDiQA paper, fine-tuning was done on
# SQuAD for zero-shot evaluation.
TYDIQA_LANGS = ["ar", "bn", "en", "fi", "id", "ko", "ru", "sw", "te"]
seqio.TaskRegistry.add(
"mt5_tydiqa_train.en",
source=seqio.TfdsDataSource(
tfds_name="tydi_qa/goldp:2.1.0", splits=["train"]),
preprocessors=[
preprocessors.xquad,
functools.partial(
preprocessors.filter_tydiqa_by_language, lang="english"),
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
seqio.preprocessors.append_eos_after_trim,
],
postprocess_fn=t5.data.postprocessors.qa,
output_features=DEFAULT_OUTPUT_FEATURES,
metric_fns=[metrics.squad])
tydiqa_zeroshot = (["mt5_tydiqa_train.en"] +
["mt5_tydiqa_dev.{}".format(lang) for lang in TYDIQA_LANGS])
seqio.MixtureRegistry.add(
"mt5_tydiqa_zeroshot", tydiqa_zeroshot, default_rate=1.0)
# Defining translate-train tasks.
for lang in TYDIQA_LANGS:
# Skipping English, since translate-train is not available.
if lang == "en":
continue
seqio.TaskRegistry.add(
"mt5_tydiqa_translate_train.{}".format(lang),
source=seqio.TfdsDataSource(
tfds_name="tydi_qa/goldp:2.1.0",
splits={"train": "translate-train-{}".format(lang)}),
preprocessors=[
preprocessors.xquad,
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
seqio.preprocessors.append_eos_after_trim,
],
postprocess_fn=t5.data.postprocessors.qa,
output_features=DEFAULT_OUTPUT_FEATURES,
metric_fns=[metrics.squad])
tydiqa_translate_train = (
["mt5_tydiqa_train.en"]
+ [f"mt5_tydiqa_translate_train.{lang}"
for lang in TYDIQA_LANGS if lang != "en"]
+ [f"mt5_tydiqa_dev.{lang}" for lang in TYDIQA_LANGS])
seqio.MixtureRegistry.add(
"mt5_tydiqa_translate_train", tydiqa_translate_train, default_rate=1.0)
# ----- XQuAD -----
def create_xquad_tasks_and_mixtures(task_prefix, task_suffix, output_features):
"""Helper function for XQuad tasks and mixtures."""
# ----- English SQUAD -----
seqio.TaskRegistry.add(
f"{task_prefix}squad_train_dev{task_suffix}",
source=seqio.TfdsDataSource(
tfds_name="squad/v1.1:3.0.0", splits=["train", "validation"]),
preprocessors=[
preprocessors.xquad,
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
seqio.preprocessors.append_eos_after_trim,
],
postprocess_fn=t5.data.postprocessors.qa,
output_features=output_features,
metric_fns=[metrics.squad])
for xquad_lang in utils.XQUAD_LANGS_TRAIN_DEV:
seqio.TaskRegistry.add(
f"{task_prefix}xquad_translate_train_dev{task_suffix}.{xquad_lang}",
source=seqio.TfdsDataSource(
tfds_name="xquad/{}:3.0.0".format(xquad_lang),
splits={
"train": "translate-train",
"validation": "translate-dev"
}),
preprocessors=[
preprocessors.xquad,
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
seqio.preprocessors.append_eos_after_trim,
],
postprocess_fn=t5.data.postprocessors.qa,
output_features=output_features,
metric_fns=[metrics.squad])
for xquad_lang in utils.XQUAD_LANGS_TEST:
seqio.TaskRegistry.add(
f"{task_prefix}xquad_test{task_suffix}.{xquad_lang}",
source=seqio.TfdsDataSource(
tfds_name=f"xquad/{xquad_lang}:3.0.0", splits=["test"]),
preprocessors=[
preprocessors.xquad,
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
seqio.preprocessors.append_eos_after_trim,
],
postprocess_fn=t5.data.postprocessors.qa,
output_features=output_features,
metric_fns=[metrics.squad])
# Additional test task containing all the languages.
seqio.TaskRegistry.add(
f"{task_prefix}xquad_test{task_suffix}.all_langs",
source=seqio.FunctionDataSource(
dataset_fn=utils.xquad_all_langs_dataset_fn, splits=["test"]),
preprocessors=[
preprocessors.xquad,
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
seqio.preprocessors.append_eos_after_trim,
],
postprocess_fn=t5.data.postprocessors.qa,
output_features=output_features,
metric_fns=[metrics.squad])
# XQuAD Zero-Shot (SQuAD train, SQuAD dev, XQuAD test).
xquad_test = ([f"{task_prefix}xquad_test{task_suffix}.{xquad_lang}"
for xquad_lang in utils.XQUAD_LANGS_TEST])
xquad_zeroshot = ([f"{task_prefix}squad_train_dev{task_suffix}",
f"{task_prefix}xquad_test{task_suffix}.all_langs"] +
xquad_test)
seqio.MixtureRegistry.add(
f"{task_prefix}xquad_zeroshot{task_suffix}",
xquad_zeroshot,
default_rate=1.0)
# XQuAD Translate-Train (English SQuAD, XQuAD translate-train,
# XQuAD translate-dev, XQuAD test)
# Note that the QA translate-train baselines from Hu et al (XTREME)
# do not include the English data. However, Fang et al (FILTER) do include
# English data.
xquad_translate_train = [
f"{task_prefix}xquad_translate_train_dev{task_suffix}.{xquad_lang}"
for xquad_lang in utils.XQUAD_LANGS_TRAIN_DEV
] + [f"{task_prefix}squad_train_dev{task_suffix}"
] + [f"{task_prefix}xquad_test{task_suffix}.all_langs"] + xquad_test
seqio.MixtureRegistry.add(
f"{task_prefix}xquad_translate_train{task_suffix}",
xquad_translate_train,
default_rate=1.0)
create_xquad_tasks_and_mixtures(
task_prefix="mt5_", task_suffix="", output_features=DEFAULT_OUTPUT_FEATURES)
# ----- MLQA -----
MLQA_LANGS = ["ar", "de", "en", "es", "hi", "vi", "zh"]
for lang in MLQA_LANGS:
seqio.TaskRegistry.add(
"mt5_mlqa_dev_test.{}".format(lang),
source=seqio.TfdsDataSource(
tfds_name="mlqa/{}:1.0.0".format(lang), splits=["validation",
"test"]),
preprocessors=[
preprocessors.xquad,
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
seqio.preprocessors.append_eos_after_trim,
],
postprocess_fn=t5.data.postprocessors.qa,
output_features=DEFAULT_OUTPUT_FEATURES,
metric_fns=[functools.partial(mt5_metrics.mlqa, lang=lang)])
# MLQA Zero-Shot
mlqa_dev_test = [f"mt5_mlqa_dev_test.{lang}" for lang in MLQA_LANGS]
mlqa_zeroshot = ["mt5_squad_train_dev"] + mlqa_dev_test
seqio.MixtureRegistry.add("mt5_mlqa_zeroshot", mlqa_zeroshot, default_rate=1.0)
# MLQA Translate-Train
mlqa_translate_train = [
"mt5_xquad_translate_train_dev.{}".format(lang)
for lang in MLQA_LANGS
if lang != "en"
] + ["mt5_squad_train_dev"] + mlqa_dev_test
seqio.MixtureRegistry.add(
"mt5_mlqa_translate_train", mlqa_translate_train, default_rate=1.0)
# ----- WikiAnn NER -----
NER_LANGS = [
"af", "ar", "bg", "bn", "de", "el", "en", "es", "et", "eu", "fa", "fi",
"fr", "he", "hi", "hu", "id", "it", "ja", "jv", "ka", "kk", "ko", "ml",
"mr", "ms", "my", "nl", "pt", "ru", "sw", "ta", "te", "th", "tl", "tr",
"ur", "vi", "yo", "zh"
]
for lang in NER_LANGS:
seqio.TaskRegistry.add(
"mt5_ner_train.{}".format(lang),
source=seqio.TfdsDataSource(
tfds_name="wikiann/{}:1.0.0".format(lang), splits=["train"]),
preprocessors=[
preprocessors.wikiann,
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
seqio.preprocessors.append_eos_after_trim,
],
output_features=DEFAULT_OUTPUT_FEATURES,
metric_fns=[mt5_metrics.span_f1])
seqio.TaskRegistry.add(
"mt5_ner_eval.{}".format(lang),
source=seqio.TfdsDataSource(
tfds_name="wikiann/{}:1.0.0".format(lang),
splits=["validation", "test"]),
preprocessors=[
preprocessors.wikiann,
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
seqio.preprocessors.append_eos_after_trim,
],
output_features=DEFAULT_OUTPUT_FEATURES,
metric_fns=[mt5_metrics.span_f1])
# NER zero-shot
seqio.MixtureRegistry.add(
"mt5_ner_zeroshot", ["mt5_ner_train.{}".format("en")] +
["mt5_ner_eval.{}".format(lang) for lang in NER_LANGS],
default_rate=1.0)
# NER multilingual
seqio.MixtureRegistry.add(
"mt5_ner_multilingual",
["mt5_ner_train.{}".format(lang) for lang in NER_LANGS] +
["mt5_ner_eval.{}".format(lang) for lang in NER_LANGS],
default_rate=1.0)
# ----- GLUE -----
# The GLUE tasks are already present in the task registry from importing the
# t5.data.tasks file. However, those tasks and mixtures use the T5 SentencePiece
# vocabulary; the tasks and mixture registered here use the mT5 vocabulary.
for b in tfds.text.glue.Glue.builder_configs.values():
seqio.TaskRegistry.add(
"mt5_glue_%s_v002" % b.name,
source=seqio.TfdsDataSource(
tfds_name="glue/%s:1.0.0" % b.name,
splits=["test"] if b.name == "ax" else None),
preprocessors=[
t5.data.glue_utils.get_glue_text_preprocessor(b),
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
seqio.preprocessors.append_eos_after_trim,
],
metric_fns=t5.data.glue_utils.get_glue_metric(b.name),
output_features=DEFAULT_OUTPUT_FEATURES,
postprocess_fn=t5.data.glue_utils.get_glue_postprocess_fn(b))
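# Reuse the standard GLUE mixture weights, re-keyed to the "mt5_"-prefixed task names.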
seqio.MixtureRegistry.add(
"mt5_glue_v002_proportional",
list({
f"mt5_{k}": v
for k, v in t5.data.glue_utils.get_glue_weight_mapping().items()
}.items()))
# ============== Summarization ==============
# ----- CNN/DailyMail -----
seqio.TaskRegistry.add(
"mt5_cnn_dailymail_v002",
source=seqio.TfdsDataSource(tfds_name="cnn_dailymail:3.1.0"),
preprocessors=[
functools.partial(
t5.data.preprocessors.summarize,
article_key="article",
summary_key="highlights"),
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
seqio.preprocessors.append_eos_after_trim,
],
metric_fns=[metrics.rouge],
output_features=DEFAULT_OUTPUT_FEATURES)
# ----- GEM-XSum -----
_rouge_fn = functools.partial(
metrics.rouge,
score_keys=["rouge1", "rouge2", "rougeL", "rougeLsum"])
seqio.TaskRegistry.add(
"mt5_gem_xsum",
source=seqio.TfdsDataSource(tfds_name="gem/xsum:1.0.1"),
preprocessors=[
functools.partial(
t5.data.preprocessors.summarize,
article_key="document",
summary_key="target"),
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
seqio.preprocessors.append_eos_after_trim,
],
metric_fns=[metrics.bleu, _rouge_fn],
output_features=DEFAULT_OUTPUT_FEATURES,
)
# ----- SuperGLUE -----
# The SuperGLUE tasks are already present in the task registry from importing
# the t5.data.tasks file. However, those tasks and mixtures use the T5 SentencePiece
# vocabulary; the tasks and mixture registered here use the mT5 vocabulary.
for b in tfds.text.super_glue.SuperGlue.builder_configs.values():
# We use a simplified version of WSC, defined below
if "wsc" in b.name:
continue
if b.name == "axb":
text_preprocessor = [
functools.partial(
t5.data.preprocessors.rekey,
key_map={
"premise": "sentence1",
"hypothesis": "sentence2",
"label": "label",
"idx": "idx",
}),
t5.data.glue_utils.get_glue_text_preprocessor(b),
]
else:
text_preprocessor = [t5.data.glue_utils.get_glue_text_preprocessor(b)]
seqio.TaskRegistry.add(
"mt5_super_glue_%s_v102" % b.name,
source=seqio.TfdsDataSource(
tfds_name="super_glue/%s:1.0.2" % b.name,
splits=["test"] if b.name in ["axb", "axg"] else None),
preprocessors=text_preprocessor + [
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
seqio.preprocessors.append_eos_after_trim,
],
metric_fns=t5.data.glue_utils.get_super_glue_metric(b.name),
output_features=DEFAULT_OUTPUT_FEATURES,
postprocess_fn=t5.data.glue_utils.get_glue_postprocess_fn(b))
# ----- DPR -----
seqio.TaskRegistry.add(
"mt5_dpr_v001_simple",
source=seqio.TfdsDataSource(tfds_name="definite_pronoun_resolution:1.1.0"),
preprocessors=[
t5.data.preprocessors.definite_pronoun_resolution_simple,
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
seqio.preprocessors.append_eos_after_trim,
],
metric_fns=[metrics.accuracy],
output_features=DEFAULT_OUTPUT_FEATURES)
# ------ WSC ----------
seqio.TaskRegistry.add(
"mt5_super_glue_wsc_v102_simple_train",
source=seqio.TfdsDataSource(
tfds_name="super_glue/wsc.fixed:1.0.2", splits=["train"]),
preprocessors=[
functools.partial(
t5.data.preprocessors.wsc_simple, correct_referent_only=True),
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
seqio.preprocessors.append_eos_after_trim,
],
metric_fns=[],
output_features=DEFAULT_OUTPUT_FEATURES)
seqio.TaskRegistry.add(
"mt5_super_glue_wsc_v102_simple_eval",
source=seqio.TfdsDataSource(
tfds_name="super_glue/wsc.fixed:1.0.2", splits=["validation", "test"]),
preprocessors=[
functools.partial(
t5.data.preprocessors.wsc_simple, correct_referent_only=False),
seqio.preprocessors.tokenize,
seqio.CacheDatasetPlaceholder(),
seqio.preprocessors.append_eos_after_trim,
],
postprocess_fn=t5.data.postprocessors.wsc_simple,
metric_fns=[metrics.accuracy],
output_features=DEFAULT_OUTPUT_FEATURES)
_mt5_super_glue_tasks = {}
for task, value in t5.data.get_super_glue_weight_mapping().items():
_mt5_super_glue_tasks["mt5_" + task] = value
seqio.MixtureRegistry.add("mt5_super_glue_v102_proportional",
list(_mt5_super_glue_tasks.items()))
|
|
from __future__ import absolute_import, division, print_function
import argparse
import inspect
import json
import optparse
import os
import tempfile
from imp import reload
import mock
import pytest
import stomp as stomppy
import workflows
import workflows.transport
from workflows.transport.stomp_transport import StompTransport
def test_lookup_and_initialize_stomp_transport_layer():
'''Find the stomp transport layer via the lookup mechanism and run
its constructor with default settings.'''
stomp = workflows.transport.lookup("StompTransport")
assert stomp == StompTransport
stomp()
def test_add_command_line_help_optparse():
'''Check that command line parameters are registered in the parser.'''
parser = mock.MagicMock()
StompTransport().add_command_line_options(parser)
parser.add_argument.assert_not_called()
parser.add_option.assert_called()
assert parser.add_option.call_count > 4
for call in parser.add_option.call_args_list:
assert call[1]['action'] == 'callback'
def test_add_command_line_help_argparse():
'''Check that command line parameters are registered in the parser.'''
parser = mock.MagicMock()
parser.add_argument = mock.Mock()
StompTransport().add_command_line_options(parser)
parser.add_argument.assert_called()
parser.add_option.assert_not_called()
assert parser.add_argument.call_count > 4
for call in parser.add_argument.call_args_list:
assert inspect.isclass(call[1]['action'])
def test_adding_arguments_to_argparser():
'''Check that command line parameters can be added to the parser.'''
parser = argparse.ArgumentParser()
StompTransport().add_command_line_options(parser)
result = parser.parse_args([])
assert result.stomp_host
assert result.stomp_port
assert result.stomp_user
assert result.stomp_pass
@mock.patch('workflows.transport.stomp_transport.stomp')
def test_check_config_file_behaviour(mockstomp):
'''Check that a specified configuration file is read, that command line
parameters have precedence and are passed on to the stomp layer.'''
mockconn = mock.Mock()
mockstomp.Connection.return_value = mockconn
parser = optparse.OptionParser()
stomp = StompTransport()
stomp.add_command_line_options(parser)
# Temporarily create an example stomp configuration file
cfgfile = tempfile.NamedTemporaryFile(delete=False)
try:
cfgfile.write('''
# An example stomp configuration file
# Only lines in the [stomp] block will be interpreted
[stomp]
#host = 127.0.0.1
port = 1234
username = someuser
password = somesecret
prefix = namespace
'''.encode('utf-8'))
cfgfile.close()
parser.parse_args([
'--stomp-conf', cfgfile.name,
'--stomp-user', mock.sentinel.user])
# Command line parameters are shared for all instances
stomp = StompTransport()
stomp.connect()
# Reset configuration for subsequent tests by reloading StompTransport
reload(workflows.transport.stomp_transport)
globals()['StompTransport'] = workflows.transport.stomp_transport.StompTransport
mockstomp.Connection.assert_called_once_with([('localhost', 1234)])
mockconn.connect.assert_called_once_with(mock.sentinel.user, 'somesecret', wait=False)
assert stomp.get_namespace() == 'namespace'
finally:
os.remove(cfgfile.name)
# Loading a non-existing configuration file
with pytest.raises(workflows.Error):
parser.parse_args(['--stomp-conf', ''])
@mock.patch('workflows.transport.stomp_transport.stomp')
def test_anonymous_connection(mockstomp):
  '''Check that a connection can be made anonymously, i.e. without
     specifying a username or password.'''
mockconn = mock.Mock()
mockstomp.Connection.return_value = mockconn
parser = optparse.OptionParser()
stomp = StompTransport()
stomp.add_command_line_options(parser)
parser.parse_args([ '--stomp-user=', '--stomp-pass=' ])
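  # Empty values clear the stored username/password, so the connection is made anonymously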
# Command line parameters are shared for all instances
stomp = StompTransport()
stomp.connect()
# Reset configuration for subsequent tests by reloading StompTransport
reload(workflows.transport.stomp_transport)
globals()['StompTransport'] = workflows.transport.stomp_transport.StompTransport
mockconn.connect.assert_called_once_with(wait=False)
@mock.patch('workflows.transport.stomp_transport.stomp')
def test_instantiate_link_and_connect_to_broker(mockstomp):
'''Test the Stomp connection routine.'''
stomp = StompTransport()
mockconn = mockstomp.Connection.return_value
assert not stomp.is_connected()
stomp.connect()
mockstomp.Connection.assert_called_once()
mockconn.start.assert_called_once()
mockconn.connect.assert_called_once()
assert stomp.is_connected()
stomp.connect()
mockstomp.Connection.assert_called_once()
mockconn.start.assert_called_once()
mockconn.connect.assert_called_once()
assert stomp.is_connected()
stomp.disconnect()
mockstomp.Connection.assert_called_once()
mockconn.start.assert_called_once()
mockconn.connect.assert_called_once()
mockconn.disconnect.assert_called_once()
assert not stomp.is_connected()
stomp.disconnect()
mockstomp.Connection.assert_called_once()
mockconn.start.assert_called_once()
mockconn.connect.assert_called_once()
mockconn.disconnect.assert_called_once()
assert not stomp.is_connected()
@mock.patch('workflows.transport.stomp_transport.stomp')
def test_error_handling_when_connecting_to_broker(mockstomp):
  '''Test error handling in the Stomp connection routine.'''
stomp = StompTransport()
mockconn = mockstomp.Connection.return_value
mockconn.start.side_effect = stomppy.exception.ConnectFailedException()
mockstomp.exception = stomppy.exception
with pytest.raises(workflows.Disconnected):
stomp.connect()
assert not stomp.is_connected()
mockconn.start.side_effect = None
mockconn.connect.side_effect = stomppy.exception.ConnectFailedException()
with pytest.raises(workflows.AuthenticationFailed):
stomp.connect()
assert not stomp.is_connected()
@mock.patch('workflows.transport.stomp_transport.time')
@mock.patch('workflows.transport.stomp_transport.stomp')
def test_broadcast_status(mockstomp, mocktime):
'''Test the status broadcast function.'''
mocktime.time.return_value = 20000
stomp = StompTransport()
stomp.connect()
mockconn = mockstomp.Connection.return_value
stomp.broadcast_status({ 'status': str(mock.sentinel.status) })
mockconn.send.assert_called_once()
args, kwargs = mockconn.send.call_args
# expiration should be 15 seconds in the future
assert int(kwargs['headers']['expires']) == 1000 * (20000 + 15)
destination, message = args
assert destination.startswith('/topic/transient.status')
statusdict = json.loads(message)
assert statusdict['status'] == str(mock.sentinel.status)
@mock.patch('workflows.transport.stomp_transport.stomp')
def test_send_message(mockstomp):
'''Test the message sending function.'''
stomp = StompTransport()
stomp.connect()
mockconn = mockstomp.Connection.return_value
stomp._send( str(mock.sentinel.channel), mock.sentinel.message )
mockconn.send.assert_called_once()
args, kwargs = mockconn.send.call_args
assert args == ('/queue/' + str(mock.sentinel.channel), mock.sentinel.message)
assert kwargs.get('headers') == { 'persistent': 'true' }
stomp._send( str(mock.sentinel.channel), mock.sentinel.message,
headers={ 'hdr': mock.sentinel.header }, delay=123 )
assert mockconn.send.call_count == 2
args, kwargs = mockconn.send.call_args
assert args == ('/queue/' + str(mock.sentinel.channel), mock.sentinel.message)
assert kwargs == { 'headers': { 'hdr': mock.sentinel.header,
'persistent': 'true',
'AMQ_SCHEDULED_DELAY': 123000 } }
@mock.patch('workflows.transport.stomp_transport.stomp')
@mock.patch('workflows.transport.stomp_transport.time')
def test_sending_message_with_expiration(time, mockstomp):
'''Test sending a message that expires some time in the future.'''
system_time = 1234567.1234567
message_lifetime = 120
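  # The 'expires' header carries an absolute expiry timestamp in milliseconds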
expiration_time = int((system_time + message_lifetime) * 1000)
time.time.return_value = system_time
stomp = StompTransport()
stomp.connect()
mockconn = mockstomp.Connection.return_value
stomp._send( str(mock.sentinel.channel), mock.sentinel.message,
expiration=120 )
mockconn.send.assert_called_once()
args, kwargs = mockconn.send.call_args
assert args == ('/queue/' + str(mock.sentinel.channel), mock.sentinel.message)
assert kwargs.get('headers') == { 'persistent': 'true', 'expires': expiration_time }
@mock.patch('workflows.transport.stomp_transport.stomp')
def test_error_handling_on_send(mockstomp):
'''Unrecoverable errors during sending should mark the connection as disconnected.'''
stomp = StompTransport()
stomp.connect()
mockconn = mockstomp.Connection.return_value
mockconn.send.side_effect = stomppy.exception.NotConnectedException()
mockstomp.exception = stomppy.exception
with pytest.raises(workflows.Disconnected):
stomp._send( str(mock.sentinel.channel), mock.sentinel.message )
assert not stomp.is_connected()
@mock.patch('workflows.transport.stomp_transport.stomp')
def test_send_broadcast(mockstomp):
'''Test the broadcast sending function.'''
stomp = StompTransport()
stomp.connect()
mockconn = mockstomp.Connection.return_value
stomp._broadcast( str(mock.sentinel.channel), mock.sentinel.message )
mockconn.send.assert_called_once()
args, kwargs = mockconn.send.call_args
assert args == ('/topic/' + str(mock.sentinel.channel), mock.sentinel.message)
assert kwargs.get('headers') in (None, {})
stomp._broadcast( str(mock.sentinel.channel), mock.sentinel.message, headers=mock.sentinel.headers )
assert mockconn.send.call_count == 2
args, kwargs = mockconn.send.call_args
assert args == ('/topic/' + str(mock.sentinel.channel), mock.sentinel.message)
assert kwargs == { 'headers': mock.sentinel.headers }
stomp._broadcast( str(mock.sentinel.channel), mock.sentinel.message, delay=123 )
assert mockconn.send.call_count == 3
args, kwargs = mockconn.send.call_args
assert args == ('/topic/' + str(mock.sentinel.channel), mock.sentinel.message)
assert kwargs['headers'].get('AMQ_SCHEDULED_DELAY') == 123000
@mock.patch('workflows.transport.stomp_transport.stomp')
@mock.patch('workflows.transport.stomp_transport.time')
def test_broadcasting_message_with_expiration(time, mockstomp):
'''Test sending a message that expires some time in the future.'''
system_time = 1234567.1234567
message_lifetime = 120
expiration_time = int((system_time + message_lifetime) * 1000)
time.time.return_value = system_time
stomp = StompTransport()
stomp.connect()
mockconn = mockstomp.Connection.return_value
stomp._broadcast( str(mock.sentinel.channel), mock.sentinel.message,
expiration=120 )
mockconn.send.assert_called_once()
args, kwargs = mockconn.send.call_args
assert args == ('/topic/' + str(mock.sentinel.channel), mock.sentinel.message)
assert kwargs.get('headers') == { 'expires': expiration_time }
@mock.patch('workflows.transport.stomp_transport.stomp')
def test_error_handling_on_broadcast(mockstomp):
'''Unrecoverable errors during broadcasting should mark the connection as disconnected.'''
stomp = StompTransport()
stomp.connect()
mockconn = mockstomp.Connection.return_value
mockconn.send.side_effect = stomppy.exception.NotConnectedException()
mockstomp.exception = stomppy.exception
  with pytest.raises(workflows.Disconnected):
    stomp._broadcast( str(mock.sentinel.channel), mock.sentinel.message )
assert not stomp.is_connected()
@mock.patch('workflows.transport.stomp_transport.stomp')
def test_messages_are_serialized_for_transport(mockstomp):
'''Test the message serialization.'''
banana = { 'entry': [ 0, 'banana' ] }
banana_str = '{"entry": [0, "banana"]}'
stomp = StompTransport()
stomp.connect()
mockconn = mockstomp.Connection.return_value
stomp.send(str(mock.sentinel.channel1), banana)
mockconn.send.assert_called_once()
args, kwargs = mockconn.send.call_args
assert args == ('/queue/' + str(mock.sentinel.channel1), banana_str)
stomp.broadcast(str(mock.sentinel.channel2), banana)
args, kwargs = mockconn.send.call_args
assert args == ('/topic/' + str(mock.sentinel.channel2), banana_str)
with pytest.raises(Exception):
stomp.send(str(mock.sentinel.channel), mock.sentinel.unserializable)
@mock.patch('workflows.transport.stomp_transport.stomp')
def test_messages_are_not_serialized_for_raw_transport(mockstomp):
'''Test the raw sending methods.'''
banana = '{"entry": [0, "banana"]}'
stomp = StompTransport()
stomp.connect()
mockconn = mockstomp.Connection.return_value
stomp.raw_send(str(mock.sentinel.channel1), banana)
mockconn.send.assert_called_once()
args, kwargs = mockconn.send.call_args
assert args == ('/queue/' + str(mock.sentinel.channel1), banana)
mockconn.send.reset_mock()
stomp.raw_broadcast(str(mock.sentinel.channel2), banana)
mockconn.send.assert_called_once()
args, kwargs = mockconn.send.call_args
assert args == ('/topic/' + str(mock.sentinel.channel2), banana)
mockconn.send.reset_mock()
stomp.raw_send(str(mock.sentinel.channel), mock.sentinel.unserializable)
mockconn.send.assert_called_once()
args, kwargs = mockconn.send.call_args
assert args == ('/queue/' + str(mock.sentinel.channel), mock.sentinel.unserializable)
@mock.patch('workflows.transport.stomp_transport.stomp')
def test_messages_are_deserialized_after_transport(mockstomp):
'''Test the message serialization.'''
banana = { 'entry': [ 0, 'banana' ] }
banana_str = '{"entry": [0, "banana"]}'
stomp = StompTransport()
stomp.connect()
mockconn = mockstomp.Connection.return_value
message_handler = mockconn.set_listener.call_args[0][1].on_message
# Test subscriptions
callback = mock.Mock()
stomp.subscribe('channel', callback)
subscription_id = mockconn.subscribe.call_args[0][1]
message_handler({'subscription': subscription_id}, banana_str)
callback.assert_called_once_with({'subscription': subscription_id}, banana)
message_handler({'subscription': subscription_id}, mock.sentinel.undeserializable)
callback.assert_called_with({'subscription': subscription_id}, mock.sentinel.undeserializable)
# Test broadcast subscriptions
callback = mock.Mock()
stomp.subscribe_broadcast('channel', callback)
subscription_id = mockconn.subscribe.call_args[0][1]
message_handler({'subscription': subscription_id}, banana_str)
callback.assert_called_once_with({'subscription': subscription_id}, banana)
message_handler({'subscription': subscription_id}, mock.sentinel.undeserializable)
callback.assert_called_with({'subscription': subscription_id}, mock.sentinel.undeserializable)
# Test subscriptions with mangling disabled
callback = mock.Mock()
stomp.subscribe('channel', callback, disable_mangling=True)
subscription_id = mockconn.subscribe.call_args[0][1]
message_handler({'subscription': subscription_id}, banana_str)
callback.assert_called_once_with({'subscription': subscription_id}, banana_str)
# Test broadcast subscriptions with mangling disabled
callback = mock.Mock()
stomp.subscribe_broadcast('channel', callback, disable_mangling=True)
subscription_id = mockconn.subscribe.call_args[0][1]
message_handler({'subscription': subscription_id}, banana_str)
callback.assert_called_once_with({'subscription': subscription_id}, banana_str)
@mock.patch('workflows.transport.stomp_transport.stomp')
def test_subscribe_to_queue(mockstomp):
'''Test subscribing to a queue (producer-consumer), callback functions and unsubscribe.'''
mock_cb1 = mock.Mock()
mock_cb2 = mock.Mock()
stomp = StompTransport()
stomp.connect()
mockconn = mockstomp.Connection.return_value
def callback_resolver(cbid):
if cbid == 1: return mock_cb1
if cbid == 2: return mock_cb2
raise ValueError('Unknown subscription ID %r' % cbid)
stomp.subscription_callback = callback_resolver
mockconn.set_listener.assert_called_once()
listener = mockconn.set_listener.call_args[0][1]
assert listener is not None
stomp._subscribe(1, str(mock.sentinel.channel1), mock_cb1, transformation=mock.sentinel.transformation)
mockconn.subscribe.assert_called_once()
args, kwargs = mockconn.subscribe.call_args
assert args == ('/queue/' + str(mock.sentinel.channel1), 1)
assert kwargs == { 'headers': {'transformation': mock.sentinel.transformation}, 'ack': 'auto' }
stomp._subscribe(2, str(mock.sentinel.channel2), mock_cb2, retroactive=True, selector=mock.sentinel.selector, exclusive=True, transformation=True, priority=42)
assert mockconn.subscribe.call_count == 2
args, kwargs = mockconn.subscribe.call_args
assert args == ('/queue/' + str(mock.sentinel.channel2), 2)
assert kwargs == { 'headers': {'activemq.retroactive':'true', 'selector':mock.sentinel.selector, 'activemq.exclusive':'true', 'transformation': 'jms-object-json', 'activemq.priority':42}, 'ack': 'auto' }
assert mock_cb1.call_count == 0
listener.on_message({'subscription': 1}, mock.sentinel.message1)
mock_cb1.assert_called_once_with({'subscription': 1}, mock.sentinel.message1)
assert mock_cb2.call_count == 0
listener.on_message({'subscription': 2}, mock.sentinel.message2)
mock_cb2.assert_called_once_with({'subscription': 2}, mock.sentinel.message2)
stomp._subscribe(3, str(mock.sentinel.channel3), mock_cb2, acknowledgement=True)
assert mockconn.subscribe.call_count == 3
args, kwargs = mockconn.subscribe.call_args
assert args == ('/queue/' + str(mock.sentinel.channel3), 3)
assert kwargs == { 'headers': {}, 'ack': 'client-individual' }
stomp._unsubscribe(1)
mockconn.unsubscribe.assert_called_once_with(id=1)
stomp._unsubscribe(2)
mockconn.unsubscribe.assert_called_with(id=2)
@mock.patch('workflows.transport.stomp_transport.stomp')
def test_subscribe_to_broadcast(mockstomp):
'''Test subscribing to a topic (publish-subscribe) and callback functions.'''
mock_cb1 = mock.Mock()
mock_cb2 = mock.Mock()
stomp = StompTransport()
stomp.connect()
mockconn = mockstomp.Connection.return_value
def callback_resolver(cbid):
if cbid == 1: return mock_cb1
if cbid == 2: return mock_cb2
raise ValueError('Unknown subscription ID %r' % cbid)
stomp.subscription_callback = callback_resolver
mockconn.set_listener.assert_called_once()
listener = mockconn.set_listener.call_args[0][1]
assert listener is not None
stomp._subscribe_broadcast(1, str(mock.sentinel.channel1), mock_cb1, transformation=mock.sentinel.transformation)
mockconn.subscribe.assert_called_once()
args, kwargs = mockconn.subscribe.call_args
assert args == ('/topic/' + str(mock.sentinel.channel1), 1)
assert kwargs == { 'headers': {'transformation': mock.sentinel.transformation} }
stomp._subscribe_broadcast(2, str(mock.sentinel.channel2), mock_cb2, retroactive=True, transformation=True)
assert mockconn.subscribe.call_count == 2
args, kwargs = mockconn.subscribe.call_args
assert args == ('/topic/' + str(mock.sentinel.channel2), 2)
assert kwargs == { 'headers': {'activemq.retroactive':'true', 'transformation': 'jms-object-json'} }
assert mock_cb1.call_count == 0
listener.on_message({'subscription': 1}, mock.sentinel.message1)
mock_cb1.assert_called_once_with({'subscription': 1}, mock.sentinel.message1)
assert mock_cb2.call_count == 0
listener.on_message({'subscription': 2}, mock.sentinel.message2)
mock_cb2.assert_called_once_with({'subscription': 2}, mock.sentinel.message2)
stomp._unsubscribe(1)
mockconn.unsubscribe.assert_called_once_with(id=1)
stomp._unsubscribe(2)
mockconn.unsubscribe.assert_called_with(id=2)
@mock.patch('workflows.transport.stomp_transport.stomp')
def test_transaction_calls(mockstomp):
'''Test that calls to create, commit, abort transactions are passed to stomp properly.'''
stomp = StompTransport()
stomp.connect()
mockconn = mockstomp.Connection.return_value
stomp._transaction_begin(mock.sentinel.txid)
mockconn.begin.assert_called_once_with(transaction=mock.sentinel.txid)
stomp._send('destination', mock.sentinel.message, transaction=mock.sentinel.txid)
mockconn.send.assert_called_once_with('/queue/destination', mock.sentinel.message, headers={'persistent': 'true'}, transaction=mock.sentinel.txid)
stomp._transaction_abort(mock.sentinel.txid)
mockconn.abort.assert_called_once_with(mock.sentinel.txid)
stomp._transaction_commit(mock.sentinel.txid)
mockconn.commit.assert_called_once_with(mock.sentinel.txid)
@mock.patch('workflows.transport.stomp_transport.stomp')
def test_ack_message(mockstomp):
'''Test that the _ack function is properly forwarded to stomp.'''
stomp = StompTransport()
stomp.connect()
mockconn = mockstomp.Connection.return_value
subid = stomp._subscribe(1, str(mock.sentinel.channel3), None, acknowledgement=True)
stomp._ack(mock.sentinel.messageid, subid)
mockconn.ack.assert_called_once_with(mock.sentinel.messageid, subid)
stomp._ack(mock.sentinel.messageid, subid, transaction=mock.sentinel.txn)
mockconn.ack.assert_called_with(mock.sentinel.messageid, subid,
transaction=mock.sentinel.txn)
@mock.patch('workflows.transport.stomp_transport.stomp')
def test_nack_message(mockstomp):
'''Test that the _nack function is properly forwarded to stomp.'''
stomp = StompTransport()
stomp.connect()
mockconn = mockstomp.Connection.return_value
subid = stomp._subscribe(1, str(mock.sentinel.channel3), None, acknowledgement=True)
stomp._nack(mock.sentinel.messageid, subid)
mockconn.nack.assert_called_once_with(mock.sentinel.messageid, subid)
stomp._nack(mock.sentinel.messageid, subid, transaction=mock.sentinel.txn)
mockconn.nack.assert_called_with(mock.sentinel.messageid, subid,
transaction=mock.sentinel.txn)
@mock.patch('workflows.transport.stomp_transport.stomp')
def test_namespace_is_used_correctly(mockstomp):
'''Test that a configured namespace is correctly used when subscribing and sending messages.'''
mockconn = mockstomp.Connection.return_value
StompTransport.defaults['--stomp-prfx'] = ''
stomp = StompTransport()
stomp.connect()
assert stomp.get_namespace() == ''
StompTransport.defaults['--stomp-prfx'] = 'ns.'
stomp = StompTransport()
stomp.connect()
assert stomp.get_namespace() == 'ns'
stomp._send( 'some_queue', mock.sentinel.message1 )
mockconn.send.assert_called_once()
assert mockconn.send.call_args[0] == ('/queue/ns.some_queue', mock.sentinel.message1)
stomp._send( 'some_queue', mock.sentinel.message2, ignore_namespace=True )
assert mockconn.send.call_args[0] == ('/queue/some_queue', mock.sentinel.message2)
StompTransport.defaults['--stomp-prfx'] = 'ns'
stomp = StompTransport()
stomp.connect()
assert stomp.get_namespace() == 'ns'
stomp._send( 'some_queue', mock.sentinel.message1 )
assert mockconn.send.call_args[0] == ('/queue/ns.some_queue', mock.sentinel.message1)
stomp._broadcast( 'some_topic', mock.sentinel.message2 )
assert mockconn.send.call_args[0] == ('/topic/ns.some_topic', mock.sentinel.message2)
stomp._broadcast( 'some_topic', mock.sentinel.message3, ignore_namespace=True )
assert mockconn.send.call_args[0] == ('/topic/some_topic', mock.sentinel.message3)
stomp._subscribe( 1, 'sub_queue', None )
mockconn.subscribe.assert_called_once()
assert mockconn.subscribe.call_args[0] == ('/queue/ns.sub_queue', 1)
stomp._subscribe( 2, 'sub_queue', None, ignore_namespace=True )
assert mockconn.subscribe.call_args[0] == ('/queue/sub_queue', 2)
stomp._subscribe_broadcast( 3, 'sub_topic', None )
assert mockconn.subscribe.call_args[0] == ('/topic/ns.sub_topic', 3)
stomp._subscribe_broadcast( 4, 'sub_topic', None, ignore_namespace=True )
assert mockconn.subscribe.call_args[0] == ('/topic/sub_topic', 4)
stomp.broadcast_status('some status')
assert mockconn.send.call_args[0] == ('/topic/ns.transient.status', '"some status"')
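# Illustrative sketch (not part of the test module): the public StompTransport calls
# exercised by the tests above, in the order a non-test caller would use them. The
# queue/topic names are examples only; dictionaries are serialized to JSON on send.
#
# transport = StompTransport()
# transport.connect()
# transport.subscribe('example.queue', lambda header, message: print(message))
# transport.send('example.queue', {'payload': 42})
# transport.broadcast('example.topic', {'payload': 42})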
|
|
"""Tests for :mod:`numpy.core.fromnumeric`."""
import numpy as np
A = np.array(True, ndmin=2, dtype=bool)
B = np.array(1.0, ndmin=2, dtype=np.float32)
A.setflags(write=False)
B.setflags(write=False)
a = np.bool_(True)
b = np.float32(1.0)
c = 1.0
d = np.array(1.0, dtype=np.float32) # writeable
reveal_type(np.take(a, 0)) # E: Any
reveal_type(np.take(b, 0)) # E: Any
reveal_type(np.take(c, 0)) # E: Any
reveal_type(np.take(A, 0)) # E: Any
reveal_type(np.take(B, 0)) # E: Any
reveal_type(np.take(A, [0])) # E: Any
reveal_type(np.take(B, [0])) # E: Any
reveal_type(np.reshape(a, 1)) # E: numpy.ndarray[Any, Any]
reveal_type(np.reshape(b, 1)) # E: numpy.ndarray[Any, Any]
reveal_type(np.reshape(c, 1)) # E: numpy.ndarray[Any, Any]
reveal_type(np.reshape(A, 1)) # E: numpy.ndarray[Any, Any]
reveal_type(np.reshape(B, 1)) # E: numpy.ndarray[Any, Any]
reveal_type(np.choose(a, [True, True])) # E: Any
reveal_type(np.choose(A, [True, True])) # E: Any
reveal_type(np.repeat(a, 1)) # E: numpy.ndarray[Any, Any]
reveal_type(np.repeat(b, 1)) # E: numpy.ndarray[Any, Any]
reveal_type(np.repeat(c, 1)) # E: numpy.ndarray[Any, Any]
reveal_type(np.repeat(A, 1)) # E: numpy.ndarray[Any, Any]
reveal_type(np.repeat(B, 1)) # E: numpy.ndarray[Any, Any]
# TODO: Add tests for np.put()
reveal_type(np.swapaxes(A, 0, 0)) # E: numpy.ndarray[Any, Any]
reveal_type(np.swapaxes(B, 0, 0)) # E: numpy.ndarray[Any, Any]
reveal_type(np.transpose(a)) # E: numpy.ndarray[Any, Any]
reveal_type(np.transpose(b)) # E: numpy.ndarray[Any, Any]
reveal_type(np.transpose(c)) # E: numpy.ndarray[Any, Any]
reveal_type(np.transpose(A)) # E: numpy.ndarray[Any, Any]
reveal_type(np.transpose(B)) # E: numpy.ndarray[Any, Any]
reveal_type(np.partition(a, 0, axis=None)) # E: numpy.ndarray[Any, Any]
reveal_type(np.partition(b, 0, axis=None)) # E: numpy.ndarray[Any, Any]
reveal_type(np.partition(c, 0, axis=None)) # E: numpy.ndarray[Any, Any]
reveal_type(np.partition(A, 0)) # E: numpy.ndarray[Any, Any]
reveal_type(np.partition(B, 0)) # E: numpy.ndarray[Any, Any]
reveal_type(np.argpartition(a, 0)) # E: Any
reveal_type(np.argpartition(b, 0)) # E: Any
reveal_type(np.argpartition(c, 0)) # E: Any
reveal_type(np.argpartition(A, 0)) # E: Any
reveal_type(np.argpartition(B, 0)) # E: Any
reveal_type(np.sort(A, 0)) # E: numpy.ndarray[Any, Any]
reveal_type(np.sort(B, 0)) # E: numpy.ndarray[Any, Any]
reveal_type(np.argsort(A, 0)) # E: numpy.ndarray[Any, Any]
reveal_type(np.argsort(B, 0)) # E: numpy.ndarray[Any, Any]
reveal_type(np.argmax(A)) # E: {intp}
reveal_type(np.argmax(B)) # E: {intp}
reveal_type(np.argmax(A, axis=0)) # E: Any
reveal_type(np.argmax(B, axis=0)) # E: Any
reveal_type(np.argmin(A)) # E: {intp}
reveal_type(np.argmin(B)) # E: {intp}
reveal_type(np.argmin(A, axis=0)) # E: Any
reveal_type(np.argmin(B, axis=0)) # E: Any
reveal_type(np.searchsorted(A[0], 0)) # E: {intp}
reveal_type(np.searchsorted(B[0], 0)) # E: {intp}
reveal_type(np.searchsorted(A[0], [0])) # E: numpy.ndarray[Any, Any]
reveal_type(np.searchsorted(B[0], [0])) # E: numpy.ndarray[Any, Any]
reveal_type(np.resize(a, (5, 5))) # E: numpy.ndarray[Any, Any]
reveal_type(np.resize(b, (5, 5))) # E: numpy.ndarray[Any, Any]
reveal_type(np.resize(c, (5, 5))) # E: numpy.ndarray[Any, Any]
reveal_type(np.resize(A, (5, 5))) # E: numpy.ndarray[Any, Any]
reveal_type(np.resize(B, (5, 5))) # E: numpy.ndarray[Any, Any]
reveal_type(np.squeeze(a)) # E: numpy.bool_
reveal_type(np.squeeze(b)) # E: {float32}
reveal_type(np.squeeze(c)) # E: numpy.ndarray[Any, Any]
reveal_type(np.squeeze(A)) # E: numpy.ndarray[Any, Any]
reveal_type(np.squeeze(B)) # E: numpy.ndarray[Any, Any]
reveal_type(np.diagonal(A)) # E: numpy.ndarray[Any, Any]
reveal_type(np.diagonal(B)) # E: numpy.ndarray[Any, Any]
reveal_type(np.trace(A)) # E: Any
reveal_type(np.trace(B)) # E: Any
reveal_type(np.ravel(a)) # E: numpy.ndarray[Any, Any]
reveal_type(np.ravel(b)) # E: numpy.ndarray[Any, Any]
reveal_type(np.ravel(c)) # E: numpy.ndarray[Any, Any]
reveal_type(np.ravel(A)) # E: numpy.ndarray[Any, Any]
reveal_type(np.ravel(B)) # E: numpy.ndarray[Any, Any]
reveal_type(np.nonzero(a)) # E: tuple[numpy.ndarray[Any, Any]]
reveal_type(np.nonzero(b)) # E: tuple[numpy.ndarray[Any, Any]]
reveal_type(np.nonzero(c)) # E: tuple[numpy.ndarray[Any, Any]]
reveal_type(np.nonzero(A)) # E: tuple[numpy.ndarray[Any, Any]]
reveal_type(np.nonzero(B)) # E: tuple[numpy.ndarray[Any, Any]]
reveal_type(np.shape(a)) # E: tuple[builtins.int]
reveal_type(np.shape(b)) # E: tuple[builtins.int]
reveal_type(np.shape(c)) # E: tuple[builtins.int]
reveal_type(np.shape(A)) # E: tuple[builtins.int]
reveal_type(np.shape(B)) # E: tuple[builtins.int]
reveal_type(np.compress([True], a)) # E: numpy.ndarray[Any, Any]
reveal_type(np.compress([True], b)) # E: numpy.ndarray[Any, Any]
reveal_type(np.compress([True], c)) # E: numpy.ndarray[Any, Any]
reveal_type(np.compress([True], A)) # E: numpy.ndarray[Any, Any]
reveal_type(np.compress([True], B)) # E: numpy.ndarray[Any, Any]
reveal_type(np.clip(a, 0, 1.0)) # E: Any
reveal_type(np.clip(b, -1, 1)) # E: Any
reveal_type(np.clip(c, 0, 1)) # E: Any
reveal_type(np.clip(A, 0, 1)) # E: Any
reveal_type(np.clip(B, 0, 1)) # E: Any
reveal_type(np.sum(a)) # E: Any
reveal_type(np.sum(b)) # E: Any
reveal_type(np.sum(c)) # E: Any
reveal_type(np.sum(A)) # E: Any
reveal_type(np.sum(B)) # E: Any
reveal_type(np.sum(A, axis=0)) # E: Any
reveal_type(np.sum(B, axis=0)) # E: Any
reveal_type(np.all(a)) # E: numpy.bool_
reveal_type(np.all(b)) # E: numpy.bool_
reveal_type(np.all(c)) # E: numpy.bool_
reveal_type(np.all(A)) # E: numpy.bool_
reveal_type(np.all(B)) # E: numpy.bool_
reveal_type(np.all(A, axis=0)) # E: Any
reveal_type(np.all(B, axis=0)) # E: Any
reveal_type(np.all(A, keepdims=True)) # E: Any
reveal_type(np.all(B, keepdims=True)) # E: Any
reveal_type(np.any(a)) # E: numpy.bool_
reveal_type(np.any(b)) # E: numpy.bool_
reveal_type(np.any(c)) # E: numpy.bool_
reveal_type(np.any(A)) # E: numpy.bool_
reveal_type(np.any(B)) # E: numpy.bool_
reveal_type(np.any(A, axis=0)) # E: Any
reveal_type(np.any(B, axis=0)) # E: Any
reveal_type(np.any(A, keepdims=True)) # E: Any
reveal_type(np.any(B, keepdims=True)) # E: Any
reveal_type(np.cumsum(a)) # E: numpy.ndarray[Any, Any]
reveal_type(np.cumsum(b)) # E: numpy.ndarray[Any, Any]
reveal_type(np.cumsum(c)) # E: numpy.ndarray[Any, Any]
reveal_type(np.cumsum(A)) # E: numpy.ndarray[Any, Any]
reveal_type(np.cumsum(B)) # E: numpy.ndarray[Any, Any]
reveal_type(np.ptp(a)) # E: Any
reveal_type(np.ptp(b)) # E: Any
reveal_type(np.ptp(c)) # E: Any
reveal_type(np.ptp(A)) # E: Any
reveal_type(np.ptp(B)) # E: Any
reveal_type(np.ptp(A, axis=0)) # E: Any
reveal_type(np.ptp(B, axis=0)) # E: Any
reveal_type(np.ptp(A, keepdims=True)) # E: Any
reveal_type(np.ptp(B, keepdims=True)) # E: Any
reveal_type(np.amax(a)) # E: Any
reveal_type(np.amax(b)) # E: Any
reveal_type(np.amax(c)) # E: Any
reveal_type(np.amax(A)) # E: Any
reveal_type(np.amax(B)) # E: Any
reveal_type(np.amax(A, axis=0)) # E: Any
reveal_type(np.amax(B, axis=0)) # E: Any
reveal_type(np.amax(A, keepdims=True)) # E: Any
reveal_type(np.amax(B, keepdims=True)) # E: Any
reveal_type(np.amin(a)) # E: Any
reveal_type(np.amin(b)) # E: Any
reveal_type(np.amin(c)) # E: Any
reveal_type(np.amin(A)) # E: Any
reveal_type(np.amin(B)) # E: Any
reveal_type(np.amin(A, axis=0)) # E: Any
reveal_type(np.amin(B, axis=0)) # E: Any
reveal_type(np.amin(A, keepdims=True)) # E: Any
reveal_type(np.amin(B, keepdims=True)) # E: Any
reveal_type(np.prod(a)) # E: Any
reveal_type(np.prod(b)) # E: Any
reveal_type(np.prod(c)) # E: Any
reveal_type(np.prod(A)) # E: Any
reveal_type(np.prod(B)) # E: Any
reveal_type(np.prod(A, axis=0)) # E: Any
reveal_type(np.prod(B, axis=0)) # E: Any
reveal_type(np.prod(A, keepdims=True)) # E: Any
reveal_type(np.prod(B, keepdims=True)) # E: Any
reveal_type(np.prod(b, out=d)) # E: Any
reveal_type(np.prod(B, out=d)) # E: Any
reveal_type(np.cumprod(a)) # E: numpy.ndarray[Any, Any]
reveal_type(np.cumprod(b)) # E: numpy.ndarray[Any, Any]
reveal_type(np.cumprod(c)) # E: numpy.ndarray[Any, Any]
reveal_type(np.cumprod(A)) # E: numpy.ndarray[Any, Any]
reveal_type(np.cumprod(B)) # E: numpy.ndarray[Any, Any]
reveal_type(np.ndim(a)) # E: int
reveal_type(np.ndim(b)) # E: int
reveal_type(np.ndim(c)) # E: int
reveal_type(np.ndim(A)) # E: int
reveal_type(np.ndim(B)) # E: int
reveal_type(np.size(a)) # E: int
reveal_type(np.size(b)) # E: int
reveal_type(np.size(c)) # E: int
reveal_type(np.size(A)) # E: int
reveal_type(np.size(B)) # E: int
reveal_type(np.around(a)) # E: Any
reveal_type(np.around(b)) # E: Any
reveal_type(np.around(c)) # E: Any
reveal_type(np.around(A)) # E: Any
reveal_type(np.around(B)) # E: Any
reveal_type(np.mean(a)) # E: Any
reveal_type(np.mean(b)) # E: Any
reveal_type(np.mean(c)) # E: Any
reveal_type(np.mean(A)) # E: Any
reveal_type(np.mean(B)) # E: Any
reveal_type(np.mean(A, axis=0)) # E: Any
reveal_type(np.mean(B, axis=0)) # E: Any
reveal_type(np.mean(A, keepdims=True)) # E: Any
reveal_type(np.mean(B, keepdims=True)) # E: Any
reveal_type(np.mean(b, out=d)) # E: Any
reveal_type(np.mean(B, out=d)) # E: Any
reveal_type(np.std(a)) # E: Any
reveal_type(np.std(b)) # E: Any
reveal_type(np.std(c)) # E: Any
reveal_type(np.std(A)) # E: Any
reveal_type(np.std(B)) # E: Any
reveal_type(np.std(A, axis=0)) # E: Any
reveal_type(np.std(B, axis=0)) # E: Any
reveal_type(np.std(A, keepdims=True)) # E: Any
reveal_type(np.std(B, keepdims=True)) # E: Any
reveal_type(np.std(b, out=d)) # E: Any
reveal_type(np.std(B, out=d)) # E: Any
reveal_type(np.var(a)) # E: Any
reveal_type(np.var(b)) # E: Any
reveal_type(np.var(c)) # E: Any
reveal_type(np.var(A)) # E: Any
reveal_type(np.var(B)) # E: Any
reveal_type(np.var(A, axis=0)) # E: Any
reveal_type(np.var(B, axis=0)) # E: Any
reveal_type(np.var(A, keepdims=True)) # E: Any
reveal_type(np.var(B, keepdims=True)) # E: Any
reveal_type(np.var(b, out=d)) # E: Any
reveal_type(np.var(B, out=d)) # E: Any
|
|
# -*- coding: utf-8 -*-
"""Base TestCase classes for nbextensions tests."""
from __future__ import (
absolute_import, division, print_function, unicode_literals,
)
import logging
import os
from threading import Event, Thread
from jupyter_contrib_core.notebook_compat import serverextensions
from jupyter_contrib_core.testing_utils import (
GlobalMemoryHandler, get_wrapped_logger, wrap_logger_handlers,
)
from jupyter_contrib_core.testing_utils.jupyter_env import patch_jupyter_dirs
from nose.plugins.skip import SkipTest
from notebook.notebookapp import NotebookApp
from notebook.tests.launchnotebook import NotebookTestBase
from tornado.ioloop import IOLoop
from traitlets.config import Config
from traitlets.traitlets import default
try:
from unittest.mock import Mock
except ImportError:
from mock import Mock # py2
no_selenium = True
try:
from selenium import webdriver
except ImportError:
pass
else:
no_selenium = False
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.by import By
from selenium.webdriver.remote import remote_connection
from selenium.webdriver.support import expected_conditions as ec
from selenium.webdriver.support.ui import WebDriverWait
# don't show selenium debug logs
remote_connection.LOGGER.setLevel(logging.INFO)
class NoseyNotebookApp(NotebookApp):
"""Wrap the regular logging handler(s). For use inside nose tests."""
@default('log')
def _log_default(self):
"""wrap loggers for this application."""
return wrap_logger_handlers(NotebookApp._log_default(self))
class NbextensionTestBase(NotebookTestBase):
"""
Base class for nbextensions test case classes.
We override the setup_class method from NotebookTestBase in order to
install things, and also to set log_level to debug.
    We also split parts of the setup_class method into separate methods in
    order to simplify subclassing.
"""
config = Config(NotebookApp={'log_level': logging.DEBUG})
# these are added for notebook < 4.1, where url_prefix wasn't defined.
# However, due to the fact that the base_url body data attribute in the
# page template isn't passed through the urlencode jinja2 filter,
# we can't expect a base_url which would need encoding to work :(
if not hasattr(NotebookTestBase, 'url_prefix'):
url_prefix = '/ab/'
@classmethod
def base_url(cls):
return 'http://localhost:%i%s' % (cls.port, cls.url_prefix)
_install_user = False
_install_sys_prefix = False
@classmethod
def pre_server_setup(cls):
"""Setup extensions etc before running the notebook server."""
# added to install things!
cls.log.info('Enabling jupyter_nbextensions_configurator')
inst_func = serverextensions.toggle_serverextension_python
inst_funcname = '.'.join([inst_func.__module__, inst_func.__name__])
logger = get_wrapped_logger(
name=inst_funcname, log_level=logging.DEBUG)
serverextensions.toggle_serverextension_python(
'jupyter_nbextensions_configurator', enabled=True, logger=logger,
user=cls._install_user, sys_prefix=cls._install_sys_prefix)
@classmethod
def get_server_kwargs(cls, **overrides):
kwargs = dict(
port=cls.port,
port_retries=0,
open_browser=False,
runtime_dir=cls.jupyter_dirs['server']['runtime'],
notebook_dir=cls.jupyter_dirs['server']['notebook'],
base_url=cls.url_prefix,
config=cls.config,
)
# disable auth-by-default, introduced in notebook PR #1831
if 'token' in NotebookApp.class_trait_names():
kwargs['token'] = ''
kwargs.update(overrides)
return kwargs
@classmethod
def start_server_thread(cls, started_event):
"""
Start a notebook server in a separate thread.
The start is signalled using the passed Event instance.
"""
cls.log.info('Starting notebook server app thread')
app = cls.notebook = NoseyNotebookApp(**cls.get_server_kwargs())
# don't register signal handler during tests
app.init_signal = lambda: None
app.initialize(argv=[])
loop = IOLoop.current()
loop.add_callback(started_event.set)
try:
app.start()
finally:
# set the event, so failure to start doesn't cause a hang
started_event.set()
# app.session_manager.close call was added after notebook 4.0
if hasattr(app.session_manager, 'close'):
app.session_manager.close()
@classmethod
def _setup_patches(cls):
(cls.jupyter_patches, cls.jupyter_dirs,
remove_jupyter_dirs) = patch_jupyter_dirs()
# store in a list to avoid confusion over bound/unbound method in pypy
cls.removal_funcs = [remove_jupyter_dirs]
try:
for ptch in cls.jupyter_patches:
ptch.start()
# patches for items called in NotebookTestBase.teardown_class
# env_patch needs a start method as well because of a typo in
# notebook 4.0 which calls it in the teardown_class method
cls.env_patch = cls.path_patch = Mock(['start', 'stop'])
cls.home_dir = cls.config_dir = cls.data_dir = Mock(['cleanup'])
cls.runtime_dir = cls.notebook_dir = Mock(['cleanup'])
cls.tmp_dir = Mock(['cleanup'])
except Exception:
for func in cls.removal_funcs:
func()
raise
@classmethod
def setup_class(cls):
"""Install things & setup a notebook server in a separate thread."""
cls.log = get_wrapped_logger(cls.__name__)
cls._setup_patches()
cls.pre_server_setup()
try:
started = Event()
cls.notebook_thread = Thread(
target=cls.start_server_thread, args=[started])
cls.notebook_thread.start()
started.wait()
cls.wait_until_alive()
except Exception:
for func in cls.removal_funcs:
func()
raise
@classmethod
def teardown_class(cls):
try:
# call superclass to stop notebook server
super(NbextensionTestBase, cls).teardown_class()
finally:
try:
for ptch in cls.jupyter_patches:
ptch.stop()
finally:
for func in cls.removal_funcs:
func()
def _skip_if_no_selenium():
if no_selenium:
raise SkipTest('Selenium not installed. '
'Skipping selenium-based test.')
if os.environ.get('TRAVIS_OS_NAME') == 'osx':
raise SkipTest("Don't do selenium tests on travis osx")
class SeleniumNbextensionTestBase(NbextensionTestBase):
# browser logs from selenium aren't very useful currently, but if you want
# them, you can set the class attribute show_driver_logs to have them
# output via the GlobalMemoryHandler on test failure
show_driver_logs = False
@classmethod
def setup_class(cls):
cls.init_webdriver()
cls._failure_occurred = False # flag for logging
super(SeleniumNbextensionTestBase, cls).setup_class()
@classmethod
def init_webdriver(cls):
cls.log = get_wrapped_logger(cls.__name__)
_skip_if_no_selenium()
if hasattr(cls, 'driver'):
return cls.driver
if (os.environ.get('CI') and os.environ.get('TRAVIS') and
os.environ.get('SAUCE_ACCESS_KEY')):
cls.log.info(
'Running in CI environment. Using Sauce remote webdriver.')
username = os.environ['SAUCE_USERNAME']
access_key = os.environ['SAUCE_ACCESS_KEY']
capabilities = {
# 'platform': 'Mac OS X 10.9',
'platform': 'Linux',
'browserName': 'firefox',
'version': 'latest',
'tags': [os.environ['TOXENV'], 'CI'],
'name': cls.__name__
}
hub_url = 'http://{}:{}@ondemand.saucelabs.com:80/wd/hub'.format(
username, access_key)
if os.environ.get('TRAVIS'):
# see https://docs.travis-ci.com/user/gui-and-headless-browsers
# and https://docs.travis-ci.com/user/sauce-connect
capabilities.update({
'tunnel-identifier': os.environ['TRAVIS_JOB_NUMBER'],
'build': os.environ['TRAVIS_BUILD_NUMBER'],
})
cls.driver = webdriver.Remote(
desired_capabilities=capabilities, command_executor=hub_url)
else:
cls.log.info('Using local webdriver.')
cls.driver = webdriver.Firefox()
return cls.driver
def run(self, results):
"""Run a given test. Overridden in order to access results."""
# in py2 unittest, run doesn't return the results object, so we need to
# create one in order to have a reference to it.
if results is None:
results = self.defaultTestResult()
super(SeleniumNbextensionTestBase, self).run(results)
if results.failures or results.errors:
self.__class__._failure_occurred = True
return results
@classmethod
def _print_logs_on_failure(cls):
if cls._failure_occurred:
cls.log.info('\n'.join([
'',
'\t\tFailed test!',
'\t\tCaptured logging:',
]))
GlobalMemoryHandler.rotate_buffer(1)
GlobalMemoryHandler.flush_to_target()
browser_logger = get_wrapped_logger(
name=cls.__name__ + '.driver', log_level=logging.DEBUG)
if cls.show_driver_logs:
cls.log.info('\n\t\tjavascript console logs below...\n\n')
for entry in cls.driver.get_log('browser'):
                    # logging._nameToLevel is py3-only; fall back to the
                    # py2 _levelNames mapping when it is absent
                    name_to_level = getattr(
                        logging, '_nameToLevel', None) or logging._levelNames
                    level = name_to_level.get(entry['level'], logging.ERROR)
msg = entry['message'].strip()
browser_logger.log(level, msg)
record, target = GlobalMemoryHandler._buffer[-1]
record.ct = entry['timestamp'] / 1000.
GlobalMemoryHandler._buffer[-1] = record, target
GlobalMemoryHandler.flush_to_target()
if (not cls._failure_occurred) or os.environ.get('CI'):
cls.log.info('closing webdriver')
cls.driver.quit()
else:
cls.log.info('keeping webdriver open')
@classmethod
def teardown_class(cls):
cls._print_logs_on_failure()
super(SeleniumNbextensionTestBase, cls).teardown_class()
@classmethod
def wait_for_element(cls, presence_cond, message, timeout=5):
"""WebDriverWait for an element to appear, fail test on timeout."""
try:
return WebDriverWait(cls.driver, timeout).until(
ec.presence_of_element_located(presence_cond))
except TimeoutException:
if message:
raise cls.failureException(message)
else:
raise cls.failureException(
'{}No element matching condition {!r} found in {}s'.format(
message, presence_cond, timeout))
@classmethod
def wait_for_selector(cls, css_selector, message='', timeout=5):
"""WebDriverWait for a selector to appear, fail test on timeout."""
if message:
message += '\n'
message = '{}No element matching selector {!r} found in {}s'.format(
message, css_selector, timeout)
return cls.wait_for_element(
(By.CSS_SELECTOR, css_selector), message=message, timeout=timeout)
@classmethod
def wait_for_partial_link_text(cls, link_text, message='', timeout=5):
"""WebDriverWait for a link to appear, fail test on timeout."""
if message:
message += '\n'
message = (
'{}No element matching partial link text '
'{!r} found in {}s').format(message, link_text, timeout)
return cls.wait_for_element((By.PARTIAL_LINK_TEXT, link_text),
message=message, timeout=timeout)
@classmethod
def wait_for_xpath(cls, xpath, message='', timeout=5):
"""WebDriverWait for a selector to appear, fail test on timeout."""
if message:
message += '\n'
message = '{}No element matching xpath {!r} found in {}s'.format(
message, xpath, timeout)
return cls.wait_for_element(
(By.XPATH, xpath), message=message, timeout=timeout)
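# Illustrative sketch (not part of this module): a minimal selenium-based test case
# built on SeleniumNbextensionTestBase. The URL path is an assumption for the sake of
# the example; real tests live in the package's own test modules.
#
# class ConfiguratorPageTest(SeleniumNbextensionTestBase):
#     def test_page_loads(self):
#         self.driver.get(self.base_url() + 'nbextensions/')
#         self.wait_for_selector(
#             'body', message='nbextensions page should render', timeout=10)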
|
|
# Copyright (c) 2012-2015 Tycho Andersen
# Copyright (c) 2013 xarvh
# Copyright (c) 2013 horsik
# Copyright (c) 2013-2014 roger
# Copyright (c) 2013 Tao Sauvage
# Copyright (c) 2014 ramnes
# Copyright (c) 2014 Sean Vig
# Copyright (c) 2014 Adi Sieker
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from . import command
from . import hook
from . import utils
from . import xcbq
from six import MAXSIZE
import warnings
class Key(object):
"""Defines a keybinding.
Parameters
==========
modifiers:
A list of modifier specifications. Modifier specifications are one of:
"shift", "lock", "control", "mod1", "mod2", "mod3", "mod4", "mod5".
key:
A key specification, e.g. "a", "Tab", "Return", "space".
commands:
A list of lazy command objects generated with the command.lazy helper.
If multiple Call objects are specified, they are run in sequence.
kwds:
        A dictionary of keyword arguments; currently only "desc" is
        recognized, allowing a description of the binding to be added
"""
def __init__(self, modifiers, key, *commands, **kwds):
self.modifiers = modifiers
self.key = key
self.commands = commands
self.desc = kwds.get("desc", "")
if key not in xcbq.keysyms:
raise utils.QtileError("Unknown key: %s" % key)
self.keysym = xcbq.keysyms[key]
try:
self.modmask = utils.translate_masks(self.modifiers)
except KeyError as v:
raise utils.QtileError(v)
def __repr__(self):
return "<Key (%s, %s)>" % (self.modifiers, self.key)
class Drag(object):
"""Defines binding of a mouse to some dragging action
On each motion event command is executed with two extra parameters added x
and y offset from previous move
It focuses clicked window by default. If you want to prevent it pass,
`focus=None` as an argument
"""
def __init__(self, modifiers, button, *commands, **kwargs):
self.start = kwargs.get("start", None)
self.focus = kwargs.get("focus", "before")
self.modifiers = modifiers
self.button = button
self.commands = commands
try:
self.button_code = int(self.button.replace('Button', ''))
self.modmask = utils.translate_masks(self.modifiers)
except KeyError as v:
raise utils.QtileError(v)
def __repr__(self):
return "<Drag (%s, %s)>" % (self.modifiers, self.button)
class Click(object):
"""Defines binding of a mouse click
It focuses clicked window by default. If you want to prevent it, pass
`focus=None` as an argument
"""
def __init__(self, modifiers, button, *commands, **kwargs):
self.focus = kwargs.get("focus", "before")
self.modifiers = modifiers
self.button = button
self.commands = commands
try:
self.button_code = int(self.button.replace('Button', ''))
self.modmask = utils.translate_masks(self.modifiers)
except KeyError as v:
raise utils.QtileError(v)
def __repr__(self):
return "<Click (%s, %s)>" % (self.modifiers, self.button)
class EzConfig(object):
"""
Helper class for defining key and button bindings in an emacs-like format.
Inspired by Xmonad's XMonad.Util.EZConfig.
"""
modifier_keys = {
'M': 'mod4',
'A': 'mod1',
'S': 'shift',
'C': 'control',
}
def parse(self, spec):
"""
Splits an emacs keydef into modifiers and keys. For example:
"M-S-a" -> ['mod4', 'shift'], 'a'
"A-<minus>" -> ['mod1'], 'minus'
"C-<Tab>" -> ['control'], 'Tab'
"""
mods = []
keys = []
for key in spec.split('-'):
if not key:
break
if key in self.modifier_keys:
if keys:
msg = 'Modifiers must always come before key/btn: %s'
raise utils.QtileError(msg % spec)
mods.append(self.modifier_keys[key])
continue
if len(key) == 1:
keys.append(key)
continue
if len(key) > 3 and key[0] == '<' and key[-1] == '>':
keys.append(key[1:-1])
continue
if not keys:
msg = 'Invalid key/btn specifier: %s'
raise utils.QtileError(msg % spec)
if len(keys) > 1:
msg = 'Key chains are not supported: %s' % spec
raise utils.QtileError(msg)
return mods, keys[0]
class EzKey(EzConfig, Key):
def __init__(self, keydef, *commands):
modkeys, key = self.parse(keydef)
super(EzKey, self).__init__(modkeys, key, *commands)
class EzClick(EzConfig, Click):
def __init__(self, btndef, *commands, **kwargs):
modkeys, button = self.parse(btndef)
button = 'Button%s' % button
super(EzClick, self).__init__(modkeys, button, *commands, **kwargs)
class EzDrag(EzConfig, Drag):
def __init__(self, btndef, *commands, **kwargs):
modkeys, button = self.parse(btndef)
button = 'Button%s' % button
super(EzDrag, self).__init__(modkeys, button, *commands, **kwargs)
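# Illustrative sketch (not part of this module): the same style of bindings expressed
# with the emacs-like helpers, following the format documented in EzConfig.parse
# ("M" -> mod4, "A" -> mod1, "S" -> shift, "C" -> control).
#
# keys = [
#     EzKey("M-<Return>", lazy.spawn("xterm")),
#     EzKey("M-S-q", lazy.shutdown()),
# ]
# mouse = [
#     EzDrag("M-1", lazy.window.set_position_floating(),
#            start=lazy.window.get_position()),
#     EzClick("M-2", lazy.window.bring_to_front()),
# ]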
class ScreenRect(object):
def __init__(self, x, y, width, height):
self.x = x
self.y = y
self.width = width
self.height = height
def __repr__(self):
return '<%s %d,%d %d,%d>' % (
self.__class__.__name__,
self.x, self.y,
self.width, self.height
)
def hsplit(self, columnwidth):
assert columnwidth > 0
assert columnwidth < self.width
return (
self.__class__(self.x, self.y, columnwidth, self.height),
self.__class__(
self.x + columnwidth, self.y,
self.width - columnwidth, self.height
)
)
def vsplit(self, rowheight):
assert rowheight > 0
assert rowheight < self.height
return (
self.__class__(self.x, self.y, self.width, rowheight),
self.__class__(
self.x, self.y + rowheight,
self.width, self.height - rowheight
)
)
class Screen(command.CommandObject):
"""A physical screen, and its associated paraphernalia.
Define a screen with a given set of Bars of a specific geometry. Note that
bar.Bar objects can only be placed at the top or the bottom of the screen
    (bar.Gap objects can be placed anywhere). Also, ``x``, ``y``, ``width``,
    and ``height`` aren't usually specified unless you are using 'fake
    screens'.
Parameters
==========
top: List of Gap/Bar objects, or None.
bottom: List of Gap/Bar objects, or None.
left: List of Gap/Bar objects, or None.
right: List of Gap/Bar objects, or None.
x : int or None
y : int or None
width : int or None
height : int or None
"""
def __init__(self, top=None, bottom=None, left=None, right=None,
x=None, y=None, width=None, height=None):
self.group = None
self.previous_group = None
self.top = top
self.bottom = bottom
self.left = left
self.right = right
self.qtile = None
self.index = None
# x position of upper left corner can be > 0
# if one screen is "right" of the other
self.x = x
self.y = y
self.width = width
self.height = height
def _configure(self, qtile, index, x, y, width, height, group):
self.qtile = qtile
self.index = index
self.x = x
self.y = y
self.width = width
self.height = height
self.setGroup(group)
for i in self.gaps:
i._configure(qtile, self)
@property
def gaps(self):
return (i for i in [self.top, self.bottom, self.left, self.right] if i)
@property
def dx(self):
return self.x + self.left.size if self.left else self.x
@property
def dy(self):
return self.y + self.top.size if self.top else self.y
@property
def dwidth(self):
val = self.width
if self.left:
val -= self.left.size
if self.right:
val -= self.right.size
return val
@property
def dheight(self):
val = self.height
if self.top:
val -= self.top.size
if self.bottom:
val -= self.bottom.size
return val
def get_rect(self):
return ScreenRect(self.dx, self.dy, self.dwidth, self.dheight)
def setGroup(self, new_group, save_prev=True):
"""Put group on this screen"""
        # check for None first; touching new_group.screen on a None argument
        # would raise AttributeError before the guard below was ever reached
        if new_group is None:
            return
        if new_group.screen == self:
            return
        if save_prev:
            self.previous_group = self.group
if new_group.screen:
# g1 <-> s1 (self)
# g2 (new_group) <-> s2 to
# g1 <-> s2
# g2 <-> s1
g1 = self.group
s1 = self
g2 = new_group
s2 = new_group.screen
s2.group = g1
g1._setScreen(s2)
s1.group = g2
g2._setScreen(s1)
else:
old_group = self.group
self.group = new_group
# display clients of the new group and then hide from old group
# to remove the screen flickering
new_group._setScreen(self)
if old_group is not None:
old_group._setScreen(None)
hook.fire("setgroup")
hook.fire("focus_change")
hook.fire(
"layout_change",
self.group.layouts[self.group.currentLayout],
self.group
)
def _items(self, name):
if name == "layout":
return (True, list(range(len(self.group.layouts))))
elif name == "window":
return (True, [i.window.wid for i in self.group.windows])
elif name == "bar":
return (False, [x.position for x in self.gaps])
def _select(self, name, sel):
if name == "layout":
if sel is None:
return self.group.layout
else:
return utils.lget(self.group.layouts, sel)
elif name == "window":
if sel is None:
return self.group.currentWindow
else:
for i in self.group.windows:
if i.window.wid == sel:
return i
elif name == "bar":
return getattr(self, sel)
def resize(self, x=None, y=None, w=None, h=None):
x = x or self.x
y = y or self.y
w = w or self.width
h = h or self.height
self._configure(self.qtile, self.index, x, y, w, h, self.group)
for bar in [self.top, self.bottom, self.left, self.right]:
if bar:
bar.draw()
        # pass the bound method itself; call_soon schedules a callable for later
        self.qtile.call_soon(self.group.layoutAll)
def cmd_info(self):
"""
Returns a dictionary of info for this screen.
"""
return dict(
index=self.index,
width=self.width,
height=self.height,
x=self.x,
y=self.y
)
def cmd_resize(self, x=None, y=None, w=None, h=None):
"""Resize the screen"""
self.resize(x, y, w, h)
def cmd_next_group(self, skip_empty=False, skip_managed=False):
"""Switch to the next group"""
n = self.group.nextGroup(skip_empty, skip_managed)
self.setGroup(n)
return n.name
def cmd_prev_group(self, skip_empty=False, skip_managed=False):
"""Switch to the previous group"""
n = self.group.prevGroup(skip_empty, skip_managed)
self.setGroup(n)
return n.name
def cmd_toggle_group(self, group_name=None):
"""Switch to the selected group or to the previously active one"""
group = self.qtile.groupMap.get(group_name)
if group in (self.group, None):
group = self.previous_group
self.setGroup(group)
def cmd_togglegroup(self, groupName=None):
"""Switch to the selected group or to the previously active one
Deprecated: use toggle_group()"""
warnings.warn("togglegroup is deprecated, use toggle_group", DeprecationWarning)
self.cmd_toggle_group(groupName)
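# Illustrative sketch (not part of this module): a typical Screen definition from a
# user config; `bar` and `widget` are assumed to come from libqtile.bar and
# libqtile.widget.
#
# screens = [
#     Screen(top=bar.Bar(
#         [widget.GroupBox(), widget.WindowName(), widget.Clock()], 24)),
# ]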
class Group(object):
"""Represents a "dynamic" group
These groups can spawn apps, only allow certain Matched windows to be on
them, hide when they're not in use, etc.
Parameters
==========
name : string
the name of this group
matches : default ``None``
list of ``Match`` objects whose windows will be assigned to this group
exclusive : boolean
when other apps are started in this group, should we allow them here or not?
spawn : string or list of strings
        this will be executed (spawned) when the group is created; you can
        pass either a single program name or a list of programs
layout : string
the default layout for this group (e.g. 'max' or 'stack')
layouts : list
the group layouts list overriding global layouts
persist : boolean
should this group stay alive with no member windows?
init : boolean
is this group alive when qtile starts?
position : int
group position
"""
def __init__(self, name, matches=None, exclusive=False,
spawn=None, layout=None, layouts=None, persist=True, init=True,
layout_opts=None, screen_affinity=None, position=MAXSIZE):
self.name = name
self.exclusive = exclusive
self.spawn = spawn
self.layout = layout
self.layouts = layouts or []
self.persist = persist
self.init = init
self.matches = matches or []
self.layout_opts = layout_opts or {}
self.screen_affinity = screen_affinity
self.position = position
def __repr__(self):
attrs = utils.describe_attributes(self,
['exclusive', 'spawn', 'layout', 'layouts', 'persist', 'init',
'matches', 'layout_opts', 'screen_affinity'])
return '<config.Group %r (%s)>' % (self.name, attrs)
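# Illustrative sketch (not part of this module): a list of dynamic groups as it might
# appear in a user config; the names, spawn command and WM_CLASS value are examples.
#
# groups = [
#     Group("www", layout="max", matches=[Match(wm_class=["Firefox"])]),
#     Group("term", spawn="xterm", persist=True),
# ]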
class Match(object):
"""Match for dynamic groups
It can match by title, class or role.
    ``Match`` supports both regular expression objects (i.e. the result of
    ``re.compile()``) and plain strings (matched as substrings). If a window
matches any of the things in any of the lists, it is considered a match.
Parameters
==========
title:
things to match against the title (WM_NAME)
wm_class:
things to match against the second string in WM_CLASS atom
role:
things to match against the WM_ROLE atom
wm_type:
things to match against the WM_TYPE atom
wm_instance_class:
things to match against the first string in WM_CLASS atom
net_wm_pid:
things to match against the _NET_WM_PID atom (only int allowed in this
rule)
"""
def __init__(self, title=None, wm_class=None, role=None, wm_type=None,
wm_instance_class=None, net_wm_pid=None):
if not title:
title = []
if not wm_class:
wm_class = []
if not role:
role = []
if not wm_type:
wm_type = []
if not wm_instance_class:
wm_instance_class = []
if not net_wm_pid:
net_wm_pid = []
try:
net_wm_pid = list(map(int, net_wm_pid))
except ValueError:
error = 'Invalid rule for net_wm_pid: "%s" '\
'only ints allowed' % str(net_wm_pid)
raise utils.QtileError(error)
self._rules = [('title', t) for t in title]
self._rules += [('wm_class', w) for w in wm_class]
self._rules += [('role', r) for r in role]
self._rules += [('wm_type', r) for r in wm_type]
self._rules += [('wm_instance_class', w) for w in wm_instance_class]
self._rules += [('net_wm_pid', w) for w in net_wm_pid]
def compare(self, client):
for _type, rule in self._rules:
if _type == "net_wm_pid":
def match_func(value):
return rule == value
else:
match_func = getattr(rule, 'match', None) or \
getattr(rule, 'count')
if _type == 'title':
value = client.name
elif _type == 'wm_class':
value = None
_value = client.window.get_wm_class()
if _value and len(_value) > 1:
value = _value[1]
elif _type == 'wm_instance_class':
value = client.window.get_wm_class()
if value:
value = value[0]
elif _type == 'wm_type':
value = client.window.get_wm_type()
elif _type == 'net_wm_pid':
value = client.window.get_net_wm_pid()
else:
value = client.window.get_wm_window_role()
if value and match_func(value):
return True
return False
def map(self, callback, clients):
"""Apply callback to each client that matches this Match"""
for c in clients:
if self.compare(c):
callback(c)
def __repr__(self):
return '<Match %s>' % self._rules
class Rule(object):
"""How to act on a Match
A Rule contains a Match object, and a specification about what to do when
that object is matched.
Parameters
==========
match :
``Match`` object associated with this ``Rule``
float :
auto float this window?
intrusive :
override the group's exclusive setting?
break_on_match :
Should we stop applying rules if this rule is matched?
"""
def __init__(self, match, group=None, float=False, intrusive=False,
break_on_match=True):
self.match = match
self.group = group
self.float = float
self.intrusive = intrusive
self.break_on_match = break_on_match
def matches(self, w):
return self.match.compare(w)
def __repr__(self):
actions = utils.describe_attributes(self, ['group', 'float',
'intrusive', 'break_on_match'])
return '<Rule match=%r actions=(%s)>' % (self.match, actions)
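# Illustrative sketch (not part of this module): pairing Match with Rule to control
# how matched windows are handled; the WM_CLASS and title values are examples.
#
# dgroups_app_rules = [
#     Rule(Match(wm_class=["Gimp"]), float=True),
#     Rule(Match(title=["gvim"]), group="term", break_on_match=False),
# ]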
|
|
# ----------------------------------------------------------------------------
# Copyright (c) 2014-2016, 'prx' developers (see AUTHORS file)
# All rights reserved.
#
# Distributed under the terms of the MIT license.
#
# The full license is in the LICENSE file, distributed with this software.
# ----------------------------------------------------------------------------
"""Primal-dual algorithms."""
from __future__ import division
import numpy as np
from . import base as _base
from ..fun.norms import l2norm, l2normsqhalf
from ._common import docstring_wrapper as _docstring_wrapper
__all__ = ('_pdhg', 'PDHG')
@_docstring_wrapper
def _pdhg(F, G, A, Astar, b, x0, y0=None, step_p=1.0, step_d=1.0,
reltol=1e-6, abstol=1e-10, maxits=10000,
moreinfo=False, printrate=100, xstar=None):
"""Solve: argmin_x ( F(x) + G(A(x) - b) ) using PDHG.
PDHG stands for Primal Dual Hybrid Gradient.
Notes
-----
The basic algorithm is described in [1]_, along with a description of how
this algorithm relates to similar algorithms like ADMM and linearized ADMM.
References
----------
.. [1] E. Esser, X. Zhang, and T. F. Chan, "A General Framework for a
Class of First Order Primal-Dual Algorithms for Convex Optimization in
Imaging Science," SIAM Journal on Imaging Sciences, vol. 3, no. 4, pp.
1015-1046, Jan. 2010.
"""
proxF = F.prox
Fconj = F.conjugate
Gconj = G.conjugate
dualproxG = Gconj.prox
if y0 is None:
y0 = np.zeros_like(A(x0))
pstep = float(step_p)
dstep = float(step_d)
if moreinfo:
histdtype = [('it', np.int32), ('val', np.float64),
('step_p', np.float64), ('step_d', np.float64),
('resid_p', np.float64), ('thresh_p', np.float64),
('resid_d', np.float64), ('thresh_d', np.float64),
('err', np.float64)]
hist = np.zeros((maxits - 1)//printrate + 1, dtype=histdtype)
ptheta = 1
dtheta = 0
x = x0
Ax = A(x)
y = y0
Asy = Astar(y)
y_old = y
Asy_old = Asy
tolnorm = l2norm
pabstol = abstol*tolnorm(np.ones_like(x0))
dabstol = abstol*tolnorm(np.ones_like(y0))
bts = 0
for k in range(maxits):
while True:
# primal update
# acceleration
ybar = y + dtheta*(y - y_old)
Asybar = Asy + dtheta*(Asy - Asy_old)
# gradient descent step wrt Lagrange multiplier term
xhat = x - pstep*Asybar
# prox step
x_new = proxF(xhat, pstep)
Ax_new = A(x_new)
            # backtracking is currently disabled (see the commented-out code
            # below), so accept the candidate primal step immediately
            break
# if backtrack is None:
# break
# else:
# Axmb = Ax_new - b
# plhs = G(Axmb) + Gconj(ybar) - np.vdot(ybar, Axmb).real
# prhs = l2normsqhalf(x_new - x)/pstep
# if plhs <= prhs:
# break
# else:
# print(plhs, prhs)
# # backtrack
# pstep = pstep*backtrack
while True:
# dual update
# acceleration
xbar = x_new + ptheta*(x_new - x)
Axbar = Ax_new + ptheta*(Ax_new - Ax)
# gradient ascent step wrt Lagrange multiplier term
yhat = y + dstep*(Axbar - b)
# prox step
y_new = dualproxG(yhat, dstep)
Asy_new = Astar(y_new)
            # as above, accept the dual step immediately since backtracking
            # is currently disabled
            break
# if backtrack is None:
# break
# else:
# dlhs = Fconj(-Asy_new) + F(xbar) - np.vdot(xbar, -Asy_new).real
# drhs = l2normsqhalf(y_new - y)/dstep
# if dlhs <= drhs:
# break
# else:
# print(dlhs, drhs)
# # backtrack
# dstep = dstep*backtrack
# calculate residuals
p = (x - x_new)/pstep - (Asy - Asy_new) - dtheta*(Asy - Asy_old)
d = (y - y_new)/dstep - ptheta*(Ax - Ax_new)
# update state variables for which we had to track previous values
x = x_new
Ax = Ax_new
y_old = y
Asy_old = Asy
y = y_new
Asy = Asy_new
# norms for convergence check
pnorm = tolnorm(p)
dnorm = tolnorm(d)
xnorm = tolnorm(x)
Asynorm = tolnorm(Asy)
ynorm = tolnorm(y)
Axnorm = tolnorm(Ax)
pstopthresh = pabstol + reltol*max(xnorm/pstep, Asynorm)
dstopthresh = dabstol + reltol*max(ynorm/dstep, Axnorm)
if printrate is not None and (k % printrate) == 0:
val = float(F(x) + G(Ax - b))
dval = -Fconj(y) - Gconj(y) - np.vdot(y, b).real
dkt = dict(it=k, val=val, step_p=pstep, step_d=dstep,
resid_p=pnorm, thresh_p=pstopthresh,
resid_d=dnorm, thresh_d=dstopthresh)
print(('{it}: val={val:.5}, step_p={step_p:.4}, ' +
'step_d={step_d:.4}, ' +
'res_p={resid_p:.4} ({thresh_p:.3}), ' +
'res_d={resid_d:.4} ({thresh_d:.3})').format(**dkt))
if moreinfo:
if xstar is not None:
dkt['err'] = tolnorm(x_new - xstar)/tolnorm(xstar)
else:
dkt['err'] = np.nan
hist[k//printrate] = tuple(dkt[ky] for ky in hist.dtype.names)
# can't calculate dual function value, so best stopping criterion
# is to see if primal and dual residuals are small
if pnorm < pstopthresh and dnorm < dstopthresh:
break
# # penalty parameter adjustment
# if k < 100:
# if rnorm > residgap*snorm:
# pen = pen/penfactor
# # scaled dual variable u=y*pen, so update u with y constant
# u = u/penfactor
# Asu = Asu/penfactor
# Asu_old = Asu_old/penfactor
# elif snorm > residgap*rnorm:
# pen = pen*penfactor
# # scaled dual variable u=y*pen, so update u with y constant
# u = u*penfactor
# Asu = Asu*penfactor
# Asu_old = Asu_old*penfactor
#
# expand stepsize
# if backtrack is not None and dAx > 0:
# stepsize = dx*pen/dAx
# if expand is not None and backtrack is not None:
# pstep = pstep*expand
# dstep = dstep*expand
if printrate is not None:
if k + 1 >= maxits:
msg = 'Failed to converge'
else:
msg = 'Converged'
msg += ' after {0} iterations'.format(k + 1)
# if backtrack is not None:
# msg += ' (and {0} backtracks)'.format(bts)
print(msg)
if moreinfo:
return dict(x=x, y=y, numits=k+1, p=p, d=d, pstep=pstep, dstep=dstep,
hist=hist[:k//printrate])
else:
return x
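# Illustrative sketch (not part of this module): the duck-typed interface that _pdhg
# expects from F and G, written out for the l1-regularized least-squares problem
#     argmin_x  lam*||x||_1 + 0.5*||A(x) - b||**2
# i.e. F(x) = lam*||x||_1 and G(z) = 0.5*||z||**2. The prx package supplies ready-made
# objects for this; the classes below are only a minimal stand-in that makes the
# required attributes (__call__, .prox, .conjugate) explicit.
#
# class _L1(object):                                  # F(x) = lam*||x||_1
#     def __init__(self, lam):
#         self.lam = lam
#     def __call__(self, x):
#         return self.lam*np.abs(x).sum()
#     def prox(self, v, s):                           # soft thresholding
#         return np.sign(v)*np.maximum(np.abs(v) - self.lam*s, 0)
#     @property
#     def conjugate(self):                            # indicator of ||y||_inf <= lam
#         lam = self.lam
#         return lambda y: 0.0 if np.max(np.abs(y)) <= lam else np.inf
#
# class _L2SqHalf(object):                            # G(z) = 0.5*||z||**2
#     def __call__(self, z):
#         return l2normsqhalf(z)
#     @property
#     def conjugate(self):                            # self-conjugate; prox is v/(1 + s)
#         conj = _L2SqHalf()
#         conj.prox = lambda v, s: v/(1 + s)
#         return conj
#
# A = np.random.randn(20, 50)
# b = np.random.randn(20)
# x0 = np.zeros(50)
# # PDHG converges when step_p*step_d*opnorm(A)**2 <= 1
# step_p = 0.9/np.linalg.norm(A, 2)**2
# x = _pdhg(_L1(0.1), _L2SqHalf(), A.dot, A.T.dot, b, x0,
#           step_p=step_p, step_d=1.0, printrate=None)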
class PDHG(_base.AffineOptArgsMixin, _base.BaseIterativeAlgorithm):
"""Class for the Primal Dual Hybrid Gradient (PDHG) algorithm.
{algorithm_description}
Attributes
----------
{algorithm_attributes}
See Also
--------
{algorithm_see_also}
Notes
-----
{algorithm_notes}
References
----------
{algorithm_references}
"""
_doc_algorithm_description = """
Primal Dual Hybrid Gradient (PDHG) solves convex optimization problems with
a split objective of the form ``F(x) + G(A(x) - b)``. It is often
non-trivial to accommodate a linear transformation when evaluating the prox
operator of most functions, so its explicit inclusion in the algorithm
    itself can bring a large benefit. This implementation includes adaptive
    step sizes that help improve convergence.
"""
_doc_algorithm_self = ':class:`.PDHG`'
    _doc_algorithm_see_also = ':class:`.ADMMLin`, :class:`.ProxGradAccel`'
_doc_algorithm_notes = """
The basic algorithm is described in [#EZC10]_, along with a description of
how this algorithm relates to similar algorithms like ADMM and linearized
ADMM.
"""
_doc_algorithm_references = """
.. [#EZC10] {EZC10}
"""
_doc_algorithm_objective_attributes = """
x_ : array_like
Value of the optimization variable, set after minimization has
converged through :meth:`minimize` or :meth:`self.alg.iterate`.
"""
_doc_initial_state_argument = """
state : dict
Initial state dictionary containing:
x : array_like
Initial value for the optimization variable.
y : array_like, optional
Initial value for `y`, the dual variable.
"""
_doc_algorithm_parameters = """
    step_size_p : float, optional
        Initial primal step size when backtracking is used, or constant primal
        step size for all iterations when backtracking is not used. Together
        with `step_size_d`, it must satisfy
        ``step_size_p*step_size_d <= 1/opnorm(A)**2`` to guarantee
        convergence, where ``opnorm`` is the l2-induced operator norm of `A`.
    step_size_d : float, optional
        Initial dual step size when backtracking is used, or constant dual
        step size for all iterations when backtracking is not used. Together
        with `step_size_p`, it must satisfy
        ``step_size_p*step_size_d <= 1/opnorm(A)**2`` to guarantee
        convergence, where ``opnorm`` is the l2-induced operator norm of `A`.
backtrack : float | ``None``, optional
Backtracking multiplication factor between 0 and 1 that decreases
the step size when the local Lipschitz condition is violated. If
``None``, no backtracking is used.
expand : float | ``None``, optional
Expansion multiplication factor greater than 1 that increases the
step size after every iteration. This allows the step size to adapt
to a decreasing local Lipschitz constant and improve convergence.
If ``None``, no expansion is used.
{algorithm_parameters}
"""
def __init__(
self, objective, step_size_p=1.0, step_size_d=1.0, backtrack=0.5,
expand=1.25, **kwargs
):
"""."""
super(PDHG, self).__init__(objective, **kwargs)
self.step_size_p = step_size_p
self.step_size_d = step_size_d
self.backtrack = backtrack
self.expand = expand
self.print_str = (
'{_iter}: val={_val:.5}, step_p={step_size_p:.4},'
' step_d={step_size_d:.4},'
'\n\tresid_p={_resid_p_nrm:.4} ({_resid_p_thresh:.3}),'
' resid_d={_resid_d_nrm:.4} ({_resid_d_thresh:.3})'
)
def validate_params(self):
"""."""
self.step_size_p = float(self.step_size_p)
if self.step_size_p <= 0:
raise ValueError('step_size_p must be positive')
self.step_size_d = float(self.step_size_d)
if self.step_size_d <= 0:
raise ValueError('step_size_d must be positive')
if self.backtrack is not None:
self.backtrack = float(self.backtrack)
if self.backtrack <= 0 or self.backtrack >= 1:
raise ValueError('backtrack must be None or between 0 and 1')
if self.expand is not None:
self.expand = float(self.expand)
if self.expand <= 1:
raise ValueError('expand must be None or greater than 1')
# check for Objective compatibility
for pname in ('F', 'proxF', 'G', 'proxGconj'): # , 'Fconj', 'Gconj'):
p = getattr(self.objective, pname)
if p is None or not callable(p):
errstr = 'self.objective.{0} must be set to a callable'
raise ValueError(errstr.format(pname))
return super(PDHG, self).validate_params()
def get_params(self, deep=True):
"""."""
params = super(PDHG, self).get_params(deep=deep)
        params.update(
            step_size_p=self.step_size_p, step_size_d=self.step_size_d,
            backtrack=self.backtrack, expand=self.expand,
        )
return params
def prepare(self, state, A=None, Astar=None, b=None):
"""."""
return super(PDHG, self).prepare(state, A=A, Astar=Astar, b=b)
def minimize(self, state, A=None, Astar=None, b=None):
"""."""
return super(PDHG, self).minimize(state, A=A, Astar=Astar, b=b)
def iterate(self, state, A=None, Astar=None, b=None):
"""."""
# validate parameters and arguments
kwargs = self.prepare(state, A=A, Astar=Astar, b=b)
A, Astar, b = (kwargs['A'], kwargs['Astar'], kwargs['b'])
# get initial iterate value
x0 = state['x']
try:
y0 = state['y']
except KeyError:
y0 = np.zeros_like(A(x0))
# set absolute tolerance threshold based on taking the tolerance norm
# of a residual vector with all entries equal to abs_tol
abs_tol_p_thresh = self.abs_tol * self.tol_norm(np.ones_like(x0))
abs_tol_d_thresh = self.abs_tol * self.tol_norm(np.ones_like(y0))
        # extrapolation parameters for the primal and dual updates
        theta_p = 1
        theta_d = 0
        # backtracking is force-disabled here: the line search below requires
        # objective.Fconj and objective.Gconj, which validate_params does not
        # currently require to be callable
        self.backtrack = None
# initialize state
Axmb0 = A(x0) - b
z0 = Axmb0
Asy0 = Astar(y0)
self.objective.state_ = dict(
x=x0, Axmb=Axmb0, z=z0, y=y0, y_old=y0, Asy=Asy0, Asy_old=Asy0,
h=Axmb0-z0,
step_size_p=self.step_size_p, step_size_d=self.step_size_d,
_iter=0, _backtracks=0,
_resid_p=np.full_like(x0, np.inf), _resid_p_nrm=np.inf,
_resid_d=np.full_like(y0, np.inf), _resid_d_nrm=np.inf,
_resid_p_thresh=abs_tol_p_thresh, _resid_d_thresh=abs_tol_d_thresh,
)
# update with passed-in state
self.objective.state_.update(state)
yield self.objective.state_
s = self.objective.state_
for s['_iter'] in range(1, self.max_iter + 1):
# inner loop for backtracking line search
while True:
# primal update
# acceleration
ybar = (1 + theta_d) * s['y'] - theta_d * s['y_old']
Asybar = (1 + theta_d) * s['Asy'] - theta_d * s['Asy_old']
# gradient descent step wrt Lagrange multiplier term
xhat = s['x'] - s['step_size_p'] * Asybar
# prox step
x = self.objective.proxF(xhat, s['step_size_p'])
Axmb = A(x) - b
if self.backtrack is None:
# no backtracking specified
break
else:
plhs = (
self.objective.G(Axmb) + self.objective.Gconj(ybar)
- np.vdot(ybar, Axmb).real
)
prhs = l2normsqhalf(x - s['x']) / s['step_size_p']
# test Lipschitz bound, don't need to backtrack if it holds
if plhs <= prhs:
break
else:
# backtrack
s['step_size_p'] = s['step_size_p'] * self.backtrack
s['_backtracks'] += 1
# inner loop for backtracking line search
while True:
# dual update
# acceleration
xbar = (1 + theta_p) * x - theta_p * s['x']
Axmbbar = (1 + theta_p) * Axmb - theta_p * s['Axmb']
# gradient descent step wrt Lagrange multiplier term
yhat = s['y'] - s['step_size_d'] * Axmbbar
# prox step
y = self.objective.proxGconj(yhat, s['step_size_d'])
Asy = Astar(y)
if self.backtrack is None:
# no backtracking specified
break
else:
dlhs = (
self.objective.Fconj(-Asy) + self.objective.F(xbar)
- np.vdot(xbar, -Asy).real
)
drhs = l2normsqhalf(y - s['y']) / s['step_size_d']
# test Lipschitz bound, don't need to backtrack if it holds
if dlhs <= drhs:
break
else:
# backtrack
s['step_size_d'] = s['step_size_d'] * self.backtrack
s['_backtracks'] += 1
# calculate residuals
resid_p = (
(s['x'] - x) / s['step_size_p'] - (s['Asy'] - Asy)
- theta_d * (s['Asy'] - s['Asy_old'])
)
resid_d = (
(s['y'] - y) / s['step_size_d'] - theta_p * (s['Axmb'] - Axmb)
)
# norms for convergence check
resid_p_nrm = self.tol_norm(resid_p)
resid_d_nrm = self.tol_norm(resid_d)
x_nrm = self.tol_norm(x)
Axmb_nrm = self.tol_norm(Axmb)
y_nrm = self.tol_norm(y)
Asy_nrm = self.tol_norm(Asy)
# thresholds for convergence check
resid_p_thresh = (
abs_tol_p_thresh
+ self.rel_tol * min(x_nrm / s['step_size_p'], Asy_nrm)
)
resid_d_thresh = (
abs_tol_d_thresh
+ self.rel_tol * min(y_nrm / s['step_size_d'], Axmb_nrm)
)
# update state variables
s['x'] = x
s['Axmb'] = Axmb
s['y_old'] = s['y']
s['y'] = y
s['Asy_old'] = s['Asy']
s['Asy'] = Asy
s['z'] = Axmb
s['h'] = resid_p
# update informational state variables
s['_resid_p'] = resid_p
s['_resid_d'] = resid_d
s['_resid_p_nrm'] = resid_p_nrm
s['_resid_d_nrm'] = resid_d_nrm
s['_resid_p_thresh'] = resid_p_thresh
s['_resid_d_thresh'] = resid_d_thresh
# yield state at this iteration step
yield s
# check for convergence
# can't calculate dual function value, so best stopping criterion
# is to see if primal and dual feasibility residuals are small
resid_p_ratio = resid_p_nrm / resid_p_thresh
resid_d_ratio = resid_d_nrm / resid_d_thresh
if resid_p_ratio < 1 and resid_d_ratio < 1:
break
# expand stepsize
if self.expand is not None and self.backtrack is not None:
s['step_size_p'] = s['step_size_p'] * self.expand
s['step_size_d'] = s['step_size_d'] * self.expand
# iterations have converged, store the resulting iterate
self.objective.x_ = s['x']
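

# The following is a minimal, self-contained sketch of the basic PDHG
# (Chambolle-Pock) iteration applied to a small lasso problem,
#     minimize  lam*||x||_1 + 0.5*||A.dot(x) - b||**2,
# i.e. F(x) = lam*||x||_1 and G(z) = 0.5*||z||**2 evaluated at z = A(x) - b in
# the split objective F(x) + G(A(x) - b) described above. It uses the textbook
# fixed-step form with constant step sizes satisfying
# step_p*step_d*opnorm(A)**2 < 1; sign conventions and the adaptive features of
# the PDHG class above may differ in detail. All names and data here are
# illustrative only, and this helper is not used by the class.
def _pdhg_lasso_sketch(num_iters=500):
    """Run a fixed-step PDHG iteration on a toy lasso problem and return x."""
    import numpy as np
    rng = np.random.RandomState(0)
    A = rng.randn(20, 50)
    x_true = np.zeros(50)
    x_true[:3] = [1.0, -2.0, 0.5]
    b = A.dot(x_true)
    lam = 0.1
    # constant step sizes satisfying step_p*step_d*opnorm(A)**2 < 1
    opnorm_A = np.linalg.norm(A, 2)
    step_p = 0.9 / opnorm_A
    step_d = 0.9 / opnorm_A
    x = np.zeros(50)
    x_old = x.copy()
    y = np.zeros(20)
    for _ in range(num_iters):
        # dual ascent step at the extrapolated primal point, then prox of G*
        xbar = 2 * x - x_old
        v = y + step_d * (A.dot(xbar) - b)
        # prox of step_d*Gconj for G(z) = 0.5*||z||**2 (Gconj(y) = 0.5*||y||**2)
        y = v / (1 + step_d)
        # primal descent step, then prox of F (soft thresholding for l1 norm)
        x_old = x
        u = x - step_p * A.T.dot(y)
        x = np.sign(u) * np.maximum(np.abs(u) - step_p * lam, 0)
    return x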
|
|
from collections import defaultdict
from .._compat import PY2, with_metaclass, iteritems, to_unicode, to_bytes, \
string_types
from .._gae import gae
from ..helpers._internals import Dispatcher
from ..helpers.regex import REGEX_TYPE
representers = Dispatcher("representer")
class for_type(object):
def __init__(self, field_type, encode=False, adapt=True):
self.field_type = field_type
self.encode = encode
self.adapt = adapt
def __call__(self, f):
self.f = f
return self
class before_type(object):
def __init__(self, field_type):
self.field_type = field_type
def __call__(self, f):
self.f = f
return self
class for_instance(object):
def __init__(self, inst_type, repr_type=False):
self.inst_type = inst_type
self.repr_type = repr_type
def __call__(self, f):
self.f = f
return self
class pre(object):
_inst_count_ = 0
def __init__(self, is_breaking=None):
self.breaking = is_breaking
self._inst_count_ = pre._inst_count_
pre._inst_count_ += 1
def __call__(self, f):
self.f = f
return self
class MetaRepresenter(type):
def __new__(cls, name, bases, attrs):
new_class = type.__new__(cls, name, bases, attrs)
if bases == (object,):
return new_class
#: collect declared attributes
trepresenters = {}
irepresenters = {}
tbefore = {}
pres = {}
for key, value in list(attrs.items()):
if isinstance(value, for_type):
trepresenters[key] = value
elif isinstance(value, before_type):
tbefore[key] = value
elif isinstance(value, for_instance):
irepresenters[key] = value
elif isinstance(value, pre):
pres[key] = value
#: get super declared attributes
declared_trepresenters = {}
declared_irepresenters = {}
declared_tbefore = {}
declared_pres = {}
for base in reversed(new_class.__mro__[1:]):
if hasattr(base, '_declared_trepresenters_'):
declared_trepresenters.update(base._declared_trepresenters_)
if hasattr(base, '_declared_irepresenters_'):
declared_irepresenters.update(base._declared_irepresenters_)
if hasattr(base, '_declared_tbefore_'):
declared_tbefore.update(base._declared_tbefore_)
if hasattr(base, '_declared_pres_'):
declared_pres.update(base._declared_pres_)
#: set trepresenters
declared_trepresenters.update(trepresenters)
declared_irepresenters.update(irepresenters)
declared_tbefore.update(tbefore)
declared_pres.update(pres)
new_class._declared_trepresenters_ = declared_trepresenters
new_class._declared_irepresenters_ = declared_irepresenters
new_class._declared_tbefore_ = declared_tbefore
new_class._declared_pres_ = declared_pres
return new_class
class TReprMethodWrapper(object):
def __init__(self, representer, obj, extra=None):
self.representer = representer
self.obj = obj
if extra:
self.extra = extra
self.call = self._call_with_extras
else:
self.call = self._call
if self.obj.encode and PY2:
self.inner_call = self._inner_call_with_encode
else:
self.inner_call = self._inner_call
if self.obj.adapt:
self.adapt = self._adapt
else:
self.adapt = self._no_adapt
def _adapt(self, value):
return self.representer.adapt(value)
def _no_adapt(self, value):
return value
def _inner_call(self, value, **kwargs):
return self.obj.f(self.representer, value, **kwargs)
def _inner_call_with_encode(self, value, **kwargs):
if isinstance(value, unicode):
value = value.encode(self.representer.adapter.db_codec)
return self.obj.f(self.representer, value, **kwargs)
def _call_with_extras(self, value, field_type):
extras = self.extra(self.representer, field_type)
return self.inner_call(value, **extras)
def _call(self, value, field_type):
return self.inner_call(value)
def __call__(self, value, field_type):
return self.adapt(self.call(value, field_type))
class IReprMethodWrapper(object):
def __init__(self, representer, obj):
self.representer = representer
self.obj = obj
def __call__(self, value, field_type):
rv = self.obj.f(self.representer, value, field_type)
return self.obj.repr_type, rv
class PreMethodWrapper(object):
def __init__(self, representer, obj):
self.representer = representer
self.obj = obj
if self.obj.breaking is None:
self.call = self._call_autobreak
elif self.obj.breaking == True:
self.call = self._call_break
else:
self.call = self._call_nobreak
def _call_autobreak(self, value, field_type):
rv = self.obj.f(self.representer, value, field_type)
if rv is not None:
return True, rv
return False, value
def _call_break(self, value, field_type):
return self.obj.f(
self.representer, value, field_type)
def _call_nobreak(self, value, field_type):
return False, self.obj.f(self.representer, value, field_type)
def __call__(self, value, field_type):
return self.call(value, field_type)
class Representer(with_metaclass(MetaRepresenter)):
def __init__(self, adapter):
self.adapter = adapter
self.dialect = adapter.dialect
self._tbefore_registry_ = {}
for name, obj in iteritems(self._declared_tbefore_):
self._tbefore_registry_[obj.field_type] = obj.f
self.registered_t = defaultdict(lambda self=self: self._default)
for name, obj in iteritems(self._declared_trepresenters_):
if obj.field_type in self._tbefore_registry_:
self.registered_t[obj.field_type] = TReprMethodWrapper(
self, obj, self._tbefore_registry_[obj.field_type]
)
else:
self.registered_t[obj.field_type] = TReprMethodWrapper(
self, obj
)
self.registered_i = {}
for name, obj in iteritems(self._declared_irepresenters_):
self.registered_i[obj.inst_type] = IReprMethodWrapper(self, obj)
self._pre_registry_ = []
pres = []
for name, obj in iteritems(self._declared_pres_):
pres.append(obj)
pres.sort(key=lambda x: x._inst_count_)
for pre in pres:
self._pre_registry_.append(PreMethodWrapper(self, pre))
def _default(self, value, field_type):
return self.adapt(value)
def _default_instance(self, value, field_type):
return True, value
def get_representer_for_instance(self, value):
for inst, representer in iteritems(self.registered_i):
if isinstance(value, inst):
return representer
return self._default_instance
def get_representer_for_type(self, field_type):
key = REGEX_TYPE.match(field_type).group(0)
return self.registered_t[key]
def adapt(self, value):
if PY2:
if not isinstance(value, string_types):
value = str(value)
value = to_bytes(value)
try:
value.decode(self.adapter.db_codec)
except:
value = value.decode('latin1').encode(self.adapter.db_codec)
else:
value = to_unicode(value)
return self.adapter.adapt(value)
def exceptions(self, value, field_type):
return None
def represent(self, value, field_type):
pre_end = False
for pre in self._pre_registry_:
pre_end, value = pre(value, field_type)
if pre_end:
break
if pre_end:
return value
repr_type, rv = self.get_representer_for_instance(value)(
value, field_type)
if repr_type:
rv = self.get_representer_for_type(field_type)(rv, field_type)
return rv
from .base import BaseRepresenter, SQLRepresenter, NoSQLRepresenter
from .sqlite import SQLiteRepresenter, SpatialiteRepresenter
from .postgre import PostgreRepresenter
from .mysql import MySQLRepresenter
from .mssql import MSSQLRepresenter
from .mongo import MongoRepresenter
from .db2 import DB2Representer
from .informix import InformixRepresenter
from .oracle import OracleRepresenter
from .couchdb import CouchDBRepresenter
if gae is not None:
from .google import GoogleDatastoreRepresenter
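

# The decorator-like classes above (`for_type`, `before_type`, `for_instance`,
# `pre`) together with MetaRepresenter implement a declarative registration
# pattern: marked methods of a Representer subclass are collected -- including
# those declared on base classes -- into per-class registries that
# `Representer.represent` dispatches on. The function below is a minimal,
# self-contained sketch of that collection pattern using hypothetical names;
# it only illustrates the idea and does not touch the adapter machinery above.
def _registration_pattern_sketch():
    """Demonstrate metaclass-collected method registration on a toy class."""
    # marker that wraps a function and records the key it handles
    class handles(object):
        def __init__(self, key):
            self.key = key
        def __call__(self, f):
            self.f = f
            return self

    # metaclass that collects marked methods into a per-class registry,
    # inheriting registrations from base classes (as MetaRepresenter does)
    class Meta(type):
        def __new__(cls, name, bases, attrs):
            new_class = type.__new__(cls, name, bases, attrs)
            registry = {}
            for base in reversed(new_class.__mro__[1:]):
                registry.update(getattr(base, '_registry_', {}))
            for key, value in list(attrs.items()):
                if isinstance(value, handles):
                    registry[value.key] = value.f
            new_class._registry_ = registry
            return new_class

    # create a base class with the metaclass in a way that works on PY2 and PY3
    Base = Meta('Base', (object,), {})

    class Renderer(Base):
        @handles('integer')
        def _int(self, value):
            return str(value)

        @handles('boolean')
        def _bool(self, value):
            return 'T' if value else 'F'

        def render(self, value, key):
            return self._registry_[key](self, value)

    r = Renderer()
    return r.render(7, 'integer'), r.render(True, 'boolean')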
|
|
#!/usr/bin/env python2.7
import h5py, os, sys
from fast5tools.f5class import *
from fast5tools.f5ops import *
from fast5tools.helperops import process_outdir, process_filesused
from fast5tools.plotops import qualhist
import argparse
from glob import glob
from collections import defaultdict
import numpy as np
import matplotlib
## may need following line for remote jobs (e.g. submitting batch scripts)
## matplotlib.use('Agg') # Must be before importing matplotlib.pyplot or pylab!
import matplotlib.pyplot as plt
from random import shuffle, seed
#################################################
## Argument Parser
#################################################
parser = argparse.ArgumentParser(description = """
Given path(s) to fast5 file(s) and/or directories of fast5s,
give histogram of base qualities encountered.
John Urban (2015, 2016, 2017, 2018)
""", formatter_class = argparse.RawTextHelpFormatter)
parser.add_argument('fast5', metavar='fast5', nargs='+',
type= str,
help='''Paths to as many fast5 files and/or directories filled with fast5 files as you want.
Assumes all fast5 files have '.fast5' extension.
If inside dir of dirs with .fast5 files, then can just do "*" to get all files from all dirs.''')
parser.add_argument('-Q', '--mean-quality-scores', dest='mean_quality_scores', default=False, action='store_true',
help='''Instead of extracting all base-qualities from each read, only extract its mean quality score (Q).''')
parser.add_argument('-B', '--bin-range', type=str, dest='bin_range', default=None,
help='''To be consistent between plots, the same bins should be populated. Give comma-sep trio of integers describing minRange, maxRange, step_size.
If all base qualities, default: 0,32,1
If only mean qualities, default: 0,16,1''')
parser.add_argument('-D', '--density', action='store_true', default=False,
help='''Plot density instead of frequency.''')
parser.add_argument('-C', '--cumulative', action='store_true', default=False,
help='''Plot cumulative distribution instead of regular.''')
parser.add_argument('-N', '--no-plot', dest='no_plot', action='store_true', default=False,
                    help='''Instead of plotting, just print to stdout as 2 columns of text: qual_score, frequency/density/etc.
Recommended for use with --filename, with the --filename ending in .txt rather than an image extension.''')
##parser.add_argument('--bin-width', dest='bin_width', default=1000, type=int, help='''The width of bins (default: 1000 bp).''')
##parser.add_argument('-z', '--zscores', default=False, action='store_true', help='''For each read, its quals are converted to z-scores such that each position's score is relative to the distribution of qual scores on that read.
##This prevents issues where there appears to be a drop off in quality as reads get longer that arises simply b/c there are some long reads with low quality across the entire read.
##In that scenario, using Z-scores shows that the quality does not drop as the read gets longer.
##Can also filter reads for mean quality score before plotting to prevent this effect.''')
##
##parser.add_argument('-Z', '--robust-zscores', dest='robust_zscores', default=False, action='store_true', help='''Same as --zscores, only Median and MAD used here.''')
parser.add_argument('-n', '--nfiles', type=int, default=100,
help = '''Often when this command is run, looking at 100 files will suffice.
Therefore, this defaults to 100, and looks at the first 100 files in the FileList. Aim this script at a specific file for that file's contents.
Adjust this number to get info from the first N files.
Use --random to select N at random from the list.
Set this to a super high number to use all fast5 files -- e.g. 1000000000000''')
parser.add_argument('-R', '--random', action='store_true', default=False,
help = '''Randomize what files are looked at.''')
parser.add_argument('-S', '--randomseed', type=int, default=False,
help = '''Randomize what files are looked at, but use given seed for reproducibility.''')
parser.add_argument('-o', '--outdir', type=str, default="./",
                    help = '''Output directory for the histogram (and filesused) output. Default: current working directory.''')
parser.add_argument('--filename', type=str, default=None, help='''This will use the extension given to determine outputfile type.
Examples: qualhist.jpg, qualhist.png.
Default: None
Default will have a window pop up with the plot instead. The file can be saved from there too.
If you choose this option, the filesused will be reported to stderr.
If a filename is given, filesused will be reported in a similarly named file ending with .filesused.fofn''')
parser.add_argument('--filesused', type=str, default='qual_v_pos', help='''
''')
parser.add_argument('-r', '--readtype', default="molecule",
type= str,
help='''Choose type of fasta to get.
Choices: 'template', 'complement', '2d', 'molecule', 'all', 'MoleQual'.
Default: molecule.
There is no need to write full word for options - can do: t, c, 2, m, a, M.
Molecule returns single fasta for each fast5 by following rules:
if 2d present, return 2d.
elif complement present with no 2d, return longer of template or complement.
elif only template present, return template.
'MoleQual' is similar to molecule.
It differs only in choosing between template and complement when a 2D is not present.
Instead of choosing the longer one, it chooses the one with a higher quality mean quality score.''')
parser.add_argument('--minlen', type=int, default=0, help='''Only report reads >= minlen. Default: 0 bp.''')
parser.add_argument('--maxlen', type=int, default=int(3e9), help='''Only report reads <= maxlen. Default: 3 billion bp.''')
parser.add_argument('--minq', type=float, default=0, help='''Only report reads with mean quality scores >= Q. Default: 0.''')
parser.add_argument('--maxq', type=float, default=int(10e3), help='''Only report reads with mean quality scores <= Q.
Default: 10000 (this is orders of magnitude higher than normal max which are always < 20)''')
parser.add_argument('--notarlite', action='store_true', default=False, help=''' The default method (called tarlite) extracts 1 file from a given tarchive at a time, processes, and deletes it.
This option says to turn tarlite off, resulting in extracting the entire tarchive before proceeding (and finally deleting it).
It is possible that --notarlite is faster, but at the expense of exceeding file number limits or disk storage quotas.
Nonetheless, the difference in speed is a lot smaller than the difference in space needed.
For example, not using tarlite will require >2*tarchive amount of disk space (i.e. the tar.gz and its extracted contents).
The tarlite method only requires the disk space already taken by the tarchive and enough for 1 additional file at a time.
A corollary is that tarlite just needs to be allowed to form 1 (or a few) files compared to what could be thousands to millions.
''')
parser.add_argument('--tarlite', action='store_true', default=False, help='''This legacy option is outdated.
However, it is kept here to avoid breaking pipelines that make use of it.
The tarlite approach is now default. Specifying this will not change that default behavior.
It will just prevent pipelines from breaking.
However, not specifying this will still also result in the tarlite approach.
Use --notarlite to turn it off.''')
args = parser.parse_args()
#################################################
## This can be done from f5stats
#################################################
## qual v readlen scatter
##if read_lengths:
## logger.info("Constructing quality vs. readlen scatter plot...")
## plt.scatter(read_lengths, mean_scores)
## plt.xlabel("Read Length (bp)")
## plt.ylabel("Mean Quality score")
## plot_out(args, "meanqual-Vs-readlen")
## plt.close()
##else: logger.warn("No reads that meet criteria: cannot construct quality vs. readlen scatter plot...")
## if args.qualVsLen:
## x = list()
## y = list()
## elif args.qualVsLen:
## qscores = []
## for q in fq.qual:
## qscores.append(ord(q)-33)
## qmean = np.mean(qscores)
## qLen = len(qscores)
## x.append(qLen)
## y.append(qmean)
##
## else:
## for q in fq.qual:
## ctr += 1
## qualpos[1+int(ctr//bin_width)].append(ord(q)-33)
##
#################################################
##
#################################################
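## Helper sketch (not used above): the main block below gets per-read
## qualities either via f5.get_mean_qscore() or by re-parsing the integer
## quality string from the fast5 (see the TODO in the loop). The underlying
## conversion -- FASTQ quality characters are Phred scores offset by 33 -- is
## sketched here for reference; the function name is illustrative and this
## helper is not called by the script.
def _phred_scores_from_fastq_quals(qual_string, offset=33):
    '''Return the list of integer Phred scores encoded in a FASTQ quality string.'''
    return [ord(c) - offset for c in qual_string]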
if __name__ == "__main__":
# Process Args
args.outdir = process_outdir(args.outdir)
outfile = args.outdir + args.filename if (args.filename is not None) else None
if args.bin_range is None:
args.bin_range = '0,16,1' if args.mean_quality_scores else '0,32,1'
minrange, maxrange, step = (int(e) for e in args.bin_range.split(','))
# Initialize
quals = list()
filesused = ''
## Iterate over fast5s
for f5 in Fast5List(args.fast5, keep_tar_footprint_small=(not args.notarlite), filemode='r', downsample=args.nfiles, random=args.random, randomseed=args.randomseed):
if meets_all_criteria(f5, args.readtype, args.minlen, args.maxlen, args.minq, args.maxq):
filesused += f5.abspath + '\n'
readtype = define_read_type(f5, args.readtype)
if args.mean_quality_scores:
quals.append( f5.get_mean_qscore(readtype) )
else:
## TODO: make a direct way in f5class to get intquals (even if this terrible way)
quals += [int(e) for e in (' '.join(f5.get_quals_as_int(readtype).split('\n')[1:])).split()]
## Plot
qualhist(quals, filename=outfile, minrange=minrange, maxrange=maxrange, step=step, density=args.density, cumulative=args.cumulative, text_only=args.no_plot)
## Files used
process_filesused(trigger=args.filename, filesused=filesused, outdir=args.outdir)
|
|
# -*- coding: iso-8859-1 -*-
# Copyright (C) 2004-2005 Tristan Seligmann and Jonathan Jacobs
# Copyright (C) 2012-2014 Bastian Kleineidam
from re import compile, escape, IGNORECASE, sub
from os.path import splitext
from ..scraper import _BasicScraper, _ParserScraper
from ..helpers import indirectStarter, bounceStarter
from ..util import tagre, getPageContent
class SabrinaOnline(_BasicScraper):
url = 'http://sabrina-online.com/'
imageSearch = compile(tagre("a", "href", r'(strips/[^"]*)'))
prevSearch = compile(tagre("a", "href", r"(\d\d\d\d-\d\d.html)") +
tagre("img", "src", "b_back.gif"))
help = 'Index format: n (unpadded)'
adult = True
multipleImagesPerStrip = True
@classmethod
def starter(cls):
"""Pick last one in a list of archive pages."""
archive = cls.url + 'archive.html'
data = getPageContent(archive, cls.session)
search = compile(tagre("a", "href", r"(\d\d\d\d-\d\d.html)"))
archivepages = search.findall(data)
return cls.url + archivepages[-1]
class SafelyEndangered(_BasicScraper):
url = 'http://www.safelyendangered.com/'
stripUrl = url + 'comic/%s'
firstStripUrl = stripUrl % 'ignored'
imageSearch = compile(tagre("img", "src", r'(http://www\.safelyendangered\.com/wp-content/uploads/\d+/\d+/[^"]+\.[a-z]+).*'))
prevSearch = compile(tagre("a", "href", r'([^"]+)', after="navi navi-prev"))
textSearch = compile(tagre("img", "title", r'([^"]+)', before=r'http://www\.safelyendangered\.com/wp-content/uploads'))
help = 'Index format: yyyy/mm/stripname'
class SamAndFuzzy(_BasicScraper):
url = 'http://www.samandfuzzy.com/'
stripUrl = 'http://samandfuzzy.com/%s'
firstStripUrl = stripUrl % '1'
imageSearch = compile(r'(/comics/.+?)" alt')
prevSearch = compile(r'"><a href="(.+?)"><img src="imgint/nav_prev.gif"')
help = 'Index format: nnnn'
class SandraOnTheRocks(_BasicScraper):
url = 'http://www.sandraontherocks.com/'
stripUrl = url + 'strips-sotr/%s'
firstStripUrl = stripUrl % 'start_by_running'
imageSearch = compile(tagre("img", "src", r'([^"]*/comics/[^"]+)'))
prevSearch = compile(tagre("a", "href", r'([^"]*/strips-sotr/[^"]+)', before="cn[id]prev"))
help = 'Index format: name'
class ScandinaviaAndTheWorld(_ParserScraper):
url = 'http://satwcomic.com/'
stripUrl = url + '%s'
firstStripUrl = stripUrl % 'sweden-denmark-and-norway'
starter = indirectStarter(url, '//a[text()="View latest comic"]')
imageSearch = '//img[@itemprop="image"]'
prevSearch = '//a[@accesskey="p"]'
textSearch = '//span[@itemprop="articleBody"]'
help = 'Index format: stripname'
class ScaryGoRound(_BasicScraper):
url = 'http://www.scarygoround.com/'
stripUrl = url + '?date=%s'
firstStripUrl = stripUrl % '20090918'
imageSearch = compile(tagre("img", "src", r'(strips/\d+\.png)'))
prevSearch = compile(tagre("a", "href", r'(\?date=\d+)') + "Previous")
help = 'Index format: n (unpadded)'
class ScenesFromAMultiverse(_BasicScraper):
url = 'http://amultiverse.com/'
rurl = escape(url)
stripUrl = url + '%s/'
firstStripUrl = stripUrl % '2010/06/14/parenthood'
imageSearch = (
compile(tagre("div", "id", "comic") + r"\s*" +
tagre("img", "src", r'(.*amultiverse.com/wp-content/uploads/\d+/\d+/[^"]+)')),
compile(tagre("div", "id", "comic") + r"\s*" + tagre("a", "href", r'[^"]*') +
tagre("img", "src", r'(.*amultiverse.com/wp-content/uploads/\d+/\d+/[^"]+)')),
)
prevSearch = compile(tagre("a", "href", r'(%scomic/\d+\d+/\d+/\d+/[^"]+)' % rurl, after="prev"))
help = 'Index format: yyyy/mm/dd/stripname'
class SchlockMercenary(_BasicScraper):
url = 'http://www.schlockmercenary.com/'
stripUrl = url + '%s'
firstStripUrl = stripUrl % '2000-06-12'
imageSearch = compile(tagre("img", "src", r'(http://static\.schlockmercenary\.com/comics/[^"]+)'))
multipleImagesPerStrip = True
prevSearch = compile(tagre("a", "href", r'(/\d+-\d+-\d+)', quote="'", after="nav-previous"))
help = 'Index format: yyyy-mm-dd'
class SchoolBites(_BasicScraper):
url = 'http://schoolbites.net/'
stripUrl = url + 'd/%s.html'
imageSearch = compile(tagre("img", "src", r'(http://cdn\.schoolbites\.net/comics/[^"]+)'))
prevSearch = compile(tagre("a", "href", r'(http://schoolbites\.net/d/\d+\.html)', after="prev"))
help = 'Index format: yyyymmdd'
class Schuelert(_BasicScraper):
url = 'http://www.schuelert.de/'
rurl = escape(url)
stripUrl = url + 'index.php?paged=%s'
firstStripUrl = stripUrl % '5'
imageSearch = compile(tagre("img", "src", r"(%swp-content/[^']+)" % rurl, quote="'"))
prevSearch = compile(tagre("a", "href", r'(%sindex\.php\?paged=\d+)' % rurl) + "«")
multipleImagesPerStrip = True
help = 'Index format: none'
lang = 'de'
class Science(_BasicScraper):
url = 'http://sci-ence.org/'
rurl = escape(url)
stripUrl = url + '%s/'
firstStripUrl = stripUrl % 'periodic-table-element-ass'
prevSearch = compile(tagre("a", "href", r'(%s[^"]+/)' % rurl, after="prev"))
imageSearch = compile(tagre("img", "src", r'(%scomics/\d+-\d+-\d+[^"]+)' % rurl))
help = 'Index format: stripname'
class SequentialArt(_BasicScraper):
url = 'http://www.collectedcurios.com/sequentialart.php'
stripUrl = url + '?s=%s'
firstStripUrl = stripUrl % '1'
imageSearch = compile(tagre("img", "src", r'([^"]+)', before="strip"))
prevSearch = compile(tagre("a", "href", r'(/sequentialart\.php\?s=\d+)')
+ tagre("img", "src", "Nav_BackOne\.gif"))
help = 'Index format: name'
class SexyLosers(_BasicScraper):
adult = True
url = 'http://www.sexylosers.com/'
stripUrl = url + '%s.html'
imageSearch = compile(r'<img src\s*=\s*"\s*(comics/[\w\.]+?)"', IGNORECASE)
prevSearch = compile(r'<a href="(/\d{3}\.\w+?)"><font color = FFAAAA><<', IGNORECASE)
help = 'Index format: nnn'
starter = indirectStarter(url,
compile(r'SEXY LOSERS <A HREF="(.+?)">Latest SL Comic \(#\d+\)</A>', IGNORECASE))
@classmethod
def namer(cls, imageUrl, pageUrl):
index = pageUrl.split('/')[-1].split('.')[0]
title = imageUrl.split('/')[-1].split('.')[0]
return index + '-' + title
# XXX site has been hacked
class _ShadowGirls(_BasicScraper):
url = 'http://www.shadowgirlscomic.com/'
stripUrl = url + 'comics/%s'
firstStripUrl = stripUrl % 'book-1/chapter-1-broken-dreams/welcome'
imageSearch = compile(tagre("img", "src", r'([^"]*/comics/[^"]*)'))
prevSearch = compile(tagre("a", "href", r'([^"]*)', after='navi-prev'))
help = 'Index format: custom'
starter = indirectStarter(url, compile(tagre("a", "href", r'([^"]*/comics/[^"]+)')))
class Sheldon(_BasicScraper):
url = 'http://www.sheldoncomics.com/'
rurl = escape(url)
stripUrl = url + 'archive/%s.html'
firstStripUrl = stripUrl % '011130'
imageSearch = compile(tagre("img", "src", r'(http://cdn\.sheldoncomics\.com/strips/[^"]+)'))
prevSearch = compile(tagre("a", "href", r'(%sarchive/\d+\.html)' % rurl, after="sidenav-prev"))
help = 'Index format: yymmdd'
class ShermansLagoon(_BasicScraper):
url = 'http://shermanslagoon.com/'
stripUrl = url + 'comics/%s'
firstStripUrl = stripUrl % '/december-29-2003/'
imageSearch = compile(tagre("img", "src", r'(http://safr\.kingfeatures\.com/idn/(etv|cnfeed)/zone/(xml|js)/content\.php\?file=.+?)'))
prevSearch = compile(r'id="previouscomic" class="button white"><a href="(%scomics/[a-z0-9-]+/)"' % url)
help = 'Index format: monthname-day-year'
@classmethod
def namer(cls, imageUrl, pageUrl):
name = pageUrl.rsplit('/', 3)[2]
if name == "shermanslagoon.com":
import datetime
name = datetime.date.today().strftime("%B-%d-%Y").lower()
# name is monthname-day-year
month, day, year = name.split('-')
return "%s-%s-%s" % (year, month, day)
class Shivae(_BasicScraper):
url = 'http://shivae.net/'
rurl = escape(url)
stripUrl = url + 'blog/%s/'
firstStripUrl = stripUrl % '2007/09/21/09212007'
imageSearch = compile(tagre("img", "src", r'(%swp-content/blogs\.dir/\d+/files/\d+/\d+/[^"]+)' % rurl))
prevSearch = compile(tagre("a", "href", r'(%sblog/[^"]+)' % rurl, after="navi-prev"))
help = 'Index format: yyyy/mm/dd/stripname'
# XXX disallowed by robots.txt
class _Shortpacked(_BasicScraper):
url = 'http://www.shortpacked.com/'
rurl = escape(url)
stripUrl = url + '%s/'
imageSearch = compile(tagre("img", "src", r'(%scomics/[^"]+)' % rurl))
prevSearch = compile(tagre("a", "href", r'(%s\d+/comic/[^"]+)' % rurl, after="prev"))
help = 'Index format: yyyy/comic/book-nn/mm-name1/name2'
class ShotgunShuffle(_BasicScraper):
url = 'http://shotgunshuffle.com/'
stripUrl = url + 'comic/%s'
firstStripUrl = stripUrl % 'pilot/'
imageSearch = compile(tagre("img", "src", r'(http://shotgunshuffle.com/wp-content/uploads/\d+/\d+/\d+-[^"]+)'))
prevSearch = compile(tagre("a", "href", r'([^"]+)', after="navi navi-prev"))
help = 'Index format: stripname'
class SinFest(_BasicScraper):
name = 'KeenSpot/SinFest'
url = 'http://www.sinfest.net/'
stripUrl = url + 'view.php?date=%s'
imageSearch = compile(tagre("img","src", r'(btphp/comics/.+)', after="alt"))
prevSearch = compile(tagre("a", "href", r'(view\.php\?date=.+)') + '\\s*' + tagre("img", "src", r'\.\./images/prev\.gif'))
help = 'Index format: yyyy-mm-dd'
# XXX disallowed by robots.txt
class _Sketchesnatched(_BasicScraper):
url = 'http://sketchesnatched.blogspot.com/'
stripUrl = url + 'search?updated-max=%s%%2B01:00&max-results=1'
firstStripUrl = stripUrl % '2011-01-27T08:32:00'
imageSearch = compile(tagre("meta", "content", r"(http://\d+\.bp\.blogspot\.com/[^']+)",
after=r'image_url', quote="'"))
prevSearch = compile(tagre("a", "href", r"(http://sketchesnatched\.blogspot\.[a-z]+/search[^']+)",
before=r"blog-pager-older-link", quote="'"))
help = 'Index format: yyyy-mm-ddThh:mm:ss'
class SkinDeep(_BasicScraper):
url = 'http://www.skindeepcomic.com/'
stripUrl = url + 'archive/%s/'
imageSearch = compile(r'<span class="webcomic-object[^>]*><img src="([^"]*)"')
prevSearch = compile(tagre("a", "href", r'([^"]+)', after="previous-webcomic-link"))
help = 'Index format: custom'
class SluggyFreelance(_BasicScraper):
url = 'http://www.sluggy.com/'
stripUrl = url + 'comics/archives/daily/%s'
imageSearch = compile(r'<img src="(/images/comics/.+?)"')
prevSearch = compile(r'<a href="(.+?)"[^>]+?><span class="ui-icon ui-icon-seek-prev">')
multipleImagesPerStrip = True
help = 'Index format: yymmdd'
class SMBC(_ParserScraper):
url = 'http://www.smbc-comics.com/'
rurl = escape(url)
stripUrl = url + '?id=%s'
firstStripUrl = stripUrl % '1'
imageSearch = '//img[@id="comic"]'
prevSearch = '//a[@class="prev"]'
help = 'Index format: nnnn'
@classmethod
def namer(cls, imageUrl, pageUrl):
"""Remove random noise from name."""
return imageUrl.rsplit('-', 1)[-1]
def shouldSkipUrl(self, url, data):
"""Skip promo or missing update pages."""
return url in (
self.stripUrl % '2865',
self.stripUrl % '2653',
self.stripUrl % '2424',
self.stripUrl % '2226',
self.stripUrl % '2069',
self.stripUrl % '1895',
self.stripUrl % '1896',
self.stripUrl % '1589',
)
class SnowFlakes(_BasicScraper):
url = 'http://www.snowflakescomic.com/'
stripUrl = url + '?id=%s&sl=%s'
firstStripUrl = stripUrl % ('103', '1')
endOfLife = True
imageSearch = (
compile(tagre("img", "src", r'(comics/[^"]+)')),
compile(tagre("img", "src", r'(http://www.snowflakescomic.com/comics/[^"]+)')),
)
prevSearch = compile(tagre("a", "href", r'(/\?id=\d+\&sl=\d)', quote="") +
tagre("img", "src", r'images/nav_prior-ON\.gif'))
help = 'Index format: number'
@classmethod
def starter(cls):
return cls.stripUrl % ('530', '5')
def getIndexStripUrl(self, index):
return self.stripUrl % (index, index[0])
@classmethod
def namer(cls, imageUrl, pageUrl):
"""Use strip index number for image name."""
index = int(compile(r'id=(\d+)').search(pageUrl).group(1))
ext = imageUrl.rsplit('.', 1)[1]
return "SnowFlakes-%d.%s" % (index, ext)
def shouldSkipUrl(self, url, data):
"""Skip pages without images."""
return url in (
self.stripUrl % ('279', '2'), # no comic
self.stripUrl % ('278', '2'), # no comic
self.stripUrl % ('277', '2'), # no comic
self.stripUrl % ('276', '2'), # no comic
self.stripUrl % ('275', '2'), # no comic
self.stripUrl % ('214', '2'), # no comic
)
class SnowFlame(_BasicScraper):
url = 'http://www.snowflamecomic.com/'
rurl = escape(url)
stripUrl = url + '?comic=snowflame-%s-%s'
firstStripUrl = stripUrl % ('01', '01')
imageSearch = compile(tagre("img", "src", r'(%swp-content/uploads/\d+/\d+/[^"]+)' % rurl, after="Snow[Ff]lame "))
prevSearch = compile(tagre("span", "class", "mininav-prev") +
tagre("a", "href", r'(%s\?comic=snowflame[^"]+)' % rurl))
starter = bounceStarter(url,
compile(tagre("span", "class", "mininav-next") +
tagre("a", "href", r'(%s\?comic=snowflame[^"]+)' % rurl)))
help = 'Index format: chapter-page'
def getIndexStripUrl(self, index):
return self.stripUrl % tuple(index.split('-'))
@classmethod
def namer(cls, imageUrl, pageUrl):
prefix, filename = imageUrl.rsplit('/', 1)
ro = compile(r'snowflame-([^-]+)-([^-]+)')
mo = ro.search(pageUrl)
chapter = mo.group(1)
page = mo.group(2)
return "%s-%s-%s" % (chapter, page, filename)
class SodiumEyes(_BasicScraper):
url = 'http://sodiumeyes.com/'
rurl = escape(url)
stripUrl = url + '%s/'
firstStripUrl = stripUrl % '2007/11/08/damning-evidence'
imageSearch = compile(tagre("img", "src", r'(%scomic/[^ ]+)' % rurl, quote=""))
prevSearch = compile(tagre("a", "href", r'(%s[^"]+)' % rurl, after="prev"))
help = 'Index format: yyyy/mm/dd/stripname'
class Sorcery101(_BasicScraper):
baseUrl = 'http://www.sorcery101.net/'
url = baseUrl + 'sorcery-101/'
rurl = escape(baseUrl)
stripUrl = url + '%s/'
imageSearch = compile(tagre("img", "src", r'(%swp-content/uploads/\d+/\d+/[^"]+)' % rurl))
prevSearch = compile(tagre("a", "href", r'(%ssorcery-101/[^"]+)' % rurl, after="previous-"))
help = 'Index format: stripname'
class SpaceTrawler(_BasicScraper):
url = 'http://spacetrawler.com/'
rurl = escape(url)
stripUrl = url + '%s/'
firstStripUrl = stripUrl % '2010/01/01/spacetrawler-4'
imageSearch = compile(tagre("img", "src", r'(%scomics/[^"]+)' % rurl))
prevSearch = compile(tagre("a", "href", r'(%s\d+/\d+/\d+/[^"]+)' % rurl, after="navi-prev"))
help = 'Index format: yyyy/mm/dd/stripname'
class Spamusement(_BasicScraper):
url = 'http://spamusement.com/'
rurl = escape(url)
stripUrl = url + 'index.php/comics/view/%s'
imageSearch = compile(r'<img src="(%sgfx/\d+\..+?)"' % rurl, IGNORECASE)
prevSearch = compile(r'<a href="(%sindex.php/comics/view/.+?)">' % rurl, IGNORECASE)
help = 'Index format: n (unpadded)'
starter = indirectStarter(url, prevSearch)
class SpareParts(_BasicScraper):
baseUrl = 'http://www.sparepartscomics.com/'
url = baseUrl + 'comics/?date=20080328'
stripUrl = baseUrl + 'comics/index.php?date=%s'
firstStripUrl = stripUrl % '20031022'
imageSearch = compile(tagre("img", "src", r'(http://www\.sparepartscomics\.com/comics/[^"]+)'))
prevSearch = compile(tagre("a", "href", r'(index\.php\?date=\d+)', quote="'") + "Previous Comic")
help = 'Index format: yyyymmdd'
class Spinnerette(_ParserScraper):
url = 'http://www.spinnyverse.com/'
stripUrl = url + 'comic/%s'
firstStripUrl = stripUrl % '02-09-2010'
imageSearch = '//div[@id="cc-comicbody"]//img'
prevSearch = '//a[@class="prev"]'
help = 'Index format: number'
class StandStillStaySilent(_ParserScraper):
url = 'http://www.sssscomic.com/comic.php'
rurl = escape(url)
stripUrl = url + '?page=%s'
firstStripUrl = stripUrl % '1'
imageSearch = '//img[@class="comicnormal"]'
prevSearch = '//a//div[@id="navprev"]'
help = 'Index Format: number'
# XXX disallowed by robots.txt
class _StationV3(_BasicScraper):
url = 'http://www.stationv3.com/'
rurl = escape(url)
stripUrl = url + 'd/%s.html'
imageSearch = compile(tagre("img", "src", r'(%scomics/[^"]+)' % rurl))
prevSearch = compile(tagre("a", "href", r'(%sd/\d+\.html)' % rurl) +
tagre("img", "src", r'http://www\.stationv3\.com/images/previous\.gif'))
help = 'Index format: yyyymmdd'
class StickyDillyBuns(_BasicScraper):
url = 'http://www.stickydillybuns.com/'
stripUrl = url + 'strips-sdb/%s'
firstStripUrl = stripUrl % 'awesome_leading_man'
imageSearch = compile(tagre("img", "src", r'([^"]*/comics/[^"]+)'))
prevSearch = compile(tagre("a", "href", r'([^"]*/strips-sdb/[^"]+)', before="cn[id]prev"))
help = 'Index format: name'
class Stubble(_BasicScraper):
url = 'http://stubblecomics.com/'
rurl = escape(url)
stripUrl = url + '?p=%s'
firstStripUrl = stripUrl % '4'
imageSearch = compile(tagre("img", "src", r'(%scomics/[^"]+)' % rurl))
prevSearch = compile(tagre("a", "href", r'(%s\?p=\d+)' % rurl, after="navi-prev"))
help = 'Index format: number'
class StuffNoOneToldMe(_BasicScraper):
url = 'http://www.snotm.com/'
stripUrl = url + '%s.html'
firstStripUrl = stripUrl % '2010/05/01'
olderHref = r"(http://www\.snotm\.com/\d+/\d+/[^']+\.html)"
starter = indirectStarter(url,
compile(tagre("a", "href", olderHref, quote="'")))
imageSearch = (
compile(tagre("img", "src", r'(http://i\.imgur\.com/[^"]+)') + r"(?:</a>|<br />)"),
compile(tagre("img", "src", r'(http://\d+\.bp\.blogspot\.com/[^"]+)') + r"(?:(?: )?</a>|<span |<br />)"),
compile(tagre("img", "src", r'(https://lh\d+\.googleusercontent\.com/[^"]+)') + r"</a>"),
)
prevSearch = compile(tagre("a", "href", olderHref, quote="'", before="older-link"))
multipleImagesPerStrip = True
help = 'Index format: yyyy/mm/stripname'
@classmethod
def namer(cls, imageUrl, pageUrl):
"""Use page URL to construct meaningful image name."""
parts, year, month, stripname = pageUrl.rsplit('/', 3)
stripname = stripname.rsplit('.', 1)[0]
parts, imagename = imageUrl.rsplit('/', 1)
return '%s-%s-%s-%s' % (year, month, stripname, imagename)
def shouldSkipUrl(self, url, data):
"""Skip pages without images."""
return url in (
self.stripUrl % '2012/08/self-rant', # no comic
self.stripUrl % '2012/06/if-you-wonder-where-ive-been', # video
self.stripUrl % '2011/10/i-didnt-make-this-nor-have-anything-to', # video
self.stripUrl % '2010/12/first-snotm-fans-in-sao-paulo', # no comic
self.stripUrl % '2010/11/ear-infection', # no comic
)
class StrawberryDeathCake(_BasicScraper):
url = 'http://strawberrydeathcake.com/'
rurl = escape(url)
stripUrl = url + 'archive/%s/'
imageSearch = compile(tagre("img", "src", r'(%swp-content/webcomic/[^"]+)' % rurl))
prevSearch = compile(tagre("a", "href", r'(%sarchive/[^"]+)' % rurl, after="previous"))
help = 'Index format: stripname'
class StrongFemaleProtagonist(_ParserScraper):
url = 'http://strongfemaleprotagonist.com/'
stripUrl = url + '%s/'
css = True
imageSearch = 'article p:first-child img'
prevSearch = 'div.nav-previous > a'
help = 'Index format: issue-?/page-??'
def shouldSkipUrl(self, url, data):
"""Skip hiatus & non-comic pages."""
return url in (
self.stripUrl % 'guest-art/tuesday',
self.stripUrl % 'guest-art/friday',
self.stripUrl % 'guest-art/wednesday',
self.stripUrl % 'issue-5/newspaper',
self.stripUrl % 'issue-5/hiatus-1',
self.stripUrl % 'issue-5/hiatus-2',
)
class SuburbanTribe(_BasicScraper):
url = 'http://www.pixelwhip.com/'
rurl = escape(url)
stripUrl = url + '?p=%s'
imageSearch = compile(tagre("img", "src", r'(%scomics/[^"]+)' % rurl))
prevSearch = compile(tagre("a", "href", r'(%s\?p=\d+)' % rurl, after="prev"))
help = 'Index format: nnnn'
class SomethingPositive(_BasicScraper):
url = 'http://www.somethingpositive.net/'
stripUrl = url + 'sp%s.shtml'
imageSearch = (
compile(tagre("img", "src", r'(sp\d+\.png)')),
compile(tagre("img", "src", r'(twither\.gif)')),
)
prevSearch = compile(tagre("a", "href", r'(sp\d+\.shtml)') +
"(?:" + tagre("img", "src", r'images/previous\.gif') + "|Previous)")
help = 'Index format: mmddyyyy'
class StarCrossdDestiny(_BasicScraper):
baseUrl = 'http://www.starcrossd.net/'
rurl = escape(baseUrl)
url = baseUrl + 'comic.html'
stripUrl = baseUrl + 'archives/%s.html'
firstStripUrl = stripUrl % '00000001'
imageSearch = compile(tagre("img", "src", r'(http://(?:www\.)?starcrossd\.net/(?:ch1|strips|book2)/[^"]+)'))
prevSearch = compile(r'<a href="(%s(?:ch1/)?archives/\d+\.html)"[^>]*"[^"]*"[^>]*>prev' % rurl, IGNORECASE)
help = 'Index format: nnnnnnnn'
@classmethod
def namer(cls, imageUrl, pageUrl):
if imageUrl.find('ch1') == -1:
# At first all images were stored in a strips/ directory but that was changed with the introduction of book2
imageUrl = sub('(?:strips)|(?:images)','book1',imageUrl)
elif not imageUrl.find('strips') == -1:
imageUrl = imageUrl.replace('strips/','')
directory, filename = imageUrl.split('/')[-2:]
filename, extension = splitext(filename)
return directory + '-' + filename
# XXX disallowed by robots.txt
class _StrangeCandy(_BasicScraper):
url = 'http://www.strangecandy.net/'
stripUrl = url + 'd/%s.html'
imageSearch = compile(tagre("img", "src", r'(/comics/\d+\.jpg)'))
prevSearch = compile(tagre("a", "href", r'(/d/\d+\.html)') + tagre("img", "alt", "Previous comic"))
help = 'Index format: yyyyddmm'
class SupernormalStep(_BasicScraper):
url = 'http://supernormalstep.com/'
rurl = escape(url)
stripUrl = url + '?p=%s'
firstStripUrl = stripUrl % '8'
imageSearch = compile(tagre("img", "src", r'(%scomics/[^"]+)' % rurl))
prevSearch = compile(tagre("a", "href", r'(%s\?p=\d+)' % rurl, after="prev"))
help = 'Index format: number'
|
|
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
import json
import urllib.parse
from azure import (
WindowsAzureError,
TABLE_SERVICE_HOST_BASE,
DEFAULT_HTTP_TIMEOUT,
DEV_TABLE_HOST,
_convert_class_to_xml,
_dont_fail_not_exist,
_dont_fail_on_exist,
_get_request_body,
_int_or_none,
_parse_response_for_dict,
_parse_response_for_dict_filter,
_str,
_str_or_none,
_update_request_uri_query_local_storage,
_validate_not_none,
_ETreeXmlToObject,
_ERROR_STORAGE_MISSING_INFO,
)
from azure.http import HTTPRequest
from azure.http.batchclient import _BatchClient
from azure.storage import (
SignedIdentifiers,
StorageServiceProperties,
StorageSASAuthentication,
StorageTableSharedKeyAuthentication,
TableSharedAccessPermissions,
StorageConnectionParameters,
_convert_entity_to_xml,
_convert_etree_element_to_entity,
_convert_etree_element_to_table,
_convert_response_to_entity,
_convert_signed_identifiers_to_xml,
_convert_table_to_xml,
_update_storage_table_header,
X_MS_VERSION,
)
from azure.storage.sharedaccesssignature import (
SharedAccessSignature,
)
from azure.storage.storageclient import _StorageClient
class TableService(_StorageClient):
'''
This is the main class managing Table resources.
'''
def __init__(self, account_name=None, account_key=None, protocol='https',
host_base=TABLE_SERVICE_HOST_BASE, dev_host=DEV_TABLE_HOST,
timeout=DEFAULT_HTTP_TIMEOUT, sas_token=None, connection_string=None):
'''
account_name:
your storage account name, required for all operations.
account_key:
your storage account key, required for all operations.
protocol:
            Optional. Protocol. Defaults to https.
host_base:
Optional. Live host base url. Defaults to Azure url. Override this
for on-premise.
dev_host:
Optional. Dev host url. Defaults to localhost.
timeout:
Optional. Timeout for the http request, in seconds.
sas_token:
Optional. Token to use to authenticate with shared access signature.
connection_string:
Optional. If specified, the first four parameters (account_name,
account_key, protocol, host_base) may be overridden
by values specified in the connection_string. The next three parameters
(dev_host, timeout, sas_token) cannot be specified with a
connection_string. See
http://azure.microsoft.com/en-us/documentation/articles/storage-configure-connection-string/
for the connection string format.
'''
if connection_string is not None:
connection_params = StorageConnectionParameters(connection_string)
account_name = connection_params.account_name
account_key = connection_params.account_key
protocol = connection_params.protocol
host_base = connection_params.host_base_table
super(TableService, self).__init__(
account_name, account_key, protocol, host_base, dev_host, timeout, sas_token)
if self.account_key:
self.authentication = StorageTableSharedKeyAuthentication(
self.account_name,
self.account_key,
)
elif self.sas_token:
self.authentication = StorageSASAuthentication(self.sas_token)
else:
raise WindowsAzureError(_ERROR_STORAGE_MISSING_INFO)
def generate_shared_access_signature(self, table_name,
shared_access_policy=None,
sas_version=X_MS_VERSION):
'''
Generates a shared access signature for the table.
Use the returned signature with the sas_token parameter of TableService.
table_name:
Required. Name of table.
shared_access_policy:
Instance of SharedAccessPolicy class.
sas_version:
x-ms-version for storage service, or None to get a signed query
string compatible with pre 2012-02-12 clients, where the version
is not included in the query string.
'''
_validate_not_none('table_name', table_name)
_validate_not_none('shared_access_policy', shared_access_policy)
_validate_not_none('self.account_name', self.account_name)
_validate_not_none('self.account_key', self.account_key)
sas = SharedAccessSignature(self.account_name, self.account_key)
return sas.generate_signed_query_string(
table_name,
None,
shared_access_policy,
sas_version,
table_name=table_name,
)
def begin_batch(self):
if self._batchclient is None:
self._batchclient = _BatchClient(
service_instance=self,
authentication=self.authentication,
timeout=self._httpclient.timeout)
return self._batchclient.begin_batch()
def commit_batch(self):
try:
ret = self._batchclient.commit_batch()
finally:
self._batchclient = None
return ret
def cancel_batch(self):
self._batchclient = None
def get_table_service_properties(self):
'''
Gets the properties of a storage account's Table service, including
Windows Azure Storage Analytics.
'''
request = HTTPRequest()
request.method = 'GET'
request.host = self._get_host()
request.path = '/?restype=service&comp=properties'
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_table_header(request)
response = self._perform_request(request)
return _ETreeXmlToObject.parse_response(
response, StorageServiceProperties)
def set_table_service_properties(self, storage_service_properties):
'''
Sets the properties of a storage account's Table Service, including
Windows Azure Storage Analytics.
storage_service_properties:
StorageServiceProperties object.
'''
_validate_not_none('storage_service_properties',
storage_service_properties)
request = HTTPRequest()
request.method = 'PUT'
request.host = self._get_host()
request.path = '/?restype=service&comp=properties'
request.body = _get_request_body(
_convert_class_to_xml(storage_service_properties))
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_table_header(request)
response = self._perform_request(request)
return _parse_response_for_dict(response)
def query_tables(self, table_name=None, top=None, next_table_name=None):
'''
Returns a list of tables under the specified account.
table_name:
Optional. The specific table to query.
top:
Optional. Maximum number of tables to return.
next_table_name:
Optional. When top is used, the next table name is stored in
result.x_ms_continuation['NextTableName']
'''
request = HTTPRequest()
request.method = 'GET'
request.host = self._get_host()
if table_name is not None:
uri_part_table_name = "('" + table_name + "')"
else:
uri_part_table_name = ""
request.path = '/Tables' + uri_part_table_name + ''
request.query = [
('$top', _int_or_none(top)),
('NextTableName', _str_or_none(next_table_name))
]
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_table_header(request)
response = self._perform_request(request)
return _ETreeXmlToObject.convert_response_to_feeds(
response, _convert_etree_element_to_table)
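
    # A hedged usage sketch of the continuation pattern described in the
    # query_tables docstring above (variable names are illustrative and this
    # is not part of the class API):
    #
    #     ts = TableService(account_name='...', account_key='...')
    #     tables = []
    #     next_name = None
    #     while True:
    #         batch = ts.query_tables(top=100, next_table_name=next_name)
    #         tables.extend(batch)
    #         next_name = batch.x_ms_continuation.get('NextTableName')
    #         if not next_name:
    #             break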
@staticmethod
def get_create_table_json_body(tablename=None):
json_body = None
        if tablename:
            json_body = {"TableName": tablename}
return json_body
def create_table(self, table, fail_on_exist=False,use_json=True):
'''
Creates a new table in the storage account.
table:
Name of the table to create. Table name may contain only
alphanumeric characters and cannot begin with a numeric character.
It is case-insensitive and must be from 3 to 63 characters long.
fail_on_exist:
Specify whether throw exception when table exists.
'''
_validate_not_none('table', table)
request = HTTPRequest()
request.method = 'POST'
request.host = self._get_host()
request.path = '/Tables'
        if use_json:
            # serialize as real JSON (str() of a dict is not valid JSON)
            request.body = json.dumps(
                self.get_create_table_json_body(table)).encode("utf-8")
else:
request.body = _get_request_body(_convert_table_to_xml(table))
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_table_header(request)
print(str(request.headers))
print(str(request.body))
if not fail_on_exist:
try:
self._perform_request(request)
return True
except WindowsAzureError as ex:
_dont_fail_on_exist(ex)
return False
else:
self._perform_request(request)
return True
def delete_table(self, table_name, fail_not_exist=False):
'''
table_name:
Name of the table to delete.
fail_not_exist:
Specify whether throw exception when table doesn't exist.
'''
_validate_not_none('table_name', table_name)
request = HTTPRequest()
request.method = 'DELETE'
request.host = self._get_host()
request.path = '/Tables(\'' + _str(table_name) + '\')'
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_table_header(request)
if not fail_not_exist:
try:
self._perform_request(request)
return True
except WindowsAzureError as ex:
_dont_fail_not_exist(ex)
return False
else:
self._perform_request(request)
return True
def get_table_acl(self, table_name):
'''
Returns details about any stored access policies specified on the
table that may be used with Shared Access Signatures.
table_name:
Name of existing table.
'''
_validate_not_none('table_name', table_name)
request = HTTPRequest()
request.method = 'GET'
request.host = self._get_host()
request.path = '/' + _str(table_name) + '?comp=acl'
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_table_header(request)
response = self._perform_request(request)
return _ETreeXmlToObject.parse_response(response, SignedIdentifiers)
def set_table_acl(self, table_name, signed_identifiers=None):
'''
Sets stored access policies for the table that may be used with
Shared Access Signatures.
table_name:
Name of existing table.
signed_identifiers:
SignedIdentifers instance
'''
_validate_not_none('table_name', table_name)
request = HTTPRequest()
request.method = 'PUT'
request.host = self._get_host()
request.path = '/' + _str(table_name) + '?comp=acl'
request.body = _get_request_body(
_convert_signed_identifiers_to_xml(signed_identifiers))
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_table_header(request, content_type=None)
self._perform_request(request)
def get_entity(self, table_name, partition_key, row_key, select='',use_json=True,metadata="no"):
'''
Get an entity in a table; includes the $select options.
partition_key:
PartitionKey of the entity.
row_key:
RowKey of the entity.
select:
Property names to select.
'''
_validate_not_none('table_name', table_name)
_validate_not_none('partition_key', partition_key)
_validate_not_none('row_key', row_key)
_validate_not_none('select', select)
request = HTTPRequest()
request.method = 'GET'
request.host = self._get_host()
request.path = '/' + _str(table_name) + \
'(PartitionKey=\'' + _str(partition_key) + \
'\',RowKey=\'' + \
_str(row_key) + '\')?$select=' + \
_str(select) + ''
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_table_header(request,jsonmetadata=metadata)
response = self._perform_request(request)
if not use_json:
return _convert_response_to_entity(response)
else:
return response.body
def query_entities(self, table_name, filter=None, select=None, top=None,
next_partition_key=None, next_row_key=None):
'''
Get entities in a table; includes the $filter and $select options.
table_name:
Table to query.
filter:
Optional. Filter as described at
http://msdn.microsoft.com/en-us/library/windowsazure/dd894031.aspx
select:
Optional. Property names to select from the entities.
top:
Optional. Maximum number of entities to return.
next_partition_key:
Optional. When top is used, the next partition key is stored in
result.x_ms_continuation['NextPartitionKey']
next_row_key:
            Optional. When top is used, the next row key is stored in
result.x_ms_continuation['NextRowKey']
'''
_validate_not_none('table_name', table_name)
request = HTTPRequest()
request.method = 'GET'
request.host = self._get_host()
request.path = '/' + _str(table_name) + '()'
request.query = [
('$filter', _str_or_none(filter)),
('$select', _str_or_none(select)),
('$top', _int_or_none(top)),
('NextPartitionKey', _str_or_none(next_partition_key)),
('NextRowKey', _str_or_none(next_row_key))
]
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_table_header(request)
response = self._perform_request(request)
return _ETreeXmlToObject.convert_response_to_feeds(
response, _convert_etree_element_to_entity)
    def insert_entity(self, table_name, entity,
                      content_type='application/atom+xml', use_json=True):
        '''
        Inserts a new entity into a table.
        table_name:
            Table name.
        entity:
            Required. The entity object to insert. Could be a dict format or
            entity object.
        content_type:
            Required. Ignored when use_json is True (application/json is
            used instead); otherwise must be set to application/atom+xml.
        use_json:
            Optional. When True, send the entity as JSON and return the raw
            response; when False, send Atom XML and return the parsed entity.
        '''
if use_json:
content_type = 'application/json'
_validate_not_none('table_name', table_name)
_validate_not_none('entity', entity)
_validate_not_none('content_type', content_type)
request = HTTPRequest()
request.method = 'POST'
request.host = self._get_host()
request.path = '/' + _str(table_name) + ''
request.headers = [('Content-Type', _str_or_none(content_type))]
if use_json:
request.body = str(entity).encode("utf-8")
else:
request.body = _get_request_body(_convert_entity_to_xml(entity))
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_table_header(request)
response = self._perform_request(request)
if not use_json:
return _convert_response_to_entity(response)
else:
return response
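    # Hedged usage sketch (values are illustrative): with use_json=True the
    # entity is sent as a JSON string; with use_json=False a dict or entity
    # object is converted to Atom XML.
    #
    #     table_service.insert_entity(
    #         'tasktable',
    #         '{"PartitionKey": "pk1", "RowKey": "rk1", "status": "new"}')
    #     table_service.insert_entity(
    #         'tasktable',
    #         {'PartitionKey': 'pk1', 'RowKey': 'rk1', 'status': 'new'},
    #         use_json=False)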
    def update_entity(self, table_name, partition_key, row_key, entity,
                      content_type='application/atom+xml', if_match='*', use_json=True):
'''
Updates an existing entity in a table. The Update Entity operation
replaces the entire entity and can be used to remove properties.
table_name:
Table name.
partition_key:
PartitionKey of the entity.
row_key:
RowKey of the entity.
entity:
Required. The entity object to insert. Could be a dict format or
entity object.
content_type:
Required. Must be set to application/atom+xml
        if_match:
            Optional. Specifies the condition for which the update should be
            performed. To force an unconditional update, set to the wildcard
            character (*).
'''
_validate_not_none('table_name', table_name)
_validate_not_none('partition_key', partition_key)
_validate_not_none('row_key', row_key)
_validate_not_none('entity', entity)
_validate_not_none('content_type', content_type)
request = HTTPRequest()
request.method = 'PUT'
request.host = self._get_host()
request.path = '/' + \
_str(table_name) + '(PartitionKey=\'' + \
_str(partition_key) + '\',RowKey=\'' + _str(row_key) + '\')'
request.headers = [
('Content-Type', _str_or_none(content_type)),
('If-Match', _str_or_none(if_match))
]
if not use_json:
request.body = _get_request_body(_convert_entity_to_xml(entity))
else:
request.body = entity
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_table_header(request)
response = self._perform_request(request)
return _parse_response_for_dict_filter(response, filter=['etag'])
def merge_entity(self, table_name, partition_key, row_key, entity,
content_type='application/atom+xml', if_match='*'):
'''
Updates an existing entity by updating the entity's properties. This
operation does not replace the existing entity as the Update Entity
operation does.
table_name:
Table name.
partition_key:
PartitionKey of the entity.
row_key:
RowKey of the entity.
entity:
Required. The entity object to insert. Can be a dict format or
entity object.
content_type:
Required. Must be set to application/atom+xml
if_match:
Optional. Specifies the condition for which the merge should be
performed. To force an unconditional merge, set to the wildcard
character (*).
'''
_validate_not_none('table_name', table_name)
_validate_not_none('partition_key', partition_key)
_validate_not_none('row_key', row_key)
_validate_not_none('entity', entity)
_validate_not_none('content_type', content_type)
request = HTTPRequest()
request.method = 'MERGE'
request.host = self._get_host()
request.path = '/' + \
_str(table_name) + '(PartitionKey=\'' + \
_str(partition_key) + '\',RowKey=\'' + _str(row_key) + '\')'
request.headers = [
('Content-Type', _str_or_none(content_type)),
('If-Match', _str_or_none(if_match))
]
request.body = _get_request_body(_convert_entity_to_xml(entity))
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_table_header(request)
response = self._perform_request(request)
return _parse_response_for_dict_filter(response, filter=['etag'])
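    # Hedged usage sketch (illustrative values): update_entity replaces the
    # whole entity, so properties omitted from `entity` are removed, while
    # merge_entity only overwrites the properties that are supplied.
    #
    #     entity = {'PartitionKey': 'pk1', 'RowKey': 'rk1', 'status': 'done'}
    #     table_service.update_entity('tasktable', 'pk1', 'rk1', entity)  # other props dropped
    #     table_service.merge_entity('tasktable', 'pk1', 'rk1', entity)   # other props kept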
def delete_entity(self, table_name, partition_key, row_key,
content_type='application/atom+xml', if_match='*'):
'''
Deletes an existing entity in a table.
table_name:
Table name.
partition_key:
PartitionKey of the entity.
row_key:
RowKey of the entity.
content_type:
Required. Must be set to application/atom+xml
if_match:
Optional. Specifies the condition for which the delete should be
performed. To force an unconditional delete, set to the wildcard
character (*).
'''
_validate_not_none('table_name', table_name)
_validate_not_none('partition_key', partition_key)
_validate_not_none('row_key', row_key)
_validate_not_none('content_type', content_type)
_validate_not_none('if_match', if_match)
request = HTTPRequest()
request.method = 'DELETE'
request.host = self._get_host()
request.path = '/' + \
_str(table_name) + '(PartitionKey=\'' + \
_str(partition_key) + '\',RowKey=\'' + _str(row_key) + '\')'
request.headers = [
('Content-Type', _str_or_none(content_type)),
('If-Match', _str_or_none(if_match))
]
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_table_header(request)
self._perform_request(request)
def insert_or_replace_entity(self, table_name, partition_key, row_key,
entity, content_type='application/atom+xml'):
'''
Replaces an existing entity or inserts a new entity if it does not
exist in the table. Because this operation can insert or update an
entity, it is also known as an "upsert" operation.
table_name:
Table name.
partition_key:
PartitionKey of the entity.
row_key:
RowKey of the entity.
entity:
Required. The entity object to insert. Could be a dict format or
entity object.
content_type:
Required. Must be set to application/atom+xml
'''
_validate_not_none('table_name', table_name)
_validate_not_none('partition_key', partition_key)
_validate_not_none('row_key', row_key)
_validate_not_none('entity', entity)
_validate_not_none('content_type', content_type)
request = HTTPRequest()
request.method = 'PUT'
request.host = self._get_host()
request.path = '/' + \
_str(table_name) + '(PartitionKey=\'' + \
_str(partition_key) + '\',RowKey=\'' + _str(row_key) + '\')'
request.headers = [('Content-Type', _str_or_none(content_type))]
request.body = _get_request_body(_convert_entity_to_xml(entity))
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_table_header(request)
response = self._perform_request(request)
return _parse_response_for_dict_filter(response, filter=['etag'])
def insert_or_merge_entity(self, table_name, partition_key, row_key,
entity, content_type='application/atom+xml'):
'''
Merges an existing entity or inserts a new entity if it does not exist
in the table. Because this operation can insert or update an entity,
it is also known as an "upsert" operation.
table_name:
Table name.
partition_key:
PartitionKey of the entity.
row_key:
RowKey of the entity.
entity:
Required. The entity object to insert. Could be a dict format or
entity object.
content_type:
Required. Must be set to application/atom+xml
'''
_validate_not_none('table_name', table_name)
_validate_not_none('partition_key', partition_key)
_validate_not_none('row_key', row_key)
_validate_not_none('entity', entity)
_validate_not_none('content_type', content_type)
request = HTTPRequest()
request.method = 'MERGE'
request.host = self._get_host()
request.path = '/' + \
_str(table_name) + '(PartitionKey=\'' + \
_str(partition_key) + '\',RowKey=\'' + _str(row_key) + '\')'
request.headers = [('Content-Type', _str_or_none(content_type))]
request.body = _get_request_body(_convert_entity_to_xml(entity))
request.path, request.query = _update_request_uri_query_local_storage(
request, self.use_local_storage)
request.headers = _update_storage_table_header(request)
response = self._perform_request(request)
return _parse_response_for_dict_filter(response, filter=['etag'])
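    # Hedged usage sketch: both "upsert" variants insert the entity when it
    # does not exist; when it does exist, insert_or_replace_entity discards
    # properties that are not supplied, while insert_or_merge_entity keeps them.
    #
    #     table_service.insert_or_replace_entity('tasktable', 'pk1', 'rk1', entity)
    #     table_service.insert_or_merge_entity('tasktable', 'pk1', 'rk1', entity)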
def _perform_request_worker(self, request):
self.authentication.sign_request(request)
return self._httpclient.perform_request(request)
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
from tempest.lib.common.utils import data_utils
from functional.common import test
from tempest.lib import exceptions
class ServerTests(test.TestCase):
"""Functional tests for openstack server commands."""
@classmethod
def get_flavor(cls):
# NOTE(rtheis): Get m1.tiny flavor since functional tests may
# create other flavors.
raw_output = cls.openstack('flavor show m1.tiny -c id -f value')
return raw_output.strip('\n')
@classmethod
def get_image(cls):
# NOTE(rtheis): Get public images since functional tests may
# create private images.
raw_output = cls.openstack('image list --public -f value -c ID')
ray = raw_output.split('\n')
idx = int(len(ray) / 2)
return ray[idx]
@classmethod
def get_network(cls):
try:
# NOTE(rtheis): Get private network since functional tests may
# create other networks.
raw_output = cls.openstack('network show private -c id -f value')
except exceptions.CommandFailed:
return ''
return ' --nic net-id=' + raw_output.strip('\n')
def server_create(self, name=None):
"""Create server. Add cleanup."""
name = name or data_utils.rand_uuid()
opts = self.get_show_opts(self.FIELDS)
flavor = self.get_flavor()
image = self.get_image()
network = self.get_network()
raw_output = self.openstack('--debug server create --flavor ' +
flavor +
' --image ' + image + network + ' ' +
name + opts)
if not raw_output:
self.fail('Server has not been created!')
self.addCleanup(self.server_delete, name)
def server_list(self, params=[]):
"""List servers."""
opts = self.get_list_opts(params)
return self.openstack('server list' + opts)
def server_delete(self, name):
"""Delete server by name."""
self.openstack('server delete ' + name)
def setUp(self):
"""Set necessary variables and create server."""
super(ServerTests, self).setUp()
self.NAME = data_utils.rand_name('TestServer')
self.OTHER_NAME = data_utils.rand_name('TestServer')
self.HEADERS = ['"Name"']
self.FIELDS = ['name']
self.IP_POOL = 'public'
self.server_create(self.NAME)
def test_server_rename(self):
"""Test server rename command.
Test steps:
1) Boot server in setUp
2) Rename server
3) Check output
4) Rename server back to original name
"""
raw_output = self.openstack('server set --name ' + self.OTHER_NAME +
' ' + self.NAME)
self.assertOutput("", raw_output)
self.assertNotIn(self.NAME, self.server_list(['Name']))
self.assertIn(self.OTHER_NAME, self.server_list(['Name']))
self.openstack('server set --name ' + self.NAME + ' ' +
self.OTHER_NAME)
def test_server_list(self):
"""Test server list command.
Test steps:
1) Boot server in setUp
2) List servers
3) Check output
"""
opts = self.get_list_opts(self.HEADERS)
raw_output = self.openstack('server list' + opts)
self.assertIn(self.NAME, raw_output)
def test_server_show(self):
"""Test server show command.
Test steps:
1) Boot server in setUp
2) Show server
3) Check output
"""
opts = self.get_show_opts(self.FIELDS)
raw_output = self.openstack('server show ' + self.NAME + opts)
self.assertEqual(self.NAME + "\n", raw_output)
def test_server_metadata(self):
"""Test command to set server metadata.
Test steps:
1) Boot server in setUp
2) Set properties for server
3) Check server properties in server show output
4) Unset properties for server
5) Check server properties in server show output
"""
self.wait_for_status("ACTIVE")
# metadata
raw_output = self.openstack(
'server set --property a=b --property c=d ' + self.NAME)
opts = self.get_show_opts(["name", "properties"])
raw_output = self.openstack('server show ' + self.NAME + opts)
self.assertEqual(self.NAME + "\na='b', c='d'\n", raw_output)
raw_output = self.openstack(
'server unset --property a ' + self.NAME)
opts = self.get_show_opts(["name", "properties"])
raw_output = self.openstack('server show ' + self.NAME + opts)
self.assertEqual(self.NAME + "\nc='d'\n", raw_output)
def test_server_suspend_resume(self):
"""Test server suspend and resume commands.
Test steps:
1) Boot server in setUp
2) Suspend server
3) Check for SUSPENDED server status
4) Resume server
5) Check for ACTIVE server status
"""
self.wait_for_status("ACTIVE")
# suspend
raw_output = self.openstack('server suspend ' + self.NAME)
self.assertEqual("", raw_output)
self.wait_for_status("SUSPENDED")
# resume
raw_output = self.openstack('server resume ' + self.NAME)
self.assertEqual("", raw_output)
self.wait_for_status("ACTIVE")
def test_server_lock_unlock(self):
"""Test server lock and unlock commands.
Test steps:
1) Boot server in setUp
2) Lock server
3) Check output
4) Unlock server
5) Check output
"""
self.wait_for_status("ACTIVE")
# lock
raw_output = self.openstack('server lock ' + self.NAME)
self.assertEqual("", raw_output)
# unlock
raw_output = self.openstack('server unlock ' + self.NAME)
self.assertEqual("", raw_output)
def test_server_pause_unpause(self):
"""Test server pause and unpause commands.
Test steps:
1) Boot server in setUp
2) Pause server
3) Check for PAUSED server status
4) Unpause server
5) Check for ACTIVE server status
"""
self.wait_for_status("ACTIVE")
# pause
raw_output = self.openstack('server pause ' + self.NAME)
self.assertEqual("", raw_output)
self.wait_for_status("PAUSED")
# unpause
raw_output = self.openstack('server unpause ' + self.NAME)
self.assertEqual("", raw_output)
self.wait_for_status("ACTIVE")
def test_server_rescue_unrescue(self):
"""Test server rescue and unrescue commands.
Test steps:
1) Boot server in setUp
2) Rescue server
3) Check for RESCUE server status
4) Unrescue server
5) Check for ACTIVE server status
"""
self.wait_for_status("ACTIVE")
# rescue
opts = self.get_show_opts(["adminPass"])
raw_output = self.openstack('server rescue ' + self.NAME + opts)
self.assertNotEqual("", raw_output)
self.wait_for_status("RESCUE")
# unrescue
raw_output = self.openstack('server unrescue ' + self.NAME)
self.assertEqual("", raw_output)
self.wait_for_status("ACTIVE")
def test_server_attach_detach_floating_ip(self):
"""Test commands to attach and detach floating IP for server.
Test steps:
1) Boot server in setUp
2) Create floating IP
3) Add floating IP to server
4) Check for floating IP in server show output
5) Remove floating IP from server
6) Check that floating IP is not in server show output
7) Delete floating IP
8) Check output
"""
self.wait_for_status("ACTIVE")
# attach ip
opts = self.get_show_opts(["id", "floating_ip_address"])
raw_output = self.openstack('ip floating create ' +
self.IP_POOL +
opts)
ip, ipid, rol = tuple(raw_output.split('\n'))
self.assertNotEqual("", ipid)
self.assertNotEqual("", ip)
raw_output = self.openstack('ip floating add ' + ip + ' ' + self.NAME)
self.assertEqual("", raw_output)
raw_output = self.openstack('server show ' + self.NAME)
self.assertIn(ip, raw_output)
# detach ip
raw_output = self.openstack('ip floating remove ' + ip + ' ' +
self.NAME)
self.assertEqual("", raw_output)
raw_output = self.openstack('server show ' + self.NAME)
self.assertNotIn(ip, raw_output)
raw_output = self.openstack('ip floating delete ' + ipid)
self.assertEqual("", raw_output)
def test_server_reboot(self):
"""Test server reboot command.
Test steps:
1) Boot server in setUp
2) Reboot server
3) Check for ACTIVE server status
"""
self.wait_for_status("ACTIVE")
# reboot
raw_output = self.openstack('server reboot ' + self.NAME)
self.assertEqual("", raw_output)
self.wait_for_status("ACTIVE")
def wait_for_status(self, expected_status='ACTIVE', wait=900, interval=30):
"""Wait until server reaches expected status."""
# TODO(thowe): Add a server wait command to osc
failures = ['ERROR']
total_sleep = 0
opts = self.get_show_opts(['status'])
while total_sleep < wait:
status = self.openstack('server show ' + self.NAME + opts)
status = status.rstrip()
print('Waiting for {} current status: {}'.format(expected_status,
status))
if status == expected_status:
break
self.assertNotIn(status, failures)
time.sleep(interval)
total_sleep += interval
status = self.openstack('server show ' + self.NAME + opts)
status = status.rstrip()
self.assertEqual(status, expected_status)
# give it a little bit more time
time.sleep(5)
|
|
# PyMoBu - Python enhancement for Autodesk's MotionBuilder
# Copyright (C) 2010 Scott Englert
# scott@scottenglert.com
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
Component module
Contains component classes and related functions
'''
import re
from pyfbsdk import FBComponent #@UnresolvedImport
from pyfbsdk import FBPropertyType #@UnresolvedImport
from pyfbsdk import FBMatrix #@UnresolvedImport
from pyfbsdk import FBVector3d #@UnresolvedImport
from pyfbsdk import FBModelTransformationMatrix #@UnresolvedImport
from pyfbsdk import FBNamespaceAction #@UnresolvedImport
# eclipseSyntax
if False: from pyfbsdk_gen_doc import * #@UndefinedVariable @UnusedWildImport
def ConvertToPyMoBu(component):
'''Utility to convert a FB class to a PMB class'''
if isinstance(component, PMBComponent):
return component
    # walk the inherited classes in method resolution order and return the
    # first one that has a matching PyMoBu class
    componentClasses = component.__class__.__mro__
    for fbClass in componentClasses:
        pmbClassName = fbClass.__name__.replace('FB', 'PMB')
        try:
            pmbClass = eval(pmbClassName)
        except NameError:
            continue
return pmbClass.Convert(component)
# add this function to FBComponent
FBComponent.ConvertToPyMoBu = ConvertToPyMoBu
# -----------------------------------------------------
# PyMoBu Component Classes
# -----------------------------------------------------
class PMBComponent(object):
'''PyMoBu class for FBComponent'''
# property type dictionary {Name : [enum, internal name]}
kPropertyTypes = dict(Action = [FBPropertyType.kFBPT_Action, 'Action'],
Enum = [FBPropertyType.kFBPT_enum, 'Enum'],
Integer = [FBPropertyType.kFBPT_int,'Integer'],
Bool = [FBPropertyType.kFBPT_bool,'Bool'],
Double = [FBPropertyType.kFBPT_double,'Number'],
CharPtr = [FBPropertyType.kFBPT_charptr, 'String'],
Float = [FBPropertyType.kFBPT_float,'Float'],
Time = [FBPropertyType.kFBPT_Time, 'Time'],
Object = [FBPropertyType.kFBPT_object, 'Object'],
StringList = [FBPropertyType.kFBPT_stringlist, 'StringList'],
Vector4D = [FBPropertyType.kFBPT_Vector4D, 'Vector'],
Vector3D = [FBPropertyType.kFBPT_Vector3D, 'Vector'],
Vector2D = [FBPropertyType.kFBPT_Vector2D, 'Vector'],
ColorRGB = [FBPropertyType.kFBPT_ColorRGB, 'Color'],
ColorRGBA = [FBPropertyType.kFBPT_ColorRGBA, 'ColorAndAlpha'],
TimeSpan = [FBPropertyType.kFBPT_TimeSpan, 'Time'])
def __init__(self, component):
self.component = component
def __repr__(self):
name = getattr(self.component, 'LongName', self.component.Name)
return "%s('%s')" % (self.__class__.__name__, name)
def __str__(self):
'''Returns the full object name'''
return getattr(self.component, 'LongName', self.component.Name)
@property
def Name(self):
'''Returns the object name'''
        return self.component.Name
@property
def LongName(self):
'''Returns the full object name'''
return getattr(self.component, 'LongName', self.component.Name)
@classmethod
def Convert(cls, component):
return cls(component)
def ListProperties(self, pattern=None, _type=None, **kwargs):
'''
        Returns a list of properties from the PropertyList with optional filters
@param pattern: list properties with specific names with optional wildcard *. Default is all.
@param _type: get properties of specific types. See self.kPropertyTypes.keys() for names
Optional parameters True/False for testing from FBProperty:
IsAnimatable
IsInternal
IsList
IsMaxClamp
IsMinClamp
IsObjectList
IsReadOnly
IsReferenceProperty
IsUserProperty
'''
# setup a test for the optional parameters
def passesOptionalTest(x):
for arg, challenge in kwargs.iteritems():
func = getattr(x, arg, None)
if func and func() != challenge:
return False
return True
# set up the name testing based on the pattern
if pattern:
# if there is a wild card in the pattern
if '*' in pattern:
pattern = pattern.replace('*', '.*')
# name testing function
passesNameTest = lambda x: re.match(pattern, x.GetName())
else:
passesNameTest = lambda x: pattern == x.GetName()
else:
passesNameTest = lambda x: True
# add type testing
if _type:
propertyType = self.kPropertyTypes[_type][0]
passesTypeTest = lambda x: x.GetPropertyType() == propertyType
else:
passesTypeTest = lambda x: True
properties = []
for p in self.component.PropertyList:
# odd bug that some items are None
if p is None:
continue
if not passesOptionalTest(p):
continue
if not passesTypeTest(p):
continue
if passesNameTest(p):
properties.append(p)
return properties
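    # Hedged usage sketch (the component name is illustrative): the pattern,
    # type and FBProperty-test filters can be combined freely.
    #
    #     pmb = someComponent.ConvertToPyMoBu()
    #     all_props = pmb.ListProperties()
    #     lcl_props = pmb.ListProperties(pattern='Lcl *')
    #     user_bools = pmb.ListProperties(_type='Bool', IsUserProperty=True)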
def GetPropertyValue(self, name):
'''Returns a property value from the components PropertyList'''
return self._findProperty(name).Data
def SetPropertyValue(self, name, value):
'''Sets a property value in the components PropertyList'''
self._findProperty(name).Data = value
def AddProperty(self, name, _type, animatable=True, user=True):
'''
Add a property to this component
@param name: the name of the property
@param _type: the data type of the property:
'''
if self.ListProperties(pattern=name):
raise Exception("Can not add property '%s'. Already exists on object '%'" % (name, self))
try:
typeData = self.kPropertyTypes[_type]
except KeyError:
raise Exception("Invalid property type '%s'. Valid types are: '%s'" % (_type, ', '.join(self.kPropertyTypes.keys())))
        # copy the type data so the class-level kPropertyTypes lists are not mutated
        args = typeData + [animatable, user, None]
        self.component.PropertyCreate(name, *args)
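    # Hedged usage sketch (the property name is illustrative):
    #
    #     pmb.AddProperty('myWeight', 'Double')     # animatable user property
    #     pmb.SetPropertyValue('myWeight', 0.5)
    #     pmb.GetPropertyValue('myWeight')          # -> 0.5
    #     pmb.RemoveProperty('myWeight')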
def RemoveProperty(self, name):
'''Remove a property from an object'''
_property = self._findProperty(name)
        # test if we can remove a non-user property or not
if _property.IsUserProperty():
self.component.PropertyRemove(_property)
else:
raise Exception("Property is flagged as non-user. Unable to remove property '%s' from object '%s'" % (name, self))
def _findProperty(self, name):
        # similar to the native way but raises an exception if the property isn't found
_property = self.component.PropertyList.Find(name)
if _property:
return _property
else:
raise Exception("Could not find property named '%s' for object '%s'" % (name, self))
def GetNamespace(self):
'''Returns the namespace of the object'''
namespace = re.match(".*:", getattr(self.component, 'LongName', self.component.Name))
if namespace:
return namespace.group()
def AddNamespace(self, namespace, hierarchy=True, toRight=False):
'''
Adds a namespace to the object
@param hierarchy: Apply this action to hierarchy. Default True
@param toRight: Add namespace to the right of other namespaces. Default False (left)
'''
from pyfbsdk import FBConstraint #@UnresolvedImport @Reimport
action = FBNamespaceAction.kFBConcatNamespace
if hierarchy and not isinstance(self.component, FBConstraint):
self.component.ProcessNamespaceHierarchy(action, namespace, None, toRight)
else:
self.component.ProcessObjectNamespace(action, namespace, None, toRight)
def SwapNamespace(self, newNamespace, oldNamespace, hierarchy=True):
'''
Swaps a new namespace with an existing namespace
@param hierarchy: Apply this action to hierarchy. Default True
'''
from pyfbsdk import FBConstraint #@Reimport @UnresolvedImport
action = FBNamespaceAction.kFBReplaceNamespace
if hierarchy and not isinstance(self.component, FBConstraint):
self.component.ProcessNamespaceHierarchy(action, newNamespace, oldNamespace)
else:
self.component.ProcessObjectNamespace(action, newNamespace, oldNamespace)
def StripNamespace(self, hierarchy=True):
'''
Removes all the namespaces
@param hierarchy: Apply this action to hierarchy. Default True
'''
from pyfbsdk import FBConstraint #@Reimport @UnresolvedImport
action = FBNamespaceAction.kFBRemoveAllNamespace
if hierarchy and not isinstance(self.component, FBConstraint):
self.component.ProcessNamespaceHierarchy(action, '')
else:
self.component.ProcessObjectNamespace(action, '')
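    # Hedged usage sketch of the namespace helpers (names are illustrative):
    #
    #     pmb.AddNamespace('Character1')                  # -> 'Character1:Hips'
    #     pmb.SwapNamespace('Character2', 'Character1')   # -> 'Character2:Hips'
    #     pmb.GetNamespace()                              # -> 'Character2:'
    #     pmb.StripNamespace()                            # -> 'Hips'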
class PMBBox(PMBComponent):
'''PyMobu class for FBBox'''
pass
class PMBModel(PMBBox):
'''PyMoBu class for FBModel'''
kInverseMatrixTypeDict = dict(Transformation = FBModelTransformationMatrix.kModelInverse_Transformation,
Translation = FBModelTransformationMatrix.kModelInverse_Translation,
Rotation = FBModelTransformationMatrix.kModelInverse_Rotation,
Scaling = FBModelTransformationMatrix.kModelInverse_Scaling)
# Center = FBModelTransformationMatrix.kModelCenter,
# All = FBModelTransformationMatrix.kModelAll)
kMatrixTypeDict = dict(Transformation = FBModelTransformationMatrix.kModelTransformation,
Translation = FBModelTransformationMatrix.kModelTranslation,
Rotation = FBModelTransformationMatrix.kModelRotation,
Scaling = FBModelTransformationMatrix.kModelScaling,
ParentOffset = FBModelTransformationMatrix.kModelParentOffset)
# Center = FBModelTransformationMatrix.kModelCenter,
# All = FBModelTransformationMatrix.kModelAll)
@property
def Children(self):
return self.component.Children
@property
def Parent(self):
return self.component.Parent
def SetInverseMatrix(self, matrix, worldSpace=False, _type='Transformation'):
'''
Set the inverse matrix
@param worldSpace: world space matrix (True/False) Default False
@param _type: matrix type (Transformation, Translation, Rotation, Scaling, Center, All)
'''
try:
self.component.SetMatrix(matrix, self.kInverseMatrixTypeDict[_type], worldSpace)
except KeyError:
raise Exception("Invalid vector type '%s'. Valid types are: %s" % (_type, ', '.join(self.kInverseMatrixTypeDict.keys())))
def SetMatrix(self, matrix, worldSpace=False, _type='Transformation'):
'''
Set the matrix
@param worldSpace: world space matrix (True/False) Default False
@param _type: matrix type (Transformation, Translation, Rotation, Scaling, Center, All)
'''
try:
self.component.SetMatrix(matrix, self.kMatrixTypeDict[_type], worldSpace)
except KeyError:
raise Exception("Invalid vector type '%s'. Valid types are: %s" % (_type, ', '.join(self.kMatrixTypeDict.keys())))
def GetAnimationNode(self, transform='Translation'):
'''
Get AnimationNode
@param transform: transformation type
'''
animationNode= None
if transform=='Translation':
animationNode = self.component.Translation.GetAnimationNode()
if transform=='Rotation':
animationNode = self.component.Rotation.GetAnimationNode()
if transform=='Scale':
animationNode = self.component.Scale.GetAnimationNode()
return animationNode
def GetInverseMatrix(self, worldSpace=False, _type='Transformation'):
'''
Get the inverse matrix
@param worldSpace: world space matrix (True/False) Default False
@param _type: matrix type (Transformation, Translation, Rotation, Scaling, Center, All)
'''
matrix = FBMatrix()
try:
self.component.GetMatrix(matrix, self.kInverseMatrixTypeDict[_type], worldSpace)
except KeyError:
raise Exception("Invalid vector type '%s'. Valid types are: %s" % (_type, ', '.join(self.kInverseMatrixTypeDict.keys())))
return matrix
def GetMatrix(self, worldSpace=False, _type='Transformation'):
'''
Get the matrix
@param worldSpace: world space matrix (True/False) Default False
@param _type: matrix type (Transformation, Translation, Rotation, Scaling, Center, All)
'''
matrix = FBMatrix()
try:
self.component.GetMatrix(matrix, self.kMatrixTypeDict[_type], worldSpace)
except KeyError:
raise Exception("Invalid vector type '%s'. Valid types are: %s" % (_type, ', '.join(self.kMatrixTypeDict.keys())))
return matrix
def GetTranslation(self, worldSpace=False):
'''
Get translation vector
@param worldSpace: world space vector (True/False) Default False
'''
vector = FBVector3d()
self.component.GetVector(vector, self.kMatrixTypeDict['Translation'], worldSpace)
return vector
def GetRotation(self, worldSpace=False):
'''
Get rotation vector
@param worldSpace: world space vector (True/False) Default False
'''
vector = FBVector3d()
self.component.GetVector(vector, self.kMatrixTypeDict['Rotation'], worldSpace)
return vector
def GetScale(self, worldSpace=False):
'''
Get scale vector
@param worldSpace: world space vector (True/False) Default False
'''
vector = FBVector3d()
self.component.GetVector(vector, self.kMatrixTypeDict['Scaling'], worldSpace)
return vector
def SetTranslation(self, vector, worldSpace=False):
'''
Set the translation vector
@param worldSpace: world space vector (True/False) Default False
'''
self.component.SetVector(vector, self.kMatrixTypeDict['Translation'], worldSpace)
def SetRotation(self, vector, worldSpace=False):
'''
Set the rotation vector
@param worldSpace: world space vector (True/False) Default False
'''
self.component.SetVector(vector, self.kMatrixTypeDict['Rotation'], worldSpace)
def SetScale(self, vector, worldSpace=False):
'''
Set the scale vector
@param worldSpace: world space vector (True/False) Default False
'''
self.component.SetVector(vector, self.kMatrixTypeDict['Scaling'], worldSpace)
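    # Hedged usage sketch (the model name is illustrative):
    #
    #     pmb = someModel.ConvertToPyMoBu()
    #     world_mat = pmb.GetMatrix(worldSpace=True)
    #     pmb.SetTranslation(FBVector3d(0, 10, 0))                 # local space
    #     pmb.SetRotation(FBVector3d(0, 90, 0), worldSpace=True)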
# import other component modules
from pymobu.components.constraints import *
|
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import os
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core import client_options as client_options_lib
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore
from google.ads.googleads.v10.services.types import customer_client_link_service
from .transports.base import (
CustomerClientLinkServiceTransport,
DEFAULT_CLIENT_INFO,
)
from .transports.grpc import CustomerClientLinkServiceGrpcTransport
class CustomerClientLinkServiceClientMeta(type):
"""Metaclass for the CustomerClientLinkService client.
This provides class-level methods for building and retrieving
support objects (e.g. transport) without polluting the client instance
objects.
"""
_transport_registry = (
OrderedDict()
) # type: Dict[str, Type[CustomerClientLinkServiceTransport]]
_transport_registry["grpc"] = CustomerClientLinkServiceGrpcTransport
def get_transport_class(
cls, label: str = None,
) -> Type[CustomerClientLinkServiceTransport]:
"""Returns an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use.
"""
# If a specific transport is requested, return that one.
if label:
return cls._transport_registry[label]
# No transport is requested; return the default (that is, the first one
# in the dictionary).
return next(iter(cls._transport_registry.values()))
class CustomerClientLinkServiceClient(
metaclass=CustomerClientLinkServiceClientMeta
):
"""Service to manage customer client links."""
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
"""Converts api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
DEFAULT_ENDPOINT = "googleads.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
CustomerClientLinkServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_info(
info
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
CustomerClientLinkServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(
filename
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@property
def transport(self) -> CustomerClientLinkServiceTransport:
"""Returns the transport used by the client instance.
Returns:
CustomerClientLinkServiceTransport: The transport used by the client
instance.
"""
return self._transport
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
"""Releases underlying transport's resources.
.. warning::
ONLY use as a context manager if the transport is NOT shared
with other clients! Exiting the with block will CLOSE the transport
and may cause errors in other clients!
"""
self.transport.close()
@staticmethod
def customer_path(customer_id: str,) -> str:
"""Returns a fully-qualified customer string."""
return "customers/{customer_id}".format(customer_id=customer_id,)
@staticmethod
def parse_customer_path(path: str) -> Dict[str, str]:
"""Parses a customer path into its component segments."""
m = re.match(r"^customers/(?P<customer_id>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def customer_client_link_path(
customer_id: str, client_customer_id: str, manager_link_id: str,
) -> str:
"""Returns a fully-qualified customer_client_link string."""
return "customers/{customer_id}/customerClientLinks/{client_customer_id}~{manager_link_id}".format(
customer_id=customer_id,
client_customer_id=client_customer_id,
manager_link_id=manager_link_id,
)
@staticmethod
def parse_customer_client_link_path(path: str) -> Dict[str, str]:
"""Parses a customer_client_link path into its component segments."""
m = re.match(
r"^customers/(?P<customer_id>.+?)/customerClientLinks/(?P<client_customer_id>.+?)~(?P<manager_link_id>.+?)$",
path,
)
return m.groupdict() if m else {}
@staticmethod
def common_billing_account_path(billing_account: str,) -> str:
"""Returns a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str, str]:
"""Parse a billing_account path into its component segments."""
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(folder: str,) -> str:
"""Returns a fully-qualified folder string."""
return "folders/{folder}".format(folder=folder,)
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str, str]:
"""Parse a folder path into its component segments."""
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(organization: str,) -> str:
"""Returns a fully-qualified organization string."""
return "organizations/{organization}".format(organization=organization,)
@staticmethod
def parse_common_organization_path(path: str) -> Dict[str, str]:
"""Parse a organization path into its component segments."""
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(project: str,) -> str:
"""Returns a fully-qualified project string."""
return "projects/{project}".format(project=project,)
@staticmethod
def parse_common_project_path(path: str) -> Dict[str, str]:
"""Parse a project path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(project: str, location: str,) -> str:
"""Returns a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(
project=project, location=location,
)
@staticmethod
def parse_common_location_path(path: str) -> Dict[str, str]:
"""Parse a location path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path
)
return m.groupdict() if m else {}
def __init__(
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Union[str, CustomerClientLinkServiceTransport, None] = None,
client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the customer client link service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, CustomerClientLinkServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
client_options = client_options_lib.from_dict(client_options)
if client_options is None:
client_options = client_options_lib.ClientOptions()
# Create SSL credentials for mutual TLS if needed.
if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in (
"true",
"false",
):
raise ValueError(
"Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
)
use_client_cert = (
os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true"
)
client_cert_source_func = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
is_mtls = True
client_cert_source_func = client_options.client_cert_source
else:
is_mtls = mtls.has_default_client_cert_source()
if is_mtls:
client_cert_source_func = mtls.default_client_cert_source()
else:
client_cert_source_func = None
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
api_endpoint = client_options.api_endpoint
else:
use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_mtls_env == "never":
api_endpoint = self.DEFAULT_ENDPOINT
elif use_mtls_env == "always":
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
api_endpoint = (
self.DEFAULT_MTLS_ENDPOINT
if is_mtls
else self.DEFAULT_ENDPOINT
)
else:
raise MutualTLSChannelError(
"Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
"values: never, auto, always"
)
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
if isinstance(transport, CustomerClientLinkServiceTransport):
# transport is a CustomerClientLinkServiceTransport instance.
if credentials or client_options.credentials_file:
raise ValueError(
"When providing a transport instance, "
"provide its credentials directly."
)
if client_options.scopes:
raise ValueError(
"When providing a transport instance, provide its scopes "
"directly."
)
self._transport = transport
else:
Transport = type(self).get_transport_class(transport)
self._transport = Transport(
credentials=credentials,
credentials_file=client_options.credentials_file,
host=api_endpoint,
scopes=client_options.scopes,
client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
always_use_jwt_access=True,
)
def mutate_customer_client_link(
self,
request: Union[
customer_client_link_service.MutateCustomerClientLinkRequest, dict
] = None,
*,
customer_id: str = None,
operation: customer_client_link_service.CustomerClientLinkOperation = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> customer_client_link_service.MutateCustomerClientLinkResponse:
r"""Creates or updates a customer client link. Operation statuses
are returned.
List of thrown errors: `AuthenticationError <>`__
`AuthorizationError <>`__ `DatabaseError <>`__ `FieldError <>`__
`FieldMaskError <>`__ `HeaderError <>`__ `InternalError <>`__
`ManagerLinkError <>`__ `MutateError <>`__
`NewResourceCreationError <>`__ `QuotaError <>`__
`RequestError <>`__
Args:
request (Union[google.ads.googleads.v10.services.types.MutateCustomerClientLinkRequest, dict]):
The request object. Request message for
[CustomerClientLinkService.MutateCustomerClientLink][google.ads.googleads.v10.services.CustomerClientLinkService.MutateCustomerClientLink].
customer_id (str):
Required. The ID of the customer
                whose customer links are being modified.
This corresponds to the ``customer_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
operation (google.ads.googleads.v10.services.types.CustomerClientLinkOperation):
Required. The operation to perform on
the individual CustomerClientLink.
This corresponds to the ``operation`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.ads.googleads.v10.services.types.MutateCustomerClientLinkResponse:
Response message for a
CustomerClientLink mutate.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([customer_id, operation])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a customer_client_link_service.MutateCustomerClientLinkRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(
request,
customer_client_link_service.MutateCustomerClientLinkRequest,
):
request = customer_client_link_service.MutateCustomerClientLinkRequest(
request
)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if customer_id is not None:
request.customer_id = customer_id
if operation is not None:
request.operation = operation
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[
self._transport.mutate_customer_client_link
]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("customer_id", request.customer_id),)
),
)
# Send the request.
response = rpc(
request, retry=retry, timeout=timeout, metadata=metadata,
)
# Done; return the response.
return response
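    # Hedged usage sketch (credentials and customer IDs are illustrative,
    # not real accounts):
    #
    #     client = CustomerClientLinkServiceClient(credentials=creds)
    #     operation = customer_client_link_service.CustomerClientLinkOperation()
    #     operation.create.client_customer = "customers/1234567890"
    #     response = client.mutate_customer_client_link(
    #         customer_id="9876543210", operation=operation)
    #     print(response.result.resource_name)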
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution("google-ads",).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("CustomerClientLinkServiceClient",)
|
|
# -*- coding: utf-8 -*-
from gluon import current
from s3 import *
from s3layouts import *
try:
from .layouts import *
except ImportError:
pass
import s3menus as default
red_cross_filter = {"organisation.organisation_type_id$name" : "Red Cross / Red Crescent"}
# =============================================================================
class S3MainMenu(default.S3MainMenu):
""" Custom Application Main Menu """
# -------------------------------------------------------------------------
@classmethod
def menu(cls):
""" Compose Menu """
# Modules menus
main_menu = MM()(
cls.menu_modules(),
)
# Additional menus
current.menu.personal = cls.menu_personal()
current.menu.dashboard = cls.menu_dashboard()
return main_menu
# -------------------------------------------------------------------------
@classmethod
def menu_modules(cls):
""" Custom Modules Menu """
T = current.T
return [
homepage("gis")(
),
homepage("hrm", "org", name=T("Staff"),
vars=dict(group="staff"))(
MM("Staff", c="hrm", f="staff"),
MM("Teams", c="hrm", f="group"),
MM("National Societies", c="org", f="organisation",
vars = red_cross_filter),
MM("Offices", c="org", f="office"),
MM("Job Titles", c="hrm", f="job_title"),
#MM("Skill List", c="hrm", f="skill"),
MM("Training Events", c="hrm", f="training_event"),
MM("Training Courses", c="hrm", f="course"),
MM("Certificate List", c="hrm", f="certificate"),
),
homepage("vol", name=T("Volunteers"))(
MM("Volunteers", c="vol", f="volunteer"),
MM("Teams", c="vol", f="group"),
MM("Volunteer Roles", c="vol", f="job_title"),
MM("Programmes", c="vol", f="programme"),
#MM("Skill List", c="vol", f="skill"),
MM("Training Events", c="vol", f="training_event"),
MM("Training Courses", c="vol", f="course"),
MM("Certificate List", c="vol", f="certificate"),
),
homepage("member")(
MM("Members", c="member", f="membership"),
),
homepage("inv", "supply", "req")(
MM("Warehouses", c="inv", f="warehouse"),
MM("Received Shipments", c="inv", f="recv"),
MM("Sent Shipments", c="inv", f="send"),
MM("Items", c="supply", f="item"),
MM("Item Catalogues", c="supply", f="catalog"),
MM("Item Categories", c="supply", f="item_category"),
M("Requests", c="req", f="req")(),
#M("Commitments", f="commit")(),
),
homepage("asset")(
MM("Assets", c="asset", f="asset"),
MM("Items", c="asset", f="item"),
),
homepage("survey")(
MM("Assessment Templates", c="survey", f="template"),
MM("Disaster Assessments", c="survey", f="series"),
),
homepage("project")(
MM("Projects", c="project", f="project"),
MM("Communities", c="project", f="location"),
),
homepage("vulnerability")(
MM("Map", c="vulnerability", f="index"),
),
homepage("event", "irs")(
MM("Events", c="event", f="event"),
MM("Incident Reports", c="irs", f="ireport"),
),
]
# -------------------------------------------------------------------------
@classmethod
def menu_dashboard(cls):
""" Dashboard Menu (at bottom of page) """
DB = S3DashBoardMenuLayout
request = current.request
if request.controller == "vol":
dashboard = DB()(
DB("VOLUNTEERS",
c="vol",
image = "graphic_staff_wide.png",
title = "Volunteers")(
DB("Manage Volunteer Data", f="volunteer"),
DB("Manage Teams Data", f="group"),
),
DB("CATALOGS",
c="hrm",
image="graphic_catalogue.png",
title="Catalogs")(
DB("Certificates", f="certificate"),
DB("Training Courses", f="course"),
#DB("Skills", f="skill"),
DB("Job Titles", f="job_title")
))
elif request.controller in ("hrm", "org"):
dashboard = DB()(
DB("STAFF",
c="hrm",
image = "graphic_staff_wide.png",
title = "Staff")(
DB("Manage Staff Data", f="staff"),
DB("Manage Teams Data", f="group"),
),
DB("OFFICES",
c="org",
image = "graphic_office.png",
title = "Offices")(
DB("Manage Offices Data", f="office"),
DB("Manage National Society Data", f="organisation",
vars=red_cross_filter
),
),
DB("CATALOGS",
c="hrm",
image="graphic_catalogue.png",
title="Catalogs")(
DB("Certificates", f="certificate"),
DB("Training Courses", f="course"),
#DB("Skills", f="skill"),
DB("Job Titles", f="job_title")
))
elif request.controller == "default" and request.function == "index":
dashboard = DB(_id="dashboard")(
DB("Staff", c="hrm", f="staff", m="search",
image = "graphic_staff.png",
title = "Staff",
text = "Add new and manage existing staff."),
DB("Volunteers", c="vol", f="volunteer", m="search",
image = "graphic_volunteers.png",
title = "Volunteers",
text = "Add new and manage existing volunteers."),
DB("Members", c="member", f="index",
image = "graphic_members.png",
title = "Members",
text = "Add new and manage existing members."),
DB("Warehouses", c="inv", f="index",
image = "graphic_warehouse.png",
title = "Warehouses",
text = "Stocks and relief items."),
DB("Assets", c="asset", f="index",
image = "graphic_assets.png",
title = "Assests",
text = "Manage office inventories and assets."),
DB("Assessments", c="survey", f="index",
image = "graphic_assessments.png",
title = "Assessments",
text = "Design, deploy & analyze surveys."),
DB("Projects", c="project", f="index",
image = "graphic_tools.png",
title = "Projects",
text = "Tracking and analysis of Projects and Activities.")
)
else:
dashboard = None
return dashboard
# -------------------------------------------------------------------------
@classmethod
def menu_personal(cls):
""" Custom Personal Menu """
auth = current.auth
s3 = current.response.s3
settings = current.deployment_settings
# Language selector
menu_lang = ML("Language", right=True)
for language in s3.l10n_languages.items():
code, name = language
menu_lang(
ML(name, translate=False, lang_code=code, lang_name=name)
)
if not auth.is_logged_in():
request = current.request
login_next = URL(args=request.args, vars=request.vars)
if request.controller == "default" and \
request.function == "user" and \
"_next" in request.get_vars:
login_next = request.get_vars["_next"]
self_registration = settings.get_security_self_registration()
menu_personal = MP()(
MP("Register", c="default", f="user",
m="register", check=self_registration),
MP("Login", c="default", f="user",
m="login", vars=dict(_next=login_next)),
MP("Lost Password", c="default", f="user",
m="retrieve_password"),
menu_lang
)
else:
s3_has_role = auth.s3_has_role
is_org_admin = lambda i: s3_has_role("ORG_ADMIN") and \
not s3_has_role("ADMIN")
menu_personal = MP()(
MP("Administration", c="admin", f="index",
check=s3_has_role("ADMIN")),
MP("Administration", c="admin", f="user",
check=is_org_admin),
MP("Profile", c="default", f="person"),
MP("Change Password", c="default", f="user",
m="change_password"),
MP("Logout", c="default", f="user",
m="logout"),
menu_lang,
)
return menu_personal
# =============================================================================
class S3OptionsMenu(default.S3OptionsMenu):
""" Custom Controller Menus """
# -------------------------------------------------------------------------
def hrm(self):
""" HRM Human Resource Management """
session = current.session
if "hrm" not in session.s3:
current.s3db.hrm_vars()
hrm_vars = session.s3.hrm
ADMIN = current.auth.get_system_roles().ADMIN
SECTORS = "Clusters" if current.deployment_settings.get_ui_label_cluster() \
else "Sectors"
manager_mode = lambda i: hrm_vars.mode is None
personal_mode = lambda i: hrm_vars.mode is not None
is_org_admin = lambda i: hrm_vars.orgs and True or \
ADMIN in session.s3.roles
staff = {"group": "staff"}
return M()(
M("Staff", c="hrm", f=("staff", "person"),
check=manager_mode)(
M("New", m="create"),
M("List All"),
M("Search", m="search"),
M("Import", f="person", m="import",
vars=staff, p="create"),
),
M("Teams", c="hrm", f="group",
check=manager_mode)(
M("New", m="create"),
M("List All"),
M("Search Members", f="group_membership", m="search"),
M("Import", f="group_membership", m="import"),
),
M("National Societies", c="org",
f="organisation",
vars=red_cross_filter,
check=manager_mode)(
M("New", m="create",
vars=red_cross_filter
),
M("List All",
vars=red_cross_filter
),
M("Search", m="search",
vars=red_cross_filter
),
M("Import", m="import", p="create", check=is_org_admin)
),
M("Offices", c="org", f="office",
check=manager_mode)(
M("New", m="create"),
M("List All"),
M("Search", m="search"),
M("Import", m="import", p="create"),
),
M("Department Catalog", c="hrm", f="department",
check=manager_mode)(
M("New", m="create"),
M("List All"),
),
M("Job Title Catalog", c="hrm", f="job_title",
check=manager_mode)(
M("New", m="create"),
M("List All"),
M("Import", m="import", p="create", check=is_org_admin),
),
#M("Skill Catalog", f="skill",
#check=manager_mode)(
#M("New", m="create"),
#M("List All"),
##M("Skill Provisions", f="skill_provision"),
#),
M("Training Events", c="hrm", f="training_event",
check=manager_mode)(
M("New", m="create"),
M("List All"),
M("Search", m="search"),
M("Search Training Participants", f="training",
m="search"),
M("Import Participant List", f="training", m="import"),
),
M("Reports", c="hrm", f="staff", m="report",
check=manager_mode)(
M("Staff Report", m="report"),
M("Expiring Staff Contracts Report",
vars=dict(expiring="1")),
M("Training Report", f="training", m="report"),
),
M("Training Course Catalog", c="hrm", f="course",
check=manager_mode)(
M("New", m="create"),
M("List All"),
M("Import", m="import", p="create", check=is_org_admin),
M("Course Certificates", f="course_certificate"),
),
M("Certificate Catalog", c="hrm", f="certificate",
check=manager_mode)(
M("New", m="create"),
M("List All"),
M("Import", m="import", p="create", check=is_org_admin),
#M("Skill Equivalence", f="certificate_skill"),
),
M("Organization Types", c="org", f="organisation_type",
restrict=[ADMIN],
check=manager_mode)(
M("New", m="create"),
M("List All"),
),
M("Office Types", c="org", f="office_type",
restrict=[ADMIN],
check=manager_mode)(
M("New", m="create"),
M("List All"),
),
#M("Facility Types", c="org", f="facility_type",
# restrict=[ADMIN],
# check=manager_mode)(
# M("New", m="create"),
# M("List All"),
#),
M(SECTORS, f="sector", c="org", restrict=[ADMIN],
check=manager_mode)(
M("New", m="create"),
M("List All"),
),
#M("My Profile", c="hrm", f="person",
# check=personal_mode, vars=dict(mode="personal")),
# This provides the link to switch to the manager mode:
M("Human Resources", c="hrm", f="index",
check=[personal_mode, is_org_admin]),
# This provides the link to switch to the personal mode:
#M("Personal Profile", c="hrm", f="person",
# check=manager_mode, vars=dict(mode="personal"))
#M("Staff & Volunteers", c="hrm", f="human_resource", m="summary"),
)
# -------------------------------------------------------------------------
def vol(self):
""" Volunteer Management """
s3 = current.session.s3
ADMIN = s3.system_roles.ADMIN
# Custom conditions for the check-hook, as lambdas in order
# to have them checked only immediately before rendering:
manager_mode = lambda i: s3.hrm.mode is None
personal_mode = lambda i: s3.hrm.mode is not None
is_org_admin = lambda i: s3.hrm.orgs and True or \
ADMIN in s3.roles
settings = current.deployment_settings
show_programmes = lambda i: settings.get_hrm_vol_experience() == "programme"
show_tasks = lambda i: settings.has_module("project") and \
settings.get_project_mode_task()
teams = settings.get_hrm_teams()
use_teams = lambda i: teams
check_org_dependent_field = lambda tablename, fieldname: \
settings.set_org_dependent_field(tablename, fieldname,
enable_field = False)
return M(c="vol")(
M("Volunteers", f="volunteer",
check=[manager_mode])(
M("New", m="create"),
M("List All"),
M("Search", m="search"),
M("Import", f="person", m="import",
vars={"group":"volunteer"}, p="create"),
),
M(teams, f="group",
check=[manager_mode, use_teams])(
M("New", m="create"),
M("List All"),
M("Search Members", f="group_membership", m="search"),
M("Import", f="group_membership", m="import"),
),
M("Department Catalog", f="department",
check=manager_mode)(
M("New", m="create"),
M("List All"),
),
M("Volunteer Role Catalog", f="job_title",
check=manager_mode)(
M("New", m="create"),
M("List All"),
M("Import", m="import", p="create", check=is_org_admin),
),
#M("Skill Catalog", f="skill",
# check=manager_mode)(
# M("New", m="create"),
# M("List All"),
# #M("Skill Provisions", f="skill_provision"),
#),
M("Training Events", f="training_event",
check=manager_mode)(
M("New", m="create"),
M("List All"),
M("Search", m="search"),
M("Search Training Participants", f="training",
m="search"),
M("Import Participant List", f="training", m="import"),
),
M("Training Course Catalog", f="course",
check=manager_mode)(
M("New", m="create"),
M("List All"),
#M("Course Certificates", f="course_certificate"),
),
M("Certificate Catalog", f="certificate",
check=manager_mode)(
M("New", m="create"),
M("List All"),
#M("Skill Equivalence", f="certificate_skill"),
),
M("Programmes", f="programme",
check=[manager_mode, show_programmes])(
M("New", m="create"),
M("List All"),
M("Import Hours", f="programme_hours", m="import"),
),
M("Awards", f="award",
check=[manager_mode, is_org_admin])(
M("New", m="create"),
M("List All"),
),
M("Volunteer Cluster Type", f="cluster_type",
check = check_org_dependent_field("vol_volunteer_cluster",
"vol_cluster_type_id"))(
M("New", m="create"),
M("List All"),
),
M("Volunteer Cluster", f="cluster",
check = check_org_dependent_field("vol_volunteer_cluster",
"vol_cluster_id"))(
M("New", m="create"),
M("List All"),
),
M("Volunteer Cluster Position", f="cluster_position",
check = check_org_dependent_field("vol_volunteer_cluster",
"vol_cluster_position_id"))(
M("New", m="create"),
M("List All"),
),
M("Reports", f="volunteer", m="report",
check=manager_mode)(
M("Volunteer Report", m="report"),
M("Hours by Role Report", f="programme_hours", m="report2",
vars=Storage(rows="job_title_id",
cols="month",
fact="sum(hours)"),
check=show_programmes),
M("Hours by Programme Report", f="programme_hours", m="report2",
vars=Storage(rows="programme_id",
cols="month",
fact="sum(hours)"),
check=show_programmes),
M("Training Report", f="training", m="report"),
),
#M("My Profile", f="person",
# check=personal_mode, vars=dict(mode="personal")),
M("My Tasks", f="task",
check=[personal_mode, show_tasks],
vars=dict(mode="personal",
mine=1)),
# This provides the link to switch to the manager mode:
M("Volunteer Management", f="index",
check=[personal_mode, is_org_admin]),
# This provides the link to switch to the personal mode:
#M("Personal Profile", f="person",
# check=manager_mode, vars=dict(mode="personal"))
#M("Staff & Volunteers", c="hrm", f="human_resource", m="summary"),
)
# -------------------------------------------------------------------------
def inv(self):
""" INV / Inventory """
ADMIN = current.session.s3.system_roles.ADMIN
current.s3db.inv_recv_crud_strings()
crud_strings = current.response.s3.crud_strings
inv_recv_list = crud_strings.inv_recv.title_list
inv_recv_search = crud_strings.inv_recv.title_search
use_commit = lambda i: current.deployment_settings.get_req_use_commit()
return M()(
#M("Home", f="index"),
M("Warehouses", c="inv", f="warehouse")(
M("New", m="create"),
M("List All"),
M("Search", m="search"),
M("Import", m="import", p="create"),
),
M("Warehouse Stock", c="inv", f="inv_item")(
M("Search", f="inv_item", m="search"),
M("Search Shipped Items", f="track_item", m="search"),
M("Adjust Stock Levels", f="adj"),
#M("Kitting", f="kit"),
M("Import", f="inv_item", m="import", p="create"),
),
M("Reports", c="inv", f="inv_item")(
M("Warehouse Stock", f="inv_item",m="report"),
#M("Expiration Report", c="inv", f="track_item",
# m="search", vars=dict(report="exp")),
#M("Monetization Report", c="inv", f="inv_item",
# m="search", vars=dict(report="mon")),
#M("Utilization Report", c="inv", f="track_item",
# m="search", vars=dict(report="util")),
#M("Summary of Incoming Supplies", c="inv", f="track_item",
# m="search", vars=dict(report="inc")),
# M("Summary of Releases", c="inv", f="track_item",
# m="search", vars=dict(report="rel")),
),
M(inv_recv_list, c="inv", f="recv")(
M("New", m="create"),
M("List All"),
M("Search", m="search"),
),
M("Sent Shipments", c="inv", f="send")(
M("New", m="create"),
M("List All"),
M("Search", m="search"),
M("Search Shipped Items", f="track_item", m="search"),
),
M("Items", c="supply", f="item")(
M("New", m="create"),
M("List All"),
M("Search", m="search"),
M("Report", m="report"),
M("Import", f="catalog_item", m="import", p="create"),
),
# Catalog Items moved to be next to the Item Categories
#M("Catalog Items", c="supply", f="catalog_item")(
#M("New", m="create"),
#M("List All"),
#M("Search", m="search"),
#),
#M("Brands", c="supply", f="brand",
# restrict=[ADMIN])(
# M("New", m="create"),
# M("List All"),
#),
M("Catalogs", c="supply", f="catalog")(
M("New", m="create"),
M("List All"),
#M("Search", m="search"),
),
M("Item Categories", c="supply", f="item_category",
restrict=[ADMIN])(
M("New", m="create"),
M("List All"),
),
M("Suppliers", c="inv", f="supplier")(
M("New", m="create"),
M("List All"),
M("Search", m="search"),
M("Import", m="import", p="create"),
),
M("Facilities", c="inv", f="facility")(
M("New", m="create", t="org_facility"),
M("List All"),
#M("Search", m="search"),
),
M("Facility Types", c="inv", f="facility_type",
restrict=[ADMIN])(
M("New", m="create"),
M("List All"),
#M("Search", m="search"),
),
M("Requests", c="req", f="req")(
M("New", m="create"),
M("List All"),
M("Requested Items", f="req_item"),
#M("Search Requested Items", f="req_item", m="search"),
),
M("Commitments", c="req", f="commit", check=use_commit)(
M("List All"),
M("Search", m="search"),
),
)
# -------------------------------------------------------------------------
def irs(self):
""" IRS Incident Reporting """
return M()(
M("Events", c="event", f="event")(
M("New", m="create"),
M("List All"),
),
M("Incident Reports", c="irs", f="ireport")(
M("New", m="create"),
M("List All"),
M("Open Incidents", vars={"open": 1}),
M("Timeline", args="timeline"),
M("Search", m="search"),
),
M("Incident Categories", c="irs", f="icategory",
check=current.auth.s3_has_role(current.session.s3.system_roles.ADMIN))(
M("New", m="create"),
M("List All"),
),
M("Reports", c="irs", f="ireport", m="report")(
M("Incidents", m="report"),
),
)
# -------------------------------------------------------------------------
def org(self):
""" Organisation Management """
# Same as HRM
return self.hrm()
# -------------------------------------------------------------------------
def req(self):
""" Organisation Management """
# Same as Inventory
return self.inv()
# -------------------------------------------------------------------------
def event(self):
""" Event Management """
# Same as IRS
return self.irs()
# END =========================================================================
|
|
# occiput
# Stefano Pedemonte
# Harvard University, Martinos Center for Biomedical Imaging
# Apr. 2014, Boston, MA
import numpy
from .ilang_models import SSD_ilang
from ilang.Graphs import ProbabilisticGraphicalModel
from ilang.Samplers import Sampler
from occiput.Visualization import MultipleVolumesNiftyCore
from occiput.Core import Image3D, Transform_Translation
DEFAULT_N_POINTS = [100,100,100]
DEFAULT_N_ITER = 30
DEFAULT_SIGMA = 3000000
class Registration_Two_Images(object):
def __init__( self, source=None, target=None, degrees_of_freedom=3, sigma=DEFAULT_SIGMA, initial_transformation=None ):
self.set_source(source)
self.set_target(target)
self.set_sigma(sigma)
self.__make_graph()
if initial_transformation is None:
self.set_transformation( numpy.zeros(degrees_of_freedom) )
else:
self.set_transformation( initial_transformation )
def __make_graph(self):
self.ilang_model = SSD_ilang()
# The following is an alternative use of ilang models: define the log_p and gradient functions here in order to have access to
# the local variables.
self.ilang_model.log_conditional_probability_transformation = self.__P
self.ilang_model.log_conditional_probability_gradient_transformation = self.__G
self.ilang_graph = ProbabilisticGraphicalModel(['source','target','transformation','sigma'])
self.ilang_graph.set_nodes_given(['sigma','source','target'],True)
self.ilang_graph.add_dependence(self.ilang_model,{'source':'source','target':'target','sigma':'sigma','transformation':'transformation'})
self.ilang_sampler = Sampler(self.ilang_graph)
def __set_space(self):
# make sure that the images are in the same space
if self.source.space != self.target.space:
raise "Source [%s] and Target [%s] must be in the same space"%(source.space,target.space)
self.space = self.source.space
def set_transformation(self,tr):
self.__transformation = tr
def __get_transformation(self):
return self.__transformation
def set_cost_function(self,cost):
pass
def __initialize_registration(self, optimization_method='QuasiNewton_L_BFGS_B', n_points=DEFAULT_N_POINTS):
self.ilang_graph.set_node_value('source',self.source.data)
self.ilang_graph.set_node_value('target',self.target.data)
self.ilang_graph.set_node_value('sigma',self.sigma)
if n_points is None:
n_points = self.target.shape
self.grid = self.target.get_world_grid(n_points)
self.resampled_target = self.target.compute_resample_on_grid(self.grid)
self.ilang_sampler.set_node_sampling_method_manual('transformation',optimization_method)
def register(self, optimization_method='GradientAscent',iterations=DEFAULT_N_ITER, n_points=DEFAULT_N_POINTS):
self.__initialize_registration(optimization_method, n_points)
self.ilang_graph.set_node_value('transformation',self.transformation)
self.ilang_sampler.sample(iterations)
self.transformation = self.ilang_graph.get_node_value('transformation')
def set_source(self,source):
self.source = source
if hasattr(self,'target'):
self.__set_space()
def set_target(self,target):
self.target = target
if hasattr(self,'source'):
self.__set_space()
def set_sigma(self,sigma):
self.sigma = sigma
def get_result(self):
result = self.source.copy()
result.transform( self.get_transformation_matrix() )
return result
def get_transformation_matrix(self):
#rot = self.__transformation[0:3]
#tra = self.__transformation[3:6]
tra = self.__transformation
return Transform_Translation(tra, self.space, self.space) #FIXME: rotation and translation
def display(self):
D = MultipleVolumesNiftyCore([self.source, self.target, self.get_result()])
#D = MultipleVolumes([self.source, self.target])
return D
def __G(self,transformation):
# print "Transformation: ", transformation
#rot = transformation[0:3]
#tra = transformation[3:6]
tra = transformation
T = Transform_Translation(tra, self.space, self.space) #FIXME: rotation and translation
re_s = self.source.compute_resample_on_grid(self.grid, affine_grid_to_world=T) #FIXME: memoize
gr_s = self.source.compute_gradient_on_grid(self.grid, affine_grid_to_world=T)
re_t = self.resampled_target
G_tra0 = (re_t.data-re_t.data*gr_s[0]).sum()
G_tra1 = (re_t.data-re_t.data*gr_s[1]).sum()
G_tra2 = (re_t.data-re_t.data*gr_s[2]).sum()
G_tra = [G_tra0, G_tra1, G_tra2]
G = numpy.asarray([G_tra[0], G_tra[1], G_tra[2]]) / self.sigma
# print "gradient: ", G
# print "log_likelihood: ", self.__P(transformation)
return G
def __P(self,transformation):
# FIXME: memoize (now image is resampled to compute log_p and the gradient)
#print transformation
#rot = transformation[0:3]
#tra = transformation[3:6]
tra = transformation
T = Transform_Translation(tra, self.space, self.space) #FIXME: rotation and translation
resampled_source = self.source.compute_resample_on_grid(self.grid, affine_grid_to_world=T)
P = -numpy.linalg.norm(resampled_source.data - self.resampled_target.data) / self.sigma #FIXME: verify
# print "log_likelihood: ", P
return P
def _repr_html_(self):
#return self.ilang_graph._repr_html_()
return self.display()._repr_html_()
def __repr__(self):
return "Registration of 2 images."
transformation = property(__get_transformation, set_transformation)
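# Hedged usage sketch (not part of the original module): a minimal example of how
# Registration_Two_Images might be driven, assuming `source_image` and
# `target_image` are occiput Image3D volumes defined in the same space:
#
#   reg = Registration_Two_Images(source=source_image, target=target_image,
#                                 degrees_of_freedom=3, sigma=DEFAULT_SIGMA)
#   reg.register(optimization_method='GradientAscent', iterations=DEFAULT_N_ITER)
#   aligned = reg.get_result()   # source volume resampled with the estimated translation
#   reg.display()                # MultipleVolumesNiftyCore view of source, target and result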
class Registration_Longitudinal():
def __init__(self, images ):
self.__images = images
self.__make_graph()
def set_transformation(self,tr):
pass
def set_cost_function(self,cost):
pass
def register(self):
pass
def __make_graph(self):
self.ilang_graph = ProbabilisticGraphicalModel()
for i in range(len(self.__images)):
self.ilang_graph.add_node('im_%d'%i)
self.ilang_graph.set_nodes_given(['im_%d'%i,],True)
for i in range(len(self.__images)-1):
self.ilang_graph.add_nodes(['sigma_%d'%i,'T_%d'%i])
self.ilang_graph.set_nodes_given(['sigma_%d'%i,],True)
model = SSD_ilang('SSD_%d'%i)
self.ilang_graph.add_dependence(model,{'sigma':'sigma_%d'%i,'transformation':'T_%d'%i,'source':'im_%d'%i,'target':'im_%d'%(i+1)})
self.ilang_sampler = Sampler(self.ilang_graph)
def _repr_html_(self):
return self.ilang_graph._repr_html_()
def __repr__(self):
return "Longitudinal registation."
class Registration_N_Images():
def __init__(self, images ):
self.__images = images
self.__make_graph()
def set_transformation(self,tr):
pass
def set_cost_function(self,cost):
pass
def register(self):
pass
def __make_graph(self):
self.ilang_graph = ProbabilisticGraphicalModel()
self.ilang_graph.add_node('template')
for i in range(len(self.__images)):
self.ilang_graph.add_node('im_%d'%i)
self.ilang_graph.set_nodes_given(['im_%d'%i,],True)
for i in range(len(self.__images)):
self.ilang_graph.add_nodes(['sigma_%d'%i,'T_%d'%i])
self.ilang_graph.set_nodes_given(['sigma_%d'%i,],True)
model = SSD_ilang('SSD_%d'%i)
self.ilang_graph.add_dependence(model,{'sigma':'sigma_%d'%i,'transformation':'T_%d'%i,'source':'im_%d'%i,'target':'template'})
self.ilang_sampler = Sampler(self.ilang_graph)
def _repr_html_(self):
return self.ilang_graph._repr_html_()
def __repr__(self):
return "Registration of N images. "
|
|
import sys
import json
import codecs
import re
import copy
import getopt
#for now contains dummy rules
class ValidationRule:
def __init__(self, type, param = None):
if type == "USER_REGEX":
type = ValidationType.USER_REGEX
if type == "NOT_NULL":
type = ValidationType.NOT_NULL
if type == "IS_URL":
type = ValidationType.IS_URL
if type == "MIN_ROWS":
type = ValidationType.MIN_ROWS
if type == "MAX_ROWS":
type = ValidationType.MAX_ROWS
if type == "RULE1":
type = ValidationType.RULE1
if type == "RULE2":
type = ValidationType.RULE2
if type == "RULE3":
type = ValidationType.RULE3
self.__method = type
self.__param = param
def execute_rule(self, field_extraction):
#print "in execute_rule"
# The rule methods are stored on ValidationType as plain (unbound) functions,
# so pass self explicitly to keep their (self, field_extraction, param) signatures intact.
return self.__method(self, field_extraction, self.__param)
#apply user-defined regular expression on extract
def user_regex(self, field_extraction, regex):
#print "in user_regex"
extract = field_extraction['extract']
regex = re.compile(regex)
# apply pattern
if regex.match(extract):
# print "matched:", extract
return True
else:
return False
#check if extract is null or empty string
def not_null(self, field_extraction, param = None):
#print "in not_null"
extract = field_extraction['extract']
if extract is None or not extract:
return False
else:
return True
def is_url(self, field_extraction, param = None):
#print "in is_url"
extract = field_extraction['extract']
regex = re.compile(
#r'^(?:file):///' # http:// or https://
r'^(?:http|ftp)s?://' # http:// or https://
#r'^((?:http|ftp)s?://)|((?:file):///)' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain...
r'localhost|' # localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
#^(?:http|ftp)s?:\/\/(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|localhost|\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})(?::\d+)?(?:\/?|[\/?]\S+)$
if regex.match(extract):
# print "matched:", extract
return True
else:
return False
#returns true if the list contains at least min_rows
def min_rows(self, list_extraction, min_rows):
#print "in min_rows"
#this is a list of rows
#print "list:", list_extraction
rows = list_extraction['sequence']
if len(rows) < int(min_rows):
return False
else:
return True
#returns true if the list contains at most max_rows
def max_rows(self, list_extraction, max_rows):
#print "in max_rows"
#this is a list of rows
rows = list_extraction['sequence']
print("rows:", len(rows))
if len(rows) > int(max_rows):
return False
else:
return True
def rule1(self, extract, param = None):
#print "in valid1"
return True
def rule2(self, extract, param = None):
#print "in valid2"
return True
def rule3(self, extract, param = None):
#print "in valid3"
return False
def __str__(self):
output = 'rule: ' + str(self.__method)
return output
class ValidationType:
USER_REGEX = ValidationRule.user_regex
NOT_NULL = ValidationRule.not_null
IS_URL = ValidationRule.is_url
MIN_ROWS = ValidationRule.min_rows
MAX_ROWS = ValidationRule.max_rows
RULE1 = ValidationRule.rule1
RULE2 = ValidationRule.rule2
RULE3 = ValidationRule.rule3
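# Illustrative sketch (an assumption mirroring Validation.validate_value below):
# a single rule spec is a dict with a "type" name and an optional "param", e.g.
#
#   rule_spec = {"type": "NOT_NULL"}
#   rule = ValidationRule(rule_spec["type"], rule_spec.get("param"))
#   rule.execute_rule({"extract": "Firstname"})   # -> True
#
# where the dict passed to execute_rule stands in for the per-field extraction
# produced upstream (see the sample extraction at the end of this file).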
#look at end of file for sample of extraction with validation metadata
#applies validation rules to extracted data and returns extraction json with validation metadata
class Validation:
def __init__(self, validation_json):
# validation_json = {"gh56fj78": [see below ...]}]
# __validation: key = fieldid, value = json array with validation rules
# [ { "type": "USER_REGEX",
# "param": "the regex" },
# { "type": "NOT_NULL" },
# etc.}
self.__validation = validation_json
def get_validations(self):
return self.__validation
#applies validation on extraction and outputs extraction with metadata
def validate_extraction(self, extraction):
extraction_copy = copy.deepcopy(extraction)
for page in extraction:
self.validate_page(extraction_copy[page])
return extraction_copy
#validate data on one page
def validate_page(self, page_extraction):
for fieldid in self.__validation:
self.validate_page_for_field(page_extraction, fieldid, self.__validation[fieldid])
return page_extraction
#find field that needs to be validated and validate that field
def validate_page_for_field(self, page_extraction, field_id, validation_rules):
#print "page extraction ...", page_extraction
if isinstance(page_extraction, list):
# this is a sequence item so we look at the 'sub_rules' if they exist
for row in page_extraction:
#print 'row ...', row
if 'sub_rules' in row:
#print "in subrules..."
self.validate_page_for_field(row['sub_rules'], field_id, validation_rules)
elif isinstance(page_extraction, dict):
#print 'dict ...', page_extraction
# this is a standard rule so we look at this one itself
for name in page_extraction:
#print 'name ...', name
if 'rule_id' in page_extraction[name] and page_extraction[name]['rule_id'] == field_id:
#found field that needs validation
self.validate_field(page_extraction[name], validation_rules)
elif 'sub_rules' in page_extraction[name]:
#print "in subrules ..."
self.validate_page_for_field(page_extraction[name]['sub_rules'], field_id, validation_rules)
elif 'sequence' in page_extraction[name]:
#print "in sequence..."
self.validate_page_for_field(page_extraction[name]['sequence'], field_id, validation_rules)
#validate one field
def validate_field(self, field_extraction, validation_rules):
#print 'validate field ...', field_extraction
for validation_rule in validation_rules:
is_valid = self.validate_value(field_extraction, validation_rule)
#construct validation json to be returned
validation_rule['valid'] = is_valid
self.add_validation(field_extraction, validation_rule)
# validate one field
def validate_field_extraction(self, field_extraction):
# print 'validate field ...', field_extraction
if field_extraction['fieldid'] in self.__validation:
for validation_rule in self.__validation[field_extraction['fieldid']]:
is_valid = self.validate_value(field_extraction, validation_rule)
# construct validation json to be returned
validation_rule['valid'] = is_valid
self.add_validation(field_extraction, validation_rule)
else:
#no validation for this field
field_extraction['valid'] = True
field_extraction['validation'] = None
#validate given extraction; could be a simple field or a list
# we handle it differently in the rule execution
#validation_rule = {type = ValidationType.RULE1, param = param if needed}
def validate_value(self, field_extraction, validation_rule):
validation_param = None
if 'param' in validation_rule:
validation_param = validation_rule['param']
one_validation_rule = ValidationRule(validation_rule['type'], validation_param)
return one_validation_rule.execute_rule(field_extraction)
#add validation to field
def add_validation(self, field_extraction, field_validation):
if 'validation' in field_extraction:
# already contains some validation; append to it and reset "valid" for this field
field_extraction['validation'].append(field_validation)
if field_extraction['valid'] is True and field_validation['valid'] is False:
field_extraction['valid'] = False
else:
validation = []
validation.append(field_validation)
field_extraction['validation'] = validation
field_extraction['valid'] = field_validation['valid']
#schema_json is an array with one element (root)
def get_schema_with_validation(self, schema_json):
#schema_json[0]['list'] contains list of fields
#a field can be a simple field or another list
self.add_validation_to_schema(schema_json[0]['list'])
#print json.dumps(schema_json, sort_keys=True, indent=2, separators=(',', ': '))
return schema_json
def add_validation_to_schema(self, schema_field_list):
for field in schema_field_list:
if field['schemaid'] in self.__validation:
# add it
field['validation'] = self.__validation[field['schemaid']]
if 'list' in field:
#this is a list field
self.add_validation_to_schema(field['list'])
#validation is either
# {"valid":true} OR
# {"valid":false}
@staticmethod
def get_validation_for_page_1(page_extraction, validation):
# if I found something false everything is false
if validation['valid'] is False:
return
if isinstance(page_extraction, list):
# this is a sequence item so we look at the 'sub_rules' if they exist
for row in page_extraction:
#print 'row:',row
if 'sub_rules' in row:
#print "in subrules...."
Validation.get_validation_for_page_1(row['sub_rules'], validation)
elif isinstance(page_extraction, dict):
#print 'dict ...'
# this is a standard rule so we look at this one itself
for name in page_extraction:
#print 'name ...', name
if 'valid' in page_extraction[name]:
#found field with validation
if page_extraction[name]['valid'] is False:
validation['valid'] = False
#print "False..."
return
if 'sub_rules' in page_extraction[name]:
#print "in subrules...."
Validation.get_validation_for_page_1(page_extraction[name]['sub_rules'], validation)
elif 'sequence' in page_extraction[name]:
#print "in sequence ..."
Validation.get_validation_for_page_1(page_extraction[name]['sequence'], validation)
#returns either true or false; if it finds one false validation it returns false
# {"valid":true} OR
# {"valid":false}
@staticmethod
def get_validation_for_extraction(extraction):
validation = {}
validation['valid'] = True
for page in extraction:
Validation.get_validation_for_page_1(extraction[page], validation)
if validation['valid'] is False:
return validation
return validation
#returns either true or false; if it finds one false validation it returns false
# {"valid":true} OR
# {"valid":false}
@staticmethod
def get_validation_for_page(page_extraction):
validation = {}
validation['valid'] = True
Validation.get_validation_for_page_1(page_extraction, validation)
return validation
def __str__(self):
output = str(self.__validation)
return output
class Usage(Exception):
def __init__(self, msg):
self.msg = msg
def main(argv=None):
# with codecs.open("validation_1.json", "r", "utf-8") as myfile:
# file_str = myfile.read().encode('utf-8')
# validation_json = json.loads(file_str)
# v = Validation(validation_json)
# print "validation:", v
#
# with codecs.open("page_extraction_2.json", "r", "utf-8") as myfile:
# page_str = myfile.read().encode('utf-8')
# page_json = json.loads(page_str)
# new_page_json = v.validate_extraction(page_json)
# print "end page:", new_page_json
# print json.dumps(new_page_json, indent=2, separators=(',', ': '))
#
# result = Validation.get_validation_for_extraction(new_page_json)
# print json.dumps(result, indent=2, separators=(',', ': '))
# sys.exit()
if argv is None:
argv = sys.argv
try:
try:
opts, args = getopt.getopt(argv[1:], "sh", ["simple", "help"])
simple = False
for opt in opts:
if opt in [('-s', ''), ('--simple', '')]:
simple = True
if opt in [('-h', ''), ('--help', '')]:
raise Usage(
'python validation/Validation.py [OPTIONAL_PARAMS] [FILE_WITH_VALIDATION] [FILE_WITH_EXTRACT]\n\t[OPTIONAL_PARAMS]: -s to return only true/false')
except getopt.error as msg:
raise Usage(msg)
if len(args) != 2:
raise Usage('python validation/Validation.py [OPTIONAL_PARAMS] [FILE_WITH_VALIDATION] [FILE_WITH_EXTRACT]\n\t[OPTIONAL_PARAMS]: -s to return only true/false')
validation_file = args[0]
extraction_file = args[1]
with codecs.open(validation_file, "r", "utf-8") as myfile:
validation_str = myfile.read().encode('utf-8')
validation_json = json.loads(validation_str)
v = Validation(validation_json)
with codecs.open(extraction_file, "r", "utf-8") as myfile:
extraction_str = myfile.read().encode('utf-8')
page_json = json.loads(extraction_str)
new_page_json = v.validate_page(page_json)
if simple:
result = Validation.get_validation_for_page(new_page_json)
print(json.dumps(result, indent=2, separators=(',', ': ')))
else:
print(json.dumps(new_page_json, indent=2, separators=(',', ': ')))
except Usage as err:
print(err.msg, file=sys.stderr)
print("for help use --help", file=sys.stderr)
return 2
if __name__ == "__main__":
sys.exit(main())
#Sample of extraction with validation metadata
#
# "extraction": {
# "page1": {
# "-whataboutthistable0005": {
# "begin_index": 87,
# "end_index": 96,
# "extract": "Firstname",
# "rule_id": "aa75710a-334d-458e-bcb5-f4298c8ea99b"
# },
# "0007": {
# "begin_index": 106,
# "end_index": 114,
# "extract": "Lastname",
# "rule_id": "607d9120-c56d-4c34-966c-3d470037f2ad",
# "valid": false,
# "validation": [
# {
# "param": "param1",
# "type": "RULE1",
# "valid": true
# },
# {
# "type": "RULE3",
# "valid": false
# }
# ]
# },
# "0012": {
# "begin_index": 201,
# "end_index": 239,
# "extract": "Bubba</td> <td>Yourd</td> <td>66</td>",
# "rule_id": "db221681-c05c-41e5-a22c-f4070e9314ff"
# },
# "Age0011": {
# "begin_index": 148,
# "end_index": 186,
# "extract": "Bill</td> <td>Wilson</td> <td>33</td>",
# "rule_id": "07fdbe8d-217b-47ae-b8ba-c2e8d5b43980"
# },
# "_list0001": {
# "begin_index": 50,
# "end_index": 313,
# "extract": " <table style=\"width:100%\"> <tr> <th>Firstname</th> <th>Lastname</th> <th>Age</th> </tr> <tr> <td>Bill</td> <td>Wilson</td> <td>33</td> </tr> <tr> <td>Bubba</td> <td>Yourd</td> <td>66</td> </tr> </table> ENDOFPAGE",
# "rule_id": "c06c2aa4-6150-4968-9968-2cb37cb6de13",
# "sequence": [
# {
# "begin_index": 87,
# "end_index": 131,
# "extract": "Firstname</th> <th>Lastname</th> <th>Age</th",
# "sequence_number": 1
# },
# {
# "begin_index": 148,
# "end_index": 184,
# "extract": "Bill</td> <td>Wilson</td> <td>33</td",
# "sequence_number": 2
# },
# {
# "begin_index": 201,
# "end_index": 237,
# "extract": "Bubba</td> <td>Yourd</td> <td>66</td",
# "sequence_number": 3
# }
# ],
# "valid": false,
# "validation": [
# {
# "param": "param3",
# "type": "RULE3",
# "valid": false
# }
# ]
# },
# "us_state0001": {
# "begin_index": 16,
# "end_index": 20,
# "extract": "me",
# "rule_id": "180c5bf4-50ee-42f0-b0f1-6339d290f207"
# }
# }
# }
# }}
|
|
"""Module containing the Train class and support functionality."""
__authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
__email__ = "pylearn-dev@googlegroups"
from datetime import datetime
import os
import sys
import logging
import warnings
from pylearn2.utils import serial
from pylearn2.utils.string_utils import preprocess
from pylearn2.monitor import Monitor
from pylearn2.space import NullSpace
from pylearn2.utils.timing import log_timing, total_seconds
from pylearn2.utils import sharedX
log = logging.getLogger(__name__)
class Train(object):
"""
A class representing the main loop of the training script. Trains the
specified model using the specified algorithm on the specified dataset.
After each call to the training algorithm, the model is saved to
`save_path`. May be enhanced with `TrainExtension` plugins.
Parameters
----------
dataset : `pylearn2.datasets.dataset.Dataset`
model : `pylearn2.models.model.Model`
algorithm : \
`pylearn2.training_algorithms.training_algorithm.TrainingAlgorithm`, \
optional
save_path : str, optional
Path to save (with pickle / joblib) the model.
save_freq : int, optional
Frequency of saves, in epochs. A frequency of zero disables
automatic saving altogether. A frequency of 1 saves every
epoch. A frequency of 2 saves every other epoch, etc.
(default=0, i.e. never save). Note: when automatic saving is
enabled (eg save_freq > 0), the model is always saved after
learning, even when the final epoch is not a multiple of
`save_freq`.
extensions : iterable, optional
A collection of `TrainExtension` objects whose callbacks are
triggered at various points in learning.
allow_overwrite : bool, optional
If `True`, will save the model to save_path even if there is
already something there. Otherwise, will raise an error if the
`save_path` is already occupied.
"""
def __init__(self, dataset, model, algorithm=None, save_path=None,
save_freq=0, extensions=None, allow_overwrite=True):
self.allow_overwrite = allow_overwrite
self.first_save = True
self.dataset = dataset
self.model = model
self.algorithm = algorithm
if save_path is not None:
if save_freq == 0:
warnings.warn('save_path specified but save_freq is 0 '
'(never save). Is this intentional?')
self.save_path = preprocess(save_path)
else:
if save_freq > 0:
phase_variable = 'PYLEARN2_TRAIN_PHASE'
if phase_variable in os.environ:
phase = 'phase%d' % int(os.environ[phase_variable])
tokens = [os.environ['PYLEARN2_TRAIN_FILE_FULL_STEM'],
phase, 'pkl']
else:
tokens = os.environ['PYLEARN2_TRAIN_FILE_FULL_STEM'], 'pkl'
self.save_path = '.'.join(tokens)
self.save_freq = save_freq
if hasattr(self.dataset, 'yaml_src'):
self.model.dataset_yaml_src = self.dataset.yaml_src
else:
warnings.warn("dataset has no yaml src, model won't know what " +
"data it was trained on")
self.extensions = extensions if extensions is not None else []
self.training_seconds = sharedX(value=0,
name='training_seconds_this_epoch')
self.total_seconds = sharedX(value=0, name='total_seconds_last_epoch')
def setup_extensions(self):
""" Calls setup on all extensions."""
for ext in self.extensions:
ext.setup(self.model, self.dataset, self.algorithm)
def exceeded_time_budget(self, t0, time_budget):
"""
.. todo::
WRITEME
"""
dt = total_seconds(datetime.now() - t0)
if time_budget is not None and dt >= time_budget:
log.warning("Time budget exceeded (%.3f/%d seconds).",
dt, time_budget)
self.model.monitor.time_budget_exceeded = True
return True
else:
return False
def setup(self):
"""
Sets up the main loop. This is also called at the start of the
main loop, so you need only call it if you're using a driver
script that replaces the main loop with something else.
"""
self.model.monitor = Monitor.get_monitor(self.model)
self.model.monitor.time_budget_exceeded = False
if self.algorithm is not None:
self.algorithm.setup(model=self.model, dataset=self.dataset)
self.setup_extensions()
# Model.modify_updates is used by the training algorithm to
# enforce constraints after each step of learning. Here we
# make sure the constraints are enforced from the start.
self.model.enforce_constraints()
def main_loop(self, time_budget=None):
"""
Repeatedly runs an epoch of the training algorithm, runs any
epoch-level callbacks, and saves the model.
Parameters
----------
time_budget : int, optional
The maximum number of seconds before interrupting
training. Default is `None`, no time limit.
"""
t0 = datetime.now()
self.setup()
if self.algorithm is None:
extension_continue = self.run_callbacks_and_monitoring()
# First check if the model is already beyond the stop criteria of
# training, if so, just return directly.
continue_learning = (self.model.continue_learning() and
extension_continue)
assert continue_learning in [True, False, 0, 1]
while continue_learning:
if self.exceeded_time_budget(t0, time_budget):
break
rval = self.model.train_all(dataset=self.dataset)
if rval is not None:
raise ValueError(
"Model.train_all should not return anything. Use "
"Model.continue_learning to control whether "
"learning continues.")
self.model.monitor.report_epoch()
extension_continue = self.run_callbacks_and_monitoring()
freq = self.save_freq
if freq > 0 and \
self.model.monitor.get_epochs_seen() % freq == 0:
self.save()
continue_learning = (self.model.continue_learning() and
extension_continue)
assert continue_learning in [True, False, 0, 1]
else:
if not hasattr(self.model, 'monitor'):
# TODO: is this really necessary? I just put this error here
# to prevent an AttributeError later, but I think we could
# rewrite to avoid the AttributeError
raise RuntimeError("The algorithm is responsible for setting"
" up the Monitor, but failed to.")
if len(self.model.monitor._datasets) > 0:
# This monitoring channel keeps track of a shared variable,
# which does not need inputs nor data.
self.training_seconds.__doc__ = """\
The number of seconds that were spent in actual training during the most
recent epoch. This excludes seconds that were spent running callbacks for
the extensions, computing monitoring channels, etc."""
self.model.monitor.add_channel(
name="training_seconds_this_epoch",
ipt=None,
val=self.training_seconds,
data_specs=(NullSpace(), ''),
dataset=self.model.monitor._datasets[0])
self.total_seconds.__doc__ = """\
The number of seconds that were spent on the entirety of processing for the
previous epoch. This includes not only training but also the computation of
the monitoring channels, running TrainExtension callbacks, etc. This value
is reported for the *previous* epoch because the amount of time spent on
monitoring for this epoch is not known until the monitoring channels have
already been reported."""
self.model.monitor.add_channel(
name="total_seconds_last_epoch",
ipt=None,
val=self.total_seconds,
data_specs=(NullSpace(), ''),
dataset=self.model.monitor._datasets[0])
extension_continue = self.run_callbacks_and_monitoring()
# First check if the model is already beyond the stop criteria of
# training, if so, just return directly.
continue_learning = (
self.algorithm.continue_learning(self.model) and
extension_continue
)
assert continue_learning in [True, False, 0, 1]
while continue_learning:
if self.exceeded_time_budget(t0, time_budget):
break
with log_timing(log, None, level=logging.DEBUG,
callbacks=[self.total_seconds.set_value]):
with log_timing(
log, None, final_msg='Time this epoch:',
callbacks=[self.training_seconds.set_value]
):
rval = self.algorithm.train(dataset=self.dataset)
if rval is not None:
raise ValueError(
"TrainingAlgorithm.train should not return "
"anything. Use "
"TrainingAlgorithm.continue_learning to "
"control whether learning continues."
)
self.model.monitor.report_epoch()
extension_continue = self.run_callbacks_and_monitoring()
if self.save_freq > 0 and \
self.model.monitor.get_epochs_seen() % \
self.save_freq == 0:
self.save()
continue_learning = (
self.algorithm.continue_learning(self.model) and
extension_continue
)
assert continue_learning in [True, False, 0, 1]
self.model.monitor.training_succeeded = True
if self.save_freq > 0:
self.save()
def run_callbacks_and_monitoring(self):
"""
Runs the monitor, then calls Extension.on_monitor for all extensions.
Returns
-------
continue_learning : bool
If `False`, signals that at least one train
extension wants to stop learning.
"""
self.model.monitor()
continue_learning = True
for extension in self.extensions:
try:
extension.on_monitor(self.model, self.dataset, self.algorithm)
except TypeError:
logging.warning('Failure during callback ' + str(extension))
raise
# We catch an exception here instead of relying on return
# values for backward compatibility. Lots of extensions
# exist that don't return anything, currently.
except StopIteration:
log.info("Extension requested training halt.")
continue_learning = False
return continue_learning
def save(self):
"""Saves the model."""
#TODO-- save state of training algorithm so training can be
# resumed after a crash
for extension in self.extensions:
extension.on_save(self.model, self.dataset, self.algorithm)
if self.save_path is not None:
with log_timing(log, 'Saving to ' + self.save_path):
if self.first_save and (not self.allow_overwrite) \
and os.path.exists(self.save_path):
# Every job overwrites its own output on the second save
# and every save thereafter. The "allow_overwrite" flag
# only pertains to overwriting the output of previous jobs.
raise IOError("Trying to overwrite file when not allowed.")
try:
# Make sure that saving does not serialize the dataset
self.dataset._serialization_guard = SerializationGuard()
serial.save(self.save_path, self.model,
on_overwrite='backup')
finally:
self.dataset._serialization_guard = None
self.first_save = False
class SerializationGuard(object):
"""
This class exists to make objects that cannot be serialized. It is used to
make sure you don't accidentally put pointers to objects that should not
be serialized, such as the dataset, into objects that Train automatically
serializes, such as the Model.
"""
def __getstate__(self):
"""
This method is called when someone attempts to serialize the object.
This method raises an exception to prevent the serialization from
occurring.
"""
raise IOError("You tried to serialize something that should not"
" be serialized.")
if __name__ == "__main__":
logging.basicConfig(level=logging.ERROR)
log.error("You probably meant to run scripts/train.py")
sys.exit(1)
|
|
import tensorflow as tf
import numpy as np
import cv2
from configs.kitti_config import config
# ################## From SqueezeDet #########################
def safe_exp(w, thresh):
"""Safe exponential function for tensors."""
slope = np.exp(thresh)
with tf.name_scope('safe_exponential'):
lin_region = tf.to_float(w > thresh)
lin_out = slope * (w - thresh + 1.)
exp_out = tf.exp(w)
out = lin_region * lin_out + (1. - lin_region) * exp_out
return out
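# Note on safe_exp (added comment): for w <= thresh the output is the ordinary
# exp(w); above thresh it switches to the linear branch exp(thresh) * (w - thresh + 1),
# which matches exp(w) in both value and slope at w == thresh, so the function stays
# continuous and differentiable while avoiding overflow for large logits.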
def bbox_transform_inv(bbox):
"""convert a bbox of form [xmin, ymin, xmax, ymax] to [cx, cy, w, h]. Works
for numpy array or list of tensors.
"""
with tf.name_scope('bbox_transform_inv') as scope:
xmin, ymin, xmax, ymax = bbox
out_box = [[]] * 4
width = xmax - xmin + 1.0
height = ymax - ymin + 1.0
out_box[0] = xmin + 0.5 * width
out_box[1] = ymin + 0.5 * height
out_box[2] = width
out_box[3] = height
return out_box
def bbox_transform(bbox):
"""convert a bbox of form [cx, cy, w, h] to [xmin, ymin, xmax, ymax]. Works
for numpy array or list of tensors.
"""
with tf.name_scope('bbox_transform') as scope:
cx, cy, w, h = bbox
out_box = [[]] * 4
out_box[0] = cx - w / 2
out_box[1] = cy - h / 2
out_box[2] = cx + w / 2
out_box[3] = cy + h / 2
return out_box
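# Worked example (added comment, assuming plain Python floats): the center-format
# box [cx, cy, w, h] = [5., 5., 4., 2.] maps through bbox_transform to the corner
# format [xmin, ymin, xmax, ymax] = [3., 4., 7., 6.]. Note that bbox_transform_inv
# adds 1.0 to width and height (pixel-inclusive convention), so the two functions
# are not exact inverses of each other.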
def nms(boxes, probs, threshold):
"""Non-Maximum supression.
Args:
boxes: array of [cx, cy, w, h] (center format)
probs: array of probabilities
threshold: two boxes are considered overlapping if their IOU is largher than
this threshold
form: 'center' or 'diagonal'
Returns:
keep: array of True or False.
"""
order = probs.argsort()[::-1]
keep = [True]*len(order)
for i in range(len(order)-1):
ovps = batch_iou(boxes[order[i+1:]], boxes[order[i]])
for j, ov in enumerate(ovps):
if ov > threshold:
keep[order[j+i+1]] = False
return keep
def interpre_prediction(prediction, input_mask, anchors, box_input):
# probability
batch_size = tf.shape(input_mask)[0]
num_class_probs = config.NUM_ANCHORS * config.NUM_CLASSES
pred_class_probs = tf.reshape(
tf.nn.softmax(
tf.reshape(
prediction[:, :, :, :num_class_probs],
[-1, config.NUM_CLASSES]
)
),
[batch_size, config.ANCHORS, config.NUM_CLASSES],
name='pred_class_probs'
)
# confidence
num_confidence_scores = config.NUM_ANCHORS + num_class_probs
pred_conf = tf.sigmoid(
tf.reshape(
prediction[:, :, :, num_class_probs:num_confidence_scores],
[batch_size, config.ANCHORS]
),
name='pred_confidence_score'
)
# bbox_delta
pred_box_delta = tf.reshape(
prediction[:, :, :, num_confidence_scores:],
[batch_size, config.ANCHORS, 4],
name='bbox_delta'
)
# number of object. Used to normalize bbox and classification loss
num_objects = tf.reduce_sum(input_mask, name='num_objects')
with tf.name_scope('bbox') as scope:
with tf.name_scope('stretching'):
delta_x, delta_y, delta_w, delta_h = tf.unstack(
pred_box_delta, axis=2)
anchor_x = anchors[:, 0]
anchor_y = anchors[:, 1]
anchor_w = anchors[:, 2]
anchor_h = anchors[:, 3]
box_center_x = tf.identity(
anchor_x + delta_x * anchor_w, name='bbox_cx')
box_center_y = tf.identity(
anchor_y + delta_y * anchor_h, name='bbox_cy')
box_width = tf.identity(
anchor_w * safe_exp(delta_w, config.EXP_THRESH),
name='bbox_width')
box_height = tf.identity(
anchor_h * safe_exp(delta_h, config.EXP_THRESH),
name='bbox_height')
with tf.name_scope('trimming'):
xmins, ymins, xmaxs, ymaxs = bbox_transform(
[box_center_x, box_center_y, box_width, box_height])
# The max x position is config.IMG_WIDTH - 1 since we use zero-based
# pixels. Same for y.
xmins = tf.minimum(
tf.maximum(0.0, xmins), config.IMG_WIDTH - 1.0, name='bbox_xmin')
ymins = tf.minimum(
tf.maximum(0.0, ymins), config.IMG_HEIGHT - 1.0, name='bbox_ymin')
xmaxs = tf.maximum(
tf.minimum(config.IMG_WIDTH - 1.0, xmaxs), 0.0, name='bbox_xmax')
ymaxs = tf.maximum(
tf.minimum(config.IMG_HEIGHT - 1.0, ymaxs), 0.0, name='bbox_ymax')
det_boxes = tf.transpose(
tf.stack(bbox_transform_inv([xmins, ymins, xmaxs, ymaxs])),
(1, 2, 0), name='bbox'
)
with tf.name_scope('IOU'):
def _tensor_iou(box1, box2):
with tf.name_scope('intersection'):
xmin = tf.maximum(box1[0], box2[0], name='xmin')
ymin = tf.maximum(box1[1], box2[1], name='ymin')
xmax = tf.minimum(box1[2], box2[2], name='xmax')
ymax = tf.minimum(box1[3], box2[3], name='ymax')
w = tf.maximum(0.0, xmax - xmin, name='inter_w')
h = tf.maximum(0.0, ymax - ymin, name='inter_h')
intersection = tf.multiply(w, h, name='intersection')
with tf.name_scope('union'):
w1 = tf.subtract(box1[2], box1[0], name='w1')
h1 = tf.subtract(box1[3], box1[1], name='h1')
w2 = tf.subtract(box2[2], box2[0], name='w2')
h2 = tf.subtract(box2[3], box2[1], name='h2')
union = w1 * h1 + w2 * h2 - intersection
return intersection / (union + config.EPSILON) \
* tf.reshape(input_mask, [batch_size, config.ANCHORS])
ious = _tensor_iou(
bbox_transform(tf.unstack(det_boxes, axis=2)),
bbox_transform(tf.unstack(box_input, axis=2))
)
with tf.name_scope('probability') as scope:
probs = tf.multiply(
pred_class_probs,
tf.reshape(pred_conf, [batch_size, config.ANCHORS, 1]),
name='final_class_prob'
)
det_probs = tf.reduce_max(probs, 2, name='score')
det_class = tf.argmax(probs, 2, name='class_idx')
return pred_box_delta, pred_class_probs, pred_conf, ious, det_probs, det_boxes, det_class
def losses(input_mask, labels, ious, box_delta_input, pred_class_probs, pred_conf, pred_box_delta):
batch_size = tf.shape(input_mask)[0]
num_objects = tf.reduce_sum(input_mask, name='num_objects')
with tf.name_scope('class_regression') as scope:
# cross-entropy: q * -log(p) + (1-q) * -log(1-p)
# add a small value into log to prevent blowing up
class_loss = tf.truediv(
tf.reduce_sum(
(labels * (-tf.log(pred_class_probs + config.EPSILON))
+ (1 - labels) * (-tf.log(1 - pred_class_probs + config.EPSILON)))
* input_mask * config.LOSS_COEF_CLASS),
num_objects,
name='class_loss'
)
tf.losses.add_loss(class_loss)
with tf.name_scope('confidence_score_regression') as scope:
input_mask_ = tf.reshape(input_mask, [batch_size, config.ANCHORS])
conf_loss = tf.reduce_mean(
tf.reduce_sum(
tf.square((ious - pred_conf))
* (input_mask_ * config.LOSS_COEF_CONF_POS / num_objects
+ (1 - input_mask_) * config.LOSS_COEF_CONF_NEG / (config.ANCHORS - num_objects)),
reduction_indices=[1]
),
name='confidence_loss'
)
tf.losses.add_loss(conf_loss)
with tf.name_scope('bounding_box_regression') as scope:
bbox_loss = tf.truediv(
tf.reduce_sum(
config.LOSS_COEF_BBOX * tf.square(
input_mask * (pred_box_delta - box_delta_input))),
num_objects,
name='bbox_loss'
)
tf.losses.add_loss(bbox_loss)
# # add above losses as well as weight decay losses to form the total loss
# loss = tf.add_n(tf.get_collection('losses'), name='total_loss')
# return loss
# ################# MobileNet Det ########################
def xywh_to_yxyx(bbox):
shape = bbox.get_shape().as_list()
_axis = 1 if len(shape) > 1 else 0
[x, y, w, h] = tf.unstack(bbox, axis=_axis)
y_min = y - 0.5 * h
x_min = x - 0.5 * w
y_max = y + 0.5 * h
x_max = x + 0.5 * w
return tf.stack([y_min, x_min, y_max, x_max], axis=_axis)
def yxyx_to_xywh(bbox):
y_min = bbox[:, 0]
x_min = bbox[:, 1]
y_max = bbox[:, 2]
x_max = bbox[:, 3]
x = (x_min + x_max) * 0.5
y = (y_min + y_max) * 0.5
w = x_max - x_min
h = y_max - y_min
return tf.stack([x, y, w, h], axis=1)
def rearrange_coords(bbox):
y_min = bbox[:, 0]
x_min = bbox[:, 1]
y_max = bbox[:, 2]
x_max = bbox[:, 3]
return tf.stack([x_min, y_min, x_max, y_max])
def scale_bboxes(bbox, img_shape):
"""Scale bboxes to [0, 1). bbox format [ymin, xmin, ymax, xmax]
Args:
bbox: 2-D with shape '[num_bbox, 4]'
img_shape: 1-D with shape '[4]'
Return:
scaled_bboxes: scaled bboxes
"""
img_h = tf.cast(img_shape[0], dtype=tf.float32)
img_w = tf.cast(img_shape[1], dtype=tf.float32)
shape = bbox.get_shape().as_list()
_axis = 1 if len(shape) > 1 else 0
[y_min, x_min, y_max, x_max] = tf.unstack(bbox, axis=_axis)
y_1 = tf.truediv(y_min, img_h)
x_1 = tf.truediv(x_min, img_w)
y_2 = tf.truediv(y_max, img_h)
x_2 = tf.truediv(x_max, img_w)
return tf.stack([y_1, x_1, y_2, x_2], axis=_axis)
def iou(bbox_1, bbox_2):
"""Compute iou of a box with another box. Box format '[y_min, x_min, y_max, x_max]'.
Args:
bbox_1: 1-D with shape `[4]`.
bbox_2: 1-D with shape `[4]`.
Returns:
IOU
"""
lr = tf.minimum(bbox_1[3], bbox_2[3]) - tf.maximum(bbox_1[1], bbox_2[1])
tb = tf.minimum(bbox_1[2], bbox_2[2]) - tf.maximum(bbox_1[0], bbox_2[0])
lr = tf.maximum(lr, lr * 0)
tb = tf.maximum(tb, tb * 0)
intersection = tf.multiply(tb, lr)
union = tf.subtract(
tf.multiply((bbox_1[3] - bbox_1[1]), (bbox_1[2] - bbox_1[0])) +
tf.multiply((bbox_2[3] - bbox_2[1]), (bbox_2[2] - bbox_2[0])),
intersection
)
iou = tf.div(intersection, union)
return iou
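# Worked example (added comment): for bbox_1 = [0., 0., 2., 2.] and
# bbox_2 = [1., 1., 3., 3.] in [y_min, x_min, y_max, x_max] format, the overlap is
# a 1x1 square, the union is 4 + 4 - 1 = 7, so the returned IOU is 1/7 ~= 0.143.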
def batch_iou(bboxes, bbox):
"""Compute iou of a batch of boxes with another box. Box format '[y_min, x_min, y_max, x_max]'.
Args:
bboxes: A batch of boxes. 2-D with shape `[B, 4]`.
bbox: A single box. 1-D with shape `[4]`.
Returns:
Batch of IOUs
"""
lr = tf.maximum(
tf.minimum(bboxes[:, 3], bbox[3]) -
tf.maximum(bboxes[:, 1], bbox[1]),
0
)
tb = tf.maximum(
tf.minimum(bboxes[:, 2], bbox[2]) -
tf.maximum(bboxes[:, 0], bbox[0]),
0
)
intersection = tf.multiply(tb, lr)
union = tf.subtract(
tf.multiply((bboxes[:, 3] - bboxes[:, 1]), (bboxes[:, 2] - bboxes[:, 0])) +
tf.multiply((bbox[3] - bbox[1]), (bbox[2] - bbox[0])),
intersection
)
iou = tf.div(intersection, union)
return iou
def batch_iou_fast(anchors, bboxes):
""" Compute iou of two batch of boxes. Box format '[y_min, x_min, y_max, x_max]'.
Args:
anchors: know shape
bboxes: dynamic shape
Return:
ious: 2-D with shape '[num_bboxes, num_anchors]'
"""
num_anchors = anchors.get_shape().as_list()[0]
num_bboxes = tf.shape(bboxes)[0]
box_indices = tf.reshape(tf.range(num_bboxes), shape=[-1, 1])
box_indices = tf.reshape(tf.stack([box_indices] * num_anchors, axis=1), shape=[-1, 1]) # use tf.tile instead
# box_indices = tf.tile(box_indices, [num_anchors, 1])
# box_indices = tf.Print(box_indices, [box_indices], "box_indices", summarize=100)
bboxes_m = tf.gather_nd(bboxes, box_indices)
# bboxes_m = tf.Print(bboxes_m, [bboxes_m], "bboxes_m", summarize=100)
anchors_m = tf.tile(anchors, [num_bboxes, 1])
# anchors_m = tf.Print(anchors_m, [anchors_m], "anchors_m", summarize=100)
lr = tf.maximum(
tf.minimum(bboxes_m[:, 3], anchors_m[:, 3]) -
tf.maximum(bboxes_m[:, 1], anchors_m[:, 1]),
0
)
tb = tf.maximum(
tf.minimum(bboxes_m[:, 2], anchors_m[:, 2]) -
tf.maximum(bboxes_m[:, 0], anchors_m[:, 0]),
0
)
intersection = tf.multiply(tb, lr)
union = tf.subtract(
tf.multiply((bboxes_m[:, 3] - bboxes_m[:, 1]), (bboxes_m[:, 2] - bboxes_m[:, 0])) +
tf.multiply((anchors_m[:, 3] - anchors_m[:, 1]), (anchors_m[:, 2] - anchors_m[:, 0])),
intersection
)
ious = tf.truediv(intersection, union)
ious = tf.reshape(ious, shape=[num_bboxes, num_anchors])
return ious
def compute_delta(gt_box, anchor):
"""Compute delta, anchor+delta = gt_box. Box format '[cx, cy, w, h]'.
Args:
gt_box: A single box. 1-D with shape `[4]`.
anchor: A single box. 1-D with shape `[4]`.
Returns:
delta: 1-D tensor with shape '[4]', [dx, dy, dw, dh]
"""
delta_x = (gt_box[0] - anchor[0]) / gt_box[2]
delta_y = (gt_box[1] - anchor[1]) / gt_box[3]
delta_w = tf.log(gt_box[2] / anchor[2])
delta_h = tf.log(gt_box[3] / anchor[3])
return tf.stack([delta_x, delta_y, delta_w, delta_h], axis=0)
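# Worked example (added comment): with gt_box = [10., 10., 4., 2.] and
# anchor = [8., 9., 4., 2.] in [cx, cy, w, h] format, the deltas are
# dx = (10 - 8) / 4 = 0.5, dy = (10 - 9) / 2 = 0.5, dw = log(4 / 4) = 0 and
# dh = log(2 / 2) = 0, i.e. compute_delta returns [0.5, 0.5, 0., 0.].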
def batch_delta(bboxes, anchors):
"""
Args:
bboxes: [num_bboxes, 4]
anchors: [num_bboxes, 4]
Return:
deltas: [num_bboxes, 4]
"""
bbox_x, bbox_y, bbox_w, bbox_h = tf.unstack(bboxes, axis=1)
anchor_x, anchor_y, anchor_w, anchor_h = tf.unstack(anchors, axis=1)
delta_x = (bbox_x - anchor_x) / bbox_w
delta_y = (bbox_y - anchor_y) / bbox_h
delta_w = tf.log(bbox_w / anchor_w)
delta_h = tf.log(bbox_h / anchor_h)
return tf.stack([delta_x, delta_y, delta_w, delta_h], axis=1)
def arg_closest_anchor(bboxes, anchors):
"""Find the closest anchor. Box Format [ymin, xmin, ymax, xmax]
"""
num_anchors = anchors.get_shape().as_list()[0]
num_bboxes = tf.shape(bboxes)[0]
_indices = tf.reshape(tf.range(num_bboxes), shape=[-1, 1])
_indices = tf.reshape(tf.stack([_indices] * num_anchors, axis=1), shape=[-1, 1])
bboxes_m = tf.gather_nd(bboxes, _indices)
# bboxes_m = tf.Print(bboxes_m, [bboxes_m], "bboxes_m", summarize=100)
anchors_m = tf.tile(anchors, [num_bboxes, 1])
# anchors_m = tf.Print(anchors_m, [anchors_m], "anchors_m", summarize=100)
square_dist = tf.squared_difference(bboxes_m[:, 0], anchors_m[:, 0]) + \
tf.squared_difference(bboxes_m[:, 1], anchors_m[:, 1]) + \
tf.squared_difference(bboxes_m[:, 2], anchors_m[:, 2]) + \
tf.squared_difference(bboxes_m[:, 3], anchors_m[:, 3])
square_dist = tf.reshape(square_dist, shape=[num_bboxes, num_anchors])
# square_dist = tf.Print(square_dist, [square_dist], "square_dist", summarize=100)
indices = tf.arg_min(square_dist, dimension=1)
return indices
def update_tensor(ref, indices, update):
zero = tf.cast(tf.sparse_to_dense(indices,
tf.shape(ref, out_type=tf.int64),
0,
default_value=1
),
dtype=tf.int64
)
update_value = tf.cast(tf.sparse_to_dense(indices,
tf.shape(ref, out_type=tf.int64),
update,
default_value=0
),
dtype=tf.int64
)
return ref * zero + update_value
def find_dup(a):
""" Find the duplicated elements in 1-D a tensor.
Args:
a: 1-D tensor.
Return:
more_than_one_vals: duplicated value in a.
indexes_in_a: duplicated value's index in a.
dups_in_a: duplicated value with duplicate in a.
"""
unique_a_vals, unique_idx = tf.unique(a)
count_a_unique = tf.unsorted_segment_sum(tf.ones_like(a),
unique_idx,
tf.shape(a)[0])
more_than_one = tf.greater(count_a_unique, 1)
more_than_one_idx = tf.squeeze(tf.where(more_than_one))
more_than_one_vals = tf.squeeze(tf.gather(unique_a_vals, more_than_one_idx))
not_duplicated, _ = tf.setdiff1d(a, more_than_one_vals)
dups_in_a, indexes_in_a = tf.setdiff1d(a, not_duplicated)
return more_than_one_vals, indexes_in_a, dups_in_a
def encode_annos(labels, bboxes, anchors, num_classes):
"""Encode annotations for losses computations.
All the output tensors have a fixed shape (no dynamic dimension).
Args:
labels: 1-D with shape `[num_bounding_boxes]`.
bboxes: 2-D with shape `[num_bounding_boxes, 4]`. Format [ymin, xmin, ymax, xmax]
anchors: 2-D tensor with shape `[num_anchors, 4]`. Format [cx, cy, w, h]
Returns:
input_mask: 2-D with shape `[num_anchors, 1]`, indicates which anchors are used to compute the loss.
labels_input: 2-D with shape `[num_anchors, num_classes]`, one-hot encoding for every anchor.
box_delta_input: 2-D with shape `[num_anchors, 4]`. Format [dcx, dcy, dw, dh]
box_input: 2-D with shape '[num_anchors, 4]'. Format [ymin, xmin, ymax, xmax]
"""
with tf.name_scope("Encode_annotations") as scope:
num_anchors = config.ANCHORS
# num_bboxes = tf.shape(bboxes)[0]
# Cal iou, find the target anchor
with tf.name_scope("Matching") as subscope:
ious = batch_iou_fast(xywh_to_yxyx(anchors), bboxes)
anchor_indices = tf.reshape(tf.arg_max(ious, dimension=1), shape=[-1, 1]) # target anchor indices
# anchor_indices = tf.Print(anchor_indices, [anchor_indices], "anchor_indices", summarize=100)
# discard duplicate # unique_idx wrong
anchor_indices, idx, count = tf.unique_with_counts(tf.reshape(anchor_indices, shape=[-1]))
ori_idx = tf.cumsum(tf.pad(count, [[1, 0]]))[:-1]
anchor_indices = tf.reshape(anchor_indices, shape=[-1, 1])
bboxes = tf.gather(bboxes, tf.unique(ori_idx)[0])
labels = tf.gather(labels, tf.unique(ori_idx)[0])
ious = tf.gather(ious, tf.unique(ori_idx)[0])
num_bboxes = tf.shape(anchor_indices)[0]
# TODO(shizehao):deal with duplicate
# with tf.name_scope("Deal_with_duplicate"):
# dup_anchor_indices, indices_in_a, dup_anchor_indices_with_dup = find_dup(tf.reshape(anchor_indices, shape=[-1]))
#
# # reset duplicated corresponding anchor
# conflicted_ious = tf.gather(ious, indices_in_a)
# top_k_anchor_indices = tf.nn.top_k(conflicted_ious, k=20).indices # shape = [num_conflicted_bboxes, 20]
# dup_group_idx = tf.where(tf.equal(dup_anchor_indices_with_dup, tf.reshape(dup_anchor_indices, shape=[-1, 1])))
# seg_group = tf.unstack(dup_group_idx, axis=1)[0]
with tf.name_scope("Deal_with_noneoverlap"):
# find the none-overlap bbox
bbox_indices = tf.reshape(tf.range(num_bboxes), shape=[-1, 1])
# bbox_indices = tf.Print(bbox_indices, [bbox_indices], "bbox_indices", summarize=100)
# anchor_indices = tf.Print(anchor_indices, [anchor_indices], "anchor_indices", summarize=100)
iou_indices = tf.concat([bbox_indices, tf.cast(anchor_indices, dtype=tf.int32)], axis=1)
# iou_indices = tf.Print(iou_indices, [iou_indices], "iou_indices", summarize=100)
target_iou = tf.gather_nd(ious, iou_indices)
# target_iou = tf.Print(target_iou,[target_iou],"target_iou",summarize=100)
none_overlap_bbox_indices = tf.where(target_iou <= 0) # 1-D
# none_overlap_bbox_indices = tf.Print(none_overlap_bbox_indices, [none_overlap_bbox_indices], "none_overlap_bbox_indices", summarize=100)
# find it's corresponding anchor
target_bbox = tf.gather_nd(bboxes, none_overlap_bbox_indices)
# target_bbox = tf.Print(target_bbox, [target_bbox], "target_bbox", summarize=100)
closest_anchor_indices = arg_closest_anchor(target_bbox, xywh_to_yxyx(anchors)) # 1-D
# closest_anchor_indices = tf.Print(closest_anchor_indices, [closest_anchor_indices, tf.gather(anchors, closest_anchor_indices)], "closest_anchor_indices", summarize=100)
with tf.name_scope("Update_anchor_indices"):
anchor_indices = tf.reshape(anchor_indices, shape=[-1])
anchor_indices = update_tensor(anchor_indices, none_overlap_bbox_indices, closest_anchor_indices)
anchor_indices = tf.reshape(anchor_indices, shape=[-1, 1])
with tf.name_scope("Delta") as subscope:
target_anchors = tf.gather_nd(anchors, anchor_indices)
bboxes = yxyx_to_xywh(bboxes)
delta = batch_delta(bboxes, target_anchors)
with tf.name_scope("Scattering") as subscope:
# bbox
box_input = tf.scatter_nd(anchor_indices,
bboxes,
shape=[num_anchors, 4]
)
# label
labels_input = tf.scatter_nd(anchor_indices,
tf.one_hot(labels, num_classes),
shape=[num_anchors, num_classes]
)
# delta
box_delta_input = tf.scatter_nd(anchor_indices,
delta,
shape=[num_anchors, 4]
)
# anchor mask
# unique_indices, _ = tf.unique(tf.reshape(anchor_indices, shape=[-1]))
# unique_indices = tf.Print(unique_indices, [unique_indices], summarize=100)
# num_bboxes = tf.Print(num_bboxes, [num_bboxes])
input_mask = tf.scatter_nd(anchor_indices,
tf.ones([num_bboxes]),
shape=[num_anchors])
input_mask = tf.reshape(input_mask, shape=[-1, 1])
return input_mask, labels_input, box_delta_input, box_input
def filter_prediction(boxes, probs, cls_idx):
"""Filter bounding box predictions with probability threshold and
non-maximum supression.
Args:
boxes: array of [cx, cy, w, h].
probs: array of probabilities
cls_idx: array of class indices
Returns:
final_boxes: array of filtered bounding boxes.
final_probs: array of filtered probabilities
final_cls_idx: array of filtered class indices
"""
# Assumes `np` (numpy), `nms` and the config thresholds (TOP_N_DETECTION,
# PROB_THRESH, NMS_THRESH, CLASSES) are available in this module.
if config.TOP_N_DETECTION < len(probs) and config.TOP_N_DETECTION > 0:
# keep only the TOP_N_DETECTION highest-probability detections
order = probs.argsort()[:-config.TOP_N_DETECTION - 1:-1]
probs = probs[order]
boxes = boxes[order]
cls_idx = cls_idx[order]
else:
# otherwise keep everything above the probability threshold
filtered_idx = np.nonzero(probs > config.PROB_THRESH)[0]
probs = probs[filtered_idx]
boxes = boxes[filtered_idx]
cls_idx = cls_idx[filtered_idx]
final_boxes = []
final_probs = []
final_cls_idx = []
# run per-class non-maximum suppression
for c in range(config.CLASSES):
idx_per_class = [i for i in range(len(probs)) if cls_idx[i] == c]
keep = nms(boxes[idx_per_class], probs[idx_per_class], config.NMS_THRESH)
for i in range(len(keep)):
if keep[i]:
final_boxes.append(boxes[idx_per_class[i]])
final_probs.append(probs[idx_per_class[i]])
final_cls_idx.append(c)
return final_boxes, final_probs, final_cls_idx
def viz_prediction_result(images, batch_det_bbox, batch_det_class, batch_det_prob):
pass
# for i in range(len(images)):
# # draw prediction
# det_bbox, det_prob, det_class = filter_prediction(
# batch_det_bbox[i], batch_det_prob[i], batch_det_class[i])
#
# keep_idx = [idx for idx in range(len(det_prob)) \
# if det_prob[idx] > config.PLOT_PROB_THRESH]
# det_bbox = [det_bbox[idx] for idx in keep_idx]
# det_prob = [det_prob[idx] for idx in keep_idx]
# det_class = [det_class[idx] for idx in keep_idx]
#
# _draw_box(
# images[i], det_bbox,
# [mc.CLASS_NAMES[idx]+': (%.2f)'% prob \
# for idx, prob in zip(det_class, det_prob)],
# (0, 0, 255))
|
|
"""
Support for Bluesound devices.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.bluesound/
"""
import asyncio
from asyncio.futures import CancelledError
from datetime import timedelta
import logging
import aiohttp
from aiohttp.client_exceptions import ClientError
from aiohttp.hdrs import CONNECTION, KEEP_ALIVE
import async_timeout
import voluptuous as vol
from homeassistant.components.media_player import (
MediaPlayerDevice, PLATFORM_SCHEMA)
from homeassistant.components.media_player.const import (
ATTR_MEDIA_ENQUEUE, DOMAIN, MEDIA_TYPE_MUSIC,
SUPPORT_CLEAR_PLAYLIST, SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA, SUPPORT_PREVIOUS_TRACK, SUPPORT_SEEK,
SUPPORT_SELECT_SOURCE, SUPPORT_SHUFFLE_SET, SUPPORT_STOP,
SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET, SUPPORT_VOLUME_STEP)
from homeassistant.const import (
ATTR_ENTITY_ID, CONF_HOST, CONF_HOSTS, CONF_NAME, CONF_PORT,
EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP, STATE_IDLE, STATE_OFF,
STATE_PAUSED, STATE_PLAYING)
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.util import Throttle
import homeassistant.util.dt as dt_util
REQUIREMENTS = ['xmltodict==0.11.0']
_LOGGER = logging.getLogger(__name__)
ATTR_MASTER = 'master'
DATA_BLUESOUND = 'bluesound'
DEFAULT_PORT = 11000
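# Seconds to wait before polling a node again after it has gone offline.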
NODE_OFFLINE_CHECK_TIMEOUT = 180
NODE_RETRY_INITIATION = timedelta(minutes=3)
SERVICE_CLEAR_TIMER = 'bluesound_clear_sleep_timer'
SERVICE_JOIN = 'bluesound_join'
SERVICE_SET_TIMER = 'bluesound_set_sleep_timer'
SERVICE_UNJOIN = 'bluesound_unjoin'
STATE_GROUPED = 'grouped'
SYNC_STATUS_INTERVAL = timedelta(minutes=5)
UPDATE_CAPTURE_INTERVAL = timedelta(minutes=30)
UPDATE_PRESETS_INTERVAL = timedelta(minutes=30)
UPDATE_SERVICES_INTERVAL = timedelta(minutes=30)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_HOSTS): vol.All(cv.ensure_list, [{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
}])
})
BS_SCHEMA = vol.Schema({
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
})
BS_JOIN_SCHEMA = BS_SCHEMA.extend({
vol.Required(ATTR_MASTER): cv.entity_id,
})
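# Map each custom Bluesound service to the player coroutine implementing it and
# the schema used to validate its call data.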
SERVICE_TO_METHOD = {
SERVICE_JOIN: {
'method': 'async_join',
'schema': BS_JOIN_SCHEMA},
SERVICE_UNJOIN: {
'method': 'async_unjoin',
'schema': BS_SCHEMA},
SERVICE_SET_TIMER: {
'method': 'async_increase_timer',
'schema': BS_SCHEMA},
SERVICE_CLEAR_TIMER: {
'method': 'async_clear_timer',
'schema': BS_SCHEMA}
}
def _add_player(hass, async_add_entities, host, port=None, name=None):
"""Add Bluesound players."""
if host in [x.host for x in hass.data[DATA_BLUESOUND]]:
return
@callback
def _init_player(event=None):
"""Start polling."""
hass.async_create_task(player.async_init())
@callback
def _start_polling(event=None):
"""Start polling."""
player.start_polling()
@callback
def _stop_polling():
"""Stop polling."""
player.stop_polling()
@callback
def _add_player_cb():
"""Add player after first sync fetch."""
async_add_entities([player])
_LOGGER.info("Added device with name: %s", player.name)
if hass.is_running:
_start_polling()
else:
hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_START, _start_polling)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _stop_polling)
player = BluesoundPlayer(hass, host, port, name, _add_player_cb)
hass.data[DATA_BLUESOUND].append(player)
if hass.is_running:
_init_player()
else:
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, _init_player)
async def async_setup_platform(
hass, config, async_add_entities, discovery_info=None):
"""Set up the Bluesound platforms."""
if DATA_BLUESOUND not in hass.data:
hass.data[DATA_BLUESOUND] = []
if discovery_info:
_add_player(hass, async_add_entities, discovery_info.get(CONF_HOST),
discovery_info.get(CONF_PORT, None))
return
hosts = config.get(CONF_HOSTS, None)
if hosts:
for host in hosts:
_add_player(
hass, async_add_entities, host.get(CONF_HOST),
host.get(CONF_PORT), host.get(CONF_NAME))
async def async_service_handler(service):
"""Map services to method of Bluesound devices."""
method = SERVICE_TO_METHOD.get(service.service)
if not method:
return
params = {key: value for key, value in service.data.items()
if key != ATTR_ENTITY_ID}
entity_ids = service.data.get(ATTR_ENTITY_ID)
if entity_ids:
target_players = [player for player in hass.data[DATA_BLUESOUND]
if player.entity_id in entity_ids]
else:
target_players = hass.data[DATA_BLUESOUND]
for player in target_players:
await getattr(player, method['method'])(**params)
for service in SERVICE_TO_METHOD:
schema = SERVICE_TO_METHOD[service]['schema']
hass.services.async_register(
DOMAIN, service, async_service_handler, schema=schema)
class BluesoundPlayer(MediaPlayerDevice):
"""Representation of a Bluesound Player."""
def __init__(self, hass, host, port=None, name=None, init_callback=None):
"""Initialize the media player."""
self.host = host
self._hass = hass
self.port = port
self._polling_session = async_get_clientsession(hass)
self._polling_task = None # The actual polling task.
self._name = name
self._icon = None
self._capture_items = []
self._services_items = []
self._preset_items = []
self._sync_status = {}
self._status = None
self._last_status_update = None
self._is_online = False
self._retry_remove = None
self._lastvol = None
self._master = None
self._is_master = False
self._group_name = None
self._init_callback = init_callback
if self.port is None:
self.port = DEFAULT_PORT
class _TimeoutException(Exception):
pass
@staticmethod
def _try_get_index(string, search_string):
"""Get the index."""
try:
return string.index(search_string)
except ValueError:
return -1
async def force_update_sync_status(
self, on_updated_cb=None, raise_timeout=False):
"""Update the internal status."""
resp = await self.send_bluesound_command(
'SyncStatus', raise_timeout, raise_timeout)
if not resp:
return None
self._sync_status = resp['SyncStatus'].copy()
if not self._name:
self._name = self._sync_status.get('@name', self.host)
if not self._icon:
self._icon = self._sync_status.get('@icon', self.host)
master = self._sync_status.get('master', None)
if master is not None:
self._is_master = False
master_host = master.get('#text')
master_device = [device for device in
self._hass.data[DATA_BLUESOUND]
if device.host == master_host]
if master_device and master_host != self.host:
self._master = master_device[0]
else:
self._master = None
_LOGGER.error("Master not found %s", master_host)
else:
if self._master is not None:
self._master = None
slaves = self._sync_status.get('slave', None)
self._is_master = slaves is not None
if on_updated_cb:
on_updated_cb()
return True
async def _start_poll_command(self):
"""Loop which polls the status of the player."""
try:
while True:
await self.async_update_status()
except (asyncio.TimeoutError, ClientError,
BluesoundPlayer._TimeoutException):
_LOGGER.info("Node %s is offline, retrying later", self._name)
await asyncio.sleep(
NODE_OFFLINE_CHECK_TIMEOUT, loop=self._hass.loop)
self.start_polling()
except CancelledError:
_LOGGER.debug("Stopping the polling of node %s", self._name)
except Exception:
_LOGGER.exception("Unexpected error in %s", self._name)
raise
def start_polling(self):
"""Start the polling task."""
self._polling_task = self._hass.async_create_task(
self._start_poll_command())
def stop_polling(self):
"""Stop the polling task."""
self._polling_task.cancel()
async def async_init(self, triggered=None):
"""Initialize the player async."""
try:
if self._retry_remove is not None:
self._retry_remove()
self._retry_remove = None
await self.force_update_sync_status(
self._init_callback, True)
except (asyncio.TimeoutError, ClientError):
_LOGGER.info("Node %s is offline, retrying later", self.host)
self._retry_remove = async_track_time_interval(
self._hass, self.async_init, NODE_RETRY_INITIATION)
except Exception:
_LOGGER.exception(
"Unexpected error when initiating %s", self.host)
raise
async def async_update(self):
"""Update internal status of the entity."""
if not self._is_online:
return
await self.async_update_sync_status()
await self.async_update_presets()
await self.async_update_captures()
await self.async_update_services()
async def send_bluesound_command(
self, method, raise_timeout=False, allow_offline=False):
"""Send command to the player."""
import xmltodict
if not self._is_online and not allow_offline:
return
if method[0] == '/':
method = method[1:]
url = "http://{}:{}/{}".format(self.host, self.port, method)
_LOGGER.debug("Calling URL: %s", url)
response = None
try:
websession = async_get_clientsession(self._hass)
with async_timeout.timeout(10, loop=self._hass.loop):
response = await websession.get(url)
if response.status == 200:
result = await response.text()
if result:
data = xmltodict.parse(result)
else:
data = None
elif response.status == 595:
_LOGGER.info("Status 595 returned, treating as timeout")
raise BluesoundPlayer._TimeoutException()
else:
_LOGGER.error("Error %s on %s", response.status, url)
return None
except (asyncio.TimeoutError, aiohttp.ClientError):
if raise_timeout:
_LOGGER.info("Timeout: %s", self.host)
raise
_LOGGER.debug("Failed communicating: %s", self.host)
return None
return data
async def async_update_status(self):
"""Use the poll session to always get the status of the player."""
import xmltodict
response = None
url = 'Status'
etag = ''
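# Long-poll the node: passing the last etag together with a 120 s timeout lets
# the player hold the request until its status actually changes.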
if self._status is not None:
etag = self._status.get('@etag', '')
if etag != '':
url = 'Status?etag={}&timeout=120.0'.format(etag)
url = "http://{}:{}/{}".format(self.host, self.port, url)
_LOGGER.debug("Calling URL: %s", url)
try:
with async_timeout.timeout(125, loop=self._hass.loop):
response = await self._polling_session.get(
url, headers={CONNECTION: KEEP_ALIVE})
if response.status == 200:
result = await response.text()
self._is_online = True
self._last_status_update = dt_util.utcnow()
self._status = xmltodict.parse(result)['status'].copy()
group_name = self._status.get('groupName', None)
if group_name != self._group_name:
_LOGGER.debug(
"Group name change detected on device: %s", self.host)
self._group_name = group_name
# the sleep is needed to make sure that the
# devices are synced
await asyncio.sleep(1, loop=self._hass.loop)
await self.async_trigger_sync_on_all()
elif self.is_grouped:
# when the player is grouped we need to fetch the volume from
# sync_status. We force an update if the player is grouped,
# but this isn't a foolproof solution. A better solution would
# be to fetch sync_status more often while the device is
# playing; that would solve a lot of problems. This change will
# be made when the communication is moved to a separate library
await self.force_update_sync_status()
self.async_schedule_update_ha_state()
elif response.status == 595:
_LOGGER.info("Status 595 returned, treating as timeout")
raise BluesoundPlayer._TimeoutException()
else:
_LOGGER.error("Error %s on %s. Trying one more time",
response.status, url)
except (asyncio.TimeoutError, ClientError):
self._is_online = False
self._last_status_update = None
self._status = None
self.async_schedule_update_ha_state()
_LOGGER.info(
"Client connection error, marking %s as offline", self._name)
raise
async def async_trigger_sync_on_all(self):
"""Trigger sync status update on all devices."""
_LOGGER.debug("Trigger sync status on all devices")
for player in self._hass.data[DATA_BLUESOUND]:
await player.force_update_sync_status()
@Throttle(SYNC_STATUS_INTERVAL)
async def async_update_sync_status(
self, on_updated_cb=None, raise_timeout=False):
"""Update sync status."""
await self.force_update_sync_status(
on_updated_cb, raise_timeout=False)
@Throttle(UPDATE_CAPTURE_INTERVAL)
async def async_update_captures(self):
"""Update Capture sources."""
resp = await self.send_bluesound_command(
'RadioBrowse?service=Capture')
if not resp:
return
self._capture_items = []
def _create_capture_item(item):
self._capture_items.append({
'title': item.get('@text', ''),
'name': item.get('@text', ''),
'type': item.get('@serviceType', 'Capture'),
'image': item.get('@image', ''),
'url': item.get('@URL', '')
})
if 'radiotime' in resp and 'item' in resp['radiotime']:
if isinstance(resp['radiotime']['item'], list):
for item in resp['radiotime']['item']:
_create_capture_item(item)
else:
_create_capture_item(resp['radiotime']['item'])
return self._capture_items
@Throttle(UPDATE_PRESETS_INTERVAL)
async def async_update_presets(self):
"""Update Presets."""
resp = await self.send_bluesound_command('Presets')
if not resp:
return
self._preset_items = []
def _create_preset_item(item):
self._preset_items.append({
'title': item.get('@name', ''),
'name': item.get('@name', ''),
'type': 'preset',
'image': item.get('@image', ''),
'is_raw_url': True,
'url2': item.get('@url', ''),
'url': 'Preset?id={}'.format(item.get('@id', ''))
})
if 'presets' in resp and 'preset' in resp['presets']:
if isinstance(resp['presets']['preset'], list):
for item in resp['presets']['preset']:
_create_preset_item(item)
else:
_create_preset_item(resp['presets']['preset'])
return self._preset_items
@Throttle(UPDATE_SERVICES_INTERVAL)
async def async_update_services(self):
"""Update Services."""
resp = await self.send_bluesound_command('Services')
if not resp:
return
self._services_items = []
def _create_service_item(item):
self._services_items.append({
'title': item.get('@displayname', ''),
'name': item.get('@name', ''),
'type': item.get('@type', ''),
'image': item.get('@icon', ''),
'url': item.get('@name', '')
})
if 'services' in resp and 'service' in resp['services']:
if isinstance(resp['services']['service'], list):
for item in resp['services']['service']:
_create_service_item(item)
else:
_create_service_item(resp['services']['service'])
return self._services_items
@property
def media_content_type(self):
"""Content type of current playing media."""
return MEDIA_TYPE_MUSIC
@property
def state(self):
"""Return the state of the device."""
if self._status is None:
return STATE_OFF
if self.is_grouped and not self.is_master:
return STATE_GROUPED
status = self._status.get('state', None)
if status in ('pause', 'stop'):
return STATE_PAUSED
if status in ('stream', 'play'):
return STATE_PLAYING
return STATE_IDLE
@property
def media_title(self):
"""Title of current playing media."""
if (self._status is None or
(self.is_grouped and not self.is_master)):
return None
return self._status.get('title1', None)
@property
def media_artist(self):
"""Artist of current playing media (Music track only)."""
if self._status is None:
return None
if self.is_grouped and not self.is_master:
return self._group_name
artist = self._status.get('artist', None)
if not artist:
artist = self._status.get('title2', None)
return artist
@property
def media_album_name(self):
"""Artist of current playing media (Music track only)."""
if (self._status is None or
(self.is_grouped and not self.is_master)):
return None
album = self._status.get('album', None)
if not album:
album = self._status.get('title3', None)
return album
@property
def media_image_url(self):
"""Image url of current playing media."""
if (self._status is None or
(self.is_grouped and not self.is_master)):
return None
url = self._status.get('image', None)
if not url:
return
if url[0] == '/':
url = "http://{}:{}{}".format(self.host, self.port, url)
return url
@property
def media_position(self):
"""Position of current playing media in seconds."""
if (self._status is None or
(self.is_grouped and not self.is_master)):
return None
mediastate = self.state
if self._last_status_update is None or mediastate == STATE_IDLE:
return None
position = self._status.get('secs', None)
if position is None:
return None
position = float(position)
if mediastate == STATE_PLAYING:
position += (dt_util.utcnow() -
self._last_status_update).total_seconds()
return position
@property
def media_duration(self):
"""Duration of current playing media in seconds."""
if (self._status is None or
(self.is_grouped and not self.is_master)):
return None
duration = self._status.get('totlen', None)
if duration is None:
return None
return float(duration)
@property
def media_position_updated_at(self):
"""Last time status was updated."""
return self._last_status_update
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
volume = self._status.get('volume', None)
if self.is_grouped:
volume = self._sync_status.get('@volume', None)
if volume is not None:
return int(volume) / 100
return None
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
volume = self.volume_level
if not volume:
return None
return 0 <= volume < 0.001
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def icon(self):
"""Return the icon of the device."""
return self._icon
@property
def source_list(self):
"""List of available input sources."""
if (self._status is None or
(self.is_grouped and not self.is_master)):
return None
sources = []
for source in self._preset_items:
sources.append(source['title'])
for source in [x for x in self._services_items
if x['type'] == 'LocalMusic' or
x['type'] == 'RadioService']:
sources.append(source['title'])
for source in self._capture_items:
sources.append(source['title'])
return sources
@property
def source(self):
"""Name of the current input source."""
from urllib import parse
if (self._status is None or
(self.is_grouped and not self.is_master)):
return None
current_service = self._status.get('service', '')
if current_service == '':
return ''
stream_url = self._status.get('streamUrl', '')
if self._status.get('is_preset', '') == '1' and stream_url != '':
# This check doesn't work with all presets, for example playlists.
# But it works with radio; service_items will catch playlists.
items = [x for x in self._preset_items if 'url2' in x and
parse.unquote(x['url2']) == stream_url]
if items:
return items[0]['title']
# This could be a bit difficult to detect. Bluetooth could be named
# different things and there is no reliable way to match entries in the
# capture list to what is currently playing. It's a bit of guesswork.
# This method will need some tweaking over time.
title = self._status.get('title1', '').lower()
if title == 'bluetooth' or stream_url == 'Capture:hw:2,0/44100/16/2':
items = [x for x in self._capture_items
if x['url'] == "Capture%3Abluez%3Abluetooth"]
if items:
return items[0]['title']
items = [x for x in self._capture_items if x['url'] == stream_url]
if items:
return items[0]['title']
if stream_url[:8] == 'Capture:':
stream_url = stream_url[8:]
idx = BluesoundPlayer._try_get_index(stream_url, ':')
if idx > 0:
stream_url = stream_url[:idx]
for item in self._capture_items:
url = parse.unquote(item['url'])
if url[:8] == 'Capture:':
url = url[8:]
idx = BluesoundPlayer._try_get_index(url, ':')
if idx > 0:
url = url[:idx]
if url.lower() == stream_url.lower():
return item['title']
items = [x for x in self._capture_items
if x['name'] == current_service]
if items:
return items[0]['title']
items = [x for x in self._services_items
if x['name'] == current_service]
if items:
return items[0]['title']
if self._status.get('streamUrl', '') != '':
_LOGGER.debug("Couldn't find source of stream URL: %s",
self._status.get('streamUrl', ''))
return None
@property
def supported_features(self):
"""Flag of media commands that are supported."""
if self._status is None:
return None
if self.is_grouped and not self.is_master:
return SUPPORT_VOLUME_STEP | SUPPORT_VOLUME_SET | \
SUPPORT_VOLUME_MUTE
supported = SUPPORT_CLEAR_PLAYLIST
if self._status.get('indexing', '0') == '0':
supported = supported | SUPPORT_PAUSE | SUPPORT_PREVIOUS_TRACK | \
SUPPORT_NEXT_TRACK | SUPPORT_PLAY_MEDIA | \
SUPPORT_STOP | SUPPORT_PLAY | SUPPORT_SELECT_SOURCE | \
SUPPORT_SHUFFLE_SET
current_vol = self.volume_level
if current_vol is not None and current_vol >= 0:
supported = supported | SUPPORT_VOLUME_STEP | \
SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE
if self._status.get('canSeek', '') == '1':
supported = supported | SUPPORT_SEEK
return supported
@property
def is_master(self):
"""Return true if player is a coordinator."""
return self._is_master
@property
def is_grouped(self):
"""Return true if player is a coordinator."""
return self._master is not None or self._is_master
@property
def shuffle(self):
"""Return true if shuffle is active."""
return self._status.get('shuffle', '0') == '1'
async def async_join(self, master):
"""Join the player to a group."""
master_device = [device for device in self.hass.data[DATA_BLUESOUND]
if device.entity_id == master]
if master_device:
_LOGGER.debug("Trying to join player: %s to master: %s",
self.host, master_device[0].host)
await master_device[0].async_add_slave(self)
else:
_LOGGER.error("Master not found %s", master_device)
async def async_unjoin(self):
"""Unjoin the player from a group."""
if self._master is None:
return
_LOGGER.debug("Trying to unjoin player: %s", self.host)
await self._master.async_remove_slave(self)
async def async_add_slave(self, slave_device):
"""Add slave to master."""
return await self.send_bluesound_command(
'/AddSlave?slave={}&port={}'.format(
slave_device.host, slave_device.port))
async def async_remove_slave(self, slave_device):
"""Remove slave to master."""
return await self.send_bluesound_command(
'/RemoveSlave?slave={}&port={}'.format(
slave_device.host, slave_device.port))
async def async_increase_timer(self):
"""Increase sleep time on player."""
sleep_time = await self.send_bluesound_command('/Sleep')
if sleep_time is None:
_LOGGER.error(
"Error while increasing sleep time on player: %s", self.host)
return 0
return int(sleep_time.get('sleep', '0'))
async def async_clear_timer(self):
"""Clear sleep timer on player."""
sleep = 1
while sleep > 0:
sleep = await self.async_increase_timer()
async def async_set_shuffle(self, shuffle):
"""Enable or disable shuffle mode."""
value = '1' if shuffle else '0'
return await self.send_bluesound_command(
'/Shuffle?state={}'.format(value))
async def async_select_source(self, source):
"""Select input source."""
if self.is_grouped and not self.is_master:
return
items = [x for x in self._preset_items if x['title'] == source]
if not items:
items = [x for x in self._services_items if x['title'] == source]
if not items:
items = [x for x in self._capture_items if x['title'] == source]
if not items:
return
selected_source = items[0]
url = 'Play?url={}&preset_id&image={}'.format(
selected_source['url'], selected_source['image'])
if 'is_raw_url' in selected_source and selected_source['is_raw_url']:
url = selected_source['url']
return await self.send_bluesound_command(url)
async def async_clear_playlist(self):
"""Clear players playlist."""
if self.is_grouped and not self.is_master:
return
return await self.send_bluesound_command('Clear')
async def async_media_next_track(self):
"""Send media_next command to media player."""
if self.is_grouped and not self.is_master:
return
cmd = 'Skip'
if self._status and 'actions' in self._status:
for action in self._status['actions']['action']:
if ('@name' in action and '@url' in action and
action['@name'] == 'skip'):
cmd = action['@url']
return await self.send_bluesound_command(cmd)
async def async_media_previous_track(self):
"""Send media_previous command to media player."""
if self.is_grouped and not self.is_master:
return
cmd = 'Back'
if self._status and 'actions' in self._status:
for action in self._status['actions']['action']:
if ('@name' in action and '@url' in action and
action['@name'] == 'back'):
cmd = action['@url']
return await self.send_bluesound_command(cmd)
async def async_media_play(self):
"""Send media_play command to media player."""
if self.is_grouped and not self.is_master:
return
return await self.send_bluesound_command('Play')
async def async_media_pause(self):
"""Send media_pause command to media player."""
if self.is_grouped and not self.is_master:
return
return await self.send_bluesound_command('Pause')
async def async_media_stop(self):
"""Send stop command."""
if self.is_grouped and not self.is_master:
return
return await self.send_bluesound_command('Pause')
async def async_media_seek(self, position):
"""Send media_seek command to media player."""
if self.is_grouped and not self.is_master:
return
return await self.send_bluesound_command(
'Play?seek={}'.format(float(position)))
async def async_play_media(self, media_type, media_id, **kwargs):
"""
Send the play_media command to the media player.
If ATTR_MEDIA_ENQUEUE is True, add `media_id` to the queue.
"""
if self.is_grouped and not self.is_master:
return
url = 'Play?url={}'.format(media_id)
if kwargs.get(ATTR_MEDIA_ENQUEUE):
return await self.send_bluesound_command(url)
return await self.send_bluesound_command(url)
async def async_volume_up(self):
"""Volume up the media player."""
current_vol = self.volume_level
if not current_vol or current_vol < 0:
return
return await self.async_set_volume_level(((current_vol*100)+1)/100)
async def async_volume_down(self):
"""Volume down the media player."""
current_vol = self.volume_level
if not current_vol or current_vol < 0:
return
return await self.async_set_volume_level(((current_vol*100)-1)/100)
async def async_set_volume_level(self, volume):
"""Send volume_up command to media player."""
if volume < 0:
volume = 0
elif volume > 1:
volume = 1
return await self.send_bluesound_command(
'Volume?level=' + str(float(volume) * 100))
async def async_mute_volume(self, mute):
"""Send mute command to media player."""
if mute:
volume = self.volume_level
if volume > 0:
self._lastvol = volume
return await self.send_bluesound_command('Volume?level=0')
return await self.send_bluesound_command(
'Volume?level=' + str(float(self._lastvol) * 100))
|
|
"""Script to check the configuration file."""
import argparse
import logging
import os
from collections import OrderedDict, namedtuple
from glob import glob
from typing import Dict, List, Sequence
from unittest.mock import patch
import attr
import voluptuous as vol
from homeassistant import bootstrap, core, loader
from homeassistant.config import (
get_default_config_dir, CONF_CORE, CORE_CONFIG_SCHEMA,
CONF_PACKAGES, merge_packages_config, _format_config_error,
find_config_file, load_yaml_config_file,
extract_domain_configs, config_per_platform)
from homeassistant.util import yaml
from homeassistant.exceptions import HomeAssistantError
REQUIREMENTS = ('colorlog==4.0.2',)
_LOGGER = logging.getLogger(__name__)
# pylint: disable=protected-access
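# Functions patched while checking the config; each value holds the dotted patch
# target plus the real implementation that the local mock_* function falls back to.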
MOCKS = {
'load': ("homeassistant.util.yaml.load_yaml", yaml.load_yaml),
'load*': ("homeassistant.config.load_yaml", yaml.load_yaml),
'secrets': ("homeassistant.util.yaml.secret_yaml", yaml.secret_yaml),
}
SILENCE = (
'homeassistant.scripts.check_config.yaml.clear_secret_cache',
)
PATCHES = {}
C_HEAD = 'bold'
ERROR_STR = 'General Errors'
def color(the_color, *args, reset=None):
"""Color helper."""
from colorlog.escape_codes import escape_codes, parse_colors
try:
if not args:
assert reset is None, "You cannot reset if nothing is being printed"
return parse_colors(the_color)
return parse_colors(the_color) + ' '.join(args) + \
escape_codes[reset or 'reset']
except KeyError as k:
raise ValueError("Invalid color {} in {}".format(str(k), the_color))
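# Example: color('bold_red', 'Failed') wraps 'Failed' in the bold-red escape
# code followed by a reset, while color('cyan') returns only the escape code.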
def run(script_args: List) -> int:
"""Handle ensure config commandline script."""
parser = argparse.ArgumentParser(
description="Check Home Assistant configuration.")
parser.add_argument(
'--script', choices=['check_config'])
parser.add_argument(
'-c', '--config',
default=get_default_config_dir(),
help="Directory that contains the Home Assistant configuration")
parser.add_argument(
'-i', '--info', nargs='?',
default=None, const='all',
help="Show a portion of the config")
parser.add_argument(
'-f', '--files',
action='store_true',
help="Show used configuration files")
parser.add_argument(
'-s', '--secrets',
action='store_true',
help="Show secret information")
args, unknown = parser.parse_known_args()
if unknown:
print(color('red', "Unknown arguments:", ', '.join(unknown)))
config_dir = os.path.join(os.getcwd(), args.config)
print(color('bold', "Testing configuration at", config_dir))
res = check(config_dir, args.secrets)
domain_info = []
if args.info:
domain_info = args.info.split(',')
if args.files:
print(color(C_HEAD, 'yaml files'), '(used /',
color('red', 'not used') + ')')
deps = os.path.join(config_dir, 'deps')
yaml_files = [f for f in glob(os.path.join(config_dir, '**/*.yaml'),
recursive=True)
if not f.startswith(deps)]
for yfn in sorted(yaml_files):
the_color = '' if yfn in res['yaml_files'] else 'red'
print(color(the_color, '-', yfn))
if res['except']:
print(color('bold_white', 'Failed config'))
for domain, config in res['except'].items():
domain_info.append(domain)
print(' ', color('bold_red', domain + ':'),
color('red', '', reset='red'))
dump_dict(config, reset='red')
print(color('reset'))
if domain_info:
if 'all' in domain_info:
print(color('bold_white', 'Successful config (all)'))
for domain, config in res['components'].items():
print(' ', color(C_HEAD, domain + ':'))
dump_dict(config)
else:
print(color('bold_white', 'Successful config (partial)'))
for domain in domain_info:
if domain == ERROR_STR:
continue
print(' ', color(C_HEAD, domain + ':'))
dump_dict(res['components'].get(domain, None))
if args.secrets:
flatsecret = {}
for sfn, sdict in res['secret_cache'].items():
sss = []
for skey in sdict:
if skey in flatsecret:
_LOGGER.error('Duplicated secrets in files %s and %s',
flatsecret[skey], sfn)
flatsecret[skey] = sfn
sss.append(color('green', skey) if skey in res['secrets']
else skey)
print(color(C_HEAD, 'Secrets from', sfn + ':'), ', '.join(sss))
print(color(C_HEAD, 'Used Secrets:'))
for skey, sval in res['secrets'].items():
if sval is None:
print(' -', skey + ':', color('red', "not found"))
continue
print(' -', skey + ':', sval, color('cyan', '[from:', flatsecret
.get(skey, 'keyring') + ']'))
return len(res['except'])
def check(config_dir, secrets=False):
"""Perform a check by mocking hass load functions."""
logging.getLogger('homeassistant.loader').setLevel(logging.CRITICAL)
res = {
'yaml_files': OrderedDict(), # yaml_files loaded
'secrets': OrderedDict(), # secret cache and secrets loaded
'except': OrderedDict(), # exceptions raised (with config)
'components': None, # successful components
'secret_cache': None,
}
# pylint: disable=possibly-unused-variable
def mock_load(filename):
"""Mock hass.util.load_yaml to save config file names."""
res['yaml_files'][filename] = True
return MOCKS['load'][1](filename)
# pylint: disable=possibly-unused-variable
def mock_secrets(ldr, node):
"""Mock _get_secrets."""
try:
val = MOCKS['secrets'][1](ldr, node)
except HomeAssistantError:
val = None
res['secrets'][node.value] = val
return val
# Patches to skip functions
for sil in SILENCE:
PATCHES[sil] = patch(sil)
# Patches with local mock functions
for key, val in MOCKS.items():
if not secrets and key == 'secrets':
continue
# The * in the key is removed to find the mock_function (side_effect)
# This allows us to use one side_effect to patch multiple locations
mock_function = locals()['mock_' + key.replace('*', '')]
PATCHES[key] = patch(val[0], side_effect=mock_function)
# Start all patches
for pat in PATCHES.values():
pat.start()
if secrets:
# Ensure !secrets point to the patched function
yaml.yaml.SafeLoader.add_constructor('!secret', yaml.secret_yaml)
try:
hass = core.HomeAssistant()
hass.config.config_dir = config_dir
res['components'] = check_ha_config_file(hass)
res['secret_cache'] = OrderedDict(yaml.__SECRET_CACHE)
for err in res['components'].errors:
domain = err.domain or ERROR_STR
res['except'].setdefault(domain, []).append(err.message)
if err.config:
res['except'].setdefault(domain, []).append(err.config)
except Exception as err: # pylint: disable=broad-except
_LOGGER.exception("BURB")
print(color('red', 'Fatal error while loading config:'), str(err))
res['except'].setdefault(ERROR_STR, []).append(str(err))
finally:
# Stop all patches
for pat in PATCHES.values():
pat.stop()
if secrets:
# Ensure !secrets point to the original function
yaml.yaml.SafeLoader.add_constructor('!secret', yaml.secret_yaml)
bootstrap.clear_secret_cache()
return res
def line_info(obj, **kwargs):
"""Display line config source."""
if hasattr(obj, '__config_file__'):
return color('cyan', "[source {}:{}]"
.format(obj.__config_file__, obj.__line__ or '?'),
**kwargs)
return '?'
def dump_dict(layer, indent_count=3, listi=False, **kwargs):
"""Display a dict.
A friendlier version of print(yaml.yaml.dump(config)).
"""
def sort_dict_key(val):
"""Return the dict key for sorting."""
key = str(val[0]).lower()
return '0' if key == 'platform' else key
indent_str = indent_count * ' '
if listi or isinstance(layer, list):
indent_str = indent_str[:-1] + '-'
if isinstance(layer, Dict):
for key, value in sorted(layer.items(), key=sort_dict_key):
if isinstance(value, (dict, list)):
print(indent_str, str(key) + ':', line_info(value, **kwargs))
dump_dict(value, indent_count + 2)
else:
print(indent_str, str(key) + ':', value)
indent_str = indent_count * ' '
if isinstance(layer, Sequence):
for i in layer:
if isinstance(i, dict):
dump_dict(i, indent_count + 2, True)
else:
print(' ', indent_str, i)
CheckConfigError = namedtuple(
'CheckConfigError', "message domain config")
@attr.s
class HomeAssistantConfig(OrderedDict):
"""Configuration result with errors attribute."""
errors = attr.ib(default=attr.Factory(list))
def add_error(self, message, domain=None, config=None):
"""Add a single error."""
self.errors.append(CheckConfigError(str(message), domain, config))
return self
def check_ha_config_file(hass):
"""Check if Home Assistant configuration file is valid."""
config_dir = hass.config.config_dir
result = HomeAssistantConfig()
def _pack_error(package, component, config, message):
"""Handle errors from packages: _log_pkg_error."""
message = "Package {} setup failed. Component {} {}".format(
package, component, message)
domain = 'homeassistant.packages.{}.{}'.format(package, component)
pack_config = core_config[CONF_PACKAGES].get(package, config)
result.add_error(message, domain, pack_config)
def _comp_error(ex, domain, config):
"""Handle errors from components: async_log_exception."""
result.add_error(
_format_config_error(ex, domain, config), domain, config)
# Load configuration.yaml
try:
config_path = find_config_file(config_dir)
if not config_path:
return result.add_error("File configuration.yaml not found.")
config = load_yaml_config_file(config_path)
except HomeAssistantError as err:
return result.add_error(
"Error loading {}: {}".format(config_path, err))
finally:
yaml.clear_secret_cache()
# Extract and validate core [homeassistant] config
try:
core_config = config.pop(CONF_CORE, {})
core_config = CORE_CONFIG_SCHEMA(core_config)
result[CONF_CORE] = core_config
except vol.Invalid as err:
result.add_error(err, CONF_CORE, core_config)
core_config = {}
# Merge packages
merge_packages_config(
hass, config, core_config.get(CONF_PACKAGES, {}), _pack_error)
core_config.pop(CONF_PACKAGES, None)
# Filter out repeating config sections
components = set(key.split(' ')[0] for key in config.keys())
# Process and validate config
for domain in components:
component = loader.get_component(hass, domain)
if not component:
result.add_error("Component not found: {}".format(domain))
continue
if hasattr(component, 'CONFIG_SCHEMA'):
try:
config = component.CONFIG_SCHEMA(config)
result[domain] = config[domain]
except vol.Invalid as ex:
_comp_error(ex, domain, config)
continue
if (not hasattr(component, 'PLATFORM_SCHEMA') and
not hasattr(component, 'PLATFORM_SCHEMA_BASE')):
continue
platforms = []
for p_name, p_config in config_per_platform(config, domain):
# Validate component specific platform schema
try:
if hasattr(component, 'PLATFORM_SCHEMA_BASE'):
p_validated = \
component.PLATFORM_SCHEMA_BASE( # type: ignore
p_config)
else:
p_validated = component.PLATFORM_SCHEMA( # type: ignore
p_config)
except vol.Invalid as ex:
_comp_error(ex, domain, config)
continue
# Not all platform components follow same pattern for platforms
# So if p_name is None we are not going to validate platform
# (the automation component is one of them)
if p_name is None:
platforms.append(p_validated)
continue
platform = loader.get_platform(hass, domain, p_name)
if platform is None:
result.add_error(
"Platform not found: {}.{}".format(domain, p_name))
continue
# Validate platform specific schema
if hasattr(platform, 'PLATFORM_SCHEMA'):
try:
p_validated = platform.PLATFORM_SCHEMA(p_validated)
except vol.Invalid as ex:
_comp_error(
ex, '{}.{}'.format(domain, p_name), p_validated)
continue
platforms.append(p_validated)
# Remove config for current component and add validated config back in.
for filter_comp in extract_domain_configs(config, domain):
del config[filter_comp]
result[domain] = platforms
return result
|
|
#!/usr/bin/env python
# Copyright 2014-2020 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Authors: James D. McClain
# Timothy Berkelbach <tim.berkelbach@gmail.com>
#
import time
import numpy
from functools import reduce
from pyscf import lib
from pyscf.lib import logger
from pyscf.pbc import scf
from pyscf.cc import gccsd
from pyscf.cc import ccsd
from pyscf.pbc.mp.kmp2 import (get_frozen_mask, get_nmo, get_nocc,
padded_mo_coeff, padding_k_idx)
from pyscf.pbc.cc import kintermediates as imdk
from pyscf.lib.parameters import LOOSE_ZERO_TOL, LARGE_DENOM
from pyscf.pbc.lib import kpts_helper
DEBUG = False
#
# FIXME: When linear dependence is found in KHF and handled by function
# pyscf.scf.addons.remove_linear_dep_, different k-points may have different
# numbers of orbitals.
#
#einsum = numpy.einsum
einsum = lib.einsum
def energy(cc, t1, t2, eris):
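"""Compute the KGCCSD correlation energy, E = sum(f_ia t1_ia)
+ 1/4 sum(<ij||ab> tau_ijab), averaged over k-points. The integrals are
antisymmetrized, so the t1*t1 piece of tau enters twice (hence 2 * t1t1 below).
"""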
nkpts, nocc, nvir = t1.shape
fock = eris.fock
eris_oovv = eris.oovv.copy()
e = 0.0 + 0j
for ki in range(nkpts):
e += einsum('ia,ia', fock[ki, :nocc, nocc:], t1[ki, :, :])
t1t1 = numpy.zeros(shape=t2.shape, dtype=t2.dtype)
for ki in range(nkpts):
ka = ki
for kj in range(nkpts):
#kb = kj
t1t1[ki, kj, ka, :, :, :, :] = einsum('ia,jb->ijab', t1[ki, :, :], t1[kj, :, :])
tau = t2 + 2 * t1t1
e += 0.25 * numpy.dot(tau.flatten(), eris_oovv.flatten())
e /= nkpts
if abs(e.imag) > 1e-4:
logger.warn(cc, 'Non-zero imaginary part found in KCCSD energy %s', e)
return e.real
def update_amps(cc, t1, t2, eris):
time0 = time.clock(), time.time()
log = logger.Logger(cc.stdout, cc.verbose)
nkpts, nocc, nvir = t1.shape
fock = eris.fock
mo_e_o = [e[:nocc] for e in eris.mo_energy]
mo_e_v = [e[nocc:] + cc.level_shift for e in eris.mo_energy]
# Get location of padded elements in occupied and virtual space
nonzero_opadding, nonzero_vpadding = padding_k_idx(cc, kind="split")
fov = fock[:, :nocc, nocc:].copy()
foo = fock[:, :nocc, :nocc].copy()
fvv = fock[:, nocc:, nocc:].copy()
# Get the momentum conservation array
# Note: chemist's notation for momentum conserving t2(ki,kj,ka,kb), even though
# integrals are in physics notation
kconserv = kpts_helper.get_kconserv(cc._scf.cell, cc.kpts)
tau = imdk.make_tau(cc, t2, t1, t1, kconserv)
Fvv = imdk.cc_Fvv(cc, t1, t2, eris, kconserv)
Foo = imdk.cc_Foo(cc, t1, t2, eris, kconserv)
Fov = imdk.cc_Fov(cc, t1, t2, eris, kconserv)
Woooo = imdk.cc_Woooo(cc, t1, t2, eris, kconserv)
Wvvvv = imdk.cc_Wvvvv(cc, t1, t2, eris, kconserv)
Wovvo = imdk.cc_Wovvo(cc, t1, t2, eris, kconserv)
# Move energy terms to the other side
for k in range(nkpts):
Foo[k][numpy.diag_indices(nocc)] -= mo_e_o[k]
Fvv[k][numpy.diag_indices(nvir)] -= mo_e_v[k]
eris_ovvo = numpy.zeros(shape=(nkpts, nkpts, nkpts, nocc, nvir, nvir, nocc), dtype=t2.dtype)
eris_oovo = numpy.zeros(shape=(nkpts, nkpts, nkpts, nocc, nocc, nvir, nocc), dtype=t2.dtype)
eris_vvvo = numpy.zeros(shape=(nkpts, nkpts, nkpts, nvir, nvir, nvir, nocc), dtype=t2.dtype)
for km, kb, ke in kpts_helper.loop_kkk(nkpts):
kj = kconserv[km, ke, kb]
# <mb||je> -> -<mb||ej>
eris_ovvo[km, kb, ke] = -eris.ovov[km, kb, kj].transpose(0, 1, 3, 2)
# <mn||je> -> -<mn||ej>
# let kb = kn as a dummy variable
eris_oovo[km, kb, ke] = -eris.ooov[km, kb, kj].transpose(0, 1, 3, 2)
# <ma||be> -> - <be||am>*
# let kj = ka as a dummy variable
kj = kconserv[km, ke, kb]
eris_vvvo[ke, kj, kb] = -eris.ovvv[km, kb, ke].transpose(2, 3, 1, 0).conj()
# T1 equation
t1new = numpy.zeros(shape=t1.shape, dtype=t1.dtype)
for ka in range(nkpts):
ki = ka
t1new[ka] += numpy.array(fov[ka, :, :]).conj()
t1new[ka] += einsum('ie,ae->ia', t1[ka], Fvv[ka])
t1new[ka] += -einsum('ma,mi->ia', t1[ka], Foo[ka])
for km in range(nkpts):
t1new[ka] += einsum('imae,me->ia', t2[ka, km, ka], Fov[km])
t1new[ka] += -einsum('nf,naif->ia', t1[km], eris.ovov[km, ka, ki])
for kn in range(nkpts):
ke = kconserv[km, ki, kn]
t1new[ka] += -0.5 * einsum('imef,maef->ia', t2[ki, km, ke], eris.ovvv[km, ka, ke])
t1new[ka] += -0.5 * einsum('mnae,nmei->ia', t2[km, kn, ka], eris_oovo[kn, km, ke])
# T2 equation
t2new = numpy.array(eris.oovv).conj()
for ki, kj, ka in kpts_helper.loop_kkk(nkpts):
# Chemist's notation for momentum conserving t2(ki,kj,ka,kb)
kb = kconserv[ki, ka, kj]
Ftmp = Fvv[kb] - 0.5 * einsum('mb,me->be', t1[kb], Fov[kb])
tmp = einsum('ijae,be->ijab', t2[ki, kj, ka], Ftmp)
t2new[ki, kj, ka] += tmp
#t2new[ki,kj,kb] -= tmp.transpose(0,1,3,2)
Ftmp = Fvv[ka] - 0.5 * einsum('ma,me->ae', t1[ka], Fov[ka])
tmp = einsum('ijbe,ae->ijab', t2[ki, kj, kb], Ftmp)
t2new[ki, kj, ka] -= tmp
Ftmp = Foo[kj] + 0.5 * einsum('je,me->mj', t1[kj], Fov[kj])
tmp = einsum('imab,mj->ijab', t2[ki, kj, ka], Ftmp)
t2new[ki, kj, ka] -= tmp
#t2new[kj,ki,ka] += tmp.transpose(1,0,2,3)
Ftmp = Foo[ki] + 0.5 * einsum('ie,me->mi', t1[ki], Fov[ki])
tmp = einsum('jmab,mi->ijab', t2[kj, ki, ka], Ftmp)
t2new[ki, kj, ka] += tmp
for km in range(nkpts):
# Wminj
# - km - kn + ka + kb = 0
# => kn = ka - km + kb
kn = kconserv[ka, km, kb]
t2new[ki, kj, ka] += 0.5 * einsum('mnab,mnij->ijab', tau[km, kn, ka], Woooo[km, kn, ki])
ke = km
t2new[ki, kj, ka] += 0.5 * einsum('ijef,abef->ijab', tau[ki, kj, ke], Wvvvv[ka, kb, ke])
# Wmbej
# - km - kb + ke + kj = 0
# => ke = km - kj + kb
ke = kconserv[km, kj, kb]
tmp = einsum('imae,mbej->ijab', t2[ki, km, ka], Wovvo[km, kb, ke])
# - km - kb + ke + kj = 0
# => ke = km - kj + kb
#
# t[i,e] => ki = ke
# t[m,a] => km = ka
if km == ka and ke == ki:
tmp -= einsum('ie,ma,mbej->ijab', t1[ki], t1[km], eris_ovvo[km, kb, ke])
t2new[ki, kj, ka] += tmp
t2new[ki, kj, kb] -= tmp.transpose(0, 1, 3, 2)
t2new[kj, ki, ka] -= tmp.transpose(1, 0, 2, 3)
t2new[kj, ki, kb] += tmp.transpose(1, 0, 3, 2)
ke = ki
tmp = einsum('ie,abej->ijab', t1[ki], eris_vvvo[ka, kb, ke])
t2new[ki, kj, ka] += tmp
# P(ij) term
ke = kj
tmp = einsum('je,abei->ijab', t1[kj], eris_vvvo[ka, kb, ke])
t2new[ki, kj, ka] -= tmp
km = ka
tmp = einsum('ma,mbij->ijab', t1[ka], eris.ovoo[km, kb, ki])
t2new[ki, kj, ka] -= tmp
# P(ab) term
km = kb
tmp = einsum('mb,maij->ijab', t1[kb], eris.ovoo[km, ka, ki])
t2new[ki, kj, ka] += tmp
for ki in range(nkpts):
ka = ki
# Remove zero/padded elements from denominator
eia = LARGE_DENOM * numpy.ones((nocc, nvir), dtype=eris.mo_energy[0].dtype)
n0_ovp_ia = numpy.ix_(nonzero_opadding[ki], nonzero_vpadding[ka])
eia[n0_ovp_ia] = (mo_e_o[ki][:,None] - mo_e_v[ka])[n0_ovp_ia]
t1new[ki] /= eia
kconserv = kpts_helper.get_kconserv(cc._scf.cell, cc.kpts)
for ki, kj, ka in kpts_helper.loop_kkk(nkpts):
kb = kconserv[ki, ka, kj]
# For LARGE_DENOM, see t1new update above
eia = LARGE_DENOM * numpy.ones((nocc, nvir), dtype=eris.mo_energy[0].dtype)
n0_ovp_ia = numpy.ix_(nonzero_opadding[ki], nonzero_vpadding[ka])
eia[n0_ovp_ia] = (mo_e_o[ki][:,None] - mo_e_v[ka])[n0_ovp_ia]
ejb = LARGE_DENOM * numpy.ones((nocc, nvir), dtype=eris.mo_energy[0].dtype)
n0_ovp_jb = numpy.ix_(nonzero_opadding[kj], nonzero_vpadding[kb])
ejb[n0_ovp_jb] = (mo_e_o[kj][:,None] - mo_e_v[kb])[n0_ovp_jb]
eijab = eia[:, None, :, None] + ejb[:, None, :]
t2new[ki, kj, ka] /= eijab
time0 = log.timer_debug1('update t1 t2', *time0)
return t1new, t2new
def spatial2spin(tx, orbspin, kconserv):
'''Convert T1/T2 of spatial orbital representation to T1/T2 of
spin-orbital representation
'''
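# Accepted inputs: a 3-D array (KRCCSD t1), a 7-D array (KRCCSD t2), a 2-tuple
# (t1a, t1b) of KUCCSD t1 amplitudes, or a 3-tuple (t2aa, t2ab, t2bb) of KUCCSD
# t2 amplitudes, matching the branches below.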
if isinstance(tx, numpy.ndarray) and tx.ndim == 3:
# KRCCSD t1 amplitudes
return spatial2spin((tx,tx), orbspin, kconserv)
elif isinstance(tx, numpy.ndarray) and tx.ndim == 7:
# KRCCSD t2 amplitudes
t2aa = numpy.zeros_like(tx)
nkpts = t2aa.shape[2]
for ki, kj, ka in kpts_helper.loop_kkk(nkpts):
kb = kconserv[ki,ka,kj]
t2aa[ki,kj,ka] = tx[ki,kj,ka] - tx[ki,kj,kb].transpose(0,1,3,2)
return spatial2spin((t2aa,tx,t2aa), orbspin, kconserv)
elif len(tx) == 2: # KUCCSD t1
t1a, t1b = tx
nocc_a, nvir_a = t1a.shape[1:]
nocc_b, nvir_b = t1b.shape[1:]
else: # KUCCSD t2
t2aa, t2ab, t2bb = tx
nocc_a, nocc_b, nvir_a, nvir_b = t2ab.shape[3:]
nkpts = len(orbspin)
nocc = nocc_a + nocc_b
nvir = nvir_a + nvir_b
idxoa = [numpy.where(orbspin[k][:nocc] == 0)[0] for k in range(nkpts)]
idxob = [numpy.where(orbspin[k][:nocc] == 1)[0] for k in range(nkpts)]
idxva = [numpy.where(orbspin[k][nocc:] == 0)[0] for k in range(nkpts)]
idxvb = [numpy.where(orbspin[k][nocc:] == 1)[0] for k in range(nkpts)]
if len(tx) == 2: # t1
t1 = numpy.zeros((nkpts,nocc,nvir), dtype=t1a.dtype)
for k in range(nkpts):
lib.takebak_2d(t1[k], t1a[k], idxoa[k], idxva[k])
lib.takebak_2d(t1[k], t1b[k], idxob[k], idxvb[k])
t1 = lib.tag_array(t1, orbspin=orbspin)
return t1
else:
t2 = numpy.zeros((nkpts,nkpts,nkpts,nocc**2,nvir**2), dtype=t2aa.dtype)
for ki, kj, ka in kpts_helper.loop_kkk(nkpts):
kb = kconserv[ki,ka,kj]
idxoaa = idxoa[ki][:,None] * nocc + idxoa[kj]
idxoab = idxoa[ki][:,None] * nocc + idxob[kj]
idxoba = idxob[kj][:,None] * nocc + idxoa[ki]
idxobb = idxob[ki][:,None] * nocc + idxob[kj]
idxvaa = idxva[ka][:,None] * nvir + idxva[kb]
idxvab = idxva[ka][:,None] * nvir + idxvb[kb]
idxvba = idxvb[kb][:,None] * nvir + idxva[ka]
idxvbb = idxvb[ka][:,None] * nvir + idxvb[kb]
tmp2aa = t2aa[ki,kj,ka].reshape(nocc_a*nocc_a,nvir_a*nvir_a)
tmp2bb = t2bb[ki,kj,ka].reshape(nocc_b*nocc_b,nvir_b*nvir_b)
tmp2ab = t2ab[ki,kj,ka].reshape(nocc_a*nocc_b,nvir_a*nvir_b)
lib.takebak_2d(t2[ki,kj,ka], tmp2aa, idxoaa.ravel() , idxvaa.ravel() )
lib.takebak_2d(t2[ki,kj,ka], tmp2bb, idxobb.ravel() , idxvbb.ravel() )
lib.takebak_2d(t2[ki,kj,ka], tmp2ab, idxoab.ravel() , idxvab.ravel() )
lib.takebak_2d(t2[kj,ki,kb], tmp2ab, idxoba.T.ravel(), idxvba.T.ravel())
abba = -tmp2ab
lib.takebak_2d(t2[ki,kj,kb], abba, idxoab.ravel() , idxvba.T.ravel())
lib.takebak_2d(t2[kj,ki,ka], abba, idxoba.T.ravel(), idxvab.ravel() )
t2 = t2.reshape(nkpts,nkpts,nkpts,nocc,nocc,nvir,nvir)
t2 = lib.tag_array(t2, orbspin=orbspin)
return t2
def spin2spatial(tx, orbspin, kconserv):
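'''Convert T1/T2 in the spin-orbital representation back to spatial-orbital
(alpha/beta) blocks; the inverse of spatial2spin.
'''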
if tx.ndim == 3: # t1
nocc, nvir = tx.shape[1:]
else:
nocc, nvir = tx.shape[4:6]
nkpts = len(tx)
idxoa = [numpy.where(orbspin[k][:nocc] == 0)[0] for k in range(nkpts)]
idxob = [numpy.where(orbspin[k][:nocc] == 1)[0] for k in range(nkpts)]
idxva = [numpy.where(orbspin[k][nocc:] == 0)[0] for k in range(nkpts)]
idxvb = [numpy.where(orbspin[k][nocc:] == 1)[0] for k in range(nkpts)]
nocc_a = len(idxoa[0])
nocc_b = len(idxob[0])
nvir_a = len(idxva[0])
nvir_b = len(idxvb[0])
if tx.ndim == 3: # t1
t1a = numpy.zeros((nkpts,nocc_a,nvir_a), dtype=tx.dtype)
t1b = numpy.zeros((nkpts,nocc_b,nvir_b), dtype=tx.dtype)
for k in range(nkpts):
lib.take_2d(tx[k], idxoa[k], idxva[k], out=t1a[k])
lib.take_2d(tx[k], idxob[k], idxvb[k], out=t1b[k])
return t1a, t1b
else:
t2aa = numpy.zeros((nkpts,nkpts,nkpts,nocc_a,nocc_a,nvir_a,nvir_a), dtype=tx.dtype)
t2ab = numpy.zeros((nkpts,nkpts,nkpts,nocc_a,nocc_b,nvir_a,nvir_b), dtype=tx.dtype)
t2bb = numpy.zeros((nkpts,nkpts,nkpts,nocc_b,nocc_b,nvir_b,nvir_b), dtype=tx.dtype)
t2 = tx.reshape(nkpts,nkpts,nkpts,nocc**2,nvir**2)
for ki, kj, ka in kpts_helper.loop_kkk(nkpts):
kb = kconserv[ki,ka,kj]
idxoaa = idxoa[ki][:,None] * nocc + idxoa[kj]
idxoab = idxoa[ki][:,None] * nocc + idxob[kj]
idxobb = idxob[ki][:,None] * nocc + idxob[kj]
idxvaa = idxva[ka][:,None] * nvir + idxva[kb]
idxvab = idxva[ka][:,None] * nvir + idxvb[kb]
idxvbb = idxvb[ka][:,None] * nvir + idxvb[kb]
lib.take_2d(t2[ki,kj,ka], idxoaa.ravel(), idxvaa.ravel(), out=t2aa[ki,kj,ka])
lib.take_2d(t2[ki,kj,ka], idxobb.ravel(), idxvbb.ravel(), out=t2bb[ki,kj,ka])
lib.take_2d(t2[ki,kj,ka], idxoab.ravel(), idxvab.ravel(), out=t2ab[ki,kj,ka])
return t2aa, t2ab, t2bb
class GCCSD(gccsd.GCCSD):
def __init__(self, mf, frozen=None, mo_coeff=None, mo_occ=None):
assert (isinstance(mf, scf.khf.KSCF))
if not isinstance(mf, scf.kghf.KGHF):
mf = scf.addons.convert_to_ghf(mf)
self.kpts = mf.kpts
self.khelper = kpts_helper.KptsHelper(mf.cell, mf.kpts)
gccsd.GCCSD.__init__(self, mf, frozen, mo_coeff, mo_occ)
@property
def nkpts(self):
return len(self.kpts)
get_nocc = get_nocc
get_nmo = get_nmo
get_frozen_mask = get_frozen_mask
def dump_flags(self, verbose=None):
logger.info(self, '\n')
logger.info(self, '******** PBC CC flags ********')
gccsd.GCCSD.dump_flags(self, verbose)
return self
def init_amps(self, eris):
time0 = time.clock(), time.time()
nocc = self.nocc
nvir = self.nmo - nocc
nkpts = self.nkpts
mo_e_o = [eris.mo_energy[k][:nocc] for k in range(nkpts)]
mo_e_v = [eris.mo_energy[k][nocc:] for k in range(nkpts)]
t1 = numpy.zeros((nkpts, nocc, nvir), dtype=numpy.complex128)
t2 = numpy.zeros((nkpts, nkpts, nkpts, nocc, nocc, nvir, nvir), dtype=numpy.complex128)
self.emp2 = 0
foo = eris.fock[:, :nocc, :nocc].copy()
fvv = eris.fock[:, nocc:, nocc:].copy()
fov = eris.fock[:, :nocc, nocc:].copy()
eris_oovv = eris.oovv.copy()
# Get location of padded elements in occupied and virtual space
nonzero_opadding, nonzero_vpadding = padding_k_idx(self, kind="split")
kconserv = kpts_helper.get_kconserv(self._scf.cell, self.kpts)
for ki, kj, ka in kpts_helper.loop_kkk(nkpts):
kb = kconserv[ki, ka, kj]
# For LARGE_DENOM, see t1new update above
eia = LARGE_DENOM * numpy.ones((nocc, nvir), dtype=eris.mo_energy[0].dtype)
n0_ovp_ia = numpy.ix_(nonzero_opadding[ki], nonzero_vpadding[ka])
eia[n0_ovp_ia] = (mo_e_o[ki][:,None] - mo_e_v[ka])[n0_ovp_ia]
ejb = LARGE_DENOM * numpy.ones((nocc, nvir), dtype=eris.mo_energy[0].dtype)
n0_ovp_jb = numpy.ix_(nonzero_opadding[kj], nonzero_vpadding[kb])
ejb[n0_ovp_jb] = (mo_e_o[kj][:,None] - mo_e_v[kb])[n0_ovp_jb]
eijab = eia[:, None, :, None] + ejb[:, None, :]
t2[ki, kj, ka] = eris_oovv[ki, kj, ka] / eijab
t2 = numpy.conj(t2)
self.emp2 = 0.25 * numpy.einsum('pqrijab,pqrijab', t2, eris_oovv).real
self.emp2 /= nkpts
logger.info(self, 'Init t2, MP2 energy = %.15g', self.emp2.real)
logger.timer(self, 'init mp2', *time0)
return self.emp2, t1, t2
def ccsd(self, t1=None, t2=None, eris=None, **kwargs):
if eris is None: eris = self.ao2mo(self.mo_coeff)
e_corr, self.t1, self.t2 = ccsd.CCSD.ccsd(self, t1, t2, eris)
if getattr(eris, 'orbspin', None) is not None:
self.t1 = lib.tag_array(self.t1, orbspin=eris.orbspin)
self.t2 = lib.tag_array(self.t2, orbspin=eris.orbspin)
return e_corr, self.t1, self.t2
update_amps = update_amps
energy = energy
def ao2mo(self, mo_coeff=None):
nkpts = self.nkpts
nmo = self.nmo
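# Rough size, in MB, of the full k-point ERI tensor if it were kept in core.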
mem_incore = nkpts**3 * nmo**4 * 8 / 1e6
mem_now = lib.current_memory()[0]
if (mem_incore + mem_now < self.max_memory) or self.mol.incore_anyway:
return _make_eris_incore(self, mo_coeff)
else:
raise NotImplementedError
def ccsd_t(self, t1=None, t2=None, eris=None):
from pyscf.pbc.cc import kccsd_t
if t1 is None: t1 = self.t1
if t2 is None: t2 = self.t2
if eris is None: eris = self.ao2mo(self.mo_coeff)
return kccsd_t.kernel(self, eris, t1, t2, self.verbose)
def amplitudes_to_vector(self, t1, t2):
return numpy.hstack((t1.ravel(), t2.ravel()))
def vector_to_amplitudes(self, vec, nmo=None, nocc=None):
if nocc is None: nocc = self.nocc
if nmo is None: nmo = self.nmo
nvir = nmo - nocc
nkpts = self.nkpts
nov = nkpts * nocc * nvir
t1 = vec[:nov].reshape(nkpts, nocc, nvir)
t2 = vec[nov:].reshape(nkpts, nkpts, nkpts, nocc, nocc, nvir, nvir)
return t1, t2
def spatial2spin(self, tx, orbspin=None, kconserv=None):
if orbspin is None:
if getattr(self.mo_coeff[0], 'orbspin', None) is not None:
orbspin = [self.mo_coeff[k].orbspin[idx]
for k, idx in enumerate(self.get_frozen_mask())]
else:
orbspin = numpy.zeros((self.nkpts,self.nmo), dtype=int)
orbspin[:,1::2] = 1
if kconserv is None:
kconserv = kpts_helper.get_kconserv(self._scf.cell, self.kpts)
return spatial2spin(tx, orbspin, kconserv)
def spin2spatial(self, tx, orbspin=None, kconserv=None):
if orbspin is None:
if getattr(self.mo_coeff[0], 'orbspin', None) is not None:
orbspin = [self.mo_coeff[k].orbspin[idx]
for k, idx in enumerate(self.get_frozen_mask())]
else:
orbspin = numpy.zeros((self.nkpts,self.nmo), dtype=int)
orbspin[:,1::2] = 1
if kconserv is None:
kconserv = kpts_helper.get_kconserv(self._scf.cell, self.kpts)
return spin2spatial(tx, orbspin, kconserv)
def from_uccsd(self, t1, t2, orbspin=None):
return self.spatial2spin(t1, orbspin), self.spatial2spin(t2, orbspin)
def to_uccsd(self, t1, t2, orbspin=None):
return spin2spatial(t1, orbspin), spin2spatial(t2, orbspin)
CCSD = KCCSD = KGCCSD = GCCSD
def _make_eris_incore(cc, mo_coeff=None):
from pyscf.pbc import tools
from pyscf.pbc.cc.ccsd import _adjust_occ
log = logger.Logger(cc.stdout, cc.verbose)
cput0 = (time.clock(), time.time())
eris = gccsd._PhysicistsERIs()
cell = cc._scf.cell
kpts = cc.kpts
nkpts = cc.nkpts
nocc = cc.nocc
nmo = cc.nmo
nvir = nmo - nocc
eris.nocc = nocc
#if any(nocc != numpy.count_nonzero(cc._scf.mo_occ[k] > 0) for k in range(nkpts)):
# raise NotImplementedError('Different occupancies found for different k-points')
if mo_coeff is None:
mo_coeff = cc.mo_coeff
nao = mo_coeff[0].shape[0]
dtype = mo_coeff[0].dtype
moidx = get_frozen_mask(cc)
nocc_per_kpt = numpy.asarray(get_nocc(cc, per_kpoint=True))
nmo_per_kpt = numpy.asarray(get_nmo(cc, per_kpoint=True))
padded_moidx = []
for k in range(nkpts):
kpt_nocc = nocc_per_kpt[k]
kpt_nvir = nmo_per_kpt[k] - kpt_nocc
kpt_padded_moidx = numpy.concatenate((numpy.ones(kpt_nocc, dtype=numpy.bool),
numpy.zeros(nmo - kpt_nocc - kpt_nvir, dtype=numpy.bool),
numpy.ones(kpt_nvir, dtype=numpy.bool)))
padded_moidx.append(kpt_padded_moidx)
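    # Worked example (added comment): with nmo = 6, kpt_nocc = 2 and kpt_nvir = 3
    # the mask built above is [T, T, F, T, T, T], i.e. the padding slots for
    # k-points with fewer orbitals sit between the occupied and virtual blocks.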
eris.mo_coeff = []
eris.orbspin = []
# Generate the molecular orbital coefficients with the frozen orbitals masked.
# Each MO is tagged with orbspin, a list of 0's and 1's that give the overall
# spin of each MO.
#
# Here we will work with two index arrays; one is for our original (small) moidx
# array while the next is for our new (large) padded array.
for k in range(nkpts):
kpt_moidx = moidx[k]
kpt_padded_moidx = padded_moidx[k]
mo = numpy.zeros((nao, nmo), dtype=dtype)
mo[:, kpt_padded_moidx] = mo_coeff[k][:, kpt_moidx]
if getattr(mo_coeff[k], 'orbspin', None) is not None:
orbspin_dtype = mo_coeff[k].orbspin[kpt_moidx].dtype
orbspin = numpy.zeros(nmo, dtype=orbspin_dtype)
orbspin[kpt_padded_moidx] = mo_coeff[k].orbspin[kpt_moidx]
mo = lib.tag_array(mo, orbspin=orbspin)
eris.orbspin.append(orbspin)
# FIXME: What if the user freezes all up spin orbitals in
# an RHF calculation? The number of electrons will still be
# even.
else: # guess orbital spin - assumes an RHF calculation
assert (numpy.count_nonzero(kpt_moidx) % 2 == 0)
orbspin = numpy.zeros(mo.shape[1], dtype=int)
orbspin[1::2] = 1
mo = lib.tag_array(mo, orbspin=orbspin)
eris.orbspin.append(orbspin)
eris.mo_coeff.append(mo)
# Re-make our fock MO matrix elements from density and fock AO
dm = cc._scf.make_rdm1(cc.mo_coeff, cc.mo_occ)
with lib.temporary_env(cc._scf, exxdiv=None):
# _scf.exxdiv affects eris.fock. HF exchange correction should be
# excluded from the Fock matrix.
vhf = cc._scf.get_veff(cell, dm)
fockao = cc._scf.get_hcore() + vhf
eris.fock = numpy.asarray([reduce(numpy.dot, (mo.T.conj(), fockao[k], mo))
for k, mo in enumerate(eris.mo_coeff)])
eris.e_hf = cc._scf.energy_tot(dm=dm, vhf=vhf)
eris.mo_energy = [eris.fock[k].diagonal().real for k in range(nkpts)]
    # Add the HFX correction to eris.mo_energy to improve convergence of the
    # CCSD iterations. This is particularly useful for 2D systems, whose
    # occupied and virtual orbital energies may overlap and cause numerical
    # problems in the CCSD iterations.
    # FIXME: should this correction also be applied for other exxdiv treatments?
    # Without it, the MP2 energy may be far from the correct value.
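    # Note (added comment): _adjust_occ is assumed to shift only the first nocc
    # entries of each k-point's orbital-energy array, so the occupied levels are
    # lowered by the Madelung constant while the virtuals are left untouched.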
madelung = tools.madelung(cell, kpts)
eris.mo_energy = [_adjust_occ(mo_e, nocc, -madelung)
for k, mo_e in enumerate(eris.mo_energy)]
# Get location of padded elements in occupied and virtual space.
nocc_per_kpt = get_nocc(cc, per_kpoint=True)
nonzero_padding = padding_k_idx(cc, kind="joint")
# Check direct and indirect gaps for possible issues with CCSD convergence.
mo_e = [eris.mo_energy[kp][nonzero_padding[kp]] for kp in range(nkpts)]
mo_e = numpy.sort([y for x in mo_e for y in x]) # Sort de-nested array
gap = mo_e[numpy.sum(nocc_per_kpt)] - mo_e[numpy.sum(nocc_per_kpt)-1]
if gap < 1e-5:
logger.warn(cc, 'HOMO-LUMO gap %s too small for KCCSD. '
'May cause issues in convergence.', gap)
kconserv = kpts_helper.get_kconserv(cell, kpts)
if getattr(mo_coeff[0], 'orbspin', None) is None:
# The bottom nao//2 coefficients are down (up) spin while the top are up (down).
mo_a_coeff = [mo[:nao // 2] for mo in eris.mo_coeff]
mo_b_coeff = [mo[nao // 2:] for mo in eris.mo_coeff]
eri = numpy.empty((nkpts, nkpts, nkpts, nmo, nmo, nmo, nmo), dtype=numpy.complex128)
fao2mo = cc._scf.with_df.ao2mo
for kp, kq, kr in kpts_helper.loop_kkk(nkpts):
ks = kconserv[kp, kq, kr]
eri_kpt = fao2mo(
(mo_a_coeff[kp], mo_a_coeff[kq], mo_a_coeff[kr], mo_a_coeff[ks]), (kpts[kp], kpts[kq], kpts[kr], kpts[ks]),
compact=False)
eri_kpt += fao2mo(
(mo_b_coeff[kp], mo_b_coeff[kq], mo_b_coeff[kr], mo_b_coeff[ks]), (kpts[kp], kpts[kq], kpts[kr], kpts[ks]),
compact=False)
eri_kpt += fao2mo(
(mo_a_coeff[kp], mo_a_coeff[kq], mo_b_coeff[kr], mo_b_coeff[ks]), (kpts[kp], kpts[kq], kpts[kr], kpts[ks]),
compact=False)
eri_kpt += fao2mo(
(mo_b_coeff[kp], mo_b_coeff[kq], mo_a_coeff[kr], mo_a_coeff[ks]), (kpts[kp], kpts[kq], kpts[kr], kpts[ks]),
compact=False)
eri_kpt = eri_kpt.reshape(nmo, nmo, nmo, nmo)
eri[kp, kq, kr] = eri_kpt
else:
mo_a_coeff = [mo[:nao // 2] + mo[nao // 2:] for mo in eris.mo_coeff]
eri = numpy.empty((nkpts, nkpts, nkpts, nmo, nmo, nmo, nmo), dtype=numpy.complex128)
fao2mo = cc._scf.with_df.ao2mo
for kp, kq, kr in kpts_helper.loop_kkk(nkpts):
ks = kconserv[kp, kq, kr]
eri_kpt = fao2mo(
(mo_a_coeff[kp], mo_a_coeff[kq], mo_a_coeff[kr], mo_a_coeff[ks]), (kpts[kp], kpts[kq], kpts[kr], kpts[ks]),
compact=False)
eri_kpt[(eris.orbspin[kp][:, None] != eris.orbspin[kq]).ravel()] = 0
eri_kpt[:, (eris.orbspin[kr][:, None] != eris.orbspin[ks]).ravel()] = 0
eri_kpt = eri_kpt.reshape(nmo, nmo, nmo, nmo)
eri[kp, kq, kr] = eri_kpt
# Check some antisymmetrized properties of the integrals
if DEBUG:
check_antisymm_3412(cc, cc.kpts, eri)
# Antisymmetrizing (pq|rs)-(ps|rq), where the latter integral is equal to
# (rq|ps); done since we aren't tracking the kpoint of orbital 's'
eri = eri - eri.transpose(2, 1, 0, 5, 4, 3, 6)
# Chemist -> physics notation
eri = eri.transpose(0, 2, 1, 3, 5, 4, 6)
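    # Note (added comment): after the two steps above, eri holds antisymmetrized
    # physicists'-notation integrals <pq||rs> = (pr|qs) - (ps|qr), indexed as
    # eri[kp, kq, kr, p, q, r, s] with ks fixed by momentum conservation.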
# Set the various integrals
eris.dtype = eri.dtype
eris.oooo = eri[:, :, :, :nocc, :nocc, :nocc, :nocc].copy() / nkpts
eris.ooov = eri[:, :, :, :nocc, :nocc, :nocc, nocc:].copy() / nkpts
eris.ovoo = eri[:, :, :, :nocc, nocc:, :nocc, :nocc].copy() / nkpts
eris.oovv = eri[:, :, :, :nocc, :nocc, nocc:, nocc:].copy() / nkpts
eris.ovov = eri[:, :, :, :nocc, nocc:, :nocc, nocc:].copy() / nkpts
eris.ovvv = eri[:, :, :, :nocc, nocc:, nocc:, nocc:].copy() / nkpts
eris.vvvv = eri[:, :, :, nocc:, nocc:, nocc:, nocc:].copy() / nkpts
log.timer('CCSD integral transformation', *cput0)
return eris
def check_antisymm_3412(cc, kpts, integrals):
kconserv = kpts_helper.get_kconserv(cc._scf.cell, cc.kpts)
nkpts = len(kpts)
diff = 0.0
for kp, kq, kr in kpts_helper.loop_kkk(nkpts):
ks = kconserv[kp, kr, kq]
for p in range(integrals.shape[3]):
for q in range(integrals.shape[4]):
for r in range(integrals.shape[5]):
for s in range(integrals.shape[6]):
pqrs = integrals[kp, kq, kr, p, q, r, s]
                        rspq = integrals[kr, ks, kp, r, s, p, q]
                        cdiff = numpy.linalg.norm(pqrs - rspq).real
                        if cdiff > 1e-5:
print("AS diff = %.15g" % cdiff, pqrs, rspq, kp, kq, kr, ks, p, q, r, s)
diff = max(diff, cdiff)
print("antisymmetrization : max diff = %.15g" % diff)
if diff > 1e-5:
print("Energy cutoff (or cell.mesh) is not enough to converge AO integrals.")
return diff
def check_antisymm_12(cc, kpts, integrals):
kconserv = kpts_helper.get_kconserv(cc._scf.cell, cc.kpts)
nkpts = len(kpts)
diff = 0.0
for kp, kq, kr in kpts_helper.loop_kkk(nkpts):
ks = kconserv[kp, kr, kq]
for p in range(integrals.shape[3]):
for q in range(integrals.shape[4]):
for r in range(integrals.shape[5]):
for s in range(integrals.shape[6]):
pqrs = integrals[kp, kq, kr, p, q, r, s]
qprs = integrals[kq, kp, kr, q, p, r, s]
cdiff = numpy.linalg.norm(pqrs + qprs).real
                        if cdiff > 1e-5:
print("AS diff = %.15g" % cdiff, pqrs, qprs, kp, kq, kr, ks, p, q, r, s)
diff = max(diff, cdiff)
print("antisymmetrization : max diff = %.15g" % diff)
if diff > 1e-5:
print("Energy cutoff (or cell.mesh) is not enough to converge AO integrals.")
def check_antisymm_34(cc, kpts, integrals):
kconserv = kpts_helper.get_kconserv(cc._scf.cell, cc.kpts)
nkpts = len(kpts)
diff = 0.0
for kp, kq, kr in kpts_helper.loop_kkk(nkpts):
ks = kconserv[kp, kr, kq]
for p in range(integrals.shape[3]):
for q in range(integrals.shape[4]):
for r in range(integrals.shape[5]):
for s in range(integrals.shape[6]):
pqrs = integrals[kp, kq, kr, p, q, r, s]
pqsr = integrals[kp, kq, ks, p, q, s, r]
cdiff = numpy.linalg.norm(pqrs + pqsr).real
                        if cdiff > 1e-5:
print("AS diff = %.15g" % cdiff, pqrs, pqsr, kp, kq, kr, ks, p, q, r, s)
diff = max(diff, cdiff)
print("antisymmetrization : max diff = %.15g" % diff)
if diff > 1e-5:
print("Energy cutoff (or cell.mesh) is not enough to converge AO integrals.")
imd = imdk
class _IMDS:
# Identical to molecular rccsd_slow
def __init__(self, cc):
self.verbose = cc.verbose
self.stdout = cc.stdout
self.t1 = cc.t1
self.t2 = cc.t2
self.eris = cc.eris
self.kconserv = cc.khelper.kconserv
self.made_ip_imds = False
self.made_ea_imds = False
self._made_shared_2e = False
self._fimd = None
def _make_shared_1e(self):
cput0 = (time.clock(), time.time())
log = logger.Logger(self.stdout, self.verbose)
t1,t2,eris = self.t1, self.t2, self.eris
kconserv = self.kconserv
self.Loo = imd.Loo(t1,t2,eris,kconserv)
self.Lvv = imd.Lvv(t1,t2,eris,kconserv)
self.Fov = imd.cc_Fov(t1,t2,eris,kconserv)
log.timer('EOM-CCSD shared one-electron intermediates', *cput0)
def _make_shared_2e(self):
cput0 = (time.clock(), time.time())
log = logger.Logger(self.stdout, self.verbose)
t1,t2,eris = self.t1, self.t2, self.eris
kconserv = self.kconserv
# TODO: check whether to hold Wovov Wovvo in memory
if self._fimd is None:
self._fimd = lib.H5TmpFile()
nkpts, nocc, nvir = t1.shape
self._fimd.create_dataset('ovov', (nkpts,nkpts,nkpts,nocc,nvir,nocc,nvir), t1.dtype.char)
self._fimd.create_dataset('ovvo', (nkpts,nkpts,nkpts,nocc,nvir,nvir,nocc), t1.dtype.char)
# 2 virtuals
self.Wovov = imd.Wovov(t1,t2,eris,kconserv, self._fimd['ovov'])
self.Wovvo = imd.Wovvo(t1,t2,eris,kconserv, self._fimd['ovvo'])
self.Woovv = eris.oovv
log.timer('EOM-CCSD shared two-electron intermediates', *cput0)
def make_ip(self, ip_partition=None):
self._make_shared_1e()
if self._made_shared_2e is False and ip_partition != 'mp':
self._make_shared_2e()
self._made_shared_2e = True
cput0 = (time.clock(), time.time())
log = logger.Logger(self.stdout, self.verbose)
t1,t2,eris = self.t1, self.t2, self.eris
kconserv = self.kconserv
nkpts, nocc, nvir = t1.shape
self._fimd.create_dataset('oooo', (nkpts,nkpts,nkpts,nocc,nocc,nocc,nocc), t1.dtype.char)
self._fimd.create_dataset('ooov', (nkpts,nkpts,nkpts,nocc,nocc,nocc,nvir), t1.dtype.char)
self._fimd.create_dataset('ovoo', (nkpts,nkpts,nkpts,nocc,nvir,nocc,nocc), t1.dtype.char)
# 0 or 1 virtuals
if ip_partition != 'mp':
self.Woooo = imd.Woooo(t1,t2,eris,kconserv, self._fimd['oooo'])
self.Wooov = imd.Wooov(t1,t2,eris,kconserv, self._fimd['ooov'])
self.Wovoo = imd.Wovoo(t1,t2,eris,kconserv, self._fimd['ovoo'])
self.made_ip_imds = True
log.timer('EOM-CCSD IP intermediates', *cput0)
def make_ea(self, ea_partition=None):
self._make_shared_1e()
if self._made_shared_2e is False and ea_partition != 'mp':
self._make_shared_2e()
self._made_shared_2e = True
cput0 = (time.clock(), time.time())
log = logger.Logger(self.stdout, self.verbose)
t1,t2,eris = self.t1, self.t2, self.eris
kconserv = self.kconserv
nkpts, nocc, nvir = t1.shape
self._fimd.create_dataset('vovv', (nkpts,nkpts,nkpts,nvir,nocc,nvir,nvir), t1.dtype.char)
self._fimd.create_dataset('vvvo', (nkpts,nkpts,nkpts,nvir,nvir,nvir,nocc), t1.dtype.char)
self._fimd.create_dataset('vvvv', (nkpts,nkpts,nkpts,nvir,nvir,nvir,nvir), t1.dtype.char)
# 3 or 4 virtuals
self.Wvovv = imd.Wvovv(t1,t2,eris,kconserv, self._fimd['vovv'])
        if ea_partition == 'mp' and numpy.all(t1 == 0):
self.Wvvvo = imd.Wvvvo(t1,t2,eris,kconserv, self._fimd['vvvo'])
else:
self.Wvvvv = imd.Wvvvv(t1,t2,eris,kconserv, self._fimd['vvvv'])
self.Wvvvo = imd.Wvvvo(t1,t2,eris,kconserv,self.Wvvvv, self._fimd['vvvo'])
self.made_ea_imds = True
log.timer('EOM-CCSD EA intermediates', *cput0)
from pyscf.pbc import scf
scf.kghf.KGHF.CCSD = lib.class_as_method(KGCCSD)
if __name__ == '__main__':
from pyscf.pbc import gto, scf, cc
cell = gto.Cell()
cell.atom='''
C 0.000000000000 0.000000000000 0.000000000000
C 1.685068664391 1.685068664391 1.685068664391
'''
cell.basis = 'gth-szv'
cell.pseudo = 'gth-pade'
cell.a = '''
0.000000000, 3.370137329, 3.370137329
3.370137329, 0.000000000, 3.370137329
3.370137329, 3.370137329, 0.000000000'''
cell.unit = 'B'
cell.verbose = 5
cell.build()
# Running HF and CCSD with 1x1x2 Monkhorst-Pack k-point mesh
kmf = scf.KRHF(cell, kpts=cell.make_kpts([1,1,2]), exxdiv=None)
ehf = kmf.kernel()
kmf = scf.addons.convert_to_ghf(kmf)
mycc = KGCCSD(kmf)
ecc, t1, t2 = mycc.kernel()
print(ecc - -0.155298393321855)
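    # Optional follow-up (added, not part of the original example): the
    # perturbative triples correction can be evaluated from the converged
    # amplitudes with the ccsd_t() method defined above, e.g.
    #     ecc_t = mycc.ccsd_t()
    #     print('CCSD(T) correction', ecc_t)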
|
|
"""
Local templates for the archetype zopeskel project
"""
import os
from zopeskel.base import var
from zopeskel.localcommands import ZopeSkelLocalTemplate
from Cheetah.Template import Template as cheetah_template
class ArchetypeSubTemplate(ZopeSkelLocalTemplate):
use_cheetah = True
parent_templates = ['archetype']
class ContentType(ArchetypeSubTemplate):
"""
A Content Type skeleton
"""
_template_dir = 'templates/archetype/contenttype'
summary = "A content type skeleton"
vars = [
var('contenttype_name', 'Content type name ', default='Example Type'),
var('contenttype_description', 'Content type description ',
default='Description of the Example Type'),
var('folderish', 'True/False: Content type is Folderish ',
default=False),
var('global_allow', 'True/False: Globally addable ',
default=True),
var('allow_discussion', 'True/False: Allow discussion ',
default=False),
]
def pre(self, command, output_dir, vars):
vars['contenttype_classname'] = vars['contenttype_name'].replace(" ", "")
vars['schema_name'] = vars['contenttype_classname'] + "Schema"
vars['content_class_filename'] = vars['contenttype_classname'].lower()
vars['types_xml_filename'] = vars['contenttype_name'].replace(" ", "_")
vars['interface_name'] = "I" + vars['contenttype_name'].replace(" ", "")
vars['add_permission_name'] = vars['package_dotted_name'] + ': Add ' + vars['contenttype_name']
class ATSchemaField(ArchetypeSubTemplate):
"""
A handy AT schema builder
"""
_template_dir = 'templates/archetype/atschema'
summary = "A handy AT schema builder"
marker_name = "Your Archetypes field definitions here ..."
# mapping of ATSchema types to zope.schema types
typemap = {'boolean': 'Bool',
'computed': 'TextLine',
'cmfobject': 'TextLine',
'datetime': 'Date',
'file': 'Bytes',
'fixedpoint': 'Float',
'float': 'Float',
'image': 'Bytes',
'integer': 'Int',
'lines': 'List',
'reference': 'Object',
'string': 'TextLine',
'text': 'Text',
'unknown': 'TextLine'}
# fieldtypes-map to (widget, validator)
fieldtypes = {
'boolean': ('boolean', None),
'computed': ('computed', None),
'cmfobject': ('file', None),
'datetime': ('calendar', 'isValidDate'),
'file': ('file', 'isNonEmptyFile'),
'fixedpoint': ('decimal', 'isDecimal'),
'float': ('decimal', 'isDecimal'),
'image': ('image', 'isNonEmptyFile'),
'integer': ('integer', 'isInt'),
'lines': ('lines', None),
'reference': ('reference', None),
'string': ('string', None),
'text': ('textarea', None),
}
vars = [
var('content_class_filename',
'What is the module (file)name of your content class?',
default='exampletype'),
var('field_name',
'What would you like to name this field?',
default='newfield'),
var('field_type',
'What kind of field should I make for you?\nSome examples: ['+','.join(fieldtypes.keys())+']\n',
default='string'),
var('widget_type',
'What kind of widget do you want to use (example: Password)?',
default='default'),
var('field_label',
'What should be the label of this field (title)?',
default='New Field'),
var('field_desc',
'What should be the description of this field (help text)?',
default='Field description'),
var('required',
'Is this field required?',
default='False'),
var('default',
"If you'd like a default type it here, otherwise leave it blank",
default=''),
var('validator',
"Enter a validator (isEmail), or None, or get a default validator for your specified field type.",
default='use default validator'),
]
def check_vars(self, *args, **kwargs):
"""
        Override check_vars to print a welcome message
"""
print "Welcome to the ATSchema Builder. Field names/widgets can be specified in lowercase or upper case."
print "NOTE: No need to add 'widget' or 'field' to the names. atschema does the work for you!"
print "See "
print " http://plone.org/documentation/manual/developer-manual/archetypes/fields/fields-reference/"
print "and "
print " http://plone.org/documentation/manual/developer-manual/archetypes/fields/widgets-reference/"
print "for field and widget details"
return super(ATSchemaField, self).check_vars(*args, **kwargs)
def run(self, command, output_dir, vars):
"""
By-passing the base run so I can do multiple inserts
with different marker names
"""
(vars['namespace_package'],
vars['namespace_package2'],
vars['package']) = command.get_parent_namespace_packages()
if vars['namespace_package2']:
vars['package_dotted_name'] = "%s.%s.%s" % \
(vars['namespace_package'],
vars['namespace_package2'],
vars['package'])
else:
vars['package_dotted_name'] = "%s.%s" % \
(vars['namespace_package'],
vars['package'])
vars['a_validator'] = ''
if vars['validator'] == 'use default validator':
## take default Validator...
val = ATSchemaField.fieldtypes[vars['field_type'].lower()][1]
if val is not None:
vars['a_validator'] = """'%s'""" % val
elif vars['validator'] != 'None': ## user providing 'aValidator'
vars['a_validator'] = """'%s'""" % vars['validator']
self.pre(command, output_dir, vars)
interface_insert_template = open(os.path.join(self.template_dir(), 'interfaces/+interface_name+.py_insert')).read()
atschema_insert_template = open(os.path.join(self.template_dir(),'content/+content_class_filename+.py_insert')).read()
bridges_insert_template = open(os.path.join(self.template_dir(),'content/schema_field_bridge.txt_insert')).read()
content_messagefactory_insert_template = open(os.path.join(self.template_dir(), 'content/messagefactory_insert.txt_insert')).read()
interface_additional_imports_template = open(os.path.join(self.template_dir(), 'interfaces/additional_imports.txt_insert')).read()
# insert_into_file really wants the inserted text to end with a newline
interface_insert = str(cheetah_template(interface_insert_template, vars))+"\n"
atschema_insert = str(cheetah_template(atschema_insert_template, vars))+"\n"
bridges_insert = str(cheetah_template(bridges_insert_template, vars))+"\n"
content_messagefactory_insert = str(cheetah_template(content_messagefactory_insert_template, vars))+"\n"
interface_additional_imports = str(cheetah_template(interface_additional_imports_template, vars))+"\n"
# self.write_files(command, output_dir, vars)
command.insert_into_file(os.path.join(command.dest_dir(), 'content', '%s.py' % (vars['content_class_filename'])), self.marker_name, atschema_insert)
command.insert_into_file(os.path.join(command.dest_dir(), 'interfaces', '%s.py' % (vars['content_class_filename'])), 'schema definition goes here', interface_insert)
command.insert_into_file(os.path.join(command.dest_dir(), 'content', '%s.py' % (vars['content_class_filename'])), 'Your ATSchema to Python Property Bridges Here ...', bridges_insert)
command.insert_into_file(os.path.join(command.dest_dir(), 'content', '%s.py' % (vars['content_class_filename'])), 'Message Factory Imported Here', content_messagefactory_insert)
command.insert_into_file(os.path.join(command.dest_dir(), 'interfaces', '%s.py' % (vars['content_class_filename'])), 'Additional Imports Here', interface_additional_imports)
self.post(command, output_dir, vars)
def pre(self, command, output_dir, vars):
file = vars['content_class_filename']
if file.endswith('.py'):
file = os.path.splitext(file)[0]
vars['field_type'] = vars['field_type'].capitalize()
if vars['widget_type'].lower() == 'default':
vars['widget_type'] = self.fieldtypes[vars['field_type'].lower()][0]
vars['widget_type'] = vars['widget_type'].capitalize()
# camelcase multiword names
if vars['field_type'].lower() == 'fixedpoint':
vars['field_type'] = 'FixedPoint'
if vars['field_type'].lower() == 'datetime':
vars['field_type'] = 'DateTime'
if vars['field_type'].lower() == 'date':
vars['field_type'] = 'DateTime'
if vars['widget_type'].lower() == 'inandout':
vars['widget_type'] = 'InAndOut'
if vars['widget_type'].lower() == 'multiselection':
vars['widget_type'] = 'MultiSelection'
if vars['widget_type'].lower() == 'picklist':
vars['widget_type'] = 'PickList'
if vars['widget_type'].lower() == 'referencebrowser':
vars['widget_type'] = 'ReferenceBrowser'
if vars['widget_type'].lower() == 'textarea':
vars['widget_type'] = 'TextArea'
# try to get the zope.schema type, but default to TextLine if no dice
try:
vars['zopeschema_type'] = self.typemap[vars['field_type'].lower()]
        except KeyError:
vars['zopeschema_type'] = self.typemap['unknown']
# if the widget is the RichWidget, set the type to 'SourceText'
if vars['widget_type'].lower() == 'rich':
vars['zopeschema_type'] = 'SourceText'
# if not vars['i18n_domain']:
# vars['i18n_domain'] = vars['package_dotted_name']
vars['content_class_filename'] = file
|
|
# Copyright 2013-2014 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import threading
import time
import os
import sys
if sys.version_info[:2] == (2, 6):
import unittest2 as unittest
else:
import unittest
from pymongo import MongoClient
import bson
import pymongo
from pymongo.read_preferences import ReadPreference
from mongo_connector.doc_managers.doc_manager_simulator import DocManager
from mongo_connector.locking_dict import LockingDict
from mongo_connector.oplog_manager import OplogThread
from mongo_connector.util import retry_until_ok
from tests import mongo_host
from tests.setup_cluster import (
kill_mongo_proc,
restart_mongo_proc,
start_cluster,
get_shard,
kill_all
)
from tests.util import assert_soon
class TestOplogManagerSharded(unittest.TestCase):
"""Defines all test cases for OplogThreads running on a sharded
cluster
"""
def setUp(self):
""" Initialize the cluster:
Clean out the databases used by the tests
Make connections to mongos, mongods
Create and shard test collections
Create OplogThreads
"""
# Start the cluster with a mongos on port 27217
self.mongos_p = start_cluster()
# Connection to mongos
mongos_address = '%s:%d' % (mongo_host, self.mongos_p)
self.mongos_conn = MongoClient(mongos_address)
# Connections to the shards
shard1_ports = get_shard(self.mongos_p, 0)
shard2_ports = get_shard(self.mongos_p, 1)
self.shard1_prim_p = shard1_ports['primary']
self.shard1_scnd_p = shard1_ports['secondaries'][0]
self.shard2_prim_p = shard2_ports['primary']
self.shard2_scnd_p = shard2_ports['secondaries'][0]
self.shard1_conn = MongoClient('%s:%d'
% (mongo_host, self.shard1_prim_p),
replicaSet="demo-set-0")
self.shard2_conn = MongoClient('%s:%d'
% (mongo_host, self.shard2_prim_p),
replicaSet="demo-set-1")
self.shard1_secondary_conn = MongoClient(
'%s:%d' % (mongo_host, self.shard1_scnd_p),
read_preference=ReadPreference.SECONDARY_PREFERRED
)
self.shard2_secondary_conn = MongoClient(
'%s:%d' % (mongo_host, self.shard2_scnd_p),
read_preference=ReadPreference.SECONDARY_PREFERRED
)
# Wipe any test data
self.mongos_conn["test"]["mcsharded"].drop()
# Create and shard the collection test.mcsharded on the "i" field
self.mongos_conn["test"]["mcsharded"].ensure_index("i")
self.mongos_conn.admin.command("enableSharding", "test")
self.mongos_conn.admin.command("shardCollection",
"test.mcsharded",
key={"i": 1})
# Pre-split the collection so that:
# i < 1000 lives on shard1
# i >= 1000 lives on shard2
self.mongos_conn.admin.command(bson.SON([
("split", "test.mcsharded"),
("middle", {"i": 1000})
]))
# disable the balancer
self.mongos_conn.config.settings.update(
{"_id": "balancer"},
{"$set": {"stopped": True}},
upsert=True
)
# Move chunks to their proper places
try:
self.mongos_conn["admin"].command(
"moveChunk",
"test.mcsharded",
find={"i": 1},
to="demo-set-0"
)
except pymongo.errors.OperationFailure:
pass # chunk may already be on the correct shard
try:
self.mongos_conn["admin"].command(
"moveChunk",
"test.mcsharded",
find={"i": 1000},
to="demo-set-1"
)
except pymongo.errors.OperationFailure:
pass # chunk may already be on the correct shard
# Make sure chunks are distributed correctly
self.mongos_conn["test"]["mcsharded"].insert({"i": 1})
self.mongos_conn["test"]["mcsharded"].insert({"i": 1000})
def chunks_moved():
doc1 = self.shard1_conn.test.mcsharded.find_one()
doc2 = self.shard2_conn.test.mcsharded.find_one()
if None in (doc1, doc2):
return False
return doc1['i'] == 1 and doc2['i'] == 1000
assert_soon(chunks_moved)
self.mongos_conn.test.mcsharded.remove()
# create a new oplog progress file
try:
os.unlink("config.txt")
except OSError:
pass
open("config.txt", "w").close()
# Oplog threads (oplog manager) for each shard
doc_manager = DocManager()
oplog_progress = LockingDict()
self.opman1 = OplogThread(
primary_conn=self.shard1_conn,
main_address='%s:%d' % (mongo_host, self.mongos_p),
oplog_coll=self.shard1_conn["local"]["oplog.rs"],
is_sharded=True,
doc_manager=doc_manager,
oplog_progress_dict=oplog_progress,
namespace_set=["test.mcsharded", "test.mcunsharded"],
auth_key=None,
auth_username=None
)
self.opman2 = OplogThread(
primary_conn=self.shard2_conn,
main_address='%s:%d' % (mongo_host, self.mongos_p),
oplog_coll=self.shard2_conn["local"]["oplog.rs"],
is_sharded=True,
doc_manager=doc_manager,
oplog_progress_dict=oplog_progress,
namespace_set=["test.mcsharded", "test.mcunsharded"],
auth_key=None,
auth_username=None
)
def tearDown(self):
try:
self.opman1.join()
except RuntimeError:
pass # thread may not have been started
try:
self.opman2.join()
except RuntimeError:
pass # thread may not have been started
self.mongos_conn.close()
self.shard1_conn.close()
self.shard2_conn.close()
self.shard1_secondary_conn.close()
self.shard2_secondary_conn.close()
kill_all()
def test_get_oplog_cursor(self):
"""Test the get_oplog_cursor method"""
# timestamp is None - all oplog entries are returned.
        cursor1 = self.opman1.get_oplog_cursor(None)
        cursor2 = self.opman2.get_oplog_cursor(None)
        self.assertEqual(cursor1.count(),
                         self.shard1_conn["local"]["oplog.rs"].count())
        self.assertEqual(cursor2.count(),
                         self.shard2_conn["local"]["oplog.rs"].count())
# earliest entry is the only one at/after timestamp
doc = {"ts": bson.Timestamp(1000, 0), "i": 1}
self.mongos_conn["test"]["mcsharded"].insert(doc)
latest_timestamp = self.opman1.get_last_oplog_timestamp()
cursor = self.opman1.get_oplog_cursor(latest_timestamp)
self.assertNotEqual(cursor, None)
self.assertEqual(cursor.count(), 1)
next_entry_id = cursor[0]['o']['_id']
retrieved = self.mongos_conn.test.mcsharded.find_one(next_entry_id)
self.assertEqual(retrieved, doc)
# many entries before and after timestamp
for i in range(2, 2002):
self.mongos_conn["test"]["mcsharded"].insert({
"i": i
})
oplog1 = self.shard1_conn["local"]["oplog.rs"].find(
sort=[("ts", pymongo.ASCENDING)]
)
oplog2 = self.shard2_conn["local"]["oplog.rs"].find(
sort=[("ts", pymongo.ASCENDING)]
)
# oplogs should have records for inserts performed, plus
# various other messages
oplog1_count = oplog1.count()
oplog2_count = oplog2.count()
self.assertGreaterEqual(oplog1_count, 998)
self.assertGreaterEqual(oplog2_count, 1002)
pivot1 = oplog1.skip(400).limit(1)[0]
pivot2 = oplog2.skip(400).limit(1)[0]
cursor1 = self.opman1.get_oplog_cursor(pivot1["ts"])
cursor2 = self.opman2.get_oplog_cursor(pivot2["ts"])
self.assertEqual(cursor1.count(), oplog1_count - 400)
self.assertEqual(cursor2.count(), oplog2_count - 400)
def test_get_last_oplog_timestamp(self):
"""Test the get_last_oplog_timestamp method"""
# "empty" the oplog
self.opman1.oplog = self.shard1_conn["test"]["emptycollection"]
self.opman2.oplog = self.shard2_conn["test"]["emptycollection"]
self.assertEqual(self.opman1.get_last_oplog_timestamp(), None)
self.assertEqual(self.opman2.get_last_oplog_timestamp(), None)
# Test non-empty oplog
self.opman1.oplog = self.shard1_conn["local"]["oplog.rs"]
self.opman2.oplog = self.shard2_conn["local"]["oplog.rs"]
for i in range(1000):
self.mongos_conn["test"]["mcsharded"].insert({
"i": i + 500
})
oplog1 = self.shard1_conn["local"]["oplog.rs"]
oplog1 = oplog1.find().sort("$natural", pymongo.DESCENDING).limit(1)[0]
oplog2 = self.shard2_conn["local"]["oplog.rs"]
oplog2 = oplog2.find().sort("$natural", pymongo.DESCENDING).limit(1)[0]
self.assertEqual(self.opman1.get_last_oplog_timestamp(),
oplog1["ts"])
self.assertEqual(self.opman2.get_last_oplog_timestamp(),
oplog2["ts"])
def test_dump_collection(self):
"""Test the dump_collection method
Cases:
1. empty oplog
2. non-empty oplog
"""
# Test with empty oplog
self.opman1.oplog = self.shard1_conn["test"]["emptycollection"]
self.opman2.oplog = self.shard2_conn["test"]["emptycollection"]
last_ts1 = self.opman1.dump_collection()
last_ts2 = self.opman2.dump_collection()
self.assertEqual(last_ts1, None)
self.assertEqual(last_ts2, None)
# Test with non-empty oplog
self.opman1.oplog = self.shard1_conn["local"]["oplog.rs"]
self.opman2.oplog = self.shard2_conn["local"]["oplog.rs"]
for i in range(1000):
self.mongos_conn["test"]["mcsharded"].insert({
"i": i + 500
})
last_ts1 = self.opman1.get_last_oplog_timestamp()
last_ts2 = self.opman2.get_last_oplog_timestamp()
self.assertEqual(last_ts1, self.opman1.dump_collection())
self.assertEqual(last_ts2, self.opman2.dump_collection())
self.assertEqual(len(self.opman1.doc_managers[0]._search()), 1000)
def test_init_cursor(self):
"""Test the init_cursor method
Cases:
1. no last checkpoint, no collection dump
2. no last checkpoint, collection dump ok and stuff to dump
3. no last checkpoint, nothing to dump, stuff in oplog
4. no last checkpoint, nothing to dump, nothing in oplog
5. no last checkpoint, no collection dump, stuff in oplog
6. last checkpoint exists
7. last checkpoint is behind
"""
# N.B. these sub-cases build off of each other and cannot be re-ordered
# without side-effects
# No last checkpoint, no collection dump, nothing in oplog
# "change oplog collection" to put nothing in oplog
self.opman1.oplog = self.shard1_conn["test"]["emptycollection"]
self.opman2.oplog = self.shard2_conn["test"]["emptycollection"]
self.opman1.collection_dump = False
self.opman2.collection_dump = False
self.assertTrue(all(doc['op'] == 'n'
for doc in self.opman1.init_cursor()[0]))
self.assertEqual(self.opman1.checkpoint, None)
self.assertTrue(all(doc['op'] == 'n'
for doc in self.opman2.init_cursor()[0]))
self.assertEqual(self.opman2.checkpoint, None)
# No last checkpoint, empty collections, nothing in oplog
self.opman1.collection_dump = self.opman2.collection_dump = True
cursor, cursor_len = self.opman1.init_cursor()
self.assertEqual(cursor, None)
self.assertEqual(cursor_len, 0)
self.assertEqual(self.opman1.checkpoint, None)
cursor, cursor_len = self.opman2.init_cursor()
self.assertEqual(cursor, None)
self.assertEqual(cursor_len, 0)
self.assertEqual(self.opman2.checkpoint, None)
# No last checkpoint, empty collections, something in oplog
self.opman1.oplog = self.shard1_conn["local"]["oplog.rs"]
self.opman2.oplog = self.shard2_conn["local"]["oplog.rs"]
oplog_startup_ts = self.opman2.get_last_oplog_timestamp()
collection = self.mongos_conn["test"]["mcsharded"]
collection.insert({"i": 1})
collection.remove({"i": 1})
time.sleep(3)
last_ts1 = self.opman1.get_last_oplog_timestamp()
cursor, cursor_len = self.opman1.init_cursor()
self.assertEqual(cursor_len, 0)
self.assertEqual(self.opman1.checkpoint, last_ts1)
with self.opman1.oplog_progress as prog:
self.assertEqual(prog.get_dict()[str(self.opman1.oplog)], last_ts1)
# init_cursor should point to startup message in shard2 oplog
cursor, cursor_len = self.opman2.init_cursor()
self.assertEqual(cursor_len, 0)
self.assertEqual(self.opman2.checkpoint, oplog_startup_ts)
# No last checkpoint, no collection dump, stuff in oplog
progress = LockingDict()
self.opman1.oplog_progress = self.opman2.oplog_progress = progress
self.opman1.collection_dump = self.opman2.collection_dump = False
collection.insert({"i": 1200})
last_ts2 = self.opman2.get_last_oplog_timestamp()
self.opman1.init_cursor()
self.assertEqual(self.opman1.checkpoint, last_ts1)
with self.opman1.oplog_progress as prog:
self.assertEqual(prog.get_dict()[str(self.opman1.oplog)], last_ts1)
cursor, cursor_len = self.opman2.init_cursor()
for i in range(cursor_len - 1):
next(cursor)
self.assertEqual(next(cursor)["o"]["i"], 1200)
self.assertEqual(self.opman2.checkpoint, last_ts2)
with self.opman2.oplog_progress as prog:
self.assertEqual(prog.get_dict()[str(self.opman2.oplog)], last_ts2)
# Last checkpoint exists
progress = LockingDict()
self.opman1.oplog_progress = self.opman2.oplog_progress = progress
for i in range(1000):
collection.insert({"i": i + 500})
entry1 = list(
self.shard1_conn["local"]["oplog.rs"].find(skip=200, limit=2))
entry2 = list(
self.shard2_conn["local"]["oplog.rs"].find(skip=200, limit=2))
progress.get_dict()[str(self.opman1.oplog)] = entry1[0]["ts"]
progress.get_dict()[str(self.opman2.oplog)] = entry2[0]["ts"]
self.opman1.oplog_progress = self.opman2.oplog_progress = progress
self.opman1.checkpoint = self.opman2.checkpoint = None
cursor1, cursor_len1 = self.opman1.init_cursor()
cursor2, cursor_len2 = self.opman2.init_cursor()
self.assertEqual(entry1[1]["ts"], next(cursor1)["ts"])
self.assertEqual(entry2[1]["ts"], next(cursor2)["ts"])
self.assertEqual(self.opman1.checkpoint, entry1[0]["ts"])
self.assertEqual(self.opman2.checkpoint, entry2[0]["ts"])
with self.opman1.oplog_progress as prog:
self.assertEqual(prog.get_dict()[str(self.opman1.oplog)],
entry1[0]["ts"])
with self.opman2.oplog_progress as prog:
self.assertEqual(prog.get_dict()[str(self.opman2.oplog)],
entry2[0]["ts"])
# Last checkpoint is behind
progress = LockingDict()
progress.get_dict()[str(self.opman1.oplog)] = bson.Timestamp(1, 0)
progress.get_dict()[str(self.opman2.oplog)] = bson.Timestamp(1, 0)
self.opman1.oplog_progress = self.opman2.oplog_progress = progress
self.opman1.checkpoint = self.opman2.checkpoint = None
cursor, cursor_len = self.opman1.init_cursor()
self.assertEqual(cursor_len, 0)
self.assertEqual(cursor, None)
self.assertIsNotNone(self.opman1.checkpoint)
cursor, cursor_len = self.opman2.init_cursor()
self.assertEqual(cursor_len, 0)
self.assertEqual(cursor, None)
self.assertIsNotNone(self.opman2.checkpoint)
def test_rollback(self):
"""Test the rollback method in a sharded environment
Cases:
1. Documents on both shards, rollback on one shard
2. Documents on both shards, rollback on both shards
"""
self.opman1.start()
self.opman2.start()
# Insert first documents while primaries are up
db_main = self.mongos_conn["test"]["mcsharded"]
db_main.insert({"i": 0}, w=2)
db_main.insert({"i": 1000}, w=2)
self.assertEqual(self.shard1_conn["test"]["mcsharded"].count(), 1)
self.assertEqual(self.shard2_conn["test"]["mcsharded"].count(), 1)
# Case 1: only one primary goes down, shard1 in this case
kill_mongo_proc(self.shard1_prim_p, destroy=False)
# Wait for the secondary to be promoted
shard1_secondary_admin = self.shard1_secondary_conn["admin"]
assert_soon(
lambda: shard1_secondary_admin.command("isMaster")["ismaster"])
# Insert another document. This will be rolled back later
retry_until_ok(db_main.insert, {"i": 1})
db_secondary1 = self.shard1_secondary_conn["test"]["mcsharded"]
db_secondary2 = self.shard2_secondary_conn["test"]["mcsharded"]
self.assertEqual(db_secondary1.count(), 2)
# Wait for replication on the doc manager
# Note that both OplogThreads share the same doc manager
c = lambda: len(self.opman1.doc_managers[0]._search()) == 3
assert_soon(c, "not all writes were replicated to doc manager",
max_tries=120)
# Kill the new primary
kill_mongo_proc(self.shard1_scnd_p, destroy=False)
# Start both servers back up
restart_mongo_proc(self.shard1_prim_p)
primary_admin = self.shard1_conn["admin"]
c = lambda: primary_admin.command("isMaster")["ismaster"]
assert_soon(lambda: retry_until_ok(c))
restart_mongo_proc(self.shard1_scnd_p)
secondary_admin = self.shard1_secondary_conn["admin"]
c = lambda: secondary_admin.command("replSetGetStatus")["myState"] == 2
assert_soon(c)
query = {"i": {"$lt": 1000}}
assert_soon(lambda: retry_until_ok(db_main.find(query).count) > 0)
# Only first document should exist in MongoDB
self.assertEqual(db_main.find(query).count(), 1)
self.assertEqual(db_main.find_one(query)["i"], 0)
# Same should hold for the doc manager
docman_docs = [d for d in self.opman1.doc_managers[0]._search()
if d["i"] < 1000]
self.assertEqual(len(docman_docs), 1)
self.assertEqual(docman_docs[0]["i"], 0)
# Wait for previous rollback to complete
def rollback_done():
secondary1_count = retry_until_ok(db_secondary1.count)
secondary2_count = retry_until_ok(db_secondary2.count)
return (1, 1) == (secondary1_count, secondary2_count)
assert_soon(rollback_done,
"rollback never replicated to one or more secondaries")
##############################
# Case 2: Primaries on both shards go down
kill_mongo_proc(self.shard1_prim_p, destroy=False)
kill_mongo_proc(self.shard2_prim_p, destroy=False)
# Wait for the secondaries to be promoted
shard1_secondary_admin = self.shard1_secondary_conn["admin"]
shard2_secondary_admin = self.shard2_secondary_conn["admin"]
assert_soon(
lambda: shard1_secondary_admin.command("isMaster")["ismaster"])
assert_soon(
lambda: shard2_secondary_admin.command("isMaster")["ismaster"])
# Insert another document on each shard. These will be rolled back later
retry_until_ok(db_main.insert, {"i": 1})
self.assertEqual(db_secondary1.count(), 2)
retry_until_ok(db_main.insert, {"i": 1001})
self.assertEqual(db_secondary2.count(), 2)
# Wait for replication on the doc manager
c = lambda: len(self.opman1.doc_managers[0]._search()) == 4
assert_soon(c, "not all writes were replicated to doc manager")
# Kill the new primaries
kill_mongo_proc(self.shard1_scnd_p, destroy=False)
kill_mongo_proc(self.shard2_scnd_p, destroy=False)
# Start the servers back up...
# Shard 1
restart_mongo_proc(self.shard1_prim_p)
c = lambda: self.shard1_conn['admin'].command("isMaster")["ismaster"]
assert_soon(lambda: retry_until_ok(c))
restart_mongo_proc(self.shard1_scnd_p)
secondary_admin = self.shard1_secondary_conn["admin"]
c = lambda: secondary_admin.command("replSetGetStatus")["myState"] == 2
assert_soon(c)
# Shard 2
restart_mongo_proc(self.shard2_prim_p)
c = lambda: self.shard2_conn['admin'].command("isMaster")["ismaster"]
assert_soon(lambda: retry_until_ok(c))
restart_mongo_proc(self.shard2_scnd_p)
secondary_admin = self.shard2_secondary_conn["admin"]
c = lambda: secondary_admin.command("replSetGetStatus")["myState"] == 2
assert_soon(c)
# Wait for the shards to come online
assert_soon(lambda: retry_until_ok(db_main.find(query).count) > 0)
query2 = {"i": {"$gte": 1000}}
assert_soon(lambda: retry_until_ok(db_main.find(query2).count) > 0)
# Only first documents should exist in MongoDB
self.assertEqual(db_main.find(query).count(), 1)
self.assertEqual(db_main.find_one(query)["i"], 0)
self.assertEqual(db_main.find(query2).count(), 1)
self.assertEqual(db_main.find_one(query2)["i"], 1000)
# Same should hold for the doc manager
i_values = [d["i"] for d in self.opman1.doc_managers[0]._search()]
self.assertEqual(len(i_values), 2)
self.assertIn(0, i_values)
self.assertIn(1000, i_values)
def test_with_chunk_migration(self):
"""Test that DocManagers have proper state after both a successful
and an unsuccessful chunk migration
"""
# Start replicating to dummy doc managers
self.opman1.start()
self.opman2.start()
collection = self.mongos_conn["test"]["mcsharded"]
for i in range(1000):
collection.insert({"i": i + 500})
# Assert current state of the mongoverse
self.assertEqual(self.shard1_conn["test"]["mcsharded"].find().count(),
500)
self.assertEqual(self.shard2_conn["test"]["mcsharded"].find().count(),
500)
assert_soon(lambda: len(self.opman1.doc_managers[0]._search()) == 1000)
# Test successful chunk move from shard 1 to shard 2
self.mongos_conn["admin"].command(
"moveChunk",
"test.mcsharded",
find={"i": 1},
to="demo-set-1"
)
# doc manager should still have all docs
all_docs = self.opman1.doc_managers[0]._search()
self.assertEqual(len(all_docs), 1000)
for i, doc in enumerate(sorted(all_docs, key=lambda x: x["i"])):
self.assertEqual(doc["i"], i + 500)
# Mark the collection as "dropped". This will cause migration to fail.
self.mongos_conn["config"]["collections"].update(
{"_id": "test.mcsharded"},
{"$set": {"dropped": True}}
)
# Test unsuccessful chunk move from shard 2 to shard 1
def fail_to_move_chunk():
self.mongos_conn["admin"].command(
"moveChunk",
"test.mcsharded",
find={"i": 1},
to="demo-set-0"
)
self.assertRaises(pymongo.errors.OperationFailure, fail_to_move_chunk)
# doc manager should still have all docs
all_docs = self.opman1.doc_managers[0]._search()
self.assertEqual(len(all_docs), 1000)
for i, doc in enumerate(sorted(all_docs, key=lambda x: x["i"])):
self.assertEqual(doc["i"], i + 500)
def test_with_orphan_documents(self):
"""Test that DocManagers have proper state after a chunk migration
        that results in orphaned documents.
"""
# Start replicating to dummy doc managers
self.opman1.start()
self.opman2.start()
collection = self.mongos_conn["test"]["mcsharded"]
collection.insert({"i": i + 500} for i in range(1000))
# Assert current state of the mongoverse
self.assertEqual(self.shard1_conn["test"]["mcsharded"].find().count(),
500)
self.assertEqual(self.shard2_conn["test"]["mcsharded"].find().count(),
500)
assert_soon(lambda: len(self.opman1.doc_managers[0]._search()) == 1000)
# Stop replication using the 'rsSyncApplyStop' failpoint
self.shard1_conn.admin.command(
"configureFailPoint", "rsSyncApplyStop",
mode="alwaysOn"
)
# Move a chunk from shard2 to shard1
def move_chunk():
try:
self.mongos_conn["admin"].command(
"moveChunk",
"test.mcsharded",
find={"i": 1000},
to="demo-set-0"
)
except pymongo.errors.OperationFailure:
pass
# moveChunk will never complete, so use another thread to continue
mover = threading.Thread(target=move_chunk)
mover.start()
# wait for documents to start moving to shard 1
assert_soon(lambda: self.shard1_conn.test.mcsharded.count() > 500)
# Get opid for moveChunk command
operations = self.mongos_conn.test.current_op()
opid = None
for op in operations["inprog"]:
if op.get("query", {}).get("moveChunk"):
opid = op["opid"]
self.assertNotEqual(opid, None, "could not find moveChunk operation")
# Kill moveChunk with the opid
self.mongos_conn["test"]["$cmd.sys.killop"].find_one({"op": opid})
# Mongo Connector should not become confused by unsuccessful chunk move
docs = self.opman1.doc_managers[0]._search()
self.assertEqual(len(docs), 1000)
self.assertEqual(sorted(d["i"] for d in docs),
list(range(500, 1500)))
# cleanup
mover.join()
if __name__ == '__main__':
unittest.main()
|
|
import unittest
from test import support
from test.test_grammar import (VALID_UNDERSCORE_LITERALS,
INVALID_UNDERSCORE_LITERALS)
from random import random
from math import atan2, isnan, copysign
import operator
INF = float("inf")
NAN = float("nan")
# These tests ensure that complex math does the right thing
class ComplexTest(unittest.TestCase):
def assertAlmostEqual(self, a, b):
if isinstance(a, complex):
if isinstance(b, complex):
unittest.TestCase.assertAlmostEqual(self, a.real, b.real)
unittest.TestCase.assertAlmostEqual(self, a.imag, b.imag)
else:
unittest.TestCase.assertAlmostEqual(self, a.real, b)
unittest.TestCase.assertAlmostEqual(self, a.imag, 0.)
else:
if isinstance(b, complex):
unittest.TestCase.assertAlmostEqual(self, a, b.real)
unittest.TestCase.assertAlmostEqual(self, 0., b.imag)
else:
unittest.TestCase.assertAlmostEqual(self, a, b)
def assertCloseAbs(self, x, y, eps=1e-9):
"""Return true iff floats x and y "are close"."""
# put the one with larger magnitude second
if abs(x) > abs(y):
x, y = y, x
if y == 0:
return abs(x) < eps
if x == 0:
return abs(y) < eps
# check that relative difference < eps
self.assertTrue(abs((x-y)/y) < eps)
def assertFloatsAreIdentical(self, x, y):
"""assert that floats x and y are identical, in the sense that:
(1) both x and y are nans, or
(2) both x and y are infinities, with the same sign, or
(3) both x and y are zeros, with the same sign, or
(4) x and y are both finite and nonzero, and x == y
"""
msg = 'floats {!r} and {!r} are not identical'
if isnan(x) or isnan(y):
if isnan(x) and isnan(y):
return
elif x == y:
if x != 0.0:
return
# both zero; check that signs match
elif copysign(1.0, x) == copysign(1.0, y):
return
else:
msg += ': zeros have different signs'
self.fail(msg.format(x, y))
def assertClose(self, x, y, eps=1e-9):
"""Return true iff complexes x and y "are close"."""
self.assertCloseAbs(x.real, y.real, eps)
self.assertCloseAbs(x.imag, y.imag, eps)
def check_div(self, x, y):
"""Compute complex z=x*y, and check that z/x==y and z/y==x."""
z = x * y
if x != 0:
q = z / x
self.assertClose(q, y)
q = z.__truediv__(x)
self.assertClose(q, y)
if y != 0:
q = z / y
self.assertClose(q, x)
q = z.__truediv__(y)
self.assertClose(q, x)
def test_truediv(self):
simple_real = [float(i) for i in range(-5, 6)]
simple_complex = [complex(x, y) for x in simple_real for y in simple_real]
for x in simple_complex:
for y in simple_complex:
self.check_div(x, y)
# A naive complex division algorithm (such as in 2.0) is very prone to
# nonsense errors for these (overflows and underflows).
self.check_div(complex(1e200, 1e200), 1+0j)
self.check_div(complex(1e-200, 1e-200), 1+0j)
# Just for fun.
for i in range(100):
self.check_div(complex(random(), random()),
complex(random(), random()))
self.assertRaises(ZeroDivisionError, complex.__truediv__, 1+1j, 0+0j)
self.assertRaises(OverflowError, pow, 1e200+1j, 1e200+1j)
self.assertAlmostEqual(complex.__truediv__(2+0j, 1+1j), 1-1j)
self.assertRaises(ZeroDivisionError, complex.__truediv__, 1+1j, 0+0j)
for denom_real, denom_imag in [(0, NAN), (NAN, 0), (NAN, NAN)]:
z = complex(0, 0) / complex(denom_real, denom_imag)
self.assertTrue(isnan(z.real))
self.assertTrue(isnan(z.imag))
def test_floordiv(self):
self.assertRaises(TypeError, complex.__floordiv__, 3+0j, 1.5+0j)
self.assertRaises(TypeError, complex.__floordiv__, 3+0j, 0+0j)
def test_richcompare(self):
self.assertIs(complex.__eq__(1+1j, 1<<10000), False)
self.assertIs(complex.__lt__(1+1j, None), NotImplemented)
self.assertIs(complex.__eq__(1+1j, 1+1j), True)
self.assertIs(complex.__eq__(1+1j, 2+2j), False)
self.assertIs(complex.__ne__(1+1j, 1+1j), False)
self.assertIs(complex.__ne__(1+1j, 2+2j), True)
for i in range(1, 100):
f = i / 100.0
self.assertIs(complex.__eq__(f+0j, f), True)
self.assertIs(complex.__ne__(f+0j, f), False)
self.assertIs(complex.__eq__(complex(f, f), f), False)
self.assertIs(complex.__ne__(complex(f, f), f), True)
self.assertIs(complex.__lt__(1+1j, 2+2j), NotImplemented)
self.assertIs(complex.__le__(1+1j, 2+2j), NotImplemented)
self.assertIs(complex.__gt__(1+1j, 2+2j), NotImplemented)
self.assertIs(complex.__ge__(1+1j, 2+2j), NotImplemented)
self.assertRaises(TypeError, operator.lt, 1+1j, 2+2j)
self.assertRaises(TypeError, operator.le, 1+1j, 2+2j)
self.assertRaises(TypeError, operator.gt, 1+1j, 2+2j)
self.assertRaises(TypeError, operator.ge, 1+1j, 2+2j)
self.assertIs(operator.eq(1+1j, 1+1j), True)
self.assertIs(operator.eq(1+1j, 2+2j), False)
self.assertIs(operator.ne(1+1j, 1+1j), False)
self.assertIs(operator.ne(1+1j, 2+2j), True)
def test_richcompare_boundaries(self):
def check(n, deltas, is_equal, imag = 0.0):
for delta in deltas:
i = n + delta
z = complex(i, imag)
self.assertIs(complex.__eq__(z, i), is_equal(delta))
self.assertIs(complex.__ne__(z, i), not is_equal(delta))
# For IEEE-754 doubles the following should hold:
# x in [2 ** (52 + i), 2 ** (53 + i + 1)] -> x mod 2 ** i == 0
# where the interval is representable, of course.
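        # Worked instance (added comment): for i = 1 the spacing of doubles in
        # [2**53, 2**54] is 2, so complex(2**53 + delta) compares equal to the
        # exact integer 2**53 + delta only when delta is even.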
for i in range(1, 10):
pow = 52 + i
mult = 2 ** i
check(2 ** pow, range(1, 101), lambda delta: delta % mult == 0)
check(2 ** pow, range(1, 101), lambda delta: False, float(i))
check(2 ** 53, range(-100, 0), lambda delta: True)
def test_mod(self):
# % is no longer supported on complex numbers
self.assertRaises(TypeError, (1+1j).__mod__, 0+0j)
self.assertRaises(TypeError, lambda: (3.33+4.43j) % 0)
self.assertRaises(TypeError, (1+1j).__mod__, 4.3j)
def test_divmod(self):
self.assertRaises(TypeError, divmod, 1+1j, 1+0j)
self.assertRaises(TypeError, divmod, 1+1j, 0+0j)
def test_pow(self):
self.assertAlmostEqual(pow(1+1j, 0+0j), 1.0)
self.assertAlmostEqual(pow(0+0j, 2+0j), 0.0)
self.assertRaises(ZeroDivisionError, pow, 0+0j, 1j)
self.assertAlmostEqual(pow(1j, -1), 1/1j)
self.assertAlmostEqual(pow(1j, 200), 1)
self.assertRaises(ValueError, pow, 1+1j, 1+1j, 1+1j)
a = 3.33+4.43j
self.assertEqual(a ** 0j, 1)
self.assertEqual(a ** 0.+0.j, 1)
self.assertEqual(3j ** 0j, 1)
self.assertEqual(3j ** 0, 1)
try:
0j ** a
except ZeroDivisionError:
pass
else:
self.fail("should fail 0.0 to negative or complex power")
try:
0j ** (3-2j)
except ZeroDivisionError:
pass
else:
self.fail("should fail 0.0 to negative or complex power")
# The following is used to exercise certain code paths
self.assertEqual(a ** 105, a ** 105)
self.assertEqual(a ** -105, a ** -105)
self.assertEqual(a ** -30, a ** -30)
self.assertEqual(0.0j ** 0, 1)
b = 5.1+2.3j
self.assertRaises(ValueError, pow, a, b, 0)
def test_boolcontext(self):
for i in range(100):
self.assertTrue(complex(random() + 1e-6, random() + 1e-6))
self.assertTrue(not complex(0.0, 0.0))
def test_conjugate(self):
self.assertClose(complex(5.3, 9.8).conjugate(), 5.3-9.8j)
def test_constructor(self):
class OS:
def __init__(self, value): self.value = value
def __complex__(self): return self.value
class NS(object):
def __init__(self, value): self.value = value
def __complex__(self): return self.value
self.assertEqual(complex(OS(1+10j)), 1+10j)
self.assertEqual(complex(NS(1+10j)), 1+10j)
self.assertRaises(TypeError, complex, OS(None))
self.assertRaises(TypeError, complex, NS(None))
self.assertRaises(TypeError, complex, {})
self.assertRaises(TypeError, complex, NS(1.5))
self.assertRaises(TypeError, complex, NS(1))
self.assertAlmostEqual(complex("1+10j"), 1+10j)
self.assertAlmostEqual(complex(10), 10+0j)
self.assertAlmostEqual(complex(10.0), 10+0j)
self.assertAlmostEqual(complex(10), 10+0j)
self.assertAlmostEqual(complex(10+0j), 10+0j)
self.assertAlmostEqual(complex(1,10), 1+10j)
self.assertAlmostEqual(complex(1,10), 1+10j)
self.assertAlmostEqual(complex(1,10.0), 1+10j)
self.assertAlmostEqual(complex(1,10), 1+10j)
self.assertAlmostEqual(complex(1,10), 1+10j)
self.assertAlmostEqual(complex(1,10.0), 1+10j)
self.assertAlmostEqual(complex(1.0,10), 1+10j)
self.assertAlmostEqual(complex(1.0,10), 1+10j)
self.assertAlmostEqual(complex(1.0,10.0), 1+10j)
self.assertAlmostEqual(complex(3.14+0j), 3.14+0j)
self.assertAlmostEqual(complex(3.14), 3.14+0j)
self.assertAlmostEqual(complex(314), 314.0+0j)
self.assertAlmostEqual(complex(314), 314.0+0j)
self.assertAlmostEqual(complex(3.14+0j, 0j), 3.14+0j)
self.assertAlmostEqual(complex(3.14, 0.0), 3.14+0j)
self.assertAlmostEqual(complex(314, 0), 314.0+0j)
self.assertAlmostEqual(complex(314, 0), 314.0+0j)
self.assertAlmostEqual(complex(0j, 3.14j), -3.14+0j)
self.assertAlmostEqual(complex(0.0, 3.14j), -3.14+0j)
self.assertAlmostEqual(complex(0j, 3.14), 3.14j)
self.assertAlmostEqual(complex(0.0, 3.14), 3.14j)
self.assertAlmostEqual(complex("1"), 1+0j)
self.assertAlmostEqual(complex("1j"), 1j)
self.assertAlmostEqual(complex(), 0)
self.assertAlmostEqual(complex("-1"), -1)
self.assertAlmostEqual(complex("+1"), +1)
self.assertAlmostEqual(complex("(1+2j)"), 1+2j)
self.assertAlmostEqual(complex("(1.3+2.2j)"), 1.3+2.2j)
self.assertAlmostEqual(complex("3.14+1J"), 3.14+1j)
self.assertAlmostEqual(complex(" ( +3.14-6J )"), 3.14-6j)
self.assertAlmostEqual(complex(" ( +3.14-J )"), 3.14-1j)
self.assertAlmostEqual(complex(" ( +3.14+j )"), 3.14+1j)
self.assertAlmostEqual(complex("J"), 1j)
self.assertAlmostEqual(complex("( j )"), 1j)
self.assertAlmostEqual(complex("+J"), 1j)
self.assertAlmostEqual(complex("( -j)"), -1j)
self.assertAlmostEqual(complex('1e-500'), 0.0 + 0.0j)
self.assertAlmostEqual(complex('-1e-500j'), 0.0 - 0.0j)
self.assertAlmostEqual(complex('-1e-500+1e-500j'), -0.0 + 0.0j)
class complex2(complex): pass
self.assertAlmostEqual(complex(complex2(1+1j)), 1+1j)
self.assertAlmostEqual(complex(real=17, imag=23), 17+23j)
self.assertAlmostEqual(complex(real=17+23j), 17+23j)
self.assertAlmostEqual(complex(real=17+23j, imag=23), 17+46j)
self.assertAlmostEqual(complex(real=1+2j, imag=3+4j), -3+5j)
# check that the sign of a zero in the real or imaginary part
# is preserved when constructing from two floats. (These checks
# are harmless on systems without support for signed zeros.)
def split_zeros(x):
"""Function that produces different results for 0. and -0."""
return atan2(x, -1.)
self.assertEqual(split_zeros(complex(1., 0.).imag), split_zeros(0.))
self.assertEqual(split_zeros(complex(1., -0.).imag), split_zeros(-0.))
self.assertEqual(split_zeros(complex(0., 1.).real), split_zeros(0.))
self.assertEqual(split_zeros(complex(-0., 1.).real), split_zeros(-0.))
c = 3.14 + 1j
self.assertTrue(complex(c) is c)
del c
self.assertRaises(TypeError, complex, "1", "1")
self.assertRaises(TypeError, complex, 1, "1")
# SF bug 543840: complex(string) accepts strings with \0
# Fixed in 2.3.
self.assertRaises(ValueError, complex, '1+1j\0j')
self.assertRaises(TypeError, int, 5+3j)
self.assertRaises(TypeError, int, 5+3j)
self.assertRaises(TypeError, float, 5+3j)
self.assertRaises(ValueError, complex, "")
self.assertRaises(TypeError, complex, None)
self.assertRaisesRegex(TypeError, "not 'NoneType'", complex, None)
self.assertRaises(ValueError, complex, "\0")
self.assertRaises(ValueError, complex, "3\09")
self.assertRaises(TypeError, complex, "1", "2")
self.assertRaises(TypeError, complex, "1", 42)
self.assertRaises(TypeError, complex, 1, "2")
self.assertRaises(ValueError, complex, "1+")
self.assertRaises(ValueError, complex, "1+1j+1j")
self.assertRaises(ValueError, complex, "--")
self.assertRaises(ValueError, complex, "(1+2j")
self.assertRaises(ValueError, complex, "1+2j)")
self.assertRaises(ValueError, complex, "1+(2j)")
self.assertRaises(ValueError, complex, "(1+2j)123")
self.assertRaises(ValueError, complex, "x")
self.assertRaises(ValueError, complex, "1j+2")
self.assertRaises(ValueError, complex, "1e1ej")
self.assertRaises(ValueError, complex, "1e++1ej")
self.assertRaises(ValueError, complex, ")1+2j(")
self.assertRaisesRegex(
TypeError,
"first argument must be a string or a number, not 'dict'",
complex, {1:2}, 1)
self.assertRaisesRegex(
TypeError,
"second argument must be a number, not 'dict'",
complex, 1, {1:2})
# the following three are accepted by Python 2.6
self.assertRaises(ValueError, complex, "1..1j")
self.assertRaises(ValueError, complex, "1.11.1j")
self.assertRaises(ValueError, complex, "1e1.1j")
# check that complex accepts long unicode strings
self.assertEqual(type(complex("1"*500)), complex)
# check whitespace processing
self.assertEqual(complex('\N{EM SPACE}(\N{EN SPACE}1+1j ) '), 1+1j)
# Invalid unicode string
# See bpo-34087
self.assertRaises(ValueError, complex, '\u3053\u3093\u306b\u3061\u306f')
class EvilExc(Exception):
pass
class evilcomplex:
def __complex__(self):
raise EvilExc
self.assertRaises(EvilExc, complex, evilcomplex())
class float2:
def __init__(self, value):
self.value = value
def __float__(self):
return self.value
self.assertAlmostEqual(complex(float2(42.)), 42)
self.assertAlmostEqual(complex(real=float2(17.), imag=float2(23.)), 17+23j)
self.assertRaises(TypeError, complex, float2(None))
class MyIndex:
def __init__(self, value):
self.value = value
def __index__(self):
return self.value
self.assertAlmostEqual(complex(MyIndex(42)), 42.0+0.0j)
self.assertAlmostEqual(complex(123, MyIndex(42)), 123.0+42.0j)
self.assertRaises(OverflowError, complex, MyIndex(2**2000))
self.assertRaises(OverflowError, complex, 123, MyIndex(2**2000))
class MyInt:
def __int__(self):
return 42
self.assertRaises(TypeError, complex, MyInt())
self.assertRaises(TypeError, complex, 123, MyInt())
class complex0(complex):
"""Test usage of __complex__() when inheriting from 'complex'"""
def __complex__(self):
return 42j
class complex1(complex):
"""Test usage of __complex__() with a __new__() method"""
def __new__(self, value=0j):
return complex.__new__(self, 2*value)
def __complex__(self):
return self
class complex2(complex):
"""Make sure that __complex__() calls fail if anything other than a
complex is returned"""
def __complex__(self):
return None
self.assertEqual(complex(complex0(1j)), 42j)
with self.assertWarns(DeprecationWarning):
self.assertEqual(complex(complex1(1j)), 2j)
self.assertRaises(TypeError, complex, complex2(1j))
@support.requires_IEEE_754
def test_constructor_special_numbers(self):
class complex2(complex):
pass
for x in 0.0, -0.0, INF, -INF, NAN:
for y in 0.0, -0.0, INF, -INF, NAN:
with self.subTest(x=x, y=y):
z = complex(x, y)
self.assertFloatsAreIdentical(z.real, x)
self.assertFloatsAreIdentical(z.imag, y)
z = complex2(x, y)
self.assertIs(type(z), complex2)
self.assertFloatsAreIdentical(z.real, x)
self.assertFloatsAreIdentical(z.imag, y)
z = complex(complex2(x, y))
self.assertIs(type(z), complex)
self.assertFloatsAreIdentical(z.real, x)
self.assertFloatsAreIdentical(z.imag, y)
z = complex2(complex(x, y))
self.assertIs(type(z), complex2)
self.assertFloatsAreIdentical(z.real, x)
self.assertFloatsAreIdentical(z.imag, y)
def test_underscores(self):
# check underscores
for lit in VALID_UNDERSCORE_LITERALS:
if not any(ch in lit for ch in 'xXoObB'):
self.assertEqual(complex(lit), eval(lit))
self.assertEqual(complex(lit), complex(lit.replace('_', '')))
for lit in INVALID_UNDERSCORE_LITERALS:
if lit in ('0_7', '09_99'): # octals are not recognized here
continue
if not any(ch in lit for ch in 'xXoObB'):
self.assertRaises(ValueError, complex, lit)
def test_hash(self):
for x in range(-30, 30):
self.assertEqual(hash(x), hash(complex(x, 0)))
x /= 3.0 # now check against floating point
self.assertEqual(hash(x), hash(complex(x, 0.)))
def test_abs(self):
nums = [complex(x/3., y/7.) for x in range(-9,9) for y in range(-9,9)]
for num in nums:
self.assertAlmostEqual((num.real**2 + num.imag**2) ** 0.5, abs(num))
def test_repr_str(self):
def test(v, expected, test_fn=self.assertEqual):
test_fn(repr(v), expected)
test_fn(str(v), expected)
test(1+6j, '(1+6j)')
test(1-6j, '(1-6j)')
test(-(1+0j), '(-1+-0j)', test_fn=self.assertNotEqual)
test(complex(1., INF), "(1+infj)")
test(complex(1., -INF), "(1-infj)")
test(complex(INF, 1), "(inf+1j)")
test(complex(-INF, INF), "(-inf+infj)")
test(complex(NAN, 1), "(nan+1j)")
test(complex(1, NAN), "(1+nanj)")
test(complex(NAN, NAN), "(nan+nanj)")
test(complex(0, INF), "infj")
test(complex(0, -INF), "-infj")
test(complex(0, NAN), "nanj")
self.assertEqual(1-6j,complex(repr(1-6j)))
self.assertEqual(1+6j,complex(repr(1+6j)))
self.assertEqual(-6j,complex(repr(-6j)))
self.assertEqual(6j,complex(repr(6j)))
@support.requires_IEEE_754
def test_negative_zero_repr_str(self):
def test(v, expected, test_fn=self.assertEqual):
test_fn(repr(v), expected)
test_fn(str(v), expected)
test(complex(0., 1.), "1j")
test(complex(-0., 1.), "(-0+1j)")
test(complex(0., -1.), "-1j")
test(complex(-0., -1.), "(-0-1j)")
test(complex(0., 0.), "0j")
test(complex(0., -0.), "-0j")
test(complex(-0., 0.), "(-0+0j)")
test(complex(-0., -0.), "(-0-0j)")
def test_neg(self):
self.assertEqual(-(1+6j), -1-6j)
def test_file(self):
a = 3.33+4.43j
b = 5.1+2.3j
fo = None
try:
fo = open(support.TESTFN, "w")
print(a, b, file=fo)
fo.close()
fo = open(support.TESTFN, "r")
self.assertEqual(fo.read(), ("%s %s\n" % (a, b)))
finally:
if (fo is not None) and (not fo.closed):
fo.close()
support.unlink(support.TESTFN)
def test_getnewargs(self):
self.assertEqual((1+2j).__getnewargs__(), (1.0, 2.0))
self.assertEqual((1-2j).__getnewargs__(), (1.0, -2.0))
self.assertEqual((2j).__getnewargs__(), (0.0, 2.0))
self.assertEqual((-0j).__getnewargs__(), (0.0, -0.0))
self.assertEqual(complex(0, INF).__getnewargs__(), (0.0, INF))
self.assertEqual(complex(INF, 0).__getnewargs__(), (INF, 0.0))
@support.requires_IEEE_754
def test_plus_minus_0j(self):
# test that -0j and 0j literals are not identified
z1, z2 = 0j, -0j
self.assertEqual(atan2(z1.imag, -1.), atan2(0., -1.))
self.assertEqual(atan2(z2.imag, -1.), atan2(-0., -1.))
@support.requires_IEEE_754
def test_negated_imaginary_literal(self):
z0 = -0j
z1 = -7j
z2 = -1e1000j
# Note: In versions of Python < 3.2, a negated imaginary literal
# accidentally ended up with real part 0.0 instead of -0.0, thanks to a
# modification during CST -> AST translation (see issue #9011). That's
# fixed in Python 3.2.
self.assertFloatsAreIdentical(z0.real, -0.0)
self.assertFloatsAreIdentical(z0.imag, -0.0)
self.assertFloatsAreIdentical(z1.real, -0.0)
self.assertFloatsAreIdentical(z1.imag, -7.0)
self.assertFloatsAreIdentical(z2.real, -0.0)
self.assertFloatsAreIdentical(z2.imag, -INF)
@support.requires_IEEE_754
def test_overflow(self):
self.assertEqual(complex("1e500"), complex(INF, 0.0))
self.assertEqual(complex("-1e500j"), complex(0.0, -INF))
self.assertEqual(complex("-1e500+1.8e308j"), complex(-INF, INF))
@support.requires_IEEE_754
def test_repr_roundtrip(self):
vals = [0.0, 1e-500, 1e-315, 1e-200, 0.0123, 3.1415, 1e50, INF, NAN]
vals += [-v for v in vals]
# complex(repr(z)) should recover z exactly, even for complex
# numbers involving an infinity, nan, or negative zero
for x in vals:
for y in vals:
z = complex(x, y)
roundtrip = complex(repr(z))
self.assertFloatsAreIdentical(z.real, roundtrip.real)
self.assertFloatsAreIdentical(z.imag, roundtrip.imag)
# if we predefine some constants, then eval(repr(z)) should
# also work, except that it might change the sign of zeros
inf, nan = float('inf'), float('nan')
infj, nanj = complex(0.0, inf), complex(0.0, nan)
for x in vals:
for y in vals:
z = complex(x, y)
roundtrip = eval(repr(z))
# adding 0.0 has no effect beside changing -0.0 to 0.0
self.assertFloatsAreIdentical(0.0 + z.real,
0.0 + roundtrip.real)
self.assertFloatsAreIdentical(0.0 + z.imag,
0.0 + roundtrip.imag)
def test_format(self):
# empty format string is same as str()
self.assertEqual(format(1+3j, ''), str(1+3j))
self.assertEqual(format(1.5+3.5j, ''), str(1.5+3.5j))
self.assertEqual(format(3j, ''), str(3j))
self.assertEqual(format(3.2j, ''), str(3.2j))
self.assertEqual(format(3+0j, ''), str(3+0j))
self.assertEqual(format(3.2+0j, ''), str(3.2+0j))
# empty presentation type should still be analogous to str,
# even when format string is nonempty (issue #5920).
self.assertEqual(format(3.2+0j, '-'), str(3.2+0j))
self.assertEqual(format(3.2+0j, '<'), str(3.2+0j))
z = 4/7. - 100j/7.
self.assertEqual(format(z, ''), str(z))
self.assertEqual(format(z, '-'), str(z))
self.assertEqual(format(z, '<'), str(z))
self.assertEqual(format(z, '10'), str(z))
z = complex(0.0, 3.0)
self.assertEqual(format(z, ''), str(z))
self.assertEqual(format(z, '-'), str(z))
self.assertEqual(format(z, '<'), str(z))
self.assertEqual(format(z, '2'), str(z))
z = complex(-0.0, 2.0)
self.assertEqual(format(z, ''), str(z))
self.assertEqual(format(z, '-'), str(z))
self.assertEqual(format(z, '<'), str(z))
self.assertEqual(format(z, '3'), str(z))
self.assertEqual(format(1+3j, 'g'), '1+3j')
self.assertEqual(format(3j, 'g'), '0+3j')
self.assertEqual(format(1.5+3.5j, 'g'), '1.5+3.5j')
self.assertEqual(format(1.5+3.5j, '+g'), '+1.5+3.5j')
self.assertEqual(format(1.5-3.5j, '+g'), '+1.5-3.5j')
self.assertEqual(format(1.5-3.5j, '-g'), '1.5-3.5j')
self.assertEqual(format(1.5+3.5j, ' g'), ' 1.5+3.5j')
self.assertEqual(format(1.5-3.5j, ' g'), ' 1.5-3.5j')
self.assertEqual(format(-1.5+3.5j, ' g'), '-1.5+3.5j')
self.assertEqual(format(-1.5-3.5j, ' g'), '-1.5-3.5j')
self.assertEqual(format(-1.5-3.5e-20j, 'g'), '-1.5-3.5e-20j')
self.assertEqual(format(-1.5-3.5j, 'f'), '-1.500000-3.500000j')
self.assertEqual(format(-1.5-3.5j, 'F'), '-1.500000-3.500000j')
self.assertEqual(format(-1.5-3.5j, 'e'), '-1.500000e+00-3.500000e+00j')
self.assertEqual(format(-1.5-3.5j, '.2e'), '-1.50e+00-3.50e+00j')
self.assertEqual(format(-1.5-3.5j, '.2E'), '-1.50E+00-3.50E+00j')
self.assertEqual(format(-1.5e10-3.5e5j, '.2G'), '-1.5E+10-3.5E+05j')
self.assertEqual(format(1.5+3j, '<20g'), '1.5+3j ')
self.assertEqual(format(1.5+3j, '*<20g'), '1.5+3j**************')
self.assertEqual(format(1.5+3j, '>20g'), ' 1.5+3j')
self.assertEqual(format(1.5+3j, '^20g'), ' 1.5+3j ')
self.assertEqual(format(1.5+3j, '<20'), '(1.5+3j) ')
self.assertEqual(format(1.5+3j, '>20'), ' (1.5+3j)')
self.assertEqual(format(1.5+3j, '^20'), ' (1.5+3j) ')
self.assertEqual(format(1.123-3.123j, '^20.2'), ' (1.1-3.1j) ')
self.assertEqual(format(1.5+3j, '20.2f'), ' 1.50+3.00j')
self.assertEqual(format(1.5+3j, '>20.2f'), ' 1.50+3.00j')
self.assertEqual(format(1.5+3j, '<20.2f'), '1.50+3.00j ')
self.assertEqual(format(1.5e20+3j, '<20.2f'), '150000000000000000000.00+3.00j')
self.assertEqual(format(1.5e20+3j, '>40.2f'), ' 150000000000000000000.00+3.00j')
self.assertEqual(format(1.5e20+3j, '^40,.2f'), ' 150,000,000,000,000,000,000.00+3.00j ')
self.assertEqual(format(1.5e21+3j, '^40,.2f'), ' 1,500,000,000,000,000,000,000.00+3.00j ')
self.assertEqual(format(1.5e21+3000j, ',.2f'), '1,500,000,000,000,000,000,000.00+3,000.00j')
# Issue 7094: Alternate formatting (specified by #)
self.assertEqual(format(1+1j, '.0e'), '1e+00+1e+00j')
self.assertEqual(format(1+1j, '#.0e'), '1.e+00+1.e+00j')
self.assertEqual(format(1+1j, '.0f'), '1+1j')
self.assertEqual(format(1+1j, '#.0f'), '1.+1.j')
self.assertEqual(format(1.1+1.1j, 'g'), '1.1+1.1j')
self.assertEqual(format(1.1+1.1j, '#g'), '1.10000+1.10000j')
# Alternate doesn't make a difference for these, they format the same with or without it
self.assertEqual(format(1+1j, '.1e'), '1.0e+00+1.0e+00j')
self.assertEqual(format(1+1j, '#.1e'), '1.0e+00+1.0e+00j')
self.assertEqual(format(1+1j, '.1f'), '1.0+1.0j')
self.assertEqual(format(1+1j, '#.1f'), '1.0+1.0j')
# Misc. other alternate tests
self.assertEqual(format((-1.5+0.5j), '#f'), '-1.500000+0.500000j')
self.assertEqual(format((-1.5+0.5j), '#.0f'), '-2.+0.j')
self.assertEqual(format((-1.5+0.5j), '#e'), '-1.500000e+00+5.000000e-01j')
self.assertEqual(format((-1.5+0.5j), '#.0e'), '-2.e+00+5.e-01j')
self.assertEqual(format((-1.5+0.5j), '#g'), '-1.50000+0.500000j')
self.assertEqual(format((-1.5+0.5j), '.0g'), '-2+0.5j')
self.assertEqual(format((-1.5+0.5j), '#.0g'), '-2.+0.5j')
# zero padding is invalid
self.assertRaises(ValueError, (1.5+0.5j).__format__, '010f')
# '=' alignment is invalid
self.assertRaises(ValueError, (1.5+3j).__format__, '=20')
# integer presentation types are an error
for t in 'bcdoxX':
self.assertRaises(ValueError, (1.5+0.5j).__format__, t)
# make sure everything works in ''.format()
self.assertEqual('*{0:.3f}*'.format(3.14159+2.71828j), '*3.142+2.718j*')
# issue 3382
self.assertEqual(format(complex(NAN, NAN), 'f'), 'nan+nanj')
self.assertEqual(format(complex(1, NAN), 'f'), '1.000000+nanj')
self.assertEqual(format(complex(NAN, 1), 'f'), 'nan+1.000000j')
self.assertEqual(format(complex(NAN, -1), 'f'), 'nan-1.000000j')
self.assertEqual(format(complex(NAN, NAN), 'F'), 'NAN+NANj')
self.assertEqual(format(complex(1, NAN), 'F'), '1.000000+NANj')
self.assertEqual(format(complex(NAN, 1), 'F'), 'NAN+1.000000j')
self.assertEqual(format(complex(NAN, -1), 'F'), 'NAN-1.000000j')
self.assertEqual(format(complex(INF, INF), 'f'), 'inf+infj')
self.assertEqual(format(complex(1, INF), 'f'), '1.000000+infj')
self.assertEqual(format(complex(INF, 1), 'f'), 'inf+1.000000j')
self.assertEqual(format(complex(INF, -1), 'f'), 'inf-1.000000j')
self.assertEqual(format(complex(INF, INF), 'F'), 'INF+INFj')
self.assertEqual(format(complex(1, INF), 'F'), '1.000000+INFj')
self.assertEqual(format(complex(INF, 1), 'F'), 'INF+1.000000j')
self.assertEqual(format(complex(INF, -1), 'F'), 'INF-1.000000j')
def test_main():
support.run_unittest(ComplexTest)
if __name__ == "__main__":
test_main()
|
|
# -*- coding: utf-8 -*-
"""
Stock handling module for lpm
All stock entries are connected to a revisionless part number. The revision number is not considered since
lpm considers revisions to be compatible and thus interchangeable.
The database stores the current count and optionally a BOM for a component.
When the count is updated the BOM rules are considered and the counts of child parts are updated accordingly.
The rules of access are as follows:
- anyone may view the stock
- stock_admin users may additionally:
- add new items to the stock
- correct the stock numbers
- set the BOM rules
Note: The count is not validated, i.e. it may become negative.
:copyright: (c) 2016 Hannes Friederich.
:license: BSD, see LICENSE for more details.
"""
from datetime import datetime
from flask import Blueprint, current_app, request, redirect, render_template, url_for, flash
from flask_login import login_required, current_user
from flask_wtf import Form
from wtforms import IntegerField, StringField
from wtforms.validators import InputRequired
from lpm.login import role_required
from lpm.utils import extract_errors
from lpm.components import ensure_exists
from lpm.xls_files import FileForm, read_xls, save_to_tmp, extract_filepath
bp = Blueprint('stock', __name__)
class AddSingleForm(Form):
quantity = IntegerField(label='Added Quantity', validators=[InputRequired()])
batch = StringField(label='Batch Name')
class CorrectSingleForm(Form):
quantity = IntegerField(label='New Quantity', validators=[InputRequired()])
comment = StringField(label='Comment')
@bp.route('/')
@login_required
def overview():
"""
Shows the overview page containing all components
"""
objects = list(current_app.mongo.db.stock.find())
component_records = current_app.mongo.db.components.find(projection=['name'])
names = dict((record['_id'], record['name']) for record in component_records)
for obj in objects:
obj['name'] = names.get(obj['_id'])
return render_template('stock/overview.html', data=objects)
@bp.route('/<partno>')
@login_required
def details(partno):
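    """
    Shows the detail page for a single part number: current quantity, BOM, history and batches
    """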
obj = current_app.mongo.db.stock.find_one_or_404(partno)
component_records = current_app.mongo.db.components.find(projection=['name'])
names = dict((record['_id'], record['name']) for record in component_records)
obj['name'] = names[partno]
obj['history'] = list(current_app.mongo.db.stock_history.find({'partno': partno}))
for entry in obj.get('bom', list()):
entry['name'] = names.get(entry.get('partno'))
batches = list(current_app.mongo.db.stock_batches.find({'partno': partno}))
return render_template('stock/details.html', data=obj, batches=batches)
@bp.route('/add', methods=['GET', 'POST'])
@role_required('stock_admin')
def add():
"""
Imports data from the uploaded file and updates the stock values including BOM results
"""
form = FileForm(request.form)
    # Flask-WTF is NOT used for the file handling, since its file upload handling seems broken.
if request.method == 'POST' and form.validate_on_submit():
# show the validation page if a file is uploaded
# else process the tmpname parameter
if request.files.get('file'):
try:
save_to_tmp(form)
success, headers, values = _import_file(extract_filepath(form))
# Also add the current quantity to the review list
for item in values:
current_quantity = 0
obj = current_app.mongo.db.stock.find_one(item.get('partno'))
if obj:
current_quantity = obj.get('quantity')
item['current_quantity'] = current_quantity
item['new_quantity'] = item.get('quantity') + current_quantity
headers.append('current_quantity')
headers.append('new_quantity')
return render_template('stock/validate_form.html',
form=form,
headers=headers,
data=values,
title='Verify Input Data',
action='Add to Stock')
except Exception as e:
flash(e, 'error')
elif form.tmpname.data:
success, headers, values = _import_file(extract_filepath(form))
if success:
try:
for v in values:
partno = v.get('partno')
quantity = v.get('quantity')
batchname = v.get('batch')
comment = v.get('comment', 'added to stock')
update_counts(partno, quantity, batchname, comment)
flash('stock import successful', 'success')
return redirect(url_for('stock.overview'))
except Exception as e:
flash(e, 'error')
return render_template('stock/validate_form.html',
form=form,
headers=headers,
data=values,
title='Verify Input Data',
action='Add to Stock')
extract_errors(form)
return render_template('stock/import_form.html', form=form, title='Add Stock Items')
@bp.route('/correct', methods=['GET', 'POST'])
@role_required('stock_admin')
def correct():
"""
Imports data from the uploaded file and corrects the corresponding stock values
"""
form = FileForm(request.form)
warning = dict(category='warning',
                   message='You are about to override the existing stock! Are you sure you want to continue?')
    # Flask-WTF is NOT used for the file handling, since its file upload handling seems broken.
if request.method == 'POST' and form.validate_on_submit():
# show the validation page if a file is uploaded
# else process the tmpname parameter
if request.files.get('file'):
try:
save_to_tmp(form)
success, headers, values = _import_file(extract_filepath(form))
for item in values:
current_quantity = 0
obj = current_app.mongo.db.stock.find_one(item.get('partno'))
if obj:
current_quantity = obj.get('quantity')
item['current_quantity'] = current_quantity
headers.append('current_quantity')
return render_template('stock/validate_form.html',
form=form,
headers=headers,
data=values,
title='Verify Correction Data',
action='Apply Correction',
warning=warning)
except Exception as e:
flash(e, 'error')
elif form.tmpname.data:
success, headers, values = _import_file(extract_filepath(form))
if success:
try:
for v in values:
partno = v.get('partno')
quantity = v.get('quantity')
comment = v.get('comment', 'manual correction')
correct_counts(partno, quantity, comment)
flash('stock correction successful', 'success')
return redirect(url_for('stock.overview'))
except Exception as e:
flash(e, 'error')
return render_template('stock/validate_form.html',
form=form,
headers=headers,
data=values,
title='Verify Correction Data',
action='Apply Correction',
warning=warning)
extract_errors(form)
return render_template('stock/import_form.html', form=form, title='Correct Stock Items', warning=warning)
@bp.route('/update-bom', methods=['GET', 'POST'])
@role_required('stock_admin')
def update_bom():
"""
    Imports BOM data from the uploaded file and updates the BOM rules of the part given in the first row
"""
form = FileForm(request.form)
    # Flask-WTF is NOT used for the file handling, since its file upload handling seems broken.
if request.method == 'POST' and form.validate_on_submit():
# show the validation page if a file is uploaded
# else process the tmpname parameter
if request.files.get('file'): # a file was uploaded
try:
save_to_tmp(form)
success, headers, values = _import_file(extract_filepath(form))
target_partno = 'unknown'
if len(values) > 0:
target_partno = values.pop(0).get('partno')
return render_template('stock/validate_form.html',
form=form,
headers=headers,
data=values,
title='Verify BOM Data for '+target_partno,
action='Update BOM')
except Exception as e:
flash(e, 'error')
elif form.tmpname.data:
success, headers, values = _import_file(extract_filepath(form))
target_partno = 'unknown'
if len(values) > 0:
target_partno = values.pop(0).get('partno')
if success:
try:
set_bom(target_partno, values)
flash('BOM update successful', 'success')
return redirect(url_for('stock.overview'))
except Exception as e:
flash(e, 'error')
return render_template('stock/validate_form.html',
form=form,
headers=headers,
data=values,
title='Verify BOM Data for '+target_partno,
action='Update BOM')
extract_errors(form)
return render_template('stock/import_form.html', form=form, title='Update BOM')
@bp.route('/<partno>/add-single', methods=['GET', 'POST'])
@role_required('stock_admin')
def add_single(partno):
"""
Adds the given quantity to the stock
"""
obj = current_app.mongo.db.components.find_one_or_404(partno, projection=['name'])
form = AddSingleForm(request.form)
if request.method == 'POST' and form.validate_on_submit():
try:
update_counts(partno, form.quantity.data, form.batch.data, 'added to stock')
flash('Stock addition successful', 'success')
return redirect(url_for('stock.details', partno=partno))
except Exception as e:
flash('stock operation failed (%s), please contact the administrator' % e, 'error')
extract_errors(form)
name = obj.get('name')
return render_template('stock/add_single.html', form=form, partno=partno, name=name)
@bp.route('/<partno>/correct-single', methods=['GET', 'POST'])
@role_required('stock_admin')
def correct_single(partno):
"""
Corrects the stock and sets the new quantity
"""
component = current_app.mongo.db.components.find_one_or_404(partno, projection=['name'])
form = CorrectSingleForm(request.form)
if request.method == 'POST' and form.validate_on_submit():
try:
message = 'stock correction'
if form.comment.data:
message += ' (%s)' % form.comment.data
correct_counts(partno, form.quantity.data, message)
flash('Stock correction successful', 'success')
return redirect(url_for('stock.details', partno=partno))
except Exception as e:
flash('stock operation failed (%s), please contact the administrator' % e, 'error')
extract_errors(form)
name = component.get('name')
obj = current_app.mongo.db.stock.find_one(partno)
current_quantity = obj.get('quantity') if obj is not None else 0
return render_template('stock/correct_single.html',
form=form, partno=partno, name=name,
current_quantity=current_quantity)
def update_counts(partno, quantity, batchname, message):
"""
Updates the given stock entry, creating it if necessary.
Raises an exception if the part number is not valid or if there is a database problem
"""
_check_bom(partno) # validation
_do_update_counts(partno, quantity, batchname, message)
def correct_counts(partno, quantity, message):
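    """
    Sets the stock quantity to the given absolute value and records the correction in the
    stock history. Unlike update_counts(), no BOM cascade is applied.
    """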
result = current_app.mongo.db.stock.update_one(
filter={'_id': partno},
update={'$set': {'quantity': quantity}},
upsert=True,
)
if result.modified_count == 0 and result.upserted_id is None:
raise RuntimeError('no stock database object modified nor created')
result = current_app.mongo.db.stock_history.insert_one({
'date': datetime.now(),
'partno': partno,
'quantity': quantity,
'message': message
})
if result.inserted_id is None:
raise RuntimeError('no stock history object created')
def update_batch(partno, batchname, quantity):
"""
Updates the given stock batch item, creating it if necessary.
Raises an exception if the part number is not valid or if there is a database problem
"""
if quantity < 0:
raise ValueError('A batch cannot have negative quantities')
ensure_exists(partno)
if quantity == 0 or not batchname:
return # nothing to do
result = current_app.mongo.db.stock_batches.update_one(
filter={
'partno': partno,
'name': batchname
},
update={'$inc': {'quantity': quantity}},
upsert=True
)
if result.modified_count == 0 and result.upserted_id is None:
raise RuntimeError('no stock batch object modified nor created')
def set_bom(partno, data):
"""
Updates the BOM data for the given part number
"""
ensure_exists(partno)
bomdata = list()
for item in data:
p = item.get('partno')
ensure_exists(p)
bomdata.append(dict(partno=p, quantity=int(item['quantity'])))
result = current_app.mongo.db.stock.update_one(
filter={'_id': partno},
update={'$set': {'bom': bomdata}},
upsert=True)
if result.modified_count == 0 and result.upserted_id is None:
raise RuntimeError('no BOM object modified nor created')
def _do_update_counts(partno, quantity, batchname, message):
if quantity == 0:
return # nothing to do
# update the current component including batch and history before following the BOM rules.
# in case there is a database problem the highest-level stock entry will most likely be correct.
result = current_app.mongo.db.stock.update_one(
filter={'_id': partno},
update={'$inc': {'quantity': quantity}},
upsert=True
)
if result.modified_count == 0 and result.upserted_id is None:
raise RuntimeError('database update problem: %s' % str(result))
if quantity > 0:
update_batch(partno, batchname, quantity)
result = current_app.mongo.db.stock_history.insert_one({
'date': datetime.now(),
'partno': partno,
'delta': quantity,
'message': message
})
if result.inserted_id is None:
raise RuntimeError('no stock history object created')
# only run the BOM rules if something is added to the stock
if quantity <= 0:
return
bom = current_app.mongo.db.stock.find_one(partno).get('bom', list())
for item in bom:
_do_update_counts(item.get('partno'), quantity*item.get('quantity')*-1, None, '(BOM rule)')
def _check_bom(partno, tree=None):
    """
    Recursively checks that the BOM tree is valid by detecting cycles.
    The tree parameter tracks the parents of the current node; leave it as None (the default)
    for the top-level component.
    """
    # A mutable default argument would be shared between calls; create a fresh set instead.
    if tree is None:
        tree = set()
    ensure_exists(partno)
# obtain the BOM
obj = current_app.mongo.db.stock.find_one(partno)
if not obj:
return
bom = obj.get('bom', list())
if len(bom) == 0:
return
# recursively check the BOM
tree.add(partno)
for entry in bom:
pn = entry.get('partno')
if pn in tree:
raise RuntimeError('Infinite loop detected')
_check_bom(pn, tree)
tree.remove(partno)
def _import_file(filepath):
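    """
    Reads the spreadsheet at filepath and returns a (success, headers, rows) tuple.
    Rows with an unknown part number or a negative quantity are flashed as errors and mark
    success as False; a missing quantity is treated as zero.
    """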
headers, data = read_xls(filepath)
success = True
if 'quantity' not in headers:
raise ValueError("'quantity' column is missing")
if 'partno' not in headers:
raise ValueError("'partno' column is missing")
for idx, item in enumerate(data):
try:
# the part number must exist
# quantity is optional, set as zero if missing
ensure_exists(item.get('partno'))
qstr = item.get('quantity')
if qstr:
quantity = int(qstr)
else:
quantity = 0
if quantity < 0:
raise ValueError('quantity must be non-negative')
item['quantity'] = quantity
except Exception as e:
flash('%s (row %d)' % (e, (idx+2)), 'error')
success = False
return success, headers, data
|
|
from django.shortcuts import redirect
from django.utils.translation import ugettext_lazy as _
from urllib.parse import urlencode
from urllib.request import urlopen
from django.core.urlresolvers import reverse
from django.conf import settings as project_settings
from .base import BackendBase
class PaypalBackend(BackendBase):
backend_id = 'paypal'
backend_verbose_name = _("PayPal")
backend_display_name = _("PayPal")
backend_has_recurring = True
def __init__(self, settings):
self.test = settings.get('TEST', False)
self.header_image = settings.get('HEADER_IMAGE', None)
self.title = settings.get('TITLE', 'VPN Payment')
self.currency = settings.get('CURRENCY', 'EUR')
self.account_address = settings.get('ADDRESS')
self.receiver_address = settings.get('RECEIVER', self.account_address)
        if self.test:
            default_api = 'https://www.sandbox.paypal.com'
        else:
            default_api = 'https://www.paypal.com'
        # Strip any trailing slash so the later '/cgi-bin/webscr' concatenation does not produce '//'
        self.api_base = settings.get('API_BASE', default_api).rstrip('/')
if self.account_address:
self.backend_enabled = True
def new_payment(self, payment):
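        """
        Builds the PayPal '_xclick' checkout parameters for the payment and redirects the user to PayPal.
        """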
ROOT_URL = project_settings.ROOT_URL
params = {
'cmd': '_xclick',
'notify_url': ROOT_URL + reverse('payments:cb_paypal', args=(payment.id,)),
'item_name': self.title,
'amount': '%.2f' % (payment.amount / 100),
'currency_code': self.currency,
'business': self.account_address,
'no_shipping': '1',
'return': ROOT_URL + reverse('payments:view', args=(payment.id,)),
'cancel_return': ROOT_URL + reverse('payments:cancel', args=(payment.id,)),
}
if self.header_image:
params['cpp_header_image'] = self.header_image
payment.status_message = _("Waiting for PayPal to confirm the transaction... " +
"It can take up to a few minutes...")
payment.save()
return redirect(self.api_base + '/cgi-bin/webscr?' + urlencode(params))
def new_subscription(self, rps):
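        """
        Builds the PayPal '_xclick-subscriptions' recurring payment parameters and redirects the user to PayPal.
        """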
months = {
'3m': 3,
'6m': 6,
'12m': 12,
}[rps.period]
ROOT_URL = project_settings.ROOT_URL
params = {
'cmd': '_xclick-subscriptions',
'notify_url': ROOT_URL + reverse('payments:cb_paypal_subscr', args=(rps.id,)),
'item_name': self.title,
'currency_code': self.currency,
'business': self.account_address,
'no_shipping': '1',
'return': ROOT_URL + reverse('payments:return_subscr', args=(rps.id,)),
'cancel_return': ROOT_URL + reverse('account:index'),
'a3': '%.2f' % (rps.period_amount / 100),
'p3': str(months),
't3': 'M',
'src': '1',
}
if self.header_image:
params['cpp_header_image'] = self.header_image
rps.save()
return redirect(self.api_base + '/cgi-bin/webscr?' + urlencode(params))
def handle_verified_callback(self, payment, params):
if self.test and params['test_ipn'] != '1':
raise ValueError('Test IPN')
txn_type = params.get('txn_type')
if txn_type not in (None, 'web_accept', 'express_checkout'):
# Not handled here and can be ignored
return
if params['payment_status'] == 'Refunded':
payment.status = 'refunded'
payment.status_message = None
elif params['payment_status'] == 'Completed':
self.handle_completed_payment(payment, params)
def handle_verified_callback_subscr(self, subscr, params):
if self.test and params['test_ipn'] != '1':
raise ValueError('Test IPN')
txn_type = params.get('txn_type')
        if not txn_type or not txn_type.startswith('subscr_'):
# Not handled here and can be ignored
return
if txn_type == 'subscr_payment':
if params['payment_status'] == 'Refunded':
# FIXME: Find the payment and do something
pass
elif params['payment_status'] == 'Completed':
payment = subscr.create_payment()
if not self.handle_completed_payment(payment, params):
return
subscr.last_confirmed_payment = payment.created
subscr.backend_extid = params.get('subscr_id', '')
if subscr.status == 'new' or subscr.status == 'unconfirmed':
subscr.status = 'active'
subscr.save()
elif txn_type == 'subscr_cancel' or txn_type == 'subscr_eot':
subscr.status = 'cancelled'
subscr.save()
def handle_completed_payment(self, payment, params):
from payments.models import Payment
# Prevent making duplicate Payments if IPN is received twice
pc = Payment.objects.filter(backend_extid=params['txn_id']).count()
if pc > 0:
return False
if self.receiver_address != params['receiver_email']:
raise ValueError('Wrong receiver: ' + params['receiver_email'])
if self.currency.lower() != params['mc_currency'].lower():
raise ValueError('Wrong currency: ' + params['mc_currency'])
payment.paid_amount = int(float(params['mc_gross']) * 100)
if payment.paid_amount < payment.amount:
raise ValueError('Not fully paid.')
payment.user.vpnuser.add_paid_time(payment.time)
payment.user.vpnuser.on_payment_confirmed(payment)
payment.user.vpnuser.save()
payment.backend_extid = params['txn_id']
payment.status = 'confirmed'
payment.status_message = None
payment.save()
return True
def verify_ipn(self, request):
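        """
        Posts the raw IPN body back to PayPal for validation ('_notify-validate') and
        returns True only if PayPal answers 'VERIFIED'.
        """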
v_url = self.api_base + '/cgi-bin/webscr?cmd=_notify-validate'
v_req = urlopen(v_url, data=request.body, timeout=5)
v_res = v_req.read()
return v_res == b'VERIFIED'
def callback(self, payment, request):
if not self.verify_ipn(request):
return False
params = request.POST
try:
self.handle_verified_callback(payment, params)
return True
except (KeyError, ValueError) as e:
payment.status = 'error'
payment.status_message = None
payment.backend_data['ipn_exception'] = repr(e)
payment.backend_data['ipn_last_data'] = repr(request.POST)
payment.save()
raise
def callback_subscr(self, subscr, request):
if not self.verify_ipn(request):
return False
params = request.POST
try:
self.handle_verified_callback_subscr(subscr, params)
return True
except (KeyError, ValueError) as e:
subscr.status = 'error'
subscr.status_message = None
subscr.backend_data['ipn_exception'] = repr(e)
subscr.backend_data['ipn_last_data'] = repr(request.POST)
subscr.save()
raise
def get_ext_url(self, payment):
if not payment.backend_extid:
return None
url = 'https://history.paypal.com/webscr?cmd=_history-details-from-hub&id=%s'
return url % payment.backend_extid
def get_subscr_ext_url(self, subscr):
if not subscr.backend_extid:
return None
return ('https://www.paypal.com/fr/cgi-bin/webscr?cmd=_profile-recurring-payments'
'&encrypted_profile_id=%s' % subscr.backend_extid)
|
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Food'
db.create_table(u'usda_food', (
('ndb_number', self.gf('django.db.models.fields.IntegerField')(primary_key=True)),
('food_group', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['usda.FoodGroup'])),
('long_description', self.gf('django.db.models.fields.CharField')(max_length=200, blank=True)),
('short_description', self.gf('django.db.models.fields.CharField')(max_length=60, blank=True)),
('common_name', self.gf('django.db.models.fields.CharField')(max_length=100, blank=True)),
('manufacturer_name', self.gf('django.db.models.fields.CharField')(max_length=65, blank=True)),
('survey', self.gf('django.db.models.fields.BooleanField')(default=False)),
('refuse_description', self.gf('django.db.models.fields.CharField')(max_length=135, blank=True)),
('refuse_percentage', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('scientific_name', self.gf('django.db.models.fields.CharField')(max_length=65, blank=True)),
('nitrogen_factor', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('protein_factor', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('fat_factor', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('cho_factor', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
))
db.send_create_signal(u'usda', ['Food'])
# Adding model 'FoodGroup'
db.create_table(u'usda_foodgroup', (
('code', self.gf('django.db.models.fields.IntegerField')(primary_key=True)),
('description', self.gf('django.db.models.fields.CharField')(max_length=60, blank=True)),
))
db.send_create_signal(u'usda', ['FoodGroup'])
# Adding model 'Nutrient'
db.create_table(u'usda_nutrient', (
('number', self.gf('django.db.models.fields.IntegerField')(primary_key=True)),
('units', self.gf('django.db.models.fields.CharField')(max_length=7)),
('tagname', self.gf('django.db.models.fields.CharField')(max_length=20, blank=True)),
('description', self.gf('django.db.models.fields.CharField')(max_length=60)),
('decimals', self.gf('django.db.models.fields.IntegerField')()),
('order', self.gf('django.db.models.fields.IntegerField')()),
))
db.send_create_signal(u'usda', ['Nutrient'])
# Adding model 'NutrientData'
db.create_table(u'usda_nutrientdata', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('food', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['usda.Food'])),
('nutrient', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['usda.Nutrient'])),
('nutrient_value', self.gf('django.db.models.fields.FloatField')()),
('data_points', self.gf('django.db.models.fields.IntegerField')()),
('standard_error', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('data_derivation', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['usda.DataDerivation'], null=True, blank=True)),
('reference_nbd_number', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('added_nutrient', self.gf('django.db.models.fields.BooleanField')(default=False)),
('number_of_studies', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('minimum', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('maximum', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('degrees_of_freedom', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('lower_error_bound', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('upper_error_bound', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('statistical_comments', self.gf('django.db.models.fields.CharField')(max_length=10, blank=True)),
('confidence_code', self.gf('django.db.models.fields.CharField')(max_length=1, blank=True)),
))
db.send_create_signal(u'usda', ['NutrientData'])
# Adding unique constraint on 'NutrientData', fields ['food', 'nutrient']
db.create_unique(u'usda_nutrientdata', ['food_id', 'nutrient_id'])
# Adding M2M table for field source on 'NutrientData'
db.create_table(u'usda_nutrientdata_source', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('nutrientdata', models.ForeignKey(orm[u'usda.nutrientdata'], null=False)),
('source', models.ForeignKey(orm[u'usda.source'], null=False))
))
db.create_unique(u'usda_nutrientdata_source', ['nutrientdata_id', 'source_id'])
# Adding model 'Source'
db.create_table(u'usda_source', (
('code', self.gf('django.db.models.fields.IntegerField')(primary_key=True)),
('description', self.gf('django.db.models.fields.CharField')(max_length=60)),
))
db.send_create_signal(u'usda', ['Source'])
# Adding model 'DataDerivation'
db.create_table(u'usda_dataderivation', (
('code', self.gf('django.db.models.fields.CharField')(max_length=4, primary_key=True)),
('description', self.gf('django.db.models.fields.CharField')(max_length=120)),
))
db.send_create_signal(u'usda', ['DataDerivation'])
# Adding model 'Weight'
db.create_table(u'usda_weight', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('food', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['usda.Food'])),
('sequence', self.gf('django.db.models.fields.IntegerField')()),
('amount', self.gf('django.db.models.fields.FloatField')()),
('description', self.gf('django.db.models.fields.CharField')(max_length=80)),
('gram_weight', self.gf('django.db.models.fields.FloatField')()),
('number_of_data_points', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('standard_deviation', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
))
db.send_create_signal(u'usda', ['Weight'])
# Adding unique constraint on 'Weight', fields ['food', 'sequence']
db.create_unique(u'usda_weight', ['food_id', 'sequence'])
# Adding model 'Footnote'
db.create_table(u'usda_footnote', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('food', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['usda.Food'])),
('number', self.gf('django.db.models.fields.IntegerField')()),
('type', self.gf('django.db.models.fields.CharField')(max_length=1)),
('nutrient', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['usda.Nutrient'], null=True, blank=True)),
('text', self.gf('django.db.models.fields.CharField')(max_length=200)),
))
db.send_create_signal(u'usda', ['Footnote'])
# Adding unique constraint on 'Footnote', fields ['food', 'number', 'nutrient']
db.create_unique(u'usda_footnote', ['food_id', 'number', 'nutrient_id'])
# Adding model 'DataSource'
db.create_table(u'usda_datasource', (
('id', self.gf('django.db.models.fields.CharField')(max_length=6, primary_key=True)),
('authors', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('title', self.gf('django.db.models.fields.CharField')(max_length=255)),
('year', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('journal', self.gf('django.db.models.fields.CharField')(max_length=135, null=True, blank=True)),
('volume_or_city', self.gf('django.db.models.fields.CharField')(max_length=16, blank=True)),
('issue_or_state', self.gf('django.db.models.fields.CharField')(max_length=5, blank=True)),
('start_page', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('end_page', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
))
db.send_create_signal(u'usda', ['DataSource'])
def backwards(self, orm):
# Removing unique constraint on 'Footnote', fields ['food', 'number', 'nutrient']
db.delete_unique(u'usda_footnote', ['food_id', 'number', 'nutrient_id'])
# Removing unique constraint on 'Weight', fields ['food', 'sequence']
db.delete_unique(u'usda_weight', ['food_id', 'sequence'])
# Removing unique constraint on 'NutrientData', fields ['food', 'nutrient']
db.delete_unique(u'usda_nutrientdata', ['food_id', 'nutrient_id'])
# Deleting model 'Food'
db.delete_table(u'usda_food')
# Deleting model 'FoodGroup'
db.delete_table(u'usda_foodgroup')
# Deleting model 'Nutrient'
db.delete_table(u'usda_nutrient')
# Deleting model 'NutrientData'
db.delete_table(u'usda_nutrientdata')
# Removing M2M table for field source on 'NutrientData'
db.delete_table('usda_nutrientdata_source')
# Deleting model 'Source'
db.delete_table(u'usda_source')
# Deleting model 'DataDerivation'
db.delete_table(u'usda_dataderivation')
# Deleting model 'Weight'
db.delete_table(u'usda_weight')
# Deleting model 'Footnote'
db.delete_table(u'usda_footnote')
# Deleting model 'DataSource'
db.delete_table(u'usda_datasource')
models = {
u'usda.dataderivation': {
'Meta': {'ordering': "['code']", 'object_name': 'DataDerivation'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '4', 'primary_key': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '120'})
},
u'usda.datasource': {
'Meta': {'ordering': "['id']", 'object_name': 'DataSource'},
'authors': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'end_page': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '6', 'primary_key': 'True'}),
'issue_or_state': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'journal': ('django.db.models.fields.CharField', [], {'max_length': '135', 'null': 'True', 'blank': 'True'}),
'start_page': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'volume_or_city': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'year': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'usda.food': {
'Meta': {'ordering': "['ndb_number']", 'object_name': 'Food'},
'cho_factor': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'common_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'fat_factor': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'food_group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['usda.FoodGroup']"}),
'long_description': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'manufacturer_name': ('django.db.models.fields.CharField', [], {'max_length': '65', 'blank': 'True'}),
'ndb_number': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'}),
'nitrogen_factor': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'protein_factor': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'refuse_description': ('django.db.models.fields.CharField', [], {'max_length': '135', 'blank': 'True'}),
'refuse_percentage': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'scientific_name': ('django.db.models.fields.CharField', [], {'max_length': '65', 'blank': 'True'}),
'short_description': ('django.db.models.fields.CharField', [], {'max_length': '60', 'blank': 'True'}),
'survey': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'usda.foodgroup': {
'Meta': {'ordering': "['code']", 'object_name': 'FoodGroup'},
'code': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '60', 'blank': 'True'})
},
u'usda.footnote': {
'Meta': {'ordering': "['food', 'number']", 'unique_together': "(['food', 'number', 'nutrient'],)", 'object_name': 'Footnote'},
'food': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['usda.Food']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'number': ('django.db.models.fields.IntegerField', [], {}),
'nutrient': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['usda.Nutrient']", 'null': 'True', 'blank': 'True'}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '1'})
},
u'usda.nutrient': {
'Meta': {'ordering': "['number']", 'object_name': 'Nutrient'},
'decimals': ('django.db.models.fields.IntegerField', [], {}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'number': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.IntegerField', [], {}),
'tagname': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'}),
'units': ('django.db.models.fields.CharField', [], {'max_length': '7'})
},
u'usda.nutrientdata': {
'Meta': {'ordering': "['food', 'nutrient']", 'unique_together': "(['food', 'nutrient'],)", 'object_name': 'NutrientData'},
'added_nutrient': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'confidence_code': ('django.db.models.fields.CharField', [], {'max_length': '1', 'blank': 'True'}),
'data_derivation': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['usda.DataDerivation']", 'null': 'True', 'blank': 'True'}),
'data_points': ('django.db.models.fields.IntegerField', [], {}),
'degrees_of_freedom': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'food': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['usda.Food']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lower_error_bound': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'maximum': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'minimum': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'number_of_studies': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'nutrient': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['usda.Nutrient']"}),
'nutrient_value': ('django.db.models.fields.FloatField', [], {}),
'reference_nbd_number': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'source': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['usda.Source']", 'symmetrical': 'False'}),
'standard_error': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'statistical_comments': ('django.db.models.fields.CharField', [], {'max_length': '10', 'blank': 'True'}),
'upper_error_bound': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'})
},
u'usda.source': {
'Meta': {'ordering': "['code']", 'object_name': 'Source'},
'code': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '60'})
},
u'usda.weight': {
'Meta': {'ordering': "['food', 'sequence']", 'unique_together': "(['food', 'sequence'],)", 'object_name': 'Weight'},
'amount': ('django.db.models.fields.FloatField', [], {}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'food': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['usda.Food']"}),
'gram_weight': ('django.db.models.fields.FloatField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'number_of_data_points': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'sequence': ('django.db.models.fields.IntegerField', [], {}),
'standard_deviation': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['usda']
|
|
from collections import defaultdict
import os
def classify(instance, classes_instances, extractor=str.split, priors=None):
"""
Using `classes_instances` as supervised learning, classify `instance` into
one of the example classes. `extractor` is a function to convert instances
into a list of events/features to be analyzed, which defaults to a simple
word extraction.
"""
priors = priors or {class_: 1.0 for class_ in classes_instances}
model = Bayes.extract_events_odds(classes_instances, extractor)
b = Bayes(priors)
b.update_from_events(extractor(instance), model)
return b.most_likely()
def classify_file(file_, folders, extractor=str.split):
"""
Classify `file_` into one of `folders`, based on the contents of the files
already there. `extractor` is a function to convert file contents
into a list of events/features to be analyzed, which defaults to a simple
word extraction.
"""
classes_instances = defaultdict(list)
for folder in folders:
for child in os.listdir(folder):
child_path = os.path.join(folder, child)
if os.path.isfile(child_path):
classes_instances[folder].append(child_path)
new_extractor = lambda f: extractor(open(f).read())
return classify(file_, classes_instances, new_extractor)
def classify_folder(folder, extractor=str.split):
"""
Move every file in `folder` into one of its subfolders, based on the
contents of the files in those subfolders. `extractor` is a function to
convert file contents into a list of events/features to be analyzed, which
defaults to a simple word extraction.
"""
subfolders = []
files = []
for item in os.listdir(folder):
path = os.path.join(folder, item)
if os.path.isdir(path):
subfolders.append(path)
else:
files.append(path)
for file_ in files:
classification = classify_file(file_, subfolders, extractor)
new_path = os.path.join(classification, os.path.basename(file_))
if not os.path.exists(new_path):
print(file_, classification)
os.rename(file_, new_path)
from math import sqrt, pi, exp
def gaussian_distribution(values):
"""
Given a list of values, returns the (mean, variance) tuple for a
Gaussian (normal) distribution.
"""
n = float(len(values))
mean = sum(values) / n
if n > 1:
variance = sum((value - mean) ** 2 for value in values) / (n - 1)
else:
variance = 0
return (mean, variance)
def gaussian_probability(sample, distribution):
"""
Given a sample value and the (mean, variance) distribution,
return the probability of this sample belonging to the
distribution.
"""
mean, variance = distribution
# Special case of degenerate distribution.
if variance == 0:
# 100% if sample is exactly at mean, otherwise 0%.
return 0 if sample != mean else 1
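    # Standard normal density: exp(-(sample - mean)^2 / (2 * variance)) / sqrt(2 * pi * variance)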
return (exp((sample - mean) ** 2 / (-2 * variance))
/ sqrt(2 * pi * variance))
def properties_distributions(classes_population):
"""
Converts classes populations into classes distributions by property.
{class: [{property: value}]} -> {property: {class: distribution}}
"""
distributions = defaultdict(dict)
for class_, population in classes_population.items():
properties_instances = defaultdict(list)
for properties in population:
for property, value in properties.items():
properties_instances[property].append(value)
for property, instances in properties_instances.items():
distributions[property][class_] = gaussian_distribution(instances)
return distributions
def classify_normal(instance, classes_instances, priors=None):
"""
Classify `instance` into one of the classes from `classes_instances`,
calculating the probabilities from the Gaussian (normal) distribution
from each classes' instances, starting from `priors` (uniform if not
specified).
classes_instances must be of type {class: [{property: value}]}
priors must be of type {class: odds} and is automatically normalized.
"""
priors = priors or {class_: 1.0 for class_ in classes_instances}
b = Bayes(priors)
distributions = properties_distributions(classes_instances)
for property, value in instance.items():
classes_distributions = distributions[property]
probability_by_class = {class_: gaussian_probability(value, distribution)
for class_, distribution in classes_distributions.items()}
b.update(probability_by_class)
return b.most_likely()
class Bayes(list):
"""
Class for Bayesian probabilistic evaluation through creation and update of
beliefs. This is meant for abstract reasoning, not just classification.
"""
@staticmethod
def extract_events_odds(classes_instances, event_extractor=str.split):
"""
Runs function `event_extractor` for every instance in every class in
`classes_instances` ({class: [instances]}) and returns the odds of each
event happening for each class.
The result of this function is meant to be used in a future
`update_from_events` call.
"""
small = 0.000001
events_odds = defaultdict(lambda: defaultdict(lambda: small))
for class_, instances in classes_instances.items():
for instance in instances:
for event in event_extractor(instance):
events_odds[event][class_] += 1
return events_odds
def __init__(self, value=None, labels=None):
"""
Creates a new Bayesian belief system.
`value` can be another Bayes
object to be copied, an array of odds, an array of (label, odds)
tuples or a dictionary {label: odds}.
`labels` is a list of names for the odds in `value`. Labels default to
        their indexes.
"""
if value is None:
raise ValueError('Expected non-None `value`.')
if isinstance(value, dict):
# Convert dictionary.
labels = labels or list(sorted(value.keys()))
raw_values = [value[label] for label in labels]
else:
value = list(value)
if len(value) and isinstance(value[0], tuple):
# Convert list of tuples.
labels, raw_values = zip(*value)
else:
# Convert raw list of values.
if labels is None:
labels = [str(i) for i in range(len(value))]
raw_values = value
if len(labels) != len(set(labels)):
raise ValueError('Labels must not be duplicated. Got {}.'.format(labels))
self.labels = labels
super(Bayes, self).__init__(raw_values)
def __getitem__(self, i):
""" Returns the odds at index or label `i`. """
if isinstance(i, str):
return self[self.labels.index(i)]
else:
return super(Bayes, self).__getitem__(i)
def __setitem__(self, i, value):
""" Sets the odds at index or label `i`. """
if isinstance(i, str):
self[self.labels.index(i)] = value
else:
super(Bayes, self).__setitem__(i, value)
def _cast(self, other):
"""
        Converts an unknown object into a Bayes object, keeping the same
labels if possible.
"""
if isinstance(other, Bayes):
return other
else:
return Bayes(other, self.labels)
def opposite(self):
"""
Returns the opposite probabilities.
Ex: [.7, .3] -> [.3, .7]
"""
        if 0 in self:
            # A zero entry's reciprocal tends to infinity, so in the limit the zero
            # entries take all the odds and every non-zero entry gets none.
            return self._cast(0 if i else 1 for i in self)
        else:
            return self._cast(1 / i for i in self)
def normalized(self):
"""
Converts the list of odds into a list probabilities that sum to 1.
"""
total = float(sum(self))
if total == 0:
return self._cast(0 for i in self)
else:
return self._cast(i / total for i in self)
def __mul__(self, other):
"""
Creates a new instance with odds from both this and the other instance.
Ex: [.5, .5] * [.9, .1] -> [.45, .05] (non normalized)
"""
return self._cast(i * j for i, j in zip(self, self._cast(other)))
def __truediv__(self, other):
"""
Creates a new instance with odds from this instance and the opposite of
the other.
Ex: [.5, .5] / [.9, .1] -> [.555, 5.0] (non normalized)
"""
return self * self._cast(other).opposite()
def update(self, event):
"""
Updates all current odds based on the likelihood of odds in event.
Modifies the instance and returns itself.
Ex: [.5, .5].update([.9, .1]) becomes [.45, .05] (non normalized)
"""
self[:] = (self * self._cast(event)).normalized()
return self
def update_from_events(self, events, events_odds):
"""
Perform an update for every event in events, taking the new odds from
the dictionary events_odds (if available).
Ex: [.5, .5].update_from_events(['pos'], {'pos': [.9, .1]})
becomes [.45, .05] (non normalized)
"""
for event in events:
if event in events_odds:
self.update(events_odds[event])
return self
def update_from_tests(self, tests_results, odds):
"""
For every binary test in `tests_results`, updates the current belief
depending on `odds`. If the test was True, use the odds as-is. If the
test was false, use the opposite odds.
Ex: [.5, .5].update_from_tests([True], [.9, .1]) becomes [.45, .05]
(non normalized)
"""
opposite_odds = self._cast(odds).opposite()
for result in tests_results:
if result:
self.update(odds)
else:
self.update(opposite_odds)
return self
def most_likely(self, cutoff=0.0):
"""
Returns the label with most probability, or None if its probability is
under `cutoff`.
Ex: {a: .4, b: .6}.most_likely() -> b
{a: .4, b: .6}.most_likely(cutoff=.7) -> None
"""
normalized = self.normalized()
max_value = max(normalized)
if max_value > cutoff:
return self.labels[normalized.index(max_value)]
else:
return None
def is_likely(self, label, minimum_probability=0.5):
"""
        Returns whether `label` has probability greater than `minimum_probability`.
Ex: {a: .4, b: .6}.is_likely(b) -> True
"""
return self.normalized()[label] > minimum_probability
def __repr__(self):
items = []
for label, item in zip(self.labels, self.normalized()):
items.append('{}: {}%'.format(label, round(item * 100, 2)))
return 'Bayes({})'.format(', '.join(items))
def __eq__(self, other):
if isinstance(other, Bayes) and self.labels != other.labels:
return False
return list(self.normalized()) == list(self._cast(other).normalized())
if __name__ == '__main__':
import sys
for folder in sys.argv[1:]:
classify_folder(folder)
|
|
from south.db import db
from django.db import models
from ietf.ietfworkflows.models import *
class Migration:
def forwards(self, orm):
# Adding model 'StateDescription'
db.create_table('ietfworkflows_statedescription', (
('id', orm['ietfworkflows.statedescription:id']),
('state', orm['ietfworkflows.statedescription:state']),
('definition', orm['ietfworkflows.statedescription:definition']),
('order', orm['ietfworkflows.statedescription:order']),
))
db.send_create_signal('ietfworkflows', ['StateDescription'])
def backwards(self, orm):
# Deleting model 'StateDescription'
db.delete_table('ietfworkflows_statedescription')
models = {
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'idtracker.acronym': {
'Meta': {'db_table': "'acronym'"},
'acronym': ('django.db.models.fields.CharField', [], {'max_length': '12'}),
'acronym_id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name_key': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'idtracker.idintendedstatus': {
'Meta': {'db_table': "'id_intended_status'"},
'intended_status': ('django.db.models.fields.CharField', [], {'max_length': '25', 'db_column': "'status_value'"}),
'intended_status_id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'idtracker.idstatus': {
'Meta': {'db_table': "'id_status'"},
'status': ('django.db.models.fields.CharField', [], {'max_length': '25', 'db_column': "'status_value'"}),
'status_id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'idtracker.internetdraft': {
'Meta': {'db_table': "'internet_drafts'"},
'abstract': ('django.db.models.fields.TextField', [], {}),
'b_approve_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'b_discussion_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'b_sent_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'comments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'dunn_sent_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'expiration_date': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'expired_tombstone': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'extension_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'file_type': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'filename': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['idtracker.Acronym']", 'db_column': "'group_acronym_id'"}),
'id_document_key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id_document_tag': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'intended_status': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['idtracker.IDIntendedStatus']"}),
'last_modified_date': ('django.db.models.fields.DateField', [], {}),
'lc_changes': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True'}),
'lc_expiration_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'lc_sent_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'local_path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'replaced_by': ('django.db.models.fields.related.ForeignKey', ["orm['idtracker.InternetDraft']"], {'related_name': "'replaces_set'", 'null': 'True', 'db_column': "'replaced_by'", 'blank': 'True'}),
'review_by_rfc_editor': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'revision': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'revision_date': ('django.db.models.fields.DateField', [], {}),
'rfc_number': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'shepherd': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['idtracker.PersonOrOrgInfo']", 'null': 'True', 'blank': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {}),
'status': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['idtracker.IDStatus']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_column': "'id_document_name'"}),
'txt_page_count': ('django.db.models.fields.IntegerField', [], {}),
'wgreturn_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'})
},
'idtracker.personororginfo': {
'Meta': {'db_table': "'person_or_org_info'"},
'address_type': ('django.db.models.fields.CharField', [], {'max_length': '4', 'null': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.CharField', [], {'max_length': '8', 'null': 'True', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'date_modified': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'}),
'first_name_key': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'last_name_key': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'middle_initial': ('django.db.models.fields.CharField', [], {'max_length': '4', 'null': 'True', 'blank': 'True'}),
'middle_initial_key': ('django.db.models.fields.CharField', [], {'max_length': '4', 'null': 'True', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.CharField', [], {'max_length': '8', 'null': 'True', 'blank': 'True'}),
'name_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'name_suffix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'person_or_org_tag': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'record_type': ('django.db.models.fields.CharField', [], {'max_length': '8', 'null': 'True', 'blank': 'True'})
},
'ietfworkflows.annotationtag': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'permission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['permissions.Permission']", 'null': 'True', 'blank': 'True'}),
'workflow': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'annotation_tags'", 'to': "orm['workflows.Workflow']"})
},
'ietfworkflows.annotationtagobjectrelation': {
'annotation_tag': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['ietfworkflows.AnnotationTag']"}),
'content_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'annotation_tags'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'ietfworkflows.objectannotationtaghistoryentry': {
'objecthistoryentry_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['ietfworkflows.ObjectHistoryEntry']", 'unique': 'True', 'primary_key': 'True'}),
'setted': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'unsetted': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'ietfworkflows.objecthistoryentry': {
'comment': ('django.db.models.fields.TextField', [], {}),
'content_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'workflow_history'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['idtracker.PersonOrOrgInfo']"})
},
'ietfworkflows.objectstreamhistoryentry': {
'from_stream': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'objecthistoryentry_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['ietfworkflows.ObjectHistoryEntry']", 'unique': 'True', 'primary_key': 'True'}),
'to_stream': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'ietfworkflows.objectworkflowhistoryentry': {
'from_state': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'objecthistoryentry_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['ietfworkflows.ObjectHistoryEntry']", 'unique': 'True', 'primary_key': 'True'}),
'to_state': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'ietfworkflows.statedescription': {
'definition': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {}),
'state': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['workflows.State']"})
},
'ietfworkflows.stateobjectrelationmetadata': {
'estimated_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'from_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'relation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['workflows.StateObjectRelation']"})
},
'ietfworkflows.stream': {
'group_chair_model': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'group_model': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'with_groups': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'workflow': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['ietfworkflows.WGWorkflow']"})
},
'ietfworkflows.streamedid': {
'content_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'streamed_id'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'draft': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['idtracker.InternetDraft']", 'unique': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'stream': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['ietfworkflows.Stream']", 'null': 'True', 'blank': 'True'})
},
'ietfworkflows.wgworkflow': {
'selected_states': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['workflows.State']", 'null': 'True', 'blank': 'True'}),
'selected_tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['ietfworkflows.AnnotationTag']", 'null': 'True', 'blank': 'True'}),
'workflow_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['workflows.Workflow']", 'unique': 'True', 'primary_key': 'True'})
},
'permissions.permission': {
'codename': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'content_types': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
'workflows.state': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'transitions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['workflows.Transition']", 'null': 'True', 'blank': 'True'}),
'workflow': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'states'", 'to': "orm['workflows.Workflow']"})
},
'workflows.stateobjectrelation': {
'Meta': {'unique_together': "(('content_type', 'content_id', 'state'),)"},
'content_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'state_object'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'state': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['workflows.State']"})
},
'workflows.transition': {
'condition': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'destination': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'destination_state'", 'null': 'True', 'to': "orm['workflows.State']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'permission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['permissions.Permission']", 'null': 'True', 'blank': 'True'}),
'workflow': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'transitions'", 'to': "orm['workflows.Workflow']"})
},
'workflows.workflow': {
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'initial_state': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'workflow_state'", 'null': 'True', 'to': "orm['workflows.State']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['permissions.Permission']", 'symmetrical': 'False'})
}
}
complete_apps = ['ietfworkflows']
|
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from builtins import range, next, zip, map
from future.utils import viewvalues
import sys
if sys.version < '3':
text_type = unicode
binary_type = str
int_type = long
else:
text_type = str
binary_type = bytes
unicode = str
int_type = int
import itertools
import time
import tempfile
import os
import operator
import random
import collections
import warnings
import functools
import numpy
class ChildProcessError(Exception) :
pass
class BlockingError(Exception):
pass
def randomPairs(n_records, sample_size):
"""
    Return a random sample of index pairs (i, j), with i < j, for a square
    matrix of size n_records. For a discussion of how this works see
http://stackoverflow.com/a/14839010/98080
"""
n = int(n_records * (n_records - 1) / 2)
if sample_size >= n :
random_pairs = numpy.arange(n, dtype='uint')
else:
try:
random_pairs = numpy.array(random.sample(range(n), sample_size),
dtype='uint')
except OverflowError:
return randomPairsWithReplacement(n_records, sample_size)
b = 1 - 2 * n_records
i = numpy.floor((-b - 2 * numpy.sqrt(2 * (n - random_pairs) + 0.25)) / 2).astype('uint')
j = numpy.rint(random_pairs + i * (b + i + 2) / 2 + 1).astype('uint')
return zip(i, j)
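# Hypothetical sanity check (not part of the original module): when sample_size
# covers every possible pair, the closed-form index inversion above should
# enumerate exactly the pairs produced by itertools.combinations. Uses the
# itertools import already present at the top of this file.
def _check_random_pairs(n_records=5):
    n_pairs = n_records * (n_records - 1) // 2
    produced = [(int(i), int(j)) for i, j in randomPairs(n_records, n_pairs)]
    expected = list(itertools.combinations(range(n_records), 2))
    assert produced == expected, (produced, expected)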
def randomPairsMatch(n_records_A, n_records_B, sample_size):
"""
    Return random combinations of indices for record lists A and B
"""
n = int(n_records_A * n_records_B)
if sample_size >= n:
random_pairs = numpy.arange(n)
else:
random_pairs = numpy.array(random.sample(range(n), sample_size),
dtype=int)
i, j = numpy.unravel_index(random_pairs, (n_records_A, n_records_B))
return zip(i, j)
def randomPairsWithReplacement(n_records, sample_size) :
    # If the population is very large relative to the sample
    # size then we'll get very few duplicates by chance
warnings.warn("There may be duplicates in the sample")
try :
random_indices = numpy.random.randint(n_records,
size=sample_size*2)
except (OverflowError, ValueError):
max_int = numpy.iinfo('int').max
warnings.warn("Asked to sample pairs from %d records, will only sample pairs from first %d records" % (n_records, max_int))
random_indices = numpy.random.randint(max_int,
size=sample_size*2)
random_indices = random_indices.reshape((-1, 2))
random_indices.sort(axis=1)
return [(p.item(), q.item()) for p, q in random_indices]
class ScoreDupes(object) :
def __init__(self, data_model, classifier, threshold) :
self.data_model = data_model
self.classifier = classifier
self.threshold = threshold
self.score_queue = None
def __call__(self, records_queue, score_queue) :
self.score_queue = score_queue
while True :
record_pairs = records_queue.get()
if record_pairs is None :
break
try :
filtered_pairs = self.fieldDistance(record_pairs)
if filtered_pairs is not None :
score_queue.put(filtered_pairs)
except Exception as e :
score_queue.put(e)
raise
score_queue.put(None)
def fieldDistance(self, record_pairs) :
ids = []
records = []
for record_pair in record_pairs :
((id_1, record_1, smaller_ids_1),
(id_2, record_2, smaller_ids_2)) = record_pair
if smaller_ids_1.isdisjoint(smaller_ids_2) :
ids.append((id_1, id_2))
records.append((record_1, record_2))
if records :
distances = self.data_model.distances(records)
scores = self.classifier.predict_proba(distances)[:,-1]
mask = scores > self.threshold
if mask.any():
id_type = sniff_id_type(ids)
ids = numpy.array(ids, dtype=id_type)
dtype = numpy.dtype([('pairs', id_type, 2),
('score', 'f4', 1)])
temp_file, file_path = tempfile.mkstemp()
os.close(temp_file)
scored_pairs = numpy.memmap(file_path,
shape=numpy.count_nonzero(mask),
dtype=dtype)
scored_pairs['pairs'] = ids[mask]
scored_pairs['score'] = scores[mask]
return file_path, dtype
def mergeScores(score_queue, result_queue, stop_signals) :
scored_pairs_file, file_path = tempfile.mkstemp()
os.close(scored_pairs_file)
seen_signals = 0
end = 0
while seen_signals < stop_signals :
score_chunk = score_queue.get()
if isinstance(score_chunk, Exception) :
result_queue.put(score_chunk)
            raise score_chunk  # propagate the child's exception and stop merging
elif score_chunk is None:
seen_signals += 1
else:
score_file, dtype = score_chunk
score_chunk = numpy.memmap(score_file, mode='r', dtype=dtype)
chunk_size = len(score_chunk)
fp = numpy.memmap(file_path, dtype=dtype,
offset=(end * dtype.itemsize),
shape=(chunk_size, ))
fp[:chunk_size] = score_chunk
end += chunk_size
del score_chunk
os.remove(score_file)
if end:
result_queue.put((file_path, dtype, end))
else:
result_queue.put(None)
def scoreDuplicates(records, data_model, classifier, num_cores=1, threshold=0) :
if num_cores < 2 :
from multiprocessing.dummy import Process, Queue
SimpleQueue = Queue
else :
from .backport import Process, SimpleQueue, Queue
first, records = peek(records)
if first is None:
raise BlockingError("No records have been blocked together. "
"Is the data you are trying to match like "
"the data you trained on?")
record_pairs_queue = Queue(2)
score_queue = SimpleQueue()
result_queue = SimpleQueue()
n_map_processes = max(num_cores, 1)
score_records = ScoreDupes(data_model, classifier, threshold)
map_processes = [Process(target=score_records,
args=(record_pairs_queue,
score_queue))
for _ in range(n_map_processes)]
[process.start() for process in map_processes]
reduce_process = Process(target=mergeScores,
args=(score_queue,
result_queue,
n_map_processes))
reduce_process.start()
fillQueue(record_pairs_queue, records, n_map_processes)
result = result_queue.get()
if isinstance(result, Exception) :
raise ChildProcessError
if result :
scored_pairs_file, dtype, size = result
scored_pairs = numpy.memmap(scored_pairs_file,
dtype=dtype,
shape=(size,))
    else:
        # dtype is otherwise undefined here; fall back to a string id dtype
        dtype = numpy.dtype([('pairs', (unicode, 256), 2), ('score', 'f4', 1)])
        scored_pairs = numpy.array([], dtype=dtype)
reduce_process.join()
[process.join() for process in map_processes]
return scored_pairs
def fillQueue(queue, iterable, stop_signals) :
iterable = iter(iterable)
chunk_size = 10000
upper_bound = 7000000 # this number worked, but is unprincipled
multiplier = 1.1
# initial values
i = 0
n_records = 0
t0 = time.clock()
last_rate = 10000
while True :
chunk = tuple(itertools.islice(iterable, int(chunk_size)))
if chunk :
queue.put(chunk)
del chunk
n_records += chunk_size
i += 1
if i % 10 :
time_delta = max(time.clock() - t0, 0.0001)
current_rate = n_records/time_delta
# chunk_size is always either growing or shrinking, if
# the shrinking led to a faster rate, keep
# shrinking. Same with growing. If the rate decreased,
# reverse directions
if current_rate < last_rate :
multiplier = 1/multiplier
chunk_size = min(max(chunk_size * multiplier, 1), upper_bound)
last_rate = current_rate
n_records = 0
t0 = time.clock()
else :
# put poison pills in queue to tell scorers that they are
# done
[queue.put(None) for _ in range(stop_signals)]
break
class ScoreGazette(object) :
def __init__(self, data_model, classifier, threshold) :
self.data_model = data_model
self.classifier = classifier
self.threshold = threshold
def __call__(self, block):
ids = []
records = []
for record_pair in block:
((id_1, record_1, _),
(id_2, record_2, _)) = record_pair
ids.append((id_1, id_2))
records.append((record_1, record_2))
distances = self.data_model.distances(records)
scores = self.classifier.predict_proba(distances)[:,-1]
mask = scores > self.threshold
id_type = sniff_id_type(ids)
ids = numpy.array(ids, dtype=id_type)
dtype = numpy.dtype([('pairs', id_type, 2),
('score', 'f4', 1)])
scored_pairs = numpy.empty(shape=numpy.count_nonzero(mask),
dtype=dtype)
scored_pairs['pairs'] = ids[mask]
scored_pairs['score'] = scores[mask]
return scored_pairs
def scoreGazette(records, data_model, classifier, num_cores=1, threshold=0) :
if num_cores < 2 :
imap = map
else :
from .backport import Pool
n_map_processes = max(num_cores, 1)
pool = Pool(processes=n_map_processes)
imap = functools.partial(pool.imap_unordered, chunksize=1)
first, records = peek(records)
if first is None:
raise ValueError("No records to match")
score_records = ScoreGazette(data_model, classifier, threshold)
for scored_pairs in imap(score_records, records):
yield scored_pairs
def peek(records) :
try :
record = next(records)
except TypeError as e:
if "not an iterator" not in str(e) :
raise
try :
records = iter(records)
record = next(records)
except StopIteration :
return None, records
except StopIteration :
return None, records
return record, itertools.chain([record], records)
def isIndexed(data, offset) :
return all(i in data for i in range(offset, offset + len(data)))
def index(data, offset=0) :
if isIndexed(data, offset):
return data
else :
data = dict(zip(itertools.count(offset),
viewvalues(data)))
return data
def iunzip(iterable, internal_length): # pragma: no cover
"""Iunzip is the same as zip(*iter) but returns iterators, instead of
expand the iterator. Mostly used for large sequence"""
_tmp, iterable = itertools.tee(iterable, 2)
iters = itertools.tee(iterable, internal_length)
return (map(operator.itemgetter(i), it) for i, it in enumerate(iters))
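# Hypothetical usage sketch (not part of the original module): iunzip splits an
# iterable of fixed-length tuples into one iterator per position.
def _example_iunzip():
    pairs = [(1, 'a'), (2, 'b'), (3, 'c')]
    numbers, letters = iunzip(iter(pairs), 2)
    return list(numbers), list(letters)  # ([1, 2, 3], ['a', 'b', 'c'])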
def Enumerator(start=0, initial=()):
try : # py 2
return collections.defaultdict(itertools.count(start).next, initial)
except AttributeError : # py 3
return collections.defaultdict(itertools.count(start).__next__, initial)
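# Hypothetical usage sketch (not part of the original module): an Enumerator
# hands out a new sequential id the first time a key is seen and reuses it on
# later lookups.
def _example_enumerator():
    ids = Enumerator()
    return ids['apple'], ids['banana'], ids['apple']  # (0, 1, 0)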
def sniff_id_type(ids):
example = ids[0][0]
python_type = type(example)
if python_type is binary_type or python_type is text_type :
python_type = (unicode, 256)
else:
int_type(example) # make sure we can cast to int
python_type = int_type
return python_type
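# Hypothetical usage sketch (not part of the original module): the type
# returned by sniff_id_type is used directly as the 'pairs' field of the
# structured arrays built in ScoreDupes and ScoreGazette above.
def _example_sniff_id_type():
    id_type = sniff_id_type([('abc', 'def')])  # string ids -> (unicode, 256)
    dtype = numpy.dtype([('pairs', id_type, 2),
                         ('score', 'f4', 1)])
    return numpy.empty(0, dtype=dtype)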
|
|
# REFS Most of these functions come from the Keras library (https://github.com/fchollet/keras)
# Some are modified to add output images and an output centerline
# keras.preprocessing.image: flip_axis, random_channel_shift, apply_transform, transform_matrix_offset_center, ApplyRandomTransformations
import time
import numpy as np
import random
import scipy as sp
import scipy.interpolate
import scipy.ndimage
import scipy.ndimage.interpolation
from NnetsX import IS_CHANNELS_FIRST
# from File import SavePickle
INTENSITY_FACTOR = 0.2
VECTOR_FIELD_SIGMA = 5. # in pixel
ROTATION_FACTOR = 10 # degree
TRANSLATION_FACTOR = 0.2 # proportion of the image size
SHEAR_FACTOR = 2*np.pi/180 # in radian
ZOOM_FACTOR = 0.1
def flip_axis(x, axis):
x = np.asarray(x).swapaxes(axis, 0)
x = x[::-1, ...]
x = x.swapaxes(0, axis)
return x
def random_channel_shift(x, intensity, channel_index=0):
x = np.rollaxis(x, channel_index, 0)
min_x, max_x = np.min(x), np.max(x)
shift = np.random.uniform(-intensity, intensity) # TODO add a choice if we want the same shift for all channels
channel_images = [np.clip(x_channel + shift, min_x, max_x)
for x_channel in x]
# channel_images = [np.clip(x_channel + np.random.uniform(-intensity, intensity), min_x, max_x)
# for x_channel in x]
x = np.stack(channel_images, axis=0)
x = np.rollaxis(x, 0, channel_index+1)
return x
def apply_transform(x, transform_matrix, channel_index=0, fill_mode='nearest', cval=0.):
x = np.rollaxis(x, channel_index, 0)
final_affine_matrix = transform_matrix[:2, :2]
final_offset = transform_matrix[:2, 2]
channel_images = [sp.ndimage.interpolation.affine_transform(x_channel, final_affine_matrix,
final_offset, order=0, mode=fill_mode, cval=cval) for x_channel in x]
x = np.stack(channel_images, axis=0)
x = np.rollaxis(x, 0, channel_index+1)
return x
def transform_matrix_offset_center(matrix, x, y):
o_x = float(x) / 2 + 0.5
o_y = float(y) / 2 + 0.5
offset_matrix = np.array([[1, 0, o_x], [0, 1, o_y], [0, 0, 1]])
reset_matrix = np.array([[1, 0, -o_x], [0, 1, -o_y], [0, 0, 1]])
transform_matrix = np.dot(np.dot(offset_matrix, matrix), reset_matrix)
return transform_matrix
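# Hypothetical usage sketch (not part of the original functions): build a
# rotation matrix, recentre it on the image with transform_matrix_offset_center
# and apply it channel-wise with apply_transform. The image shape and the
# 15-degree default are made up for illustration.
def _example_rotate_image(image=None, degrees=15.):
    if image is None:
        image = np.zeros((1, 64, 64), dtype=np.float32)  # (channels, rows, cols)
    theta = np.pi / 180. * degrees
    rotation = np.array([[np.cos(theta), -np.sin(theta), 0],
                         [np.sin(theta), np.cos(theta), 0],
                         [0, 0, 1]])
    matrix = transform_matrix_offset_center(rotation, image.shape[1], image.shape[2])
    return apply_transform(image, matrix, channel_index=0)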
def ApplyRandomTransformations(_x, _y, _pts, _trans, _rot, _zoom, _shear, _elastix, _row_index=1, _col_index=2, _channel_index=0, _fill_mode='constant', _cval=0.):
if _elastix != 0:
sigma = _elastix # in pixel
kernelSize = 3
sizeAll = kernelSize + 2
imgShape = (_x.shape[1], _x.shape[2])
# create the indices of the 5x5 vector field (fieldPts.shape = (25,2))
fieldPts = np.mgrid[0.:1.:complex(sizeAll), 0.:1.:complex(sizeAll)].swapaxes(0,2).swapaxes(0,1).reshape((sizeAll*sizeAll, 2))
        # create the displacement (x and y) of the 5x5 vector field (border points have no displacement, so they stay 0) (displacementX.shape = (25))
displacementX = np.zeros((sizeAll*sizeAll))
displacementY = np.zeros((sizeAll*sizeAll))
for i in range(0, sizeAll*sizeAll):
if fieldPts[i][0] != 0. and fieldPts[i][0] != 1. \
and fieldPts[i][1] != 0. and fieldPts[i][1] != 1.:
displacementX[i] = np.random.normal(0, sigma, 1)
displacementY[i] = np.random.normal(0, sigma, 1)
        # transform the indices of the 5x5 vector field into the image coordinate system (TODO WARNING works only with square images)
fieldPts = fieldPts*imgShape[0] # TODO check if it's not imgShape[0] - 1?
# create the indices of all pixels in the image (gridX.shape = (1024,1024))
gridX, gridY = np.mgrid[0.:(imgShape[0] - 1):complex(imgShape[0]), 0.:(imgShape[1] - 1):complex(imgShape[1])]
        # interpolate the vector field for every pixel in the image (dxGrid.shape = (1024,1024))
dxGrid = scipy.interpolate.griddata(fieldPts, displacementX, (gridX, gridY), method='cubic')
dyGrid = scipy.interpolate.griddata(fieldPts, displacementY, (gridX, gridY), method='cubic')
        # apply the displacement to every pixel (indices = [indices.shape[0] = 1024*1024, indices.shape[1] = 1024*1024])
indices = np.reshape(gridY + dyGrid, (-1, 1)), np.reshape(gridX + dxGrid, (-1, 1))
for chan in range(_x.shape[0]):
_x[chan] = scipy.ndimage.interpolation.map_coordinates(_x[chan], indices, order=2, mode='reflect').reshape(imgShape)
_x[chan] = np.clip(_x[chan], 0., 1.)
if _y is not None:
for chan in range(_y.shape[0]):
_y[chan] = scipy.ndimage.interpolation.map_coordinates(_y[chan], indices, order=2, mode='reflect').reshape(imgShape)
_y[chan] = np.clip(_y[chan], 0., 1.)
#if _pts is not None:
matrix = np.array([[1, 0, 0],
[0, 1, 0],
[0, 0, 1]])
if _rot != 0:
theta = np.pi/180*np.random.uniform(-_rot, _rot)
rotation_matrix = np.array([[np.cos(theta), -np.sin(theta), 0],
[np.sin(theta), np.cos(theta), 0],
[0, 0, 1]])
matrix = np.dot(matrix, rotation_matrix)
if _trans != 0:
ty = np.random.uniform(-_trans, _trans)*_x.shape[_row_index]
tx = np.random.uniform(-_trans, _trans)*_x.shape[_col_index]
translation_matrix = np.array([[1, 0, ty],
[0, 1, tx],
[0, 0, 1]])
matrix = np.dot(matrix, translation_matrix)
if _shear != 0:
shear = np.random.uniform(-_shear, _shear)
shear_matrix = np.array([[1, -np.sin(shear), 0],
[0, np.cos(shear), 0],
[0, 0, 1]])
matrix = np.dot(matrix, shear_matrix)
if _zoom != 0:
zx, zy = np.random.uniform(1 - _zoom, 1 + _zoom, 2)
zoom_matrix = np.array([[zx, 0, 0],
[0, zy, 0],
[0, 0, 1]])
matrix = np.dot(matrix, zoom_matrix)
h, w = _x.shape[_row_index], _x.shape[_col_index]
transformMatrix = transform_matrix_offset_center(matrix, h, w)
_x = apply_transform(_x, transformMatrix, _channel_index, _fill_mode, _cval)
if _y is not None:
_y = apply_transform(_y, transformMatrix, _channel_index, _fill_mode, _cval)
if _pts is not None:
matrix = np.array([[1, 0, 0],
[0, 1, 0],
[0, 0, 1]])
if _rot != 0:
rotation_matrix = np.array([[np.cos(theta), -np.sin(theta), 0],
[np.sin(theta), np.cos(theta), 0],
[0, 0, 1]])
matrix = np.dot(matrix, rotation_matrix)
if _trans != 0:
translation_matrix = np.array([[1, 0, -tx],
[0, 1, -ty],
[0, 0, 1]])
matrix = np.dot(translation_matrix, matrix)
if _shear != 0:
shear_matrix = np.array([[np.cos(shear), 0, 0],
[-np.sin(shear), 1, 0],
[0, 0, 1]])
shear_matrix = np.linalg.inv(shear_matrix) # TODO write the inverse properly without computing it
matrix = np.dot(shear_matrix, matrix)
if _zoom != 0:
zoom_matrix = np.array([[1./zy, 0, 0],
[0, 1./zx, 0],
[0, 0, 1]])
matrix = np.dot(zoom_matrix, matrix)
transformMatrix = transform_matrix_offset_center(matrix, h, w)
_pts = np.dot(_pts, transformMatrix.T)
return _x, _y, _pts
# if _nbData == -1 then the function generates data indefinitely
def GenerateImageOnTheFly(_createImageXFct, _GetIdFromNeedFct, _X, _previousImages, _Y, _outputTrain, _previousOutput, _setFiles, _needSetX, _needSetY, _batchSize, _epochSize , _nbData, _keepPctOriginal=0.5, _trans=TRANSLATION_FACTOR, _rot=ROTATION_FACTOR, _zoom=ZOOM_FACTOR, _shear=SHEAR_FACTOR, _elastix=VECTOR_FIELD_SIGMA, _intensity=INTENSITY_FACTOR, _hflip=True, _vflip=True, _3Dshape=False):
shapeX = _X.shape
shapeY = _Y.shape
nbChannels = shapeX[1] + _previousImages + _previousOutput
currentBatch = 0
imgX = np.empty((nbChannels, shapeX[2], shapeX[3]), dtype=np.float32)
blackImage = np.zeros((shapeX[2], shapeX[3]), dtype=np.float32)
if _nbData != -1:
seedList = np.random.randint(999999, size=_nbData)
nbSeedPerImage = _nbData//_epochSize
# debug = 0
while 1:
# if _nbData != -1:
# shuffleList = np.arange(_nbData)
# else:
# shuffleList = np.arange(_epochSize)
shuffleList = np.arange(_epochSize)
np.random.shuffle(shuffleList)
for i in range(_epochSize):
if currentBatch == 0:
if _3Dshape == False:
if IS_CHANNELS_FIRST == True:
x = np.empty((_batchSize, nbChannels, shapeX[2], shapeX[3]), dtype=np.float32)
else:
x = np.empty((_batchSize, shapeX[2], shapeX[3], nbChannels), dtype=np.float32)
else:
x = np.empty((_batchSize, 1, nbChannels, shapeX[2], shapeX[3]), dtype=np.float32)
if IS_CHANNELS_FIRST == True:
y = np.empty((_batchSize, shapeY[1], shapeY[2], shapeY[3]), dtype=np.float32)
else:
y = np.empty((_batchSize, shapeY[2], shapeY[3], shapeY[1]), dtype=np.float32)
# if _nbData != -1:
# rndStateNp = np.random.get_state()
# rndState = random.getstate()
# # np.random.seed(int(seedList[shuffleList[i]]))
# # random.seed(int(seedList[shuffleList[i]]))
# np.random.seed(int(seedList[shuffleList[i]*nbSeedPerImage + random.randint(0, nbSeedPerImage - 1)]))
# random.seed(int(seedList[shuffleList[i]*nbSeedPerImage + random.randint(0, nbSeedPerImage - 1)]))
# imgId = shuffleList[i]%len(_setFiles)
imgId = shuffleList[i]
_createImageXFct(imgX, imgId, _X, _previousImages, _outputTrain, _previousOutput, _setFiles, _needSetX, _needSetY, blackImage)
imgY = _Y[_GetIdFromNeedFct(imgId, _setFiles, _needSetY),...]
if random.random() > _keepPctOriginal:
# if False:
if _nbData != -1:
rndStateNp = np.random.get_state()
rndState = random.getstate()
np.random.seed(int(seedList[shuffleList[i]*nbSeedPerImage + random.randint(0, nbSeedPerImage - 1)]))
random.seed(int(seedList[shuffleList[i]*nbSeedPerImage + random.randint(0, nbSeedPerImage - 1)]))
if _intensity != 0:
imgX = random_channel_shift(imgX, _intensity)
imgX, imgY, _ = ApplyRandomTransformations(imgX, imgY, None, _trans, _rot, _zoom, _shear, _elastix)
if _hflip == True and random.random() > 0.5:
imgX = flip_axis(imgX, 1)
imgY = flip_axis(imgY, 1)
if _vflip == True and random.random() > 0.5:
imgX = flip_axis(imgX, 2)
imgY = flip_axis(imgY, 2)
if _nbData != -1:
np.random.set_state(rndStateNp)
random.setstate(rndState)
if IS_CHANNELS_FIRST == True:
x[currentBatch][...] = imgX[...]
y[currentBatch][...] = imgY[...]
else:
imgXtmp = np.rollaxis(imgX, 0, 3)
imgYtmp = np.rollaxis(imgY, 0, 3)
x[currentBatch][...] = imgXtmp[...]
y[currentBatch][...] = imgYtmp[...]
currentBatch = currentBatch + 1
if currentBatch == _batchSize:
currentBatch = 0
# if debug <= 4: # debug
# SavePickle([x,y], "generated/testPickle" + str(debug))
# debug = debug + 1
yield (x, y)
elif i == _epochSize - 1:
yield (x[:currentBatch], y[:currentBatch])
currentBatch = 0
def GenerateValidationOnTheFly(_createImageXFct, _GetIdFromNeedFct, _X, _previousImages, _Y, _outputTest, _previousOutput, _setFiles, _needSetX, _needSetY, _batchSize=None, _3Dshape=False):
shapeX = _X.shape
shapeY = _Y.shape
nbChannels = shapeX[1] + _previousImages + _previousOutput
# currentBatch = 0
imgX = np.empty((nbChannels, shapeX[2], shapeX[3]), dtype=np.float32)
blackImage = np.zeros((shapeX[2], shapeX[3]), dtype=np.float32)
# debug = 0
while 1:
for imgId in range(0, len(_setFiles)):
# if currentBatch == 0:
# if _3Dshape == False:
# if IS_CHANNELS_FIRST == True:
# x = np.empty((_batchSize, nbChannels, shapeX[2], shapeX[3]), dtype=np.float32)
# else:
# x = np.empty((_batchSize, shapeX[2], shapeX[3], nbChannels), dtype=np.float32)
# else:
# x = np.empty((_batchSize, 1, nbChannels, shapeX[2], shapeX[3]), dtype=np.float32)
# if IS_CHANNELS_FIRST == True:
# y = np.empty((_batchSize, shapeY[1], shapeY[2], shapeY[3]), dtype=np.float32)
# else:
# y = np.empty((_batchSize, shapeY[2], shapeY[3], shapeY[1]), dtype=np.float32)
_createImageXFct(imgX, imgId, _X, _previousImages, _outputTest, _previousOutput, _setFiles, _needSetX, _needSetY, blackImage)
imgY = _Y[_GetIdFromNeedFct(imgId, _setFiles, _needSetY),...]
if IS_CHANNELS_FIRST == True:
# x[currentBatch][...] = imgX[...]
# y[currentBatch][...] = imgY[...]
yield (np.copy(imgX), np.copy(imgY))
else:
imgXtmp = np.rollaxis(imgX, 0, 3)
imgYtmp = np.rollaxis(imgY, 0, 3)
# x[currentBatch][...] = imgXtmp[...]
# y[currentBatch][...] = imgYtmp[...]
yield (imgXtmp, imgYtmp)
# currentBatch = currentBatch + 1
# if currentBatch == _batchSize:
# currentBatch = 0
# if debug <= 4: # debug
# SavePickle([x,y], "generated/testPickle33" + str(debug))
# debug = debug + 1
# yield (x, y)
# elif imgId == len(_setFiles) - 1:
# yield (x[:currentBatch], y[:currentBatch])
# currentBatch = 0
|
|
from twisted.internet import defer, reactor
from twisted.trial import unittest
from twisted.internet import base
from twisted.python import failure, filepath
base.DelayedCall.debug = True
from txtemplate import templates
from txtemplate import clearsilver
TEMPLATE_DIR = filepath.FilePath(__file__).parent().parent().child("templates").path
class TestClearsilverTemplates(unittest.TestCase):
if clearsilver.CS is None:
skip = "Skipping Clearsilver tests because Clearsilver is not installed."
def setUp(self):
self.c = {"request":"foo",
"segments":["hello","world"],
"message":"error message",
}
self.loader = templates.ClearsilverTemplateLoader(
TEMPLATE_DIR
)
class TestClass(object):
someProperty = "Some Property Value"
def __str__(self):
return self.someProperty
def test_loader_failure(self):
self.assertRaises(templates.TemplateException, self.loader.load, "filethatdoesnotexist.cst")
def test_loader_success(self):
template = self.loader.load("test.cst")
self.assertTrue(isinstance(template, templates.ClearsilverTemplateAdapter))
def test_render_blocking(self):
template = self.loader.load("test.cst")
xhtml = template.render_blocking(**self.c)
self.assertTrue(xhtml.find("foo") !=-1)
def test_render_blocking_list(self):
template = self.loader.load("test.cst")
xhtml = template.render_blocking(**self.c)
self.assertTrue(xhtml.find("<div>hello</div>") !=-1)
def test_render_blocking_list_objects(self):
template = self.loader.load("test.cst")
self.c["segments"][0] = self.TestClass()
xhtml = template.render_blocking(**self.c)
self.assertTrue(xhtml.find("<div>Some Property Value</div>") !=-1)
def test_render_blocking_list_of_lists(self):
template = self.loader.load("test.cst")
self.c["segments"][0] = ["h","e","l","l","o"]
xhtml = template.render_blocking(**self.c)
self.assertTrue(xhtml.find("<p>h</p>") !=-1)
def test_render_blocking_dict(self):
template = self.loader.load("test.cst")
self.c['dict'] = {"test":"1 key",
"test_list":[1,2,3],
"test_dict":{"nested":"2 key",
"nested_list":[4,5,6],
},
}
xhtml = template.render_blocking(**self.c)
self.assertTrue(xhtml.find("<p>1 key</p>") !=-1)
self.assertTrue(xhtml.find("<p>2 key</p>") !=-1)
def test_render_blocking_hdf(self):
template = self.loader.load("test.cst")
hdf = clearsilver.hdfFromKwargs(**self.c)
xhtml = template.render_blocking(hdf=hdf)
self.assertTrue(xhtml.find("foo") !=-1)
def _success(self, result):
self.assertTrue(result.find("</html>")!=-1)
def test_render_success(self):
template = self.loader.load("test.cst")
d = template.render(**self.c)
        d.addCallback(self._success)
        return d
class TestGenshiTemplates(unittest.TestCase):
if templates.genshitemplate is None:
skip = "Skipping Genshi tests because genshi is not installed."
def setUp(self):
self.c = {"request":"foo",
"segments":["hello","world"],
"message":"error message"
}
self.loader = templates.GenshiTemplateLoader(TEMPLATE_DIR)
def test_loader_failure(self):
self.assertRaises(templates.TemplateException, self.loader.load, "filethatdoesnotexist.xhtml")
def test_loader_success(self):
template = self.loader.load("error.xhtml")
self.assertTrue(isinstance(template, templates.GenshiTemplateAdapter))
def test_render_blocking(self):
template = self.loader.load("error.xhtml")
xhtml = template.render_blocking(**self.c)
self.assertTrue(xhtml.find("foo") !=-1)
def test_failed(self):
template = self.loader.load("error.xhtml")
#self.assertRaises(templates.TemplateException, template._failed, failure.Failure(ValueError))
f = failure.Failure(ValueError)
errorMessage = "Failed to generate template"
self.assertTrue(errorMessage in template._failed(f))
def test_rendered(self):
template = self.loader.load("error.xhtml")
template._buffer.write("Some string o' crap!")
result = template._rendered(None)
self.assertEqual(result, "Some string o' crap!")
def _success(self, result):
self.assertTrue(result.find("</html>")!=-1)
def test_populateBuffer(self):
template = self.loader.load("error.xhtml")
template._deferred = defer.Deferred()
template._deferred.addCallbacks(template._rendered, template._failed)
template._deferred.addCallback(self._success)
template._stream = template.template.generate(**self.c)
s = template._stream.serialize()
reactor.callLater(0.01, template._populateBuffer, s, templates.POPULATE_N_STEPS)
return template._deferred
def test_render(self):
template = self.loader.load("error.xhtml")
d = template.render(**self.c)
d.addCallback(self._success)
return d
class TestJinja2Templates(unittest.TestCase):
if templates.jinja2 is None:
skip = "Skipping Jinja2 tests because jinja2 is not installed."
def setUp(self):
self.c = {"request":"foo",
"segments":["hello","world"],
"message":"error message"
}
self.loader = templates.Jinja2TemplateLoader(TEMPLATE_DIR)
def test_loader_failure(self):
self.assertRaises(templates.TemplateException, self.loader.load, "filethatdoesnotexist.jinja2")
def test_loader_success(self):
template = self.loader.load("test.jinja2")
self.assertTrue(isinstance(template, templates.Jinja2TemplateAdapter))
def test_render_blocking(self):
template = self.loader.load("test.jinja2")
html = template.render_blocking(**self.c)
self.assertTrue(html.find("foo") !=-1)
def _success(self, result):
self.assertTrue(result.find("foo")!=-1)
def test_render(self):
template = self.loader.load("test.jinja2")
d = template.render(**self.c)
d.addCallback(self._success)
return d
class TestStringTemplates(unittest.TestCase):
def setUp(self):
self.c = {"request":"foo",
"segments":["hello","world"],
"message":"error message",
}
self.loader = templates.StringTemplateLoader(
TEMPLATE_DIR
)
class TestClass(object):
someProperty = "Some Property Value"
def __str__(self):
return self.someProperty
def test_loader_failure(self):
self.assertRaises(templates.TemplateException, self.loader.load, "filethatdoesnotexist.cst")
def test_loader_success(self):
template = self.loader.load("test.txt")
self.assertTrue(isinstance(template, templates.StringTemplateAdapter))
def test_render_blocking(self):
template = self.loader.load("test.txt")
xhtml = template.render_blocking(**self.c)
self.assertTrue(xhtml.find("foo") !=-1)
def _success(self, result):
self.assertTrue(result.find("</html>")!=-1)
def test_render_success(self):
template = self.loader.load("test.txt")
d = template.render(**self.c)
        d.addCallback(self._success)
        return d
|
|
###
# Copyright (c) 2009, Juju, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of the author of this software nor the names of
# the contributors to the software may be used to endorse or
# promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###
# start stop restart reload kill
import os
import sys
import time
import errno
import signal
import util
import compat
from util import error
from OrderedDict import OrderedDict
def waitFor(seconds):
until = time.time() + seconds
while time.time() < until:
time.sleep(1)
class InvalidConfiguration(Exception):
pass
class Command(object):
def __init__(self, config, name=None):
if name is None:
name = self.__class__.__name__.lower()
self.name = name
self.config = config
def checkConfig(self, config):
return # No checking performed by default.
@classmethod
def help(cls):
return cls.__doc__
def run(self, args, environ):
raise NotImplementedError
def checkProcessAlive(self, pid=None):
if pid is None:
pid = self.getPidFromFile()
if pid is None:
return 0
return util.checkProcessAlive(pid)
def getPidFromFile(self, pidfile=None):
if pidfile is None:
pidfile = self.config.options.pidfile()
if pidfile is None:
error('finitd.options.pidfile is not configured.')
return util.getPidFromFile(pidfile)
def writePidfile(self, pid, pidfile=None):
if pidfile is None:
pidfile = self.config.options.pidfile()
if pidfile is not None:
fp = open(pidfile, 'w')
try:
fp.write('%s\n' % pid)
finally:
fp.close()
def removePidfile(self, pidfile=None):
if pidfile is None:
pidfile = self.config.options.pidfile()
if pidfile is not None:
os.remove(pidfile)
def chdir(self):
try:
os.chdir(self.config.child.chdir())
except EnvironmentError, e:
error('chdir to %r failed: %s' % (self.config.child.chdir(), e))
def chroot(self):
if self.config.child.chroot():
os.chroot(self.config.child.chdir())
def umask(self):
os.umask(self.config.child.umask())
def setuid(self):
uid = self.config.child.setuid()
if uid is not None:
os.setuid(uid)
def setgid(self):
gid = self.config.child.setgid()
if gid is not None:
os.setgid(gid)
def execute(self, environ, command=None):
if command is None:
command = self.config.child.command()
os.execle('/bin/sh', 'sh', '-c', 'exec ' + command, environ)
class start(Command):
"""Starts the configured child process."""
def checkConfig(self, config):
if config.options.pidfile() is None:
raise InvalidConfiguration('finitd.options.pidfile must be configured.')
if config.watcher.restart() and not config.watcher.wait():
raise InvalidConfiguration('finitd.watcher.wait must be set if '
'finitd.watcher.restart is set.')
if config.child.setuid() and os.getuid():
raise InvalidConfiguration('You must be root if finitd.child.setuid is set.')
if config.child.setgid() and os.getuid():
raise InvalidConfiguration('You must be root if finitd.child.setgid is set.')
def run(self, args, environ):
pid = self.checkProcessAlive()
if pid:
# (the exit code used here matches start-stop-daemon)
error("""Process appears to be alive at pid %s. If this is not the process
you're attempting to start, remove the pidfile %r and start again.""" %
(pid, self.config.options.pidfile()), code=1)
# Before we fork, we replace sys.stdout/sys.stderr with sysloggers
sys.stdout = util.SyslogFile()
sys.stderr = util.SyslogFile(util.SyslogFile.LOG_ERR)
pid = os.fork()
if pid:
os._exit(0)
# Set a new session id.
sid = os.setsid()
if sid == -1:
error('setsid failed') # So apparently errno isn't available to Python...
self.chdir()
self.chroot()
# Close open files before going into watcher loop.
try:
# "Borrowed" from the subprocess module ;)
MAXFD = os.sysconf('SC_OPEN_MAX')
except:
MAXFD = 256
os.closerange(0, MAXFD)
fd = os.open(self.config.child.stdin(), os.O_CREAT | os.O_RDONLY)
assert fd == 0, 'stdin fd = %r' % fd
fd = os.open(self.config.child.stdout(), os.O_CREAT | os.O_WRONLY | os.O_APPEND)
assert fd == 1, 'stdout fd = %r' % fd
if self.config.child.stderr() != self.config.child.stdout():
fd = os.open(self.config.child.stderr(),
os.O_CREAT | os.O_WRONLY | os.O_APPEND)
assert fd == 2, 'stderr fd = %r' % fd
else:
os.dup2(1, 2)
lastRestart = 0
restartWait = self.config.watcher.restart.wait()
watcherPid = os.getpid()
def log(s):
print 'Watcher[%s]: %s' % (watcherPid, s)
while time.time() > lastRestart + restartWait:
lastRestart = time.time()
log('starting process')
pid = os.fork() # This spawns what will become the actual child process.
if pid:
def sigusr1(signum, frame):
log('received SIGUSR1, removing watcher pidfile and exiting')
# XXX All we really need to do is configure not to restart, right?
# Originally I removed both pidfiles here, but it's needed in order
# to kill the child process, which must necessarily happen after the
# watcher exits, if the watcher is configured to restart the child.
self.removePidfile(self.config.watcher.pidfile())
os._exit(0)
signal.signal(signal.SIGUSR1, sigusr1)
log('child process started at pid %s' % pid)
self.writePidfile(pid)
if self.config.watcher.wait():
self.writePidfile(watcherPid, self.config.watcher.pidfile())
(_, status) = os.waitpid(pid, 0)
log('process exited with status %s' % status)
# Remove pidfile when child has exited.
self.removePidfile()
if self.config.watcher.restart() and status != 0:
command = self.config.watcher.restart.command()
if command:
log('running %r before restart' % command)
status = os.system(command)
if status:
                                log('%r exited with nonzero status %s, exiting' % (command, status))
break
continue # Not strictly necessary, but we're way nested here.
else:
break
else:
break
else:
# This is the child process, pre-exec.
self.umask()
self.setgid()
self.setuid()
# Now we're ready to actually spawn the process.
self.execute(environ)
self.removePidfile(self.config.watcher.pidfile())
log('exiting')
os._exit(0)
class debug(start): # subclassing start to inherit checkConfig
"""Starts the configured child process without daemonizing or redirecting
stdin/stdout/stderr, for debugging problems with starting the process."""
def run(self, args, environ):
self.chdir()
self.chroot()
self.umask()
self.setgid()
self.setuid()
self.execute(environ)
class stop(Command):
"""Stops the running child process by sending it SIGTERM."""
def checkConfig(self, config):
if not config.options.pidfile():
            raise InvalidConfiguration('Cannot stop the process without a configured '
'finitd.options.pidfile.')
if config.commands.stop.command() and config.commands.stop.signal():
raise InvalidConfiguration('finitd.commands.stop.command and '
'finitd.commands.stop.signal cannot be '
'configured simultaneously.')
def run(self, args, environ):
self.chdir() # If the pidfile is a relative pathname, it's relative to here.
pid = self.checkProcessAlive()
if pid:
if self.config.watcher.pidfile() and self.config.watcher.restart():
watcherPid = self.getPidFromFile(self.config.watcher.pidfile())
if watcherPid:
# Tell the watcher to remove the pidfile and exit.
os.kill(watcherPid, signal.SIGUSR1)
time.sleep(1) # Wait to make sure the watcher exits.
if self.config.commands.stop.command():
self.execute(environ, self.config.commands.stop.command())
else:
os.kill(pid, self.config.commands.stop.signal())
else:
print 'Process is not running.'
sys.exit(1) # to match start-stop-daemon
class restart(Command):
"""Restarts the process. Equivalent to `stop` followed by `start`"""
def run(self, args, environ):
stop(self.config).run([], environ)
waitFor(self.config.options.restartWaitTime())
pid = self.checkProcessAlive()
if pid:
error('Process is still running at pid %s' % pid)
start(self.config).run([], environ)
class kill(Command):
"""Attempts to stop the process ordinarily, but if that fails, sends the process
SIGKILL."""
def run(self, args, environ):
stop(self.config).run([], environ)
waitingUntil = time.time() + self.config.options.killWaitTime()
while time.time() < waitingUntil and self.checkProcessAlive():
time.sleep(1)
if self.checkProcessAlive():
os.kill(self.getPidFromFile(), signal.SIGKILL)
time.sleep(self.config.options.restartWaitTime())
if self.checkProcessAlive():
error('Cannot kill process %s' % self.getPidFromFile())
class status(Command):
"""Returns whether the process is alive or not. Prints a message and exits with
error status 0 if the process exists, with error status 1 if the process does not
exist."""
def run(self, args, environ):
pid = self.checkProcessAlive()
if pid:
print 'Process is running at pid %s' % pid
sys.exit(0)
else:
print 'Process is not running.'
sys.exit(1)
class annotate(Command):
"""Annotates the given configuration file and outputs it to stdout. Useful with
/dev/null as a configuration file just to output an annotated configuration file
ready for modification."""
def run(self, args, environ):
self.config.writefp(sys.stdout)
class ArbitraryCommand(Command):
def __init__(self, config, name):
self.command = config.commands.arbitrary.get(name)
Command.__init__(self, config, name)
def checkConfig(self, config):
if not self.command.command():
raise InvalidConfiguration('finitd.commands.%s.command must be set.'
% self.name)
def help(self):
return self.command.help() or ''
def run(self, args, environ):
self.chdir()
self.chroot()
os.system(self.command.command())
#self.execute(environ, self.command.command())
commands = [
'start',
'stop',
'kill',
'restart',
'status',
'debug',
'annotate',
]
|
|
import random
import hashlib
from typing import Sequence, List
from django.conf import settings
NAMES = [
"3_d_man",
"alars",
"aardwolf",
"abdul_alhazred",
"abe_brown",
"abigail_brand",
"abner_jenkins",
"abner_little",
"abominable_snowman",
"abomination",
"abominatrix",
"abraham_cornelius",
"abraxas",
"absalom",
"absorbing_man",
"abyss",
"access",
"achebe",
"achelous",
"achilles",
"acrobat",
"adam_ii",
"adam_warlock",
"adam_x",
"adaptoid",
"administrator",
"adonis",
"adrenazon",
"adri_nital",
"adrian_corbo",
"adrian_toomes",
"adrienne_frost",
"adversary",
"advisor",
"aegis",
"aelfyre_whitemane",
"aero",
"aftershock",
"agamemnon",
"agamotto",
"agatha_harkness",
"aged_genghis",
"agent",
"agent_axis",
"agent_cheesecake",
"agent_x",
"agent_zero",
"aggamon",
"aginar",
"agon",
"agony",
"agron",
"aguja",
"ahab",
"ahmet_abdol",
"ahura",
"air_walker",
"airborne",
"aireo",
"airstrike",
"ajak",
"ajax",
"ajaxis",
"akasha",
"akhenaten",
"al_mackenzie",
"alaris",
"albert",
"albino",
"albion",
"alchemy",
"alcmena",
"aldebron",
"aleksander_lukin",
"aleksei_sytsevich",
"aleta_ogord",
"alex",
"alex_hayden",
"alex_power",
"alex_wilder",
"alexander_bont",
"alexander_goodwin_pierce",
"alexander_lexington",
"alexander_summers",
"alfie_omeggan",
"algrim_the_strong",
"alibar",
"alicia_masters",
"alistair_smythe",
"alistaire_stuart",
"aliyah_bishop",
"alkhema",
"all_american",
"allato",
"allison_blaire",
"alpha_ray",
"alpha_the_ultimate_mutant",
"alyosha_kravinoff",
"alysande_stuart",
"alyssa_moy",
"amahl_farouk",
"amalgam",
"amanda_sefton",
"amatsu_mikaboshi",
"amazon",
"amber_hunt",
"amelia_voght",
"amergin",
"american_ace",
"american_dream",
"american_eagle",
"american_samurai",
"americop",
"ameridroid",
"amiko_kobayashi",
"amina_synge",
"aminedi",
"ammo",
"amphibian",
"amphibion",
"amphibius",
"amun",
"anaconda",
"anais",
"analyzer",
"anarchist",
"ancient_one",
"andreas_von_strucker",
"andrew_chord",
"andrew_gervais",
"android_man",
"andromeda",
"anelle",
"angar_the_screamer",
"angel",
"angel_dust",
"angel_face",
"angel_salvadore",
"angela_cairn",
"angela_del_toro",
"angelica_jones",
"angelo_unuscione",
"angler",
"ani_mator",
"animus",
"ankhi",
"annalee",
"anne_marie_cortez",
"annex",
"annie_ghazikhanian",
"annihilus",
"anole",
"anomalito",
"anomaloco",
"anomaly",
"answer",
"ant_man",
"anthropomorpho",
"anti_cap",
"anti_phoenix_force",
"anti_venom",
"anti_vision",
"antimatter",
"antiphon_the_overseer",
"antonio",
"anubis",
"anvil",
"anything",
"apache_kid",
"apalla",
"ape",
"ape_man",
"ape_x",
"apocalypse",
"apollo",
"apryll",
"aquarian",
"aquarius",
"aqueduct",
"arabian_knight",
"arachne",
"aragorn",
"araki",
"aralune",
"arana",
"arc",
"arcade",
"arcademan",
"arcanna",
"archangel",
"archenemy",
"archer",
"archie_corrigan",
"archimage",
"architect",
"arclight",
"arcturus_rann",
"ardina",
"ardroman",
"arena",
"ares",
"argo",
"argus",
"ariann",
"arides",
"ariel",
"aries",
"arishem_the_judge",
"arize",
"arizona_annie",
"arkady_rossovich",
"arkon",
"arkus",
"arlette_truffaut",
"arlok",
"armadillo",
"armageddon",
"armand_martel",
"armor",
"armory",
"arnim_zola",
"arno_stark",
"arranger",
"arsenal",
"arsenic",
"artemis",
"arthur_parks",
"artie",
"artie_maddicks",
"arturo_falcones",
"asbestos_lady",
"asbestos_man",
"ashcan",
"asmodeus",
"asp",
"assassin",
"asteroth",
"astra",
"astrid_bloom",
"astron",
"astronomer",
"asylum",
"atalanta",
"atalon",
"athena",
"atlas",
"atleza",
"atom_bob",
"atom_smasher",
"att_lass",
"attuma",
"atum",
"aunt_may_parker",
"auntie_freeze",
"auric",
"aurora",
"authority",
"autolycus",
"avalanche",
"avarrish",
"awesome_android",
"axum",
"azazel",
"baal",
"balder",
"balor",
"balthakk",
"bandit",
"banshee",
"bantam",
"baphomet",
"barbarus",
"barnacle",
"baron_blood",
"baron_brimstone",
"baron_macabre",
"baron_mordo",
"baron_samedi",
"baron_strucker",
"baron_von_blitzschlag",
"baron_zemo",
"baroness_blood",
"barracuda",
"bart_hamilton",
"base",
"basil_sandhurst",
"basilisk",
"bast",
"bastion",
"batragon",
"batroc_the_leaper",
"battering_ram",
"battleaxe",
"battlestar",
"battletide",
"batwing",
"beast",
"beautiful_dreamer",
"bedlam",
"bedlam_ii",
"beetle",
"beetle_ii",
"behemoth",
"bela",
"belasco",
"belathauzer",
"bella_donna",
"ben_parker",
"ben_reilly",
"ben_urich",
"benazir_kaur",
"benedict_kine",
"bengal",
"benjamin_jacob_grimm",
"bennet_du_paris",
"benny_beckley",
"bentley_wittman",
"bereet",
"berzerker",
"bes",
"beta_ray_bill",
"bethany_cabe",
"betty_brant",
"betty_brant_leeds",
"betty_ross_banner",
"bevatron",
"beyonder",
"bi_beast",
"bible_john",
"big_bertha",
"big_man",
"big_wheel",
"bill_foster",
"binary",
"bird_brain",
"bird_man",
"bishop",
"bison",
"bizarnage",
"black_bolt",
"black_box",
"black_cat",
"black_crow",
"black_death",
"black_dragon",
"black_fox",
"black_goliath",
"black_jack_tarr",
"black_king",
"black_knight",
"black_lama",
"black_mamba",
"black_marvel",
"black_panther",
"black_queen",
"black_talon",
"black_tarantula",
"black_tom_cassidy",
"black_widow",
"blackbird",
"blackheart",
"blackheath",
"blacklash",
"blackout",
"blackwing",
"blackwulf",
"blade",
"blaquesmith",
"blastaar",
"blaze",
"blazing_skull",
"blind_faith",
"blind_justice",
"blindside",
"blindspot",
"bling",
"blink",
"blistik",
"blitziana",
"blitzkrieger",
"blizzard",
"blizzard_ii",
"blob",
"blockbuster",
"bloke",
"blonde_phantom",
"blood_brothers",
"blood_rose",
"blood_spider",
"bloodaxe",
"bloodhawk",
"bloodlust",
"bloodlust_ii",
"bloodscream",
"bloodshed",
"bloodsport",
"bloodstorm",
"bloodtide",
"bloodwraith",
"blowhard",
"blue_bullet",
"blue_diamond",
"blue_marvel",
"blue_shield",
"blue_streak",
"blur",
"bob",
"bob_diamond",
"bobster",
"bogeyman",
"bombshell",
"boneyard",
"bonita_juarez",
"boobytrap",
"book",
"boom_boom",
"boom_boy",
"boomer",
"boomerang",
"boomslang",
"boost",
"bora",
"bounty",
"bounty_hunter",
"bova",
"box",
"box_iv",
"brain_cell",
"brain_drain",
"brain_child",
"brainchild",
"bram_velsing",
"brass",
"bres",
"brian_braddock",
"brian_falsworth",
"brigade",
"briquette",
"brother_nature",
"brother_tode",
"brother_voodoo",
"brothers_grimm",
"bruiser",
"brunnhilda",
"brutacus",
"brute_i",
"brute_ii",
"brute_iii",
"brynocki",
"bucky",
"bucky_iii",
"bug",
"bulldozer",
"bullet",
"bullseye",
"burner",
"burstarr",
"bushman",
"bushmaster",
"bushwacker",
"butterball",
"buzz",
"buzzard",
"byrrah",
"caber",
"cable",
"cadaver",
"cagliostro",
"caiera",
"caiman",
"cain",
"cain_marko",
"caleb_alexander",
"caliban",
"callisto",
"calvin_rankin",
"calypso",
"cameron_hodge",
"canasta",
"cancer",
"candra",
"cannonball",
"cannonball_i",
"cap_n_hawk",
"caprice",
"capricorn",
"captain_america",
"captain_atlas",
"captain_barracuda",
"captain_britain",
"captain_fate",
"captain_germany",
"captain_marvel",
"captain_omen",
"captain_savage",
"captain_uk",
"captain_ultra",
"captain_universe",
"captain_wings",
"captain_zero",
"cardiac",
"cardinal",
"caregiver",
"caretaker",
"carl_crusher_creel",
"carlos_lobo",
"carmella_unuscione",
"carmilla_black",
"carnage",
"carnivore",
"carolyn_parmenter",
"carolyn_trainer",
"carrie_alexander",
"carrion",
"carter_ghazikhanian",
"cassandra_nova",
"cassie_lang",
"cassiopea",
"cat",
"cat_man",
"catiana",
"cayman",
"cecelia_reyes",
"cecilia_reyes",
"celestial_madonna",
"centennial",
"centurion",
"centurious",
"centurius",
"cerberus",
"cerebra",
"cerise",
"cessily_kincaid",
"cethlann",
"chod",
"chaka",
"challenger",
"chamber",
"chameleon",
"champion_of_the_universe",
"chan_luichow",
"chance",
"changeling",
"chaos",
"charcoal",
"charles_xavier",
"charlie_27",
"charon",
"chase_stein",
"cheetah",
"chemistro",
"chen_l",
"chi_demon",
"chief_examiner",
"chimera",
"chloe_tran",
"choice",
"chondu_the_mystic",
"christopher_summers",
"chrome",
"chronos",
"chthon",
"chtylok",
"citizen_v",
"claire_voyant",
"claudette_st_croix",
"clea",
"clearcut",
"cletus_kasady",
"clint_barton",
"clive",
"cloak",
"cloud",
"cloud_9",
"clown",
"coach",
"coachwhip",
"cobalt_man",
"cobra",
"cody_mushumanski_gun_man",
"cold_war",
"coldblood",
"coldfire",
"collective_man",
"collector",
"colleen_wing",
"colonel",
"colonel_america",
"colossus",
"comet",
"comet_man",
"commander_kraken",
"commando",
"conan_the_barbarian",
"condor",
"conquer_lord",
"conquest",
"conquistador",
"conrad_josten",
"constrictor",
"contemplator",
"contessa",
"contrary",
"controller",
"copperhead",
"copycat",
"coral",
"cordelia_frost",
"cornelius_van_lunt",
"corona",
"corruptor",
"corsair",
"cottonmouth",
"count_abyss",
"count_nefaria",
"courier",
"cowgirl",
"crazy_eight",
"crime_master",
"crime_buster",
"crimebuster",
"crimson",
"crimson_cavalier",
"crimson_commando",
"crimson_cowl",
"crimson_craig",
"crimson_daffodil",
"crimson_dynamo",
"crimson_dynamo_v",
"crimson_and_the_raven",
"crippler",
"crooked_man",
"crossbones",
"crossfire",
"crown",
"crucible",
"crusader",
"crusher",
"crystal",
"curtis_connors",
"cutthroat",
"cybele",
"cybelle",
"cyber",
"cyborg_x",
"cyclone",
"cyclops",
"cypher",
"dken",
"dspayre",
"d_man",
"dj",
"dagger",
"daisy_johnson",
"dakimh_the_enchanter",
"dakota_north",
"damballah",
"damion_hellstrom",
"damon_dran",
"dan_ketch",
"danger",
"daniel_rand",
"danielle_moonstar",
"dansen_macabre",
"danvers_carol",
"daredevil",
"dark_angel",
"dark_beast",
"dark_phoenix",
"dark_crawler",
"darkdevil",
"darkhawk",
"darkoth",
"darkstar",
"david_cannon",
"daytripper",
"dazzler",
"deacon_frost",
"dead_girl",
"deadhead",
"deadly_ernest",
"deadpool",
"death",
"death_adder",
"deaths_head",
"deaths_head_ii",
"death_stalker",
"deathbird",
"deathlok",
"deathstroke",
"deathurge",
"deathwatch",
"deborah_ritter",
"debra_whitman",
"decay",
"decay_ii",
"defensor",
"delilah",
"delphi",
"delphine_courtney",
"dementia",
"demiurge",
"demogoblin",
"demogorge_the_god_eater",
"demolition_man",
"derrick_slegers_speed",
"desmond_pitt",
"destiny",
"destroyer",
"destroyer_of_demons",
"devastator",
"devil_dinosaur",
"devil_hunter_gabriel",
"devil_slayer",
"devos_the_devastator",
"diablo",
"diamanda_nero",
"diamond_lil",
"diamondback",
"diamondhead",
"digitek",
"dionysus",
"dirtnap",
"discus",
"dittomaster",
"dmitri_bukharin",
"dmitri_smerdyakov",
"doc_samson",
"doctor_anthony_droom",
"doctor_arthur_nagan",
"doctor_bong",
"doctor_demonicus",
"doctor_doom",
"doctor_dorcas",
"doctor_droom",
"doctor_druid",
"doctor_faustus",
"doctor_glitternight",
"doctor_leery",
"doctor_minerva",
"doctor_octopus",
"doctor_spectrum",
"doctor_strange",
"doctor_sun",
"domina",
"dominic_fortune",
"dominic_petros",
"domino",
"dominus",
"domo",
"don_fortunato",
"donald_donny_gill",
"donald_pierce",
"donald_ritter",
"doop",
"doorman",
"doppelganger",
"doppleganger",
"dorma",
"dormamm",
"double_helix",
"doug_ramsey",
"doug_and_jerry",
"dougboy",
"doughboy",
"douglas_birely",
"douglas_ramsey",
"douglock",
"dr_john_grey",
"dr_lemuel_dorcas",
"dr_marla_jameson",
"dr_otto_octavius",
"dracula",
"dragon_lord",
"dragon_man",
"dragon_of_the_moon",
"dragoness",
"dragonfly",
"dragonwing",
"drax_the_destroyer",
"dreadknight",
"dreadnought",
"dream_weaver",
"dreaming_celestial",
"dreamqueen",
"dredmund_druid",
"dromedan",
"druid",
"druig",
"dum_dum_dugan",
"dusk",
"dust",
"dweller_in_darkness",
"dyna_mite",
"earth_lord",
"earthquake",
"ebenezer_laughton",
"ebon_seeker",
"echo",
"ecstasy",
"ectokid",
"eddie_brock",
"edward_ned_buckman",
"edwin_jarvis",
"eel",
"egghead",
"ego_the_living_planet",
"el_aguila",
"el_muerto",
"elaine_grey",
"elathan",
"electric_eve",
"electro",
"electrocute",
"electron",
"eleggua",
"elektra",
"elektra_natchios",
"elektro",
"elf_with_a_gun",
"elfqueen",
"elias_bogan",
"eliminator",
"elixir",
"elizabeth_betsy_braddock",
"elizabeth_twoyoungmen",
"ellie_phimster",
"elsie_dee",
"elven",
"elysius",
"emil_blonsky",
"emma_frost",
"empath",
"empathoid",
"emplate",
"en_sabah_nur",
"enchantress",
"energizer",
"enforcer",
"enigma",
"ent",
"entropic_man",
"eon",
"epoch",
"equilibrius",
"equinox",
"ereshkigal",
"erg",
"eric_slaughter",
"eric_williams",
"eric_the_red",
"erik_josten",
"erik_killmonger",
"erik_magnus_lehnsherr",
"ernst",
"eros",
"esh",
"eson_the_searcher",
"eternal_brain",
"eternity",
"ethan_edwards",
"eugene_judd",
"ev_teel_urizen",
"evangeline_whedon",
"ever",
"everett_thomas",
"everyman",
"evilhawk",
"executioner",
"exodus",
"exploding_man",
"exterminator",
"ezekiel",
"ezekiel_sims",
"ezekiel_stane",
"fabian_cortez",
"fafnir",
"fagin",
"falcon",
"fallen_one",
"famine",
"fan_boy",
"fandral",
"fang",
"fantasia",
"fantastic_four",
"fantomex",
"farallah",
"fasaud",
"fashima",
"fatale",
"fateball",
"father_time",
"fault_zone",
"fearmaster",
"feedback",
"felicia_hardy",
"feline",
"fenris",
"fenris_wolf",
"fer_de_lance",
"feral",
"feron",
"fever_pitch",
"fight_man",
"fin",
"fin_fang_foom",
"firearm",
"firebird",
"firebolt",
"firebrand",
"firefrost",
"firelord",
"firepower",
"firestar",
"fixer",
"fixx",
"flag_smasher",
"flambe",
"flash_thompson",
"flatman",
"flex",
"flint_marko",
"flubber",
"fly",
"flygirl",
"flying_tiger",
"foggy_nelson",
"fontanelle",
"foolkiller",
"forbush_man",
"force",
"forearm",
"foreigner",
"forge",
"forgotten_one",
"foxfire",
"frank_castle",
"frank_drake",
"frank_payne",
"frank_simpson",
"frankensteins_monster",
"frankie_raye",
"frankie_and_victoria",
"franklin_hall",
"franklin_richards",
"franklin_storm",
"freak",
"freak_of_science",
"freakmaster",
"freakshow",
"fred_myers",
"frederick_slade",
"free_spirit",
"freedom_ring",
"frenzy",
"frey",
"frigga",
"frog_man",
"fury",
"fusion",
"futurist",
"g_force",
"gabe_jones",
"gabriel_summers",
"gabriel_the_air_walker",
"gaea",
"gaia",
"gailyn_bailey",
"galactus",
"galaxy_master",
"gambit",
"gammenon_the_gatherer",
"gamora",
"ganymede",
"gardener",
"gargantua",
"gargantus",
"gargouille",
"gargoyle",
"garokk_the_petrified_man",
"garrison_kane",
"gatecrasher",
"gateway",
"gauntlet",
"gavel",
"gaza",
"gazelle",
"gazer",
"geb",
"gee",
"geiger",
"geirrodur",
"gemini",
"general_orwell_taylor",
"genis_vell",
"george_stacy",
"george_tarleton",
"george_washington_bridge",
"georgianna_castleberry",
"gertrude_yorkes",
"ghaur",
"ghost",
"ghost_dancer",
"ghost_girl",
"ghost_maker",
"ghost_rider",
"ghost_rider_2099",
"ghoul",
"giant_man",
"gibbon",
"gibborim",
"gideon",
"gideon_mace",
"giganto",
"gigantus",
"gin_genie",
"gladiator",
"gladiatrix",
"glamor",
"glenn_talbot",
"glitch",
"glob",
"glob_herman",
"gloom",
"glorian",
"goblin_queen",
"goblyn",
"godfrey_calthrop",
"gog",
"goldbug",
"golden_archer",
"golden_girl",
"golden_oldie",
"goldeneye",
"golem",
"goliath",
"gomi",
"googam",
"gorgeous_george",
"gorgilla",
"gorgon",
"gorilla_girl",
"gorilla_man",
"gorr",
"gosamyr",
"grand_director",
"grandmaster",
"grappler",
"grasshopper",
"grasshopper_ii",
"graviton",
"gravity",
"graydon_creed",
"great_gambonnos",
"great_video",
"green_goblin",
"green_goblin_iv",
"greer_grant",
"greer_grant_nelson",
"gregor_shapanka",
"gregory_gideon",
"gremlin",
"grenade",
"grey_gargoyle",
"grey_king",
"griffin",
"grim_hunter",
"grim_reaper",
"grizzly",
"grog_the_god_crusher",
"gronk",
"grotesk",
"groundhog",
"growing_man",
"guardsman",
"guido_carosella",
"gunthar_of_rigel",
"gwen_stacy",
"gypsy_moth",
"herbie",
"hack",
"hag",
"hairbag",
"halflife",
"halloween_jack",
"hamilton_slade",
"hammer_harrison",
"hammer_and_anvil",
"hammerhead",
"hangman",
"hank_mccoy",
"hank_pym",
"hanna_levy",
"hannah_levy",
"hannibal_king",
"harald_jaekelsson",
"hardcase",
"hardcore",
"hardnose",
"hardshell",
"hardwire",
"hargen_the_measurer",
"harmonica",
"harness",
"harold_happy_hogan",
"harold_h_harold",
"harpoon",
"harpy",
"harrier",
"harry_leland",
"harry_osborn",
"hate_monger",
"haven",
"havok",
"hawkeye",
"hawkeye_ii",
"hawkshaw",
"haywire",
"hazard",
"hazmat",
"headknocker",
"headlok",
"heart_attack",
"heather_cameron",
"hebe",
"hecate",
"hector",
"heimdall",
"heinrich_zemo",
"hela",
"helio",
"hellcat",
"helleyes",
"hellfire",
"hellion",
"hellrazor",
"helmut_zemo",
"henry_hank_mccoy",
"henry_peter_gyrich",
"hensley_fargus",
"hephaestus",
"hepzibah",
"her",
"hera",
"herbert_edgar_wyndham",
"hercules",
"herman_schultz",
"hermes",
"hermod",
"hero",
"hero_for_hire",
"herr_kleiser",
"hideko_takata",
"high_evolutionary",
"high_tech",
"hijacker",
"hildegarde",
"him",
"hindsight_lad",
"hippolyta",
"hisako_ichiki",
"hit_maker",
"hitman",
"hobgoblin",
"hobgoblin_ii",
"hoder",
"hogun",
"holly",
"honcho",
"honey_lemon",
"hood",
"hornet",
"horus",
"howard_the_duck",
"hrimhari",
"hub",
"hugh_jones",
"hulk",
"hulk_2099",
"hulkling",
"human_cannonball",
"human_fly",
"human_robot",
"human_top",
"human_top_ii",
"human_torch",
"human_torch_ii",
"humbug",
"humus_sapien",
"huntara",
"hurricane",
"husk",
"hussar",
"hybrid",
"hybrid_ii",
"hyde",
"hydro",
"hydro_man",
"hydron",
"hyperion",
"hyperkind",
"hyperstorm",
"hypnotia",
"hyppokri",
"isaac",
"icarus",
"iceman",
"icemaster",
"idunn",
"iguana",
"ikaris",
"ikonn",
"ikthalon",
"illusion",
"illyana_rasputin",
"immortus",
"impala",
"imperial_hydra",
"impossible_man",
"impulse",
"in_betweener",
"indech",
"indra",
"inertia",
"infamnia",
"infant_terrible",
"infectia",
"inferno",
"infinity",
"interloper",
"invisible_girl",
"invisible_woman",
"inza",
"ion",
"iridia",
"iron_cross",
"iron_fist",
"iron_lad",
"iron_maiden",
"iron_man",
"iron_man_2020",
"iron_monger",
"ironclad",
"isaac_christians",
"isaiah_bradley",
"isbisa",
"isis",
"ivan_kragoff",
"j_jonah_jameson",
"j2",
"jack_flag",
"jack_frost",
"jack_kirby",
"jack_olantern",
"jack_power",
"jack_of_hearts",
"jack_in_the_box",
"jackal",
"jackdaw",
"jackhammer",
"jackpot",
"jackson_arvad",
"jacob_jake_fury",
"jacqueline_falsworth",
"jacques_duquesne",
"jade_dragon",
"jaeger",
"jaguar",
"jamal_afari",
"james_jimmy_marks",
"james_dr_power",
"james_howlett",
"james_jaspers",
"james_madrox",
"james_proudstar",
"james_rhodes",
"james_sanders",
"jamie_braddock",
"jane_foster",
"jane_kincaid",
"janet_van_dyne",
"jann",
"janus",
"jared_corbo",
"jarella",
"jaren",
"jason",
"jawynn_dueck_the_iron_christian_of_faith",
"jazz",
"jean_dewolff",
"jean_grey",
"jean_grey_summers",
"jean_paul_beaubier",
"jeanne_marie_beaubier",
"jebediah_guthrie",
"jeffrey_mace",
"jekyll",
"jennifer_kale",
"jennifer_walters",
"jens_meilleur_slap_shot",
"jericho_drumm",
"jerome_beechman",
"jerry_jaxon",
"jessica_drew",
"jessica_jones",
"jester",
"jigsaw",
"jim_hammond",
"jimaine_szardos",
"jimmy_woo",
"jocasta",
"joe_cartelli",
"joe_fixit",
"joey_bailey",
"johann_schmidt",
"john_doe",
"john_falsworth",
"john_jameson",
"john_proudstar",
"john_ryker",
"john_sublime",
"john_walker",
"johnny_blaze",
"johnny_ohm",
"johnny_storm",
"jolt",
"jon_spectre",
"jonas_harrow",
"jonathan_john_garrett",
"jonathan_richards",
"jonothon_starsmore",
"jordan_seberius",
"joseph",
"joshua_guthrie",
"joystick",
"jubilee",
"judas_traveller",
"jude_the_entropic_man",
"juggernaut",
"julie_power",
"jumbo_carnation",
"junkpile",
"junta",
"justice",
"justin_hammer",
"justine_hammer",
"ka_zar",
"kaine",
"kala",
"kalu",
"kamal",
"kamo_tharnn",
"kamu",
"kang_the_conqueror",
"kangaroo",
"karen_page",
"karima_shapandar",
"karkas",
"karl_lykos",
"karl_malus",
"karl_mordo",
"karla_sofen",
"karma",
"karnak",
"karnilla",
"karolina_dean",
"karthon_the_quester",
"kasper_cole",
"kate_bishop",
"kate_neville",
"katherine_kitty_pryde",
"katherine_reynolds",
"katie_power",
"katrina_luisa_van_horne",
"kat",
"keen_marlow",
"kehl_of_tauran",
"keith_kilham",
"kem_horkus",
"kenneth_crichton",
"key",
"khaos",
"khonsh",
"khoryphos",
"kiber_the_cruel",
"kick_ass",
"kid_colt",
"kid_nova",
"kiden_nixon",
"kierrok",
"killer_shrike",
"killpower",
"killraven",
"kilmer",
"kimura",
"king_bedlam",
"kingo_sunen",
"kingpin",
"kirigi",
"kirtsyn_perrin_short_stop",
"kismet",
"kismet_deadly",
"kiss",
"kiwi_black",
"kkallakk",
"klrt",
"klaat",
"klaw",
"kleinstocks",
"knickknack",
"kofi_whitemane",
"kogar",
"kohl_harder_boulder_man",
"korath_the_pursuer",
"korg",
"kormok",
"korrek",
"korvac",
"korvus",
"kosmos",
"kraken",
"krakkan",
"krang",
"kraven_the_hunter",
"krista_marwan",
"kristoff_vernard",
"kristoff_von_doom",
"kro",
"krystalin",
"kubik",
"kukulcan",
"kurse",
"kurt_wagner",
"kwannon",
"kyle_gibney",
"kylun",
"kymaera",
"la_lunatica",
"la_nuit",
"lacuna",
"lady_deathstrike",
"lady_jacqueline_falsworth_crichton",
"lady_killer",
"lady_lark",
"lady_lotus",
"lady_mandarin",
"lady_mastermind",
"lady_octopus",
"lament",
"lancer",
"landslide",
"larry_bodine",
"lasher",
"laura_dean",
"layla_miller",
"lazarus",
"leader",
"leap_frog",
"leash",
"lee_forrester",
"leech",
"left_hand",
"left_winger",
"legacy",
"legion",
"leila_davis",
"leir",
"lemuel_dorcas",
"leo",
"leonard_samson",
"leonus",
"letha",
"levan",
"lianda",
"libra",
"lifeforce",
"lifeguard",
"lifter",
"lightbright",
"lighting_rod",
"lightmaster",
"lightspeed",
"lila_cheney",
"lilandra_neramani",
"lilith_the_daughter_of_dracula",
"lin_sun",
"link",
"lionheart",
"live_wire",
"living_brain",
"living_colossus",
"living_diamond",
"living_eraser",
"living_hulk",
"living_laser",
"living_lightning",
"living_monolith",
"living_mummy",
"living_pharaoh",
"living_planet",
"living_totem",
"living_tribunal",
"liz_allan",
"lizard",
"llan_the_sorcerer",
"lloigoroth",
"llyra",
"llyron",
"loa",
"lockdown",
"lockheed",
"lockjaw",
"locksmith",
"locus",
"locust",
"lodestone",
"logan",
"loki",
"longneck",
"longshot",
"lonnie_thompson_lincoln",
"looter",
"lord_chaos",
"lord_dark_wind",
"lord_pumpkin",
"lorelei",
"lorelei_ii",
"lorelei_travis",
"lorna_dane",
"lorvex",
"loss",
"louise_mason",
"lucas_brand",
"luchino_nefaria",
"lucifer",
"ludi",
"luke_cage",
"luna",
"lunatica",
"lunatik",
"lupa",
"lupo",
"lurking_unknown",
"lyja",
"lynx",
"m",
"m_twins",
"mn_e_ultraverse",
"modam",
"modok",
"mac_gargan",
"mach_iv",
"machine_man",
"machine_teen",
"machinesmith",
"mad_dog_rassitano",
"mad_jack",
"mad_jim_jaspers",
"mad_thinker",
"mad_thinkers_awesome_android",
"mad_dog",
"madam_slay",
"madame_hydra",
"madame_macevil",
"madame_masque",
"madame_menace",
"madame_web",
"madcap",
"madeline_joyce",
"madelyne_pryor",
"madison_jeffries",
"maelstrom",
"maestro",
"magdalena",
"magdalene",
"maggott",
"magician",
"magik",
"magilla",
"magma",
"magneto",
"magnum",
"magnus",
"magus",
"maha_yogi",
"mahkizmo",
"major_mapleleaf",
"makkari",
"malekith_the_accursed",
"malice",
"mammomax",
"man_mountain_marko",
"man_ape",
"man_beast",
"man_brute",
"man_bull",
"man_eater",
"man_elephant",
"man_killer",
"man_spider",
"man_thing",
"man_wolf",
"manbot",
"mandarin",
"mandrill",
"mandroid",
"mangle",
"mangog",
"manikin",
"manslaughter",
"manta",
"mantis",
"mantra",
"mar_vell",
"marc_spector",
"marduk_kurios",
"margali_szardos",
"margaret_power",
"margo_damian",
"maria_hill",
"mariko_yashida",
"marius_st_croix",
"mark_gervaisnight_shade",
"mark_raxton",
"mark_scarlotti",
"mark_todd",
"marlene_alraune",
"marrina",
"marrina_smallwood",
"marrow",
"marsha_rosenberg",
"martha_johansson",
"martin_gold",
"martin_preston",
"martinex",
"marvel_boy",
"marvel_girl",
"marvel_man",
"marvin_flumm",
"mary_skeeter_macpherran",
"mary_jane_parker",
"mary_jane_watson",
"mary_walker",
"mary_zero",
"masked_marauder",
"masked_marvel",
"masked_rose",
"masque",
"mass_master",
"master_khan",
"master_man",
"master_menace",
"master_mold",
"master_order",
"master_pandemonium",
"master_of_vengeance",
"mastermind",
"mastermind_of_the_uk",
"matador",
"match",
"matsuo_tsurayaba",
"matt_murdock",
"mauler",
"maur_konn",
"mauvais",
"maverick",
"max",
"maxam",
"maximus",
"maxwell_dillon",
"may_mayday_parker",
"may_parker",
"mayhem",
"maynard_tiboldt",
"meanstreak",
"meathook",
"mechamage",
"medusa",
"meggan",
"meggan_braddock",
"mekano",
"meld",
"melee",
"melissa_gold",
"melody_guthrie",
"meltdown",
"melter",
"mentallo",
"mentor",
"mentus",
"mephisto",
"mercurio",
"mercury",
"mercy",
"merlin",
"mesmero",
"metal_master",
"metalhead",
"meteor_man",
"meteorite",
"meteorite_ii",
"michael_nowman",
"michael_twoyoungmen",
"micro",
"microchip",
"micromax",
"midas",
"midgard_serpent",
"midnight",
"midnight_man",
"midnight_sun",
"miek",
"miguel_espinosa",
"miguel_ohara",
"miguel_santos",
"mikado",
"mikey",
"mikhail_rasputin",
"mikula_golubev",
"milan",
"miles_warren",
"milos_masaryk",
"mimic",
"mimir",
"mindmeld",
"mindworm",
"miracle_man",
"mirage",
"mirage_ii",
"misfit",
"miss_america",
"missing_link",
"mist_mistress",
"mister_buda",
"mister_doll",
"mister_fear",
"mister_hyde",
"mister_jip",
"mister_machine",
"mister_one",
"mister_sensitive",
"mister_sinister",
"mister_two",
"mister_x",
"misty_knight",
"mockingbird",
"modred_the_mystic",
"mogul_of_the_mystic_mountain",
"moira_brandon",
"moira_mactaggert",
"mojo",
"mole_man",
"molecule_man",
"molly_hayes",
"molten_man",
"mondo",
"monet_st_croix",
"mongoose",
"monica_rappaccini",
"monsoon",
"monstra",
"monstro_the_mighty",
"moon_knight",
"moon_boy",
"moondark",
"moondragon",
"moonhunter",
"moonstone",
"mop_man",
"morbius",
"mordred",
"morg",
"morgan_le_fay",
"morlun",
"morning_star",
"morph",
"morpheus",
"morris_bench",
"mortimer_toynbee",
"moses_magnum",
"mosha",
"mother_earth",
"mother_nature",
"mother_night",
"mother_superior",
"motormouth",
"mountjoy",
"mr_fish",
"mr_justice",
"mr_m",
"mr_w",
"ms_modok",
"ms_marvel",
"ms_steed",
"multiple_man",
"murmur",
"murmur_ii",
"mutant_master",
"mutant_x",
"myron_maclain",
"mys_tech",
"mysterio",
"mystique",
"ngabthoth",
"ngarai",
"nastirh",
"nfl_superpro",
"naga",
"nameless_one",
"namor_mckenzie",
"namor_the_sub_mariner",
"namora",
"namorita",
"nanny",
"nate_grey",
"nathaniel_essex",
"nathaniel_richards",
"native",
"nebula",
"nebulo",
"nebulon",
"nebulos",
"necrodamus",
"necromantra",
"ned_horrocks",
"ned_leeds",
"needle",
"nefarius",
"negasonic_teenage_warhead",
"nekra",
"nekra_sinclar",
"nemesis",
"neophyte",
"neptune",
"network",
"neuronne",
"neurotap",
"new_goblin",
"nezarr_the_calculator",
"nicholas_maunder",
"nicholas_scratch",
"nick_fury",
"nico_minor",
"nicole_st_croix",
"night_nurse",
"night_rider",
"night_thrasher",
"nightcrawler",
"nighthawk",
"nightmare",
"nightshade",
"nightside",
"nightwatch",
"nightwind",
"nikki",
"niles_van_roekel",
"nimrod",
"ningal",
"nitro",
"nobilus",
"nocturne",
"noh_varr",
"nomad",
"norman_osborn",
"norns",
"norrin_radd",
"northstar",
"nosferata",
"nova",
"nova_prime",
"novs",
"nox",
"nth_man",
"nth_man_the_ultimate_ninja",
"nuke_frank_simpson",
"nuke_squadron_supreme_member",
"nuklo",
"numinus",
"nut",
"obadiah_stane",
"obituary",
"obliterator",
"oblivion",
"occulus",
"ocean",
"ocelot",
"oddball",
"odin",
"ogre",
"ogress",
"omega",
"omega_red",
"omega_the_unknown",
"omen",
"omerta",
"one_above_all",
"oneg_the_prober",
"onslaught",
"onyxx",
"ooze",
"optoman",
"oracle",
"orator",
"orb",
"orbit",
"orchid",
"ord",
"order",
"orikal",
"orka",
"ororo_munroe",
"orphan",
"orphan_maker",
"osiris",
"outlaw",
"outrage",
"overkill",
"overmind",
"overrider",
"owl",
"ox",
"ozone",
"ozymandias",
"paibo",
"paige_guthrie",
"paladin",
"paradigm",
"paragon",
"paralyzer",
"paris",
"pasco",
"paste_pot_pete",
"patch",
"pathway",
"patriot",
"patriot_ii",
"patsy_hellstrom",
"patsy_walker",
"paul_bailey",
"paul_norbert_ebersol",
"paul_patterson",
"payback",
"peace_monger",
"peepers",
"peggy_carter",
"penance",
"penance_ii",
"peregrine",
"perfection",
"perseus",
"persuader",
"persuasion",
"perun",
"pete_wisdom",
"peter_criss",
"peter_noble",
"peter_parker",
"peter_petruski",
"phade",
"phage",
"phalanx",
"phantazia",
"phantom_blonde",
"phantom_eagle",
"phantom_rider",
"phastos",
"phat",
"phil_urich",
"philip_fetter",
"phineas_t_horton",
"phoenix",
"photon",
"phyla_vell",
"pietro_maximoff",
"piledriver",
"piotr_rasputin",
"pip_the_troll",
"pipeline",
"piper",
"piranha",
"pisces",
"pistol",
"pixie",
"pixx",
"plague",
"plantman",
"plasma",
"plazm",
"plug",
"plunderer",
"pluto",
"poison",
"polaris",
"poltergeist",
"porcupine",
"portal",
"possessor",
"postman",
"postmortem",
"poundcakes",
"powderkeg",
"power_broker",
"power_man",
"power_princess",
"power_skrull",
"powerhouse",
"powerpax",
"presence",
"pressure",
"prester_john",
"pretty_persuasions",
"preview",
"primal",
"prime",
"prime_mover",
"primevil",
"primus",
"princess_python",
"proctor",
"prodigy",
"professor_power",
"professor_x",
"projector",
"prometheus",
"protector",
"proteus",
"prototype",
"prowler",
"psi_lord",
"psyche",
"psycho_man",
"psyklop",
"psylocke",
"puck",
"puff_adder",
"puishannt",
"pulse",
"puma",
"punchout",
"punisher",
"punisher_2099",
"puppet_master",
"purge",
"purple_girl",
"purple_man",
"pyre",
"pyro",
"quagmire",
"quantum",
"quasar",
"quasar_ii",
"quasimodo",
"quentin_beck",
"quentin_quire",
"quicksand",
"quicksilver",
"quincy_harker",
"raa_of_the_caves",
"rachel_grey",
"rachel_summers",
"rachel_van_helsing",
"radian",
"radioactive_man",
"radion_the_atomic_man",
"radius",
"rafferty",
"rage",
"raggadorr",
"rahne_sinclair",
"rainbow",
"rama_tut",
"raman",
"ramrod",
"ramshot",
"rancor",
"randall_shire",
"random",
"ranger",
"ransak_the_reject",
"rattler",
"ravage_2099",
"raving_beauty",
"rawhide_kid",
"rax",
"raymond_sikorsky",
"raza",
"razor_fist",
"razorback",
"reaper",
"rebel",
"recorder",
"red_claw",
"red_ghost",
"red_guardian",
"red_lotus",
"red_nine",
"red_raven",
"red_ronin",
"red_shift",
"red_skull",
"red_skull_ii",
"red_wolf",
"redeemer",
"redneck",
"redwing",
"reeva_payge",
"reignfire",
"reject",
"remnant",
"remy_lebea",
"reptyl",
"revanche",
"rex_mundi",
"rhiannon",
"rhino",
"ricadonna",
"richard_fisk",
"richard_parker",
"richard_rider",
"rick_jones",
"ricochet",
"rictor",
"rigellian_recorder",
"right_winger",
"ringer",
"ringleader",
"ringmaster",
"ringo_kid",
"rintrah",
"riot",
"riot_grrl",
"ripfire",
"ritchie_gilmore",
"rlnnd",
"robbie_robertson",
"robert_bobby_drake",
"robert_bruce_banner",
"robert_hunter",
"robert_kelly",
"robert_da_costa",
"rock",
"rock_python",
"rocket_raccoon",
"rocket_racer",
"rodstvow",
"rogue",
"rom_the_spaceknight",
"roma",
"romany_wisdom",
"ronan_the_accuser",
"rose",
"roughhouse",
"roulette",
"royal_roy",
"ruby_thursday",
"ruckus",
"rumiko_fujikawa",
"rune",
"runner",
"rush",
"rusty_collins",
"ruth_bat_seraph",
"ryder",
"sbyll",
"sym",
"sabra",
"sabreclaw",
"sabretooth",
"sack",
"sage",
"sagittarius",
"saint_anna",
"saint_elmo",
"sally_blevins",
"sally_floyd",
"salvo",
"sam_sawyer",
"sam_wilson",
"samuel_starr_saxon",
"samuel_guthrie",
"samuel_silke",
"samuel_smithers",
"sandman",
"sangre",
"sara_grey",
"sasquatch",
"satana",
"satannish",
"saturnyne",
"sauron",
"savage_steel",
"sayge",
"scaleface",
"scalphunter",
"scanner",
"scarecrow",
"scarecrow_ii",
"scarlet_beetle",
"scarlet_centurion",
"scarlet_scarab",
"scarlet_spider",
"scarlet_spiders",
"scarlet_witch",
"schemer",
"scimitar",
"scintilla",
"scorcher",
"scorpia",
"scorpio",
"scorpion",
"scott_summers",
"scott_washington",
"scourge_of_the_underworld",
"scrambler",
"scream",
"screaming_mimi",
"screech",
"scrier",
"sea_urchin",
"seamus_mellencamp",
"sean_cassidy",
"sean_garrison",
"sebastian_shaw",
"seeker",
"sekhmet",
"selene",
"senator_robert_kelly",
"senor_muerte",
"sentry",
"sepulchre",
"sergeant_fury",
"sergei_kravinoff",
"serpentina",
"sersi",
"set",
"seth",
"shadow_king",
"shadow_slasher",
"shadow_hunter",
"shadowcat",
"shadowmage",
"shadrac",
"shalla_bal",
"shaman",
"shamrock",
"shang_chi",
"shanga",
"shanna_the_she_devil",
"shaper_of_worlds",
"shard",
"sharon_carter",
"sharon_friedlander",
"sharon_ventura",
"shathra",
"shatter",
"shatterfist",
"shatterstar",
"she_hulk",
"she_thing",
"she_venom",
"shellshock",
"shen_kuei",
"shiar_gladiator",
"shinchuko_lotus",
"shingen_harada",
"shinobi_shaw",
"shirow_ishihara",
"shiva",
"shiver_man",
"shocker",
"shockwave",
"shola_inkosi",
"shooting_star",
"shotgun",
"shriek",
"shriker",
"shroud",
"shrunken_bones",
"shuma_gorath",
"sidewinder",
"siege",
"siena_blaze",
"sif",
"sigmar",
"sigyn",
"sikorsky",
"silhouette",
"silly_seal",
"silver",
"silver_dagger",
"silver_fox",
"silver_sable",
"silver_samurai",
"silver_scorpion",
"silver_squire",
"silver_surfer",
"silverclaw",
"silvermane",
"simon_williams",
"sin",
"sin_eater",
"sinister",
"sir_steel",
"siryn",
"sise_neg",
"skein",
"skids",
"skin",
"skinhead",
"skull_the_slayer",
"skullcrusher",
"skullfire",
"skunge_the_laxidazian_troll",
"skyhawk",
"skywalker",
"slab",
"slapstick",
"sleek",
"sleeper",
"sleepwalker",
"slick",
"sligguth",
"slipstream",
"slither",
"sludge",
"slug",
"sluggo",
"sluk",
"slyde",
"smart_alec",
"smartship_friday",
"smasher",
"smuggler",
"smuggler_ii",
"snowbird",
"snowfall",
"solara",
"solarman",
"solarr",
"soldier_x",
"solitaire",
"solo",
"solomon_osullivan",
"son_of_satan",
"songbird",
"soulfire",
"space_phantom",
"space_turnip",
"specialist",
"spectra",
"spectral",
"speed",
"speed_demon",
"speedball",
"speedo",
"spellbinder",
"spellcheck",
"spencer_smythe",
"sphinx",
"sphinxor",
"spider_doppelganger",
"spider_girl",
"spider_ham",
"spider_man",
"spider_slayer",
"spider_woman",
"spidercide",
"spike",
"spike_freeman",
"spinnerette",
"spiral",
"spirit_of_76",
"spitfire",
"spoilsport",
"spoor",
"spot",
"sprite",
"sputnik",
"spyder",
"spymaster",
"spyne",
"squidboy",
"squirrel_girl",
"st_john_allerdyce",
"stacy_x",
"stained_glass_scarlet",
"stakar",
"stallior",
"stanley_stewart",
"star_stalker",
"star_thief",
"star_dancer",
"star_lord",
"starbolt",
"stardust",
"starfox",
"starhawk",
"starlight",
"starr_the_slayer",
"starshine",
"starstreak",
"stature",
"steel_raven",
"steel_serpent",
"steel_spider",
"stegron",
"stellaris",
"stem_cell",
"stentor",
"stephen_colbert",
"stephen_strange",
"steve_rogers",
"steven_lang",
"stevie_hunter",
"stick",
"stiletto",
"stilt_man",
"stinger",
"stingray",
"stitch",
"stone",
"stonecutter",
"stonewall",
"storm",
"stranger",
"stratosfire",
"straw_man",
"strobe",
"strong_guy",
"strongarm",
"stryfe",
"stunner",
"stuntmaster",
"stygorr",
"stygyro",
"styx_and_stone",
"sub_mariner",
"sugar_man",
"suicide",
"sultan",
"sun_girl",
"sunder",
"sundragon",
"sunfire",
"sunpyre",
"sunset_bain",
"sunspot",
"sunstreak",
"sunstroke",
"sunturion",
"super_rabbit",
"super_sabre",
"super_adaptoid",
"super_nova",
"super_skrull",
"superpro",
"supercharger",
"superia",
"supernalia",
"suprema",
"supreme_intelligence",
"supremor",
"surge",
"surtur",
"susan_richards",
"susan_storm",
"sushi",
"svarog",
"swarm",
"sweetface",
"swordsman",
"sybil_dorn",
"sybil_dvorak",
"synch",
"t_ray",
"tabitha_smith",
"tag",
"tagak_the_leopard_lord",
"tailhook",
"taj_nital",
"talia_josephine_wagner",
"talisman",
"tamara_rahn",
"tana_nile",
"tantra",
"tanya_anderssen",
"tarantula",
"tarot",
"tartarus",
"taskmaster",
"tatterdemalion",
"tattletale",
"tattoo",
"taurus",
"techno",
"tefral_the_surveyor",
"tempest",
"tempo",
"tempus",
"temugin",
"tenpin",
"termagaira",
"terminator",
"terminatrix",
"terminus",
"terrax_the_tamer",
"terraxia",
"terror",
"tess_one",
"tessa",
"tether",
"tethlam",
"tex_dawson",
"texas_twister",
"thakos",
"thane_ector",
"thanos",
"the_amazing_tanwir_ahmed",
"the_angel",
"the_blank",
"the_destroyer",
"the_entity",
"the_grip",
"the_night_man",
"the_profile",
"the_russian",
"the_stepford_cuckoos",
"the_symbiote",
"the_wink",
"thena",
"theresa_cassidy",
"thermo",
"thin_man",
"thing",
"thinker",
"thirty_three",
"thog",
"thomas_halloway",
"thor",
"thor_girl",
"thornn",
"threnody",
"thumbelina",
"thunderball",
"thunderbird",
"thunderbolt",
"thunderclap",
"thunderfist",
"thunderstrike",
"thundra",
"tiboro",
"tiger_shark",
"tigra",
"timberius",
"time_bomb",
"timeshadow",
"timeslip",
"tinkerer",
"titan",
"titania",
"titanium_man",
"tito_bohusk",
"toad",
"toad_in_waiting",
"todd_arliss",
"tom_cassidy",
"tom_corsi",
"tom_foster",
"tom_thumb",
"tomazooma",
"tombstone",
"tommy",
"tommy_lightning",
"tomorrow_man",
"tony_stark",
"topaz",
"topspin",
"torgo_of_mekka",
"torgo_the_vampire",
"toro",
"torpedo",
"torrent",
"torso",
"tower",
"toxin",
"trader",
"trapper",
"trapster",
"tremolo",
"trevor_fitzroy",
"tri_man",
"triathlon",
"trick_shot",
"trioccula",
"trip_monroe",
"triton",
"troll",
"trump",
"tuc",
"tugun",
"tumbler",
"tundra",
"turac",
"turbo",
"turner_century",
"turner_d_century",
"tusk",
"tutinax_the_mountain_mover",
"two_gun_kid",
"tyger_tiger",
"typeface",
"typhoid",
"typhoid_mary",
"typhon",
"tyr",
"tyrak",
"tyrannosaur",
"tyrannus",
"tyrant",
"tzabaoth",
"u_go_girl",
"u_man",
"usagent",
"uat",
"ulik",
"ultimo",
"ultimus",
"ultra_marine",
"ultragirl",
"ultron",
"ulysses",
"umar",
"umbo",
"uncle_ben_parker",
"uni_mind",
"unicorn",
"union_jack",
"unseen",
"unthinnk",
"unus_the_untouchable",
"unuscione",
"ursa_major",
"urthona",
"utgard_loki",
"vagabond",
"vague",
"vakume",
"valentina_allegra_de_la_fontaine",
"valerie_cooper",
"valinor",
"valkin",
"valkyrie",
"valtorr",
"vamp",
"vampire_by_night",
"vance_astro",
"vance_astrovik",
"vanguard",
"vanisher",
"vapor",
"vargas",
"varnae",
"vashti",
"vavavoom",
"vector",
"vegas",
"veil",
"vengeance",
"venom",
"venomm",
"venus",
"venus_dee_milo",
"veritas",
"vermin",
"vertigo",
"vesta",
"vibraxas",
"vibro",
"victor_creed",
"victor_mancha",
"victor_strange",
"victor_von_doom",
"victorius",
"vidar",
"vincente",
"vindaloo",
"vindicator",
"viper",
"virako",
"virginia_pepper_potts",
"virgo",
"vishanti",
"visimajoris",
"vision",
"vivisector",
"vixen",
"volcana",
"volla",
"volpan",
"volstagg",
"vulcan",
"vulture",
"wade_wilson",
"wallflower",
"walter_newell",
"wanda_maximoff",
"war",
"war_eagle",
"war_machine",
"war_v",
"warbird",
"warhawk",
"warlock",
"warpath",
"warren_iii_worthington",
"warrior_woman",
"warstar",
"warstrike",
"warwolves",
"washout",
"wasp",
"watcher",
"water_wizard",
"watoomb",
"weapon_x",
"wendell_vaughn",
"wendigo",
"werewolf_by_night",
"western_kid",
"whiplash",
"whirlwind",
"whistler",
"white_fang",
"white_pilgrim",
"white_queen",
"white_rabbit",
"white_tiger",
"whiteout",
"whizzer",
"wiccan",
"wicked",
"widget",
"wilbur_day",
"wild_child",
"wild_thing",
"wildboys",
"wildpride",
"wildside",
"will_o_the_wisp",
"william_baker",
"william_stryker",
"willie_lumpkin",
"wilson_fisk",
"wind_dancer",
"wind_warrior",
"windeagle",
"windshear",
"winky_man",
"winter_soldier",
"witchfire",
"wiz_kid",
"wizard",
"wolf",
"wolfsbane",
"wolverine",
"wonder_man",
"wong",
"woodgod",
"worm",
"wraith",
"wrath",
"wreckage",
"wrecker",
"wundarr_the_aquarian",
"wyatt_wingfoot",
"wysper",
"x_23",
"x_cutioner",
"x_man",
"x_ray",
"x_treme",
"xand",
"xavin",
"xemnu_the_titan",
"xem",
"xian_chi_xan",
"xorn",
"xorr_the_god_jewel",
"ygaron",
"yandroth",
"yellow_claw",
"yellowjacket",
"yeti",
"yith",
"ymir",
"yond",
"yrial",
"yukio",
"yukon_jack",
"yuri_topolov",
"yuriko_oyama",
"zab",
"zach",
"zaladane",
"zarathos",
"zarek",
"zartra",
"zebediah_killgrave",
"zeitgeist",
"zero",
"zero_g",
"zeus",
"ziggy_pig",
"zip_zap",
"zodiak",
"zom",
"zombie",
"zuras",
"zzzax",
"gen_harada",
"the_living_colossus_it",
"the_living_darkness_null",
"the_renegade_watcher_aron",
"the_tomorrow_man_zarrko",
]
def get_float_from_string(seed):
"""Probably bad way to get a float from secret key"""
max_sha_float = float(
115792089237316195423570985008687907853269984665640564039457584007913129639935
)
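# Descriptive note: max_sha_float is 2**256 - 1, the largest possible SHA-256
# digest value, so the quotient returned below always lies in [0.0, 1.0].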
h = hashlib.sha256(seed.encode("utf-8"))
return int(h.hexdigest(), 16) / max_sha_float
def shuffle_list(original, seed):
"""
Same shuffle for same seed
FIXME: Python 3.9 deprecated random.shuffle with seed
"""
float_seed = get_float_from_string(seed)
return random.shuffle(original, lambda: float_seed)
def new_shuffle_list(seq: Sequence[str], seed: str) -> List[str]:
"""Deterministically shuffle"""
fixed_random = random.Random()
fixed_random.seed(seed, version=2)
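# version=2 (the default) converts the str seed to an int using all of its bits,
# so the same SECRET_KEY always produces the same permutation across runs.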
seq = list(seq)
fixed_random.shuffle(seq)
return seq
shuffle_list(NAMES, settings.SECRET_KEY)
NEW_NAMES = new_shuffle_list(NAMES, settings.SECRET_KEY)
def get_old_name_from_number(num):
# FIXME: remove after maybe a year
x = num % len(NAMES)
return NAMES[x]
def get_name_from_number(num):
x = num % len(NEW_NAMES)
return NEW_NAMES[x]
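# A minimal usage sketch (hypothetical numbers, assuming settings.SECRET_KEY is
# configured): the number-to-codename mapping is stable for a fixed key and wraps
# modulo the list length, e.g.
#   get_name_from_number(7) == get_name_from_number(7)                   # True
#   get_name_from_number(7) == get_name_from_number(7 + len(NEW_NAMES))  # True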
|
|
"""
Tests for L{eliot.testing}.
"""
from __future__ import unicode_literals
from unittest import SkipTest, TestResult, TestCase
from ..testing import (
issuperset, assertContainsFields, LoggedAction, LoggedMessage,
validateLogging, UnflushedTracebacks, assertHasMessage, assertHasAction,
validate_logging, capture_logging,
)
from .._output import MemoryLogger
from .._action import startAction
from .._message import Message
from .._validation import ActionType, MessageType, ValidationError, Field
from .._traceback import writeTraceback
from .. import add_destination, remove_destination, _output
class IsSuperSetTests(TestCase):
"""
Tests for L{issuperset}.
"""
def test_equal(self):
"""
Equal dictionaries are supersets of each other.
"""
a = {"a": 1}
b = a.copy()
self.assertTrue(issuperset(a, b))
def test_additionalIsSuperSet(self):
"""
If C{A} is C{B} plus some extra entries, C{A} is superset of C{B}.
"""
a = {"a": 1, "b": 2, "c": 3}
b = {"a": 1, "c": 3}
self.assertTrue(issuperset(a, b))
def test_missingIsNotSuperSet(self):
"""
If C{A} is C{B} minus some entries, C{A} is not a superset of C{B}.
"""
a = {"a": 1, "c": 3}
b = {"a": 1, "b": 2, "c": 3}
self.assertFalse(issuperset(a, b))
class LoggedActionTests(TestCase):
"""
Tests for L{LoggedAction}.
"""
def test_values(self):
"""
The values given to the L{LoggedAction} constructor are stored on it.
"""
d1 = {'x': 1}
d2 = {'y': 2}
root = LoggedAction(d1, d2, [])
self.assertEqual((root.startMessage, root.endMessage), (d1, d2))
def fromMessagesIndex(self, messages, index):
"""
Call L{LoggedAction.fromMessages} using the action at the given index in
a list of message dictionaries.
@param messages: A C{list} of message dictionaries.
@param index: Index into the logger's messages.
@return: Result of L{LoggedAction.fromMessages}.
"""
uuid = messages[index]["task_uuid"]
level = messages[index]["task_level"]
return LoggedAction.fromMessages(uuid, level, messages)
def test_fromMessagesCreatesLoggedAction(self):
"""
L{LoggedAction.fromMessages} returns a L{LoggedAction}.
"""
logger = MemoryLogger()
with startAction(logger, "test"):
pass
logged = self.fromMessagesIndex(logger.messages, 0)
self.assertIsInstance(logged, LoggedAction)
def test_fromMessagesStartAndSuccessfulFinish(self):
"""
L{LoggedAction.fromMessages} finds the start and successful finish
messages of an action and stores them in the result.
"""
logger = MemoryLogger()
Message.new(x=1).write(logger)
with startAction(logger, "test"):
Message.new(x=1).write(logger)
# Now we should have x message, start action message, another x message
# and finally finish message.
logged = self.fromMessagesIndex(logger.messages, 1)
self.assertEqual((logged.startMessage, logged.endMessage),
(logger.messages[1], logger.messages[3]))
def test_fromMessagesStartAndErrorFinish(self):
"""
L{LoggedAction.fromMessages} finds the start and error finish
messages of an action and stores them in the result.
"""
logger = MemoryLogger()
try:
with startAction(logger, "test"):
raise KeyError()
except KeyError:
pass
logged = self.fromMessagesIndex(logger.messages, 0)
self.assertEqual((logged.startMessage, logged.endMessage),
(logger.messages[0], logger.messages[1]))
def test_fromMessagesStartNotFound(self):
"""
L{LoggedAction.fromMessages} raises a L{ValueError} if a start message
is not found.
"""
logger = MemoryLogger()
with startAction(logger, "test"):
pass
self.assertRaises(ValueError,
self.fromMessagesIndex, logger.messages[1:], 0)
def test_fromMessagesFinishNotFound(self):
"""
L{LoggedAction.fromMessages} raises a L{ValueError} if a finish message
is not found.
"""
logger = MemoryLogger()
with startAction(logger, "test"):
pass
self.assertRaises(ValueError,
self.fromMessagesIndex, logger.messages[:1], 0)
def test_fromMessagesAddsChildMessages(self):
"""
L{LoggedAction.fromMessages} adds direct child messages to the
constructed L{LoggedAction}.
"""
logger = MemoryLogger()
# index 0:
Message.new(x=1).write(logger)
# index 1 - start action
with startAction(logger, "test"):
# index 2
Message.new(x=2).write(logger)
# index 3
Message.new(x=3).write(logger)
# index 4 - end action
# index 5
Message.new(x=4).write(logger)
logged = self.fromMessagesIndex(logger.messages, 1)
expectedChildren = [LoggedMessage(logger.messages[2]),
LoggedMessage(logger.messages[3])]
self.assertEqual(logged.children, expectedChildren)
def test_fromMessagesAddsChildActions(self):
"""
L{LoggedAction.fromMessages} recursively adds direct child actions to
the constructed L{LoggedAction}.
"""
logger = MemoryLogger()
# index 0
with startAction(logger, "test"):
# index 1:
with startAction(logger, "test"):
# index 2
Message.new(x=2).write(logger)
# index 3 - end action
# index 4 - end action
logged = self.fromMessagesIndex(logger.messages, 0)
self.assertEqual(logged.children[0],
self.fromMessagesIndex(logger.messages, 1))
def test_ofType(self):
"""
L{LoggedAction.ofType} returns a list of L{LoggedAction} created by the
specified L{ActionType}.
"""
ACTION = ActionType("myaction", [], [], "An action!")
logger = MemoryLogger()
# index 0
with startAction(logger, "test"):
# index 1:
with ACTION(logger):
# index 2
Message.new(x=2).write(logger)
# index 3 - end action
# index 4 - end action
# index 5
with ACTION(logger):
pass
# index 6 - end action
logged = LoggedAction.ofType(logger.messages, ACTION)
self.assertEqual(logged, [self.fromMessagesIndex(logger.messages, 1),
self.fromMessagesIndex(logger.messages, 5)])
def test_ofTypeNotFound(self):
"""
L{LoggedAction.ofType} returns an empty list if actions of the given
type cannot be found.
"""
ACTION = ActionType("myaction", [], [], "An action!")
logger = MemoryLogger()
self.assertEqual(LoggedAction.ofType(logger.messages, ACTION), [])
def test_descendants(self):
"""
L{LoggedAction.descendants} returns all descendants of the
L{LoggedAction}.
"""
ACTION = ActionType("myaction", [], [], "An action!")
logger = MemoryLogger()
# index 0
with ACTION(logger):
# index 1:
with startAction(logger, "test"):
# index 2
Message.new(x=2).write(logger)
# index 3 - end action
# index 4
Message.new(x=2).write(logger)
# index 5 - end action
loggedAction = LoggedAction.ofType(logger.messages, ACTION)[0]
self.assertEqual(list(loggedAction.descendants()),
[self.fromMessagesIndex(logger.messages, 1),
LoggedMessage(logger.messages[2]),
LoggedMessage(logger.messages[4])])
def test_succeeded(self):
"""
If the action succeeded, L{LoggedAction.succeeded} will be true.
"""
logger = MemoryLogger()
with startAction(logger, "test"):
pass
logged = self.fromMessagesIndex(logger.messages, 0)
self.assertTrue(logged.succeeded)
def test_notSucceeded(self):
"""
If the action failed, L{LoggedAction.succeeded} will be false.
"""
logger = MemoryLogger()
try:
with startAction(logger, "test"):
raise KeyError()
except KeyError:
pass
logged = self.fromMessagesIndex(logger.messages, 0)
self.assertFalse(logged.succeeded)
class LoggedMessageTest(TestCase):
"""
Tests for L{LoggedMessage}.
"""
def test_values(self):
"""
The values given to the L{LoggedMessage} constructor are stored on it.
"""
message = {'x': 1}
logged = LoggedMessage(message)
self.assertEqual(logged.message, message)
def test_ofType(self):
"""
L{LoggedMessage.ofType} returns a list of L{LoggedMessage} created by the
specified L{MessageType}.
"""
MESSAGE = MessageType("mymessage", [], "A message!")
logger = MemoryLogger()
# index 0
MESSAGE().write(logger)
# index 1
Message.new(x=2).write(logger)
# index 2
MESSAGE().write(logger)
logged = LoggedMessage.ofType(logger.messages, MESSAGE)
self.assertEqual(logged, [LoggedMessage(logger.messages[0]),
LoggedMessage(logger.messages[2])])
def test_ofTypeNotFound(self):
"""
L{LoggedMessage.ofType} returns an empty list if messages of the given
type cannot be found.
"""
MESSAGE = MessageType("mymessage", [], "A message!")
logger = MemoryLogger()
self.assertEqual(LoggedMessage.ofType(logger.messages, MESSAGE), [])
class AssertContainsFields(TestCase):
"""
Tests for L{assertContainsFields}.
"""
class ContainsTest(TestCase):
"""
A test case that uses L{assertContainsFields}.
"""
def __init__(self, message, expectedFields):
TestCase.__init__(self)
self.message = message
self.expectedFields = expectedFields
def runTest(self):
assertContainsFields(self, self.message, self.expectedFields)
def test_equal(self):
"""
Equal dictionaries contain each other.
"""
message = {"a": 1}
expected = message.copy()
test = self.ContainsTest(message, expected)
# No exception raised:
test.debug()
def test_additionalIsSuperSet(self):
"""
If C{A} is C{B} plus some extra entries, C{A} contains the fields in
C{B}.
"""
message = {"a": 1, "b": 2, "c": 3}
expected = {"a": 1, "c": 3}
test = self.ContainsTest(message, expected)
# No exception raised:
test.debug()
def test_missingFields(self):
"""
If C{A} is C{B} minus some entries, C{A} does not contain the fields in
C{B}.
"""
message = {"a": 1, "c": 3}
expected = {"a": 1, "b": 2, "c": 3}
test = self.ContainsTest(message, expected)
self.assertRaises(AssertionError, test.debug)
def test_differentValues(self):
"""
If C{A} has a different value for a specific field than C{B}, C{A} does
not contain the fields in C{B}.
"""
message = {"a": 1, "c": 3}
expected = {"a": 1, "c": 2}
test = self.ContainsTest(message, expected)
self.assertRaises(AssertionError, test.debug)
class ValidateLoggingTestsMixin(object):
"""
Tests for L{validateLogging} and L{capture_logging}.
"""
validate = None
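# Subclasses set ``validate`` to the decorator under test (validate_logging or
# capture_logging), so every test in this mixin exercises both decorators.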
def test_decoratedFunctionCalledWithMemoryLogger(self):
"""
The underlying function decorated with L{validateLogging} is called with
a L{MemoryLogger} instance in addition to any other arguments when the
wrapper is called.
"""
result = []
class MyTest(TestCase):
@self.validate(None)
def test_foo(this, logger):
result.append((this, logger.__class__))
theTest = MyTest("test_foo")
theTest.run()
self.assertEqual(result, [(theTest, MemoryLogger)])
def test_newMemoryLogger(self):
"""
The underlying function decorated with L{validateLogging} is called with
a new L{MemoryLogger} every time the wrapper is called.
"""
result = []
class MyTest(TestCase):
@self.validate(None)
def test_foo(this, logger):
result.append(logger)
theTest = MyTest("test_foo")
theTest.run()
theTest.run()
self.assertIsNot(result[0], result[1])
def test_returns(self):
"""
The result of the underlying function is returned by the wrapper when it is called.
"""
class MyTest(TestCase):
@self.validate(None)
def test_foo(self, logger):
return 123
self.assertEqual(MyTest("test_foo").test_foo(), 123)
def test_raises(self):
"""
The exception raised by the underlying function is passed through by the
wrapper when called.
"""
exc = Exception()
class MyTest(TestCase):
@self.validate(None)
def test_foo(self, logger):
raise exc
raised = None
try:
MyTest("test_foo").debug()
except Exception as e:
raised = e
self.assertIs(exc, raised)
def test_name(self):
"""
The wrapper has the same name as the wrapped function.
"""
class MyTest(TestCase):
@self.validate(None)
def test_foo(self, logger):
pass
self.assertEqual(MyTest.test_foo.__name__, "test_foo")
def test_addCleanupValidate(self):
"""
When a test method is decorated with L{validateLogging} it has
L{MemoryLogger.validate} registered as a test cleanup.
"""
MESSAGE = MessageType("mymessage", [], "A message")
class MyTest(TestCase):
@self.validate(None)
def runTest(self, logger):
self.logger = logger
logger.write({"message_type": "wrongmessage"},
MESSAGE._serializer)
test = MyTest()
self.assertRaises(ValidationError, test.debug)
self.assertEqual(list(test.logger.messages[0].keys()), ["message_type"])
def test_addCleanupTracebacks(self):
"""
When a test method is decorated with L{validateLogging} it has a check
for unflushed tracebacks in the L{MemoryLogger} registered as a test
cleanup.
"""
class MyTest(TestCase):
@self.validate(None)
def runTest(self, logger):
try:
1 / 0
except ZeroDivisionError:
writeTraceback(logger)
test = MyTest()
self.assertRaises(UnflushedTracebacks, test.debug)
def test_assertion(self):
"""
If a callable is passed to L{validateLogging}, it is called with the
L{TestCase} instance and the L{MemoryLogger} passed to the test
method.
"""
result = []
class MyTest(TestCase):
def assertLogging(self, logger):
result.append((self, logger))
@self.validate(assertLogging)
def runTest(self, logger):
self.logger = logger
test = MyTest()
test.run()
self.assertEqual(result, [(test, test.logger)])
def test_assertionArguments(self):
"""
If a callable together with additional arguments and keyword arguments are
passed to L{validateLogging}, the callable is called with the additional
args and kwargs.
"""
result = []
class MyTest(TestCase):
def assertLogging(self, logger, x, y):
result.append((self, logger, x, y))
@self.validate(assertLogging, 1, y=2)
def runTest(self, logger):
self.logger = logger
test = MyTest()
test.run()
self.assertEqual(result, [(test, test.logger, 1, 2)])
def test_assertionAfterTest(self):
"""
If a callable is passed to L{validateLogging}, it is called after the
main test code has run, allowing it to make assertions about log
messages from the test.
"""
class MyTest(TestCase):
def assertLogging(self, logger):
self.result.append(2)
@self.validate(assertLogging)
def runTest(self, logger):
self.result = [1]
test = MyTest()
test.run()
self.assertEqual(test.result, [1, 2])
def test_assertionBeforeTracebackCleanup(self):
"""
If a callable is passed to L{validateLogging}, it is called before the
check for unflushed tracebacks, allowing it to flush traceback log
messages.
"""
class MyTest(TestCase):
def assertLogging(self, logger):
logger.flushTracebacks(ZeroDivisionError)
self.flushed = True
@self.validate(assertLogging)
def runTest(self, logger):
self.flushed = False
try:
1 / 0
except ZeroDivisionError:
writeTraceback(logger)
test = MyTest()
test.run()
self.assertTrue(test.flushed)
class ValidateLoggingTests(ValidateLoggingTestsMixin, TestCase):
"""
Tests for L{validate_logging}.
"""
validate = staticmethod(validate_logging)
class CaptureLoggingTests(ValidateLoggingTestsMixin, TestCase):
"""
Tests for L{capture_logging}.
"""
validate = staticmethod(capture_logging)
def setUp(self):
# Since we're not always calling the test method via the TestCase
# infrastructure, sometimes cleanup methods are not called. This
# means the original default logger is not restored. So we do so
# manually. If the issue is a bug in capture_logging itself the
# tests below will catch that.
original_logger = _output._DEFAULT_LOGGER
def cleanup():
_output._DEFAULT_LOGGER = original_logger
self.addCleanup(cleanup)
def test_default_logger(self):
"""
L{capture_logging} captures messages from logging that
doesn't specify a L{Logger}.
"""
class MyTest(TestCase):
@capture_logging(None)
def runTest(self, logger):
Message.log(some_key=1234)
self.logger = logger
test = MyTest()
test.run()
self.assertEqual(test.logger.messages[0][u"some_key"], 1234)
def test_global_cleanup(self):
"""
After the function wrapped with L{capture_logging} finishes,
logging that doesn't specify a logger is logged normally.
"""
class MyTest(TestCase):
@capture_logging(None)
def runTest(self, logger):
pass
test = MyTest()
test.run()
messages = []
add_destination(messages.append)
self.addCleanup(remove_destination, messages.append)
Message.log(some_key=1234)
self.assertEqual(messages[0][u"some_key"], 1234)
def test_global_cleanup_exception(self):
"""
If the function wrapped with L{capture_logging} throws an exception,
logging that doesn't specify a logger is logged normally.
"""
class MyTest(TestCase):
@capture_logging(None)
def runTest(self, logger):
raise RuntimeError()
test = MyTest()
test.run()
messages = []
add_destination(messages.append)
self.addCleanup(remove_destination, messages.append)
Message.log(some_key=1234)
self.assertEqual(messages[0][u"some_key"], 1234)
def test_validationNotRunForSkip(self):
"""
If the decorated test raises L{SkipTest} then the logging validation is
also skipped.
"""
class MyTest(TestCase):
recorded = False
def record(self, logger):
self.recorded = True
@validateLogging(record)
def runTest(self, logger):
raise SkipTest("Do not run this test.")
test = MyTest()
result = TestResult()
test.run(result)
# Verify that the validation function did not run and that the test was
# nevertheless marked as a skip with the correct reason.
self.assertEqual(
(test.recorded, result.skipped, result.errors, result.failures),
(False, [(test, "Do not run this test.")], [], [])
)
def test_unflushedTracebacksDontFailForSkip(self):
"""
If the decorated test raises L{SkipTest} then the unflushed traceback
checking normally implied by L{validateLogging} is also skipped.
"""
class MyTest(TestCase):
@validateLogging(lambda self, logger: None)
def runTest(self, logger):
try:
1 / 0
except:
writeTraceback(logger)
raise SkipTest("Do not run this test.")
test = MyTest()
result = TestResult()
test.run(result)
# Verify that there was only a skip, no additional errors or failures
# reported.
self.assertEqual(
(1, [], []),
(len(result.skipped), result.errors, result.failures)
)
MESSAGE1 = MessageType("message1", [Field.forTypes("x", [int], "A number")],
"A message for testing.")
MESSAGE2 = MessageType("message2", [], "A message for testing.")
class AssertHasMessageTests(TestCase):
"""
Tests for L{assertHasMessage}.
"""
class UnitTest(TestCase):
"""
Test case that can be instantiated.
"""
def runTest(self):
pass
def test_failIfNoMessagesOfType(self):
"""
L{assertHasMessage} raises L{AssertionError} if the given L{MemoryLogger}
has no messages of the given L{MessageType}.
"""
test = self.UnitTest()
logger = MemoryLogger()
MESSAGE1(x=123).write(logger)
self.assertRaises(AssertionError,
assertHasMessage, test, logger, MESSAGE2)
def test_returnsIfMessagesOfType(self):
"""
L{assertHasMessage} returns the first message of the given L{MessageType}.
"""
test = self.UnitTest()
logger = MemoryLogger()
MESSAGE1(x=123).write(logger)
self.assertEqual(assertHasMessage(test, logger, MESSAGE1),
LoggedMessage.ofType(logger.messages, MESSAGE1)[0])
def test_failIfNotSubset(self):
"""
L{assertHasMessage} raises L{AssertionError} if the found message doesn't
contain the given fields.
"""
test = self.UnitTest()
logger = MemoryLogger()
MESSAGE1(x=123).write(logger)
self.assertRaises(AssertionError,
assertHasMessage, test, logger, MESSAGE1, {"x": 24})
def test_returnsIfSubset(self):
"""
L{assertHasMessage} returns the first message of the given L{MessageType} if
it contains the given fields.
"""
test = self.UnitTest()
logger = MemoryLogger()
MESSAGE1(x=123).write(logger)
self.assertEqual(assertHasMessage(test, logger, MESSAGE1, {"x": 123}),
LoggedMessage.ofType(logger.messages, MESSAGE1)[0])
ACTION1 = ActionType("action1", [Field.forTypes("x", [int], "A number")],
[Field.forTypes("result", [int], "A number")],
"A action for testing.")
ACTION2 = ActionType("action2", [], [], "A action for testing.")
class AssertHasActionTests(TestCase):
"""
Tests for L{assertHasAction}.
"""
class UnitTest(TestCase):
"""
Test case that can be instantiated.
"""
def runTest(self):
pass
def test_failIfNoActionsOfType(self):
"""
L{assertHasAction} raises L{AssertionError} if the given L{MemoryLogger}
has no actions of the given L{ActionType}.
"""
test = self.UnitTest()
logger = MemoryLogger()
with ACTION1(logger, x=123): pass
self.assertRaises(AssertionError,
assertHasAction, test, logger, ACTION2, True)
def test_failIfWrongSuccessStatus(self):
"""
L{assertHasAction} raises L{AssertionError} if the given success status does
not match that of the found actions.
"""
test = self.UnitTest()
logger = MemoryLogger()
with ACTION1(logger, x=123): pass
try:
with ACTION2(logger):
1/0
except ZeroDivisionError:
pass
self.assertRaises(AssertionError,
assertHasAction, test, logger, ACTION1, False)
self.assertRaises(AssertionError,
assertHasAction, test, logger, ACTION2, True)
def test_returnsIfMessagesOfType(self):
"""
A successful L{assertHasAction} returns the first message of the given
L{ActionType}.
"""
test = self.UnitTest()
logger = MemoryLogger()
with ACTION1(logger, x=123): pass
self.assertEqual(assertHasAction(test, logger, ACTION1, True),
LoggedAction.ofType(logger.messages, ACTION1)[0])
def test_failIfNotStartSubset(self):
"""
L{assertHasAction} raises L{AssertionError} if the found action doesn't
contain the given start fields.
"""
test = self.UnitTest()
logger = MemoryLogger()
with ACTION1(logger, x=123): pass
self.assertRaises(AssertionError,
assertHasAction, test, logger, ACTION1, True, {"x": 24})
def test_failIfNotEndSubset(self):
"""
L{assertHasAction} raises L{AssertionError} if the found action doesn't
contain the given end fields.
"""
test = self.UnitTest()
logger = MemoryLogger()
with ACTION1(logger, x=123) as act: act.addSuccessFields(result=5)
self.assertRaises(AssertionError,
assertHasAction, test, logger, ACTION1, True,
startFields={"x": 123}, endFields={"result": 24})
def test_returns(self):
"""
A successful L{assertHasAction} returns the first message of the given
L{ActionType} after doing all validation.
"""
test = self.UnitTest()
logger = MemoryLogger()
with ACTION1(logger, x=123) as act: act.addSuccessFields(result=5)
self.assertEqual(
assertHasAction(test, logger, ACTION1, True,
{"x": 123}, {"result": 5}),
LoggedAction.ofType(logger.messages, ACTION1)[0])
class PEP8Tests(TestCase):
"""
Tests for PEP 8 method compatibility.
"""
def test_LoggedAction_from_messages(self):
"""
L{LoggedAction.from_messages} is the same as
L{LoggedAction.fromMessages}.
"""
self.assertEqual(LoggedAction.from_messages, LoggedAction.fromMessages)
def test_LoggedAction_of_type(self):
"""
L{LoggedAction.of_type} is the same as
L{LoggedAction.ofType}.
"""
self.assertEqual(LoggedAction.of_type, LoggedAction.ofType)
def test_LoggedAction_end_message(self):
"""
L{LoggedAction.end_message} is the same as L{LoggedAction.endMessage}.
"""
action = LoggedAction({1: 2}, {3: 4}, [])
self.assertEqual(action.end_message, action.endMessage)
def test_LoggedAction_start_message(self):
"""
L{LoggedAction.start_message} is the same as
L{LoggedAction.startMessage}.
"""
action = LoggedAction({1: 2}, {3: 4}, [])
self.assertEqual(action.start_message, action.startMessage)
def test_LoggedMessage_of_type(self):
"""
L{LoggedMessage.of_type} is the same as
L{LoggedMessage.ofType}.
"""
self.assertEqual(LoggedMessage.of_type, LoggedMessage.ofType)
def test_validate_logging(self):
"""
L{validate_logging} is the same as L{validateLogging}.
"""
self.assertEqual(validate_logging, validateLogging)
|
|
from . import controllers
from . import messages
from grow.pods import locales
from grow.pods import urls
from datetime import datetime
import fnmatch
import hashlib
import mimetypes
import os
import re
import time
import webob
mimetypes.add_type('application/font-woff', '.woff')
mimetypes.add_type('image/bmp', '.cur')
mimetypes.add_type('image/svg+xml', '.svg')
mimetypes.add_type('text/css', '.css')
SKIP_PATTERNS = [
'**/.**',
]
class Error(Exception):
pass
class BadStaticFileError(Error):
pass
class StaticFile(object):
def __init__(self, pod_path, serving_path, locale=None, localization=None,
controller=None, fingerprinted=False, pod=None):
self.pod = pod
self.default_locale = pod.podspec.default_locale
self.locale = pod.normalize_locale(locale)
self.localization = localization
self.pod_path = pod_path
self.serving_path = serving_path
self.controller = controller
self.basename = os.path.basename(pod_path)
self.fingerprinted = fingerprinted
self.base, self.ext = os.path.splitext(self.basename)
def __repr__(self):
if self.locale:
return "<StaticFile({}, locale='{}')>".format(self.pod_path, self.locale)
return "<StaticFile({})>".format(self.pod_path)
def __eq__(self, other):
return (self.pod_path == other.pod_path and self.pod == other.pod
and other.locale == self.locale)
def __ne__(self, other):
return not self.__eq__(other)
@property
def exists(self):
return self.pod.file_exists(self.pod_path)
@property
def modified(self):
return self.pod.file_modified(self.pod_path)
@property
def size(self):
return self.pod.file_size(self.pod_path)
@property
def fingerprint(self):
return StaticFile._create_fingerprint(self.pod, self.pod_path)
@staticmethod
def _create_fingerprint(pod, pod_path):
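# Hashes the file's full contents with MD5; the resulting hex digest is what
# gets embedded in fingerprinted (cache-busting) filenames.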
md5 = hashlib.md5()
with pod.open_file(pod_path, 'rb') as fp:
content = fp.read()
md5.update(content)
return md5.hexdigest()
@staticmethod
def remove_fingerprint(path):
base, _ = os.path.splitext(path)
if base.endswith('.min'):
return re.sub(r'(.*)-([a-fA-F\d]{32})\.min\.(.*)', r'\g<1>.min.\g<3>', path)
return re.sub(r'(.*)-([a-fA-F\d]{32})\.(.*)', r'\g<1>.\g<3>', path)
@staticmethod
def apply_fingerprint(path, fingerprint):
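# Illustrative example (hypothetical values): given '/static/app.min.js' and a
# fingerprint fp, this returns '/static/app-<fp>.min.js'; for a real
# 32-character MD5 digest, remove_fingerprint() maps that name back again.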
base, ext = os.path.splitext(path)
# Special case to preserve the '.min.<ext>' suffix when fingerprinting.
if base.endswith('.min'):
base = base[:-4]
return '{}-{}.min{}'.format(base, fingerprint, ext)
else:
return '{}-{}{}'.format(base, fingerprint, ext)
@property
def url(self):
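# Builds the public URL for this static file: starts from the controller's
# serving path and, when a localized variant of the file exists in the pod,
# swaps in the localization's serve_at path, re-fingerprinting against the
# localized file when fingerprints are enabled.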
serving_path = self.serving_path
path_format = self.controller.path_format.replace('{filename}', '')
if '{fingerprint}' in path_format:
path_format = path_format.replace('{fingerprint}', self.fingerprint)
# Determine suffix only after all replacements are made.
suffix = serving_path.replace(path_format, '')
if self.localization:
if self.fingerprinted:
suffix = StaticFile.remove_fingerprint(suffix)
localized_pod_path = self.localization['static_dir'] + suffix
localized_pod_path = localized_pod_path.format(locale=self.locale)
localized_pod_path = localized_pod_path.replace('//', '/')
if self.pod.file_exists(localized_pod_path):
# TODO(jeremydw): Centralize path formatting.
# Internal paths use Babel locales, serving paths use aliases.
locale = self.locale.alias if self.locale is not None else self.locale
localized_serving_path = self.localization['serve_at'] + suffix
kwargs = {
'locale': locale,
'root': self.pod.podspec.root,
}
if '{fingerprint}' in localized_serving_path:
fingerprint = StaticFile._create_fingerprint(
self.pod, localized_pod_path)
kwargs['fingerprint'] = fingerprint
localized_serving_path = localized_serving_path.format(**kwargs)
if self.fingerprinted and localized_serving_path:
fingerprint = StaticFile._create_fingerprint(self.pod, localized_pod_path)
localized_serving_path = StaticFile.apply_fingerprint(localized_serving_path, fingerprint)
serving_path = localized_serving_path.replace('//', '/')
if serving_path:
return urls.Url(
path=serving_path,
host=self.pod.env.host,
port=self.pod.env.port,
scheme=self.pod.env.scheme)
class StaticController(controllers.BaseController):
KIND = messages.Kind.STATIC
def __init__(self, path_format, source_format=None, localized=False,
localization=None, fingerprinted=False, pod=None):
# path_format: "serve_at"
# source_format: "static_dir"
self.path_format = path_format.replace('<grow:', '{').replace('>', '}')
self.source_format = source_format.replace('<grow:', '{').replace('>', '}')
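# For example (illustrative path): a rule written as '/static/<grow:filename>'
# is normalized above to '/static/{filename}' so it can be expanded with
# str.format().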
self.pod = pod
self.localized = localized
self.localization = localization
self.fingerprinted = fingerprinted
def __repr__(self):
return '<Static(format=\'{}\')>'.format(self.source_format)
def get_localized_pod_path(self, params):
if (self.localization
and '{locale}' in self.localization['static_dir']
and 'locale' in params):
source_format = self.localization['serve_at']
source_format += '/{filename}'
source_format = source_format.replace('//', '/')
kwargs = params
kwargs['root'] = self.pod.podspec.root
if 'locale' in kwargs:
locale = locales.Locale.from_alias(self.pod, kwargs['locale'])
kwargs['locale'] = str(locale)
if '{root}' in source_format:
kwargs['root'] = self.pod.podspec.root
pod_path = source_format.format(**kwargs)
if self.fingerprinted:
pod_path = StaticFile.remove_fingerprint(pod_path)
if self.pod.file_exists(pod_path):
return pod_path
def get_pod_path(self, params):
# If a localized file exists, serve it. Otherwise, serve the base file.
pod_path = self.get_localized_pod_path(params)
if pod_path:
return pod_path
pod_path = self.source_format.format(**params)
if self.fingerprinted:
pod_path = StaticFile.remove_fingerprint(pod_path)
return pod_path
def validate(self, params):
pod_path = self.get_pod_path(params)
if not self.pod.file_exists(pod_path):
path = self.pod.abs_path(pod_path)
message = '{} does not exist.'.format(path)
raise webob.exc.HTTPNotFound(message)
def render(self, params, inject=False):
pod_path = self.get_pod_path(params)
return self.pod.read_file(pod_path)
def get_mimetype(self, params):
pod_path = self.get_pod_path(params)
return mimetypes.guess_type(pod_path)[0]
def get_http_headers(self, params):
pod_path = self.get_pod_path(params)
path = self.pod.abs_path(pod_path)
headers = super(StaticController, self).get_http_headers(params)
self.pod.storage.update_headers(headers, path)
modified = self.pod.storage.modified(path)
time_obj = datetime.fromtimestamp(modified).timetuple()
time_format = '%a, %d %b %Y %H:%M:%S GMT'
headers['Last-Modified'] = time.strftime(time_format, time_obj)
headers['ETag'] = '"{}"'.format(headers['Last-Modified'])
headers['X-Grow-Pod-Path'] = pod_path
# StaticController does not set a locale of its own; only emit the header
# when a locale attribute happens to be present (avoids an AttributeError).
if getattr(self, 'locale', None):
headers['X-Grow-Locale'] = self.locale
return headers
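# Maps an internal pod path back to its public serving path by reversing
# source_format; falls through (returning None) when the path does not match
# this rule.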
def match_pod_path(self, pod_path):
if self.path_format == pod_path:
if self.fingerprinted:
fingerprint = StaticFile._create_fingerprint(self.pod, pod_path)
return StaticFile.apply_fingerprint(self.path_format, fingerprint)
return self.path_format
tokens = re.findall('.?{([^}]+)}.?', self.path_format)
if 'filename' in tokens:
source_regex = self.source_format.replace(
'{filename}', '(?P<filename>.*)')
source_regex = source_regex.replace('{locale}', '(?P<locale>[^/]*)')
source_regex = source_regex.replace('{fingerprint}', '(?P<fingerprint>[^/]*)')
source_regex = source_regex.replace('{root}', '(?P<root>[^/]*)')
match = re.match(source_regex, pod_path)
if match:
kwargs = match.groupdict()
kwargs['root'] = self.pod.podspec.root
if 'fingerprint' in tokens:
fingerprint = StaticFile._create_fingerprint(self.pod, pod_path)
kwargs['fingerprint'] = fingerprint
if 'locale' in kwargs:
locale = locales.Locale.from_alias(self.pod, kwargs['locale'])
kwargs['locale'] = str(locale)
path = self.path_format.format(**kwargs)
path = path.replace('//', '/')
if self.fingerprinted:
fingerprint = StaticFile._create_fingerprint(self.pod, pod_path)
path = StaticFile.apply_fingerprint(path, fingerprint)
return path
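# Enumerates every concrete serving path this rule can produce by listing the
# files under its source directory (honoring SKIP_PATTERNS) and formatting
# path_format for each match.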
def list_concrete_paths(self):
concrete_paths = set()
tokens = re.findall('.?{([^}]+)}.?', self.path_format)
source_regex = self.source_format.replace('{filename}', '(?P<filename>.*)')
source_regex = source_regex.replace('{locale}', '(?P<locale>[^/]*)')
if '{' not in self.path_format:
if self.fingerprinted:
fingerprint = StaticFile._create_fingerprint(self.pod, self.path_format)
path = StaticFile.apply_fingerprint(self.path_format, fingerprint)
concrete_paths.add(path)
else:
concrete_paths.add(self.path_format)
elif 'filename' in tokens:
# NOTE: This should be updated to support globbing directories,
# and not simply strip all sub-paths beneath {locale}.
source = self.source_format.replace('{filename}', '')[1:]
source = re.sub('{locale}.*', '', source)
source = source.rstrip('/')
paths = self.pod.list_dir(source)
paths = [('/' + source + path).replace(self.pod.root, '')
for path in paths]
# Exclude paths matched by skip patterns.
for pattern in SKIP_PATTERNS:
# .gitignore-style treatment of paths without slashes.
if '/' not in pattern:
pattern = '**{}**'.format(pattern)
for skip_paths in fnmatch.filter(paths, pattern):
paths = [path for path in paths
if path.replace(self.pod.root, '') not in skip_paths]
for pod_path in paths:
match = re.match(source_regex, pod_path)
# Skip adding localized paths in subfolders of other rules.
if not self.localized and self.localization:
localized_source_format = self.localization['static_dir']
localized_source_regex = localized_source_format.replace(
'{filename}', '(?P<filename>.*)')
localized_source_regex = localized_source_regex.replace(
'{locale}', '(?P<locale>[^/]*)')
if re.match(localized_source_regex, pod_path):
continue
if match:
kwargs = match.groupdict()
kwargs['root'] = self.pod.podspec.root
if 'fingerprint' in self.path_format:
fingerprint = StaticFile._create_fingerprint(self.pod, pod_path)
kwargs['fingerprint'] = fingerprint
if 'locale' in kwargs:
normalized_locale = self.pod.normalize_locale(kwargs['locale'])
kwargs['locale'] = (
normalized_locale.alias if normalized_locale is not None
else normalized_locale)
matched_path = self.path_format.format(**kwargs)
matched_path = matched_path.replace('//', '/')
if self.fingerprinted:
fingerprint = StaticFile._create_fingerprint(self.pod, pod_path)
matched_path = StaticFile.apply_fingerprint(matched_path, fingerprint)
concrete_paths.add(matched_path)
return list(concrete_paths)
|
|
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""URL routing definitions, and some basic error/warmup handlers."""
__author__ = 'Sean Lip'
import feconf
import logging
from core.controllers import admin
from core.controllers import base
from core.controllers import editor
from core.controllers import feedback
from core.controllers import galleries
from core.controllers import home
from core.controllers import moderator
from core.controllers import pages
from core.controllers import profile
from core.controllers import reader
from core.controllers import recent_commits
from core.controllers import resources
from core.controllers import services
from core.platform import models
transaction_services = models.Registry.import_transaction_services()
from mapreduce import main as mapreduce_main
from mapreduce import parameters as mapreduce_parameters
import webapp2
from webapp2_extras.routes import RedirectRoute
class FrontendErrorHandler(base.BaseHandler):
"""Handles errors arising from the frontend."""
REQUIRE_PAYLOAD_CSRF_CHECK = False
def post(self):
"""Records errors reported by the frontend."""
logging.error('Frontend error: %s' % self.payload.get('error'))
self.render_json(self.values)
class WarmupHandler(base.BaseHandler):
"""Handles warmup requests."""
def get(self):
"""Handles GET warmup requests."""
pass
# Regex for base64 id encoding
r = '[A-Za-z0-9=_-]+'
def generate_static_url_tuples():
static_urls = []
url_tuples = []
for url in feconf.PATH_MAP:
static_urls.append(url + '.+')
for url in static_urls:
url_tuples.append((url, resources.StaticFileHandler))
return url_tuples
def get_redirect_route(regex_route, handler, name, defaults=None):
"""Returns a route that redirects /foo/ to /foo.
Warning: this method strips off parameters after the trailing slash. URLs
with parameters should be formulated without the trailing slash.
"""
if defaults is None:
defaults = {}
return RedirectRoute(
regex_route, handler, name, strict_slash=True, defaults=defaults)
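# For example, get_redirect_route(r'/about', pages.AboutPage, 'about_page')
# below serves '/about' and redirects '/about/' to it.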
def authorization_wrapper(self, *args, **kwargs):
# developers.google.com/appengine/docs/python/taskqueue/overview-push
# promises that this header cannot be set by external callers. If this
# is present, we can be certain that the request is internal and from
# the task queue worker.
if 'X-AppEngine-TaskName' not in self.request.headers:
self.response.out.write('Forbidden')
self.response.set_status(403)
return
self.real_dispatch(*args, **kwargs)
def ui_access_wrapper(self, *args, **kwargs):
self.real_dispatch(*args, **kwargs)
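# Wrap the MapReduce library's handlers: worker endpoints get the task-queue
# authorization check above, while UI endpoints are dispatched unchanged.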
mapreduce_handlers = []
for path, handler_class in mapreduce_main.create_handlers_map():
if path.startswith('.*/pipeline'):
if 'pipeline/rpc/' in path or path == '.*/pipeline(/.+)':
path = path.replace('.*/pipeline', '/mapreduce/ui/pipeline')
else:
path = path.replace('.*/pipeline', '/mapreduce/worker/pipeline')
else:
if '_callback' in path:
path = path.replace('.*', '/mapreduce/worker', 1)
elif '/list_configs' in path:
continue
else:
path = path.replace('.*', '/mapreduce/ui', 1)
if '/ui/' in path or path.endswith('/ui'):
if (hasattr(handler_class, 'dispatch') and
not hasattr(handler_class, 'real_dispatch')):
handler_class.real_dispatch = handler_class.dispatch
handler_class.dispatch = ui_access_wrapper
mapreduce_handlers.append((path, handler_class))
else:
if (hasattr(handler_class, 'dispatch') and
not hasattr(handler_class, 'real_dispatch')):
handler_class.real_dispatch = handler_class.dispatch
handler_class.dispatch = authorization_wrapper
mapreduce_handlers.append((path, handler_class))
# Tell map/reduce internals that this is now the base path to use.
mapreduce_parameters.config.BASE_PATH = '/mapreduce/worker'
# Register the URLs with the classes responsible for handling them.
urls = [
get_redirect_route(r'/_ah/warmup', WarmupHandler, 'warmup_handler'),
get_redirect_route(
r'/timeline', home.TimelinePage, 'timeline_page'),
get_redirect_route(
r'/timelinehandler/data', home.TimelineHandler,
'timeline_handler'),
get_redirect_route(
r'/my_explorations', home.MyExplorationsPage, 'my_explorations_page'),
get_redirect_route(
r'/myexplorationshandler/data', home.MyExplorationsHandler,
'my_explorations_handler'),
get_redirect_route(r'/about', pages.AboutPage, 'about_page'),
get_redirect_route(
r'/participate', pages.ParticipatePage, 'participate_page'),
get_redirect_route(
r'/site_guidelines', pages.ParticipatePage,
'redirect_to_participate_page'),
get_redirect_route(
r'/contact', pages.AboutPage, 'redirect_to_about_page'),
get_redirect_route(r'/forum', pages.ForumPage, 'forum_page'),
get_redirect_route(r'/admin', admin.AdminPage, 'admin_page'),
get_redirect_route(r'/adminhandler', admin.AdminHandler, 'admin_handler'),
get_redirect_route(
r'/adminjoboutput', admin.AdminJobOutput, 'admin_job_output'),
get_redirect_route(
r'/imagehandler/<exploration_id>/<encoded_filepath>',
resources.ImageHandler, 'image_handler'),
get_redirect_route(
r'/object_editor_template/<obj_type>',
resources.ObjectEditorTemplateHandler, 'object_editor_template'),
get_redirect_route(
r'/value_generator_handler/<generator_id>',
resources.ValueGeneratorHandler, 'value_generator_handler'),
get_redirect_route(r'/', galleries.GalleryPage, 'gallery_page'),
get_redirect_route(
r'%s' % feconf.GALLERY_URL, galleries.GalleryPage, 'gallery_page'),
get_redirect_route(
r'%s' % feconf.GALLERY_DATA_URL, galleries.GalleryHandler,
'gallery_handler'),
get_redirect_route(
r'%s' % feconf.LEARN_GALLERY_URL, galleries.GalleryRedirectPage,
'learn_gallery_page'),
get_redirect_route(
r'%s' % feconf.PLAYTEST_QUEUE_URL, galleries.GalleryRedirectPage,
'playtest_queue_page'),
get_redirect_route(
r'%s' % feconf.CONTRIBUTE_GALLERY_URL, galleries.GalleryRedirectPage,
'contribute_gallery_page'),
get_redirect_route(
r'%s' % feconf.NEW_EXPLORATION_URL,
galleries.NewExploration, 'new_exploration'),
get_redirect_route(
r'%s' % feconf.UPLOAD_EXPLORATION_URL,
galleries.UploadExploration, 'upload_exploration'),
get_redirect_route(
r'/explorationsummarieshandler/data',
galleries.ExplorationSummariesHandler, 'exploration_summaries_handler'),
get_redirect_route(
r'/profile/<username>', profile.ViewProfilePage, 'profile_page'),
get_redirect_route(
r'/preferences', profile.PreferencesPage, 'preferences_page'),
get_redirect_route(
r'/preferenceshandler/data', profile.PreferencesHandler,
'preferences_handler'),
get_redirect_route(
r'/preferenceshandler/profile_picture', profile.ProfilePictureHandler,
'profile_picture_handler'),
get_redirect_route(
r'%s' % feconf.SIGNUP_URL, profile.SignupPage, 'signup_page'),
get_redirect_route(
r'%s' % feconf.SIGNUP_DATA_URL, profile.SignupHandler,
'signup_handler'),
get_redirect_route(
r'%s' % feconf.USERNAME_CHECK_DATA_URL,
profile.UsernameCheckHandler, 'username_check_handler'),
get_redirect_route(
r'/moderator', moderator.ModeratorPage, 'moderator_page'),
get_redirect_route(
r'/moderatorhandler/user_services',
moderator.UserServiceHandler, 'moderator_user_service_handler'),
get_redirect_route(
r'%s/<exploration_id>' % feconf.EXPLORATION_URL_PREFIX,
reader.ExplorationPage, 'exploration_page'),
get_redirect_route(
r'%s/<exploration_id>' % feconf.EXPLORATION_INIT_URL_PREFIX,
reader.ExplorationHandler, 'exploration_handler'),
get_redirect_route(
r'/explorehandler/exploration_start_event/<exploration_id>',
reader.ExplorationStartEventHandler,
'exploration_start_event_handler'),
get_redirect_route(
r'/explorehandler/state_hit_event/<exploration_id>',
reader.StateHitEventHandler, 'state_hit_event_handler'),
get_redirect_route(
r'/explorehandler/answer_submitted_event/<exploration_id>',
reader.AnswerSubmittedEventHandler, 'answer_submitted_event_handler'),
get_redirect_route(
r'/explorehandler/give_feedback/<exploration_id>',
reader.ReaderFeedbackHandler, 'reader_feedback_handler'),
get_redirect_route(
r'/explorehandler/exploration_maybe_leave_event/<exploration_id>',
reader.ExplorationMaybeLeaveHandler, 'reader_leave_handler'),
get_redirect_route(
r'/explorehandler/classify/<exploration_id>', reader.ClassifyHandler,
'reader_classify_handler'),
get_redirect_route(
r'%s/<exploration_id>' % feconf.EDITOR_URL_PREFIX,
editor.ExplorationPage, 'editor_exploration_page'),
get_redirect_route(
r'/createhandler/data/<exploration_id>', editor.ExplorationHandler,
'editor_exploration_handler'),
get_redirect_route(
r'/createhandler/change_list_summary/<exploration_id>',
editor.ChangeListSummaryHandler, 'change_list_summary'),
get_redirect_route(
r'/createhandler/download/<exploration_id>',
editor.ExplorationDownloadHandler, 'exploration_download_handler'),
get_redirect_route(
r'/createhandler/download_state/<exploration_id>',
editor.StateDownloadHandler, 'state_download_handler'),
get_redirect_route(
r'/createhandler/imageupload/<exploration_id>',
editor.ImageUploadHandler, 'image_upload_handler'),
get_redirect_route(
r'/createhandler/resolved_answers/<exploration_id>/<escaped_state_name>',
editor.ResolvedAnswersHandler, 'resolved_answers_handler'),
get_redirect_route(
r'/createhandler/resource_list/<exploration_id>',
editor.ExplorationResourcesHandler, 'exploration_resources_handler'),
get_redirect_route(
r'/createhandler/revert/<exploration_id>',
editor.ExplorationRevertHandler, 'exploration_revert_handler'),
get_redirect_route(
r'%s/<exploration_id>' % feconf.EXPLORATION_RIGHTS_PREFIX,
editor.ExplorationRightsHandler, 'exploration_rights_handler'),
get_redirect_route(
r'/createhandler/snapshots/<exploration_id>',
editor.ExplorationSnapshotsHandler, 'exploration_snapshots_handler'),
get_redirect_route(
r'/createhandler/statisticsversion/<exploration_id>',
editor.ExplorationStatsVersionsHandler, 'exploration_stats_versions_handler'),
get_redirect_route(
r'/createhandler/statistics/<exploration_id>/<exploration_version>',
editor.ExplorationStatisticsHandler, 'exploration_statistics_handler'),
get_redirect_route(
r'/createhandler/state_rules_stats/<exploration_id>/<escaped_state_name>',
editor.StateRulesStatsHandler, 'state_rules_stats_handler'),
get_redirect_route(
r'/createhandler/started_tutorial_event/<exploration_id>',
editor.StartedTutorialEventHandler, 'started_tutorial_event_handler'),
get_redirect_route(
r'%s' % feconf.RECENT_COMMITS_DATA_URL,
recent_commits.RecentCommitsHandler, 'recent_commits_handler'),
get_redirect_route(
r'%s' % feconf.RECENT_FEEDBACK_MESSAGES_DATA_URL,
feedback.RecentFeedbackMessagesHandler,
'recent_feedback_messages_handler'),
get_redirect_route(
r'%s/<exploration_id>' % feconf.FEEDBACK_LAST_UPDATED_URL_PREFIX,
feedback.FeedbackLastUpdatedHandler, 'feedback_last_updated_handler'),
get_redirect_route(
r'%s/<exploration_id>' % feconf.FEEDBACK_THREADLIST_URL_PREFIX,
feedback.ThreadListHandler, 'feedback_threadlist_handler'),
get_redirect_route(
r'%s/<exploration_id>/<thread_id>' % feconf.FEEDBACK_THREAD_URL_PREFIX,
feedback.ThreadHandler, 'feedback_thread_handler'),
get_redirect_route(
r'/notificationshandler', home.NotificationsHandler,
'notifications_handler'),
get_redirect_route(
r'/filereadhandler', services.FileReadHandler, 'file_read_handler'),
get_redirect_route(
r'/frontend_errors', FrontendErrorHandler, 'frontend_error_handler'),
get_redirect_route(
r'/logout', base.LogoutPage, 'logout_page_handler'),
# 404 error handler.
get_redirect_route(r'/<:.*>', base.Error404Handler, 'error_404_handler'),
]
urls = mapreduce_handlers + urls
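# Assumption: toplevel_wrapper behaves like App Engine's ndb.toplevel,
# ensuring asynchronous datastore work started during a request completes
# before the response is returned.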
app = transaction_services.toplevel_wrapper(
webapp2.WSGIApplication(urls, debug=feconf.DEBUG))
|
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.python.framework.ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import device as pydev
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_ops
from tensorflow.python.framework import test_util
from tensorflow.python.framework import versions
from tensorflow.python.ops import common_shapes
from tensorflow.python.ops import constant_op
# Import gradients to register _IndexedSlicesToTensor.
import tensorflow.python.ops.gradients # pylint: disable=unused-import
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import googletest
class TensorTest(test_util.TensorFlowTestCase):
def testShape(self):
op = ops.Operation(ops._NodeDef("noop", "myop"), ops.Graph(),
[], [dtypes.float32])
t = op.outputs[0]
self.assertEqual(tensor_shape.unknown_shape(), t.get_shape())
t.set_shape([1, 2, 3])
self.assertEqual([1, 2, 3], t.get_shape())
def testIterable(self):
op = ops.Operation(
ops._NodeDef("noop", "myop"), ops.Graph(), [], [dtypes.float32])
t = op.outputs[0]
self.assertTrue(isinstance(t, ops.Tensor))
with self.assertRaisesRegexp(TypeError, "not iterable"):
for _ in t:
pass
class SparseTensorTest(test_util.TensorFlowTestCase):
def testPythonConstruction(self):
sp = ops.SparseTensor([[1, 2], [2, 0], [3, 4]], ["a", "b", "c"], [4, 5])
self.assertEqual(sp.indices.dtype, dtypes.int64)
self.assertEqual(sp.values.dtype, dtypes.string)
self.assertEqual(sp.shape.dtype, dtypes.int64)
class IndexedSlicesTest(test_util.TensorFlowTestCase):
def testToTensor(self):
with self.test_session():
values = constant_op.constant([2, 3, 5, 7], shape=[2, 2])
indices = constant_op.constant([0, 2])
dense_shape = constant_op.constant([3, 2])
x = ops.IndexedSlices(values, indices, dense_shape)
tensor = ops.convert_to_tensor(x, name="tensor")
self.assertAllEqual(tensor.eval(), [[2, 3], [0, 0], [5, 7]])
def testNegation(self):
with self.test_session():
values = constant_op.constant([2, 3, 5, 7], shape=[2, 2])
indices = constant_op.constant([0, 2])
x = -ops.IndexedSlices(values, indices)
self.assertAllEqual(x.values.eval(), [[-2, -3], [-5, -7]])
self.assertAllEqual(x.indices.eval(), [0, 2])
def testScalarMul(self):
with self.test_session():
values = constant_op.constant([2, 3, 5, 7], shape=[2, 2])
indices = constant_op.constant([0, 2])
x = math_ops.scalar_mul(-2, ops.IndexedSlices(values, indices))
self.assertAllEqual(x.values.eval(), [[-4, -6], [-10, -14]])
self.assertAllEqual(x.indices.eval(), [0, 2])
class NodeDefConstructorTest(test_util.TensorFlowTestCase):
def testNoArgs(self):
nodedef = ops._NodeDef("noop", "bar")
self.assertProtoEquals("op: 'noop' name: 'bar'", nodedef)
def testArgs(self):
nodedef = ops._NodeDef("foo", "bar", device="/device:baz:*")
self.assertProtoEquals("op:'foo' name:'bar' device:'/device:baz:*'",
nodedef)
nodedef = ops._NodeDef("foo", "bar", device=pydev.Device(job="j"))
self.assertProtoEquals("op:'foo' name:'bar' device:'/job:j'", nodedef)
# NOTE(mrry): Dummy shape registrations for ops used in the tests.
ops.RegisterShape("a")(None)
ops.RegisterShape("b")(None)
ops.RegisterShape("c")(None)
ops.RegisterShape("add")(None)
ops.RegisterShape("an_op")(None)
ops.RegisterShape("const")(None)
ops.RegisterShape("copy")(None)
ops.RegisterShape("foo")(None)
ops.RegisterShape("identity")(None)
ops.RegisterShape("mul")(None)
ops.RegisterShape("nonrefop")(None)
ops.RegisterShape("noop")(None)
ops.RegisterShape("refop")(None)
def _apply_op(g, *args, **kwargs):
op = g.create_op(*args, **kwargs)
if len(op.outputs) == 1:
return op.outputs[0]
else:
return op.outputs
class OperationTest(test_util.TensorFlowTestCase):
def testNoInputs(self):
op = ops.Operation(ops._NodeDef("noop", "myop"), ops.Graph(),
[],
[dtypes.float32, dtypes.string])
self.assertEqual(2, len(op.values()))
self.assertEqual(0, len(op.inputs))
self.assertEqual("myop", op.name)
float_t, label_str_t = op.values()
self.assertEqual(dtypes.float32, float_t.dtype)
self.assertEqual(op, float_t.op)
self.assertEqual(0, float_t._value_index)
self.assertEqual(0, len(float_t._consumers))
self.assertEqual("myop", float_t._as_node_def_input())
self.assertEqual(dtypes.string, label_str_t.dtype)
self.assertEqual(op, label_str_t.op)
self.assertEqual(1, label_str_t._value_index)
self.assertEqual(0, len(label_str_t._consumers))
self.assertEqual("myop:1", label_str_t._as_node_def_input())
self.assertProtoEquals("op:'noop' name:'myop'", op.node_def)
def testNoOutputs(self):
g = ops.Graph()
op1 = ops.Operation(
ops._NodeDef("noop", "myop1"), g, [], [dtypes.float32])
float_t, = op1.values()
op2 = ops.Operation(ops._NodeDef("reop", "myop2"), g, [float_t], [])
self.assertEqual(0, len(op2.values()))
self.assertEqual(1, len(op2.inputs))
self.assertIs(float_t, op2.inputs[0])
self.assertEqual(1, len(float_t._consumers))
self.assertEqual(op2, float_t._consumers[0])
self.assertProtoEquals("op:'noop' name:'myop1'", op1.node_def)
self.assertProtoEquals("op:'reop' name:'myop2' input:'myop1'",
op2.node_def)
def testInputsAndOutputs(self):
g = ops.Graph()
op1 = ops.Operation(
ops._NodeDef("noop", "myop1"), g, [], [dtypes.float32])
self.assertEqual(1, len(op1.values()))
float1_t, = op1.values()
op2 = ops.Operation(ops._NodeDef("reop", "myop2"), g,
[], [dtypes.float32, dtypes.string])
self.assertEqual(2, len(op2.values()))
float2_t, label2_str_t = op2.values()
# Note that we consume label2_str_t twice here.
op3 = ops.Operation(ops._NodeDef("add", "myop3"), g,
[float1_t, label2_str_t, label2_str_t],
[dtypes.float32, dtypes.int32])
self.assertEqual(2, len(op3.values()))
self.assertEqual(1, len(float1_t._consumers))
self.assertEqual(op3, float1_t._consumers[0])
self.assertEqual(0, len(float2_t._consumers))
self.assertEqual(2, len(label2_str_t._consumers))
self.assertEqual(op3, label2_str_t._consumers[0])
self.assertEqual(op3, label2_str_t._consumers[1])
self.assertProtoEquals("""
op:'add' name:'myop3'
input:'myop1' input:'myop2:1' input:'myop2:1'
""", op3.node_def)
def testDeviceObject(self):
op = ops.Operation(ops._NodeDef("noop", "myop"), ops.Graph(), [], [])
op._set_device("/job:goo/device:GPU:0")
self.assertProtoEquals(
"op:'noop' name:'myop' device:'/job:goo/device:GPU:0' ",
op.node_def)
op = ops.Operation(ops._NodeDef("noop", "op2"), ops.Graph(), [], [])
op._set_device(pydev.Device(job="muu", device_type="CPU", device_index=0))
self.assertProtoEquals(
"op:'noop' name:'op2' device:'/job:muu/device:CPU:0'",
op.node_def)
def testReferenceInput(self):
g = ops.Graph()
op1 = ops.Operation(ops._NodeDef("noop", "op1"), g, [],
[dtypes.float32_ref, dtypes.float32])
self.assertProtoEquals("op:'noop' name:'op1'",
op1.node_def)
ref_t, nonref_t = op1.values()
# NOTE(mrry): Must specify input_types to preserve ref-typed input.
op2 = ops.Operation(
ops._NodeDef("refop", "op2"), g, [ref_t, nonref_t], [],
input_types=[dtypes.float32_ref, dtypes.float32])
self.assertProtoEquals("op:'refop' name:'op2' input:'op1' input:'op1:1'",
op2.node_def)
op3 = ops.Operation(
ops._NodeDef("nonrefop", "op3"), g, [ref_t, nonref_t], [])
self.assertProtoEquals("op:'nonrefop' name:'op3' input:'op1' input:'op1:1'",
op3.node_def)
def testInvalidNames(self):
g = ops.Graph()
with self.assertRaises(ValueError):
ops.Operation(ops._NodeDef("op", ""), g)
with self.assertRaises(ValueError):
ops.Operation(ops._NodeDef("op", "_invalid"), g)
with self.assertRaises(ValueError):
ops.Operation(ops._NodeDef("op", "-invalid"), g)
with self.assertRaises(ValueError):
ops.Operation(ops._NodeDef("op", "/invalid"), g)
def testShapeFunctionAbsence(self):
def _test():
pass
g = ops.Graph()
with self.assertRaises(RuntimeError):
g.create_op("shapeless_op", [], [dtypes.float32])
def testNoShapeFunction(self):
g = ops.Graph()
op = ops.Operation(ops._NodeDef("op", "an_op"), g,
output_types=[dtypes.float32])
self.assertEqual(tensor_shape.unknown_shape(),
_apply_op(g, "an_op", [], [dtypes.float32]).get_shape())
class CreateOpTest(test_util.TensorFlowTestCase):
def testNodeDefArgs(self):
g = ops.Graph()
op1 = g.create_op("const", [], [dtypes.float32], None, name="myop1")
with g.device("/device:GPU:0"):
op2 = g.create_op("add",
[],
[dtypes.float32, dtypes.string], None,
name="myop2")
op3 = g.create_op("foo",
[list(op1.values())[0], list(op2.values())[1],
list(op2.values())[0]],
[dtypes.float32, dtypes.int32],
None,
name="myop3")
self.assertDeviceEqual(None, op1.device)
self.assertDeviceEqual("/device:GPU:0", op2.device)
self.assertDeviceEqual(None, op3.device)
self.assertProtoEquals("name:'myop1' op:'const'", op1.node_def)
self.assertProtoEquals("name:'myop2' op:'add' device:'/device:GPU:0'",
op2.node_def)
self.assertProtoEquals(
"name:'myop3' input:'myop1' input:'myop2:1' input:'myop2' op:'foo'",
op3.node_def)
def testReferenceInput(self):
g = ops.Graph()
op1 = g.create_op("noop", [],
[dtypes.float32_ref, dtypes.float32], name="op1")
self.assertProtoEquals("op:'noop' name:'op1'", op1.node_def)
ref_t, nonref_t = op1.values()
# NOTE(mrry): Must specify input_types to preserve ref-typed input.
op2 = g.create_op("refop", [ref_t, nonref_t], [],
input_types=[dtypes.float32_ref, dtypes.float32],
name="op2")
self.assertProtoEquals("op:'refop' name:'op2' input:'op1' input:'op1:1'",
op2.node_def)
op3 = g.create_op("nonrefop", [ref_t, nonref_t], [], name="op3")
self.assertProtoEquals("op:'nonrefop' name:'op3' input:'op1' input:'op1:1'",
op3.node_def)
def testFinalized(self):
g = ops.Graph()
g.finalize()
with self.assertRaises(RuntimeError):
g.create_op("const", [], [dtypes.float32], None, name="myop1")
class ApplyOpTest(test_util.TensorFlowTestCase):
def testNodeDefArgs(self):
g = ops.Graph()
t1 = _apply_op(g, "const", [], [dtypes.float32], name="myop1")
with g.device("/device:GPU:0"):
t2 = _apply_op(g, "add",
[],
[dtypes.float32, dtypes.string],
name="myop2")
t3 = _apply_op(g, "foo", [t1, t2[1], t2[0]],
[dtypes.float32, dtypes.int32], name="myop3")
self.assertTrue(isinstance(t1, ops.Tensor))
self.assertTrue(isinstance(t2, list))
self.assertTrue(isinstance(t3, list))
self.assertTrue(isinstance(t3[0], ops.Tensor))
self.assertEqual("myop1", t1._as_node_def_input())
self.assertEqual("myop2", t2[0]._as_node_def_input())
self.assertEqual("myop2:1", t2[1]._as_node_def_input())
self.assertEqual("myop3", t3[0]._as_node_def_input())
# Validate that we got the right ops as well
self.assertProtoEquals("name:'myop1' op:'const'", t1.op.node_def)
self.assertProtoEquals("name:'myop2' op:'add' device:'/device:GPU:0'",
t2[0].op.node_def)
self.assertProtoEquals(
"name:'myop3' input:'myop1' input:'myop2:1' input:'myop2' op:'foo'",
t3[0].op.node_def)
def testReferenceInput(self):
g = ops.Graph()
ref_t, nonref_t = _apply_op(
g, "noop", [], [dtypes.float32_ref, dtypes.float32], name="op1")
self.assertProtoEquals("op:'noop' name:'op1'", ref_t.op.node_def)
# NOTE(mrry): Must specify input_types to preserve ref-typed input.
out_2 = _apply_op(g, "refop", [ref_t, nonref_t], [dtypes.int32],
input_types=[dtypes.float32_ref, dtypes.float32],
name="op2")
self.assertProtoEquals("op:'refop' name:'op2' input:'op1' input:'op1:1'",
out_2.op.node_def)
out_3 = _apply_op(g, "nonrefop", [ref_t, nonref_t], [dtypes.int32],
name="op3")
self.assertProtoEquals("op:'nonrefop' name:'op3' input:'op1' input:'op1:1'",
out_3.op.node_def)
class NameStackTest(test_util.TensorFlowTestCase):
def testBasics(self):
g = ops.Graph()
self.assertEqual("foo", g.unique_name("foo", mark_as_used=False))
self.assertEqual("foo", g.unique_name("foo", mark_as_used=False))
self.assertEqual("foo", g.unique_name("foo"))
self.assertEqual("foo_1", g.unique_name("foo", mark_as_used=False))
self.assertEqual("foo_1", g.unique_name("foo"))
self.assertEqual("foo_2", g.unique_name("foo", mark_as_used=False))
self.assertEqual("foo_2", g.unique_name("foo"))
self.assertEqual("foo_1_1", g.unique_name("foo_1", mark_as_used=False))
self.assertEqual("foo_1_1", g.unique_name("foo_1"))
self.assertEqual("foo_1_2", g.unique_name("foo_1", mark_as_used=False))
self.assertEqual("foo_1_2", g.unique_name("foo_1"))
self.assertEqual("foo_1_2_1", g.unique_name("foo_1_2", mark_as_used=False))
self.assertEqual("foo_1_2_1", g.unique_name("foo_1_2"))
with g.name_scope("bar"):
self.assertEqual("bar/foo", g.unique_name("foo", mark_as_used=False))
self.assertEqual("bar/foo", g.unique_name("foo"))
self.assertEqual("bar/foo_1", g.unique_name("foo", mark_as_used=False))
self.assertEqual("bar/foo_1", g.unique_name("foo"))
with g.name_scope(None):
self.assertEqual("foo_3", g.unique_name("foo", mark_as_used=False))
self.assertEqual("foo_3", g.unique_name("foo"))
with g.name_scope("baz"):
self.assertEqual("bar/baz/foo", g.unique_name("foo",
mark_as_used=False))
self.assertEqual("bar/baz/foo", g.unique_name("foo"))
self.assertEqual("bar/baz/foo_1", g.unique_name("foo",
mark_as_used=False))
self.assertEqual("bar/baz/foo_1", g.unique_name("foo"))
with g.name_scope("baz"):
self.assertEqual("bar/baz_1/foo", g.unique_name("foo",
mark_as_used=False))
self.assertEqual("bar/baz_1/foo", g.unique_name("foo"))
self.assertEqual("bar/baz_1/foo_1", g.unique_name("foo",
mark_as_used=False))
self.assertEqual("bar/baz_1/foo_1", g.unique_name("foo"))
with g.name_scope("quux"):
self.assertEqual("quux/foo", g.unique_name("foo", mark_as_used=False))
self.assertEqual("quux/foo", g.unique_name("foo"))
with g.name_scope("bar"):
with g.name_scope("baz"):
self.assertEqual("bar_1/baz/foo", g.unique_name("foo",
mark_as_used=False))
self.assertEqual("bar_1/baz/foo", g.unique_name("foo"))
self.assertEqual("foo_4", g.unique_name("foo", mark_as_used=False))
self.assertEqual("foo_4", g.unique_name("foo"))
self.assertEqual("bar_2", g.unique_name("bar", mark_as_used=False))
self.assertEqual("bar_2", g.unique_name("bar"))
def testNameAndVariableScope(self):
with self.test_session() as sess:
with sess.graph.name_scope("l0"):
with variable_scope.variable_scope("l1"):
with sess.graph.name_scope("l1") as scope:
self.assertEqual("l0/l1/l1/", scope)
self.assertEqual("l0/l1/l1/foo",
sess.graph.unique_name("foo", mark_as_used=False))
self.assertEqual("l0/l1/l1/foo", sess.graph.unique_name("foo"))
with sess.graph.name_scope("l2") as scope:
self.assertEqual("l0/l1/l2/", scope)
self.assertEqual("l0/l1/l2/foo",
sess.graph.unique_name("foo", mark_as_used=False))
self.assertEqual("l0/l1/l2/foo", sess.graph.unique_name("foo"))
def testOutOfOrderUniqueName(self):
g = ops.Graph()
self.assertEqual("foo_2", g.unique_name("foo_2"))
self.assertEqual("foo", g.unique_name("foo"))
self.assertEqual("foo_1", g.unique_name("foo"))
self.assertEqual("foo_3", g.unique_name("foo"))
class NameTest(test_util.TensorFlowTestCase):
def testGenerateName(self):
g = ops.Graph()
op0 = g.create_op("const", [], [dtypes.float32, dtypes.float32])
self.assertEqual("const", op0.name)
self.assertEqual("const:0", op0.outputs[0].name)
self.assertEqual("const:1", op0.outputs[1].name)
op1 = g.create_op("const", [], [dtypes.float32])
self.assertEqual("const_1", op1.name)
self.assertEqual("const_1:0", op1.outputs[0].name)
op2 = g.create_op("const", [], [dtypes.float32], name="my_op")
self.assertEqual("my_op", op2.name)
self.assertEqual("my_op:0", op2.outputs[0].name)
def testNameScope(self):
g = ops.Graph()
with g.name_scope("foo") as foo:
self.assertEqual("foo/", foo)
with g.name_scope("foo2") as foo2:
self.assertEqual("foo/foo2/", foo2)
with g.name_scope(None) as empty1:
self.assertEqual("", empty1)
with g.name_scope("foo3") as foo3:
self.assertEqual("foo3/", foo3)
with g.name_scope("") as empty2:
self.assertEqual("", empty2)
self.assertEqual("const",
g.create_op("const", [], [dtypes.float32]).name)
with g.name_scope("bar") as scope:
self.assertEqual("bar/const",
g.create_op("const", [], [dtypes.float32]).name)
self.assertEqual("bar/const_1",
g.create_op("const", [], [dtypes.float32]).name)
# If you use the value from "with .. as", that value is used as-is.
self.assertEqual(
"bar",
g.create_op("const", [], [dtypes.float32], name=scope).name)
with g.name_scope("baz") as scope:
with g.name_scope("quux"):
self.assertEqual("baz/quux/const",
g.create_op("const", [], [dtypes.float32]).name)
# If you use the value from the enclosing "with .. as", nothing is pushed.
with g.name_scope(scope):
self.assertEqual("baz/const",
g.create_op("const", [], [dtypes.float32]).name)
self.assertEqual("baz",
g.create_op("const", [], [dtypes.float32],
name=scope).name)
self.assertEqual("trailing",
g.create_op("const", [], [dtypes.float32],
name="trailing/").name)
with g.name_scope("bar"):
self.assertEqual("bar_1/const",
g.create_op("const", [], [dtypes.float32]).name)
with g.name_scope("bar/"):
self.assertEqual("bar/const_2",
g.create_op("const", [], [dtypes.float32]).name)
class DeviceTest(test_util.TensorFlowTestCase):
def testNoDevice(self):
g = ops.Graph()
op = g.create_op("an_op", [], [dtypes.float32])
self.assertDeviceEqual(None, op.device)
gd = g.as_graph_def()
self.assertProtoEqualsVersion("""
node { name: "an_op" op: "an_op" }
""", gd)
def testDevicePartialString(self):
g = ops.Graph()
with g.device("/job:worker/replica:2"):
g.create_op("an_op", [], [dtypes.float32])
gd = g.as_graph_def()
self.assertProtoEqualsVersion("""
node { name: "an_op" op: "an_op" device: "/job:worker/replica:2" }
""", gd)
def testDeviceFull(self):
g = ops.Graph()
with g.device(pydev.Device(job="worker", replica=2, task=0,
device_type="CPU",
device_index=3)):
g.create_op("an_op", [], [dtypes.float32])
gd = g.as_graph_def()
self.assertProtoEqualsVersion("""
node { name: "an_op" op: "an_op"
device: "/job:worker/replica:2/task:0/device:CPU:3" }
""", gd)
def testNesting(self):
g = ops.Graph()
with g.device("/job:worker/replica:2"):
g.create_op("an_op", [], [dtypes.float32])
with g.device("/job:worker/replica:3/task:0"):
g.create_op("an_op", [], [dtypes.float32])
g.create_op("an_op", [], [dtypes.float32])
gd = g.as_graph_def()
self.assertProtoEqualsVersion("""
node { name: "an_op" op: "an_op"
device: "/job:worker/replica:2" }
node { name: "an_op_1" op: "an_op"
device: "/job:worker/replica:3/task:0" }
node { name: "an_op_2" op: "an_op"
device: "/job:worker/replica:2" }
""", gd)
def testNestingString(self):
g = ops.Graph()
with g.device("/job:worker/replica:2"):
g.create_op("an_op", [], [dtypes.float32])
with g.device("/job:worker/replica:3/task:0"):
g.create_op("an_op", [], [dtypes.float32])
g.create_op("an_op", [], [dtypes.float32])
gd = g.as_graph_def()
self.assertProtoEqualsVersion("""
node { name: "an_op" op: "an_op"
device: "/job:worker/replica:2" }
node { name: "an_op_1" op: "an_op"
device: "/job:worker/replica:3/task:0" }
node { name: "an_op_2" op: "an_op"
device: "/job:worker/replica:2" }
""", gd)
def testNestingOverrideGpuCpu(self):
g = ops.Graph()
with g.device("/job:worker/replica:2/device:CPU:1"):
g.create_op("an_op", [], [dtypes.float32])
with g.device("/job:worker/replica:2/device:GPU:2"):
g.create_op("an_op", [], [dtypes.float32])
g.create_op("an_op", [], [dtypes.float32])
gd = g.as_graph_def()
self.assertProtoEqualsVersion("""
node { name: "an_op" op: "an_op"
device: "/job:worker/replica:2/device:CPU:1" }
node { name: "an_op_1" op: "an_op"
device: "/job:worker/replica:2/device:GPU:2" }
node { name: "an_op_2" op: "an_op"
device: "/job:worker/replica:2/device:CPU:1" }
""", gd)
def testNestingWithMergeDeviceFunction(self):
g = ops.Graph()
with g.device(pydev.merge_device("/device:GPU:0")):
g.create_op("an_op", [], [dtypes.float32])
with g.device(pydev.merge_device("/job:worker")):
g.create_op("an_op", [], [dtypes.float32])
with g.device(pydev.merge_device("/device:CPU:0")):
g.create_op("an_op", [], [dtypes.float32])
with g.device(pydev.merge_device("/job:ps")):
g.create_op("an_op", [], [dtypes.float32])
with g.device(pydev.merge_device(None)):
g.create_op("an_op", [], [dtypes.float32])
gd = g.as_graph_def()
self.assertProtoEqualsVersion("""
node { name: "an_op" op: "an_op"
device: "/device:GPU:0" }
node { name: "an_op_1" op: "an_op"
device: "/job:worker/device:GPU:0" }
node { name: "an_op_2" op: "an_op"
device: "/job:worker/device:CPU:0" }
node { name: "an_op_3" op: "an_op"
device: "/job:ps/device:CPU:0" }
node { name: "an_op_4" op: "an_op"
device: "/job:ps/device:CPU:0" }
""", gd)
def testNestingWithDeviceStrings(self):
g = ops.Graph()
with g.device("/device:GPU:0"):
g.create_op("an_op", [], [dtypes.float32])
with g.device("/job:worker"):
g.create_op("an_op", [], [dtypes.float32])
with g.device("/device:CPU:0"):
g.create_op("an_op", [], [dtypes.float32])
with g.device("/job:ps"):
g.create_op("an_op", [], [dtypes.float32])
with g.device(""):
g.create_op("an_op", [], [dtypes.float32])
gd = g.as_graph_def()
self.assertProtoEqualsVersion("""
node { name: "an_op" op: "an_op"
device: "/device:GPU:0" }
node { name: "an_op_1" op: "an_op"
device: "/job:worker/device:GPU:0" }
node { name: "an_op_2" op: "an_op"
device: "/job:worker/device:CPU:0" }
node { name: "an_op_3" op: "an_op"
device: "/job:ps/device:CPU:0" }
node { name: "an_op_4" op: "an_op"
device: "/job:ps/device:CPU:0" }
""", gd)
def testNestingWithDeviceStringWildcard(self):
g = ops.Graph()
with g.device("/device:GPU:7"):
g.create_op("an_op", [], [dtypes.float32])
with g.device("/device:GPU:*"):
g.create_op("an_op", [], [dtypes.float32])
with g.device("/device:CPU:*"):
g.create_op("an_op", [], [dtypes.float32])
with g.device("/device:CPU:5"):
g.create_op("an_op", [], [dtypes.float32])
gd = g.as_graph_def()
self.assertProtoEqualsVersion("""
node { name: "an_op" op: "an_op"
device: "/device:GPU:7" }
node { name: "an_op_1" op: "an_op"
device: "/device:GPU:7" }
node { name: "an_op_2" op: "an_op"
device: "/device:CPU:*" }
node { name: "an_op_3" op: "an_op"
device: "/device:CPU:5" }
""", gd)
def testNoneClearsDefault(self):
g = ops.Graph()
with g.device("/job:worker/replica:2/device:CPU:1"):
g.create_op("an_op", [], [dtypes.float32])
with g.device(None):
g.create_op("an_op", [], [dtypes.float32])
g.create_op("an_op", [], [dtypes.float32])
gd = g.as_graph_def()
self.assertProtoEqualsVersion("""
node { name: "an_op" op: "an_op"
device: "/job:worker/replica:2/device:CPU:1" }
node { name: "an_op_1" op: "an_op" }
node { name: "an_op_2" op: "an_op"
device: "/job:worker/replica:2/device:CPU:1" }
""", gd)
def testNoneIgnoresOuterDeviceFunction(self):
g = ops.Graph()
with g.device(lambda op: "/job:worker/replica:2/device:CPU:1"):
g.create_op("an_op", [], [dtypes.float32])
with g.device(None):
g.create_op("an_op", [], [dtypes.float32])
g.create_op("an_op", [], [dtypes.float32])
gd = g.as_graph_def()
self.assertProtoEqualsVersion("""
node { name: "an_op" op: "an_op"
device: "/job:worker/replica:2/device:CPU:1" }
node { name: "an_op_1" op: "an_op" }
node { name: "an_op_2" op: "an_op"
device: "/job:worker/replica:2/device:CPU:1" }
""", gd)
def _overwritingDeviceFunction(self, unused_op):
# This device function unconditionally overwrites the device of ops.
#
# NOTE(mrry): Writing device functions like this is not
# recommended. Instead, in most cases you should use
# `pydev.merge_device("/job:ps")` or simply `"/job:ps"` as the
# argument to `tf.device()` and the device component will be merged in.
return "/job:overwrite"
def testOverwritingBehavior(self):
g = ops.Graph()
with g.device(self._overwritingDeviceFunction):
g.create_op("an_op", [], [dtypes.float32])
with g.device("/job:ps"): # Will be overwritten.
g.create_op("an_op", [], [dtypes.float32])
with g.device(pydev.merge_device("/job:ps")): # Will be overwritten.
g.create_op("an_op", [], [dtypes.float32])
with g.device(None): # Disables overwriting device function
with g.device("/job:ps"):
g.create_op("an_op", [], [dtypes.float32])
with g.device(None): # Disables overwriting device function
with g.device(pydev.merge_device("/job:ps")):
g.create_op("an_op", [], [dtypes.float32])
gd = g.as_graph_def()
self.assertProtoEqualsVersion("""
node { name: "an_op" op: "an_op"
device: "/job:overwrite" }
node { name: "an_op_1" op: "an_op"
device: "/job:overwrite" }
node { name: "an_op_2" op: "an_op"
device: "/job:overwrite" }
node { name: "an_op_3" op: "an_op"
device: "/job:ps" }
node { name: "an_op_4" op: "an_op"
device: "/job:ps" }
""", gd)
class ObjectWithName(object):
def __init__(self, name):
self._name = name
@property
def name(self):
return self._name
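# CollectionTest below uses ObjectWithName as a stand-in for graph elements
# (such as variables) whose .name attribute drives Graph.get_collection()'s
# scope-prefix filtering.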
class CollectionTest(test_util.TensorFlowTestCase):
def testadd_to_collection(self):
g = ops.Graph()
g.add_to_collection("key", 12)
g.add_to_collection("other", "foo")
g.add_to_collection("key", 34)
# Note that only blank1 is returned by the prefix-scoped get_collection below.
g.add_to_collection("blah", 27)
blank1 = ObjectWithName("prefix/foo")
g.add_to_collection("blah", blank1)
blank2 = ObjectWithName("junk/foo")
g.add_to_collection("blah", blank2)
self.assertEqual(["foo"], g.get_collection("other"))
self.assertEqual([12, 34], g.get_collection("key"))
self.assertEqual([], g.get_collection("nothing"))
self.assertEqual([27, blank1, blank2], g.get_collection("blah"))
self.assertEqual([blank1], g.get_collection("blah", "prefix"))
def testDefaultGraph(self):
with ops.Graph().as_default():
ops.add_to_collection("key", 90)
ops.add_to_collection("key", 100)
# Collections are ordered.
self.assertEqual([90, 100], ops.get_collection("key"))
def an_op(g):
return _apply_op(g, "an_op", [], [dtypes.float32])
ops.NoGradient("an_op")
def copy_op(x):
return _apply_op(x.graph, "copy", [x], [x.dtype])
@ops.RegisterGradient("copy")
def _CopyGrad(op, x_grad):
_ = op
return x_grad
@ops.RegisterGradient("copy_override")
def _CopyOverrideGrad(op, x_grad):
_ = op
return x_grad
class RegistrationTest(test_util.TensorFlowTestCase):
def testRegisterGradients(self):
g = ops.Graph()
x = an_op(g)
y = copy_op(x)
fn = ops.get_gradient_function(y.op)
self.assertEqual(_CopyGrad, fn)
def testOverrideGradients(self):
g = ops.Graph()
x = an_op(g)
with g.gradient_override_map({"copy": "copy_override"}):
y = copy_op(x)
fn = ops.get_gradient_function(y.op)
self.assertEqual(_CopyOverrideGrad, fn)
def testNonExistentOverride(self):
g = ops.Graph()
x = an_op(g)
with g.gradient_override_map({"copy": "unknown_override"}):
y = copy_op(x)
with self.assertRaisesRegexp(LookupError, "unknown_override"):
fn = ops.get_gradient_function(y.op)
class ComparisonTest(test_util.TensorFlowTestCase):
def testMembershipAllowed(self):
g = ops.Graph()
t1 = _apply_op(g, "const", [], [dtypes.float32], name="myop1")
t2 = _apply_op(g, "const", [], [dtypes.float32], name="myop2")
self.assertTrue(isinstance(t1, ops.Tensor))
self.assertTrue(isinstance(t2, ops.Tensor))
self.assertTrue(t1 in [t1])
self.assertTrue(t1 not in [t2])
class ControlDependenciesTest(test_util.TensorFlowTestCase):
def testBasic(self):
g = ops.Graph()
a = _apply_op(g, "const", [], [dtypes.float32])
b = _apply_op(g, "const", [], [dtypes.float32])
with g.control_dependencies([a]):
c = _apply_op(g, "const", [], [dtypes.float32])
d = _apply_op(g, "identity", [b], [dtypes.float32])
e = _apply_op(g, "identity", [c], [dtypes.float32])
self.assertEqual(c.op.control_inputs, [a.op])
self.assertEqual(d.op.control_inputs, [a.op])
# e is dominated by c, which already carries the control dependency on a,
# so e gets no explicit control inputs of its own.
self.assertEqual(e.op.control_inputs, [])
def testBasicWithConversion(self):
g = ops.Graph()
a = _apply_op(g, "const", [], [dtypes.float32])
class ConvertibleObj(object):
def _as_graph_element(self):
return a
with g.control_dependencies([ConvertibleObj()]):
c = _apply_op(g, "const", [], [dtypes.float32])
self.assertEqual(c.op.control_inputs, [a.op])
def testNested(self):
g = ops.Graph()
a_1 = _apply_op(g, "const", [], [dtypes.float32])
a_2 = _apply_op(g, "const", [], [dtypes.float32])
a_3 = _apply_op(g, "const", [], [dtypes.float32])
a_4 = _apply_op(g, "const", [], [dtypes.float32])
with g.control_dependencies([a_1, a_2, a_3, a_4]):
b_1 = _apply_op(g, "const", [], [dtypes.float32])
with g.control_dependencies([a_1]):
with g.control_dependencies([a_2]):
with g.control_dependencies([a_3]):
with g.control_dependencies([a_4]):
b_2 = _apply_op(g, "const", [], [dtypes.float32])
self.assertItemsEqual(
[a_1.op, a_2.op, a_3.op, a_4.op], b_1.op.control_inputs)
self.assertItemsEqual(b_1.op.control_inputs, b_2.op.control_inputs)
def testClear(self):
g = ops.Graph()
a_1 = _apply_op(g, "const", [], [dtypes.float32])
a_2 = _apply_op(g, "const", [], [dtypes.float32])
a_3 = _apply_op(g, "const", [], [dtypes.float32])
a_4 = _apply_op(g, "const", [], [dtypes.float32])
with g.control_dependencies([a_1]):
with g.control_dependencies([a_2]):
with g.control_dependencies(None):
with g.control_dependencies([a_3]):
with g.control_dependencies([a_4]):
# deps [a_3, a_4]
b_3_4 = _apply_op(g, "const", [], [dtypes.float32])
# deps = [a_3]
b_3 = _apply_op(g, "const", [], [dtypes.float32])
# deps back to None
b_none = _apply_op(g, "const", [], [dtypes.float32])
# deps back to [a_1, a_2]
b_1_2 = _apply_op(g, "const", [], [dtypes.float32])
# deps back to [a_1]
b_1 = _apply_op(g, "const", [], [dtypes.float32])
with g.control_dependencies(None):
# deps are None again
b_none2 = _apply_op(g, "const", [], [dtypes.float32])
self.assertItemsEqual([a_3.op, a_4.op], b_3_4.op.control_inputs)
self.assertItemsEqual([a_3.op], b_3.op.control_inputs)
self.assertItemsEqual([], b_none.op.control_inputs)
self.assertItemsEqual([a_1.op, a_2.op], b_1_2.op.control_inputs)
self.assertItemsEqual([a_1.op], b_1.op.control_inputs)
self.assertItemsEqual([], b_none2.op.control_inputs)
def testComplex(self):
g = ops.Graph()
# Usage pattern:
# * Nodes a_i are constants defined at the outermost scope, and are used
# as control inputs for the ith nested scope.
# * Nodes b_i are defined as Mul(a_3, a_4) at each scope.
# * Nodes c_i are defined as Mul(a_1, b_1) at each scope.
# * Nodes d_i are defined as Mul(b_i, c_i) at each scope.
# * Nodes e_i are defined as Mul(e_i-1, e_i-1) at each scope i > 1.
a_1 = _apply_op(g, "const", [], [dtypes.float32])
a_2 = _apply_op(g, "const", [], [dtypes.float32])
a_3 = _apply_op(g, "const", [], [dtypes.float32])
a_4 = _apply_op(g, "const", [], [dtypes.float32])
with g.control_dependencies([a_1]):
b_1 = _apply_op(g, "mul", [a_3, a_4], [dtypes.float32])
c_1 = _apply_op(g, "mul", [a_1, b_1], [dtypes.float32])
d_1 = _apply_op(g, "mul", [b_1, c_1], [dtypes.float32])
e_1 = _apply_op(g, "const", [], [dtypes.float32])
with g.control_dependencies([a_2]):
b_2 = _apply_op(g, "mul", [a_3, a_4], [dtypes.float32])
c_2 = _apply_op(g, "mul", [a_1, b_1], [dtypes.float32])
d_2 = _apply_op(g, "mul", [b_2, c_2], [dtypes.float32])
e_2 = _apply_op(g, "mul", [e_1, e_1], [dtypes.float32])
with g.control_dependencies([a_3]):
b_3 = _apply_op(g, "mul", [a_3, a_4], [dtypes.float32])
c_3 = _apply_op(g, "mul", [a_1, b_1], [dtypes.float32])
d_3 = _apply_op(g, "mul", [b_3, c_3], [dtypes.float32])
e_3 = _apply_op(g, "mul", [e_2, e_2], [dtypes.float32])
with g.control_dependencies([a_4]):
b_4 = _apply_op(g, "mul", [a_3, a_4], [dtypes.float32])
c_4 = _apply_op(g, "mul", [a_1, b_1], [dtypes.float32])
d_4 = _apply_op(g, "mul", [b_4, c_4], [dtypes.float32])
e_4 = _apply_op(g, "mul", [e_3, e_3], [dtypes.float32])
self.assertItemsEqual([a_1.op], b_1.op.control_inputs)
self.assertItemsEqual([a_1.op, a_2.op], b_2.op.control_inputs)
self.assertItemsEqual([a_1.op, a_2.op], b_3.op.control_inputs)
self.assertItemsEqual([a_1.op, a_2.op], b_4.op.control_inputs)
self.assertItemsEqual([], c_1.op.control_inputs)
self.assertItemsEqual([a_2.op], c_2.op.control_inputs)
self.assertItemsEqual([a_2.op, a_3.op], c_3.op.control_inputs)
self.assertItemsEqual([a_2.op, a_3.op, a_4.op], c_4.op.control_inputs)
self.assertItemsEqual([], d_1.op.control_inputs)
self.assertItemsEqual([], d_2.op.control_inputs)
self.assertItemsEqual([], d_3.op.control_inputs)
self.assertItemsEqual([], d_4.op.control_inputs)
self.assertItemsEqual([a_1.op], e_1.op.control_inputs)
self.assertItemsEqual([a_2.op], e_2.op.control_inputs)
self.assertItemsEqual([a_3.op], e_3.op.control_inputs)
self.assertItemsEqual([a_4.op], e_4.op.control_inputs)
def testRepeatedDependency(self):
g = ops.Graph()
a = g.create_op("foo", [], [dtypes.float32, dtypes.float32])
a_0, a_1 = a.outputs
with g.control_dependencies([a_0]):
b = _apply_op(g, "const", [], [dtypes.float32])
with g.control_dependencies([a_1]):
c = _apply_op(g, "const", [], [dtypes.float32])
self.assertEqual(b.op.control_inputs, [a])
self.assertEqual(c.op.control_inputs, [a])
def testNoControlDependencyWithDataDependency(self):
g = ops.Graph()
a = _apply_op(g, "const", [], [dtypes.float32])
with g.control_dependencies([a]):
b = _apply_op(g, "identity", [a], [dtypes.float32])
self.assertEqual(b.op.control_inputs, [])
class OpScopeTest(test_util.TensorFlowTestCase):
def testNoScopeName(self):
g0 = ops.Graph()
values = [
g0.create_op("a", [], [dtypes.float32]),
g0.create_op("b", [], [dtypes.float32])]
with self.assertRaises(ValueError):
with ops.op_scope(values, None):
pass
with self.assertRaises(ValueError):
with ops.op_scope(values, None, None):
pass
def testEmptyScopeName(self):
g0 = ops.Graph()
a = g0.create_op("a", [], [dtypes.float32])
b = g0.create_op("b", [], [dtypes.float32])
with ops.op_scope([a, b], "") as scope:
self.assertEqual("", scope)
self.assertEqual(g0, ops.get_default_graph())
with ops.op_scope([a, b], "", "my_default_scope") as scope:
self.assertEqual("", scope)
self.assertEqual(g0, ops.get_default_graph())
def testDefaultScopeName(self):
g0 = ops.Graph()
a = g0.create_op("a", [], [dtypes.float32])
b = g0.create_op("b", [], [dtypes.float32])
scope_name = "my_scope"
default_scope_name = "my_default_scope"
with ops.op_scope([a, b], scope_name, default_scope_name) as scope:
self.assertEqual("%s/" % scope_name, scope)
self.assertEqual(g0, ops.get_default_graph())
with ops.op_scope([a, b], None, default_scope_name) as scope:
self.assertEqual("%s/" % default_scope_name, scope)
self.assertEqual(g0, ops.get_default_graph())
def _testGraphElements(self, graph_elements):
scope_name = "my_scope"
with ops.op_scope(graph_elements, scope_name) as scope:
self.assertEqual("%s/" % scope_name, scope)
self.assertEqual(graph_elements[0].graph, ops.get_default_graph())
g1 = ops.Graph()
c = g1.create_op("c", [], [dtypes.float32])
with self.assertRaises(ValueError):
with ops.op_scope(graph_elements + [c], scope_name):
pass
def testTensor(self):
g0 = ops.Graph()
a = g0.create_op("a", [], [dtypes.float32])
b = g0.create_op("b", [], [dtypes.float32])
self._testGraphElements([a, b])
def testSparseTensor(self):
g0 = ops.Graph()
a = g0.create_op("a", [], [dtypes.float32])
b = g0.create_op("b", [], [dtypes.float32])
sparse = ops.SparseTensor(
_apply_op(g0, "const", [], [dtypes.int64]),
_apply_op(g0, "const", [], [dtypes.float32]),
_apply_op(g0, "const", [], [dtypes.int64]))
self._testGraphElements([a, sparse, b])
def testVariable(self):
g0 = ops.Graph()
with g0.as_default():
variable = variables.Variable([1.0])
a = g0.create_op("a", [], [dtypes.float32])
b = g0.create_op("b", [], [dtypes.float32])
self._testGraphElements([a, variable, b])
class GraphTest(test_util.TensorFlowTestCase):
def setUp(self):
ops.reset_default_graph()
def _AssertDefault(self, expected):
self.assertIs(expected, ops.get_default_graph())
def testGraphContextManager(self):
g0 = ops.Graph()
with g0.as_default() as g1:
self.assertIs(g0, g1)
def testDefaultGraph(self):
orig = ops.get_default_graph()
self._AssertDefault(orig)
g0 = ops.Graph()
self._AssertDefault(orig)
context_manager_0 = g0.as_default()
self._AssertDefault(orig)
with context_manager_0 as g0:
self._AssertDefault(g0)
with ops.Graph().as_default() as g1:
self._AssertDefault(g1)
self._AssertDefault(g0)
self._AssertDefault(orig)
def testAsGraphElementConversions(self):
class ConvertibleObj(object):
def _as_graph_element(self):
return "const:0"
class NonConvertibleObj(object):
pass
g = ops.Graph()
a = _apply_op(g, "const", [], [dtypes.float32])
self.assertEqual(a, g.as_graph_element(ConvertibleObj()))
with self.assertRaises(TypeError):
g.as_graph_element(NonConvertibleObj())
ops.RegisterShape("KernelLabel")(common_shapes.scalar_shape)
class KernelLabelTest(test_util.TensorFlowTestCase):
def testNoLabel(self):
with self.test_session():
self.assertAllEqual(b"My label is: default",
test_ops.kernel_label().eval())
def testLabelMap(self):
with self.test_session() as sess:
default_1 = test_ops.kernel_label()
# pylint: disable=protected-access
with sess.graph._kernel_label_map({"KernelLabel": "overload_1"}):
overload_1_1 = test_ops.kernel_label()
with sess.graph._kernel_label_map({"KernelLabel": "overload_2"}):
overload_2 = test_ops.kernel_label()
with sess.graph._kernel_label_map({"KernelLabel": ""}):
default_2 = test_ops.kernel_label()
overload_1_2 = test_ops.kernel_label()
# pylint: enable=protected-access
default_3 = test_ops.kernel_label()
self.assertAllEqual(b"My label is: default", default_1.eval())
self.assertAllEqual(b"My label is: default", default_2.eval())
self.assertAllEqual(b"My label is: default", default_3.eval())
self.assertAllEqual(b"My label is: overload_1", overload_1_1.eval())
self.assertAllEqual(b"My label is: overload_1", overload_1_2.eval())
self.assertAllEqual(b"My label is: overload_2", overload_2.eval())
class AsGraphDefTest(test_util.TensorFlowTestCase):
def testGraphDefVersion(self):
"""Test that the graphdef version is plumbed through to kernels."""
for version in range(versions.GRAPH_DEF_VERSION_MIN_PRODUCER,
versions.GRAPH_DEF_VERSION + 2):
with ops.Graph().as_default() as g:
g.graph_def_versions.producer = version
with self.test_session(graph=g):
v = test_ops.graph_def_version().eval()
self.assertEqual(version, v)
def testAddShapes(self):
with ops.Graph().as_default() as g:
t1, t2, t3, t4, t5 = _apply_op(g, "an_op", [], [dtypes.float32] * 5)
t1.set_shape(None)
t2.set_shape([])
t3.set_shape([None])
t4.set_shape([43, 37])
t5.set_shape([43, None])
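      # In the serialized _output_shapes attr below, unknown rank becomes
      # `unknown_rank: true` and each unknown dimension is written as size -1.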
gd = g.as_graph_def(add_shapes=True)
self.assertProtoEqualsVersion("""
node { name: "an_op" op: "an_op"
attr {
key: "_output_shapes"
value {
list {
shape { unknown_rank: true }
shape { }
shape { dim { size: -1 } }
shape { dim { size: 43 } dim { size: 37 } }
shape { dim { size: 43 } dim { size: -1 } }
}
}
}
}
""", gd)
# NOTE(petewarden): Dummy stats registrations for ops used in the tests.
@ops.RegisterStatistics("a", "weight_parameters")
def _calc_a_weight_params(unused_graph, unused_node):
return ops.OpStats("weight_parameters", 10)
@ops.RegisterStatistics("a", "flops")
def _calc_a_forward_flops(unused_graph, unused_node):
return ops.OpStats("flops", 20)
class StatisticsTest(test_util.TensorFlowTestCase):
def testRegisteredNode(self):
graph = ops.Graph()
node = ops._NodeDef("a", "an_a")
weight_params = ops.get_stats_for_node_def(graph, node, "weight_parameters")
self.assertEqual(10, weight_params.value)
flops = ops.get_stats_for_node_def(graph, node, "flops")
self.assertEqual(20, flops.value)
missing_stat = ops.get_stats_for_node_def(graph, node, "missing_stat")
self.assertEqual(None, missing_stat.value)
def testUnregisteredNode(self):
graph = ops.Graph()
node = ops._NodeDef("b", "a_b")
weight_params = ops.get_stats_for_node_def(graph, node, "weight_params")
self.assertEqual(None, weight_params.value)
def testAccumulateStatistics(self):
weight_params_total = ops.OpStats("weight_parameters")
self.assertEqual(None, weight_params_total.value)
flops_total = ops.OpStats("flops")
self.assertEqual(None, flops_total.value)
first_weight_params = ops.OpStats("weight_parameters", 100)
weight_params_total += first_weight_params
self.assertEqual(100, weight_params_total.value)
second_flops = ops.OpStats("flops", 3)
flops_total += second_flops
self.assertEqual(3, flops_total.value)
second_weight_params = ops.OpStats("weight_parameters", 200)
weight_params_total += second_weight_params
self.assertEqual(300, weight_params_total.value)
class ColocationGroupTest(test_util.TensorFlowTestCase):
def testBasic(self):
a = constant_op.constant([2.0], name="a")
with ops.colocate_with(a.op):
b = constant_op.constant(3.0)
c = constant_op.constant(4.0)
self.assertEqual(["loc:@a"], a.op.colocation_groups())
self.assertEqual(["loc:@a"], b.op.colocation_groups())
with self.assertRaises(ValueError):
c.op.get_attr("_class")
def testColocationDeviceInteraction(self):
with ops.device("/cpu:0"):
with ops.device("/gpu:0"):
a = constant_op.constant([2.0], name="a")
with ops.colocate_with(a.op):
        # 'b' is created in the scope of /cpu:0, but it is
# colocated with 'a', which is on '/gpu:0'. colocate_with
# overrides devices because it is a stronger constraint.
b = constant_op.constant(3.0)
self.assertEqual(["loc:@a"], b.op.colocation_groups())
self.assertEqual(a.op.device, b.op.device)
def testLocationOverrides(self):
with ops.device("/cpu:0"):
with ops.device("/gpu:0"):
a = constant_op.constant([2.0], name="a")
# Note that this colocation is "redundant", since we are
# within the scope of "/gpu:0". However, we would like to
# preserve in the GraphDef that these two ops should be
# colocated in a portable way.
with ops.colocate_with(a.op):
b = constant_op.constant(3.0)
c = constant_op.constant(4.0)
d = constant_op.constant(5.0)
self.assertEqual(["loc:@a"], b.op.colocation_groups())
self.assertEqual("/device:GPU:0", a.op.device)
self.assertEqual(a.op.device, b.op.device)
# Test that device function stack is restored.
self.assertEqual("/device:GPU:0", c.op.device)
self.assertEqual("/device:CPU:0", d.op.device)
def testNestedColocateWith(self):
a = constant_op.constant([2.0], name="a")
with ops.colocate_with(a.op):
b = constant_op.constant(3.0)
with ops.colocate_with(b.op):
c = constant_op.constant(4.0)
self.assertEqual(["loc:@a"], b.op.colocation_groups())
self.assertEqual(["loc:@a"], c.op.colocation_groups())
def testMultiColocationGroups(self):
a = constant_op.constant([2.0], name="a")
b = constant_op.constant(3.0, name="b")
with ops.colocate_with(a.op):
with ops.colocate_with(b.op):
c = constant_op.constant(4.0)
self.assertEqual(set(["loc:@a", "loc:@b"]), set(c.op.colocation_groups()))
def testColocateVariables(self):
a = variables.Variable([2.0], name="a")
with ops.colocate_with(a.op):
b = variables.Variable([3.0], name="b")
self.assertEqual(["loc:@a"], b.op.colocation_groups())
def testInconsistentDeviceWithinColocate(self):
with ops.device("/gpu:0"):
a = constant_op.constant([2.0], name="a")
with ops.colocate_with(a.op):
# This is allowed due to legacy but clearly wrong, since we
# should really be colocating with 'a'. We allow devices to
# override colocate_with, but we log warnings to suggest that
# this is probably unintentional or misguided.
with ops.device("/cpu:0"):
b = constant_op.constant([3.0], name="b")
self.assertEqual("/device:CPU:0", b.device)
if __name__ == "__main__":
googletest.main()
|
|
"""Config flow for Universal Devices ISY994 integration."""
import logging
from urllib.parse import urlparse
from pyisy.configuration import Configuration
from pyisy.connection import Connection
import voluptuous as vol
from homeassistant import config_entries, core, exceptions
from homeassistant.components import ssdp
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import callback
from .const import (
CONF_IGNORE_STRING,
CONF_RESTORE_LIGHT_STATE,
CONF_SENSOR_STRING,
CONF_TLS_VER,
CONF_VAR_SENSOR_STRING,
DEFAULT_IGNORE_STRING,
DEFAULT_RESTORE_LIGHT_STATE,
DEFAULT_SENSOR_STRING,
DEFAULT_TLS_VERSION,
DEFAULT_VAR_SENSOR_STRING,
ISY_URL_POSTFIX,
UDN_UUID_PREFIX,
)
from .const import DOMAIN # pylint:disable=unused-import
_LOGGER = logging.getLogger(__name__)
def _data_schema(schema_input):
"""Generate schema with defaults."""
return vol.Schema(
{
vol.Required(CONF_HOST, default=schema_input.get(CONF_HOST, "")): str,
vol.Required(CONF_USERNAME): str,
vol.Required(CONF_PASSWORD): str,
vol.Optional(CONF_TLS_VER, default=DEFAULT_TLS_VERSION): vol.In([1.1, 1.2]),
},
extra=vol.ALLOW_EXTRA,
)
async def validate_input(hass: core.HomeAssistant, data):
"""Validate the user input allows us to connect.
Data has the keys from DATA_SCHEMA with values provided by the user.
"""
user = data[CONF_USERNAME]
password = data[CONF_PASSWORD]
host = urlparse(data[CONF_HOST])
tls_version = data.get(CONF_TLS_VER)
if host.scheme == "http":
https = False
port = host.port or 80
elif host.scheme == "https":
https = True
port = host.port or 443
else:
_LOGGER.error("isy994 host value in configuration is invalid")
raise InvalidHost
# Connect to ISY controller.
isy_conf = await hass.async_add_executor_job(
_fetch_isy_configuration,
host.hostname,
port,
user,
password,
https,
tls_version,
host.path,
)
if not isy_conf or "name" not in isy_conf or not isy_conf["name"]:
raise CannotConnect
# Return info that you want to store in the config entry.
return {"title": f"{isy_conf['name']} ({host.hostname})", "uuid": isy_conf["uuid"]}
def _fetch_isy_configuration(
address, port, username, password, use_https, tls_ver, webroot
):
"""Validate and fetch the configuration from the ISY."""
try:
isy_conn = Connection(
address,
port,
username,
password,
use_https,
tls_ver,
log=_LOGGER,
webroot=webroot,
)
except ValueError as err:
raise InvalidAuth(err.args[0])
return Configuration(log=_LOGGER, xml=isy_conn.get_config())
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Universal Devices ISY994."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_PUSH
def __init__(self):
"""Initialize the isy994 config flow."""
self.discovered_conf = {}
@staticmethod
@callback
def async_get_options_flow(config_entry):
"""Get the options flow for this handler."""
return OptionsFlowHandler(config_entry)
async def async_step_user(self, user_input=None):
"""Handle the initial step."""
errors = {}
info = None
if user_input is not None:
try:
info = await validate_input(self.hass, user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidHost:
errors["base"] = "invalid_host"
except InvalidAuth:
errors["base"] = "invalid_auth"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
if not errors:
await self.async_set_unique_id(info["uuid"], raise_on_progress=False)
self._abort_if_unique_id_configured()
return self.async_create_entry(title=info["title"], data=user_input)
return self.async_show_form(
step_id="user",
data_schema=_data_schema(self.discovered_conf),
errors=errors,
)
async def async_step_import(self, user_input):
"""Handle import."""
return await self.async_step_user(user_input)
async def async_step_ssdp(self, discovery_info):
"""Handle a discovered isy994."""
friendly_name = discovery_info[ssdp.ATTR_UPNP_FRIENDLY_NAME]
url = discovery_info[ssdp.ATTR_SSDP_LOCATION]
mac = discovery_info[ssdp.ATTR_UPNP_UDN]
if mac.startswith(UDN_UUID_PREFIX):
mac = mac[len(UDN_UUID_PREFIX) :]
if url.endswith(ISY_URL_POSTFIX):
url = url[: -len(ISY_URL_POSTFIX)]
await self.async_set_unique_id(mac)
self._abort_if_unique_id_configured()
self.discovered_conf = {
CONF_NAME: friendly_name,
CONF_HOST: url,
}
# pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167
self.context["title_placeholders"] = self.discovered_conf
return await self.async_step_user()
class OptionsFlowHandler(config_entries.OptionsFlow):
"""Handle a option flow for isy994."""
def __init__(self, config_entry: config_entries.ConfigEntry):
"""Initialize options flow."""
self.config_entry = config_entry
async def async_step_init(self, user_input=None):
"""Handle options flow."""
if user_input is not None:
return self.async_create_entry(title="", data=user_input)
options = self.config_entry.options
restore_light_state = options.get(
CONF_RESTORE_LIGHT_STATE, DEFAULT_RESTORE_LIGHT_STATE
)
ignore_string = options.get(CONF_IGNORE_STRING, DEFAULT_IGNORE_STRING)
sensor_string = options.get(CONF_SENSOR_STRING, DEFAULT_SENSOR_STRING)
var_sensor_string = options.get(
CONF_VAR_SENSOR_STRING, DEFAULT_VAR_SENSOR_STRING
)
options_schema = vol.Schema(
{
vol.Optional(CONF_IGNORE_STRING, default=ignore_string): str,
vol.Optional(CONF_SENSOR_STRING, default=sensor_string): str,
vol.Optional(CONF_VAR_SENSOR_STRING, default=var_sensor_string): str,
vol.Required(
CONF_RESTORE_LIGHT_STATE, default=restore_light_state
): bool,
}
)
return self.async_show_form(step_id="init", data_schema=options_schema)
class InvalidHost(exceptions.HomeAssistantError):
"""Error to indicate the host value is invalid."""
class CannotConnect(exceptions.HomeAssistantError):
"""Error to indicate we cannot connect."""
class InvalidAuth(exceptions.HomeAssistantError):
"""Error to indicate there is invalid auth."""
|
|
"""CLI script to annotate gpd entries
"""
import sys, argparse, gzip, re, itertools, os, uuid
from multiprocessing import cpu_count
from tempfile import mkdtemp, gettempdir
from random import shuffle
from seqtools.format.gpd import GPD, GPDStream
from seqtools.structure.transcriptome import Transcriptome
from seqtools.structure.transcript import Transcript
from seqtools.stream import OrderedStream
from seqtools.stream import MultiLocusStream
from seqtools.structure.transcript.group import Deconvolution
from seqtools.graph import Graph, Node, Edge
from seqtools.range import GenomicRange
from subprocess import Popen, PIPE
def main(args):
"""Read any reference transcriptome we can"""
txome = Transcriptome()
  # read the reference gpd if one is given
if args.reference:
rinf = None
    if re.search(r'\.gz$',args.reference):
rinf = gzip.open(args.reference)
else:
rinf = open(args.reference)
sys.stderr.write("Reading in reference\n")
z = 0
# populate txome with reference transcripts for each chromosome
for line in rinf:
z += 1
gpd = GPD(line)
gpd.set_payload(z)
if z%100 == 0: sys.stderr.write(str(z)+" \r")
txome.add_transcript(gpd)
rinf.close()
sys.stderr.write(str(z)+" \r")
sys.stderr.write("\n")
txome.sort_transcripts()
sys.stderr.write("Buffering mappings\n")
inf = sys.stdin
if args.input != '-':
    if re.search(r'\.gz$',args.input):
inf = gzip.open(args.input)
else:
inf = open(args.input)
tof = gzip.open(args.tempdir+'/reads.gpd.gz','w')
for line in inf: tof.write(line.rstrip()+"\n")
tof.close()
sys.stderr.write("1. Process by overlapping locus.\n")
annotated_reads(txome,gzip.open(args.tempdir+'/reads.gpd.gz'),'initial',args)
#of.close()
"""Now sequences we should be able to annotate as partial have
  consensus transcripts. Let's put those to annotation scrutiny again.
"""
sys.stderr.write("2. Reannotated partially annotated by overlapping locus.\n")
annotated_reads(txome,gzip.open(args.tempdir+'/initial/partial_annotated_multiexon.sorted.gpd.gz'),'repartial',args)
txs = {}
for transcript in txome.transcripts:
txs[transcript.name] = transcript
detected = {}
tinf = gzip.open(args.tempdir+'/initial/annotated.txt.gz')
for line in tinf:
f = line.rstrip().split("\t")
detected[f[1]] = True
tinf.close()
tinf = gzip.open(args.tempdir+'/repartial/annotated.txt.gz')
remove_partial = {}
for line in tinf:
f = line.rstrip().split("\t")
detected[f[1]] = True
remove_partial[f[1]] = True
tinf.close()
tof = gzip.open(args.tempdir+'/candidate.gpd.gz','w')
for name in detected:
tof.write(txs[name].get_gpd_line()+"\n")
tinf = gzip.open(args.tempdir+'/initial/partial_annotated_multiexon.sorted.gpd.gz')
for line in tinf:
f = line.rstrip().split("\t")
if f[1] not in remove_partial:
tof.write(line)
tinf = gzip.open(args.tempdir+'/initial/unannotated_singleexon.sorted.gpd.gz')
for line in tinf:
f = line.rstrip().split("\t")
f[0] = str(uuid.uuid4())
tof.write("\t".join(f)+"\n")
tinf.close()
tof.close()
sort_gpd(args.tempdir+'/candidate.gpd.gz',args.tempdir+'/candidate.sorted.gpd.gz',args)
"""We can ignore the partial annotations that have been detected"""
ntxome = Transcriptome()
tinf = gzip.open(args.tempdir+'/candidate.sorted.gpd.gz')
for line in tinf: ntxome.add_transcript(GPD(line))
annotated_reads(ntxome,gzip.open(args.tempdir+'/initial/unannotated_multiexon.sorted.gpd.gz'),'unannotated',args)
"""now we know which unannotated reads actually have annotations"""
tinf.close()
tof = gzip.open(args.tempdir+'/final.gpd.gz','w')
tinf = gzip.open(args.tempdir+'/candidate.sorted.gpd.gz')
for line in tinf: tof.write(line)
tinf.close()
tinf = gzip.open(args.tempdir+'/unannotated/unannotated_multiexon.sorted.gpd.gz')
for line in tinf: tof.write(line)
tinf.close()
tinf = gzip.open(args.tempdir+'/unannotated/partial_annotated_multiexon.sorted.gpd.gz')
for line in tinf: tof.write(line)
tinf.close()
tof.close()
sort_gpd(args.tempdir+'/final.gpd.gz',args.tempdir+'/final.sorted.gpd.gz',args)
"""Prepare outputs"""
of = sys.stdout
if args.output:
    if re.search(r'\.gz$',args.output):
of = gzip.open(args.output,'w')
else:
of = open(args.output,'w')
tinf = gzip.open(args.tempdir+'/final.sorted.gpd.gz')
for line in tinf: of.write(line)
tinf.close()
of.close()
def annotated_reads(txome,sorted_read_stream,output,args):
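  """Run one annotation pass of reads against a (possibly empty) transcriptome.

  Writes <tempdir>/<output>/annotated.txt.gz plus sorted GPD files for
  partially annotated multi-exon, unannotated multi-exon, and unannotated
  single-exon reads.
  """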
if not os.path.exists(args.tempdir+'/'+output):
os.makedirs(args.tempdir+'/'+output)
ts = OrderedStream(iter(txome.transcript_stream()))
rs = OrderedStream(GPDStream(sorted_read_stream))
mls = MultiLocusStream([ts,rs])
aof = gzip.open(args.tempdir+'/'+output+'/annotated.txt.gz','w')
of1 = gzip.open(args.tempdir+'/'+output+'/unannotated_multiexon.gpd.gz','w')
of2 = gzip.open(args.tempdir+'/'+output+'/partial_annotated_multiexon.gpd.gz','w')
of3 = gzip.open(args.tempdir+'/'+output+'/unannotated_singleexon.gpd.gz','w')
z = 0
for ml in mls:
refs, reads = ml.payload
if len(refs) == 0: continue
"""Check and see if we have single exons annotated"""
single_exon_refs = [x for x in refs if x.get_exon_count()==1]
single_exon_reads = [x for x in reads if x.get_exon_count()==1]
#print str(len(single_exon_refs))+" "+str(len(single_exon_reads))
unannotated_single_exon_reads = []
for seread in single_exon_reads:
ovs = [(x.exons[0].length,seread.exons[0].length,x.exons[0].overlap_size(seread.exons[0]),x) for x in single_exon_refs if x.exons[0].overlaps(seread.exons[0])]
#check for minimum overlap
ovs = [x for x in ovs if x[2] >= args.single_exon_minimum_overlap]
ovs = [x for x in ovs if float(x[2])/float(max(x[0],x[1])) >= args.single_exon_mutual_overlap]
ovs = sorted(ovs,key=lambda x: float(x[2])/float(max(x[0],x[1])),reverse=True)
if len(ovs) == 0:
unannotated_single_exon_reads.append(seread)
continue
best_ref = ovs[0][3]
aof.write(seread.name+"\t"+best_ref.name+"\tSE"+"\n")
"""May want an optional check for any better matches among exons"""
#now we can look for best matches among multi-exon transcripts
reads = [x for x in reads if x.get_exon_count() > 1]
multiexon_refs = [x for x in refs if x.get_exon_count() > 1]
unannotated_multi_exon_reads = []
partial_annotated_multi_exon_reads = []
for read in reads:
      # we don't need to have multiple exons matched; one is enough to call
ovs = [y for y in [(x,x.exon_overlap(read,
multi_minover=args.multi_exon_minimum_overlap,
multi_endfrac=args.multi_exon_end_frac,
multi_midfrac=args.multi_exon_mid_frac,
multi_consec=False)) for x in multiexon_refs] if y[1]]
for o in ovs: o[1].analyze_overs()
full = sorted([x for x in ovs if x[1].is_subset()==1],
key = lambda y: float(y[1].overlap_size())/float(max(y[1].tx_obj1.length,y[1].tx_obj2.length)),
reverse=True
)
if len(full) > 0:
aof.write(read.name+"\t"+full[0][0].name+"\tfull"+"\n")
continue
subset = sorted([x for x in ovs if x[1].is_subset()==2],
key = lambda y: (y[1].match_exon_count(),
y[1].min_overlap_fraction()),
reverse = True
)
if len(subset) > 0:
aof.write(read.name+"\t"+subset[0][0].name+"\tpartial"+"\n")
continue
#check for supersets
superset = sorted([x for x in ovs if x[1].is_subset()==3],
key = lambda y: (y[1].match_exon_count(),
y[1].min_overlap_fraction()),
reverse = True
)
if len(superset) > 0:
partial_annotated_multi_exon_reads.append((read,superset[0][0]))
#print read.name+"\t"+superset[0][0].name+"\tsuper"
continue
#check for noncompatible overlaps
      overset = sorted([x for x in ovs if x[1].match_exon_count() > 0],
key = lambda y: (y[1].consecutive_exon_count(),
y[1].min_overlap_fraction()),
reverse = True
)
#print [(x[1].consecutive_exon_count(), x[1].min_overlap_fraction()) for x in overset]
if len(overset) > 0:
partial_annotated_multi_exon_reads.append((read,overset[0][0]))
#print read.name+"\t"+overset[0][0].name+"\tover"
continue
unannotated_multi_exon_reads.append(read)
"""Now we have partially annotated multi and unannotated multi and unannotated single"""
if len(unannotated_multi_exon_reads) > 0:
sys.stderr.write(str(z)+" "+str(len(unannotated_multi_exon_reads))+" \r")
d = Deconvolution(downsample(unannotated_multi_exon_reads,args.downsample_locus))
groups = d.parse(tolerance=20,downsample=args.downsample)
for tx in groups:
z+=1
of1.write(tx.get_gpd_line()+"\n")
if len(partial_annotated_multi_exon_reads) > 0:
sys.stderr.write(str(z)+" "+str(len(partial_annotated_multi_exon_reads))+" \r")
### set the direction of the transcript
for v in partial_annotated_multi_exon_reads:
v[0].set_strand(v[1].direction)
### set the gene name of the transcript
for v in partial_annotated_multi_exon_reads:
v[0].set_gene_name(v[1].gene_name)
d = Deconvolution(downsample([x[0] for x in partial_annotated_multi_exon_reads],args.downsample_locus))
groups = d.parse(tolerance=20,downsample=args.downsample,use_gene_names=True)
for tx in groups:
z += 1
of2.write(tx.get_gpd_line()+"\n")
"""Do the unannotated single exon reads"""
g = Graph()
for r in unannotated_single_exon_reads:
if len([x for x in partial_annotated_multi_exon_reads if x[0].overlaps(r)]) > 0: continue
if len([x for x in unannotated_multi_exon_reads if x.overlaps(r)]) > 0: continue
if len([x for x in txome.transcripts if x.overlaps(r)]) > 0: continue
g.add_node(Node(r))
for i in range(0,len(g.nodes)):
for j in range(0,len(g.nodes)):
if i == j: continue
if g.nodes[i].payload.overlaps(g.nodes[j].payload):
g.add_edge(Edge(g.nodes[i],g.nodes[j]))
g.merge_cycles()
for r in g.roots:
se = []
se += r.payload_list
children = g.get_children(r)
for child in children: se += child.payload_list
rng = GenomicRange(se[0].exons[0].chr,
min([x.exons[0].start for x in se]),
max([x.exons[0].end for x in se]))
tx = Transcript([rng],Transcript.Options(direction='+'))
of3.write(tx.get_gpd_line()+"\n")
of1.close()
of2.close()
of3.close()
sys.stderr.write("\n")
"""Sort our progress"""
sys.stderr.write("sort transcriptome\n")
sort_gpd(args.tempdir+'/'+output+'/partial_annotated_multiexon.gpd.gz',args.tempdir+'/'+output+'/partial_annotated_multiexon.sorted.gpd.gz',args)
sort_gpd(args.tempdir+'/'+output+'/unannotated_multiexon.gpd.gz',args.tempdir+'/'+output+'/unannotated_multiexon.sorted.gpd.gz',args)
sort_gpd(args.tempdir+'/'+output+'/unannotated_singleexon.gpd.gz',args.tempdir+'/'+output+'/unannotated_singleexon.sorted.gpd.gz',args)
"""We still have the unannotated single exon reads to deal with"""
def sort_gpd(infile,outfile,args):
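  # Sort a gzipped GPD file by streaming it through `seq-tools sort --gpd -`
  # and re-compressing the result, roughly: zcat in | seq-tools sort | gzip > out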
tinf = gzip.open(infile)
cmd1 = 'seq-tools sort --gpd - '
cmd2 = 'gzip'
if args.threads > 1: cmd1 += ' --threads '+str(args.threads)
tof = open(outfile,'w')
p2 = Popen(cmd2.split(),stdin=PIPE,stdout=tof)
p1 = Popen(cmd1.split(),stdin=PIPE,stdout=p2.stdin)
for line in tinf: p1.stdin.write(line)
p1.communicate()
p2.communicate()
tinf.close()
tof.close()
def downsample(txs,cnt):
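  """Return at most cnt transcripts, sampled uniformly without replacement."""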
if cnt >= len(txs): return txs
v = txs[:]
shuffle(v)
return v[0:cnt]
def setup_tempdir(args):
if args.specific_tempdir:
if not os.path.exists(args.specific_tempdir):
os.makedirs(args.specific_tempdir.rstrip('/'))
args.tempdir = args.specific_tempdir.rstrip('/')
if not os.path.exists(args.specific_tempdir.rstrip('/')):
sys.stderr.write("ERROR: Problem creating temporary directory\n")
sys.exit()
else:
args.tempdir = mkdtemp(prefix="weirathe.",dir=args.tempdir.rstrip('/'))
if not os.path.exists(args.tempdir.rstrip('/')):
sys.stderr.write("ERROR: Problem creating temporary directory\n")
sys.exit()
if not os.path.exists(args.tempdir):
sys.stderr.write("ERROR: Problem creating temporary directory\n")
sys.exit()
return
def do_inputs():
parser = argparse.ArgumentParser(description="build a transcriptome from long reads",formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('input',help="Use - for STDIN ordered GPD")
parser.add_argument('-o','--output',help="output file otherwise STDOUT")
  parser.add_argument('--threads',type=int,default=cpu_count(),help="Number of threads to use for sorting")
parser.add_argument('-r','--reference',help="reference gpd")
parser.add_argument('--downsample',type=int,default=50,help='at most this many reads')
parser.add_argument('--downsample_locus',type=int,default=50,help="limit the number of locus transcripts to this amount")
group1 = parser.add_argument_group('Single Exon Parameters')
group1.add_argument('--single_exon_minimum_overlap',type=int,default=20,help="minimum bases a read must overlap with a reference single exon transcript")
  group1.add_argument('--single_exon_mutual_overlap',type=float,default=0.2,help="minimum mutual overlap fraction required between a read and a reference single exon transcript")
group2 = parser.add_argument_group('Multi Exon Parameters')
group2.add_argument('--multi_exon_minimum_overlap',type=int,default=10,help="minimum amount exons must overlap")
group2.add_argument('--multi_exon_end_frac',type=float,default=0,help="minimum mutual overlap of end exons")
group2.add_argument('--multi_exon_mid_frac',type=float,default=0.8,help="minimum mutual overlap of middle exons")
group = parser.add_mutually_exclusive_group()
group.add_argument('--tempdir',default=gettempdir(),help="The temporary directory is made and destroyed here.")
group.add_argument('--specific_tempdir',help="This temporary directory will be used, but will remain after executing.")
args = parser.parse_args()
setup_tempdir(args)
return args
def external_cmd(cmd):
cache_argv = sys.argv
sys.argv = cmd
args = do_inputs()
main(args)
sys.argv = cache_argv
if __name__=="__main__":
args = do_inputs()
main(args)
|
|
# coding: utf-8
"""
Salt Edge Account Information API
API Reference for services # noqa: E501
OpenAPI spec version: 5.0.0
Contact: support@saltedge.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class ConsentRequestBody(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'scopes': 'list[str]',
'from_date': 'date',
'to_date': 'date',
'period_days': 'int'
}
attribute_map = {
'scopes': 'scopes',
'from_date': 'from_date',
'to_date': 'to_date',
'period_days': 'period_days'
}
def __init__(self, scopes=None, from_date=None, to_date=None, period_days=None): # noqa: E501
"""ConsentRequestBody - a model defined in Swagger""" # noqa: E501
self._scopes = None
self._from_date = None
self._to_date = None
self._period_days = None
self.discriminator = None
self.scopes = scopes
if from_date is not None:
self.from_date = from_date
if to_date is not None:
self.to_date = to_date
if period_days is not None:
self.period_days = period_days
@property
def scopes(self):
"""Gets the scopes of this ConsentRequestBody. # noqa: E501
data to be allowed for fetching. The allowed values for this parameter must fall within the client's `allowed_fetch_scopes` and/or provider's `supported_fetch_scopes` restrictions. To change the client's allowed scopes, please <a href='https://www.saltedge.com/pages/contact' target=\"_blank\">contact our Sales team</a>. # noqa: E501
:return: The scopes of this ConsentRequestBody. # noqa: E501
:rtype: list[str]
"""
return self._scopes
@scopes.setter
def scopes(self, scopes):
"""Sets the scopes of this ConsentRequestBody.
data to be allowed for fetching. The allowed values for this parameter must fall within the client's `allowed_fetch_scopes` and/or provider's `supported_fetch_scopes` restrictions. To change the client's allowed scopes, please <a href='https://www.saltedge.com/pages/contact' target=\"_blank\">contact our Sales team</a>. # noqa: E501
:param scopes: The scopes of this ConsentRequestBody. # noqa: E501
:type: list[str]
"""
if scopes is None:
raise ValueError("Invalid value for `scopes`, must not be `None`") # noqa: E501
allowed_values = ["account_details", "holder_information", "transactions_details"] # noqa: E501
if not set(scopes).issubset(set(allowed_values)):
raise ValueError(
"Invalid values for `scopes` [{0}], must be a subset of [{1}]" # noqa: E501
.format(", ".join(map(str, set(scopes) - set(allowed_values))), # noqa: E501
", ".join(map(str, allowed_values)))
)
self._scopes = scopes
@property
def from_date(self):
"""Gets the from_date of this ConsentRequestBody. # noqa: E501
date to be allowed for fetching the data from. Defaults to `90 days ago` (only for the providers with `regulated:true`). This parameter is used when `scopes` parameter contains `transactions_details`. The allowed values for this parameter must be within exactly 365 days ago. # noqa: E501
:return: The from_date of this ConsentRequestBody. # noqa: E501
:rtype: date
"""
return self._from_date
@from_date.setter
def from_date(self, from_date):
"""Sets the from_date of this ConsentRequestBody.
date to be allowed for fetching the data from. Defaults to `90 days ago` (only for the providers with `regulated:true`). This parameter is used when `scopes` parameter contains `transactions_details`. The allowed values for this parameter must be within exactly 365 days ago. # noqa: E501
:param from_date: The from_date of this ConsentRequestBody. # noqa: E501
:type: date
"""
self._from_date = from_date
@property
def to_date(self):
"""Gets the to_date of this ConsentRequestBody. # noqa: E501
date to be allowed for fetching the data until. The allowed values for this parameter must be equal or more than `from_date`. # noqa: E501
:return: The to_date of this ConsentRequestBody. # noqa: E501
:rtype: date
"""
return self._to_date
@to_date.setter
def to_date(self, to_date):
"""Sets the to_date of this ConsentRequestBody.
date to be allowed for fetching the data until. The allowed values for this parameter must be equal or more than `from_date`. # noqa: E501
:param to_date: The to_date of this ConsentRequestBody. # noqa: E501
:type: date
"""
self._to_date = to_date
@property
def period_days(self):
"""Gets the period_days of this ConsentRequestBody. # noqa: E501
determines the period the consent will be valid for. Defaults to `null` (limitless) or provider's `max_consent_days`. The allowed value for this parameter must not be higher than the provider's `max_consent_days`. # noqa: E501
:return: The period_days of this ConsentRequestBody. # noqa: E501
:rtype: int
"""
return self._period_days
@period_days.setter
def period_days(self, period_days):
"""Sets the period_days of this ConsentRequestBody.
determines the period the consent will be valid for. Defaults to `null` (limitless) or provider's `max_consent_days`. The allowed value for this parameter must not be higher than the provider's `max_consent_days`. # noqa: E501
:param period_days: The period_days of this ConsentRequestBody. # noqa: E501
:type: int
"""
self._period_days = period_days
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(ConsentRequestBody, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ConsentRequestBody):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
|
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import time
import mock
from os_xenapi.client import host_agent
from os_xenapi.client import XenAPI
from oslo_concurrency import processutils
from oslo_utils import uuidutils
from nova import exception
from nova import test
from nova.virt.xenapi import agent
def _get_fake_instance(**kwargs):
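    # Build a minimal fake instance dict; each keyword argument becomes a
    # system_metadata entry ({"key": k, "value": v}).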
system_metadata = []
for k, v in kwargs.items():
system_metadata.append({
"key": k,
"value": v
})
return {
"system_metadata": system_metadata,
"uuid": "uuid",
"key_data": "ssh-rsa asdf",
"os_type": "asdf",
}
class AgentTestCaseBase(test.NoDBTestCase):
def _create_agent(self, instance, session="session"):
self.session = session
self.virtapi = "virtapi"
self.vm_ref = "vm_ref"
return agent.XenAPIBasedAgent(self.session, self.virtapi,
instance, self.vm_ref)
class AgentImageFlagsTestCase(AgentTestCaseBase):
def test_agent_is_present(self):
self.flags(use_agent_default=False, group='xenserver')
instance = {"system_metadata":
[{"key": "image_xenapi_use_agent", "value": "true"}]}
self.assertTrue(agent.should_use_agent(instance))
def test_agent_is_disabled(self):
self.flags(use_agent_default=True, group='xenserver')
instance = {"system_metadata":
[{"key": "image_xenapi_use_agent", "value": "false"}]}
self.assertFalse(agent.should_use_agent(instance))
    def test_agent_uses_default_when_prop_invalid(self):
self.flags(use_agent_default=True, group='xenserver')
instance = {"system_metadata":
[{"key": "image_xenapi_use_agent", "value": "bob"}],
"uuid": "uuid"}
self.assertTrue(agent.should_use_agent(instance))
def test_agent_default_not_present(self):
self.flags(use_agent_default=False, group='xenserver')
instance = {"system_metadata": []}
self.assertFalse(agent.should_use_agent(instance))
def test_agent_default_present(self):
self.flags(use_agent_default=True, group='xenserver')
instance = {"system_metadata": []}
self.assertTrue(agent.should_use_agent(instance))
class SysMetaKeyTestBase(object):
key = None
def _create_agent_with_value(self, value):
kwargs = {self.key: value}
instance = _get_fake_instance(**kwargs)
return self._create_agent(instance)
def test_get_sys_meta_key_true(self):
agent = self._create_agent_with_value("true")
self.assertTrue(agent._get_sys_meta_key(self.key))
def test_get_sys_meta_key_false(self):
agent = self._create_agent_with_value("False")
self.assertFalse(agent._get_sys_meta_key(self.key))
def test_get_sys_meta_key_invalid_is_false(self):
agent = self._create_agent_with_value("invalid")
self.assertFalse(agent._get_sys_meta_key(self.key))
def test_get_sys_meta_key_missing_is_false(self):
instance = _get_fake_instance()
agent = self._create_agent(instance)
self.assertFalse(agent._get_sys_meta_key(self.key))
class SkipSshFlagTestCase(SysMetaKeyTestBase, AgentTestCaseBase):
key = "image_xenapi_skip_agent_inject_ssh"
def test_skip_ssh_key_inject(self):
agent = self._create_agent_with_value("True")
self.assertTrue(agent._skip_ssh_key_inject())
class SkipFileInjectAtBootFlagTestCase(SysMetaKeyTestBase, AgentTestCaseBase):
key = "image_xenapi_skip_agent_inject_files_at_boot"
def test_skip_inject_files_at_boot(self):
agent = self._create_agent_with_value("True")
self.assertTrue(agent._skip_inject_files_at_boot())
class InjectSshTestCase(AgentTestCaseBase):
@mock.patch.object(agent.XenAPIBasedAgent, 'inject_file')
def test_inject_ssh_key_succeeds(self, mock_inject_file):
instance = _get_fake_instance()
agent = self._create_agent(instance)
agent.inject_ssh_key()
mock_inject_file.assert_called_once_with("/root/.ssh/authorized_keys",
"\n# The following ssh key "
"was injected by Nova"
"\nssh-rsa asdf\n")
@mock.patch.object(agent.XenAPIBasedAgent, 'inject_file')
def _test_inject_ssh_key_skipped(self, instance, mock_inject_file):
agent = self._create_agent(instance)
# make sure its not called
agent.inject_ssh_key()
mock_inject_file.assert_not_called()
def test_inject_ssh_key_skipped_no_key_data(self):
instance = _get_fake_instance()
instance["key_data"] = None
self._test_inject_ssh_key_skipped(instance)
def test_inject_ssh_key_skipped_windows(self):
instance = _get_fake_instance()
instance["os_type"] = "windows"
self._test_inject_ssh_key_skipped(instance)
def test_inject_ssh_key_skipped_cloud_init_present(self):
instance = _get_fake_instance(
image_xenapi_skip_agent_inject_ssh="True")
self._test_inject_ssh_key_skipped(instance)
class FileInjectionTestCase(AgentTestCaseBase):
@mock.patch.object(agent.XenAPIBasedAgent, '_call_agent')
def test_inject_file(self, mock_call_agent):
instance = _get_fake_instance()
agent = self._create_agent(instance)
b64_path = base64.b64encode(b'path')
b64_contents = base64.b64encode(b'contents')
agent.inject_file("path", "contents")
mock_call_agent.assert_called_once_with(host_agent.inject_file,
{'b64_contents': b64_contents,
'b64_path': b64_path})
@mock.patch.object(agent.XenAPIBasedAgent, 'inject_file')
def test_inject_files(self, mock_inject_file):
instance = _get_fake_instance()
agent = self._create_agent(instance)
files = [("path1", "content1"), ("path2", "content2")]
agent.inject_files(files)
mock_inject_file.assert_has_calls(
[mock.call("path1", "content1"), mock.call("path2", "content2")])
@mock.patch.object(agent.XenAPIBasedAgent, 'inject_file')
def test_inject_files_skipped_when_cloud_init_installed(self,
mock_inject_file):
instance = _get_fake_instance(
image_xenapi_skip_agent_inject_files_at_boot="True")
agent = self._create_agent(instance)
files = [("path1", "content1"), ("path2", "content2")]
agent.inject_files(files)
mock_inject_file.assert_not_called()
class RebootRetryTestCase(AgentTestCaseBase):
@mock.patch.object(agent, '_wait_for_new_dom_id')
def test_retry_on_reboot(self, mock_wait):
mock_session = mock.Mock()
mock_session.VM.get_domid.return_value = "fake_dom_id"
agent = self._create_agent(None, mock_session)
mock_method = mock.Mock().method()
mock_method.side_effect = [XenAPI.Failure(["REBOOT: fake"]),
{"returncode": '0', "message": "done"}]
result = agent._call_agent(mock_method)
self.assertEqual("done", result)
self.assertTrue(mock_session.VM.get_domid.called)
self.assertEqual(2, mock_method.call_count)
mock_wait.assert_called_once_with(mock_session, self.vm_ref,
"fake_dom_id", mock_method)
@mock.patch.object(time, 'sleep')
@mock.patch.object(time, 'time')
def test_wait_for_new_dom_id_found(self, mock_time, mock_sleep):
mock_session = mock.Mock()
mock_session.VM.get_domid.return_value = "new"
agent._wait_for_new_dom_id(mock_session, "vm_ref", "old", "method")
mock_session.VM.get_domid.assert_called_once_with("vm_ref")
self.assertFalse(mock_sleep.called)
@mock.patch.object(time, 'sleep')
@mock.patch.object(time, 'time')
def test_wait_for_new_dom_id_after_retry(self, mock_time, mock_sleep):
self.flags(agent_timeout=3, group="xenserver")
mock_time.return_value = 0
mock_session = mock.Mock()
old = "40"
new = "42"
mock_session.VM.get_domid.side_effect = [old, "-1", new]
agent._wait_for_new_dom_id(mock_session, "vm_ref", old, "method")
mock_session.VM.get_domid.assert_called_with("vm_ref")
self.assertEqual(3, mock_session.VM.get_domid.call_count)
self.assertEqual(2, mock_sleep.call_count)
@mock.patch.object(time, 'sleep')
@mock.patch.object(time, 'time')
def test_wait_for_new_dom_id_timeout(self, mock_time, mock_sleep):
self.flags(agent_timeout=3, group="xenserver")
def fake_time():
fake_time.time = fake_time.time + 1
return fake_time.time
fake_time.time = 0
mock_time.side_effect = fake_time
mock_session = mock.Mock()
mock_session.VM.get_domid.return_value = "old"
mock_method = mock.Mock().method()
mock_method.__name__ = "mock_method"
self.assertRaises(exception.AgentTimeout,
agent._wait_for_new_dom_id,
mock_session, "vm_ref", "old", mock_method)
self.assertEqual(4, mock_session.VM.get_domid.call_count)
class SetAdminPasswordTestCase(AgentTestCaseBase):
@mock.patch.object(agent.XenAPIBasedAgent, '_call_agent')
@mock.patch("nova.virt.xenapi.agent.SimpleDH")
def test_exchange_key_with_agent(self, mock_simple_dh, mock_call_agent):
agent = self._create_agent(None)
instance_mock = mock_simple_dh()
instance_mock.get_public.return_value = 4321
mock_call_agent.return_value = "1234"
result = agent._exchange_key_with_agent()
mock_call_agent.assert_called_once_with(host_agent.key_init,
{"pub": "4321"},
success_codes=['D0'],
ignore_errors=False)
result.compute_shared.assert_called_once_with(1234)
@mock.patch.object(agent.XenAPIBasedAgent, '_call_agent')
@mock.patch.object(agent.XenAPIBasedAgent,
'_save_instance_password_if_sshkey_present')
@mock.patch.object(agent.XenAPIBasedAgent, '_exchange_key_with_agent')
def test_set_admin_password_works(self, mock_exchange, mock_save,
mock_call_agent):
mock_dh = mock.Mock(spec_set=agent.SimpleDH)
mock_dh.encrypt.return_value = "enc_pass"
mock_exchange.return_value = mock_dh
agent_inst = self._create_agent(None)
agent_inst.set_admin_password("new_pass")
mock_dh.encrypt.assert_called_once_with("new_pass\n")
mock_call_agent.assert_called_once_with(host_agent.password,
{'enc_pass': 'enc_pass'})
mock_save.assert_called_once_with("new_pass")
@mock.patch.object(agent.XenAPIBasedAgent, '_add_instance_fault')
@mock.patch.object(agent.XenAPIBasedAgent, '_exchange_key_with_agent')
def test_set_admin_password_silently_fails(self, mock_exchange,
mock_add_fault):
error = exception.AgentTimeout(method="fake")
mock_exchange.side_effect = error
agent_inst = self._create_agent(None)
agent_inst.set_admin_password("new_pass")
mock_add_fault.assert_called_once_with(error, mock.ANY)
@mock.patch('oslo_concurrency.processutils.execute')
def test_run_ssl_successful(self, mock_execute):
mock_execute.return_value = ('0',
'*** WARNING : deprecated key derivation used.'
'Using -iter or -pbkdf2 would be better.')
agent.SimpleDH()._run_ssl('foo')
@mock.patch('oslo_concurrency.processutils.execute',
side_effect=processutils.ProcessExecutionError(
exit_code=1, stderr=('ERROR: Something bad happened')))
def test_run_ssl_failure(self, mock_execute):
self.assertRaises(RuntimeError, agent.SimpleDH()._run_ssl, 'foo')
class UpgradeRequiredTestCase(test.NoDBTestCase):
def test_less_than(self):
self.assertTrue(agent.is_upgrade_required('1.2.3.4', '1.2.3.5'))
def test_greater_than(self):
self.assertFalse(agent.is_upgrade_required('1.2.3.5', '1.2.3.4'))
def test_equal(self):
self.assertFalse(agent.is_upgrade_required('1.2.3.4', '1.2.3.4'))
def test_non_lexical(self):
self.assertFalse(agent.is_upgrade_required('1.2.3.10', '1.2.3.4'))
def test_length(self):
self.assertTrue(agent.is_upgrade_required('1.2.3', '1.2.3.4'))
@mock.patch.object(uuidutils, 'generate_uuid')
class CallAgentTestCase(AgentTestCaseBase):
def test_call_agent_success(self, mock_uuid):
session = mock.Mock()
instance = {"uuid": "fake"}
addl_args = {"foo": "bar"}
session.VM.get_domid.return_value = '42'
mock_uuid.return_value = 1
mock_method = mock.Mock().method()
mock_method.return_value = {'returncode': '4', 'message': "asdf\\r\\n"}
mock_method.__name__ = "mock_method"
self.assertEqual("asdf",
agent._call_agent(session, instance, "vm_ref",
mock_method, addl_args, timeout=300,
success_codes=['0', '4']))
expected_args = {}
expected_args.update(addl_args)
mock_method.assert_called_once_with(session, 1, '42', 300,
**expected_args)
session.VM.get_domid.assert_called_once_with("vm_ref")
def _call_agent_setup(self, session, mock_uuid, mock_method,
returncode='0', success_codes=None,
exception=None):
session.XenAPI.Failure = XenAPI.Failure
instance = {"uuid": "fake"}
addl_args = {"foo": "bar"}
session.VM.get_domid.return_value = "42"
mock_uuid.return_value = 1
if exception:
mock_method.side_effect = exception
else:
mock_method.return_value = {'returncode': returncode,
'message': "asdf\\r\\n"}
return agent._call_agent(session, instance, "vm_ref", mock_method,
addl_args, success_codes=success_codes)
def _assert_agent_called(self, session, mock_uuid, mock_method):
expected_args = {"foo": "bar"}
mock_uuid.assert_called_once_with()
mock_method.assert_called_once_with(session, 1, '42', 30,
**expected_args)
session.VM.get_domid.assert_called_once_with("vm_ref")
def test_call_agent_works_with_defaults(self, mock_uuid):
session = mock.Mock()
mock_method = mock.Mock().method()
mock_method.__name__ = "mock_method"
self._call_agent_setup(session, mock_uuid, mock_method)
self._assert_agent_called(session, mock_uuid, mock_method)
def test_call_agent_fails_with_timeout(self, mock_uuid):
session = mock.Mock()
mock_method = mock.Mock().method()
mock_method.__name__ = "mock_method"
self.assertRaises(exception.AgentTimeout, self._call_agent_setup,
session, mock_uuid, mock_method,
exception=XenAPI.Failure(["TIMEOUT:fake"]))
self._assert_agent_called(session, mock_uuid, mock_method)
def test_call_agent_fails_with_not_implemented(self, mock_uuid):
session = mock.Mock()
mock_method = mock.Mock().method()
mock_method.__name__ = "mock_method"
self.assertRaises(exception.AgentNotImplemented,
self._call_agent_setup,
session, mock_uuid, mock_method,
exception=XenAPI.Failure(["NOT IMPLEMENTED:"]))
self._assert_agent_called(session, mock_uuid, mock_method)
def test_call_agent_fails_with_other_error(self, mock_uuid):
session = mock.Mock()
mock_method = mock.Mock().method()
mock_method.__name__ = "mock_method"
self.assertRaises(exception.AgentError, self._call_agent_setup,
session, mock_uuid, mock_method,
exception=XenAPI.Failure(["asdf"]))
self._assert_agent_called(session, mock_uuid, mock_method)
def test_call_agent_fails_with_returned_error(self, mock_uuid):
session = mock.Mock()
mock_method = mock.Mock().method()
mock_method.__name__ = "mock_method"
self.assertRaises(exception.AgentError, self._call_agent_setup,
session, mock_uuid, mock_method, returncode='42')
self._assert_agent_called(session, mock_uuid, mock_method)
class XenAPIBasedAgent(AgentTestCaseBase):
@mock.patch.object(agent.XenAPIBasedAgent, "_add_instance_fault")
@mock.patch.object(agent, "_call_agent")
def test_call_agent_swallows_error(self, mock_call_agent,
mock_add_instance_fault):
fake_error = exception.AgentError(method="bob")
mock_call_agent.side_effect = fake_error
instance = _get_fake_instance()
agent = self._create_agent(instance)
agent._call_agent("bob")
mock_call_agent.assert_called_once_with(agent.session, agent.instance,
agent.vm_ref, "bob", None, None, None)
mock_add_instance_fault.assert_called_once_with(fake_error, mock.ANY)
@mock.patch.object(agent.XenAPIBasedAgent, "_add_instance_fault")
@mock.patch.object(agent, "_call_agent")
def test_call_agent_throws_error(self, mock_call_agent,
mock_add_instance_fault):
fake_error = exception.AgentError(method="bob")
mock_call_agent.side_effect = fake_error
instance = _get_fake_instance()
agent = self._create_agent(instance)
self.assertRaises(exception.AgentError, agent._call_agent,
"bob", ignore_errors=False)
mock_call_agent.assert_called_once_with(agent.session, agent.instance,
agent.vm_ref, "bob", None, None, None)
self.assertFalse(mock_add_instance_fault.called)
|
|
from zeit.cms.checkout.helper import checked_out
from zeit.cms.interfaces import ICMSContent
import transaction
import unittest
import zeit.cms.tagging.testing
import zeit.content.article.edit.browser.testing
import zeit.content.author.author
import zope.security.management
class HeadTest(zeit.content.article.edit.browser.testing.EditorTestCase):
def setUp(self):
super(HeadTest, self).setUp()
self.open('/repository/online/2007/01/Somalia/@@checkout')
s = self.selenium
s.waitForElementPresent('id=options-b.year')
s.click('css=#edit-form-misc .fold-link')
def test_form_should_highlight_changed_data(self):
s = self.selenium
s.assertValue('id=options-b.year', '2007')
s.assertElementNotPresent('css=.widget-outer.dirty')
s.type('id=options-b.year', '2010')
s.click('id=options-b.volume')
s.waitForElementPresent('css=.field.dirty')
def test_form_should_save_entered_text_on_blur(self):
s = self.selenium
s.assertValue('id=options-b.year', '2007')
s._find('id=options-b.year').clear()
s.type('id=options-b.year', '2010')
s.type('id=options-b.volume', '\t') # Trigger blur for form.
s.waitForElementNotPresent('css=.field.dirty')
# Re-open the page and verify that the data is still there
s.clickAndWait('link=Edit contents')
s.waitForElementPresent('id=options-b.year')
s.assertValue('id=options-b.year', '2010')
def test_form_should_save_selection_on_blur(self):
s = self.selenium
s.click('css=#edit-form-metadata .fold-link')
s.select('id=metadata-b.product', 'Zeit Magazin')
s.type('id=metadata-b.copyrights', '\t') # Trigger blur for form.
s.waitForElementNotPresent('css=.field.dirty')
s.assertSelectedLabel('id=metadata-b.product', 'Zeit Magazin')
def test_change_in_ressort_should_update_subressort_list(self):
s = self.selenium
s.click('css=#edit-form-metadata .fold-link')
s.assertSelectedLabel('id=metadata-a.ressort', 'International')
s.pause(100)
self.assertEqual(
[u'(nothing selected)', u'Meinung', u'Nahost', u'US-Wahl'],
s.getSelectOptions('id=metadata-a.sub_ressort'))
s.select('id=metadata-a.ressort', 'Deutschland')
s.pause(100)
self.assertEqual(
[u'(nothing selected)', u'Datenschutz', u'Integration',
u'Joschka Fisher', u'Meinung'],
s.getSelectOptions('id=metadata-a.sub_ressort'))
s.type('id=metadata-a.sub_ressort', '\t') # Trigger blur for form.
s.pause(500)
self.assertEqual(
[u'(nothing selected)', u'Datenschutz', u'Integration',
u'Joschka Fisher', u'Meinung'],
s.getSelectOptions('id=metadata-a.sub_ressort'))
def test_invalid_input_should_display_error_message(self):
s = self.selenium
s.assertValue('id=options-b.year', '2007')
s.type('id=options-b.year', 'ASDF')
s.type('id=options-b.volume', '\t') # Trigger blur for form.
s.waitForElementPresent('css=.inline-form div.error')
def test_relateds_should_be_addable(self):
self.add_testcontent_to_clipboard()
s = self.selenium
s.waitForElementPresent('id=internallinks.related')
s.click('css=#edit-form-internallinks .fold-link')
s.dragAndDropToObject(
'//li[@uniqueid="Clip/testcontent"]',
'xpath=//*[@id="internallinks.related"]//ul')
s.waitForElementPresent(
'xpath=//*[@id="internallinks.related"]//li[1]')
def test_metadata_should_be_foldable_and_unfoldable(self):
s = self.selenium
s.waitForElementPresent('css=#edit-form-metadata.folded')
s.click('css=#edit-form-metadata .edit-bar .fold-link')
s.waitForElementNotPresent('css=#edit-form-metadata.folded')
s.click('css=#edit-form-metadata .edit-bar .fold-link')
s.waitForElementPresent('css=#edit-form-metadata.folded')
def test_unfold_should_be_stored(self):
s = self.selenium
s.waitForElementPresent('css=#edit-form-metadata.folded')
s.click('css=#edit-form-metadata .edit-bar .fold-link')
s.waitForElementNotPresent('css=#edit-form-metadata.folded')
s.open(s.getLocation())
s.waitForElementPresent('css=#edit-form-metadata')
s.assertElementNotPresent('css=#edit-form-metadata.folded')
class KeywordTest(zeit.content.article.edit.browser.testing.EditorTestCase,
zeit.cms.tagging.testing.TaggingHelper):
@unittest.skip("no d'n'd 'til webdriver")
def test_sorting_should_trigger_write(self):
s = self.selenium
self.setup_tags('t1', 't2', 't3')
self.open('/repository/online/2007/01/Somalia/@@checkout')
s.waitForElementPresent('id=metadata-a.keywords')
s.waitForTextPresent('t1*t2*t3')
s.dragAndDropToObject(
"xpath=//li[contains(., 't1')]",
"xpath=//li[contains(., 't3')]")
s.pause(200)
self.assertEqual(
['t2', 't1', 't3'],
list(self.tagger().updateOrder.call_args[0][0]))
class MetadataTest(zeit.content.article.edit.browser.testing.EditorTestCase):
def setUp(self):
super(MetadataTest, self).setUp()
self.open('/repository/online/2007/01/Somalia/@@checkout')
self.selenium.waitForElementPresent('id=options-b.year')
self.selenium.click('css=#edit-form-metadata .fold-link')
def test_comments_allowed_toggled_when_comments_section_is_toggled(self):
s = self.selenium
s.waitForElementNotPresent('metadata-comments.commentsAllowed')
s.click('metadata-comments.commentSectionEnable')
s.waitForElementPresent('metadata-comments.commentsAllowed')
s.click('metadata-comments.commentSectionEnable')
s.waitForElementNotPresent('metadata-comments.commentsAllowed')
def test_disallow_comments_if_comments_section_is_disabled(self):
s = self.selenium
s.waitForElementNotPresent('metadata-comments.commentsAllowed')
s.click('metadata-comments.commentSectionEnable')
s.waitForElementPresent('metadata-comments.commentsAllowed')
s.click('metadata-comments.commentsAllowed')
s.waitForChecked('metadata-comments.commentsAllowed')
s.click('metadata-comments.commentSectionEnable')
s.waitForElementNotPresent('metadata-comments.commentsAllowed')
s.click('metadata-comments.commentSectionEnable')
s.waitForElementPresent('metadata-comments.commentsAllowed')
s.waitForNotChecked('metadata-comments.commentsAllowed')
def test_comments_premoderate_option_is_available(self):
s = self.selenium
s.click('metadata-comments.commentSectionEnable')
s.waitForElementPresent('metadata-comments.commentsPremoderate')
@unittest.skip(
'Drag&Drop does not work, the icon is never dropped on the clipboard')
class HeaderTest(zeit.content.article.edit.browser.testing.EditorTestCase):
def test_icon_in_header_is_draggable_to_clipboard(self):
principal = (zope.security.management.getInteraction()
.participations[0].principal)
clipboard = zeit.cms.clipboard.interfaces.IClipboard(principal)
clipboard.addClip('Clip')
transaction.commit()
self.open('/repository/online/2007/01/Somalia')
s = self.selenium
clipboard = '//li[@uniqueid="Clip"]'
s.click(clipboard)
s.waitForElementPresent('//li[@uniqueid="Clip"][@action="collapse"]')
icon = 'css=#editor-forms-heading .content-icon'
s.waitForElementPresent(icon)
s.dragAndDropToObject(icon, clipboard)
s.waitForElementPresent(
clipboard + '//span[contains(@class, "uniqueId") and '
'contains(text(), "Somalia")]')
class AuthorLocationTest(
zeit.content.article.edit.browser.testing.EditorTestCase):
def setUp(self):
super(AuthorLocationTest, self).setUp()
shakespeare = zeit.content.author.author.Author()
shakespeare.firstname = 'William'
shakespeare.lastname = 'Shakespeare'
self.repository['shakespeare'] = shakespeare
with checked_out(ICMSContent(
'http://xml.zeit.de/online/2007/01/Somalia')) as co:
co.authorships = [co.authorships.create(
self.repository['shakespeare'])]
def test_entering_location_via_autocomplete(self):
self.open('/repository/online/2007/01/Somalia/@@checkout')
s = self.selenium
fold = 'css=#edit-form-metadata .fold-link'
s.waitForElementPresent(fold)
s.click(fold)
location_input = 'css=.object-details.type-author .autocomplete-widget'
s.waitForElementPresent(location_input)
self.add_by_autocomplete('Paris', location_input)
s.pause(500) # Workflow area is reloaded on each InlineForm submit.
s.clickAndWait('id=checkin')
location_display = 'css=.object-details.type-author .widget'
s.waitForElementPresent(location_display)
s.waitForText(location_display, 'Paris')
class FilenameTest(zeit.content.article.edit.browser.testing.EditorTestCase):
def test_filename_input_is_wired_up(self):
self.add_article()
s = self.selenium
fold = 'css=#edit-form-filename .fold-link'
s.waitForElementPresent(fold)
s.click(fold)
input_filename = 'new-filename.rename_to'
s.waitForElementPresent(input_filename)
s.type(input_filename, 'foo bar\t')
s.waitForValue(input_filename, 'foo-bar')
|
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010-2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Model classes that extend the base instance functionality for MySQL instances.
"""
from trove.common import cfg
from trove.common import exception
from trove.common import utils
from trove.common.remote import create_guest_client
from trove.db import get_db_api
from trove.guestagent.db import models as guest_models
from trove.instance import models as base_models
from trove.openstack.common import log as logging
from trove.openstack.common.gettextutils import _
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
def persisted_models():
return {'root_enabled_history': RootHistory}
def load_and_verify(context, instance_id):
# Load InstanceServiceStatus to verify if it's running
instance = base_models.Instance.load(context, instance_id)
if not instance.is_sql_running:
raise exception.UnprocessableEntity(
"Instance %s is not ready." % instance.id)
else:
return instance
class User(object):
_data_fields = ['name', 'host', 'password', 'databases']
def __init__(self, name, host, password, databases):
self.name = name
self.host = host
self.password = password
self.databases = databases
@classmethod
def load(cls, context, instance_id, username, hostname):
load_and_verify(context, instance_id)
validate = guest_models.MySQLUser()
validate.name = username
validate.host = hostname
client = create_guest_client(context, instance_id)
found_user = client.get_user(username=username, hostname=hostname)
if not found_user:
return None
database_names = [{'name': db['_name']}
for db in found_user['_databases']]
return cls(found_user['_name'],
found_user['_host'],
found_user['_password'],
database_names)
@classmethod
def create(cls, context, instance_id, users):
# Load InstanceServiceStatus to verify if it's running
load_and_verify(context, instance_id)
client = create_guest_client(context, instance_id)
for user in users:
user_name = user['_name']
host_name = user['_host']
if host_name is None:
host_name = '%'
userhost = "%s@%s" % (user_name, host_name)
existing_users, _nadda = Users.load_with_client(
client,
limit=1,
marker=userhost,
include_marker=True)
if (len(existing_users) > 0 and
str(existing_users[0].name) == str(user_name) and
str(existing_users[0].host) == str(host_name)):
raise exception.UserAlreadyExists(name=user_name,
host=host_name)
return client.create_user(users)
@classmethod
def delete(cls, context, instance_id, user):
load_and_verify(context, instance_id)
create_guest_client(context, instance_id).delete_user(user)
@classmethod
def access(cls, context, instance_id, username, hostname):
load_and_verify(context, instance_id)
client = create_guest_client(context, instance_id)
databases = client.list_access(username, hostname)
dbs = []
for db in databases:
dbs.append(Schema(name=db['_name'],
collate=db['_collate'],
character_set=db['_character_set']))
return UserAccess(dbs)
@classmethod
def grant(cls, context, instance_id, username, hostname, databases):
load_and_verify(context, instance_id)
client = create_guest_client(context, instance_id)
client.grant_access(username, hostname, databases)
@classmethod
def revoke(cls, context, instance_id, username, hostname, database):
load_and_verify(context, instance_id)
client = create_guest_client(context, instance_id)
client.revoke_access(username, hostname, database)
@classmethod
def change_password(cls, context, instance_id, users):
load_and_verify(context, instance_id)
client = create_guest_client(context, instance_id)
change_users = []
for user in users:
change_user = {'name': user.name,
'host': user.host,
'password': user.password,
}
change_users.append(change_user)
client.change_passwords(change_users)
@classmethod
def update_attributes(cls, context, instance_id, username, hostname,
user_attrs):
load_and_verify(context, instance_id)
client = create_guest_client(context, instance_id)
user_name = user_attrs.get('name')
host_name = user_attrs.get('host')
user = user_name or username
host = host_name or hostname
userhost = "%s@%s" % (user, host)
if user_name or host_name:
existing_users, _nadda = Users.load_with_client(
client,
limit=1,
marker=userhost,
include_marker=True)
if (len(existing_users) > 0 and
existing_users[0].name == user and
existing_users[0].host == host):
raise exception.UserAlreadyExists(name=user,
host=host)
client.update_attributes(username, hostname, user_attrs)
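# Illustrative sketch (not part of the original module): the ``users`` argument
# of User.create() and the objects handled by change_password() follow the same
# serialized guest-agent format read in User.load() above. A minimal payload
# could look like this (the concrete values are made up):
#
#     users = [{'_name': 'app_user',
#               '_host': '%',                 # defaults to '%' when None
#               '_password': 'secret',
#               '_databases': [{'_name': 'app_db'}]}]
#     User.create(context, instance_id, users)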
class UserAccess(object):
_data_fields = ['databases']
def __init__(self, databases):
self.databases = databases
class Root(object):
@classmethod
def load(cls, context, instance_id):
load_and_verify(context, instance_id)
# TODO(pdmars): remove the is_root_enabled call from the guest agent,
# just check the database for this information.
# If the root history returns null or raises an exception, the root
# user hasn't been enabled.
try:
root_history = RootHistory.load(context, instance_id)
except exception.NotFound:
return False
if not root_history:
return False
return True
@classmethod
def create(cls, context, instance_id, user):
load_and_verify(context, instance_id)
root = create_guest_client(context, instance_id).enable_root()
root_user = guest_models.RootUser()
root_user.deserialize(root)
RootHistory.create(context, instance_id, user)
return root_user
class RootHistory(object):
_auto_generated_attrs = ['id']
_data_fields = ['instance_id', 'user', 'created']
_table_name = 'root_enabled_history'
def __init__(self, instance_id, user):
self.id = instance_id
self.user = user
self.created = utils.utcnow()
def save(self):
LOG.debug(_("Saving %(name)s: %(dict)s") %
{'name': self.__class__.__name__, 'dict': self.__dict__})
return get_db_api().save(self)
@classmethod
def load(cls, context, instance_id):
history = get_db_api().find_by(cls, id=instance_id)
return history
@classmethod
def create(cls, context, instance_id, user):
history = cls.load(context, instance_id)
if history is not None:
return history
history = RootHistory(instance_id, user)
return history.save()
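# Example flow (illustrative sketch, not part of the original module):
#
#     root_user = Root.create(context, instance_id, user)   # enables root on the guest
#     Root.load(context, instance_id)                       # -> True, a history row now exists
#
# RootHistory.create() is effectively idempotent: if a row for the instance
# already exists it is returned unchanged, so the history table records the
# first time root was enabled.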
def load_via_context(cls, context, instance_id):
"""Creates guest and fetches pagination arguments from the context."""
load_and_verify(context, instance_id)
limit = int(context.limit or cls.DEFAULT_LIMIT)
limit = cls.DEFAULT_LIMIT if limit > cls.DEFAULT_LIMIT else limit
client = create_guest_client(context, instance_id)
# The REST API standard dictates that we *NEVER* include the marker.
return cls.load_with_client(client=client, limit=limit,
marker=context.marker, include_marker=False)
class Users(object):
DEFAULT_LIMIT = CONF.users_page_size
@classmethod
def load(cls, context, instance_id):
return load_via_context(cls, context, instance_id)
@classmethod
def load_with_client(cls, client, limit, marker, include_marker):
user_list, next_marker = client.list_users(
limit=limit,
marker=marker,
include_marker=include_marker)
model_users = []
ignore_users = CONF.ignore_users
for user in user_list:
mysql_user = guest_models.MySQLUser()
mysql_user.deserialize(user)
if mysql_user.name in ignore_users:
continue
# TODO(hub-cap): databases are not being returned in the
# reference agent
dbs = []
for db in mysql_user.databases:
dbs.append({'name': db['_name']})
model_users.append(User(mysql_user.name,
mysql_user.host,
mysql_user.password,
dbs))
return model_users, next_marker
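# Pagination sketch (illustrative, not part of the original module): the marker
# returned by load_with_client() can be fed back in to walk the full user list
# page by page. The loop termination condition assumes an empty marker at the
# end of the listing.
#
#     marker = None
#     while True:
#         page, marker = Users.load_with_client(client, limit=20,
#                                               marker=marker,
#                                               include_marker=False)
#         for user in page:
#             print user.name, user.host
#         if not marker:
#             break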
class Schema(object):
_data_fields = ['name', 'collate', 'character_set']
def __init__(self, name, collate, character_set):
self.name = name
self.collate = collate
self.character_set = character_set
@classmethod
def create(cls, context, instance_id, schemas):
load_and_verify(context, instance_id)
client = create_guest_client(context, instance_id)
for schema in schemas:
schema_name = schema['_name']
existing_schema, _nadda = Schemas.load_with_client(
client,
limit=1,
marker=schema_name,
include_marker=True)
if (len(existing_schema) > 0 and
str(existing_schema[0].name) == str(schema_name)):
raise exception.DatabaseAlreadyExists(name=schema_name)
return client.create_database(schemas)
@classmethod
def delete(cls, context, instance_id, schema):
load_and_verify(context, instance_id)
create_guest_client(context, instance_id).delete_database(schema)
class Schemas(object):
DEFAULT_LIMIT = CONF.databases_page_size
@classmethod
def load(cls, context, instance_id):
return load_via_context(cls, context, instance_id)
@classmethod
def load_with_client(cls, client, limit, marker, include_marker):
schemas, next_marker = client.list_databases(
limit=limit,
marker=marker,
include_marker=include_marker)
model_schemas = []
ignore_dbs = CONF.ignore_dbs
for schema in schemas:
mysql_schema = guest_models.MySQLDatabase()
mysql_schema.deserialize(schema)
if mysql_schema.name in ignore_dbs:
continue
model_schemas.append(Schema(mysql_schema.name,
mysql_schema.collate,
mysql_schema.character_set))
return model_schemas, next_marker
|
|
"""
This module provides classes to build an OrderList for input to a plant,
including the Order instances and their Recipe instances.
"""
from xml.dom import minidom
from plant import CraneMoveTime
class Recipe(object):
"""
This class provides a Recipe for an Order. It is a list (or dictionary) of
tuples (str machineName, int timeAtMachine).
"""
def __init__(self):
"""
recipe is a list (or dictionary) that contains the tuples of time
information for the Recipe.
"""
object.__init__(self)
self.recipe = []
def indexOfMachine(self, machineName):
"""
Returns the index of the Machine with machineName in the recipe list.
"""
for i, r in enumerate(self.recipe):
if r[0] == machineName:
return i
return -1
def calcMinProcTime(self, machineName = None):
"""
This method calculates the minimum processing time of the Recipe
starting from Machine with machineName (Considers the constant plant
delays for the crane movement time between machines).
"""
if machineName == None:
index = 0
else:
index = self.indexOfMachine(machineName)
res = (len(self.recipe) - 1 - index) * CraneMoveTime
while index < len(self.recipe):
res += self.recipe[index][1]
if self.recipe[index][1] == 0:
res -= CraneMoveTime
index += 1
return res
def __getitem__(self, key):
"""
Returns the time in the Recipe at Machine with name key.
"""
assert type(key) == str or type(key) == unicode
for r in self.recipe:
if r[0] == key:
return r[1]
return None
def __setitem__(self, key, value):
"""
Adds a Recipe item (a tuple of (str machineName, int time)) to the
Recipe list (or dictionary). If machineName is already in the list, the
existing entry is replaced with the new time value.
"""
assert type(key) == str or type(key) == unicode
assert type(value) == int
if self.__getitem__(key) == None:
self.recipe.append((key, value))
else:
for i, r in enumerate(self.recipe):
if r[0] == key:
del self.recipe[i]
self.recipe.insert(i, (key, value))
return
def toXml(self, xmlDoc):
"""
Converts the Recipe to an XML description and returns the XML tree
node. XmlDoc is the document to create the tree element from.
"""
node = xmlDoc.createElement("recipe")
itemNode = None
for i, r in enumerate(self.recipe):
itemNode = xmlDoc.createElement("machine")
itemNode.setAttribute("name", r[0])
itemNode.setAttribute("time", str(r[1]))
node.appendChild(itemNode)
return node
@staticmethod
def fromXml(element):
"""
A static method that creates a Recipe instance from an XML tree node
and returns it.
"""
recipe = Recipe()
for e in element.getElementsByTagName("machine"):
recipe[e.getAttribute("name")] = int(e.getAttribute("time"))
return recipe
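# Worked example (illustrative, not part of the original module). Assuming a
# hypothetical CraneMoveTime of 10, a two-machine Recipe behaves like this:
#
#     r = Recipe()
#     r['furnace'] = 30
#     r['press'] = 20
#     r.indexOfMachine('press')        # -> 1
#     r['furnace']                     # -> 30
#     # calcMinProcTime() from the start adds one crane move between the two
#     # machines: 30 + 20 + 10 = 60 when CraneMoveTime == 10.
#     r.calcMinProcTime()              # -> 60
#     r.calcMinProcTime('press')       # -> 20 (no crane move left)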
class Order(object):
"""
This class provides an Order with id, deadline and recipe.
"""
def __init__(self, id = 0, deadline = 0, currentMachine = "", currentOvertime = 0):
"""
id is a unique int for the Order.
deadline is the int deadline for the Order.
recipe is the Recipe instance for the order.
"""
object.__init__(self)
assert deadline >= 0
assert id >= 0
self.id = id
self.deadline = deadline
self.recipe = None
self.currentMachine = currentMachine
self.currentOvertime = currentOvertime
def toXml(self, xmlDoc):
"""
Converts the Order to an XML description and returns the XML tree
node. XmlDoc is the document to create the tree element from.
"""
node = xmlDoc.createElement("order")
node.setAttribute("id", str(self.id))
node.setAttribute("deadline", str(self.deadline))
node.appendChild(self.recipe.toXml(xmlDoc))
return node
@staticmethod
def fromXml(element):
"""
A static method that creates an Order instance from an XML tree node
and returns it. The number of children (Recipe instances) of the node
has to be exactly one since each Order can only have a single Recipe.
"""
assert len(element.getElementsByTagName("recipe")) == 1
order = Order(
deadline = int(element.getAttribute("deadline")),
id = int(element.getAttribute("id")),
currentMachine = element.getAttribute("current_machine"),
currentOvertime = int(element.getAttribute("current_overtime"))
)
order.recipe = Recipe.fromXml(element.getElementsByTagName("recipe")[0])
return order
class OrderList(object):
"""
This class provides a list of Order instances.
"""
def __init__(self):
"""
orders is a list of Order instances.
"""
object.__init__(self)
self.orders = []
def toXmlFile(self, filename):
"""
Exports the OrderList instance to an xml file (str filename).
"""
file = open(filename, "w")
file.write(self.toXml().toprettyxml())
file.close()
def toXml(self):
"""
Creates an XML tree node element of the OrderList instance and returns
it.
"""
domImp = minidom.getDOMImplementation()
xmlDoc = domImp.createDocument(None, "order-list", None)
for o in self.orders:
xmlDoc.documentElement.appendChild(o.toXml(xmlDoc))
return xmlDoc.documentElement
@staticmethod
def fromXml(xmlDoc):
"""
A static method that creates an OrderList instance from an XML tree
node and returns it.
"""
orderList = OrderList()
for e in xmlDoc.getElementsByTagName("order"):
orderList.addOrder(Order.fromXml(e))
return orderList
@staticmethod
def fromXmlFile(filename):
"""
A static method that loads an OrderList from a file (str filename) and
returns an instance.
"""
file = open(filename, "r")
doc = minidom.parse(file)
orderList = OrderList.fromXml(doc)
file.close()
return orderList
def addOrder(self, order):
"""
Adds an Order to the OrderList. The Order instance and the Order.id
cannot be already in the list.
"""
assert order not in self.orders
for o in self.orders:
if o.id == order.id:
raise Exception("Order id already in order list")
self.orders.append(order)
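if __name__ == "__main__":
    # Minimal usage sketch (added for illustration; machine names, times and
    # the output file name are made up). Builds a one-order list and exports
    # it to XML using the classes defined above.
    recipe = Recipe()
    recipe["furnace"] = 30
    recipe["press"] = 20
    order = Order(id = 1, deadline = 120)
    order.recipe = recipe
    orderList = OrderList()
    orderList.addOrder(order)
    # Print the generated XML and also write it to a file.
    print orderList.toXml().toprettyxml()
    orderList.toXmlFile("orders.xml")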
|
|
"""
Puppy experiments are executed within the [Webots]_ simulator. Since
this module is linked to :py:mod:`PuPy` through the class
:py:class:`ActorCritic`, this is the native approach. For the purpose of
Puppy, an adapted Actor-Critic is implemented in :py:class:`PuppyHDP`,
handling Puppy specifics. It inherits from :py:class:`ADHDP`,
hence can be used in the same fashion.
Simulation with [Webots]_ is often time consuming. Therefore, a method
is provided to collect data in the simulation and replay it later. This
is implemented through :py:class:`OfflineCollector` and
:py:func:`puppy.offline_playback`. An example of how to approach this
is documented in :ref:`puppy_offline`.
"""
from ..hdp import ADHDP
from ..rl import Plant
import numpy as np
import warnings
import h5py
import HDPy
SENSOR_NAMES = ['trg0', 'trg1', 'trg2', 'trg3', 'accelerometer_x', 'accelerometer_y', 'accelerometer_z', 'compass_x', 'compass_y', 'compass_z', 'gyro_x', 'gyro_y', 'gyro_z', 'hip0', 'hip1', 'hip2', 'hip3', 'knee0', 'knee1', 'knee2', 'knee3', 'puppyGPS_x', 'puppyGPS_y', 'puppyGPS_z', 'touch0', 'touch0', 'touch1', 'touch2', 'touch3']
class PuppyHDP(ADHDP):
"""ADHDP subtype for simulations using Puppy in webots.
This class adds some code considering restarts of Puppy. It adds
an optional argument ``tumbled_reward``. The reward will be forced
to this value after the supervisor detected tumbling. If
:py:const:`None` (the default) is used, the reward remains
unchanged.
"""
def __init__(self, *args, **kwargs):
self._tumbled_reward = kwargs.pop('tumbled_reward', None)
self.has_tumbled = False
self.supervisor_tumbled_notice = 0
super(PuppyHDP, self).__init__(*args, **kwargs)
def _signal(self, msg, **kwargs):
"""Handle messages from the supervisor. Messages are expected
when the robot has tumbled and thus the robot has to be reset.
"""
super(PuppyHDP, self)._signal(msg, **kwargs)
# msg is 'reset', 'out_of_arena', 'tumbled_grace_start' or 'tumbled'
# for msg==reset, the robot is reset immediately
# msg==tumbled_grace_start marks the start of the grace period of the tumbled robot
if msg == 'tumbled':
#print "Tumbling received", self.num_step
self.supervisor_tumbled_notice = 1
if msg == 'reset':
#print "Reset received", self.num_step
self.child.reset()
self.new_episode()
# DEPRECATED: use of event_handler replaced by RobotActor's signal chain.
#def event_handler(self, robot, epoch, current_time, msg):
# """Handle messages from the supervisor. Messages are expected
# when the robot has tumbled and thus the robot has to be reset.
# """
# # msg is 'reset', 'out_of_arena', 'tumbled_grace_start' or 'tumbled'
# # for msg==reset, the robot is reset immediately
# # msg==tumbled_grace_start marks the start of the grace period of the tumbled robot
# if msg == 'tumbled_grace_start':
# #print "Tumbling received", self.num_step
# self.supervisor_tumbled_notice = 1
#
# if msg == 'reset':
# #print "Reset received", self.num_step
# self.child.reset()
# self.new_episode()
# self.signal('new_episode')
def new_episode(self):
"""After restarting, reset the tumbled values and start the
new episode.
"""
super(PuppyHDP, self).new_episode()
self.has_tumbled = False
self.supervisor_tumbled_notice = 0
def _step(self, s_curr, epoch, a_curr, reward):
"""Ensure the tumbled reward and initiate behaviour between
restarts. The step of the parent is then invoked.
"""
if self.has_tumbled:
epoch['a_next'] = np.zeros(shape=a_curr.shape[::-1])
return epoch
if self.supervisor_tumbled_notice > 0:
if self.supervisor_tumbled_notice > 1:
if self._tumbled_reward is not None:
reward = np.atleast_2d([self._tumbled_reward])
#reward /= (1.0 - self.gamma(self.num_episode, self.num_step))
# geometric series to incorporate future rewards
# note that with this, it's err = r/(1-gamma) - J * (1-gamma)
# but it should be err = r/(1-gamma) - J
# thus, there's a difference of J*gamma
# this is solved by temporarily setting gamma = 0.0
self.has_tumbled = True
#old_gamma = self.gamma
#self.set_gamma(0.0)
self.supervisor_tumbled_notice += 1
#reward += np.random.normal(scale=0.001)
epoch = super(PuppyHDP, self)._step(s_curr, epoch, a_curr, reward)
#if self.supervisor_tumbled_notice > 2:
# self.gamma = old_gamma
#print self.num_step, reward, a_curr.T, a_next.T, epoch['puppyGPS_x'][-1]
return epoch
def init_episode(self, epoch, time_start_ms, time_end_ms, step_size_ms):
"""Initial behaviour (after reset)
.. note::
Assuming identical initial trajectories, the initial state
is the same - and thus doesn't matter.
Non-identical initial trajectories will result in
non-identical behaviour, therefore the initial state should
be different (initial state w.r.t. start of learning).
Due to this, the critic is already updated in the initial
trajectory.
"""
if self.num_step > 2:
# in_state = self.plant.state_input(self.s_curr)
# a_curr_nrm = self.normalizer.normalize_value('a_curr', self.a_curr)
# i_curr = np.vstack((in_state, a_curr_nrm)).T
# x_curr = self.reservoir(i_curr, simulate=False)
# x_curr = np.hstack((x_curr, i_curr)) # FIXME: Input/Output ESN Model
i_curr, x_curr, _ = self._critic_eval(self.s_curr, self.a_curr, False, 'a_curr')
epoch['x_curr'] = x_curr
epoch['i_curr'] = i_curr
epoch['a_next'] = self.a_curr.T
epoch['a_curr'] = self.a_curr.T
self.s_curr = epoch
return self.child(epoch, time_start_ms, time_end_ms, step_size_ms)
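# Instantiation sketch (illustrative, not part of the original module):
# ``tumbled_reward`` is popped from the keyword arguments before they are
# forwarded to ADHDP, so it can simply be added to the usual constructor call,
# e.g.
#
#     critic = PuppyHDP(..., tumbled_reward=0.0)
#
# With tumbled_reward=None (the default) the reward signal is passed through
# unchanged after a tumble has been detected.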
class OfflineCollector(ADHDP):
"""Collect sensor data for Puppy in webots, such that it can be
reused later to train a critic offline.
Note that in contrast to :py:class:`ADHDP`, some
structures are not required (reservoir, plant). They will be set
to stubs, hence don't need to be passed.
Some extra metadata is stored in the datafile, which allows
processing of the experiment in an offline fashion through the
function :py:func:`puppy.offline_playback`.
"""
def __init__(self, *args, **kwargs):
# look for policy's member 'action_space_dim' (policy is hidden in child or sub-child)
policy = kwargs['policy']
if hasattr(policy, 'action_space_dim'):
action_space_dim = policy.action_space_dim
else:
from ..hdp import return_none
action_space_dim = return_none
class Phony:
"""Stub for a reservoir."""
reset_states = False
def get_input_dim(self):
"""Return input dimension (action space dim.)"""
return action_space_dim()
def reset(self):
"""Reset to the initial state (no effect)"""
pass
kwargs['plant'] = Plant(state_space_dim=0)
kwargs['reservoir'] = Phony()
kwargs['readout'] = None
self.supervisor_tumbled_notice = 0
super(OfflineCollector, self).__init__(*args, **kwargs)
def new_episode(self):
"""After restarting, reset the tumbled values and start the
new episode.
"""
super(OfflineCollector, self).new_episode()
self.supervisor_tumbled_notice = 0
def __call__(self, epoch, time_start_ms, time_end_ms, step_size_ms):
"""Store the sensor measurements of an epoch in the datafile
as well as relevant metadata. The robot detects if the
simulation was reverted and if it has tumbled (through the
supervisor message). Other guards are not considered, as none
are covered by :py:class:`PuppyHDP`.
"""
#print "(call)", time_start_ms, self.a_curr.T, ('puppyGPS_x' in epoch and epoch['puppyGPS_x'][-1] or 'NOX')
if len(epoch) == 0:
# TODO: epoch length will never be 0 I think(?) Use _get_initial_target() for this purpose.
# the very first initial epoch of the first episode
# this case occurs when the simulation starts or after it is reverted
self.num_step += 1
#self._pre_increment_hook(dict(), empty_initial_step=np.array([1]))
self._pre_increment_hook(dict(), init_step=np.array([self.num_step]))
return self.child.get_iterator(time_start_ms, time_end_ms, step_size_ms)
# Determine next action
if self.num_step <= self._init_steps:
# Init
a_next = self.a_curr
elif self.supervisor_tumbled_notice > 2:
# Tumbled, prepare for reset
a_next = np.zeros(shape=self.a_curr.shape)
self.supervisor_tumbled_notice += 1
elif self.supervisor_tumbled_notice > 0:
# Tumbled, still walking
a_next = self._next_action_hook(self.a_curr)
self.supervisor_tumbled_notice += 1
else:
# Normal walking
a_next = self._next_action_hook(self.a_curr)
# if self.num_step <= self._init_steps:
# print "(init)", a_next.T
# elif self.supervisor_tumbled_notice > 2:
# print time_start_ms, self.a_curr.T, self.num_step
# else:
# print time_start_ms, self.a_curr.T, epoch['puppyGPS_x'][-1]
epoch['a_curr'] = self.a_curr.T
epoch['a_next'] = a_next.T
self.a_curr = a_next
self.num_step += 1
#print "(call-end)", self.num_step, a_next.T, a_next.shape, self.a_curr.shape
return self.child(epoch, time_start_ms, time_end_ms, step_size_ms)
def _signal(self, msg, **kwargs):
"""Handle messages from the supervisor. Messages are expected
when the robot has tumbled and thus the robot has to be reset.
"""
super(OfflineCollector, self)._signal(msg, **kwargs)
# msg is 'reset', 'out_of_arena', 'tumbled_grace_start' or 'tumbled'
# for msg==reset, the robot is reset immediately
# msg==tumbled_grace_start marks the start of the grace period of the tumbled robot
if msg == 'tumbled_grace_start':
#print "Tumbling received", self.num_step
self.supervisor_tumbled_notice = 1
self._pre_increment_hook(dict(), tumbled=np.array([self.num_step]))
if msg == 'reset':
#print "Reset received", self.num_step
self.child.reset()
self.new_episode()
# DEPRECATED: use of event_handler replaced by RobotActor's signal chain.
#def event_handler(self, robot, epoch, current_time, msg):
# """Handle messages from the supervisor. Messages are expected
# when the robot has tumbled and thus the robot has to be reset.
# """
# # msg is 'reset', 'out_of_arena', 'tumbled_grace_start' or 'tumbled'
# # for msg==reset, the robot is reset immediately
# # msg==tumbled_grace_start marks the start of the grace period of the tumbled robot
# if msg == 'tumbled_grace_start':
# #print "Tumbling received", self.num_step
# self.supervisor_tumbled_notice = 1
# self._pre_increment_hook(dict(), tumbled=np.array([self.num_step]))
#
# if msg == 'reset':
# #print "Reset received", self.num_step
# self.child.reset()
# self.new_episode()
# self.signal('new_episode')
def _next_action_hook(self, a_next):
"""Defines the action sampling policy of the offline data
gathering. Note that this policy is very relevant to later
experiments, hence this method should be overloaded (although
a default policy is provided).
"""
warnings.warn('Default sampling policy is used.')
a_next = np.zeros(self.a_curr.shape)
# Prohibit too small or large amplitudes
while (a_next < 0.2).any() or (a_next > 2.0).any() or ((a_next > 1.0).any() and a_next.ptp() > 0.4):
a_next = self.a_curr + np.random.normal(0.0, 0.15, size=self.a_curr.shape)
return a_next
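# Sketch of a custom sampling policy (illustrative, not part of the original
# module): subclasses are expected to overload _next_action_hook. This example
# keeps the proposals within the 0.2 to 2.0 amplitude range used by the default
# policy but draws a smaller, uniformly distributed step; the step size 0.1 is
# an arbitrary choice.
class ExampleRandomWalkCollector(OfflineCollector):
    """OfflineCollector with a bounded random-walk sampling policy."""
    def _next_action_hook(self, a_next):
        # Propose an action near the current one.
        proposal = self.a_curr + np.random.uniform(-0.1, 0.1, size=self.a_curr.shape)
        # Clip the proposal back into the allowed amplitude range.
        return np.clip(proposal, 0.2, 2.0)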
def offline_playback(pth_data, critic, samples_per_action, ms_per_step, episode_start=None, episode_end=None, min_episode_len=0, err_coefficient=0.01, episode_start_test=None):
"""Simulate an experiment run for the critic by using offline data.
The data has to be collected in webots, using the respective
robot and supervisor. Note that the behaviour of the simulation
should match what's expected by the critic. The critic is fed the
sensor data, in order. Of course, it can't react to it since
the next action is predefined.
Additional to the sensor fields, the 'tumbling' dataset is expected
which indicates if and when the robot has tumbled. It is used such
that the respective signals can be sent to the critic.
The critic won't store any sensory data again.
``pth_data``
Path to the datafile with the sensory information (HDF5).
``critic``
PuppyHDP instance.
``samples_per_action``
Number of samples per control step. Must correspond to the data.
``ms_per_step``
Sensor sampling period.
``episode_start``
Defines a lower limit on the episode number. Passed as an int;
it refers to the episode index, not its identifier.
``episode_end``
Defines an upper limit on the episode number. Passed as an int;
it refers to the episode index, not its identifier.
``min_episode_len``
Only pick episodes longer than this threshold.
``err_coefficient``
Coefficient for the TD-error exponential moving average (EMA).
``episode_start_test``
Starting point of the test phase, i.e. the episode index from which the TD-error is accumulated.
:returns: accumulated TD-error average
"""
# Open data file, get valid experiments
f = h5py.File(pth_data,'r')
storages = map(str, sorted(map(int, f.keys())))
storages = filter(lambda s: len(f[s]) > 0, storages)
if min_episode_len > 0:
storages = filter(lambda s: f[s]['a_curr'].shape[0] > min_episode_len, storages)
if episode_end is not None:
storages = storages[:episode_end]
if episode_start is not None:
storages = storages[episode_start:]
assert len(storages) > 0
if episode_start_test is None:
episode_start_test = len(storages)/2 - 1  # use the last half for testing
# Prepare critic; redirect hooks to avoid storing epoch data twice
# and feed the actions
global accError
accError = 0 # accumulated error
next_action = None
episode = None
critic._pre_increment_hook_orig = critic._pre_increment_hook
critic._next_action_hook_orig = critic._next_action_hook
def pre_increment_hook(epoch, **kwargs):
kwargs['offline_episode'] = np.array([episode])
critic._pre_increment_hook_orig(dict(), **kwargs)
if int(episode) > episode_start_test and kwargs.has_key('err'):
global accError
accError = accError*(1-err_coefficient) + (kwargs['err'][0][0]**2)*err_coefficient # accumulated squared error
#accError = accError*(1-err_coefficient) + np.abs(kwargs['err'][0][0])*err_coefficient # accumulated absolute error
def next_action_hook(a_next):
#print "(next)", a_next.T, next_action.T
return next_action
critic._next_action_hook = next_action_hook
critic._pre_increment_hook = pre_increment_hook
# Main loop, feed data to the critic
time_step_ms = ms_per_step * samples_per_action
time_start_ms = 0
for episode_idx, episode in enumerate(storages):
print episode_idx
data_grp = f[episode]
N = len(data_grp['trg0'])
# get the stored ratio
#db_samples_per_action = N / len(data_grp['a_next'])
#assert N % db_samples_per_action == 0
assert N % samples_per_action == 0
# get tumbled infos
if 'tumble' in data_grp:
from pylab import find
time_tumbled = find(data_grp['tumble'])[0] / samples_per_action * samples_per_action
#time_tumbled = data_grp['tumbled'][0] * db_samples_per_action
#time_tumbled = data_grp['tumble'][0] * samples_per_action
else:
time_tumbled = -1
# initial, empty call
if 'init_step' in data_grp:
print "Simulation was started/reverted"
time_start_ms = 0
critic(dict(), time_start_ms, time_start_ms + samples_per_action, ms_per_step)
time_tumbled -= samples_per_action
# initial action
critic.a_curr = np.atleast_2d(data_grp['a_curr'][0]).T
# loop through data, incrementally feed the critic
for num_iter in np.arange(0, N, samples_per_action):
# next action
next_action = np.atleast_2d(data_grp['a_next'][num_iter/samples_per_action]).T  # one stored action per control step
# get data
time_start_ms += time_step_ms
time_end_ms = time_start_ms + time_step_ms
chunk = dict([(k, data_grp[k][num_iter:(num_iter+samples_per_action)]) for k in SENSOR_NAMES])
# send tumbled message
if num_iter == time_tumbled:
#critic.event_handler(None, dict(), time_tumbled, 'tumbled_grace_start')
critic.signal('tumbled_grace_start')
# update critic
critic(chunk, time_start_ms, time_end_ms, time_step_ms)
# send reset after episode has finished
if episode_idx < len(storages) - 1:
#critic.event_handler(None, dict(), ms_per_step * N, 'reset')
critic.signal('reset')
critic.signal('new_episode') # collectors will create new group
# cleanup
critic._pre_increment_hook = critic._pre_increment_hook_orig
critic._next_action_hook = critic._next_action_hook_orig
del critic._pre_increment_hook_orig
del critic._next_action_hook_orig
return accError
## DEPRECATED ##
def puppy_offline_playback(*args, **kwargs):
"""Alias of offline_playback.
.. deprecated:: 1.0
Use :py:func:`offline_playback` instead
"""
warnings.warn("This function is deprecated. Use 'offline_playback' instead")
return offline_playback(*args, **kwargs)
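# Usage sketch (illustrative; the file name and the numeric parameters are
# assumptions, not values prescribed by this module):
#
#     critic = PuppyHDP(...)   # configured like the online experiment
#     td_error = offline_playback('puppy_data.hdf5', critic,
#                                 samples_per_action=150, ms_per_step=20,
#                                 min_episode_len=30)
#
# The returned value is the exponential moving average of the squared TD-error
# accumulated over the test episodes.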
|
|
#!/usr/bin/env python
#
# Copyright 2015 Naver Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import unittest
import subprocess
import config
import default_cluster
import util
import testbase
import redis_mgmt
import time
import telnet
import random
import fi_confmaster
import load_generator_crc16
def block_network(cluster, mgmt_ip, mgmt_port):
# Block
if util.iptables_drop('A', '127.0.0.100') == False:
util.log('failed to add a blocking rule to iptables.')
return False
for i in range(4):
util.log('waiting... %d' % (i + 1))
time.sleep(1)
# Check cluster state
for i in range(2):
util.check_cluster(cluster['cluster_name'], mgmt_ip, mgmt_port)
time.sleep(1)
return True
def unblock_network(cluster, mgmt_ip, mgmt_port, final_state):
# Unblock
if util.iptables_drop('D', '127.0.0.100') == False:
util.log('failed to delete a blocking rule from iptables.')
return False
# Check cluster state
for i in range(3):
util.check_cluster(cluster['cluster_name'], mgmt_ip, mgmt_port, final_state)
time.sleep(1)
return True
def is_pgs_normal(pgs):
return pgs['active_role'] == pgs['mgmt_role'] and (pgs['mgmt_role'] == 'M' or pgs['mgmt_role'] == 'S') and pgs['color'] == 'GREEN'
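# Illustrative example (not part of the original test suite): a PGS entry that
# is considered normal by is_pgs_normal() looks roughly like
#
#     {'active_role': 'M', 'mgmt_role': 'M', 'color': 'GREEN', ...}
#
# i.e. the active and management roles agree, the role is master or slave, and
# the color reported by the confmaster is GREEN.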
class TestNetworkIsolation(unittest.TestCase):
def setUp(self):
ret = util.nic_add('eth1:arc', '127.0.0.100')
util.set_process_logfile_prefix( 'TestNetworkIsolation_%s' % self._testMethodName )
self.cleanup_iptables()
return 0
def tearDown(self):
util.nic_del('eth1:arc')
self.cleanup_iptables()
return 0
def cleanup_iptables(self):
while True:
if util.iptables_redirect('D', '127.0.0.100', '127.0.0.1') == False:
break
while True:
if util.iptables_drop('D', '127.0.0.100') == False:
break
while True:
if util.iptables_drop('D', '127.0.0.101') == False:
break
while True:
if util.iptables_drop('D', '127.0.0.102') == False:
break
def test_1_mgmt_is_isolated(self):
util.print_frame()
util.iptables_print_list()
cluster = filter(lambda x: x['cluster_name'] == 'network_isolation_cluster_1', config.clusters)[0]
util.log(util.json_to_str(cluster))
# MGMT
mgmt_ip = cluster['servers'][0]['real_ip']
mgmt_port = cluster['servers'][0]['cm_port']
# Create cluster
conf_checker = default_cluster.initialize_starting_up_smr_before_redis( cluster )
self.assertIsNotNone(conf_checker, 'failed to initialize cluster')
# Print initial state of cluster
util.log('\n\n\n ### INITIAL STATE OF CLUSTER ### ')
initial_state = []
self.assertTrue(util.check_cluster(cluster['cluster_name'], mgmt_ip, mgmt_port, initial_state, check_quorum=True), 'failed to check cluster state')
# Set SMR option (slave_idle_timeout)
util.log('\n\n\n ### Set SMR option ###')
for s in cluster['servers']:
t = telnet.Telnet('SMR%d' % s['id'])
self.assertEqual(t.connect(s['ip'], s['smr_mgmt_port']), 0,
'Failed to connect to smr. ADDR=%s:%d' % (s['ip'], s['smr_mgmt_port']))
cmd = 'confset slave_idle_timeout_msec 18000'
util.log('[%s:%d] >> %s' % (s['ip'], s['smr_mgmt_port'], cmd))
t.write('confset slave_idle_timeout_msec 18000\r\n')
reply = t.read_until('\r\n').strip()
util.log('[%s:%d] << %s' % (s['ip'], s['smr_mgmt_port'], reply))
self.assertEqual(reply, '+OK', 'Failed to set slave_idle_timeout, REPLY=%s' % reply)
# Network isolation test
for cnt in range(5):
# Block network
util.log('\n\n\n ### BLOCK NETWORK, %d ### ' % cnt)
for s in cluster['servers']:
"""Loopback Address Range (Reference : RFC3330)
127.0.0.0/8 - This block is assigned for use as the Internet host
loopback address. A datagram sent by a higher level protocol to an
address anywhere within this block should loop back inside the host.
This is ordinarily implemented using only 127.0.0.1/32 for loopback,
but no addresses within this block should ever appear on any network
anywhere [RFC1700, page 5].
"""
self.assertTrue(util.iptables_drop('A', '127.0.0.100', s['smr_mgmt_port']), 'failed to add a blocking rule to iptables.')
for i in range(4):
util.log('waiting... %d' % (i + 1))
time.sleep(1)
# Check cluster state
ok = False
for i in range(7):
isolated_states = []
util.check_cluster(cluster['cluster_name'], mgmt_ip, mgmt_port, isolated_states, check_quorum=True)
time.sleep(1)
state_transition_done = True
for s in isolated_states:
if s['ip'] != '127.0.0.100':
continue
if s['active_role'] != '?' or s['mgmt_role'] != 'N':
state_transition_done = False
if state_transition_done :
ok = True
break
time.sleep(1)
self.assertTrue(ok, 'Fail, state transition')
# Unblock network
util.log('\n\n\n ### UNBLOCK NETWORK, %d ### ' % cnt)
for s in cluster['servers']:
self.assertTrue(util.iptables_drop('D', '127.0.0.100', s['smr_mgmt_port']), 'failed to delete a blocking rule from iptables.')
# Check cluster state
ok = False
for i in range(7):
final_state = []
util.check_cluster(cluster['cluster_name'], mgmt_ip, mgmt_port, final_state, check_quorum=True)
all_green = True
for s in final_state:
if is_pgs_normal(s) == False:
all_green = False
if all_green:
ok = True
break
time.sleep(1)
self.assertTrue(ok, 'Fail, state consistency')
# Check state
self.assertNotEqual(initial_state, None, 'initial_state is None')
self.assertNotEqual(final_state, None, 'final_state is None')
self.assertTrue(conf_checker.final_check())
# Shutdown cluster
default_cluster.finalize(cluster)
def test_2_some_pgs_is_isolated(self):
util.print_frame()
util.iptables_print_list()
# Add forwarding role (127.0.0.100 -> 127.0.0.1)
self.assertTrue(util.iptables_redirect('A', '127.0.0.100', '127.0.0.1'), 'failed to add a forwarding rule to iptables.')
cluster = filter(lambda x: x['cluster_name'] == 'network_isolation_cluster_2', config.clusters)[0]
util.log(util.json_to_str(cluster))
# MGMT
mgmt_ip = cluster['servers'][0]['real_ip']
mgmt_port = cluster['servers'][0]['cm_port']
# Create cluster
conf_checker = default_cluster.initialize_starting_up_smr_before_redis( cluster )
self.assertIsNotNone(conf_checker, 'failed to initialize cluster')
# Place master on virtual ip address in order to cause master election.
pg_id = 0
m = util.get_server_by_role_and_pg(cluster['servers'], 'master', pg_id)
s = util.get_server_by_role_and_pg(cluster['servers'], 'slave', pg_id)
if m.has_key('ip') == True and m.has_key('real_ip') == False:
ret = util.role_change(cluster['servers'][0], cluster['cluster_name'], s['id'])
self.assertNotEquals(ret, -1, 'failed to change %d to a master' % s['id'])
# Print initial state of cluster
util.log('\n\n\n ### INITIAL STATE OF CLUSTER ### ')
initial_state = []
self.assertTrue(util.check_cluster(cluster['cluster_name'], mgmt_ip, mgmt_port, initial_state, check_quorum=True), 'failed to check cluster state')
# Network isolation test
for cnt in range(3):
# Block network
util.log('\n\n\n ### BLOCK NETWORK, %d ### ' % cnt)
self.assertTrue(util.iptables_drop('A', '127.0.0.100'), 'failed to add a blocking rule to iptables.')
for i in range(4):
util.log('waiting... %d' % (i + 1))
time.sleep(1)
# Check cluster state
ok = False
for i in range(7):
isolated_states = []
util.check_cluster(cluster['cluster_name'], mgmt_ip, mgmt_port, isolated_states, check_quorum=True)
time.sleep(1)
state_transition_done = True
for s in isolated_states:
if s['pgs_id'] == 1:
continue
if s['active_role'] != '?' or s['mgmt_role'] != 'N':
state_transition_done = False
if state_transition_done :
ok = True
break
time.sleep(1)
self.assertTrue(ok, 'Fail, state transition')
# Unblock network
util.log('\n\n\n ### UNBLOCK NETWORK, %d ### ' % cnt)
self.assertTrue(util.iptables_drop('D', '127.0.0.100'), 'failed to delete a blocking rule from iptables.')
# Check cluster state
ok = False
for i in range(7):
final_state = []
util.check_cluster(cluster['cluster_name'], mgmt_ip, mgmt_port, final_state, check_quorum=True)
state_consistency = True
for s in final_state:
if s['pgs_id'] == 1:
continue
if is_pgs_normal(s) == False:
state_consistency = False
if state_consistency:
ok = True
break
time.sleep(1)
self.assertTrue(ok, 'Fail, state consistency')
# Check state
self.assertNotEqual(initial_state, None, 'initial_state is None')
self.assertNotEqual(final_state, None, 'final_state is None')
initial_state = sorted(initial_state, key=lambda x: int(x['pgs_id']))
final_state = sorted(final_state, key=lambda x: int(x['pgs_id']))
for i in range(len(final_state)):
msg = 'ts (%d)%d -> (%d)%d' % (initial_state[i]['pgs_id'], initial_state[i]['active_ts'], final_state[i]['pgs_id'], final_state[i]['active_ts'])
util.log(msg)
self.assertNotEqual(initial_state[i]['active_ts'], final_state[i]['active_ts'], msg)
# Delete forwarding role (127.0.0.100 -> 127.0.0.1)
self.assertTrue(util.iptables_redirect('D', '127.0.0.100', '127.0.0.1'), 'failed to delete a forwarding rule from iptables.')
self.assertTrue(conf_checker.final_check())
# Shutdown cluster
default_cluster.finalize(cluster)
def test_3_some_pgs_is_isolated_2copy(self):
util.print_frame()
out = util.sudo('iptables -L')
util.log('====================================================================')
util.log('out : %s' % out)
util.log('out.return_code : %d' % out.return_code)
util.log('out.stderr : %s' % out.stderr)
util.log('out.succeeded : %s' % out.succeeded)
# Add forwarding role (127.0.0.100 -> 127.0.0.1)
self.assertTrue(util.iptables_redirect('A', '127.0.0.100', '127.0.0.1'), 'failed to add a forwarding rule to iptables.')
cluster = filter(lambda x: x['cluster_name'] == 'network_isolation_cluster_1_2copy', config.clusters)[0]
util.log(util.json_to_str(cluster))
# MGMT
mgmt_ip = cluster['servers'][0]['ip']
mgmt_port = cluster['servers'][0]['cm_port']
# Create cluster
conf_checker = default_cluster.initialize_starting_up_smr_before_redis( cluster )
self.assertIsNotNone(conf_checker, 'failed to initialize cluster')
# Place master on real ip address
for pg_id in [0, 1]:
m = util.get_server_by_role_and_pg(cluster['servers'], 'master', pg_id)
s = util.get_server_by_role_and_pg(cluster['servers'], 'slave', pg_id)
if m.has_key('ip') and m.has_key('real_ip'):
if m['ip'] != m['real_ip']:
ret = util.role_change(cluster['servers'][0], cluster['cluster_name'], s['id'])
self.assertNotEquals(ret, -1, 'failed to change %d to a master' % s['id'])
# Print initial state of cluster
util.log('\n\n\n ### INITIAL STATE OF CLUSTER ### ')
initial_state = []
self.assertTrue(util.check_cluster(cluster['cluster_name'], mgmt_ip, mgmt_port, initial_state, check_quorum=True), 'failed to check cluster state')
# Network isolation test
for cnt in range(3):
# Block network
util.log('\n\n\n ### BLOCK NETWORK, %d ### ' % cnt)
self.assertTrue(util.iptables_drop('A', '127.0.0.100'), 'failed to add a blocking rule to iptables.')
for i in range(4):
util.log('waiting... %d' % (i + 1))
time.sleep(1)
# Check cluster state
ok = False
for i in range(7):
isolated_states = []
util.check_cluster(cluster['cluster_name'], mgmt_ip, mgmt_port, isolated_states, check_quorum=True)
time.sleep(1)
state_transition_done = True
for s in isolated_states:
if s['pgs_id'] == 0 or s['pgs_id'] == 1:
continue
if s['active_role'] != 'M' or s['mgmt_role'] != 'M':
state_transition_done = False
if s['quorum'] != 0:
state_transition_done = False
if state_transition_done:
ok = True
break
time.sleep(1)
self.assertTrue(ok, 'Fail, state transition')
# Unblock network
util.log('\n\n\n ### UNBLOCK NETWORK, %d ### ' % cnt)
self.assertTrue(util.iptables_drop('D', '127.0.0.100'), 'failed to delete a blocking rule from iptables.')
# Check cluster state
ok = False
for i in range(7):
final_state = []
if util.check_cluster(cluster['cluster_name'], mgmt_ip, mgmt_port, final_state, check_quorum=True) == False:
time.sleep(1)
continue
state_consistency = True
for s in final_state:
if s['pgs_id'] == 1:
continue
if is_pgs_normal(s) == False:
state_consistency = False
if state_consistency:
ok = True
break
time.sleep(1)
self.assertTrue(ok, 'Fail, state consistency')
# Check state
self.assertNotEqual(initial_state, None, 'initial_state is None')
self.assertNotEqual(final_state, None, 'final_state is None')
# Delete forwarding role (127.0.0.100 -> 127.0.0.1)
self.assertTrue(util.iptables_redirect('D', '127.0.0.100', '127.0.0.1'), 'failed to delete a forwarding rule from iptables.')
self.assertTrue(conf_checker.final_check())
# Shutdown cluster
default_cluster.finalize(cluster)
def test_4_mgmt_is_isolated_with_red_failover(self):
util.print_frame()
util.iptables_print_list()
cluster = filter(lambda x: x['cluster_name'] == 'network_isolation_cluster_1', config.clusters)[0]
util.log(util.json_to_str(cluster))
self.leader_cm = cluster['servers'][0]
# MGMT
mgmt_ip = cluster['servers'][0]['real_ip']
mgmt_port = cluster['servers'][0]['cm_port']
# Create cluster
conf_checker = default_cluster.initialize_starting_up_smr_before_redis( cluster )
self.assertIsNotNone(conf_checker, 'failed to initialize cluster')
util.check_cluster(cluster['cluster_name'], mgmt_ip, mgmt_port)
# Master must be the first pgs, cluster['servers'][0].
to_be_master = cluster['servers'][0]
m = util.get_server_by_role_and_pg(cluster['servers'], 'master', to_be_master['pg_id'])
master_id = -1
if m['id'] != to_be_master['id']:
try_cnt = 0
while master_id != to_be_master['id'] and try_cnt < 20:
master_id = util.role_change(cluster['servers'][0], cluster['cluster_name'], to_be_master['id'])
try_cnt += 1
time.sleep(1)
self.assertEquals(master_id, to_be_master['id'], 'failed to change %d to a master' % to_be_master['id'])
# Print initial state of cluster
util.log('\n\n\n ### INITIAL STATE OF CLUSTER ### ')
initial_state = []
self.assertTrue(util.check_cluster(cluster['cluster_name'], mgmt_ip, mgmt_port, initial_state, check_quorum=True), 'failed to check cluster state')
# Set SMR option (slave_idle_timeout)
util.log('\n\n\n ### Set SMR option ###')
for s in cluster['servers']:
t = telnet.Telnet('SMR%d' % s['id'])
self.assertEqual(t.connect(s['ip'], s['smr_mgmt_port']), 0,
'Failed to connect to smr. ADDR=%s:%d' % (s['ip'], s['smr_mgmt_port']))
cmd = 'confset slave_idle_timeout_msec 18000'
util.log('[%s:%d] >> %s' % (s['ip'], s['smr_mgmt_port'], cmd))
t.write('confset slave_idle_timeout_msec 18000\r\n')
reply = t.read_until('\r\n').strip()
util.log('[%s:%d] << %s' % (s['ip'], s['smr_mgmt_port'], reply))
self.assertEqual(reply, '+OK', 'Failed to set slave_idle_timeout, REPLY=%s' % reply)
# Network isolation test
for loop_cnt in range(3):
# Block network
util.log('\n\n\n ### BLOCK NETWORK, %d ### ' % loop_cnt)
for s in cluster['servers']:
self.assertTrue(util.iptables_drop('A', '127.0.0.100', s['smr_mgmt_port']), 'failed to add a blocking rule to iptables.')
for i in range(4):
util.log('waiting... %d' % (i + 1))
time.sleep(1)
# Check cluster state
ok = False
for i in range(7):
isolated_states = []
util.check_cluster(cluster['cluster_name'], mgmt_ip, mgmt_port, isolated_states, check_quorum=True)
time.sleep(1)
state_transition_done = True
for s in isolated_states:
if s['ip'] != '127.0.0.100':
continue
if s['active_role'] != '?' or s['mgmt_role'] != 'N':
state_transition_done = False
if state_transition_done :
ok = True
break
time.sleep(1)
self.assertTrue(ok, 'Fail, state transition')
pgs_list = util.get_pgs_info_list(mgmt_ip, mgmt_port, cluster)
reds = filter(lambda x: x['color'] == 'RED', pgs_list)
# Shutdown
server = cluster['servers'][random.choice(reds)['pgs_id']]
util.log( 'shutdown pgs%d while hanging.' % server['id'] )
ret = testbase.request_to_shutdown_smr( server )
self.assertEqual( ret, 0, 'failed to shutdown smr. id:%d' % server['id'] )
ret = testbase.request_to_shutdown_redis( server )
self.assertEqual( ret, 0, 'failed to shutdown redis. id:%d' % server['id'] )
# Check state F
max_try = 20
expected = 'F'
for i in range( 0, max_try):
util.log('MGMT_IP:%s, MGMT_PORT:%d' % (mgmt_ip, mgmt_port))
state = util._get_smr_state( server['id'], cluster['cluster_name'], mgmt_ip, mgmt_port )
if expected == state:
break
time.sleep( 1 )
self.assertEqual( expected , state,
'server%d - state:%s, expected:%s' % (server['id'], state, expected) )
util.log( 'succeeded : pgs%d state changed to F.' % server['id'] )
# Unblock network
for s in cluster['servers']:
self.assertTrue(util.iptables_drop('D', '127.0.0.100', s['smr_mgmt_port']), 'failed to delete a blocking rule from iptables.')
# Check cluster state
ok = False
for i in range(10):
final_state = []
util.check_cluster(cluster['cluster_name'], mgmt_ip, mgmt_port, final_state, check_quorum=True)
state_consistency = True
for s in final_state:
if s['pgs_id'] == server['id']:
continue
if is_pgs_normal(s) == False:
state_consistency = False
if state_consistency:
ok = True
break
time.sleep(1)
self.assertTrue(ok, 'Fail, state consistency')
# Recovery
util.log( 'restart pgs%d.' % server['id'] )
ret = testbase.request_to_start_smr( server )
self.assertEqual( ret, 0, 'failed to start smr. id:%d' % server['id'] )
ret = testbase.request_to_start_redis( server )
self.assertEqual( ret, 0, 'failed to start redis. id:%d' % server['id'] )
wait_count = 20
ret = testbase.wait_until_finished_to_set_up_role( server, wait_count )
self.assertEqual( ret, 0, 'failed to role change. smr_id:%d' % (server['id']) )
redis = redis_mgmt.Redis( server['id'] )
ret = redis.connect( server['ip'], server['redis_port'] )
self.assertEqual( ret, 0, 'failed to connect to redis' )
ok = False
for i in xrange(5):
ok = util.check_cluster(cluster['cluster_name'], mgmt_ip, mgmt_port, check_quorum=True)
if ok:
break
else:
time.sleep(1)
self.assertTrue(ok, 'failed to check cluster state')
# Reset SMR option (slave_idle_timeout)
t = telnet.Telnet('SMR%d' % server['id'])
self.assertEqual(t.connect(server['ip'], server['smr_mgmt_port']), 0,
'Failed to connect to smr. ADDR=%s:%d' % (server['ip'], server['smr_mgmt_port']))
cmd = 'confset slave_idle_timeout_msec 18000'
util.log('[%s:%d] >> %s' % (server['ip'], server['smr_mgmt_port'], cmd))
t.write('confset slave_idle_timeout_msec 18000\r\n')
reply = t.read_until('\r\n').strip()
util.log('[%s:%d] << %s' % (server['ip'], server['smr_mgmt_port'], reply))
self.assertEqual(reply, '+OK', 'Failed to set slave_idle_timeout, REPLY=%s' % reply)
# Check state
self.assertNotEqual(initial_state, None, 'initial_state is None')
self.assertNotEqual(final_state, None, 'final_state is None')
initial_state = sorted(initial_state, key=lambda x: int(x['pgs_id']))
final_state = sorted(final_state, key=lambda x: int(x['pgs_id']))
for i in range(len(final_state)):
msg = 'ts (%d)%d -> (%d)%d' % (initial_state[i]['pgs_id'], initial_state[i]['active_ts'], final_state[i]['pgs_id'], final_state[i]['active_ts'])
util.log(msg)
if initial_state[i]['pgs_id'] == 1:
self.assertNotEqual(initial_state[i]['active_ts'], final_state[i]['active_ts'], msg)
self.assertTrue(util.check_cluster(cluster['cluster_name'], mgmt_ip, mgmt_port, check_quorum=True), 'failed to check cluster state')
self.assertTrue(conf_checker.final_check())
# Shutdown cluster
default_cluster.finalize(cluster)
def test_5_mgmt_is_isolated_with_lconn(self):
util.print_frame()
util.iptables_print_list()
cluster = filter(lambda x: x['cluster_name'] == 'network_isolation_cluster_1', config.clusters)[0]
util.log(util.json_to_str(cluster))
self.leader_cm = cluster['servers'][0]
# MGMT
mgmt_ip = cluster['servers'][0]['real_ip']
mgmt_port = cluster['servers'][0]['cm_port']
# Create cluster
conf_checker = default_cluster.initialize_starting_up_smr_before_redis( cluster )
self.assertIsNotNone(conf_checker, 'failed to initialize cluster')
# Print initial state of cluster
util.log('\n\n\n ### INITIAL STATE OF CLUSTER ### ')
initial_state = []
self.assertTrue(util.check_cluster(cluster['cluster_name'], mgmt_ip, mgmt_port, initial_state, check_quorum=True), 'failed to check cluster state')
# Set SMR option (slave_idle_timeout)
util.log('\n\n\n ### Set SMR option ###')
for s in cluster['servers']:
t = telnet.Telnet('SMR%d' % s['id'])
self.assertEqual(t.connect(s['ip'], s['smr_mgmt_port']), 0,
'Failed to connect to smr. ADDR=%s:%d' % (s['ip'], s['smr_mgmt_port']))
cmd = 'confset slave_idle_timeout_msec 18000'
util.log('[%s:%d] >> %s' % (s['ip'], s['smr_mgmt_port'], cmd))
t.write('confset slave_idle_timeout_msec 18000\r\n')
reply = t.read_until('\r\n').strip()
util.log('[%s:%d] << %s' % (s['ip'], s['smr_mgmt_port'], reply))
self.assertEqual(reply, '+OK', 'Failed to set slave_idle_timeout, REPLY=%s' % reply)
# Network isolation test
for loop_cnt in range(3):
# Get master
master = util.get_server_by_role_and_pg( cluster['servers'], 'master', 0 )
first_slave = None
for s in cluster['servers']:
if s == master:
continue
# Skip non-virtual host
if s.has_key('real_ip') == False:
continue
if first_slave == None:
first_slave = s
# 'role lconn'
util.log( 'role lconn pgs%d while hanging.' % s['id'] )
ret = util.role_lconn_addr( s['real_ip'], s['smr_mgmt_port'] )
self.assertEqual( ret, '+OK\r\n', 'role lconn failed. reply="%s"' % (ret[:-2]) )
util.log( 'succeeded : cmd="role lconn", reply="%s"' % (ret[:-2]) )
time.sleep(0.5)
# Block network
util.log('\n\n\n ### BLOCK NETWORK, %d ### ' % loop_cnt)
self.assertTrue(util.iptables_drop('A', '127.0.0.100', first_slave['smr_mgmt_port']), 'failed to add a blocking rule to iptables.')
for i in range(6):
util.log('waiting... %d' % (i + 1))
time.sleep(1)
# Check cluster state
ok = False
for i in range(10):
isolated_states = []
util.check_cluster(cluster['cluster_name'], mgmt_ip, mgmt_port, isolated_states, check_quorum=True)
time.sleep(1)
state_transition_done = True
for s in isolated_states:
if s['pgs_id'] != first_slave['id']:
continue
if s['active_role'] != '?' or s['mgmt_role'] != 'N':
state_transition_done = False
if state_transition_done :
ok = True
break
time.sleep(1)
self.assertTrue(ok, 'Fail, state transition')
# Unblock network
self.assertTrue(util.iptables_drop('D', '127.0.0.100', first_slave['smr_mgmt_port']), 'failed to delete a blocking rule from iptables.')
# Check cluster state
ok = False
for i in range(7):
final_state = []
util.check_cluster(cluster['cluster_name'], mgmt_ip, mgmt_port, final_state, check_quorum=True)
state_consistency = True
for s in final_state:
if s['pgs_id'] == 1:
continue
if is_pgs_normal(s) == False:
state_consistency = False
if state_consistency:
ok = True
break
time.sleep(1)
self.assertTrue(ok, 'Fail, state consistency')
ok = False
for i in xrange(5):
ok = util.check_cluster(cluster['cluster_name'], mgmt_ip, mgmt_port, check_quorum=True)
if ok:
break
else:
time.sleep(1)
self.assertTrue(ok, 'failed to check cluster state')
# Check state
self.assertNotEqual(initial_state, None, 'initial_state is None')
self.assertNotEqual(final_state, None, 'final_state is None')
initial_state = sorted(initial_state, key=lambda x: int(x['pgs_id']))
final_state = sorted(final_state, key=lambda x: int(x['pgs_id']))
for i in range(len(final_state)):
msg = 'ts (%d)%d -> (%d)%d' % (initial_state[i]['pgs_id'], initial_state[i]['active_ts'], final_state[i]['pgs_id'], final_state[i]['active_ts'])
util.log(msg)
self.assertTrue(util.check_cluster(cluster['cluster_name'], mgmt_ip, mgmt_port, check_quorum=True), 'failed to check cluster state')
self.assertTrue(conf_checker.final_check())
# Shutdown cluster
default_cluster.finalize(cluster)
def test_6_repeat_isolation_and_no_opinion_linepay(self):
util.print_frame()
util.iptables_print_list()
# Add forwarding role
self.assertTrue(util.iptables_redirect('A', '127.0.0.100', '127.0.0.1'), 'failed to add a forwarding rule to iptables.')
self.assertTrue(util.iptables_redirect('A', '127.0.0.101', '127.0.0.1'), 'failed to add a forwarding rule to iptables.')
self.assertTrue(util.iptables_redirect('A', '127.0.0.102', '127.0.0.1'), 'failed to add a forwarding rule to iptables.')
cluster_name = 'no_opinion'
cluster = filter(lambda x: x['cluster_name'] == cluster_name, config.clusters)[0]
util.log(util.json_to_str(cluster))
self.leader_cm = cluster['servers'][0]
# MGMT
mgmt_ip = cluster['servers'][0]['real_ip']
mgmt_port = cluster['servers'][0]['cm_port']
# Create cluster
conf_checker = default_cluster.initialize_starting_up_smr_before_redis( cluster )
self.assertIsNotNone(conf_checker, 'failed to initialize cluster')
# Print initial state of cluster
util.log('\n\n\n ### INITIAL STATE OF CLUSTER ### ')
initial_state = []
self.assertTrue(util.check_cluster(cluster['cluster_name'], mgmt_ip, mgmt_port, initial_state, check_quorum=True), 'failed to check cluster state')
# Network isolation test
loop_cnt = 0
while (loop_cnt < 20):
loop_cnt += 1
# Block network
util.log('\n\n\n ### BLOCK NETWORK, %d ### ' % loop_cnt)
self.assertTrue(util.iptables_drop('A', '127.0.0.102'), 'failed to add a blocking rule to iptables.')
for i in range(1):
util.log('waiting... %d' % (i + 1))
time.sleep(0.1)
self.assertTrue(util.iptables_drop('A', '127.0.0.100'), 'failed to add a blocking rule to iptables.')
for i in range(3):
util.log('waiting... %d' % (i + 1))
time.sleep(1.2)
self.assertTrue(util.iptables_drop('A', '127.0.0.101'), 'failed to add a blocking rule to iptables.')
for i in range(1):
util.log('waiting... %d' % (i + 1))
time.sleep(1)
# Unblock network
util.log('\n\n\n ### UNBLOCK NETWORK, %d ### ' % loop_cnt)
self.assertTrue(util.iptables_drop('D', '127.0.0.102'), 'failed to delete a blocking rule from iptables.')
for i in range(0):
util.log('waiting... %d' % (i + 1))
time.sleep(1)
self.assertTrue(util.iptables_drop('D', '127.0.0.100'), 'failed to delete a blocking rule from iptables.')
for i in range(0):
util.log('waiting... %d' % (i + 1))
time.sleep(1)
self.assertTrue(util.iptables_drop('D', '127.0.0.101'), 'failed to delete a blocking rule from iptables.')
for i in range(3):
util.log('waiting... %d' % (i + 1))
time.sleep(1)
# Print state of cluster
util.log('\n ### STATE OF CLUSTER ### ')
cluster_state = False
for i in range(10):
cluster_state = util.check_cluster(cluster_name, mgmt_ip, mgmt_port, initial_state, check_quorum=True)
if cluster_state == True:
break
else:
time.sleep(1)
self.assertTrue(cluster_state, 'failed to check cluster state')
all_in_f = True
for s in cluster['servers']:
if checkLastState(mgmt_ip, s['cm_port'], cluster_name, 0, 'F') == False:
all_in_f = False
break
self.assertFalse(all_in_f, 'PGS0`s last state remains in F')
# Delete forwarding role
self.assertTrue(util.iptables_redirect('D', '127.0.0.100', '127.0.0.1'), 'delete a forwarding role to iptables fail.')
self.assertTrue(util.iptables_redirect('D', '127.0.0.101', '127.0.0.1'), 'delete a forwarding role to iptables fail.')
self.assertTrue(util.iptables_redirect('D', '127.0.0.102', '127.0.0.1'), 'delete a forwarding role to iptables fail.')
self.assertTrue(conf_checker.final_check())
# Shutdown cluster
default_cluster.finalize(cluster)

    def test_7_dirty_network_fi(self):
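        """Exercise confmaster fault injection while the network is unstable.

        For every fault-injection spec produced by ConfmasterWfFi, the test
        blocks the network, waits until the PGS at 127.0.0.100 is seen as
        isolated, arms the fault, unblocks the network, and then verifies
        that the cluster stabilizes, that the injected fault is actually
        triggered, and that the crc16 load generators stayed consistent.
        """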
        util.print_frame()
        clnts = []

        try:
            util.iptables_print_list()

            # Add forwarding rule
            self.assertTrue(util.iptables_redirect('A', '127.0.0.100', '127.0.0.1'), 'failed to add a forwarding rule to iptables.')

            cluster_name = 'network_isolation_cluster_1'
            cluster = filter(lambda x: x['cluster_name'] == cluster_name, config.clusters)[0]
            util.log(util.json_to_str(cluster))

            self.leader_cm = cluster['servers'][0]

            # MGMT
            mgmt_ip = cluster['servers'][0]['real_ip']
            mgmt_port = cluster['servers'][0]['cm_port']

            # Create cluster
            conf_checker = default_cluster.initialize_starting_up_smr_before_redis(
                    cluster, conf={'cm_context': 'applicationContext-fi.xml'})
            self.assertIsNotNone(conf_checker, 'failed to initialize cluster')

            # Print initial state of cluster
            util.log('\n\n\n ### INITIAL STATE OF CLUSTER ### ')
            initial_state = []
            self.assertTrue(util.check_cluster(cluster['cluster_name'], mgmt_ip, mgmt_port, initial_state, check_quorum=True), 'failed to check cluster state')

            # Start crc16 clients
            for s in cluster['servers']:
                c = load_generator_crc16.Crc16Client(s['id'], s['ip'], s['redis_port'], 600, verbose=False)
                c.start()
                clnts.append(c)

            # Network isolation test
            cmfi = fi_confmaster.ConfmasterWfFi(['ra', 'me', 'yj', 'bj', 'mg'],
                                                ['lconn', 'slave', 'master', 'setquorum'], [True, False], 1)
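
            # Each item yielded by ConfmasterWfFi is used as a fault-injection spec
            # below: fi[0] looks like a confmaster workflow id ('ra', 'me', 'yj',
            # 'bj', 'mg') and fi[1] like the step to fail ('lconn', 'slave',
            # 'master', 'setquorum'); the exact semantics live in fi_confmaster.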
            for fi in cmfi:
                # Block network
                util.log('\n\n\n ### BLOCK NETWORK, %s ### ' % str(fi))
                ret = block_network(cluster, mgmt_ip, mgmt_port)
                self.assertTrue(ret, '[%s] failed to block network.' % str(fi))

                for i in xrange(4):
                    util.log('waiting... %d' % (i + 1))
                    time.sleep(1)

                # Check cluster state
                ok = False
                for i in xrange(10):
                    isolated_states = []
                    util.check_cluster(cluster['cluster_name'], mgmt_ip, mgmt_port, isolated_states, check_quorum=True)

                    state_transition_done = True
                    for s in isolated_states:
                        if s['ip'] != '127.0.0.100':
                            continue
                        if s['active_role'] != '?' or s['mgmt_role'] != 'N':
                            state_transition_done = False

                    if state_transition_done:
                        ok = True
                        break
                    time.sleep(1)
                self.assertTrue(ok, 'Fail, state transition')

                # Fault injection
                try:
                    self.assertTrue(fi_confmaster.fi_add(fi, 1, mgmt_ip, mgmt_port),
                                    "Confmaster command fail. fi: %s" % str(fi))
                except ValueError as e:
                    self.fail("Confmaster command error. fi: \"%s\", error: \"%s\"" % (str(fi), str(e)))
                # Unblock network
                util.log('\n\n\n ### UNBLOCK NETWORK, %s ### ' % str(fi))
                ret = unblock_network(cluster, mgmt_ip, mgmt_port, None)
                self.assertTrue(ret, '[%s] failed to unblock network.' % str(fi))

                for i in xrange(4):
                    util.log('waiting... %d' % (i + 1))
                    time.sleep(1)

                # Check cluster state
                ok = False
                for i in xrange(10):
                    isolated_states = []
                    ok = util.check_cluster(cluster['cluster_name'], mgmt_ip, mgmt_port, isolated_states, check_quorum=True)
                    if ok:
                        break
                    time.sleep(1)
                self.assertTrue(ok, '[%s] Fail. unstable cluster.' % str(fi))

                check_cluster = False

                # 'bj', 'slave'
                if fi[0] == 'bj' and fi[1] == 'slave':
                    m, s1, s2 = util.get_mss(cluster)
                    ret = util.role_lconn(s1)
                    self.assertEqual("+OK\r\n", ret, '[%s] role lconn fail.' % str(fi))
                    check_cluster = True
                # 'me', 'lconn'
                elif fi[0] == 'me':
                    m, s1, s2 = util.get_mss(cluster)
                    ret = util.role_lconn(m)
                    self.assertEqual("+OK\r\n", ret, '[%s] role lconn fail.' % str(fi))
                    check_cluster = True
                # 'setquorum'
                elif fi[1] == 'setquorum':
                    m, s1, s2 = util.get_mss(cluster)
                    ret = util.cmd_to_smr_addr(s1['ip'], s1['smr_mgmt_port'], 'fi delay sleep 1 8000\r\n', timeout=20)
                    self.assertEqual("+OK\r\n", ret, '[%s] "fi delay sleep 1 8000" fail. ret: "%s"' % (str(fi), ret))
                    check_cluster = True

                if check_cluster:
                    # Check cluster state
                    ok = False
                    for i in xrange(20):
                        isolated_states = []
                        ok = util.check_cluster(cluster['cluster_name'], mgmt_ip, mgmt_port, isolated_states, check_quorum=True)
                        if ok:
                            break
                        time.sleep(1)
                    self.assertTrue(ok, '[%s] Fail. unstable cluster.' % str(fi))

                # Check fault injection
                ok = False
                for i in xrange(10):
                    count = fi_confmaster.fi_count(fi, mgmt_ip, mgmt_port)
                    if count == 0:
                        ok = True
                        break
                    time.sleep(0.5)
                self.assertTrue(ok, '[%s] fail. fault injection has not been triggered.' % str(fi))

                for c in clnts:
                    self.assertTrue(c.is_consistency(), '[%s] data consistency error!' % str(fi))

            for c in clnts:
                self.assertTrue(c.is_consistency(), '[%s] data consistency error!' % str(fi))

            # Go back to initial configuration
            cmfi.init()
            for fi in cmfi:
                try:
                    self.assertTrue(fi_confmaster.fi_add(fi, 0, mgmt_ip, mgmt_port),
                                    "Confmaster command fail. fi: %s" % str(fi))
                except ValueError as e:
                    self.fail("Confmaster command error. fi: \"%s\", error: \"%s\"" % (str(fi), str(e)))

            # Wait until workflows are done
            ret = util.await(60, True)(
                lambda cinfo: cinfo['wf'] == 0,
                lambda: util.cluster_info(mgmt_ip, mgmt_port, cluster['cluster_name']))
            self.assertTrue(ret, 'There are still some workflows.')

            self.assertTrue(conf_checker.final_check())

            # Shutdown cluster
            default_cluster.finalize(cluster)
        finally:
            for c in clnts:
                c.quit()
            for c in clnts:
                c.join()

            # Delete forwarding rule
            self.assertTrue(util.iptables_redirect('D', '127.0.0.100', '127.0.0.1'), 'failed to delete the forwarding rule from iptables.')


def checkLastState(mgmt_ip, mgmt_port, cluster_name, pgs_id, state):
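    """Return True if the confmaster at (mgmt_ip, mgmt_port) reports `state`
    as the last heartbeat state of PGS `pgs_id` in `cluster_name`."""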
    pgs = util.get_pgs_info_all(mgmt_ip, mgmt_port, cluster_name, pgs_id)
    util.log('PGS:%d, LastState:%s, LastTimestamp:%d' % (pgs_id, pgs['HBC']['lastState'], pgs['HBC']['lastStateTimestamp']))
    return state == pgs['HBC']['lastState']