"""
MoinMoin - ReStructured Text Parser
@copyright: 2004 Matthew Gilbert <gilbert AT voxmea DOT net>,
2004 Alexander Schremmer <alex AT alexanderweb DOT de>
@license: GNU GPL, see COPYING for details.
REQUIRES docutils 0.3.10 or later (must be later than December 30th, 2005)
"""
import re
import new
import StringIO
import __builtin__
import sys
# docutils imports are below
from MoinMoin.parser.text_moin_wiki import Parser as WikiParser
from MoinMoin.Page import Page
from MoinMoin.action import AttachFile
from MoinMoin import wikiutil
Dependencies = [] # this parser just depends on the raw text
# --- make docutils safe by overriding all module-scoped names related to IO ---
# TODO: Add an error message to dummyOpen so that the user knows that
# they requested an unsupported feature of Docutils in MoinMoin.
def dummyOpen(x, y=None, z=None): return
class dummyIO(StringIO.StringIO):
def __init__(self, destination=None, destination_path=None,
encoding=None, error_handler='', autoclose=1,
handle_io_errors=1, source_path=None):
StringIO.StringIO.__init__(self)
class dummyUrllib2:
def urlopen(a):
return StringIO.StringIO()
urlopen = staticmethod(urlopen)
# # # All docutils imports must be contained below here
try:
import docutils
from docutils.core import publish_parts
from docutils.writers import html4css1
from docutils.nodes import reference
from docutils.parsers import rst
from docutils.parsers.rst import directives, roles
# # # All docutils imports must be contained above here
ErrorParser = None # used in the case of missing docutils
docutils.io.FileOutput = docutils.io.FileInput = dummyIO
except ImportError:
# we need to work around this totally broken plugin interface that does
# not allow us to raise exceptions
class ErrorParser:
caching = 0
Dependencies = Dependencies # copy dependencies from module-scope
def __init__(self, raw, request, **kw):
self.raw = raw
self.request = request
def format(self, formatter, **kw):
_ = self.request.getText
from MoinMoin.parser.text import Parser as TextParser
self.request.write(formatter.sysmsg(1) +
formatter.rawHTML(_('Rendering of reStructured text is not possible, please install Docutils.')) +
formatter.sysmsg(0))
TextParser(self.raw, self.request).format(formatter)
# Create a pseudo docutils environment
docutils = html4css1 = dummyUrllib2()
html4css1.HTMLTranslator = html4css1.Writer = object
def safe_import(name, globals = None, locals = None, fromlist = None, level = -1):
mod = __builtin__.__import__(name, globals, locals, fromlist, level)
if mod:
mod.open = dummyOpen
mod.urllib2 = dummyUrllib2
return mod
# Go through and change all docutils modules to use a dummyOpen and dummyUrllib2
# module. Also make sure that any docutils imported modules also get the dummy
# implementations.
for i in sys.modules.keys():
if i.startswith('docutils') and sys.modules[i]:
sys.modules[i].open = dummyOpen
sys.modules[i].urllib2 = dummyUrllib2
sys.modules[i].__import__ = safe_import
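# Net effect (illustrative note, not original code): any docutils-internal
# call such as open(path) or urllib2.urlopen(url) now hits the dummy
# implementations above and returns an empty buffer, so reST documents
# cannot make the server read arbitrary files or URLs.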
# --- End of dummy-code --------------------------------------------------------
def html_escape_unicode(node):
# Find a Python function that does this for me. string.encode('ascii',
# 'xmlcharrefreplace') is only available in 2.3 and above.
for i in node:
if ord(i) > 127:
node = node.replace(i, '&#%d;' % (ord(i)))
return node
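# Illustrative behaviour of html_escape_unicode (assumed example, not part
# of the original source): non-ASCII characters become numeric character
# references while ASCII passes through unchanged, e.g.
#   html_escape_unicode(u'na\xefve')  ->  u'na&#239;ve'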
class MoinWriter(html4css1.Writer):
config_section = 'MoinMoin writer'
config_section_dependencies = ('writers', )
#"""Final translated form of `document`."""
output = None
def wiki_resolver(self, node):
"""
Normally an unknown reference would be an error in a reST document.
However, this is how new documents are created in the wiki. This
passes on unknown references to eventually be handled by
MoinMoin.
"""
if hasattr(node, 'indirect_reference_name'):
node['refuri'] = node.indirect_reference_name
elif (len(node['ids']) != 0):
# If the node has an id then it's probably an internal link. Let
# docutils generate an error.
return False
elif node.hasattr('name'):
node['refuri'] = node['name']
else:
node['refuri'] = node['refname']
del node['refname']
node.resolved = 1
self.nodes.append(node)
return True
wiki_resolver.priority = 1
def __init__(self, formatter, request):
html4css1.Writer.__init__(self)
self.formatter = formatter
self.request = request
# Add our wiki unknown_reference_resolver to our list of functions to
# run when a target isn't found
self.unknown_reference_resolvers = [self.wiki_resolver]
# We create a new parser to process MoinMoin wiki style links in the
# reST.
self.wikiparser = WikiParser('', self.request)
self.wikiparser.formatter = self.formatter
self.wikiparser.hilite_re = None
self.nodes = []
# Make sure it's a supported docutils version.
required_version = (0, 3, 10)
current_version = tuple([int(i) for i in (docutils.__version__.split('.') + ['0', '0'])[:3]])
if current_version < required_version:
err = 'ERROR: The installed docutils version is %s;' % ('.'.join([str(i) for i in current_version]))
err += ' version %s or later is required.' % ('.'.join([str(i) for i in required_version]))
raise RuntimeError(err)
def translate(self):
visitor = MoinTranslator(self.document,
self.formatter,
self.request,
self.wikiparser,
self)
self.document.walkabout(visitor)
self.visitor = visitor
# Docutils 0.5 and later require the writer to have the visitor
# attributes.
if (hasattr(html4css1.Writer, 'visitor_attributes')):
for attr in html4css1.Writer.visitor_attributes:
setattr(self, attr, getattr(visitor, attr))
self.output = html_escape_unicode(visitor.astext())
# mark quickhelp as translatable
_ = lambda x: x
class Parser:
caching = 1
Dependencies = Dependencies # copy dependencies from module-scope
extensions = ['.rst', '.rest', ]
quickhelp = _("""\
{{{
Emphasis: *italic* **bold** ``monospace``
Headings: Heading 1 Heading 2 Heading 3
========= --------- ~~~~~~~~~
Horizontal rule: ----
Links: TrailingUnderscore_ `multi word with backticks`_ external_
.. _external: http://external-site.example.org/foo/
Lists: * bullets; 1., a. numbered items.
}}}
(!) For more help, see the
[[http://docutils.sourceforge.net/docs/user/rst/quickref.html|reStructuredText Quick Reference]].
""")
def __init__(self, raw, request, **kw):
self.raw = raw
self.request = request
self.form = request.form
def format(self, formatter, **kw):
# Create our simple parser
parser = MoinDirectives(self.request)
parts = publish_parts(
source=self.raw,
writer=MoinWriter(formatter, self.request),
settings_overrides={
'halt_level': 5,
'traceback': True,
'file_insertion_enabled': 0,
'raw_enabled': 0,
'stylesheet_path': '',
'template': '',
}
)
html = []
if parts['title']:
# Document title.
html.append(formatter.rawHTML('<h1>%s</h1>' % parts['title']))
# If there is only one subtitle it is propagated by Docutils
# to a document subtitle and is held in parts['subtitle'].
# However, if there is more than one subtitle then this is
# empty and fragment contains all of the subtitles.
if parts['subtitle']:
html.append(formatter.rawHTML('<h2>%s</h2>' % parts['subtitle']))
if parts['docinfo']:
html.append(parts['docinfo'])
html.append(parts['fragment'])
self.request.write(html_escape_unicode('\n'.join(html)))
class RawHTMLList(list):
"""
RawHTMLList catches all html appended to internal HTMLTranslator lists.
It passes the HTML through the MoinMoin rawHTML formatter to strip
markup when necessary. This is to support other formatting outputs
(such as ?action=show&mimetype=text/plain).
"""
def __init__(self, formatter):
self.formatter = formatter
def append(self, text):
f = sys._getframe()
if f.f_back.f_code.co_filename.endswith('html4css1.py'):
if isinstance(text, (str, unicode)):
text = self.formatter.rawHTML(text)
list.append(self, text)
class MoinTranslator(html4css1.HTMLTranslator):
def __init__(self, document, formatter, request, parser, writer):
html4css1.HTMLTranslator.__init__(self, document)
self.formatter = formatter
self.request = request
# Using our own writer when needed. Save the old one to restore
# after the page has been processed by the html4css1 parser.
self.original_write, self.request.write = self.request.write, self.capture_wiki_formatting
self.wikiparser = parser
self.wikiparser.request = request
# MoinMoin likes to start the initial headers at level 3 and the title
# gets level 2, so to comply with their styles, we do here also.
# TODO: Could this be fixed by passing this value in settings_overrides?
self.initial_header_level = 3
# Temporary place for wiki returned markup. This will be filled when
# replacing the default writer with the capture_wiki_formatting
# function (see visit_image for an example).
self.wiki_text = ''
self.setup_wiki_handlers()
self.setup_admonitions_handlers()
# Make all internal lists RawHTMLLists, see RawHTMLList class
# comment for more information.
for i in self.__dict__:
if isinstance(getattr(self, i), list):
setattr(self, i, RawHTMLList(formatter))
def depart_docinfo(self, node):
"""
depart_docinfo assigns a new list to self.body, we need to re-make that
into a RawHTMLList.
"""
html4css1.HTMLTranslator.depart_docinfo(self, node)
self.body = RawHTMLList(self.formatter)
def capture_wiki_formatting(self, text):
"""
Captures MoinMoin generated markup to the instance variable
wiki_text.
"""
# For some reason we get empty strings here, which would of course
# overwrite what we really want (this is called multiple times per
# MoinMoin format call, which I don't understand).
self.wiki_text += text
def process_wiki_text(self, text):
"""
This sequence is repeated numerous times, so it's captured as a
single call here. It's important that wiki_text is blanked before we
make the format call. format will call request.write, which we've
hooked to capture_wiki_formatting. If wiki_text is not blanked
before a call to request.write, we will get the old markup as well as
the newly generated markup.
TODO: Could implement this as a list so that it acts as a stack. I
don't like having to remember to blank wiki_text.
"""
self.wiki_text = ''
self.wikiparser.raw = text
self.wikiparser.format(self.formatter)
def add_wiki_markup(self):
"""
Placeholder in case this becomes more elaborate someday. For now it
only appends the MoinMoin-generated markup to the html body and
raises SkipNode.
"""
self.body.append(self.wiki_text)
self.wiki_text = ''
raise docutils.nodes.SkipNode
def astext(self):
self.request.write = self.original_write
return html4css1.HTMLTranslator.astext(self)
def fixup_wiki_formatting(self, text):
replacement = {'\n': '', '> ': '>'}
for src, dst in replacement.items():
text = text.replace(src, dst)
# Fixup extraneous markup
# Removes any empty span tags
text = re.sub(r'\s*<\s*span.*?>\s*<\s*/\s*span\s*>', '', text)
# Removes the first paragraph tag
text = re.sub(r'^\s*<\s*p[^>]*?>', '', text)
# Removes the ending paragraph close tag and any remaining whitespace
text = re.sub(r'<\s*/\s*p\s*>\s*$', '', text)
return text
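# Illustrative behaviour of fixup_wiki_formatting (assumed example, not
# from the original source): newlines, empty <span></span> pairs, and the
# outermost <p>...</p> wrapper emitted by the wiki formatter are stripped,
# e.g. '<p>FrontPage</p>\n'  ->  'FrontPage'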
def visit_reference(self, node):
"""
Pass links to MoinMoin to get the correct wiki space url. Extract
the url and pass it on to the html4css1 writer to handle. Inline
images are also handled by visit_image. Not sure what the "drawing:"
link scheme is used for, so for now it is handled here.
Also included here is a hack to allow MoinMoin macros. This routine
checks for a link which starts with "<<". This link is passed to the
MoinMoin formatter and the resulting markup is inserted into the
document in the place of the original link reference.
"""
if 'refuri' in node.attributes:
refuri = node['refuri']
prefix = ''
link = refuri
if ':' in refuri:
prefix, link = refuri.lstrip().split(':', 1)
# First see if MoinMoin should handle completely. Exits through add_wiki_markup.
if refuri.startswith('<<') and refuri.endswith('>>'): # moin macro
self.process_wiki_text(refuri)
self.wiki_text = self.fixup_wiki_formatting(self.wiki_text)
self.add_wiki_markup()
if prefix == 'drawing':
self.process_wiki_text("[[%s]]" % refuri)
self.wiki_text = self.fixup_wiki_formatting(self.wiki_text)
self.add_wiki_markup()
# From here down, all links are handled by docutils (except
# missing attachments), just fixup node['refuri'].
if prefix == 'attachment':
if not AttachFile.exists(self.request, self.request.page.page_name, link):
# Attachment doesn't exist, give to MoinMoin to insert upload text.
self.process_wiki_text("[[%s]]" % refuri)
self.wiki_text = self.fixup_wiki_formatting(self.wiki_text)
self.add_wiki_markup()
# Attachment exists, just get a link to it.
node['refuri'] = AttachFile.getAttachUrl(self.request.page.page_name, link, self.request)
if not [i for i in node.children if i.__class__ == docutils.nodes.image]:
node['classes'].append(prefix)
elif prefix == 'wiki':
wiki_name, page_name = wikiutil.split_interwiki(link)
wikitag, wikiurl, wikitail, err = wikiutil.resolve_interwiki(self.request, wiki_name, page_name)
wikiurl = wikiutil.mapURL(self.request, wikiurl)
node['refuri'] = wikiutil.join_wiki(wikiurl, wikitail)
# Only add additional class information if the reference does
# not have a child image (don't want to add additional markup
# for images with targets).
if not [i for i in node.children if i.__class__ == docutils.nodes.image]:
node['classes'].append('interwiki')
elif prefix == 'javascript':
# is someone trying to do XSS with javascript?
node['refuri'] = 'javascript:alert("it does not work")'
elif prefix != '':
# Some link scheme (http, file, https, mailto, etc.), add class
# information if the reference doesn't have a child image (don't
# want additional markup for images with targets).
# Don't touch the refuri.
if not [i for i in node.children if i.__class__ == docutils.nodes.image]:
node['classes'].append(prefix)
else:
# Default case - make a link to a wiki page.
pagename, anchor = wikiutil.split_anchor(refuri)
page = Page(self.request, wikiutil.AbsPageName(self.formatter.page.page_name, pagename))
node['refuri'] = page.url(self.request, anchor=anchor)
if not page.exists():
node['classes'].append('nonexistent')
html4css1.HTMLTranslator.visit_reference(self, node)
def visit_image(self, node):
"""
Need to intervene in the case of inline images. We need MoinMoin to
give us the actual src line to the image and then we can feed this
to the default html4css1 writer. NOTE: Since the writer can't "open"
this image the scale attribute doesn't work without directly
specifying the height or width (or both).
TODO: Need to handle figures similarly.
"""
uri = node['uri'].lstrip()
prefix = '' # assume no prefix
attach_name = uri
if ':' in uri:
prefix = uri.split(':', 1)[0]
attach_name = uri.split(':', 1)[1]
# if prefix isn't URL, try to display in page
if not prefix.lower() in ('file', 'http', 'https', 'ftp'):
if not AttachFile.exists(self.request, self.request.page.page_name, attach_name):
# Attachment doesn't exist, MoinMoin should process it
if prefix == '':
prefix = 'attachment:'
self.process_wiki_text("{{%s%s}}" % (prefix, attach_name))
self.wiki_text = self.fixup_wiki_formatting(self.wiki_text)
self.add_wiki_markup()
# Attachment exists, get a link to it.
# create the url
node['uri'] = AttachFile.getAttachUrl(self.request.page.page_name, attach_name, self.request, addts=1)
if not node.hasattr('alt'):
node['alt'] = node.get('name', uri)
html4css1.HTMLTranslator.visit_image(self, node)
def create_wiki_functor(self, moin_func):
moin_callable = getattr(self.formatter, moin_func)
def visit_func(self, node):
self.wiki_text = ''
self.request.write(moin_callable(1))
self.body.append(self.wiki_text)
def depart_func(self, node):
self.wiki_text = ''
self.request.write(moin_callable(0))
self.body.append(self.wiki_text)
return visit_func, depart_func
def setup_wiki_handlers(self):
"""
Have the MoinMoin formatter handle markup when it makes sense. These
are portions of the document that do not contain reST specific
markup. This allows these portions of the document to look
consistent with other wiki pages.
Setup dispatch routines to handle basic document markup. The
handlers dict maps the html4css1 handler name to the corresponding
wiki handler name.
"""
handlers = {
# Text Markup
'emphasis': 'emphasis',
'strong': 'strong',
'literal': 'code',
# Blocks
'literal_block': 'preformatted',
# Simple Lists
# bullet-lists are handled completely by docutils because it uses
# the node context to decide when to make a compact list
# (no <p> tags).
'list_item': 'listitem',
# Definition List
'definition_list': 'definition_list',
}
for rest_func, moin_func in handlers.items():
visit_func, depart_func = self.create_wiki_functor(moin_func)
visit_func = new.instancemethod(visit_func, self, MoinTranslator)
depart_func = new.instancemethod(depart_func, self, MoinTranslator)
setattr(self, 'visit_%s' % (rest_func), visit_func)
setattr(self, 'depart_%s' % (rest_func), depart_func)
# Enumerated list takes an extra parameter, so we handle it differently
def visit_enumerated_list(self, node):
self.wiki_text = ''
self.request.write(self.formatter.number_list(1, start=node.get('start', None)))
self.body.append(self.wiki_text)
def depart_enumerated_list(self, node):
self.wiki_text = ''
self.request.write(self.formatter.number_list(0))
self.body.append(self.wiki_text)
# Admonitions are handled here -=- tmacam
def create_admonition_functor(self, admonition_class):
def visit_func(self, node):
self.wiki_text = ''
self.request.write(self.formatter.div(1,
attr={'class': admonition_class},
allowed_attrs=[]))
self.body.append(self.wiki_text)
def depart_func(self, node):
self.wiki_text = ''
self.request.write(self.formatter.div(0))
self.body.append(self.wiki_text)
return visit_func, depart_func
def setup_admonitions_handlers(self):
"""
Admonitions are handled here... We basically surround admonitions
in a div whose class is the name of the admonition.
"""
handled_admonitions = [
'attention',
'caution',
'danger',
'error',
'hint',
'important',
'note',
'tip',
'warning',
]
for adm in handled_admonitions:
visit_func, depart_func = self.create_admonition_functor(adm)
visit_func = new.instancemethod(visit_func, self, MoinTranslator)
depart_func = new.instancemethod(depart_func, self, MoinTranslator)
setattr(self, 'visit_%s' % (adm), visit_func)
setattr(self, 'depart_%s' % (adm), depart_func)
class MoinDirectives:
"""
Class to handle all custom directive handling. This code is called as
part of the parsing stage.
"""
def __init__(self, request):
self.request = request
# include MoinMoin pages
directives.register_directive('include', self.include)
# used for MoinMoin macros
directives.register_directive('macro', self.macro)
# disallow a few directives in order to prevent XSS
# for directive in ('meta', 'include', 'raw'):
for directive in ('meta', 'raw'):
directives.register_directive(directive, None)
# disable the raw role
roles._roles['raw'] = None
# As a quick fix for infinite includes we only allow a fixed number of
# includes per page
self.num_includes = 0
self.max_includes = 10
# Handle the include directive rather than letting the default docutils
# parser handle it. This allows the inclusion of MoinMoin pages instead of
# something from the filesystem.
def include(self, name, arguments, options, content, lineno,
content_offset, block_text, state, state_machine):
# content contains the included file name
_ = self.request.getText
# Limit the number of documents that can be included
if self.num_includes < self.max_includes:
self.num_includes += 1
else:
lines = [_("**Maximum number of allowed includes exceeded**")]
state_machine.insert_input(lines, 'MoinDirectives')
return []
if len(content):
pagename = content[0]
page = Page(page_name=pagename, request=self.request)
if not self.request.user.may.read(pagename):
lines = [_("**You are not allowed to read the page: %s**") % (pagename, )]
else:
if page.exists():
text = page.get_raw_body()
lines = text.split('\n')
# Remove the "#format rst" line
if lines[0].startswith("#format"):
del lines[0]
else:
lines = [_("**Could not find the referenced page: %s**") % (pagename, )]
# Insert the text from the included document and then continue parsing
state_machine.insert_input(lines, 'MoinDirectives')
return []
include.has_content = include.content = True
include.option_spec = {}
include.required_arguments = 1
include.optional_arguments = 0
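# Example reST usage of the include directive above (illustrative, page
# name assumed): the raw body of the named wiki page is spliced into the
# current parse, subject to the read ACL and the ten-include limit.
#   .. include:: FrontPage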
# Add additional macro directive.
# This allows MoinMoin macros to be used either by using the directive
# directly or by using the substitution syntax. Much cleaner than using the
# reference hack (`<<SomeMacro>>`_). This however simply adds a node to the
# document tree which is a reference, but through a much better user
# interface.
def macro(self, name, arguments, options, content, lineno,
content_offset, block_text, state, state_machine):
# content contains macro to be called
if len(content):
# Allow either with or without brackets
if content[0].startswith('<<'):
macro = content[0]
else:
macro = '<<%s>>' % content[0]
ref = reference(macro, refuri=macro)
ref['name'] = macro
return [ref]
return []
macro.has_content = macro.content = True
macro.option_spec = {}
macro.required_arguments = 1
macro.optional_arguments = 0
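# Example reST usage of the macro directive above (illustrative): both
# forms produce a reference node that visit_reference later hands to the
# MoinMoin formatter for expansion.
#   .. macro:: TableOfContents
#   .. macro:: <<TableOfContents>>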
if ErrorParser: # fixup in case of missing docutils
Parser = ErrorParser
del _
|
{
"content_hash": "3f9a5e817168842a6c49dc814b920d85",
"timestamp": "",
"source": "github",
"line_count": 635,
"max_line_length": 129,
"avg_line_length": 41.344881889763776,
"alnum_prop": 0.5939666336558239,
"repo_name": "RealTimeWeb/wikisite",
"id": "6851dc22c9d708af948e1f9cd2cf47338161250e",
"size": "26283",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "MoinMoin/parser/text_rst.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "49395"
},
{
"name": "CSS",
"bytes": "204104"
},
{
"name": "ColdFusion",
"bytes": "142312"
},
{
"name": "Java",
"bytes": "491798"
},
{
"name": "JavaScript",
"bytes": "2107106"
},
{
"name": "Lasso",
"bytes": "23464"
},
{
"name": "Makefile",
"bytes": "4950"
},
{
"name": "PHP",
"bytes": "144585"
},
{
"name": "Perl",
"bytes": "44627"
},
{
"name": "Python",
"bytes": "7647140"
},
{
"name": "Shell",
"bytes": "335"
}
],
"symlink_target": ""
}
|
"""Simple script for xvfb_unittest to launch.
This script outputs formatted data to stdout for the xvfb unit tests
to read and compare with expected output.
"""
from __future__ import print_function
import os
import signal
import sys
import time
def print_signal(sig, *_):
# print_function does not guarantee its output won't be interleaved
# with other logging elsewhere, but it does guarantee its output
# will appear intact. Because the tests parse via starts_with, prefix
# with a newline. These tests were previously flaky due to output like
# > Signal: 1 <other messages>.
print('\nSignal :{}'.format(sig))
if __name__ == '__main__':
signal.signal(signal.SIGTERM, print_signal)
signal.signal(signal.SIGINT, print_signal)
# test the subprocess display number.
print('\nDisplay :{}'.format(os.environ.get('DISPLAY', 'None')))
if len(sys.argv) > 1 and sys.argv[1] == '--sleep':
time.sleep(2) # gives process time to receive signal.
|
{
"content_hash": "9a196d9fdfd4aedf93c8b86d1c672e98",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 72,
"avg_line_length": 30.34375,
"alnum_prop": 0.713697219361483,
"repo_name": "nwjs/chromium.src",
"id": "4feb330a98f05ee846ec09c72e3556354a81deb6",
"size": "1134",
"binary": false,
"copies": "7",
"ref": "refs/heads/nw70",
"path": "testing/xvfb_test_script.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
import auth
from novaclient.client import Client
import novaclient
import time
def createServer(name):
print "Enter createServer"
nova_credentials = auth.get_nova_credentials_v2()
nova = Client(**nova_credentials)
serversList = nova.servers.list()
print("List of VMs: %s" % serversList)
for s in serversList:
if s.name == name:
print "server %s exists" % name
exist = True
break
else:
print "server %s does not exist" % name
exist = False
if (not exist):
image = nova.images.find(name="TestVM")
flavor = nova.flavors.find(name="VRCS")
net = nova.networks.find(label="KI10_rcs_oam")
nics = [{'net-id': net.id}]
print "creating server %s" % name
instance = nova.servers.create(name=name, image=image, flavor=flavor, nics=nics)
print("Sleeping for 5s after create command")
time.sleep(5)
print("List of VMs: %s" % nova.servers.list())
print "Return createServer"
def deleteServer(name):
print "Enter deleteServer"
nova_credentials = auth.get_nova_credentials_v2()
nova = Client(**nova_credentials)
serversList = nova.servers.list()
print("List of VMs: %s" % serversList)
for s in serversList:
if s.name == name:
print("The server %s exists. Delete it" % name)
nova.servers.delete(s)
else:
print ("server %s does not exist" % name)
# time.sleep(10)
# print("List of VMs: %s" % nova.servers.list())
print "Return deleteServer"
|
{
"content_hash": "3456c87ad81de5c1231d795b8ae7ec57",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 88,
"avg_line_length": 30.71698113207547,
"alnum_prop": 0.5927518427518428,
"repo_name": "Will1229/learnOpenstack",
"id": "962469a5683439d8a11a23024ff66d0ce0835fe2",
"size": "1628",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "testServer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "4043"
}
],
"symlink_target": ""
}
|
"""Required modules"""
import re
import csv
import sys
import numpy as np
import scipy.io as sio
import xlrd
import numexpr as ne
DATE = xlrd.XL_CELL_DATE
TEXT = xlrd.XL_CELL_TEXT
BLANK = xlrd.XL_CELL_BLANK
EMPTY = xlrd.XL_CELL_EMPTY
ERROR = xlrd.XL_CELL_ERROR
NUMBER = xlrd.XL_CELL_NUMBER
def read_excel(filename, sheet=None):
"""Read sheet data or sheet names from an Excel workbook into a
:class:`Spreadsheet`.
:example:
sheet_names = read_excel('parameter.xlsx') # returns a list of sheet names
:example:
spreadsheet = read_excel('parameter.xlsx', 0) # read the first sheet
:example:
spreadsheet = read_excel('parameter.xls', 'sheet_2') # load 'sheet_2'
:param filename: name of the Excel workbook to import
:param sheet: spreadsheet name or index to import
:type filename: string
:type sheet: string or integer or None
:return: sheet names if sheet is None, otherwise sheet data
:rtype: list of strings if sheet is None, otherwise :class:`Spreadsheet`"""
book = xlrd.open_workbook(filename)
spreadsheet = Spreadsheet()
if sheet is None:
return book.sheet_names()
elif isinstance(sheet, int):
xl_sheet = book.sheet_by_index(sheet)
spreadsheet.set_data(xl_sheet.get_rows())
return spreadsheet
else:
xl_sheet = book.sheet_by_name(sheet)
spreadsheet.set_data(xl_sheet.get_rows())
return spreadsheet
def loadtxt(filename, dtype='float', comments='#', delimiter=None, skiprows=0,
usecols=None, unpack=False):
"""Load ascii files into a numpy ndarray using numpy.loadtxt."""
return np.loadtxt(
filename, dtype, comments, delimiter,
None, skiprows, usecols, unpack)
def load(file, mmap_mode=None, allow_pickle=True, fix_imports=True,
encoding='ASCII'):
"""Load numpy .npy and .npz files to an array or map of arrays
respectively using np.load"""
return np.load(file, mmap_mode, allow_pickle, fix_imports, encoding)
def read_csv(filename, start=1, stop=None, assume=TEXT):
"""Read a csv file into a :class:`Spreadsheet`
:example:
sheet = read_csv('parameters.csv', start=9, assume=NUMBER)
:param filename: name of the file to read
:param start: row to start reading
:param stop: row to stop reading
:param assume: type of data to assume
:type filename: string
:type start: integer
:type stop: integer
:type assume: integer
:return: spreadsheet data
:rtype: :class:`Spreadsheet`"""
values = []
spreadsheet = Spreadsheet(assume)
with open(filename) as csvfile:
reader = csv.reader(csvfile)
for row in reader:
values.append(row)
if stop is None:
stop = len(values)
values = values[start-1:stop]
spreadsheet.set_values(values)
return spreadsheet
def load_mat(filename, variable):
"""Read the variable from filename
:example:
sheet = read_mat("parameter.mat", "cse")
:param filename: name of the .mat file to read
:param variable: variable to load
:type filename: string
:type variable: string
:return: variable data
:rtype: array"""
contents = sio.loadmat(filename)
return contents[variable]
def load_section(sheet, row_range=None, col_range=None):
"""Read a 'chunk' of data from a spreadsheet.
Given a selection of rows and columns, this function will return the
intersection of the two ranges. Note that the minimum value for each range
is 1.
:example:
spreadsheet = read_excel('parameters.xlsx', 'Parameters')
cell_data = load_section(
spreadsheet, [1, 3, 5], range(7, 42))
:param sheet: spreadsheet data
:param row_range: selected rows
:param col_range: selected columns
:type sheet: :class:`Spreadsheet`
:type row_range: list of integers or integer
:type col_range: list of integers or integer
:return: section of sheet data
:rtype: array if assume=NUMBER else list"""
if row_range is None:
row_range = range(1, len(sheet.values)+1)
if col_range is None:
col_range = range(1, len(sheet.values[0])+1)
if isinstance(row_range, int):
row_range = [row_range]
if isinstance(col_range, int):
col_range = [col_range]
rval = [[sheet.cell(x-1, y-1) for y in col_range] for x in row_range]
if sheet.assume == NUMBER:
return np.array(
[[cell.value for cell in row] for row in rval],
dtype='float')
return rval
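# Illustrative call (assumed data, not original code): with a sheet whose
# values form a 3x3 grid of numbers and assume=NUMBER,
#   load_section(sheet, [1, 3], [2])
# returns a 2x1 float array holding the middle column of rows 1 and 3.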
def _multiple_replace(repl, text):
"""Replace multiple regex expressions
:param repl: dictionary of values to replace
:param text: text to perform regex on
:type repl: dict
:type text: string
:return: processed text
:rtype: string"""
# Create a regular expression from the dictionary keys
regex = re.compile("(%s)" % "|".join(map(re.escape, repl.keys())))
# For each match, look-up corresponding value in dictionary
return regex.sub(lambda mo: repl[mo.string[mo.start():mo.end()]], text)
def _fun_to_lambda(entry):
"""Convert a given string representing a matlab anonymous
function to a lambda function
:example:
lambdafun = "@(x) cos(x)"
lambdafun(np.pi)
:param entry: string of matlab anonymous equation
:type: string
:return: mathematical function
:rtype: lambda function"""
repl = {
'./': '/',
'.*': '*',
'.^': '**'
}
# pull out function variable definition
vari = re.findall(r'\@\(.*?\)', entry)
vari = [re.sub(r'\@|\(|\)', '', x) for x in vari]
# remove variable definition
entry = re.sub(r'\@\(.*?\)', '', entry)
# replace operators to suit numpy
entry = _multiple_replace(repl, entry)
# separate equations into different functions
entry = re.sub('{|}', '', entry).split(',')
return list(lambda x, z=i: ne.evaluate(entry[z], local_dict={vari[z]: x})
for i in range(0, len(entry)))
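# Illustrative conversion (assumed example, not original code): a MATLAB
# anonymous function string becomes a list of numexpr-backed lambdas,
#   fns = _fun_to_lambda('@(x) x.^2 + 1')
#   fns[0](3.0)   # evaluates 'x**2 + 1' with x=3.0 -> 10.0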
def load_params(sheet, rows=None, ncols=None, pcols=None, cols=None,
nrows=None, prows=None):
"""Read designated parameters from the sheet
:example:
sheet=read_excel('parameter_list.xlsx', 0, 'index')
params["pos"] = load_params(sheet, range(55, 75), ncols=2, pcols=3)
:param sheet: spreadsheet data
:param rows: same as nrows=prows
:param cols: same as ncols=pcols
:param nrows: cell rows to read for parameter names
:param ncols: cell columns to read for parameter names
:param prows: cell rows to read for parameter data
:param pcols: cell columns to read for parameter data
:type sheet: :class:`Spreadsheet`
:type rows: list of integers or integer
:type cols: list of integers or integer
:type nrows: list of integers or integer
:type ncols: list of integers or integer
:type prows: list of integers or integer
:type pcols: list of integers or integer
:return: mapping of parameter names to values
:rtype: dict"""
if rows:
nrows = rows
prows = rows
if cols:
ncols = cols
pcols = cols
name_cells = load_section(sheet, nrows, ncols)
data_cells = load_section(sheet, prows, pcols)
# Verify the number of names matches the number of params
assert len(name_cells) == len(data_cells)
data = [_fun_to_lambda(x.value) if x.ctype == TEXT else
x.value if x.ctype == NUMBER else None
for y in data_cells for x in y]
return dict(zip([x.value for y in name_cells for x in y], data))
class Spreadsheet(object):
"""Hold spreadsheet data"""
def __init__(self, assumption=None):
"""Entry point for :class:`Spreadsheet`"""
self.values = None
self.ctypes = None
self.assume = assumption
def set_data(self, data_in):
"""Set spreadsheet data using cell generators"""
data = list(data_in)
self.values = [[col.value for col in row] for row in data]
self.ctypes = [[col.ctype for col in row] for row in data]
def set_values(self, values):
"""Set spreadsheet cell values
:param values: values to set
:type values: container, e.g. list"""
self.values = values
def set_ctypes(self, ctype):
"""Set spreadsheet cell types. I.e. NUMBER, TEXT, etc.
:param ctype: cell types to set
:type ctype: container, e.g. list
self.ctypes = ctype
def size(self):
"""Retrieve the dimensions of the spreadsheet
:return: spreadsheet dimensions
:rtype: tuple"""
if self.values is not None:
return len(self.values), len(self.values[0])
else:
return None
def cell(self, xpos, ypos):
"""Retrieve cell information
:param xpos: cell row
:param ypos: cell column
:type xpos: integer
:type ypos: integer
:return: cell values and info
:rtype: :class:`xlrd.sheet.Cell`"""
if self.ctypes:
return xlrd.sheet.Cell(
self.ctypes[xpos][ypos], self.values[xpos][ypos])
elif self.assume:
return xlrd.sheet.Cell(self.assume, self.values[xpos][ypos])
else:
return None
def main():
"""Module entry point"""
pass
if __name__ == '__main__':
sys.exit(main())
|
{
"content_hash": "feabd35f32403acadee6947d7853cfbe",
"timestamp": "",
"source": "github",
"line_count": 324,
"max_line_length": 79,
"avg_line_length": 29.015432098765434,
"alnum_prop": 0.6305712158281034,
"repo_name": "macklenc/coslib",
"id": "8141923faa53d6db4168fd35c3b769fc7f70948f",
"size": "9401",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "coslib/ldp.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2268"
},
{
"name": "Python",
"bytes": "21668"
}
],
"symlink_target": ""
}
|
testcases = int(raw_input())
for testcase in xrange(testcases):
input_str = raw_input()
input_length = len(input_str) % 2
if input_length == 1:
print -1
continue
input_half = len(input_str) / 2
first_half = input_str[0: input_half]
second_half = input_str[input_half: ]
unique_characters = set(input_str)
result = 0
for unique_character in unique_characters:
first_count = first_half.count(unique_character)
second_count = second_half.count(unique_character)
result += abs(first_count - second_count)
print result / 2
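# Worked example (illustrative): for input "aaabbb" the halves are "aaa"
# and "bbb"; the per-character count differences are |3-0| for 'a' and
# |0-3| for 'b', so result is 6 and 6 / 2 = 3 characters must be replaced
# to make the halves anagrams of each other.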
|
{
"content_hash": "343c1fe82291f8f2958d6dc58229d414",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 58,
"avg_line_length": 26.333333333333332,
"alnum_prop": 0.6044303797468354,
"repo_name": "avenet/hackerrank",
"id": "637c63990574d9a9740e485b667c9c06584ee22c",
"size": "632",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "anagram.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "944"
},
{
"name": "Python",
"bytes": "118800"
}
],
"symlink_target": ""
}
|
from PyQt4 import QtCore, QtGui
from spreadsheet_helpers import CellHelpers
from spreadsheet_registry import spreadsheetRegistry
from spreadsheet_tab import (StandardWidgetSheetTab,
StandardWidgetSheetTabInterface)
################################################################################
class StandardSheetReference(object):
"""
StandardSheetReference is used to specify which sheet a cell want
to be on. It also knows how to decide if a sheet on the
spreadsheet is appropriate for itself.
"""
def __init__(self):
""" StandardSheetReference() -> StandardSheetReference
Initialize to the current sheet with no minimum size
"""
self.sheetName = None
self.minimumRowCount = 1
self.minimumColumnCount = 1
self.candidate = None
def isTabValid(self, tabWidget):
""" isTabValid(tabWidget: QWidget) -> boolean
Check to see if the tab is an acceptable type
"""
return issubclass(tabWidget.__class__, StandardWidgetSheetTab)
def clearCandidate(self):
""" clearCandidate() -> None
Begin the candidate searching process by clearing the previous
candidate sheet. The searching process is done by looping
through all available sheets and let the SheetReference decides
and keep track of which one is the best appropriate
"""
self.candidate = None
def checkCandidate(self, tabWidget, tabLabel, tabIndex, curIndex):
""" checkCandidate(tabWidget: QWidget,
tabLabel: str,
tabIndex: int,
curIndex: int) -> None
Check to see if this new candidate is better than the one we
have right now. If it is then use this one instead. The
condition is simple: sheet type comes first, then name,
then dimensions.
Keyword arguments:
tabWidget --- QWidget controlling actual sheet widget
tabLabel --- the display label of the sheet
tabIndex --- its index inside the tab controller
curIndex --- the current active index of the tab controller
"""
if self.isTabValid(tabWidget):
if (self.sheetName!=None and
str(tabLabel)!=str(self.sheetName)):
return
if self.candidate!=None:
if (self.sheetName==None or
(str(tabLabel)==str(self.sheetName))==
(str(self.candidate[1])==str(self.sheetName))):
storedSheet = self.candidate[0].sheet
newSheet = tabWidget.sheet
if (newSheet.rowCount()<self.minimumRowCount and
storedSheet.rowCount()>=self.minimumRowCount):
return
if (newSheet.columnCount()<self.minimumColumnCount and
storedSheet.columnCount()>=self.minimumColumnCount):
return
if (((newSheet.rowCount()<self.minimumRowCount)==
(storedSheet.rowCount()<self.minimumRowCount)) and
((newSheet.columnCount()<self.minimumColumnCount)==
(storedSheet.columnCount()<self.minimumColumnCount))):
if tabIndex!=curIndex:
return
self.candidate = (tabWidget, tabLabel, tabIndex, curIndex)
def setupCandidate(self, tabController):
""" setupCandidate(tabController: SpreadsheetTabController) -> None
Setup the candidate we have to completely satisfy the reference,
making ready to be displayed on, e.g. extend the number of row and
column
"""
if self.candidate==None:
candidate = StandardWidgetSheetTab(tabController,
self.minimumRowCount,
self.minimumColumnCount)
idx = tabController.addTabWidget(candidate, self.sheetName)
tabController.setCurrentIndex(idx)
candidate.sheet.stretchCells()
return candidate
else:
if self.candidate[0].sheet.rowCount()<self.minimumRowCount:
self.candidate[0].sheet.setRowCount(self.minimumRowCount)
if self.candidate[0].sheet.columnCount()<self.minimumColumnCount:
self.candidate[0].sheet.setColumnCount(self.minimumColumnCount)
tabController.setCurrentWidget(self.candidate[0])
return self.candidate[0]
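# Illustrative search loop on the controller side (assumed caller, not
# part of this module): the reference scans every tab, keeps the best
# candidate, then materialises it.
#   ref = StandardSheetReference()
#   ref.clearCandidate()
#   for idx in xrange(tabController.count()):
#       ref.checkCandidate(tabController.widget(idx),
#                          tabController.tabText(idx),
#                          idx, tabController.currentIndex())
#   sheetTab = ref.setupCandidate(tabController)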
class StandardSingleCellSheetTab(QtGui.QWidget,
StandardWidgetSheetTabInterface):
"""
StandardSingleCellSheetTab is a container of StandardWidgetSheet
with only a single cell. This will be added directly to a
QTabWidget on the spreadsheet as a sheet for displaying
"""
def __init__(self, tabWidget, row=1, col=1):
""" StandardSingleCellSheetTab(row: int,
col: int) -> StandardSingleCellSheetTab
Initialize with the vertical layout containing only a single widget
"""
QtGui.QWidget.__init__(self, None)
StandardWidgetSheetTabInterface.__init__(self)
self.type = 'StandardSingleCellSheetTab'
self.tabWidget = tabWidget
self.vLayout = QtGui.QVBoxLayout()
self.vLayout.setSpacing(0)
self.vLayout.setMargin(0)
self.setLayout(self.vLayout)
self.cell = QtGui.QWidget()
self.layout().addWidget(self.cell)
self.helpers = CellHelpers(self)
self.toolBars = {}
self.blankCellToolBar = None
self.pipelineInfo = {}
### Below are API wrappers to connect to self.sheet
def getDimension(self):
""" getDimension() -> tuple
Get the sheet dimensions
"""
return (1,1)
def getCell(self, row, col):
""" getCell(row: int, col: int) -> QWidget
Get cell at a specific row and column.
"""
return self.cell
def getCellToolBar(self, row, col):
""" getCellToolBar(row: int, col: int) -> QWidget
Return the toolbar widget at cell location (row, col)
"""
cell = self.getCell(row, col)
if cell and hasattr(cell, 'toolBarType'):
if not self.toolBars.has_key(cell.toolBarType):
self.toolBars[cell.toolBarType] = cell.toolBarType(self)
return self.toolBars[cell.toolBarType]
else:
return self.blankCellToolBar
def getCellRect(self, row, col):
""" getCellRect(row: int, col: int) -> QRect
Return the rectangle surrounding the cell at location (row, col)
in parent coordinates
"""
return self.contentsRect()
def getCellGlobalRect(self, row, col):
""" getCellGlobalRect(row: int, col: int) -> QRect
Return the rectangle surrounding the cell at location (row, col)
in global coordinates
"""
rect = self.getCellRect(row, col)
rect.moveTo(self.mapToGlobal(rect.topLeft()))
return rect
def setCellByType(self, row, col, cellType, inputPorts):
""" setCellByType(row: int,
col: int,
cellType: a type inherits from QWidget,
inputPorts: tuple) -> None
Replace the current location (row, col) with a cell of
cellType. If the current type of that cell is the same as
cellType, only the contents is updated with inputPorts.
"""
oldCell = self.getCell(row, col)
if type(oldCell)!=cellType:
oldCell.hide()
self.layout().removeWidget(oldCell)
if cellType:
self.cell = cellType(self)
# self.cell.setGeometry(self.getCellRect(row, col))
self.layout().addWidget(self.cell)
self.cell.show()
self.cell.updateContents(inputPorts)
if hasattr(oldCell, 'deleteLater'):
oldCell.deleteLater()
del oldCell
else:
oldCell.updateContents(inputPorts)
def showHelpers(self, show, globalPos):
""" showHelpers(show: boolean, globalPos: QPoint) -> None
Show the helpers (toolbar, resizer) when show==True
"""
if show:
self.helpers.snapTo(0,0)
self.helpers.adjustPosition()
self.helpers.show()
else:
self.helpers.hide()
def getSelectedLocations(self):
""" getSelectedLocations() -> tuple
Return the selected locations (row, col) of the current sheet
"""
return [(0,0)]
class StandardSingleCellSheetReference(StandardSheetReference):
"""
StandardSingleCellSheetReference is a sheet reference that only
accepts a single cell. This overrides the StandardSheetReference
"""
def isTabValid(self, tabWidget):
""" isTabValid(tabWidget: QWidget) -> boolean
Only accepts StandardSingleCellSheetTab
"""
return issubclass(tabWidget.__class__, StandardSingleCellSheetTab)
def checkCandidate(self, tabWidget, tabLabel, tabIndex, curIndex):
""" checkCandidate(tabWidget: QWidget,
tabLabel: str,
tabIndex: int,
curIndex: int) -> None
A better candidate is chosen merely by whether it is the current index
"""
if self.isTabValid(tabWidget):
better = False
if (self.sheetName!=None and
str(tabLabel)!=str(self.sheetName)):
return
if self.candidate!=None:
if self.candidate[2]==curIndex or tabIndex!=curIndex:
return
self.candidate = (tabWidget, tabLabel, tabIndex, curIndex)
def setupCandidate(self, tabController):
""" setupCandidate(tabController: SpreadsheetTabController) -> None
Set up the sheet to be single-cell sheet
"""
if self.candidate==None:
candidate = StandardSingleCellSheetTab(tabController)
index = tabController.addTabWidget(candidate, self.sheetName)
tabController.setCurrentIndex(index)
return candidate
else:
return self.candidate[0]
spreadsheetRegistry.registerSheet('StandardSingleCellSheetTab',
StandardSingleCellSheetTab)
|
{
"content_hash": "c45933c5c3946b5790e3fb2f7b5388e7",
"timestamp": "",
"source": "github",
"line_count": 274,
"max_line_length": 80,
"avg_line_length": 39.64233576642336,
"alnum_prop": 0.5758607991161848,
"repo_name": "Nikea/VisTrails",
"id": "79aa8df7ef8d6a365b6195fd60ed6f1bc82fd4c9",
"size": "13082",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "vistrails/packages/spreadsheet/spreadsheet_base.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1421"
},
{
"name": "Inno Setup",
"bytes": "19611"
},
{
"name": "Makefile",
"bytes": "768"
},
{
"name": "Mako",
"bytes": "66415"
},
{
"name": "PHP",
"bytes": "49038"
},
{
"name": "Python",
"bytes": "19674395"
},
{
"name": "R",
"bytes": "778864"
},
{
"name": "Rebol",
"bytes": "3972"
},
{
"name": "Shell",
"bytes": "34182"
},
{
"name": "TeX",
"bytes": "145219"
},
{
"name": "XSLT",
"bytes": "1090"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
import time
import numpy as np
import roslaunch
import rosparam
import subprocess
import imageio
from core.env import Env
from core.minisim.minisim_client import MinisimClient
from utils.helpers import Experience
from utils.options import EnvParams
# TODO: figure out logging
class MinisimEnv(Env):
initialized = False
minisim_path = None
roslaunch_map_server = None
roslaunch_node_starter = None
roscore = None
map_dir = None
def __init__(self, args, env_ind=0):
tmp = self._reset_experience
self._reset_experience = lambda: None
super(MinisimEnv, self).__init__(args, env_ind)
self._reset_experience = tmp
assert self.env_type == "minisim"
self.extras = None
self.num_robots = args.num_robots
self.curriculum = args.curriculum if hasattr(args, "curriculum") else False
self.randomize_maps = args.randomize_maps if hasattr(args, "randomize_maps") else False
self.randomize_targets = args.randomize_targets if hasattr(args, "randomize_targets") else False
self.penalize_staying = args.penalize_staying if hasattr(args, "penalize_staying") else False
self.penalize_angle_to_target = args.penalize_angle_to_target if hasattr(args,
"penalize_angle_to_target") else False
self.collision_is_terminal = args.collision_is_terminal if hasattr(args, "collision_is_terminal") else False
self.verbose_test = args.verbose_test if hasattr(args, "verbose_test") else False
self.mode = args.mode # 1(train) | 2(test model_file)
self.total_reward = 0
if self.mode == 2:
self.curriculum = False
# self.collision_is_terminal = False
self.sim_name = 'sim' + str(self.ind)
if not MinisimEnv.initialized:
self._init_roslaunch()
self.node = self._launch_node()
self.client = MinisimClient(
self.num_robots, self.seed, self.curriculum, self.mode,
self.randomize_targets, self.penalize_staying,
self.penalize_angle_to_target, self.collision_is_terminal,
'/' + self.sim_name, self.logger
)
self.client.setup() # TODO: move to client's init?
# action space setup # [linear velocity, angular velocity]
# seemed to be too small
# self.actions = [[0, 0], [1, 0], [-1, 0], [0, 1], [0, -1]] # ,[1, 1], [1, -1], [-1, 1], [-1, -1]]
# definitely too huge, only realised a few months in :(
# self.actions = [[0, 0], [10, 0], [-10, 0], [0, 16], [0, -16]] # ,[1, 1], [1, -1], [-1, 1], [-1, -1]]
# trying out
# self.actions = [[0, 0], [3, 0], [-3, 0], [0, 8], [0, -8]] # ,[1, 1], [1, -1], [-1, 1], [-1, -1]]
# trying out without the option to stand still and backwards movement
self.actions = [[3, 0], [0, 8], [0, -8]]
# try to promote more realistic behavior with slower backward movement?
# self.actions = [[0, 0], [3, 0], [-1, 0], [0, 8], [0, -8]] # ,[1, 1], [1, -1], [-1, 1], [-1, -1]]
self.logger.warning("Action Space: %s", self.actions)
# state space setup
self.logger.warning("State Space: %s", self.state_shape)
# continuous space
if args.agent_type == "a3c":
self.enable_continuous = args.enable_continuous
if args.enable_continuous:
self.logger.warning("Continuous actions not implemented for minisim yet")
else:
self.enable_continuous = False
# TODO: history is currently broken (however it was not useful according to the experiments anyway)
# it was harmful, even
if hasattr(args, "hist_len"):
self.hist_len = args.hist_len
self.state_buffer = np.zeros((self.hist_len, self.state_shape + 2 * self.num_robots))
else:
self.hist_len = 1
self._reset_experience()
def __del__(self):
if self.node is not None:
self.node.stop()
def _preprocessState(self, state):
return state
def _reset_experience(self):
super(MinisimEnv, self)._reset_experience()
self.extras = None
if self.hist_len > 1:
self.state_buffer[:] = 0
def _append_to_history(self, state):
for i in range(self.state_buffer.shape[0] - 1):
self.state_buffer[i, :] = self.state_buffer[i + 1, :]
self.state_buffer[-1, :] = state
@property
def state_shape(self):
return self.client.state_shape
@property
def action_dim(self):
return len(self.actions)
def render(self):
self.logger.warning("WARNING: asked to render minisim - user rviz instead")
def visual(self):
pass
def sample_random_action(self): # TODO: unused
return [self.actions[np.random.randint(0, len(self.actions))] for _ in xrange(self.num_robots)]
def _get_experience(self):
if self.hist_len == 1:
return Experience(state0=self.exp_state0, # NOTE: here state0 is always None
action=self.exp_action,
reward=self.exp_reward,
state1=self._preprocessState(self.exp_state1),
terminal1=self.exp_terminal1,
extras=self.extras)
else:
return Experience(state0=self.exp_state0, # NOTE: here state0 is always None
action=self.exp_action,
reward=self.exp_reward,
state1=self.state_buffer,
terminal1=self.exp_terminal1,
extras=self.extras)
def reset(self):
self._reset_experience()
self.exp_state1, self.extras = self.client.reset()
if self.hist_len > 1:
self._append_to_history(self._preprocessState(self.exp_state1))
self.total_reward = 0
return self._get_experience()
def step(self, action_index):
self.exp_action = action_index
if self.enable_continuous:
# TODO: not implemented
self.exp_state1, self.exp_reward, self.exp_terminal1, _ = self.client.step(self.exp_action)
else:
# enumerated action combinations
# print("actions taken:", [self.actions[i] for i in self._to_n_dim_idx(action_index, self.num_robots)])
# self.exp_state1, self.exp_reward, self.exp_terminal1, _ = self.client.step(
# [self.actions[i] for i in self._to_n_dim_idx(action_index, self.num_robots)]
# )
# unstructured reward
# self.exp_state1, self.exp_reward, self.exp_terminal1, _ = self.client.step(
# [self.actions[i] for i in action_index.reshape(-1)]
# )
# structured reward
self.exp_state1, self.exp_reward, self.exp_terminal1, self.extras, _ = self.client.step_structured(
[self.actions[i] for i in action_index.reshape(-1)]
)
if self.mode == 2:
# time.sleep(0.01)
self.total_reward += self.exp_reward
if self.verbose_test:
print('total reward: ', self.total_reward)
# print("actions: ", action_index)
if self.hist_len > 1:
self._append_to_history(self._preprocessState(self.exp_state1))
return self._get_experience()
def read_static_map_image(self):
# return imageio.imread(os.path.join(MinisimEnv.minisim_path, 'map', 'medium_rooms.pgm'))
# return imageio.imread(os.path.join(MinisimEnv.minisim_path,
# 'map', 'random', 'simple_gen_small_002.pgm'))
return imageio.imread(os.path.join(MinisimEnv.minisim_path,
'map', 'medium_rooms_simpler.pgm'))
# return imageio.imread(os.path.join(MinisimEnv.minisim_path,
# 'map', 'medium_rooms_new.pgm'))
# return imageio.imread(os.path.join(MinisimEnv.minisim_path,
# 'map', 'medium_rooms_new2.pgm'))
# return imageio.imread(os.path.join(MinisimEnv.minisim_path,
# 'map', 'medium_rooms_new3.pgm'))
# was supposed to be useful for a large network with a single action index output, which would
# be expanded into individual robot actions
def _to_n_dim_idx(self, idx, n_dims):
res = np.zeros(n_dims, dtype=np.int)
for i in range(n_dims):
sub = idx // len(self.actions) ** (n_dims - i - 1)  # floor division: '/' is true division here due to the __future__ import
if i != n_dims - 1:
res[i] = sub
idx -= sub * len(self.actions) ** (n_dims - i - 1)
else:
res[i] = idx % len(self.actions)
return res
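# Illustrative decomposition (assumed example, not original code): with
# the 3 actions above and n_dims=2 robots, flat index 5 expands to its
# base-3 digits [1, 2], i.e. robot 0 takes action 1 and robot 1 action 2.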
def _init_roslaunch(self):
rospack = roslaunch.rospkg.RosPack()
try:
minisim_path = rospack.get_path('minisim')
MinisimEnv.minisim_path = minisim_path
except roslaunch.rospkg.ResourceNotFound:
self.logger.warning("WARNING: minisim not found")
sys.exit(-1)
if not self.randomize_maps:
# TODO: find a way to provide the map file arg to the map_server launch file
# map_server_rlaunch_path = os.path.join(minisim_path, 'launch', 'map_server_small.launch')
# map_server_rlaunch_path = os.path.join(minisim_path, 'launch', 'map_server_small_simple.launch')
# map_server_rlaunch_path = os.path.join(minisim_path, 'launch', 'map_server_empty_small.launch')
# map_server_rlaunch_path = os.path.join(minisim_path, 'launch', 'map_server_simple_gen_small_002.launch')
# map_server_rlaunch_path = os.path.join(minisim_path, 'launch', 'map_server_medium_rooms.launch')
map_server_rlaunch_path = os.path.join(minisim_path, 'launch', 'map_server_medium_rooms_simpler.launch')
# map_server_rlaunch_path = os.path.join(minisim_path, 'launch', 'map_server_medium_rooms_new.launch')
# map_server_rlaunch_path = os.path.join(minisim_path, 'launch', 'map_server_medium_rooms_new2.launch')
# map_server_rlaunch_path = os.path.join(minisim_path, 'launch', 'map_server_medium_rooms_new3.launch')
uuid = roslaunch.rlutil.get_or_generate_uuid(None, False)
roslaunch.configure_logging(uuid)
MinisimEnv.roslaunch_map_server = roslaunch.parent.ROSLaunchParent(uuid, [map_server_rlaunch_path])
MinisimEnv.roslaunch_map_server.start()
else:
master = roslaunch.scriptapi.Master()
if not master.is_running():
MinisimEnv.roscore = subprocess.Popen('roscore')
rlaunch_path = os.path.join(minisim_path, 'launch', 'sim_srv_multimap.launch')
loader = roslaunch.xmlloader.XmlLoader(resolve_anon=False)
config = roslaunch.config.ROSLaunchConfig()
loader.load(rlaunch_path, config, verbose=False)
MinisimEnv.map_dir = config.params.values()[0].value
MinisimEnv.roslaunch_node_starter = roslaunch.scriptapi.ROSLaunch()
MinisimEnv.roslaunch_node_starter.start()
MinisimEnv.initialized = True
def _launch_node(self):
package = 'minisim'
executable = 'minisim_srv' if not self.randomize_maps else 'minisim_srv_standalone'
node = roslaunch.core.Node(package, executable, required=True, name=self.sim_name,
namespace=self.sim_name, output='screen')
if self.randomize_maps:
rosparam.set_param("/{0}/{0}/map_dir".format(self.sim_name), MinisimEnv.map_dir)
return MinisimEnv.roslaunch_node_starter.launch(node)
if __name__ == '__main__':
params = EnvParams()
env_0 = MinisimEnv(params, 0)
for i in range(50):
env_0.reset()
for j in xrange(np.random.randint(10, 100)):
env_0.step(np.random.randint(0, 3, size=(1, 1)))
# env_1 = MinisimEnv(params, 1)
# time.sleep(10000)
|
{
"content_hash": "696687631c6474339710c99816d3c7aa",
"timestamp": "",
"source": "github",
"line_count": 281,
"max_line_length": 119,
"avg_line_length": 44.20996441281139,
"alnum_prop": 0.579892135555019,
"repo_name": "AlekseyZhelo/pytorch-rl",
"id": "97675a385f3f24b51a7e8db4e30d3ee7edccf674",
"size": "12423",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "core/minisim/minisim_env.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "343662"
},
{
"name": "Shell",
"bytes": "5413"
}
],
"symlink_target": ""
}
|
import netaddr
def check_subnet_ip(cidr, ip_address):
"""Validate that the IP address is on the subnet."""
ip = netaddr.IPAddress(ip_address)
net = netaddr.IPNetwork(cidr)
# Check that the IP is valid on subnet. This cannot be the
# network or the broadcast address
return (ip != net.network and ip != net.broadcast
and net.netmask & ip == net.network)
def generate_pools(cidr, gateway_ip):
"""Create IP allocation pools for a specified subnet
The Neutron API defines a subnet's allocation pools as a list of
IPRange objects for defining the pool range.
"""
pools = []
# Auto allocate the pool around gateway_ip
net = netaddr.IPNetwork(cidr)
first_ip = net.first + 1
last_ip = net.last - 1
gw_ip = int(netaddr.IPAddress(gateway_ip or net.last))
# Use the gw_ip to find a point for splitting allocation pools
# for this subnet
split_ip = min(max(gw_ip, net.first), net.last)
if split_ip > first_ip:
pools.append(netaddr.IPRange(first_ip, split_ip - 1))
if split_ip < last_ip:
pools.append(netaddr.IPRange(split_ip + 1, last_ip))
return pools
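# Hedged usage sketch (not part of the original module): exercising the
# helpers above on an assumed /24 with the gateway on the first host.
if __name__ == '__main__':
    print(check_subnet_ip('192.168.1.0/24', '192.168.1.5'))    # True
    print(check_subnet_ip('192.168.1.0/24', '192.168.1.255'))  # False: broadcast
    for pool in generate_pools('192.168.1.0/24', '192.168.1.1'):
        print(pool)  # 192.168.1.2-192.168.1.254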
|
{
"content_hash": "ac2eb7525e50b88edc6014ab2550b977",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 68,
"avg_line_length": 35.21212121212121,
"alnum_prop": 0.6617900172117039,
"repo_name": "NeCTAR-RC/neutron",
"id": "74927769ad765ab3e0ad056a46a0c10e2d4f8e47",
"size": "1792",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "neutron/ipam/utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "980"
},
{
"name": "Python",
"bytes": "7228162"
},
{
"name": "Shell",
"bytes": "12807"
}
],
"symlink_target": ""
}
|
"""
polling utilities -- general polling for condition, polling for file to appear in a directory
"""
import os
import glob
from threading import Thread
from gevent.event import Event
from putil.logging import log
from Queue import Queue
class ConditionPoller(Thread):
"""
Generic polling mechanism: every `interval` seconds, check whether `condition`
returns a true value; if so, pass the value to `callback`.
If condition or callback raises an exception, stop polling.
"""
def __init__(self, condition, condition_callback, exception_callback, interval):
self.polling_interval = interval
self._shutdown_now = Event()
self._condition = condition
self._callback = condition_callback
self._on_exception = exception_callback
super(ConditionPoller,self).__init__()
def shutdown(self):
self.is_shutting_down = True
self._shutdown_now.set()
def run(self):
try:
while not self._shutdown_now.is_set():
self._check_condition()
self._shutdown_now.wait(self.polling_interval)
except:
log.error('thread failed', exc_info=True)
def _check_condition(self):
try:
value = self._condition()
if value:
self._callback(value)
except Exception as e:
log.debug('stopping poller after exception', exc_info=True)
self.shutdown()
if self._on_exception:
self._on_exception(e)
def start(self):
super(ConditionPoller,self).start()
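# A minimal ConditionPoller sketch (hypothetical condition and callback; the
# poller checks every 2 seconds and shuts itself down on the first exception):
#
#   def ready():
#       return os.path.exists('/tmp/ready.flag') or None
#   def on_value(value):
#       log.info('condition met: %r', value)
#   poller = ConditionPoller(ready, on_value, exception_callback=None, interval=2)
#   poller.start()
#   ...
#   poller.shutdown()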
class DirectoryPoller(ConditionPoller):
"""
poll for new files added to a directory that match a wildcard pattern.
expects files to be added only, and added in ASCII order.
"""
def __init__(self, directory, wildcard, callback, exception_callback=None, interval=1):
try:
if not os.path.isdir(directory):
raise ValueError('%s is not a directory'%directory)
self._path = directory + '/' + wildcard
self._last_filename = None
super(DirectoryPoller,self).__init__(self._check_for_files, callback, exception_callback, interval)
except:
            log.error('DirectoryPoller initialization failed', exc_info=True)
def _check_for_files(self):
filenames = glob.glob(self._path)
# files, but no change since last time
if self._last_filename and filenames and filenames[-1]==self._last_filename:
return None
# no files yet, just like last time
if not self._last_filename and not filenames:
return None
if self._last_filename:
position = filenames.index(self._last_filename) # raises ValueError if file was removed
out = filenames[position+1:]
else:
out = filenames
self._last_filename = filenames[-1]
log.trace('found files: %r', out)
return out
class BlockingDirectoryIterator(object):
"""
iterator that blocks and yields new files added to a directory
use like this:
    for filename in BlockingDirectoryIterator('/tmp','A*.DAT').get_files():
print filename
"""
def __init__(self, directory, wildcard, interval=1):
self._values = Queue()
self._exception = None
self._ready = Event()
self._poller = DirectoryPoller(directory, wildcard, self._on_condition, self._on_exception, interval)
self._poller.start()
def __iter__(self):
return self
def get_files(self):
while True:
# could have exception or list of filenames
out = self._values.get()
if isinstance(out, Exception):
raise out
else:
yield out
def cancel(self):
self._poller.shutdown()
def _on_condition(self, filenames):
for file in filenames:
self._values.put(file)
def _on_exception(self, exception):
self._values.put(exception)
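# End-to-end sketch (hypothetical directory and pattern): iterate forever over
# newly appearing files, then tear the poller down.
#
#   it = BlockingDirectoryIterator('/tmp', 'A*.DAT', interval=1)
#   try:
#       for filename in it.get_files():
#           print filename        # blocks until new matching files appear
#   finally:
#       it.cancel()               # stops the background DirectoryPoller thread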
|
{
"content_hash": "d74913dd0f0270c3121c4e9e66ee188c",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 129,
"avg_line_length": 37.05555555555556,
"alnum_prop": 0.6096951524237881,
"repo_name": "mkl-/scioncc",
"id": "b1635177d047d10f848c32285533673fc90811d1",
"size": "4002",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/putil/poller.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "689"
},
{
"name": "JavaScript",
"bytes": "11408"
},
{
"name": "Python",
"bytes": "2522226"
},
{
"name": "Shell",
"bytes": "2163"
}
],
"symlink_target": ""
}
|
import numpy as np
import sys
import cv2
def write_pfm(path, image, scale=1):
"""Write pfm file.
Args:
        path (str): path to file
image (array): data
scale (int, optional): Scale. Defaults to 1.
"""
with open(path, "wb") as file:
color = None
if image.dtype.name != "float32":
raise Exception("Image dtype must be float32.")
image = np.flipud(image)
if len(image.shape) == 3 and image.shape[2] == 3: # color image
color = True
elif (
len(image.shape) == 2 or len(image.shape) == 3 and image.shape[2] == 1
): # greyscale
color = False
else:
raise Exception("Image must have H x W x 3, H x W x 1 or H x W dimensions.")
file.write("PF\n" if color else "Pf\n".encode())
file.write("%d %d\n".encode() % (image.shape[1], image.shape[0]))
endian = image.dtype.byteorder
if endian == "<" or endian == "=" and sys.byteorder == "little":
scale = -scale
file.write("%f\n".encode() % scale)
image.tofile(file)
def read_image(path):
"""Read image and output RGB image (0-1).
Args:
path (str): path to file
Returns:
array: RGB image (0-1)
"""
img = cv2.imread(path)
if img.ndim == 2:
img = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR)
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) / 255.0
return img
def write_depth(path, depth, bits=1):
"""Write depth map to pfm and png file.
Args:
path (str): filepath without extension
        depth (array): depth
        bits (int, optional): bytes per sample in the output png (1 or 2). Defaults to 1.
    """
write_pfm(path + ".pfm", depth.astype(np.float32))
depth_min = depth.min()
depth_max = depth.max()
max_val = (2**(8*bits))-1
if depth_max - depth_min > np.finfo("float").eps:
out = max_val * (depth - depth_min) / (depth_max - depth_min)
else:
        out = np.zeros(depth.shape, dtype=depth.dtype)  # constant depth map: keep an array so .astype() below still works
if bits == 1:
cv2.imwrite(path + ".png", out.astype("uint8"))
elif bits == 2:
cv2.imwrite(path + ".png", out.astype("uint16"))
return
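# Hedged usage sketch (hypothetical file name; write_pfm requires float32):
# produces out.pfm plus a 16-bit out.png.
#
#   depth = np.random.rand(480, 640).astype(np.float32)
#   write_depth("out", depth, bits=2)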
|
{
"content_hash": "def5f2b33c7744486acb945af37efb84",
"timestamp": "",
"source": "github",
"line_count": 82,
"max_line_length": 88,
"avg_line_length": 25.5609756097561,
"alnum_prop": 0.5438931297709924,
"repo_name": "intel-isl/MiDaS",
"id": "ff9a54bd55f5e31a90fad21242efbfda5a6cc1a7",
"size": "2096",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tf/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "9891"
},
{
"name": "CMake",
"bytes": "6607"
},
{
"name": "Dockerfile",
"bytes": "809"
},
{
"name": "Java",
"bytes": "113288"
},
{
"name": "Python",
"bytes": "63838"
},
{
"name": "Ruby",
"bytes": "408"
},
{
"name": "Shell",
"bytes": "3713"
},
{
"name": "Swift",
"bytes": "58465"
}
],
"symlink_target": ""
}
|
from mqtt_as import MQTTClient
import uasyncio as asyncio
import ubinascii
from machine import Pin, PWM, unique_id
import urequests
import secrets
import topic
_CLIENT_ID = ubinascii.hexlify(unique_id())
# Pin constants
_BUZZER = 14 # GPIO14, D5
_LED1 = 16 # GPIO16, D0, Nodemcu led
_LED2 = 2 # GPIO2, D4, ESP8266 led
_THINGSPEAK_URL = "https://api.thingspeak.com/update?api_key={}&{}"
_IFTTT_URL = "https://maker.ifttt.com/trigger/gate/with/key/{}"
MQTTClient.DEBUG = False # Optional: print diagnostic messages
loop = asyncio.get_event_loop()
msg_led = Pin(_LED2, Pin.OUT, value=1)
live_led = Pin(_LED1, Pin.OUT, value=1)
def run_base():
client = MQTTClient(mqtt_config, _CLIENT_ID, secrets.MQTT_BROKER)
try:
loop.create_task(signal_alive())
loop.run_until_complete(main(client))
finally:
client.close() # Prevent LmacRxBlk:1 errors
async def signal_alive():
while True:
live_led(False)
await asyncio.sleep_ms(30)
live_led(True)
await asyncio.sleep(5)
async def signal_alarm():
msg_led(False)
await asyncio.sleep(1)
msg_led(True)
async def sound_alarm():
pwm = PWM(Pin(_BUZZER), freq=500, duty=512)
await asyncio.sleep(5)
pwm.deinit()
def callback(topic, msg):
msg_str = msg.decode('ascii').strip()
loop.create_task(signal_alarm())
loop.create_task(send_to_ifttt())
loop.create_task(send_to_thingspeak(msg_str))
loop.create_task(sound_alarm())
async def conn_han(client):
await client.subscribe(topic.GATE_STATUS)
async def main(client):
await client.connect()
while True:
await asyncio.sleep(5)
async def send_to_thingspeak(msg):
url = _THINGSPEAK_URL.format(secrets.THINGSPEAK_API_KEY, msg)
await asyncio.sleep(0)
http_get(url)
async def send_to_ifttt():
url = _IFTTT_URL.format(secrets.IFTTT_API_KEY)
await asyncio.sleep(0)
http_get(url)
def http_get(url):
try:
req = urequests.get(url)
req.close()
except Exception as e:
# Ignore so that program continues running
print('HTTP get failed', e)
mqtt_config = {
'subs_cb': callback,
'connect_coro': conn_han,
}
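# Assumed entry point (a sketch; the module itself never calls run_base):
# mqtt_config must be defined, as above, before run_base() builds the client.
#
#   if __name__ == '__main__':
#       run_base()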
|
{
"content_hash": "9793331dba850996bec5ece52d6cd664",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 69,
"avg_line_length": 22.762886597938145,
"alnum_prop": 0.6571557971014492,
"repo_name": "chrisb2/gate-alarm",
"id": "2b5b9be7c4c2c6fabbce6d8daf9c84c44f3872f9",
"size": "2208",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "base_app.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "5174"
}
],
"symlink_target": ""
}
|
import os
import sys
def color(text, color_code, readline=False):
"""Colorize text.
@param text: text.
@param color_code: color.
@return: colorized text.
"""
# $TERM under Windows:
# cmd.exe -> "" (what would you expect..?)
# cygwin -> "cygwin" (should support colors, but doesn't work somehow)
# mintty -> "xterm" (supports colors)
if sys.platform == "win32" and os.getenv("TERM") != "xterm":
return str(text)
if readline:
        # special readline escapes to fix colored input prompts
# http://bugs.python.org/issue17337
return "\x01\x1b[%dm\x02%s\x01\x1b[0m\x02" % (color_code, text)
return "\x1b[%dm%s\x1b[0m" % (color_code, text)
def black(text, readline=False):
return color(text, 30, readline)
def red(text, readline=False):
return color(text, 31, readline)
def green(text, readline=False):
return color(text, 32, readline)
def yellow(text, readline=False):
return color(text, 33, readline)
def blue(text, readline=False):
return color(text, 34, readline)
def magenta(text, readline=False):
return color(text, 35, readline)
def cyan(text, readline=False):
return color(text, 36, readline)
def white(text, readline=False):
return color(text, 37, readline)
def bold(text, readline=False):
return color(text, 1, readline)
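# Usage sketch: wrap any string; on win32 cmd.exe (non-xterm $TERM) these
# helpers fall back to plain text, per the check in color() above.
#
#   print(red("error:") + " something failed")
#   prompt = green("viper > ", readline=True)  # readline-safe escape wrapping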
|
{
"content_hash": "f82ebea832490fdac42ffd68c4d7d421",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 74,
"avg_line_length": 27.46938775510204,
"alnum_prop": 0.6552748885586924,
"repo_name": "S2R2/viper",
"id": "fef05904f3c2d5a357978e3c9edf12e91e65e107",
"size": "1468",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "viper/common/colors.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1306"
},
{
"name": "JavaScript",
"bytes": "9294"
},
{
"name": "Makefile",
"bytes": "436"
},
{
"name": "Python",
"bytes": "1552230"
},
{
"name": "Smarty",
"bytes": "28213"
}
],
"symlink_target": ""
}
|
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/ship/components/shield_generator/shared_shd_kse_mk4.iff"
result.attribute_template_id = 8
result.stfName("space/space_item","shd_kse_mk4_n")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
{
"content_hash": "31ec17b959f461f775b15406702024d7",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 92,
"avg_line_length": 25.53846153846154,
"alnum_prop": 0.7048192771084337,
"repo_name": "obi-two/Rebelion",
"id": "a07b4ddd047ced4dc756689c664c1a31df11f06a",
"size": "477",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "data/scripts/templates/object/tangible/ship/components/shield_generator/shared_shd_kse_mk4.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11818"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2293610"
},
{
"name": "CMake",
"bytes": "39727"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7499185"
},
{
"name": "SQLPL",
"bytes": "41864"
}
],
"symlink_target": ""
}
|
from django.apps import AppConfig
class UserAccountConfig(AppConfig):
name = 'user_account'
|
{
"content_hash": "64f33bba773c46e52665d7b60869817c",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 35,
"avg_line_length": 19.6,
"alnum_prop": 0.7653061224489796,
"repo_name": "MahdiZareie/VersionMonitoring",
"id": "05d4def5abef1fd5e99afbc82e8c3b611beb8a8f",
"size": "98",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "user_account/apps.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "7323"
},
{
"name": "HTML",
"bytes": "12550"
},
{
"name": "JavaScript",
"bytes": "2650"
},
{
"name": "Python",
"bytes": "13058"
}
],
"symlink_target": ""
}
|
import unittest
from webkitpy.common.system import filesystem_mock
from webkitpy.common.system import filesystem_unittest
class MockFileSystemTest(unittest.TestCase, filesystem_unittest.GenericFileSystemTests):
def setUp(self):
self.fs = filesystem_mock.MockFileSystem()
self.setup_generic_test_dir()
def tearDown(self):
self.teardown_generic_test_dir()
self.fs = None
def quick_check(self, test_fn, good_fn, *tests):
for test in tests:
if hasattr(test, '__iter__'):
expected = good_fn(*test)
actual = test_fn(*test)
else:
expected = good_fn(test)
actual = test_fn(test)
self.assertEqual(expected, actual, 'given %s, expected %s, got %s' % (repr(test), repr(expected), repr(actual)))
def test_join(self):
self.quick_check(self.fs.join,
self.fs._slow_but_correct_join,
('',),
('', 'bar'),
('foo',),
('foo/',),
('foo', ''),
('foo/', ''),
('foo', 'bar'),
('foo', '/bar'),
)
def test_normpath(self):
self.quick_check(self.fs.normpath,
self.fs._slow_but_correct_normpath,
'',
'/',
'.',
'/.',
'foo',
'foo/',
'foo/.',
'foo/bar',
'/foo',
'foo/../bar',
'foo/../bar/baz',
'../foo')
def test_relpath_win32(self):
pass
|
{
"content_hash": "00328ad089902f344d98b0e615df2d5c",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 124,
"avg_line_length": 32.41379310344828,
"alnum_prop": 0.39787234042553193,
"repo_name": "heke123/chromium-crosswalk",
"id": "561959ad5610989b637af3d5c0ee344c251afb2b",
"size": "3407",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "third_party/WebKit/Tools/Scripts/webkitpy/common/system/filesystem_mock_unittest.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
"""
Unit Tests for volume types extra specs code
"""
from cinder import context
from cinder import db
from cinder import exception
from cinder import test
class VolumeGlanceMetadataTestCase(test.TestCase):
def setUp(self):
super(VolumeGlanceMetadataTestCase, self).setUp()
self.ctxt = context.get_admin_context()
def test_vol_glance_metadata_bad_vol_id(self):
ctxt = context.get_admin_context()
self.assertRaises(exception.VolumeNotFound,
db.volume_glance_metadata_create,
ctxt, 1, 'key1', 'value1')
self.assertRaises(exception.VolumeNotFound,
db.volume_glance_metadata_get, ctxt, 1)
db.volume_glance_metadata_delete_by_volume(ctxt, 10)
def test_vol_update_glance_metadata(self):
ctxt = context.get_admin_context()
db.volume_create(ctxt, {'id': 1})
db.volume_create(ctxt, {'id': 2})
db.volume_glance_metadata_create(ctxt, 1, 'key1', 'value1')
db.volume_glance_metadata_create(ctxt, 2, 'key1', 'value1')
db.volume_glance_metadata_create(ctxt, 2, 'key2', 'value2')
db.volume_glance_metadata_create(ctxt, 2, 'key3', 123)
expected_metadata_1 = {'volume_id': '1',
'key': 'key1',
'value': 'value1'}
metadata = db.volume_glance_metadata_get(ctxt, 1)
self.assertEqual(len(metadata), 1)
for key, value in expected_metadata_1.items():
self.assertEqual(metadata[0][key], value)
expected_metadata_2 = ({'volume_id': '2',
'key': 'key1',
'value': 'value1'},
{'volume_id': '2',
'key': 'key2',
'value': 'value2'},
{'volume_id': '2',
'key': 'key3',
'value': '123'})
metadata = db.volume_glance_metadata_get(ctxt, 2)
self.assertEqual(len(metadata), 3)
for expected, meta in zip(expected_metadata_2, metadata):
for key, value in expected.items():
self.assertEqual(meta[key], value)
self.assertRaises(exception.GlanceMetadataExists,
db.volume_glance_metadata_create,
ctxt, 1, 'key1', 'value1a')
metadata = db.volume_glance_metadata_get(ctxt, 1)
self.assertEqual(len(metadata), 1)
for key, value in expected_metadata_1.items():
self.assertEqual(metadata[0][key], value)
def test_vols_get_glance_metadata(self):
ctxt = context.get_admin_context()
db.volume_create(ctxt, {'id': '1'})
db.volume_create(ctxt, {'id': '2'})
db.volume_create(ctxt, {'id': '3'})
db.volume_glance_metadata_create(ctxt, '1', 'key1', 'value1')
db.volume_glance_metadata_create(ctxt, '2', 'key2', 'value2')
db.volume_glance_metadata_create(ctxt, '2', 'key22', 'value22')
metadata = db.volume_glance_metadata_get_all(ctxt)
self.assertEqual(len(metadata), 3)
self._assert_metadata_equals('1', 'key1', 'value1', metadata[0])
self._assert_metadata_equals('2', 'key2', 'value2', metadata[1])
self._assert_metadata_equals('2', 'key22', 'value22', metadata[2])
def _assert_metadata_equals(self, volume_id, key, value, observed):
self.assertEqual(volume_id, observed.volume_id)
self.assertEqual(key, observed.key)
self.assertEqual(value, observed.value)
def test_vol_delete_glance_metadata(self):
ctxt = context.get_admin_context()
db.volume_create(ctxt, {'id': 1})
db.volume_glance_metadata_delete_by_volume(ctxt, 1)
db.volume_glance_metadata_create(ctxt, 1, 'key1', 'value1')
db.volume_glance_metadata_delete_by_volume(ctxt, 1)
self.assertRaises(exception.GlanceMetadataNotFound,
db.volume_glance_metadata_get, ctxt, 1)
def test_vol_glance_metadata_copy_to_snapshot(self):
ctxt = context.get_admin_context()
db.volume_create(ctxt, {'id': 1})
db.snapshot_create(ctxt, {'id': 100, 'volume_id': 1})
db.volume_glance_metadata_create(ctxt, 1, 'key1', 'value1')
db.volume_glance_metadata_copy_to_snapshot(ctxt, 100, 1)
expected_meta = {'snapshot_id': '100',
'key': 'key1',
'value': 'value1'}
for meta in db.volume_snapshot_glance_metadata_get(ctxt, 100):
for (key, value) in expected_meta.items():
self.assertEqual(meta[key], value)
def test_vol_glance_metadata_copy_from_volume_to_volume(self):
ctxt = context.get_admin_context()
db.volume_create(ctxt, {'id': 1})
db.volume_create(ctxt, {'id': 100, 'source_volid': 1})
db.volume_glance_metadata_create(ctxt, 1, 'key1', 'value1')
db.volume_glance_metadata_copy_from_volume_to_volume(ctxt, 1, 100)
expected_meta = {'key': 'key1',
'value': 'value1'}
for meta in db.volume_glance_metadata_get(ctxt, 100):
for (key, value) in expected_meta.items():
self.assertEqual(meta[key], value)
def test_volume_glance_metadata_copy_to_volume(self):
vol1 = db.volume_create(self.ctxt, {})
vol2 = db.volume_create(self.ctxt, {})
db.volume_glance_metadata_create(self.ctxt, vol1['id'], 'm1', 'v1')
snapshot = db.snapshot_create(self.ctxt, {'volume_id': vol1['id']})
db.volume_glance_metadata_copy_to_snapshot(self.ctxt, snapshot['id'],
vol1['id'])
db.volume_glance_metadata_copy_to_volume(self.ctxt, vol2['id'],
snapshot['id'])
metadata = db.volume_glance_metadata_get(self.ctxt, vol2['id'])
metadata = {m['key']: m['value'] for m in metadata}
self.assertEqual(metadata, {'m1': 'v1'})
def test_volume_snapshot_glance_metadata_get_nonexistent(self):
vol = db.volume_create(self.ctxt, {})
snapshot = db.snapshot_create(self.ctxt, {'volume_id': vol['id']})
self.assertRaises(exception.GlanceMetadataNotFound,
db.volume_snapshot_glance_metadata_get,
self.ctxt, snapshot['id'])
|
{
"content_hash": "de97ab2717edfc85e414855141d829fe",
"timestamp": "",
"source": "github",
"line_count": 145,
"max_line_length": 77,
"avg_line_length": 44.758620689655174,
"alnum_prop": 0.5650231124807396,
"repo_name": "JioCloud/cinder",
"id": "96297fc4484621357b5ffe52ceb7438cf5d5da8d",
"size": "7196",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cinder/tests/unit/test_volume_glance_metadata.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "11977630"
},
{
"name": "Shell",
"bytes": "8111"
}
],
"symlink_target": ""
}
|
import cmd
import queue
import time
from somnMesh import *
TxQ = queue.Queue()
RxQ = queue.Queue()
class somnIf(cmd.Cmd):
def do_send(self, s):
line = s.split()
if len(line) != 2:
print("*** invalid number of arguments ***")
return
packet = somnData(int(line[0], 16),line[1])
TxQ.put(packet)
def do_read(self, s):
try:
packet = RxQ.get(False)
except:
return
print(packet)
def do_exit(self, s):
return True
if __name__ =="__main__":
somnNode = somnMesh(TxQ, RxQ)
somnNode.start()
time.sleep(5)
somnIf().cmdloop()
somnNode._mainLoopRunning = 0
somnNode.join()
exit()
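# Example interactive session (assumed somnMesh semantics: the first argument
# to send is a hex node address, the second the payload):
#
#   (Cmd) send 1A hello
#   (Cmd) read
#   (Cmd) exit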
|
{
"content_hash": "89f76ad541a7d2aa93ef16c82eb4c023",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 50,
"avg_line_length": 18.571428571428573,
"alnum_prop": 0.6,
"repo_name": "squidpie/somn",
"id": "7179ac142120f634bf0484508af750310627e8e1",
"size": "672",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/somnAppTest.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "49628"
}
],
"symlink_target": ""
}
|
from django.db import models
from django.utils import timezone
class Category(models.Model):
name = models.CharField(max_length=50)
number = models.CharField(max_length=50, unique=True)
def __str__(self):
return str(self.name) + ":" + str(self.number)
def subs(self):
sub_list = []
ss = Subcategory.objects.filter(category=self)
for s in ss:
sub_list.append(s.name)
return sub_list
@classmethod
def to_dict(cls):
cats = cls.objects.all()
le_dict = {}
for cat in cats:
le_dict[cat.name] = cat.subs()
return le_dict
class Subcategory(models.Model):
name = models.CharField(max_length=50)
category = models.ForeignKey(Category)
def __str__(self):
return self.name
class Transaction(models.Model):
IN = 'i'
OUT = 'o'
TYPE_CHOICES = (
(IN, 'Money In'),
(OUT, 'Money Out'),
)
transaction_type = models.CharField(max_length=5, choices=TYPE_CHOICES, default=OUT)
category = models.ForeignKey(Category, blank=True, null=True)
date = models.DateField()
subcategory = models.ForeignKey(Subcategory, blank=True, null=True)
comment = models.CharField(max_length=200, blank=True)
amount = models.DecimalField(decimal_places=2, max_digits=19)
balance = models.DecimalField(decimal_places=2, max_digits=19)
account = models.ForeignKey('accounts.Account')
def __str__(self):
return self.transaction_type + str(self.amount)
@classmethod
def first_year(cls):
years = cls.objects.order_by('date')[0].date.year
return years
@classmethod
def year_range(cls):
year_range = [cls.first_year()]
if timezone.now().year == year_range[-1]:
return year_range
else:
            while year_range[-1] != timezone.now().year:
                year_range.append(year_range[-1] + 1)
            return year_range
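# Illustrative shape of Category.to_dict() (hypothetical data): each category
# name maps to the list of its subcategory names.
#
#   {'Food': ['Groceries', 'Restaurants'], 'Transport': ['Fuel']}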
|
{
"content_hash": "a1514f69e2bbaa4fa788eded1f0b19ea",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 88,
"avg_line_length": 27.52054794520548,
"alnum_prop": 0.6102538576406172,
"repo_name": "m1k3r/gvi-accounts",
"id": "47460819dbe5ada965bfb363ca852bd2965a63aa",
"size": "2009",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "gvi/transactions/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6142"
},
{
"name": "HTML",
"bytes": "152771"
},
{
"name": "JavaScript",
"bytes": "112023"
},
{
"name": "Python",
"bytes": "64506"
}
],
"symlink_target": ""
}
|
from distutils.core import setup
import sys
#sys.path.append('validatingconfigparser')
import validatingconfigparser
setup(name='validatingconfigparser',
version='0.1',
author='Markus Juenemann',
author_email='markus.at.juenemann@gmail.com',
url='http://code.google.com/p/validatingconfigparser/',
download_url='http://code.google.com/p/validatingconfigparser/downloads/list',
description="Python's ConfigParser classes with validation",
long_description=validatingconfigparser.__doc__,
install_requires=['Formencode'],
      packages=['validatingconfigparser'],
provides=['validatingconfigparser'],
keywords='ConfigParser configparser validation Formencode',
license='Apache License 2.0',
classifiers=['Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'License :: OSI Approved :: Apache Software License',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
|
{
"content_hash": "59cb253e361bbf20ca73454fe94f82f8",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 84,
"avg_line_length": 43.357142857142854,
"alnum_prop": 0.6375617792421746,
"repo_name": "techdragon/validatingconfigparser",
"id": "536dd1c370574c72ad88f1ad573f27e05cfc2974",
"size": "1214",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Groff",
"bytes": "657"
},
{
"name": "Makefile",
"bytes": "2319"
},
{
"name": "Python",
"bytes": "56135"
}
],
"symlink_target": ""
}
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Article.active'
db.add_column('feedback_article', 'active',
self.gf('django.db.models.fields.BooleanField')(default=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Article.active'
db.delete_column('feedback_article', 'active')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'feedback.article': {
'Meta': {'object_name': 'Article'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['feedback.Category']"}),
'common_issue': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'content': ('tinymce.models.HTMLField', [], {'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_updated_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'feedback.category': {
'Meta': {'ordering': "['name']", 'object_name': 'Category'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75'})
},
'feedback.feedback': {
'Meta': {'object_name': 'Feedback'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'feedback': ('django.db.models.fields.TextField', [], {'max_length': '1000'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'login_method': ('django.db.models.fields.CharField', [], {'default': "'CAC'", 'max_length': '25'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'organization': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True', 'blank': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'default': "'CAC'", 'max_length': '25'}),
'referer': ('django.db.models.fields.TextField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'subject': ('django.db.models.fields.CharField', [], {'default': "'R3 Event Page'", 'max_length': '25'}),
'user_agent': ('django.db.models.fields.TextField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'})
},
'taggit.tag': {
'Meta': {'object_name': 'Tag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_tagged_items'", 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_items'", 'to': "orm['taggit.Tag']"})
}
}
complete_apps = ['feedback']
|
{
"content_hash": "a8b37fc272112e626956f2fb6aae02fc",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 182,
"avg_line_length": 70.23584905660377,
"alnum_prop": 0.5442578912021491,
"repo_name": "jaycrossler/geo-events",
"id": "e72e2d6fa1ea6422f6409bafa066cf9f8492cf8f",
"size": "7469",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "geoevents/feedback/migrations/0004_auto__add_field_article_active.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "7006"
},
{
"name": "CSS",
"bytes": "169563"
},
{
"name": "JavaScript",
"bytes": "10629192"
},
{
"name": "Python",
"bytes": "1589939"
},
{
"name": "Shell",
"bytes": "4212"
}
],
"symlink_target": ""
}
|
from copy import deepcopy
import httplib as http
import time
import mock
import pytest
import pytz
import datetime
from nose.tools import * # noqa
from tests.base import OsfTestCase, fake
from osf_tests.factories import (
UserFactory, NodeFactory, ProjectFactory,
AuthUserFactory, RegistrationFactory
)
from addons.wiki.tests.factories import WikiFactory, WikiVersionFactory
from website.exceptions import NodeStateError
from addons.wiki import settings
from addons.wiki import views
from addons.wiki.exceptions import InvalidVersionError
from addons.wiki.models import WikiPage, WikiVersion, render_content
from addons.wiki.utils import (
get_sharejs_uuid, generate_private_uuid, share_db, delete_share_doc,
migrate_uuid, format_wiki_version, serialize_wiki_settings, serialize_wiki_widget
)
from framework.auth import Auth
from django.utils import timezone
from addons.wiki.utils import to_mongo_key
from .config import EXAMPLE_DOCS, EXAMPLE_OPS
pytestmark = pytest.mark.django_db
# forward slashes are not allowed, typically they would be replaced with spaces
SPECIAL_CHARACTERS_ALL = u'`~!@#$%^*()-=_+ []{}\|/?.df,;:''"'
SPECIAL_CHARACTERS_ALLOWED = u'`~!@#$%^*()-=_+ []{}\|?.df,;:''"'
class TestWikiViews(OsfTestCase):
def setUp(self):
super(TestWikiViews, self).setUp()
self.user = AuthUserFactory()
self.project = ProjectFactory(is_public=True, creator=self.user)
self.consolidate_auth = Auth(user=self.project.creator)
def test_wiki_url_get_returns_200(self):
url = self.project.web_url_for('project_wiki_view', wname='home')
res = self.app.get(url)
assert_equal(res.status_code, 200)
def test_wiki_url_404_with_no_write_permission(self): # and not public
url = self.project.web_url_for('project_wiki_view', wname='somerandomid')
res = self.app.get(url, auth=self.user.auth)
assert_equal(res.status_code, 200)
res = self.app.get(url, expect_errors=True)
assert_equal(res.status_code, 404)
@mock.patch('addons.wiki.utils.broadcast_to_sharejs')
def test_wiki_deleted_404_with_no_write_permission(self, mock_sharejs):
self.project.update_node_wiki('funpage', 'Version 1', Auth(self.user))
self.project.save()
url = self.project.web_url_for('project_wiki_view', wname='funpage')
res = self.app.get(url)
assert_equal(res.status_code, 200)
delete_url = self.project.api_url_for('project_wiki_delete', wname='funpage')
self.app.delete(delete_url, auth=self.user.auth)
res = self.app.get(url, expect_errors=True)
assert_equal(res.status_code, 404)
def test_wiki_url_with_path_get_returns_200(self):
self.project.update_node_wiki('funpage', 'Version 1', Auth(self.user))
self.project.update_node_wiki('funpage', 'Version 2', Auth(self.user))
self.project.save()
url = self.project.web_url_for(
'project_wiki_view',
wname='funpage',
) + '?view&compare=1&edit'
res = self.app.get(url, auth=self.user.auth)
assert_equal(res.status_code, 200)
def test_wiki_url_with_edit_get_returns_403_with_no_write_permission(self):
self.project.update_node_wiki('funpage', 'Version 1', Auth(self.user))
self.project.update_node_wiki('funpage', 'Version 2', Auth(self.user))
self.project.save()
url = self.project.web_url_for(
'project_wiki_view',
wname='funpage',
compare=1,
)
res = self.app.get(url)
assert_equal(res.status_code, 200)
url = self.project.web_url_for(
'project_wiki_view',
wname='funpage',
) + '?edit'
res = self.app.get(url, expect_errors=True)
assert_equal(res.status_code, 403)
# Check publicly editable
wiki = self.project.get_addon('wiki')
wiki.set_editing(permissions=True, auth=self.consolidate_auth, log=True)
res = self.app.get(url, auth=AuthUserFactory().auth, expect_errors=False)
assert_equal(res.status_code, 200)
# Check publicly editable but not logged in
res = self.app.get(url, expect_errors=True)
assert_equal(res.status_code, 401)
def test_wiki_url_for_pointer_returns_200(self):
# TODO: explain how this tests a pointer
project = ProjectFactory(is_public=True)
self.project.add_pointer(project, Auth(self.project.creator), save=True)
url = self.project.web_url_for('project_wiki_view', wname='home')
res = self.app.get(url)
assert_equal(res.status_code, 200)
@pytest.mark.skip('#TODO: Fix or mock mongodb for sharejs')
def test_wiki_draft_returns_200(self):
url = self.project.api_url_for('wiki_page_draft', wname='somerandomid')
res = self.app.get(url, auth=self.user.auth)
assert_equal(res.status_code, 200)
def test_wiki_content_returns_200(self):
url = self.project.api_url_for('wiki_page_content', wname='somerandomid')
res = self.app.get(url, auth=self.user.auth)
assert_equal(res.status_code, 200)
@mock.patch('addons.wiki.models.WikiVersion.rendered_before_update', new_callable=mock.PropertyMock)
def test_wiki_content_rendered_before_update(self, mock_rendered_before_update):
content = 'Some content'
self.project.update_node_wiki('somerandomid', content, Auth(self.user))
self.project.save()
mock_rendered_before_update.return_value = True
url = self.project.api_url_for('wiki_page_content', wname='somerandomid')
res = self.app.get(url, auth=self.user.auth)
assert_true(res.json['rendered_before_update'])
mock_rendered_before_update.return_value = False
res = self.app.get(url, auth=self.user.auth)
assert_false(res.json['rendered_before_update'])
def test_wiki_url_for_component_returns_200(self):
component = NodeFactory(parent=self.project, is_public=True)
url = component.web_url_for('project_wiki_view', wname='home')
res = self.app.get(url)
assert_equal(res.status_code, 200)
def test_project_wiki_edit_post(self):
self.project.update_node_wiki(
'home',
content='old content',
auth=Auth(self.project.creator)
)
url = self.project.web_url_for('project_wiki_edit_post', wname='home')
res = self.app.post(url, {'content': 'new content'}, auth=self.user.auth).follow()
assert_equal(res.status_code, 200)
self.project.reload()
# page was updated with new content
new_wiki = self.project.get_wiki_version('home')
assert_equal(new_wiki.content, 'new content')
def test_project_wiki_edit_post_with_new_wname_and_no_content(self):
# note: forward slashes not allowed in page_name
page_name = fake.catch_phrase().replace('/', ' ')
old_wiki_page_count = WikiVersion.objects.all().count()
url = self.project.web_url_for('project_wiki_edit_post', wname=page_name)
# User submits to edit form with no content
res = self.app.post(url, {'content': ''}, auth=self.user.auth).follow()
assert_equal(res.status_code, 200)
new_wiki_page_count = WikiVersion.objects.all().count()
# A new wiki page was created in the db
assert_equal(new_wiki_page_count, old_wiki_page_count + 1)
# Node now has the new wiki page associated with it
self.project.reload()
new_page = self.project.get_wiki_version(page_name)
assert_is_not_none(new_page)
def test_project_wiki_edit_post_with_new_wname_and_content(self):
# note: forward slashes not allowed in page_name
page_name = fake.catch_phrase().replace('/', ' ')
page_content = fake.bs()
old_wiki_page_count = WikiVersion.objects.all().count()
url = self.project.web_url_for('project_wiki_edit_post', wname=page_name)
# User submits to edit form with no content
res = self.app.post(url, {'content': page_content}, auth=self.user.auth).follow()
assert_equal(res.status_code, 200)
new_wiki_page_count = WikiVersion.objects.all().count()
# A new wiki page was created in the db
assert_equal(new_wiki_page_count, old_wiki_page_count + 1)
# Node now has the new wiki page associated with it
self.project.reload()
new_page = self.project.get_wiki_version(page_name)
assert_is_not_none(new_page)
# content was set
assert_equal(new_page.content, page_content)
def test_project_wiki_edit_post_with_non_ascii_title(self):
# regression test for https://github.com/CenterForOpenScience/openscienceframework.org/issues/1040
# wname doesn't exist in the db, so it will be created
new_wname = u'øˆ∆´ƒøßå√ß'
url = self.project.web_url_for('project_wiki_edit_post', wname=new_wname)
res = self.app.post(url, {'content': 'new content'}, auth=self.user.auth).follow()
assert_equal(res.status_code, 200)
self.project.reload()
wiki = self.project.get_wiki_page(new_wname)
assert_equal(wiki.page_name, new_wname)
# updating content should return correct url as well.
res = self.app.post(url, {'content': 'updated content'}, auth=self.user.auth).follow()
assert_equal(res.status_code, 200)
def test_project_wiki_edit_post_with_special_characters(self):
new_wname = 'title: ' + SPECIAL_CHARACTERS_ALLOWED
new_wiki_content = 'content: ' + SPECIAL_CHARACTERS_ALL
url = self.project.web_url_for('project_wiki_edit_post', wname=new_wname)
res = self.app.post(url, {'content': new_wiki_content}, auth=self.user.auth).follow()
assert_equal(res.status_code, 200)
self.project.reload()
wiki = self.project.get_wiki_version(new_wname)
assert_equal(wiki.wiki_page.page_name, new_wname)
assert_equal(wiki.content, new_wiki_content)
assert_equal(res.status_code, 200)
def test_wiki_edit_get_home(self):
url = self.project.web_url_for('project_wiki_view', wname='home')
res = self.app.get(url, auth=self.user.auth)
assert_equal(res.status_code, 200)
def test_project_wiki_view_scope(self):
self.project.update_node_wiki('home', 'Version 1', Auth(self.user))
self.project.update_node_wiki('home', 'Version 2', Auth(self.user))
self.project.save()
url = self.project.web_url_for('project_wiki_view', wname='home', view=2)
res = self.app.get(url, auth=self.user.auth)
assert_equal(res.status_code, 200)
url = self.project.web_url_for('project_wiki_view', wname='home', view=3)
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
url = self.project.web_url_for('project_wiki_view', wname='home', view=0)
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
def test_project_wiki_compare_returns_200(self):
self.project.update_node_wiki('home', 'updated content', Auth(self.user))
self.project.save()
url = self.project.web_url_for('project_wiki_view', wname='home') + '?compare'
res = self.app.get(url, auth=self.user.auth)
assert_equal(res.status_code, 200)
def test_project_wiki_compare_scope(self):
self.project.update_node_wiki('home', 'Version 1', Auth(self.user))
self.project.update_node_wiki('home', 'Version 2', Auth(self.user))
self.project.save()
url = self.project.web_url_for('project_wiki_view', wname='home', compare=2)
res = self.app.get(url, auth=self.user.auth)
assert_equal(res.status_code, 200)
url = self.project.web_url_for('project_wiki_view', wname='home', compare=3)
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
url = self.project.web_url_for('project_wiki_view', wname='home', compare=0)
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
def test_wiki_page_creation_strips_whitespace(self):
# Regression test for:
# https://github.com/CenterForOpenScience/openscienceframework.org/issues/1080
# wname has a trailing space
url = self.project.web_url_for('project_wiki_view', wname='cupcake ')
res = self.app.post(url, {'content': 'blah'}, auth=self.user.auth).follow()
assert_equal(res.status_code, 200)
self.project.reload()
wiki = self.project.get_wiki_version('cupcake')
assert_is_not_none(wiki)
def test_wiki_validate_name(self):
url = self.project.api_url_for('project_wiki_validate_name', wname='Capslock')
res = self.app.get(url, auth=self.user.auth)
assert_equal(res.status_code, 200)
def test_wiki_validate_name_creates_blank_page(self):
url = self.project.api_url_for('project_wiki_validate_name', wname='newpage', auth=self.consolidate_auth)
self.app.get(url, auth=self.user.auth)
self.project.reload()
assert_is_not_none(self.project.get_wiki_page('newpage'))
def test_wiki_validate_name_collision_doesnt_clear(self):
self.project.update_node_wiki('oldpage', 'some text', self.consolidate_auth)
url = self.project.api_url_for('project_wiki_validate_name', wname='oldpage', auth=self.consolidate_auth)
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 409)
url = self.project.api_url_for('wiki_page_content', wname='oldpage', auth=self.consolidate_auth)
res = self.app.get(url, auth=self.user.auth)
assert_equal(res.json['wiki_content'], 'some text')
def test_wiki_validate_name_cannot_create_home(self):
url = self.project.api_url_for('project_wiki_validate_name', wname='home')
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 409)
def test_project_wiki_validate_name_mixed_casing(self):
url = self.project.api_url_for('project_wiki_validate_name', wname='CaPsLoCk')
res = self.app.get(url, auth=self.user.auth)
assert_equal(res.status_code, 200)
self.project.update_node_wiki('CaPsLoCk', 'hello', self.consolidate_auth)
assert_equal(self.project.get_wiki_page('CaPsLoCk').page_name, 'CaPsLoCk')
def test_project_wiki_validate_name_display_correct_capitalization(self):
url = self.project.api_url_for('project_wiki_validate_name', wname='CaPsLoCk')
res = self.app.get(url, auth=self.user.auth)
assert_equal(res.status_code, 200)
assert_in('CaPsLoCk', res)
def test_project_wiki_validate_name_conflict_different_casing(self):
url = self.project.api_url_for('project_wiki_validate_name', wname='CAPSLOCK')
res = self.app.get(url, auth=self.user.auth)
assert_equal(res.status_code, 200)
self.project.update_node_wiki('CaPsLoCk', 'hello', self.consolidate_auth)
url = self.project.api_url_for('project_wiki_validate_name', wname='capslock')
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 409)
def test_project_dashboard_shows_no_wiki_content_text(self):
# Regression test for:
# https://github.com/CenterForOpenScience/openscienceframework.org/issues/1104
project = ProjectFactory(creator=self.user)
url = project.web_url_for('view_project')
res = self.app.get(url, auth=self.user.auth)
assert_in('Add important information, links, or images here to describe your project.', res)
def test_project_dashboard_wiki_wname_get_shows_non_ascii_characters(self):
# Regression test for:
# https://github.com/CenterForOpenScience/openscienceframework.org/issues/1104
text = u'你好'
self.project.update_node_wiki('home', text, Auth(self.user))
# can view wiki preview from project dashboard
url = self.project.web_url_for('view_project')
res = self.app.get(url, auth=self.user.auth)
assert_in(text, res)
def test_project_wiki_home_api_route(self):
url = self.project.api_url_for('project_wiki_home')
res = self.app.get(url, auth=self.user.auth)
assert_equals(res.status_code, 302)
# TODO: should this route exist? it redirects you to the web_url_for, not api_url_for.
# page_url = self.project.api_url_for('project_wiki_view', wname='home')
# assert_in(page_url, res.location)
def test_project_wiki_home_web_route(self):
page_url = self.project.web_url_for('project_wiki_view', wname='home', _guid=True)
url = self.project.web_url_for('project_wiki_home')
res = self.app.get(url, auth=self.user.auth)
assert_equals(res.status_code, 302)
assert_in(page_url, res.location)
def test_wiki_id_url_get_returns_302_and_resolves(self):
name = 'page by id'
self.project.update_node_wiki(name, 'some content', Auth(self.project.creator))
page = self.project.get_wiki_page(name)
page_url = self.project.web_url_for('project_wiki_view', wname=page.page_name, _guid=True)
url = self.project.web_url_for('project_wiki_id_page', wid=page._primary_key, _guid=True)
res = self.app.get(url)
assert_equal(res.status_code, 302)
assert_in(page_url, res.location)
res = res.follow()
assert_equal(res.status_code, 200)
assert_in(page_url, res.request.url)
def test_wiki_id_url_get_returns_404(self):
url = self.project.web_url_for('project_wiki_id_page', wid='12345', _guid=True)
res = self.app.get(url, expect_errors=True)
assert_equal(res.status_code, 404)
def test_home_is_capitalized_in_web_view(self):
url = self.project.web_url_for('project_wiki_home', wid='home', _guid=True)
res = self.app.get(url, auth=self.user.auth).follow(auth=self.user.auth)
page_name_elem = res.html.find('span', {'id': 'pageName'})
assert_in('Home', page_name_elem.text)
def test_wiki_widget_no_content(self):
res = serialize_wiki_widget(self.project)
assert_is_none(res['wiki_content'])
def test_wiki_widget_short_content_no_cutoff(self):
short_content = 'a' * 150
self.project.update_node_wiki('home', short_content, Auth(self.user))
res = serialize_wiki_widget(self.project)
assert_in(short_content, res['wiki_content'])
assert_not_in('...', res['wiki_content'])
assert_false(res['more'])
def test_wiki_widget_long_content_cutoff(self):
long_content = 'a' * 600
self.project.update_node_wiki('home', long_content, Auth(self.user))
res = serialize_wiki_widget(self.project)
assert_less(len(res['wiki_content']), 520) # wiggle room for closing tags
assert_in('...', res['wiki_content'])
assert_true(res['more'])
def test_wiki_widget_with_multiple_short_pages_has_more(self):
project = ProjectFactory(is_public=True, creator=self.user)
short_content = 'a' * 150
project.update_node_wiki('home', short_content, Auth(self.user))
project.update_node_wiki('andanotherone', short_content, Auth(self.user))
res = serialize_wiki_widget(project)
assert_true(res['more'])
@mock.patch('addons.wiki.models.WikiVersion.rendered_before_update', new_callable=mock.PropertyMock)
def test_wiki_widget_rendered_before_update(self, mock_rendered_before_update):
# New pages use js renderer
mock_rendered_before_update.return_value = False
self.project.update_node_wiki('home', 'updated content', Auth(self.user))
res = serialize_wiki_widget(self.project)
assert_false(res['rendered_before_update'])
# Old pages use a different version of js render
mock_rendered_before_update.return_value = True
res = serialize_wiki_widget(self.project)
assert_true(res['rendered_before_update'])
def test_read_only_users_cannot_view_edit_pane(self):
url = self.project.web_url_for('project_wiki_view', wname='home')
# No write permissions
res = self.app.get(url)
assert_equal(res.status_code, 200)
assert_not_in('data-osf-panel="Edit"', res.text)
# Write permissions
res = self.app.get(url, auth=self.user.auth)
assert_equal(res.status_code, 200)
assert_in('data-osf-panel="Edit"', res.text)
# Publicly editable
wiki = self.project.get_addon('wiki')
wiki.set_editing(permissions=True, auth=self.consolidate_auth, log=True)
res = self.app.get(url, auth=AuthUserFactory().auth)
assert_equal(res.status_code, 200)
assert_in('data-osf-panel="Edit"', res.text)
# Publicly editable but not logged in
res = self.app.get(url)
assert_equal(res.status_code, 200)
assert_not_in('data-osf-panel="Edit"', res.text)
def test_wiki_widget_not_show_in_registration_for_contributor(self):
registration = RegistrationFactory(project=self.project)
res = self.app.get(
registration.web_url_for('view_project'),
auth=self.user.auth
)
assert_equal(res.status_code, 200)
assert_not_in('Add important information, links, or images here to describe your project.', res.text)
class TestViewHelpers(OsfTestCase):
def setUp(self):
super(TestViewHelpers, self).setUp()
self.project = ProjectFactory()
self.wname = 'New page'
self.project.update_node_wiki(self.wname, 'some content', Auth(self.project.creator))
def test_get_wiki_web_urls(self):
urls = views._get_wiki_web_urls(self.project, self.wname)
assert_equal(urls['base'], self.project.web_url_for('project_wiki_home', _guid=True))
assert_equal(urls['edit'], self.project.web_url_for('project_wiki_view', wname=self.wname, _guid=True))
assert_equal(urls['home'], self.project.web_url_for('project_wiki_home', _guid=True))
assert_equal(urls['page'], self.project.web_url_for('project_wiki_view', wname=self.wname, _guid=True))
def test_get_wiki_api_urls(self):
urls = views._get_wiki_api_urls(self.project, self.wname)
assert_equal(urls['base'], self.project.api_url_for('project_wiki_home'))
assert_equal(urls['delete'], self.project.api_url_for('project_wiki_delete', wname=self.wname))
assert_equal(urls['rename'], self.project.api_url_for('project_wiki_rename', wname=self.wname))
assert_equal(urls['content'], self.project.api_url_for('wiki_page_content', wname=self.wname))
assert_equal(urls['settings'], self.project.api_url_for('edit_wiki_settings'))
class TestWikiDelete(OsfTestCase):
def setUp(self):
super(TestWikiDelete, self).setUp()
creator = AuthUserFactory()
self.project = ProjectFactory(is_public=True, creator=creator)
self.consolidate_auth = Auth(user=self.project.creator)
self.auth = creator.auth
self.project.update_node_wiki('Elephants', 'Hello Elephants', self.consolidate_auth)
self.project.update_node_wiki('Lions', 'Hello Lions', self.consolidate_auth)
self.elephant_wiki = self.project.get_wiki_page('Elephants')
self.lion_wiki = self.project.get_wiki_page('Lions')
@mock.patch('addons.wiki.utils.broadcast_to_sharejs')
    def test_project_wiki_delete(self, mock_sharejs):
page = self.elephant_wiki
assert_equal(page.page_name.lower(), 'elephants')
assert_equal(page.deleted, None)
url = self.project.api_url_for(
'project_wiki_delete',
wname='Elephants'
)
mock_now = datetime.datetime(2017, 3, 16, 11, 00, tzinfo=pytz.utc)
with mock.patch.object(timezone, 'now', return_value=mock_now):
self.app.delete(
url,
auth=self.auth
)
self.project.reload()
page.reload()
assert_equal(page.deleted, mock_now)
@mock.patch('addons.wiki.utils.broadcast_to_sharejs')
def test_project_wiki_delete_w_valid_special_characters(self, mock_sharejs):
# TODO: Need to understand why calling update_node_wiki with failure causes transaction rollback issue later
# with assert_raises(NameInvalidError):
# self.project.update_node_wiki(SPECIAL_CHARACTERS_ALL, 'Hello Special Characters', self.consolidate_auth)
self.project.update_node_wiki(SPECIAL_CHARACTERS_ALLOWED, 'Hello Special Characters', self.consolidate_auth)
self.special_characters_wiki = self.project.get_wiki_page(SPECIAL_CHARACTERS_ALLOWED)
assert_equal(self.special_characters_wiki.page_name, SPECIAL_CHARACTERS_ALLOWED)
url = self.project.api_url_for(
'project_wiki_delete',
wname=SPECIAL_CHARACTERS_ALLOWED
)
mock_now = datetime.datetime(2017, 3, 16, 11, 00, tzinfo=pytz.utc)
with mock.patch.object(timezone, 'now', return_value=mock_now):
self.app.delete(
url,
auth=self.auth
)
self.project.reload()
self.special_characters_wiki.reload()
assert_equal(self.special_characters_wiki.deleted, mock_now)
@mock.patch('addons.wiki.utils.broadcast_to_sharejs')
def test_wiki_versions_do_not_reappear_after_delete(self, mock_sharejs):
# Creates a wiki page
self.project.update_node_wiki('Hippos', 'Hello hippos', self.consolidate_auth)
# Edits it two times
wiki_page = self.project.get_wiki_page('Hippos')
assert_equal(wiki_page.deleted, None)
assert_equal(wiki_page.current_version_number, 1)
self.project.update_node_wiki('Hippos', 'Hello hippopotamus', self.consolidate_auth)
wiki_page.reload()
assert_equal(wiki_page.current_version_number, 2)
# Deletes the wiki page
mock_now = datetime.datetime(2017, 3, 16, 11, 00, tzinfo=pytz.utc)
with mock.patch.object(timezone, 'now', return_value=mock_now):
self.project.delete_node_wiki('Hippos', self.consolidate_auth)
wiki_page.reload()
assert_equal(wiki_page.deleted, mock_now)
# Creates new wiki with same name as deleted wiki
self.project.update_node_wiki('Hippos', 'Hello again hippos', self.consolidate_auth)
wiki_page = self.project.get_wiki_page('Hippos')
assert_equal(wiki_page.current_version_number, 1)
self.project.update_node_wiki('Hippos', 'Hello again hippopotamus', self.consolidate_auth)
wiki_page.reload()
assert_equal(wiki_page.current_version_number, 2)
class TestWikiRename(OsfTestCase):
def setUp(self):
super(TestWikiRename, self).setUp()
creator = AuthUserFactory()
self.project = ProjectFactory(is_public=True, creator=creator)
self.consolidate_auth = Auth(user=self.project.creator)
self.auth = creator.auth
self.project.update_node_wiki('home', 'Hello world', self.consolidate_auth)
self.page_name = 'page2'
self.project.update_node_wiki(self.page_name, 'content', self.consolidate_auth)
self.project.save()
self.page = self.project.get_wiki_version(self.page_name)
self.wiki = self.project.get_wiki_page('home')
self.url = self.project.api_url_for(
'project_wiki_rename',
wname=self.page_name,
)
@mock.patch('addons.wiki.utils.broadcast_to_sharejs')
def test_rename_wiki_page_valid(self, mock_sharejs, new_name=u'away'):
self.app.put_json(
self.url,
{'value': new_name},
auth=self.auth
)
self.project.reload()
old_wiki = self.project.get_wiki_version(self.page_name)
assert_false(old_wiki)
new_wiki = self.project.get_wiki_version(new_name)
assert_true(new_wiki)
assert_equal(new_wiki.wiki_page._primary_key, self.page.wiki_page._primary_key)
assert_equal(new_wiki.content, self.page.content)
assert_equal(new_wiki.identifier, self.page.identifier)
def test_rename_wiki_page_invalid(self, new_name=u'invalid/name'):
res = self.app.put_json(
self.url,
{'value': new_name},
auth=self.auth,
expect_errors=True,
)
assert_equal(http.BAD_REQUEST, res.status_code)
assert_equal(res.json['message_short'], 'Invalid name')
assert_equal(res.json['message_long'], 'Page name cannot contain forward slashes.')
self.project.reload()
old_wiki = self.project.get_wiki_page(self.page_name)
assert_true(old_wiki)
def test_rename_wiki_page_duplicate(self):
self.project.update_node_wiki('away', 'Hello world', self.consolidate_auth)
new_name = 'away'
res = self.app.put_json(
self.url,
{'value': new_name},
auth=self.auth,
expect_errors=True,
)
assert_equal(res.status_code, 409)
def test_rename_wiki_name_not_found(self):
url = self.project.api_url_for('project_wiki_rename', wname='not_found_page_name')
res = self.app.put_json(url, {'value': 'new name'},
auth=self.auth, expect_errors=True)
assert_equal(res.status_code, 404)
def test_cannot_rename_wiki_page_to_home(self):
user = AuthUserFactory()
# A fresh project where the 'home' wiki page has no content
project = ProjectFactory(creator=user)
project.update_node_wiki('Hello', 'hello world', Auth(user=user))
url = project.api_url_for('project_wiki_rename', wname='Hello')
res = self.app.put_json(url, {'value': 'home'}, auth=user.auth, expect_errors=True)
assert_equal(res.status_code, 409)
def test_rename_wiki_name_with_value_missing(self):
# value is missing
res = self.app.put_json(self.url, {}, auth=self.auth, expect_errors=True)
assert_equal(res.status_code, 400)
def test_rename_wiki_page_duplicate_different_casing(self):
# attempt to rename 'page2' from setup to different case of 'away'.
old_name = 'away'
new_name = 'AwAy'
self.project.update_node_wiki(old_name, 'Hello world', self.consolidate_auth)
res = self.app.put_json(
self.url,
{'value': new_name},
auth=self.auth,
expect_errors=True
)
assert_equal(res.status_code, 409)
@mock.patch('addons.wiki.utils.broadcast_to_sharejs')
def test_rename_wiki_page_same_name_different_casing(self, mock_sharejs):
old_name = 'away'
new_name = 'AWAY'
self.project.update_node_wiki(old_name, 'Hello world', self.consolidate_auth)
url = self.project.api_url_for('project_wiki_rename', wname=old_name)
res = self.app.put_json(
url,
{'value': new_name},
auth=self.auth,
expect_errors=False
)
assert_equal(res.status_code, 200)
def test_cannot_rename_home_page(self):
url = self.project.api_url_for('project_wiki_rename', wname='home')
res = self.app.put_json(url, {'value': 'homelol'}, auth=self.auth, expect_errors=True)
assert_equal(res.status_code, 400)
@mock.patch('addons.wiki.utils.broadcast_to_sharejs')
def test_can_rename_to_a_deleted_page(self, mock_sharejs):
self.project.delete_node_wiki(self.page_name, self.consolidate_auth)
self.project.save()
# Creates a new page
self.project.update_node_wiki('page3', 'moarcontent', self.consolidate_auth)
self.project.save()
# Renames the wiki to the deleted page
url = self.project.api_url_for('project_wiki_rename', wname='page3')
res = self.app.put_json(url, {'value': self.page_name}, auth=self.auth)
assert_equal(res.status_code, 200)
def test_rename_wiki_page_with_valid_html(self):
# script is not an issue since data is sanitized via bleach or mako before display.
self.test_rename_wiki_page_valid(new_name=u'<html>hello<html>')
def test_rename_wiki_page_with_invalid_html(self):
# script is not an issue since data is sanitized via bleach or mako before display.
# with that said routes still do not accept forward slashes
self.test_rename_wiki_page_invalid(new_name=u'<html>hello</html>')
def test_rename_wiki_page_with_non_ascii_title(self):
self.test_rename_wiki_page_valid(new_name=u'øˆ∆´ƒøßå√ß')
def test_rename_wiki_page_with_valid_special_character_title(self):
self.test_rename_wiki_page_valid(new_name=SPECIAL_CHARACTERS_ALLOWED)
def test_rename_wiki_page_with_invalid_special_character_title(self):
self.test_rename_wiki_page_invalid(new_name=SPECIAL_CHARACTERS_ALL)
class TestWikiLinks(OsfTestCase):
def test_links(self):
user = AuthUserFactory()
project = ProjectFactory(creator=user)
wiki_page = WikiFactory(
user=user,
node=project,
)
wiki = WikiVersionFactory(
content='[[wiki2]]',
wiki_page=wiki_page,
)
assert_in(
'/{}/wiki/wiki2/'.format(project._id),
wiki.html(project),
)
# Regression test for https://sentry.osf.io/osf/production/group/310/
def test_bad_links(self):
content = u'<span></span><iframe src="http://httpbin.org/"></iframe>'
user = AuthUserFactory()
node = ProjectFactory()
wiki_page = WikiFactory(
user=user,
node=node,
)
wiki = WikiVersionFactory(
content=content,
wiki_page=wiki_page,
)
        assert_equal(
            '<p><span></span><iframe src="<a href="http://httpbin.org/" rel="nofollow">http://httpbin.org/</a>"></iframe></p>',
            wiki.html(node)
        )
class TestWikiUuid(OsfTestCase):
def setUp(self):
super(TestWikiUuid, self).setUp()
self.user = AuthUserFactory()
self.project = ProjectFactory(is_public=True, creator=self.user)
self.wname = 'foo.bar'
self.wkey = to_mongo_key(self.wname)
def test_uuid_generated_once(self):
assert_is_none(self.project.wiki_private_uuids.get(self.wkey))
url = self.project.web_url_for('project_wiki_view', wname=self.wname)
res = self.app.get(url, auth=self.user.auth)
assert_equal(res.status_code, 200)
self.project.reload()
private_uuid = self.project.wiki_private_uuids.get(self.wkey)
assert_true(private_uuid)
assert_not_in(private_uuid, res.body)
assert_in(get_sharejs_uuid(self.project, self.wname), res.body)
# Revisit page; uuid has not changed
res = self.app.get(url, auth=self.user.auth)
assert_equal(res.status_code, 200)
self.project.reload()
assert_equal(private_uuid, self.project.wiki_private_uuids.get(self.wkey))
def test_uuid_not_visible_without_write_permission(self):
self.project.update_node_wiki(self.wname, 'some content', Auth(self.user))
self.project.save()
assert_is_none(self.project.wiki_private_uuids.get(self.wkey))
url = self.project.web_url_for('project_wiki_view', wname=self.wname)
res = self.app.get(url, auth=self.user.auth)
assert_equal(res.status_code, 200)
self.project.reload()
private_uuid = self.project.wiki_private_uuids.get(self.wkey)
assert_true(private_uuid)
assert_not_in(private_uuid, res.body)
assert_in(get_sharejs_uuid(self.project, self.wname), res.body)
# Users without write permission should not be able to access
res = self.app.get(url)
assert_equal(res.status_code, 200)
assert_not_in(get_sharejs_uuid(self.project, self.wname), res.body)
def test_uuid_not_generated_without_write_permission(self):
self.project.update_node_wiki(self.wname, 'some content', Auth(self.user))
self.project.save()
assert_is_none(self.project.wiki_private_uuids.get(self.wkey))
url = self.project.web_url_for('project_wiki_view', wname=self.wname)
res = self.app.get(url)
assert_equal(res.status_code, 200)
self.project.reload()
private_uuid = self.project.wiki_private_uuids.get(self.wkey)
assert_is_none(private_uuid)
def test_uuids_differ_between_pages(self):
wname1 = 'foo.bar'
url1 = self.project.web_url_for('project_wiki_view', wname=wname1)
res1 = self.app.get(url1, auth=self.user.auth)
assert_equal(res1.status_code, 200)
wname2 = 'bar.baz'
url2 = self.project.web_url_for('project_wiki_view', wname=wname2)
res2 = self.app.get(url2, auth=self.user.auth)
assert_equal(res2.status_code, 200)
self.project.reload()
uuid1 = get_sharejs_uuid(self.project, wname1)
uuid2 = get_sharejs_uuid(self.project, wname2)
assert_not_equal(uuid1, uuid2)
assert_in(uuid1, res1)
assert_in(uuid2, res2)
assert_not_in(uuid1, res2)
assert_not_in(uuid2, res1)
def test_uuids_differ_between_forks(self):
url = self.project.web_url_for('project_wiki_view', wname=self.wname)
project_res = self.app.get(url, auth=self.user.auth)
assert_equal(project_res.status_code, 200)
self.project.reload()
fork = self.project.fork_node(Auth(self.user))
assert_true(fork.is_fork_of(self.project))
fork_url = fork.web_url_for('project_wiki_view', wname=self.wname)
fork_res = self.app.get(fork_url, auth=self.user.auth)
assert_equal(fork_res.status_code, 200)
fork.reload()
# uuids are not copied over to forks
assert_not_equal(
self.project.wiki_private_uuids.get(self.wkey),
fork.wiki_private_uuids.get(self.wkey)
)
project_uuid = get_sharejs_uuid(self.project, self.wname)
fork_uuid = get_sharejs_uuid(fork, self.wname)
assert_not_equal(project_uuid, fork_uuid)
assert_in(project_uuid, project_res)
assert_in(fork_uuid, fork_res)
assert_not_in(project_uuid, fork_res)
assert_not_in(fork_uuid, project_res)
@pytest.mark.skip('#TODO: Fix or mock mongodb for sharejs')
@mock.patch('addons.wiki.utils.broadcast_to_sharejs')
def test_migration_does_not_affect_forks(self, mock_sharejs):
original_uuid = generate_private_uuid(self.project, self.wname)
self.project.update_node_wiki(self.wname, 'Hello world', Auth(self.user))
fork = self.project.fork_node(Auth(self.user))
assert_equal(fork.wiki_private_uuids.get(self.wkey), None)
migrate_uuid(self.project, self.wname)
assert_not_equal(original_uuid, self.project.wiki_private_uuids.get(self.wkey))
assert_equal(fork.wiki_private_uuids.get(self.wkey), None)
@mock.patch('addons.wiki.utils.broadcast_to_sharejs')
def test_uuid_persists_after_delete(self, mock_sharejs):
assert_is_none(self.project.wiki_private_uuids.get(self.wkey))
# Create wiki page
self.project.update_node_wiki(self.wname, 'Hello world', Auth(self.user))
# Visit wiki edit page
edit_url = self.project.web_url_for('project_wiki_view', wname=self.wname)
res = self.app.get(edit_url, auth=self.user.auth)
assert_equal(res.status_code, 200)
self.project.reload()
original_private_uuid = self.project.wiki_private_uuids.get(self.wkey)
original_sharejs_uuid = get_sharejs_uuid(self.project, self.wname)
# Delete wiki
delete_url = self.project.api_url_for('project_wiki_delete', wname=self.wname)
res = self.app.delete(delete_url, auth=self.user.auth)
assert_equal(res.status_code, 200)
self.project.reload()
assert_equal(original_private_uuid, self.project.wiki_private_uuids.get(self.wkey))
# Revisit wiki edit page
res = self.app.get(edit_url, auth=self.user.auth)
assert_equal(res.status_code, 200)
self.project.reload()
assert_equal(original_private_uuid, self.project.wiki_private_uuids.get(self.wkey))
assert_in(original_sharejs_uuid, res.body)
@mock.patch('addons.wiki.utils.broadcast_to_sharejs')
def test_uuid_persists_after_rename(self, mock_sharejs):
new_wname = 'barbaz'
new_wkey = to_mongo_key(new_wname)
assert_is_none(self.project.wiki_private_uuids.get(self.wkey))
assert_is_none(self.project.wiki_private_uuids.get(new_wkey))
# Create wiki page
self.project.update_node_wiki(self.wname, 'Hello world', Auth(self.user))
wiki_page = self.project.get_wiki_page(self.wname)
# Visit wiki edit page
original_edit_url = self.project.web_url_for('project_wiki_view', wname=self.wname)
res = self.app.get(original_edit_url, auth=self.user.auth)
assert_equal(res.status_code, 200)
self.project.reload()
original_private_uuid = self.project.wiki_private_uuids.get(self.wkey)
original_sharejs_uuid = get_sharejs_uuid(self.project, self.wname)
# Rename wiki
rename_url = self.project.api_url_for('project_wiki_rename', wname=self.wname)
res = self.app.put_json(
rename_url,
{'value': new_wname, 'pk': wiki_page._id},
auth=self.user.auth,
)
assert_equal(res.status_code, 200)
self.project.reload()
assert_is_none(self.project.wiki_private_uuids.get(self.wkey))
assert_equal(original_private_uuid, self.project.wiki_private_uuids.get(new_wkey))
# Revisit original wiki edit page
res = self.app.get(original_edit_url, auth=self.user.auth)
assert_equal(res.status_code, 200)
self.project.reload()
assert_not_equal(original_private_uuid, self.project.wiki_private_uuids.get(self.wkey))
assert_not_in(original_sharejs_uuid, res.body)
@pytest.mark.skip('#TODO: Fix or mock mongodb for sharejs')
class TestWikiShareJSMongo(OsfTestCase):
@classmethod
def setUpClass(cls):
super(TestWikiShareJSMongo, cls).setUpClass()
cls._original_sharejs_db_name = settings.SHAREJS_DB_NAME
settings.SHAREJS_DB_NAME = 'sharejs_test'
def setUp(self):
super(TestWikiShareJSMongo, self).setUp()
self.user = AuthUserFactory()
self.project = ProjectFactory(is_public=True, creator=self.user)
self.wname = 'foo.bar'
self.wkey = to_mongo_key(self.wname)
self.private_uuid = generate_private_uuid(self.project, self.wname)
self.sharejs_uuid = get_sharejs_uuid(self.project, self.wname)
# Create wiki page
self.project.update_node_wiki(self.wname, 'Hello world', Auth(self.user))
self.wiki_page = self.project.get_wiki_page(self.wname)
# Insert mongo data for current project/wiki
self.db = share_db()
example_uuid = EXAMPLE_DOCS[0]['_id']
self.example_docs = deepcopy(EXAMPLE_DOCS)
self.example_docs[0]['_id'] = self.sharejs_uuid
self.db.docs.insert(self.example_docs)
self.example_ops = deepcopy(EXAMPLE_OPS)
for item in self.example_ops:
item['_id'] = item['_id'].replace(example_uuid, self.sharejs_uuid)
item['name'] = item['name'].replace(example_uuid, self.sharejs_uuid)
self.db.docs_ops.insert(self.example_ops)
@mock.patch('addons.wiki.utils.broadcast_to_sharejs')
def test_migrate_uuid(self, mock_sharejs):
migrate_uuid(self.project, self.wname)
assert_is_none(self.db.docs.find_one({'_id': self.sharejs_uuid}))
assert_is_none(self.db.docs_ops.find_one({'name': self.sharejs_uuid}))
new_sharejs_uuid = get_sharejs_uuid(self.project, self.wname)
assert_equal(
EXAMPLE_DOCS[0]['_data'],
self.db.docs.find_one({'_id': new_sharejs_uuid})['_data']
)
assert_equal(
len([item for item in self.example_ops if item['name'] == self.sharejs_uuid]),
len([item for item in self.db.docs_ops.find({'name': new_sharejs_uuid})])
)
@mock.patch('addons.wiki.utils.broadcast_to_sharejs')
def test_migrate_uuid_no_mongo(self, mock_sharejs):
# Case where no edits have been made to the wiki
wname = 'bar.baz'
wkey = to_mongo_key(wname)
share_uuid = generate_private_uuid(self.project, wname)
sharejs_uuid = get_sharejs_uuid(self.project, wname)
self.project.update_node_wiki(wname, 'Hello world', Auth(self.user))
migrate_uuid(self.project, wname)
assert_not_equal(share_uuid, self.project.wiki_private_uuids.get(wkey))
assert_is_none(self.db.docs.find_one({'_id': sharejs_uuid}))
assert_is_none(self.db.docs_ops.find_one({'name': sharejs_uuid}))
@mock.patch('addons.wiki.utils.broadcast_to_sharejs')
def test_migrate_uuid_updates_node(self, mock_sharejs):
migrate_uuid(self.project, self.wname)
assert_not_equal(self.private_uuid, self.project.wiki_private_uuids[self.wkey])
@mock.patch('addons.wiki.utils.broadcast_to_sharejs')
def test_manage_contributors_updates_uuid(self, mock_sharejs):
user = UserFactory()
self.project.add_contributor(
contributor=user,
permissions=['read', 'write', 'admin'],
auth=Auth(user=self.user),
)
self.project.save()
assert_equal(self.private_uuid, self.project.wiki_private_uuids[self.wkey])
# Removing admin permission does nothing
self.project.manage_contributors(
user_dicts=[
{'id': user._id, 'permission': 'write', 'visible': True},
{'id': self.user._id, 'permission': 'admin', 'visible': True},
],
auth=Auth(user=self.user),
save=True,
)
assert_equal(self.private_uuid, self.project.wiki_private_uuids[self.wkey])
# Removing write permission migrates uuid
self.project.manage_contributors(
user_dicts=[
{'id': user._id, 'permission': 'read', 'visible': True},
{'id': self.user._id, 'permission': 'admin', 'visible': True},
],
auth=Auth(user=self.user),
save=True,
)
assert_not_equal(self.private_uuid, self.project.wiki_private_uuids[self.wkey])
@mock.patch('addons.wiki.utils.broadcast_to_sharejs')
def test_delete_share_doc(self, mock_sharejs):
delete_share_doc(self.project, self.wname)
assert_is_none(self.db.docs.find_one({'_id': self.sharejs_uuid}))
assert_is_none(self.db.docs_ops.find_one({'name': self.sharejs_uuid}))
@mock.patch('addons.wiki.utils.broadcast_to_sharejs')
def test_delete_share_doc_updates_node(self, mock_sharejs):
assert_equal(self.private_uuid, self.project.wiki_private_uuids[self.wkey])
delete_share_doc(self.project, self.wname)
assert_not_in(self.wkey, self.project.wiki_private_uuids)
def test_get_draft(self):
# draft is current with latest wiki save
current_content = self.wiki_page.get_draft(self.project)
        assert_equal(current_content, self.wiki_page.content)
# modify the sharejs wiki page contents and ensure we
# return the draft contents
new_content = 'I am a teapot'
new_time = int(time.time() * 1000) + 10000
new_version = self.example_docs[0]['_v'] + 1
self.db.docs.update(
{'_id': self.sharejs_uuid},
{'$set': {
'_v': new_version,
'_m.mtime': new_time,
'_data': new_content
}}
)
current_content = self.wiki_page.get_draft(self.project)
        assert_equal(current_content, new_content)
def tearDown(self):
super(TestWikiShareJSMongo, self).tearDown()
self.db.drop_collection('docs')
self.db.drop_collection('docs_ops')
@classmethod
def tearDownClass(cls):
share_db().connection.drop_database(settings.SHAREJS_DB_NAME)
        settings.SHAREJS_DB_NAME = cls._original_sharejs_db_name
class TestWikiUtils(OsfTestCase):
def setUp(self):
super(TestWikiUtils, self).setUp()
self.project = ProjectFactory()
def test_get_sharejs_uuid(self):
wname = 'foo.bar'
wname2 = 'bar.baz'
private_uuid = generate_private_uuid(self.project, wname)
sharejs_uuid = get_sharejs_uuid(self.project, wname)
# Provides consistent results
assert_equal(sharejs_uuid, get_sharejs_uuid(self.project, wname))
# Provides obfuscation
assert_not_in(wname, sharejs_uuid)
assert_not_in(sharejs_uuid, wname)
assert_not_in(private_uuid, sharejs_uuid)
assert_not_in(sharejs_uuid, private_uuid)
# Differs based on share uuid provided
assert_not_equal(sharejs_uuid, get_sharejs_uuid(self.project, wname2))
# Differs across projects and forks
project = ProjectFactory()
assert_not_equal(sharejs_uuid, get_sharejs_uuid(project, wname))
fork = self.project.fork_node(Auth(self.project.creator))
assert_not_equal(sharejs_uuid, get_sharejs_uuid(fork, wname))
def test_generate_share_uuid(self):
wname = 'bar.baz'
wkey = to_mongo_key(wname)
assert_is_none(self.project.wiki_private_uuids.get(wkey))
share_uuid = generate_private_uuid(self.project, wname)
self.project.reload()
assert_equal(self.project.wiki_private_uuids[wkey], share_uuid)
new_uuid = generate_private_uuid(self.project, wname)
self.project.reload()
assert_not_equal(share_uuid, new_uuid)
assert_equal(self.project.wiki_private_uuids[wkey], new_uuid)
def test_format_wiki_version(self):
assert_is_none(format_wiki_version(None, 5, False))
assert_is_none(format_wiki_version('', 5, False))
assert_equal(format_wiki_version('3', 5, False), 3)
assert_equal(format_wiki_version('4', 5, False), 'previous')
assert_equal(format_wiki_version('5', 5, False), 'current')
assert_equal(format_wiki_version('previous', 5, False), 'previous')
assert_equal(format_wiki_version('current', 5, False), 'current')
assert_equal(format_wiki_version('preview', 5, True), 'preview')
assert_equal(format_wiki_version('current', 0, False), 'current')
assert_equal(format_wiki_version('preview', 0, True), 'preview')
with assert_raises(InvalidVersionError):
format_wiki_version('1', 0, False)
with assert_raises(InvalidVersionError):
format_wiki_version('previous', 0, False)
with assert_raises(InvalidVersionError):
format_wiki_version('6', 5, False)
with assert_raises(InvalidVersionError):
format_wiki_version('0', 5, False)
with assert_raises(InvalidVersionError):
format_wiki_version('preview', 5, False)
with assert_raises(InvalidVersionError):
format_wiki_version('nonsense', 5, True)
class TestPublicWiki(OsfTestCase):
def setUp(self):
super(TestPublicWiki, self).setUp()
self.project = ProjectFactory()
self.consolidate_auth = Auth(user=self.project.creator)
self.user = AuthUserFactory()
def test_addon_on_children(self):
parent = ProjectFactory()
node = NodeFactory(parent=parent, category='project')
sub_component = NodeFactory(parent=node)
parent.delete_addon('wiki', self.consolidate_auth)
node.delete_addon('wiki', self.consolidate_auth)
sub_component.delete_addon('wiki', self.consolidate_auth)
NodeFactory(parent=node)
        has_addon_on_child_node = node.has_addon_on_children('wiki')
assert_true(has_addon_on_child_node)
def test_check_user_has_addon_excludes_deleted_components(self):
parent = ProjectFactory()
parent.delete_addon('wiki', self.consolidate_auth)
node = NodeFactory(parent=parent, category='project')
mock_now = datetime.datetime(2017, 3, 16, 11, 00, tzinfo=pytz.utc)
with mock.patch.object(timezone, 'now', return_value=mock_now):
node.delete_addon('wiki', self.consolidate_auth)
sub_component = NodeFactory(parent=node)
sub_component.is_deleted = True
sub_component.save()
        has_addon_on_child_node = node.has_addon_on_children('wiki')
assert_false(has_addon_on_child_node)
def test_set_editing(self):
parent = ProjectFactory()
node = NodeFactory(parent=parent, category='project', is_public=True)
wiki = node.get_addon('wiki')
# Set as publicly editable
wiki.set_editing(permissions=True, auth=self.consolidate_auth, log=True)
assert_true(wiki.is_publicly_editable)
assert_equal(node.logs.latest().action, 'made_wiki_public')
# Try to set public when the wiki is already public
with assert_raises(NodeStateError):
wiki.set_editing(permissions=True, auth=self.consolidate_auth, log=False)
# Turn off public editing
wiki.set_editing(permissions=False, auth=self.consolidate_auth, log=True)
assert_false(wiki.is_publicly_editable)
assert_equal(node.logs.latest().action, 'made_wiki_private')
node = NodeFactory(parent=parent, category='project')
wiki = node.get_addon('wiki')
# Try to set to private wiki already private
with assert_raises(NodeStateError):
wiki.set_editing(permissions=False, auth=self.consolidate_auth, log=False)
# Try to set public when the project is private
with assert_raises(NodeStateError):
wiki.set_editing(permissions=True, auth=self.consolidate_auth, log=False)
def test_serialize_wiki_settings(self):
node = NodeFactory(parent=self.project, creator=self.user, is_public=True)
node.get_addon('wiki').set_editing(
permissions=True, auth=self.consolidate_auth, log=True)
data = serialize_wiki_settings(self.user, [node])
expected = [{
'node': {
'id': node._id,
'title': node.title,
'url': node.url,
},
'children': [
{
'select': {
'title': 'permission',
'permission': 'public'
},
}
],
'kind': 'folder',
'nodeType': 'component',
'category': 'hypothesis',
'permissions': {'view': True}
}]
assert_equal(data, expected)
    def test_serialize_wiki_settings_with_pointer(self):
node = NodeFactory(parent=self.project, creator=self.user, is_public=True)
node.get_addon('wiki').set_editing(
permissions=True, auth=self.consolidate_auth, log=True)
node.add_pointer(self.project, Auth(self.user))
node.save()
data = serialize_wiki_settings(self.user, [node])
expected = [{
'node': {
'id': node._id,
'title': node.title,
'url': node.url,
'is_public': True
},
'children': [
{
'select': {
'title': 'permission',
'permission': 'public'
},
}
],
'kind': 'folder',
'nodeType': 'component',
'category': 'hypothesis',
'permissions': {'view': True,
'admin': True}
}]
assert_equal(data, expected)
def test_serialize_wiki_settings_disabled_wiki(self):
node = NodeFactory(parent=self.project, creator=self.user)
node.delete_addon('wiki', self.consolidate_auth)
data = serialize_wiki_settings(self.user, [node])
expected = [{'node':
{'url': node.url,
'is_public': False,
'id': node._id,
'title': node.title},
'category': 'hypothesis',
'kind': 'folder',
'nodeType': 'component',
'children': [],
'permissions': {'admin': True,
'view': True}
}]
assert_equal(data, expected)
class TestWikiMenu(OsfTestCase):
def setUp(self):
super(TestWikiMenu, self).setUp()
self.user = UserFactory()
self.project = ProjectFactory(creator=self.user, is_public=True)
self.component = NodeFactory(creator=self.user, parent=self.project, is_public=True)
self.consolidate_auth = Auth(user=self.project.creator)
self.non_contributor = UserFactory()
def test_format_home_wiki_page_no_content(self):
data = views.format_home_wiki_page(self.project)
expected = {
'page': {
'url': self.project.web_url_for('project_wiki_home'),
'name': 'Home',
'id': 'None',
}
}
assert_equal(data, expected)
def test_format_project_wiki_pages_contributor(self):
self.project.update_node_wiki('home', 'content here', self.consolidate_auth)
self.project.update_node_wiki('zoo', 'koala', self.consolidate_auth)
home_page = self.project.get_wiki_page(name='home')
zoo_page = self.project.get_wiki_page(name='zoo')
data = views.format_project_wiki_pages(self.project, self.consolidate_auth)
expected = [
{
'page': {
'url': self.project.web_url_for('project_wiki_view', wname='home', _guid=True),
'name': 'Home',
'id': home_page._primary_key,
}
},
{
'page': {
'url': self.project.web_url_for('project_wiki_view', wname='zoo', _guid=True),
'name': 'zoo',
'id': zoo_page._primary_key,
}
}
]
assert_equal(data, expected)
def test_format_project_wiki_pages_no_content_non_contributor(self):
self.project.update_node_wiki('home', 'content here', self.consolidate_auth)
self.project.update_node_wiki('zoo', '', self.consolidate_auth)
home_page = self.project.get_wiki_version(name='home')
data = views.format_project_wiki_pages(self.project, auth=Auth(self.non_contributor))
expected = [
{
'page': {
'url': self.project.web_url_for('project_wiki_view', wname='home', _guid=True),
'name': 'Home',
'id': home_page.wiki_page._primary_key,
}
}
]
assert_equal(data, expected)
def test_format_component_wiki_pages_contributor(self):
self.component.update_node_wiki('home', 'home content', self.consolidate_auth)
self.component.update_node_wiki('zoo', 'koala', self.consolidate_auth)
zoo_page = self.component.get_wiki_page(name='zoo')
expected = [
{
'page': {
'name': self.component.title,
'url': self.component.web_url_for('project_wiki_view', wname='home', _guid=True),
},
'children': [
{
'page': {
'url': self.component.web_url_for('project_wiki_view', wname='home', _guid=True),
'name': 'Home',
'id': self.component._primary_key,
}
},
{
'page': {
'url': self.component.web_url_for('project_wiki_view', wname='zoo', _guid=True),
'name': 'zoo',
'id': zoo_page._primary_key,
},
}
],
'kind': 'component',
'category': self.component.category,
'pointer': False,
}
]
data = views.format_component_wiki_pages(node=self.project, auth=self.consolidate_auth)
assert_equal(data, expected)
def test_format_component_wiki_pages_no_content_non_contributor(self):
data = views.format_component_wiki_pages(node=self.project, auth=Auth(self.non_contributor))
expected = []
assert_equal(data, expected)
def test_project_wiki_grid_data(self):
self.project.update_node_wiki('home', 'project content', self.consolidate_auth)
self.component.update_node_wiki('home', 'component content', self.consolidate_auth)
data = views.project_wiki_grid_data(auth=self.consolidate_auth, wname='home', node=self.project)
expected = [
{
'title': 'Project Wiki Pages',
'kind': 'folder',
'type': 'heading',
'children': views.format_project_wiki_pages(node=self.project, auth=self.consolidate_auth),
},
{
'title': 'Component Wiki Pages',
'kind': 'folder',
'type': 'heading',
'children': views.format_component_wiki_pages(node=self.project, auth=self.consolidate_auth)
}
]
assert_equal(data, expected)
# --- source: github repo binoculars/osf.io, path addons/wiki/tests/test_wiki.py, license apache-2.0 ---
"""Computes the arc distance between a collection of points
This code is challenging because it requires efficient vectorisation of
trigonometric functions that are not natively supported in SSE/AVX. The numpy
version makes use of numpy.tile and transpose, which proves to be challenging
too.
See also http://en.wikipedia.org/wiki/Great-circle_distance
"""
import numpy as np
def make_env(n=1000):
rng = np.random.RandomState(42)
a = rng.rand(n, 2)
b = rng.rand(n, 2)
return (a, b), {}
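# A minimal sketch of the arc-distance kernel this benchmark targets (this
# helper is an assumption for illustration, not the benchmark's reference
# implementation; it uses broadcasting where the numpy version described in
# the docstring uses numpy.tile and transpose):
def arc_distance(a, b):
    """Great-circle central angle between each point of `a` and each of `b`.

    `a` and `b` are (n, 2) and (m, 2) arrays of (latitude, longitude) pairs
    in radians; the result is an (n, m) matrix via the haversine formula.
    """
    lat1, lon1 = a[:, 0][:, None], a[:, 1][:, None]   # shape (n, 1)
    lat2, lon2 = b[:, 0][None, :], b[:, 1][None, :]   # shape (1, m)
    h = (np.sin((lat2 - lat1) / 2.0) ** 2
         + np.cos(lat1) * np.cos(lat2) * np.sin((lon2 - lon1) / 2.0) ** 2)
    return 2.0 * np.arcsin(np.sqrt(h))
# Usage: (a, b), kwargs = make_env(100); d = arc_distance(a, b)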
# --- source: github repo numfocus/python-benchmarks, path arc_distance/__init__.py, license mit ---
from django.db import models
class Article(models.Model):
"""
Article to test ckeditor
"""
title = models.CharField(max_length=60)
content = models.TextField()
# --- source: github repo arkanister/django-flickr-gallery, path example/website/models.py, license bsd-3-clause ---
"""
sync_wikipedia.py
superlachaise_api
Created by Maxime Le Moine on 09/06/2015.
Copyright (c) 2015 Maxime Le Moine.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import json, os, re, requests, sys, traceback
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.utils import timezone, translation
from django.utils.translation import ugettext as _
from HTMLParser import HTMLParser
from superlachaise_api.models import *
def print_unicode(s):
    print s.encode('utf-8')
def none_to_blank(s):
if s is None:
return u''
return unicode(s)
class WikipediaIntroHTMLParser(HTMLParser):
def __init__(self, language_code):
self.reset()
self.language_code = language_code
self.result = []
self.opened_tags = [{'tag': 'root', 'attrs': [], 'data': False, 'content': self.result}]
self.current_content = self.result
self.data = False
def can_read_data(self):
if len(self.opened_tags) > 1 and self.opened_tags[1]['tag'] == 'div':
return False
for opened_tag in self.opened_tags:
if opened_tag['tag'] == 'table':
return False
if opened_tag['tag'] == 'ref':
return False
if opened_tag['tag'] == 'ol':
for attr in opened_tag['attrs']:
if attr[0] in ['id', 'class']:
return False
if opened_tag['tag'] == 'ul':
for attr in opened_tag['attrs']:
if attr[0] in ['id', 'class']:
return False
if opened_tag['tag'] == 'strong':
for attr in opened_tag['attrs']:
if attr[0] == 'class' and 'error' in attr[1]:
return False
if opened_tag['tag'] == 'sup':
for attr in opened_tag['attrs']:
if attr[0] in ['id', 'class']:
return False
if opened_tag['tag'] == 'span':
for attr in opened_tag['attrs']:
if attr[0] == 'id' or (attr[0] == 'class' and attr[1] in ['noprint', 'unicode haudio']):
return False
if opened_tag['tag'] == 'small':
for attr in opened_tag['attrs']:
if attr[0] == 'id' or (attr[0] == 'class' and 'metadata' in attr[1]):
return False
if opened_tag['tag'] == 'li':
for attr in opened_tag['attrs']:
if attr[0] in ['id', 'class']:
return False
for attr in opened_tag['attrs']:
if attr[0] == 'style' and 'display:none' in attr[1]:
return False
return True
def handle_data(self, data):
if self.can_read_data():
self.current_content.append(data)
self.opened_tags[-1]['data'] = True
def handle_entityref(self, name):
if self.can_read_data():
self.current_content.append('&'+name+';')
self.opened_tags[-1]['data'] = True
def handle_charref(self, name):
if self.can_read_data():
self.current_content.append('&#'+name+';')
self.opened_tags[-1]['data'] = True
def handle_starttag(self, tag, attrs):
self.current_content = []
self.opened_tags.append({'tag': tag, 'attrs': attrs, 'data': False, 'content': self.current_content})
if self.can_read_data():
self.current_content.append('<%s' % tag)
if tag == 'a':
for attr in attrs:
if attr[0] == 'href':
if attr[1].startswith('/wiki/') or attr[1].startswith('/w/'):
self.current_content.append(' href="https://{language_code}.wikipedia.org{link}"'.format(language_code=self.language_code, link=attr[1]))
elif attr[1].startswith('//'):
self.current_content.append(' href="http:{link}"'.format(link=attr[1]))
self.current_content.append('>')
def handle_endtag(self, tag):
if self.can_read_data():
self.current_content.append('</%s>' % tag)
if self.can_read_data() and (self.opened_tags[-1]['data'] or self.opened_tags[-1]['tag'] == 'a'):
self.opened_tags[-2]['content'].append(''.join(self.current_content))
self.opened_tags[-2]['data'] = True
else:
# Delete last whitespace if any
content = self.opened_tags[-2]['content']
while isinstance(content, list):
if len(content) > 0:
                    if not isinstance(content[-1], list) and content[-1] in [u' ', u'\xa0']:  # plain space or no-break space
del content[-1]
if len(content) < 2:
self.opened_tags[-2]['data'] = False
break
content = content[-1]
else:
content = None
self.opened_tags = self.opened_tags[:-1]
self.current_content = self.opened_tags[-1]['content']
def get_data(self):
return ''.join(self.result).strip()
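# A minimal usage sketch of WikipediaIntroHTMLParser (the HTML snippet below is
# invented for illustration; in production the parser is fed the section-0 HTML
# returned by the MediaWiki parse API):
#
#     parser = WikipediaIntroHTMLParser('en')
#     parser.feed(u'<p>An <a href="/wiki/Example">example</a> intro.</p>'
#                 u'<table><tr><td>infobox noise, filtered out</td></tr></table>')
#     parser.get_data()
#     # -> u'<p>An <a href="https://en.wikipedia.org/wiki/Example">example</a> intro.</p>'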
class Command(BaseCommand):
def request_wikipedia_pages(self, language_code, wikipedia_titles):
pages = {}
last_continue = {
'continue': '',
}
titles = '|'.join(wikipedia_titles).encode('utf8')
while True:
# Request properties
params = {
'action': 'query',
'prop': 'revisions',
'rvprop': 'content',
'format': 'json',
'titles': titles,
}
params.update(last_continue)
if settings.MEDIAWIKI_USER_AGENT:
headers = {"User-Agent" : settings.MEDIAWIKI_USER_AGENT}
else:
                raise CommandError('no MEDIAWIKI_USER_AGENT defined in settings.py')
json_result = requests.get('https://%s.wikipedia.org/w/api.php' % (language_code), params=params, headers=headers).json()
if 'pages' in json_result['query']:
for page in json_result['query']['pages'].values():
pages[page['title']] = page
if 'continue' not in json_result: break
last_continue = json_result['continue']
return pages
def request_wikipedia_pre_section(self, language_code, title):
# Request properties
params = {
'action': 'parse',
'prop': 'text',
'section': '0',
'format': 'json',
'page': title.encode('utf8'),
}
if settings.MEDIAWIKI_USER_AGENT:
headers = {"User-Agent" : settings.MEDIAWIKI_USER_AGENT}
else:
            raise CommandError('no MEDIAWIKI_USER_AGENT defined in settings.py')
json_result = requests.get('https://%s.wikipedia.org/w/api.php' % (language_code), params=params, headers=headers).json()
return json_result['parse']['text']['*']
def get_wikipedia_intro(self, language_code, title):
# Get wikipedia pre-section (intro)
pre_section = self.request_wikipedia_pre_section(language_code, title)
# Process HTML
parser = WikipediaIntroHTMLParser(language_code)
parser.feed(pre_section)
return none_to_blank(parser.get_data())
def get_default_sort(self, page):
try:
if len(page['revisions']) != 1:
raise BaseException
wikitext = page['revisions'][0]['*']
default_sort = u''
for line in wikitext.split('\n'):
match_obj = re.search(r'^{{DEFAULTSORT:(.*)}}$', line)
if match_obj:
default_sort = match_obj.group(1).strip()
break
match_obj = re.search(r'^{{CLEDETRI:(.*)}}$', line)
if match_obj:
default_sort = match_obj.group(1).strip()
break
return default_sort
except:
return u''
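    # For illustration, the DEFAULTSORT scan above matches wikitext lines like
    # the following (the fragment is hypothetical, not taken from a real page):
    #
    #     line = u'{{DEFAULTSORT:Doe, John}}'
    #     re.search(r'^{{DEFAULTSORT:(.*)}}$', line).group(1).strip()
    #     # -> u'Doe, John'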
    def handle_wikidata_localized_entry(self, wikidata_localized_entry):
# Get values
values_dict = {
'title': wikidata_localized_entry.wikipedia,
'intro': self.get_wikipedia_intro(wikidata_localized_entry.language.code, wikidata_localized_entry.wikipedia),
}
# Get or create object in database
target_object_id_dict = {"wikidata_localized_entry": wikidata_localized_entry}
wikipedia_page, created = WikipediaPage.objects.get_or_create(**target_object_id_dict)
self.fetched_objects_pks.append(wikipedia_page.pk)
modified = False
if wikidata_localized_entry.language.code in self.default_sort and wikidata_localized_entry.wikipedia in self.default_sort[wikidata_localized_entry.language.code]:
values_dict['default_sort'] = self.default_sort[wikidata_localized_entry.language.code][wikidata_localized_entry.wikipedia]
else:
values_dict['default_sort'] = u''
if created:
self.created_objects = self.created_objects + 1
else:
# Search for modifications
for field, value in values_dict.iteritems():
if value != getattr(wikipedia_page, field):
modified = True
self.modified_objects = self.modified_objects + 1
break
if created or modified:
for field, value in values_dict.iteritems():
setattr(wikipedia_page, field, value)
wikipedia_page.save()
def sync_wikipedia(self, wikidata_localized_entry_ids):
if wikidata_localized_entry_ids:
wikidata_localized_entries = WikidataLocalizedEntry.objects.filter(id__in=wikidata_localized_entry_ids.split('|')).exclude(wikipedia__exact='')
else:
wikidata_localized_entries = WikidataLocalizedEntry.objects.exclude(wikipedia__exact='')
print_unicode(_('Requesting Wikipedia revisions...'))
self.default_sort = {}
total = len(wikidata_localized_entries)
count = 0
max_count_per_request = 25
for language in Language.objects.all():
self.default_sort[language.code] = {}
wikipedia_titles = wikidata_localized_entries.filter(language=language).values_list('wikipedia', flat=True)
for chunk in [wikipedia_titles[i:i+max_count_per_request] for i in range(0,len(wikipedia_titles),max_count_per_request)]:
print_unicode(str(count) + u'/' + str(total))
count += len(chunk)
pages_result = self.request_wikipedia_pages(language.code, chunk)
for title, page in pages_result.iteritems():
self.default_sort[language.code][title] = self.get_default_sort(page)
print_unicode(str(count) + u'/' + str(total))
print_unicode(_('Requesting Wikipedia page content...'))
total = len(wikidata_localized_entries)
count = 0
max_count_per_request = 25
self.fetched_objects_pks = []
for chunk in [wikidata_localized_entries[i:i+max_count_per_request] for i in range(0,len(wikidata_localized_entries),max_count_per_request)]:
print_unicode(str(count) + u'/' + str(total))
count += len(chunk)
for wikidata_localized_entry in chunk:
                self.handle_wikidata_localized_entry(wikidata_localized_entry)
print_unicode(str(count) + u'/' + str(total))
if not wikidata_localized_entry_ids:
# Look for deleted elements
for wikipedia_page in WikipediaPage.objects.exclude(pk__in=self.fetched_objects_pks):
self.deleted_objects = self.deleted_objects + 1
wikipedia_page.delete()
def add_arguments(self, parser):
parser.add_argument('--wikidata_localized_entry_ids',
action='store',
dest='wikidata_localized_entry_ids')
def handle(self, *args, **options):
try:
self.synchronization = Synchronization.objects.get(name=os.path.basename(__file__).split('.')[0].split('sync_')[-1])
except:
raise CommandError(sys.exc_info()[1])
error = None
try:
translation.activate(settings.LANGUAGE_CODE)
self.created_objects = 0
self.modified_objects = 0
self.deleted_objects = 0
self.errors = []
print_unicode(_('== Start %s ==') % self.synchronization.name)
self.sync_wikipedia(options['wikidata_localized_entry_ids'])
print_unicode(_('== End %s ==') % self.synchronization.name)
self.synchronization.created_objects = self.created_objects
self.synchronization.modified_objects = self.modified_objects
self.synchronization.deleted_objects = self.deleted_objects
self.synchronization.errors = ', '.join(self.errors)
translation.deactivate()
except:
print_unicode(traceback.format_exc())
error = sys.exc_info()[1]
self.synchronization.errors = traceback.format_exc()
self.synchronization.last_executed = timezone.now()
self.synchronization.save()
if error:
raise CommandError(error)
# --- source: github repo MaximeLM/superlachaise_api, path management/commands/sync_wikipedia.py, license apache-2.0 ---
from keras.models import Model
from keras.layers import Input, Dense
from keras.optimizers import SGD
class FeedForwardNN():
def __init__(self, x_train, y_train, hidden_units=20, activation='sigmoid',
loss_func='mean_squared_error', learning_rate=0.01,
epochs=2000):
self.x_train = x_train
self.y_train = y_train
self.hidden_units = hidden_units
self.loss_func = loss_func
self.activation = activation
self.learning_rate = learning_rate
self.epochs = epochs
self.model = None
def fit(self):
input_layer = Input(shape=self.x_train[0].shape)
hidden_layer = Dense(units=self.hidden_units,
activation=self.activation)(input_layer)
output_layer = Dense(1)(hidden_layer)
sgd = SGD(lr=self.learning_rate)
self.model = Model(inputs=input_layer, outputs=output_layer)
self.model.compile(optimizer=sgd, loss=self.loss_func,
metrics=['accuracy'])
        self.model.fit(self.x_train, self.y_train,
                       batch_size=len(self.x_train), epochs=self.epochs)
def predict(self, data):
return self.model.predict(data)
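# A minimal usage sketch of FeedForwardNN on a toy regression problem (the data
# below is invented for illustration). Note that fit() passes
# batch_size=len(x_train), so training is full-batch gradient descent:
#
#     import numpy as np
#     x_train = np.random.rand(100, 1)
#     y_train = 2 * x_train[:, 0] + 0.01 * np.random.randn(100)
#     nn = FeedForwardNN(x_train, y_train, hidden_units=10, epochs=200)
#     nn.fit()
#     predictions = nn.predict(x_train[:5])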
# --- source: github repo HPCC-Cloud-Computing/press, path prediction/predict/feedforward/feedforward.py, license mit ---
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class DevicesOperations(object):
"""DevicesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.databoxedge.v2019_07_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list_by_subscription(
self,
expand=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.DataBoxEdgeDeviceList"]
"""Gets all the Data Box Edge/Data Box Gateway devices in a subscription.
:param expand: Specify $expand=details to populate additional fields related to the resource or
Specify $skipToken=:code:`<token>` to populate the next page in the list.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either DataBoxEdgeDeviceList or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databoxedge.v2019_07_01.models.DataBoxEdgeDeviceList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DataBoxEdgeDeviceList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_subscription.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('DataBoxEdgeDeviceList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices'} # type: ignore
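    # A hedged usage sketch (not part of the generated code): callers normally
    # reach this operation group through the generated management client, e.g.
    #
    #     from azure.identity import DefaultAzureCredential
    #     from azure.mgmt.databoxedge import DataBoxEdgeManagementClient
    #
    #     client = DataBoxEdgeManagementClient(
    #         credential=DefaultAzureCredential(),
    #         subscription_id='00000000-0000-0000-0000-000000000000',  # hypothetical
    #     )
    #     # ItemPaged transparently follows next_link continuation tokens.
    #     for device in client.devices.list_by_subscription(expand='details'):
    #         print(device.name)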
def list_by_resource_group(
self,
resource_group_name, # type: str
expand=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.DataBoxEdgeDeviceList"]
"""Gets all the Data Box Edge/Data Box Gateway devices in a resource group.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param expand: Specify $expand=details to populate additional fields related to the resource or
Specify $skipToken=:code:`<token>` to populate the next page in the list.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either DataBoxEdgeDeviceList or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databoxedge.v2019_07_01.models.DataBoxEdgeDeviceList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DataBoxEdgeDeviceList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('DataBoxEdgeDeviceList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices'} # type: ignore
def get(
self,
device_name, # type: str
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.DataBoxEdgeDevice"
"""Gets the properties of the Data Box Edge/Data Box Gateway device.
:param device_name: The device name.
:type device_name: str
:param resource_group_name: The resource group name.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DataBoxEdgeDevice, or the result of cls(response)
:rtype: ~azure.mgmt.databoxedge.v2019_07_01.models.DataBoxEdgeDevice
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DataBoxEdgeDevice"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('DataBoxEdgeDevice', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}'} # type: ignore
def _create_or_update_initial(
self,
device_name, # type: str
resource_group_name, # type: str
data_box_edge_device, # type: "_models.DataBoxEdgeDevice"
**kwargs # type: Any
):
# type: (...) -> "_models.DataBoxEdgeDevice"
cls = kwargs.pop('cls', None) # type: ClsType["_models.DataBoxEdgeDevice"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(data_box_edge_device, 'DataBoxEdgeDevice')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('DataBoxEdgeDevice', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}'} # type: ignore
def begin_create_or_update(
self,
device_name, # type: str
resource_group_name, # type: str
data_box_edge_device, # type: "_models.DataBoxEdgeDevice"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.DataBoxEdgeDevice"]
"""Creates or updates a Data Box Edge/Data Box Gateway resource.
:param device_name: The device name.
:type device_name: str
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param data_box_edge_device: The resource object.
:type data_box_edge_device: ~azure.mgmt.databoxedge.v2019_07_01.models.DataBoxEdgeDevice
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either DataBoxEdgeDevice or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databoxedge.v2019_07_01.models.DataBoxEdgeDevice]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.DataBoxEdgeDevice"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
device_name=device_name,
resource_group_name=resource_group_name,
data_box_edge_device=data_box_edge_device,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('DataBoxEdgeDevice', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}'} # type: ignore
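    # A hedged LRO usage sketch (the model field values below are illustrative,
    # not prescribed by the generated code):
    #
    #     from azure.mgmt.databoxedge.v2019_07_01 import models
    #
    #     poller = client.devices.begin_create_or_update(
    #         device_name='my-edge-device',          # hypothetical
    #         resource_group_name='my-rg',           # hypothetical
    #         data_box_edge_device=models.DataBoxEdgeDevice(
    #             location='eastus',
    #             sku=models.Sku(name='Edge'),
    #         ),
    #     )
    #     device = poller.result()  # blocks until the operation reaches a terminal state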
def _delete_initial(
self,
device_name, # type: str
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}'} # type: ignore
def begin_delete(
self,
device_name, # type: str
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the Data Box Edge/Data Box Gateway device.
:param device_name: The device name.
:type device_name: str
:param resource_group_name: The resource group name.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
device_name=device_name,
resource_group_name=resource_group_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}'} # type: ignore
def update(
self,
device_name, # type: str
resource_group_name, # type: str
parameters, # type: "_models.DataBoxEdgeDevicePatch"
**kwargs # type: Any
):
# type: (...) -> "_models.DataBoxEdgeDevice"
"""Modifies a Data Box Edge/Data Box Gateway resource.
:param device_name: The device name.
:type device_name: str
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param parameters: The resource parameters.
:type parameters: ~azure.mgmt.databoxedge.v2019_07_01.models.DataBoxEdgeDevicePatch
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DataBoxEdgeDevice, or the result of cls(response)
:rtype: ~azure.mgmt.databoxedge.v2019_07_01.models.DataBoxEdgeDevice
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DataBoxEdgeDevice"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update.metadata['url'] # type: ignore
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'DataBoxEdgeDevicePatch')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('DataBoxEdgeDevice', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}'} # type: ignore
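# Usage note (illustrative; names below are assumed caller-side variables):
# update() issues a PATCH, so it only changes mutable top-level properties such
# as tags, using the DataBoxEdgeDevicePatch model from the models package:
#     patch = DataBoxEdgeDevicePatch(tags={"env": "test"})
#     updated = client.devices.update(device_name, rg_name, patch)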
def _download_updates_initial(
self,
device_name, # type: str
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
# Construct URL
url = self._download_updates_initial.metadata['url'] # type: ignore
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_download_updates_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/downloadUpdates'} # type: ignore
def begin_download_updates(
self,
device_name, # type: str
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Downloads the updates on a Data Box Edge/Data Box Gateway device.
Downloads the updates on a Data Box Edge/Data Box Gateway device.
:param device_name: The device name.
:type device_name: str
:param resource_group_name: The resource group name.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._download_updates_initial(
device_name=device_name,
resource_group_name=resource_group_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_download_updates.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/downloadUpdates'} # type: ignore
def get_extended_information(
self,
device_name, # type: str
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.DataBoxEdgeDeviceExtendedInfo"
"""Gets additional information for the specified Data Box Edge/Data Box Gateway device.
:param device_name: The device name.
:type device_name: str
:param resource_group_name: The resource group name.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DataBoxEdgeDeviceExtendedInfo, or the result of cls(response)
:rtype: ~azure.mgmt.databoxedge.v2019_07_01.models.DataBoxEdgeDeviceExtendedInfo
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DataBoxEdgeDeviceExtendedInfo"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
# Construct URL
url = self.get_extended_information.metadata['url'] # type: ignore
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('DataBoxEdgeDeviceExtendedInfo', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_extended_information.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/getExtendedInformation'} # type: ignore
def _install_updates_initial(
self,
device_name, # type: str
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
# Construct URL
url = self._install_updates_initial.metadata['url'] # type: ignore
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_install_updates_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/installUpdates'} # type: ignore
def begin_install_updates(
self,
device_name, # type: str
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Installs the updates on the Data Box Edge/Data Box Gateway device.
Installs the updates on the Data Box Edge/Data Box Gateway device.
:param device_name: The device name.
:type device_name: str
:param resource_group_name: The resource group name.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._install_updates_initial(
device_name=device_name,
resource_group_name=resource_group_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_install_updates.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/installUpdates'} # type: ignore
def get_network_settings(
self,
device_name, # type: str
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.NetworkSettings"
"""Gets the network settings of the specified Data Box Edge/Data Box Gateway device.
:param device_name: The device name.
:type device_name: str
:param resource_group_name: The resource group name.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkSettings, or the result of cls(response)
:rtype: ~azure.mgmt.databoxedge.v2019_07_01.models.NetworkSettings
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkSettings"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
# Construct URL
url = self.get_network_settings.metadata['url'] # type: ignore
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkSettings', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_network_settings.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/networkSettings/default'} # type: ignore
def _scan_for_updates_initial(
self,
device_name, # type: str
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
# Construct URL
url = self._scan_for_updates_initial.metadata['url'] # type: ignore
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_scan_for_updates_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/scanForUpdates'} # type: ignore
def begin_scan_for_updates(
self,
device_name, # type: str
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Scans for updates on a Data Box Edge/Data Box Gateway device.
Scans for updates on a Data Box Edge/Data Box Gateway device.
:param device_name: The device name.
:type device_name: str
:param resource_group_name: The resource group name.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._scan_for_updates_initial(
device_name=device_name,
resource_group_name=resource_group_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_scan_for_updates.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/scanForUpdates'} # type: ignore
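# Usage note (illustrative; `client`, `device_name` and `rg_name` are assumed
# caller-side names): the update LROs are typically chained -- scan first,
# inspect the summary, then download and install:
#     client.devices.begin_scan_for_updates(device_name, rg_name).result()
#     summary = client.devices.get_update_summary(device_name, rg_name)
#     client.devices.begin_download_updates(device_name, rg_name).result()
#     client.devices.begin_install_updates(device_name, rg_name).result()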
def _create_or_update_security_settings_initial(
self,
device_name, # type: str
resource_group_name, # type: str
security_settings, # type: "_models.SecuritySettings"
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_security_settings_initial.metadata['url'] # type: ignore
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(security_settings, 'SecuritySettings')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_create_or_update_security_settings_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/securitySettings/default/update'} # type: ignore
def begin_create_or_update_security_settings(
self,
device_name, # type: str
resource_group_name, # type: str
security_settings, # type: "_models.SecuritySettings"
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Updates the security settings on a Data Box Edge/Data Box Gateway device.
:param device_name: The device name.
:type device_name: str
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param security_settings: The security settings.
:type security_settings: ~azure.mgmt.databoxedge.v2019_07_01.models.SecuritySettings
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_security_settings_initial(
device_name=device_name,
resource_group_name=resource_group_name,
security_settings=security_settings,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update_security_settings.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/securitySettings/default/update'} # type: ignore
def get_update_summary(
self,
device_name, # type: str
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.UpdateSummary"
"""Gets information about the availability of updates based on the last scan of the device. It also gets information about any ongoing download or install jobs on the device.
Gets information about the availability of updates based on the last scan of the device. It
also gets information about any ongoing download or install jobs on the device.
:param device_name: The device name.
:type device_name: str
:param resource_group_name: The resource group name.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: UpdateSummary, or the result of cls(response)
:rtype: ~azure.mgmt.databoxedge.v2019_07_01.models.UpdateSummary
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.UpdateSummary"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
# Construct URL
url = self.get_update_summary.metadata['url'] # type: ignore
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('UpdateSummary', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_update_summary.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/updateSummary/default'} # type: ignore
def upload_certificate(
self,
device_name, # type: str
resource_group_name, # type: str
parameters, # type: "_models.UploadCertificateRequest"
**kwargs # type: Any
):
# type: (...) -> "_models.UploadCertificateResponse"
"""Uploads registration certificate for the device.
:param device_name: The device name.
:type device_name: str
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param parameters: The upload certificate request.
:type parameters: ~azure.mgmt.databoxedge.v2019_07_01.models.UploadCertificateRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:return: UploadCertificateResponse, or the result of cls(response)
:rtype: ~azure.mgmt.databoxedge.v2019_07_01.models.UploadCertificateResponse
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.UploadCertificateResponse"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.upload_certificate.metadata['url'] # type: ignore
path_format_arguments = {
'deviceName': self._serialize.url("device_name", device_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'UploadCertificateRequest')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('UploadCertificateResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
upload_certificate.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataBoxEdge/dataBoxEdgeDevices/{deviceName}/uploadCertificate'} # type: ignore
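# Illustrative end-to-end sketch (appended for clarity; not part of the generated
# module above). Assumes the azure-identity and azure-mgmt-databoxedge packages
# are installed; the subscription and resource-group values are placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.databoxedge import DataBoxEdgeManagementClient

def _demo_list_devices(subscription_id, resource_group):
    # The operations class above is exposed on the management client as `devices`.
    client = DataBoxEdgeManagementClient(DefaultAzureCredential(), subscription_id)
    for device in client.devices.list_by_resource_group(resource_group):
        print(device.name)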
# --- source: github repo Azure/azure-sdk-for-python, path sdk/databoxedge/azure-mgmt-databoxedge/azure/mgmt/databoxedge/v2019_07_01/operations/_devices_operations.py (MIT license) ---
from django.test import TestCase
from zords import constants
from zords.models import Zord
class ZordTestCase(TestCase):
fixtures = ['zords.json']
def test_zord(self):
zord = Zord.objects.get(pk=18)
self.assertIsInstance(zord, Zord)
self.assertEqual(zord.name, 'Unicorn Thunderzord')
self.assertEqual(zord.type, constants.ZORD_TYPE_THUNDERZORD)
def test_zord_to_string(self):
zord = Zord.objects.get(pk=1)
self.assertEqual(str(zord), 'Dino Megazord')
def test_zord_mode_to_string(self):
zord = Zord.objects.get(pk=19)
self.assertIsInstance(zord, Zord)
self.assertEqual(zord.name, 'White Tigerzord')
self.assertEqual(zord.type, constants.ZORD_TYPE_THUNDERZORD)
mode = zord.modes.first()
self.assertEqual(mode.id, 1)
self.assertEqual(str(mode), 'Tiger Mode')
self.assertRegexpMatches(mode.description, '^Tiger mode is the primary form')
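# Minimal sketch (an assumption, not taken from the repository) of the zords app
# these tests exercise: a Zord with a type constant and related modes, shaped to
# satisfy the assertions above.
#
# zords/constants.py:
#     ZORD_TYPE_THUNDERZORD = 'thunderzord'
#
# zords/models.py:
#     from django.db import models
#
#     class ZordMode(models.Model):
#         name = models.CharField(max_length=255)
#         description = models.TextField()
#
#         def __str__(self):
#             return self.name
#
#     class Zord(models.Model):
#         name = models.CharField(max_length=255)
#         type = models.CharField(max_length=64)
#         modes = models.ManyToManyField(ZordMode, related_name='zords')
#
#         def __str__(self):
#             return self.name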
# --- source: github repo reiniervdwindt/power-rangers-api, path src/zords/tests.py (MIT license) ---
"""
Name: u6.py
Desc: Defines the U6 class, which makes working with a U6 much easier. All of
the low-level functions for the U6 are implemented as functions of the U6
class. There are also a handful of additional functions which improve upon
the interface provided by the low-level functions.
To learn about the low-level functions, please see Section 5.2 of the U6 User's Guide:
http://labjack.com/support/u6/users-guide/5.2
"""
from LabJackPython import *
import struct, collections, ConfigParser  # collections is used by processStreamData
def openAllU6():
"""
A helpful function which will open all the connected U6s. Returns a
dictionary where the keys are the serialNumber, and the value is the device
object.
"""
returnDict = dict()
for i in range(deviceCount(6)):
d = U6(firstFound = False, devNumber = i+1)
returnDict[str(d.serialNumber)] = d
return returnDict
def dumpPacket(buffer):
"""
Name: dumpPacket(buffer)
Args: byte array
Desc: Returns hex value of all bytes in the buffer
"""
return repr([ hex(x) for x in buffer ])
def getBit(n, bit):
"""
Name: getBit(n, bit)
Args: n, the original integer you want the bit of
bit, the index of the bit you want
Desc: Returns the bit at position "bit" of integer "n"
>>> n = 5
>>> bit = 2
>>> getBit(n, bit)
1
>>> bit = 0
>>> getBit(n, bit)
1
"""
return int(bool((int(n) & (1 << bit)) >> bit))
def toBitList(inbyte):
"""
Name: toBitList(inbyte)
Args: a byte
Desc: Converts a byte into list for access to individual bits
>>> inbyte = 5
>>> toBitList(inbyte)
[1, 0, 1, 0, 0, 0, 0, 0]
"""
return [ getBit(inbyte, b) for b in range(8) ]
def dictAsString(d):
"""Helper function that returns a string representation of a dictionary"""
s = "{"
for key, val in sorted(d.items()):
s += "%s: %s, " % (key, val)
s = s.rstrip(", ") # Nuke the trailing comma
s += "}"
return s
class CalibrationInfo(object):
""" A class to hold the calibration info for a U6 """
def __init__(self):
# A flag to tell difference between nominal and actual values.
self.nominal = True
# Positive Channel calibration
self.ain10vSlope = 3.1580578 * (10 ** -4)
self.ain10vOffset = -10.5869565220
self.ain1vSlope = 3.1580578 * (10 ** -5)
self.ain1vOffset = -1.05869565220
self.ain100mvSlope = 3.1580578 * (10 ** -6)
self.ain100mvOffset = -0.105869565220
self.ain10mvSlope = 3.1580578 * (10 ** -7)
self.ain10mvOffset = -0.0105869565220
self.ainSlope = [self.ain10vSlope, self.ain1vSlope, self.ain100mvSlope, self.ain10mvSlope]
self.ainOffset = [ self.ain10vOffset, self.ain1vOffset, self.ain100mvOffset, self.ain10mvOffset ]
# Negative Channel calibration
self.ain10vNegSlope = -3.15805800 * (10 ** -4)
self.ain10vCenter = 33523.0
self.ain1vNegSlope = -3.15805800 * (10 ** -5)
self.ain1vCenter = 33523.0
self.ain100mvNegSlope = -3.15805800 * (10 ** -6)
self.ain100mvCenter = 33523.0
self.ain10mvNegSlope = -3.15805800 * (10 ** -7)
self.ain10mvCenter = 33523.0
self.ainNegSlope = [ self.ain10vNegSlope, self.ain1vNegSlope, self.ain100mvNegSlope, self.ain10mvNegSlope ]
self.ainCenter = [ self.ain10vCenter, self.ain1vCenter, self.ain100mvCenter, self.ain10mvCenter ]
# Miscellaneous
self.dac0Slope = 13200.0
self.dac0Offset = 0
self.dac1Slope = 13200.0
self.dac1Offset = 0
self.currentOutput0 = 0.0000100000
self.currentOutput1 = 0.0002000000
self.temperatureSlope = -92.379
self.temperatureOffset = 465.129
# Hi-Res ADC stuff
# Positive Channel calibration
self.proAin10vSlope = 3.1580578 * (10 ** -4)
self.proAin10vOffset = -10.5869565220
self.proAin1vSlope = 3.1580578 * (10 ** -5)
self.proAin1vOffset = -1.05869565220
self.proAin100mvSlope = 3.1580578 * (10 ** -6)
self.proAin100mvOffset = -0.105869565220
self.proAin10mvSlope = 3.1580578 * (10 ** -7)
self.proAin10mvOffset = -0.0105869565220
# Negative Channel calibration
self.proAin10vNegSlope = -3.15805800 * (10 ** -4)
self.proAin10vCenter = 33523.0
self.proAin1vNegSlope = -3.15805800 * (10 ** -5)
self.proAin1vCenter = 33523.0
self.proAin100mvNegSlope = -3.15805800 * (10 ** -6)
self.proAin100mvCenter = 33523.0
self.proAin10mvNegSlope = -3.15805800 * (10 ** -7)
self.proAin10mvCenter = 33523.0
def __str__(self):
return str(self.__dict__)
class U6(Device):
"""
U6 Class for all U6 specific low-level commands.
Example:
>>> import u6
>>> d = u6.U6()
>>> print d.configU6()
{'SerialNumber': 320032102, ... , 'FirmwareVersion': '1.26'}
"""
def __init__(self, debug = False, autoOpen = True, **kargs):
"""
Name: U6.__init__(self, debug = False, autoOpen = True, **kargs)
Args: debug, Do you want debug information?
autoOpen, If true, then the constructor will call open for you
**kargs, The arguments to be passed to open.
Desc: Your basic constructor.
"""
Device.__init__(self, None, devType = 6)
self.firmwareVersion = 0
self.bootloaderVersion = 0
self.hardwareVersion = 0
self.productId = 0
self.fioDirection = [None] * 8
self.fioState = [None] * 8
self.eioDirection = [None] * 8
self.eioState = [None] * 8
self.cioDirection = [None] * 8
self.cioState = [None] * 8
self.dac1Enable = 0
self.dac0 = 0
self.dac1 = 0
self.calInfo = CalibrationInfo()
self.productName = "U6"
self.debug = debug
if autoOpen:
self.open(**kargs)
def open(self, localId = None, firstFound = True, serial = None, devNumber = None, handleOnly = False, LJSocket = None):
"""
Name: U6.open(localId = None, firstFound = True, devNumber = None,
handleOnly = False, LJSocket = None)
Args: firstFound, If True, use the first found U6
serial, open a U6 with the given serial number
localId, open a U6 with the given local id.
devNumber, open a U6 with the given devNumber
handleOnly, if True, LabJackPython will only open a handle
LJSocket, set to "<ip>:<port>" to connect to LJSocket
Desc: Opens a U6 for reading and writing.
>>> myU6 = u6.U6(autoOpen = False)
>>> myU6.open()
"""
Device.open(self, 6, firstFound = firstFound, serial = serial, localId = localId, devNumber = devNumber, handleOnly = handleOnly, LJSocket = LJSocket )
def configU6(self, LocalID = None):
"""
Name: U6.configU6(LocalID = None)
Args: LocalID, if set, will write the new value to U6
Desc: Writes the Local ID, and reads some hardware information.
>>> myU6 = u6.U6()
>>> myU6.configU6()
{'BootloaderVersion': '6.15',
'FirmwareVersion': '0.88',
'HardwareVersion': '2.0',
'LocalID': 1,
'ProductID': 6,
'SerialNumber': 360005087,
'VersionInfo': 4}
"""
command = [ 0 ] * 26
#command[0] = Checksum8
command[1] = 0xF8
command[2] = 0x0A
command[3] = 0x08
#command[4] = Checksum16 (LSB)
#command[5] = Checksum16 (MSB)
if LocalID != None:
command[6] = (1 << 3)
command[8] = LocalID
#command[7] = Reserved
#command[9-25] = Reserved
try:
result = self._writeRead(command, 38, [0xF8, 0x10, 0x08])
except LabJackException, e:
if e.errorCode == 4:
print "NOTE: ConfigU6 returned an error of 4. This probably means you are using U6 with a *really old* firmware. Please upgrade your U6's firmware as soon as possible."
result = self._writeRead(command, 38, [0xF8, 0x10, 0x08], checkBytes = False)
else:
raise e
self.firmwareVersion = "%s.%02d" % (result[10], result[9])
self.bootloaderVersion = "%s.%02d" % (result[12], result[11])
self.hardwareVersion = "%s.%02d" % (result[14], result[13])
self.serialNumber = struct.unpack("<I", struct.pack(">BBBB", *result[15:19]))[0]
self.productId = struct.unpack("<H", struct.pack(">BB", *result[19:21]))[0]
self.localId = result[21]
self.versionInfo = result[37]
self.deviceName = 'U6'
if self.versionInfo == 12:
self.deviceName = 'U6-Pro'
return { 'FirmwareVersion' : self.firmwareVersion, 'BootloaderVersion' : self.bootloaderVersion, 'HardwareVersion' : self.hardwareVersion, 'SerialNumber' : self.serialNumber, 'ProductID' : self.productId, 'LocalID' : self.localId, 'VersionInfo' : self.versionInfo, 'DeviceName' : self.deviceName }
def configIO(self, NumberTimersEnabled = None, EnableCounter1 = None, EnableCounter0 = None, TimerCounterPinOffset = None, EnableUART = None):
"""
Name: U6.configIO(NumberTimersEnabled = None, EnableCounter1 = None, EnableCounter0 = None, TimerCounterPinOffset = None)
Args: NumberTimersEnabled, Number of timers to enable
EnableCounter1, Set to True to enable counter 1, False to disable
EnableCounter0, Set to True to enable counter 0, False to disable
TimerCounterPinOffset, where should the timers/counters start
if all args are None, command just reads.
Desc: Writes and reads the current IO configuration.
>>> myU6 = u6.U6()
>>> myU6.configIO()
{'Counter0Enabled': False,
'Counter1Enabled': False,
'NumberTimersEnabled': 0,
'TimerCounterPinOffset': 0}
"""
command = [ 0 ] * 16
#command[0] = Checksum8
command[1] = 0xF8
command[2] = 0x05
command[3] = 0x0B
#command[4] = Checksum16 (LSB)
#command[5] = Checksum16 (MSB)
if NumberTimersEnabled != None:
command[6] = 1
command[7] = NumberTimersEnabled
if EnableCounter0 != None:
command[6] = 1
if EnableCounter0:
command[8] = 1
if EnableCounter1 != None:
command[6] = 1
if EnableCounter1:
command[8] |= (1 << 1)
if TimerCounterPinOffset != None:
command[6] = 1
command[9] = TimerCounterPinOffset
if EnableUART is not None:
command[6] |= 1
command[6] |= (1 << 5)
result = self._writeRead(command, 16, [0xf8, 0x05, 0x0B])
return { 'NumberTimersEnabled' : result[8], 'Counter0Enabled' : bool(result[9] & 1), 'Counter1Enabled' : bool( (result[9] >> 1) & 1), 'TimerCounterPinOffset' : result[10] }
def configTimerClock(self, TimerClockBase = None, TimerClockDivisor = None):
"""
Name: U6.configTimerClock(TimerClockBase = None, TimerClockDivisor = None)
Args: TimerClockBase, which timer base to use
TimerClockDivisor, set the divisor
if all args are None, command just reads.
Note that you cannot set the divisor without setting the base.
Desc: Writes and read the timer clock configuration.
>>> myU6 = u6.U6()
>>> myU6.configTimerClock()
{'TimerClockDivisor': 256, 'TimerClockBase': 2}
"""
command = [ 0 ] * 10
#command[0] = Checksum8
command[1] = 0xF8
command[2] = 0x02
command[3] = 0x0A
#command[4] = Checksum16 (LSB)
#command[5] = Checksum16 (MSB)
#command[6] = Reserved
#command[7] = Reserved
if TimerClockBase != None:
command[8] = (1 << 7)
command[8] |= TimerClockBase & 7
if TimerClockDivisor != None:
command[9] = TimerClockDivisor
result = self._writeRead(command, 10, [0xF8, 0x2, 0x0A])
divisor = result[9]
if divisor == 0:
divisor = 256
return { 'TimerClockBase' : (result[8] & 7), 'TimerClockDivisor' : divisor }
def _buildBuffer(self, sendBuffer, readLen, commandlist):
for cmd in commandlist:
if isinstance(cmd, FeedbackCommand):
sendBuffer += cmd.cmdBytes
readLen += cmd.readLen
elif isinstance(cmd, list):
sendBuffer, readLen = self._buildBuffer(sendBuffer, readLen, cmd)
return (sendBuffer, readLen)
def _buildFeedbackResults(self, rcvBuffer, commandlist, results, i):
for cmd in commandlist:
if isinstance(cmd, FeedbackCommand):
results.append(cmd.handle(rcvBuffer[i:i+cmd.readLen]))
i += cmd.readLen
elif isinstance(cmd, list):
self._buildFeedbackResults(rcvBuffer, cmd, results, i)
return results
def getFeedback(self, *commandlist):
"""
Name: getFeedback(commandlist)
Args: the FeedbackCommands to run
Desc: Forms the commandlist into a packet, sends it to the U6, and reads the response.
>>> myU6 = U6()
>>> ledCommand = u6.LED(False)
>>> internalTempCommand = u6.AIN(30, 31, True)
>>> myU6.getFeedback(ledCommand, internalTempCommand)
[None, 23200]
OR if you like the list version better:
>>> myU6 = U6()
>>> ledCommand = u6.LED(False)
>>> internalTempCommand = u6.AIN(30, 31, True)
>>> commandList = [ ledCommand, internalTempCommand ]
>>> myU6.getFeedback(commandList)
[None, 23200]
"""
sendBuffer = [0] * 7
sendBuffer[1] = 0xF8
readLen = 9
sendBuffer, readLen = self._buildBuffer(sendBuffer, readLen, commandlist)
if len(sendBuffer) % 2:
sendBuffer += [0]
sendBuffer[2] = len(sendBuffer) / 2 - 3
if readLen % 2:
readLen += 1
if len(sendBuffer) > MAX_USB_PACKET_LENGTH:
raise LabJackException("ERROR: The feedback command you are attempting to send is bigger than 64 bytes ( %s bytes ). Break your commands up into separate calls to getFeedback()." % len(sendBuffer))
if readLen > MAX_USB_PACKET_LENGTH:
raise LabJackException("ERROR: The feedback command you are attempting to send would yield a response that is greater than 64 bytes ( %s bytes ). Break your commands up into separate calls to getFeedback()." % readLen)
rcvBuffer = self._writeRead(sendBuffer, readLen, [], checkBytes = False, stream = False, checksum = True)
# Check the response for errors
try:
self._checkCommandBytes(rcvBuffer, [0xF8])
if rcvBuffer[3] != 0x00:
raise LabJackException("Got incorrect command bytes")
except LowlevelErrorException, e:
if isinstance(commandlist[0], list):
culprit = commandlist[0][ (rcvBuffer[7] -1) ]
else:
culprit = commandlist[ (rcvBuffer[7] -1) ]
raise LowlevelErrorException("\nThis Command\n %s\nreturned an error:\n %s" % ( culprit, lowlevelErrorToString(rcvBuffer[6]) ) )
results = []
i = 9
return self._buildFeedbackResults(rcvBuffer, commandlist, results, i)
def readMem(self, BlockNum, ReadCal=False):
"""
Name: U6.readMem(BlockNum, ReadCal=False)
Args: BlockNum, which block to read
ReadCal, set to True to read the calibration data
Desc: Reads 1 block (32 bytes) from the non-volatile user or
calibration memory. Please read section 5.2.6 of the user's
guide before you do something you may regret.
>>> myU6 = U6()
>>> myU6.readMem(0)
[ < userdata stored in block 0 > ]
NOTE: Do not call this function while streaming.
"""
command = [ 0 ] * 8
#command[0] = Checksum8
command[1] = 0xF8
command[2] = 0x01
command[3] = 0x2A
if ReadCal:
command[3] = 0x2D
#command[4] = Checksum16 (LSB)
#command[5] = Checksum16 (MSB)
command[6] = 0x00
command[7] = BlockNum
result = self._writeRead(command, 40, [ 0xF8, 0x11, command[3] ])
return result[8:]
def readCal(self, BlockNum):
return self.readMem(BlockNum, ReadCal = True)
def writeMem(self, BlockNum, Data, WriteCal=False):
"""
Name: U6.writeMem(BlockNum, Data, WriteCal=False)
Args: BlockNum, which block to write
Data, a list of bytes to write
WriteCal, set to True to write calibration.
Desc: Writes 1 block (32 bytes) from the non-volatile user or
calibration memory. Please read section 5.2.7 of the user's
guide before you do something you may regret.
>>> myU6 = U6()
>>> myU6.writeMem(0, [ < userdata to be stored in block 0 > ])
NOTE: Do not call this function while streaming.
"""
if not isinstance(Data, list):
raise LabJackException("Data must be a list of bytes")
command = [ 0 ] * 40
#command[0] = Checksum8
command[1] = 0xF8
command[2] = 0x11
command[3] = 0x28
if WriteCal:
command[3] = 0x2B
#command[4] = Checksum16 (LSB)
#command[5] = Checksum16 (MSB)
command[6] = 0x00
command[7] = BlockNum
command[8:] = Data
self._writeRead(command, 8, [0xF8, 0x11, command[3]])
def writeCal(self, BlockNum, Data):
return self.writeMem(BlockNum, Data, WriteCal = True)
def eraseMem(self, EraseCal=False):
"""
Name: U6.eraseMem(EraseCal=False)
Args: EraseCal, set to True to erase the calibration memory.
Desc: The U6 uses flash memory that must be erased before writing.
Please read section 5.2.8 of the user's guide before you do
something you may regret.
>>> myU6 = U6()
>>> myU6.eraseMem()
NOTE: Do not call this function while streaming.
"""
if EraseCal:
command = [ 0 ] * 8
#command[0] = Checksum8
command[1] = 0xF8
command[2] = 0x01
command[3] = 0x2C
#command[4] = Checksum16 (LSB)
#command[5] = Checksum16 (MSB)
command[6] = 0x4C
command[7] = 0x6C
else:
command = [ 0 ] * 6
#command[0] = Checksum8
command[1] = 0xF8
command[2] = 0x00
command[3] = 0x29
#command[4] = Checksum16 (LSB)
#command[5] = Checksum16 (MSB)
self._writeRead(command, 8, [0xF8, 0x01, command[3]])
def eraseCal(self):
return self.eraseMem(EraseCal=True)
def streamConfig(self, NumChannels = 1, ResolutionIndex = 0, SamplesPerPacket = 25, SettlingFactor = 0, InternalStreamClockFrequency = 0, DivideClockBy256 = False, ScanInterval = 1, ChannelNumbers = [0], ChannelOptions = [0], SampleFrequency = None):
"""
Name: U6.streamConfig(
NumChannels = 1, ResolutionIndex = 0,
SamplesPerPacket = 25, SettlingFactor = 0,
InternalStreamClockFrequency = 0, DivideClockBy256 = False,
ScanInterval = 1, ChannelNumbers = [0],
ChannelOptions = [0], SampleFrequency = None )
Args: NumChannels, the number of channels to stream
ResolutionIndex, the resolution of the samples
SettlingFactor, the settling factor to be used
ChannelNumbers, a list of channel numbers to stream
ChannelOptions, a list of channel options bytes
Set Either:
SampleFrequency, the frequency in Hz to sample
-- OR --
SamplesPerPacket, how many samples make one packet
InternalStreamClockFrequency, 0 = 4 MHz, 1 = 48 MHz
DivideClockBy256, True = divide the clock by 256
ScanInterval, clock/ScanInterval = frequency.
Desc: Configures streaming on the U6. On a decent machine, you can
expect to stream a range of 0.238 Hz to 15 Hz. Without the
conversion, you can get up to 55 Hz.
"""
if NumChannels != len(ChannelNumbers) or NumChannels != len(ChannelOptions):
raise LabJackException("NumChannels must match length of ChannelNumbers and ChannelOptions")
if len(ChannelNumbers) != len(ChannelOptions):
raise LabJackException("len(ChannelNumbers) doesn't match len(ChannelOptions)")
if SampleFrequency != None:
if SampleFrequency < 1000:
if SampleFrequency < 25:
SamplesPerPacket = SampleFrequency
DivideClockBy256 = True
ScanInterval = 15625/SampleFrequency
else:
DivideClockBy256 = False
ScanInterval = 4000000/SampleFrequency
# Force Scan Interval into correct range
ScanInterval = min( ScanInterval, 65535 )
ScanInterval = int( ScanInterval )
ScanInterval = max( ScanInterval, 1 )
# Same with Samples per packet
SamplesPerPacket = max( SamplesPerPacket, 1)
SamplesPerPacket = int( SamplesPerPacket )
SamplesPerPacket = min ( SamplesPerPacket, 25)
command = [ 0 ] * (14 + NumChannels*2)
#command[0] = Checksum8
command[1] = 0xF8
command[2] = NumChannels+4
command[3] = 0x11
#command[4] = Checksum16 (LSB)
#command[5] = Checksum16 (MSB)
command[6] = NumChannels
command[7] = ResolutionIndex
command[8] = SamplesPerPacket
#command[9] = Reserved
command[10] = SettlingFactor
command[11] = (InternalStreamClockFrequency & 1) << 3
if DivideClockBy256:
command[11] |= 1 << 1
t = struct.pack("<H", ScanInterval)
command[12] = ord(t[0])
command[13] = ord(t[1])
for i in range(NumChannels):
command[14+(i*2)] = ChannelNumbers[i]
command[15+(i*2)] = ChannelOptions[i]
self._writeRead(command, 8, [0xF8, 0x01, 0x11])
# Set up the variables for future use.
self.streamSamplesPerPacket = SamplesPerPacket
self.streamChannelNumbers = ChannelNumbers
self.streamChannelOptions = ChannelOptions
self.streamConfiged = True
if InternalStreamClockFrequency == 1:
freq = float(48000000)
else:
freq = float(4000000)
if DivideClockBy256:
freq /= 256
freq = freq/ScanInterval
self.packetsPerRequest = max(1, int(freq/SamplesPerPacket))
self.packetsPerRequest = min(self.packetsPerRequest, 48)
def processStreamData(self, result, numBytes = None):
"""
Name: U6.processStreamData(result, numBytes = None)
Args: result, the string returned from streamData()
numBytes, the number of bytes per packet
Desc: Breaks stream data into individual channels and applies
calibrations.
>>> reading = d.streamData(convert = False)
>>> print d.processStreamData(reading['result'])
defaultdict(<type 'list'>, {'AIN0' : [3.123, 3.231, 3.232, ...]})
"""
if numBytes is None:
numBytes = 14 + (self.streamSamplesPerPacket * 2)
returnDict = collections.defaultdict(list)
j = self.streamPacketOffset
for packet in self.breakupPackets(result, numBytes):
for sample in self.samplesFromPacket(packet):
if j >= len(self.streamChannelNumbers):
j = 0
if self.streamChannelNumbers[j] in (193, 194):
value = struct.unpack('<BB', sample )
elif self.streamChannelNumbers[j] >= 200:
value = struct.unpack('<H', sample )[0]
else:
if (self.streamChannelOptions[j] >> 7) == 1:
# do signed
value = struct.unpack('<H', sample )[0]
else:
# do unsigned
value = struct.unpack('<H', sample )[0]
gainIndex = (self.streamChannelOptions[j] >> 4) & 0x3
value = self.binaryToCalibratedAnalogVoltage(gainIndex, value, is16Bits=True)
returnDict["AIN%s" % self.streamChannelNumbers[j]].append(value)
j += 1
self.streamPacketOffset = j
return returnDict
def watchdog(self, Write = False, ResetOnTimeout = False, SetDIOStateOnTimeout = False, TimeoutPeriod = 60, DIOState = 0, DIONumber = 0):
"""
Name: U6.watchdog(Write = False, ResetOnTimeout = False, SetDIOStateOnTimeout = False, TimeoutPeriod = 60, DIOState = 0, DIONumber = 0)
Args: Write, Set to True to write new values to the watchdog.
ResetOnTimeout, True means reset the device on timeout
SetDIOStateOnTimeout, True means set the state of a DIO on timeout
TimeoutPeriod, Time, in seconds, to wait before timing out.
DIOState, 1 = High, 0 = Low
DIONumber, which DIO to set.
Desc: Controls a firmware based watchdog timer.
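Example (a sketch; enables a 60 second watchdog that resets the
device on timeout, the returned dictionary is illustrative):
>>> myU6 = U6()
>>> myU6.watchdog(Write = True, ResetOnTimeout = True, TimeoutPeriod = 60)
{'WatchDogEnabled': True, 'ResetOnTimeout': True, ...}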
"""
command = [ 0 ] * 16
#command[0] = Checksum8
command[1] = 0xF8
command[2] = 0x05
command[3] = 0x09
#command[4] = Checksum16 (LSB)
#command[5] = Checksum16 (MSB)
if Write:
command[6] = 1
if ResetOnTimeout:
command[7] = (1 << 5)
if SetDIOStateOnTimeout:
command[7] |= (1 << 4)
t = struct.pack("<H", TimeoutPeriod)
command[8] = ord(t[0])
command[9] = ord(t[1])
command[10] = ((DIOState & 1 ) << 7)
command[10] |= (DIONumber & 0xf)
result = self._writeRead(command, 16, [ 0xF8, 0x05, 0x09])
watchdogStatus = {}
if result[7] == 0:
watchdogStatus['WatchDogEnabled'] = False
watchdogStatus['ResetOnTimeout'] = False
watchdogStatus['SetDIOStateOnTimeout'] = False
else:
watchdogStatus['WatchDogEnabled'] = True
if (( result[7] >> 5 ) & 1):
watchdogStatus['ResetOnTimeout'] = True
else:
watchdogStatus['ResetOnTimeout'] = False
if (( result[7] >> 4 ) & 1):
watchdogStatus['SetDIOStateOnTimeout'] = True
else:
watchdogStatus['SetDIOStateOnTimeout'] = False
watchdogStatus['TimeoutPeriod'] = struct.unpack('<H', struct.pack("BB", *result[8:10]))[0]
if (( result[10] >> 7 ) & 1):
watchdogStatus['DIOState'] = 1
else:
watchdogStatus['DIOState'] = 0
watchdogStatus['DIONumber'] = ( result[10] & 15 )
return watchdogStatus
SPIModes = { 'A' : 0, 'B' : 1, 'C' : 2, 'D' : 3 }
def spi(self, SPIBytes, AutoCS=True, DisableDirConfig = False, SPIMode = 'A', SPIClockFactor = 0, CSPINNum = 0, CLKPinNum = 1, MISOPinNum = 2, MOSIPinNum = 3):
"""
Name: U6.spi(SPIBytes, AutoCS=True, DisableDirConfig = False,
SPIMode = 'A', SPIClockFactor = 0, CSPINNum = 0,
CLKPinNum = 1, MISOPinNum = 2, MOSIPinNum = 3)
Args: SPIBytes, A list of bytes to send.
AutoCS, If True, the CS line is automatically driven low
during the SPI communication and brought back high
when done.
DisableDirConfig, If True, function does not set the direction
of the line.
SPIMode, 'A', 'B', 'C', or 'D'.
SPIClockFactor, Sets the frequency of the SPI clock.
CSPINNum, which pin is CS
CLKPinNum, which pin is CLK
MISOPinNum, which pin is MISO
MOSIPinNum, which pin is MOSI
Desc: Sends and receives serial data using SPI synchronous
communication. See Section 5.2.17 of the user's guide.
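Example (a sketch; the bytes sent are arbitrary and the returned
bytes depend on the attached slave device):
>>> myU6.spi([0xAA, 0x55])
{'NumSPIBytesTransferred': 2, 'SPIBytes': [0, 0]}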
"""
if not isinstance(SPIBytes, list):
raise LabJackException("SPIBytes MUST be a list of bytes")
numSPIBytes = len(SPIBytes)
oddPacket = False
if numSPIBytes%2 != 0:
SPIBytes.append(0)
numSPIBytes = numSPIBytes + 1
oddPacket = True
command = [ 0 ] * (13 + numSPIBytes)
#command[0] = Checksum8
command[1] = 0xF8
command[2] = 4 + (numSPIBytes/2)
command[3] = 0x3A
#command[4] = Checksum16 (LSB)
#command[5] = Checksum16 (MSB)
if AutoCS:
command[6] |= (1 << 7)
if DisableDirConfig:
command[6] |= (1 << 6)
command[6] |= ( self.SPIModes[SPIMode] & 3 )
command[7] = SPIClockFactor
#command[8] = Reserved
command[9] = CSPINNum
command[10] = CLKPinNum
command[11] = MISOPinNum
command[12] = MOSIPinNum
command[13] = numSPIBytes
if oddPacket:
command[13] = numSPIBytes - 1
command[14:] = SPIBytes
result = self._writeRead(command, 8+numSPIBytes, [ 0xF8, 1+(numSPIBytes/2), 0x3A ])
return { 'NumSPIBytesTransferred' : result[7], 'SPIBytes' : result[8:] }
def asynchConfig(self, Update = True, UARTEnable = True, DesiredBaud = None, BaudFactor = 63036):
"""
Name: U6.asynchConfig(Update = True, UARTEnable = True,
DesiredBaud = None, BaudFactor = 63036)
Args: Update, If True, new values are written.
UARTEnable, If True, UART will be enabled.
DesiredBaud, If set, the formula below is applied to
calculate BaudFactor automatically.
BaudFactor, = 2^16 - 48000000/(2 * Desired Baud). Ignored
if DesiredBaud is set.
Desc: Configures the U6 UART for asynchronous communication. See
section 5.2.18 of the User's Guide.
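Example (a sketch; 9600 baud gives
BaudFactor = 2**16 - 48000000/(2*9600) = 63036, the default):
>>> myU6.asynchConfig(DesiredBaud = 9600)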
"""
if UARTEnable:
self.configIO(EnableUART = True)
command = [ 0 ] * 10
#command[0] = Checksum8
command[1] = 0xF8
command[2] = 0x02
command[3] = 0x14
#command[4] = Checksum16 (LSB)
#command[5] = Checksum16 (MSB)
#command[6] = 0x00
if Update:
command[7] = (1 << 7)
if UARTEnable:
command[7] |= (1 << 6)
if DesiredBaud != None:
BaudFactor = (2**16) - 48000000/(2 * DesiredBaud)
t = struct.pack("<H", BaudFactor)
command[8] = ord(t[0])
command[9] = ord(t[1])
results = self._writeRead(command, 10, [0xF8, 0x02, 0x14])
# Either echoed byte differing means the BaudFactor was not accepted.
if command[8] != results[8] or command[9] != results[9]:
raise LabJackException("BaudFactor didn't stick.")
def asynchTX(self, AsynchBytes):
"""
Name: U6.asynchTX(AsynchBytes)
Args: AsynchBytes, List of bytes to send
Desc: Sends bytes to the U6 UART which will be sent asynchronously
on the transmit line. Section 5.2.19 of the User's Guide.
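Example (a sketch; the counts in the returned dictionary depend
on the state of the UART):
>>> myU6.asynchTX([0x41, 0x42, 0x43])
{'NumAsynchBytesSent': 3, 'NumAsynchBytesInRXBuffer': 0}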
"""
numBytes = len(AsynchBytes)
oddPacket = False
if numBytes%2 != 0:
oddPacket = True
AsynchBytes.append(0)
numBytes = numBytes + 1
command = [ 0 ] * (8+numBytes)
#command[0] = Checksum8
command[1] = 0xF8
command[2] = 1 + (numBytes/2)
command[3] = 0x15
#command[4] = Checksum16 (LSB)
#command[5] = Checksum16 (MSB)
#command[6] = 0x00
command[7] = numBytes
if oddPacket:
command[7] = numBytes-1
command[8:] = AsynchBytes
result = self._writeRead(command, 10, [ 0xF8, 0x02, 0x15])
return { 'NumAsynchBytesSent' : result[7], 'NumAsynchBytesInRXBuffer' : result[8] }
def asynchRX(self, Flush = False):
"""
Name: U6.asynchRX(Flush = False)
Args: Flush, If True, empties the entire 256-byte RX buffer.
Desc: Reads the oldest 32 bytes from the U6 UART RX buffer.
Section 5.2.20 of the User's Guide.
"""
command = [ 0, 0xF8, 0x01, 0x16, 0, 0, 0, int(Flush)]
result = self._writeRead(command, 40, [ 0xF8, 0x11, 0x16 ])
return { 'NumAsynchBytesInRXBuffer' : result[7], 'AsynchBytes' : result[8:] }
def i2c(self, Address, I2CBytes, EnableClockStretching = False, NoStopWhenRestarting = False, ResetAtStart = False, SpeedAdjust = 0, SDAPinNum = 0, SCLPinNum = 1, NumI2CBytesToReceive = 0, AddressByte = None):
"""
Name: U6.i2c(Address, I2CBytes, EnableClockStretching = False, NoStopWhenRestarting = False, ResetAtStart = False, SpeedAdjust = 0, SDAPinNum = 0, SCLPinNum = 1, NumI2CBytesToReceive = 0, AddressByte = None)
Args: Address, the address (Not shifted over)
I2CBytes, a list of bytes to send
EnableClockStretching, True enables clock stretching
NoStopWhenRestarting, True means no stop sent when restarting
ResetAtStart, if True, an I2C bus reset will be done
before communicating.
SpeedAdjust, Allows the communication frequency to be reduced.
SDAPinNum, Which pin will be data
SCLPinNum, Which pin is clock
NumI2CBytesToReceive, Number of I2C bytes to expect back.
AddressByte, The address as you would put it in the lowlevel
packet. Overrides Address. Optional.
Desc: Sends and receives serial data using I2C synchronous
communication. Section 5.2.21 of the User's Guide.
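Example (a sketch; assumes an I2C device at 7-bit address 0x50,
e.g. an EEPROM, wired to FIO0/FIO1; return values are illustrative):
>>> myU6.i2c(0x50, [0x00], NumI2CBytesToReceive = 4)
{'AckArray': [...], 'I2CBytes': [...]}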
"""
numBytes = len(I2CBytes)
oddPacket = False
if numBytes%2 != 0:
oddPacket = True
I2CBytes.append(0)
numBytes = numBytes+1
command = [ 0 ] * (14+numBytes)
#command[0] = Checksum8
command[1] = 0xF8
command[2] = 4 + (numBytes/2)
command[3] = 0x3B
#command[4] = Checksum16 (LSB)
#command[5] = Checksum16 (MSB)
if EnableClockStretching:
command[6] |= (1 << 3)
if NoStopWhenRestarting:
command[6] |= (1 << 2)
if ResetAtStart:
command[6] |= (1 << 1)
command[7] = SpeedAdjust
command[8] = SDAPinNum
command[9] = SCLPinNum
if AddressByte != None:
command[10] = AddressByte
else:
command[10] = Address << 1
#command[11] = Reserved
command[12] = numBytes
if oddPacket:
command[12] = numBytes-1
command[13] = NumI2CBytesToReceive
command[14:] = I2CBytes
oddResponse = False
if NumI2CBytesToReceive%2 != 0:
NumI2CBytesToReceive = NumI2CBytesToReceive+1
oddResponse = True
result = self._writeRead(command, (12+NumI2CBytesToReceive), [0xF8, (3+(NumI2CBytesToReceive/2)), 0x3B])
if NumI2CBytesToReceive != 0:
return { 'AckArray' : result[8:12], 'I2CBytes' : result[12:] }
else:
return { 'AckArray' : result[8:12] }
def sht1x(self, DataPinNum = 0, ClockPinNum = 1, SHTOptions = 0xc0):
"""
Name: U6.sht1x(DataPinNum = 0, ClockPinNum = 1, SHTOptions = 0xc0)
Args: DataPinNum, Which pin is the Data line
ClockPinNum, Which line is the Clock line
SHTOptions (and proof people read documentation):
bit 7 = Read Temperature
bit 6 = Read Relative Humidity
bit 2 = Heater. 1 = on, 0 = off
bit 1 = Reserved at 0
bit 0 = Resolution. 1 = 8 bit RH, 12 bit T; 0 = 12 bit RH, 14 bit T
Desc: Reads temperature and humidity from a Sensirion SHT1X sensor.
Section 5.2.22 of the User's Guide.
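Example (a sketch; readings depend on the attached sensor and
the returned values are illustrative):
>>> myU6.sht1x(DataPinNum = 0, ClockPinNum = 1)
{'StatusReg': 0, 'Temperature': 23.15, 'Humidity': 39.4, ...}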
"""
command = [ 0 ] * 10
#command[0] = Checksum8
command[1] = 0xF8
command[2] = 0x02
command[3] = 0x39
#command[4] = Checksum16 (LSB)
#command[5] = Checksum16 (MSB)
command[6] = DataPinNum
command[7] = ClockPinNum
#command[8] = Reserved
command[9] = SHTOptions
result = self._writeRead(command, 16, [ 0xF8, 0x05, 0x39])
val = (result[11]*256) + result[10]
temp = -39.60 + 0.01*val
val = (result[14]*256) + result[13]
humid = -4 + 0.0405*val - 0.0000028*(val*val)
humid = (temp - 25)*(0.01 + 0.00008*val) + humid
return { 'StatusReg' : result[8], 'StatusCRC' : result[9], 'Temperature' : temp, 'TemperatureCRC' : result[12], 'Humidity' : humid, 'HumidityCRC' : result[15] }
# --------------------------- Old U6 code -------------------------------
def _readCalDataBlock(self, n):
"""
Internal routine to read the specified calibration block (0-9)
"""
sendBuffer = [0] * 8
sendBuffer[1] = 0xF8 # command byte
sendBuffer[2] = 0x01 # number of data words
sendBuffer[3] = 0x2D # extended command number
sendBuffer[6] = 0x00
sendBuffer[7] = n # Blocknum = 0
self.write(sendBuffer)
buff = self.read(40)
return buff[8:]
def getCalibrationData(self):
"""
Name: getCalibrationData(self)
Args: None
Desc: Gets the slopes and offsets for AIN and DACs,
as well as other calibration data
>>> myU6 = U6()
>>> myU6.getCalibrationData()
>>> myU6.calInfo
<ainDiffOffset: -2.46886488446,...>
"""
if self.debug is True:
print "Calibration data retrieval"
self.calInfo.nominal = False
#reading block 0 from memory
rcvBuffer = self._readCalDataBlock(0)
# Positive Channel calibration
self.calInfo.ain10vSlope = toDouble(rcvBuffer[:8])
self.calInfo.ain10vOffset = toDouble(rcvBuffer[8:16])
self.calInfo.ain1vSlope = toDouble(rcvBuffer[16:24])
self.calInfo.ain1vOffset = toDouble(rcvBuffer[24:])
#reading block 1 from memory
rcvBuffer = self._readCalDataBlock(1)
self.calInfo.ain100mvSlope = toDouble(rcvBuffer[:8])
self.calInfo.ain100mvOffset = toDouble(rcvBuffer[8:16])
self.calInfo.ain10mvSlope = toDouble(rcvBuffer[16:24])
self.calInfo.ain10mvOffset = toDouble(rcvBuffer[24:])
self.calInfo.ainSlope = [self.calInfo.ain10vSlope, self.calInfo.ain1vSlope, self.calInfo.ain100mvSlope, self.calInfo.ain10mvSlope]
self.calInfo.ainOffset = [ self.calInfo.ain10vOffset, self.calInfo.ain1vOffset, self.calInfo.ain100mvOffset, self.calInfo.ain10mvOffset ]
#reading block 2 from memory
rcvBuffer = self._readCalDataBlock(2)
# Negative Channel calibration
self.calInfo.ain10vNegSlope = toDouble(rcvBuffer[:8])
self.calInfo.ain10vCenter = toDouble(rcvBuffer[8:16])
self.calInfo.ain1vNegSlope = toDouble(rcvBuffer[16:24])
self.calInfo.ain1vCenter = toDouble(rcvBuffer[24:])
#reading block 3 from memory
rcvBuffer = self._readCalDataBlock(3)
self.calInfo.ain100mvNegSlope = toDouble(rcvBuffer[:8])
self.calInfo.ain100mvCenter = toDouble(rcvBuffer[8:16])
self.calInfo.ain10mvNegSlope = toDouble(rcvBuffer[16:24])
self.calInfo.ain10mvCenter = toDouble(rcvBuffer[24:])
self.calInfo.ainNegSlope = [ self.calInfo.ain10vNegSlope, self.calInfo.ain1vNegSlope, self.calInfo.ain100mvNegSlope, self.calInfo.ain10mvNegSlope ]
self.calInfo.ainCenter = [ self.calInfo.ain10vCenter, self.calInfo.ain1vCenter, self.calInfo.ain100mvCenter, self.calInfo.ain10mvCenter ]
#reading block 4 from memory
rcvBuffer = self._readCalDataBlock(4)
# Miscellaneous
self.calInfo.dac0Slope = toDouble(rcvBuffer[:8])
self.calInfo.dac0Offset = toDouble(rcvBuffer[8:16])
self.calInfo.dac1Slope = toDouble(rcvBuffer[16:24])
self.calInfo.dac1Offset = toDouble(rcvBuffer[24:])
#reading block 5 from memory
rcvBuffer = self._readCalDataBlock(5)
self.calInfo.currentOutput0 = toDouble(rcvBuffer[:8])
self.calInfo.currentOutput1 = toDouble(rcvBuffer[8:16])
self.calInfo.temperatureSlope = toDouble(rcvBuffer[16:24])
self.calInfo.temperatureOffset = toDouble(rcvBuffer[24:])
if self.productName == "U6-Pro":
# Hi-Res ADC stuff
#reading block 6 from memory
rcvBuffer = self._readCalDataBlock(6)
# Positive Channel calibration
self.calInfo.proAin10vSlope = toDouble(rcvBuffer[:8])
self.calInfo.proAin10vOffset = toDouble(rcvBuffer[8:16])
self.calInfo.proAin1vSlope = toDouble(rcvBuffer[16:24])
self.calInfo.proAin1vOffset = toDouble(rcvBuffer[24:])
#reading block 7 from memory
rcvBuffer = self._readCalDataBlock(7)
self.calInfo.proAin100mvSlope = toDouble(rcvBuffer[:8])
self.calInfo.proAin100mvOffset = toDouble(rcvBuffer[8:16])
self.calInfo.proAin10mvSlope = toDouble(rcvBuffer[16:24])
self.calInfo.proAin10mvOffset = toDouble(rcvBuffer[24:])
self.calInfo.proAinSlope = [self.calInfo.proAin10vSlope, self.calInfo.proAin1vSlope, self.calInfo.proAin100mvSlope, self.calInfo.proAin10mvSlope]
self.calInfo.proAinOffset = [ self.calInfo.proAin10vOffset, self.calInfo.proAin1vOffset, self.calInfo.proAin100mvOffset, self.calInfo.proAin10mvOffset ]
#reading block 8 from memory
rcvBuffer = self._readCalDataBlock(8)
# Negative Channel calibration
self.calInfo.proAin10vNegSlope = toDouble(rcvBuffer[:8])
self.calInfo.proAin10vCenter = toDouble(rcvBuffer[8:16])
self.calInfo.proAin1vNegSlope = toDouble(rcvBuffer[16:24])
self.calInfo.proAin1vCenter = toDouble(rcvBuffer[24:])
#reading block 9 from memory
rcvBuffer = self._readCalDataBlock(9)
self.calInfo.proAin100mvNegSlope = toDouble(rcvBuffer[:8])
self.calInfo.proAin100mvCenter = toDouble(rcvBuffer[8:16])
self.calInfo.proAin10mvNegSlope = toDouble(rcvBuffer[16:24])
self.calInfo.proAin10mvCenter = toDouble(rcvBuffer[24:])
self.calInfo.proAinNegSlope = [ self.calInfo.proAin10vNegSlope, self.calInfo.proAin1vNegSlope, self.calInfo.proAin100mvNegSlope, self.calInfo.proAin10mvNegSlope ]
self.calInfo.proAinCenter = [ self.calInfo.proAin10vCenter, self.calInfo.proAin1vCenter, self.calInfo.proAin100mvCenter, self.calInfo.proAin10mvCenter ]
def binaryToCalibratedAnalogVoltage(self, gainIndex, bytesVoltage, is16Bits=False):
"""
Name: binaryToCalibratedAnalogVoltage(gainIndex, bytesVoltage, is16Bits = False)
Args: gainIndex, which gain did you use?
bytesVoltage, bytes returned from the U6
is16Bits, set to True if bytesVoltage is 16 bits (not 24)
Desc: Converts binary voltage to an analog value.
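Example (a sketch; the exact voltage depends on the unit's
calibration constants):
>>> raw = myU6.getFeedback(u6.AIN24(0))[0]
>>> myU6.binaryToCalibratedAnalogVoltage(0, raw)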
"""
if not is16Bits:
bits = float(bytesVoltage)/256
else:
bits = float(bytesVoltage)
center = self.calInfo.ainCenter[gainIndex]
negSlope = self.calInfo.ainNegSlope[gainIndex]
posSlope = self.calInfo.ainSlope[gainIndex]
if self.productName == "U6-Pro":
center = self.calInfo.proAinCenter[gainIndex]
negSlope = self.calInfo.proAinNegSlope[gainIndex]
posSlope = self.calInfo.proAinSlope[gainIndex]
if bits < center:
return (center - bits) * negSlope
else:
return (bits - center) * posSlope
def binaryToCalibratedAnalogTemperature(self, bytesTemperature):
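# Converts a binary reading taken at gain index 0 to Kelvin using
# the factory temperature slope and offset.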
voltage = self.binaryToCalibratedAnalogVoltage(0, bytesTemperature)
return self.calInfo.temperatureSlope * float(voltage) + self.calInfo.temperatureOffset
def softReset(self):
"""
Name: softReset
Args: none
Desc: Send a soft reset.
>>> myU6 = U6()
>>> myU6.softReset()
"""
command = [ 0x00, 0x99, 0x01, 0x00 ]
command = setChecksum8(command, 4)
self.write(command, False, False)
results = self.read(4)
if results[3] != 0:
raise LowlevelErrorException(results[3], "The softReset command returned an error:\n %s" % lowlevelErrorToString(results[3]))
def hardReset(self):
"""
Name: hardReset
Args: none
Desc: Send a hard reset.
>>> myU6 = U6()
>>> myU6.hardReset()
"""
command = [ 0x00, 0x99, 0x02, 0x00 ]
command = setChecksum8(command, 4)
self.write(command, False, False)
results = self.read(4)
if results[3] != 0:
raise LowlevelErrorException(results[3], "The hardReset command returned an error:\n %s" % lowlevelErrorToString(results[3]))
self.close()
def setLED(self, state):
"""
Name: setLED(self, state)
Args: state: 1 = On, 0 = Off
Desc: Sets the state of the LED. (5.2.5.4 of user's guide)
>>> myU6 = U6()
>>> myU6.setLED(0)
... (LED turns off) ...
"""
self.getFeedback(LED(state))
def getTemperature(self):
"""
Name: getTemperature
Args: none
Desc: Reads the U6's internal temperature sensor in Kelvin.
See Section 2.6.4 of the U6 User's Guide.
>>> myU6.getTemperature()
299.87723471224308
"""
if self.calInfo.nominal:
# Read the actual calibration constants if we haven't already.
self.getCalibrationData()
result = self.getFeedback(AIN24AR(14))
return self.binaryToCalibratedAnalogTemperature(result[0]['AIN'])
def getAIN(self, positiveChannel, resolutionIndex = 0, gainIndex = 0, settlingFactor = 0, differential = False):
"""
Name: getAIN
Args: positiveChannel, resolutionIndex = 0, gainIndex = 0, settlingFactor = 0, differential = False
Desc: Reads an AIN and applies the calibration constants to it.
>>> myU6.getAIN(14)
299.87723471224308
"""
result = self.getFeedback(AIN24AR(positiveChannel, resolutionIndex, gainIndex, settlingFactor, differential))
return self.binaryToCalibratedAnalogVoltage(result[0]['GainIndex'], result[0]['AIN'])
def readDefaultsConfig(self):
"""
Name: U6.readDefaultsConfig( )
Args: None
Desc: Reads the power-up defaults stored in flash.
"""
results = dict()
defaults = self.readDefaults(0)
results['FIODirection'] = defaults[4]
results['FIOState'] = defaults[5]
results['EIODirection'] = defaults[8]
results['EIOState'] = defaults[9]
results['CIODirection'] = defaults[12]
results['CIOState'] = defaults[13]
results['ConfigWriteMask'] = defaults[16]
results['NumOfTimersEnable'] = defaults[17]
results['CounterMask'] = defaults[18]
results['PinOffset'] = defaults[19]
defaults = self.readDefaults(1)
results['ClockSource'] = defaults[0]
results['Divisor'] = defaults[1]
results['TMR0Mode'] = defaults[16]
results['TMR0ValueL'] = defaults[17]
results['TMR0ValueH'] = defaults[18]
results['TMR1Mode'] = defaults[20]
results['TMR1ValueL'] = defaults[21]
results['TMR1ValueH'] = defaults[22]
results['TMR2Mode'] = defaults[24]
results['TMR2ValueL'] = defaults[25]
results['TMR2ValueH'] = defaults[26]
results['TMR3Mode'] = defaults[28]
results['TMR3ValueL'] = defaults[29]
results['TMR3ValueH'] = defaults[30]
defaults = self.readDefaults(2)
results['DAC0'] = struct.unpack( ">H", struct.pack("BB", *defaults[16:18]) )[0]
results['DAC1'] = struct.unpack( ">H", struct.pack("BB", *defaults[20:22]) )[0]
defaults = self.readDefaults(3)
for i in range(14):
results["AIN%sGainRes" % i] = defaults[i]
results["AIN%sOptions" % i] = defaults[i+16]
return results
def exportConfig(self):
"""
Name: U6.exportConfig( )
Args: None
Desc: Takes a configuration and puts it into a ConfigParser object.
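Example (a sketch; the file name is arbitrary):
>>> parser = myU6.exportConfig()
>>> with open('u6-backup.config', 'w') as f:
...     parser.write(f)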
"""
# Make a new configuration file
parser = ConfigParser.SafeConfigParser()
# Change optionxform so that options preserve their case.
parser.optionxform = str
# Local Id and name
section = "Identifiers"
parser.add_section(section)
parser.set(section, "Local ID", str(self.localId))
parser.set(section, "Name", str(self.getName()))
parser.set(section, "Device Type", str(self.devType))
# FIO Direction / State
section = "FIOs"
parser.add_section(section)
dirs, states = self.getFeedback( PortDirRead(), PortStateRead() )
for key, value in dirs.items():
parser.set(section, "%s Directions" % key, str(value))
for key, value in states.items():
parser.set(section, "%s States" % key, str(value))
# DACs
section = "DACs"
parser.add_section(section)
dac0 = self.readRegister(5000)
dac0 = max(dac0, 0)
dac0 = min(dac0, 5)
parser.set(section, "DAC0", "%0.2f" % dac0)
dac1 = self.readRegister(5002)
dac1 = max(dac1, 0)
dac1 = min(dac1, 5)
parser.set(section, "DAC1", "%0.2f" % dac1)
# Timer Clock Configuration
section = "Timer Clock Speed Configuration"
parser.add_section(section)
timerclockconfig = self.configTimerClock()
for key, value in timerclockconfig.items():
parser.set(section, key, str(value))
# Timers / Counters
section = "Timers And Counters"
parser.add_section(section)
ioconfig = self.configIO()
for key, value in ioconfig.items():
parser.set(section, key, str(value))
for i in range(ioconfig['NumberTimersEnabled']):
mode, value = self.readRegister(7100 + (2 * i), numReg = 2, format = ">HH")
parser.set(section, "Timer%s Mode" % i, str(mode))
parser.set(section, "Timer%s Value" % i, str(value))
return parser
def loadConfig(self, configParserObj):
"""
Name: U6.loadConfig( configParserObj )
Args: configParserObj, A Config Parser object to load in
Desc: Takes a configuration and updates the U6 to match it.
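Example (a sketch; pairs with exportConfig(), file name arbitrary):
>>> parser = ConfigParser.SafeConfigParser()
>>> parser.read('u6-backup.config')
['u6-backup.config']
>>> myU6.loadConfig(parser)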
"""
parser = configParserObj
# Set Identifiers:
section = "Identifiers"
if parser.has_section(section):
if parser.has_option(section, "device type"):
if parser.getint(section, "device type") != self.devType:
raise Exception("Not a U6 Config file.")
if parser.has_option(section, "local id"):
self.configU6( LocalID = parser.getint(section, "local id"))
if parser.has_option(section, "name"):
self.setName( parser.get(section, "name") )
# Set FIOs:
section = "FIOs"
if parser.has_section(section):
fiodirs = 0
eiodirs = 0
ciodirs = 0
fiostates = 0
eiostates = 0
ciostates = 0
if parser.has_option(section, "fios directions"):
fiodirs = parser.getint(section, "fios directions")
if parser.has_option(section, "eios directions"):
eiodirs = parser.getint(section, "eios directions")
if parser.has_option(section, "cios directions"):
ciodirs = parser.getint(section, "cios directions")
if parser.has_option(section, "fios states"):
fiostates = parser.getint(section, "fios states")
if parser.has_option(section, "eios states"):
eiostates = parser.getint(section, "eios states")
if parser.has_option(section, "cios states"):
ciostates = parser.getint(section, "cios states")
self.getFeedback( PortStateWrite([fiostates, eiostates, ciostates]), PortDirWrite([fiodirs, eiodirs, ciodirs]) )
# Set DACs:
section = "DACs"
if parser.has_section(section):
if parser.has_option(section, "dac0"):
self.writeRegister(5000, parser.getfloat(section, "dac0"))
if parser.has_option(section, "dac1"):
self.writeRegister(5002, parser.getfloat(section, "dac1"))
# Set Timer Clock Configuration
section = "Timer Clock Speed Configuration"
if parser.has_section(section):
if parser.has_option(section, "timerclockbase") and parser.has_option(section, "timerclockdivisor"):
self.configTimerClock(TimerClockBase = parser.getint(section, "timerclockbase"), TimerClockDivisor = parser.getint(section, "timerclockdivisor"))
# Set Timers / Counters
section = "Timers And Counters"
if parser.has_section(section):
nte = None
c0e = None
c1e = None
cpo = None
if parser.has_option(section, "NumberTimersEnabled"):
nte = parser.getint(section, "NumberTimersEnabled")
if parser.has_option(section, "TimerCounterPinOffset"):
cpo = parser.getint(section, "TimerCounterPinOffset")
if parser.has_option(section, "Counter0Enabled"):
c0e = parser.getboolean(section, "Counter0Enabled")
if parser.has_option(section, "Counter1Enabled"):
c1e = parser.getboolean(section, "Counter1Enabled")
self.configIO(NumberTimersEnabled = nte, EnableCounter1 = c1e, EnableCounter0 = c0e, TimerCounterPinOffset = cpo)
mode = None
value = None
for i in range(4):
if parser.has_option(section, "timer%i mode" % i):
mode = parser.getint(section, "timer%i mode" % i)
if parser.has_option(section, "timer%i value" % i):
value = parser.getint(section, "timer%i value" % i)
self.getFeedback( TimerConfig(i, mode, value) )
class FeedbackCommand(object):
'''
The base FeedbackCommand class
Used to make Feedback easy. Make a list of these
and call getFeedback.
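Example (a sketch; the commands execute in order within a single
low-level Feedback packet, the read result is illustrative):
>>> d = u6.U6()
>>> d.getFeedback( u6.BitDirWrite(0, 1), u6.BitStateWrite(0, 1), u6.BitStateRead(0) )
[None, None, 1]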
'''
readLen = 0
def handle(self, input):
return None
validChannels = range(144)
class AIN(FeedbackCommand):
'''
Analog Input Feedback command
AIN(PositiveChannel)
PositiveChannel : the positive channel to use
NOTE: This command is kept for compatibility. Please use
the newer AIN24 and AIN24AR commands instead.
returns 16-bit unsigned int sample
>>> d.getFeedback( u6.AIN( PositiveChannel ) )
[ 19238 ]
'''
def __init__(self, PositiveChannel):
if PositiveChannel not in validChannels:
raise LabJackException("Invalid Positive Channel specified")
self.positiveChannel = PositiveChannel
self.cmdBytes = [ 0x01, PositiveChannel, 0 ]
readLen = 2
def __repr__(self):
return "<u6.AIN( PositiveChannel = %s )>" % self.positiveChannel
def handle(self, input):
result = (input[1] << 8) + input[0]
return result
class AIN24(FeedbackCommand):
'''
Analog Input 24-bit Feedback command
ainCommand = AIN24(PositiveChannel, ResolutionIndex = 0, GainIndex = 0, SettlingFactor = 0, Differential = False)
See section 5.2.5.2 of the user's guide.
NOTE: If you use a gain index of 15 (autorange), you should be using
the AIN24AR command instead.
positiveChannel : The positive channel to use
resolutionIndex : 0=default, 1-8 for high-speed ADC,
9-12 for high-res ADC on U6-Pro.
gainIndex : 0=x1, 1=x10, 2=x100, 3=x1000, 15=autorange
settlingFactor : 0=5us, 1=10us, 2=100us, 3=1ms, 4=10ms
differential : If this bit is set, a differential reading is done where
the negative channel is positiveChannel+1
returns 24-bit unsigned int sample
>>> d.getFeedback( u6.AIN24(PositiveChannel, ResolutionIndex = 0,
GainIndex = 0, SettlingFactor = 0,
Differential = False ) )
[ 193847 ]
'''
def __init__(self, PositiveChannel, ResolutionIndex = 0, GainIndex = 0, SettlingFactor = 0, Differential = False):
if PositiveChannel not in validChannels:
raise LabJackException("Invalid Positive Channel specified")
self.positiveChannel = PositiveChannel
self.resolutionIndex = ResolutionIndex
self.gainIndex = GainIndex
self.settlingFactor = SettlingFactor
self.differential = Differential
byte2 = ( ResolutionIndex & 0xf )
byte2 = ( ( GainIndex & 0xf ) << 4 ) + byte2
byte3 = (int(Differential) << 7) + SettlingFactor
self.cmdBytes = [ 0x02, PositiveChannel, byte2, byte3 ]
def __repr__(self):
return "<u6.AIN24( PositiveChannel = %s, ResolutionIndex = %s, GainIndex = %s, SettlingFactor = %s, Differential = %s )>" % (self.positiveChannel, self.resolutionIndex, self.gainIndex, self.settlingFactor, self.differential)
readLen = 3
def handle(self, input):
#Put it all into an integer.
result = (input[2] << 16 ) + (input[1] << 8 ) + input[0]
return result
class AIN24AR(FeedbackCommand):
'''
Autorange Analog Input 24-bit Feedback command
ainARCommand = AIN24AR(0, ResolutionIndex = 0, GainIndex = 0, SettlingFactor = 0, Differential = False)
See section 5.2.5.3 of the user's guide
PositiveChannel : The positive channel to use
ResolutionIndex : 0=default, 1-8 for high-speed ADC,
9-13 for high-res ADC on U6-Pro.
GainIndex : 0=x1, 1=x10, 2=x100, 3=x1000, 15=autorange
SettlingFactor : 0=5us, 1=10us, 2=100us, 3=1ms, 4=10ms
Differential : If this bit is set, a differential reading is done where
the negative channel is positiveChannel+1
returns a dictionary:
{
'AIN' : < 24-bit binary reading >,
'ResolutionIndex' : < actual resolution setting used for the reading >,
'GainIndex' : < actual gain used for the reading >,
'Status' : < reserved for future use >
}
>>> d.getFeedback( u6.AIN24AR( PositiveChannel, ResolutionIndex = 0,
GainIndex = 0, SettlingFactor = 0,
Differential = False ) )
{ 'AIN' : 193847, 'ResolutionIndex' : 0, 'GainIndex' : 0, 'Status' : 0 }
'''
def __init__(self, PositiveChannel, ResolutionIndex = 0, GainIndex = 0, SettlingFactor = 0, Differential = False):
if PositiveChannel not in validChannels:
raise LabJackException("Invalid Positive Channel specified")
self.positiveChannel = PositiveChannel
self.resolutionIndex = ResolutionIndex
self.gainIndex = GainIndex
self.settlingFactor = SettlingFactor
self.differential = Differential
byte2 = ( ResolutionIndex & 0xf )
byte2 = ( ( GainIndex & 0xf ) << 4 ) + byte2
byte3 = (int(Differential) << 7) + SettlingFactor
self.cmdBytes = [ 0x03, PositiveChannel, byte2, byte3 ]
def __repr__(self):
return "<u6.AIN24AR( PositiveChannel = %s, ResolutionIndex = %s, GainIndex = %s, SettlingFactor = %s, Differential = %s )>" % (self.positiveChannel, self.resolutionIndex, self.gainIndex, self.settlingFactor, self.differential)
readLen = 5
def handle(self, input):
#Put it all into an integer.
result = (input[2] << 16 ) + (input[1] << 8 ) + input[0]
resolutionIndex = input[3] & 0xf
gainIndex = ( input[3] >> 4 ) & 0xf
status = input[4]
return { 'AIN' : result, 'ResolutionIndex' : resolutionIndex, 'GainIndex' : gainIndex, 'Status' : status }
class WaitShort(FeedbackCommand):
'''
WaitShort Feedback command
specify the number of 128us time increments to wait
>>> d.getFeedback( u6.WaitShort( Time ) )
[ None ]
'''
def __init__(self, Time):
self.time = Time % 256
self.cmdBytes = [ 5, Time % 256 ]
def __repr__(self):
return "<u6.WaitShort( Time = %s )>" % self.time
class WaitLong(FeedbackCommand):
'''
WaitLong Feedback command
specify the number of 32ms time increments to wait
>>> d.getFeedback( u6.WaitLong( Time ) )
[ None ]
'''
def __init__(self, Time):
self.time = Time
self.cmdBytes = [ 6, Time % 256 ]
def __repr__(self):
return "<u6.WaitLong( Time = %s )>" % self.time
class LED(FeedbackCommand):
'''
LED Toggle
specify whether the LED should be on or off by truth value
1 or True = On, 0 or False = Off
>>> d.getFeedback( u6.LED( State ) )
[ None ]
'''
def __init__(self, State):
self.state = State
self.cmdBytes = [ 9, int(bool(State)) ]
def __repr__(self):
return "<u6.LED( State = %s )>" % self.state
class BitStateRead(FeedbackCommand):
'''
BitStateRead Feedback command
read the state of a single bit of digital I/O. Only digital
lines return valid readings.
IONumber: 0-7=FIO, 8-15=EIO, 16-19=CIO
return 0 or 1
>>> d.getFeedback( u6.BitStateRead( IONumber ) )
[ 1 ]
'''
def __init__(self, IONumber):
self.ioNumber = IONumber
self.cmdBytes = [ 10, IONumber % 20 ]
def __repr__(self):
return "<u6.BitStateRead( IONumber = %s )>" % self.ioNumber
readLen = 1
def handle(self, input):
return int(bool(input[0]))
class BitStateWrite(FeedbackCommand):
'''
BitStateWrite Feedback command
write a single bit of digital I/O. The direction of the
specified line is forced to output.
IONumber: 0-7=FIO, 8-15=EIO, 16-19=CIO
State: 0 or 1
>>> d.getFeedback( u6.BitStateWrite( IONumber, State ) )
[ None ]
'''
def __init__(self, IONumber, State):
self.ioNumber = IONumber
self.state = State
self.cmdBytes = [ 11, (IONumber % 20) + (int(bool(State)) << 7) ]
def __repr__(self):
return "<u6.BitStateWrite( IONumber = %s, State = %s )>" % (self.ioNumber, self.state)
class BitDirRead(FeedbackCommand):
'''
Read the digital direction of one I/O
IONumber: 0-7=FIO, 8-15=EIO, 16-19=CIO
returns 1 = Output, 0 = Input
>>> d.getFeedback( u6.BitDirRead( IONumber ) )
[ 1 ]
'''
def __init__(self, IONumber):
self.ioNumber = IONumber
self.cmdBytes = [ 12, IONumber % 20 ]
def __repr__(self):
return "<u6.BitDirRead( IONumber = %s )>" % self.ioNumber
readLen = 1
def handle(self, input):
return int(bool(input[0]))
class BitDirWrite(FeedbackCommand):
'''
BitDirWrite Feedback command
Set the digital direction of one I/O
IONumber: 0-7=FIO, 8-15=EIO, 16-19=CIO
Direction: 1 = Output, 0 = Input
>>> d.getFeedback( u6.BitDirWrite( IONumber, Direction ) )
[ None ]
'''
def __init__(self, IONumber, Direction):
self.ioNumber = IONumber
self.direction = Direction
self.cmdBytes = [ 13, (IONumber % 20) + (int(bool(Direction)) << 7) ]
def __repr__(self):
return "<u6.BitDirWrite( IONumber = %s, Direction = %s )>" % (self.ioNumber, self.direction)
class PortStateRead(FeedbackCommand):
"""
PortStateRead Feedback command
Reads the state of all digital I/O.
>>> d.getFeedback( u6.PortStateRead() )
[ { 'FIO' : 10, 'EIO' : 0, 'CIO' : 0 } ]
"""
def __init__(self):
self.cmdBytes = [ 26 ]
def __repr__(self):
return "<u6.PortStateRead()>"
readLen = 3
def handle(self, input):
return {'FIO' : input[0], 'EIO' : input[1], 'CIO' : input[2] }
class PortStateWrite(FeedbackCommand):
"""
PortStateWrite Feedback command
State: A list of 3 bytes representing FIO, EIO, CIO
WriteMask: A list of 3 bytes, representing which to update.
The Default is all ones.
>>> d.getFeedback( u6.PortStateWrite( State,
WriteMask = [ 0xff, 0xff, 0xff] ) )
[ None ]
"""
def __init__(self, State, WriteMask = [ 0xff, 0xff, 0xff]):
self.state = State
self.writeMask = WriteMask
self.cmdBytes = [ 27 ] + WriteMask + State
def __repr__(self):
return "<u6.PortStateWrite( State = %s, WriteMask = %s )>" % (self.state, self.writeMask)
class PortDirRead(FeedbackCommand):
"""
PortDirRead Feedback command
Reads the direction of all digital I/O.
>>> d.getFeedback( u6.PortDirRead() )
[ { 'FIO' : 10, 'EIO' : 0, 'CIO' : 0 } ]
"""
def __init__(self):
self.cmdBytes = [ 28 ]
def __repr__(self):
return "<u6.PortDirRead()>"
readLen = 3
def handle(self, input):
return {'FIO' : input[0], 'EIO' : input[1], 'CIO' : input[2] }
class PortDirWrite(FeedbackCommand):
"""
PortDirWrite Feedback command
Direction: A list of 3 bytes representing FIO, EIO, CIO
WriteMask: A list of 3 bytes, representing which to update. Default is all ones.
>>> d.getFeedback( u6.PortDirWrite( Direction,
WriteMask = [ 0xff, 0xff, 0xff] ) )
[ None ]
"""
def __init__(self, Direction, WriteMask = [ 0xff, 0xff, 0xff]):
self.direction = Direction
self.writeMask = WriteMask
self.cmdBytes = [ 29 ] + WriteMask + Direction
def __repr__(self):
return "<u6.PortDirWrite( Direction = %s, WriteMask = %s )>" % (self.direction, self.writeMask)
class DAC8(FeedbackCommand):
'''
8-bit DAC Feedback command
Controls a single analog output
Dac: 0 or 1
Value: 0-255
>>> d.getFeedback( u6.DAC8( Dac, Value ) )
[ None ]
'''
def __init__(self, Dac, Value):
self.dac = Dac
self.value = Value % 256
self.cmdBytes = [ 34 + (Dac % 2), Value % 256 ]
def __repr__(self):
return "<u6.DAC8( Dac = %s, Value = %s )>" % (self.dac, self.value)
class DAC0_8(DAC8):
"""
8-bit DAC Feedback command for DAC0
Controls DAC0 in 8-bit mode.
Value: 0-255
>>> d.getFeedback( u6.DAC0_8( Value ) )
[ None ]
"""
def __init__(self, Value):
DAC8.__init__(self, 0, Value)
def __repr__(self):
return "<u6.DAC0_8( Value = %s )>" % self.value
class DAC1_8(DAC8):
"""
8-bit DAC Feedback command for DAC1
Controls DAC1 in 8-bit mode.
Value: 0-255
>>> d.getFeedback( u6.DAC1_8( Value ) )
[ None ]
"""
def __init__(self, Value):
DAC8.__init__(self, 1, Value)
def __repr__(self):
return "<u6.DAC1_8( Value = %s )>" % self.value
class DAC16(FeedbackCommand):
'''
16-bit DAC Feedback command
Controls a single analog output
Dac: 0 or 1
Value: 0-65535
>>> d.getFeedback( u6.DAC16( Dac, Value ) )
[ None ]
'''
def __init__(self, Dac, Value):
self.dac = Dac
self.value = Value
self.cmdBytes = [ 38 + (Dac % 2), Value % 256, Value >> 8 ]
def __repr__(self):
return "<u6.DAC16( Dac = %s, Value = %s )>" % (self.dac, self.value)
class DAC0_16(DAC16):
"""
16-bit DAC Feedback command for DAC0
Controls DAC0 in 16-bit mode.
Value: 0-65535
>>> d.getFeedback( u6.DAC0_16( Value ) )
[ None ]
"""
def __init__(self, Value):
DAC16.__init__(self, 0, Value)
def __repr__(self):
return "<u6.DAC0_16( Value = %s )>" % self.value
class DAC1_16(DAC16):
"""
16-bit DAC Feedback command for DAC1
Controls DAC1 in 16-bit mode.
Value: 0-65535
>>> d.getFeedback( u6.DAC1_16( Value ) )
[ None ]
"""
def __init__(self, Value):
DAC16.__init__(self, 1, Value)
def __repr__(self):
return "<u6.DAC1_16( Value = %s )>" % self.value
class Timer(FeedbackCommand):
"""
For reading the value of the Timer. It provides the ability to update/reset
a given timer, and read the timer value.
( Section 5.2.5.17 of the User's Guide)
timer: Either 0 or 1 for timer0 or timer1
UpdateReset: Set True if you want to update the value
Value: Only updated if the UpdateReset bit is 1. The meaning of this
parameter varies with the timer mode.
Mode: Set to the timer mode to handle any special processing. See classes
QuadratureInputTimer and TimerStopInput1.
Returns an unsigned integer of the timer value, unless Mode has been
specified and there are special return values. See Section 2.9.1 for
expected return values.
>>> d.getFeedback( u6.Timer( timer, UpdateReset = False, Value = 0 \
... , Mode = None ) )
[ 12314 ]
"""
def __init__(self, timer, UpdateReset = False, Value=0, Mode = None):
if timer != 0 and timer != 1:
raise LabJackException("Timer should be either 0 or 1.")
if UpdateReset and Value == None:
raise LabJackException("UpdateReset set but no value.")
self.timer = timer
self.updateReset = UpdateReset
self.value = Value
self.mode = Mode
self.cmdBytes = [ (42 + (2*timer)), UpdateReset, Value % 256, Value >> 8 ]
readLen = 4
def __repr__(self):
return "<u6.Timer( timer = %s, UpdateReset = %s, Value = %s, Mode = %s )>" % (self.timer, self.updateReset, self.value, self.mode)
def handle(self, input):
inStr = struct.pack('B' * len(input), *input)
if self.mode == 8:
return struct.unpack('<i', inStr )[0]
elif self.mode == 9:
maxCount, current = struct.unpack('<HH', inStr )
return current, maxCount
else:
return struct.unpack('<I', inStr )[0]
class Timer0(Timer):
"""
For reading the value of the Timer0. It provides the ability to
update/reset Timer0, and read the timer value.
( Section 5.2.5.17 of the User's Guide)
UpdateReset: Set True if you want to update the value
Value: Only updated if the UpdateReset bit is 1. The meaning of this
parameter varies with the timer mode.
Mode: Set to the timer mode to handle any special processing. See classes
QuadratureInputTimer and TimerStopInput1.
>>> d.getFeedback( u6.Timer0( UpdateReset = False, Value = 0, \
... Mode = None ) )
[ 12314 ]
"""
def __init__(self, UpdateReset = False, Value = 0, Mode = None):
Timer.__init__(self, 0, UpdateReset, Value, Mode)
def __repr__(self):
return "<u6.Timer0( UpdateReset = %s, Value = %s, Mode = %s )>" % (self.updateReset, self.value, self.mode)
class Timer1(Timer):
"""
For reading the value of the Timer1. It provides the ability to
update/reset Timer1, and read the timer value.
( Section 5.2.5.17 of the User's Guide)
UpdateReset: Set True if you want to update the value
Value: Only updated if the UpdateReset bit is 1. The meaning of this
parameter varies with the timer mode.
Mode: Set to the timer mode to handle any special processing. See classes
QuadratureInputTimer and TimerStopInput1.
>>> d.getFeedback( u6.Timer1( UpdateReset = False, Value = 0, \
... Mode = None ) )
[ 12314 ]
"""
def __init__(self, UpdateReset = False, Value = 0, Mode = None):
Timer.__init__(self, 1, UpdateReset, Value, Mode)
def __repr__(self):
return "<u6.Timer1( UpdateReset = %s, Value = %s, Mode = %s )>" % (self.updateReset, self.value, self.mode)
class QuadratureInputTimer(Timer):
"""
For reading Quadrature input timers. They are special because their values
are signed.
( Section 2.9.1.8 of the User's Guide)
Args:
UpdateReset: Set True if you want to reset the counter.
Value: Set to 0, and UpdateReset to True to reset the counter.
Returns a signed integer.
>>> # Setup the two timers to be quadrature
>>> d.getFeedback( u6.Timer0Config( 8 ), u6.Timer1Config( 8 ) )
[None, None]
>>> # Read the value
>>> d.getFeedback( u6.QuadratureInputTimer() )
[-21]
"""
def __init__(self, UpdateReset = False, Value = 0):
Timer.__init__(self, 0, UpdateReset, Value, Mode = 8)
def __repr__(self):
return "<u6.QuadratureInputTimer( UpdateReset = %s, Value = %s )>" % (self.updateReset, self.value)
class TimerStopInput1(Timer1):
"""
For reading a stop input timer. They are special because the value returns
the current edge count and the stop value.
( Section 2.9.1.9 of the User's Guide)
Args:
UpdateReset: Set True if you want to update the value.
Value: The stop value. Only updated if the UpdateReset bit is 1.
Returns a tuple where the first value is current edge count, and the second
value is the stop value.
>>> # Setup the timer to be Stop Input
>>> d.getFeedback( u6.Timer0Config( 9, Value = 30 ) )
[None]
>>> # Read the timer
>>> d.getFeedback( u6.TimerStopInput1() )
[(0, 30)]
"""
def __init__(self, UpdateReset = False, Value = 0):
Timer.__init__(self, 1, UpdateReset, Value, Mode = 9)
def __repr__(self):
return "<u6.TimerStopInput1( UpdateReset = %s, Value = %s )>" % (self.updateReset, self.value)
class TimerConfig(FeedbackCommand):
"""
This IOType configures a particular timer.
timer = # of the timer to configure
TimerMode = See Section 2.9 for more information about the available modes.
Value = The meaning of this parameter varies with the timer mode.
>>> d.getFeedback( u6.TimerConfig( timer, TimerMode, Value = 0 ) )
[ None ]
"""
def __init__(self, timer, TimerMode, Value=0):
'''Creates command bytes for configuring a Timer'''
#Conditions come from pages 33-34 of user's guide
if timer not in range(4):
raise LabJackException("Timer should be 0-3.")
if TimerMode > 13 or TimerMode < 0:
raise LabJackException("Invalid Timer Mode.")
self.timer = timer
self.timerMode = TimerMode
self.value = Value
self.cmdBytes = [43 + (timer * 2), TimerMode, Value % 256, Value >> 8]
def __repr__(self):
return "<u6.TimerConfig( timer = %s, TimerMode = %s, Value = %s )>" % (self.timer, self.timerMode, self.value)
class Timer0Config(TimerConfig):
"""
This IOType configures Timer0.
TimerMode = See Section 2.9 for more information about the available modes.
Value = The meaning of this parameter varies with the timer mode.
>>> d.getFeedback( u6.Timer0Config( TimerMode, Value = 0 ) )
[ None ]
"""
def __init__(self, TimerMode, Value = 0):
TimerConfig.__init__(self, 0, TimerMode, Value)
def __repr__(self):
return "<u6.Timer0Config( TimerMode = %s, Value = %s )>" % (self.timerMode, self.value)
class Timer1Config(TimerConfig):
"""
This IOType configures Timer1.
TimerMode = See Section 2.9 for more information about the available modes.
Value = The meaning of this parameter varies with the timer mode.
>>> d.getFeedback( u6.Timer1Config( TimerMode, Value = 0 ) )
[ None ]
"""
def __init__(self, TimerMode, Value = 0):
TimerConfig.__init__(self, 1, TimerMode, Value)
def __repr__(self):
return "<u6.Timer1Config( TimerMode = %s, Value = %s )>" % (self.timerMode, self.value)
class Counter(FeedbackCommand):
'''
Counter Feedback command
Reads a hardware counter, optionally resetting it
counter: 0 or 1
Reset: True ( or 1 ) = Reset, False ( or 0 ) = Don't Reset
Returns the current count from the counter if enabled. If reset,
this is the value before the reset.
>>> d.getFeedback( u6.Counter( counter, Reset = False ) )
[ 2183 ]
'''
def __init__(self, counter, Reset):
self.counter = counter
self.reset = Reset
self.cmdBytes = [ 54 + (counter % 2), int(bool(Reset))]
def __repr__(self):
return "<u6.Counter( counter = %s, Reset = %s )>" % (self.counter, self.reset)
readLen = 4
def handle(self, input):
inStr = ''.join([chr(x) for x in input])
return struct.unpack('<I', inStr )[0]
class Counter0(Counter):
'''
Counter0 Feedback command
Reads hardware counter0, optionally resetting it
Reset: True ( or 1 ) = Reset, False ( or 0 ) = Don't Reset
Returns the current count from the counter if enabled. If reset,
this is the value before the reset.
>>> d.getFeedback( u6.Counter0( Reset = False ) )
[ 2183 ]
'''
def __init__(self, Reset = False):
Counter.__init__(self, 0, Reset)
def __repr__(self):
return "<u6.Counter0( Reset = %s )>" % self.reset
class Counter1(Counter):
'''
Counter1 Feedback command
Reads hardware counter1, optionally resetting it
Reset: True ( or 1 ) = Reset, False ( or 0 ) = Don't Reset
Returns the current count from the counter if enabled. If reset,
this is the value before the reset.
>>> d.getFeedback( u6.Counter1( Reset = False ) )
[ 2183 ]
'''
def __init__(self, Reset = False):
Counter.__init__(self, 1, Reset)
def __repr__(self):
return "<u6.Counter1( Reset = %s )>" % self.reset
class DSP(FeedbackCommand):
'''
DSP Feedback command
Acquires 1000 samples from the specified AIN at 50us intervals and performs
the specified analysis on the acquired data.
AcquireNewData: True, acquire new data; False, operate on existing data
DSPAnalysis: 1, True RMS; 2, DC Offset; 3, Peak To Peak; 4, Period (ms)
PLine: Positive Channel
Gain: The gain you would like to use
Resolution: The resolution index to use
SettlingFactor: The SettlingFactor to use
Differential: True, do differential readings; False, single-ended readings
See section 5.2.5.20 of the U6 User's Guide
(http://labjack.com/support/u6/users-guide/5.2.5.20)
>>> d.getFeedback( u6.DSP( PLine, Resolution = 0, Gain = 0,
SettlingFactor = 0, Differential = False,
DSPAnalysis = 1, AcquireNewData = True) )
[ 2183 ]
'''
def __init__(self, PLine, Resolution = 0, Gain = 0, SettlingFactor = 0, Differential = False, DSPAnalysis = 1, AcquireNewData = True):
self.pline = PLine
self.resolution = Resolution
self.gain = Gain
self.settlingFactor = SettlingFactor
self.differential = Differential
self.dspAnalysis = DSPAnalysis
self.acquireNewData = AcquireNewData
byte1 = DSPAnalysis + ( int(AcquireNewData) << 7 )
byte4 = ( Gain << 4 ) + Resolution
byte5 = ( int(Differential) << 7 ) + SettlingFactor
self.cmdBytes = [ 62, byte1, PLine, 0, byte4, byte5, 0, 0 ]
def __repr__(self):
return "<u6.DSP( PLine = %s, Resolution = %s, Gain = %s, SettlingFactor = %s, Differential = %s, DSPAnalysis = %s, AcquireNewData = %s )>" % (self.pline, self.resolution, self.gain, self.settlingFactor, self.differential, self.dspAnalysis, self.acquireNewData)
readLen = 4
def handle(self, input):
inStr = ''.join([chr(x) for x in input])
return struct.unpack('<I', inStr )[0]
|
{
"content_hash": "020fe727489d6852b94bbd43485d5ed4",
"timestamp": "",
"source": "github",
"line_count": 2299,
"max_line_length": 305,
"avg_line_length": 36.359721618094824,
"alnum_prop": 0.5693077005897764,
"repo_name": "Alwnikrotikz/smap-data",
"id": "adad8b591300c58269456567b408703ad4e0abd7",
"size": "83591",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "python/smap/drivers/labjack/labjackpython/u6.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "301328"
},
{
"name": "HTML",
"bytes": "7902"
},
{
"name": "Makefile",
"bytes": "5268"
},
{
"name": "Python",
"bytes": "1394465"
},
{
"name": "R",
"bytes": "23461"
},
{
"name": "Shell",
"bytes": "1273"
},
{
"name": "TeX",
"bytes": "40212"
},
{
"name": "XSLT",
"bytes": "5081"
}
],
"symlink_target": ""
}
|
import json
import os
from django.core.management.base import BaseCommand
from reports import models
CONFIG_DIR_PATH = os.path.expanduser('~/.jetere')
CONFIG_FILE_PATH = os.path.join(CONFIG_DIR_PATH, 'config.json')
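# Example invocation (a sketch; URL and credentials are placeholders):
#   python manage.py configure --jenkins-url http://jenkins.example.com \
#       --jenkins-username admin --jenkins-password secret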
def load_jenkins_configuration_from_file():
if os.path.exists(CONFIG_FILE_PATH):
with open(CONFIG_FILE_PATH, 'r') as f:
return json.load(f)
def update_jenkins_configuration_in_db(config, config_model=None):
# Avoid a mutable default argument: a models.Configuration() default
# would be instantiated once at import time and reused across calls.
if config_model is None:
config_model = models.Configuration()
config_model.jenkins_url = config['jenkins_url']
config_model.jenkins_username = config['jenkins_username']
config_model.jenkins_password = config['jenkins_password']
config_model.save()
return config_model
class Command(BaseCommand):
help = 'Configure jetere'
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument('--jenkins-url',
required=True,
type=str,
help='Jenkins URL')
parser.add_argument('--jenkins-username',
required=True,
type=str,
help='Jenkins username')
parser.add_argument('--jenkins-password',
required=True,
type=str,
help='Jenkins password')
def handle(self, *args, **options):
config = {
'jenkins_url': options['jenkins_url'],
'jenkins_username': options['jenkins_username'],
'jenkins_password': options['jenkins_password'],
}
if not os.path.exists(CONFIG_DIR_PATH):
os.makedirs(CONFIG_DIR_PATH)
self.stdout.write('Writing configuration to %s' % CONFIG_FILE_PATH)
with open(CONFIG_FILE_PATH, 'w') as f:
f.write(json.dumps(config, indent=2))
conf_obj = models.Configuration.objects.all()
if len(conf_obj) == 0:
self.stdout.write('Creating a new configuration in DB...')
update_jenkins_configuration_in_db(config)
else:
self.stdout.write(
'Existing configuration found in DB, updating...')
update_jenkins_configuration_in_db(config, conf_obj[0])
self.stdout.write(self.style.SUCCESS('Done.'))
|
{
"content_hash": "2b9e7922e3836584317672af4539fb7c",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 76,
"avg_line_length": 34.55072463768116,
"alnum_prop": 0.5704697986577181,
"repo_name": "idanmo/jetere",
"id": "39999f625c3d5f53d1c593ca83fbbf086007e10d",
"size": "2384",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "reports/management/commands/configure.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "955"
},
{
"name": "HTML",
"bytes": "10380"
},
{
"name": "Python",
"bytes": "25366"
},
{
"name": "Shell",
"bytes": "96"
}
],
"symlink_target": ""
}
|
import sys
import pathlib
import logging
from logging import handlers
class WurbLogging(object):
""" Log module for the CloudedBats recording unit.
Two target log files are used. Both are implemented as rotation logs.
1. Internal log stored on the Raspberry Pi micro SD card.
2. External log stored on the USB memory for easy access when moving the
USB memory to another computer.
Usage:
self._logger = logging.getLogger('CloudedBatsWURB')
self._logger.info('Info message.')
self._logger.warning('Warning message.')
self._logger.error('Error message.')
self._logger.debug('Debug message.')
try: ...
except Exception as e:
self._logger.error('Exception: ' + str(e))
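Setup example (a sketch; setup() must run before the logger is
used, usb_memory_used=False skips the USB log target):
wurb_logging = WurbLogging()
wurb_logging.setup(usb_memory_used=False)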
"""
def __init__(self):
""" """
def setup(self,
usb_memory_used=True,
internal_path = 'wurb_log_files',
external_path = '/media/usb0/cloudedbats_wurb/log_files'):
""" """
self._usb_memory_used = usb_memory_used
log = logging.getLogger('CloudedBatsWURB')
# log.setLevel(logging.INFO)
log.setLevel(logging.DEBUG)
#
# Internal.
dir_path = pathlib.Path(sys.modules['__main__'].__file__).parents[0] # Same level as wurb_main.py.
self._internal_dir_path = pathlib.Path(dir_path, internal_path)
self._internal_log_path = pathlib.Path(self._internal_dir_path, 'wurb_log.txt')
# External.
self._external_dir_path = pathlib.Path(external_path)
self._external_log_path = pathlib.Path(self._external_dir_path, 'wurb_log.txt')
# Log directories.
if not self._internal_dir_path.exists():
self._internal_dir_path.mkdir(parents=True)
if self._usb_memory_used:
if pathlib.Path('/media/usb0').exists():
if not self._external_dir_path.exists():
self._external_dir_path.mkdir(parents=True)
# Define rotation log files for internal log files.
try:
log_handler = handlers.RotatingFileHandler(str(self._internal_log_path),
maxBytes = 128*1024,
backupCount = 10)
log_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)-10s : %(message)s '))
log_handler.setLevel(logging.DEBUG)
log.addHandler(log_handler)
except Exception as e:
print('WURB logging: Failed to set up logging: ' + str(e))
# Define rotation log files for external log files.
try:
if self._usb_memory_used:
if pathlib.Path('/media/usb0').exists():
log_handler_ext = handlers.RotatingFileHandler(str(self._external_log_path),
maxBytes = 128*1024,
backupCount = 10)
log_handler_ext.setFormatter(logging.Formatter('%(asctime)s %(levelname)-10s : %(message)s '))
log_handler_ext.setLevel(logging.INFO)
log.addHandler(log_handler_ext)
else:
log.warning('')
log.warning('')
log.warning('Logging: Path /media/usb0 does not exist.')
except Exception as e:
print('WURB logging: Failed to set up logging on /media/usb0: ' + str(e))
|
{
"content_hash": "c8a4135570c551f0d5692e0640a96539",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 114,
"avg_line_length": 45.575,
"alnum_prop": 0.5375754251234229,
"repo_name": "cloudedbats/cloudedbats_wurb",
"id": "7137820af67293743c5767fe75a85cdbbe155f52",
"size": "3848",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cloudedbats_wurb/wurb_core/wurb_logging.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "171483"
}
],
"symlink_target": ""
}
|
"""
salt.serializers.plist
~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. versionadded:: 3001
Implements plist serializer.
Wrapper around plistlib.
"""
import logging
import plistlib
from salt.serializers import DeserializationError, SerializationError
log = logging.getLogger(__name__)
__all__ = ["deserialize", "serialize", "available"]
available = True
def deserialize(stream_or_string, **options):
"""
Deserialize any string or stream like object into a Python data structure.
:param stream_or_string: stream or string to deserialize.
:param options: options given to lower plist module.
:returns: Deserialized data structure.
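Example (a round-trip sketch using this module's serialize()):
>>> deserialize(serialize({"lang": "en"}))
{'lang': 'en'}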
"""
try:
if not isinstance(stream_or_string, (bytes, str)):
log.trace("Using plistlib.load to deserialize.")
return plistlib.load(stream_or_string, **options)
if isinstance(stream_or_string, str):
log.trace("Need to encode plist string.")
stream_or_string = stream_or_string.encode("utf-8")
log.trace("Using plistlib.loads to deserialize.")
return plistlib.loads(stream_or_string, **options)
except Exception as error: # pylint: disable=broad-except
raise DeserializationError(error)
def serialize(value, **options):
"""
Serialize Python data to plist. To create a binary plist pass
``fmt: FMT_BINARY`` as an option.
:param obj: the data structure to serialize
:param options: options given to lower plist module.
:returns: bytes of serialized plist.
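Example (a sketch; binary plists start with the b'bplist00' magic):
>>> serialize({"a": 1}, fmt="FMT_BINARY")[:8]
b'bplist00'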
"""
fmt = options.pop("fmt", None)
# add support for serializing to binary.
if fmt == "FMT_BINARY":
log.trace("Adding plistlib.FMT_BINARY to options.")
options["fmt"] = plistlib.FMT_BINARY
try:
if "fp" in options:
log.trace("Using plistlib.dump to serialize.")
return plistlib.dump(value, **options)
log.trace("Using plistlib.dumps to serialize.")
return plistlib.dumps(value, **options)
except Exception as error: # pylint: disable=broad-except
raise SerializationError(error)
|
{
"content_hash": "4622d972bafe724636e4d6672e84d930",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 78,
"avg_line_length": 30.34285714285714,
"alnum_prop": 0.652542372881356,
"repo_name": "saltstack/salt",
"id": "a833b4e7897ecc755c804745225adfda06694da3",
"size": "2124",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "salt/serializers/plist.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "14911"
},
{
"name": "C",
"bytes": "1571"
},
{
"name": "Cython",
"bytes": "1458"
},
{
"name": "Dockerfile",
"bytes": "184"
},
{
"name": "Groovy",
"bytes": "12318"
},
{
"name": "HCL",
"bytes": "257"
},
{
"name": "HTML",
"bytes": "8031"
},
{
"name": "Jinja",
"bytes": "45598"
},
{
"name": "Makefile",
"bytes": "713"
},
{
"name": "NSIS",
"bytes": "76572"
},
{
"name": "PowerShell",
"bytes": "75891"
},
{
"name": "Python",
"bytes": "41444811"
},
{
"name": "Rich Text Format",
"bytes": "6242"
},
{
"name": "Roff",
"bytes": "191"
},
{
"name": "Ruby",
"bytes": "961"
},
{
"name": "SaltStack",
"bytes": "35856"
},
{
"name": "Scheme",
"bytes": "895"
},
{
"name": "Scilab",
"bytes": "1147"
},
{
"name": "Shell",
"bytes": "524917"
}
],
"symlink_target": ""
}
|
from rest_framework.authentication import (
TokenAuthentication, SessionAuthentication)
from rest_framework.decorators import (
authentication_classes, permission_classes)
from rest_framework.parsers import FormParser, MultiPartParser
from rest_framework.viewsets import GenericViewSet
from rest_framework.mixins import (
ListModelMixin, RetrieveModelMixin, UpdateModelMixin)
from rest_framework.filters import DjangoFilterBackend, SearchFilter
from rest_framework.response import Response
from rest_framework.status import HTTP_401_UNAUTHORIZED
from django.contrib.auth.models import User
from users.permissions import (
UpdateProfileIfOwnerOrReadOnly, UpdateUserIfSelfOrReadOnly)
from users.models import Profile
from users.serializers import ProfileSerializer, UserSerializer
from users.filters import UserFilter, ProfileFilter
@authentication_classes((TokenAuthentication, SessionAuthentication,))
@permission_classes((UpdateUserIfSelfOrReadOnly,))
class UserView(GenericViewSet,
ListModelMixin,
RetrieveModelMixin,
UpdateModelMixin):
queryset = User.objects.all()
serializer_class = UserSerializer
filter_backends = (DjangoFilterBackend, SearchFilter,)
search_fields = ('username', 'email', 'first_name', 'last_name',)
filter_class = UserFilter
def update(self, request, pk=None):
user = self.get_object()
serializer = self.get_serializer(user, data=request.data)
serializer.is_valid(raise_exception=True)
if not request.user.is_staff:
read_only_field_changed = False
new_username = serializer.validated_data['username']
new_is_staff = serializer.validated_data['is_staff']
if new_username != user.username:
read_only_field_changed = True
if new_is_staff != user.is_staff:
read_only_field_changed = True
if read_only_field_changed:
message = {'error': 'Only staff can update some fields'}
return Response(message, status=HTTP_401_UNAUTHORIZED)
serializer.save()
return Response(serializer.data)
@authentication_classes((TokenAuthentication, SessionAuthentication,))
@permission_classes((UpdateProfileIfOwnerOrReadOnly,))
class ProfileView(GenericViewSet,
ListModelMixin,
RetrieveModelMixin,
UpdateModelMixin):
queryset = Profile.objects.all()
serializer_class = ProfileSerializer
parser_classes = (FormParser, MultiPartParser,)
filter_backends = (DjangoFilterBackend, SearchFilter,)
search_fields = ('user__username', 'rfid_tag', 'favorite_song',)
filter_class = ProfileFilter
def update(self, request, pk=None):
profile = self.get_object()
serializer = self.get_serializer(profile, data=request.data)
serializer.is_valid(raise_exception=True)
if not request.user.is_staff:
read_only_field_changed = False
new_rfid_tag = serializer.validated_data['rfid_tag']
new_user = serializer.validated_data['user']
if new_rfid_tag != profile.rfid_tag:
read_only_field_changed = True
if new_user != profile.user:
read_only_field_changed = True
if read_only_field_changed:
message = {'error': 'Only staff can update some fields'}
return Response(message, status=HTTP_401_UNAUTHORIZED)
serializer.save()
return Response(serializer.data)
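# Hypothetical wiring sketch, not part of the original module: these viewsets
# would typically be registered with a DRF router in the project's urls.py:
#
#     from rest_framework.routers import DefaultRouter
#     router = DefaultRouter()
#     router.register(r'users', UserView)
#     router.register(r'profiles', ProfileView)
#     urlpatterns = router.urls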
|
{
"content_hash": "ab2a654077e9ce386c44034f9048b686",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 72,
"avg_line_length": 36.96907216494845,
"alnum_prop": 0.6829336307863916,
"repo_name": "J1bz/ecoloscore",
"id": "cd18ba034232e70e8affcf6a5ed50a27c2fec1f0",
"size": "3611",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "users/views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "59276"
}
],
"symlink_target": ""
}
|
"""
=======================================
Read and plot an image from a FITS file
=======================================
This example opens an image stored in a FITS file and displays it to the screen.
This example uses `astropy.utils.data` to download the file, `astropy.io.fits` to open
the file, and `matplotlib.pyplot` to display the image.
*By: Lia R. Corrales, Adrian Price-Whelan, Kelle Cruz*
*License: BSD*
"""
##############################################################################
# Set up matplotlib and use a nicer set of plot parameters
import matplotlib.pyplot as plt
from astropy.visualization import astropy_mpl_style
plt.style.use(astropy_mpl_style)
##############################################################################
# Download the example FITS files used by this example:
from astropy.io import fits
from astropy.utils.data import get_pkg_data_filename
image_file = get_pkg_data_filename('tutorials/FITS-images/HorseHead.fits')
##############################################################################
# Use `astropy.io.fits.info()` to display the structure of the file:
fits.info(image_file)
##############################################################################
# Generally the image information is located in the Primary HDU, also known
# as extension 0. Here, we use `astropy.io.fits.getdata()` to read the image
# data from this first extension using the keyword argument ``ext=0``:
image_data = fits.getdata(image_file, ext=0)
##############################################################################
# The data is now stored as a 2D numpy array. Print the dimensions using the
# shape attribute:
print(image_data.shape)
##############################################################################
# Display the image data:
plt.figure()
plt.imshow(image_data, cmap='gray')
plt.colorbar()
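##############################################################################
# A minimal extra sketch (an addition, not part of the original example):
# the same pixel data can be read with `astropy.io.fits.open` used as a
# context manager, which closes the file automatically:
with fits.open(image_file) as hdul:
    assert (hdul[0].data == image_data).all()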
|
{
"content_hash": "0e84cd4fde2d6d238dfea5cac372138e",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 86,
"avg_line_length": 31.796610169491526,
"alnum_prop": 0.5303837953091685,
"repo_name": "pllim/astropy",
"id": "eed6f6b7273f47b83ca373265533b37ee5502883",
"size": "1876",
"binary": false,
"copies": "5",
"ref": "refs/heads/placeholder",
"path": "examples/io/plot_fits-image.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "11040101"
},
{
"name": "C++",
"bytes": "47001"
},
{
"name": "Cython",
"bytes": "78776"
},
{
"name": "HTML",
"bytes": "1172"
},
{
"name": "Lex",
"bytes": "183333"
},
{
"name": "M4",
"bytes": "18757"
},
{
"name": "Makefile",
"bytes": "52508"
},
{
"name": "Python",
"bytes": "12404182"
},
{
"name": "Shell",
"bytes": "17024"
},
{
"name": "TeX",
"bytes": "853"
}
],
"symlink_target": ""
}
|
import datetime
from flask import Flask
from flask import redirect
from flask import render_template
from flask import request
from flask import url_for
from flask_login import LoginManager
from flask_login import login_required
from flask_login import login_user
from flask_login import logout_user
from flask_login import current_user
import config
if config.test:
from mockdbhelper import MockDBHelper as DBHelper
else:
from dbhelper import DBHelper
from passwordhelper import PasswordHelper
from bitlyhelper import BitlyHelper
from user import User
from forms import RegistrationForm
from forms import LoginForm
from forms import CreateTableForm
app = Flask(__name__)
app.secret_key = "Gxf613UhGRkzAKd47R5daLrUelnlUL4L6AU4z0uu++TNBpdzhAolufHqPQiiEdn34pbE97bmXbN"
login_manager = LoginManager(app)
DB = DBHelper()
PH = PasswordHelper()
BH = BitlyHelper()
@login_manager.user_loader
def load_user(user_id):
user_password = DB.get_user(user_id)
if user_password:
return User(user_id)
@app.route("/login", methods=["POST"])
def login():
form = LoginForm(request.form)
if form.validate():
stored_user = DB.get_user(form.loginemail.data)
if stored_user and PH.validate_password(form.loginpassword.data, stored_user['salt'], stored_user['hashed']):
user = User(form.loginemail.data)
login_user(user, remember=True)
return redirect(url_for('account'))
form.loginemail.errors.append("Email or password invalid")
return render_template("home.html", loginform=form, registrationform=RegistrationForm())
@app.route("/register", methods=["POST"])
def register():
form = RegistrationForm(request.form)
if form.validate():
if DB.get_user(form.email.data):
form.email.errors.append("Email address already registered")
return render_template("home.html", loginform=LoginForm(), registrationform=form)
salt = PH.get_salt()
hashed = PH.get_hash(form.password2.data + salt)
DB.add_user(form.email.data, salt, hashed)
return render_template("home.html", loginform=LoginForm(), registrationform=form, onloadmessage="Registration successful. Please log in.")
return render_template("home.html", loginform=LoginForm(), registrationform=form)
@app.route("/logout")
def logout():
logout_user()
return redirect(url_for("home"))
@app.route("/")
def home():
return render_template("home.html", loginform=LoginForm(), registrationform=RegistrationForm())
@app.route("/dashboard")
@login_required
def dashboard():
now = datetime.datetime.now()
requests = DB.get_requests(current_user.get_id())
for req in requests:
deltaseconds = (now - req['time']).seconds
        req['wait_minutes'] = "{}.{}".format(deltaseconds // 60, str(deltaseconds % 60).zfill(2))
return render_template("dashboard.html", requests=requests)
@app.route("/dashboard/resolve")
@login_required
def dashboard_resolve():
request_id = request.args.get("request_id")
DB.delete_request(request_id)
return redirect(url_for('dashboard'))
@app.route("/account")
@login_required
def account():
tables = DB.get_tables(current_user.get_id())
return render_template("account.html", createtableform=CreateTableForm(), tables=tables)
@app.route("/account/createtable", methods=["POST"])
@login_required
def account_createtable():
form = CreateTableForm(request.form)
if form.validate():
tableid = DB.add_table(form.tablenumber.data, current_user.get_id())
new_url = BH.shorten_url(config.base_url + "newrequest/" + str(tableid))
DB.update_table(tableid, new_url)
return redirect(url_for('account'))
return render_template("account.html", createtableform=form, tables=DB.get_tables(current_user.get_id()))
@app.route("/account/deletetable")
@login_required
def account_deletetable():
tableid = request.args.get("tableid")
DB.delete_table(tableid)
return redirect(url_for('account'))
@app.route("/newrequest/<tid>")
def new_request(tid):
if DB.add_request(tid, datetime.datetime.now()):
return "Your request has been logged and a waiter will be with you shortly"
return "There is already a request pending for this table. Please be patient, a waiter will be there ASAP"
if __name__ == '__main__':
app.run(debug=True)
|
{
"content_hash": "c48d9ffd1685900ae49c711995c649d8",
"timestamp": "",
"source": "github",
"line_count": 138,
"max_line_length": 146,
"avg_line_length": 31.884057971014492,
"alnum_prop": 0.7088636363636364,
"repo_name": "nikitabrazhnik/flask2",
"id": "20921b71678347dfaa07d3f3b9d45594df02860b",
"size": "4400",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Module 1/Chapter11/waitercaller.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "23096"
},
{
"name": "HTML",
"bytes": "731057"
},
{
"name": "JavaScript",
"bytes": "67540"
},
{
"name": "Mako",
"bytes": "824"
},
{
"name": "Nginx",
"bytes": "231"
},
{
"name": "Python",
"bytes": "423344"
}
],
"symlink_target": ""
}
|
"""A set of functions that are used for visualization.
These functions typically receive an image and perform some visualization on it.
The functions do not return a value; instead they modify the image in place.
"""
import collections
import functools
# Set headless-friendly backend.
import matplotlib; matplotlib.use('Agg') # pylint: disable=multiple-statements
import matplotlib.pyplot as plt # pylint: disable=g-import-not-at-top
import numpy as np
import PIL.Image as Image
import PIL.ImageColor as ImageColor
import PIL.ImageDraw as ImageDraw
import PIL.ImageFont as ImageFont
import six
import tensorflow as tf
import standard_fields as fields
_TITLE_LEFT_MARGIN = 10
_TITLE_TOP_MARGIN = 10
STANDARD_COLORS = [
'AliceBlue', 'Chartreuse', 'Aqua', 'Aquamarine', 'Azure', 'Beige', 'Bisque',
'BlanchedAlmond', 'BlueViolet', 'BurlyWood', 'CadetBlue', 'AntiqueWhite',
'Chocolate', 'Coral', 'CornflowerBlue', 'Cornsilk', 'Crimson', 'Cyan',
'DarkCyan', 'DarkGoldenRod', 'DarkGrey', 'DarkKhaki', 'DarkOrange',
'DarkOrchid', 'DarkSalmon', 'DarkSeaGreen', 'DarkTurquoise', 'DarkViolet',
'DeepPink', 'DeepSkyBlue', 'DodgerBlue', 'FireBrick', 'FloralWhite',
'ForestGreen', 'Fuchsia', 'Gainsboro', 'GhostWhite', 'Gold', 'GoldenRod',
'Salmon', 'Tan', 'HoneyDew', 'HotPink', 'IndianRed', 'Ivory', 'Khaki',
'Lavender', 'LavenderBlush', 'LawnGreen', 'LemonChiffon', 'LightBlue',
'LightCoral', 'LightCyan', 'LightGoldenRodYellow', 'LightGray', 'LightGrey',
'LightGreen', 'LightPink', 'LightSalmon', 'LightSeaGreen', 'LightSkyBlue',
'LightSlateGray', 'LightSlateGrey', 'LightSteelBlue', 'LightYellow', 'Lime',
'LimeGreen', 'Linen', 'Magenta', 'MediumAquaMarine', 'MediumOrchid',
'MediumPurple', 'MediumSeaGreen', 'MediumSlateBlue', 'MediumSpringGreen',
'MediumTurquoise', 'MediumVioletRed', 'MintCream', 'MistyRose', 'Moccasin',
'NavajoWhite', 'OldLace', 'Olive', 'OliveDrab', 'Orange', 'OrangeRed',
'Orchid', 'PaleGoldenRod', 'PaleGreen', 'PaleTurquoise', 'PaleVioletRed',
'PapayaWhip', 'PeachPuff', 'Peru', 'Pink', 'Plum', 'PowderBlue', 'Purple',
'Red', 'RosyBrown', 'RoyalBlue', 'SaddleBrown', 'Green', 'SandyBrown',
'SeaGreen', 'SeaShell', 'Sienna', 'Silver', 'SkyBlue', 'SlateBlue',
'SlateGray', 'SlateGrey', 'Snow', 'SpringGreen', 'SteelBlue', 'GreenYellow',
'Teal', 'Thistle', 'Tomato', 'Turquoise', 'Violet', 'Wheat', 'White',
'WhiteSmoke', 'Yellow', 'YellowGreen'
]
def save_image_array_as_png(image, output_path):
"""Saves an image (represented as a numpy array) to PNG.
Args:
image: a numpy array with shape [height, width, 3].
output_path: path to which image should be written.
"""
image_pil = Image.fromarray(np.uint8(image)).convert('RGB')
with tf.gfile.Open(output_path, 'w') as fid:
image_pil.save(fid, 'PNG')
def encode_image_array_as_png_str(image):
"""Encodes a numpy array into a PNG string.
Args:
image: a numpy array with shape [height, width, 3].
Returns:
PNG encoded image string.
"""
image_pil = Image.fromarray(np.uint8(image))
output = six.BytesIO()
image_pil.save(output, format='PNG')
png_string = output.getvalue()
output.close()
return png_string
def draw_bounding_box_on_image_array(image,
ymin,
xmin,
ymax,
xmax,
color='red',
thickness=4,
display_str_list=(),
use_normalized_coordinates=True):
"""Adds a bounding box to an image (numpy array).
Bounding box coordinates can be specified in either absolute (pixel) or
normalized coordinates by setting the use_normalized_coordinates argument.
Args:
image: a numpy array with shape [height, width, 3].
ymin: ymin of bounding box.
xmin: xmin of bounding box.
ymax: ymax of bounding box.
xmax: xmax of bounding box.
color: color to draw bounding box. Default is red.
thickness: line thickness. Default value is 4.
display_str_list: list of strings to display in box
(each to be shown on its own line).
use_normalized_coordinates: If True (default), treat coordinates
ymin, xmin, ymax, xmax as relative to the image. Otherwise treat
coordinates as absolute.
"""
image_pil = Image.fromarray(np.uint8(image)).convert('RGB')
draw_bounding_box_on_image(image_pil, ymin, xmin, ymax, xmax, color,
thickness, display_str_list,
use_normalized_coordinates)
np.copyto(image, np.array(image_pil))
def draw_bounding_box_on_image(image,
ymin,
xmin,
ymax,
xmax,
color='red',
thickness=4,
display_str_list=(),
use_normalized_coordinates=True):
"""Adds a bounding box to an image.
Bounding box coordinates can be specified in either absolute (pixel) or
normalized coordinates by setting the use_normalized_coordinates argument.
Each string in display_str_list is displayed on a separate line above the
bounding box in black text on a rectangle filled with the input 'color'.
If the top of the bounding box extends to the edge of the image, the strings
are displayed below the bounding box.
Args:
image: a PIL.Image object.
ymin: ymin of bounding box.
xmin: xmin of bounding box.
ymax: ymax of bounding box.
xmax: xmax of bounding box.
color: color to draw bounding box. Default is red.
thickness: line thickness. Default value is 4.
display_str_list: list of strings to display in box
(each to be shown on its own line).
use_normalized_coordinates: If True (default), treat coordinates
ymin, xmin, ymax, xmax as relative to the image. Otherwise treat
coordinates as absolute.
"""
draw = ImageDraw.Draw(image)
im_width, im_height = image.size
if use_normalized_coordinates:
(left, right, top, bottom) = (xmin * im_width, xmax * im_width,
ymin * im_height, ymax * im_height)
else:
(left, right, top, bottom) = (xmin, xmax, ymin, ymax)
draw.line([(left, top), (left, bottom), (right, bottom),
(right, top), (left, top)], width=thickness, fill=color)
try:
font = ImageFont.truetype('arial.ttf', 24)
except IOError:
font = ImageFont.load_default()
# If the total height of the display strings added to the top of the bounding
# box exceeds the top of the image, stack the strings below the bounding box
# instead of above.
display_str_heights = [font.getsize(ds)[1] for ds in display_str_list]
# Each display_str has a top and bottom margin of 0.05x.
total_display_str_height = (1 + 2 * 0.05) * sum(display_str_heights)
if top > total_display_str_height:
text_bottom = top
else:
text_bottom = bottom + total_display_str_height
# Reverse list and print from bottom to top.
for display_str in display_str_list[::-1]:
text_width, text_height = font.getsize(display_str)
margin = np.ceil(0.05 * text_height)
draw.rectangle(
[(left, text_bottom - text_height - 2 * margin), (left + text_width,
text_bottom)],
fill=color)
draw.text(
(left + margin, text_bottom - text_height - margin),
display_str,
fill='black',
font=font)
text_bottom -= text_height - 2 * margin
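def _example_draw_box():
    # Hypothetical usage sketch, not part of the original module: draws one
    # labeled, normalized bounding box on a blank RGB image in place.
    image = np.zeros((100, 100, 3), dtype=np.uint8)
    draw_bounding_box_on_image_array(
        image, 0.25, 0.25, 0.75, 0.75,
        color='LimeGreen', display_str_list=['cat: 87%'])
    return image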
def draw_bounding_boxes_on_image_array(image,
boxes,
color='red',
thickness=4,
display_str_list_list=()):
"""Draws bounding boxes on image (numpy array).
Args:
image: a numpy array object.
boxes: a 2 dimensional numpy array of [N, 4]: (ymin, xmin, ymax, xmax).
The coordinates are in normalized format between [0, 1].
color: color to draw bounding box. Default is red.
thickness: line thickness. Default value is 4.
display_str_list_list: list of list of strings.
a list of strings for each bounding box.
The reason to pass a list of strings for a
bounding box is that it might contain
multiple labels.
Raises:
ValueError: if boxes is not a [N, 4] array
"""
image_pil = Image.fromarray(image)
draw_bounding_boxes_on_image(image_pil, boxes, color, thickness,
display_str_list_list)
np.copyto(image, np.array(image_pil))
def draw_bounding_boxes_on_image(image,
boxes,
color='red',
thickness=4,
display_str_list_list=()):
"""Draws bounding boxes on image.
Args:
image: a PIL.Image object.
boxes: a 2 dimensional numpy array of [N, 4]: (ymin, xmin, ymax, xmax).
The coordinates are in normalized format between [0, 1].
color: color to draw bounding box. Default is red.
thickness: line thickness. Default value is 4.
display_str_list_list: list of list of strings.
a list of strings for each bounding box.
The reason to pass a list of strings for a
bounding box is that it might contain
multiple labels.
Raises:
ValueError: if boxes is not a [N, 4] array
"""
boxes_shape = boxes.shape
if not boxes_shape:
return
if len(boxes_shape) != 2 or boxes_shape[1] != 4:
raise ValueError('Input must be of size [N, 4]')
for i in range(boxes_shape[0]):
display_str_list = ()
if display_str_list_list:
display_str_list = display_str_list_list[i]
draw_bounding_box_on_image(image, boxes[i, 0], boxes[i, 1], boxes[i, 2],
boxes[i, 3], color, thickness, display_str_list)
def _visualize_boxes(image, boxes, classes, scores, category_index, **kwargs):
return visualize_boxes_and_labels_on_image_array(
image, boxes, classes, scores, category_index=category_index, **kwargs)
def _visualize_boxes_and_masks(image, boxes, classes, scores, masks,
category_index, **kwargs):
return visualize_boxes_and_labels_on_image_array(
image,
boxes,
classes,
scores,
category_index=category_index,
instance_masks=masks,
**kwargs)
def _visualize_boxes_and_keypoints(image, boxes, classes, scores, keypoints,
category_index, **kwargs):
return visualize_boxes_and_labels_on_image_array(
image,
boxes,
classes,
scores,
category_index=category_index,
keypoints=keypoints,
**kwargs)
def _visualize_boxes_and_masks_and_keypoints(
image, boxes, classes, scores, masks, keypoints, category_index, **kwargs):
return visualize_boxes_and_labels_on_image_array(
image,
boxes,
classes,
scores,
category_index=category_index,
instance_masks=masks,
keypoints=keypoints,
**kwargs)
def draw_bounding_boxes_on_image_tensors(images,
boxes,
classes,
scores,
category_index,
instance_masks=None,
keypoints=None,
max_boxes_to_draw=20,
min_score_thresh=0.2,
use_normalized_coordinates=True):
"""Draws bounding boxes, masks, and keypoints on batch of image tensors.
Args:
images: A 4D uint8 image tensor of shape [N, H, W, C]. If C > 3, additional
channels will be ignored.
boxes: [N, max_detections, 4] float32 tensor of detection boxes.
classes: [N, max_detections] int tensor of detection classes. Note that
classes are 1-indexed.
scores: [N, max_detections] float32 tensor of detection scores.
    category_index: a dict that maps integer ids to category dicts. e.g.
      {1: {'id': 1, 'name': 'dog'}, 2: {'id': 2, 'name': 'cat'}, ...}
instance_masks: A 4D uint8 tensor of shape [N, max_detection, H, W] with
instance masks.
keypoints: A 4D float32 tensor of shape [N, max_detection, num_keypoints, 2]
with keypoints.
max_boxes_to_draw: Maximum number of boxes to draw on an image. Default 20.
min_score_thresh: Minimum score threshold for visualization. Default 0.2.
    use_normalized_coordinates: Whether to assume boxes and keypoints are in
      normalized coordinates (as opposed to absolute coordinates).
Default is True.
Returns:
4D image tensor of type uint8, with boxes drawn on top.
"""
# Additional channels are being ignored.
images = images[:, :, :, 0:3]
visualization_keyword_args = {
'use_normalized_coordinates': use_normalized_coordinates,
'max_boxes_to_draw': max_boxes_to_draw,
'min_score_thresh': min_score_thresh,
'agnostic_mode': False,
'line_thickness': 4
}
if instance_masks is not None and keypoints is None:
visualize_boxes_fn = functools.partial(
_visualize_boxes_and_masks,
category_index=category_index,
**visualization_keyword_args)
elems = [images, boxes, classes, scores, instance_masks]
elif instance_masks is None and keypoints is not None:
visualize_boxes_fn = functools.partial(
_visualize_boxes_and_keypoints,
category_index=category_index,
**visualization_keyword_args)
elems = [images, boxes, classes, scores, keypoints]
elif instance_masks is not None and keypoints is not None:
visualize_boxes_fn = functools.partial(
_visualize_boxes_and_masks_and_keypoints,
category_index=category_index,
**visualization_keyword_args)
elems = [images, boxes, classes, scores, instance_masks, keypoints]
else:
visualize_boxes_fn = functools.partial(
_visualize_boxes,
category_index=category_index,
**visualization_keyword_args)
elems = [images, boxes, classes, scores]
def draw_boxes(image_and_detections):
"""Draws boxes on image."""
image_with_boxes = tf.py_func(visualize_boxes_fn, image_and_detections,
tf.uint8)
return image_with_boxes
images = tf.map_fn(draw_boxes, elems, dtype=tf.uint8, back_prop=False)
return images
def draw_side_by_side_evaluation_image(eval_dict,
category_index,
max_boxes_to_draw=20,
min_score_thresh=0.2,
use_normalized_coordinates=True):
"""Creates a side-by-side image with detections and groundtruth.
Bounding boxes (and instance masks, if available) are visualized on both
subimages.
Args:
eval_dict: The evaluation dictionary returned by
eval_util.result_dict_for_single_example().
category_index: A category index (dictionary) produced from a labelmap.
max_boxes_to_draw: The maximum number of boxes to draw for detections.
min_score_thresh: The minimum score threshold for showing detections.
    use_normalized_coordinates: Whether to assume boxes and keypoints are in
      normalized coordinates (as opposed to absolute coordinates).
Default is True.
Returns:
A [1, H, 2 * W, C] uint8 tensor. The subimage on the left corresponds to
detections, while the subimage on the right corresponds to groundtruth.
"""
detection_fields = fields.DetectionResultFields()
input_data_fields = fields.InputDataFields()
instance_masks = None
if detection_fields.detection_masks in eval_dict:
instance_masks = tf.cast(
tf.expand_dims(eval_dict[detection_fields.detection_masks], axis=0),
tf.uint8)
keypoints = None
if detection_fields.detection_keypoints in eval_dict:
keypoints = tf.expand_dims(
eval_dict[detection_fields.detection_keypoints], axis=0)
groundtruth_instance_masks = None
if input_data_fields.groundtruth_instance_masks in eval_dict:
groundtruth_instance_masks = tf.cast(
tf.expand_dims(
eval_dict[input_data_fields.groundtruth_instance_masks], axis=0),
tf.uint8)
images_with_detections = draw_bounding_boxes_on_image_tensors(
eval_dict[input_data_fields.original_image],
tf.expand_dims(eval_dict[detection_fields.detection_boxes], axis=0),
tf.expand_dims(eval_dict[detection_fields.detection_classes], axis=0),
tf.expand_dims(eval_dict[detection_fields.detection_scores], axis=0),
category_index,
instance_masks=instance_masks,
keypoints=keypoints,
max_boxes_to_draw=max_boxes_to_draw,
min_score_thresh=min_score_thresh,
use_normalized_coordinates=use_normalized_coordinates)
images_with_groundtruth = draw_bounding_boxes_on_image_tensors(
eval_dict[input_data_fields.original_image],
tf.expand_dims(eval_dict[input_data_fields.groundtruth_boxes], axis=0),
tf.expand_dims(eval_dict[input_data_fields.groundtruth_classes], axis=0),
tf.expand_dims(
tf.ones_like(
eval_dict[input_data_fields.groundtruth_classes],
dtype=tf.float32),
axis=0),
category_index,
instance_masks=groundtruth_instance_masks,
keypoints=None,
max_boxes_to_draw=None,
min_score_thresh=0.0,
use_normalized_coordinates=use_normalized_coordinates)
return tf.concat([images_with_detections, images_with_groundtruth], axis=2)
def draw_keypoints_on_image_array(image,
keypoints,
color='red',
radius=2,
use_normalized_coordinates=True):
"""Draws keypoints on an image (numpy array).
Args:
image: a numpy array with shape [height, width, 3].
keypoints: a numpy array with shape [num_keypoints, 2].
color: color to draw the keypoints with. Default is red.
radius: keypoint radius. Default value is 2.
use_normalized_coordinates: if True (default), treat keypoint values as
relative to the image. Otherwise treat them as absolute.
"""
image_pil = Image.fromarray(np.uint8(image)).convert('RGB')
draw_keypoints_on_image(image_pil, keypoints, color, radius,
use_normalized_coordinates)
np.copyto(image, np.array(image_pil))
def draw_keypoints_on_image(image,
keypoints,
color='red',
radius=2,
use_normalized_coordinates=True):
"""Draws keypoints on an image.
Args:
image: a PIL.Image object.
keypoints: a numpy array with shape [num_keypoints, 2].
color: color to draw the keypoints with. Default is red.
radius: keypoint radius. Default value is 2.
use_normalized_coordinates: if True (default), treat keypoint values as
relative to the image. Otherwise treat them as absolute.
"""
draw = ImageDraw.Draw(image)
im_width, im_height = image.size
keypoints_x = [k[1] for k in keypoints]
keypoints_y = [k[0] for k in keypoints]
if use_normalized_coordinates:
keypoints_x = tuple([im_width * x for x in keypoints_x])
keypoints_y = tuple([im_height * y for y in keypoints_y])
for keypoint_x, keypoint_y in zip(keypoints_x, keypoints_y):
draw.ellipse([(keypoint_x - radius, keypoint_y - radius),
(keypoint_x + radius, keypoint_y + radius)],
outline=color, fill=color)
def draw_mask_on_image_array(image, mask, color='red', alpha=0.4):
"""Draws mask on an image.
Args:
    image: uint8 numpy array with shape (img_height, img_width, 3)
    mask: a uint8 numpy array of shape (img_height, img_width) with
      values of either 0 or 1.
    color: color to draw the mask with. Default is red.
alpha: transparency value between 0 and 1. (default: 0.4)
Raises:
ValueError: On incorrect data type for image or masks.
"""
if image.dtype != np.uint8:
raise ValueError('`image` not of type np.uint8')
if mask.dtype != np.uint8:
raise ValueError('`mask` not of type np.uint8')
if np.any(np.logical_and(mask != 1, mask != 0)):
raise ValueError('`mask` elements should be in [0, 1]')
if image.shape[:2] != mask.shape:
raise ValueError('The image has spatial dimensions %s but the mask has '
'dimensions %s' % (image.shape[:2], mask.shape))
rgb = ImageColor.getrgb(color)
pil_image = Image.fromarray(image)
solid_color = np.expand_dims(
np.ones_like(mask), axis=2) * np.reshape(list(rgb), [1, 1, 3])
pil_solid_color = Image.fromarray(np.uint8(solid_color)).convert('RGBA')
pil_mask = Image.fromarray(np.uint8(255.0*alpha*mask)).convert('L')
pil_image = Image.composite(pil_solid_color, pil_image, pil_mask)
np.copyto(image, np.array(pil_image.convert('RGB')))
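def _example_draw_mask():
    # Hypothetical usage sketch, not part of the original module: overlays a
    # centered square mask in semi-transparent red on a blank image in place.
    image = np.zeros((64, 64, 3), dtype=np.uint8)
    mask = np.zeros((64, 64), dtype=np.uint8)
    mask[16:48, 16:48] = 1
    draw_mask_on_image_array(image, mask, color='red', alpha=0.4)
    return image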
def visualize_boxes_and_labels_on_image_array(
image,
boxes,
classes,
scores,
category_index,
instance_masks=None,
instance_boundaries=None,
keypoints=None,
use_normalized_coordinates=False,
max_boxes_to_draw=20,
min_score_thresh=.5,
agnostic_mode=False,
line_thickness=4,
groundtruth_box_visualization_color='black',
skip_scores=False,
skip_labels=False):
"""Overlay labeled boxes on an image with formatted scores and label names.
This function groups boxes that correspond to the same location
and creates a display string for each detection and overlays these
on the image. Note that this function modifies the image in place, and returns
that same image.
Args:
image: uint8 numpy array with shape (img_height, img_width, 3)
boxes: a numpy array of shape [N, 4]
classes: a numpy array of shape [N]. Note that class indices are 1-based,
and match the keys in the label map.
scores: a numpy array of shape [N] or None. If scores=None, then
this function assumes that the boxes to be plotted are groundtruth
boxes and plot all boxes as black with no classes or scores.
category_index: a dict containing category dictionaries (each holding
category index `id` and category name `name`) keyed by category indices.
instance_masks: a numpy array of shape [N, image_height, image_width] with
values ranging between 0 and 1, can be None.
instance_boundaries: a numpy array of shape [N, image_height, image_width]
with values ranging between 0 and 1, can be None.
keypoints: a numpy array of shape [N, num_keypoints, 2], can
be None
    use_normalized_coordinates: whether boxes are to be interpreted as
      normalized coordinates or not.
max_boxes_to_draw: maximum number of boxes to visualize. If None, draw
all boxes.
min_score_thresh: minimum score threshold for a box to be visualized
agnostic_mode: boolean (default: False) controlling whether to evaluate in
class-agnostic mode or not. This mode will display scores but ignore
classes.
line_thickness: integer (default: 4) controlling line width of the boxes.
groundtruth_box_visualization_color: box color for visualizing groundtruth
boxes
skip_scores: whether to skip score when drawing a single detection
skip_labels: whether to skip label when drawing a single detection
Returns:
uint8 numpy array with shape (img_height, img_width, 3) with overlaid boxes.
"""
# Create a display string (and color) for every box location, group any boxes
# that correspond to the same location.
box_to_display_str_map = collections.defaultdict(list)
box_to_color_map = collections.defaultdict(str)
box_to_instance_masks_map = {}
box_to_instance_boundaries_map = {}
box_to_keypoints_map = collections.defaultdict(list)
if not max_boxes_to_draw:
max_boxes_to_draw = boxes.shape[0]
for i in range(min(max_boxes_to_draw, boxes.shape[0])):
if scores is None or scores[i] > min_score_thresh:
box = tuple(boxes[i].tolist())
if instance_masks is not None:
box_to_instance_masks_map[box] = instance_masks[i]
if instance_boundaries is not None:
box_to_instance_boundaries_map[box] = instance_boundaries[i]
if keypoints is not None:
box_to_keypoints_map[box].extend(keypoints[i])
if scores is None:
box_to_color_map[box] = groundtruth_box_visualization_color
else:
display_str = ''
if not skip_labels:
if not agnostic_mode:
if classes[i] in category_index.keys():
class_name = category_index[classes[i]]['name']
else:
class_name = 'N/A'
display_str = str(class_name)
if not skip_scores:
if not display_str:
display_str = '{}%'.format(int(100*scores[i]))
else:
display_str = '{}: {}%'.format(display_str, int(100*scores[i]))
box_to_display_str_map[box].append(display_str)
if agnostic_mode:
box_to_color_map[box] = 'DarkOrange'
else:
box_to_color_map[box] = STANDARD_COLORS[
classes[i] % len(STANDARD_COLORS)]
# Draw all boxes onto image.
for box, color in box_to_color_map.items():
ymin, xmin, ymax, xmax = box
if instance_masks is not None:
draw_mask_on_image_array(
image,
box_to_instance_masks_map[box],
color=color
)
if instance_boundaries is not None:
draw_mask_on_image_array(
image,
box_to_instance_boundaries_map[box],
color='red',
alpha=1.0
)
draw_bounding_box_on_image_array(
image,
ymin,
xmin,
ymax,
xmax,
color=color,
thickness=line_thickness,
display_str_list=box_to_display_str_map[box],
use_normalized_coordinates=use_normalized_coordinates)
if keypoints is not None:
draw_keypoints_on_image_array(
image,
box_to_keypoints_map[box],
color=color,
radius=line_thickness / 2,
use_normalized_coordinates=use_normalized_coordinates)
return image
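def _example_visualize():
    # Hypothetical usage sketch, not part of the original module: overlays a
    # single detection at 90% confidence on a blank image and returns it.
    image = np.zeros((100, 100, 3), dtype=np.uint8)
    boxes = np.array([[0.1, 0.1, 0.9, 0.9]])
    classes = np.array([1])
    scores = np.array([0.9])
    category_index = {1: {'id': 1, 'name': 'cat'}}
    return visualize_boxes_and_labels_on_image_array(
        image, boxes, classes, scores, category_index,
        use_normalized_coordinates=True)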
def add_cdf_image_summary(values, name):
"""Adds a tf.summary.image for a CDF plot of the values.
Normalizes `values` such that they sum to 1, plots the cumulative distribution
function and creates a tf image summary.
Args:
values: a 1-D float32 tensor containing the values.
name: name for the image summary.
"""
def cdf_plot(values):
"""Numpy function to plot CDF."""
normalized_values = values / np.sum(values)
sorted_values = np.sort(normalized_values)
cumulative_values = np.cumsum(sorted_values)
fraction_of_examples = (np.arange(cumulative_values.size, dtype=np.float32)
/ cumulative_values.size)
fig = plt.figure(frameon=False)
ax = fig.add_subplot('111')
ax.plot(fraction_of_examples, cumulative_values)
ax.set_ylabel('cumulative normalized values')
ax.set_xlabel('fraction of examples')
fig.canvas.draw()
width, height = fig.get_size_inches() * fig.get_dpi()
image = np.fromstring(fig.canvas.tostring_rgb(), dtype='uint8').reshape(
1, int(height), int(width), 3)
return image
cdf_plot = tf.py_func(cdf_plot, [values], tf.uint8)
tf.summary.image(name, cdf_plot)
def add_hist_image_summary(values, bins, name):
"""Adds a tf.summary.image for a histogram plot of the values.
Plots the histogram of values and creates a tf image summary.
Args:
values: a 1-D float32 tensor containing the values.
bins: bin edges which will be directly passed to np.histogram.
name: name for the image summary.
"""
def hist_plot(values, bins):
"""Numpy function to plot hist."""
fig = plt.figure(frameon=False)
ax = fig.add_subplot('111')
y, x = np.histogram(values, bins=bins)
ax.plot(x[:-1], y)
ax.set_ylabel('count')
ax.set_xlabel('value')
fig.canvas.draw()
width, height = fig.get_size_inches() * fig.get_dpi()
image = np.fromstring(
fig.canvas.tostring_rgb(), dtype='uint8').reshape(
1, int(height), int(width), 3)
return image
hist_plot = tf.py_func(hist_plot, [values, bins], tf.uint8)
tf.summary.image(name, hist_plot)
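def _example_summaries():
    # Hypothetical usage sketch, not part of the original module: attach CDF
    # and histogram image summaries for a 1-D tensor of loss values.
    losses = tf.constant([0.1, 0.4, 0.4, 2.0], dtype=tf.float32)
    add_cdf_image_summary(losses, 'loss_cdf')
    add_hist_image_summary(losses, np.linspace(0.0, 2.0, 11), 'loss_hist')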
|
{
"content_hash": "d0ce0397e2f286c87e2c738b73ed4d9e",
"timestamp": "",
"source": "github",
"line_count": 718,
"max_line_length": 80,
"avg_line_length": 40.26323119777159,
"alnum_prop": 0.6297346847002664,
"repo_name": "tombstone/models",
"id": "7a7aeb50561dba9f8713d12a184ddd824c3c0e19",
"size": "29599",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "research/cognitive_planning/visualization_utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "1365199"
},
{
"name": "GLSL",
"bytes": "976"
},
{
"name": "HTML",
"bytes": "147010"
},
{
"name": "JavaScript",
"bytes": "33208"
},
{
"name": "Jupyter Notebook",
"bytes": "1858048"
},
{
"name": "Makefile",
"bytes": "4763"
},
{
"name": "Python",
"bytes": "7241242"
},
{
"name": "Shell",
"bytes": "102270"
},
{
"name": "TypeScript",
"bytes": "6515"
}
],
"symlink_target": ""
}
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('server', '0059_machine_report_format'),
]
operations = [
migrations.AddField(
model_name='machine',
name='broken_client',
field=models.BooleanField(default=False),
),
]
|
{
"content_hash": "18172328afad58737edb31e02ee59b64",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 53,
"avg_line_length": 21.75,
"alnum_prop": 0.5833333333333334,
"repo_name": "salopensource/sal",
"id": "f9dadbc26e3645c7e0e9023082f22867cdc5a079",
"size": "397",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "server/migrations/0060_machine_broken_client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "119817"
},
{
"name": "Dockerfile",
"bytes": "2228"
},
{
"name": "HTML",
"bytes": "152173"
},
{
"name": "JavaScript",
"bytes": "279963"
},
{
"name": "Less",
"bytes": "67048"
},
{
"name": "Makefile",
"bytes": "2208"
},
{
"name": "Procfile",
"bytes": "23"
},
{
"name": "Python",
"bytes": "613680"
},
{
"name": "SCSS",
"bytes": "51035"
},
{
"name": "Shell",
"bytes": "4535"
}
],
"symlink_target": ""
}
|
from pants.testutil.jvm.nailgun_task_test_base import NailgunTaskTestBase
class JarTaskTestBase(NailgunTaskTestBase):
"""Prepares an ephemeral test build root that supports jar tasks.
:API: public
"""
|
{
"content_hash": "39dfab78858f5945e12c3942f53b35ed",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 73,
"avg_line_length": 27,
"alnum_prop": 0.7592592592592593,
"repo_name": "tdyas/pants",
"id": "3f0928a517872d5da1399b9c15b4468cfba8b2cc",
"size": "348",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/python/pants/testutil/jvm/jar_task_test_base.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "655"
},
{
"name": "C++",
"bytes": "2010"
},
{
"name": "CSS",
"bytes": "9444"
},
{
"name": "Dockerfile",
"bytes": "5596"
},
{
"name": "GAP",
"bytes": "1283"
},
{
"name": "Gherkin",
"bytes": "919"
},
{
"name": "Go",
"bytes": "2765"
},
{
"name": "HTML",
"bytes": "44381"
},
{
"name": "Java",
"bytes": "518180"
},
{
"name": "JavaScript",
"bytes": "22906"
},
{
"name": "Python",
"bytes": "7955590"
},
{
"name": "Rust",
"bytes": "1031208"
},
{
"name": "Scala",
"bytes": "106520"
},
{
"name": "Shell",
"bytes": "109904"
},
{
"name": "Starlark",
"bytes": "502255"
},
{
"name": "Thrift",
"bytes": "2953"
}
],
"symlink_target": ""
}
|
"""Implementation of RootOf class and related tools. """
from sympy.core import S, Basic, Expr, Integer, Float, I, Add, Lambda, symbols, sympify
from sympy.polys.polytools import Poly, PurePoly, factor
from sympy.polys.rationaltools import together
from sympy.polys.polyfuncs import symmetrize, viete
from sympy.polys.rootisolation import (
dup_isolate_complex_roots_sqf,
dup_isolate_real_roots_sqf)
from sympy.polys.polyroots import (
roots_linear, roots_quadratic,
roots_binomial, preprocess_roots)
from sympy.polys.polyerrors import (
MultivariatePolynomialError,
GeneratorsNeeded,
PolynomialError)
from sympy.polys.domains import QQ
from sympy.mpmath import (
mp, mpf, mpc, findroot)
# dense polynomial helpers used by the dup_minpoly_* functions below
from sympy.polys.densebasic import (
    dmp_raise, dmp_strip, dmp_from_dict)
from sympy.polys.densearith import dup_lshift
from sympy.polys.densetools import dmp_compose
from sympy.polys.euclidtools import dmp_resultant
from sympy.utilities import lambdify
import operator
def dup_minpoly_add(f, g, K):
F = dmp_raise(f, 1, 0, K)
G = dmp_raise(g, 1, 0, K)
H = [[-K.one], [K.one, K.zero]]
F = dmp_compose(F, H, 1, K)
return dmp_resultant(F, G, 1, K)
def dup_minpoly_sub(f, g, K):
F = dmp_raise(f, 1, 0, K)
G = dmp_raise(g, 1, 0, K)
H = [[K.one], [K.one, K.zero]]
F = dmp_compose(F, H, 1, K)
return dmp_resultant(F, G, 1, K)
def dup_minpoly_mul(f, g, K):
f, F = reversed(f), []
for i, c in enumerate(f):
if not c:
F.append([])
else:
F.append(dup_lshift([c], i, K))
    F = dmp_strip(F, 1)
G = dmp_raise(g, 1, 0, K)
return dmp_resultant(F, G, 1, K)
def dup_minpoly_div(f, g, K):
F = dmp_raise(f, 1, 0, K)
G = dmp_raise(g, 1, 0, K)
H = [[K.one, K.zero], []]
F = dmp_compose(F, H, 1, K)
return dmp_resultant(F, G, 1, K)
def dup_minpoly_pow(f, p, q, K):
d = {(p, 0): -K.one, (0, q): K.one}
F = dmp_raise(f, 1, 0, K)
G = dmp_from_dict(d, 1, K)
return dmp_resultant(F, G, 1, K)
_reals_cache = {}
_complexes_cache = {}
class RootOf(Expr):
"""Represents ``k``-th root of a univariate polynomial. """
__slots__ = ['poly', 'index']
def __new__(cls, f, x, index=None, radicals=True, expand=True):
"""Construct a new ``RootOf`` object for ``k``-th root of ``f``. """
x = sympify(x)
if index is None and x.is_Integer:
x, index = None, x
else:
index = sympify(index)
if index.is_Integer:
index = int(index)
else:
raise ValueError("expected an integer root index, got %d" % index)
poly = PurePoly(f, x, greedy=False, expand=expand)
if not poly.is_univariate:
raise PolynomialError("only univariate polynomials are allowed")
degree = poly.degree()
if degree <= 0:
raise PolynomialError("can't construct RootOf object for %s" % f)
if index < -degree or index >= degree:
raise IndexError("root index out of [%d, %d] range, got %d" % (-degree, degree-1, index))
elif index < 0:
index += degree
dom = poly.get_domain()
if not dom.is_Exact:
poly = poly.to_exact()
roots = cls._roots_trivial(poly, radicals)
if roots is not None:
return roots[index]
coeff, poly = preprocess_roots(poly)
dom = poly.get_domain()
if not dom.is_ZZ:
raise NotImplementedError("RootOf is not supported over %s" % dom)
root = cls._indexed_root(poly, index)
return coeff*cls._postprocess_root(root, radicals)
@classmethod
def _new(cls, poly, index):
"""Construct new ``RootOf`` object from raw data. """
obj = Expr.__new__(cls)
obj.poly = poly
obj.index = index
return obj
def _hashable_content(self):
return (self.poly, self.index)
@property
def expr(self):
return self.poly.as_expr()
@property
def args(self):
return (self.expr, Integer(self.index))
@property
def free_symbols(self):
return self.poly.free_symbols
@property
def is_commutative(self):
return True
@property
def is_real(self):
"""Return ``True`` if the root is real. """
return self.index < len(_reals_cache[self.poly])
@property
def is_complex(self):
"""Return ``True`` if the root is complex. """
return not self.is_real
@classmethod
def real_roots(cls, poly, radicals=True):
"""Get real roots of a polynomial. """
return cls._get_roots("_real_roots", poly, radicals)
@classmethod
def all_roots(cls, poly, radicals=True):
"""Get real and complex roots of a polynomial. """
return cls._get_roots("_all_roots", poly, radicals)
@classmethod
def _get_reals_sqf(cls, factor):
"""Compute real root isolating intervals for a square-free polynomial. """
if factor in _reals_cache:
real_part = _reals_cache[factor]
else:
_reals_cache[factor] = real_part = \
dup_isolate_real_roots_sqf(factor.rep.rep, factor.rep.dom, blackbox=True)
return real_part
@classmethod
def _get_complexes_sqf(cls, factor):
"""Compute complex root isolating intervals for a square-free polynomial. """
if factor in _complexes_cache:
complex_part = _complexes_cache[factor]
else:
_complexes_cache[factor] = complex_part = \
dup_isolate_complex_roots_sqf(factor.rep.rep, factor.rep.dom, blackbox=True)
return complex_part
@classmethod
def _get_reals(cls, factors):
"""Compute real root isolating intervals for a list of factors. """
reals = []
for factor, k in factors:
real_part = cls._get_reals_sqf(factor)
reals.extend([ (root, factor, k) for root in real_part ])
return reals
@classmethod
def _get_complexes(cls, factors):
"""Compute complex root isolating intervals for a list of factors. """
complexes = []
for factor, k in factors:
complex_part = cls._get_complexes_sqf(factor)
complexes.extend([ (root, factor, k) for root in complex_part ])
return complexes
@classmethod
def _reals_sorted(cls, reals):
"""Make real isolating intervals disjoint and sort roots. """
cache = {}
for i, (u, f, k) in enumerate(reals):
for j, (v, g, m) in enumerate(reals[i+1:]):
u, v = u.refine_disjoint(v)
reals[i+j+1] = (v, g, m)
reals[i] = (u, f, k)
reals = sorted(reals, key=lambda r: r[0].a)
for root, factor, _ in reals:
if factor in cache:
cache[factor].append(root)
else:
cache[factor] = [root]
for factor, roots in cache.iteritems():
_reals_cache[factor] = roots
return reals
@classmethod
def _complexes_sorted(cls, complexes):
"""Make complex isolating intervals disjoint and sort roots. """
cache = {}
for i, (u, f, k) in enumerate(complexes):
for j, (v, g, m) in enumerate(complexes[i+1:]):
u, v = u.refine_disjoint(v)
complexes[i+j+1] = (v, g, m)
complexes[i] = (u, f, k)
complexes = sorted(complexes, key=lambda r: (r[0].ax, r[0].ay))
for root, factor, _ in complexes:
if factor in cache:
cache[factor].append(root)
else:
cache[factor] = [root]
for factor, roots in cache.iteritems():
_complexes_cache[factor] = roots
return complexes
@classmethod
def _reals_index(cls, reals, index):
"""Map initial real root index to an index in a factor where the root belongs. """
i = 0
for j, (_, factor, k) in enumerate(reals):
if index < i + k:
poly, index = factor, 0
for _, factor, _ in reals[:j]:
if factor == poly:
index += 1
return poly, index
else:
i += k
@classmethod
def _complexes_index(cls, complexes, index):
"""Map initial complex root index to an index in a factor where the root belongs. """
index, i = index, 0
for j, (_, factor, k) in enumerate(complexes):
if index < i + k:
poly, index = factor, 0
for _, factor, _ in complexes[:j]:
if factor == poly:
index += 1
index += len(_reals_cache[poly])
return poly, index
else:
i += k
@classmethod
def _count_roots(cls, roots):
"""Count the number of real or complex roots including multiplicites. """
return sum([ k for _, _, k in roots ])
@classmethod
def _indexed_root(cls, poly, index):
"""Get a root of a composite polynomial by index. """
(_, factors) = poly.factor_list()
reals = cls._get_reals(factors)
reals_count = cls._count_roots(reals)
if index < reals_count:
reals = cls._reals_sorted(reals)
return cls._reals_index(reals, index)
else:
complexes = cls._get_complexes(factors)
complexes = cls._complexes_sorted(complexes)
return cls._complexes_index(complexes, index-reals_count)
@classmethod
def _real_roots(cls, poly):
"""Get real roots of a composite polynomial. """
(_, factors) = poly.factor_list()
reals = cls._get_reals(factors)
reals = cls._reals_sorted(reals)
reals_count = cls._count_roots(reals)
roots = []
for index in xrange(0, reals_count):
roots.append(cls._reals_index(reals, index))
return roots
@classmethod
def _all_roots(cls, poly):
"""Get real and complex roots of a composite polynomial. """
(_, factors) = poly.factor_list()
reals = cls._get_reals(factors)
reals = cls._reals_sorted(reals)
reals_count = cls._count_roots(reals)
roots = []
for index in xrange(0, reals_count):
roots.append(cls._reals_index(reals, index))
complexes = cls._get_complexes(factors)
complexes = cls._complexes_sorted(complexes)
complexes_count = cls._count_roots(complexes)
for index in xrange(0, complexes_count):
roots.append(cls._complexes_index(complexes, index))
return roots
@classmethod
def _roots_trivial(cls, poly, radicals):
"""Compute roots in linear, quadratic and binomial cases. """
if poly.degree() == 1:
return roots_linear(poly)
if not radicals:
return None
if radicals and poly.degree() == 2:
return roots_quadratic(poly)
elif radicals and poly.length() == 2 and poly.TC():
return roots_binomial(poly)
else:
return None
@classmethod
def _preprocess_roots(cls, poly):
"""Take heroic measures to make ``poly`` compatible with ``RootOf``. """
dom = poly.get_domain()
if not dom.is_Exact:
poly = poly.to_exact()
coeff, poly = preprocess_roots(poly)
dom = poly.get_domain()
if not dom.is_ZZ:
raise NotImplementedError("RootOf is not supported over %s" % dom)
return coeff, poly
@classmethod
def _postprocess_root(cls, root, radicals):
"""Return the root if it is trivial or a ``RootOf`` object. """
poly, index = root
roots = cls._roots_trivial(poly, radicals)
if roots is not None:
return roots[index]
else:
return cls._new(poly, index)
@classmethod
def _get_roots(cls, method, poly, radicals):
"""Return postprocessed roots of specified kind. """
if not poly.is_univariate:
raise PolynomialError("only univariate polynomials are allowed")
coeff, poly = cls._preprocess_roots(poly)
roots = []
for root in getattr(cls, method)(poly):
roots.append(coeff*cls._postprocess_root(root, radicals))
return roots
def _get_interval(self):
"""Internal function for retrieving isolation interval from cache. """
if self.is_real:
return _reals_cache[self.poly][self.index]
else:
reals_count = len(_reals_cache[self.poly])
return _complexes_cache[self.poly][self.index - reals_count]
def _set_interval(self, interval):
"""Internal function for updating isolation interval in cache. """
if self.is_real:
_reals_cache[self.poly][self.index] = interval
else:
reals_count = len(_reals_cache[self.poly])
_complexes_cache[self.poly][self.index - reals_count] = interval
def _eval_evalf(self, prec):
"""Evaluate this complex root to the given precision. """
_prec, mp.prec = mp.prec, prec
try:
func = lambdify(self.poly.gen, self.expr)
interval = self._get_interval()
refined = False
while True:
if self.is_real:
x0 = mpf(str(interval.center))
else:
x0 = mpc(*map(str, interval.center))
try:
root = findroot(func, x0)
except ValueError:
interval = interval.refine()
refined = True
continue
else:
if refined:
self._set_interval(interval)
break
finally:
mp.prec = _prec
return Float._new(root.real._mpf_, prec) + I*Float._new(root.imag._mpf_, prec)
class RootSum(Expr):
"""Represents a sum of all roots of a univariate polynomial. """
__slots__ = ['poly', 'fun', 'auto']
def __new__(cls, expr, func=None, x=None, auto=True, quadratic=False):
"""Construct a new ``RootSum`` instance carrying all roots of a polynomial. """
coeff, poly = cls._transform(expr, x)
if not poly.is_univariate:
raise MultivariatePolynomialError("only univariate polynomials are allowed")
if func is None:
func = Lambda(poly.gen, poly.gen)
else:
try:
is_func = func.is_Function
except AttributeError:
is_func = False
if is_func and (func.nargs == 1 or 1 in func.nargs):
if not isinstance(func, Lambda):
func = Lambda(poly.gen, func(poly.gen))
else:
raise ValueError("expected a univariate function, got %s" % func)
var, expr = func.variables[0], func.expr
if coeff is not S.One:
expr = expr.subs(var, coeff*var)
deg = poly.degree()
if not expr.has(var):
return deg*expr
if expr.is_Add:
add_const, expr = expr.as_independent(var)
else:
add_const = S.Zero
if expr.is_Mul:
mul_const, expr = expr.as_independent(var)
else:
mul_const = S.One
func = Lambda(var, expr)
rational = cls._is_func_rational(poly, func)
(_, factors), terms = poly.factor_list(), []
for poly, k in factors:
if poly.is_linear:
term = func(roots_linear(poly)[0])
elif quadratic and poly.is_quadratic:
term = sum(map(func, roots_quadratic(poly)))
else:
if not rational or not auto:
term = cls._new(poly, func, auto)
else:
term = cls._rational_case(poly, func)
terms.append(k*term)
return mul_const*Add(*terms) + deg*add_const
@classmethod
def _new(cls, poly, func, auto=True):
"""Construct new raw ``RootSum`` instance. """
obj = Expr.__new__(cls)
obj.poly = poly
obj.fun = func
obj.auto = auto
return obj
@classmethod
def new(cls, poly, func, auto=True):
"""Construct new ``RootSum`` instance. """
if not func.expr.has(*func.variables):
return func.expr
rational = cls._is_func_rational(poly, func)
if not rational or not auto:
return cls._new(poly, func, auto)
else:
return cls._rational_case(poly, func)
@classmethod
def _transform(cls, expr, x):
"""Transform an expression to a polynomial. """
poly = PurePoly(expr, x, greedy=False)
return preprocess_roots(poly)
@classmethod
def _is_func_rational(cls, poly, func):
"""Check if a lambda is areational function. """
var, expr = func.variables[0], func.expr
return expr.is_rational_function(var)
@classmethod
def _rational_case(cls, poly, func):
"""Handle the rational function case. """
roots = symbols('r:%d' % poly.degree())
var, expr = func.variables[0], func.expr
f = sum(expr.subs(var, r) for r in roots)
p, q = together(f).as_numer_denom()
domain = QQ[roots]
p = p.expand()
q = q.expand()
try:
p = Poly(p, domain=domain, expand=False)
except GeneratorsNeeded:
p, p_coeff = None, (p,)
else:
p_monom, p_coeff = zip(*p.terms())
try:
q = Poly(q, domain=domain, expand=False)
except GeneratorsNeeded:
q, q_coeff = None, (q,)
else:
q_monom, q_coeff = zip(*q.terms())
coeffs, mapping = symmetrize(p_coeff + q_coeff, formal=True)
formulas, values = viete(poly, roots), []
for (sym, _), (_, val) in zip(mapping, formulas):
values.append((sym, val))
for i, (coeff, _) in enumerate(coeffs):
coeffs[i] = coeff.subs(values)
n = len(p_coeff)
p_coeff = coeffs[:n]
q_coeff = coeffs[n:]
if p is not None:
p = Poly(dict(zip(p_monom, p_coeff)), *p.gens).as_expr()
else:
(p,) = p_coeff
if q is not None:
q = Poly(dict(zip(q_monom, q_coeff)), *q.gens).as_expr()
else:
(q,) = q_coeff
return factor(p/q)
def _hashable_content(self):
return (self.poly, self.fun)
@property
def expr(self):
return self.poly.as_expr()
@property
def args(self):
return (self.expr, self.fun, self.poly.gen)
@property
def free_symbols(self):
return self.poly.free_symbols | self.fun.free_symbols
@property
def is_commutative(self):
return True
def doit(self, **hints):
if hints.get('roots', True):
return Add(*map(self.fun, self.poly.all_roots()))
else:
return self
def _eval_derivative(self, x):
var, expr = self.fun.args
func = Lambda(var, expr.diff(x))
return self.new(self.poly, func, self.auto)
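def _example_rootof():
    # Hypothetical usage sketch, not part of the original module: index the
    # roots of x**3 - 2; index 0 is its single real root (the cube root of 2).
    from sympy.abc import x
    r = RootOf(x**3 - 2, 0)
    return r.evalf(15)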
|
{
"content_hash": "1873e28f63f862738995af0dc08a3529",
"timestamp": "",
"source": "github",
"line_count": 664,
"max_line_length": 101,
"avg_line_length": 28.89909638554217,
"alnum_prop": 0.5471363802178332,
"repo_name": "minrk/sympy",
"id": "a583c79dd3536aa65dd46610c08dea6f6edcabb9",
"size": "19189",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "sympy/polys/rootoftools.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "6803953"
},
{
"name": "Scheme",
"bytes": "125"
},
{
"name": "Shell",
"bytes": "2734"
}
],
"symlink_target": ""
}
|
"""Auto-generated file, do not edit by hand. CM metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_CM = PhoneMetadata(id='CM', country_code=237, international_prefix='00',
general_desc=PhoneNumberDesc(national_number_pattern='[2368]\\d{7,8}', possible_length=(8, 9)),
fixed_line=PhoneNumberDesc(national_number_pattern='2(?:22|33|4[23])\\d{6}', example_number='222123456', possible_length=(9,)),
mobile=PhoneNumberDesc(national_number_pattern='6[5-9]\\d{7}', example_number='671234567', possible_length=(9,)),
toll_free=PhoneNumberDesc(national_number_pattern='88\\d{6}', example_number='88012345', possible_length=(8,)),
number_format=[NumberFormat(pattern='([26])(\\d{2})(\\d{2})(\\d{2})(\\d{2})', format='\\1 \\2 \\3 \\4 \\5', leading_digits_pattern=['[26]']),
NumberFormat(pattern='(\\d{2})(\\d{2})(\\d{2})(\\d{2})', format='\\1 \\2 \\3 \\4', leading_digits_pattern=['[23]|88'])])
|
{
"content_hash": "d681f17da53b622bc406136361d745e0",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 145,
"avg_line_length": 96.3,
"alnum_prop": 0.6614745586708204,
"repo_name": "gencer/python-phonenumbers",
"id": "c06d55edc2a71beb9a62445039ca0a232a366a97",
"size": "963",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "python/phonenumbers/data/region_CM.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "23039485"
}
],
"symlink_target": ""
}
|
"""
Computes matrix-matrix products via specialization.
The C configuration inside ctree.cfg should include the -mavx flag in the cflags section.
For example:
[c]
cc = gcc-4.9
cflags = -mavx -O3 -mmacosx-version-min=10.6 -std=c99
This program also requires the current ucb-sejits fork of opentuner:
https://github.com/ucb-sejits/opentuner
"""
import logging
from ctree.nodes import Project
logging.basicConfig(level=60)
import copy
import numpy as np
import ctypes as ct
import inspect
from ctree.c.nodes import *
from ctree.cpp.nodes import CppComment
from ctree.types import *
from ctree.simd.macros import *
from ctree.simd.types import *
from ctree.templates.nodes import StringTemplate
from ctree.transformations import *
from ctree.jit import LazySpecializedFunction, ConcreteSpecializedFunction
from ctree.metrics.watts_up_reader import WattsUpReader
def MultiArrayRef(name, *idxs):
"""
    Given a string and a list of index expressions, produce the chain of
array-ref expressions:
>>> MultiArrayRef('foo', Constant(1), Constant(2), Constant(3)).codegen()
'foo[1][2][3]'
"""
tree = ArrayRef(SymbolRef(name), idxs[0])
for idx in idxs[1:]:
        tree = ArrayRef(tree, idx)
return tree
def dummy_func():
return
class DgemmTranslator(LazySpecializedFunction):
def __init__(self):
self._current_config = None
super(DgemmTranslator, self).__init__(ast.parse(inspect.getsource(dummy_func)), "dgemm")
def get_tuning_driver(self):
from ctree.opentuner.driver import OpenTunerDriver
from opentuner.search.manipulator import ConfigurationManipulator
from opentuner.search.manipulator import IntegerParameter
from opentuner.search.manipulator import PowerOfTwoParameter
from opentuner.search.objective import MinimizeTime, MinimizeEnergy
manip = ConfigurationManipulator()
manip.add_parameter(PowerOfTwoParameter("rx", 1, 8))
manip.add_parameter(PowerOfTwoParameter("ry", 1, 8))
manip.add_parameter(IntegerParameter("cx", 8, 32))
manip.add_parameter(IntegerParameter("cy", 8, 32))
return OpenTunerDriver(manipulator=manip, objective=MinimizeTime())
def args_to_subconfig(self, args):
"""
Analyze arguments and return a 'subconfig', a hashable object
that classifies them. Arguments with identical subconfigs
might be processed by the same generated code.
"""
C, A, B, duration = args
n = len(A)
assert C.shape == A.shape == B.shape == (n, n)
assert A.dtype == B.dtype == C.dtype
return {
'n': n,
'dtype': A.dtype,
}
def _gen_load_c_block(self, rx, ry, lda):
"""
Return a subtree that loads a block of 'c'.
"""
stmts = [CppComment("Load a block of c")]
for j in range(rx):
            for i in range(ry // 4):
stmt = Assign(MultiArrayRef("c", Constant(i), Constant(j)),
mm256_loadu_pd(Add(SymbolRef("C"), Constant(i*4+j*lda))))
stmts.append(stmt)
return Block(stmts)
def _gen_store_c_block(self, rx, ry, lda):
"""
        Return a subtree that stores the block of 'c' back to memory.
"""
stmts = [CppComment("Store the c block")]
for j in range(rx):
            for i in range(ry // 4):
stmt = mm256_storeu_pd(Add(SymbolRef("C"), Constant(i*4+j*lda)),
MultiArrayRef("c", Constant(i), Constant(j)))
stmts.append(stmt)
return Block(stmts)
def _gen_rank1_update(self, i, rx, ry, cx, cy, lda):
stmts = []
        for j in range(ry // 4):
stmt = Assign(SymbolRef("a%d"%j),
mm256_load_pd( Add(SymbolRef("A"),
Constant(j*4+i*cy)) ))
stmts.append(stmt)
for j in range(rx):
stmt = Assign(SymbolRef("b"),
mm256_set1_pd(ArrayRef(SymbolRef("B"),
Constant(i+j*lda))))
stmts.append(stmt)
            for k in range(ry // 4):
stmt = Assign(MultiArrayRef("c", Constant(k), Constant(j)),
mm256_add_pd( MultiArrayRef("c", Constant(k), Constant(j)),
mm256_mul_pd(SymbolRef("a%d"%k), SymbolRef("b")) ))
stmts.append(stmt)
return Block(stmts)
def _gen_k_rank1_updates(self, rx, ry, cx, cy, unroll, lda):
stmts = [CppComment("do K rank-1 updates")]
        for i in range(ry // 4):
stmts.append(SymbolRef("a%d" % i, m256d()))
stmts.append(SymbolRef("b", m256d()))
stmts.extend(self._gen_rank1_update(i, rx, ry, cx, cy, lda) for i in range(unroll))
return Block(stmts)
def transform(self, py_ast, program_config):
"""
Convert the Python AST to a C AST according to the directions
given in program_config.
"""
self._current_config = program_config
arg_config, tuner_config = program_config
n, dtype = arg_config['n'], arg_config['dtype']
rx, ry = tuner_config['rx']*4, tuner_config['ry']*4
cx, cy = tuner_config['cx']*4, tuner_config['cy']*4
unroll = tuner_config['ry']*4
array_type = np.ctypeslib.ndpointer(dtype, 2, (n, n))
A = SymbolRef("A", array_type())
B = SymbolRef("B", array_type())
C = SymbolRef("C", array_type())
N = Constant(n)
RX, RY = Constant(rx), Constant(ry)
CX, CY = Constant(cx), Constant(cy)
UNROLL = Constant(unroll)
template_args = {
"A_decl": A.copy(declare=True),
"B_decl": B.copy(declare=True),
"C_decl": C.copy(declare=True),
"RX": RX,
"RY": RY,
"CX": CX,
"CY": CY,
"UNROLL": UNROLL,
"lda": N,
}
preamble = StringTemplate("""
#include <immintrin.h>
#include <stdio.h>
#define min(x,y) (((x)<(y))?(x):(y))
""", copy.deepcopy(template_args))
reg_template_args = {
'load_c_block': self._gen_load_c_block(rx, ry, n),
'store_c_block': self._gen_store_c_block(rx, ry, n),
'k_rank1_updates': self._gen_k_rank1_updates(rx, ry, cx, cy, unroll, n),
}
reg_template_args.update(copy.deepcopy(template_args))
register_dgemm = StringTemplate("""
void register_dgemm( $A_decl, $B_decl, $C_decl, int K ) {
__m256d c[$RY/4][$RX];
$load_c_block
while ( K >= $UNROLL ) {
$k_rank1_updates
A += $UNROLL*$CY;
B += $UNROLL;
K -= $UNROLL;
}
$store_c_block
}
""", reg_template_args)
fast_dgemm = StringTemplate("""
void fast_dgemm( int M, int N, int K, $A_decl, $B_decl, $C_decl ) {
static double a[$CX*$CY] __attribute__ ((aligned (32)));
// make a local aligned copy of A's block
for( int j = 0; j < K; j++ )
for( int i = 0; i < M; i++ )
a[i+j*$CY] = A[i+j*$lda];
// multiply using the copy
for( int j = 0; j < N; j += $RX )
for( int i = 0; i < M; i += $RY )
register_dgemm( a + i, B + j*$lda, C + i + j*$lda, K );
}""", template_args)
fringe_dgemm = StringTemplate("""
void fringe_dgemm( int M, int N, int K, $A_decl, $B_decl, $C_decl )
{
for( int j = 0; j < N; j++ )
for( int i = 0; i < M; i++ )
for( int k = 0; k < K; k++ )
C[i+j*$lda] += A[i+k*$lda] * B[k+j*$lda];
}
""", copy.deepcopy(template_args))
wall_time = StringTemplate("""
#include <sys/time.h>
double wall_time () {
struct timeval t;
gettimeofday (&t, NULL);
return 1.*t.tv_sec + 1.e-6*t.tv_usec;
}
""", {})
dgemm = StringTemplate("""
int align( int x, int y ) { return x <= y ? x : (x/y)*y; }
void dgemm($C_decl, $A_decl, $B_decl, double *duration) {
double start_time = wall_time();
for( int i = 0; i < $lda; ) {
int I = align( min( $lda-i, $CY ), $RY );
for( int j = 0; j < $lda; ) {
int J = align( $lda-j, $RX );
for( int k = 0; k < $lda; ) {
int K = align( min( $lda-k, $CX ), $UNROLL );
if( (I%$RY) == 0 && (J%$RX) == 0 && (K%$UNROLL) == 0 )
fast_dgemm ( I, J, K, A + i + k*$lda, B + k + j*$lda, C + i + j*$lda );
else
fringe_dgemm( I, J, K, A + i + k*$lda, B + k + j*$lda, C + i + j*$lda );
k += K;
}
j += J;
}
i += I;
}
// report time back for tuner
*duration = wall_time() - start_time;
}
""", copy.deepcopy(template_args))
        c_dgemm = CFile("generated", [
            preamble,
            wall_time,
            register_dgemm,
            fast_dgemm,
            fringe_dgemm,
            dgemm,
        ])
        return [c_dgemm]
def finalize(self, transform_result, program_config):
c_dgemm = transform_result[0]
proj = Project([c_dgemm])
arg_config, tuner_config = program_config
n, dtype = arg_config['n'], arg_config['dtype']
array_type = np.ctypeslib.ndpointer(dtype, 2, (n, n))
        entry_type = ct.CFUNCTYPE(None, array_type, array_type, array_type,
                                  ct.POINTER(ct.c_double))
concrete_Fn = ConcreteDgemm()
return concrete_Fn.finalize("dgemm", proj, entry_type)
class ConcreteDgemm(ConcreteSpecializedFunction):
def finalize(self, entry_point_name, project_node, entry_typesig):
self._c_function = self._compile(entry_point_name, project_node, entry_typesig)
return self
def __call__(self, C, A, B, duration):
return self._c_function(C, A, B, duration)
class SquareDgemm(object):
def __init__(self):
"""Instantiate translator."""
self.c_dgemm = DgemmTranslator()
def __call__(self, A, B):
"""C = A * B"""
C = np.zeros(shape=A.shape, dtype=A.dtype)
meter = Meter()
meter.start_recording()
self.c_dgemm(C, A, B, ct.byref(meter.time_meter))
joules = meter.energy_value()
seconds = meter.time_value()
self.c_dgemm.report(time=seconds, energy=joules)
return C, seconds, joules, self.c_dgemm._current_config
class Meter(object):
def __init__(self, use_energy=False):
        self.time_meter = ct.c_double()
self.use_energy = use_energy
self.energy_meter = WattsUpReader() if self.use_energy else None
def start_recording(self):
if self.use_energy:
self.energy_meter.start_recording()
def time_value(self):
return self.time_meter.value
def energy_value(self):
if self.use_energy:
return self.energy_meter.get_recording()[0].joules
else:
return 0.0
def main():
n = 2048
c_dot = SquareDgemm()
A = np.random.rand(n, n)
B = np.random.rand(n, n)
C_expected = np.dot(A.T, B.T)
best_joules = float('inf')
for i in range(1000):
C_actual, seconds, joules, config = c_dot(A, B)
np.testing.assert_almost_equal(C_actual.T, C_expected)
best_indicator = "*** new best ***" if joules < best_joules else ""
best_joules = min(best_joules, joules)
ticks = min(40, int(joules / 10.0))
print("trial %s %s took %f sec, used %s joules: %s %s" %
(str(i).rjust(3), str(config[1]).ljust(38), seconds, str(joules).rjust(5),
('#' * ticks).ljust(40), best_indicator))
del C_actual
print("Done.")
if __name__ == '__main__':
main()
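# A minimal pure-Python sketch (illustrative only, not part of the specializer)
# of the blocking structure that the generated dgemm C code implements: the
# outer loops walk cache blocks of size CX/CY, and each block would be handed
# to register_dgemm when it aligns with the RX/RY register tile. The names and
# the row-major indexing here are simplifying assumptions.
def blocked_matmul_sketch(A, B, C, CX=32, CY=32):
    n = len(A)
    for i in range(0, n, CY):
        for j in range(0, n, CX):
            for k in range(0, n, CX):
                I = min(CY, n - i)
                J = min(CX, n - j)
                K = min(CX, n - k)
                # the specialized code dispatches to fast_dgemm here when the
                # block matches the register tile, and to fringe_dgemm otherwise
                for jj in range(j, j + J):
                    for ii in range(i, i + I):
                        for kk in range(k, k + K):
                            C[ii][jj] += A[ii][kk] * B[kk][jj]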
|
{
"content_hash": "6a619158c4d65c47bc4397b94fcb53aa",
"timestamp": "",
"source": "github",
"line_count": 371,
"max_line_length": 100,
"avg_line_length": 33.07277628032345,
"alnum_prop": 0.5247758761206194,
"repo_name": "ucb-sejits/ctree",
"id": "2c99d45d64ca7314a9506ae5d2b1d52ec17d74e1",
"size": "12270",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/dgemm.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Mako",
"bytes": "820"
},
{
"name": "Python",
"bytes": "249654"
},
{
"name": "Shell",
"bytes": "1396"
}
],
"symlink_target": ""
}
|
from threaded_messages.models import cached_inbox_count_for
def inbox(request):
if request.user.is_authenticated():
return {'messages_inbox_count': cached_inbox_count_for(request.user)}
else:
return {}
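# Registration sketch (illustrative): to expose messages_inbox_count in
# templates, this processor would be listed in the project settings; the
# dotted path below assumes the app layout of this repository.
#
# TEMPLATES = [{
#     ...
#     'OPTIONS': {
#         'context_processors': [
#             ...
#             'threaded_messages.context_processors.inbox',
#         ],
#     },
# }]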
|
{
"content_hash": "85156013690a37e572588d36182c6ea9",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 77,
"avg_line_length": 28.5,
"alnum_prop": 0.6929824561403509,
"repo_name": "GreatBizTools/django-threaded-messages",
"id": "41b7d80bf812279597e3cc75022cf0f2c63f4ef2",
"size": "228",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "threaded_messages/context_processors.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2351"
},
{
"name": "HTML",
"bytes": "60255"
},
{
"name": "JavaScript",
"bytes": "41545"
},
{
"name": "Python",
"bytes": "95335"
}
],
"symlink_target": ""
}
|
import os
import seafileapi
import unittest
from contextlib import contextmanager
from tests.utils import randstring
SERVER = os.environ.get('SEAFILE_TEST_SERVER_ADDRESS', 'http://127.0.0.1:8000')
USER = os.environ.get('SEAFILE_TEST_USERNAME', 'test@seafiletest.com')
PASSWORD = os.environ.get('SEAFILE_TEST_PASSWORD', 'testtest')
ADMIN_USER = os.environ.get('SEAFILE_TEST_ADMIN_USERNAME', 'admin@seafiletest.com')
ADMIN_PASSWORD = os.environ.get('SEAFILE_TEST_ADMIN_PASSWORD', 'adminadmin')
def _create_client():
return seafileapi.connect(SERVER, USER, PASSWORD)
class SeafileApiTestCase(unittest.TestCase):
"""Base class for all python-seafile test cases"""
client = _create_client()
def assertHasLen(self, obj, expected_length):
actuallen = len(obj)
        msg = 'Expected length is %s, but actual length is %s' % (expected_length, actuallen)
self.assertEqual(actuallen, expected_length, msg)
def assertEmpty(self, obj):
self.assertHasLen(obj, 0)
@contextmanager
def create_tmp_repo(self):
repos = self.client.repos
repo_name = 'tmp-测试资料库-%s' % randstring()
repo_desc = 'tmp, 一个测试资料库-%s' % randstring()
repo = repos.create_repo(repo_name, repo_desc)
try:
yield repo
finally:
repo.delete()
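# A minimal usage sketch (illustrative, not part of the test base). get_dir()
# and ls() are assumptions about the python-seafile API and are not verified
# here.
class ExampleRepoTestCase(SeafileApiTestCase):
    def test_new_repo_starts_empty(self):
        with self.create_tmp_repo() as repo:
            # a freshly created library should contain no entries
            self.assertEmpty(repo.get_dir('/').ls())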
|
{
"content_hash": "0be9ffdbc1015d5cc1494e6fbe5c8840",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 93,
"avg_line_length": 33.97435897435897,
"alnum_prop": 0.6784905660377358,
"repo_name": "cloudcopy/python-seafile",
"id": "7e4f8ff96044bb8ef1f51709b50f01c8897ccd0f",
"size": "1365",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/base.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "22787"
},
{
"name": "Shell",
"bytes": "1017"
}
],
"symlink_target": ""
}
|
import SimpleITK as sitk
import sys
import os
def command_iteration(method):
print(f"{method.GetOptimizerIteration():3} = {method.GetMetricValue():10.5f}")
if len(sys.argv) < 4:
print("Usage:", sys.argv[0], "<fixedImageFilter> <movingImageFile>",
"<outputTransformFile>")
sys.exit(1)
fixed = sitk.ReadImage(sys.argv[1], sitk.sitkFloat32)
moving = sitk.ReadImage(sys.argv[2], sitk.sitkFloat32)
transformDomainMeshSize = [8] * moving.GetDimension()
tx = sitk.BSplineTransformInitializer(fixed,
transformDomainMeshSize)
print("Initial Parameters:")
print(tx.GetParameters())
R = sitk.ImageRegistrationMethod()
R.SetMetricAsCorrelation()
R.SetOptimizerAsLBFGSB(gradientConvergenceTolerance=1e-5,
numberOfIterations=100,
maximumNumberOfCorrections=5,
maximumNumberOfFunctionEvaluations=1000,
costFunctionConvergenceFactor=1e+7)
R.SetInitialTransform(tx, True)
R.SetInterpolator(sitk.sitkLinear)
R.AddCommand(sitk.sitkIterationEvent, lambda: command_iteration(R))
outTx = R.Execute(fixed, moving)
print("-------")
print(outTx)
print(f"Optimizer stop condition: {R.GetOptimizerStopConditionDescription()}")
print(f" Iteration: {R.GetOptimizerIteration()}")
print(f" Metric value: {R.GetMetricValue()}")
sitk.WriteTransform(outTx, sys.argv[3])
if ("SITK_NOSHOW" not in os.environ):
resampler = sitk.ResampleImageFilter()
resampler.SetReferenceImage(fixed)
resampler.SetInterpolator(sitk.sitkLinear)
resampler.SetDefaultPixelValue(100)
resampler.SetTransform(outTx)
out = resampler.Execute(moving)
simg1 = sitk.Cast(sitk.RescaleIntensity(fixed), sitk.sitkUInt8)
simg2 = sitk.Cast(sitk.RescaleIntensity(out), sitk.sitkUInt8)
cimg = sitk.Compose(simg1, simg2, simg1 // 2. + simg2 // 2.)
sitk.Show(cimg, "ImageRegistration1 Composition")
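# Example invocation (file names are placeholders):
#   python ImageRegistrationMethodBSpline1.py fixedImage.nii movingImage.nii out.tfm
# Set SITK_NOSHOW in the environment to skip the final visualization step.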
|
{
"content_hash": "b0b1a7d5f6903ed5595d08b567fcddf1",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 82,
"avg_line_length": 32.233333333333334,
"alnum_prop": 0.704756980351603,
"repo_name": "richardbeare/SimpleITK",
"id": "54c32787d13ab2866d94addc000147d567c02be3",
"size": "2697",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Examples/ImageRegistrationMethodBSpline1/ImageRegistrationMethodBSpline1.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "32664"
},
{
"name": "C#",
"bytes": "5324"
},
{
"name": "C++",
"bytes": "1933234"
},
{
"name": "CMake",
"bytes": "265951"
},
{
"name": "CSS",
"bytes": "31103"
},
{
"name": "Dockerfile",
"bytes": "1074"
},
{
"name": "HTML",
"bytes": "3744"
},
{
"name": "Java",
"bytes": "7242"
},
{
"name": "Lua",
"bytes": "25805"
},
{
"name": "Makefile",
"bytes": "145"
},
{
"name": "Python",
"bytes": "199006"
},
{
"name": "R",
"bytes": "54684"
},
{
"name": "SWIG",
"bytes": "2602002"
},
{
"name": "Shell",
"bytes": "109644"
},
{
"name": "Tcl",
"bytes": "3501"
}
],
"symlink_target": ""
}
|
import collections
from unittest import mock
from oslo_serialization import jsonutils
from oslo_utils.fixture import uuidsentinel as uuids
from oslo_utils import uuidutils
from oslo_versionedobjects import base as ovo_base
from nova import context
from nova import exception
from nova.network import model as network_model
from nova import objects
from nova.objects import base
from nova.objects import request_spec
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import fake_flavor
from nova.tests.unit import fake_instance
from nova.tests.unit import fake_network_cache_model
from nova.tests.unit import fake_request_spec
from nova.tests.unit.objects import test_objects
class _TestRequestSpecObject(object):
def test_image_meta_from_image_as_object(self):
# Just isolating the test for the from_dict() method
image_meta = objects.ImageMeta(name='foo')
spec = objects.RequestSpec()
spec._image_meta_from_image(image_meta)
self.assertEqual(image_meta, spec.image)
@mock.patch.object(objects.ImageMeta, 'from_dict')
def test_image_meta_from_image_as_dict(self, from_dict):
# Just isolating the test for the from_dict() method
image_meta = objects.ImageMeta(name='foo')
from_dict.return_value = image_meta
spec = objects.RequestSpec()
spec._image_meta_from_image({'name': 'foo'})
self.assertEqual(image_meta, spec.image)
def test_image_meta_from_image_as_none(self):
# just add a dumb check to have a full coverage
spec = objects.RequestSpec()
spec._image_meta_from_image(None)
self.assertIsNone(spec.image)
@mock.patch.object(base, 'obj_to_primitive')
def test_to_legacy_image(self, obj_to_primitive):
spec = objects.RequestSpec(image=objects.ImageMeta())
fake_dict = mock.Mock()
obj_to_primitive.return_value = fake_dict
self.assertEqual(fake_dict, spec._to_legacy_image())
obj_to_primitive.assert_called_once_with(spec.image)
@mock.patch.object(base, 'obj_to_primitive')
def test_to_legacy_image_with_none(self, obj_to_primitive):
spec = objects.RequestSpec(image=None)
self.assertEqual({}, spec._to_legacy_image())
self.assertFalse(obj_to_primitive.called)
def test_from_instance_as_object(self):
instance = objects.Instance()
instance.uuid = uuidutils.generate_uuid()
instance.numa_topology = None
instance.pci_requests = None
instance.project_id = fakes.FAKE_PROJECT_ID
instance.user_id = fakes.FAKE_USER_ID
instance.availability_zone = 'nova'
spec = objects.RequestSpec()
spec._from_instance(instance)
instance_fields = ['numa_topology', 'pci_requests', 'uuid',
'project_id', 'user_id', 'availability_zone']
for field in instance_fields:
if field == 'uuid':
self.assertEqual(getattr(instance, field),
getattr(spec, 'instance_uuid'))
else:
self.assertEqual(getattr(instance, field),
getattr(spec, field))
def test_from_instance_as_dict(self):
instance = dict(uuid=uuidutils.generate_uuid(),
numa_topology=None,
pci_requests=None,
project_id=fakes.FAKE_PROJECT_ID,
user_id=fakes.FAKE_USER_ID,
availability_zone='nova')
spec = objects.RequestSpec()
spec._from_instance(instance)
instance_fields = ['numa_topology', 'pci_requests', 'uuid',
'project_id', 'user_id', 'availability_zone']
for field in instance_fields:
if field == 'uuid':
self.assertEqual(instance.get(field),
getattr(spec, 'instance_uuid'))
else:
self.assertEqual(instance.get(field), getattr(spec, field))
def test_from_instance_with_pci_requests(self):
fake_pci_requests = objects.InstancePCIRequests(
instance_uuid=uuids.instance,
requests=[
objects.InstancePCIRequest(
count=1,
spec=[{'vendor_id': '8086'}],
),
],
)
instance = dict(
uuid=uuids.instance,
root_gb=10,
ephemeral_gb=0,
memory_mb=10,
vcpus=1,
numa_topology=None,
project_id=fakes.FAKE_PROJECT_ID,
user_id=fakes.FAKE_USER_ID,
availability_zone='nova',
pci_requests=fake_pci_requests.obj_to_primitive(),
)
spec = objects.RequestSpec()
spec._from_instance(instance)
self.assertEqual(
fake_pci_requests.requests[0].spec,
spec.pci_requests.requests[0].spec,
)
def test_from_instance_with_numa_stuff(self):
instance = dict(
uuid=uuidutils.generate_uuid(),
root_gb=10,
ephemeral_gb=0,
memory_mb=10,
vcpus=1,
project_id=fakes.FAKE_PROJECT_ID,
user_id=fakes.FAKE_USER_ID,
availability_zone='nova',
pci_requests=None,
numa_topology=fake_request_spec.INSTANCE_NUMA_TOPOLOGY,
)
spec = objects.RequestSpec()
spec._from_instance(instance)
self.assertIsInstance(spec.numa_topology, objects.InstanceNUMATopology)
cells = spec.numa_topology.cells
self.assertEqual(2, len(cells))
self.assertIsInstance(cells[0], objects.InstanceNUMACell)
def test_from_flavor_as_object(self):
flavor = objects.Flavor()
spec = objects.RequestSpec()
spec._from_flavor(flavor)
self.assertEqual(flavor, spec.flavor)
def test_from_flavor_as_dict(self):
flavor_dict = dict(id=1)
ctxt = context.RequestContext('fake', 'fake')
spec = objects.RequestSpec(ctxt)
spec._from_flavor(flavor_dict)
self.assertIsInstance(spec.flavor, objects.Flavor)
self.assertEqual({'id': 1}, spec.flavor.obj_get_changes())
def test_to_legacy_instance(self):
spec = objects.RequestSpec()
spec.flavor = objects.Flavor(root_gb=10,
ephemeral_gb=0,
memory_mb=10,
vcpus=1)
spec.numa_topology = None
spec.pci_requests = None
spec.project_id = fakes.FAKE_PROJECT_ID
spec.user_id = fakes.FAKE_USER_ID
spec.availability_zone = 'nova'
instance = spec._to_legacy_instance()
self.assertEqual({'root_gb': 10,
'ephemeral_gb': 0,
'memory_mb': 10,
'vcpus': 1,
'numa_topology': None,
'pci_requests': None,
'project_id': fakes.FAKE_PROJECT_ID,
'user_id': fakes.FAKE_USER_ID,
'availability_zone': 'nova'}, instance)
def test_to_legacy_instance_with_unset_values(self):
spec = objects.RequestSpec()
self.assertEqual({}, spec._to_legacy_instance())
def test_from_retry(self):
retry_dict = {'num_attempts': 1,
'hosts': [['fake1', 'node1']]}
ctxt = context.RequestContext('fake', 'fake')
spec = objects.RequestSpec(ctxt)
spec._from_retry(retry_dict)
self.assertIsInstance(spec.retry, objects.SchedulerRetries)
self.assertEqual(1, spec.retry.num_attempts)
self.assertIsInstance(spec.retry.hosts, objects.ComputeNodeList)
self.assertEqual(1, len(spec.retry.hosts))
self.assertEqual('fake1', spec.retry.hosts[0].host)
self.assertEqual('node1', spec.retry.hosts[0].hypervisor_hostname)
def test_from_retry_missing_values(self):
retry_dict = {}
ctxt = context.RequestContext('fake', 'fake')
spec = objects.RequestSpec(ctxt)
spec._from_retry(retry_dict)
self.assertIsNone(spec.retry)
def test_populate_group_info(self):
filt_props = {}
filt_props['group_updated'] = True
filt_props['group_policies'] = set(['affinity'])
filt_props['group_hosts'] = set(['fake1'])
filt_props['group_members'] = set(['fake-instance1'])
# Make sure it can handle group uuid not being present.
for group_uuid in (None, uuids.group_uuid):
if group_uuid:
filt_props['group_uuid'] = group_uuid
spec = objects.RequestSpec()
spec._populate_group_info(filt_props)
self.assertIsInstance(spec.instance_group, objects.InstanceGroup)
self.assertEqual('affinity', spec.instance_group.policy)
self.assertEqual(['fake1'], spec.instance_group.hosts)
self.assertEqual(['fake-instance1'], spec.instance_group.members)
if group_uuid:
self.assertEqual(uuids.group_uuid, spec.instance_group.uuid)
def test_populate_group_info_missing_values(self):
filt_props = {}
spec = objects.RequestSpec()
spec._populate_group_info(filt_props)
self.assertIsNone(spec.instance_group)
def test_from_limits(self):
limits_dict = {'numa_topology': None,
'vcpu': 1.0,
'disk_gb': 1.0,
'memory_mb': 1.0}
spec = objects.RequestSpec()
spec._from_limits(limits_dict)
self.assertIsInstance(spec.limits, objects.SchedulerLimits)
self.assertIsNone(spec.limits.numa_topology)
self.assertEqual(1, spec.limits.vcpu)
self.assertEqual(1, spec.limits.disk_gb)
self.assertEqual(1, spec.limits.memory_mb)
def test_from_limits_missing_values(self):
limits_dict = {}
spec = objects.RequestSpec()
spec._from_limits(limits_dict)
self.assertIsInstance(spec.limits, objects.SchedulerLimits)
self.assertIsNone(spec.limits.numa_topology)
self.assertIsNone(spec.limits.vcpu)
self.assertIsNone(spec.limits.disk_gb)
self.assertIsNone(spec.limits.memory_mb)
def test_from_hints(self):
hints_dict = {'foo_str': '1',
'bar_list': ['2']}
spec = objects.RequestSpec()
spec._from_hints(hints_dict)
expected = {'foo_str': ['1'],
'bar_list': ['2']}
self.assertEqual(expected, spec.scheduler_hints)
def test_from_hints_with_no_hints(self):
spec = objects.RequestSpec()
spec._from_hints(None)
self.assertIsNone(spec.scheduler_hints)
@mock.patch.object(objects.SchedulerLimits, 'from_dict')
def test_from_primitives(self, mock_limits):
spec_dict = {'instance_type': objects.Flavor(),
'instance_properties': objects.Instance(
uuid=uuidutils.generate_uuid(),
numa_topology=None,
pci_requests=None,
project_id=1,
user_id=2,
availability_zone='nova')}
filt_props = {}
        # We seriously don't care about the return values; we just want to
        # make sure that all the fields are set
mock_limits.return_value = None
ctxt = context.RequestContext('fake', 'fake')
spec = objects.RequestSpec.from_primitives(ctxt, spec_dict, filt_props)
mock_limits.assert_called_once_with({})
# Make sure that all fields are set using that helper method
skip = ['id', 'security_groups', 'network_metadata', 'is_bfv',
'request_level_params', 'requested_networks']
for field in [f for f in spec.obj_fields if f not in skip]:
self.assertTrue(spec.obj_attr_is_set(field),
'Field: %s is not set' % field)
# just making sure that the context is set by the method
self.assertEqual(ctxt, spec._context)
def test_from_primitives_with_requested_destination(self):
destination = objects.Destination(host='foo')
spec_dict = {}
filt_props = {'requested_destination': destination}
ctxt = context.RequestContext('fake', 'fake')
spec = objects.RequestSpec.from_primitives(ctxt, spec_dict, filt_props)
self.assertEqual(destination, spec.requested_destination)
def test_from_components(self):
ctxt = context.RequestContext('fake-user', 'fake-project')
destination = objects.Destination(host='foo')
self.assertFalse(destination.allow_cross_cell_move)
instance = fake_instance.fake_instance_obj(ctxt)
image = {'id': uuids.image_id, 'properties': {'mappings': []},
'status': 'fake-status', 'location': 'far-away'}
flavor = fake_flavor.fake_flavor_obj(ctxt)
filter_properties = {'requested_destination': destination}
instance_group = None
spec = objects.RequestSpec.from_components(ctxt, instance.uuid, image,
flavor, instance.numa_topology, instance.pci_requests,
filter_properties, instance_group, instance.availability_zone,
objects.SecurityGroupList())
# Make sure that all fields are set using that helper method
skip = ['id', 'network_metadata', 'is_bfv', 'request_level_params',
'requested_networks']
for field in [f for f in spec.obj_fields if f not in skip]:
self.assertTrue(spec.obj_attr_is_set(field),
'Field: %s is not set' % field)
# just making sure that the context is set by the method
self.assertEqual(ctxt, spec._context)
self.assertEqual(destination, spec.requested_destination)
self.assertFalse(spec.requested_destination.allow_cross_cell_move)
@mock.patch('nova.objects.RequestSpec._populate_group_info')
def test_from_components_with_instance_group(self, mock_pgi):
# This test makes sure that we don't overwrite instance group passed
# to from_components
ctxt = context.RequestContext('fake-user', 'fake-project')
instance = fake_instance.fake_instance_obj(ctxt)
image = {'id': uuids.image_id, 'properties': {'mappings': []},
'status': 'fake-status', 'location': 'far-away'}
flavor = fake_flavor.fake_flavor_obj(ctxt)
filter_properties = {'fake': 'property'}
instance_group = objects.InstanceGroup()
objects.RequestSpec.from_components(ctxt, instance.uuid, image,
flavor, instance.numa_topology, instance.pci_requests,
filter_properties, instance_group, instance.availability_zone)
self.assertFalse(mock_pgi.called)
@mock.patch('nova.objects.RequestSpec._populate_group_info')
def test_from_components_without_instance_group(self, mock_pgi):
# This test makes sure that we populate instance group if not
# present
ctxt = context.RequestContext(fakes.FAKE_USER_ID,
fakes.FAKE_PROJECT_ID)
instance = fake_instance.fake_instance_obj(ctxt)
image = {'id': uuids.image_id, 'properties': {'mappings': []},
'status': 'fake-status', 'location': 'far-away'}
flavor = fake_flavor.fake_flavor_obj(ctxt)
filter_properties = {'fake': 'property'}
objects.RequestSpec.from_components(ctxt, instance.uuid, image,
flavor, instance.numa_topology, instance.pci_requests,
filter_properties, None, instance.availability_zone)
mock_pgi.assert_called_once_with(filter_properties)
@mock.patch('nova.objects.RequestSpec._populate_group_info')
def test_from_components_without_security_groups(self, mock_pgi):
        # This test makes sure that security_groups is left unset when no
        # security groups are passed to from_components
ctxt = context.RequestContext(fakes.FAKE_USER_ID,
fakes.FAKE_PROJECT_ID)
instance = fake_instance.fake_instance_obj(ctxt)
image = {'id': uuids.image_id, 'properties': {'mappings': []},
'status': 'fake-status', 'location': 'far-away'}
flavor = fake_flavor.fake_flavor_obj(ctxt)
filter_properties = {'fake': 'property'}
spec = objects.RequestSpec.from_components(ctxt, instance.uuid, image,
flavor, instance.numa_topology, instance.pci_requests,
filter_properties, None, instance.availability_zone)
self.assertNotIn('security_groups', spec)
    def test_from_components_with_port_resource_request(self):
ctxt = context.RequestContext(fakes.FAKE_USER_ID,
fakes.FAKE_PROJECT_ID)
instance = fake_instance.fake_instance_obj(ctxt)
image = {'id': uuids.image_id, 'properties': {'mappings': []},
'status': 'fake-status', 'location': 'far-away'}
flavor = fake_flavor.fake_flavor_obj(ctxt)
filter_properties = {'fake': 'property'}
rg = request_spec.RequestGroup()
req_lvl_params = request_spec.RequestLevelParams()
spec = objects.RequestSpec.from_components(
ctxt, instance.uuid, image,
flavor, instance.numa_topology, instance.pci_requests,
filter_properties, None, instance.availability_zone,
port_resource_requests=[rg], request_level_params=req_lvl_params
)
self.assertListEqual([rg], spec.requested_resources)
self.assertEqual(req_lvl_params, spec.request_level_params)
def test_get_scheduler_hint(self):
spec_obj = objects.RequestSpec(scheduler_hints={'foo_single': ['1'],
'foo_mul': ['1', '2']})
self.assertEqual('1', spec_obj.get_scheduler_hint('foo_single'))
self.assertEqual(['1', '2'], spec_obj.get_scheduler_hint('foo_mul'))
self.assertIsNone(spec_obj.get_scheduler_hint('oops'))
self.assertEqual('bar', spec_obj.get_scheduler_hint('oops',
default='bar'))
def test_get_scheduler_hint_with_no_hints(self):
spec_obj = objects.RequestSpec()
self.assertEqual('bar', spec_obj.get_scheduler_hint('oops',
default='bar'))
@mock.patch.object(objects.RequestSpec, '_to_legacy_instance')
@mock.patch.object(base, 'obj_to_primitive')
def test_to_legacy_request_spec_dict(self, image_to_primitive,
spec_to_legacy_instance):
fake_image_dict = mock.Mock()
image_to_primitive.return_value = fake_image_dict
fake_instance = {'root_gb': 1.0,
'ephemeral_gb': 1.0,
'memory_mb': 1.0,
'vcpus': 1,
'numa_topology': None,
'pci_requests': None,
'project_id': fakes.FAKE_PROJECT_ID,
'availability_zone': 'nova',
'uuid': '1'}
spec_to_legacy_instance.return_value = fake_instance
fake_flavor = objects.Flavor(root_gb=10,
ephemeral_gb=0,
memory_mb=512,
vcpus=1)
spec = objects.RequestSpec(num_instances=1,
image=objects.ImageMeta(),
# instance properties
numa_topology=None,
pci_requests=None,
project_id=1,
availability_zone='nova',
instance_uuid=uuids.instance,
flavor=fake_flavor)
spec_dict = spec.to_legacy_request_spec_dict()
expected = {'num_instances': 1,
'image': fake_image_dict,
'instance_properties': fake_instance,
'instance_type': fake_flavor}
self.assertEqual(expected, spec_dict)
def test_to_legacy_request_spec_dict_with_unset_values(self):
spec = objects.RequestSpec()
self.assertEqual({'num_instances': 1,
'image': {},
'instance_properties': {},
'instance_type': {}},
spec.to_legacy_request_spec_dict())
def test_to_legacy_filter_properties_dict(self):
fake_numa_limits = objects.NUMATopologyLimits()
fake_computes_obj = objects.ComputeNodeList(
objects=[objects.ComputeNode(host='fake1',
hypervisor_hostname='node1')])
fake_dest = objects.Destination(host='fakehost')
spec = objects.RequestSpec(
ignore_hosts=['ignoredhost'],
force_hosts=['fakehost'],
force_nodes=['fakenode'],
retry=objects.SchedulerRetries(num_attempts=1,
hosts=fake_computes_obj),
limits=objects.SchedulerLimits(numa_topology=fake_numa_limits,
vcpu=1.0,
disk_gb=10.0,
memory_mb=8192.0),
instance_group=objects.InstanceGroup(hosts=['fake1'],
policy='affinity',
members=['inst1', 'inst2'],
uuid=uuids.group_uuid),
scheduler_hints={'foo': ['bar']},
requested_destination=fake_dest)
expected = {'ignore_hosts': ['ignoredhost'],
'force_hosts': ['fakehost'],
'force_nodes': ['fakenode'],
'retry': {'num_attempts': 1,
'hosts': [['fake1', 'node1']]},
'limits': {'numa_topology': fake_numa_limits,
'vcpu': 1.0,
'disk_gb': 10.0,
'memory_mb': 8192.0},
'group_updated': True,
'group_hosts': set(['fake1']),
'group_policies': set(['affinity']),
'group_members': set(['inst1', 'inst2']),
'group_uuid': uuids.group_uuid,
'scheduler_hints': {'foo': 'bar'},
'requested_destination': fake_dest}
self.assertEqual(expected, spec.to_legacy_filter_properties_dict())
def test_to_legacy_filter_properties_dict_with_nullable_values(self):
spec = objects.RequestSpec(force_hosts=None,
force_nodes=None,
retry=None,
limits=None,
instance_group=None,
scheduler_hints=None)
self.assertEqual({}, spec.to_legacy_filter_properties_dict())
def test_to_legacy_filter_properties_dict_with_unset_values(self):
spec = objects.RequestSpec()
self.assertEqual({}, spec.to_legacy_filter_properties_dict())
def test_ensure_network_information(self):
network_a = fake_network_cache_model.new_network({
'physical_network': 'foo', 'tunneled': False})
vif_a = fake_network_cache_model.new_vif({'network': network_a})
network_b = fake_network_cache_model.new_network({
'physical_network': 'foo', 'tunneled': False})
vif_b = fake_network_cache_model.new_vif({'network': network_b})
network_c = fake_network_cache_model.new_network({
'physical_network': 'bar', 'tunneled': False})
vif_c = fake_network_cache_model.new_vif({'network': network_c})
network_d = fake_network_cache_model.new_network({
'physical_network': None, 'tunneled': True})
vif_d = fake_network_cache_model.new_vif({'network': network_d})
nw_info = network_model.NetworkInfo([vif_a, vif_b, vif_c, vif_d])
info_cache = objects.InstanceInfoCache(network_info=nw_info,
instance_uuid=uuids.instance)
instance = objects.Instance(id=3, uuid=uuids.instance,
info_cache=info_cache)
spec = objects.RequestSpec()
self.assertNotIn('network_metadata', spec)
self.assertNotIn('requested_networks', spec)
spec.ensure_network_information(instance)
self.assertIn('network_metadata', spec)
self.assertIsInstance(spec.network_metadata, objects.NetworkMetadata)
self.assertEqual(spec.network_metadata.physnets, set(['foo', 'bar']))
self.assertTrue(spec.network_metadata.tunneled)
self.assertEqual(4, len(spec.requested_networks))
for idx, reqnet in enumerate(spec.requested_networks):
self.assertEqual(nw_info[idx]['network']['id'], reqnet.network_id)
self.assertEqual(nw_info[idx]['id'], reqnet.port_id)
def test_ensure_network_information_missing(self):
nw_info = network_model.NetworkInfo([])
info_cache = objects.InstanceInfoCache(network_info=nw_info,
instance_uuid=uuids.instance)
instance = objects.Instance(id=3, uuid=uuids.instance,
info_cache=info_cache)
spec = objects.RequestSpec()
self.assertNotIn('network_metadata', spec)
self.assertNotIn('requested_networks', spec)
spec.ensure_network_information(instance)
self.assertNotIn('network_metadata', spec)
self.assertNotIn('requested_networks', spec)
@mock.patch.object(request_spec.RequestSpec,
'_get_by_instance_uuid_from_db')
@mock.patch('nova.objects.InstanceGroup.get_by_uuid')
def test_get_by_instance_uuid(self, mock_get_ig, get_by_uuid):
fake_spec = fake_request_spec.fake_db_spec()
get_by_uuid.return_value = fake_spec
mock_get_ig.return_value = objects.InstanceGroup(name='fresh')
req_obj = request_spec.RequestSpec.get_by_instance_uuid(self.context,
fake_spec['instance_uuid'])
self.assertEqual(1, req_obj.num_instances)
# ignore_hosts is not persisted
self.assertIsNone(req_obj.ignore_hosts)
self.assertEqual('fake', req_obj.project_id)
self.assertEqual({'hint': ['over-there']}, req_obj.scheduler_hints)
self.assertEqual(['host1', 'host3'], req_obj.force_hosts)
self.assertIsNone(req_obj.availability_zone)
self.assertEqual(['node1', 'node2'], req_obj.force_nodes)
self.assertIsInstance(req_obj.image, objects.ImageMeta)
self.assertIsInstance(req_obj.numa_topology,
objects.InstanceNUMATopology)
self.assertIsInstance(req_obj.pci_requests,
objects.InstancePCIRequests)
self.assertIsInstance(req_obj.flavor, objects.Flavor)
# The 'retry' field is not persistent.
self.assertIsNone(req_obj.retry)
self.assertIsInstance(req_obj.limits, objects.SchedulerLimits)
self.assertIsInstance(req_obj.instance_group, objects.InstanceGroup)
self.assertEqual('fresh', req_obj.instance_group.name)
@mock.patch.object(
request_spec.RequestSpec, '_get_by_instance_uuid_from_db'
)
@mock.patch('nova.objects.InstanceGroup.get_by_uuid')
def test_get_by_instance_uuid_deleted_group(
self, mock_get_ig, get_by_uuid
):
fake_spec_obj = fake_request_spec.fake_spec_obj()
fake_spec_obj.scheduler_hints['group'] = ['fresh']
fake_spec = fake_request_spec.fake_db_spec(fake_spec_obj)
get_by_uuid.return_value = fake_spec
mock_get_ig.side_effect = exception.InstanceGroupNotFound(
group_uuid=uuids.instgroup
)
req_obj = request_spec.RequestSpec.get_by_instance_uuid(
self.context, fake_spec['instance_uuid']
)
# assert that both the instance_group object and scheduler hint
# are cleared if the instance_group was deleted since the request
# spec was last saved to the db.
self.assertIsNone(req_obj.instance_group, objects.InstanceGroup)
self.assertEqual({'hint': ['over-there']}, req_obj.scheduler_hints)
@mock.patch('nova.objects.request_spec.RequestSpec.save')
@mock.patch.object(
request_spec.RequestSpec, '_get_by_instance_uuid_from_db')
@mock.patch('nova.objects.InstanceGroup.get_by_uuid')
def test_get_by_instance_uuid_numa_topology_migration(
self, mock_get_ig, get_by_uuid, mock_save
):
# Simulate a pre-Victoria RequestSpec where the pcpuset field is not
# defined for the embedded InstanceNUMACell objects but the cpu_policy
# is dedicated meaning that cores in cpuset defines pinned cpus. So
# in Victoria or later these InstanceNUMACell objects should be
# translated to hold the cores in the pcpuset field instead.
numa_topology = objects.InstanceNUMATopology(
instance_uuid=uuids.instance_uuid,
cells=[
objects.InstanceNUMACell(
id=0, cpuset={1, 2}, memory=512, cpu_policy="dedicated"),
objects.InstanceNUMACell(
id=1, cpuset={3, 4}, memory=512, cpu_policy="dedicated"),
]
)
spec_obj = fake_request_spec.fake_spec_obj()
spec_obj.numa_topology = numa_topology
fake_spec = fake_request_spec.fake_db_spec(spec_obj)
fake_spec['instance_uuid'] = uuids.instance_uuid
get_by_uuid.return_value = fake_spec
mock_get_ig.return_value = objects.InstanceGroup(name='fresh')
req_obj = request_spec.RequestSpec.get_by_instance_uuid(
self.context, fake_spec['instance_uuid'])
self.assertEqual(2, len(req_obj.numa_topology.cells))
self.assertEqual({1, 2}, req_obj.numa_topology.cells[0].pcpuset)
self.assertEqual({3, 4}, req_obj.numa_topology.cells[1].pcpuset)
mock_save.assert_called_once()
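    # The migration exercised above amounts to this sketch (an assumption
    # about the shape of the translation, not nova's actual implementation):
    # for each pre-Victoria InstanceNUMACell with cpu_policy='dedicated' and
    # no pcpuset, the cores move from cpuset into pcpuset.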
def _check_update_primitive(self, req_obj, changes):
self.assertEqual(req_obj.instance_uuid, changes['instance_uuid'])
serialized_obj = objects.RequestSpec.obj_from_primitive(
jsonutils.loads(changes['spec']))
# primitive fields
for field in ['instance_uuid', 'num_instances',
'project_id', 'scheduler_hints', 'force_hosts',
'availability_zone', 'force_nodes']:
self.assertEqual(getattr(req_obj, field),
getattr(serialized_obj, field))
# object fields
for field in ['image', 'numa_topology', 'pci_requests', 'flavor',
'limits', 'network_metadata']:
self.assertEqual(
getattr(req_obj, field).obj_to_primitive(),
getattr(serialized_obj, field).obj_to_primitive())
self.assertIsNone(serialized_obj.instance_group.members)
self.assertIsNone(serialized_obj.instance_group.hosts)
self.assertIsNone(serialized_obj.retry)
self.assertIsNone(serialized_obj.requested_destination)
self.assertIsNone(serialized_obj.ignore_hosts)
def test_create(self):
req_obj = fake_request_spec.fake_spec_obj(remove_id=True)
def _test_create_args(self2, context, changes):
self._check_update_primitive(req_obj, changes)
# DB creation would have set an id
changes['id'] = 42
return changes
with mock.patch.object(request_spec.RequestSpec, '_create_in_db',
_test_create_args):
req_obj.create()
def test_create_id_set(self):
req_obj = request_spec.RequestSpec(self.context)
req_obj.id = 3
self.assertRaises(exception.ObjectActionError, req_obj.create)
def test_create_does_not_persist_requested_fields(self):
req_obj = fake_request_spec.fake_spec_obj(remove_id=True)
expected_network_metadata = objects.NetworkMetadata(
physnets=set(['foo', 'bar']), tunneled=True)
req_obj.network_metadata = expected_network_metadata
expected_destination = request_spec.Destination(host='sample-host')
req_obj.requested_destination = expected_destination
rg = request_spec.RequestGroup(resources={'fake-rc': 13})
req_obj.requested_resources = [rg]
expected_retry = objects.SchedulerRetries(
num_attempts=2,
hosts=objects.ComputeNodeList(objects=[
objects.ComputeNode(host='host1', hypervisor_hostname='node1'),
objects.ComputeNode(host='host2', hypervisor_hostname='node2'),
]))
req_obj.retry = expected_retry
nr = objects.NetworkRequest()
req_obj.requested_networks = objects.NetworkRequestList(objects=[nr])
req_lvl_params = objects.RequestLevelParams(
root_required={"CUSTOM_FOO"})
req_obj.request_level_params = req_lvl_params
orig_create_in_db = request_spec.RequestSpec._create_in_db
with mock.patch.object(request_spec.RequestSpec, '_create_in_db') \
as mock_create_in_db:
mock_create_in_db.side_effect = orig_create_in_db
req_obj.create()
mock_create_in_db.assert_called_once()
updates = mock_create_in_db.mock_calls[0][1][1]
# assert that the following fields are not stored in the db
# 1. network_metadata
# 2. requested_destination
# 3. requested_resources
# 4. retry
# 5. requested_networks
# 6. request_level_params
data = jsonutils.loads(updates['spec'])['nova_object.data']
self.assertNotIn('network_metadata', data)
self.assertIsNone(data['requested_destination'])
self.assertIsNone(data['requested_resources'])
self.assertIsNone(data['retry'])
self.assertNotIn('requested_networks', data)
self.assertNotIn('request_level_params', data)
self.assertIsNotNone(data['instance_uuid'])
# also we expect that the following fields are not reset after create
# 1. network_metadata
# 2. requested_destination
# 3. requested_resources
# 4. retry
# 5. requested_networks
# 6. request_level_params
self.assertIsNotNone(req_obj.network_metadata)
self.assertJsonEqual(expected_network_metadata.obj_to_primitive(),
req_obj.network_metadata.obj_to_primitive())
self.assertIsNotNone(req_obj.requested_destination)
self.assertJsonEqual(expected_destination.obj_to_primitive(),
req_obj.requested_destination.obj_to_primitive())
self.assertIsNotNone(req_obj.requested_resources)
self.assertEqual(
13, req_obj.requested_resources[0].resources['fake-rc'])
self.assertIsNotNone(req_obj.retry)
self.assertJsonEqual(expected_retry.obj_to_primitive(),
req_obj.retry.obj_to_primitive())
self.assertIsNotNone(req_obj.requested_networks)
self.assertJsonEqual(nr.obj_to_primitive(),
req_obj.requested_networks[0].obj_to_primitive())
self.assertIsNotNone(req_obj.request_level_params)
self.assertJsonEqual(
req_lvl_params.obj_to_primitive(),
req_obj.request_level_params.obj_to_primitive())
def test_save_does_not_persist_requested_fields(self):
req_obj = fake_request_spec.fake_spec_obj(remove_id=True)
req_obj.create()
# change something to make sure _save_in_db is called
expected_network_metadata = objects.NetworkMetadata(
physnets=set(['foo', 'bar']), tunneled=True)
req_obj.network_metadata = expected_network_metadata
expected_destination = request_spec.Destination(host='sample-host')
req_obj.requested_destination = expected_destination
rg = request_spec.RequestGroup(resources={'fake-rc': 13})
req_obj.requested_resources = [rg]
expected_retry = objects.SchedulerRetries(
num_attempts=2,
hosts=objects.ComputeNodeList(objects=[
objects.ComputeNode(host='host1', hypervisor_hostname='node1'),
objects.ComputeNode(host='host2', hypervisor_hostname='node2'),
]))
req_obj.retry = expected_retry
req_obj.num_instances = 2
req_obj.ignore_hosts = [uuids.ignored_host]
nr = objects.NetworkRequest()
req_obj.requested_networks = objects.NetworkRequestList(objects=[nr])
orig_save_in_db = request_spec.RequestSpec._save_in_db
with mock.patch.object(request_spec.RequestSpec, '_save_in_db') \
as mock_save_in_db:
mock_save_in_db.side_effect = orig_save_in_db
req_obj.save()
mock_save_in_db.assert_called_once()
updates = mock_save_in_db.mock_calls[0][1][2]
# assert that the following fields are not stored in the db
# 1. network_metadata
# 2. requested_destination
# 3. requested_resources
# 4. retry
# 5. ignore_hosts
# 6. requested_networks
data = jsonutils.loads(updates['spec'])['nova_object.data']
self.assertNotIn('network_metadata', data)
self.assertIsNone(data['requested_destination'])
self.assertIsNone(data['requested_resources'])
self.assertIsNone(data['retry'])
self.assertIsNone(data['ignore_hosts'])
self.assertIsNotNone(data['instance_uuid'])
self.assertNotIn('requested_networks', data)
# also we expect that the following fields are not reset after save
# 1. network_metadata
# 2. requested_destination
# 3. requested_resources
# 4. retry
# 5. ignore_hosts
# 6. requested_networks
self.assertIsNotNone(req_obj.network_metadata)
self.assertJsonEqual(expected_network_metadata.obj_to_primitive(),
req_obj.network_metadata.obj_to_primitive())
self.assertIsNotNone(req_obj.requested_destination)
self.assertJsonEqual(expected_destination.obj_to_primitive(),
req_obj.requested_destination.obj_to_primitive())
self.assertIsNotNone(req_obj.requested_resources)
self.assertEqual(13, req_obj.requested_resources[0].resources
['fake-rc'])
self.assertIsNotNone(req_obj.retry)
self.assertJsonEqual(expected_retry.obj_to_primitive(),
req_obj.retry.obj_to_primitive())
self.assertIsNotNone(req_obj.ignore_hosts)
self.assertEqual([uuids.ignored_host], req_obj.ignore_hosts)
self.assertIsNotNone(req_obj.requested_networks)
self.assertJsonEqual(nr.obj_to_primitive(),
req_obj.requested_networks[0].obj_to_primitive())
def test_save(self):
req_obj = fake_request_spec.fake_spec_obj()
# Make sure the requested_destination is not persisted since it is
# only valid per request/operation.
req_obj.requested_destination = objects.Destination(host='fake')
def _test_save_args(self2, context, instance_uuid, changes):
self._check_update_primitive(req_obj, changes)
# DB creation would have set an id
changes['id'] = 42
return changes
with mock.patch.object(request_spec.RequestSpec, '_save_in_db',
_test_save_args):
req_obj.save()
@mock.patch.object(request_spec.RequestSpec, '_destroy_in_db')
def test_destroy(self, destroy_in_db):
req_obj = fake_request_spec.fake_spec_obj()
req_obj.destroy()
destroy_in_db.assert_called_once_with(req_obj._context,
req_obj.instance_uuid)
@mock.patch.object(request_spec.RequestSpec, '_destroy_bulk_in_db')
def test_destroy_bulk(self, destroy_bulk_in_db):
uuids_to_be_deleted = []
for i in range(0, 5):
uuid = uuidutils.generate_uuid()
uuids_to_be_deleted.append(uuid)
destroy_bulk_in_db.return_value = 5
result = objects.RequestSpec.destroy_bulk(self.context,
uuids_to_be_deleted)
destroy_bulk_in_db.assert_called_once_with(self.context,
uuids_to_be_deleted)
self.assertEqual(5, result)
def test_reset_forced_destinations(self):
req_obj = fake_request_spec.fake_spec_obj()
# Making sure the fake object has forced hosts and nodes
self.assertIsNotNone(req_obj.force_hosts)
self.assertIsNotNone(req_obj.force_nodes)
with mock.patch.object(req_obj, 'obj_reset_changes') as mock_reset:
req_obj.reset_forced_destinations()
self.assertIsNone(req_obj.force_hosts)
self.assertIsNone(req_obj.force_nodes)
mock_reset.assert_called_once_with(['force_hosts', 'force_nodes'])
def test_compat_requested_destination(self):
req_obj = objects.RequestSpec(
requested_destination=objects.Destination())
versions = ovo_base.obj_tree_get_versions('RequestSpec')
primitive = req_obj.obj_to_primitive(target_version='1.5',
version_manifest=versions)
self.assertNotIn(
'requested_destination', primitive['nova_object.data'])
primitive = req_obj.obj_to_primitive(target_version='1.6',
version_manifest=versions)
self.assertIn('requested_destination', primitive['nova_object.data'])
def test_compat_security_groups(self):
sgl = objects.SecurityGroupList(objects=[])
req_obj = objects.RequestSpec(security_groups=sgl)
versions = ovo_base.obj_tree_get_versions('RequestSpec')
primitive = req_obj.obj_to_primitive(target_version='1.7',
version_manifest=versions)
self.assertNotIn('security_groups', primitive['nova_object.data'])
primitive = req_obj.obj_to_primitive(target_version='1.8',
version_manifest=versions)
self.assertIn('security_groups', primitive['nova_object.data'])
def test_compat_user_id(self):
req_obj = objects.RequestSpec(project_id=fakes.FAKE_PROJECT_ID,
user_id=fakes.FAKE_USER_ID)
versions = ovo_base.obj_tree_get_versions('RequestSpec')
primitive = req_obj.obj_to_primitive(target_version='1.8',
version_manifest=versions)
primitive = primitive['nova_object.data']
self.assertNotIn('user_id', primitive)
self.assertIn('project_id', primitive)
def test_compat_network_metadata(self):
network_metadata = objects.NetworkMetadata(physnets=set(),
tunneled=False)
req_obj = objects.RequestSpec(network_metadata=network_metadata,
user_id=fakes.FAKE_USER_ID)
versions = ovo_base.obj_tree_get_versions('RequestSpec')
primitive = req_obj.obj_to_primitive(target_version='1.9',
version_manifest=versions)
primitive = primitive['nova_object.data']
self.assertNotIn('network_metadata', primitive)
self.assertIn('user_id', primitive)
def test_compat_requested_resources(self):
req_obj = objects.RequestSpec(requested_resources=[],
instance_uuid=uuids.instance)
versions = ovo_base.obj_tree_get_versions('RequestSpec')
primitive = req_obj.obj_to_primitive(target_version='1.11',
version_manifest=versions)
primitive = primitive['nova_object.data']
self.assertNotIn('requested_resources', primitive)
self.assertIn('instance_uuid', primitive)
def test_compat_requested_networks(self):
req_obj = objects.RequestSpec(
requested_networks=objects.NetworkRequestList(objects=[]),
instance_uuid=uuids.instance)
versions = ovo_base.obj_tree_get_versions('RequestSpec')
primitive = req_obj.obj_to_primitive(target_version='1.13',
version_manifest=versions)
primitive = primitive['nova_object.data']
self.assertNotIn('requested_networks', primitive)
self.assertIn('instance_uuid', primitive)
def test_default_requested_destination(self):
req_obj = objects.RequestSpec()
self.assertIsNone(req_obj.requested_destination)
def test_security_groups_load(self):
req_obj = objects.RequestSpec()
self.assertNotIn('security_groups', req_obj)
self.assertIsInstance(req_obj.security_groups,
objects.SecurityGroupList)
self.assertIn('security_groups', req_obj)
def test_network_requests_load(self):
req_obj = objects.RequestSpec()
self.assertNotIn('network_metadata', req_obj)
self.assertIsInstance(req_obj.network_metadata,
objects.NetworkMetadata)
self.assertIn('network_metadata', req_obj)
def test_requested_networks_load(self):
req_obj = objects.RequestSpec()
self.assertNotIn('requested_networks', req_obj)
self.assertIsInstance(req_obj.requested_networks,
objects.NetworkRequestList)
self.assertIn('requested_networks', req_obj)
def test_create_raises_on_unchanged_object(self):
ctxt = context.RequestContext(uuids.user_id, uuids.project_id)
req_obj = request_spec.RequestSpec(context=ctxt)
self.assertRaises(exception.ObjectActionError, req_obj.create)
def test_save_can_be_called_on_unchanged_object(self):
req_obj = fake_request_spec.fake_spec_obj(remove_id=True)
req_obj.create()
req_obj.save()
def test_get_request_group_mapping_no_request(self):
req_obj = request_spec.RequestSpec()
self.assertIsNone(req_obj.get_request_group_mapping())
def test_get_request_group_mapping(self):
req_obj = request_spec.RequestSpec(
requested_resources=[
request_spec.RequestGroup(
requester_id='requester1',
provider_uuids=[uuids.pr1, uuids.pr2]),
request_spec.RequestGroup(
requester_id='requester2',
provider_uuids=[]),
])
self.assertEqual(
{'requester1': [uuids.pr1, uuids.pr2],
'requester2': []},
req_obj.get_request_group_mapping())
class TestRequestSpecObject(test_objects._LocalTest,
_TestRequestSpecObject):
pass
class TestRemoteRequestSpecObject(test_objects._RemoteTest,
_TestRequestSpecObject):
pass
class TestRequestGroupObject(test.NoDBTestCase):
def setUp(self):
super(TestRequestGroupObject, self).setUp()
self.user_id = uuids.user_id
self.project_id = uuids.project_id
self.context = context.RequestContext(uuids.user_id, uuids.project_id)
def test_fields_defaulted_at_create(self):
rg = request_spec.RequestGroup(self.context)
self.assertTrue(rg.use_same_provider)
self.assertEqual({}, rg.resources)
self.assertEqual(set(), rg.required_traits)
self.assertEqual(set(), rg.forbidden_traits)
self.assertEqual([], rg.aggregates)
self.assertIsNone(rg.requester_id)
self.assertEqual([], rg.provider_uuids)
self.assertIsNone(rg.in_tree)
def test_from_port_request(self):
port_resource_request = {
"resources": {
"NET_BW_IGR_KILOBIT_PER_SEC": 1000,
"NET_BW_EGR_KILOBIT_PER_SEC": 1000},
"required": ["CUSTOM_PHYSNET_2",
"CUSTOM_VNIC_TYPE_NORMAL"]
}
rg = request_spec.RequestGroup.from_port_request(
self.context, uuids.port_id, port_resource_request)
self.assertTrue(rg.use_same_provider)
self.assertEqual(
{"NET_BW_IGR_KILOBIT_PER_SEC": 1000,
"NET_BW_EGR_KILOBIT_PER_SEC": 1000},
rg.resources)
self.assertEqual({"CUSTOM_PHYSNET_2", "CUSTOM_VNIC_TYPE_NORMAL"},
rg.required_traits)
self.assertEqual(uuids.port_id, rg.requester_id)
# and the rest is defaulted
self.assertEqual(set(), rg.forbidden_traits)
self.assertEqual([], rg.aggregates)
self.assertEqual([], rg.provider_uuids)
def test_from_port_request_without_traits(self):
port_resource_request = {
"resources": {
"NET_BW_IGR_KILOBIT_PER_SEC": 1000,
"NET_BW_EGR_KILOBIT_PER_SEC": 1000}}
rg = request_spec.RequestGroup.from_port_request(
self.context, uuids.port_id, port_resource_request)
self.assertTrue(rg.use_same_provider)
self.assertEqual(
{"NET_BW_IGR_KILOBIT_PER_SEC": 1000,
"NET_BW_EGR_KILOBIT_PER_SEC": 1000},
rg.resources)
self.assertEqual(uuids.port_id, rg.requester_id)
# and the rest is defaulted
self.assertEqual(set(), rg.required_traits)
self.assertEqual(set(), rg.forbidden_traits)
self.assertEqual([], rg.aggregates)
self.assertEqual([], rg.provider_uuids)
def test_from_extended_port_request(self):
port_resource_request = {
"request_groups": [
{
"id": uuids.group_id1,
"resources": {
"NET_BW_IGR_KILOBIT_PER_SEC": 1000,
"NET_BW_EGR_KILOBIT_PER_SEC": 1000},
"required": [
"CUSTOM_PHYSNET2", "CUSTOM_VNIC_TYPE_NORMAL"]
},
{
"id": uuids.group_id2,
"resources": {
"NET_PACKET_RATE_KILOPACKET_PER_SEC": 1000
},
"required": ["CUSTOM_VNIC_TYPE_NORMAL"]
}
],
"same_subtree": [
uuids.group_id1,
uuids.group_id2,
],
}
rgs = request_spec.RequestGroup.from_extended_port_request(
self.context, port_resource_request)
# two separate groups are returned
self.assertEqual(2, len(rgs))
self.assertTrue(rgs[0].use_same_provider)
self.assertEqual(
{"NET_BW_IGR_KILOBIT_PER_SEC": 1000,
"NET_BW_EGR_KILOBIT_PER_SEC": 1000},
rgs[0].resources)
self.assertEqual(
{"CUSTOM_PHYSNET2", "CUSTOM_VNIC_TYPE_NORMAL"},
rgs[0].required_traits)
self.assertEqual(uuids.group_id1, rgs[0].requester_id)
# and the rest is defaulted
self.assertEqual(set(), rgs[0].forbidden_traits)
self.assertEqual([], rgs[0].aggregates)
self.assertEqual([], rgs[0].provider_uuids)
self.assertTrue(rgs[1].use_same_provider)
self.assertEqual(
{"NET_PACKET_RATE_KILOPACKET_PER_SEC": 1000},
rgs[1].resources)
self.assertEqual(
{"CUSTOM_VNIC_TYPE_NORMAL"},
rgs[1].required_traits)
self.assertEqual(uuids.group_id2, rgs[1].requester_id)
# and the rest is defaulted
self.assertEqual(set(), rgs[1].forbidden_traits)
self.assertEqual([], rgs[1].aggregates)
self.assertEqual([], rgs[1].provider_uuids)
def test_compat_requester_and_provider(self):
req_obj = objects.RequestGroup(
requester_id=uuids.requester, provider_uuids=[uuids.rp1],
required_traits=set(['CUSTOM_PHYSNET_2']),
forbidden_aggregates=set(['agg3', 'agg4']))
versions = ovo_base.obj_tree_get_versions('RequestGroup')
primitive = req_obj.obj_to_primitive(
target_version='1.3',
version_manifest=versions)['nova_object.data']
self.assertIn('forbidden_aggregates', primitive)
self.assertIn('in_tree', primitive)
self.assertIn('requester_id', primitive)
self.assertIn('provider_uuids', primitive)
self.assertIn('required_traits', primitive)
self.assertCountEqual(
primitive['forbidden_aggregates'], set(['agg3', 'agg4']))
primitive = req_obj.obj_to_primitive(
target_version='1.2',
version_manifest=versions)['nova_object.data']
self.assertNotIn('forbidden_aggregates', primitive)
self.assertIn('in_tree', primitive)
self.assertIn('requester_id', primitive)
self.assertIn('provider_uuids', primitive)
self.assertIn('required_traits', primitive)
primitive = req_obj.obj_to_primitive(
target_version='1.1',
version_manifest=versions)['nova_object.data']
self.assertNotIn('forbidden_aggregates', primitive)
self.assertNotIn('in_tree', primitive)
self.assertIn('requester_id', primitive)
self.assertIn('provider_uuids', primitive)
self.assertIn('required_traits', primitive)
primitive = req_obj.obj_to_primitive(
target_version='1.0',
version_manifest=versions)['nova_object.data']
self.assertNotIn('forbidden_aggregates', primitive)
self.assertNotIn('in_tree', primitive)
self.assertNotIn('requester_id', primitive)
self.assertNotIn('provider_uuids', primitive)
self.assertIn('required_traits', primitive)
class TestDestinationObject(test.NoDBTestCase):
def setUp(self):
super(TestDestinationObject, self).setUp()
self.user_id = uuids.user_id
self.project_id = uuids.project_id
self.context = context.RequestContext(uuids.user_id, uuids.project_id)
def test_destination_aggregates_default(self):
destination = objects.Destination()
self.assertIsNone(destination.aggregates)
def test_destination_require_aggregates(self):
destination = objects.Destination()
destination.require_aggregates(['foo', 'bar'])
destination.require_aggregates(['baz'])
self.assertEqual(['foo,bar', 'baz'], destination.aggregates)
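        # Each require_aggregates() call appends one comma-joined entry; the
        # semantics implied by the assertion above (an assumption, not
        # verified here) are OR within an entry and AND across entries.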
def test_destination_forbidden_aggregates_default(self):
destination = objects.Destination()
self.assertIsNone(destination.forbidden_aggregates)
def test_destination_append_forbidden_aggregates(self):
destination = objects.Destination()
destination.append_forbidden_aggregates(set(['foo', 'bar']))
self.assertEqual(
set(['foo', 'bar']), destination.forbidden_aggregates)
destination.append_forbidden_aggregates(set(['bar', 'baz']))
self.assertEqual(
set(['foo', 'bar', 'baz']), destination.forbidden_aggregates)
def test_obj_make_compatible(self):
values = {
'host': 'fake_host',
'node': 'fake_node',
'cell': objects.CellMapping(uuid=uuids.cell1),
'aggregates': ['agg1', 'agg2'],
'allow_cross_cell_move': False,
'forbidden_aggregates': set(['agg3', 'agg4'])}
obj = objects.Destination(self.context, **values)
data = lambda x: x['nova_object.data']
manifest = ovo_base.obj_tree_get_versions(obj.obj_name())
obj_primitive = data(obj.obj_to_primitive(target_version='1.4',
version_manifest=manifest))
self.assertIn('forbidden_aggregates', obj_primitive)
self.assertCountEqual(obj_primitive['forbidden_aggregates'],
set(['agg3', 'agg4']))
self.assertIn('aggregates', obj_primitive)
obj_primitive = data(obj.obj_to_primitive(target_version='1.3',
version_manifest=manifest))
self.assertNotIn('forbidden_aggregates', obj_primitive)
self.assertIn('allow_cross_cell_move', obj_primitive)
obj_primitive = data(obj.obj_to_primitive(target_version='1.2',
version_manifest=manifest))
self.assertIn('aggregates', obj_primitive)
self.assertNotIn('allow_cross_cell_move', obj_primitive)
obj_primitive = data(obj.obj_to_primitive(target_version='1.1',
version_manifest=manifest))
self.assertIn('cell', obj_primitive)
self.assertNotIn('aggregates', obj_primitive)
obj_primitive = data(obj.obj_to_primitive(target_version='1.0',
version_manifest=manifest))
self.assertNotIn('forbidden_aggregates', obj_primitive)
self.assertNotIn('cell', obj_primitive)
self.assertEqual('fake_host', obj_primitive['host'])
class TestMappingRequestGroupsToProviders(test.NoDBTestCase):
def setUp(self):
super(TestMappingRequestGroupsToProviders, self).setUp()
self.spec = request_spec.RequestSpec()
def test_no_groups(self):
allocations = None
provider_traits = {}
self.spec.map_requested_resources_to_providers(
allocations, provider_traits)
        # we cannot assert much; at least we see that the above call doesn't
        # blow up
self.assertIsNone(self.spec.requested_resources)
def test_unnumbered_group_not_supported(self):
allocations = {}
provider_traits = {}
group1 = request_spec.RequestGroup(
use_same_provider=False)
self.spec.requested_resources = [group1]
self.assertRaises(
NotImplementedError,
self.spec.map_requested_resources_to_providers, allocations,
provider_traits)
def test_forbidden_traits_not_supported(self):
allocations = {}
provider_traits = {}
group1 = request_spec.RequestGroup(
forbidden_traits={'STORAGE_DISK_HDD'})
self.spec.requested_resources = [group1]
self.assertRaises(
NotImplementedError,
self.spec.map_requested_resources_to_providers, allocations,
provider_traits)
def test_aggregates_not_supported(self):
allocations = {}
provider_traits = {}
group1 = request_spec.RequestGroup(
aggregates=[[uuids.agg1]])
self.spec.requested_resources = [group1]
self.assertRaises(
NotImplementedError,
self.spec.map_requested_resources_to_providers, allocations,
provider_traits)
def test_one_group(self):
allocations = {
uuids.compute1_rp: {
"resources": {
'VCPU': 1
}
},
uuids.net_dev1_rp: {
"resources": {
'NET_BW_IGR_KILOBIT_PER_SEC': 1,
'NET_BW_EGR_KILOBIT_PER_SEC': 1,
}
}
}
provider_traits = {
uuids.compute1_rp: [],
uuids.net_dev1_rp: [
'CUSTOM_PHYSNET_PHYSNET0',
'CUSTOM_VNIC_TYPE_NORMAL'
],
}
group1 = request_spec.RequestGroup(
resources={
"NET_BW_IGR_KILOBIT_PER_SEC": 1,
"NET_BW_EGR_KILOBIT_PER_SEC": 1,
},
required_traits={
"CUSTOM_PHYSNET_PHYSNET0",
"CUSTOM_VNIC_TYPE_NORMAL",
})
self.spec.requested_resources = [group1]
self.spec.map_requested_resources_to_providers(
allocations, provider_traits)
self.assertEqual([uuids.net_dev1_rp], group1.provider_uuids)
def test_one_group_no_matching_allocation(self):
        # NOTE(gibi): This negative test scenario should not happen in a real
        # end-to-end test, as we assume that placement only returns valid
        # candidates. Still, we want to cover the error case in our
        # implementation
allocations = {
uuids.compute1_rp: {
"resources": {
'VCPU': 1
}
},
uuids.net_dev1_rp: {
"resources": {
'NET_BW_IGR_KILOBIT_PER_SEC': 1,
}
}
}
provider_traits = {
uuids.compute1_rp: [],
uuids.net_dev1_rp: [
'CUSTOM_PHYSNET_PHYSNET0', 'CUSTOM_VNIC_TYPE_NORMAL'
],
}
group1 = request_spec.RequestGroup(
resources={
"NET_BW_IGR_KILOBIT_PER_SEC": 1,
"NET_BW_EGR_KILOBIT_PER_SEC": 1,
},
required_traits={
"CUSTOM_PHYSNET_PHYSNET0",
"CUSTOM_VNIC_TYPE_NORMAL",
})
self.spec.requested_resources = [group1]
self.assertRaises(
ValueError, self.spec.map_requested_resources_to_providers,
allocations, provider_traits)
def test_one_group_no_matching_trait(self):
        # NOTE(gibi): This negative test scenario should not happen in a real
        # end-to-end test, as we assume that placement only returns valid
        # candidates. Still, we want to cover the error case in our
        # implementation
allocations = {
uuids.compute1_rp: {
"resources": {
'VCPU': 1
}
},
uuids.net_dev1_rp: {
"resources": {
'NET_BW_IGR_KILOBIT_PER_SEC': 1,
'NET_BW_EGR_KILOBIT_PER_SEC': 1,
}
}
}
provider_traits = {
uuids.compute1_rp: [],
uuids.net_dev1_rp: [
'CUSTOM_PHYSNET_PHYSNET1',
'CUSTOM_VNIC_TYPE_NORMAL'
],
}
group1 = request_spec.RequestGroup(
resources={
"NET_BW_IGR_KILOBIT_PER_SEC": 1,
"NET_BW_EGR_KILOBIT_PER_SEC": 1,
},
required_traits={
"CUSTOM_PHYSNET_PHYSNET0",
"CUSTOM_VNIC_TYPE_NORMAL",
})
self.spec.requested_resources = [group1]
self.assertRaises(
ValueError, self.spec.map_requested_resources_to_providers,
allocations, provider_traits)
def test_two_groups_same_provider(self):
allocations = {
uuids.compute1_rp: {
"resources": {
'VCPU': 1
}
},
uuids.net_dev1_rp: {
"resources": {
'NET_BW_IGR_KILOBIT_PER_SEC': 3,
'NET_BW_EGR_KILOBIT_PER_SEC': 3,
}
}
}
provider_traits = {
uuids.compute1_rp: [],
uuids.net_dev1_rp: [
'CUSTOM_PHYSNET_PHYSNET0',
'CUSTOM_VNIC_TYPE_NORMAL'
],
}
group1 = request_spec.RequestGroup(
resources={
"NET_BW_IGR_KILOBIT_PER_SEC": 1,
"NET_BW_EGR_KILOBIT_PER_SEC": 1,
},
required_traits={
"CUSTOM_PHYSNET_PHYSNET0",
"CUSTOM_VNIC_TYPE_NORMAL",
})
group2 = request_spec.RequestGroup(
resources={
"NET_BW_IGR_KILOBIT_PER_SEC": 2,
"NET_BW_EGR_KILOBIT_PER_SEC": 2,
},
required_traits={
"CUSTOM_PHYSNET_PHYSNET0",
"CUSTOM_VNIC_TYPE_NORMAL",
})
self.spec.requested_resources = [group1, group2]
self.spec.map_requested_resources_to_providers(
allocations, provider_traits)
self.assertEqual([uuids.net_dev1_rp], group1.provider_uuids)
self.assertEqual([uuids.net_dev1_rp], group2.provider_uuids)
def test_two_groups_different_providers(self):
# NOTE(gibi): we use OrderedDict here to make the test deterministic
allocations = collections.OrderedDict()
allocations[uuids.compute1_rp] = {
"resources": {
'VCPU': 1
}
}
allocations[uuids.net_dev1_rp] = {
"resources": {
'NET_BW_IGR_KILOBIT_PER_SEC': 2,
'NET_BW_EGR_KILOBIT_PER_SEC': 2,
}
}
allocations[uuids.net_dev2_rp] = {
"resources": {
'NET_BW_IGR_KILOBIT_PER_SEC': 1,
'NET_BW_EGR_KILOBIT_PER_SEC': 1,
}
}
provider_traits = {
uuids.compute1_rp: [],
uuids.net_dev1_rp: [
'CUSTOM_PHYSNET_PHYSNET0',
'CUSTOM_VNIC_TYPE_NORMAL'
],
uuids.net_dev2_rp: [
'CUSTOM_PHYSNET_PHYSNET0',
'CUSTOM_VNIC_TYPE_NORMAL'
],
}
group1 = request_spec.RequestGroup(
resources={
"NET_BW_IGR_KILOBIT_PER_SEC": 1,
"NET_BW_EGR_KILOBIT_PER_SEC": 1,
},
required_traits={
"CUSTOM_PHYSNET_PHYSNET0",
"CUSTOM_VNIC_TYPE_NORMAL",
})
group2 = request_spec.RequestGroup(
resources={
"NET_BW_IGR_KILOBIT_PER_SEC": 2,
"NET_BW_EGR_KILOBIT_PER_SEC": 2,
},
required_traits={
"CUSTOM_PHYSNET_PHYSNET0",
"CUSTOM_VNIC_TYPE_NORMAL",
})
self.spec.requested_resources = [group1, group2]
self.spec.map_requested_resources_to_providers(
allocations, provider_traits)
self.assertEqual([uuids.net_dev2_rp], group1.provider_uuids)
self.assertEqual([uuids.net_dev1_rp], group2.provider_uuids)
def test_two_groups_different_providers_reverse(self):
"""Similar as test_two_groups_different_providers but reorder the
groups to exercises another code path
"""
# NOTE(gibi): we use OrderedDict here to make the test deterministic
allocations = collections.OrderedDict()
allocations[uuids.compute1_rp] = {
"resources": {
'VCPU': 1
}
}
allocations[uuids.net_dev1_rp] = {
"resources": {
'NET_BW_IGR_KILOBIT_PER_SEC': 2,
'NET_BW_EGR_KILOBIT_PER_SEC': 2,
}
}
allocations[uuids.net_dev2_rp] = {
"resources": {
'NET_BW_IGR_KILOBIT_PER_SEC': 1,
'NET_BW_EGR_KILOBIT_PER_SEC': 1,
}
}
provider_traits = {
uuids.compute1_rp: [],
uuids.net_dev1_rp: [
'CUSTOM_PHYSNET_PHYSNET0', 'CUSTOM_VNIC_TYPE_NORMAL'
],
uuids.net_dev2_rp: [
'CUSTOM_PHYSNET_PHYSNET0', 'CUSTOM_VNIC_TYPE_NORMAL'
],
}
group1 = request_spec.RequestGroup(
resources={
"NET_BW_IGR_KILOBIT_PER_SEC": 2,
"NET_BW_EGR_KILOBIT_PER_SEC": 2,
},
required_traits={
"CUSTOM_PHYSNET_PHYSNET0",
"CUSTOM_VNIC_TYPE_NORMAL",
})
group2 = request_spec.RequestGroup(
resources={
"NET_BW_IGR_KILOBIT_PER_SEC": 1,
"NET_BW_EGR_KILOBIT_PER_SEC": 1,
},
required_traits={
"CUSTOM_PHYSNET_PHYSNET0",
"CUSTOM_VNIC_TYPE_NORMAL",
})
self.spec.requested_resources = [group1, group2]
self.spec.map_requested_resources_to_providers(
allocations, provider_traits)
self.assertEqual([uuids.net_dev1_rp], group1.provider_uuids)
self.assertEqual([uuids.net_dev2_rp], group2.provider_uuids)
def test_two_groups_different_providers_different_traits(self):
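        # NOTE(gibi): we use OrderedDict here to make the test deterministic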
allocations = collections.OrderedDict()
allocations[uuids.compute1_rp] = {
"resources": {
'VCPU': 1
}
}
allocations[uuids.net_dev1_physnet1_rp] = {
"resources": {
'NET_BW_IGR_KILOBIT_PER_SEC': 1,
'NET_BW_EGR_KILOBIT_PER_SEC': 1,
}
}
allocations[uuids.net_dev2_physnet0_rp] = {
"resources": {
'NET_BW_IGR_KILOBIT_PER_SEC': 1,
'NET_BW_EGR_KILOBIT_PER_SEC': 1,
}
}
provider_traits = {
uuids.compute1_rp: [],
uuids.net_dev1_physnet1_rp: [
'CUSTOM_PHYSNET_PHYSNET1', 'CUSTOM_VNIC_TYPE_NORMAL'
],
uuids.net_dev2_physnet0_rp: [
'CUSTOM_PHYSNET_PHYSNET0', 'CUSTOM_VNIC_TYPE_NORMAL'
],
}
group1 = request_spec.RequestGroup(
resources={
"NET_BW_IGR_KILOBIT_PER_SEC": 1,
"NET_BW_EGR_KILOBIT_PER_SEC": 1,
},
required_traits={
"CUSTOM_PHYSNET_PHYSNET0",
"CUSTOM_VNIC_TYPE_NORMAL",
})
group2 = request_spec.RequestGroup(
resources={
"NET_BW_IGR_KILOBIT_PER_SEC": 1,
"NET_BW_EGR_KILOBIT_PER_SEC": 1,
},
required_traits={
"CUSTOM_PHYSNET_PHYSNET1",
"CUSTOM_VNIC_TYPE_NORMAL",
})
self.spec.requested_resources = [group1, group2]
self.spec.map_requested_resources_to_providers(
allocations, provider_traits)
self.assertEqual([uuids.net_dev2_physnet0_rp], group1.provider_uuids)
self.assertEqual([uuids.net_dev1_physnet1_rp], group2.provider_uuids)
def test_three_groups(self):
"""A complex example where a lot of mappings are tried before the
solution is found.
"""
allocations = collections.OrderedDict()
allocations[uuids.compute1_rp] = {
"resources": {
'VCPU': 1
}
}
allocations[uuids.net_dev1_rp] = {
"resources": {
'NET_BW_IGR_KILOBIT_PER_SEC': 3,
'NET_BW_EGR_KILOBIT_PER_SEC': 3,
}
}
allocations[uuids.net_dev2_rp] = {
"resources": {
'NET_BW_IGR_KILOBIT_PER_SEC': 2,
'NET_BW_EGR_KILOBIT_PER_SEC': 2,
}
}
allocations[uuids.net_dev3_rp] = {
"resources": {
'NET_BW_IGR_KILOBIT_PER_SEC': 1,
'NET_BW_EGR_KILOBIT_PER_SEC': 3,
}
}
provider_traits = {
uuids.compute1_rp: [],
uuids.net_dev1_rp: [
'CUSTOM_PHYSNET_PHYSNET0', 'CUSTOM_VNIC_TYPE_NORMAL'
],
uuids.net_dev2_rp: [
'CUSTOM_PHYSNET_PHYSNET0', 'CUSTOM_VNIC_TYPE_NORMAL'
],
uuids.net_dev3_rp: [
'CUSTOM_PHYSNET_PHYSNET0', 'CUSTOM_VNIC_TYPE_NORMAL'
],
}
        # this fits on 2 RPs
group1 = request_spec.RequestGroup(
resources={
"NET_BW_IGR_KILOBIT_PER_SEC": 1,
"NET_BW_EGR_KILOBIT_PER_SEC": 3,
},
required_traits={
"CUSTOM_PHYSNET_PHYSNET0",
"CUSTOM_VNIC_TYPE_NORMAL",
})
        # this fits on 2 RPs
group2 = request_spec.RequestGroup(
resources={
"NET_BW_IGR_KILOBIT_PER_SEC": 2,
"NET_BW_EGR_KILOBIT_PER_SEC": 2,
},
required_traits={
"CUSTOM_PHYSNET_PHYSNET0",
"CUSTOM_VNIC_TYPE_NORMAL",
})
        # this fits on only one RP
group3 = request_spec.RequestGroup(
resources={
"NET_BW_IGR_KILOBIT_PER_SEC": 3,
"NET_BW_EGR_KILOBIT_PER_SEC": 3,
},
required_traits={
"CUSTOM_PHYSNET_PHYSNET0",
"CUSTOM_VNIC_TYPE_NORMAL",
})
self.spec.requested_resources = [group1, group2, group3]
orig_validator = self.spec._is_valid_group_rp_mapping
with mock.patch.object(
self.spec, '_is_valid_group_rp_mapping',
side_effect=orig_validator
) as mock_validator:
self.spec.map_requested_resources_to_providers(
allocations, provider_traits)
self.assertEqual([uuids.net_dev3_rp], group1.provider_uuids)
self.assertEqual([uuids.net_dev2_rp], group2.provider_uuids)
self.assertEqual([uuids.net_dev1_rp], group3.provider_uuids)
        # the algorithm tried out many possible mappings before it found
        # the solution
self.assertEqual(58, mock_validator.call_count)
@mock.patch.object(request_spec.LOG, 'debug')
def test_two_groups_matches_but_allocation_leftover(self, mock_debug):
        # NOTE(gibi): This negative test scenario should not happen in a real
        # end-to-end test, as we assume that placement only returns valid
        # candidates, and this candidate is not valid as it provides more
        # resources than the ports are requesting. Still, we want to cover
        # the error case in our implementation
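        # NOTE(gibi): we use OrderedDict here to make the test deterministic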
allocations = collections.OrderedDict()
allocations[uuids.compute1_rp] = {
"resources": {
'VCPU': 1
}
}
allocations[uuids.net_dev1_physnet0_rp] = {
"resources": {
'NET_BW_IGR_KILOBIT_PER_SEC': 2,
'NET_BW_EGR_KILOBIT_PER_SEC': 2,
}
}
allocations[uuids.net_dev2_physnet0_rp] = {
"resources": {
'NET_BW_IGR_KILOBIT_PER_SEC': 1,
'NET_BW_EGR_KILOBIT_PER_SEC': 1,
}
}
provider_traits = {
uuids.compute1_rp: [],
uuids.net_dev1_physnet0_rp: [
'CUSTOM_PHYSNET_PHYSNET0', 'CUSTOM_VNIC_TYPE_NORMAL'
],
uuids.net_dev2_physnet0_rp: [
'CUSTOM_PHYSNET_PHYSNET0', 'CUSTOM_VNIC_TYPE_NORMAL'
],
}
group1 = request_spec.RequestGroup(
resources={
"NET_BW_IGR_KILOBIT_PER_SEC": 1,
"NET_BW_EGR_KILOBIT_PER_SEC": 1,
},
required_traits={
"CUSTOM_PHYSNET_PHYSNET0",
"CUSTOM_VNIC_TYPE_NORMAL",
})
group2 = request_spec.RequestGroup(
resources={
"NET_BW_IGR_KILOBIT_PER_SEC": 1,
"NET_BW_EGR_KILOBIT_PER_SEC": 1,
},
required_traits={
"CUSTOM_PHYSNET_PHYSNET0",
"CUSTOM_VNIC_TYPE_NORMAL",
})
self.spec.requested_resources = [group1, group2]
self.assertRaises(
ValueError, self.spec.map_requested_resources_to_providers,
allocations, provider_traits)
self.assertIn('allocations leftover', mock_debug.mock_calls[3][1][0])
class TestRequestLevelParams(test.NoDBTestCase):
def setUp(self):
super().setUp()
self.user_id = uuids.user_id
self.project_id = uuids.project_id
self.context = context.RequestContext(uuids.user_id, uuids.project_id)
def test_obj_make_compatible(self):
obj = request_spec.RequestLevelParams(
self.context,
root_required={"CUSTOM_FOO", "CUSTOM_BAR"},
root_forbidden={"CUSTOM_BAZ"},
same_subtree=[["group1", "group2"], ["group3", "group4"]])
manifest = ovo_base.obj_tree_get_versions(obj.obj_name())
obj_primitive = obj.obj_to_primitive(
target_version='1.1',
version_manifest=manifest)['nova_object.data']
self.assertIn('root_required', obj_primitive)
self.assertIn('root_forbidden', obj_primitive)
self.assertIn('same_subtree', obj_primitive)
obj_primitive = obj.obj_to_primitive(
target_version='1.0',
version_manifest=manifest)['nova_object.data']
self.assertIn('root_required', obj_primitive)
self.assertIn('root_forbidden', obj_primitive)
self.assertNotIn('same_subtree', obj_primitive)
def test_extend_with(self):
obj1 = request_spec.RequestLevelParams(
self.context,
root_required={"CUSTOM_FOO"},
root_forbidden={"CUSTOM_BAZ"},
same_subtree=[["group1", "group2"]])
obj2 = request_spec.RequestLevelParams(
self.context,
root_required={"CUSTOM_BAR"},
root_forbidden={"CUSTOM_FOOBAR"},
same_subtree=[["group3", "group4"]])
obj1.extend_with(obj2)
self.assertEqual({"CUSTOM_FOO", "CUSTOM_BAR"}, obj1.root_required)
self.assertEqual({"CUSTOM_BAZ", "CUSTOM_FOOBAR"}, obj1.root_forbidden)
self.assertEqual(
[["group1", "group2"], ["group3", "group4"]],
obj1.same_subtree)
|
{
"content_hash": "0cc306d5d58206bbe733b51b1013ebba",
"timestamp": "",
"source": "github",
"line_count": 1862,
"max_line_length": 79,
"avg_line_length": 42.07733619763695,
"alnum_prop": 0.5691913003522745,
"repo_name": "openstack/nova",
"id": "d91015a699be6a9206b5cff440701be218ff966d",
"size": "78956",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nova/tests/unit/objects/test_request_spec.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "3545"
},
{
"name": "Mako",
"bytes": "1952"
},
{
"name": "Python",
"bytes": "23261880"
},
{
"name": "Shell",
"bytes": "28113"
},
{
"name": "Smarty",
"bytes": "507244"
}
],
"symlink_target": ""
}
|
import re
import uuid as py_uuid
from common_fixtures import * # NOQA
from test_volume import VOLUME_CLEANUP_LABEL
TEST_IMAGE = 'ibuildthecloud/helloworld'
TEST_IMAGE_LATEST = TEST_IMAGE + ':latest'
TEST_IMAGE_UUID = 'docker:' + TEST_IMAGE
if_docker = pytest.mark.skipif("os.environ.get('DOCKER_TEST') == 'false'",
                               reason='DOCKER_TEST is set to false')
@pytest.fixture(scope='session')
def docker_client(super_client):
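    # find an active Docker host registered with the server, create an API
    # key for its account and return a client scoped to that account; fail
    # loudly if no such host exists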
for host in super_client.list_host(state='active', remove_null=True,
kind='docker'):
key = super_client.create_api_key(accountId=host.accountId)
super_client.wait_success(key)
wait_for(lambda: host.agent().state == 'active')
wait_for(lambda: len(host.storagePools()) > 0 and
host.storagePools()[0].state == 'active')
return api_client(key.publicValue, key.secretValue)
raise Exception('Failed to find docker host, please register one')
@if_docker
def test_docker_create_only(docker_client, super_client):
uuid = TEST_IMAGE_UUID
container = docker_client.create_container(imageUuid=uuid,
startOnCreate=False)
try:
container = docker_client.wait_success(container)
assert container is not None
assert 'container' == container.type
image = super_client.reload(container).image()
assert image.instanceKind == 'container'
image_mapping = filter(
lambda m: m.storagePool().external,
image.imageStoragePoolMaps()
)
assert len(image_mapping) == 0
assert not image.isPublic
        assert image.name == '{}'.format(image.data.dockerImage.fullName)
assert image.name == TEST_IMAGE_LATEST
assert image.data.dockerImage.repository == 'helloworld'
assert image.data.dockerImage.namespace == 'ibuildthecloud'
assert image.data.dockerImage.tag == 'latest'
finally:
if container is not None:
docker_client.delete(container)
@if_docker
def test_docker_create_only_from_sha(docker_client, super_client):
image_name = 'tianon/true@sha256:662fc60808e6d5628a090e39' \
'b4bcae694add28a626031cc889109c2cf2af5d73'
uuid = 'docker:' + image_name
container = docker_client.create_container(name='test-sha256',
imageUuid=uuid,
startOnCreate=False)
try:
container = docker_client.wait_success(container)
assert container is not None
assert 'container' == container.type
image = super_client.reload(container).image()
assert image.instanceKind == 'container'
image_mapping = filter(
lambda m: m.storagePool().external,
image.imageStoragePoolMaps()
)
assert len(image_mapping) == 0
assert not image.isPublic
        assert image.name == '{}'.format(image.data.dockerImage.fullName)
assert image.name == image_name
assert image.data.dockerImage.repository == 'true'
assert image.data.dockerImage.namespace == 'tianon'
assert image.data.dockerImage.tag == 'sha256:662fc60808e6d5628a090e' \
'39b4bcae694add28a626031cc8891' \
'09c2cf2af5d73'
finally:
if container is not None:
docker_client.delete(container)
@if_docker
def test_docker_create_with_start(docker_client, super_client):
uuid = TEST_IMAGE_UUID
container = docker_client.create_container(imageUuid=uuid)
try:
assert container.state == 'creating'
container = super_client.wait_success(container)
assert container.state == 'running'
assert container.data.dockerContainer.Image == TEST_IMAGE_LATEST
assert len(container.volumes()) == 1
image = container.volumes()[0].image()
image = super_client.reload(image)
image_mapping = filter(
lambda m: not m.storagePool().external,
image.imageStoragePoolMaps()
)
assert len(image_mapping) == 1
assert image_mapping[0].imageId == image.id
finally:
if container is not None:
docker_client.delete(container)
@if_docker
def test_docker_build(docker_client, super_client):
uuid = 'image-' + random_str()
url = 'https://github.com/rancherio/tiny-build/raw/master/build.tar'
container = docker_client.create_container(imageUuid='docker:' + uuid,
build={
'context': url,
})
try:
assert container.state == 'creating'
container = super_client.wait_success(container)
        # This builds tianon/true, which exits immediately
assert container.state == 'running' or container.state == 'stopped'
assert container.transitioning == 'no'
assert container.data.dockerContainer.Image == uuid + ':latest'
finally:
if container is not None:
docker_client.delete(container)
@if_docker
def test_docker_create_with_start_using_docker_io(docker_client, super_client):
image = 'docker.io/' + TEST_IMAGE
uuid = 'docker:' + image
container = docker_client.create_container(imageUuid=uuid)
container = super_client.wait_success(container)
assert container.state == 'running'
assert container.data.dockerContainer.Image == image + ':latest'
if container is not None:
docker_client.delete(container)
@if_docker
def test_docker_command(docker_client, super_client):
uuid = TEST_IMAGE_UUID
container = docker_client.create_container(imageUuid=uuid,
command=['sleep', '42'])
try:
container = super_client.wait_success(container)
assert container.data.dockerContainer.Command == 'sleep 42'
finally:
if container is not None:
docker_client.delete(container)
@if_docker
def test_docker_command_args(docker_client, super_client):
uuid = TEST_IMAGE_UUID
container = docker_client.create_container(imageUuid=uuid,
command=['sleep', '1', '2',
'3'])
try:
container = super_client.wait_success(container)
assert container.data.dockerContainer.Command == 'sleep 1 2 3'
finally:
if container is not None:
docker_client.delete(container)
@if_docker
def test_short_lived_container(docker_client, super_client):
container = docker_client.create_container(imageUuid="docker:tianon/true")
container = wait_for_condition(
docker_client, container,
lambda x: x.state == 'stopped',
lambda x: 'State is: ' + x.state)
assert container.state == 'stopped'
assert container.transitioning == 'no'
@if_docker
def test_docker_stop(docker_client):
uuid = TEST_IMAGE_UUID
container = docker_client.create_container(imageUuid=uuid)
assert container.state == 'creating'
container = docker_client.wait_success(container)
assert container.state == 'running'
start = time.time()
container = container.stop(timeout=0)
assert container.state == 'stopping'
container = docker_client.wait_success(container)
delta = time.time() - start
assert container.state == 'stopped'
assert delta < 10
@if_docker
def test_docker_purge(docker_client):
uuid = TEST_IMAGE_UUID
container = docker_client.create_container(imageUuid=uuid)
assert container.state == 'creating'
container = docker_client.wait_success(container)
assert container.state == 'running'
container = container.stop(timeout=0)
assert container.state == 'stopping'
container = docker_client.wait_success(container)
assert container.state == 'stopped'
docker_client.delete(container)
container = docker_client.wait_success(container)
assert container.removed is not None
container = docker_client.wait_success(container.purge())
assert container.state == 'purged'
volumes = container.volumes()
assert len(volumes) == 0
@if_docker
def test_docker_image_format(docker_client, super_client):
uuid = TEST_IMAGE_UUID
container = docker_client.create_container(imageUuid=uuid)
try:
container = docker_client.wait_success(container)
container = super_client.reload(container)
assert container.image().format == 'docker'
assert container.volumes()[0].image().format == 'docker'
assert container.volumes()[0].format == 'docker'
finally:
if container is not None:
docker_client.delete(container)
@if_docker
def test_docker_ports_from_container_publish_all(docker_client):
uuid = TEST_IMAGE_UUID
c = docker_client.create_container(networkMode='bridge',
publishAllPorts=True,
imageUuid=uuid)
c = docker_client.wait_success(c)
assert c.state == 'running'
ports = c.ports_link()
assert len(ports) == 1
port = ports[0]
assert port.publicPort is not None
assert port.privatePort == 8080
assert port.publicIpAddressId is not None
assert port.kind == 'imagePort'
docker_client.delete(c)
@if_docker
def test_docker_ports_from_container_no_publish(docker_client):
uuid = TEST_IMAGE_UUID
c = docker_client.create_container(imageUuid=uuid)
c = docker_client.wait_success(c)
assert c.state == 'running'
ports = c.ports_link()
assert len(ports) == 1
port = ports[0]
assert port.publicPort is None
assert port.privatePort == 8080
assert port.publicIpAddressId is not None
assert port.kind == 'imagePort'
docker_client.delete(c)
@if_docker
def test_docker_ports_from_container(docker_client, super_client):
def reload(x):
return super_client.reload(x)
_ = reload
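    # `_` re-reads an object through super_client so that admin-only fields
    # (e.g. subnetId on an IP address) are visible below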
uuid = TEST_IMAGE_UUID
c = docker_client.create_container(networkMode='bridge',
startOnCreate=False,
publishAllPorts=True,
imageUuid=uuid,
ports=[
'8081',
'8082/tcp',
'8083/udp'])
c = docker_client.wait_success(c)
assert c.state == 'stopped'
count = 0
for port in c.ports_link():
count += 1
assert port.kind == 'userPort'
assert port.publicPort is None
assert port.privateIpAddressId is None
assert port.publicIpAddressId is None
if port.privatePort == 8081:
assert port.protocol == 'tcp'
elif port.privatePort == 8082:
assert port.protocol == 'tcp'
elif port.privatePort == 8083:
assert port.protocol == 'udp'
else:
assert False
assert count == 3
c = docker_client.wait_success(c.start())
assert c.state == 'running'
network = super_client.reload(c).nics()[0].network()
count = 0
ip = None
privateIp = None
for port in c.ports_link():
count += 1
assert port.privateIpAddressId is not None
privateIp = port.privateIpAddress()
assert privateIp.kind == 'docker'
assert privateIp.networkId == network.id
assert privateIp.network() is not None
assert _(privateIp).subnetId is None
assert port.publicPort is not None
assert port.publicIpAddressId is not None
if ip is None:
ip = port.publicIpAddressId
assert port.publicIpAddressId == ip
if port.privatePort == 8081:
assert port.kind == 'userPort'
assert port.protocol == 'tcp'
elif port.privatePort == 8082:
assert port.kind == 'userPort'
assert port.protocol == 'tcp'
elif port.privatePort == 8083:
assert port.kind == 'userPort'
assert port.protocol == 'udp'
elif port.privatePort == 8080:
assert port.kind == 'imagePort'
else:
assert False
assert count == 4
assert c.primaryIpAddress == privateIp.address
c = docker_client.wait_success(c.stop(timeout=0))
assert c.state == 'stopped'
count = 0
for nic in _(c).nics():
for ip in nic.ipAddresses():
count += 1
assert ip.kind == 'docker'
assert ip.state == 'inactive'
assert ip.address is None
assert count == 1
c = docker_client.wait_success(c.start())
assert c.state == 'running'
count = 0
for nic in _(c).nics():
for ip in nic.ipAddresses():
count += 1
assert ip.kind == 'docker'
assert ip.state == 'active'
assert ip.address is not None
assert count == 1
docker_client.delete(c)
@if_docker
def test_docker_bind_address(docker_client, super_client):
c = docker_client.create_container(name='bindAddrTest',
imageUuid=TEST_IMAGE_UUID,
ports=['127.0.0.1:89:8999'])
c = docker_client.wait_success(c)
assert c.state == 'running'
c = super_client.reload(c)
bindings = c.data['dockerInspect']['HostConfig']['PortBindings']
assert bindings['8999/tcp'] == [{'HostIp': '127.0.0.1', 'HostPort': '89'}]
c = docker_client.create_container(name='bindAddrTest2',
imageUuid=TEST_IMAGE_UUID,
ports=['127.2.2.2:89:8999'])
c = docker_client.wait_success(c)
assert c.state == 'running'
c = super_client.reload(c)
bindings = c.data['dockerInspect']['HostConfig']['PortBindings']
assert bindings['8999/tcp'] == [{'HostIp': '127.2.2.2', 'HostPort': '89'}]
c = docker_client.create_container(name='bindAddrTest3',
imageUuid=TEST_IMAGE_UUID,
ports=['127.2.2.2:89:8999'])
c = docker_client.wait_transitioning(c)
assert c.transitioning == 'error'
assert c.transitioningMessage == \
'Scheduling failed: host needs ports 89/tcp available'
assert c.state == 'error'
@if_docker
def test_no_port_override(docker_client, super_client):
c = docker_client.create_container(imageUuid=TEST_IMAGE_UUID,
ports=['8083:8080'])
try:
c = super_client.wait_success(c, timeout=240)
assert c.state == 'running'
ports = c.ports_link()
assert len(ports) == 1
assert ports[0].kind == 'userPort'
assert ports[0].publicPort == 8083
assert ports[0].privatePort == 8080
finally:
if c is not None:
super_client.delete(c)
@if_docker
def test_docker_volumes(docker_client, super_client):
def reload(x):
return super_client.reload(x)
_ = reload
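    # `_` re-reads an object through super_client to expose admin-only
    # fields such as attachedState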
uuid = TEST_IMAGE_UUID
bind_mount_uuid = py_uuid.uuid4().hex
bar_host_path = '/tmp/bar%s' % bind_mount_uuid
bar_bind_mount = '%s:/bar' % bar_host_path
c = docker_client.create_container(imageUuid=uuid,
startOnCreate=False,
dataVolumes=['/foo',
bar_bind_mount])
c = docker_client.wait_success(c)
assert len(c.dataVolumes) == 2
assert set(c.dataVolumes) == set(['/foo', bar_bind_mount])
c = super_client.wait_success(c.start())
volumes = c.volumes()
assert len(volumes) == 1
mounts = c.mounts()
assert len(mounts) == 2
foo_mount, bar_mount = None, None
foo_vol, bar_vol = None, None
for mount in mounts:
assert mount.instance().id == c.id
if mount.path == '/foo':
foo_mount = mount
foo_vol = mount.volume()
elif mount.path == '/bar':
bar_mount = mount
bar_vol = mount.volume()
foo_vol = wait_for_condition(
docker_client, foo_vol, lambda x: x.state == 'active')
assert foo_mount is not None
assert foo_mount.permissions == 'rw'
assert foo_vol is not None
assert not foo_vol.isHostPath
assert _(foo_vol).attachedState == 'inactive'
bar_vol = wait_for_condition(
docker_client, bar_vol, lambda x: x.state == 'active')
assert bar_mount is not None
assert bar_mount.permissions == 'rw'
assert bar_vol is not None
assert _(bar_vol).attachedState == 'inactive'
assert bar_vol.isHostPath
# We use 'in' instead of '==' because Docker uses the fully qualified
# non-linked path and it might look something like: /mnt/sda1/<path>
assert bar_host_path in bar_vol.uri
c2 = docker_client.create_container(name="volumes_from_test",
imageUuid=uuid,
startOnCreate=False,
dataVolumesFrom=[c.id])
c2 = docker_client.wait_success(c2)
assert len(c2.dataVolumesFrom) == 1
assert set(c2.dataVolumesFrom) == set([c.id])
c2 = super_client.wait_success(c2.start())
c2_mounts = c2.mounts()
assert len(c2_mounts) == 2
for mount in c2_mounts:
assert mount.instance().id == c2.id
if mount.path == '/foo':
assert mount.volumeId == foo_vol.id
elif mount.path == '/bar':
assert mount.volumeId == bar_vol.id
c = docker_client.wait_success(c.stop(remove=True, timeout=0))
c2 = docker_client.wait_success(c2.stop(remove=True, timeout=0))
_check_path(foo_vol, True, docker_client, super_client)
_check_path(bar_vol, True, docker_client, super_client)
@if_docker
def test_volumes_from_more_than_one_container(docker_client):
c = docker_client.create_container(imageUuid=TEST_IMAGE_UUID,
dataVolumes=['/foo'])
docker_client.wait_success(c)
c2 = docker_client.create_container(imageUuid=TEST_IMAGE_UUID,
dataVolumes=['/bar'])
docker_client.wait_success(c2)
c3 = docker_client.create_container(imageUuid=TEST_IMAGE_UUID,
dataVolumesFrom=[c.id, c2.id])
c3 = docker_client.wait_success(c3)
mounts = c3.mounts()
assert len(mounts) == 2
paths = ['/foo', '/bar']
for m in mounts:
assert m.path in paths
@if_docker
def test_container_fields(docker_client, super_client):
caps = ["SYS_MODULE", "SYS_RAWIO", "SYS_PACCT", "SYS_ADMIN",
"SYS_NICE", "SYS_RESOURCE", "SYS_TIME", "SYS_TTY_CONFIG",
"MKNOD", "AUDIT_WRITE", "AUDIT_CONTROL", "MAC_OVERRIDE",
"MAC_ADMIN", "NET_ADMIN", "SYSLOG", "CHOWN", "NET_RAW",
"DAC_OVERRIDE", "FOWNER", "DAC_READ_SEARCH", "FSETID",
"KILL", "SETGID", "SETUID", "LINUX_IMMUTABLE",
"NET_BIND_SERVICE", "NET_BROADCAST", "IPC_LOCK",
"IPC_OWNER", "SYS_CHROOT", "SYS_PTRACE", "SYS_BOOT",
"LEASE", "SETFCAP", "WAKE_ALARM", "BLOCK_SUSPEND", "ALL"]
test_name = 'container_test'
image_uuid = 'docker:ibuildthecloud/helloworld'
restart_policy = {"maximumRetryCount": 2, "name": "on-failure"}
c = docker_client.create_container(name=test_name,
imageUuid=image_uuid,
capAdd=caps,
capDrop=caps,
dnsSearch=['8.8.8.8', '1.2.3.4'],
dns=['8.8.8.8', '1.2.3.4'],
privileged=True,
domainName="rancher.io",
memory=12000000,
memorySwap=16000000,
cpuSet="0,1",
stdinOpen=True,
tty=True,
command=["true"],
entryPoint=["/bin/sh", "-c"],
cpuShares=400,
restartPolicy=restart_policy,
devices="/dev/null:/dev/xnull:rw")
c = super_client.wait_success(c)
wait_for(lambda: super_client.reload(c).data['dockerInspect'] is not None)
wait_for(lambda: super_client.
reload(c).data['dockerInspect']['HostConfig'] is not None)
assert set(c.data['dockerInspect']['HostConfig']['CapAdd']) == set(caps)
assert set(c.data['dockerInspect']['HostConfig']['CapDrop']) == set(caps)
actual_dns = c.data['dockerInspect']['HostConfig']['Dns']
assert set(actual_dns) == set(['8.8.8.8', '1.2.3.4', '169.254.169.250'])
actual_dns = c.data['dockerInspect']['HostConfig']['DnsSearch']
assert set(actual_dns) == set(['8.8.8.8', '1.2.3.4', 'rancher.internal'])
assert c.data['dockerInspect']['HostConfig']['Privileged']
assert c.data['dockerInspect']['Config']['Domainname'] == "rancher.io"
assert c.data['dockerInspect']['HostConfig']['Memory'] == 12000000
# assert c.data['dockerInspect']['Config']['MemorySwap'] == 16000000
assert c.data['dockerInspect']['HostConfig']['CpusetCpus'] == "0,1"
assert c.data['dockerInspect']['Config']['Tty']
assert c.data['dockerInspect']['Config']['OpenStdin']
actual_entry_point = set(c.data['dockerInspect']['Config']['Entrypoint'])
assert actual_entry_point == set(["/bin/sh", "-c"])
assert c.data['dockerInspect']['HostConfig']['CpuShares'] == 400
act_restart_pol = c.data['dockerInspect']['HostConfig']['RestartPolicy']
assert act_restart_pol['MaximumRetryCount'] == 2
assert act_restart_pol['Name'] == "on-failure"
actual_devices = c.data['dockerInspect']['HostConfig']['Devices']
assert len(actual_devices) == 1
assert actual_devices[0]['CgroupPermissions'] == "rw"
assert actual_devices[0]['PathOnHost'] == "/dev/null"
assert actual_devices[0]['PathInContainer'] == "/dev/xnull"
def get_mounts(resource):
return [x for x in resource.mounts() if x.state != 'inactive']
def check_mounts(client, resource, count):
def wait_for_mount_count(res):
m = get_mounts(res)
return len(m) == count
wait_for_condition(client, resource, wait_for_mount_count)
mounts = get_mounts(resource)
return mounts
def volume_cleanup_setup(docker_client, uuid, strategy=None):
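    # create a container with one anonymous volume (/tmp/foo) and one named
    # volume, optionally labelled with a cleanup strategy, then stop and
    # purge it so callers can assert what happened to each volume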
labels = {}
if strategy:
labels[VOLUME_CLEANUP_LABEL] = strategy
vol_name = random_str()
c = docker_client.create_container(name="volume_cleanup_test",
imageUuid=uuid,
dataVolumes=['/tmp/foo',
'%s:/foo' % vol_name],
labels=labels)
c = docker_client.wait_success(c)
if strategy:
assert c.labels[VOLUME_CLEANUP_LABEL] == strategy
mounts = check_mounts(docker_client, c, 2)
v1 = mounts[0].volume()
v2 = mounts[1].volume()
wait_for_condition(docker_client, v1, lambda x: x.state == 'active',
lambda x: 'state is %s' % x)
wait_for_condition(docker_client, v2, lambda x: x.state == 'active',
lambda x: 'state is %s' % x)
named_vol = v1 if v1.name == vol_name else v2
unnamed_vol = v1 if v1.name != vol_name else v2
c = docker_client.wait_success(c.stop(remove=True, timeout=0))
c = docker_client.wait_success(c.purge())
check_mounts(docker_client, c, 0)
return c, named_vol, unnamed_vol
@if_docker
def test_cleanup_volume_strategy(docker_client):
c, named_vol, unnamed_vol = volume_cleanup_setup(docker_client,
TEST_IMAGE_UUID)
assert docker_client.wait_success(named_vol).state == 'detached'
assert docker_client.wait_success(unnamed_vol).removed is not None
c, named_vol, unnamed_vol = volume_cleanup_setup(docker_client,
TEST_IMAGE_UUID,
strategy='unnamed')
assert docker_client.wait_success(named_vol).state == 'detached'
assert docker_client.wait_success(unnamed_vol).removed is not None
c, named_vol, unnamed_vol = volume_cleanup_setup(docker_client,
TEST_IMAGE_UUID,
strategy='none')
assert docker_client.wait_success(named_vol).state == 'detached'
assert docker_client.wait_success(unnamed_vol).state == 'detached'
c, named_vol, unnamed_vol = volume_cleanup_setup(docker_client,
TEST_IMAGE_UUID,
strategy='all')
assert docker_client.wait_success(named_vol).removed is not None
assert docker_client.wait_success(unnamed_vol).removed is not None
@if_docker
def test_docker_mount_life_cycle(docker_client):
    # Using nginx because it has a baked-in volume, which is a good test case
uuid = 'docker:nginx:1.9.0'
bind_mount_uuid = py_uuid.uuid4().hex
bar_host_path = '/tmp/bar%s' % bind_mount_uuid
bar_bind_mount = '%s:/bar' % bar_host_path
c = docker_client.create_container(imageUuid=uuid,
startOnCreate=False,
dataVolumes=['%s:/foo' % random_str(),
bar_bind_mount])
c = docker_client.wait_success(c)
c = docker_client.wait_success(c.start())
mounts = check_mounts(docker_client, c, 3)
v1 = mounts[0].volume()
v2 = mounts[1].volume()
v3 = mounts[2].volume()
wait_for_condition(docker_client, v1, lambda x: x.state == 'active',
lambda x: 'state is %s' % x)
wait_for_condition(docker_client, v2, lambda x: x.state == 'active',
lambda x: 'state is %s' % x)
wait_for_condition(docker_client, v3, lambda x: x.state == 'active',
lambda x: 'state is %s' % x)
c = docker_client.wait_success(c.stop(timeout=0))
assert c.state == 'stopped'
wait_for_condition(docker_client, v1, lambda x: x.state == 'active',
lambda x: 'state is %s' % x)
wait_for_condition(docker_client, v2, lambda x: x.state == 'active',
lambda x: 'state is %s' % x)
wait_for_condition(docker_client, v3, lambda x: x.state == 'active',
lambda x: 'state is %s' % x)
c = docker_client.wait_success(c.remove())
check_mounts(docker_client, c, 0)
assert docker_client.wait_success(v1).state == 'detached'
assert docker_client.wait_success(v2).state == 'detached'
assert docker_client.wait_success(v3).state == 'detached'
@if_docker
def test_docker_labels(docker_client):
    # Docker 1.8 broke the behavior where labels would come from the image;
    # maybe one day they will bring it back.
# image_uuid = 'docker:ranchertest/labelled:v0.1.0'
image_uuid = TEST_IMAGE_UUID
c = docker_client.create_container(name="labels_test",
imageUuid=image_uuid,
labels={'io.rancher.testlabel.'
'fromapi': 'yes'})
c = docker_client.wait_success(c)
def labels_callback():
labels = c.instanceLabels()
if len(labels) >= 3:
return labels
return None
labels = wait_for(labels_callback)
actual_labels = {}
for l in labels:
actual_labels[l.key] = l.value
expected_labels = {
# 'io.rancher.testlabel': 'value1',
# 'io.rancher.testlabel.space': 'value 1',
'io.rancher.testlabel.fromapi': 'yes',
'io.rancher.container.uuid': c.uuid,
'io.rancher.container.name': c.name,
'io.rancher.container.ip': c.primaryIpAddress + '/16',
}
assert actual_labels == expected_labels
docker_client.delete(c)
@if_docker
def test_container_odd_fields(super_client, docker_client):
c = docker_client.create_container(pidMode=None,
imageUuid=TEST_IMAGE_UUID,
logConfig={
'driver': None,
'config': None,
})
c = docker_client.wait_success(c)
assert c.state == 'running'
assert c.pidMode is None
assert c.logConfig == {'type': 'logConfig', 'driver': None, 'config': None}
c = super_client.reload(c)
assert c.data.dockerInspect.HostConfig.LogConfig['Type'] == 'json-file'
assert not c.data.dockerInspect.HostConfig.LogConfig['Config']
@if_docker
def test_container_bad_build(super_client, docker_client):
c = docker_client.create_container(imageUuid=TEST_IMAGE_UUID,
build={
'context': None,
'remote': None
})
c = docker_client.wait_success(c)
assert c.state == 'running'
assert c.pidMode is None
assert c.build == {'context': None, 'remote': None, 'type': 'dockerBuild'}
c = super_client.reload(c)
assert c.data.dockerInspect.Config.Image == TEST_IMAGE_LATEST
@if_docker
def test_service_link_emu_docker_link(super_client, docker_client):
env_name = random_str()
env = docker_client.create_environment(name=env_name)
env = docker_client.wait_success(env)
assert env.state == "active"
server = docker_client.create_service(name='server', launchConfig={
'imageUuid': TEST_IMAGE_UUID
}, environmentId=env.id)
service = docker_client.create_service(name='client', launchConfig={
'imageUuid': TEST_IMAGE_UUID
}, environmentId=env.id)
service_link = {"serviceId": server.id, "name": "other"}
service.setservicelinks(serviceLinks=[service_link])
server = docker_client.wait_success(server)
service = docker_client.wait_success(service)
server = docker_client.wait_success(server.activate())
assert server.state == 'active'
service = docker_client.wait_success(service.activate())
assert service.state == 'active'
instance = find_one(service.instances)
instance = super_client.reload(instance)
link = find_one(instance.instanceLinks)
target_instance = find_one(server.instances)
assert len(link.ports) == 1
assert link.ports[0].privatePort == 8080
assert link.ports[0].publicPort == 8080
assert link.ports[0].protocol == 'tcp'
assert link.ports[0].ipAddress is not None
assert link.targetInstanceId == target_instance.id
assert link.instanceNames == ['{}-server-1'.format(env_name)]
docker_client.delete(env)
@if_docker
def test_service_links_with_no_ports(docker_client):
env = docker_client.create_environment(name=random_str())
env = docker_client.wait_success(env)
assert env.state == "active"
server = docker_client.create_service(name='server', launchConfig={
'imageUuid': TEST_IMAGE_UUID,
'stdinOpen': True,
'tty': True,
}, environmentId=env.id)
server = docker_client.wait_success(server)
assert server.state == 'inactive'
service = docker_client.create_service(name='client', launchConfig={
'imageUuid': TEST_IMAGE_UUID,
'stdinOpen': True,
'tty': True,
}, environmentId=env.id)
service = docker_client.wait_success(service)
assert service.state == 'inactive'
service_link = {"serviceId": server.id, "name": "bb"}
service.setservicelinks(serviceLinks=[service_link])
server = docker_client.wait_success(server.activate())
assert server.state == 'active'
service = docker_client.wait_success(service.activate())
assert service.state == 'active'
@if_docker
def test_blkio_device_options(super_client, docker_client):
dev_opts = {
'/dev/sda': {
'readIops': 1000,
'writeIops': 2000,
},
'/dev/null': {
'readBps': 3000,
}
}
c = docker_client.create_container(imageUuid=TEST_IMAGE_UUID,
networkMode=None,
blkioDeviceOptions=dev_opts)
c = docker_client.wait_success(c)
assert c.state == 'running'
super_c = super_client.reload(c)
hc = super_c.data.dockerInspect['HostConfig']
assert hc['BlkioDeviceReadIOps'] == [{'Path': '/dev/sda', 'Rate': 1000}]
assert hc['BlkioDeviceWriteIOps'] == [{'Path': '/dev/sda', 'Rate': 2000}]
assert hc['BlkioDeviceReadBps'] == [{'Path': '/dev/null', 'Rate': 3000}]
def _check_path(volume, should_exist, client, super_client):
path = _path_to_volume(volume)
c = client. \
create_container(name="volume_check",
imageUuid="docker:ranchertest/volume-test:v0.1.0",
networkMode=None,
environment={'TEST_PATH': path},
command='/opt/tools/check_path_exists.sh',
dataVolumes=[
'/var/lib/docker:/host/var/lib/docker',
'/tmp:/host/tmp'])
c = super_client.wait_success(c)
assert c.state == 'running'
c = super_client.wait_success(c.stop())
assert c.state == 'stopped'
code = c.data.dockerInspect.State.ExitCode
if should_exist:
        # The exit code of the container should be 10 if the path existed
assert code == 10
else:
# And 11 if the path did not exist
assert code == 11
c.remove()
def _path_to_volume(volume):
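    # map the host path in the volume URI to where it is bind-mounted inside
    # the checking container, e.g. file:///mnt/sda1/var/lib/docker/... ->
    # /host/var/lib/docker/...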
path = volume.uri.replace('file://', '')
mounted_path = re.sub('^.*?/var/lib/docker', '/host/var/lib/docker',
path)
if not mounted_path.startswith('/host/var/lib/docker'):
mounted_path = re.sub('^.*?/tmp', '/host/tmp',
path)
return mounted_path
|
{
"content_hash": "91df0783ba3dac15f55698997e7c3425",
"timestamp": "",
"source": "github",
"line_count": 971,
"max_line_length": 79,
"avg_line_length": 35.86508753861998,
"alnum_prop": 0.5767695620961952,
"repo_name": "jimengliu/cattle",
"id": "a6ea10681738e4179af2d50f1c9d2af4ed210ab6",
"size": "34825",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/integration-v1/cattletest/core/test_docker.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "FreeMarker",
"bytes": "15475"
},
{
"name": "Java",
"bytes": "6117577"
},
{
"name": "Python",
"bytes": "809691"
},
{
"name": "Shell",
"bytes": "48232"
}
],
"symlink_target": ""
}
|
import re
from . import TestCase
from server import Router, IndexHandler
class TestRouter(TestCase):
def setUp(self):
"""
Create an instance each time for testing.
"""
self.instance = Router({
'/': IndexHandler(),
'': IndexHandler(),
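            # regex rule: matches both '/test/location' and '/test/locations'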
'/test/location[s]?$': IndexHandler(),
})
def test_creation(self):
"""
Verify creation of the Router works as expected.
"""
router = Router({})
assert router._rules == {}
assert len(self.instance._rules) == 3
assert 'regex' in self.instance._rules['/'].keys()
assert 'app' in self.instance._rules['/'].keys()
assert callable(self.instance._rules['/']['app'])
assert self.instance._rules['/']['regex'].findall('/')
def test_call(self):
"""
Verify the router routes properly on valid URLs.
"""
environ = {'PATH_INFO': '/'}
buffer = {}
def start_response(code, headers):
buffer['code'] = code
buffer['headers'] = headers
result = self.instance.__call__(environ, start_response)
assert type(result) == str
assert buffer['code'] == '200 OK'
assert buffer['headers'] == [("Content-Type", "text/html")]
environ = {'PATH_INFO': 'bad'}
result_404 = self.instance.__call__(environ, start_response)
assert buffer['code'] == '404 File Not Found'
assert buffer['headers'] == [("Content-Type", "text/html")]
assert type(result_404) == str
# RegEx matching
environ = {'PATH_INFO': '/test/location'}
result_regex = self.instance.__call__(environ, start_response)
assert type(result_regex) == str
assert buffer['code'] == '200 OK'
assert buffer['headers'] == [("Content-Type", "text/html")]
# Verify we skip regex checks on ''
environ = {'PATH_INFO': ''}
result_empty_str = self.instance.__call__(environ, start_response)
assert type(result_empty_str) == str
assert buffer['code'] == '200 OK'
assert buffer['headers'] == [("Content-Type", "text/html")]
|
{
"content_hash": "8fd763b99b9177ea18738b8e21ec26ec",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 74,
"avg_line_length": 33.676923076923075,
"alnum_prop": 0.5495660118775697,
"repo_name": "RHInception/talook",
"id": "1b8afe4993af42ac7ab910c928d3bc4e5df626d7",
"size": "2190",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_router.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "314"
},
{
"name": "JavaScript",
"bytes": "4450"
},
{
"name": "Python",
"bytes": "34823"
},
{
"name": "Shell",
"bytes": "1531"
}
],
"symlink_target": ""
}
|
import os
from os.path import join
import subprocess
from os.path import join
import numpy as np
import pyopencl as cl
with open('build/bin/cocl', 'r') as f:
for line in f:
if line.strip().startswith('export CLANG_HOME='):
CLANG_HOME = line.strip().split('=')[1]
# CLANG_HOME = f.read().split('\n')
clang_path = join(CLANG_HOME, 'bin', 'clang++')
cocl_path = 'build/bin/cocl-internal'
# opt_level = 0
# opt_passes = ['-mem2reg', '-inline', '-instcombine']
def run_process(cmdline_list, cwd=None, env=None):
print('running [%s]' % ' '.join(cmdline_list))
fout = open('/tmp/pout.txt', 'w')
res = subprocess.run(cmdline_list, stdout=fout, stderr=subprocess.STDOUT, cwd=cwd, env=env)
fout.close()
with open('/tmp/pout.txt', 'r') as f:
output = f.read()
print(output)
assert res.returncode == 0
return output
# partial reads/writes seem not to be implemented in the version of pyopencl
# I have; logged an issue at https://github.com/pyopencl/pyopencl/issues/153
def enqueue_write_buffer_ext(cl, queue, mem, hostbuf, device_offset=0, size=None,
wait_for=None, is_blocking=True):
ptr_event = cl.cffi_cl._ffi.new('clobj_t*')
c_buf, actual_size, c_ref = cl.cffi_cl._c_buffer_from_obj(hostbuf, retain=True)
if size is None:
size = actual_size
c_wait_for, num_wait_for = cl.cffi_cl._clobj_list(wait_for)
nanny_event = cl.cffi_cl.NannyEvent._handle(hostbuf, c_ref)
    cl.cffi_cl._handle_error(cl.cffi_cl._lib.enqueue_write_buffer(
        ptr_event, queue.ptr, mem.ptr, c_buf, size, device_offset,
        c_wait_for, num_wait_for, bool(is_blocking),
        nanny_event))
return cl.cffi_cl.NannyEvent._create(ptr_event[0])
def enqueue_read_buffer_ext(cl, queue, mem, hostbuf, device_offset=0, size=None,
wait_for=None, is_blocking=True):
ptr_event = cl.cffi_cl._ffi.new('clobj_t*')
c_buf, actual_size, c_ref = cl.cffi_cl._c_buffer_from_obj(hostbuf, retain=True)
if size is None:
size = actual_size
c_wait_for, num_wait_for = cl.cffi_cl._clobj_list(wait_for)
nanny_event = cl.cffi_cl.NannyEvent._handle(hostbuf, c_ref)
    cl.cffi_cl._handle_error(cl.cffi_cl._lib.enqueue_read_buffer(
        ptr_event, queue.ptr, mem.ptr, c_buf, size, device_offset,
        c_wait_for, num_wait_for, bool(is_blocking),
        nanny_event))
return cl.cffi_cl.NannyEvent._create(ptr_event[0])
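# usage sketch with the helpers above: enqueue_write_buffer_ext(cl, queue,
# mem, hostbuf, device_offset=16, size=64) copies 64 bytes of hostbuf into
# the device buffer starting at byte offset 16; enqueue_read_buffer_ext is
# the mirror image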
# def cocl_options
# def cocl_options():
# options = []
# # if os.environ.get('COCL_BRANCHES_AS_SWITCH', '0') != '0':
# # options.append('--branches_as_switch')
# # if os.environ.get('COCL_RUN_TRANSFORMS', '0') != '0':
# # options.append('--run_transforms')
# options = os.environ.get('COCL_OPTIONS', '').split()
# print('options', options)
# return options
def offset_type(offset):
if os.environ.get('COCL_OFFSETS_32BIT', None) == '1':
print('using 32bit offsets')
return np.uint32(offset)
return np.int64(offset)
def mangle(name, param_types):
mangled = '_Z%s%s' % (len(name), name)
for param in param_types:
if param.replace(' ', '') == 'float*':
mangled += 'Pf'
elif param.replace(' ', '') == 'double*':
mangled += 'Pd'
elif param.replace(' ', '') == 'int*':
mangled += 'Pi'
elif param.replace(' ', '') == 'int':
mangled += 'i'
elif param.replace(' ', '') == 'long':
mangled += 'l'
elif param.replace(' ', '') == 'float':
mangled += 'f'
elif param.endswith('*'):
# assume pointer to struct
param = param.replace(' ', '').replace('*', '')
mangled += 'P%s%s' % (len(param), param)
else:
raise Exception('not implemented %s' % param)
return mangled
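# example: mangle('sum', ['float*', 'int']) returns '_Z3sumPfi', the
# Itanium C++ ABI mangling of sum(float*, int)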
def compile_code(cl, context, kernelSource, kernelName, num_clmems):
for file in os.listdir('/tmp'):
if file.startswith('testprog'):
os.unlink('/tmp/%s' % file)
with open('/tmp/testprog.cu', 'w') as f:
f.write(kernelSource)
# args = get_cl_generation_options()
# if not branching_transformations:
# args.append('--no_branching_transforms')
clmemIndexes = ','.join([str(i) for i in range(num_clmems)])
env = os.environ
env['COCL_BIN'] = 'build'
env['COCL_LIB'] = 'build'
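    # step 1: cocl compiles the CUDA source into device-side LLVM IR at
    # /tmp/testprog-device.ll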
run_process([
'bash',
cocl_path,
'-c',
'/tmp/testprog.cu'
],
env=env)
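    # step 2: ir-to-opencl lowers that IR to OpenCL C for the named kernel,
    # writing /tmp/testprog-device.cl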
run_process([
'build/ir-to-opencl',
'--inputfile', '/tmp/testprog-device.ll',
'--outputfile', '/tmp/testprog-device.cl',
'--kernelname', kernelName,
'--cmem-indexes', clmemIndexes,
'--add_ir_to_cl'
])
with open('/tmp/testprog-device.cl', 'r') as f:
cl_sourcecode = f.read()
prog = cl.Program(context, cl_sourcecode).build()
return prog
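# compile_code_v2/v3 below run the same cocl + ir-to-opencl pipeline but also
# return the generated OpenCL source (and, for v3, the built kernel)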
def compile_code_v2(cl, context, kernelSource, kernelName, num_clmems):
"""
returns dict
"""
for file in os.listdir('/tmp'):
if file.startswith('testprog'):
os.unlink('/tmp/%s' % file)
with open('/tmp/testprog.cu', 'w') as f:
f.write(kernelSource)
clmemIndexes = ','.join([str(i) for i in range(num_clmems)])
env = os.environ
env['COCL_BIN'] = 'build'
env['COCL_LIB'] = 'build'
run_process([
'bash',
cocl_path,
'-c',
'/tmp/testprog.cu'
], env=env)
run_process([
'build/ir-to-opencl',
'--inputfile', '/tmp/testprog-device.ll',
'--outputfile', '/tmp/testprog-device.cl',
'--kernelname', kernelName,
'--cmem-indexes', clmemIndexes,
'--add_ir_to_cl'
])
with open('/tmp/testprog-device.cl', 'r') as f:
cl_sourcecode = f.read()
prog = cl.Program(context, cl_sourcecode).build()
return {'prog': prog, 'cl_sourcecode': cl_sourcecode}
def compile_code_v3(cl, context, kernelSource, kernelName, num_clmems):
"""
returns dict
"""
for file in os.listdir('/tmp'):
if file.startswith('testprog'):
os.unlink('/tmp/%s' % file)
with open('/tmp/testprog.cu', 'w') as f:
f.write(kernelSource)
clmemIndexes = ','.join([str(i) for i in range(num_clmems)])
env = os.environ
env['COCL_BIN'] = 'build'
env['COCL_LIB'] = 'build'
run_process([
'bash',
cocl_path,
'-c',
'/tmp/testprog.cu'
], env=env)
run_process([
'build/ir-to-opencl',
'--inputfile', '/tmp/testprog-device.ll',
'--outputfile', '/tmp/testprog-device.cl',
'--kernelname', kernelName,
'--cmem-indexes', clmemIndexes,
'--add_ir_to_cl'
])
with open('/tmp/testprog-device.cl', 'r') as f:
cl_sourcecode = f.read()
prog = cl.Program(context, cl_sourcecode).build()
    kernel = getattr(prog, kernelName)
return {'kernel': kernel, 'cl_sourcecode': cl_sourcecode}
def ll_to_cl(ll_sourcecode, kernelName, num_clmems):
with open('/tmp/testprog-device.ll', 'w') as f:
f.write(ll_sourcecode)
clmemIndexes = ','.join([str(i) for i in range(num_clmems)])
run_process([
'build/ir-to-opencl',
'--inputfile', '/tmp/testprog-device.ll',
'--outputfile', '/tmp/testprog-device.cl',
'--kernelname', kernelName,
'--cmem-indexes', clmemIndexes,
'--add_ir_to_cl'
])
with open('/tmp/testprog-device.cl', 'r') as f:
cl_sourcecode = f.read()
return cl_sourcecode
def cu_to_ll(cu_sourcecode):
for file in os.listdir('/tmp'):
if file.startswith('testprog'):
os.unlink('/tmp/%s' % file)
with open('/tmp/testprog.cu', 'w') as f:
f.write(cu_sourcecode)
env = os.environ
env['COCL_BIN'] = 'build'
env['COCL_LIB'] = 'build'
run_process([
'bash',
cocl_path,
'-c',
'/tmp/testprog.cu'
], env=env)
with open('/tmp/testprog-device.ll', 'r') as f:
ll_sourcecode = f.read()
return ll_sourcecode
def cu_to_devicell_explicit_opt(cu_sourcecode, opt=0):
for file in os.listdir('/tmp'):
if file.startswith('testprog'):
os.unlink('/tmp/%s' % file)
with open('/tmp/testprog.cu', 'w') as f:
f.write(cu_sourcecode)
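    # call clang directly (instead of going through cocl) so the
    # optimization level can be pinned with -O<opt>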
print(subprocess.check_output([
clang_path,
'-x', 'cuda',
'-include', 'include/cocl/cocl_attributes.h',
'--cuda-device-only',
'-nocudainc',
'-nocudalib',
'-emit-llvm',
'/tmp/testprog.cu',
'-S',
'-O%s' % opt,
'-o', '/tmp/testprog.ll'
]).decode('utf-8'))
with open('/tmp/testprog.ll', 'r') as f:
return f.read()
def cu_to_devicell_noopt(cu_sourcecode):
return cu_to_devicell_explicit_opt(cu_sourcecode, opt=0)
def cu_to_cl(cu_sourcecode, kernelName, num_clmems):
for file in os.listdir('/tmp'):
if file.startswith('testprog'):
os.unlink('/tmp/%s' % file)
with open('/tmp/testprog.cu', 'w') as f:
f.write(cu_sourcecode)
clmemIndexes = ','.join([str(i) for i in range(num_clmems)])
env = os.environ
env['COCL_BIN'] = 'build'
env['COCL_LIB'] = 'build'
run_process([
'bash',
cocl_path,
'-c',
'/tmp/testprog.cu'
], env=env)
run_process([
'build/ir-to-opencl',
'--inputfile', '/tmp/testprog-device.ll',
'--outputfile', '/tmp/testprog-device.cl',
'--kernelname', kernelName,
'--cmem-indexes', clmemIndexes,
'--add_ir_to_cl'
])
with open('/tmp/testprog-device.cl', 'r') as f:
cl_sourcecode = f.read()
return cl_sourcecode
def build_kernel(context, cl_sourcecode, kernelName):
print('building sourcecode')
print('cl_sourcecode', cl_sourcecode)
prog = cl.Program(context, cl_sourcecode).build()
print('built prog')
    kernel = getattr(prog, kernelName)
return kernel
|
{
"content_hash": "363a50ec235475263f50a9c1538bd448",
"timestamp": "",
"source": "github",
"line_count": 326,
"max_line_length": 104,
"avg_line_length": 30.671779141104295,
"alnum_prop": 0.5681568156815682,
"repo_name": "hughperkins/cuda-on-cl",
"id": "c1fab6c2539415212e1b012cb1e88069b5cea985",
"size": "9999",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/test_common.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "38056"
},
{
"name": "C++",
"bytes": "790966"
},
{
"name": "CMake",
"bytes": "18720"
},
{
"name": "Cuda",
"bytes": "77281"
},
{
"name": "LLVM",
"bytes": "86269"
},
{
"name": "Objective-C",
"bytes": "3025"
},
{
"name": "Python",
"bytes": "78860"
},
{
"name": "Shell",
"bytes": "15033"
}
],
"symlink_target": ""
}
|
import datetime
VERSION=({{ version.major }}, {{ version.minor }}, {{ version.patch }})
COMPILATION_DATE = datetime.datetime.fromtimestamp({{ compile_timestamp }})
|
{
"content_hash": "f815ae63d8f33b7a956beb438cbf017f",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 75,
"avg_line_length": 41,
"alnum_prop": 0.7134146341463414,
"repo_name": "NazarethCollege/wharf",
"id": "1414a8c4465e637b5955e7cbdef3ccffb53b5a00",
"size": "164",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/wharf/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "29700"
},
{
"name": "Shell",
"bytes": "454"
},
{
"name": "Smarty",
"bytes": "302"
}
],
"symlink_target": ""
}
|
"""Tests for the management commands `send_message_digest`."""
from django.core import mail
from django.core.management import call_command
from django.test import TestCase
from django.utils.timezone import now, timedelta
from mixer.backend.django import mixer
class SendMessageDigestTestCase(TestCase):
longMessage = True
    def test_send_message_digest(self):
two_days_ago = now() - timedelta(days=2)
user = mixer.blend('auth.User')
conversation = mixer.blend('conversation.Conversation')
conversation.users.add(user)
conversation.unread_by.add(user)
call_command('send_message_digest')
self.assertEqual(len(mail.outbox), 0, msg=(
'No digest should have been sent.'))
conversation.read_by_all = two_days_ago
conversation.save()
call_command('send_message_digest')
self.assertEqual(len(mail.outbox), 1, msg=(
'One digest should have been sent.'))
with self.settings(CONVERSATION_ENABLE_DIGEST=False):
call_command('send_message_digest')
self.assertEqual(len(mail.outbox), 1, msg=(
'No new digest should have been sent.'))
|
{
"content_hash": "a677362e2afccb71f521eb8f1623cf05",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 63,
"avg_line_length": 38.67741935483871,
"alnum_prop": 0.6663886572143453,
"repo_name": "bitmazk/django-conversation",
"id": "ef69feb2ec43cb7848010443c26973623f9948c0",
"size": "1199",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "conversation/tests/management_tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "168"
},
{
"name": "HTML",
"bytes": "6185"
},
{
"name": "JavaScript",
"bytes": "355"
},
{
"name": "Python",
"bytes": "42379"
}
],
"symlink_target": ""
}
|
import os
import subprocess
import tempfile
import unittest
import lightgbm as lgb
import numpy as np
from sklearn.datasets import load_breast_cancer, dump_svmlight_file
from sklearn.model_selection import train_test_split
class TestBasic(unittest.TestCase):
def test(self):
        X_train, X_test, y_train, y_test = train_test_split(*load_breast_cancer(return_X_y=True), test_size=0.1, random_state=2)
train_data = lgb.Dataset(X_train, max_bin=255, label=y_train)
valid_data = train_data.create_valid(X_test, label=y_test)
params = {
"objective": "binary",
"metric": "auc",
"min_data": 10,
"num_leaves": 15,
"verbose": -1,
"num_threads": 1
}
bst = lgb.Booster(params, train_data)
bst.add_valid(valid_data, "valid_1")
for i in range(30):
bst.update()
if i % 10 == 0:
print(bst.eval_train(), bst.eval_valid())
bst.save_model("model.txt")
pred_from_matr = bst.predict(X_test)
with tempfile.NamedTemporaryFile() as f:
tname = f.name
with open(tname, "w+b") as f:
dump_svmlight_file(X_test, y_test, f)
pred_from_file = bst.predict(tname)
os.remove(tname)
self.assertEqual(len(pred_from_matr), len(pred_from_file))
for preds in zip(pred_from_matr, pred_from_file):
self.assertAlmostEqual(*preds, places=15)
# check saved model persistence
bst = lgb.Booster(params, model_file="model.txt")
pred_from_model_file = bst.predict(X_test)
self.assertEqual(len(pred_from_matr), len(pred_from_model_file))
for preds in zip(pred_from_matr, pred_from_model_file):
# we need to check the consistency of model file here, so test for exact equal
self.assertEqual(*preds)
# check early stopping is working. Make it stop very early, so the scores should be very close to zero
pred_parameter = {"pred_early_stop": True, "pred_early_stop_freq": 5, "pred_early_stop_margin": 1.5}
pred_early_stopping = bst.predict(X_test, pred_parameter=pred_parameter)
self.assertEqual(len(pred_from_matr), len(pred_early_stopping))
for preds in zip(pred_early_stopping, pred_from_matr):
# scores likely to be different, but prediction should still be the same
self.assertEqual(preds[0] > 0, preds[1] > 0)
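        # For comparison (a sketch, not an assertion): the same 30 boosting
        # rounds can be run through the higher-level training API, which
        # wraps the manual Booster/update loop used above:
        #
        #   bst2 = lgb.train(params, train_data, num_boost_round=30,
        #                    valid_sets=[valid_data], valid_names=['valid_1'])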
|
{
"content_hash": "57ecf4cdfd42538f4de217338d79f208",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 117,
"avg_line_length": 41.06666666666667,
"alnum_prop": 0.6168831168831169,
"repo_name": "olofer/LightGBM",
"id": "45dce45f8371538fad78e05ba15a515b79eb368b",
"size": "2500",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/python_package_test/test_basic.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "112901"
},
{
"name": "C++",
"bytes": "896110"
},
{
"name": "CMake",
"bytes": "4661"
},
{
"name": "Python",
"bytes": "226843"
},
{
"name": "R",
"bytes": "205716"
},
{
"name": "Shell",
"bytes": "6143"
}
],
"symlink_target": ""
}
|
import pygame
from pygame.sprite import Sprite
from pygame.sprite import Group
from block import Block
class Explosion(Sprite):
"""A class representing an explosion."""
def __init__(self, settings, screen, x, y, ship_shot, invader_shot, ship, invader, mystery):
super(Explosion, self).__init__()
self.settings = settings
self.screen = screen
self.x = x
self.y = y
self.set_image(ship_shot, invader_shot, ship, invader, mystery)
# Set time of explosion
self.timer = pygame.time.get_ticks()
# TODO: create other kinds of explosions
def set_image(self, ship_shot, invader_shot, ship, invader, mystery):
if ship_shot:
self.image = self.create_ship_shot_explosion(self.settings,
self.screen, self.x, self.y)
elif invader_shot:
self.image = self.create_invader_shot_explosion(self.settings,
self.screen, self.x, self.y)
elif invader:
self.image = pygame.image.load("images/explosions/invader_explosion.png")
def create_ship_shot_explosion(self, settings, screen, x, y):
"""Create and return explosion "sprite" as a group of Blocks. Top-left point of group is set to x, y."""
explode_blocks = Group()
for row in range(settings.player_shot_explode_rows):
for column in range(settings.player_shot_explode_columns):
if settings.player_shot_explode_array[row][column] == 'b':
new_block = Block(settings, screen, settings.white,
x + (column * settings.block_size),
y + (row * settings.block_size))
explode_blocks.add(new_block)
return explode_blocks
def create_invader_shot_explosion(self, settings, screen, x, y):
"""Create and return explosion "sprite" as a group of Blocks. Top-left point of group is set to x, y."""
explode_blocks = Group()
for row in range(settings.invader_shot_explode_rows):
for column in range(settings.invader_shot_explode_columns):
if settings.invader_shot_explode_array[row][column] == 'b':
new_block = Block(settings, screen, settings.white,
x + (column * settings.block_size),
y + (row * settings.block_size))
explode_blocks.add(new_block)
return explode_blocks
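# Usage sketch (hypothetical objects: `settings` must expose the attributes
# referenced above, such as block_size, white and the *_explode_rows/columns/
# array tables; `screen` is a pygame display surface):
#
#   explosion = Explosion(settings, screen, x=100, y=50, ship_shot=True,
#                         invader_shot=False, ship=False, invader=False,
#                         mystery=False)
#   explosions = Group()
#   explosions.add(explosion)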
|
{
"content_hash": "db94b58e1ebe2bc4db880b7a70c3a152",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 106,
"avg_line_length": 35.46666666666667,
"alnum_prop": 0.6973684210526315,
"repo_name": "hlynurstef/Space_Invaders",
"id": "c26622af9161668bd4e99777b04b75971220ad78",
"size": "2128",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "explosion.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "42527"
}
],
"symlink_target": ""
}
|
"""Tests for saving/loading function for keras Model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import shutil
from absl.testing import parameterized
import numpy as np
from tensorflow.contrib.saved_model.python.saved_model import keras_saved_model
from tensorflow.python import keras
from tensorflow.python.client import session
from tensorflow.python.eager import context
from tensorflow.python.estimator import model_fn as model_fn_lib
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.keras.engine import training
from tensorflow.python.keras.utils import tf_utils
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import test
from tensorflow.python.saved_model import constants
from tensorflow.python.saved_model import loader_impl
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.training import training as training_module
class TestModelSavingandLoading(test.TestCase):
def _save_model_dir(self, dirname='saved_model'):
temp_dir = self.get_temp_dir()
self.addCleanup(shutil.rmtree, temp_dir, ignore_errors=True)
return os.path.join(temp_dir, dirname)
def test_saving_sequential_model(self):
with self.cached_session():
model = keras.models.Sequential()
model.add(keras.layers.Dense(2, input_shape=(3,)))
model.add(keras.layers.RepeatVector(3))
model.add(keras.layers.TimeDistributed(keras.layers.Dense(3)))
model.compile(
loss=keras.losses.MSE,
optimizer=keras.optimizers.RMSprop(lr=0.0001),
metrics=[keras.metrics.categorical_accuracy],
sample_weight_mode='temporal')
x = np.random.random((1, 3))
y = np.random.random((1, 3, 3))
model.train_on_batch(x, y)
ref_y = model.predict(x)
temp_saved_model = self._save_model_dir()
output_path = keras_saved_model.save_keras_model(model, temp_saved_model)
loaded_model = keras_saved_model.load_keras_model(output_path)
y = loaded_model.predict(x)
self.assertAllClose(ref_y, y, atol=1e-05)
@test_util.run_in_graph_and_eager_modes
def test_saving_sequential_model_without_compile(self):
with self.cached_session():
model = keras.models.Sequential()
model.add(keras.layers.Dense(2, input_shape=(3,)))
model.add(keras.layers.RepeatVector(3))
model.add(keras.layers.TimeDistributed(keras.layers.Dense(3)))
x = np.random.random((1, 3))
ref_y = model.predict(x)
temp_saved_model = self._save_model_dir()
output_path = keras_saved_model.save_keras_model(model, temp_saved_model)
loaded_model = keras_saved_model.load_keras_model(output_path)
y = loaded_model.predict(x)
self.assertAllClose(ref_y, y, atol=1e-05)
def test_saving_functional_model(self):
with self.cached_session():
inputs = keras.layers.Input(shape=(3,))
x = keras.layers.Dense(2)(inputs)
output = keras.layers.Dense(3)(x)
model = keras.models.Model(inputs, output)
model.compile(
loss=keras.losses.MSE,
optimizer=keras.optimizers.RMSprop(lr=0.0001),
metrics=[keras.metrics.categorical_accuracy])
x = np.random.random((1, 3))
y = np.random.random((1, 3))
model.train_on_batch(x, y)
ref_y = model.predict(x)
temp_saved_model = self._save_model_dir()
output_path = keras_saved_model.save_keras_model(model, temp_saved_model)
loaded_model = keras_saved_model.load_keras_model(output_path)
y = loaded_model.predict(x)
self.assertAllClose(ref_y, y, atol=1e-05)
@test_util.run_in_graph_and_eager_modes
def test_saving_functional_model_without_compile(self):
with self.cached_session():
inputs = keras.layers.Input(shape=(3,))
x = keras.layers.Dense(2)(inputs)
output = keras.layers.Dense(3)(x)
model = keras.models.Model(inputs, output)
x = np.random.random((1, 3))
y = np.random.random((1, 3))
ref_y = model.predict(x)
temp_saved_model = self._save_model_dir()
output_path = keras_saved_model.save_keras_model(model, temp_saved_model)
loaded_model = keras_saved_model.load_keras_model(output_path)
y = loaded_model.predict(x)
self.assertAllClose(ref_y, y, atol=1e-05)
@test_util.run_in_graph_and_eager_modes
def test_saving_with_tf_optimizer(self):
with self.cached_session():
model = keras.models.Sequential()
model.add(keras.layers.Dense(2, input_shape=(3,)))
model.add(keras.layers.Dense(3))
model.compile(
loss='mse',
optimizer=training_module.RMSPropOptimizer(0.1),
metrics=['acc'])
x = np.random.random((1, 3))
y = np.random.random((1, 3))
model.train_on_batch(x, y)
model.train_on_batch(x, y)
ref_y = model.predict(x)
temp_saved_model = self._save_model_dir()
output_path = keras_saved_model.save_keras_model(model, temp_saved_model)
loaded_model = keras_saved_model.load_keras_model(output_path)
loaded_model.compile(
loss='mse',
optimizer=training_module.RMSPropOptimizer(0.1),
metrics=['acc'])
y = loaded_model.predict(x)
self.assertAllClose(ref_y, y, atol=1e-05)
# test that new updates are the same with both models
x = np.random.random((1, 3))
y = np.random.random((1, 3))
ref_loss = model.train_on_batch(x, y)
loss = loaded_model.train_on_batch(x, y)
self.assertAllClose(ref_loss, loss, atol=1e-05)
ref_y = model.predict(x)
y = loaded_model.predict(x)
self.assertAllClose(ref_y, y, atol=1e-05)
# test saving/loading again
temp_saved_model2 = self._save_model_dir('saved_model_2')
output_path2 = keras_saved_model.save_keras_model(
loaded_model, temp_saved_model2)
loaded_model = keras_saved_model.load_keras_model(output_path2)
y = loaded_model.predict(x)
self.assertAllClose(ref_y, y, atol=1e-05)
def test_saving_subclassed_model_raise_error(self):
    # For now, saving a subclassed model raises an error. This restriction
    # should be lifted once loading from SavedModel.pb is supported.
class SubclassedModel(training.Model):
def __init__(self):
super(SubclassedModel, self).__init__()
self.layer1 = keras.layers.Dense(3)
self.layer2 = keras.layers.Dense(1)
def call(self, inp):
return self.layer2(self.layer1(inp))
model = SubclassedModel()
temp_saved_model = self._save_model_dir()
with self.assertRaises(NotImplementedError):
keras_saved_model.save_keras_model(model, temp_saved_model)
class LayerWithLearningPhase(keras.engine.base_layer.Layer):
def call(self, x):
phase = keras.backend.learning_phase()
output = tf_utils.smart_cond(
phase, lambda: x * 0, lambda: array_ops.identity(x))
if not context.executing_eagerly():
output._uses_learning_phase = True # pylint: disable=protected-access
return output
def compute_output_shape(self, input_shape):
return input_shape
def functional_model(uses_learning_phase):
inputs = keras.layers.Input(shape=(3,))
x = keras.layers.Dense(2)(inputs)
x = keras.layers.Dense(3)(x)
if uses_learning_phase:
x = LayerWithLearningPhase()(x)
return keras.models.Model(inputs, x)
def sequential_model(uses_learning_phase):
model = keras.models.Sequential()
model.add(keras.layers.Dense(2, input_shape=(3,)))
model.add(keras.layers.Dense(3))
if uses_learning_phase:
model.add(LayerWithLearningPhase())
return model
def load_model(sess, path, mode):
tags = model_fn_lib.EXPORT_TAG_MAP[mode]
sig_def_key = (signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY
if mode == model_fn_lib.ModeKeys.PREDICT else mode)
meta_graph_def = loader_impl.load(sess, tags, path)
inputs = {
k: sess.graph.get_tensor_by_name(v.name)
for k, v in meta_graph_def.signature_def[sig_def_key].inputs.items()}
outputs = {
k: sess.graph.get_tensor_by_name(v.name)
for k, v in meta_graph_def.signature_def[sig_def_key].outputs.items()}
return inputs, outputs
@test_util.run_all_in_graph_and_eager_modes
class TestModelSavedModelExport(test.TestCase, parameterized.TestCase):
def _save_model_dir(self, dirname='saved_model'):
temp_dir = self.get_temp_dir()
self.addCleanup(shutil.rmtree, temp_dir, ignore_errors=True)
return os.path.join(temp_dir, dirname)
@parameterized.parameters(
(functional_model, True, training_module.AdadeltaOptimizer(), True),
(functional_model, True, training_module.AdadeltaOptimizer(), False),
(functional_model, False, None, False),
(sequential_model, True, training_module.AdadeltaOptimizer(), True),
(sequential_model, True, training_module.AdadeltaOptimizer(), False),
(sequential_model, False, None, False))
def testSaveAndLoadSavedModelExport(
self, model_builder, uses_learning_phase, optimizer, train_before_export):
saved_model_path = self._save_model_dir()
with self.session(graph=ops.Graph()):
input_arr = np.random.random((1, 3))
target_arr = np.random.random((1, 3))
model = model_builder(uses_learning_phase)
if optimizer is not None:
model.compile(
loss='mse',
optimizer=optimizer,
metrics=['mae'])
if train_before_export:
model.train_on_batch(input_arr, target_arr)
ref_loss, ref_mae = model.evaluate(input_arr, target_arr)
ref_predict = model.predict(input_arr)
# Export SavedModel
output_path = keras_saved_model.save_keras_model(model, saved_model_path)
input_name = model.input_names[0]
output_name = model.output_names[0]
target_name = output_name + '_target'
# Load predict graph, and test predictions
with session.Session(graph=ops.Graph()) as sess:
inputs, outputs = load_model(sess, output_path,
model_fn_lib.ModeKeys.PREDICT)
predictions = sess.run(outputs[output_name],
{inputs[input_name]: input_arr})
self.assertAllClose(ref_predict, predictions, atol=1e-05)
if optimizer:
# Load eval graph, and test predictions, loss and metric values
with session.Session(graph=ops.Graph()) as sess:
inputs, outputs = load_model(sess, output_path,
model_fn_lib.ModeKeys.EVAL)
eval_results = sess.run(outputs, {inputs[input_name]: input_arr,
inputs[target_name]: target_arr})
self.assertEqual(int(train_before_export),
sess.run(training_module.get_global_step()))
self.assertAllClose(ref_loss, eval_results['loss'], atol=1e-05)
self.assertAllClose(
ref_mae, eval_results['metrics/mae/update_op'], atol=1e-05)
self.assertAllClose(
ref_predict, eval_results['predictions/' + output_name], atol=1e-05)
# Load train graph, and check for the train op, and prediction values
with session.Session(graph=ops.Graph()) as sess:
inputs, outputs = load_model(sess, output_path,
model_fn_lib.ModeKeys.TRAIN)
self.assertEqual(int(train_before_export),
sess.run(training_module.get_global_step()))
self.assertIn('loss', outputs)
self.assertIn('metrics/mae/update_op', outputs)
self.assertIn('metrics/mae/value', outputs)
self.assertIn('predictions/' + output_name, outputs)
# Train for a step
train_op = ops.get_collection(constants.TRAIN_OP_KEY)
train_outputs, _ = sess.run(
[outputs, train_op], {inputs[input_name]: input_arr,
inputs[target_name]: target_arr})
self.assertEqual(int(train_before_export) + 1,
sess.run(training_module.get_global_step()))
if uses_learning_phase:
self.assertAllClose(
[[0, 0, 0]], train_outputs['predictions/' + output_name],
atol=1e-05)
else:
self.assertNotAllClose(
[[0, 0, 0]], train_outputs['predictions/' + output_name],
atol=1e-05)
def testSaveAndLoadSavedModelWithCustomObject(self):
saved_model_path = self._save_model_dir()
with session.Session(graph=ops.Graph()) as sess:
def relu6(x):
return keras.backend.relu(x, max_value=6)
inputs = keras.layers.Input(shape=(1,))
outputs = keras.layers.Activation(relu6)(inputs)
model = keras.models.Model(inputs, outputs)
output_path = keras_saved_model.save_keras_model(
model, saved_model_path, custom_objects={'relu6': relu6})
with session.Session(graph=ops.Graph()) as sess:
inputs, outputs = load_model(sess, output_path,
model_fn_lib.ModeKeys.PREDICT)
input_name = model.input_names[0]
output_name = model.output_names[0]
predictions = sess.run(
outputs[output_name], {inputs[input_name]: [[7], [-3], [4]]})
self.assertAllEqual([[6], [0], [4]], predictions)
def testAssertModelCloneSameObjectsIgnoreOptimizer(self):
input_arr = np.random.random((1, 3))
target_arr = np.random.random((1, 3))
model_graph = ops.Graph()
clone_graph = ops.Graph()
# Create two models with the same layers but different optimizers.
with session.Session(graph=model_graph):
inputs = keras.layers.Input(shape=(3,))
x = keras.layers.Dense(2)(inputs)
x = keras.layers.Dense(3)(x)
model = keras.models.Model(inputs, x)
model.compile(loss='mse', optimizer=training_module.AdadeltaOptimizer())
model.train_on_batch(input_arr, target_arr)
with session.Session(graph=clone_graph):
inputs = keras.layers.Input(shape=(3,))
x = keras.layers.Dense(2)(inputs)
x = keras.layers.Dense(3)(x)
clone = keras.models.Model(inputs, x)
clone.compile(loss='mse', optimizer=keras.optimizers.RMSprop(lr=0.0001))
clone.train_on_batch(input_arr, target_arr)
keras_saved_model._assert_same_non_optimizer_objects(
model, model_graph, clone, clone_graph)
def testAssertModelCloneSameObjectsThrowError(self):
input_arr = np.random.random((1, 3))
target_arr = np.random.random((1, 3))
model_graph = ops.Graph()
clone_graph = ops.Graph()
# Create two models with the same layers but different optimizers.
with session.Session(graph=model_graph):
inputs = keras.layers.Input(shape=(3,))
x = keras.layers.Dense(2)(inputs)
x = keras.layers.Dense(3)(x)
model = keras.models.Model(inputs, x)
model.compile(loss='mse', optimizer=training_module.AdadeltaOptimizer())
model.train_on_batch(input_arr, target_arr)
with session.Session(graph=clone_graph):
inputs = keras.layers.Input(shape=(3,))
x = keras.layers.Dense(2)(inputs)
x = keras.layers.Dense(4)(x)
x = keras.layers.Dense(3)(x)
clone = keras.models.Model(inputs, x)
clone.compile(loss='mse', optimizer=keras.optimizers.RMSprop(lr=0.0001))
clone.train_on_batch(input_arr, target_arr)
with self.assertRaisesRegexp(
errors.InternalError, 'Model and clone must use the same variables.'):
keras_saved_model._assert_same_non_optimizer_objects(
model, model_graph, clone, clone_graph)
if __name__ == '__main__':
test.main()
|
{
"content_hash": "46a00124708f163f8ffb8db037c53272",
"timestamp": "",
"source": "github",
"line_count": 415,
"max_line_length": 80,
"avg_line_length": 37.87228915662651,
"alnum_prop": 0.6559139784946236,
"repo_name": "girving/tensorflow",
"id": "060c5045235ced50adf38222a0152a1700a252e8",
"size": "16441",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tensorflow/contrib/saved_model/python/saved_model/keras_saved_model_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "3325"
},
{
"name": "Batchfile",
"bytes": "10132"
},
{
"name": "C",
"bytes": "343258"
},
{
"name": "C#",
"bytes": "8446"
},
{
"name": "C++",
"bytes": "50036869"
},
{
"name": "CMake",
"bytes": "196127"
},
{
"name": "Dockerfile",
"bytes": "36386"
},
{
"name": "Go",
"bytes": "1254086"
},
{
"name": "HTML",
"bytes": "4681865"
},
{
"name": "Java",
"bytes": "867313"
},
{
"name": "Jupyter Notebook",
"bytes": "2604735"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "58787"
},
{
"name": "Objective-C",
"bytes": "15650"
},
{
"name": "Objective-C++",
"bytes": "99243"
},
{
"name": "PHP",
"bytes": "1357"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "PureBasic",
"bytes": "25356"
},
{
"name": "Python",
"bytes": "42041620"
},
{
"name": "Ruby",
"bytes": "553"
},
{
"name": "Shell",
"bytes": "477299"
},
{
"name": "Smarty",
"bytes": "6976"
}
],
"symlink_target": ""
}
|
import factory
from datetime import date, timedelta
from django.contrib.auth.models import User
from core.models import Contacto, Profesional, Persona
class UserFactory(factory.django.DjangoModelFactory):
first_name = factory.Faker('first_name', locale='es')
last_name = factory.Faker('last_name', locale='es')
email = factory.LazyAttribute(lambda o: '%s.%s@example.org' % (o.first_name.lower(), o.last_name.lower()))
# username = factory.LazyAttribute(lambda o: '%s_%s' % (o.first_name.lower(), o.last_name.lower()))
username = factory.Faker("user_name", locale='es')
password = factory.PostGenerationMethodCall('set_password', 'password')
class Meta:
model = User
class ContactoFactory(factory.django.DjangoModelFactory):
nombre = factory.Faker('first_name', locale='es')
apellido = factory.Faker('last_name', locale='es')
class Meta:
model = Contacto
class PersonaFactory(factory.django.DjangoModelFactory):
nombre = factory.Faker('first_name', locale='es')
apellido = factory.Faker('last_name', locale='es')
fecha_nacimiento = factory.Sequence(lambda n: date(2000, 1, 1) + timedelta(days=n))
class Meta:
model = Persona
class ProfesionalFactory(factory.django.DjangoModelFactory):
persona = factory.SubFactory(PersonaFactory)
usuario = factory.SubFactory(UserFactory)
class Meta:
model = Profesional
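# Usage sketch (inside a Django TestCase with a test database available; the
# assertion just illustrates the email pattern defined in UserFactory):
#
#   user = UserFactory()
#   profesional = ProfesionalFactory()
#   assert profesional.usuario.email.endswith('@example.org')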
|
{
"content_hash": "cd1e85eb4936d0716335904263d7aa5a",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 110,
"avg_line_length": 32.97674418604651,
"alnum_prop": 0.7038081805359662,
"repo_name": "mava-ar/sgk",
"id": "154854dc60581c91f8f08cf96f3d07c2912c12f2",
"size": "1418",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/core/factories.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "20411"
},
{
"name": "HTML",
"bytes": "81338"
},
{
"name": "JavaScript",
"bytes": "34107"
},
{
"name": "Python",
"bytes": "197385"
},
{
"name": "Shell",
"bytes": "1349"
}
],
"symlink_target": ""
}
|
import sys
from setuptools import setup, find_packages
needs_pytest = {"pytest", "test"}.intersection(sys.argv)
pytest_runner = ["pytest_runner"] if needs_pytest else []
needs_wheel = {"bdist_wheel"}.intersection(sys.argv)
wheel = ["wheel"] if needs_wheel else []
with open("README.rst", "r") as f:
long_description = f.read()
test_requires = ["pytest>=2.8", "ufoNormalizer>=0.3.2", "xmldiff>=2.2"]
if sys.version_info < (3, 3):
test_requires.append("mock>=2.0.0")
setup(
name="glyphsLib",
use_scm_version={"write_to": "Lib/glyphsLib/_version.py"},
author="James Godfrey-Kittle",
author_email="jamesgk@google.com",
description="A bridge from Glyphs source files (.glyphs) to UFOs",
long_description=long_description,
url="https://github.com/googlei18n/glyphsLib",
license="Apache Software License 2.0",
package_dir={"": "Lib"},
packages=find_packages("Lib"),
package_data={"glyphsLib": ["data/*.xml", "data/GlyphData_LICENSE"]},
entry_points={
"console_scripts": [
"ufo2glyphs = glyphsLib.cli:_ufo2glyphs_entry_point",
"glyphs2ufo = glyphsLib.cli:_glyphs2ufo_entry_point",
]
},
setup_requires=pytest_runner + wheel + ["setuptools_scm"],
tests_require=test_requires,
install_requires=["fonttools>=3.24.0", "defcon>=0.3.0"],
extras_require={"ufo_normalization": ["ufonormalizer"]},
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Console",
"Environment :: Other Environment",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"License :: OSI Approved :: Apache Software License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Multimedia :: Graphics",
"Topic :: Multimedia :: Graphics :: Graphics Conversion",
"Topic :: Multimedia :: Graphics :: Editors :: Vector-Based",
],
)
|
{
"content_hash": "7873972c01288310159a304469b38c72",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 73,
"avg_line_length": 38.236363636363635,
"alnum_prop": 0.6286257727056586,
"repo_name": "googlei18n/glyphsLib",
"id": "08be0e502951581ee2497805085dbac888e07da2",
"size": "2700",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "715643"
}
],
"symlink_target": ""
}
|
import math
import scipy.ndimage
import numpy as np
from PIL import Image
def MakeGaussian(mean, variance):
''' Returns a callable which takes a 2-tuple (an x,y coordinate point), and
returns the value of a gaussian function with the given parameters at the
given point.
Args:
mean:
2-tuple; mu parameter to the gaussian function. Is the "center" of the
mound.
variance:
      2-tuple; per-axis standard deviation (sigma) of the gaussian function.
'''
  # This is 2 * sigma**2 (twice the variance); precomputed so the inner loop
  # avoids recomputing constants.
double_stddev = [2.*float(v)**2 for v in variance]
mean = [float(v) for v in mean] # work in floats
def Gaussian(point):
''' Returns the value of a 2d gaussian at the given point. '''
return math.exp(-1. *
sum((point[idx] - mean[idx])**2 / double_stddev[idx] for idx in [0,1]))
return Gaussian
def MatrixFromFunction(size, scale, func):
  ''' Returns a 2d numpy matrix representing a discretization of the given
function. (0,0) on the coordinate axis is defined as the center of the matrix.
Args:
size:
2-tuple; the size of the result matrix
scale:
2-tuple; the 'length' of the coordinate axis along each dimension
projected onto the result matrix.
'''
scale = [float(v) for v in scale] # work in floats
def MatrixGenerator():
# row/col corresponding to the (0,0) point in function space
half_size = [(size[0] - 1) / 2., (size[1] - 1) / 2.]
scale_factor = [scale[0] / float(size[0]), scale[1] / float(size[1])]
for row in xrange(size[0]):
for col in xrange(size[1]):
# Transform a row,col into an x,y coordinate in function space
point = [
(row - half_size[0]) * scale_factor[0],
(col - half_size[1]) * scale_factor[1],
]
yield func(point)
data = np.fromiter(MatrixGenerator(), float, size[0] * size[1])
return np.reshape(data, size)
def ConvolveImg(img_in, convolve_mat):
  ''' Convolves the given image with the given convolution matrix. The input
  matrix should be 3-dimensional, with the depth dimension equal to the
  number of channels in the image. This way, each depth slice of `convolve_mat`
  is used to convolve the corresponding channel of the image.
Args:
img_in: PIL image to process
convolve_mat: numpy array to convolve it with
Returns:
A new PIL.Image corresponding to the convolved image.
'''
def NormalizeTo(arr, new_max):
''' Normalizes a numpy array to the range [0, new_max]. Modifies the array
in-place. Returns a reference to the (modified) input array.
'''
arr -= arr.min()
mx = arr.max()
if mx != 0:
arr /= mx
arr *= new_max
return arr
def Convolve(img, mat):
    ''' Convolves the two arrays. The return value's size is the same as `img`.
'''
convolve_func = scipy.ndimage.convolve
return np.uint8(NormalizeTo(convolve_func(img, np.float64(mat)), 255.0))
img_in_data = np.float64(np.array(img_in))
if len(img_in_data.shape) > 2:
    # Image has multiple channels. Convolve each separately and then join them
# into a single image
img_out_data = np.dstack([
Convolve(img_in_data[...,idx], convolve_mat[...,idx])
for idx in xrange(img_in_data.shape[2])
])
else:
# TODO: convolve_mat has 3 dimensions, but the depth dimension is unneeded
# when the input only has one channel. For now, I'll just use a slice to
# remove the extra dimension.
img_out_data = Convolve(img_in_data, convolve_mat[...,0])
return Image.fromarray(img_out_data, img_in.mode)
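if __name__ == '__main__':
    # Demo sketch: blur an RGB image with a 9x9 gaussian kernel. The file
    # paths are illustrative; the 2d kernel is replicated across the three
    # RGB channels, since ConvolveImg expects one kernel slice per channel.
    gauss = MatrixFromFunction((9, 9), (3.0, 3.0),
                               MakeGaussian((0.0, 0.0), (1.0, 1.0)))
    kernel = np.dstack([gauss] * 3)
    blurred = ConvolveImg(Image.open('input.png'), kernel)
    blurred.save('output.png')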
|
{
"content_hash": "8e7be158e7300a4f6bbeb2ab6a1725a5",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 80,
"avg_line_length": 35.64356435643565,
"alnum_prop": 0.6547222222222222,
"repo_name": "cookyt/teastain",
"id": "f1910e6b789d1ddeeb1a3c9f747be38135493e95",
"size": "3600",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "teastain/convolution.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "6467"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
import sys
print(*xrange(1, int(raw_input().strip()) + 1), sep='')
|
{
"content_hash": "1da6de8b23ef7545099bded796d48c78",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 51,
"avg_line_length": 20.6,
"alnum_prop": 0.6796116504854369,
"repo_name": "nabin-info/hackerrank.com",
"id": "4089f74e6e0b476650e727e1a89ea4f1773d3029",
"size": "122",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python-print.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1609"
},
{
"name": "Python",
"bytes": "25223"
}
],
"symlink_target": ""
}
|
"""
Module containing Axes3D, an object which can plot 3D objects on a
2D matplotlib figure.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import math
from matplotlib.externals import six
from matplotlib.externals.six.moves import map, xrange, zip, reduce
import warnings
from operator import itemgetter
import matplotlib.axes as maxes
from matplotlib.axes import Axes, rcParams
from matplotlib import cbook
import matplotlib.transforms as mtransforms
from matplotlib.transforms import Bbox
import matplotlib.collections as mcoll
from matplotlib import docstring
import matplotlib.scale as mscale
from matplotlib.tri.triangulation import Triangulation
import numpy as np
from matplotlib.colors import Normalize, colorConverter, LightSource
from . import art3d
from . import proj3d
from . import axis3d
def unit_bbox():
box = Bbox(np.array([[0, 0], [1, 1]]))
return box
class Axes3D(Axes):
"""
3D axes object.
"""
name = '3d'
_shared_z_axes = cbook.Grouper()
def __init__(self, fig, rect=None, *args, **kwargs):
'''
Build an :class:`Axes3D` instance in
:class:`~matplotlib.figure.Figure` *fig* with
*rect=[left, bottom, width, height]* in
:class:`~matplotlib.figure.Figure` coordinates
Optional keyword arguments:
================ =========================================
Keyword Description
================ =========================================
*azim* Azimuthal viewing angle (default -60)
*elev* Elevation viewing angle (default 30)
*zscale* [%(scale)s]
*sharez* Other axes to share z-limits with
================ =========================================
.. versionadded :: 1.2.1
*sharez*
''' % {'scale': ' | '.join([repr(x) for x in mscale.get_scale_names()])}
if rect is None:
rect = [0.0, 0.0, 1.0, 1.0]
self._cids = []
self.initial_azim = kwargs.pop('azim', -60)
self.initial_elev = kwargs.pop('elev', 30)
zscale = kwargs.pop('zscale', None)
sharez = kwargs.pop('sharez', None)
self.xy_viewLim = unit_bbox()
self.zz_viewLim = unit_bbox()
self.xy_dataLim = unit_bbox()
self.zz_dataLim = unit_bbox()
        # inhibit autoscale_view until the axes are defined
# they can't be defined until Axes.__init__ has been called
self.view_init(self.initial_elev, self.initial_azim)
self._ready = 0
self._sharez = sharez
if sharez is not None:
self._shared_z_axes.join(self, sharez)
self._adjustable = 'datalim'
Axes.__init__(self, fig, rect,
frameon=True,
*args, **kwargs)
# Disable drawing of axes by base class
Axes.set_axis_off(self)
# Enable drawing of axes by Axes3D class
self.set_axis_on()
self.M = None
# func used to format z -- fall back on major formatters
self.fmt_zdata = None
if zscale is not None :
self.set_zscale(zscale)
if self.zaxis is not None :
self._zcid = self.zaxis.callbacks.connect('units finalize',
self.relim)
else :
self._zcid = None
self._ready = 1
self.mouse_init()
self.set_top_view()
self.axesPatch.set_linewidth(0)
# Calculate the pseudo-data width and height
pseudo_bbox = self.transLimits.inverted().transform([(0, 0), (1, 1)])
self._pseudo_w, self._pseudo_h = pseudo_bbox[1] - pseudo_bbox[0]
self.figure.add_axes(self)
def set_axis_off(self):
self._axis3don = False
self.stale = True
def set_axis_on(self):
self._axis3don = True
self.stale = True
def have_units(self):
"""
Return *True* if units are set on the *x*, *y*, or *z* axes
"""
return (self.xaxis.have_units() or self.yaxis.have_units() or
self.zaxis.have_units())
def convert_zunits(self, z):
"""
For artists in an axes, if the zaxis has units support,
convert *z* using zaxis unit type
.. versionadded :: 1.2.1
"""
return self.zaxis.convert_units(z)
def _process_unit_info(self, xdata=None, ydata=None, zdata=None,
kwargs=None):
"""
Look for unit *kwargs* and update the axis instances as necessary
"""
Axes._process_unit_info(self, xdata=xdata, ydata=ydata, kwargs=kwargs)
if self.xaxis is None or self.yaxis is None or self.zaxis is None:
return
if zdata is not None:
# we only need to update if there is nothing set yet.
if not self.zaxis.have_units():
self.zaxis.update_units(xdata)
# process kwargs 2nd since these will override default units
if kwargs is not None:
zunits = kwargs.pop('zunits', self.zaxis.units)
if zunits != self.zaxis.units:
self.zaxis.set_units(zunits)
# If the units being set imply a different converter,
# we need to update.
if zdata is not None:
self.zaxis.update_units(zdata)
def set_top_view(self):
# this happens to be the right view for the viewing coordinates
# moved up and to the left slightly to fit labels and axes
xdwl = (0.95/self.dist)
xdw = (0.9/self.dist)
ydwl = (0.95/self.dist)
ydw = (0.9/self.dist)
# This is purposely using the 2D Axes's set_xlim and set_ylim,
# because we are trying to place our viewing pane.
Axes.set_xlim(self, -xdwl, xdw, auto=None)
Axes.set_ylim(self, -ydwl, ydw, auto=None)
def _init_axis(self):
'''Init 3D axes; overrides creation of regular X/Y axes'''
self.w_xaxis = axis3d.XAxis('x', self.xy_viewLim.intervalx,
self.xy_dataLim.intervalx, self)
self.xaxis = self.w_xaxis
self.w_yaxis = axis3d.YAxis('y', self.xy_viewLim.intervaly,
self.xy_dataLim.intervaly, self)
self.yaxis = self.w_yaxis
self.w_zaxis = axis3d.ZAxis('z', self.zz_viewLim.intervalx,
self.zz_dataLim.intervalx, self)
self.zaxis = self.w_zaxis
for ax in self.xaxis, self.yaxis, self.zaxis:
ax.init3d()
def get_children(self):
return [self.zaxis, ] + Axes.get_children(self)
def _get_axis_list(self):
return super(Axes3D, self)._get_axis_list() + (self.zaxis, )
def unit_cube(self, vals=None):
minx, maxx, miny, maxy, minz, maxz = vals or self.get_w_lims()
xs, ys, zs = ([minx, maxx, maxx, minx, minx, maxx, maxx, minx],
[miny, miny, maxy, maxy, miny, miny, maxy, maxy],
[minz, minz, minz, minz, maxz, maxz, maxz, maxz])
return list(zip(xs, ys, zs))
def tunit_cube(self, vals=None, M=None):
if M is None:
M = self.M
xyzs = self.unit_cube(vals)
tcube = proj3d.proj_points(xyzs, M)
return tcube
def tunit_edges(self, vals=None, M=None):
tc = self.tunit_cube(vals, M)
edges = [(tc[0], tc[1]),
(tc[1], tc[2]),
(tc[2], tc[3]),
(tc[3], tc[0]),
(tc[0], tc[4]),
(tc[1], tc[5]),
(tc[2], tc[6]),
(tc[3], tc[7]),
(tc[4], tc[5]),
(tc[5], tc[6]),
(tc[6], tc[7]),
(tc[7], tc[4])]
return edges
def draw(self, renderer):
# draw the background patch
self.axesPatch.draw(renderer)
self._frameon = False
# first, set the aspect
# this is duplicated from `axes._base._AxesBase.draw`
# but must be called before any of the artist are drawn as
# it adjusts the view limits and the size of the bounding box
# of the axes
locator = self.get_axes_locator()
if locator:
pos = locator(self, renderer)
self.apply_aspect(pos)
else:
self.apply_aspect()
# add the projection matrix to the renderer
self.M = self.get_proj()
renderer.M = self.M
renderer.vvec = self.vvec
renderer.eye = self.eye
renderer.get_axis_position = self.get_axis_position
# Calculate projection of collections and zorder them
zlist = [(col.do_3d_projection(renderer), col) \
for col in self.collections]
zlist.sort(key=itemgetter(0), reverse=True)
for i, (z, col) in enumerate(zlist):
col.zorder = i
# Calculate projection of patches and zorder them
zlist = [(patch.do_3d_projection(renderer), patch) \
for patch in self.patches]
zlist.sort(key=itemgetter(0), reverse=True)
for i, (z, patch) in enumerate(zlist):
patch.zorder = i
if self._axis3don:
axes = (self.xaxis, self.yaxis, self.zaxis)
# Draw panes first
for ax in axes:
ax.draw_pane(renderer)
# Then axes
for ax in axes:
ax.draw(renderer)
# Then rest
Axes.draw(self, renderer)
def get_axis_position(self):
vals = self.get_w_lims()
tc = self.tunit_cube(vals, self.M)
xhigh = tc[1][2] > tc[2][2]
yhigh = tc[3][2] > tc[2][2]
zhigh = tc[0][2] > tc[2][2]
return xhigh, yhigh, zhigh
def update_datalim(self, xys, **kwargs):
pass
def get_autoscale_on(self) :
"""
Get whether autoscaling is applied for all axes on plot commands
.. versionadded :: 1.1.0
This function was added, but not tested. Please report any bugs.
"""
return Axes.get_autoscale_on(self) and self.get_autoscalez_on()
def get_autoscalez_on(self) :
"""
Get whether autoscaling for the z-axis is applied on plot commands
.. versionadded :: 1.1.0
This function was added, but not tested. Please report any bugs.
"""
return self._autoscaleZon
def set_autoscale_on(self, b) :
"""
Set whether autoscaling is applied on plot commands
accepts: [ *True* | *False* ]
.. versionadded :: 1.1.0
This function was added, but not tested. Please report any bugs.
"""
Axes.set_autoscale_on(self, b)
self.set_autoscalez_on(b)
def set_autoscalez_on(self, b) :
"""
Set whether autoscaling for the z-axis is applied on plot commands
accepts: [ *True* | *False* ]
.. versionadded :: 1.1.0
This function was added, but not tested. Please report any bugs.
"""
        self._autoscaleZon = b
def set_zmargin(self, m) :
"""
Set padding of Z data limits prior to autoscaling.
*m* times the data interval will be added to each
end of that interval before it is used in autoscaling.
accepts: float in range 0 to 1
.. versionadded :: 1.1.0
This function was added, but not tested. Please report any bugs.
"""
if m < 0 or m > 1 :
raise ValueError("margin must be in range 0 to 1")
self._zmargin = m
self.stale = True
def margins(self, *args, **kw) :
"""
Convenience method to set or retrieve autoscaling margins.
signatures::
margins()
returns xmargin, ymargin, zmargin
::
margins(margin)
margins(xmargin, ymargin, zmargin)
margins(x=xmargin, y=ymargin, z=zmargin)
margins(..., tight=False)
All forms above set the xmargin, ymargin and zmargin
parameters. All keyword parameters are optional. A single argument
specifies xmargin, ymargin and zmargin. The *tight* parameter
is passed to :meth:`autoscale_view`, which is executed after
a margin is changed; the default here is *True*, on the
assumption that when margins are specified, no additional
padding to match tick marks is usually desired. Setting
*tight* to *None* will preserve the previous setting.
Specifying any margin changes only the autoscaling; for example,
if *xmargin* is not None, then *xmargin* times the X data
interval will be added to each end of that interval before
it is used in autoscaling.
.. versionadded :: 1.1.0
This function was added, but not tested. Please report any bugs.
"""
if not args and not kw:
return self._xmargin, self._ymargin, self._zmargin
tight = kw.pop('tight', True)
mx = kw.pop('x', None)
my = kw.pop('y', None)
mz = kw.pop('z', None)
if len(args) == 1:
mx = my = mz = args[0]
elif len(args) == 2:
# Maybe put out a warning because mz is not set?
mx, my = args
elif len(args) == 3:
mx, my, mz = args
else:
raise ValueError("more than three arguments were supplied")
if mx is not None:
self.set_xmargin(mx)
if my is not None:
self.set_ymargin(my)
if mz is not None:
self.set_zmargin(mz)
scalex = (mx is not None)
scaley = (my is not None)
scalez = (mz is not None)
self.autoscale_view(tight=tight, scalex=scalex, scaley=scaley,
scalez=scalez)
def autoscale(self, enable=True, axis='both', tight=None) :
"""
Convenience method for simple axis view autoscaling.
See :meth:`matplotlib.axes.Axes.autoscale` for full explanation.
Note that this function behaves the same, but for all
        three axes. Therefore, 'z' can be passed for *axis*,
and 'both' applies to all three axes.
.. versionadded :: 1.1.0
This function was added, but not tested. Please report any bugs.
"""
if enable is None:
scalex = True
scaley = True
scalez = True
else:
scalex = False
scaley = False
scalez = False
if axis in ['x', 'both']:
self._autoscaleXon = bool(enable)
scalex = self._autoscaleXon
if axis in ['y', 'both']:
self._autoscaleYon = bool(enable)
scaley = self._autoscaleYon
if axis in ['z', 'both']:
self._autoscaleZon = bool(enable)
scalez = self._autoscaleZon
self.autoscale_view(tight=tight, scalex=scalex, scaley=scaley,
scalez=scalez)
def auto_scale_xyz(self, X, Y, Z=None, had_data=None):
x, y, z = list(map(np.asarray, (X, Y, Z)))
try:
x, y = x.flatten(), y.flatten()
if Z is not None:
z = z.flatten()
except AttributeError:
raise
# This updates the bounding boxes as to keep a record as
# to what the minimum sized rectangular volume holds the
# data.
self.xy_dataLim.update_from_data_xy(np.array([x, y]).T, not had_data)
if z is not None:
self.zz_dataLim.update_from_data_xy(np.array([z, z]).T, not had_data)
# Let autoscale_view figure out how to use this data.
self.autoscale_view()
def autoscale_view(self, tight=None, scalex=True, scaley=True,
scalez=True) :
"""
Autoscale the view limits using the data limits.
See :meth:`matplotlib.axes.Axes.autoscale_view` for documentation.
Note that this function applies to the 3D axes, and as such
adds the *scalez* to the function arguments.
.. versionchanged :: 1.1.0
Function signature was changed to better match the 2D version.
*tight* is now explicitly a kwarg and placed first.
.. versionchanged :: 1.2.1
This is now fully functional.
"""
if not self._ready:
return
# This method looks at the rectangular volume (see above)
# of data and decides how to scale the view portal to fit it.
if tight is None:
# if image data only just use the datalim
_tight = self._tight or (len(self.images)>0 and
len(self.lines)==0 and
len(self.patches)==0)
else:
_tight = self._tight = bool(tight)
if scalex and self._autoscaleXon:
xshared = self._shared_x_axes.get_siblings(self)
dl = [ax.dataLim for ax in xshared]
bb = mtransforms.BboxBase.union(dl)
x0, x1 = self.xy_dataLim.intervalx
xlocator = self.xaxis.get_major_locator()
try:
x0, x1 = xlocator.nonsingular(x0, x1)
except AttributeError:
x0, x1 = mtransforms.nonsingular(x0, x1, increasing=False,
expander=0.05)
if self._xmargin > 0:
delta = (x1 - x0) * self._xmargin
x0 -= delta
x1 += delta
if not _tight:
x0, x1 = xlocator.view_limits(x0, x1)
self.set_xbound(x0, x1)
if scaley and self._autoscaleYon:
yshared = self._shared_y_axes.get_siblings(self)
dl = [ax.dataLim for ax in yshared]
bb = mtransforms.BboxBase.union(dl)
y0, y1 = self.xy_dataLim.intervaly
ylocator = self.yaxis.get_major_locator()
try:
y0, y1 = ylocator.nonsingular(y0, y1)
except AttributeError:
y0, y1 = mtransforms.nonsingular(y0, y1, increasing=False,
expander=0.05)
if self._ymargin > 0:
delta = (y1 - y0) * self._ymargin
y0 -= delta
y1 += delta
if not _tight:
y0, y1 = ylocator.view_limits(y0, y1)
self.set_ybound(y0, y1)
if scalez and self._autoscaleZon:
zshared = self._shared_z_axes.get_siblings(self)
dl = [ax.dataLim for ax in zshared]
bb = mtransforms.BboxBase.union(dl)
z0, z1 = self.zz_dataLim.intervalx
zlocator = self.zaxis.get_major_locator()
try:
z0, z1 = zlocator.nonsingular(z0, z1)
except AttributeError:
z0, z1 = mtransforms.nonsingular(z0, z1, increasing=False,
expander=0.05)
if self._zmargin > 0:
delta = (z1 - z0) * self._zmargin
z0 -= delta
z1 += delta
if not _tight:
z0, z1 = zlocator.view_limits(z0, z1)
self.set_zbound(z0, z1)
def get_w_lims(self):
'''Get 3D world limits.'''
minx, maxx = self.get_xlim3d()
miny, maxy = self.get_ylim3d()
minz, maxz = self.get_zlim3d()
return minx, maxx, miny, maxy, minz, maxz
def _determine_lims(self, xmin=None, xmax=None, *args, **kwargs):
if xmax is None and cbook.iterable(xmin):
xmin, xmax = xmin
if xmin == xmax:
xmin -= 0.05
xmax += 0.05
return (xmin, xmax)
def set_xlim3d(self, left=None, right=None, emit=True, auto=False, **kw):
"""
Set 3D x limits.
See :meth:`matplotlib.axes.Axes.set_xlim` for full documentation.
"""
if 'xmin' in kw:
left = kw.pop('xmin')
if 'xmax' in kw:
right = kw.pop('xmax')
if kw:
raise ValueError("unrecognized kwargs: %s" % kw.keys())
if right is None and cbook.iterable(left):
left, right = left
self._process_unit_info(xdata=(left, right))
if left is not None:
left = self.convert_xunits(left)
if right is not None:
right = self.convert_xunits(right)
old_left, old_right = self.get_xlim()
if left is None:
left = old_left
if right is None:
right = old_right
if left == right:
warnings.warn(('Attempting to set identical left==right results\n'
'in singular transformations; automatically expanding.\n'
'left=%s, right=%s') % (left, right))
left, right = mtransforms.nonsingular(left, right, increasing=False)
left, right = self.xaxis.limit_range_for_scale(left, right)
self.xy_viewLim.intervalx = (left, right)
if auto is not None:
self._autoscaleXon = bool(auto)
if emit:
self.callbacks.process('xlim_changed', self)
# Call all of the other x-axes that are shared with this one
for other in self._shared_x_axes.get_siblings(self):
if other is not self:
other.set_xlim(self.xy_viewLim.intervalx,
emit=False, auto=auto)
if (other.figure != self.figure and
other.figure.canvas is not None):
other.figure.canvas.draw_idle()
self.stale = True
return left, right
set_xlim = set_xlim3d
def set_ylim3d(self, bottom=None, top=None, emit=True, auto=False, **kw):
"""
Set 3D y limits.
See :meth:`matplotlib.axes.Axes.set_ylim` for full documentation.
"""
if 'ymin' in kw:
bottom = kw.pop('ymin')
if 'ymax' in kw:
top = kw.pop('ymax')
if kw:
raise ValueError("unrecognized kwargs: %s" % kw.keys())
if top is None and cbook.iterable(bottom):
bottom, top = bottom
self._process_unit_info(ydata=(bottom, top))
if bottom is not None:
bottom = self.convert_yunits(bottom)
if top is not None:
top = self.convert_yunits(top)
old_bottom, old_top = self.get_ylim()
if bottom is None:
bottom = old_bottom
if top is None:
top = old_top
if top == bottom:
warnings.warn(('Attempting to set identical bottom==top results\n'
'in singular transformations; automatically expanding.\n'
'bottom=%s, top=%s') % (bottom, top))
bottom, top = mtransforms.nonsingular(bottom, top, increasing=False)
bottom, top = self.yaxis.limit_range_for_scale(bottom, top)
self.xy_viewLim.intervaly = (bottom, top)
if auto is not None:
self._autoscaleYon = bool(auto)
if emit:
self.callbacks.process('ylim_changed', self)
# Call all of the other y-axes that are shared with this one
for other in self._shared_y_axes.get_siblings(self):
if other is not self:
other.set_ylim(self.xy_viewLim.intervaly,
emit=False, auto=auto)
if (other.figure != self.figure and
other.figure.canvas is not None):
other.figure.canvas.draw_idle()
self.stale = True
return bottom, top
set_ylim = set_ylim3d
def set_zlim3d(self, bottom=None, top=None, emit=True, auto=False, **kw):
"""
Set 3D z limits.
See :meth:`matplotlib.axes.Axes.set_ylim` for full documentation
"""
if 'zmin' in kw:
bottom = kw.pop('zmin')
if 'zmax' in kw:
top = kw.pop('zmax')
if kw:
raise ValueError("unrecognized kwargs: %s" % kw.keys())
if top is None and cbook.iterable(bottom):
bottom, top = bottom
self._process_unit_info(zdata=(bottom, top))
if bottom is not None:
bottom = self.convert_zunits(bottom)
if top is not None:
top = self.convert_zunits(top)
old_bottom, old_top = self.get_zlim()
if bottom is None:
bottom = old_bottom
if top is None:
top = old_top
if top == bottom:
warnings.warn(('Attempting to set identical bottom==top results\n'
'in singular transformations; automatically expanding.\n'
'bottom=%s, top=%s') % (bottom, top))
bottom, top = mtransforms.nonsingular(bottom, top, increasing=False)
bottom, top = self.zaxis.limit_range_for_scale(bottom, top)
self.zz_viewLim.intervalx = (bottom, top)
if auto is not None:
self._autoscaleZon = bool(auto)
if emit:
self.callbacks.process('zlim_changed', self)
            # Call all of the other z-axes that are shared with this one
for other in self._shared_z_axes.get_siblings(self):
if other is not self:
other.set_zlim(self.zz_viewLim.intervalx,
emit=False, auto=auto)
if (other.figure != self.figure and
other.figure.canvas is not None):
other.figure.canvas.draw_idle()
self.stale = True
return bottom, top
set_zlim = set_zlim3d
def get_xlim3d(self):
return self.xy_viewLim.intervalx
get_xlim3d.__doc__ = maxes.Axes.get_xlim.__doc__
get_xlim = get_xlim3d
get_xlim.__doc__ += """
.. versionchanged :: 1.1.0
This function now correctly refers to the 3D x-limits
"""
def get_ylim3d(self):
return self.xy_viewLim.intervaly
get_ylim3d.__doc__ = maxes.Axes.get_ylim.__doc__
get_ylim = get_ylim3d
get_ylim.__doc__ += """
.. versionchanged :: 1.1.0
This function now correctly refers to the 3D y-limits.
"""
def get_zlim3d(self):
'''Get 3D z limits.'''
return self.zz_viewLim.intervalx
get_zlim = get_zlim3d
def get_zscale(self) :
"""
Return the zaxis scale string %s
.. versionadded :: 1.1.0
This function was added, but not tested. Please report any bugs.
""" % (", ".join(mscale.get_scale_names()))
return self.zaxis.get_scale()
# We need to slightly redefine these to pass scalez=False
# to their calls of autoscale_view.
def set_xscale(self, value, **kwargs) :
self.xaxis._set_scale(value, **kwargs)
self.autoscale_view(scaley=False, scalez=False)
self._update_transScale()
set_xscale.__doc__ = maxes.Axes.set_xscale.__doc__ + """
.. versionadded :: 1.1.0
This function was added, but not tested. Please report any bugs.
"""
def set_yscale(self, value, **kwargs) :
self.yaxis._set_scale(value, **kwargs)
self.autoscale_view(scalex=False, scalez=False)
self._update_transScale()
self.stale = True
set_yscale.__doc__ = maxes.Axes.set_yscale.__doc__ + """
.. versionadded :: 1.1.0
This function was added, but not tested. Please report any bugs.
"""
@docstring.dedent_interpd
def set_zscale(self, value, **kwargs) :
"""
call signature::
set_zscale(value)
Set the scaling of the z-axis: %(scale)s
ACCEPTS: [%(scale)s]
Different kwargs are accepted, depending on the scale:
%(scale_docs)s
.. note ::
Currently, Axes3D objects only supports linear scales.
Other scales may or may not work, and support for these
is improving with each release.
.. versionadded :: 1.1.0
This function was added, but not tested. Please report any bugs.
"""
self.zaxis._set_scale(value, **kwargs)
self.autoscale_view(scalex=False, scaley=False)
self._update_transScale()
self.stale = True
def set_zticks(self, *args, **kwargs):
"""
Set z-axis tick locations.
See :meth:`matplotlib.axes.Axes.set_yticks` for more details.
.. note::
Minor ticks are not supported.
.. versionadded:: 1.1.0
"""
return self.zaxis.set_ticks(*args, **kwargs)
def get_zticks(self, minor=False):
"""
Return the z ticks as a list of locations
See :meth:`matplotlib.axes.Axes.get_yticks` for more details.
.. note::
Minor ticks are not supported.
.. versionadded:: 1.1.0
"""
return self.zaxis.get_ticklocs(minor=minor)
def get_zmajorticklabels(self) :
"""
Get the ztick labels as a list of Text instances
.. versionadded :: 1.1.0
"""
return cbook.silent_list('Text zticklabel',
self.zaxis.get_majorticklabels())
def get_zminorticklabels(self) :
"""
        Get the minor ztick labels as a list of Text instances
.. note::
Minor ticks are not supported. This function was added
only for completeness.
.. versionadded :: 1.1.0
"""
return cbook.silent_list('Text zticklabel',
self.zaxis.get_minorticklabels())
def set_zticklabels(self, *args, **kwargs) :
"""
Set z-axis tick labels.
See :meth:`matplotlib.axes.Axes.set_yticklabels` for more details.
.. note::
Minor ticks are not supported by Axes3D objects.
.. versionadded:: 1.1.0
"""
return self.zaxis.set_ticklabels(*args, **kwargs)
def get_zticklabels(self, minor=False) :
"""
Get ztick labels as a list of Text instances.
See :meth:`matplotlib.axes.Axes.get_yticklabels` for more details.
.. note::
Minor ticks are not supported.
.. versionadded:: 1.1.0
"""
return cbook.silent_list('Text zticklabel',
self.zaxis.get_ticklabels(minor=minor))
def zaxis_date(self, tz=None) :
"""
Sets up z-axis ticks and labels that treat the z data as dates.
*tz* is a timezone string or :class:`tzinfo` instance.
Defaults to rc value.
.. note::
This function is merely provided for completeness.
Axes3D objects do not officially support dates for ticks,
and so this may or may not work as expected.
.. versionadded :: 1.1.0
This function was added, but not tested. Please report any bugs.
"""
self.zaxis.axis_date(tz)
def get_zticklines(self) :
"""
Get ztick lines as a list of Line2D instances.
Note that this function is provided merely for completeness.
These lines are re-calculated as the display changes.
.. versionadded:: 1.1.0
"""
return self.zaxis.get_ticklines()
def clabel(self, *args, **kwargs):
"""
This function is currently not implemented for 3D axes.
Returns *None*.
"""
return None
def view_init(self, elev=None, azim=None):
"""
Set the elevation and azimuth of the axes.
        This can be used to rotate the axes programmatically.
'elev' stores the elevation angle in the z plane.
'azim' stores the azimuth angle in the x,y plane.
if elev or azim are None (default), then the initial value
is used which was specified in the :class:`Axes3D` constructor.
"""
self.dist = 10
if elev is None:
self.elev = self.initial_elev
else:
self.elev = elev
if azim is None:
self.azim = self.initial_azim
else:
self.azim = azim
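    # Usage sketch (hypothetical script, equivalent to dragging the view
    # with the mouse):
    #
    #   import matplotlib.pyplot as plt
    #   fig = plt.figure()
    #   ax = fig.add_subplot(111, projection='3d')
    #   ax.view_init(elev=45, azim=120)
    #   plt.draw()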
def get_proj(self):
"""
Create the projection matrix from the current viewing position.
elev stores the elevation angle in the z plane
azim stores the azimuth angle in the x,y plane
dist is the distance of the eye viewing point from the object
point.
"""
relev, razim = np.pi * self.elev/180, np.pi * self.azim/180
xmin, xmax = self.get_xlim3d()
ymin, ymax = self.get_ylim3d()
zmin, zmax = self.get_zlim3d()
# transform to uniform world coordinates 0-1.0,0-1.0,0-1.0
worldM = proj3d.world_transformation(xmin, xmax,
ymin, ymax,
zmin, zmax)
# look into the middle of the new coordinates
R = np.array([0.5, 0.5, 0.5])
xp = R[0] + np.cos(razim) * np.cos(relev) * self.dist
yp = R[1] + np.sin(razim) * np.cos(relev) * self.dist
zp = R[2] + np.sin(relev) * self.dist
E = np.array((xp, yp, zp))
self.eye = E
self.vvec = R - E
self.vvec = self.vvec / proj3d.mod(self.vvec)
if abs(relev) > np.pi/2:
# upside down
V = np.array((0, 0, -1))
else:
V = np.array((0, 0, 1))
zfront, zback = -self.dist, self.dist
viewM = proj3d.view_transformation(E, R, V)
perspM = proj3d.persp_transformation(zfront, zback)
M0 = np.dot(viewM, worldM)
M = np.dot(perspM, M0)
return M
def mouse_init(self, rotate_btn=1, zoom_btn=3):
"""Initializes mouse button callbacks to enable 3D rotation of
the axes. Also optionally sets the mouse buttons for 3D rotation
and zooming.
============ =======================================================
Argument Description
============ =======================================================
*rotate_btn* The integer or list of integers specifying which mouse
button or buttons to use for 3D rotation of the axes.
Default = 1.
*zoom_btn* The integer or list of integers specifying which mouse
button or buttons to use to zoom the 3D axes.
Default = 3.
============ =======================================================
"""
self.button_pressed = None
canv = self.figure.canvas
if canv is not None:
c1 = canv.mpl_connect('motion_notify_event', self._on_move)
c2 = canv.mpl_connect('button_press_event', self._button_press)
c3 = canv.mpl_connect('button_release_event', self._button_release)
self._cids = [c1, c2, c3]
else:
warnings.warn('Axes3D.figure.canvas is \'None\', mouse rotation disabled. Set canvas then call Axes3D.mouse_init().')
# coerce scalars into array-like, then convert into
# a regular list to avoid comparisons against None
# which breaks in recent versions of numpy.
self._rotate_btn = np.atleast_1d(rotate_btn).tolist()
self._zoom_btn = np.atleast_1d(zoom_btn).tolist()
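# Illustrative sketch (assumption, not original source): remap the buttons so
# that either button 1 or 2 rotates and button 3 zooms.
#
#     ax.mouse_init(rotate_btn=[1, 2], zoom_btn=3)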
def can_zoom(self) :
"""
Return *True* if this axes supports the zoom box button functionality.
3D axes objects do not use the zoom box button.
"""
return False
def can_pan(self) :
"""
Return *True* if this axes supports the pan/zoom button functionality.
3D axes objects do not use the pan/zoom button.
"""
return False
def cla(self):
"""
Clear axes
"""
# Disabling mouse interaction might have been needed a long
# time ago, but I can't find a reason for it now - BVR (2012-03)
#self.disable_mouse_rotation()
self.zaxis.cla()
if self._sharez is not None:
self.zaxis.major = self._sharez.zaxis.major
self.zaxis.minor = self._sharez.zaxis.minor
z0, z1 = self._sharez.get_zlim()
self.set_zlim(z0, z1, emit=False, auto=None)
self.zaxis._set_scale(self._sharez.zaxis.get_scale())
else:
self.zaxis._set_scale('linear')
self._autoscaleZon = True
self._zmargin = 0
Axes.cla(self)
self.grid(rcParams['axes3d.grid'])
def disable_mouse_rotation(self):
"""Disable mouse button callbacks.
"""
# Disconnect the various events we set.
for cid in self._cids:
self.figure.canvas.mpl_disconnect(cid)
self._cids = []
def _button_press(self, event):
if event.inaxes == self:
self.button_pressed = event.button
self.sx, self.sy = event.xdata, event.ydata
def _button_release(self, event):
self.button_pressed = None
def format_zdata(self, z):
"""
Return *z* formatted as a string. This function uses the
:attr:`fmt_zdata` attribute if it is callable, and otherwise
falls back on the z-axis major formatter.
"""
try: return self.fmt_zdata(z)
except (AttributeError, TypeError):
func = self.zaxis.get_major_formatter().format_data_short
val = func(z)
return val
def format_coord(self, xd, yd):
"""
Given the 2D view coordinates attempt to guess a 3D coordinate.
Looks for the nearest edge to the point and then assumes that
the point is at the same z location as the nearest point on the edge.
"""
if self.M is None:
return ''
if self.button_pressed in self._rotate_btn:
# ignore xd and yd and display angles instead
return 'azimuth=%d deg, elevation=%d deg ' % (self.azim, self.elev)
p = (xd, yd)
edges = self.tunit_edges()
#lines = [proj3d.line2d(p0,p1) for (p0,p1) in edges]
ldists = [(proj3d.line2d_seg_dist(p0, p1, p), i) for \
i, (p0, p1) in enumerate(edges)]
ldists.sort()
# nearest edge
edgei = ldists[0][1]
p0, p1 = edges[edgei]
# scale the z value to match
x0, y0, z0 = p0
x1, y1, z1 = p1
d0 = np.hypot(x0-xd, y0-yd)
d1 = np.hypot(x1-xd, y1-yd)
dt = d0+d1
z = d1/dt * z0 + d0/dt * z1
x, y, z = proj3d.inv_transform(xd, yd, z, self.M)
xs = self.format_xdata(x)
ys = self.format_ydata(y)
zs = self.format_zdata(z)
return 'x=%s, y=%s, z=%s' % (xs, ys, zs)
def _on_move(self, event):
"""Mouse moving
button-1 rotates by default. Can be set explicitly in mouse_init().
button-3 zooms by default. Can be set explicitly in mouse_init().
"""
if not self.button_pressed:
return
if self.M is None:
return
x, y = event.xdata, event.ydata
# In case the mouse is out of bounds.
if x is None:
return
dx, dy = x - self.sx, y - self.sy
w = self._pseudo_w
h = self._pseudo_h
self.sx, self.sy = x, y
# Rotation
if self.button_pressed in self._rotate_btn:
# rotate viewing point
# get the x and y pixel coords
if dx == 0 and dy == 0:
return
self.elev = art3d.norm_angle(self.elev - (dy/h)*180)
self.azim = art3d.norm_angle(self.azim - (dx/w)*180)
self.get_proj()
self.figure.canvas.draw_idle()
# elif self.button_pressed == 2:
# pan view
# project xv,yv,zv -> xw,yw,zw
# pan
# pass
# Zoom
elif self.button_pressed in self._zoom_btn:
# zoom view
# hmmm..this needs some help from clipping....
minx, maxx, miny, maxy, minz, maxz = self.get_w_lims()
df = 1-((h - dy)/h)
dx = (maxx-minx)*df
dy = (maxy-miny)*df
dz = (maxz-minz)*df
self.set_xlim3d(minx - dx, maxx + dx)
self.set_ylim3d(miny - dy, maxy + dy)
self.set_zlim3d(minz - dz, maxz + dz)
self.get_proj()
self.figure.canvas.draw_idle()
def set_zlabel(self, zlabel, fontdict=None, labelpad=None, **kwargs):
'''
Set zlabel. See doc for :meth:`set_ylabel` for description.
'''
if labelpad is not None : self.zaxis.labelpad = labelpad
return self.zaxis.set_label_text(zlabel, fontdict, **kwargs)
def get_zlabel(self) :
"""
Get the z-label text string.
.. versionadded :: 1.1.0
This function was added, but not tested. Please report any bugs.
"""
label = self.zaxis.get_label()
return label.get_text()
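# Illustrative sketch (assumption): set and read back the z label.
#
#     ax.set_zlabel('Depth (m)', labelpad=10)
#     assert ax.get_zlabel() == 'Depth (m)'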
#### Axes rectangle characteristics
def get_frame_on(self):
"""
Get whether the 3D axes panels are drawn
.. versionadded :: 1.1.0
"""
return self._frameon
def set_frame_on(self, b):
"""
Set whether the 3D axes panels are drawn
ACCEPTS: [ *True* | *False* ]
.. versionadded :: 1.1.0
"""
self._frameon = bool(b)
self.stale = True
def get_axisbelow(self):
"""
Get whether axis below is true or not.
For axes3d objects, this will always be *True*
.. versionadded :: 1.1.0
This function was added for completeness.
"""
return True
def set_axisbelow(self, b):
"""
Set whether the axis ticks and gridlines are above or below
most artists
For axes3d objects, this will ignore any settings and just use *True*
ACCEPTS: [ *True* | *False* ]
.. versionadded :: 1.1.0
This function was added for completeness.
"""
self._axisbelow = True
self.stale = True
def grid(self, b=True, **kwargs):
'''
Set / unset 3D grid.
.. note::
Currently, this function does not behave the same as
:meth:`matplotlib.axes.Axes.grid`, but it is intended to
eventually support that behavior.
.. versionchanged :: 1.1.0
This function was changed, but not tested. Please report any bugs.
'''
# TODO: Operate on each axes separately
if len(kwargs) :
b = True
self._draw_grid = cbook._string_to_bool(b)
self.stale = True
def ticklabel_format(self, **kwargs) :
"""
Convenience method for manipulating the ScalarFormatter
used by default for linear axes in Axes3D objects.
See :meth:`matplotlib.axes.Axes.ticklabel_format` for full
documentation. Note that this version applies to all three
axes of the Axes3D object. Therefore, the *axis* argument
will also accept a value of 'z' and the value of 'both' will
apply to all three axes.
.. versionadded :: 1.1.0
This function was added, but not tested. Please report any bugs.
"""
style = kwargs.pop('style', '').lower()
scilimits = kwargs.pop('scilimits', None)
useOffset = kwargs.pop('useOffset', None)
axis = kwargs.pop('axis', 'both').lower()
if scilimits is not None:
try:
m, n = scilimits
m+n+1 # check that both are numbers
except (ValueError, TypeError):
raise ValueError("scilimits must be a sequence of 2 integers")
if style[:3] == 'sci':
sb = True
elif style in ['plain', 'comma']:
sb = False
if style == 'plain':
cb = False
else:
cb = True
raise NotImplementedError("comma style remains to be added")
elif style == '':
sb = None
else:
raise ValueError("%s is not a valid style value")
try:
if sb is not None:
if axis in ['both', 'x']:
self.xaxis.major.formatter.set_scientific(sb)
if axis in ['both', 'y']:
self.yaxis.major.formatter.set_scientific(sb)
if axis in ['both', 'z'] :
self.zaxis.major.formatter.set_scientific(sb)
if scilimits is not None:
if axis in ['both', 'x']:
self.xaxis.major.formatter.set_powerlimits(scilimits)
if axis in ['both', 'y']:
self.yaxis.major.formatter.set_powerlimits(scilimits)
if axis in ['both', 'z']:
self.zaxis.major.formatter.set_powerlimits(scilimits)
if useOffset is not None:
if axis in ['both', 'x']:
self.xaxis.major.formatter.set_useOffset(useOffset)
if axis in ['both', 'y']:
self.yaxis.major.formatter.set_useOffset(useOffset)
if axis in ['both', 'z']:
self.zaxis.major.formatter.set_useOffset(useOffset)
except AttributeError:
raise AttributeError(
"This method only works with the ScalarFormatter.")
def locator_params(self, axis='both', tight=None, **kwargs) :
"""
Convenience method for controlling tick locators.
See :meth:`matplotlib.axes.Axes.locator_params` for full
documentation. Note that this is for Axes3D objects;
therefore, setting *axis* to 'both' will result in the
parameters being set for all three axes. Also, *axis*
can take a value of 'z' to apply parameters to the
z axis.
.. versionadded :: 1.1.0
This function was added, but not tested. Please report any bugs.
"""
_x = axis in ['x', 'both']
_y = axis in ['y', 'both']
_z = axis in ['z', 'both']
if _x:
self.xaxis.get_major_locator().set_params(**kwargs)
if _y:
self.yaxis.get_major_locator().set_params(**kwargs)
if _z:
self.zaxis.get_major_locator().set_params(**kwargs)
self.autoscale_view(tight=tight, scalex=_x, scaley=_y, scalez=_z)
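# Illustrative sketch (assumption): cap all three axes at roughly 4 major
# tick intervals (nbins is forwarded to each axis's MaxNLocator).
#
#     ax.locator_params(axis='both', tight=True, nbins=4)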
def tick_params(self, axis='both', **kwargs) :
"""
Convenience method for changing the appearance of ticks and
tick labels.
See :meth:`matplotlib.axes.Axes.tick_params` for more complete
documentation.
The only difference is that setting *axis* to 'both' will
mean that the settings are applied to all three axes. Also,
the *axis* parameter accepts a value of 'z', which
applies the settings to the z-axis only.
Also, because Axes3D objects are drawn very differently
from regular 2D axes, some of these settings may have
ambiguous meaning. For simplicity, the 'z' axis will
accept settings as if it were the 'y' axis.
.. note::
While this function is currently implemented, the core part
of the Axes3D object may ignore some of these settings.
Future releases will fix this. Priority will be given to
those who file bugs.
.. versionadded :: 1.1.0
This function was added, but not tested. Please report any bugs.
"""
Axes.tick_params(self, axis, **kwargs)
if axis in ['z', 'both'] :
zkw = dict(kwargs)
zkw.pop('top', None)
zkw.pop('bottom', None)
zkw.pop('labeltop', None)
zkw.pop('labelbottom', None)
self.zaxis.set_tick_params(**zkw)
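# Illustrative sketch (assumption): shrink tick labels on all three axes,
# then recolor only the z-axis ticks.
#
#     ax.tick_params(axis='both', labelsize=8)
#     ax.tick_params(axis='z', colors='red')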
### data limits, ticks, tick labels, and formatting
def invert_zaxis(self):
"""
Invert the z-axis.
.. versionadded :: 1.1.0
This function was added, but not tested. Please report any bugs.
"""
bottom, top = self.get_zlim()
self.set_zlim(top, bottom, auto=None)
def zaxis_inverted(self):
'''
Returns True if the z-axis is inverted.
.. versionadded :: 1.1.0
This function was added, but not tested. Please report any bugs.
'''
bottom, top = self.get_zlim()
return top < bottom
def get_zbound(self):
"""
Returns the z-axis numerical bounds where::
lowerBound < upperBound
.. versionadded :: 1.1.0
This function was added, but not tested. Please report any bugs.
"""
bottom, top = self.get_zlim()
if bottom < top:
return bottom, top
else:
return top, bottom
def set_zbound(self, lower=None, upper=None):
"""
Set the lower and upper numerical bounds of the z-axis.
This method will honor axes inversion regardless of parameter order.
It will not change the :attr:`_autoscaleZon` attribute.
.. versionadded :: 1.1.0
This function was added, but not tested. Please report any bugs.
"""
if upper is None and cbook.iterable(lower):
lower,upper = lower
old_lower,old_upper = self.get_zbound()
if lower is None: lower = old_lower
if upper is None: upper = old_upper
if self.zaxis_inverted():
if lower < upper:
self.set_zlim(upper, lower, auto=None)
else:
self.set_zlim(lower, upper, auto=None)
else :
if lower < upper:
self.set_zlim(lower, upper, auto=None)
else :
self.set_zlim(upper, lower, auto=None)
def text(self, x, y, z, s, zdir=None, **kwargs):
'''
Add text to the plot. kwargs will be passed on to Axes.text,
except for the `zdir` keyword, which sets the direction to be
used as the z direction.
'''
text = Axes.text(self, x, y, s, **kwargs)
art3d.text_2d_to_3d(text, z, zdir)
return text
text3D = text
text2D = Axes.text
def plot(self, xs, ys, *args, **kwargs):
'''
Plot 2D or 3D data.
========== ================================================
Argument Description
========== ================================================
*xs*, *ys* x, y coordinates of vertices
*zs* z value(s), either one for all points or one for
each point.
*zdir* Which direction to use as z ('x', 'y' or 'z')
when plotting a 2D set.
========== ================================================
Other arguments are passed on to
:func:`~matplotlib.axes.Axes.plot`
'''
# FIXME: This argument parsing might be better handled
# when we set later versions of python for
# minimum requirements. Currently at 2.4.
# Note that some of the reason for the current difficulty
# is caused by the fact that we want to insert a new
# (semi-optional) positional argument 'Z' right before
# many other traditional positional arguments occur
# such as the color, linestyle and/or marker.
had_data = self.has_data()
zs = kwargs.pop('zs', 0)
zdir = kwargs.pop('zdir', 'z')
argsi = 0
# First argument is array of zs
if len(args) > 0 and cbook.iterable(args[0]) and \
len(xs) == len(args[0]) :
# So, we know that it is an array with
# first dimension the same as xs.
# Next, check to see if the data contained
# therein (if any) is scalar (and not another array).
if len(args[0]) == 0 or cbook.is_scalar(args[0][0]) :
zs = args[argsi]
argsi += 1
# First argument is z value
elif len(args) > 0 and cbook.is_scalar(args[0]):
zs = args[argsi]
argsi += 1
# Match length
if not cbook.iterable(zs):
zs = np.ones(len(xs)) * zs
lines = Axes.plot(self, xs, ys, *args[argsi:], **kwargs)
for line in lines:
art3d.line_2d_to_3d(line, zs=zs, zdir=zdir)
self.auto_scale_xyz(xs, ys, zs, had_data)
return lines
plot3D = plot
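# Illustrative sketch (assumption): a parametric 3D line, plus 2D data lifted
# into the y=0 plane via a scalar *zs* and *zdir*.
#
#     import numpy as np
#     theta = np.linspace(0, 4 * np.pi, 100)
#     ax.plot(np.cos(theta), np.sin(theta), zs=theta, zdir='z')
#     ax.plot(np.cos(theta), np.sin(theta), zs=0, zdir='y')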
def plot_surface(self, X, Y, Z, *args, **kwargs):
'''
Create a surface plot.
By default it will be colored in shades of a solid color,
but it also supports color mapping by supplying the *cmap*
argument.
The `rstride` and `cstride` kwargs set the stride used to
sample the input data to generate the graph. If 1k by 1k
arrays are passed in, the default values for the strides will
result in a 100x100 grid being plotted.
============= ================================================
Argument Description
============= ================================================
*X*, *Y*, *Z* Data values as 2D arrays
*rstride* Array row stride (step size), defaults to 10
*cstride* Array column stride (step size), defaults to 10
*color* Color of the surface patches
*cmap* A colormap for the surface patches.
*facecolors* Face colors for the individual patches
*norm* An instance of Normalize to map values to colors
*vmin* Minimum value to map
*vmax* Maximum value to map
*shade* Whether to shade the facecolors
============= ================================================
Other arguments are passed on to
:class:`~mpl_toolkits.mplot3d.art3d.Poly3DCollection`
'''
had_data = self.has_data()
Z = np.atleast_2d(Z)
# TODO: Support masked arrays
X, Y, Z = np.broadcast_arrays(X, Y, Z)
rows, cols = Z.shape
rstride = kwargs.pop('rstride', 10)
cstride = kwargs.pop('cstride', 10)
if 'facecolors' in kwargs:
fcolors = kwargs.pop('facecolors')
else:
color = np.array(colorConverter.to_rgba(kwargs.pop('color', 'b')))
fcolors = None
cmap = kwargs.get('cmap', None)
norm = kwargs.pop('norm', None)
vmin = kwargs.pop('vmin', None)
vmax = kwargs.pop('vmax', None)
linewidth = kwargs.get('linewidth', None)
shade = kwargs.pop('shade', cmap is None)
lightsource = kwargs.pop('lightsource', None)
# Shade the data
if shade and cmap is not None and fcolors is not None:
fcolors = self._shade_colors_lightsource(Z, cmap, lightsource)
polys = []
# Only need these vectors to shade if there is no cmap
if cmap is None and shade :
totpts = int(np.ceil(float(rows - 1) / rstride) *
np.ceil(float(cols - 1) / cstride))
v1 = np.empty((totpts, 3))
v2 = np.empty((totpts, 3))
# This indexes the vertex points
which_pt = 0
#colset contains the data for coloring: either average z or the facecolor
colset = []
for rs in xrange(0, rows-1, rstride):
for cs in xrange(0, cols-1, cstride):
ps = []
for a in (X, Y, Z) :
ztop = a[rs,cs:min(cols, cs+cstride+1)]
zleft = a[rs+1:min(rows, rs+rstride+1),
min(cols-1, cs+cstride)]
zbase = a[min(rows-1, rs+rstride), cs:min(cols, cs+cstride+1):][::-1]
zright = a[rs:min(rows-1, rs+rstride):, cs][::-1]
z = np.concatenate((ztop, zleft, zbase, zright))
ps.append(z)
# The construction leaves the array with duplicate points, which
# are removed here.
ps = list(zip(*ps))
lastp = np.array([])
ps2 = [ps[0]] + [ps[i] for i in xrange(1, len(ps)) if ps[i] != ps[i-1]]
avgzsum = sum(p[2] for p in ps2)
polys.append(ps2)
if fcolors is not None:
colset.append(fcolors[rs][cs])
else:
colset.append(avgzsum / len(ps2))
# Only need vectors to shade if no cmap
if cmap is None and shade:
i1, i2, i3 = 0, int(len(ps2)/3), int(2*len(ps2)/3)
v1[which_pt] = np.array(ps2[i1]) - np.array(ps2[i2])
v2[which_pt] = np.array(ps2[i2]) - np.array(ps2[i3])
which_pt += 1
if cmap is None and shade:
normals = np.cross(v1, v2)
else :
normals = []
polyc = art3d.Poly3DCollection(polys, *args, **kwargs)
if fcolors is not None:
if shade:
colset = self._shade_colors(colset, normals)
polyc.set_facecolors(colset)
polyc.set_edgecolors(colset)
elif cmap:
colset = np.array(colset)
polyc.set_array(colset)
if vmin is not None or vmax is not None:
polyc.set_clim(vmin, vmax)
if norm is not None:
polyc.set_norm(norm)
else:
if shade:
colset = self._shade_colors(color, normals)
else:
colset = color
polyc.set_facecolors(colset)
self.add_collection(polyc)
self.auto_scale_xyz(X, Y, Z, had_data)
return polyc
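# Illustrative sketch (assumption): a colormapped surface sampled at full
# resolution; passing *cmap* maps average patch z-values to colors.
#
#     import numpy as np
#     from matplotlib import cm
#     X, Y = np.meshgrid(np.linspace(-2, 2, 50), np.linspace(-2, 2, 50))
#     Z = np.exp(-(X**2 + Y**2))
#     surf = ax.plot_surface(X, Y, Z, rstride=1, cstride=1,
#                            cmap=cm.coolwarm, linewidth=0)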
def _generate_normals(self, polygons):
'''
Generate normals for polygons by using the first three points.
This normal of course might not make sense for polygons with
more than three points not lying in a plane.
'''
normals = []
for verts in polygons:
v1 = np.array(verts[0]) - np.array(verts[1])
v2 = np.array(verts[2]) - np.array(verts[0])
normals.append(np.cross(v1, v2))
return normals
def _shade_colors(self, color, normals):
'''
Shade *color* using normal vectors given by *normals*.
*color* can also be an array of the same length as *normals*.
'''
shade = np.array([np.dot(n / proj3d.mod(n), [-1, -1, 0.5])
if proj3d.mod(n) else np.nan
for n in normals])
mask = ~np.isnan(shade)
if len(shade[mask]) > 0:
norm = Normalize(min(shade[mask]), max(shade[mask]))
shade[~mask] = min(shade[mask])
color = colorConverter.to_rgba_array(color)
# shape of color should be (M, 4) (where M is number of faces)
# shape of shade should be (M,)
# colors should have final shape of (M, 4)
alpha = color[:, 3]
colors = (0.5 + norm(shade)[:, np.newaxis] * 0.5) * color
colors[:, 3] = alpha
else:
colors = np.asanyarray(color).copy()
return colors
def _shade_colors_lightsource(self, data, cmap, lightsource):
if lightsource is None:
lightsource = LightSource(azdeg=135, altdeg=55)
return lightsource.shade(data, cmap)
def plot_wireframe(self, X, Y, Z, *args, **kwargs):
'''
Plot a 3D wireframe.
The `rstride` and `cstride` kwargs set the stride used to
sample the input data to generate the graph. If either is 0,
the input data is not sampled along that direction, producing a
3D line plot rather than a wireframe plot.
========== ================================================
Argument Description
========== ================================================
*X*, *Y*, Data values as 2D arrays
*Z*
*rstride* Array row stride (step size), defaults to 1
*cstride* Array column stride (step size), defaults to 1
========== ================================================
Keyword arguments are passed on to
:class:`~matplotlib.collections.LineCollection`.
Returns a :class:`~mpl_toolkits.mplot3d.art3d.Line3DCollection`
'''
rstride = kwargs.pop("rstride", 1)
cstride = kwargs.pop("cstride", 1)
had_data = self.has_data()
Z = np.atleast_2d(Z)
# FIXME: Support masked arrays
X, Y, Z = np.broadcast_arrays(X, Y, Z)
rows, cols = Z.shape
# We want two sets of lines, one running along the "rows" of
# Z and another set of lines running along the "columns" of Z.
# This transpose will make it easy to obtain the columns.
tX, tY, tZ = np.transpose(X), np.transpose(Y), np.transpose(Z)
if rstride:
rii = list(xrange(0, rows, rstride))
# Add the last index only if needed
if rows > 0 and rii[-1] != (rows - 1) :
rii += [rows-1]
else:
rii = []
if cstride:
cii = list(xrange(0, cols, cstride))
# Add the last index only if needed
if cols > 0 and cii[-1] != (cols - 1) :
cii += [cols-1]
else:
cii = []
if rstride == 0 and cstride == 0:
raise ValueError("Either rstride or cstride must be non zero")
# If the inputs were empty, then just
# reset everything.
if Z.size == 0 :
rii = []
cii = []
xlines = [X[i] for i in rii]
ylines = [Y[i] for i in rii]
zlines = [Z[i] for i in rii]
txlines = [tX[i] for i in cii]
tylines = [tY[i] for i in cii]
tzlines = [tZ[i] for i in cii]
lines = [list(zip(xl, yl, zl)) for xl, yl, zl in \
zip(xlines, ylines, zlines)]
lines += [list(zip(xl, yl, zl)) for xl, yl, zl in \
zip(txlines, tylines, tzlines)]
linec = art3d.Line3DCollection(lines, *args, **kwargs)
self.add_collection(linec)
self.auto_scale_xyz(X, Y, Z, had_data)
return linec
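# Illustrative sketch (assumption): a wireframe over the module-level test
# data; cstride=0 would instead draw only row lines (a 3D line plot).
#
#     from mpl_toolkits.mplot3d.axes3d import get_test_data
#     X, Y, Z = get_test_data(0.1)
#     ax.plot_wireframe(X, Y, Z, rstride=5, cstride=5)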
def plot_trisurf(self, *args, **kwargs):
"""
============= ================================================
Argument Description
============= ================================================
*X*, *Y*, *Z* Data values as 1D arrays
*color* Color of the surface patches
*cmap* A colormap for the surface patches.
*norm* An instance of Normalize to map values to colors
*vmin* Minimum value to map
*vmax* Maximum value to map
*shade* Whether to shade the facecolors
============= ================================================
The (optional) triangulation can be specified in one of two ways;
either::
plot_trisurf(triangulation, ...)
where triangulation is a :class:`~matplotlib.tri.Triangulation`
object, or::
plot_trisurf(X, Y, ...)
plot_trisurf(X, Y, triangles, ...)
plot_trisurf(X, Y, triangles=triangles, ...)
in which case a Triangulation object will be created. See
:class:`~matplotlib.tri.Triangulation` for an explanation of
these possibilities.
The remaining arguments are::
plot_trisurf(..., Z)
where *Z* is the array of values to contour, one per point
in the triangulation.
Other arguments are passed on to
:class:`~mpl_toolkits.mplot3d.art3d.Poly3DCollection`
**Examples:**
.. plot:: mpl_examples/mplot3d/trisurf3d_demo.py
.. plot:: mpl_examples/mplot3d/trisurf3d_demo2.py
.. versionadded:: 1.2.0
This plotting function was added for the v1.2.0 release.
"""
had_data = self.has_data()
# TODO: Support custom face colours
color = np.array(colorConverter.to_rgba(kwargs.pop('color', 'b')))
cmap = kwargs.get('cmap', None)
norm = kwargs.pop('norm', None)
vmin = kwargs.pop('vmin', None)
vmax = kwargs.pop('vmax', None)
linewidth = kwargs.get('linewidth', None)
shade = kwargs.pop('shade', cmap is None)
lightsource = kwargs.pop('lightsource', None)
tri, args, kwargs = Triangulation.get_from_args_and_kwargs(*args, **kwargs)
if 'Z' in kwargs:
z = np.asarray(kwargs.pop('Z'))
else:
z = np.asarray(args[0])
# We do this so Z doesn't get passed as an arg to PolyCollection
args = args[1:]
triangles = tri.get_masked_triangles()
xt = tri.x[triangles][..., np.newaxis]
yt = tri.y[triangles][..., np.newaxis]
zt = z[triangles][..., np.newaxis]
verts = np.concatenate((xt, yt, zt), axis=2)
# Only need these vectors to shade if there is no cmap
if cmap is None and shade:
totpts = len(verts)
v1 = np.empty((totpts, 3))
v2 = np.empty((totpts, 3))
# This indexes the vertex points
which_pt = 0
colset = []
for i in xrange(len(verts)):
avgzsum = verts[i,0,2] + verts[i,1,2] + verts[i,2,2]
colset.append(avgzsum / 3.0)
# Only need vectors to shade if no cmap
if cmap is None and shade:
v1[which_pt] = np.array(verts[i,0]) - np.array(verts[i,1])
v2[which_pt] = np.array(verts[i,1]) - np.array(verts[i,2])
which_pt += 1
if cmap is None and shade:
normals = np.cross(v1, v2)
else:
normals = []
polyc = art3d.Poly3DCollection(verts, *args, **kwargs)
if cmap:
colset = np.array(colset)
polyc.set_array(colset)
if vmin is not None or vmax is not None:
polyc.set_clim(vmin, vmax)
if norm is not None:
polyc.set_norm(norm)
else:
if shade:
colset = self._shade_colors(color, normals)
else:
colset = color
polyc.set_facecolors(colset)
self.add_collection(polyc)
self.auto_scale_xyz(tri.x, tri.y, z, had_data)
return polyc
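# Illustrative sketch (assumption): a triangulated surface over scattered
# points; the Triangulation is built implicitly from x and y.
#
#     import numpy as np
#     from matplotlib import cm
#     x = np.random.rand(100)
#     y = np.random.rand(100)
#     z = np.sin(x * np.pi) * np.cos(y * np.pi)
#     ax.plot_trisurf(x, y, z, cmap=cm.coolwarm, linewidth=0.2)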
def _3d_extend_contour(self, cset, stride=5):
'''
Extend a contour in 3D by creating a band of quadrilaterals
around each contour line, extruded half a level-spacing above
and below the line's level.
'''
levels = cset.levels
colls = cset.collections
dz = (levels[1] - levels[0]) / 2
for z, linec in zip(levels, colls):
topverts = art3d.paths_to_3d_segments(linec.get_paths(), z - dz)
botverts = art3d.paths_to_3d_segments(linec.get_paths(), z + dz)
color = linec.get_color()[0]
polyverts = []
normals = []
nsteps = round(len(topverts[0]) / stride)
if nsteps <= 1:
if len(topverts[0]) > 1:
nsteps = 2
else:
continue
stepsize = (len(topverts[0]) - 1) / (nsteps - 1)
for i in range(int(round(nsteps)) - 1):
i1 = int(round(i * stepsize))
i2 = int(round((i + 1) * stepsize))
polyverts.append([topverts[0][i1],
topverts[0][i2],
botverts[0][i2],
botverts[0][i1]])
v1 = np.array(topverts[0][i1]) - np.array(topverts[0][i2])
v2 = np.array(topverts[0][i1]) - np.array(botverts[0][i1])
normals.append(np.cross(v1, v2))
colors = self._shade_colors(color, normals)
colors2 = self._shade_colors(color, normals)
polycol = art3d.Poly3DCollection(polyverts,
facecolors=colors,
edgecolors=colors2)
polycol.set_sort_zpos(z)
self.add_collection3d(polycol)
for col in colls:
self.collections.remove(col)
def add_contour_set(self, cset, extend3d=False, stride=5, zdir='z', offset=None):
zdir = '-' + zdir
if extend3d:
self._3d_extend_contour(cset, stride)
else:
for z, linec in zip(cset.levels, cset.collections):
if offset is not None:
z = offset
art3d.line_collection_2d_to_3d(linec, z, zdir=zdir)
def add_contourf_set(self, cset, zdir='z', offset=None) :
zdir = '-' + zdir
for z, linec in zip(cset.levels, cset.collections) :
if offset is not None :
z = offset
art3d.poly_collection_2d_to_3d(linec, z, zdir=zdir)
linec.set_sort_zpos(z)
def contour(self, X, Y, Z, *args, **kwargs):
'''
Create a 3D contour plot.
========== ================================================
Argument Description
========== ================================================
*X*, *Y*, Data values as numpy.arrays
*Z*
*extend3d* Whether to extend contour in 3D (default: False)
*stride* Stride (step size) for extending contour
*zdir* The direction to use: x, y or z (default)
*offset* If specified, plot a projection of the contour
lines at this position in a plane normal to zdir
========== ================================================
The positional and other keyword arguments are passed on to
:func:`~matplotlib.axes.Axes.contour`
Returns a :class:`~matplotlib.axes.Axes.contour`
'''
extend3d = kwargs.pop('extend3d', False)
stride = kwargs.pop('stride', 5)
zdir = kwargs.pop('zdir', 'z')
offset = kwargs.pop('offset', None)
had_data = self.has_data()
jX, jY, jZ = art3d.rotate_axes(X, Y, Z, zdir)
cset = Axes.contour(self, jX, jY, jZ, *args, **kwargs)
self.add_contour_set(cset, extend3d, stride, zdir, offset)
self.auto_scale_xyz(X, Y, Z, had_data)
return cset
contour3D = contour
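# Illustrative sketch (assumption): contour lines extruded into 3D bands,
# plus a flat projection of the contours onto the z = -1 plane.
#
#     from mpl_toolkits.mplot3d.axes3d import get_test_data
#     X, Y, Z = get_test_data(0.05)
#     ax.contour(X, Y, Z, extend3d=True, stride=8)
#     ax.contour(X, Y, Z, zdir='z', offset=-1)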
def tricontour(self, *args, **kwargs):
"""
Create a 3D contour plot.
========== ================================================
Argument Description
========== ================================================
*X*, *Y*, Data values as numpy.arrays
*Z*
*extend3d* Whether to extend contour in 3D (default: False)
*stride* Stride (step size) for extending contour
*zdir* The direction to use: x, y or z (default)
*offset* If specified, plot a projection of the contour
lines at this position in a plane normal to zdir
========== ================================================
Other keyword arguments are passed on to
:func:`~matplotlib.axes.Axes.tricontour`
Returns a :class:`~matplotlib.axes.Axes.contour`
.. versionchanged:: 1.3.0
Added support for custom triangulations
EXPERIMENTAL: This method currently produces incorrect output due to a
longstanding bug in 3D PolyCollection rendering.
"""
extend3d = kwargs.pop('extend3d', False)
stride = kwargs.pop('stride', 5)
zdir = kwargs.pop('zdir', 'z')
offset = kwargs.pop('offset', None)
had_data = self.has_data()
tri, args, kwargs = Triangulation.get_from_args_and_kwargs(
*args, **kwargs)
X = tri.x
Y = tri.y
if 'Z' in kwargs:
Z = kwargs.pop('Z')
else:
Z = args[0]
# We do this so Z doesn't get passed as an arg to Axes.tricontour
args = args[1:]
jX, jY, jZ = art3d.rotate_axes(X, Y, Z, zdir)
tri = Triangulation(jX, jY, tri.triangles, tri.mask)
cset = Axes.tricontour(self, tri, jZ, *args, **kwargs)
self.add_contour_set(cset, extend3d, stride, zdir, offset)
self.auto_scale_xyz(X, Y, Z, had_data)
return cset
def contourf(self, X, Y, Z, *args, **kwargs):
'''
Create a 3D contourf plot.
========== ================================================
Argument Description
========== ================================================
*X*, *Y*, Data values as numpy.arrays
*Z*
*zdir* The direction to use: x, y or z (default)
*offset* If specified, plot a projection of the filled contour
at this position in a plane normal to zdir
========== ================================================
The positional and keyword arguments are passed on to
:func:`~matplotlib.axes.Axes.contourf`
Returns a :class:`~matplotlib.axes.Axes.contourf`
.. versionchanged :: 1.1.0
The *zdir* and *offset* kwargs were added.
'''
zdir = kwargs.pop('zdir', 'z')
offset = kwargs.pop('offset', None)
had_data = self.has_data()
jX, jY, jZ = art3d.rotate_axes(X, Y, Z, zdir)
cset = Axes.contourf(self, jX, jY, jZ, *args, **kwargs)
self.add_contourf_set(cset, zdir, offset)
self.auto_scale_xyz(X, Y, Z, had_data)
return cset
contourf3D = contourf
def tricontourf(self, *args, **kwargs):
"""
Create a 3D contourf plot.
========== ================================================
Argument Description
========== ================================================
*X*, *Y*, Data values as numpy.arrays
*Z*
*zdir* The direction to use: x, y or z (default)
*offset* If specified, plot a projection of the contour
lines at this position in a plane normal to zdir
========== ================================================
Other keyword arguments are passed on to
:func:`~matplotlib.axes.Axes.tricontour`
Returns a :class:`~matplotlib.axes.Axes.contour`
.. versionchanged :: 1.3.0
Added support for custom triangulations
EXPERIMENTAL: This method currently produces incorrect output due to a
longstanding bug in 3D PolyCollection rendering.
"""
zdir = kwargs.pop('zdir', 'z')
offset = kwargs.pop('offset', None)
had_data = self.has_data()
tri, args, kwargs = Triangulation.get_from_args_and_kwargs(
*args, **kwargs)
X = tri.x
Y = tri.y
if 'Z' in kwargs:
Z = kwargs.pop('Z')
else:
Z = args[0]
# We do this so Z doesn't get passed as an arg to Axes.tricontourf
args = args[1:]
jX, jY, jZ = art3d.rotate_axes(X, Y, Z, zdir)
tri = Triangulation(jX, jY, tri.triangles, tri.mask)
cset = Axes.tricontourf(self, tri, jZ, *args, **kwargs)
self.add_contourf_set(cset, zdir, offset)
self.auto_scale_xyz(X, Y, Z, had_data)
return cset
def add_collection3d(self, col, zs=0, zdir='z'):
'''
Add a 3D collection object to the plot.
2D collection types are converted to a 3D version by
modifying the object and adding z coordinate information.
Supported are:
- PolyCollection
- LineCollection
- PatchCollection
'''
zvals = np.atleast_1d(zs)
if len(zvals) > 0 :
zsortval = min(zvals)
else :
zsortval = 0 # FIXME: Fairly arbitrary. Is there a better value?
# FIXME: use issubclass() (although, then a 3D collection
# object would also pass.) Maybe have a collection3d
# abstract class to test for and exclude?
if type(col) is mcoll.PolyCollection:
art3d.poly_collection_2d_to_3d(col, zs=zs, zdir=zdir)
col.set_sort_zpos(zsortval)
elif type(col) is mcoll.LineCollection:
art3d.line_collection_2d_to_3d(col, zs=zs, zdir=zdir)
col.set_sort_zpos(zsortval)
elif type(col) is mcoll.PatchCollection:
art3d.patch_collection_2d_to_3d(col, zs=zs, zdir=zdir)
col.set_sort_zpos(zsortval)
Axes.add_collection(self, col)
def scatter(self, xs, ys, zs=0, zdir='z', s=20, c='b', depthshade=True,
*args, **kwargs):
'''
Create a scatter plot.
============ ========================================================
Argument Description
============ ========================================================
*xs*, *ys* Positions of data points.
*zs* Either an array of the same length as *xs* and
*ys* or a single value to place all points in
the same plane. Default is 0.
*zdir* Which direction to use as z ('x', 'y' or 'z')
when plotting a 2D set.
*s* Size in points^2. It is a scalar or an array of the
same length as *x* and *y*.
*c* A color. *c* can be a single color format string, or a
sequence of color specifications of length *N*, or a
sequence of *N* numbers to be mapped to colors using the
*cmap* and *norm* specified via kwargs (see below). Note
that *c* should not be a single numeric RGB or RGBA
sequence because that is indistinguishable from an array
of values to be colormapped. *c* can be a 2-D array in
which the rows are RGB or RGBA, however.
*depthshade*
Whether or not to shade the scatter markers to give
the appearance of depth. Default is *True*.
============ ========================================================
Keyword arguments are passed on to
:func:`~matplotlib.axes.Axes.scatter`.
Returns a :class:`~mpl_toolkits.mplot3d.art3d.Patch3DCollection`
'''
had_data = self.has_data()
xs = np.ma.ravel(xs)
ys = np.ma.ravel(ys)
zs = np.ma.ravel(zs)
if xs.size != ys.size:
raise ValueError("Arguments 'xs' and 'ys' must be of same size.")
if xs.size != zs.size:
if zs.size == 1:
zs = np.tile(zs[0], xs.size)
else:
raise ValueError(("Argument 'zs' must be of same size as 'xs' "
"and 'ys' or of size 1."))
s = np.ma.ravel(s) # This doesn't have to match x, y in size.
cstr = cbook.is_string_like(c) or cbook.is_sequence_of_strings(c)
if not cstr:
c = np.asanyarray(c)
if c.size == xs.size:
c = np.ma.ravel(c)
xs, ys, zs, s, c = cbook.delete_masked_points(xs, ys, zs, s, c)
patches = Axes.scatter(self, xs, ys, s=s, c=c, *args, **kwargs)
if not cbook.iterable(zs):
is_2d = True
zs = np.ones(len(xs)) * zs
else:
is_2d = False
art3d.patch_collection_2d_to_3d(patches, zs=zs, zdir=zdir,
depthshade=depthshade)
if self._zmargin < 0.05 and xs.size > 0:
self.set_zmargin(0.05)
#FIXME: why is this necessary?
if not is_2d:
self.auto_scale_xyz(xs, ys, zs, had_data)
return patches
scatter3D = scatter
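# Illustrative sketch (assumption): markers colored by z value; *depthshade*
# dims markers farther from the viewer.
#
#     import numpy as np
#     xs, ys, zs = np.random.rand(3, 50)
#     ax.scatter(xs, ys, zs, c=zs, s=40, depthshade=True)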
def bar(self, left, height, zs=0, zdir='z', *args, **kwargs):
'''
Add 2D bar(s).
========== ================================================
Argument Description
========== ================================================
*left* The x coordinates of the left sides of the bars.
*height* The height of the bars.
*zs* Z coordinate of bars; if a single value is specified,
all bars are placed at the same z.
*zdir* Which direction to use as z ('x', 'y' or 'z')
when plotting a 2D set.
========== ================================================
Keyword arguments are passed onto :func:`~matplotlib.axes.Axes.bar`.
Returns a :class:`~mpl_toolkits.mplot3d.art3d.Patch3DCollection`
'''
had_data = self.has_data()
patches = Axes.bar(self, left, height, *args, **kwargs)
if not cbook.iterable(zs):
zs = np.ones(len(left)) * zs
verts = []
verts_zs = []
for p, z in zip(patches, zs):
vs = art3d.get_patch_verts(p)
verts += vs.tolist()
verts_zs += [z] * len(vs)
art3d.patch_2d_to_3d(p, z, zdir)
if 'alpha' in kwargs:
p.set_alpha(kwargs['alpha'])
if len(verts) > 0 :
# the following has to be skipped if verts is empty
# NOTE: Bugs could still occur if len(verts) > 0,
# but the "2nd dimension" is empty.
xs, ys = list(zip(*verts))
else :
xs, ys = [], []
xs, ys, verts_zs = art3d.juggle_axes(xs, ys, verts_zs, zdir)
self.auto_scale_xyz(xs, ys, verts_zs, had_data)
return patches
def bar3d(self, x, y, z, dx, dy, dz, color='b',
zsort='average', *args, **kwargs):
'''
Generate a 3D bar, or multiple bars.
When generating multiple bars, x, y, z have to be arrays.
dx, dy, dz can be arrays or scalars.
*color* can be:
- A single color value, to color all bars the same color.
- An array of colors of length N bars, to color each bar
independently.
- An array of colors of length 6, to color the faces of the
bars similarly.
- An array of colors of length 6 * N bars, to color each face
independently.
When coloring the faces of the boxes specifically, this is
the order of the coloring:
1. -Z (bottom of box)
2. +Z (top of box)
3. -Y
4. +Y
5. -X
6. +X
Keyword arguments are passed onto
:func:`~mpl_toolkits.mplot3d.art3d.Poly3DCollection`
'''
had_data = self.has_data()
if not cbook.iterable(x):
x = [x]
if not cbook.iterable(y):
y = [y]
if not cbook.iterable(z):
z = [z]
if not cbook.iterable(dx):
dx = [dx]
if not cbook.iterable(dy):
dy = [dy]
if not cbook.iterable(dz):
dz = [dz]
if len(dx) == 1:
dx = dx * len(x)
if len(dy) == 1:
dy = dy * len(y)
if len(dz) == 1:
dz = dz * len(z)
if len(x) != len(y) or len(x) != len(z):
warnings.warn('x, y, and z must be the same length.')
# FIXME: This is archaic and could be done much better.
minx, miny, minz = 1e20, 1e20, 1e20
maxx, maxy, maxz = -1e20, -1e20, -1e20
polys = []
for xi, yi, zi, dxi, dyi, dzi in zip(x, y, z, dx, dy, dz):
minx = min(xi, minx)
maxx = max(xi + dxi, maxx)
miny = min(yi, miny)
maxy = max(yi + dyi, maxy)
minz = min(zi, minz)
maxz = max(zi + dzi, maxz)
polys.extend([
((xi, yi, zi), (xi + dxi, yi, zi),
(xi + dxi, yi + dyi, zi), (xi, yi + dyi, zi)),
((xi, yi, zi + dzi), (xi + dxi, yi, zi + dzi),
(xi + dxi, yi + dyi, zi + dzi), (xi, yi + dyi, zi + dzi)),
((xi, yi, zi), (xi + dxi, yi, zi),
(xi + dxi, yi, zi + dzi), (xi, yi, zi + dzi)),
((xi, yi + dyi, zi), (xi + dxi, yi + dyi, zi),
(xi + dxi, yi + dyi, zi + dzi), (xi, yi + dyi, zi + dzi)),
((xi, yi, zi), (xi, yi + dyi, zi),
(xi, yi + dyi, zi + dzi), (xi, yi, zi + dzi)),
((xi + dxi, yi, zi), (xi + dxi, yi + dyi, zi),
(xi + dxi, yi + dyi, zi + dzi), (xi + dxi, yi, zi + dzi)),
])
facecolors = []
if color is None:
# no color specified
facecolors = [None] * len(x)
elif len(color) == len(x):
# bar colors specified, need to expand to number of faces
for c in color:
facecolors.extend([c] * 6)
else:
# a single color specified, or face colors specified explicitly
facecolors = list(colorConverter.to_rgba_array(color))
if len(facecolors) < len(x):
facecolors *= (6 * len(x))
normals = self._generate_normals(polys)
sfacecolors = self._shade_colors(facecolors, normals)
col = art3d.Poly3DCollection(polys,
zsort=zsort,
facecolor=sfacecolors,
*args, **kwargs)
self.add_collection(col)
self.auto_scale_xyz((minx, maxx), (miny, maxy), (minz, maxz), had_data)
return col
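# Illustrative sketch (assumption): four boxes with unit footprints anchored
# at z=0, with per-box heights in dz.
#
#     import numpy as np
#     x, y = np.meshgrid([0, 2], [0, 2])
#     x, y = x.ravel(), y.ravel()
#     z = np.zeros(4)
#     ax.bar3d(x, y, z, dx=1, dy=1, dz=[1, 2, 3, 4], color='c')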
def set_title(self, label, fontdict=None, loc='center', **kwargs):
ret = Axes.set_title(self, label, fontdict=fontdict, loc=loc, **kwargs)
(x, y) = self.title.get_position()
self.title.set_y(0.92 * y)
return ret
set_title.__doc__ = maxes.Axes.set_title.__doc__
def quiver(self, *args, **kwargs):
"""
Plot a 3D field of arrows.
call signatures::
quiver(X, Y, Z, U, V, W, **kwargs)
Arguments:
*X*, *Y*, *Z*:
The x, y and z coordinates of the arrow locations (default is
tip of arrow; see *pivot* kwarg)
*U*, *V*, *W*:
The x, y and z components of the arrow vectors
The arguments may be array-like or scalars, so long as they
can be broadcast together. The arguments can also be
masked arrays; if an element of any argument is masked, the
corresponding quiver element will not be plotted.
Keyword arguments:
*length*: [1.0 | float]
The length of each quiver; defaults to 1.0. Units are
the same as the axes.
*arrow_length_ratio*: [0.3 | float]
The ratio of the arrow head length to the shaft length;
defaults to 0.3.
*pivot*: [ 'tail' | 'middle' | 'tip' ]
The part of the arrow that is at the grid point; the arrow
rotates about this point, hence the name *pivot*.
Any additional keyword arguments are delegated to
:class:`~matplotlib.collections.LineCollection`
"""
def calc_arrow(uvw, angle=15):
"""
To calculate the arrow head. uvw should be a unit vector.
"""
# get unit direction vector perpendicular to (u,v,w)
norm = np.linalg.norm(uvw[:2])
if norm > 0:
x = uvw[1] / norm
y = -uvw[0] / norm
else:
x, y = 0, 1
# compute the two arrowhead direction unit vectors
ra = math.radians(angle)
c = math.cos(ra)
s = math.sin(ra)
# construct the rotation matrices
Rpos = np.array([[c+(x**2)*(1-c), x*y*(1-c), y*s],
[y*x*(1-c), c+(y**2)*(1-c), -x*s],
[-y*s, x*s, c]])
# opposite rotation negates everything but the diagonal
Rneg = Rpos * (np.eye(3)*2 - 1)
# multiply them to get the rotated vector
return Rpos.dot(uvw), Rneg.dot(uvw)
had_data = self.has_data()
# handle kwargs
# shaft length
length = kwargs.pop('length', 1)
# arrow length ratio to the shaft length
arrow_length_ratio = kwargs.pop('arrow_length_ratio', 0.3)
# pivot point
pivot = kwargs.pop('pivot', 'tip')
# handle args
argi = 6
if len(args) < argi:
raise ValueError('Wrong number of arguments. Expected %d got %d' %
(argi, len(args)))
# first 6 arguments are X, Y, Z, U, V, W
input_args = args[:argi]
# if any of the args are scalar, convert into list
input_args = [[k] if isinstance(k, (int, float)) else k
for k in input_args]
# extract the masks, if any
masks = [k.mask for k in input_args if isinstance(k, np.ma.MaskedArray)]
# broadcast to match the shape
bcast = np.broadcast_arrays(*(input_args + masks))
input_args = bcast[:argi]
masks = bcast[argi:]
if masks:
# combine the masks into one
mask = reduce(np.logical_or, masks)
# put mask on and compress
input_args = [np.ma.array(k, mask=mask).compressed()
for k in input_args]
else:
input_args = [k.flatten() for k in input_args]
if any(len(v) == 0 for v in input_args):
# No quivers, so just make an empty collection and return early
linec = art3d.Line3DCollection([], *args[argi:], **kwargs)
self.add_collection(linec)
return linec
# Following assertions must be true before proceeding
# must all be ndarray
assert all(isinstance(k, np.ndarray) for k in input_args)
# must all in same shape
assert len(set([k.shape for k in input_args])) == 1
shaft_dt = np.linspace(0, length, num=2)
arrow_dt = shaft_dt * arrow_length_ratio
if pivot == 'tail':
shaft_dt -= length
elif pivot == 'middle':
shaft_dt -= length/2.
elif pivot != 'tip':
raise ValueError('Invalid pivot argument: ' + str(pivot))
XYZ = np.column_stack(input_args[:3])
UVW = np.column_stack(input_args[3:argi]).astype(float)
# Normalize rows of UVW
# Note: with numpy 1.9+, could use np.linalg.norm(UVW, axis=1)
norm = np.sqrt(np.sum(UVW**2, axis=1))
# If any row of UVW is all zeros, don't make a quiver for it
mask = norm > 1e-10
XYZ = XYZ[mask]
UVW = UVW[mask] / norm[mask].reshape((-1, 1))
if len(XYZ) > 0:
# compute the shaft lines all at once with an outer product
shafts = (XYZ - np.multiply.outer(shaft_dt, UVW)).swapaxes(0, 1)
# compute head direction vectors, n heads by 2 sides by 3 dimensions
head_dirs = np.array([calc_arrow(d) for d in UVW])
# compute all head lines at once, starting from where the shaft ends
heads = shafts[:, :1] - np.multiply.outer(arrow_dt, head_dirs)
# stack left and right head lines together
heads.shape = (len(arrow_dt), -1, 3)
# transpose to get a list of lines
heads = heads.swapaxes(0, 1)
lines = list(shafts) + list(heads)
else:
lines = []
linec = art3d.Line3DCollection(lines, *args[argi:], **kwargs)
self.add_collection(linec)
self.auto_scale_xyz(XYZ[:, 0], XYZ[:, 1], XYZ[:, 2], had_data)
return linec
quiver3D = quiver
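# Illustrative sketch (assumption): a small uniform vector field with arrows
# anchored at their tails and shortened shafts.
#
#     import numpy as np
#     X, Y, Z = np.meshgrid(np.arange(0, 1, 0.5), np.arange(0, 1, 0.5),
#                           np.arange(0, 1, 0.5))
#     U, V, W = np.ones_like(X), np.zeros_like(Y), 0.5 * np.ones_like(Z)
#     ax.quiver(X, Y, Z, U, V, W, length=0.3, pivot='tail')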
def get_test_data(delta=0.05):
'''
Return a tuple X, Y, Z with a test data set.
'''
from matplotlib.mlab import bivariate_normal
x = y = np.arange(-3.0, 3.0, delta)
X, Y = np.meshgrid(x, y)
Z1 = bivariate_normal(X, Y, 1.0, 1.0, 0.0, 0.0)
Z2 = bivariate_normal(X, Y, 1.5, 0.5, 1, 1)
Z = Z2 - Z1
X = X * 10
Y = Y * 10
Z = Z * 500
return X, Y, Z
########################################################
# Register Axes3D as a 'projection' object available
# for use just like any other axes
########################################################
import matplotlib.projections as proj
proj.projection_registry.register(Axes3D)
|
{
"content_hash": "76c2800a3000be65d8b155e6cf14b1d7",
"timestamp": "",
"source": "github",
"line_count": 2648,
"max_line_length": 130,
"avg_line_length": 35.09214501510574,
"alnum_prop": 0.5213830657311351,
"repo_name": "rbalda/neural_ocr",
"id": "b1032f45091e97587956f11b78bad38c591edff5",
"size": "93199",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "env/lib/python2.7/site-packages/mpl_toolkits/mplot3d/axes3d.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "497604"
},
{
"name": "C++",
"bytes": "3309990"
},
{
"name": "CSS",
"bytes": "135235"
},
{
"name": "FORTRAN",
"bytes": "10375"
},
{
"name": "HTML",
"bytes": "215390"
},
{
"name": "JavaScript",
"bytes": "206780"
},
{
"name": "Jupyter Notebook",
"bytes": "16254"
},
{
"name": "Makefile",
"bytes": "214"
},
{
"name": "Matlab",
"bytes": "4346"
},
{
"name": "Python",
"bytes": "26980034"
},
{
"name": "Shell",
"bytes": "3895"
}
],
"symlink_target": ""
}
|
'''
This file defines the functions that check Models to their
Meta-Meta-Model.
'''
__author__ = 'William Emfinger'
__copyright__ = 'Copyright 2016, ROSMOD'
__credits__ = ['William Emfinger', 'Pranav Srinivas Kumar']
__license__ = 'GPL'
__version__ = '0.4'
__maintainer__ = 'William Emfinger'
__email__ = 'emfinger@isis.vanderbilt.edu'
__status__ = 'Production'
import meta
def checkModelToMeta(root, meta_dict):
test = False not in [
checkObjectToMeta(c, meta_dict)
for c in root
]
return test
def checkChildrenToMeta(model_dict, meta_dict):
meta_type = meta_dict[model_dict['Type']]
meta_name = meta_type['Attributes']['Name']['Value']
allowed_kids = [
c['Attributes']['Name']['Value']
for c in meta_type['Children']
if 'Cardinality' in c['Attributes']
]
cardinality = {
c['Attributes']['Name']['Value']:
c['Attributes']['Cardinality']['Value']
for c in meta_type['Children']
if 'Cardinality' in c['Attributes']
}
for c in model_dict['Children']:
# make sure the child type exists in the meta-model
if not checkObjectToMeta(c, meta_dict):
print 'ERROR: Child type \'{}\' of {} not in meta-model!'.format(
c['Type'],
meta_name
)
return False
child_meta_type = meta_dict[c['Type']]
child_meta_name = child_meta_type['Attributes']['Name']['Value']
# make sure the child type is allowed
if child_meta_name not in allowed_kids:
print 'ERROR: Child \'{}\' not allowed in {}!'.format(
child_meta_name,
meta_name
)
print '\tAllowed Children:\n\t\t{}'.format(allowed_kids)
return False
# make sure the parent is allowed to have this many kids of this type
child_types = [
meta_dict[c['Type']]['Attributes']['Name']['Value']
for c in model_dict['Children']
]
for kid_type in allowed_kids:
min_num, max_num = meta.getMinMaxCardinality(
cardinality[kid_type]
)
actual = child_types.count(kid_type)
if actual < min_num:
print 'ERROR: must have {} children of type \'{}\' in {}'.format(
min_num,
kid_type,
meta_name
)
return False
if max_num > 0 and actual > max_num:
print 'ERROR: can only have {} children of type \'{}\' in {}'.format(
max_num,
kid_type,
meta_name
)
return False
return True
def checkPointersToMeta(model_dict, meta_dict):
meta_type = meta_dict[model_dict['Type']]
meta_name = meta_type['Attributes']['Name']['Value']
allowed_ptrs = [
p['Attributes']['Name']['Value']
for p in meta_type['Children']
if p['Type'] == 'MetaPointer'
]
for p in model_dict['Pointers']:
# make sure the child type exists in the meta-model
if not checkObjectToMeta(p, meta_dict):
print 'ERROR: Pointer \'{}\' of {} not in meta-model!'.format(
p['Type'],
meta_name
)
return False
ptr_meta_type = meta_dict[p['Type']]
ptr_meta_name = ptr_meta_type['Attributes']['Name']['Value']
# make sure the child type is allowed
if ptr_meta_name not in allowed_ptrs:
print 'ERROR: Pointer type \'{}\' not allowed in {}!'.format(
ptr_meta_name,
meta_name
)
print '\tAllowed Pointers:\n\t\t{}'.format(allowed_ptrs)
return False
return True
def checkAttributesToMeta(model_dict, meta_dict):
meta_type = meta_dict[model_dict['Type']]
meta_name = meta_type['Attributes']['Name']['Value']
allowed_attr = [
a['Attributes']['Name']['Value']
for a in meta_type['Children']
if a['Type'] == 'MetaAttribute'
]
for a in model_dict['Attributes']:
if not checkObjectToMeta(a, meta_dict):
print 'ERROR: Attribute \'{}\' of {} not in meta-model!'.format(
a['Type'],
meta_name
)
return False
attr_meta_type = meta_dict[a['Type']]
attr_meta_name = attr_meta_type['Attributes']['Name']['Value']
if attr_meta_name not in allowed_attr:
print 'ERROR: Attribute type \'{}\' not allowed in {}!'.format(
attr_meta_name,
meta_name
)
print '\tAllowed Attributes:\n\t\t{}'.format(allowed_attr)
return False
return True
def checkObjectToMeta(model_dict, meta_dict):
'''
This function does a first-level depth check of the validity of a
model against its meta-model, contained within model_dict and
meta_dict respectively.
'''
# check that it is a valid type
if model_dict['Type'] not in meta_dict:
print 'ERROR: object type \'{}\' not in metamodel!'.format(
model_dict['Type']
)
return False
# check that it has valid children types and numbers (cardinality)
if 'Children' in model_dict:
test = checkChildrenToMeta(model_dict, meta_dict)
if not test:
return test
# check that it has valid pointer objects
if 'Pointers' in model_dict:
test = checkPointersToMeta(model_dict, meta_dict)
if not test:
return test
# check that it has valid attribute objects
if 'Attributes' in model_dict:
test = checkAttributesToMeta(model_dict, meta_dict)
if not test:
return test
return True
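# Illustrative sketch (assumption, not part of the original module): the
# minimal dictionary shapes these checks expect; all names are hypothetical.
#
#     meta_dict = {
#         'Project': {
#             'Attributes': {'Name': {'Value': 'Project'}},
#             'Children': [
#                 {'Type': 'MetaModel',
#                  'Attributes': {'Name': {'Value': 'Model'},
#                                 'Cardinality': {'Value': '0..*'}}},
#             ],
#         },
#     }
#     model_dict = {'Type': 'Project', 'Children': []}
#     assert checkObjectToMeta(model_dict, meta_dict)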
|
{
"content_hash": "977b872002a7712ece84eb911675a9e4",
"timestamp": "",
"source": "github",
"line_count": 175,
"max_line_length": 81,
"avg_line_length": 32.817142857142855,
"alnum_prop": 0.5559811944976493,
"repo_name": "finger563/editor",
"id": "67c325906f12efd8ca3b51a566d3e0084c8d29cf",
"size": "5766",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/check_meta.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6163"
},
{
"name": "HTML",
"bytes": "87755"
},
{
"name": "Python",
"bytes": "293587"
}
],
"symlink_target": ""
}
|
"""
Gridded data is already aggregated by month,
so we don't have daily gridded data, but this imports into
the same basic table type as if it were daily.
So:
Don't make a daily table.
Record which monthly aggregations are available.
We only use monthly aggregations on the map and chart, so
some statistics won't be available depending on the aggregation of the gridded data.
Rainfall:
SUM -> AVERAGE -> COUNT
We want SUM.
Temp:
We want MAX, MIN.
Or:
record what time_period means in the table,
aggregate month tables into year tables, and
leave daily tables empty so that we just don't get any values to add,
but the table is ready there in case the values ever turn up.
NetCDF data includes units information, so we need to use this to convert the data.
"""
ClimateDataPortal = local_import("ClimateDataPortal")
InsertChunksWithoutCheckingForExistingReadings = local_import(
"ClimateDataPortal.InsertChunksWithoutCheckingForExistingReadings"
).InsertChunksWithoutCheckingForExistingReadings
def get_or_create(dict, key, creator):
try:
value = dict[key]
except KeyError:
value = dict[key] = creator()
return value
def get_or_create_record(table, query):
query_terms = []
for key, value in query.iteritems():
query_terms.append(getattr(table, key) == value)
reduced_query = reduce(
(lambda left, right: left & right),
query_terms
)
records = db(reduced_query).select()
count = len(records)
assert count <= 1, "Multiple records for %s" % query
if count == 0:
record = table.insert(**query)
db.commit()
else:
record = records.first()
return record.id
def nearly(expected_float, actual_float):
difference_ratio = actual_float / expected_float
return 0.999 < abs(difference_ratio) < 1.001
class InsertRowsIfNoConflict(object):
def __init__(self, database_table_name, db):
# NotImplemented is a value, not an exception; raise the proper type.
raise NotImplementedError
# unreachable, kept for reference (referenced an undefined name):
# self.database_table = database_table
def add_reading(
self,
time_period,
place_id,
value
):
database_table = self.database_table
records = db(
(database_table.time_period == time_period) &
(database_table.place_id == place_id)
).select(database_table.value, database_table.id)
count = len(records)
assert count <= 1
if count == 0:
database_table.insert(
time_period = time_period,
place_id = place_id,
value = value
)
else:
existing = records.first()
assert nearly(existing.value, value), (existing.value, value, place_id)
def done(self):
pass
import datetime
def import_climate_readings(
netcdf_file,
field_name,
add_reading,
converter,
start_time = datetime.date(1971,1,1),
is_undefined = lambda x: -99.900003 < x < -99.9
):
"""
Assumptions:
* there are no places
* the data is in order of places
"""
variables = netcdf_file.variables
if field_name == "?":
print ("field_name could be one of %s" % variables.keys())
else:
# create grid of places
place_ids = {}
def to_list(variable):
result = []
for i in range(len(variable)):
result.append(variable[i])
return result
def iter_pairs(list):
for index in range(len(list)):
yield index, list[index]
times = to_list(variables["time"])
lat = to_list(variables["lat"])
lon = to_list(variables["lon"])
for latitude in lat:
for longitude in lon:
record = get_or_create_record(
climate_place,
dict(
longitude = longitude,
latitude = latitude
)
)
place_ids[(latitude, longitude)] = record
#print longitude, latitude, record
try:
tt = variables[field_name]
except KeyError:
raise Exception("Can't find %s in %s" % (field_name, variables.keys()))
else:
print "up to:", len(times)
for time_index, time in iter_pairs(times):
print time_index, "%i%%" % int((time_index*100) / len(times))
time_period = start_time+datetime.timedelta(hours=time)
for latitude_index, latitude in iter_pairs(lat):
for longitude_index, longitude in iter_pairs(lon):
value = tt[time_index][latitude_index][longitude_index]
if not is_undefined(value):
month_number = ClimateDataPortal.rounded_date_to_month_number(time_period)
place_id = place_ids[(latitude, longitude)]
converted_value = converter(value)
add_reading(
time_period = month_number,
place_id = place_id,
value = converted_value
)
add_reading.done()
db.commit()
print
import sys
from Scientific.IO import NetCDF
climate_sample_tables = {}
for database_table_row in db(climate_sample_table_spec).select():
climate_sample_tables[
"%s %s" % (
ClimateDataPortal.sample_types[database_table_row.sample_type_code],
database_table_row.name
)
] = (
ClimateDataPortal.sample_table_id(database_table_row.id),
database_table_row.units
)
def main(argv):
import argparse
import os
styles = {
"quickly": InsertChunksWithoutCheckingForExistingReadings,
# "safely": InsertRowsIfNoConflict
}
parser = argparse.ArgumentParser(
description = "Imports climate data from NetCDF file.",
prog = argv[0],
usage = """
%(prog)s --NetCDF_file path/to/file.nc --parameter_name <parameter> --style <import style> --field_name <field name>
e.g.
python ./run.py %(prog)s --field_name rr --style quickly --parameter_name "Gridded Rainfall mm" --NetCDF_file gridded_rainfall_mm.nc
"""
)
parser.add_argument(
"--NetCDF_file",
required = True,
help="NetCDF file to import."
)
parser.add_argument(
"--parameter_name",
required = True,
choices = climate_sample_tables.keys(),
help="Parameter name, which corresponds to an added table."
)
parser.add_argument(
"--clear_existing_data",
type = bool,
default = False,
help="Truncate database tables first."
)
parser.add_argument(
"--style",
required = True,
choices = styles.keys(),
default = "safely",
help="""
quickly: just insert readings into the database
safely: check that data is not overwritten
"""
)
parser.add_argument(
"--field_name",
required = True,
help="""name of netCDF field that holds the data value
e.g. "tt" or "rr". Type "?", to discover options."""
)
args = parser.parse_args(argv[1:])
sample_table = ClimateDataPortal.SampleTable.with_name(args.parameter_name)
if args.clear_existing_data:
# Only truncate existing readings when explicitly requested.
sample_table.clear()
db.commit()
# Look up the units recorded for this parameter so values can be converted;
# previously `units` was an undefined name here.
_, units = climate_sample_tables[args.parameter_name]
import_climate_readings(
netcdf_file = NetCDF.NetCDFFile(args.NetCDF_file),
field_name = args.field_name,
add_reading = styles[args.style](sample_table),
converter = ClimateDataPortal.units_in_out[units]["in"]
)
if __name__ == "__main__":
import sys
sys.exit(main(sys.argv))
|
{
"content_hash": "91f747d04c019aaaa5ccd84aaeddbe35",
"timestamp": "",
"source": "github",
"line_count": 252,
"max_line_length": 133,
"avg_line_length": 31.03174603174603,
"alnum_prop": 0.5767263427109974,
"repo_name": "flavour/cert",
"id": "d612241519de8d18e1878da8e7f8aaba1b0c63d3",
"size": "7821",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "modules/ClimateDataPortal/import_NetCDF_readings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "13068308"
},
{
"name": "PHP",
"bytes": "15220"
},
{
"name": "Python",
"bytes": "21061411"
},
{
"name": "Shell",
"bytes": "1645"
}
],
"symlink_target": ""
}
|
import sys
sys.path.insert(1,"../../../")
import h2o
from tests import pyunit_utils
def fiftycatGBM():
# Training set has only 45 categories cat1 through cat45
#Log.info("Importing 50_cattest_train.csv data...\n")
train = h2o.import_file(path=pyunit_utils.locate("smalldata/gbm_test/50_cattest_train.csv"))
train["y"] = train["y"].asfactor()
#Log.info("Summary of 50_cattest_train.csv from H2O:\n")
#train.summary()
# Train H2O GBM Model:
#Log.info(paste("H2O GBM with parameters:\nntrees = 10, max_depth = 20, nbins = 20\n", sep = ""))
model = h2o.gbm(x=train[["x1","x2"]], y=train["y"], distribution="bernoulli", ntrees=10, max_depth=5, nbins=20)
model.show()
# Test dataset has all 50 categories cat1 through cat50
#Log.info("Importing 50_cattest_test.csv data...\n")
test = h2o.import_file(path=pyunit_utils.locate("smalldata/gbm_test/50_cattest_test.csv"))
#Log.info("Summary of 50_cattest_test.csv from H2O:\n")
#test.summary()
# Predict on test dataset with GBM model:
#Log.info("Performing predictions on test dataset...\n")
predictions = model.predict(test)
predictions.show()
# Get the confusion matrix and AUC
#Log.info("Confusion matrix of predictions (max accuracy):\n")
performance = model.model_performance(test)
test_cm = performance.confusion_matrix()
test_auc = performance.auc()
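    # The confusion matrix and AUC are computed for inspection only; this
    # deprecated test passes as long as the calls above run without error.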
if __name__ == "__main__":
pyunit_utils.standalone_test(fiftycatGBM)
else:
fiftycatGBM()
|
{
"content_hash": "677e4702f2732de61af86aff631a7baf",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 113,
"avg_line_length": 30.5625,
"alnum_prop": 0.6843899113837764,
"repo_name": "madmax983/h2o-3",
"id": "b4878d958abe42ee06b899bdc8ec0da7389f93d1",
"size": "1467",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "h2o-py/tests/testdir_algos/gbm/pyunit_DEPRECATED_fiftycatGBM.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "5090"
},
{
"name": "CSS",
"bytes": "162402"
},
{
"name": "CoffeeScript",
"bytes": "262107"
},
{
"name": "Emacs Lisp",
"bytes": "8927"
},
{
"name": "HTML",
"bytes": "139398"
},
{
"name": "Java",
"bytes": "5770492"
},
{
"name": "JavaScript",
"bytes": "38932"
},
{
"name": "Makefile",
"bytes": "34048"
},
{
"name": "Python",
"bytes": "2721983"
},
{
"name": "R",
"bytes": "1611237"
},
{
"name": "Rebol",
"bytes": "7059"
},
{
"name": "Ruby",
"bytes": "3506"
},
{
"name": "Scala",
"bytes": "22834"
},
{
"name": "Shell",
"bytes": "46382"
},
{
"name": "TeX",
"bytes": "535732"
}
],
"symlink_target": ""
}
|
class Solution(object):
def wordPattern(self, pattern, strs):
"""
:type pattern: str
:type str: strs
:rtype: bool
"""
if not pattern and not strs:
return True
strlist=strs.split(" ")
if len(strlist)!=len(pattern):
return False
        # charmap[i] holds the word bound to letter chr(i + 97), or None if unbound
charmap=[None]*26
plist=list(pattern)
while len(plist):
string,ch=strlist.pop(),plist.pop()
# get the index
index=ord(ch)-97
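            # reject when this letter is already bound to a different word,
            # or when this word is already bound to a different letter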
if charmap[index]!=string and charmap[index]:
return False
elif charmap[index]!=string and string in charmap:
return False
elif string not in charmap:
charmap[index]=string
return True
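# Example (hypothetical usage): Solution().wordPattern("abba", "dog cat cat dog")
# returns True, while Solution().wordPattern("abba", "dog cat cat fish") is False.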
|
{
"content_hash": "d96d3884dd50b8df2b2d37ccac4edf84",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 62,
"avg_line_length": 28.666666666666668,
"alnum_prop": 0.4755813953488372,
"repo_name": "Tanych/CodeTracking",
"id": "d8cff43f74d5110e9f754da90d549719e16ebaca",
"size": "860",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "290-Word-Pattern/solution.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1723"
},
{
"name": "C++",
"bytes": "1024"
},
{
"name": "Java",
"bytes": "1261"
},
{
"name": "Python",
"bytes": "409211"
}
],
"symlink_target": ""
}
|
"""
Client side of the cert manager RPC API.
"""
from nova import flags
import nova.rpc.proxy
FLAGS = flags.FLAGS
class CertAPI(nova.rpc.proxy.RpcProxy):
'''Client side of the cert rpc API.
API version history:
1.0 - Initial version.
'''
RPC_API_VERSION = '1.0'
def __init__(self):
super(CertAPI, self).__init__(topic=FLAGS.cert_topic,
default_version=self.RPC_API_VERSION)
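    # Each method below performs a synchronous RPC: make_msg() builds the
    # message payload and call() sends it to the cert topic, returning the
    # remote result.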
def revoke_certs_by_user(self, ctxt, user_id):
return self.call(ctxt, self.make_msg('revoke_certs_by_user',
user_id=user_id))
def revoke_certs_by_project(self, ctxt, project_id):
return self.call(ctxt, self.make_msg('revoke_certs_by_project',
project_id=project_id))
def revoke_certs_by_user_and_project(self, ctxt, user_id, project_id):
return self.call(ctxt,
self.make_msg('revoke_certs_by_user_and_project',
user_id=user_id, project_id=project_id))
def generate_x509_cert(self, ctxt, user_id, project_id):
return self.call(ctxt, self.make_msg('generate_x509_cert',
user_id=user_id,
project_id=project_id))
def fetch_ca(self, ctxt, project_id):
return self.call(ctxt, self.make_msg('fetch_ca',
project_id=project_id))
def fetch_crl(self, ctxt, project_id):
return self.call(ctxt, self.make_msg('fetch_crl',
project_id=project_id))
def decrypt_text(self, ctxt, project_id, text):
return self.call(ctxt, self.make_msg('decrypt_text',
project_id=project_id,
text=text))
|
{
"content_hash": "093186d1f5ebe684f6a6138d23629dd9",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 75,
"avg_line_length": 34.96363636363636,
"alnum_prop": 0.514820592823713,
"repo_name": "usc-isi/extra-specs",
"id": "d062026da4b7890dd763664399f11c28a68cdb9a",
"size": "2575",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nova/cert/rpcapi.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "7403"
},
{
"name": "Python",
"bytes": "5961999"
},
{
"name": "Shell",
"bytes": "26160"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
from rospkg.os_detect import OS_CYGWIN
from .source import SOURCE_INSTALLER
from ..installers import PackageManagerInstaller
from ..shell_utils import read_stdout
APT_CYG_INSTALLER = 'apt-cyg'
def register_installers(context):
context.set_installer(APT_CYG_INSTALLER, AptCygInstaller())
def register_platforms(context):
context.add_os_installer_key(OS_CYGWIN, SOURCE_INSTALLER)
context.add_os_installer_key(OS_CYGWIN, APT_CYG_INSTALLER)
context.set_default_os_installer_key(OS_CYGWIN, lambda self: APT_CYG_INSTALLER)
def cygcheck_detect_single(p):
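    # `cygcheck -c <package>` prints a status table for the package; an 'OK'
    # entry indicates it is installed.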
std_out = read_stdout(['cygcheck', '-c', p])
return std_out.count('OK') > 0
def cygcheck_detect(packages):
return [p for p in packages if cygcheck_detect_single(p)]
class AptCygInstaller(PackageManagerInstaller):
"""
An implementation of the :class:`Installer` for use on
cygwin-style systems.
"""
def __init__(self):
super(AptCygInstaller, self).__init__(cygcheck_detect)
self.as_root = False
self.sudo_command = 'cygstart --action=runas'
def get_install_command(self, resolved, interactive=True, reinstall=False, quiet=False):
packages = self.get_packages_to_install(resolved, reinstall=reinstall)
# TODO: interactive
if not packages:
return []
else:
return [self.elevate_priv(['apt-cyg', '-m', 'ftp://sourceware.org/pub/cygwinports', 'install']) + packages]
if __name__ == '__main__':
    # cygcheck_detect expects a list of package names; a bare string would be
    # iterated character by character
    print('test cygcheck_detect(true)', cygcheck_detect(['cygwin']))
|
{
"content_hash": "7c4faae98b515793bc29b086cfe9f471",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 119,
"avg_line_length": 30.0188679245283,
"alnum_prop": 0.6851037083595223,
"repo_name": "ros-infrastructure/rosdep",
"id": "2808f1dc36a937a8f1d0a1f4a3f0e49a76eb3b4b",
"size": "3202",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/rosdep2/platforms/cygwin.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "466"
},
{
"name": "Python",
"bytes": "503079"
},
{
"name": "Shell",
"bytes": "296"
}
],
"symlink_target": ""
}
|
"""
This code was generated by Codezu.
Changes to this file may cause incorrect behavior and will be lost if
the code is regenerated.
"""
from mozurestsdk.mozuclient import default as default_client
from mozurestsdk.mozuurl import MozuUrl;
from mozurestsdk.urllocation import UrlLocation
from mozurestsdk.apicontext import ApiContext;
class Facet(object):
def __init__(self, apiContext: ApiContext = None, mozuClient = None):
self.client = mozuClient or default_client();
if (apiContext is not None):
self.client.withApiContext(apiContext);
else:
self.client.withApiContext(ApiContext());
def getFacet(self,facetId, validate = False, responseFields = None):
""" Retrieves a facet specified by its unique identifier and displays its properties.
Args:
| facetId (int) - Unique identifier of the facet to retrieve.
| validate (bool) - Validates that the product category associated with a facet is active. System-supplied and read only.
| responseFields (string) - Use this field to include those fields which are not included by default.
Returns:
| Facet
Raises:
| ApiException
"""
url = MozuUrl("/api/commerce/catalog/admin/facets/{facetId}?validate={validate}&responseFields={responseFields}", "GET", UrlLocation.TenantPod, False);
url.formatUrl("facetId", facetId);
url.formatUrl("responseFields", responseFields);
url.formatUrl("validate", validate);
self.client.withResourceUrl(url).execute();
return self.client.result();
def getFacetCategoryList(self,categoryId, includeAvailable = False, validate = False, responseFields = None):
""" Retrieves a list of the facets defined for the specified category.
Args:
| categoryId (int) - Unique identifier of the category to modify.
| includeAvailable (bool) - If true, returns a list of the attributes and categories associated with a product type that have not been defined as a facet for the category.
| validate (bool) - Validates that the product category associated with a facet is active. System-supplied and read only.
| responseFields (string) - Use this field to include those fields which are not included by default.
Returns:
| FacetSet
Raises:
| ApiException
"""
url = MozuUrl("/api/commerce/catalog/admin/facets/category/{categoryId}?includeAvailable={includeAvailable}&validate={validate}&responseFields={responseFields}", "GET", UrlLocation.TenantPod, False);
url.formatUrl("categoryId", categoryId);
url.formatUrl("includeAvailable", includeAvailable);
url.formatUrl("responseFields", responseFields);
url.formatUrl("validate", validate);
self.client.withResourceUrl(url).execute();
return self.client.result();
def addFacet(self,facet, responseFields = None):
""" Creates a new category, price, or attribute facet. Define the category or attribute source to use for the facet values.
Args:
| facet(facet) - Properties of the facet used to retrieve documents.
| responseFields (string) - Use this field to include those fields which are not included by default.
Returns:
| Facet
Raises:
| ApiException
"""
url = MozuUrl("/api/commerce/catalog/admin/facets/?responseFields={responseFields}", "POST", UrlLocation.TenantPod, False);
url.formatUrl("responseFields", responseFields);
self.client.withResourceUrl(url).withBody(facet).execute();
return self.client.result();
def updateFacet(self,facet, facetId, responseFields = None):
""" Modifies one or more properties of a defined facet.
Args:
| facet(facet) - Properties of the facet used to retrieve documents.
| facetId (int) - Unique identifier of the facet to retrieve.
| responseFields (string) - Use this field to include those fields which are not included by default.
Returns:
| Facet
Raises:
| ApiException
"""
url = MozuUrl("/api/commerce/catalog/admin/facets/{facetId}?responseFields={responseFields}", "PUT", UrlLocation.TenantPod, False);
url.formatUrl("facetId", facetId);
url.formatUrl("responseFields", responseFields);
self.client.withResourceUrl(url).withBody(facet).execute();
return self.client.result();
def deleteFacetById(self,facetId):
""" Deletes the facet specified by its unique identifier.
Args:
| facetId (int) - Unique identifier of the facet to retrieve.
Raises:
| ApiException
"""
url = MozuUrl("/api/commerce/catalog/admin/facets/{facetId}", "DELETE", UrlLocation.TenantPod, False);
url.formatUrl("facetId", facetId);
self.client.withResourceUrl(url).execute();
|
{
"content_hash": "d03ec109fded8b9adc74da7efd54dff4",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 201,
"avg_line_length": 33.627737226277375,
"alnum_prop": 0.7271543303668331,
"repo_name": "sanjaymandadi/mozu-python-sdk",
"id": "9b75922f54de63df2fb4feb3d5fb13a53a023d7c",
"size": "4608",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mozurestsdk/commerce/catalog/admin/facet.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "649189"
}
],
"symlink_target": ""
}
|
import subprocess
import serial
#------------------------------------------------------------------------------
# Bluetooth stack
#------------------------------------------------------------------------------
#Run a System command and return its output lines as tuple
def RunCommand(command):
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
return p.communicate()
#Run a Command in a Pipe
def RunCommandPipe(command1, command2):
p1 = subprocess.Popen(command1, stdout=subprocess.PIPE)
p2 = subprocess.Popen(command2, stdin=p1.stdout, stdout=subprocess.PIPE)
p1.stdout.close()
return p2.communicate()
#List Bluetooth Devices
def BTList():
res = RunCommand(['sudo', 'hcitool', 'scan'])
lines = res[0].split("\n")
ret = {}
for line in lines:
data = line.strip().split('\t')
if len(data) > 1:
ret[data[1]] = data[0]
return ret
#Pair Device
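# PIN derivation, worked through from the arithmetic below:
#   LAMP1   -> 1234 + 1*1111 = 2345,  LAMP2   -> 1234 + 2*1111 = 3456
#   SENSOR1 -> 1234 + 1*1010 = 2244,  SENSOR2 -> 1234 + 2*1010 = 3254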
def PairDevice(device, address):
    basen = 1234
    code = 1234
    if device.startswith("LAMP"):
        index = int(device.replace("LAMP", ""))
        code = basen + index * 1111
    elif device.startswith("SENSOR"):
        index = int(device.replace("SENSOR", ""))
        code = basen + index * 1010
    return RunCommandPipe(['echo', str(code)], ['sudo', 'bluez-simple-agent', 'hci0', address])
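# Port numbering (from the offsets below): LAMP<n> -> /dev/rfcomm<n+10>,
# SENSOR<n> -> /dev/rfcomm<n+20>, e.g. LAMP1 -> /dev/rfcomm11.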
def GetPortFromDevice(device):
    if device.startswith("LAMP"):
        index = int(device.replace("LAMP", ""))
        code = index + 10
    elif device.startswith("SENSOR"):
        index = int(device.replace("SENSOR", ""))
        code = index + 20
    else:
        # previously fell through with `code` unbound, raising NameError
        raise ValueError("Unknown device type: %s" % device)
    return "/dev/rfcomm" + str(code)
#Connect to serial device
def BTConnect(device, address):
    devfile = GetPortFromDevice(device)
    RunCommand(['sudo', 'rfcomm', 'bind', devfile, address])
#Disconnect from serial device
def BTDisconnect(device, address):
    devfile = GetPortFromDevice(device)
    RunCommand(['sudo', 'rfcomm', 'unbind', devfile, address])
#------------------------------------------------------------------------------
# Serial stack
#------------------------------------------------------------------------------
def SendLampData(device,pwmvalue):
try:
port = serial.Serial(GetPortFromDevice(device), baudrate=115200, timeout=4)
port.write(str(pwmvalue)+"\n")
val = port.readline()
port.close()
    except (serial.SerialException, OSError):
        # best-effort write; ignore I/O failures
        pass
def ReadSensor(device):
    try:
        port = serial.Serial(GetPortFromDevice(device), baudrate=115200, timeout=4)
        port.write("L\n")
        val = port.readline()
        port.close()
        # int() is inside the try so a timeout/garbled reply also returns -1
        return int(val)
    except (serial.SerialException, OSError, ValueError):
        return -1
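# A typical flow (sketch, assuming the device names advertised by BTList):
#   devices = BTList()                      # {"LAMP1": "00:11:22:33:44:55", ...}
#   PairDevice("LAMP1", devices["LAMP1"])   # agent pairing with the derived PIN
#   BTConnect("LAMP1", devices["LAMP1"])    # bind /dev/rfcomm11
#   SendLampData("LAMP1", 128)              # send a PWM value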
|
{
"content_hash": "03915fb332f1ec775170e09e4f7e994f",
"timestamp": "",
"source": "github",
"line_count": 82,
"max_line_length": 92,
"avg_line_length": 29.5,
"alnum_prop": 0.6081025217031831,
"repo_name": "webmaster442/prog-elektonikak",
"id": "32d3a203157dea377da4f4500144f3a8378fee15",
"size": "2618",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Kodok/Vegyes/02/functions.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "32729"
},
{
"name": "C++",
"bytes": "45561"
},
{
"name": "HTML",
"bytes": "1130"
},
{
"name": "PHP",
"bytes": "2083"
},
{
"name": "Python",
"bytes": "11772"
},
{
"name": "Shell",
"bytes": "6978"
},
{
"name": "TSQL",
"bytes": "1440"
},
{
"name": "Verilog",
"bytes": "7874"
}
],
"symlink_target": ""
}
|
from setuptools import setup, find_packages
import twocode
import os
ROOT = os.path.abspath(os.path.dirname(__file__))
codebase_files = [
os.path.abspath(os.path.join(root, file))[len(ROOT):].lstrip(os.path.sep)
for root, dirs, files in os.walk(os.path.join(ROOT, "code")) for file in files]
codebase_files = [(os.path.join("twocode", os.path.dirname(file)).replace(os.path.sep, "/"), [file.replace(os.path.sep, "/")]) for file in codebase_files]
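# codebase_files now holds (target_dir, [source_file]) pairs in the data_files
# format, mirroring the code/ tree under the installed twocode/ package.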
setup(
name = "Twocode",
version = twocode.__version__,
    packages = find_packages(exclude=["tests"]),
# REASON: without, it installs an accessible "tests" module
data_files = codebase_files,
    # REASON:
    # MANIFEST.in has no effect here.
    # "code" can't be added as a package because it has no __init__.py;
    # it has to stay hidden - twocode.code just needs to exist.
    # When installing from git, twocode/code/__init__.py gets deleted(?).
    # package_data uses glob, whose **/* only recurses one level.
    # The files are listed manually because nothing else works.
    # It turns out "pip install git+" bypasses setuptools; easy_install works.
entry_points = {
"console_scripts": [
"twocode = twocode:main",
],
},
install_requires = [],
include_package_data = True,
test_suite = "tests",
tests_require = ["pytest", "pytest-runner"],
extras_require = {
"testing": ["pytest"],
},
author = "Ondřej Műller",
author_email = "devcoft@gmail.com",
description = "A language designed for code generation. Load a codebase in an interpreter, edit and create classes and functions, then translate it into the target language.",
license = "MIT",
url = "http://github.com/MrCoft/twocode",
)
|
{
"content_hash": "8b153122047b87b31c762fba5b4c26e6",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 179,
"avg_line_length": 39.44444444444444,
"alnum_prop": 0.6405633802816901,
"repo_name": "MrCoft/twocode",
"id": "abaab2f313f8af2e604bea03d79098f9dd5375d8",
"size": "1802",
"binary": false,
"copies": "1",
"ref": "refs/heads/development",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "439119"
},
{
"name": "Roff",
"bytes": "9936"
}
],
"symlink_target": ""
}
|
"""
==============================================================
Compute spatial resolution metrics to compare MEG with EEG+MEG
==============================================================
Compute peak localisation error and spatial deviation for the point-spread
functions of dSPM and MNE. Plot their distributions and difference of
distributions. This example mimics some results from :footcite:`HaukEtAl2019`,
namely Figure 3 (peak localisation error for PSFs, L2-MNE vs dSPM) and Figure 4
(spatial deviation for PSFs, L2-MNE vs dSPM). It shows that combining MEG with
EEG reduces the point-spread function and increases the spatial resolution of
source imaging, especially for deeper sources.
"""
# Author: Olaf Hauk <olaf.hauk@mrc-cbu.cam.ac.uk>
#
# License: BSD (3-clause)
import mne
from mne.datasets import sample
from mne.minimum_norm.resolution_matrix import make_inverse_resolution_matrix
from mne.minimum_norm.spatial_resolution import resolution_metrics
print(__doc__)
data_path = sample.data_path()
subjects_dir = data_path + '/subjects/'
fname_fwd_emeg = data_path + '/MEG/sample/sample_audvis-meg-eeg-oct-6-fwd.fif'
fname_cov = data_path + '/MEG/sample/sample_audvis-cov.fif'
fname_evo = data_path + '/MEG/sample/sample_audvis-ave.fif'
# read forward solution with EEG and MEG
forward_emeg = mne.read_forward_solution(fname_fwd_emeg)
# forward operator with fixed source orientations
forward_emeg = mne.convert_forward_solution(forward_emeg, surf_ori=True,
force_fixed=True)
# create a forward solution with MEG only
forward_meg = mne.pick_types_forward(forward_emeg, meg=True, eeg=False)
# noise covariance matrix
noise_cov = mne.read_cov(fname_cov)
# evoked data for info
evoked = mne.read_evokeds(fname_evo, 0)
# make inverse operator from forward solution for MEG and EEGMEG
inv_emeg = mne.minimum_norm.make_inverse_operator(
info=evoked.info, forward=forward_emeg, noise_cov=noise_cov, loose=0.,
depth=None)
inv_meg = mne.minimum_norm.make_inverse_operator(
info=evoked.info, forward=forward_meg, noise_cov=noise_cov, loose=0.,
depth=None)
# regularisation parameter
snr = 3.0
lambda2 = 1.0 / snr ** 2
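# snr = 3.0 gives lambda2 = 1/9, i.e. roughly 0.111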
###############################################################################
# EEGMEG
# ------
# Compute resolution matrices, localization error, and spatial deviations
# for MNE:
rm_emeg = make_inverse_resolution_matrix(forward_emeg, inv_emeg,
method='MNE', lambda2=lambda2)
ple_psf_emeg = resolution_metrics(rm_emeg, inv_emeg['src'],
function='psf', metric='peak_err')
sd_psf_emeg = resolution_metrics(rm_emeg, inv_emeg['src'],
function='psf', metric='sd_ext')
del rm_emeg
###############################################################################
# MEG
# ---
# Do the same for MEG:
rm_meg = make_inverse_resolution_matrix(forward_meg, inv_meg,
method='MNE', lambda2=lambda2)
ple_psf_meg = resolution_metrics(rm_meg, inv_meg['src'],
function='psf', metric='peak_err')
sd_psf_meg = resolution_metrics(rm_meg, inv_meg['src'],
function='psf', metric='sd_ext')
del rm_meg
###############################################################################
# Visualization
# -------------
# Look at peak localisation error (PLE) across the whole cortex for PSF:
brain_ple_emeg = ple_psf_emeg.plot('sample', 'inflated', 'lh',
subjects_dir=subjects_dir, figure=1,
clim=dict(kind='value', lims=(0, 2, 4)))
brain_ple_emeg.add_text(0.1, 0.9, 'PLE PSF EMEG', 'title', font_size=16)
###############################################################################
# For MEG only:
brain_ple_meg = ple_psf_meg.plot('sample', 'inflated', 'lh',
subjects_dir=subjects_dir, figure=2,
clim=dict(kind='value', lims=(0, 2, 4)))
brain_ple_meg.add_text(0.1, 0.9, 'PLE PSF MEG', 'title', font_size=16)
###############################################################################
# Subtract the two distributions and plot this difference:
diff_ple = ple_psf_emeg - ple_psf_meg
brain_ple_diff = diff_ple.plot('sample', 'inflated', 'lh',
subjects_dir=subjects_dir, figure=3,
clim=dict(kind='value', pos_lims=(0., .5, 1.)),
smoothing_steps=20)
brain_ple_diff.add_text(0.1, 0.9, 'PLE EMEG-MEG', 'title', font_size=16)
###############################################################################
# These plots show that with respect to peak localization error, adding EEG to
# MEG does not bring much benefit. Next let's visualise spatial deviation (SD)
# across the whole cortex for PSF:
brain_sd_emeg = sd_psf_emeg.plot('sample', 'inflated', 'lh',
subjects_dir=subjects_dir, figure=4,
clim=dict(kind='value', lims=(0, 2, 4)))
brain_sd_emeg.add_text(0.1, 0.9, 'SD PSF EMEG', 'title', font_size=16)
###############################################################################
# For MEG only:
brain_sd_meg = sd_psf_meg.plot('sample', 'inflated', 'lh',
subjects_dir=subjects_dir, figure=5,
clim=dict(kind='value', lims=(0, 2, 4)))
brain_sd_meg.add_text(0.1, 0.9, 'SD PSF MEG', 'title', font_size=16)
###############################################################################
# Subtract the two distributions and plot this difference:
diff_sd = sd_psf_emeg - sd_psf_meg
brain_sd_diff = diff_sd.plot('sample', 'inflated', 'lh',
subjects_dir=subjects_dir, figure=6,
clim=dict(kind='value', pos_lims=(0., .5, 1.)),
smoothing_steps=20)
brain_sd_diff.add_text(0.1, 0.9, 'SD EMEG-MEG', 'title', font_size=16)
###############################################################################
# Adding EEG to MEG decreases the spatial extent of point-spread
# functions (lower spatial deviation, blue colors), thus increasing
# resolution, especially for deeper source locations.
#
# References
# ----------
# .. footbibliography::
|
{
"content_hash": "22c213927ea4fe8acf18497a08d46965",
"timestamp": "",
"source": "github",
"line_count": 157,
"max_line_length": 79,
"avg_line_length": 40.68789808917197,
"alnum_prop": 0.5464934251721979,
"repo_name": "mne-tools/mne-tools.github.io",
"id": "3a7c90050af7622d2943d466f967205e7aeba936",
"size": "6388",
"binary": false,
"copies": "5",
"ref": "refs/heads/main",
"path": "0.22/_downloads/aba37b912b38dacc0234d24bd164a568/plot_resolution_metrics_eegmeg.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "708696"
},
{
"name": "Dockerfile",
"bytes": "1820"
},
{
"name": "HTML",
"bytes": "1526247783"
},
{
"name": "JavaScript",
"bytes": "1323087"
},
{
"name": "Jupyter Notebook",
"bytes": "24820047"
},
{
"name": "Python",
"bytes": "18575494"
}
],
"symlink_target": ""
}
|
from typing import TYPE_CHECKING, List
from django.utils.translation import pgettext_lazy
from saleor.extensions import ConfigurationTypeField
from saleor.extensions.base_plugin import BasePlugin
from . import (
GatewayConfig,
authorize,
capture,
get_client_token,
list_client_sources,
process_payment,
refund,
void,
)
GATEWAY_NAME = "Braintree"
if TYPE_CHECKING:
    from . import GatewayResponse, PaymentData, TokenConfig
    # CustomerSource is used in an annotation below; assumed to live in the
    # shared payment interface module.
    from ...interface import CustomerSource
def require_active_plugin(fn):
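    """Skip the wrapped gateway method and return the chained ``previous_value``
    unless the plugin is configured and active."""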
def wrapped(self, *args, **kwargs):
previous = kwargs.get("previous_value", None)
self._initialize_plugin_configuration()
if not self.active:
return previous
return fn(self, *args, **kwargs)
return wrapped
class BraintreeGatewayPlugin(BasePlugin):
PLUGIN_NAME = GATEWAY_NAME
CONFIG_STRUCTURE = {
"Public API key": {
"type": ConfigurationTypeField.SECRET,
"help_text": pgettext_lazy(
"Plugin help text", "Provide Braintree public API key"
),
"label": pgettext_lazy("Plugin label", "Public API key"),
},
"Secret API key": {
"type": ConfigurationTypeField.SECRET,
"help_text": pgettext_lazy(
"Plugin help text", "Provide Braintree secret API key"
),
"label": pgettext_lazy("Plugin label", "Secret API key"),
},
"Merchant ID": {
"type": ConfigurationTypeField.SECRET,
"help_text": pgettext_lazy(
"Plugin help text", "Provide Braintree merchant ID"
),
"label": pgettext_lazy("Plugin label", "Merchant ID"),
},
"Use sandbox": {
"type": ConfigurationTypeField.BOOLEAN,
"help_text": pgettext_lazy(
"Plugin help text",
"Determines if Saleor should use Braintree sandbox API.",
),
"label": pgettext_lazy("Plugin label", "Use sandbox"),
},
"Store customers card": {
"type": ConfigurationTypeField.BOOLEAN,
"help_text": pgettext_lazy(
"Plugin help text",
"Determines if Saleor should store cards on payments"
" in Braintree customer.",
),
"label": pgettext_lazy("Plugin label", "Store customers card"),
},
"Automatic payment capture": {
"type": ConfigurationTypeField.BOOLEAN,
"help_text": pgettext_lazy(
"Plugin help text",
"Determines if Saleor should automaticaly capture payments.",
),
"label": pgettext_lazy("Plugin label", "Automatic payment capture"),
},
"Require 3D secure": {
"type": ConfigurationTypeField.BOOLEAN,
"help_text": pgettext_lazy(
"Plugin help text",
"Determines if Saleor should enforce 3D secure during payment.",
),
"label": pgettext_lazy("Plugin label", "Require 3D secure"),
},
}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.config = GatewayConfig(
gateway_name=GATEWAY_NAME, auto_capture=True, connection_params={}
)
def _initialize_plugin_configuration(self):
super()._initialize_plugin_configuration()
if self._cached_config and self._cached_config.configuration:
configuration = self._cached_config.configuration
configuration = {item["name"]: item["value"] for item in configuration}
self.config = GatewayConfig(
gateway_name=GATEWAY_NAME,
auto_capture=configuration["Automatic payment capture"],
connection_params={
"sandbox_mode": configuration["Use sandbox"],
"merchant_id": configuration["Merchant ID"],
"public_key": configuration["Public API key"],
"private_key": configuration["Secret API key"],
},
store_customer=configuration["Store customers card"],
require_3d_secure=configuration["Require 3D secure"],
)
@classmethod
def _get_default_configuration(cls):
defaults = {
"name": cls.PLUGIN_NAME,
"description": "",
"active": False,
"configuration": [
{"name": "Public API key", "value": None},
{"name": "Secret API key", "value": None},
{"name": "Use sandbox", "value": True},
{"name": "Merchant ID", "value": None},
{"name": "Store customers card", "value": False},
{"name": "Automatic payment capture", "value": True},
{"name": "Require 3D secure", "value": False},
],
}
return defaults
def _get_gateway_config(self) -> GatewayConfig:
return self.config
@require_active_plugin
def authorize_payment(
self, payment_information: "PaymentData", previous_value
) -> "GatewayResponse":
return authorize(payment_information, self._get_gateway_config())
@require_active_plugin
def capture_payment(
self, payment_information: "PaymentData", previous_value
) -> "GatewayResponse":
return capture(payment_information, self._get_gateway_config())
@require_active_plugin
def refund_payment(
self, payment_information: "PaymentData", previous_value
) -> "GatewayResponse":
return refund(payment_information, self._get_gateway_config())
@require_active_plugin
def void_payment(
self, payment_information: "PaymentData", previous_value
) -> "GatewayResponse":
return void(payment_information, self._get_gateway_config())
@require_active_plugin
def process_payment(
self, payment_information: "PaymentData", previous_value
) -> "GatewayResponse":
return process_payment(payment_information, self._get_gateway_config())
@require_active_plugin
def list_payment_sources(
self, customer_id: str, previous_value
) -> List["CustomerSource"]:
sources = list_client_sources(self._get_gateway_config(), customer_id)
previous_value.extend(sources)
return previous_value
@require_active_plugin
def get_client_token(self, token_config: "TokenConfig", previous_value):
return get_client_token(self._get_gateway_config(), token_config)
@require_active_plugin
def get_payment_config(self, previous_value):
config = self._get_gateway_config()
return [
{"field": "store_customer_card", "value": config.store_customer},
{"field": "client_token", "value": get_client_token(config=config)},
]
|
{
"content_hash": "f8c3cc39a332ad042b555800bf69d58a",
"timestamp": "",
"source": "github",
"line_count": 190,
"max_line_length": 83,
"avg_line_length": 36.257894736842104,
"alnum_prop": 0.5819422267382784,
"repo_name": "maferelo/saleor",
"id": "3393b73aec32c390daf3619f16a5db494ef2ccc6",
"size": "6889",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "saleor/payment/gateways/braintree/plugin.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "64217"
},
{
"name": "HTML",
"bytes": "394723"
},
{
"name": "JavaScript",
"bytes": "61157"
},
{
"name": "Python",
"bytes": "585270"
}
],
"symlink_target": ""
}
|
from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
from forum_app.models import Profile, Category, Thread, Post, Conversation, Pm
class CategoryForm(forms.ModelForm):
image = forms.FileField(required=False)
slug = forms.CharField(widget=forms.HiddenInput(), required=False)
class Meta:
model = Category
fields = ('name', 'image')
class ThreadForm(forms.ModelForm):
slug = forms.CharField(widget=forms.HiddenInput(), required=False)
class Meta:
model = Thread
fields = ('name',)
def __init__(self, *args, **kwargs):
super(ThreadForm, self).__init__(*args, **kwargs)
self.fields['name'].widget.attrs.update(
{'style':"width:100%"})
class PostForm(forms.ModelForm):
text = forms.CharField(widget=forms.Textarea, required=True)
class Meta:
model = Post
fields = ('text',)
class PmForm(forms.ModelForm):
text = forms.CharField(widget=forms.Textarea, required=True)
class Meta:
model = Pm
fields = ('text',)
class UserForm(UserCreationForm):
email = forms.EmailField(required=True, widget=forms.EmailInput(attrs=
{'placeholder':'enter email'}))
class Meta:
model = User
fields = ('username', 'email', 'password1', 'password2')
    # add attributes to the HTML input elements
def __init__(self, *args, **kwargs):
super(UserForm, self).__init__(*args, **kwargs)
self.fields['username'].widget.attrs.update(
{'placeholder':'enter username'})
self.fields['email'].widget.attrs.update(
{'placeholder':'enter email'})
self.fields['password1'].widget.attrs.update(
{'placeholder':'enter password'})
self.fields['password2'].widget.attrs.update(
{'placeholder':'confirm password'})
def save(self, commit=True):
user = super(UserForm, self).save(commit=False)
        user.email = self.cleaned_data['email']  # already validated by EmailField
if commit:
user.save()
return user
class ProfileForm(forms.ModelForm):
picture = forms.ImageField(required=False)
class Meta:
model = Profile
fields = ('picture',)
class ContactForm(forms.Form):
message = forms.CharField(widget=forms.Textarea, required=True)
email = forms.EmailField(required=False, widget=forms.EmailInput(attrs=
{'placeholder':'reply email (optional)'}))
def __init__(self, *args, **kwargs):
super(ContactForm, self).__init__(*args, **kwargs)
self.fields['message'].widget.attrs.update(
{'placeholder':'message',
'rows':'15',
'cols':'75'})
|
{
"content_hash": "6f800d1a20ef22b91326abd450f5c5cc",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 78,
"avg_line_length": 34.34567901234568,
"alnum_prop": 0.6232925952552121,
"repo_name": "JD-666/wwsc",
"id": "e010555232ff463339e343073d302e5d5f1efd00",
"size": "2782",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "forum_app/forms.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "3204"
},
{
"name": "HTML",
"bytes": "34636"
},
{
"name": "JavaScript",
"bytes": "1767"
},
{
"name": "Python",
"bytes": "52350"
}
],
"symlink_target": ""
}
|
from formularios.models import Usuario, Club, Country
from formularios.serializer import UsuarioSerializer, CountrySerializer, ClubSerializer
from rest_framework import generics
from rest_framework import permissions
class UserList(generics.ListCreateAPIView):
"""
APIView where the forms of the app are listed and a new form can be added.
"""
model = Usuario
queryset = Usuario.objects.all()
serializer_class = UsuarioSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
class UserDetail(generics.RetrieveUpdateDestroyAPIView):
"""
APIView to see details, modify or delete a form.
"""
queryset = Usuario.objects.all()
serializer_class = UsuarioSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
class ClubList(generics.ListCreateAPIView):
"""
APIView where the forms of the app are listed and a new form can be added.
"""
model = Club
queryset = Club.objects.all()
serializer_class = ClubSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
class ClubDetail(generics.RetrieveUpdateDestroyAPIView):
"""
APIView to see details, modify or delete a form.
"""
queryset = Club.objects.all()
serializer_class = ClubSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
class CountryList(generics.ListCreateAPIView):
"""
APIView where the forms of the app are listed and a new form can be added.
"""
model = Country
queryset = Country.objects.all()
serializer_class = CountrySerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
class CountryDetail(generics.RetrieveUpdateDestroyAPIView):
"""
APIView to see details, modify or delete a form.
"""
queryset = Country.objects.all()
serializer_class = CountrySerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
|
{
"content_hash": "65dbfa85031213d6a56ca28b41ae0a0c",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 87,
"avg_line_length": 31.96923076923077,
"alnum_prop": 0.7396535129932628,
"repo_name": "trea-uy/django-survey-sample",
"id": "70e059bb8fa2bab00db92c6997f05c08cd583e3a",
"size": "2078",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sampleSurveys/formularios/views.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "917"
},
{
"name": "Python",
"bytes": "52359"
}
],
"symlink_target": ""
}
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'User'
db.create_table(u'custom_auth_user', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('password', self.gf('django.db.models.fields.CharField')(max_length=128)),
('last_login', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
('is_superuser', self.gf('django.db.models.fields.BooleanField')(default=False)),
('username', self.gf('django.db.models.fields.CharField')(unique=True, max_length=200)),
('first_name', self.gf('django.db.models.fields.CharField')(max_length=30, blank=True)),
('last_name', self.gf('django.db.models.fields.CharField')(max_length=30, blank=True)),
('email', self.gf('django.db.models.fields.EmailField')(max_length=75, blank=True)),
('is_staff', self.gf('django.db.models.fields.BooleanField')(default=False)),
('is_active', self.gf('django.db.models.fields.BooleanField')(default=True)),
('date_joined', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
))
db.send_create_signal('custom_auth', ['User'])
# Adding M2M table for field groups on 'User'
m2m_table_name = db.shorten_name(u'custom_auth_user_groups')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('user', models.ForeignKey(orm['custom_auth.user'], null=False)),
('group', models.ForeignKey(orm[u'auth.group'], null=False))
))
db.create_unique(m2m_table_name, ['user_id', 'group_id'])
# Adding M2M table for field user_permissions on 'User'
m2m_table_name = db.shorten_name(u'custom_auth_user_user_permissions')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('user', models.ForeignKey(orm['custom_auth.user'], null=False)),
('permission', models.ForeignKey(orm[u'auth.permission'], null=False))
))
db.create_unique(m2m_table_name, ['user_id', 'permission_id'])
def backwards(self, orm):
# Deleting model 'User'
db.delete_table(u'custom_auth_user')
# Removing M2M table for field groups on 'User'
db.delete_table(db.shorten_name(u'custom_auth_user_groups'))
# Removing M2M table for field user_permissions on 'User'
db.delete_table(db.shorten_name(u'custom_auth_user_user_permissions'))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'custom_auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'})
}
}
complete_apps = ['custom_auth']
|
{
"content_hash": "dd341404f91ce66c77cc9c8a94233514",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 187,
"avg_line_length": 62.56842105263158,
"alnum_prop": 0.5883243606998654,
"repo_name": "stfc/cvmfs-stratum-uploader",
"id": "3900659d1dc9d7844b2f1315468da62ee9d7cec5",
"size": "5968",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "uploader/custom_auth/migrations/0001_initial.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "11776"
},
{
"name": "CoffeeScript",
"bytes": "4956"
},
{
"name": "JavaScript",
"bytes": "258688"
},
{
"name": "Python",
"bytes": "96445"
},
{
"name": "Ruby",
"bytes": "2735"
}
],
"symlink_target": ""
}
|
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/faction_perk/faction_base_item/shared_alarm_hack_no_sound.iff"
result.attribute_template_id = -1
result.stfName("faction_perk","alarm_hack_n")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
{
"content_hash": "3085dcec566e8bf3e5b17e3ad4ba872e",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 98,
"avg_line_length": 25.692307692307693,
"alnum_prop": 0.7035928143712575,
"repo_name": "obi-two/Rebelion",
"id": "11378d990c4d9b8cbd6624245eae4848c85ceaa1",
"size": "479",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "data/scripts/templates/object/tangible/faction_perk/faction_base_item/shared_alarm_hack_no_sound.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11818"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2293610"
},
{
"name": "CMake",
"bytes": "39727"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7499185"
},
{
"name": "SQLPL",
"bytes": "41864"
}
],
"symlink_target": ""
}
|
"""Tools for spectral analysis.
"""
from __future__ import division, print_function, absolute_import
import numpy as np
from scipy import fftpack
from . import signaltools
from .windows import get_window
from ._spectral import lombscargle
import warnings
from scipy._lib.six import string_types
__all__ = ['periodogram', 'welch', 'lombscargle', 'csd', 'coherence',
'spectrogram']
def periodogram(x, fs=1.0, window=None, nfft=None, detrend='constant',
return_onesided=True, scaling='density', axis=-1):
"""
Estimate power spectral density using a periodogram.
Parameters
----------
x : array_like
Time series of measurement values
fs : float, optional
Sampling frequency of the `x` time series. Defaults to 1.0.
window : str or tuple or array_like, optional
Desired window to use. See `get_window` for a list of windows and
required parameters. If `window` is an array it will be used
directly as the window. Defaults to None; equivalent to 'boxcar'.
nfft : int, optional
Length of the FFT used. If None the length of `x` will be used.
detrend : str or function or False, optional
Specifies how to detrend `x` prior to computing the spectrum. If
`detrend` is a string, it is passed as the ``type`` argument to
`detrend`. If it is a function, it should return a detrended array.
If `detrend` is False, no detrending is done. Defaults to 'constant'.
return_onesided : bool, optional
If True, return a one-sided spectrum for real data. If False return
a two-sided spectrum. Note that for complex data, a two-sided
spectrum is always returned.
scaling : { 'density', 'spectrum' }, optional
Selects between computing the power spectral density ('density')
where `Pxx` has units of V**2/Hz and computing the power spectrum
('spectrum') where `Pxx` has units of V**2, if `x` is measured in V
and fs is measured in Hz. Defaults to 'density'
axis : int, optional
Axis along which the periodogram is computed; the default is over
the last axis (i.e. ``axis=-1``).
Returns
-------
f : ndarray
Array of sample frequencies.
Pxx : ndarray
Power spectral density or power spectrum of `x`.
Notes
-----
.. versionadded:: 0.12.0
See Also
--------
welch: Estimate power spectral density using Welch's method
lombscargle: Lomb-Scargle periodogram for unevenly sampled data
Examples
--------
>>> from scipy import signal
>>> import matplotlib.pyplot as plt
>>> np.random.seed(1234)
Generate a test signal, a 2 Vrms sine wave at 1234 Hz, corrupted by
0.001 V**2/Hz of white noise sampled at 10 kHz.
>>> fs = 10e3
>>> N = 1e5
>>> amp = 2*np.sqrt(2)
>>> freq = 1234.0
>>> noise_power = 0.001 * fs / 2
>>> time = np.arange(N) / fs
>>> x = amp*np.sin(2*np.pi*freq*time)
>>> x += np.random.normal(scale=np.sqrt(noise_power), size=time.shape)
Compute and plot the power spectral density.
>>> f, Pxx_den = signal.periodogram(x, fs)
>>> plt.semilogy(f, Pxx_den)
>>> plt.ylim([1e-7, 1e2])
>>> plt.xlabel('frequency [Hz]')
>>> plt.ylabel('PSD [V**2/Hz]')
>>> plt.show()
If we average the last half of the spectral density, to exclude the
peak, we can recover the noise power on the signal.
>>> np.mean(Pxx_den[256:])
0.0018156616014838548
Now compute and plot the power spectrum.
>>> f, Pxx_spec = signal.periodogram(x, fs, 'flattop', scaling='spectrum')
>>> plt.figure()
>>> plt.semilogy(f, np.sqrt(Pxx_spec))
>>> plt.ylim([1e-4, 1e1])
>>> plt.xlabel('frequency [Hz]')
>>> plt.ylabel('Linear spectrum [V RMS]')
>>> plt.show()
The peak height in the power spectrum is an estimate of the RMS amplitude.
>>> np.sqrt(Pxx_spec.max())
2.0077340678640727
"""
x = np.asarray(x)
if x.size == 0:
return np.empty(x.shape), np.empty(x.shape)
if window is None:
window = 'boxcar'
if nfft is None:
nperseg = x.shape[axis]
elif nfft == x.shape[axis]:
nperseg = nfft
elif nfft > x.shape[axis]:
nperseg = x.shape[axis]
elif nfft < x.shape[axis]:
        s = [np.s_[:]]*len(x.shape)
        s[axis] = np.s_[:nfft]
        # index with a tuple: indexing an ndarray with a list is deprecated
        x = x[tuple(s)]
nperseg = nfft
nfft = None
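        # nfft < len(x): truncate the data to nfft samples and let welch
        # default the FFT length back to nperseg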
return welch(x, fs, window, nperseg, 0, nfft, detrend, return_onesided,
scaling, axis)
def welch(x, fs=1.0, window='hann', nperseg=256, noverlap=None, nfft=None,
detrend='constant', return_onesided=True, scaling='density', axis=-1):
"""
Estimate power spectral density using Welch's method.
Welch's method [1]_ computes an estimate of the power spectral density
by dividing the data into overlapping segments, computing a modified
periodogram for each segment and averaging the periodograms.
Parameters
----------
x : array_like
Time series of measurement values
fs : float, optional
Sampling frequency of the `x` time series. Defaults to 1.0.
window : str or tuple or array_like, optional
Desired window to use. See `get_window` for a list of windows and
required parameters. If `window` is array_like it will be used
directly as the window and its length will be used for nperseg.
Defaults to 'hann'.
nperseg : int, optional
Length of each segment. Defaults to 256.
noverlap : int, optional
Number of points to overlap between segments. If None,
``noverlap = nperseg // 2``. Defaults to None.
nfft : int, optional
Length of the FFT used, if a zero padded FFT is desired. If None,
the FFT length is `nperseg`. Defaults to None.
detrend : str or function or False, optional
Specifies how to detrend each segment. If `detrend` is a string,
it is passed as the ``type`` argument to `detrend`. If it is a
function, it takes a segment and returns a detrended segment.
If `detrend` is False, no detrending is done. Defaults to 'constant'.
return_onesided : bool, optional
If True, return a one-sided spectrum for real data. If False return
a two-sided spectrum. Note that for complex data, a two-sided
spectrum is always returned.
scaling : { 'density', 'spectrum' }, optional
Selects between computing the power spectral density ('density')
where `Pxx` has units of V**2/Hz and computing the power spectrum
('spectrum') where `Pxx` has units of V**2, if `x` is measured in V
and fs is measured in Hz. Defaults to 'density'
axis : int, optional
Axis along which the periodogram is computed; the default is over
the last axis (i.e. ``axis=-1``).
Returns
-------
f : ndarray
Array of sample frequencies.
Pxx : ndarray
Power spectral density or power spectrum of x.
See Also
--------
periodogram: Simple, optionally modified periodogram
lombscargle: Lomb-Scargle periodogram for unevenly sampled data
Notes
-----
An appropriate amount of overlap will depend on the choice of window
and on your requirements. For the default 'hann' window an
overlap of 50% is a reasonable trade off between accurately estimating
the signal power, while not over counting any of the data. Narrower
windows may require a larger overlap.
If `noverlap` is 0, this method is equivalent to Bartlett's method [2]_.
.. versionadded:: 0.12.0
References
----------
.. [1] P. Welch, "The use of the fast Fourier transform for the
estimation of power spectra: A method based on time averaging
over short, modified periodograms", IEEE Trans. Audio
Electroacoust. vol. 15, pp. 70-73, 1967.
.. [2] M.S. Bartlett, "Periodogram Analysis and Continuous Spectra",
Biometrika, vol. 37, pp. 1-16, 1950.
Examples
--------
>>> from scipy import signal
>>> import matplotlib.pyplot as plt
>>> np.random.seed(1234)
Generate a test signal, a 2 Vrms sine wave at 1234 Hz, corrupted by
0.001 V**2/Hz of white noise sampled at 10 kHz.
>>> fs = 10e3
>>> N = 1e5
>>> amp = 2*np.sqrt(2)
>>> freq = 1234.0
>>> noise_power = 0.001 * fs / 2
>>> time = np.arange(N) / fs
>>> x = amp*np.sin(2*np.pi*freq*time)
>>> x += np.random.normal(scale=np.sqrt(noise_power), size=time.shape)
Compute and plot the power spectral density.
>>> f, Pxx_den = signal.welch(x, fs, nperseg=1024)
>>> plt.semilogy(f, Pxx_den)
>>> plt.ylim([0.5e-3, 1])
>>> plt.xlabel('frequency [Hz]')
>>> plt.ylabel('PSD [V**2/Hz]')
>>> plt.show()
If we average the last half of the spectral density, to exclude the
peak, we can recover the noise power on the signal.
>>> np.mean(Pxx_den[256:])
0.0009924865443739191
Now compute and plot the power spectrum.
>>> f, Pxx_spec = signal.welch(x, fs, 'flattop', 1024, scaling='spectrum')
>>> plt.figure()
>>> plt.semilogy(f, np.sqrt(Pxx_spec))
>>> plt.xlabel('frequency [Hz]')
>>> plt.ylabel('Linear spectrum [V RMS]')
>>> plt.show()
The peak height in the power spectrum is an estimate of the RMS amplitude.
>>> np.sqrt(Pxx_spec.max())
2.0077340678640727
"""
freqs, Pxx = csd(x, x, fs, window, nperseg, noverlap, nfft, detrend,
return_onesided, scaling, axis)
return freqs, Pxx.real
def csd(x, y, fs=1.0, window='hann', nperseg=256, noverlap=None, nfft=None,
detrend='constant', return_onesided=True, scaling='density', axis=-1):
"""
Estimate the cross power spectral density, Pxy, using Welch's method.
Parameters
----------
x : array_like
Time series of measurement values
y : array_like
Time series of measurement values
fs : float, optional
Sampling frequency of the `x` and `y` time series. Defaults to 1.0.
window : str or tuple or array_like, optional
Desired window to use. See `get_window` for a list of windows and
required parameters. If `window` is array_like it will be used
directly as the window and its length will be used for nperseg.
Defaults to 'hann'.
nperseg : int, optional
Length of each segment. Defaults to 256.
    noverlap : int, optional
Number of points to overlap between segments. If None,
``noverlap = nperseg // 2``. Defaults to None.
nfft : int, optional
Length of the FFT used, if a zero padded FFT is desired. If None,
the FFT length is `nperseg`. Defaults to None.
detrend : str or function or False, optional
Specifies how to detrend each segment. If `detrend` is a string,
it is passed as the ``type`` argument to `detrend`. If it is a
function, it takes a segment and returns a detrended segment.
If `detrend` is False, no detrending is done. Defaults to 'constant'.
return_onesided : bool, optional
If True, return a one-sided spectrum for real data. If False return
a two-sided spectrum. Note that for complex data, a two-sided
spectrum is always returned.
scaling : { 'density', 'spectrum' }, optional
Selects between computing the cross spectral density ('density')
where `Pxy` has units of V**2/Hz and computing the cross spectrum
('spectrum') where `Pxy` has units of V**2, if `x` and `y` are
measured in V and fs is measured in Hz. Defaults to 'density'
axis : int, optional
Axis along which the CSD is computed for both inputs; the default is
over the last axis (i.e. ``axis=-1``).
Returns
-------
f : ndarray
Array of sample frequencies.
Pxy : ndarray
Cross spectral density or cross power spectrum of x,y.
See Also
--------
periodogram: Simple, optionally modified periodogram
lombscargle: Lomb-Scargle periodogram for unevenly sampled data
welch: Power spectral density by Welch's method. [Equivalent to csd(x,x)]
coherence: Magnitude squared coherence by Welch's method.
    Notes
    -----
By convention, Pxy is computed with the conjugate FFT of X multiplied by
the FFT of Y.
If the input series differ in length, the shorter series will be
zero-padded to match.
An appropriate amount of overlap will depend on the choice of window
and on your requirements. For the default 'hann' window an
    overlap of 50% is a reasonable trade off between accurately estimating
the signal power, while not over counting any of the data. Narrower
windows may require a larger overlap.
.. versionadded:: 0.16.0
References
----------
.. [1] P. Welch, "The use of the fast Fourier transform for the
estimation of power spectra: A method based on time averaging
over short, modified periodograms", IEEE Trans. Audio
Electroacoust. vol. 15, pp. 70-73, 1967.
.. [2] Rabiner, Lawrence R., and B. Gold. "Theory and Application of
Digital Signal Processing" Prentice-Hall, pp. 414-419, 1975
Examples
--------
>>> from scipy import signal
>>> import matplotlib.pyplot as plt
Generate two test signals with some common features.
>>> fs = 10e3
>>> N = 1e5
>>> amp = 20
>>> freq = 1234.0
>>> noise_power = 0.001 * fs / 2
>>> time = np.arange(N) / fs
>>> b, a = signal.butter(2, 0.25, 'low')
>>> x = np.random.normal(scale=np.sqrt(noise_power), size=time.shape)
>>> y = signal.lfilter(b, a, x)
>>> x += amp*np.sin(2*np.pi*freq*time)
>>> y += np.random.normal(scale=0.1*np.sqrt(noise_power), size=time.shape)
Compute and plot the magnitude of the cross spectral density.
>>> f, Pxy = signal.csd(x, y, fs, nperseg=1024)
>>> plt.semilogy(f, np.abs(Pxy))
>>> plt.xlabel('frequency [Hz]')
>>> plt.ylabel('CSD [V**2/Hz]')
>>> plt.show()
"""
freqs, _, Pxy = _spectral_helper(x, y, fs, window, nperseg, noverlap, nfft,
detrend, return_onesided, scaling, axis,
mode='psd')
# Average over windows.
if len(Pxy.shape) >= 2 and Pxy.size > 0:
if Pxy.shape[-1] > 1:
Pxy = Pxy.mean(axis=-1)
else:
Pxy = np.reshape(Pxy, Pxy.shape[:-1])
return freqs, Pxy
def spectrogram(x, fs=1.0, window=('tukey',.25), nperseg=256, noverlap=None,
nfft=None, detrend='constant', return_onesided=True,
scaling='density', axis=-1, mode='psd'):
"""
Compute a spectrogram with consecutive Fourier transforms.
Spectrograms can be used as a way of visualizing the change of a
nonstationary signal's frequency content over time.
Parameters
----------
x : array_like
Time series of measurement values
fs : float, optional
Sampling frequency of the `x` time series. Defaults to 1.0.
window : str or tuple or array_like, optional
Desired window to use. See `get_window` for a list of windows and
required parameters. If `window` is array_like it will be used
directly as the window and its length will be used for nperseg.
Defaults to a Tukey window with shape parameter of 0.25.
nperseg : int, optional
Length of each segment. Defaults to 256.
noverlap : int, optional
Number of points to overlap between segments. If None,
``noverlap = nperseg // 8``. Defaults to None.
nfft : int, optional
Length of the FFT used, if a zero padded FFT is desired. If None,
the FFT length is `nperseg`. Defaults to None.
detrend : str or function or False, optional
Specifies how to detrend each segment. If `detrend` is a string,
it is passed as the ``type`` argument to `detrend`. If it is a
function, it takes a segment and returns a detrended segment.
If `detrend` is False, no detrending is done. Defaults to 'constant'.
return_onesided : bool, optional
If True, return a one-sided spectrum for real data. If False return
a two-sided spectrum. Note that for complex data, a two-sided
spectrum is always returned.
scaling : { 'density', 'spectrum' }, optional
Selects between computing the power spectral density ('density')
where `Pxx` has units of V**2/Hz and computing the power spectrum
('spectrum') where `Pxx` has units of V**2, if `x` is measured in V
        and fs is measured in Hz. Defaults to 'density'.
axis : int, optional
Axis along which the spectrogram is computed; the default is over
the last axis (i.e. ``axis=-1``).
mode : str, optional
Defines what kind of return values are expected. Options are ['psd',
'complex', 'magnitude', 'angle', 'phase'].
Returns
-------
f : ndarray
Array of sample frequencies.
t : ndarray
Array of segment times.
Sxx : ndarray
Spectrogram of x. By default, the last axis of Sxx corresponds to the
segment times.
See Also
--------
periodogram: Simple, optionally modified periodogram
lombscargle: Lomb-Scargle periodogram for unevenly sampled data
welch: Power spectral density by Welch's method.
csd: Cross spectral density by Welch's method.
Notes
-----
An appropriate amount of overlap will depend on the choice of window
    and on your requirements. In contrast to Welch's method, where the entire
data stream is averaged over, one may wish to use a smaller overlap (or
perhaps none at all) when computing a spectrogram, to maintain some
statistical independence between individual segments.
.. versionadded:: 0.16.0
References
----------
.. [1] Oppenheim, Alan V., Ronald W. Schafer, John R. Buck "Discrete-Time
Signal Processing", Prentice Hall, 1999.
Examples
--------
>>> from scipy import signal
>>> import matplotlib.pyplot as plt
Generate a test signal, a 2 Vrms sine wave whose frequency linearly changes
with time from 1kHz to 2kHz, corrupted by 0.001 V**2/Hz of white noise
sampled at 10 kHz.
>>> fs = 10e3
>>> N = 1e5
>>> amp = 2 * np.sqrt(2)
>>> noise_power = 0.001 * fs / 2
>>> time = np.arange(N) / fs
>>> freq = np.linspace(1e3, 2e3, N)
>>> x = amp * np.sin(2*np.pi*freq*time)
>>> x += np.random.normal(scale=np.sqrt(noise_power), size=time.shape)
Compute and plot the spectrogram.
>>> f, t, Sxx = signal.spectrogram(x, fs)
>>> plt.pcolormesh(t, f, Sxx)
>>> plt.ylabel('Frequency [Hz]')
>>> plt.xlabel('Time [sec]')
>>> plt.show()
"""
    # Less overlap than in Welch's method, so segments are more statistically
    # independent
if noverlap is None:
noverlap = nperseg // 8
freqs, time, Pxy = _spectral_helper(x, x, fs, window, nperseg, noverlap,
nfft, detrend, return_onesided, scaling,
axis, mode=mode)
return freqs, time, Pxy
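# --- Editor's sketch (not part of the original module) -----------------------
# Shape bookkeeping for the defaults above: with nperseg=256 and
# noverlap = nperseg // 8 = 32, segments step by 224 samples, giving
# (len(x) - noverlap) // 224 time bins and nperseg // 2 + 1 one-sided
# frequency bins for real input. The helper name below is hypothetical.
def _sketch_spectrogram_shapes():
    x = np.random.randn(10000)
    f, t, Sxx = spectrogram(x, fs=1.0)
    assert f.shape == (256 // 2 + 1,)
    assert t.shape == ((x.shape[0] - 32) // (256 - 32),)
    assert Sxx.shape == (f.shape[0], t.shape[0])
# ------------------------------------------------------------------------------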
def coherence(x, y, fs=1.0, window='hann', nperseg=256, noverlap=None,
nfft=None, detrend='constant', axis=-1):
"""
    Estimate the magnitude squared coherence, Cxy, of discrete-time
signals X and Y using Welch's method.
Cxy = abs(Pxy)**2/(Pxx*Pyy), where Pxx and Pyy are power spectral density
estimates of X and Y, and Pxy is the cross spectral density estimate of X
and Y.
Parameters
----------
x : array_like
Time series of measurement values
y : array_like
Time series of measurement values
fs : float, optional
Sampling frequency of the `x` and `y` time series. Defaults to 1.0.
window : str or tuple or array_like, optional
Desired window to use. See `get_window` for a list of windows and
required parameters. If `window` is array_like it will be used
directly as the window and its length will be used for nperseg.
Defaults to 'hann'.
nperseg : int, optional
Length of each segment. Defaults to 256.
    noverlap : int, optional
Number of points to overlap between segments. If None,
``noverlap = nperseg // 2``. Defaults to None.
nfft : int, optional
Length of the FFT used, if a zero padded FFT is desired. If None,
the FFT length is `nperseg`. Defaults to None.
detrend : str or function or False, optional
Specifies how to detrend each segment. If `detrend` is a string,
it is passed as the ``type`` argument to `detrend`. If it is a
function, it takes a segment and returns a detrended segment.
If `detrend` is False, no detrending is done. Defaults to 'constant'.
axis : int, optional
Axis along which the coherence is computed for both inputs; the default is
over the last axis (i.e. ``axis=-1``).
Returns
-------
f : ndarray
Array of sample frequencies.
Cxy : ndarray
Magnitude squared coherence of x and y.
See Also
--------
periodogram: Simple, optionally modified periodogram
lombscargle: Lomb-Scargle periodogram for unevenly sampled data
welch: Power spectral density by Welch's method.
csd: Cross spectral density by Welch's method.
    Notes
    -----
    An appropriate amount of overlap will depend on the choice of window
    and on your requirements. For the default 'hann' window an overlap of
    50% is a reasonable trade-off between accurately estimating the signal
    power and not over-counting any of the data. Narrower windows may
    require a larger overlap.
.. versionadded:: 0.16.0
References
----------
.. [1] P. Welch, "The use of the fast Fourier transform for the
estimation of power spectra: A method based on time averaging
over short, modified periodograms", IEEE Trans. Audio
Electroacoust. vol. 15, pp. 70-73, 1967.
.. [2] Stoica, Petre, and Randolph Moses, "Spectral Analysis of Signals"
Prentice Hall, 2005
Examples
--------
>>> from scipy import signal
>>> import matplotlib.pyplot as plt
Generate two test signals with some common features.
>>> fs = 10e3
>>> N = 1e5
>>> amp = 20
>>> freq = 1234.0
>>> noise_power = 0.001 * fs / 2
>>> time = np.arange(N) / fs
>>> b, a = signal.butter(2, 0.25, 'low')
>>> x = np.random.normal(scale=np.sqrt(noise_power), size=time.shape)
>>> y = signal.lfilter(b, a, x)
>>> x += amp*np.sin(2*np.pi*freq*time)
>>> y += np.random.normal(scale=0.1*np.sqrt(noise_power), size=time.shape)
Compute and plot the coherence.
>>> f, Cxy = signal.coherence(x, y, fs, nperseg=1024)
>>> plt.semilogy(f, Cxy)
>>> plt.xlabel('frequency [Hz]')
>>> plt.ylabel('Coherence')
>>> plt.show()
"""
freqs, Pxx = welch(x, fs, window, nperseg, noverlap, nfft, detrend,
axis=axis)
_, Pyy = welch(y, fs, window, nperseg, noverlap, nfft, detrend, axis=axis)
_, Pxy = csd(x, y, fs, window, nperseg, noverlap, nfft, detrend, axis=axis)
Cxy = np.abs(Pxy)**2 / Pxx / Pyy
return freqs, Cxy
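# --- Editor's sketch (not part of the original module) -----------------------
# Two properties implied by Cxy = |Pxy|**2 / (Pxx * Pyy): a signal is fully
# coherent with itself (Cxy == 1 at every frequency), and the Cauchy-Schwarz
# inequality keeps any estimate within [0, 1] when the same segments and
# window are used for Pxx, Pyy and Pxy. The helper name below is hypothetical.
def _sketch_coherence_properties():
    x = np.random.randn(2048)
    y = np.random.randn(2048)
    _, Cxx = coherence(x, x, nperseg=256)
    assert np.allclose(Cxx, 1.0)
    _, Cxy = coherence(x, y, nperseg=256)
    assert np.all((Cxy >= 0) & (Cxy <= 1 + 1e-9))
# ------------------------------------------------------------------------------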
def _spectral_helper(x, y, fs=1.0, window='hann', nperseg=256,
noverlap=None, nfft=None, detrend='constant',
return_onesided=True, scaling='spectrum', axis=-1,
mode='psd'):
"""
Calculate various forms of windowed FFTs for PSD, CSD, etc.
This is a helper function that implements the commonality between the
psd, csd, and spectrogram functions. It is not designed to be called
externally. The windows are not averaged over; the result from each window
is returned.
Parameters
    ----------
x : array_like
Array or sequence containing the data to be analyzed.
y : array_like
Array or sequence containing the data to be analyzed. If this is
        the same object in memory as x (i.e. _spectral_helper(x, x, ...)),
the extra computations are spared.
fs : float, optional
Sampling frequency of the time series. Defaults to 1.0.
window : str or tuple or array_like, optional
Desired window to use. See `get_window` for a list of windows and
required parameters. If `window` is array_like it will be used
directly as the window and its length will be used for nperseg.
Defaults to 'hann'.
nperseg : int, optional
Length of each segment. Defaults to 256.
noverlap : int, optional
Number of points to overlap between segments. If None,
``noverlap = nperseg // 2``. Defaults to None.
nfft : int, optional
Length of the FFT used, if a zero padded FFT is desired. If None,
the FFT length is `nperseg`. Defaults to None.
detrend : str or function or False, optional
Specifies how to detrend each segment. If `detrend` is a string,
it is passed as the ``type`` argument to `detrend`. If it is a
function, it takes a segment and returns a detrended segment.
If `detrend` is False, no detrending is done. Defaults to 'constant'.
return_onesided : bool, optional
        If True, return a one-sided spectrum for real data. If False, return
a two-sided spectrum. Note that for complex data, a two-sided
spectrum is always returned.
scaling : { 'density', 'spectrum' }, optional
Selects between computing the cross spectral density ('density')
where `Pxy` has units of V**2/Hz and computing the cross spectrum
('spectrum') where `Pxy` has units of V**2, if `x` and `y` are
        measured in V and fs is measured in Hz. Defaults to 'spectrum'.
axis : int, optional
Axis along which the periodogram is computed; the default is over
the last axis (i.e. ``axis=-1``).
mode : str, optional
Defines what kind of return values are expected. Options are ['psd',
'complex', 'magnitude', 'angle', 'phase'].
Returns
-------
freqs : ndarray
Array of sample frequencies.
t : ndarray
Array of times corresponding to each data segment
result : ndarray
        Array of output data, contents dependent on the *mode* kwarg.
References
----------
.. [1] Stack Overflow, "Rolling window for 1D arrays in Numpy?",
http://stackoverflow.com/a/6811241
.. [2] Stack Overflow, "Using strides for an efficient moving average
filter", http://stackoverflow.com/a/4947453
Notes
-----
Adapted from matplotlib.mlab
.. versionadded:: 0.16.0
"""
    if mode not in ['psd', 'complex', 'magnitude', 'angle', 'phase']:
        raise ValueError("Unknown value for mode %s, must be one of: "
                         "'psd', 'complex', 'magnitude', 'angle', 'phase'"
                         % mode)
# If x and y are the same object we can save ourselves some computation.
same_data = y is x
if not same_data and mode != 'psd':
raise ValueError("x and y must be equal if mode is not 'psd'")
axis = int(axis)
# Ensure we have np.arrays, get outdtype
x = np.asarray(x)
if not same_data:
y = np.asarray(y)
outdtype = np.result_type(x,y,np.complex64)
else:
outdtype = np.result_type(x,np.complex64)
if not same_data:
# Check if we can broadcast the outer axes together
xouter = list(x.shape)
youter = list(y.shape)
xouter.pop(axis)
youter.pop(axis)
try:
outershape = np.broadcast(np.empty(xouter), np.empty(youter)).shape
except ValueError:
raise ValueError('x and y cannot be broadcast together.')
if same_data:
if x.size == 0:
return np.empty(x.shape), np.empty(x.shape), np.empty(x.shape)
else:
if x.size == 0 or y.size == 0:
outshape = outershape + (min([x.shape[axis], y.shape[axis]]),)
emptyout = np.rollaxis(np.empty(outshape), -1, axis)
return emptyout, emptyout, emptyout
if x.ndim > 1:
if axis != -1:
x = np.rollaxis(x, axis, len(x.shape))
if not same_data and y.ndim > 1:
y = np.rollaxis(y, axis, len(y.shape))
    # Check if x and y are the same length, zero-pad if necessary
if not same_data:
if x.shape[-1] != y.shape[-1]:
if x.shape[-1] < y.shape[-1]:
pad_shape = list(x.shape)
pad_shape[-1] = y.shape[-1] - x.shape[-1]
x = np.concatenate((x, np.zeros(pad_shape)), -1)
else:
pad_shape = list(y.shape)
pad_shape[-1] = x.shape[-1] - y.shape[-1]
y = np.concatenate((y, np.zeros(pad_shape)), -1)
# X and Y are same length now, can test nperseg with either
if x.shape[-1] < nperseg:
        warnings.warn('nperseg = {0:d} is greater than input length = {1:d}, '
                      'using nperseg = {1:d}'.format(nperseg, x.shape[-1]))
nperseg = x.shape[-1]
nperseg = int(nperseg)
if nperseg < 1:
raise ValueError('nperseg must be a positive integer')
if nfft is None:
nfft = nperseg
elif nfft < nperseg:
raise ValueError('nfft must be greater than or equal to nperseg.')
else:
nfft = int(nfft)
if noverlap is None:
noverlap = nperseg//2
elif noverlap >= nperseg:
raise ValueError('noverlap must be less than nperseg.')
else:
noverlap = int(noverlap)
# Handle detrending and window functions
if not detrend:
def detrend_func(d):
return d
elif not hasattr(detrend, '__call__'):
def detrend_func(d):
return signaltools.detrend(d, type=detrend, axis=-1)
elif axis != -1:
# Wrap this function so that it receives a shape that it could
# reasonably expect to receive.
def detrend_func(d):
d = np.rollaxis(d, -1, axis)
d = detrend(d)
return np.rollaxis(d, axis, len(d.shape))
else:
detrend_func = detrend
if isinstance(window, string_types) or type(window) is tuple:
win = get_window(window, nperseg)
else:
win = np.asarray(window)
if len(win.shape) != 1:
raise ValueError('window must be 1-D')
if win.shape[0] != nperseg:
raise ValueError('window must have length of nperseg')
if np.result_type(win,np.complex64) != outdtype:
win = win.astype(outdtype)
if mode == 'psd':
if scaling == 'density':
scale = 1.0 / (fs * (win*win).sum())
elif scaling == 'spectrum':
scale = 1.0 / win.sum()**2
else:
raise ValueError('Unknown scaling: %r' % scaling)
else:
scale = 1
if return_onesided is True:
if np.iscomplexobj(x):
sides = 'twosided'
else:
sides = 'onesided'
if not same_data:
if np.iscomplexobj(y):
sides = 'twosided'
else:
sides = 'twosided'
if sides == 'twosided':
num_freqs = nfft
elif sides == 'onesided':
if nfft % 2:
num_freqs = (nfft + 1)//2
else:
num_freqs = nfft//2 + 1
# Perform the windowed FFTs
result = _fft_helper(x, win, detrend_func, nperseg, noverlap, nfft)
result = result[..., :num_freqs]
freqs = fftpack.fftfreq(nfft, 1/fs)[:num_freqs]
if not same_data:
# All the same operations on the y data
result_y = _fft_helper(y, win, detrend_func, nperseg, noverlap, nfft)
result_y = result_y[..., :num_freqs]
result = np.conjugate(result) * result_y
elif mode == 'psd':
result = np.conjugate(result) * result
elif mode == 'magnitude':
result = np.absolute(result)
elif mode == 'angle' or mode == 'phase':
result = np.angle(result)
elif mode == 'complex':
pass
result *= scale
if sides == 'onesided':
if nfft % 2:
result[...,1:] *= 2
else:
# Last point is unpaired Nyquist freq point, don't double
result[...,1:-1] *= 2
t = np.arange(nperseg/2, x.shape[-1] - nperseg/2 + 1, nperseg - noverlap)/float(fs)
if sides != 'twosided' and not nfft % 2:
# get the last value correctly, it is negative otherwise
freqs[-1] *= -1
# we unwrap the phase here to handle the onesided vs. twosided case
if mode == 'phase':
result = np.unwrap(result, axis=-1)
result = result.astype(outdtype)
    # All imaginary parts are zero anyway
if same_data and mode != 'complex':
result = result.real
# Output is going to have new last axis for window index
if axis != -1:
# Specify as positive axis index
if axis < 0:
axis = len(result.shape)-1-axis
# Roll frequency axis back to axis where the data came from
result = np.rollaxis(result, -1, axis)
else:
# Make sure window/time index is last axis
result = np.rollaxis(result, -1, -2)
return freqs, t, result
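# --- Editor's sketch (not part of the original module) -----------------------
# What "the windows are not averaged over" means in practice: `welch` is just
# the mean over the last (segment) axis of the per-segment result returned
# here, given matching arguments. The helper name below is hypothetical.
def _sketch_spectral_helper_segments():
    x = np.random.randn(2048)
    _, _, per_seg = _spectral_helper(x, x, fs=1.0, window='hann',
                                     nperseg=256, scaling='density',
                                     mode='psd')
    _, Pxx = welch(x, fs=1.0, window='hann', nperseg=256)
    assert np.allclose(per_seg.mean(axis=-1), Pxx)
# ------------------------------------------------------------------------------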
def _fft_helper(x, win, detrend_func, nperseg, noverlap, nfft):
"""
Calculate windowed FFT, for internal use by scipy.signal._spectral_helper
This is a helper function that does the main FFT calculation for
    _spectral_helper. All input validation is performed there, and the data
axis is assumed to be the last axis of x. It is not designed to be called
externally. The windows are not averaged over; the result from each window
is returned.
Returns
-------
result : ndarray
Array of FFT data
References
----------
.. [1] Stack Overflow, "Repeat NumPy array without replicating data?",
http://stackoverflow.com/a/5568169
Notes
-----
Adapted from matplotlib.mlab
.. versionadded:: 0.16.0
"""
    # Create strided array of data segments
if nperseg == 1 and noverlap == 0:
result = x[..., np.newaxis]
else:
step = nperseg - noverlap
shape = x.shape[:-1]+((x.shape[-1]-noverlap)//step, nperseg)
strides = x.strides[:-1]+(step*x.strides[-1], x.strides[-1])
result = np.lib.stride_tricks.as_strided(x, shape=shape,
strides=strides)
# Detrend each data segment individually
result = detrend_func(result)
# Apply window by multiplication
result = win * result
# Perform the fft. Acts on last axis by default. Zero-pads automatically
result = fftpack.fft(result, n=nfft)
return result
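# --- Editor's sketch (not part of the original module) -----------------------
# The as_strided call above builds the segment matrix without copying: each
# row is a length-nperseg view advanced by step = nperseg - noverlap samples.
# This reproduces it against explicit slicing. The helper name is hypothetical.
def _sketch_strided_segments():
    x = np.arange(20.0)
    nperseg, noverlap = 8, 4
    step = nperseg - noverlap
    shape = x.shape[:-1] + ((x.shape[-1] - noverlap) // step, nperseg)
    strides = x.strides[:-1] + (step * x.strides[-1], x.strides[-1])
    windows = np.lib.stride_tricks.as_strided(x, shape=shape, strides=strides)
    explicit = np.array([x[i * step:i * step + nperseg]
                         for i in range(shape[-2])])
    assert np.array_equal(windows, explicit)
# ------------------------------------------------------------------------------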
|
{
"content_hash": "85825dc75efd43888e081f7be040cf98",
"timestamp": "",
"source": "github",
"line_count": 954,
"max_line_length": 87,
"avg_line_length": 36.66771488469602,
"alnum_prop": 0.6136759955404363,
"repo_name": "kalvdans/scipy",
"id": "6a901932283349c90c3b666bd600da946e806234",
"size": "34981",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "scipy/signal/spectral.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "4127403"
},
{
"name": "C++",
"bytes": "503114"
},
{
"name": "Fortran",
"bytes": "5574493"
},
{
"name": "Makefile",
"bytes": "778"
},
{
"name": "Matlab",
"bytes": "4346"
},
{
"name": "Python",
"bytes": "11190581"
},
{
"name": "Shell",
"bytes": "2226"
},
{
"name": "TeX",
"bytes": "52106"
}
],
"symlink_target": ""
}
|
import threading
class StringFolderDevice(object):
"""Device plugin that allows getting and setting a file-backed string"""
def __init__(self, default_value=''):
self.lock = threading.Lock()
self.value1 = default_value
self.value2 = default_value
self.value3 = default_value
self.set_value1(default_value)
self.set_value2(default_value)
self.set_value3(default_value)
self.config = {
"strings": {
"string_one": {
'#getter': self.get_value1,
'#setter': self.set_value1,
'#units': 'string'
},
"string_two": {
'#getter': self.get_value2,
'#setter': self.set_value2,
'#units': 'string'
},
"string_three": {
'#getter': self.get_value3,
'#setter': self.set_value3,
'#units': 'string'
},
}
}
def get_value1(self):
with self.lock:
return self.value1
def set_value1(self, value):
with self.lock:
self.value1 = value
def get_value2(self):
with self.lock:
return self.value2
def set_value2(self, value):
with self.lock:
self.value2 = value
def get_value3(self):
with self.lock:
return self.value3
def set_value3(self, value):
with self.lock:
self.value3 = value
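# --- Editor's sketch (not part of the original file) --------------------------
# Minimal usage, assuming the hosting framework simply looks up the '#getter'
# and '#setter' callables from the config tree, as sketched here.
if __name__ == '__main__':
    device = StringFolderDevice('hello')
    node = device.config['strings']['string_two']
    node['#setter']('world')
    assert node['#getter']() == 'world'
    assert device.get_value1() == 'hello'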
|
{
"content_hash": "7163645518c0e1abac88b20a026b1f5b",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 76,
"avg_line_length": 27.789473684210527,
"alnum_prop": 0.476010101010101,
"repo_name": "intel-ctrlsys/actsys",
"id": "f169f697d8b5d35478966f96f626f36defffce46",
"size": "1584",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "oobrestserver/oob_rest_default_providers/StringFolderDevice.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "11641"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "1048209"
}
],
"symlink_target": ""
}
|
"""
@brief test log(time=1s)
"""
import unittest
from code_beatrix.faq.faq_python import instruction_pass
class TestPass(unittest.TestCase):
def test_pass(self):
instruction_pass()
if __name__ == "__main__":
unittest.main()
|
{
"content_hash": "4da9d156e818f9da51638406dc76f600",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 56,
"avg_line_length": 16.666666666666668,
"alnum_prop": 0.644,
"repo_name": "sdpython/code_beatrix",
"id": "82eb3ebaebd2ad3cf2b91fa5866af1cadd645b23",
"size": "250",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_unittests/ut_faq/test_pass.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "382"
},
{
"name": "CSS",
"bytes": "13138"
},
{
"name": "CoffeeScript",
"bytes": "2022"
},
{
"name": "HTML",
"bytes": "12537"
},
{
"name": "JavaScript",
"bytes": "3718909"
},
{
"name": "Jupyter Notebook",
"bytes": "11928350"
},
{
"name": "Python",
"bytes": "163509"
},
{
"name": "Shell",
"bytes": "715"
},
{
"name": "Stylus",
"bytes": "16039"
}
],
"symlink_target": ""
}
|
typo_dict = {
'steele stake': 'steel stake',
'gas mowe': 'gas mower',
'metal plate cover gcfi': 'metal plate cover gfci',
'lawn sprkinler': 'lawn sprinkler',
'ourdoor patio tile': 'outdoor patio tile',
'6 teir shelving': '6 tier shelving',
'storage shelve': 'storage shelf',
'American Standard Bone round toliet': 'American Standard Bone round toilet',
'6 stell': '6 steel',
'fece posts metal': 'fence posts metal',
'cushions outdoorlounge': 'cushions outdoor lounge',
'pricepfister kitchen faucet g135': 'price pfister kitchen faucet g135',
'glaciar bay toiled': 'glacier bay toilet',
'glacie bay dual flush': 'glacier bay dual flush',
'glacier bay tiolet tank lid': 'glacier bay toilet tank lid',
'handycap toilets': 'handicap toilets',
'high boy tolet': 'highboy toilet',
'gas wayer heaters': 'gas water heaters',
'basemetnt window': 'basement window',
'rustollum epoxy': 'rustoleum epoxy',
'air /heaterconditioner window': 'air /heat conditioner window',
'spliter ac unit': 'splitter ac unit',
'berh deck over': 'behr deck over',
'28 snow thower': '28 snow thrower',
'base board molding boundle': 'baseboard molding bundle',
'1 infloor flange': '1 in floor flange',
'10 window sping rod': '10 window spring rod',
'combo powertool kit': 'combo power tool kit',
'desalt impact 18': 'dewalt impact 18',
'rigid lithium ion batteries fuego drill': 'ridgid lithium ion batteries fuego drill',
'fiberglass repir kit': 'fiberglass repair kit',
'portable air condtioners': 'portable air conditioners',
'wall pannels': 'wall panels',
'2X4 SRUDS': '2X4 STUDS',
'frostking window shrink film': 'frost king window shrink film',
'Florescent Light Bulbs': 'Fluorescent Light Bulbs',
'violet flourescent light': 'violet fluorescent light',
'lawn mower- electic': 'lawn mower- electric',
'closetmade': 'closetmaid',
'greecianmarble floor tile': 'grecian marble floor tile',
'join compound wall tile': 'joint compound wall tile',
'montagnia contina floor tile': 'montagna cortina floor tile',
'porcelin floor tile 6x24': 'porcelain floor tile 6x24',
'three wayy': 'three way',
'incide wall heater': 'inside wall heater',
'westminster pedistal combo': 'westminster pedestal combo',
'water softners': 'water softeners',
'miricale': 'miracle',
'sliding windos locks': 'sliding window locks',
'20v dewalt kombo': '20v dewalt combo',
'DEWALT VACCUM': 'DEWALT VACUUM',
'lithium 20 dewalt': 'lithium 20v dewalt',
'water heather': 'water heater',
'riobi blower vac 9056': 'ryobi blower vac 9056',
'DRAWEER PULLS': 'DRAWER PULLS',
'bagged cinder mulch': 'bagged cedar mulch',
'hindges': 'hinges',
'chair rail hieght': 'chair rail height',
'celling light': 'ceiling light',
'tub repair kit procelian': 'tub repair kit porcelain',
'dewalr tools': 'dewalt tools',
'zinc plated flatbraces': 'zinc plated flat braces',
'cieling': 'ceiling',
'control celing fan': 'control ceiling fan',
'roll roofing lap cemet': 'roll roofing lap cement',
'cedart board': 'cedar board',
'lg stcking kit': 'lg stacking kit',
'ajustable ladder feet': 'adjustable ladder feet',
'milwakee M12': 'milwaukee M12',
'garden sprayer non pump': 'garden sprayer no pump',
'roof rdge flashing': 'roof edge flashing',
'cable prime line emergensy open': 'cable prime line emergency open',
'roybi l18v': 'ryobi l18v',
'milwaukee 18-volt lithium-ion cordlessrotary hammerss': 'milwaukee 18-volt lithium-ion cordless rotary hammers',
'bath sinnk': 'bath sink',
'bathro sinks': 'bathroom sinks',
'bathroom pedelal sink': 'bathroom pedestal sink',
'epoxy concrete pain': 'epoxy concrete paint',
'pool suppll': 'pool supply',
'3-3 galvinized tubing': '3-3 galvanized tubing',
'portable air conditionar and heater': 'portable air conditioner and heater',
'vynal windows': 'vinyl windows',
'aluminun tread plate': 'aluminum tread plate',
'3/4 vlve': '3/4 valve',
'kitchen ceiling lightening': 'kitchen ceiling lighting',
'led fixtues for the kitchen': 'led fixtures for the kitchen',
'wall design cermic': 'wall design ceramic',
'door chim buttons': 'door chime buttons',
'plastice corrugated panels': 'plastic corrugated panels',
'doors gaurds': 'doors guards',
'24 inche sink and vanity for bath': '24 inch sink and vanity for bath',
'24 swantone vanity top': '24 swanstone vanity top',
'40 wattsolar charged lights': '40 watt solar charged lights',
'buikids toilet seat': 'buy kids toilet seat',
'toliet seats': 'toilet seats',
'land scaping timbers': 'landscaping timbers',
'everblit heavy duty canvas dropcloth': 'everbilt heavy duty canvas drop cloth',
'3/4 sharkbits': '3/4 sharkbite',
'bath rom toilets': 'bathroom toilets',
'alumanam sheets': 'aluminum sheets',
'huskvarna': 'husqvarna',
'treate 2x4': 'treated 2x4',
'12000 btuair conditioners window': '12000 btu air conditioners window',
'air conditioner vbration': 'air conditioner vibration',
'heith-zenith motion lights': 'heath-zenith motion lights',
'small paint rollerss': 'small paint rollers',
'fencde posts': 'fence posts',
'knoty pine fencing': 'knotty pine fencing',
'metal sheet underpenning': 'metal sheet underpinning',
'plastic untility shelves': 'plastic utility shelves',
'christmass lights': 'christmas lights',
'garlend lights': 'garland lights',
'ceilig fan mount': 'ceiling fan mount',
'paito table and chairs': 'patio table and chairs',
'glacier bay one pice flapper': 'glacier bay one piece flapper',
'dcanvas drop cloth': 'canvas drop cloth',
'lawn mowre covers': 'lawn mower covers',
'vaccum for dw745': 'vacuum for dw745',
'Club cadet primer bulb': 'Cub cadet primer bulb',
'interior door lcoks': 'interior door locks',
'dremel toll kit': 'dremel tool kit',
'round up nozzle replacment': 'roundup nozzle replacement',
'ceder mulch': 'cedar mulch',
'sikalatexr concrete vonding adhesive': 'sikalatex concrete bonding adhesive',
'rigid air compressor': 'ridgid air compressor',
'garge doors': 'garage doors',
'ridding mowers': 'riding mowers',
'ridiing lawnmower': 'riding lawn mower',
'sliding mirror bathroom medicn cabinets': 'sliding mirror bathroom medicine cabinets',
'pastic qtr round': 'plastic quarter round',
'robutussin dh 835 replacement wick': 'robitussin dh 835 replacement wick',
'brick wall panles': 'brick wall panels',
'kitchen floor tikles': 'kitchen floor tiles',
'buffer polishewr': 'buffer polisher',
'keorsene heater wicks': 'kerosene heater wicks',
'1x6 cedar boaed': '1x6 cedar board',
'infered heaters': 'infrared heaters',
'1-1/2in. x 1ft. blk pipe': '1-1/2in. x 1 ft. black pipe',
'show me all 60 inch vaniteis': 'show me all 60 inch vanities',
'cieling fan': 'ceiling fan',
'instant waater heater gas lp': 'instant water heater gas lp',
'woodebn fence panels': 'wooden fence panels',
'hardiboard siding': 'hardie board siding',
'craft an lawn mower': 'craftsman lawn mower',
'kohler wellworth tpoilet': 'kohler wellworth toilet',
'moen dhower faucet': 'moen shower faucet',
'dewalt hand toolsg saw cordless': 'dewalt hand tools saw cordless',
'hindged l bracket': 'hinged l bracket',
'ceiling fan canopie for flst ceiling': 'ceiling fan canopy for flat ceiling',
'furnance vent delfector': 'furnace vent deflector',
'flourescent shop light': 'fluorescent shop light',
'bateries': 'batteries',
'bath wall tile chanpayne': 'bath wall tile champagne',
'floor ceramick': 'floor ceramic',
'stone are mb11': 'stone care mb11',
'traffic master porcelin ceramic tile portland stone': 'trafficmaster porcelain ceramic tile portland stone',
'celing fans hampton bay': 'ceiling fans hampton bay',
'outdoor ceilikng fan with light': 'outdoor ceiling fan with light',
'36in vinale fence': '36in vinyl fence',
'extention ladder little gaint': 'extension ladder little giant',
'closet rod 8 n9ickel': 'closet rod 8 nickel',
'closetmaid wire eight itier organizer': 'closetmaid wire eight tier organizer',
'shorten pendent lighting': 'shorten pendant lighting',
'chainlink gate': 'chain link gate',
'4 flourescent': '4 fluorescent',
'lithium batties': 'lithium batteries',
'24x73 book shelve case white': '24x73 bookshelf case white',
'linoliuml adhesive': 'linoleum adhesive',
'vynal flooring': 'vinyl flooring',
'vynal grip strip': 'vinyl grip strip',
'hagchet': 'hatchet',
'frameless mirro mount': 'frameless mirror mount',
'microwarve cart': 'microwave cart',
'mosia grout sealer': 'mosaic grout sealer',
'backsplach': 'backsplash',
'dimable ceiling strip lights': 'dimmable ceiling strip lights',
'lithum leaf blower': 'lithium leaf blower',
'rayoby batteries': 'ryobi batteries',
'pressure washerparts': 'pressure washer parts',
'rigid 18v lituim ion nicad': 'ridgid 18v lithium ion nicad',
'artric air portable': 'arctic air portable',
'8ft wht veranda post sleeve': '8 ft white veranda post sleeve',
'vynal fence': 'vinyl fence',
'solar naturlas salt': 'solar naturals salt',
'metl flashing': 'metal flashing',
'dog fence batt': 'dog fence battery',
'onda pressure washer': 'honda pressure washer',
'pressue washer': 'pressure washer',
'fridgdare air conditioners': 'frigidaire air conditioners',
'double pain windows': 'double pane windows',
'round flat topmetal post caps': 'round flat top metal post caps',
'1/2\' plyweood': '1/2\' plywood',
'ddummy door knobs interior': 'dummy door knobs interior',
'robi battery lawn trimmer': 'ryobi battery lawn trimmer',
'weewacker edger': 'weed wacker edger',
'prunning shears': 'pruning shears',
'steel enrty doors': 'steel entry doors',
'forimca': 'formica',
'satin nickle door hinge 4 in': 'satin nickel door hinge 4 in',
'garden hose repir cuplings': 'garden hose repair couplings',
'1/3 hoursepower garbage disposal': '1/3 horsepower garbage disposal',
'chicken wire 16 gauze': 'chicken wire 16 gauge',
'wheelbarow': 'wheelbarrow',
'didger': 'dodger',
'hhigh efficiency round toilet in white': 'high efficiency round toilet in white',
'accordian door venetian': 'accordion door venetian',
'patio flurniture covers': 'patio furniture covers',
'through thewall air conditioner': 'through the wall air conditioner',
'Whirpool washer': 'Whirlpool washer',
'4x6treaded wood': '4x6 treated wood',
'preature treated lumber 2in. x12in.x12 ft.': 'pressure treated lumber 2in. x 12 in.x 12 ft.',
'closetmade wood': 'closetmaid wood',
'steam cleanerm mop': 'steam cleaner mop',
'steqamers': 'steamers',
'pendant shads': 'pendant shades',
'battery operated flashingm light': 'battery operated flashing light',
'metal flexable water hose': 'metal flexible water hose',
'air filter for lawn equitment': 'air filter for lawn equipment',
'fiber glass pip insulation': 'fiberglass pipe insulation',
'insallation': 'installation',
'insullation': 'insulation',
'contracor string light': 'contractor string light',
'gas furnace and hotwater': 'gas furnace and hot water',
'rust oleum cabinet stain kit': 'rustoleum cabinet stain kit',
'sjhelf': 'shelf',
'small brackets for selves': 'small brackets for shelves',
'hecurles': 'hercules',
'anderson window grate': 'andersen window grate',
'anderson windows': 'andersen windows',
'lasron slider windows': 'larson slider windows',
'samsung 25.6 french door refridgerator': 'samsung 25.6 french door refrigerator',
'closet doors oganizers': 'closet doors organizers',
'koehler cimarron bathroom sink': 'kohler cimarron bathroom sink',
'kohler pedestal sink cimeron': 'kohler pedestal sink cimarron',
'cover for pole structue': 'cover for pole structure',
'drils': 'drills',
'surface mount channe': 'surface mount channel',
'outside corner- dentil': 'outside corner- dental',
'14heightx24withx15depth air conditioner': '14 heightx24 with 15 depth air conditioner',
'r30 demin insulation': 'r30 denim insulation',
'6 metal tee posts': '6 metal t posts',
'metal fence postsd': 'metal fence posts',
'aluminum l cahnnel': 'aluminum l channel',
'conner trim moulding': 'corner trim moulding',
'cornor board': 'corner board',
'pvc planel glue': 'pvc panel glue',
'3 in 1 vacum, ryobi': '3 in 1 vacuum, ryobi',
'toliet bowl rebuilding kits': 'toilet bowl rebuilding kits',
'swing set accesories': 'swing set accessories',
'ventenatural gas heater': 'vented natural gas heater',
'square ube wood': 'square cube wood',
'swivrl wood anchors': 'swivel wood anchors',
'ge gridle': 'ge griddle',
'pendant shafe': 'pendant shade',
'3/8 pipe galvinized': '3/8 pipe galvanized',
'vaporbarrier, crawl space': 'vapor barrier, crawl space',
'self sealant membrane': 'self sealing membrane',
'husky work bemch': 'husky work bench',
'vanity light fictures': 'vanity light fixtures',
'bed frames headboaed': 'bed frames headboard',
'replace plasticbathroom towel holder': 'replace plastic bathroom towel holder',
'whirlpool diswasher weather stripping': 'whirlpool dishwasher weather stripping',
'36 inch front dooe with casing': '36 inch front door with casing',
'glass back doorr': 'glass back door',
'pre hu door': 'pre hung door',
'backsplash paneks': 'backsplash panels',
'jeffery court mozaic tile': 'jeffrey court mosaic tile',
'floo shets': 'floor sheets',
'gazhose for dryer machine': 'gas hose for dryer machine',
'electric fireplacewater heaters': 'electric fireplace water heaters',
'ceiling mounted lighting fixures': 'ceiling mounted lighting fixtures',
'tools bloowers': 'tools blowers',
'artifical ground cover': 'artificial ground cover',
'waxhers and electric dryers': 'washers and electric dryers',
'outdoor tilees': 'outdoor tiles',
'owens corning ashingles': 'owens corning shingles',
'peper towel holder wall mount': 'paper towel holder wall mount',
'genecrac generators': 'generac generators',
'robyi gas weeder': 'ryobi gas weeder',
'acrtlic tape': 'acrylic tape',
'foam insulaion panels': 'foam insulation panels',
'rumbl;estone': 'rumblestone',
'famed sliding door $289.00': 'framed sliding door $289.00',
'padio door': 'patio door',
'cement boards ciding': 'cement boards siding',
'upholstry': 'upholstery',
'miror interior doors': 'mirror interior doors',
'recessed medicien cabinet': 'recessed medicine cabinet',
'bulked washed sand and gravel': 'bulk washed sand and gravel',
'sheet stock floorinh': 'sheet stock flooring',
'polycarbonite': 'polycarbonate',
'dedwalt cordless drill': 'dewalt cordless drill',
'ryobi power chalking gun': 'ryobi power caulking gun',
'poulan pro lawn motor blades': 'poulan pro lawn mower blades',
'diining set outdoor': 'dining set outdoor',
'granite countertop glu': 'granite countertop glue',
'cyculer saw': 'circular saw',
'kitchenaid frenchdoor ref': 'kitchenaid french door ref',
'rigid wet dry vac': 'ridgid wet dry vac',
'whirlpool caprios 4.3': 'whirlpool cabrio 4.3',
'micro wave ovens': 'microwave ovens',
'8 valleta edger': '8 valletta edger',
'decking hardsware': 'decking hardware',
'utility traiter': 'utility trailer',
'ceilin storage': 'ceiling storage',
'white wall bathroon cabinets': 'white wall bathroom cabinets',
'tsnkless hot water heater': 'tankless hot water heater',
'weed killer consertrated': 'weed killer concentrate',
'milwaukee ha,,er drill': 'milwaukee hammer drill',
'23 ince': '23 inch',
'stone outside tile': 'stone outdoor tile',
'galvanized outdoor celing fan': 'galvanized outdoor ceiling fan',
'oil rubbered bronze dor': 'oil rubbed bronze door',
'vynik tiles peel stick': 'vinyl tiles peel stick',
'window aircondiioner 12000 but': 'window air conditioner 12000 btu',
'60 lb hi strength concrete': '60 lb high strength concrete',
'plexy glass 24 x 24': 'plexiglass 24 x 24',
'porch liht fixture': 'porch light fixture',
'moving trollie': 'moving trolley',
'shoipping cart': 'shopping cart',
'accesory bags': 'accessory bags',
'garage door 70 lb extention spring': 'garage door 70 lb extension spring',
'riobi shop vac filter': 'ryobi shop vac filter',
'wet carpet cleaninig': 'wet carpet cleaning',
'pvd electrical conduit': 'pvc electrical conduit',
'roller up window blinds': 'roll up window blinds',
'uplihght': 'uplight',
'metal shelfs': 'metal shelves',
'dewalt 20v recepicating saw': 'dewalt 20v reciprocating saw',
'outdooor carpet': 'outdoor carpet',
'step latter': 'step ladder',
'kitchen cabinte hardware blue knob': 'kitchen cabinet hardware blue knob',
'pivotangle lock hinge': 'pivot angle lock hinge',
'plasticl panels': 'plastic panels',
'varigated fiber board': 'variegated fiber board',
'battery chages': 'battery charges',
'1/2 inch blk iron coupling': '1/2 inch black iron coupling',
'defiant led armer max': 'defiant led armormax',
'defiant led ight': 'defiant led light',
'led flashlightts': 'led flashlights',
'pfister pasedena 4 center set faucet': 'pfister pasadena 4 center set faucet',
'meguire plastic cleaner': 'meguiars plastic cleaner',
'single board pannel': 'single board panel',
'foundation fent covers': 'foundation vent covers',
'bottom freezer refrdgerators': 'bottom freezer refrigerators',
'colbolt drill bits': 'cobalt drill bits',
'soundfroofing material': 'soundproofing material',
'hanging light masn gar': 'hanging light mason jar',
'drywall mudd': 'drywall mud',
'delta bathroom falcet': 'delta bathroom faucet',
'ridgid 10000 watt': 'rigid 10000 watt',
'pvc edgetape white': 'pvc edge tape white',
'fireplace mantle': 'fireplace mantel',
'drop in sink ovel': 'drop in sink oval',
'40ft aluminumm ladder': '40 ft aluminum ladder',
'rigid shop vac filter': 'ridgid shop vac filter',
'moen single handle valvue rebuild': 'moen single handle valve rebuild',
'hunter ceiling fans accesories strip': 'hunter ceiling fans accessories strip',
'wheel barrel': 'wheelbarrow',
'16 aluminuim ladder': '16 aluminum ladder',
'1/2\' olastic pipe': '1/2\' plastic pipe',
'moen 7570 single hanlel faucet': 'moen 7570 single handle faucet',
'padtio heater': 'patio heater',
'rachet scret drivers': 'ratchet screwdrivers',
'water fountain nozle': 'water fountain nozzle',
'rigid sander': 'ridgid sander',
'anderson 4000 windows': 'andersen 4000 windows',
'doublew stainless': 'double stainless',
'milwakee m12 cordless heated jacket': 'milwaukee m12 cordless heated jacket',
'french door scree doorsscreen door': 'french door screen doors screen door',
'samsung refridegrator': 'samsung refrigerator',
'flurorescent light bulbs': 'fluorescent light bulbs',
'phillips 40t12cw plus florescent tube': 'phillips 40t12cw plus fluorescent tube',
'black and decker timmer parts st4500': 'black and decker trimmer parts st4500',
'gas range slide inove': 'gas range slide in love',
'baldwin lock stets': 'baldwin lock sets',
'6 ft ceder fence': '6 ft cedar fence',
'storeage': 'storage',
'beckett fountin pump': 'beckett fountain pump',
'polyeurethane exterior': 'polyurethane exterior',
'ceiling pannel': 'ceiling panel',
'70 celing fan': '70 ceiling fan',
'vynil barackets': 'vinyl brackets',
'moen kitchen fauchet': 'moen kitchen faucet',
'ridgid model wd1680 filter': 'rigid model wd1680 filter',
'point of use electtric': 'point of use electric',
'stell finished french patio door': 'steel finished french patio door',
'lg elec laundry suite': 'lg electric laundry suite',
'outdoor screem': 'outdoor screen',
'patio chair cushions/marth stewart': 'patio chair cushions/martha stewart',
'24 hollow core closet dor': '24 hollow core closet door',
'rigid miter saw': 'ridgid miter saw',
'ruotor table': 'router table',
'airconditioner decoritive cover unit': 'air conditioner decorative cover unit',
'miwaukee 18v battery and charger': 'milwaukee 18v battery and charger',
'potable air conditioner': 'portable air conditioner',
'perhung 30x80 interior door': 'prehung 30 x 80 interior door',
'6 dewalt skill saw': '6 dewalt skil saw',
'1x8x8 toung and grove': '1x8x8 tongue and groove',
'river feather door threashold': 'river feather door threshold',
'range connnector': 'range connector',
'ligt fixture covers': 'light fixture covers',
'window flasheing': 'window flashing',
'backet metal': 'bracket metal',
'horizantel fence panel': 'horizontal fence panel',
'rug pad 8 x 10': 'rug pad 8x10',
'frigadaire appliances': 'frigidaire appliances',
'bath si k cabinets': 'bath sink cabinets',
'8x10 outside storage': '8x10 outdoor storage',
'earthgrow mulch': 'earthgro mulch',
'10 60 tooth blde': '10 60 tooth blade',
'sink faucet with soap dispencer': 'sink faucet with soap dispenser',
'ridgid job max attatchmens': 'ridgid jobmax attachments',
'ridgid wrachet head': 'ridgid ratchet head',
'celliling light': 'ceiling light',
'waterroo concrete paint': 'waterproof concrete paint',
'americian standard champion 4 toliets': 'american standard champion 4 toilets',
'4 ftawning frame': '4 ft awning frame',
'restour for concrete': 'restore for concrete',
'econo florecent bulb': 'econo fluorescent bulb',
'florecent bulb holder': 'fluorescent bulb holder',
'light fictures': 'light fixtures',
'lihonia 4 led work light': 'lithonia 4 led work light',
'interrior frnch doors': 'interior french doors',
'hamptom bay cusion': 'hampton bay cushion',
'wndows': 'windows',
'porcalain thinset': 'porcelain thinset',
'versabon 50lb': 'versabond 50 lb',
'table for outsde': 'table for outside',
'hoinda gas edger': 'honda gas edger',
'installing sockets for flor': 'installing sockets for floor',
'laguna porcelin tile': 'laguna porcelain tile',
'showe heads in oil rubbed bronze': 'shower heads in oil rubbed bronze',
'chase lounge cushions': 'chaise lounge cushions',
'electric detector in simming pool water': 'electric detector in swimming pool water',
'elongagated toilet seat': 'elongated toilet seat',
'towbehind lawn spreaders': 'tow behind lawn spreaders',
'cable poter': 'cable porter',
'fraiming nailer electric': 'framing nailer electric',
'12 x 12 porcelian floor and wall tile': '12 x 12 porcelain floor and wall tile',
'marrazi': 'marazzi',
'range hoodu': 'range hood',
'whirpool range': 'whirlpool range',
'subway title 3 x 6': 'subway tile 3 x 6',
'untique stone': 'antique stone',
'post sleeveee': 'post sleeve',
'dinning chair seats': 'dining chair seats',
'christmas lights icicle colerful': 'christmas lights icicle colorful',
'colpay garage door molding': 'clopay garage door molding',
'light for public ligthining': 'light for public lightning',
'slate timberland shingle': 'slate timberline shingle',
'cicular saw blad': 'circular saw blade',
'varbide 7 1/4 circular saw blade': 'carbide 7 1/4 circular saw blade',
'10 flourescent bulbs': '10 fluorescent bulbs',
'kids outside furnature': 'kids outside furniture',
'whirpool gas range': 'whirlpool gas range',
'starter fertillzer': 'starter fertilizer',
'toro snowerblower light kit': 'toro snowblower light kit',
'High Wheel String Trimer': 'High Wheel String Trimmer',
'insided house door': 'inside house door',
'3 1/2 non-mortison hinges satin finish': '3 1/2 non-mortise hinges satin finish',
'miracle grow garden soil': 'miracle gro garden soil',
'miracle grow spray dispensers': 'miracle gro spray dispensers',
'alure flooring black oak': 'allure flooring black oak',
'sweeping atatchment for weed wacker': 'sweeping attachment for weed wacker',
'retangle bathroom sinks': 'rectangular bathroom sinks',
'underthe cabinet microwaves': 'under the cabinet microwaves',
'24 inch lover doors': '24 inch louvered doors',
'window drip egedg': 'window drip edge',
'rechargable portable air compressor': 'rechargeable portable air compressor',
'birkmann 5 burner': 'brinkmann 5 burner',
'whirlpool gasnstove self cleaning oven': 'whirlpool gas stove self cleaning oven',
'havc brush': 'hvac brush',
'discharge hose 1.5 inces': 'discharge hose 1.5 inches',
'6 ft laminite countertop': '6 ft laminate countertop',
'pool vaccum': 'pool vacuum',
'1/2 in.x 1/2 in. thread albow male to male': '1/2 in.x 1/2 in. threaded elbow male to male',
'sofet': 'soffit',
'sliding patio doort': 'sliding patio door',
'30inch flourescent tubes': '30 inch fluorescent tubes',
'phillips light bulbs': 'philips light bulbs',
'stainless steel sinl': 'stainless steel sink',
'burgular bars for front porch': 'burglar bars for front porch',
'oach lights': 'coach lights',
'2 in lnsulated bushings': '2 in insulated bushings',
'motion lught': 'motion light',
'residental light sensor security lights': 'residential light sensor security lights',
'vertical blind accsesories': 'vertical blind accessories',
'1/2 in ree bar': '1/2 in rebar',
'cloths rod and shelf brackets': 'clothes rod and shelf brackets',
'fire rated buildng materials': 'fire rated building materials',
'hot point water filer': 'hotpoint water filter',
'bathroom cabinet without fermaldehyde': 'bathroom cabinet without formaldehyde',
'9.6 bvolt': '9.6 volt',
'rustoleum bright coach metallic': 'rustoleum bright coat metallic',
'stone effect sante cecilia top': 'stone effects santa cecilia top',
'suspanded ceiling': 'suspended ceiling',
'4x8 plywood pressure treeted': '4x8 plywood pressure treated',
'acess panel': 'access panel',
'genie excellartor garage door opener': 'genie excelerator garage door opener',
'ge dish washer with 46 dba rating': 'ge dishwasher with 46 dba rating',
'wood and concret stain': 'wood and concrete stain',
'8 foot flour sent': '8 foot fluorescent',
'infared grills': 'infrared grills',
'wirless interconnected smoke dedector': 'wireless interconnected smoke detector',
'luever': 'leuver',
'3 in roung head bolt': '3 in round head bolt',
'rachet': 'ratchet',
'rigid 12 volt': 'ridgid 12 volt',
'sharkbit': 'sharkbite',
'hamiltton collectin': 'hamilton collection',
'kitchen aide wine and beverage refrigerator': 'kitchenaid wine and beverage refrigerator',
'paint markers burgondy color': 'paint markers burgundy color',
'glass washer with sucktion cups': 'glass washer with suction cups',
'andersor doors': 'anderson doors',
'hickory cabinett': 'hickory cabinet',
'repacement can type light bulbs': 'replacement can type light bulbs',
'ceeling patio shades': 'ceiling patio shades',
'white vainty 8 faucet': 'white vanity 8 faucet',
'daylight florisant bulb 36inch': 'daylight fluorescent bulb 36 inch',
'contact paoer': 'contact paper',
'air bathtubes': 'air bathtubs',
'cushions for wecker furniture': 'cushions for wicker furniture',
'galvinized poles 20long': 'galvanized poles 20 long',
'siegel light pendent': 'siegel light pendant',
'spaonges': 'sponges',
'extorior shatters': 'exterior shutters',
'led blubs': 'led bulbs',
'4 inch back flow prenter': '4 inch backflow preventer',
'silding closet doors track': 'sliding closet doors track',
'10000 btu windowair condiioner': '10000 btu window air conditioner',
'sewer pipe hoider': 'sewer pipe holder',
'vinal blind paint': 'vinyl blind paint',
'fuacet': 'faucet',
'picinic tables': 'picnic tables',
'all in one topmount kraus sinks': 'all in one top mount kraus sinks',
'solar post lmapy': 'solar post lamp',
'transormations': 'transformations',
'daltiles sandy beach': 'daltile sandy beach',
'wallmount indoor lights with plug': 'wall mounted indoor lights with plug',
'kennal kit': 'kennel kit',
'46 high output grow florescent bulb': '46 high output grow fluorescent bulb',
'frost fee freezers': 'frost free freezers',
'stainles steel door handle': 'stainless steel door handle',
'combo drill makita 20v': 'combi drill makita 20v',
'shop vacumm': 'shop vacuum',
'primer for led paint': 'primer for lead paint',
'outdoor gas fiepits': 'outdoor gas firepits',
'hallway pendendant lighting': 'hallway pendant lighting',
'chesapeke oak flooring': 'chesapeake oak flooring',
'ryobi multi tool acccessories': 'ryobi multi tool accessories',
'ryobi raidos': 'ryobi radios',
'milwaukee skill saw': 'milwaukee skil saw',
'ligh chrismas hanging tree': 'light christmas hanging tree',
'galvinized screws': 'galvanized screws',
'led circuline bulbs': 'led circline bulbs',
'kholer elongated toilet seat': 'kohler elongated toilet seat',
'tolet seats': 'toilet seats',
'ock blade knife piece 3': 'lock blade knife piece 3',
'portable airconditioner': 'portable air conditioner',
'window aircondition': 'window air conditioner',
'36 vx 72 commercial outdoor mats': '36 x 72 commercial outdoor mats',
'runner commerical': 'runner commercial',
'montagna dappy gray': 'montagna dapple gray',
'soil temperture test kit': 'soil temperature test kit',
'basement tolet': 'basement toilet',
'32 door threshhold': '32 door threshold',
'hampton bay oak bast cabinets': 'hampton bay oak base cabinets',
'charbroil parts': 'char broil parts',
'qucikie mop': 'quickie mop',
'concret anchor bolts': 'concrete anchor bolts',
'24 whtie storage cabinet': '24 white storage cabinet',
'door handle deabolt kit': 'door handle deadbolt kit',
'ge profile 30 inch charcoal folters': 'ge profile 30 inch charcoal filters',
'49 inch napolian vanity top': '49 inch napoleon vanity top',
'4in pvc franco cuppling': '4in pvc fernco coupling',
'graveless gravaless sewer pipe': 'graveless graveless sewer pipe',
'shower fllor': 'shower floor',
'riverera screen doors': 'riviera screen doors',
'animal deterent': 'animal deterrent',
'woodpeckers repellant': 'woodpeckers repellent',
'wood buring insert 200-250': 'wood burning insert 200-250',
'spectrazide ant': 'spectracide ant',
'gas grill accesories': 'gas grill accessories',
'elecronic insect repeller': 'electronic insect repeller',
'slyvanna motion nite light': 'sylvania motion nite light',
'4 in pvs end cap': '4 in pvc end cap',
'delta portor shower and tub trim': 'delta porter shower and tub trim',
'replacment mini bulbs': 'replacement mini bulbs',
'braxilian cherry laminate': 'brazilian cherry laminate',
'15 amp tampe resistant outlets': '15 amp tamper resistant outlets',
'hydraulic jack renat': 'hydraulic jack rental',
'32 x 32 shower baser': '32 x 32 shower base',
'electronic bed bug repellant': 'electronic bed bug repellent',
'ridgid auger': 'rigid auger',
'2000 psi force nozzzle': '2000 psi force nozzle',
'25 height beveragecooler': '25 height beverage cooler',
'anderson windows 400 seriesimpact resistant': 'andersen windows 400 series impact resistant',
'drill 20 lithium battery': 'drill 20v lithium battery',
'extertal air vent cover': 'external air vent cover',
'resin shesd': 'resin sheds',
'8x8x4 conctete block': '8x8x4 concrete block',
'tun faucet spout': 'tub faucet spout',
'continuos curtain rods': 'continuous curtain rods',
'upholstry cleaner': 'upholstery cleaner',
'ureka vaccuum': 'eureka vacuum',
'30 towel rods brushed nicol': '30 towel rods brushed nickel',
'1/2 gal thermos': '1/2 gallon thermos',
'unbralla fabric top only': 'umbrella fabric top only',
'outdoor cieling fans': 'outdoor ceiling fans',
'20 amps cros hinghs breaker': '20 amps cross highs breaker',
'mixing tubn': 'mixing tub',
'gfi circuit breaker': 'gfci circuit breaker',
'wrought iuron fence panels': 'wrought iron fence panels',
'ac air vent sleave': 'ac air vent sleeve',
'air ventalation deflector': 'air ventilation deflector',
'buddahs hand tree': 'buddha\'s hand tree',
'lawm mowers': 'lawn mowers',
'asathbula 7 piece': 'ashtabula 7 piece',
'recessed lightjs': 'recessed lights',
'hing pin door dtop': 'hinge pin door stop',
'elerical outlets plates': 'electrical outlets plates',
'bed tool boc': 'bed tool box',
'16 inch fabn': '16 inch fan',
'battery poerated motion sensor': 'battery operated motion sensor',
'grqss': 'grass',
'troy build trimmer extension': 'troy bilt trimmer extension',
'mansonry impact bit': 'masonry impact bit',
'high output basebord': 'high output baseboard',
'shower door sealparts': 'shower door seal parts',
'12 inch hight wall cabinet': '12 inch height wall cabinet',
'light s for sno throwers': 'lights for snow throwers',
'ceiling medallians': 'ceiling medallions',
'medalion': 'medallion',
'everbilt sloted': 'everbilt slotted',
'transparant redwood stain': 'transparent redwood stain',
'black and decker scub buster extreme': 'black and decker scrub buster extreme',
'mobilehome siding': 'mobile home siding',
'shutter screwws': 'shutter screws',
'hampton pation set with firepit': 'hampton patio set with firepit',
'industreial wire': 'industrial wire',
'vegtable seeds': 'vegetable seeds',
'masterpeice 72': 'masterpiece 72',
'5/4 lumbe': '5/4 lumber',
'dawn to dusk lig': 'dawn to dusk light',
'dusk to dawn motion sensoroutdoor lighting fixtures': 'dusk to dawn motion sensor outdoor lighting fixtures',
'cordless sweeperr': 'cordless sweeper',
'mill valley colle': 'mill valley college',
'outdoorstorage bin': 'outdoor storage bin',
'haging wire': 'hanging wire',
'4 in white recessed haol baffle in soft white': '4 in white recessed led baffle in soft white',
'11 1/2x25 1/2 white aluminun': '11 1/2 x 25 1/2 white aluminum',
'saratoga hickorya': 'saratoga hickory',
'surface gringer': 'surface grinder',
'kidie co2': 'kidde co2',
'batterys and charger kits': 'batteries and charger kits',
'nutru ninja': 'nutri ninja',
'23.5 shower door nickle': '23.5 shower door nickel',
'glass panel retiner': 'glass panel retainer',
'12v replacement blubs': '12v replacement bulbs',
'martha steward': 'martha stewart',
'1 1/2inchbrasswalltube18 inch': '1 1/2 inch brass wall tube 18 inch',
'brown color scheem': 'brown color scheme',
'spiral latters': 'spiral letters',
'24 incyh range': '24 inch range',
'8x8 ezup canopie cover': '8x8 ez up canopy cover',
'kitcheen door blind': 'kitchen door blind',
'flourescent balast 120-2/32is': 'fluorescent ballast 120-2/32is',
'vinyl lattiace': 'vinyl lattice',
'1/4 28 threadded connector': '1/4 28 threaded connector',
'kitchaid 3 burner': 'kitchenaid 3 burner',
'10 condiut pvc': '10 conduit pvc',
'WEBER GRILL GENIS 310': 'WEBER GRILL GENESIS 310',
'wall mount tub fauet moen': 'wall mount tub faucet moen',
'sower cleaner': 'shower cleaner',
'batteryfor alarm system': 'battery for alarm system',
'bed gugs': 'bed bugs',
'show the pric of washer and dryer': 'show the price of washer and dryer',
'washer electic dryer': 'washer electric dryer',
'ho hub couplings': 'no hub couplings',
'battey string trimmers': 'battery string trimmers',
'3/4 in. wide quarteround': '3/4 in. wide quarter round',
'ac dip pans': 'ac drip pans',
'rutland wood stove termometer': 'rutland wood stove thermometer',
'outdoor daucets': 'outdoor faucets',
'badless vacuum cleaners': 'bagless vacuum cleaners',
'dewalt 20 volt xr hamer': 'dewalt 20 volt xr hammer',
'dewalt drillimpact tool 20 volt xr': 'dewalt drill impact tool 20 volt xr',
'martha steward bath mirror': 'martha stewart bath mirror',
'infared thermometer': 'infrared thermometer',
'millwaukee 1/2 ele.c drill': 'milwaukee 1/2 elec drill',
'25 watt 4 foot flourescent': '25 watt 4 foot fluorescent',
'boscj bit': 'bosch bit',
'barbque grills': 'barbecue grills',
'brinkman grill burner': 'brinkmann grill burner',
'malbu replacement led light bubles': 'malibu replacement led light bulbs',
'natural stone tiele': 'natural stone tile',
'stone vaneer': 'stone veneer',
'stone venner sequia': 'stone veneer sequoia',
'ceiling fan replacement clades': 'ceiling fan replacement blades',
'transformet for flurescent tube lights': 'transformer for fluorescent tube lights',
'refrigerator frenchdoor': 'refrigerator french door',
'flourescent paint': 'fluorescent paint',
'marking baint': 'marking paint',
'mirrir hanger': 'mirror hanger',
'chrisymas tree bags': 'christmas tree bags',
'comercial food processor': 'commercial food processor',
'picture haning kitpicture hanging kit': 'picture hanging kit picture hanging kit',
'bathroom vanity cabinetwithouttops': 'bathroom vanity cabinets without tops',
'amcrest survelliance systems': 'amcrest surveillance systems',
'30 inch refigrator': '30 inch refrigerator',
'chain saw eletric': 'chainsaw electric',
'power dprayer': 'power sprayer',
'douglas fur fake christmas trees': 'douglas fir fake christmas trees',
'brinkman grill': 'brinkmann grill',
'dual switch dimer': 'dual switch dimmer',
'Ortho Wed B Gone max': 'Ortho Weed B Gon max',
'ortho weed be gone': 'ortho weed b gon',
'4ft flourescent bulb t8': '4ft fluorescent bulb t8',
'18 volt 1/2 roter hammer': '18 volt 1/2 roto hammer',
'cabinents with drawers': 'cabinets with drawers',
'7 mil trash bgs': '7 mil trash bags',
'1/2 ntp to 1/2': '1/2 npt to 1/2',
'3/8 rachert set': '3/8 ratchet set',
'hunter shower eshaust fan with light': 'hunter shower exhaust fan with light',
'vanity in mahogany mirros': 'vanity in mahogany mirrors',
'hasmmock bed': 'hammock bed',
'composit fencing': 'composite fencing',
'post insurts': 'post inserts',
'3500 psi pressue washer': '3500 psi pressure washer',
'idylus air purifier': 'idylis air purifier',
'garden solenoide valves': 'garden solenoid valves',
'window plastic instulation': 'window plastic insulation',
'engineered wood floorcleaners': 'engineered wood floor cleaners',
'parquee flooring': 'parquet flooring',
'dermal saw max ultra': 'dremel saw max ultra',
'external structual connector screw': 'external structural connector screw',
'tv shelv': 'tv shelf',
'kithen cabinets 18 white': 'kitchen cabinets 18 white',
'1 1/2 couplingg': '1 1/2 coupling',
'porceline faucet handle': 'porcelain faucet handle',
'duplex outlet and ubs charger': 'duplex outlet and usb charger',
'1/4 quarter round cherries jublilee': '1/4 quarter round cherries jubilee',
'lg hausys viaterra': 'lg hausys viatera',
'bear semi transparent cedar stain': 'behr semi transparent cedar stain',
'27 mivrowave': '27 microwave',
'gardinias': 'gardenias',
'ull spectrum plant light': 'full spectrum plant light',
'942196brinkmann 2 burner': '942196 brinkmann 2 burner',
'gargage storage ideas': 'garage storage ideas',
'outside horizontal storage sheds': 'outdoor horizontal storage sheds',
'bouganvilla': 'bougainvillea',
'led recressed lighting': 'led recessed lighting',
'3 x3 marle tile': '3x3 marble tile',
'concrete saw dewall': 'concrete saw dewalt',
'replacement glass for pellet stive': 'replacement glass for pellet stove',
'porcelin tile black pencil tile': 'porcelain tile black pencil tile',
'smoke dectectors': 'smoke detectors',
'humidifier fulters': 'humidifier filters',
'3/4 in. pvc assesories': '3/4 in. pvc accessories',
'12 inch sower head': '12 inch shower head',
'22 mm impact ocket': '22mm impact socket',
'garvanized wood screws': 'galvanized wood screws',
'interlocking rubbber floor mats': 'interlocking rubber floor mats',
'Hose end nozzel': 'Hose end nozzle',
'led energy efficient kitchen lites': 'led energy efficient kitchen lights',
'barn syslet door': 'barn style door',
'rat or mice poision': 'rat or mice poison',
'led ressed deameable lights': 'led recessed dimmable lights',
'prelit tree mutli': 'pre lit tree multi',
'sodering iron': 'soldering iron',
'tub suround': 'tub surround',
'fireplace screen assessories': 'fireplace screen accessories',
'acrilic white paint': 'acrylic white paint',
'gibraltor locking': 'gibraltar locking',
'air conditioner sideays': 'air conditioner sideways',
'white inyrtior paint': 'white interior paint',
'100 watt candlebra': '100 watt candelabra',
'llhampton bay patio rocker': 'hampton bay patio rocker',
'lock brushed nicke;': 'lock brushed nickel;',
'structered media': 'structured media',
'summit 24 inch ss gaqs range': 'summit 24 inch ss gas range',
'ryobl battery': 'ryobi battery',
'replacement carbrator for robyi': 'replacement carburetor for ryobi',
'balist': 'ballast',
'pressuer washer': 'pressure washer',
'22 storage shelve': '22 storage shelf',
'32\' strorm door': '32\' storm door',
'hazardous locationlight fixture globe': 'hazardous location light fixture globe',
'john deer bagger': 'john deere bagger',
'ridinng lawn mowers mulching': 'riding lawn mowers mulching',
'1/2 fpt x 1/2 inch pex': '1/2 npt x 1/2 inch pex',
'2 kindorff straps': '2 kindorf straps',
'telemechanic square d': 'telemecanique square d',
'thresh hold': 'threshold',
'24x24 framless recessed mount mirrored medicine': '24x24 frameless recessed mount mirrored medicine',
'600 connector cylander': '600 connector cylinder',
'well pump submerciable': 'well pump submersible',
'security gate pannel': 'security gate panel',
'1/4-20 jamb nuts': '1/4-20 jam nuts',
'american standard flush valvu': 'american standard flush valve',
'stove adopter': 'stove adapter',
'kitchenaide dishwasher': 'kitchenaid dishwasher',
'roofing leadders': 'roofing ladders',
'heath zenity 180 security light': 'heath zenith 180 security light',
'solar powerd lights': 'solar powered lights',
'24 white walloven': '24 white wall oven',
'kitchen aide mixer': 'kitchenaid mixer',
'10 in w 30 in l inetrior vent': '10 in w 30 in l interior vent',
'co smoke detector kiddie': 'co smoke detector kidde',
'vacum aa bag 58236c': 'vacuum aa bag 58236c',
'sealant for sideing': 'sealant for siding',
'come along and chaincome along and chain': 'come along and chain come along and chain',
'wall paper bprder': 'wallpaper border',
'cararra tile': 'carrara tile',
'14 gauge strranded wire': '14 gauge stranded wire',
'30 gal electirc water heater': '30 gal electric water heater',
'guarter round tile': 'quarter round tile',
'summit gril': 'summit grill',
'gavanized pipe 20 feet': 'galvanized pipe 20 feet',
'melamine sheliving': 'melamine shelving',
'composite fiscia board': 'composite fascia board',
'spunge mop refill': 'sponge mop refill',
'wall mount outside motion dector': 'wall mount outdoor motion detector',
'bisquit tub refinish kit': 'biscuit tub refinish kit',
'patternn paint rollers': 'pattern paint rollers',
'built in wall nitch': 'built in wall niche',
'ironboard built in': 'iron board built in',
'behr melrot': 'behr merlot',
'led shoplightmakita light': 'led shop light makita light',
'armazone': 'amazon',
'soild 6 panel interior door': 'solid 6 panel interior door',
'dishs for 8': 'dishes for 8',
'1 1/4 steel ppes': '1 1/4 steel pipes',
'pull out drw': 'pull out draw',
'swffer mop': 'swiffer mop',
'milwaukee m18 tootls': 'milwaukee m18 tools',
'bronzw phone wall jack cover': 'bronze phone wall jack cover',
'flourscent lights size 18x24': 'fluorescent lights size 18x24',
'berber carpeting destiny doeskin': 'berber carpet destiny doeskin',
'spring heavy dut': 'spring heavy duty',
'2 in pvc pipe incresers': '2 in pvc pipe increasers',
'lifetime rouind table': 'lifetime round table',
'16x26 recesssed medicine cabinets': '16x26 recessed medicine cabinets',
'rolling barn dorr hardware': 'rolling barn door hardware',
'huricane panel caps': 'hurricane panel caps',
'73 inch anderson patio screen doors': '73 inch andersen patio screen doors',
'barbque grill temperature guage': 'barbecue grill temperature gauge',
'bath tub shower repair lit': 'bathtub shower repair kit',
'entery door sidelights': 'entry door sidelights',
'5 burnerner brikman gas grill': '5 burner brinkmann gas grill',
'battub floor mat': 'bathtub floor mat',
'outlet wallplate with cover': 'outlet wall plate with cover',
'fungacide': 'fungicide',
'tuband tile latex caulk': 'tub and tile latex caulk',
'natural gas barbeque': 'natural gas barbecue',
'hallogen bulb flood': 'halogen bulb flood',
'roudulf': 'rudolf',
'cellular shade 23.75x37': 'cellular shade 23.75x 37',
'wyndham vanities with no tops': 'wyndham vanities without tops',
'frigidare gas range': 'frigidaire gas range',
'frigidare refrigerator': 'frigidaire refrigerator',
'dishwasher moiunting kit': 'dishwasher mounting kit',
'black refrigeratore': 'black refrigerator',
'barcello estates light fi': 'bercello estates light fi',
'kohler ch730 maintance kits': 'kohler ch730 maintenance kits',
'phillips led slimline a19': 'philips led slimline a19',
'asburn mahogany medicine cabinate': 'ashburn mahogany medicine cabinet',
'stove top replacement patr': 'stove top replacement part',
'hampton bay pendent light parts': 'hampton bay pendant light parts',
'wall mountreading light': 'wall mount reading light',
'heat on malamine tape': 'heat on melamine tape',
'vinal plank selection': 'vinyl plank selection',
'marble qwhite': 'marble white',
'reheem performance 75 gal water heater': 'rheem performance 75 gal water heater',
'cover for a double barrow grill': 'cover for a double barrel grill',
'water taste kits': 'water test kits',
'roybi gas trimmer repair kit': 'ryobi gas trimmer repair kit',
'masonary dril bits': 'masonry drill bits',
'bath and shower facet set': 'bath and shower faucet set',
'sanding sponce': 'sanding sponge',
'silestone sammples': 'silestone samples',
'ge mwr filter': 'ge mwf filter',
'rectangele garbage can': 'rectangle garbage can',
'light podt sensor': 'light post sensor',
'honewell wireless doorbell': 'honeywell wireless doorbell',
'vertical door slide mechanis': 'vertical door slide mechanism',
'2 inch bye 6 inch thick board': '2 inch by 6 inch thick board',
'28x80 contl splt rh': '28x80 control split rh',
'doors exterior with top windo': 'doors exterior with top window',
'water filter for vanitys': 'water filter for vanities',
'hampton bay geogian wall plates aged bronze': 'hampton bay georgian wall plates aged bronze',
'18 wat let lamps': '18 watt led lamps',
'qstatic cling window film': 'static cling window film',
'eletric pole hedge clippers': 'electric pole hedge clippers',
'moen voss lightin': 'moen voss lighting',
'dreamline showeruni door': 'dreamline shower door',
'dewaqlt air nailers': 'dewalt air nailers',
'hex drill chcuck': 'hex drill chuck',
'vinal siding per box': 'vinyl siding per box',
'verticle blind': 'vertical blind',
'chome framed mirror': 'chrome framed mirror',
'b onnet': 'bonnet',
'dowel sprial': 'dowel spiral',
'deck tdiles': 'deck tiles',
'driveing bits': 'driving bits',
'water putifiers': 'water purifiers',
'clyvus': 'clivus',
'old style nailshand forgednails': 'old style nails hand forged nails',
'grohe essencekitchen faucet': 'grohe essence kitchen faucet',
'femle end hose repair': 'female end hose repair',
'garden hose reair kits': 'garden hose repair kits',
'bathroom facets': 'bathroom faucets',
'kitchenaid refrigerator bottom frrezer': 'kitchenaid refrigerator bottom freezer',
'chrome/polished brass 2-handle 4-in centerset bathroom fauc': 'chrome/polished brass 2-handle 4-in centerset bathroom faucet',
'spackilng knife': 'spackling knife',
'cadelabra light bulbs led': 'candelabra light bulbs led',
'roller bracker for frameless shower doors': 'roller bracket for frameless shower doors',
'morola tile metro penny': 'merola tile metro penny',
'48 inchled tube': '48 inch led tube',
'corner sorage': 'corner storage',
'glaciar bay crystal shower': 'glacier bay crystal shower',
'tosco ivory tile': 'tosca ivory tile',
'elecric screw driver batteries': 'electric screwdriver batteries',
'mobilehome wall paint': 'mobile home wall paint',
'chainsaw rplacement chains': 'chainsaw replacement chains',
'electric guage cable': 'electric gauge cable',
'f15 t5 florescent': 'f15 t5 fluorescent',
'sprinkler conroller': 'sprinkler controller',
'wireless light sitch': 'wireless light switch',
'16x16x60boxes for moving': '16x16x60 boxes for moving',
'engeenered wood': 'engineered wood',
'frigidare microwave': 'frigidaire microwave',
'nals for subfloor': 'nails for subfloor',
'verathane': 'varathane',
'remote controlle light dimmer': 'remote controlled light dimmer',
'koehler shower door': 'kohler shower door',
'burgluar bar tool': 'burglar bar tool',
'greem roofing shingles': 'green roofing shingles',
'milwoki circular saw': 'milwaukee circular saw',
'tub faucets bronza': 'tub faucets bronze',
'bathtubdoor towel racks': 'bathtub door towel racks',
'ac exhaust extention': 'ac exhaust extension',
'outside deck boards composit': 'outside deck boards composite',
'4inch ligh junction box': '4 inch light junction box',
'gardenn containers': 'garden containers',
'plant continers': 'plant containers',
'3 paint bbrush': '3 paint brush',
'26 in woodsaddle stool': '26 in wood saddle stool',
'adhensive with nozzle': 'adhesive with nozzle',
'swanstone kitchen sink accesories': 'swanstone kitchen sink accessories',
'pvc to corragated connector': 'pvc to corrugated connector',
'unsanded grout bisquit': 'unsanded grout biscuit',
'spray paint rust-oleum gray': 'spray paint rustoleum gray',
'brushes drils': 'brushed drills',
'indoor mounting tpe': 'indoor mounting tape',
'indoor grow light blubs': 'indoor grow light bulbs',
'thinset morter': 'thin set mortar',
'flourescent g25 60watt': 'fluorescent g25 60 watt',
'diatemaceous earth': 'diatomaceous earth',
'23\' biview surface mount med cab chestnut': '23\' bi view surface mount med cab chestnut',
'72 hour carpt': '72 hour carpet',
'2 \' galvanise street 90': '2 \' galvanized street 90',
'maytab bravos': 'maytag bravos',
'600w incandecent toggle dimmer': '600w incandescent toggle dimmer',
'galvanized wire 10 guage': 'galvanized wire 10 gauge',
'assemble hight 17 inches': 'assembled height 17 inches',
'pvc t coulpler': 'pvc t coupler',
'water heatere drain pan': 'water heater drain pan',
'faucet steam washers': 'faucet stem washers',
'heat window filtm': 'heat window film',
'dewalt circlular saw blades': 'dewalt circular saw blades',
'5plinth block': 'plinth block',
'french pation doors with sidepanels': 'french patio doors with side panels',
'30 unfinish filler': '30 unfinished filler',
'home depot in cambrige': 'home depot in cambridge',
'faucet siphon hose connecter': 'faucet siphon hose connector',
'black out doors spray paint': 'black outdoor spray paint',
'anderson storm door full view easy install': 'andersen storm door full view easy install',
'ice marker water kits': 'ice maker water kits',
'adhesive magnetized roll': 'adhesive magnetic roll',
'metal kkitchen cabines': 'metal kitchen cabinets',
'2\' x 1 1/2 reducing busing thread': '2\' x 1 1/2 reducing bushing threaded',
'abs rambit pipe saver': 'abs rambut pipe saver',
'33 in w x 18 icnh depth vanity': '33 in w x 18 inch depth vanity',
'built in landry shelving': 'built in laundry shelving',
'grey rubbermaid trash barrells': 'grey rubbermaid trash barrels',
'sawall blades': 'sawzall blades',
'9v battery ackup': '9v battery backup',
'1/2 in. fip x 7/16 in. or 1/2 in. slip joint angle stop valv': '1/2 in. fip x 7/16 in. or 1/2 in. slip joint angle stop valve',
'peir block': 'pier block',
'under ceiling garag storage': 'under ceiling garage storage',
'stone effects backsplash cool fushion': 'stone effects backsplash cool fusion',
'desoldering vacum pump': 'desoldering vacuum pump',
'elrctric welders': 'electric welders',
'unfinushed kitchen cabinets': 'unfinished kitchen cabinets',
'3 pole range reciptical': '3 pole range receptacle',
'sink cutting oard': 'sink cutting board',
'steel tubing falanges': 'steel tubing flanges',
'outdoor unskid tiles': 'outdoor non skid tiles',
'6 round headlag bolt': '6 round head lag bolt',
'cyprees fence': 'cypress fence',
'75 qrt cooler with wheels': '75 quart cooler with wheels',
'buit in themostat': 'built in thermostat',
'speacalty bit set': 'specialty bit set',
'curtain rod classic sqaure finial': 'curtain rod classic square finial',
'silk poinsetia': 'silk poinsettia',
'1 1/4 pvcsch 80': '1 1/4 pvc sch 80',
'grill ousite door': 'grill outside door',
'lumionaire': 'luminaire',
'adienne bathroom vanity light': 'adrienne bathroom vanity light',
'chashing led lights': 'chasing led lights',
'24 inch vessal tops': '24 inch vessel tops',
'co2 detector kiddie': 'co2 detector kidde',
'white glazed 4 tilw': 'white glazed 4 tile',
'wood lattace': 'wood lattice',
'premaid stair railing': 'premade stair railing',
'3 function double walll switch': '3 function double wall switch',
'koehler shower faucet with spray': 'kohler shower faucet with spray',
'askley electric fireplace': 'ashley electric fireplace',
'blind for paladian': 'blind for paladin',
'regancy railin': 'regency railing',
'weatherside purit': 'weatherside purity',
'vent a hood dampr': 'vent a hood damper',
'light tropper 2x4': 'light troffer 2x4',
'30 amp generater receptical': '30 amp generator receptacle',
'prefab wood gate panals': 'prefab wood gate panels',
'floating corner shelfing': 'floating corner shelving',
'fridgidaire dehumidifier': 'frigidaire dehumidifier',
'pegs for cabinent shelves': 'pegs for cabinet shelves',
'100 amp to 200a lit': '100 amp to 200 a lot',
'decorative metal sceen': 'decorative metal screen',
'lacross weather pro center': 'lacrosse weather pro center',
'behr flat white marque': 'behr flat white marquee',
'high output floresant': 'high output fluorescent',
'behr hawian paint': 'behr hawaiian paint',
'pressure vaccuum breaker o-ring': 'pressure vacuum breaker o-ring',
'psint gun': 'paint gun',
'wine coller': 'wine cooler',
'rug ruunners': 'rug runners',
'clock control for fridgidare gas stove': 'clock control for frigidaire gas stove',
'outlet expsnsion surge protector': 'outlet expansion surge protector',
'rigid pipe threader': 'ridgid pipe threader',
'electical box': 'electrical box',
'insect granuels': 'insect granules',
'compsit outside corner': 'composite outside corner',
'cabinet kitchen ligth': 'cabinet kitchen light',
'dewalt ratchet srewdriver': 'dewalt ratchet screwdriver',
'18.5 outside chair cushiobs': '18.5 outside chair cushions',
'fenching and gate latches': 'fencing and gate latches',
'heater for refrigertor': 'heater for refrigerator',
'motion detect indoor': 'motion detector indoor',
'refrigerater french doors ge brand': 'refrigerator french doors ge brand',
'tiki tourches': 'tiki torches',
'gren house kits': 'greenhouse kits',
'5000 btu aircondition': '5000 btu air conditioner',
'airator dishwasher': 'aerator dishwasher',
'2x6 metal brakets': '2x6 metal brackets',
'weldn 3': 'weldon 3',
'ceiling paint pray': 'ceiling paint spray',
'flourescent fixture metal parts': 'fluorescent fixture metal parts',
'natural hickery kitchen cabinets': 'natural hickory kitchen cabinets',
'kitchen aide dishwasher': 'kitchenaid dishwasher',
'led track lightning systems': 'led track lighting systems',
'duplex receptacle nickle': 'duplex receptacle nickel',
'12 foot ceadar': '12 foot cedar',
'faux wood shade 100 jnches': 'faux wood shade 100 inches',
'contracto0r hose': 'contractor hose',
'lspacers for toilet bowl': 'spacers for toilet bowl',
'aftificial prelit christmas trees': 'artificial prelit christmas trees',
'paint colores by rooms': 'paint colors by rooms',
'warm whit led bulb': 'warm white led bulb',
'clamps for unistruct': 'clamps for unistrut',
'kitchen trviso price phister': 'kitchen treviso price pfister',
'10guage copper wire 3 stand': '10 gauge copper wire 3 stand',
'deep frezer with glass cover': 'deep freezer with glass cover',
'powder clorine shock treatment': 'powder chlorine shock treatment',
'galvanaized can': 'galvanized can',
'prebent aluminum facia': 'prebent aluminum fascia',
'vinyl scrapper for jack hammer': 'vinyl scraper for jack hammer',
'dwaft outside plants': 'dwarf outside plants',
'tilebath walls small': 'tile bath walls small',
'2 ton aircondition': '2 ton air conditioner',
'martha stewart metalic paint gallon': 'martha stewart metallic paint gallon',
'schilage electronic deadbolts locks': 'schlage electronic deadbolts locks',
'60x65shower doors': '60x65 shower doors',
'tile slide cuter': 'tile slide cutter',
'eagle peak hoickory': 'eagle peak hickory',
'gas black range worldpool': 'gas black range whirlpool',
'trigger makita skillsaw': 'trigger makita skil saw',
'hardi lap hanger': 'hardie lap hanger',
'master flow insolated duct wrap': 'master flow insulated duct wrap',
'replacment stove knobs': 'replacement stove knobs',
'outdoor alrm': 'outdoor alarm',
'wireless outdoor thermom': 'wireless outdoor thermometer',
'faun paint': 'fawn paint',
'wireless security caamera': 'wireless security camera',
'fiet electric led gu10': 'feit electric led gu10',
'stair unners': 'stair runners',
'stainstess steel spray paint': 'stainless steel spray paint',
'mount blanv': 'mont blanc',
'riobi power tool combo': 'ryobi power tool combo',
'24 sydey collection': '24 sydney collection',
'air compresser': 'air compressor',
'no tresspassing signs': 'no trespassing signs',
'flexable 6 inch': 'flexible 6 inch',
'wall beveled framelessmirror': 'wall beveled frameless mirror',
'slidein range bisque': 'slide in range bisque',
'router templit kits letters': 'router template kits letters',
'american sandard 1660.225,': 'american standard 1660.225,',
'onyx sand porcelian': 'onyx sand porcelain',
'watherproof electrical boxes': 'weatherproof electrical boxes',
'carpet remmnant': 'carpet remnant',
'8\' sigle wall gal pipe': '8\' single wall galv pipe',
'byfold hinges': 'bi fold hinges',
'terra cota quarry stones': 'terracotta quarry stones',
'rustolem appliance touch up paint': 'rustoleum appliance touch up paint',
'rain nickle': 'rain nickel',
'whirlpool light bulb part 8206232': 'whirlpool light bulb part 8206232a',
'Vigaro fall fertilizer': 'Vigoro fall fertilizer',
'pneumatic cynlinder': 'pneumatic cylinder',
'20 ft electical romex': '20 ft electrical romex',
'medicine cabinets recessable black': 'medicine cabinets recessed black',
'krass 30 inch kitchen sink': 'kraus 30 inch kitchen sink',
'stainless steel grat': 'stainless steel grate',
'suncort 8\' duct fans': 'suncourt 8\' duct fans',
'nutmag mirrors': 'nutmeg mirrors',
'clawfoot tub faucit kit': 'clawfoot tub faucet kit',
'protective pper': 'protective paper',
'touchless dishwashing kintchen dispenser': 'touchless dishwashing kitchen dispenser',
'air temperture contorl valve': 'air temperature control valve',
'melinger hand truck wheals': 'melinger hand truck wheels',
'watt premiere water filters': 'watt premier water filters',
'weed killer spray contaner': 'weed killer spray container',
'18in hardware coth': '18in hardware cloth',
'ac window supprt': 'ac window support',
'vegetable plannter': 'vegetable planter',
'soap punp': 'soap pump',
'wall paper murial glue': 'wallpaper mural glue',
'vertical binds hardware': 'vertical blinds hardware',
'rubbermaid verital sheds': 'rubbermaid vertical sheds',
'1/2 in. extension joint': '1/2 in. expansion joint',
'curtin rods': 'curtain rods',
'edge glued rounda': 'edge glued rounds',
'plywood edge taope': 'plywood edge tape',
'36\' copoktop': '36\' cooktop',
'curtains non black out': 'curtains not blackout',
'honolule center drain': 'honolulu center drain',
'toliet snake': 'toilet snake',
'black and deckerbattery pack': 'black and decker battery pack',
'beer and wine combination frigerator': 'beer and wine combination refrigerator',
'mess wire fencing': 'mesh wire fencing',
'ecosmart 90 led daylight br30': 'ecosmart 90w led daylight br30',
'miniture bulbs 2 pin': 'miniature bulbs 2 pin',
'dishwasher water connection vlave': 'dishwasher water connection valve',
'ant bait raps': 'ant bait traps',
'coragated aluimin special order': 'corrugated aluminum special order',
'carpot canopy 10x20': 'carport canopy 10x20',
'10 foot white ethjernet cable': '10 foot white ethernet cable',
'polished chrome cbinet pulls': 'polished chrome cabinet pulls',
'cooper tubing': 'copper tubing',
'dwarf pereniel plants': 'dwarf perennial plants',
'lampost motion detector': 'lamp post motion detector',
'3 gutter oulets': '3 gutter outlets',
'kohler shower ddoors for tubs in nickel': 'kohler shower doors for tubs in nickel',
'zep liquid air fresher': 'zep liquid air freshener',
'rewiring built in oven': 'wiring built in oven',
'10/4 SJ CABLE': '10/4 SJO CABLE',
'tempered glass wndow': 'tempered glass window',
'mataeials needed for paver patio': 'materials needed for paver patio',
'tankles water heater gas outdoor': 'tankless water heater gas outdoor',
'ypermethrin': 'cypermethrin',
'kwikset halifax door leaver': 'kwikset halifax door lever',
'ryobi coordless 18v starter kit': 'ryobi cordless 18v starter kit',
'habor gazeebo': 'harbor gazebo',
'electric barbeque grills': 'electric barbecue grills',
'rasin raised garden bed': 'resin raised garden bed',
'barbeque grills big and easy': 'barbecue grills big and easy',
'floor warming matt': 'floor warming mat',
'machette': 'machete',
'cool tube lgts': 'cool tube lights',
'universal faucet connect': 'universal faucet connector',
'daltile hexgon': 'daltile hexagon',
'hurracaine brackets': 'hurricane brackets',
'martha stewart curtiins': 'martha stewart curtains',
'byfold doors': 'bifold doors',
'2 tier adjustable cabinet orgainzer': '2 tier adjustable cabinet organizer',
'7w compact flourescent bulb': '7w compact fluorescent bulb',
'singel wall stove pipe': 'single wall stove pipe',
'wheeld trimmer': 'wheeled trimmer',
'boader rocks': 'border rocks',
'crown moldinf jig': 'crown molding jig',
'small refridgerators': 'small refrigerators',
'blind courner': 'blind corner',
'asphault gap repair': 'asphalt gap repair',
'no. 30 ridgid cutting wheel': 'no. 30 rigid cutting wheel',
'battery cable conector': 'battery cable connector',
'coranado baseboard pine': 'coronado baseboard pine',
'cerrowire 18 gauge': 'cerro wire 18 gauge',
'universal exstention cord': 'universal extension cord',
'wirlpool counterdepth side by side refrigrator': 'whirlpool counter depth side by side refrigerator',
'cedar bahr 502 stain': 'cedar behr 502 stain',
'small tracerse rods': 'small traverse rods',
'yelloe safet tape': 'yellow safety tape',
'elctric heating lamps': 'electric heating lamps',
't8 flourescent bulbs': 't8 fluorescent bulbs',
'u bents fluorescent': 'u bend fluorescent',
'pergo lamate flooring': 'pergo laminate flooring',
'sweenys mole and gopher repelant': 'sweeney\'s mole and gopher repellent',
'rg6 connecto': 'rg6 connector',
'ge electriv burners': 'ge electric burners',
'replacement part for koehler toilet kb3': 'replacement part for kohler toilet kb3',
'furiture paint, stain and varnish': 'furniture paint, stain and varnish',
'mission prarie camber top slab': 'mission prairie camber top slab',
'mirr edge': 'mirror edge',
'orbital sanding disck': 'orbital sanding disc',
'quickrete 50 lb mix': 'quikrete 50 lb mix',
'high efficiency dust baf rigid vac': 'high efficiency dust bag ridgid vac',
'liminate flooring cleaning': 'laminate flooring cleaning',
'gtxworks trimmer spools': 'gt worx trimmer spools',
'securty bar mounts': 'security bar mounts',
'fall supression kit': 'fall suppression kit',
'weatherproof boom box': 'waterproof boombox',
'geld wen 2500 96 x 36': 'jeld wen 2500 96 x 36',
'enfineered floors drifting sand': 'engineered floors drifting sand',
'well pump back presure valve': 'well pump back pressure valve',
'heavy duty shevlving': 'heavy duty shelving',
'mmodel': 'model',
'frigidare stainless refrig': 'frigidaire stainless refrig',
'rusteoulm spray paint': 'rustoleum spray paint',
't5 high output ligh': 't5 high output light',
'sandpap': 'sandpaper',
'cerowire 12 gauge': 'cerro wire 12 gauge',
'what rings for toitets': 'what rings for toilets',
'infrared theomomter': 'infrared thermometer',
'1x6 toungh groove': '1x6 tongue groove',
'v ceader board': 'v cedar board',
'sodpstone': 'soapstone',
'10 yeaer smoke detectors/carbon monoxide combo': '10 year smoke detectors/carbon monoxide combo',
'kkohler toilet seat': 'kohler toilet seat',
'pink toliet seat elongated': 'pink toilet seat elongated',
'flexiblr bit': 'flexible bit',
'coleman instasmart grill': 'coleman instastart grill',
'exide battery 75,car battrey': 'exide battery 75,car battery',
'black cherry stainer': 'black cherry stain',
'1x4 pre primed mfd trim': '1 x 4 pre primed mdf trim',
'mnt movr combo shovel': 'mnt move combo shovel',
'100 watt candlabra bulb': '100 watt candelabra bulb',
'samsung black stainles': 'samsung black stainless',
'dewalt jig saw blad': 'dewalt jig saw blade',
'alluminum downspout connector': 'aluminum downspout connector',
'alltyp of fences': 'all type of fences',
'clow hammer 16 0z': 'claw hammer 16 0z',
'tomatoe plants': 'tomato plants',
'white lacquer wall selves': 'white lacquer wall shelves',
'pressure guage': 'pressure gauge',
'slid pad': 'slide pad',
'female hose connectore': 'female hose connector',
'solor lamp outside': 'solar lamp outside',
'daltile urban camoflogue': 'daltile urban camouflage',
'deocorative screws for hanging pictures': 'decorative screws for hanging pictures',
'kitchen composie double sinks': 'kitchen composite double sinks',
'whitesilicone': 'white silicone',
'self contained recepticles': 'self contained receptacles',
'brass handel door': 'brass handle door',
'charley brown christmas trees': 'charlie brown christmas trees',
'carbon fiber vinel': 'carbon fiber vinyl',
'phillips fluorescent 40': 'philips fluorescent 40',
'36 inxh return air grill': '36 inch return air grill',
'garden pond pump impellor': 'garden pond pump impeller',
'vinal flooring 25 year warranty': 'vinyl flooring 25 year warranty',
'mulcing blades for troy built': 'mulching blades for troy bilt',
'5 1/4 deckboard': '5 1/4 deck board',
'plaste dip': 'plasti dip',
'cemnt pads for makita bo5030': 'cement pads for makita bo5030',
'ge beverage refriderator': 'ge beverage refrigerator',
'bathroom plummbing': 'bathroom plumbing',
'gas pire column': 'gas fire column',
'confrence': 'conference',
'clock cuitain rod wood': 'clock curtain rod wood',
'decrotive outdoor lighting': 'decorative outdoor lighting',
'ballast for single t12 fluorscent bulb': 'ballast for single t12 fluorescent bulb',
'workstar cordless and recharable work light': 'workstar cordless and rechargeable work light',
'light bulb 250 cfl': 'light bulb 250w cfl',
'rubber gromet': 'rubber grommet',
'spray metallicpaint': 'spray metallic paint',
'paint quart zise': 'paint quart size',
'blinds for portch': 'blinds for porch',
'sable browj 95': 'sable brown 95',
'1/2 conduet': '1/2 conduit',
'wooden curton rod brackets': 'wooden curtain rod brackets',
'corbels and shelfs': 'corbels and shelves',
'seimens typ qt breaker': 'siemens type qt breaker',
'steel builco': 'steel bilco',
'metal joinst': 'metal joist',
'externol patio doors': 'external patio doors',
'FENSE LIGHTING': 'FENCE LIGHTING',
'oil bronzed wine glass rack': 'oiled bronze wine glass rack',
'klien lether pouch': 'klein leather pouch',
'shark rocket filtes': 'shark rocket filters',
'4x7 ruggs': '4 x 7 rugs',
'24 elicreic stove': '24 electric stove',
'grill hasmilton': 'grill hamilton',
'air vents for plumping': 'air vents for plumbing',
'gazebo with shelfs': 'gazebo with shelves',
'expanding plastic sleeves for scews': 'expanding plastic sleeves for screws',
'oli rubbed bronze drain': 'oil rubbed bronze drain',
'clothsline rope': 'clothesline rope',
'stove gas replacement knops': 'stove gas replacement knobs',
'rechargale batteries for solar lights': 'rechargeable batteries for solar lights',
'standard artificial grasa synthetic lawn turf': 'standard artificial grass synthetic lawn turf',
'new deck for rtz 50': 'new deck for rzt 50',
'wire shelv liner': 'wire shelf liner',
'wood paint with primerin blue': 'wood paint with primer in blue',
'fabreeze': 'febreze',
'ceilng fan': 'ceiling fan',
'manuel for 425 - 1649': 'manual for 425 - 1649',
'14 in dimond circular saw blade': '14 in diamond circular saw blade',
'berhr solid 213 deep': 'behr solid 213 deep',
'driveway m arkers': 'driveway markers',
'commercil threshold': 'commercial threshold',
'multinozzle spray painting': 'multi nozzle spray painting',
'shower nitch': 'shower niche',
'1/2x1/2 quater round': '1/2 x 1/2 quarter round',
'Insulted work gloves': 'Insulated work gloves',
'5000 lumnes': '5000 lumens',
'magnets for gromets': 'magnets for grommets',
'toro springkler': 'toro sprinkler',
'motion sensoring black decorative lamp': 'motion sensing black decorative lamp',
'proclean concentrated drain cleaner': 'pro clean concentrated drain cleaner',
'feather river doorsth sidelights': 'feather river doors sidelights',
'ridgid powerwasher parts': 'ridgid power washer parts',
'skill pressure sander': 'skil pressure sander',
'outdoor vertical sheda': 'outdoor vertical sheds',
'brick web thin brick flats': 'brickweb thin brick flats',
'airguard undelayment': 'airguard underlayment',
'toyotaa': 'toyota',
'round rug for kitch': 'round rug for kitchen',
'round one piece tiolet': 'round one piece toilet',
'sppeed square': 'speed square',
'adirondak chair': 'adirondack chair',
'hickory hadwre touch of spring': 'hickory hardware touch of spring',
'garge door handle': 'garage door handle',
'whiteled tree': 'white led tree',
'airosol epoxy paint': 'aerosol epoxy paint',
'ice ring machine': 'ice rink machine',
'deep expresso walnut/new ellenton': 'deep espresso walnut/new ellenton',
'interior walls bieges brown': 'interior walls beige brown',
'pet disinfectent': 'pet disinfectant',
'altra furniture parsons credenza desk with drawer and bookc': 'altra furniture parsons credenza desk with drawer and books',
'gorilla gold cpvc gluetm': 'gorilla gold cpvc glue',
'aligator clips': 'alligator clips',
'irrigation pipe connectoer': 'irrigation pipe connector',
'citronella fire pot fue': 'citronella fire pot fuel',
'garden spreklers heads': 'garden sprinklers heads',
'light swith insulation': 'light switch insulation',
'dual lock 3m veclro': 'dual lock 3m velcro',
'water proof mc connecter': 'waterproof dc connector',
'snow blowerr scraper blade': 'snowblower scraper blade',
'vesel tub': 'vessel tub',
'carrrs': 'careers',
'odl 6\' x 6\'retractable screens': 'odl 6\' x 6 retractable screens',
'outdoord storage locker': 'outdoor storage locker',
'standing seam roof panals': 'standing seam roof panels',
'phillips 65w 2 pack': 'philips 65w 2 pack',
'2 squares double 5 vinly siding': '2 squares double 5 vinyl siding',
'fabric steam cleamer': 'fabric steam cleaner',
'scikkens stain': 'sikkens stain',
'polyethylne cap': 'polyethylene cap',
'decorative interor glass doors': 'decorative interior glass doors',
'vanity top for two vessell': 'vanity top for two vessel',
'giant bird of paridise': 'giant bird of paradise',
'almeda hickory': 'alameda hickory',
'cabinet ba rpulls in bronze': 'cabinet bar pulls in bronze',
'l screwa': 'l screws',
'johan deer 0 turns': 'john deere 0 turns',
'milwankee 7 pc set': 'milwaukee 7 pc set',
'faucet pl801l 18 guage': 'faucet pl801l 18 gauge',
'12 light bronze chandilier': '12 light bronze chandelier',
'flourecent light plastic covers': 'fluorescent light plastic covers',
'roof pannel foam': 'roof panel foam',
'under cabinet lighting ro-hs': 'under cabinet lighting rohs',
'round lshower kit': 'round shower kit',
'concreet enchors': 'concrete anchors',
'woodwen pallet': 'wooden pallet',
'shigles': 'shingles',
'comercial plank doors': 'commercial plank doors',
'stainless steel kithen faucet with soap dispenser': 'stainless steel kitchen faucet with soap dispenser',
'm4 50 srcew': 'm4 50 screw',
'splitbolt connector': 'split bolt connector',
'charming 18 roll': 'charmin 18 roll',
'table glass oatu': 'table glass oahu',
'kohlor flush for toilet tank 4421': 'kohler flush for toilet tank 4421',
'outdoor pendant lioghting': 'outdoor pendant lighting',
'24 inflex gas line': '24 in flex gas line',
'lawn mower rechargeable batterys': 'lawn mower rechargeable batteries',
'merola metalic tile': 'merola metallic tile',
'above ground pool vaccume': 'above ground pool vacuum',
'bosss water softner': 'boss water softener',
'moen one handel kitchen faucet repair parts': 'moen one handle kitchen faucet repair parts',
'sanding machinehardwood floors': 'sanding machine hardwood floors',
'super patch driverway sealler': 'super patch driveway sealer',
'sschlueter shower system': 'schluter shower system',
'offset flang': 'offset flange',
'aluminium tube rectangle': 'aluminium tube rectangular',
'legrad keystone cat5e jack': 'legrand keystone cat5e jack',
'yellow jacket extenison cord': 'yellow jacket extension cord',
'Habachi': 'Hibachi',
'mini pendant braket': 'mini pendant bracket',
'hose to presure washer': 'hose to pressure washer',
'gliddon speed wall': 'glidden speed wall',
'new age produucts': 'new age products',
'archor tub and shower faucet trim': 'archer tub and shower faucet trim',
'space saving stoage': 'space saving storage',
'vinyl flooring that clicks togther': 'vinyl flooring that clicks together',
'gladden smooth stone': 'glidden smooth stone',
'knape vogt baseket': 'knape vogt basket',
'ul liquidthight 25': 'ul liquidtight 25',
'white glossy furniture pain': 'white gloss furniture paint',
'square bannister': 'square banister',
'greenh wall paint': 'green wall paint',
'tile medalions for the floor or wall': 'tile medallions for the floor or wall',
'milwalke brewers garden flag': 'milwaukee brewers garden flag',
'versatiube': 'versatube',
'kenocen can nozzle': 'kenken can nozzle',
'mosaic esterior': 'mosaic exterior',
'winow wheels': 'window wheels',
'stud popers': 'stud poppers',
'trane 2.5 toon 13 seer heat pump': 'trane 2.5 ton 13 seer heat pump',
'ultra vue quick screeen': 'ultra vue quick screen',
'watterproof cleated boots': 'waterproof cleated boots',
'hdx pneumaitic paint': 'hdx pneumatic paint',
'biscue dishwashers': 'bisque dishwashers',
'sunbrella sipcovers': 'sunbrella slipcovers',
'miracle grow water absorbing crystals': 'miracle gro water absorbing crystals',
'disposal rim and stopperkohler': 'disposal rim and stopper kohler',
'long brakets': 'long brackets',
'freplace gloves': 'fireplace gloves',
'ridgid power drve pipe threadrs': 'ridgid power drive pipe threader',
'12x24 shefl': '12x24 shelf',
'1x6 prime molding': '1x6 primed molding',
'countertop soap dispensor': 'countertop soap dispenser',
'bushbutton for door bell': 'push button for doorbell',
'cauk saver': 'caulk saver',
'rubber stipper': 'rubber stopper',
'16 inch flourescent': '16 inch fluorescent',
'pendents amber': 'pendants amber',
'newtone broan round 751': 'nutone broan round 751',
'danze shower vlve': 'danze shower valve',
'wooden track drawer replacment': 'wooden track drawer replacement',
'single granit bathroom vanity': 'single granite bathroom vanity',
'oval steele tubs': 'oval steel tubs',
'liquid weed and feeed': 'liquid weed and feed',
'outodoor oatoman': 'outdoor ottoman',
'nutone vaccum wall plate': 'nutone vacuum wall plate',
'collor clamp': 'collar clamp',
'pure air ultra filtration syste,m': 'pure air ultra filtration system',
'llantana': 'lantana',
'white melimine cabinet': 'white melamine cabinet',
'2-handlet diverter repair kit': '2-handle diverter repair kit',
'mosiac lamps': 'mosaic lamps',
'exterior pipeinsulation': 'exterior pipe insulation',
'warm espresso bamboo quarteround': 'warm espresso bamboo quarter round',
'hardwood medialons': 'hardwood medallions',
'tub/hand shoer diverter with trim': 'tub/hand shower diverter with trim',
'locite 2 plus 1': 'loctite 2 plus 1',
'kwiksest door handle delta': 'kwikset door handle delta',
'frame nail hitschi': 'frame nailer hitachi',
'30 mirrow medicine cabinet': '30 mirrored medicine cabinet',
'pecane trees': 'pecan trees',
'lifeproof carpet sample lower trasure': 'lifeproof carpet sample lower treasure',
'umbrell hole ring': 'umbrella hole ring',
'melmane wood': 'melamine wood',
'melomine accessories': 'melamine accessories',
'windows single hang': 'windows single hung',
'portabe bar': 'portable bar',
'crystable table set lamps': 'crystal table set lamps',
'schlage handleset bermingham': 'schlage handleset birmingham',
'lp gas converion kit': 'lp gas conversion kit',
'quart exterior semi glass enamel': 'quart exterior semi gloss enamel',
'woodrx ultra natural': 'wood rx ultra natural',
'brushed barringnton': 'brushed barrington',
'leather lgue': 'leather glue',
'moen bronze low arch faucet': 'moen bronze low arc faucet',
'18 inch linen closit': '18 inch linen closet',
'bear paint green myth': 'behr paint green myth',
'solar light rechargable batteries': 'solar light rechargeable batteries',
'solar powered emergancy unit': 'solar powered emergency unit',
'kohler 3 handle shower reapair kit': 'kohler 3 handle shower repair kit',
'thermadore black cast kitchen sink': 'thermador black cast kitchen sink',
'dental shelf door': 'dentil shelf door',
'seed starting mixx': 'seed starting mix',
'rubberaid dust mop': 'rubbermaid dust mop',
'phillips bugle-head finethread sharp': 'phillips bugle-head fine thread sharp',
'black laminate shelfing': 'black laminate shelving',
'ice maker cylinoid ge': 'ice maker solenoid ge',
'home decorators mantle green': 'home decorators mantel green',
'perrenial white daisy like': 'perennial white daisy like',
'chamber-top halifax glass dooor': 'chamber-top halifax glass door',
'depp well socket set': 'deep well socket set',
'hanger racc vertical': 'hanger rack vertical',
'tool package with pilers,needlenose': 'tool package with pliers,needlenose',
'fome core board': 'foam core board',
'colaroo outdoor shades corded': 'coolaroo outdoor shades corded',
'decoator chain': 'decorator chain',
'rust oleum dark hunter green spray enamel paint': 'rustoleum dark hunter green spray enamel paint',
'lights and siloutte': 'lights and silhouette',
'real live orchred plants': 'real live orchid plants',
'2ftx3ft industrail rbber mat': '2ftx3ft industrial rubber mat',
'fernace vent shut off': 'furnace vent shut off',
'cedar wood balisters': 'cedar wood balusters',
'gliden premium semi gloss quart': 'glidden premium semi gloss quart',
'mosaic tile costal mist': 'mosaic tile coastal mist',
'toilet lever kphler brass': 'toilet lever kohler brass',
'front doors - poinye zinc': 'front doors - pointe zinc',
'matte bailey mohogany': 'matte bailey mahogany',
'wesleyand': 'wesleyan',
'plasic diffuser': 'plastic diffuser',
'cover kage for pet': 'cover page for pet',
'network agapter': 'network adapter',
'whitehaus bathroom sinl': 'whitehaus bathroom sink',
'icey tech': 'icey tek',
'kaorik wine': 'kalorik wine',
'susbenders': 'suspenders',
'policarbonate case': 'polycarbonate case',
'shaw livng rugs model rac66': 'shaw living rugs model rac66',
'carpet in bassment': 'carpet in basement',
'bifold doorsfold plantation': 'bi fold doors fold plantation',
'handheld seed speader': 'handheld seed spreader',
'hot dipped galvinized coil nails': 'hot dipped galvanized coil nails',
'hand saw sharpner': 'hand saw sharpener',
'mattress foam protecter': 'mattress foam protector',
'n utdriver bit': 'nut driver bit',
'lattice wwod tone': 'lattice wood tone',
'our door receptacles': 'outdoor receptacles',
'great outdors': 'great outdoors',
'exterior string ligts': 'exterior string lights',
'dog ,cat,repellant': 'dog ,cat,repellent',
'20a wht nylon duple': '20a wht nylon duplex',
'fatmax leveler premier': 'fatmax level premier',
'ralph laren brown paints': 'ralph lauren brown paints',
'liquid bi fuels': 'liquid biofuels',
'scrubbin sponge': 'scrubbing sponge',
'ceramic tile tooth brush and soap holder': 'ceramic tile toothbrush and soap holder',
'cultured marbl;e shower walls': 'cultured marble shower walls',
'did recorder player': 'dvd recorder player',
'golith': 'goliath',
'black maytag french door refrigirator': 'black maytag french door refrigerator',
'stair nose santos maogani': 'stair nose santos mahogany',
'l tub fauctes': 'l tub faucets',
'eyebolt brass': 'eye bolt brass',
'terracotta exteriorpaint': 'terracotta exterior paint',
'manuel venting sky light': 'manual venting skylight',
'bathroom fan motion sencer': 'bathroom fan motion sensor',
'hard start capacitator': 'hard start capacitor',
'windows gazing bead': 'windows glazing bead',
'vanitiy top back splach': 'vanity top backsplash',
'large yellow screw inground anchors': 'large yellow screw in ground anchors',
'heavy duty polyurathane': 'heavy duty polyurethane',
'folfable stool': 'foldable stool',
'charlston south carolina': 'charleston south carolina',
'pine flooring, tang end grove': 'pine flooring, tongue and groove',
'starter fuil': 'starter fuel',
'granite colr group prices': 'granite color group prices',
'calanvreas': 'calaveras',
'golden krome spray': 'gold chrome spray',
'g e micewave': 'g e microwave',
'sheet meatal hole cutter': 'sheet metal hole cutter',
'zurn hot short stemcartridge': 'zurn hot short stem cartridge',
'outdoor picture ftames': 'outdoor picture frames',
'shower pad porceleain': 'shower pan porcelain',
'battery under counter lightening': 'battery under counter lighting',
'elictric door bail': 'electric door bell',
'barbeque insert': 'barbecue insert',
'barclay glass bathroom shelfs': 'barclay glass bathroom shelves',
'preserva wood caryon': 'preserva wood crayon',
'white grey floor tile mosiac': 'white grey floor tile mosaic',
'minwax wood puty': 'minwax wood putty',
'the govenore': 'the governor',
'diverter 5 in. tub spout with slip fit connection in chrom': 'diverter 5 in. tub spout with slip fit connection in chrome',
'vinyl plank blue slatr': 'vinyl plank blue slate',
'frameless shwoer panel': 'frameless shower panel',
'virtue usa huntshire': 'virtu usa huntshire',
'3.5 Hindge': '3.5 Hinge',
'round plastic tablrs': 'round plastic tables',
'paint storage contaiers': 'paint storage containers',
'centerset 2-handle weall': 'centerset 2-handle wall',
'wax ring with self taping bolts': 'wax ring with self tapping bolts',
'gama sonic winsor pier base': 'gama sonic windsor pier base',
'pilla windows': 'pella windows',
'dresser acessories': 'dresser accessories',
'duel compression 1/2 x 3/8 valve': 'dual compression 1/2 x 3/8 valve',
'american atanderd plebe 4086': 'american standard plebe 4086',
'dyson ball allergy vaccume': 'dyson ball allergy vacuum',
'low woltage relay': 'low voltage relay',
'hand steam cleanere': 'hand steam cleaner',
'eiectric concrte mixer': 'electric concrete mixer',
'pemco sill extender': 'pemko sill extender',
'silver branzing rods': 'silver brazing rods',
'sanding beltsr': 'sanding belts',
'dorr faceplates': 'door faceplates',
'stainless steel ball beating for hinges': 'stainless steel ball bearing for hinges',
'stabilty': 'stability',
'hose bibb replacement valve': 'hose bib replacement valve',
'long shower curtins': 'long shower curtains',
'crub rubber': 'crumb rubber',
'swivel saftey cover': 'swivel safety cover',
'makita oscilating saw': 'makita oscillating saw',
'whithaus faucet speckled brass': 'whitehaus faucet speckled brass',
'energy efficent skylight': 'energy efficient skylight',
'garden seed packs': 'garden seed packets',
'boshe double bevel sliding miter saw': 'bosch double bevel sliding miter saw',
'taylor test lit': 'taylor test kit',
'chargril grill': 'charbroil grill',
'over ran': 'overran',
'recipricating saw 15 amp': 'reciprocating saw 15 amp',
'mikita 18v 2.6 ah': 'makita 18v 2.6 ah',
'no burn spry': 'no burn spray',
'cuctis soil': 'cactus soil',
'brushed stainless cabin ate hardware': 'brushed stainless cabinet hardware',
'fork lift strps': 'forklift straps',
'electrian': 'electrician',
'doorbell chimes and transformr': 'doorbell chimes and transformer',
'faux diamondplate': 'faux diamond plate',
'milstead vintage maple engineered flooring': 'millstead vintage maple engineered flooring',
'ce tech coaxial cablenail in clips': 'ce tech coaxial cable nail in clips',
'bq heat distributipn plates': 'bbq heat distribution plates',
'metal lath stuko': 'metal lath stucco',
'cord less drill portcable': 'cordless drill porter cable',
'round bulb sting lights': 'round bulb string lights',
'lp coversion kit maytag dryer': 'lp conversion kit maytag dryer',
'chase lounger covers': 'chaise lounge covers',
'insl-x pure step': 'insl-x sure step',
'gerber knife tactiical': 'gerber knife tactical',
'deecals number': 'decals number',
'hampton bat 26\'. w tilt out hamper white': 'hampton bay 26\'. w tilt out hamper white',
'outdoor chritstmas light remote': 'outdoor christmas light remote',
'wood fuelpellets': 'wood fuel pellets',
'cpipe lamp': 'pipe lamp',
'wiemans stainless cleaner': 'weimans stainless cleaner',
'10 roll up outside blinds': '10 roll up outdoor blinds',
'wainscote': 'wainscot',
'heat resistant spicket': 'heat resistant spigot',
'garage shelve': 'garage shelf',
'shevles': 'shelves',
'storage shelfs': 'storage shelves',
'proipane': 'propane',
'ventless gas heters': 'ventless gas heaters',
'vinal fencing': 'vinyl fencing',
'toliet bowl': 'toilet bowl',
'toliet bowl wrench': 'toilet bowl wrench',
'fanc wire': 'fancy wire',
't post fence assesories': 't post fence accessories',
'telescooping ladder': 'telescoping ladder',
'spring haven brown all weather wicked': 'spring haven brown all weather wicker',
'36 exterior steele door': '36 exterior steel door',
'faucetskitchen': 'faucets kitchen',
'batt insulatiom': 'batt insulation',
'congolium': 'congoleum',
'vinal flooring': 'vinyl flooring',
'vynil floorring': 'vinyl flooring',
'clacier bay toliet': 'glacier bay toilet',
'GLAZER BAY TOILET': 'GLACIER BAY TOILET',
'norton hot water heater ingniter': 'norton hot water heater igniter',
'undercounter lighs': 'under counter lights',
'stainless refridgerator': 'stainless refrigerator',
'stainless steel refridgerator': 'stainless steel refrigerator',
'window ac manuel operation': 'window ac manual operation',
'rustolem': 'rustoleum',
'18v drill brushles': '18v drill brushless',
'dining sets outdo?': 'dining sets outdoor?',
'eat resistant epoxy': 'heat resistant epoxy',
'cordless drils': 'cordless drills',
'3 piece bfush set': '3 piece brush set',
'kitchen faucet installtion tools': 'kitchen faucet installation tools',
'Moen Kitchen sink fauset': 'Moen Kitchen sink faucet',
'plaqstic bucket': 'plastic bucket',
'3m winow film': '3m window film',
'water softner': 'water softener',
'flourescent light bulp': 'fluorescent light bulb',
'closermaid cabinet': 'closetmaid cabinet',
'raised panel extirior doors': 'raised panel exterior doors',
'blcktop repair kit': 'blacktop repair kit',
'peal and stick flashning': 'peel and stick flashing',
'marshaltown 6 inch': 'marshalltown 6 inch',
'vynel wall tiles': 'vinyl wall tiles',
'presusre treated post': 'pressure treated post',
'LAWN LEAF VACUM': 'LAWN LEAF VACUUM',
'space heatres': 'space heaters',
'alumium fence 6 ft 6ft': 'aluminum fence 6 ft 6 ft',
'bathroom sinks kholer': 'bathroom sinks kohler',
'pedistal': 'pedestal',
'clear eppoxy': 'clear epoxy',
'wood fir plank flooring': 'wood for plank flooring',
'quickcrete waterproof cement': 'quikrete waterproof cement',
'rood rake': 'roof rake',
'propane gas tank meater': 'propane gas tank meter',
'ac cooling fin straightenrs': 'ac cooling fin straightener',
'slidng panel lock': 'sliding panel lock',
'closet maiid cabinets': 'closet maid cabinets',
'ridge power tools combo packs': 'ridgid power tools combo packs',
'backsplash tiiles': 'backsplash tiles',
'cabinet knobsd': 'cabinet knobs',
'cabnet knobs': 'cabinet knobs',
'dealt air compressor parts': 'dewalt air compressor parts',
'spgot': 'spigot',
'paver bricks scolloped': 'paver bricks scalloped',
'CHASE LOUNGE': 'CHAISE LOUNGE',
'layndry tu': 'laundry tu',
'submeribale pedistal sump pump': 'submersible pedestal sump pump',
'celling fans': 'ceiling fans',
'wall sconse': 'wall sconce',
'93 inch widecellular shades': '93 inch wide cellular shades',
'post white ligth': 'post white light',
'palmero brushed nickel ceiling fan': 'palermo brushed nickel ceiling fan',
'aromaticeatern red cedar planking': 'aromatic eastern red cedar planking',
'black and decker hobby crafter': 'black and decker hobbycrafter',
'front load fridaire': 'front load frigidaire',
'pedestial washer': 'pedestal washer',
'whilrpool front loader washer': 'whirlpool front loader washer',
'extrior louvored wood door 30x80': 'exterior louvered wood door 30x80',
'interior doorser': 'interior doors',
'dill battery 12v model g0805': 'drill battery 12v model g0805',
'10 stair lader': '10 stair ladder',
'milwakee 1/2 impact cordless': 'milwaukee 1/2 impact cordless',
'kolher': 'kohler',
'floor slealer': 'floor sealer',
'high traffic floor polurethane paint': 'high traffic floor polyurethane paint',
'sawzall blades miluakee': 'sawzall blades milwaukee',
'vaccum hose': 'vacuum hose',
'vynal repalcement windows': 'vinyl replacement windows',
'vinil for flors': 'vinyl for floors',
'led withe': 'led white',
'squar flushmount lights': 'square flush mount lights',
'huskey 18': 'husky 18',
'remove oder from kerosine': 'remove odor from kerosene',
'25ft huskt tape': '25 ft husky tape',
'plastic corrougeted roofing': 'plastic corrugated roofing',
'kholerhighland white toilet': 'kohler highline white toilet',
'toilet seat for briggs toliet': 'toilet seat for briggs toilet',
'steel shelve': 'steel shelf',
'dig irritation drip': 'dig irrigation drip',
'kohler pedastal sink': 'kohler pedestal sink',
'high loss natural jabota': 'high loss natural jatoba',
'Huskavarna': 'Husqvarna',
'power cordclass 2 power model xy_2900600_u': 'power cord class 2 power model xy_2900600_u',
'treaated plywood': 'treated plywood',
'air condtioning wall unit': 'air conditioning wall unit',
'wall air conditioneer': 'wall air conditioner',
'window ac insaller': 'window ac installer',
'sensor porch ligts': 'sensor porch lights',
'miricile applet or and tray': 'miracle applet or and tray',
'paint refil tray': 'paint refill tray',
'door knobs exteria': 'door knobs exterior',
'exhaustless portable airconditioner': 'exhaustless portable air conditioner',
'portable aircondition': 'portable air conditioner',
'oscilliating too': 'oscillating tool',
'PYWOOD': 'PLYWOOD',
'rigid nailer': 'ridgid nailer',
'bankoft toilet biscuit': 'bancroft toilet biscuit',
'mown pull down faucet': 'moen pull down faucet',
'lo gas water heater': 'low gas water heater',
'richman water heater': 'richmond water heater',
'tall toliet': 'tall toilet',
'ridding mower covers': 'riding mower covers',
'hole angel jig': 'hole angle jig',
'10 deep kitchen sink porcelin': '10 deep kitchen sink porcelain',
'plastic tiles pcv': 'plastic tiles pvc',
'vinyl sheeti': 'vinyl sheet',
'samsungelectric ranges': 'samsung electric ranges',
'frameless shoer doors': 'frameless shower doors',
'webber charcoal grill': 'weber charcoal grill',
'kerosine heaters': 'kerosene heaters',
'kersone heaters': 'kerosene heaters',
'propain heater': 'propane heater',
'heating elements for dyer whirlpool': 'heating elements for dryer whirlpool',
'safty glasses': 'safety glasses',
'eletric stove': 'electric stove',
'Schecule 40 Pipe': 'Schedule 40 Pipe',
'bayonett saw blades': 'bayonet saw blades',
'sconses': 'sconces',
'52\' pinacle ceiling fan': '52\' pinnacle ceiling fan',
'atic fans with lubers': 'attic fans with louvers',
'cealing fans': 'ceiling fans',
'hampton bay out door celing fan': 'hampton bay outdoor ceiling fan',
'out door celing fan': 'outdoor ceiling fan',
'kitchen exaust fan': 'kitchen exhaust fan',
'Cimmaron': 'Cimarron',
'fridgedaire': 'frigidaire',
'frigidaire washer door striker/catch': 'frigidaire washer door striker/latch',
'lawn mover wrench': 'lawn mower wrench',
'bmboo lattecie': 'bamboo lattice',
'1 handle tub and shower faucet shower and tub vlaves': '1 handle tub and shower faucet shower and tub valves',
'hansgroph faucets bathroom': 'hansgrohe faucets bathroom',
'led light bulbsbulbs': 'led light bulbs bulbs',
'landscape srone': 'landscape stone',
'braid nailer combo kit': 'brad nailer combo kit',
'doors for mobilhomes': 'doors for mobile homes',
'smaller closet lights': 'small closet lights',
'traficmaster': 'trafficmaster',
'hardi board smooth': 'hardie board smooth',
'wainscoating': 'wainscoting',
'galvanisedround fire pit ring': 'galvanized round fire pit ring',
'electrichot water heaters residential': 'electric hot water heaters residential',
'garage shelf unjit': 'garage shelf unit',
'stone baxksplash': 'stone backsplash',
'pendent cealing fixture': 'pendant ceiling fixture',
'undercabinet ligghts': 'under cabinet lights',
'martha stewartcabinet pull': 'martha stewart cabinet pull',
'4 fluorescant fixture covers': '4 fluorescent fixture covers',
'exterior vanyl french door': 'exterior vinyl french door',
'adheasive': 'adhesive',
'lineulium floor': 'linoleum floor',
'plexiglass selves': 'plexiglass shelves',
'Allure mellowood flooring': 'Allure mellow wood flooring',
'allure tile sedon?': 'allure tile sedona?',
'allure vinyl tilecordoba': 'allure vinyl tile cordoba',
'wood veener facing for kitchen cabinets': 'wood veneer facing for kitchen cabinets',
'painters plastice': 'painters plastic',
'granitne sealer': 'granite sealer',
'55 inch cultured marble vanity tope': '55 inch cultured marble vanity top',
'mirros': 'mirrors',
'garge floor paint': 'garage floor paint',
'weather indoor and outpoor temp': 'weather indoor and outdoor temp',
'ryobi blower with batery': 'ryobi blower with battery',
'powerwasher hose': 'power washer hose',
'mikita 9.5 volt drill': 'makita 9.5 volt drill',
'vinal fence straps': 'vinyl fence straps',
'black chandelier wjth black shades': 'black chandelier with black shades',
'medecine cabinet': 'medicine cabinet',
'medicient cabinet': 'medicine cabinet',
'serface mount medicine cabinets': 'surface mount medicine cabinets',
'husqvarna presure washer': 'husqvarna pressure washer',
'back yard weather forecasteer': 'backyard weather forecaster',
'chain link fenceing': 'chain link fencing',
'jogsaw tool': 'jigsaw tool',
'lg ruff wall instalation': 'lg ruff wall installation',
'pcv pipe sement': 'pvc pipe cement',
'hardi trim': 'hardietrim',
'vynal siding insol': 'vinyl siding insol',
'cheapete gas 40 gallon hot water heater': 'cheapest gas 40 gallon hot water heater',
'powervent water heater': 'power vent water heater',
'exterieur door 32 inch': 'exterior door 32 inch',
'vynal floor matting': 'vinyl floor matting',
'door knobsw': 'door knobs',
'black decke weed eaters': 'black decker weed eaters',
'lectric string trimmer cst1200r': 'electric string trimmer cst1200r',
'1.4 mircowave over the stove': '1.4 microwave over the stove',
'stove excaust fan': 'stove exhaust fan',
'mobile home extior doors': 'mobile home exterior doors',
'wood lathesw': 'wood lathes',
'anderson replacement double hung window 34.5x36.5': 'andersen replacement double hung window 34.5x 36.5',
'contrcator baseboard': 'contractor baseboard',
'moehn kitchen facet 87211srssd': 'moen kitchen faucet 87211srs',
'repare kit for 2-handle side sprayer kitchen faucet': 'repair kit for 2-handle side sprayer kitchen faucet',
'ecco friendly garden hose': 'eco friendly garden hose',
'flex gardn hose': 'flex garden hose',
'garden host 50': 'garden hose 50',
'bathroon lighting': 'bathroom lighting',
'lanscape timber': 'landscape timber',
'bathroom valnity lights': 'bathroom vanity lights',
'gas pressure regular': 'gas pressure regulator',
'ashely 48 in electric chi': 'ashley 48 in electric chi',
'2x6 treted 8ft long': '2x6 treated 8ft long',
'wheel borrow': 'wheelbarrow',
'whellbarrow': 'wheelbarrow',
'scement bags': 'cement bags',
'accordian door': 'accordion door',
'Electic Lawn Mowers': 'Electric Lawn Mowers',
'hampton bay cabinetscornor cabinetupper': 'hampton bay cabinets corner cabinet upper',
'electric pump for sprying': 'electric pump for spraying',
'front foor 2 siding': 'front door 2 siding',
'whirlpool lgas dryer': 'whirlpool gas dryer',
'pressure treated lumber spaint': 'pressure treated lumber paint',
'rhee. 40 gallon water heaters': 'rheem. 40 gallon water heaters',
'8x96 white decrotive shelf': '8x96 white decorative shelf',
'bathroom pendastal': 'bathroom pedestal',
'r25/r30 faced insullation': 'r25/r30 faced insulation',
'heavy dutty letter support': 'heavy duty letter support',
'ceder decking': 'cedar decking',
'negitave air machine': 'negative air machine',
'outdoor maouse traps': 'outdoor mouse traps',
'storeage shed': 'storage shed',
'car canoply': 'car canopy',
'commerical tile': 'commercial tile',
'1 1/2 colated rock screws': '1 1/2 collated rock screws',
'sheeet rock mud': 'sheetrock mud',
'counterdepth fridge': 'counter depth fridge',
'maytag refregirator': 'maytag refrigerator',
'whirlpool french door frig 30 wide': 'whirlpool french door fridge 30 wide',
'wirlpool 30 wide french door': 'whirlpool 30 wide french door',
'dleta shower faucet handles': 'delta shower faucet handles',
'38 grainte composit sink': '38 granite composite sink',
'blown in insulaation': 'blown in insulation',
'foam insulatino': 'foam insulation',
'doors interiorwith door jams': 'doors interior with door jams',
'residentialsteel door and frame': 'residential steel door and frame',
'wood swimg set kits': 'wood swing set kits',
'quickcrete resurfacer': 'quikrete resurfacer',
'2 inch srew cap': '2 inch screw cap',
'30 gar builtin ranges': '30 gas built in ranges',
'samsong stive': 'samsung stove',
'chissel': 'chisel',
'rigid compound miter saw': 'ridgid compound miter saw',
'rigid compound miter saw dust pouch': 'ridgid compound miter saw dust pouch',
'shampoo and lotion automatice dispenser': 'shampoo and lotion automatic dispenser',
'wall scone': 'wall sconce',
'rubber for refridgerators': 'rubber for refrigerators',
'water proofing shower membrame': 'waterproofing shower membrane',
'fridigdaire back gas range': 'frigidaire black gas range',
'cabrio dryder': 'cabrio dryer',
'whilrpool cabrio dryer': 'whirlpool cabrio dryer',
'light switcht sensor': 'light switch sensor',
'calutta marble laminate countertop': 'calcutta marble laminate countertop',
'vinylcorner boards 4 inch': 'vinyl corner boards 4 inch',
'plastix box': 'plastic box',
'scurity screen doors': 'security screen doors',
'nonadhesive vinyl flooring': 'non adhesive vinyl flooring',
'trafficmaster interloclk': 'trafficmaster interlock',
'anntenias': 'antennas',
'clothes dryer srand': 'clothes dryer stand',
'eletric water heater': 'electric water heater',
'sharkbike push to connect 3/4': 'sharkbite push to connect 3/4',
'fuel nozzle furnance': 'fuel nozzle furnace',
'ryobi one batery': 'ryobi one battery',
'5/8 floring plywood weatherproof': '5/8 flooring plywood weatherproof',
'mitter saw manual': 'miter saw manual',
'selenoid for dryer': 'solenoid for dryer',
'presure coated wood': 'pressure coated wood',
'composote lumber': 'composite lumber',
'14 awgsoilid wire': '14 awg solid wire',
'welded wire fenching 12 gauge': 'welded wire fencing 12 gauge',
'patio chair cusions': 'patio chair cushions',
'viynl patches': 'vinyl patches',
'7 in. stove pie': '7 in. stove pipe',
'whirlpoolgas stove': 'whirlpool gas stove',
'whirpool microwave 1.4 cu ft': 'whirlpool microwave 1.4 cu ft',
'whirpool refrigerator': 'whirlpool refrigerator',
'3\' nailes': '3\' nails',
'nailer tooal': 'nailer tool',
'weed barier': 'weed barrier',
'oped garage door indicator': 'open garage door indicator',
'styrafoam': 'styrofoam',
'10 foot step laddert': '10 foot step ladder',
'3 1/2 hardwar': '3 1/2 hardware',
'double control shower vavle': 'double control shower valve',
'replacement shower encosure rod': 'replacement shower enclosure rod',
'baby gurad gate': 'baby guard gate',
'joint compund light weight': 'joint compound lightweight',
'sheetrock high preformance joint compound': 'sheetrock high performance joint compound',
'1x2 appearnce boards': '1x2 appearance boards',
'lumber 2x8 composit': 'lumber 2x8 composite',
'floot ball': 'float ball',
'dewalt empact driver': 'dewalt impact driver',
'bosh cordless combo set': 'bosch cordless combo set',
'ryobi 18v battwery': 'ryobi 18v battery',
'kihchen cabinet slidr shelves': 'kitchen cabinet slide shelves',
'chesnut border edging': 'chestnut border edging',
'outdoor seat cushions 24.5 whte': 'outdoor seat cushions 24.5 white',
'12x12 tile msaic': '12x12 tile mosaic',
'skill screwdriver battery': 'skil screwdriver battery',
'manual for airens lawnmower': 'manual for ariens lawn mower',
'gas stabilisor': 'gas stabilizer',
'4 x 4 white pocelain tile': '4 x 4 white porcelain tile',
'rigid pipe cutter': 'ridgid pipe cutter',
'24 regrigerators': '24 refrigerators',
'refrigerato 33 inch wide': 'refrigerator 33 inch wide',
'smudge proof stainless steele': 'smudge proof stainless steel',
'whirpool amana': 'whirlpool amana',
'moen banbury 24 in. doubletowel bar': 'moen banbury 24 in. double towel bar',
'4\' r;ubber top set base': '4\' rubber top set base',
'extension springes': 'extension springs',
'grass string trimmer electric homelight': 'grass string trimmer electric homelite',
'craftman style lights': 'craftsman style lights',
'glacier bay delmare expresso wall mirror': 'glacier bay del mar espresso wall mirror',
'dollie 600 lbs': 'dolly 600 lbs',
'patio tille': 'patio tile',
'eucalptus white board': 'eucalyptus white board',
'vynal tile': 'vinyl tile',
'heat reducing window flim': 'heat reducing window film',
'Porach Light': 'Porch Light',
'brissell zing vacuum bags': 'bissell zing vacuum bags',
'toillet': 'toilet',
'kitchen aid refrigirator light bulb:': 'kitchenaid refrigerator light bulb:',
'chadelier': 'chandelier',
'cararra marble': 'carrara marble',
'coedless makita chainsaw with batteries': 'cordless makita chainsaw with batteries',
'mikita cordless drill': 'makita cordless drill',
'antique brass hindges for doors': 'antique brass hinges for doors',
'riobi battery': 'ryobi battery',
'feerzer': 'freezer',
'schlade wirell door lock': 'schlage wireless door lock',
'water proff board': 'waterproof board',
'celing light holder': 'ceiling light holder',
'wood toold': 'wood tools',
'4 inch insolation': '4 inch insulation',
'Urehtane Foam Sheet': 'Urethane Foam Sheet',
'4 center lavatory facuet': '4 center lavatory faucet',
'Shower facuet': 'Shower faucet',
'electric dyrer heater elemnet': 'electric dryer heater element',
'milluakee drill bits': 'milwaukee drill bits',
'scrren wire': 'screen wire',
'safegaurd 30 synthetic felt': 'safeguard 30 synthetic felt',
'hampden bay chandelier': 'hampton bay chandelier',
'1/2 inch pnumatic stapler': '1/2 inch pneumatic stapler',
'12\' firetreat 2x4': '12\' fire treated 2x4',
'american-standarfairfield elongated one-piece 1.6 gpf toilet': 'american-standard fairfield elongated one-piece 1.6 gpf toilet',
'toilet aquaia': 'toilet aquia',
'Comercial electric': 'Commercial electric',
'light puff defuser': 'light puff diffuser',
'ryobi drill prass': 'ryobi drill press',
'110v ectric dryers': '110v electric dryers',
'FIRE RESTISTANT BOARD': 'FIRE RESISTANT BOARD',
'vinyle plankj': 'vinyl plank',
'cordless backpack vaccume': 'cordless backpack vacuum',
'hampton baysolar bird lights': 'hampton bay solar bird lights',
'kohler chair height elongated toliet': 'kohler chair height elongated toilet',
'electic fireplace': 'electric fireplace',
'hampton bay jmestown': 'hampton bay jamestown',
'surfacemount kitchen sink': 'surface mount kitchen sink',
'rigid wet nozzelsqueegee': 'ridgid wet nozzle squeegee',
'vacumns': 'vacuums',
'gble vent': 'gable vent',
'ventalation': 'ventilation',
'biinds and shades': 'blinds and shades',
'copact drills cordless': 'compact drills cordless',
'ridge 18v hammer': 'ridgid 18v hammer',
'heavy dutty garden hose': 'heavy duty garden hose',
'1/2\' extirior plywood': '1/2\' exterior plywood',
'gutter water reflector': 'gutter water deflector',
'under cabinet led light accesory pack': 'under cabinet led light accessory pack',
'armstroung floor adhesive': 'armstrong floor adhesive',
'whirlpoolstainless steel refrig': 'whirlpool stainless steel refrig',
'black and decker elctric': 'black and decker electric',
'cordless edgere': 'cordless edger',
'white electrtical outlets': 'white electrical outlets',
'tan unmbrella': 'tan umbrella',
'gothic fence picketts': 'gothic fence pickets',
'vinyl 1 bilnd': 'vinyl 1 blinds',
'console tab;le': 'console table',
'T-5 florescent light fixtures': 'T-5 fluorescent light fixtures',
'royobi pedestal grinder wheel': 'ryobi pedestal grinder wheel',
'wall panaling': 'wall paneling',
'PORCH STAIR RAILLING': 'PORCH STAIR RAILING',
'micro fibe': 'microfiber',
'champion toliet part': 'champion toilet parts',
'rr vaccum filter': 'rr vacuum filter',
'exhust fan': 'exhaust fan',
'corragated metal': 'corrugated metal',
'gasolene generaters and inverters': 'gasoline generators and inverters',
'stailess steel top stoves': 'stainless steel top stoves',
'top freezer refrigeratot': 'top freezer refrigerator',
'3/4 inche rock': '3/4 inch rock',
'12 roofing pannel': '12 roofing panel',
'blakck in decker edger': 'black and decker edger',
'tile scrapper': 'tile scraper',
'brick morter': 'brick mortar',
'cement blodks': 'cement blocks',
'unmortified mortor': 'unmodified mortar',
'bifold door hardw': 'bifold door hardware',
'metal scerews': 'metal screws',
'sliding doos for backyard': 'sliding doors for backyard',
'screen fame corner': 'screen frame corner',
'electric lawn mowerectrical': 'electric lawn mower electrical',
'clacer bay all n one sink': 'glacier bay all in one sink',
'sola water fountain': 'solar water fountain',
'closet clothes rackclosetmaid': 'closet clothes rack closetmaid',
'passload': 'paslode',
'kitchen tile backspl': 'kitchen tile backsplash',
'viyle fencing': 'vinyl fencing',
'flexible tourche extension': 'flexible torch extension',
'6 pnl molded': '6 panel molded',
'soild core flush pre hung door': 'solid core flush prehung door',
'convction heater': 'convection heater',
'closet orginizer shoe rack wire': 'closet organizer shoe rack wire',
'freesstanding': 'free standing',
'mmirror closet doors': 'mirror closet doors',
'maratha stewart monogram wreath': 'martha stewart monogram wreath',
'edsel heavy duty 5': 'edsal heavy duty 5',
'11 ft extension cord groud': '11 ft extension cord ground',
'indoor/otdoor extensions cords e176194': 'indoor/outdoor extension cords e176194',
'outdoor extention cords e': 'outdoor extension cords e',
'unface insulation 23 inches wide': 'unfaced insulation 23 inches wide',
'porble toilets': 'portable toilets',
'toilet saftey seat': 'toilet safety seat',
'silca sand': 'silica sand',
'tall 18 in storage cabnet': 'tall 18 in storage cabinet',
'20x8 storge shed': '20 x 8 storage shed',
'rubbermade shed': 'rubbermaid shed',
'rubbermaid resin storage cabnetsn': 'rubbermaid resin storage cabinets',
'cedar wod chips': 'cedar wood chips',
'hidraulic tools': 'hydraulic tools',
'celing fans with lighting and remote': 'ceiling fans with lighting and remote',
'fridigidaire drop in oven': 'frigidaire drop in oven',
'tub surround pices': 'tub surround prices',
'allure flooring oak expresso': 'allure flooring oak espresso',
'pass and seymore light cover switch': 'pass and seymour light cover switch',
'28x54 replacment window': '28x54 replacement windows',
'anderson windows new constraction': 'anderson windows new construction',
'swamp oolers': 'swamp coolers',
'wahing machines': 'washing machines',
'interior primed mdf crown mouldin': 'interior primed mdf crown moulding',
'built in convectionoven': 'built in convection oven',
'flpwers for your garden': 'flowers for your garden',
'closetr rod': 'closet rod',
'unfinished wide bplanked hickory flooring': 'unfinished wide plank hickory flooring',
'48v to 110 invertor': '48v to 110v inverter',
'landscape wateting': 'landscape watering',
'sockets for fluorescence fixtres': 'sockets for fluorescent fixtures',
'woodceramic floor tile': 'wood ceramic floor tile',
'brigsg and stations 500 seris': 'briggs and stations 500 series',
'green carpert': 'green carpet',
'pressure treated step tread 6ft': 'pressure treated stair tread 6ft',
'hand pump gfor water': 'hand pump for water',
'rutic lighting': 'rustic lighting',
'cender blocks': 'cinder blocks',
'talsrar': 'talstar',
'rybi power tools': 'ryobi power tools',
'portercable 6 gal': 'porter cable 6 gal',
'table covers waterproff': 'table covers waterproof',
'solid alium square tubing': 'solid aluminum square tubing',
'deck post jhardware': 'deck post hardware',
'hunter new bronzel fans': 'hunter new bronze fans',
'16d framin': '16d framing',
'moen brushed nickel batharoom': 'moen brushed nickel bathroom',
'barriar plastic': 'barrier plastic',
'window ac/hehat units': 'window ac/heat units',
'icycle lights': 'icicle lights',
'4 gallon expanion': '4 gallon expansion',
'floor mount lawndry seek': 'floor mount laundry sink',
'high addhesion primer': 'high adhesion primer',
'24 gauge wire connectorsa': '24 gauge wire connectors',
'sterio wire for indoor speakers': 'stereo wire for indoor speakers',
'garage bicyclestorage': 'garage bicycle storage',
'how mustall tankless water heater': 'how install tankless water heater',
'chelsea white acrylic oval in rectangl': 'chelsea white acrylic oval in rectangle',
'cleaning jeta for whirlpool': 'cleaning jets for whirlpool',
'bathroom faucet replacment valve': 'bathroom faucet replacement valve',
'3x5 cemet board': '3x5 cement board',
'vaccumm': 'vacuum',
'ghroe shower headstrong shower heads': 'grohe shower headstrong shower heads',
'mial boxes': 'mail boxes',
'claw tups': 'claw tips',
'facia corner brace': 'fascia corner brace',
'pegisas sink top': 'pegasus sink top',
'mirroes for doors': 'mirrors for doors',
'counter depth refridgidere': 'counter depth refrigerator',
'corrigaed fiberglass roofing': 'corrugated fiberglass roofing',
'window airconditionerwith heaters': 'window air conditioners with heaters',
'extention rail for opener': 'extension rail for opener',
'whitecomposite fascia board': 'white composite fascia board',
'vanity topp 31 white': 'vanity top 31 white',
'underhood range fan': 'under hood range fan',
'price pfister trevisa': 'price pfister treviso',
'milwaukee cordlees tools': 'milwaukee cordless tools',
'pendent light': 'pendant light',
'pre-emergent weed contro': 'pre-emergent weed control',
'is this item in stoes?': 'is this item in store?',
'door home secutity': 'door home security',
'3oo watt haalogen bulbs': '3oo watt halogen bulbs',
'96 in flourescent bulbs': '96 in fluorescent bulbs',
'shop ceiling fane': 'shop ceiling fan',
'aaa batteries everready gold': 'aaa batteries eveready gold',
'buth tub faucet': 'bathtub faucet',
'delta montecello tub faucet': 'delta monticello tub faucet',
'ge spring water heater': 'geospring water heater',
'ge water heater egnighter': 'ge water heater igniter',
'31x19 one piecs bathroom sink': '31x19 one piece bathroom sink',
'replacment clips for wire rack': 'replacement clips for wire rack',
'ac air diverer': 'ac air diverter',
'3 sewer pipce': '3 sewer pipe',
'3\' electical pipe': '3\' electrical pipe',
'large outside horizontal storage shed': 'large outdoor horizontal storage shed',
'swing hangar hardware': 'swing hanger hardware',
'dim able balafon flood light': 'dimmable balafon flood light',
'phillips exterior led': 'philips exterior led',
'banity 11 watt light bulb': 'vanity 11 watt light bulb',
'kithchen install': 'kitchen install',
'magnet stainless steel for diswasher': 'magnet stainless steel for dishwasher',
'phone spliter': 'phone splitter',
'receptical': 'receptacle',
'water resistent electrical outlets': 'water resistant electrical outlets',
'kitchenaid superb oven': 'kitchenaid superba oven',
'403esprit 2x4 ceing tile': '403 esprit 2x4 ceiling tile',
'wall excess panel': 'wall access panel',
'drop celing tiles': 'drop ceiling tiles',
'pvc drop in celing tiles': 'pvc drop in ceiling tiles',
'pl gas hose': 'lp gas hose',
'12 v landscaping ligtening fixture': '12v landscape lighting fixture',
'behr white external semigloss paint': 'behr white exterior semi gloss paint',
'GRAGE DOOR OPENER': 'GARAGE DOOR OPENER',
'grage doors': 'garage doors',
'24 inch med oak base': '24 inch medium oak base',
'okeefes working hands': 'o\'keeffe\'s working hands',
'phenofin': 'penofin',
'8 foot galvinezed': '8 foot galvanized',
'12 mobil home air duct': '12 mobile home air duct',
'door hinges for americana refrigator': 'door hinges for americana refrigerator',
'tub drain kit bronz': 'tub drain kit bronze',
'halligon light bulb': 'halogen light bulb',
'husky rachet': 'husky ratchet',
'andersen vnyl windows': 'andersen vinyl windows',
'balwind double cilynder lock': 'baldwin double cylinder lock',
'drop down ceiling ppanel': 'drop down ceiling panel',
'arearugs and mats': 'area rugs and mats',
'dark expresso paint for wood': 'dark espresso paint for wood',
'melamine shelvees': 'melamine shelves',
'mosaic whitel and black tile': 'mosaic white and black tile',
'8 wre wheel': '8 wire wheel',
'9\' plna replament blade': '9\' plane replacement blade',
'saw zall blades': 'sawzall blades',
'pain pot': 'paint pot',
'drain cleaneraner machines': 'drain cleaner machines',
'anderson storm doors pet': 'andersen storm doors pet',
'basement window replacement insructions': 'basement window replacement instructions',
'grill cover brinkman double grill': 'grill cover brinkmann double grill',
'gerber daisies': 'gerbera daisies',
'gerber daisy': 'gerbera daisy',
'exterior wood stainolid color': 'exterior wood stain color',
'2700 br30 led': '2700k br30 led',
'3m wheather stripping': '3m weather stripping',
'barn doorhinges': 'barn door hinges',
'plywood progect': 'plywood project',
'28 guage screen': '28 gauge screen',
'lampsade pendent light': 'lamp shade pendant light',
'kitchen cabiner corner': 'kitchen cabinet corner',
'paatio swings': 'patio swings',
'12 bar chian for echo': '12 bar chain for echo',
'bix max 7x7': 'big max 7x7',
'bathtub faucethandle replacement parts': 'bathtub faucet handle replacement parts',
'prelit spiral trees': 'pre lit spiral trees',
'12 sthel chainsaws': '12 stihl chainsaws',
'10 ft drain house': '10 ft drain hose',
'american standard tiolet flappers': 'american standard toilet flappers',
'solar out doors post lights': 'solar outdoor post lights',
'kitchen cabinet with counertop': 'kitchen cabinet with countertop',
'Painting Cabniets': 'Painting Cabinets',
'18x18 teracota porcelain floor tiles': '18x18 terracotta porcelain floor tiles',
'drywal': 'drywall',
'pencle trim tile': 'pencil trim tile',
'vinyl latice': 'vinyl lattice',
'angle findeer': 'angle finder',
'laminate tile comercial': 'laminate tile commercial',
'couner deep refrigerators': 'counter deep refrigerators',
'chritmas tree': 'christmas tree',
'plug in carbon monoxcide': 'plug in carbon monoxide',
'cabinet handels': 'cabinet handles',
'frigidair drop in': 'frigidaire drop in',
'7\' hex hed bolt': '7\' hex head bolt',
'vent fllters': 'vent filters',
'horizontall': 'horizontal',
'3 x 6 blace tile': '3 x 6 black tile',
'rostoluem spray paint': 'rustoleum spray paint',
'power drill battery an charger': 'power drill battery and charger',
'rayobi blue charger': 'ryobi blue charger',
'robyi': 'ryobi',
'5/4 pressure treaded decking': '5/4 pressure treated decking',
'white carrara herring bome': 'white carrara herringbone',
'sailr blue': 'sailor blue',
'charbroil classic': 'char broil classic',
'14 electric concrete saw with vc-u dch300': '14 electric concrete saw with vac-u dch 300',
'potable air conditioners': 'portable air conditioners',
'fin heating tubeing': 'fin heating tubing',
'fine/line baseboarrd': 'fine/line baseboard',
'hot water heating eliment': 'hot water heating element',
'toiet': 'toilet',
'hole house fan': 'whole house fan',
'montaga bay tile': 'montego bay tile',
'40 gal liquid propan': '40 gal liquid propane',
'4 x 4 pos cap': '4x4 post cap',
'white quartz cointertop': 'white quartz countertop',
'elongated bone toilest': 'elongated bone toilet',
'white acryl paint': 'white acrylic paint',
'foundstion vents': 'foundation vents',
'sqeaky carpet stair kit': 'squeaky carpet stair kit',
'defusiers for floors': 'diffusers for floors',
'8\' galvanized roll top edginh': '8\' galvanized roll top edging',
'marithon water heater element': 'marathon water heater element',
'wirerless light switch': 'wireless light switch',
'moen posi-temp tim kit': 'moen posi-temp trim kit',
'shower dooroil rubbed bronze': 'shower door oil rubbed bronze',
'wireing': 'wiring',
'kitchen aid architecs series 11': 'kitchenaid architect series 11',
'wall oven combon': 'wall oven combo',
'survival babkpack': 'survival backpack',
'wire dstaples': 'wire staples',
'4in drain gratewhite': '4in drain grate white',
'shitch cover': 'switch cover',
'vitarera quartz': 'viatera quartz',
'5/8-in masonary drill bit': '5/8-in masonry drill bit',
'brinkman grill grates': 'brinkmann grill grates',
'pest repellant': 'pest repellent',
'bathun drain plunger': 'bathtub drain plunger',
'incounter gas cook range': 'encounter gas cook range',
'peat moss bails': 'peat moss bales',
'3-piece bath accessory kit in chrom': '3-piece bath accessory kit in chrome',
'alameda hickey laminate': 'alameda hickory laminate',
'flooring moisture barier': 'flooring moisture barrier',
'vinylcove base': 'vinyl cove base',
'ge diswasher': 'ge dishwasher',
'b10 led bub': 'b10 led bulb',
'cub cadetcordless hedge trimmer': 'cub cadet cordless hedge trimmer',
'hampton bay jewelery armoire wht': 'hampton bay jewelry armoire white',
'perenials': 'perennials',
'heat ventss': 'heat vents',
'mobil home glass door': 'mobile home glass door',
'lamanet floor cutter': 'laminate floor cutter',
'on off valvefor tub faucet': 'on off valve for tub faucet',
'assie grill fire and ash': 'aussie grill fire and ash',
'hanging worklight fixtures ceiling': 'hanging work light fixtures ceiling',
'20 amp tamper resitance duplex receptacle': '20 amp tamper resistant duplex receptacle',
'liqwuid nail': 'liquid nail',
'1/2 tee pvcp': '1/2 tee pvc',
'toilet repair kit cadet 3 flowise 2-piece 1.28 gpf round fro': 'toilet repair kit cadet 3 flowise 2-piece 1.28 gpf round front',
'50 amp turn look plug': '50 amp turn lock plug',
'6x6 colunm caps': '6x6 column caps',
'12 valleta': '12 valletta',
'pellitized lime': 'pelletized lime',
'concrete sonic tub': 'concrete sonic tube',
'110 air conditior an heat': '110 air conditioner and heat',
'what is best for settingfence posts in soil?': 'what is best for setting fence posts in soil?',
'washer dryer folding worksurface': 'washer dryer folding work surface',
'outdoor spigot spliter': 'outdoor spigot splitter',
'alumiunm gate': 'aluminum gate',
'lawm mower': 'lawn mower',
'door floor plate slideing doors': 'door floor plate sliding doors',
'akkegro': 'allegro',
'wead burner': 'weed burner',
'galvinized nails 3': 'galvanized nails 3',
'artifical turf border': 'artificial turf border',
'oppeuss light trim ring': 'oppeus light trim ring',
'12 ft john boat': '12ft jon boat',
'outdoor coucg': 'outdoor couch',
'drywall panel hoisst': 'drywall panel hoist',
'ego hainsaw': 'ego chainsaw',
'hibascus plant': 'hibiscus plant',
'pullbehind fertilizer spreader': 'pull behind fertilizer spreader',
'door latch uard': 'door latch guard',
'water suppy box': 'water supply box',
'octagon eve vents': 'octagon eave vents',
'el ctrical s ez': 'electrical sez',
'varnishe': 'varnish',
'klien rg6': 'klein rg6',
'floor matt': 'floor mat',
'60 shower ddor': '60 shower door',
'blue tapeexhaust fan/light': 'blue tape exhaust fan/light',
'rocks hydrophonics': 'rocks hydroponics',
'mesquito spray': 'mosquito spray',
'alumiun grove in': 'aluminum grove in',
'lithonia outdoor wall paks': 'lithonia outdoor wall packs',
'60 in. shower door brushed nicker': '60 in. shower door brushed nickel',
'makit 12v': 'makita 12v',
'black and yellow non skip tape': 'black and yellow non skid tape',
'skylifghts': 'skylights',
'led hale gin g9': 'led halogen g9',
'electrical pipe flexable': 'electrical pipe flexible',
'emt stroas': 'emt straps',
'ridged 1 emt conduit': 'rigid 1 emt conduit',
'baliey window roller shades': 'bailey window roller shades',
'hampton bay reswood valley 5 pc patio seating set with fire': 'hampton bay redwood valley 5 pc patio seating set with fire',
'lawn grass catchbag': 'lawn grass catcher bag',
'1/4 lauwan under layment': '1/4 lauan underlayment',
'window tintinig': 'window tinting',
'4 inch round bellbox cover': '4 inch round bell box cover',
'vinal latice fence': 'vinyl lattice fence',
'solar pest repelers': 'solar pest repellers',
'barn doorspring latches': 'barn door spring latches',
'3 gauge copper phhn': '3 gauge copper thhn',
'three wire hottube': 'three wire hot tub',
'shope cloths': 'shop clothes',
'bbostitch tool set': 'bostitch tool set',
'outdoor hightop dining': 'outdoor high top dining',
'delata raincan': 'delta raincan',
'soap wash maching tilde': 'soap wash machine tilde',
'16 ftdecking boards': '16 ft decking boards',
'1 amp receptical': '1 amp receptacle',
'outdoor gfi': 'outdoor gfci',
'bbq burner replacment': 'bbq burner replacement',
'levin 25 wat usb': 'levin 25 watt usb',
'delta diverte rhandle in rb': 'delta diverter handle in rb',
'3 pane craftsman door': '3 panel craftsman door',
'charolettetown': 'charlottetown',
'raised toelit sseat': 'raised toilet seat',
'webber spirit gas grill': 'weber spirit gas grill',
'adapter for extention cord': 'adapter for extension cord',
'bathrub and shower wall kits': 'bathtub and shower wall kits',
'sofit vents 4x16': 'soffit vents 4 x 16',
'1/2 inch isp water supply line': '1/2 inch ips water supply line',
'eurothem thermostatic valve': 'eurotherm thermostatic valve',
'plactic totes 36 inches wide': 'plastic totes 36 inches wide',
'pest control diat': 'pest control diet',
'black cobwoys star': 'black cowboys star',
'whirpool oven 5.1': 'whirlpool oven 5.1',
'min fridges for campers': 'mini fridges for campers',
'howards restore a finish': 'howards restor a finish',
'ge just cut fraiser fur': 'ge just cut fraser fir',
'25 watt warmlight bulb': '25 watt warm light bulb',
'kichen island': 'kitchen island',
'duel mount stainless steel sinks': 'dual mount stainless steel sinks',
'home sevalance cameras': 'home surveillance cameras',
'marbel vinyl tile': 'marble vinyl tile',
'30 entry door 9 litr': '30 entry door 9 lite',
'roxul sale n sound': 'roxul safe n sound',
'4 guage use': '4 gauge use',
'jigsaw tblades': 'jigsaw t blades',
'jigsaww blades': 'jigsaw blades',
'clawfoot tub cutain': 'clawfoot tub curtain',
'raised garden ed': 'raised garden bed',
'58.75x80 sliding glass door': '58.75x 80 sliding glass door',
'1/4 nich tee': '1/4 inch tee',
'alluminun wire splice': 'aluminum wire splice',
'2 sheet metal screrw': '2 sheet metal screw',
'non electically conductive epoxy': 'non electrically conductive epoxy',
'led fluoreecent light replacement': 'led fluorescent light replacement',
't8 8 ft 4-light flourescent fixture': 't8 8 ft 4-light fluorescent fixture',
'othor ant killer': 'ortho ant killer',
'spectacide for lawnscarpenter ants': 'spectracide for lawns carpenter ants',
'ccurved shower door': 'curved shower door',
'4in pvc electrcial boxes': '4in pvc electrical boxes',
'hampton bay fan replacemtn': 'hampton bay fan replacement',
'6\' remodel can valted celing cans': '6\' remodel can vaulted ceiling cans',
'roman tub faucers': 'roman tub faucets',
'flourescent paint by rustoleum': 'fluorescent paint by rustoleum',
'hidden fastners': 'hidden fasteners',
'otdoor sola': 'outdoor solar',
'solar post l8ghts': 'solar post lights',
'plus 3 tintet': 'plus 3 tinted',
'barbeque tools': 'barbecue tools',
'circular flourecent lights': 'circular fluorescent lights',
'rain barrells': 'rain barrels',
'gagarage storage cabinets': 'garage storage cabinets',
'brown blasplash tile': 'brown backsplash tile',
'evap cooler theromsat': 'evap cooler thermostat',
'undergroud telephone wire': 'underground telephone wire',
'cop mail adapter': 'cop male adapter',
'set crews for glass': 'set screws for glass',
'roybi lazer circular saw': 'ryobi laser circular saw',
'walnuit stain': 'walnut stain',
'ruber door extension': 'rubber door extension',
'home decorators cinamon': 'home decorators cinnamon',
'apoxy patch': 'epoxy patch',
'batroom fan heater light': 'bathroom fan heater light',
'commercial radient ceiling heaters': 'commercial radiant ceiling heaters',
'surveilance camera': 'surveillance camera',
'tub facet set': 'tub faucet set',
'solistone pebbble': 'solistone pebble',
'1 1/4 galvenized steel pipe fittings': '1 1/4 galvanized steel pipe fittings',
'22.4 cubit feet refrigerator': '22.4 cubic feet refrigerator',
'behr premium plus ultrta': 'behr premium plus ultra',
'autoficial grass': 'artificial grass',
'huskey scocket set': 'husky socket set',
'husky black toll boxes': 'husky black tool boxes',
'isunderlayment requiered for metal roof': 'is underlayment required for metal roof',
'safety glass with perscription': 'safety glass with prescription',
'polished brass 8 spread lavitory faucet': 'polished brass 8 spread lavatory faucet',
'heat only therostats': 'heat only thermostats',
'65 watt dim able': '65 watt dimmable',
'1-1/4 pocket hole screwsw': '1-1/4 pocket hole screws',
'wwod floor runner': 'wood floor runner',
'bostic wood floor glue': 'bostik wood floor glue',
'hand shovles': 'hand shovels',
'garage orgnize': 'garage organizer',
'diamond plate storge unit': 'diamond plate storage unit',
'silcone': 'silicone',
'packing suplies': 'packing supplies',
'ridgid planner': 'ridgid planer',
'shower fiberglas': 'shower fiberglass',
'curtain rod wrp': 'curtain rod wrap',
'fire place accessories gas loggs': 'fireplace accessories gas logs',
'recesseingd light housing': 'recessed light housing',
'100 amps circuit braker': '100 amps circuit breaker',
'delta satin nickle shower systems': 'delta satin nickel shower systems',
'auqatic shower & bath': 'aquatic shower',
'termini mosquito garlic spray': 'terminix mosquito garlic spray',
'arbourist safety climbing belt': 'arborist safety climbing belt',
'vynal wood fence': 'vinyl wood fence',
'acrylic primere': 'acrylic primer',
'20\' facia board': '20\' fascia board',
'17 1/2 high tolite': '17 1/2 high toilet',
'howard restore a finish': 'howard restor a finish',
'tub enclouseure with tub': 'tub enclosure with tub',
'leaf guards for stomr windows': 'leaf guards for storm windows',
'sliding tub soors': 'sliding tub doors',
'amdry wallpanel': 'amdry wall panel',
'22.1 refrierator': '22.1 refrigerator',
'fram boxes': 'frame boxes',
'patio tbricks': 'patio bricks',
'6 foot treshold': '6 foot threshold',
'florencet light cover': 'fluorescent light cover',
'taracota drain pan': 'terracotta drain pan',
'smaller single deadbolt lock': 'small single deadbolt lock',
'lmainate boards': 'laminate boards',
'acuria lattace panels': 'acurio lattice panels',
'adirondeck cusion': 'adirondack cushion',
'oscilating fan': 'oscillating fan',
'washing machine plug adapator': 'washing machine plug adapter',
'concrette pier': 'concrete pier',
'southren gray tile': 'southern gray tile',
'dealt portable table saw table': 'dewalt portable table saw table',
'matte heat resistant pain': 'matte heat resistant paint',
'White Temper Resistant Duplex Outlet': 'White Tamper Resistant Duplex Outlet',
'screws for deckin': 'screws for decking',
'20 gl. hose end sprayer': '20 gal. hose end sprayer',
'sliding door storage cabi nets': 'sliding door storage cabinets',
'tinted masonary sealer': 'tinted masonry sealer',
'kids toilet seateat': 'kids toilet seat eat',
'anderson storm door screen roller': 'andersen storm door screen roller',
'vaccuum cleaners for hardwood and carpet': 'vacuum cleaners for hardwood and carpet',
'copper baluseter': 'copper baluster',
'aluninion circular blade': 'aluminium circular blade',
'ceiling light nickle 2-light': 'ceiling light nickel 2-light',
'adirondac, patio chair': 'adirondack, patio chair',
'flourescent tube': 'fluorescent tube',
'polyurethane adhesiv': 'polyurethane adhesive',
'extirior clear spray paint': 'exterior clear spray paint',
'outdoor faucwts': 'outdoor faucets',
'asphaul based coating': 'asphalt based coating',
'3/8 couipling': '3/8 coupling',
'2x4x10 pressure treater': '2x4x10 pressure treated',
'koehler faucet': 'kohler faucet',
'led rop light clips': 'led rope light clips',
'square d double brakers': 'square d double breakers',
'30 inchesbathroom vanity': '30 inches bathroom vanity',
'1/2 \' copper fiting': '1/2 \' copper fitting',
'capital cap for colum': 'capital cap for column',
'grass turf pavewrs': 'grass turf pavers',
'lowvoltage indoor accent lights': 'low voltage indoor accent lights',
'dremel minimate cordless moto tool': 'dremel minimite cordless moto tool',
'96 right hand miter tyhoon ice': '96 right hand miter typhoon ice',
'magnet base tool loight': 'magnetic base tool light',
'robi 18v saw': 'ryobi 18v saw',
'5 light hanging chandielier': '5 light hanging chandelier',
'Moem faucet repair': 'Moen faucet repair',
'3x6 daltile white 101 kohler': '3x6 daltile white k101 kohler',
'lock cmbo': 'lock combo',
'trimmer/edger\'s, gas powered': 'trimmer/edgers, gas powered',
'generaor for fridge': 'generator for fridge',
'led light bulbs dimable spot': 'led light bulbs dimmable spot',
'outdoor seatting cushions': 'outdoor seating cushions',
'full size frigde': 'full size fridge',
'ASHPHALT SEALER': 'ASPHALT SEALER',
'behr ultra pint': 'behr ultra paint',
'emparador mosaic bamboo brick': 'emperador mosaic bamboo brick',
'bath mirror cabintes': 'bath mirror cabinets',
'floor squeege': 'floor squeegee',
'squeege': 'squeegee',
'allure golden oaksku579331': 'allure golden oak sku 579331',
'artificial turf for petrs': 'artificial turf for pets',
'8 foot florescent light bulb': '8 foot fluorescent light bulb',
'3x3 diamond thread plate': '3x3 diamond tread plate',
'handical rail': 'handicap rail',
'moen grab bar securemount': 'moen grab bar secure mount',
'ceiling mount electical box': 'ceiling mount electrical box',
'stainless steal hose clamps': 'stainless steel hose clamps',
'sod grass san agustino': 'sod grass san agustin',
'bateries 9v': 'batteries 9v',
'kohler brushed nickle framless shower doors': 'kohler brushed nickel frameless shower doors',
'mirro shower doors': 'mirror shower doors',
'daylillies': 'daylilies',
'fridgedaire fridge': 'frigidaire fridge',
'storage buiding 12\' x 20\'': 'storage building 12\' x 20\'',
'pvc valvez': 'pvc valves',
'socket magnectic extension': 'socket magnetic extension',
'shop vac aacessories': 'shop vac accessories',
'roll jp door': 'roll up door',
'rollup door': 'roll up door',
'steibler eltron': 'stiebel eltron',
'liquid itght non metalic': 'liquid tight non metallic',
'metalic lquid tight': 'metallic liquid tight',
'22 bin plastic drawer parts storage organiz': '22 bin plastic drawer parts storage organizer',
'marroon roof screws': 'maroon roof screws',
'battery opererated lighting': 'battery operated lighting',
'roybi pop up': 'ryobi pop up',
'connectorv 30': 'connector 30',
'ge gfi braker 30amp': 'ge gfci breaker 30 amp',
'pipe swer': 'pipe sewer',
'treaded pvc pipe fitting': 'threaded pvc pipe fitting',
'cornewr bathtub': 'corner bathtub',
'whirlpool apron bathtup': 'whirlpool apron bathtub',
'veranda facia': 'veranda fascia',
'rrecessed light trim ring': 'recessed light trim ring',
'1 light steele sconce': '1 light steel sconce',
'7\' 90 elboq': '7\' 90 elbow',
'drawer guides and slides': 'drawer glides and slides',
'christmsa dog': 'christmas dog',
'light weight coccrete': 'lightweight concrete',
'hardwoo flooring 2 1/4 in': 'hardwood flooring 2 1/4 in',
'garden hose filter attactchent': 'garden hose filter attachment',
'milwaukie saw blades': 'milwaukee saw blades',
'dewalt extention cord': 'dewalt extension cord',
'hampton bay high gloss jabot laminate': 'hampton bay high gloss jatoba laminate',
'20v blacker and decker charger': '20v black and decker charger',
'15 water depth bathub': '15 water depth bathtub',
'magnetized wall covering': 'magnetic wall covering',
'fire brick and morter': 'fire brick and mortar',
'anderson french wood patio door 400 series': 'andersen frenchwood patio door 400 series',
'outdoor baners': 'outdoor banners',
'osciallating blade to cut tile': 'oscillating blade to cut tile',
'one way valae': 'one way valve',
'black decker matris': 'black decker matrix',
'makita skill saw': 'makita skil saw',
'tuscon patio pavers': 'tucson patio pavers',
'plastic florring': 'plastic flooring',
'fungicidal seed innoculant': 'fungicidal seed inoculant',
'pcv coated hardware cloth': 'pvc coated hardware cloth',
'2x2 ceiling tilepantq22s': '2x2 ceiling tile paint 22s',
'rectangulat wihite ceramic sink bathroom': 'rectangular white ceramic sink bathroom',
'battery operataed wall light': 'battery operated wall light',
'72 inchtrack light': '72 inch track light',
'suny citrus fertilizer': 'sunny citrus fertilizer',
'48 inch aluminum shower curtin rod': '48 inch aluminum shower curtain rod',
'dehumidifyer': 'dehumidifier',
'earthquaike': 'earthquake',
'phillips led sparkle light bulbs': 'philips led sparkle light bulbs',
'metalic silver spray': 'metallic silver spray',
'all retaing wall': 'all retaining wall',
'high temperate sealant': 'high temperature sealant',
'greecian white porcelein marble': 'greecian white porcelain marble',
'shelves stailess stel': 'shelves stainless steel',
'wallmounted garage shelves': 'wall mounted garage shelves',
'remote meat thermom': 'remote meat thermometer',
'pvc threaded elbo': 'pvc threaded elbow',
'summit 20 in elctric range': 'summit 20 in electric range',
'groung fault electric outlet': 'ground fault electrical outlet',
'prenneols flower seeds': 'perennials flower seeds',
'hyrdaulic oil for kohler': 'hydraulic oil for kohler',
'hot/cold porcelin handles': 'hot/cold porcelain handles',
'white vanites with tops': 'white vanities with tops',
'exterier door keypad': 'exterior door keypad',
'purpor power': 'purple power',
'automatic drower closer': 'automatic drawer closer',
'potable firepace': 'portable fireplace',
'azelas': 'azaleas',
'mta distributions log splitter': 'mta distributors log splitter',
'standing town rack': 'standing towel rack',
'zinser stain cover': 'zinsser stain cover',
'weed trimer push type': 'weed trimmer push type',
'centipe grass seed': 'centipede grass seed',
'36 curved showered curtain rod': '36 curved shower curtain rod',
'4 quck grip 101': '4 quick grip 101',
'metal gringing weel 5/8': 'metal grinding wheel 5/8',
'weelbarrow': 'wheelbarrow',
'baraar emy': 'bazaar emy',
'wetbar sink and faucet': 'wet bar sink and faucet',
'perenial flowers': 'perennial flowers',
'infred turkey fryer': 'infrared turkey fryer',
'oil rubbed bronse bathroom lighting': 'oil rubbed bronze bathroom lighting',
'solor power lighting for exterior': 'solar power lighting for exterior',
'infloor heating antifreeze': 'in floor heating antifreeze',
'galvinized conduit pipe': 'galvanized conduit pipe',
'double curtain rod connecter': 'double curtain rod connector',
'drop cieling tiles 2ft by 4 ft': 'drop ceiling tiles 2ft by 4ft',
'plug in led night lite photocell': 'plug in led night light photocell',
'rough limber': 'rough lumber',
'48x48 windoww': '48x48 window',
'high intensity t5 flourescent lights': 'high intensity t5 fluorescent lights',
'brinly hardy 40 inc tow behind': 'brinly hardy 40 inch tow behind',
'ornge 5x7 rugs': 'orange 5x7 rugs',
'kitchenmaid built-in double drawer': 'kitchenaid built-in double drawer',
'safety latter': 'safety ladder',
'blind replacemetn': 'blind replacement',
'stainless steeel collated nails': 'stainless steel collated nails',
'hang rials barnyard doors': 'hang rails barnyard doors',
'tall black toliet': 'tall black toilet',
'fint tube': 'find tube',
'24 inches rerefrigerator': '24 inches refrigerator',
'ge microwave wall oven comb': 'ge microwave wall oven combo',
'presure treated': 'pressure treated',
'husky 46 9 drawer mobil': 'husky 46 9 drawer mobile',
'apartment size ge refrigertor stainless steel': 'apartment size ge refrigerator stainless steel',
'penedtrating stain': 'penetrating stain',
'briggsstraton 11 horse air filter': 'briggs stratton 11 horse air filter',
'hoovwe cordless vacuum cleaners': 'hoover cordless vacuum cleaners',
'tumbler dryer hose and claps': 'tumble dryer hose and clamps',
'antique truch': 'antique truck',
'hohler black and tan': 'kohler black and tan',
'spray and forget house nad deck': 'spray and forget house and deck',
'apriaire humidifier water panel': 'aprilaire humidifier water panel',
'unsanded groutr': 'unsanded grout',
'60 wat soft watt 2700k a19 dimibal led': '60 watt soft watt 2700k a19 dimmable led',
'7.5 mconnection for 9000 btu': '7.5 connection for 9000 btu',
'dimer switch and fan control': 'dimmer switch and fan control',
'granitecounter top cararra': 'granite countertop carrara',
'20 amp decor outlet ivory': '20 amp decora outlet ivory',
'rock wall papper': 'rock wallpaper',
'thin set fray': 'thin set gray',
'glass mirrior doors 72x80': 'glass mirror doors 72x80',
'heirloom whie': 'heirloom white',
'wood shelfing': 'wood shelving',
'kohler top mont bathroom sink': 'kohler top mount bathroom sink',
'outdoor dust to dawn light': 'outdoor dusk to dawn light',
'windowbalance': 'window balance',
'gunstock oak liamate': 'gunstock oak laminate',
'gardden benches': 'garden benches',
'strended electrical wire': 'stranded electrical wire',
'counter refinsher': 'counter refinishing',
'unfinished wood p-lant stand': 'unfinished wood plant stand',
'celing fan 60': 'ceiling fan 60',
'porta nailor': 'porta nailer',
't fittin': 't fitting',
'bousch lazer level gll2-80p': 'bosch laser level gll2-80p',
'2 1/2 inch nail boxe': '2 1/2 inch nail box',
'bonda body filler': 'bondo body filler',
'window manganetic lock': 'window magnetic lock',
'cat 5 cable uv restance': 'cat 5 cable uv resistance',
'3 4 toilet phlange': '3 4 toilet flange',
'aa batteried': 'aa batteries',
'6 pvc flixible coupling pipe': '6 pvc flexible coupling pipe',
'7 footaluminum awning': '7 foot aluminum awning',
'carburator': 'carburetor',
'water mainfold': 'water manifold',
'kholer bathroom wall lights': 'kohler bathroom wall lights',
'toro belt pully': 'toro belt pulley',
'paper lawn tefuse bags': 'paper lawn refuse bags',
'wadrobe moving boxes': 'wardrobe moving boxes',
'ultra clarifer, pool': 'ultra clarifier, pool',
'trash caninet slide': 'trash cabinet slide',
'craftig pvc cabinets': 'crafting pvc cabinets',
'plastic organozers': 'plastic organizers',
'rj45 crinp tool': 'rj45 crimp tool',
'darby 18 inch dishwasher': 'danby 18 inch dishwasher',
'10 x 10 gaxebo garden house': '10x10 gazebo garden house',
'colonial caseing': 'colonial casing',
'tarp for outsid furniture': 'tarp for outside furniture',
'phlne batteries': 'phone batteries',
'eatrhwise mower blades': 'earthwise mower blades',
'outdoor artifical lawn': 'outdoor artificial lawn',
'dual mount porcelin kitchen sinks': 'dual mount porcelain kitchen sinks',
'sflexible shower': 'flexible shower',
'savfavieh rug pad': 'safavieh rug pad',
'tigerwood perigo laminate flooring': 'tigerwood pergo laminate flooring',
'2\' flourescent lighting': '2\' fluorescent lighting',
'concerte stair railings': 'concrete stair railings',
'indoor infered heaters': 'indoor infrared heaters',
'tensil ties': 'tinsel ties',
'20 ampweather proof recepticles': '20 amp weatherproof receptacles',
'hdmi cabl': 'hdmi cable',
'matage double oven ranges': 'maytag double oven ranges',
'navarra sierra passage doorknob set': 'navarra sierra passage door knob set',
'outdoor furniture cover martha steward': 'outdoor furniture cover martha stewart',
'divonshire': 'devonshire',
'marine grade painr': 'marine grade paint',
'counter and appliance gaperaser': 'counter and appliance gap eraser',
'whirpool range hood 36': 'whirlpool range hood 36',
'flourecent': 'fluorescent',
'drain spoutts': 'drain spouts',
'1/4 shut off velves': '1/4 shut off valves',
'porta cool': 'portacool',
'yard walll': 'yard wall',
'kohler elongaterd toilet seat': 'kohler elongated toilet seat',
'kohler lighted tolet seats': 'kohler lighted toilet seats',
'cree led bub 6-pack': 'cree led bulb 6-pack',
'concrere chisel': 'concrete chisel',
'pedistal sink, 27\'': 'pedestal sink, 27\'',
'florsent replacement diffuser': 'fluorescent replacement diffuser',
'chlorox': 'clorox',
'core aeretor': 'core aerator',
'water proofing connector': 'waterproof connector',
'washer/dryr': 'washer/dryer',
'cambria java refridgerator': 'cambria java refrigerator',
'decrotive metal deck rail incecerts': 'decorative metal deck rail inserts',
'whirl pool water heater pilot': 'whirlpool water heater pilot',
'siemens double pole gfi': 'siemens double pole gfci',
'hampton bay alenxander oak': 'hampton bay alexander oak',
'32 inchvinyl screen doors': '32 inch vinyl screen doors',
'hamptonbay shaker cabinets wall': 'hampton bay shaker cabinets wall',
'3/8 entension': '3/8 extension',
'10x12 outdoor gazabos': '10x12 outdoor gazebos',
'seet metal tools': 'sheet metal tools',
'boch gll': 'bosch gll',
'dealt 8v screwdriver': 'dewalt 8v screwdriver',
'hand heald showers and ada grab bars': 'hand held showers and ada grab bars',
'200 amp outdoor circut breaker panel': '200 amp outdoor circuit breaker panel',
'fingerprint lockset': 'fingerprint locks',
'weekender powerwasher extension arms': 'weekender power washer extension arms',
'makita drill batterie charger': 'makita drill battery charger',
'ridgid fan': 'rigid fan',
'swifer wet cloth': 'swiffer wet cloth',
'hot water recirculator': 'hot water recirculation',
'riding mower blabes': 'riding mower blades',
'chain sherpeners': 'chain sharpeners',
'relief valve for rudd hot water heater': 'relief valve for ruud hot water heater',
'ceiling light brackt': 'ceiling light bracket',
'perferated pipe': 'perforated pipe',
'bath room sink accecories': 'bathroom sink accessories',
'ding room set': 'dining room set',
'2 ton expoxy': '2 ton epoxy',
'cutkler hammer breaker': 'cutler hammer breaker',
'red color cauking': 'red color caulking',
'strap and t hindge': 'strap and t hinge',
'screw driver 10 iches': 'screwdriver 10 inches',
'shower glass slelves': 'shower glass shelves',
'playststion 4 destiny bundle': 'playstation 4 destiny bundle',
'air conditiooning filter 14\'': 'air conditioning filter 14\'',
'sliding reversable patio door': 'sliding reversible patio door',
'rust oleam pinters touch black': 'rust oleum painters touch black',
'apron sink firecaly two bowl': 'apron sink fireclay two bowl',
'condesate pump': 'condensate pump',
'bronze outdoor ceiling dan': 'bronze outdoor ceiling fan',
'8 guage wire': '8 gauge wire',
'capacitor for quaterhorse motor 110 volts': 'capacitor for quarter horse motor 110 volts',
'anderson storm doors antique bronze': 'andersen storm doors antique bronze',
'gas enthonal free': 'gas ethanol free',
'is item at homedop': 'is item at home depot',
'drain stopper exstension': 'drain stopper extension',
'no tresspassing': 'no trespassing',
'100 gallon storage ben': '100 gallon storage bin',
'paint hardner': 'paint hardener',
'mystick permanent adhesive value pack': 'mystik permanent adhesive value pack',
'clear vlyvynal an rolls': 'clear polyvinyl and rolls',
'kliz primers': 'kilz primers',
'one way scrue removal tool': 'one way screw removal tool',
'stainless dishwaser smugde proof': 'stainless dishwasher smudge proof',
'hex shank drill bitt sets': 'hex shank drill bit sets',
'3.9 high effeciency front load washer': '3.9 high efficiency front load washer',
'concret patio floor': 'concrete patio floor',
'in the ground rodiron plant hanger': 'in the ground rod iron plant hanger',
'anderson storm door series 2500 sandtone polished brass': 'andersen storm door series 2500 sandstone polished brass',
'stainless steele screws': 'stainless steel screws',
'spray sealent for showers': 'spray sealant for showers',
'split line air conditioing': 'split line air conditioning',
'water softner pellet': 'water softener pellet',
'shelac': 'shellac',
'helti tools': 'hilti tools',
'PHILLIPS POST LIGHT BULB': 'PHILIPS POST LIGHT BULB',
'post light bulbl': 'post light bulb',
'tiolet': 'toilet',
'indoor home decor raindeer': 'indoor home decor reindeer',
'dinning tables': 'dining tables',
'patio dinning tables': 'patio dining tables',
'dremel router acessary': 'dremel router accessory',
'accordion door harware': 'accordion door hardware',
'edget tape': 'edge tape',
'verneer edging tool': 'veneer edging tool',
'drywall fastner': 'drywall fastener',
'heat pump acessories': 'heat pump accessories',
'scroll saw spirsl blade': 'scroll saw spiral blade',
'kitchen mat boack': 'kitchen mat black',
'chamberlain chain and pulliepaarts': 'chamberlain chain and pulley parts',
'swivle fitting for gas': 'swivel fitting for gas',
'SOLDERING IRORN': 'SOLDERING IRON',
'oaint marker': 'paint marker',
'upsidedowncan marker paint': 'upside down can marker paint',
'rope chritsmas lights': 'rope christmas lights',
'shower curtin rod': 'shower curtain rod',
'scoaring pads': 'scouring pads',
'spring set for price fister': 'spring set for price pfister',
'laquer thinner': 'lacquer thinner',
'mout faucet water filter': 'mount faucet water filter',
'NEUMATIC DOOR ARM': 'PNEUMATIC DOOR ARM',
'ceiling tile square fotage': 'ceiling tile square footage',
'ne angle base': 'neo angle base',
'1/4 in.-20 x 1 in. stainless steel flat-head socket cap scre': '1/4 in.-20 x 1 in. stainless steel flat-head socket cap screw',
'flexable pipe for propane': 'flexible pipe for propane',
'daltile accent peices': 'daltile accent pieces',
'specticide weed and grass rtu refill': 'spectracide weed and grass rtu refill',
'wood ddeck kits': 'wood deck kits',
'closetmaid hang9ing shelf': 'closetmaid hanging shelf',
'asb shower with curtian': 'asb shower with curtain',
'ptouch labeling tape': 'p touch labeling tape',
'misquito': 'mosquito',
'yard fooger': 'yard fogger',
'plastic splash guarf': 'plastic splash guard',
'3 light celling mount': '3 light ceiling mount',
'textered wallpaper': 'textured wallpaper',
'thermostat w remote senser': 'thermostat w remote sensor',
'spray oil prier': 'spray oil primer',
'maxx shower door': 'maax shower door',
'corion shower base': 'corian shower base',
'stapler hammers': 'staple hammers',
'2in non metalic standing coupling': '2in non metallic standing coupling',
'backyard xs capes': 'backyard xscapes',
'kraylon non skid': 'krylon non skid',
'pendent lights wit conversion kits': 'pendant lights with conversion kits',
'american wood charllotesville natural hickory': 'american wood charlottesville natural hickory',
'1/0 aqg': '1/0 awg',
'artci shag rug': 'arctic shag rug',
'omen single hole bathroom faucet': 'moen single hole bathroom faucet',
'john deere d100 sereissnow blade': 'john deere d100 series snow blade',
'brownbrick wallpaper': 'brown brick wallpaper',
'clear corrougated sheets': 'clear corrugated sheets',
'pressuer control valve': 'pressure control valve',
'white acryllic sheet': 'white acrylic sheet',
'wg307work jaw saw': 'wg307 worx jawsaw',
'plaskolight ceiling panel': 'plaskolite ceiling panel',
'charger y maintainer': 'charger and maintainer',
'waterless urinal conversion kist': 'waterless urinal conversion kit',
'hot water heating recirculitating pumps': 'hot water heater recirculating pumps',
'two gang carlton switch red dpt': 'two gang carlton switch red dot',
'kohler shower cartidges': 'kohler shower cartridges',
'rigid portable tool boxes': 'ridgid portable tool boxes',
'magniflier lamp': 'magnifier lamp',
'irragation controler': 'irrigation controller',
'minala rope': 'manila rope',
'wood sculture tool': 'wood sculpture tool',
'combination fan and lightwall switches': 'combination fan and light wall switches',
'acid stian': 'acid stain',
'bathtub deck mouted faucet with sprayer': 'bathtub deck mounted faucet with sprayer',
'attachments for zero turn touro': 'attachments for zero turn toro',
'wood pellats for grills': 'wood pellets for grills',
'whirpool 7000 washer': 'whirlpool 7000 washer',
'kitchenover sink lighting': 'kitchen over sink lighting',
'pegasus antique black side spalsh': 'pegasus antique black side splash',
'lock tight pl': 'loctite pl',
'landscasping ms international polish black stone': 'landscaping ms international polish black stone',
'1.4 cubit ft micro wave': '1.4 cubic ft microwave',
'square soffet vents': 'square soffit vents',
'exterior for pastic shutters': 'exterior for plastic shutters',
'exterior hous shutters': 'exterior house shutters',
'nutone ventiliation fan parts': 'nutone ventilation fan parts',
'belt anf tie rack': 'belt and tie rack',
'no elecetrity lights': 'no electricity lights',
'merola porcelain mosiac': 'merola porcelain mosaic',
'knotches': 'notches',
'savavieh soho': 'safavieh soho',
'double doors with security licks': 'double doors with security locks',
'glass tile backsp gpxtpnrf': 'glass tile backsp gpx pnrf',
'cabibet shelf pins': 'cabinet shelf pins',
'kolher repair': 'kohler repair',
'mantle brakets': 'mantle brackets',
'masonry painnt': 'masonry paint',
'muliti locks': 'multi locks',
'serger sewimg machine': 'serger sewing machine',
'mirror installation hardwawrd': 'mirror installation hardware',
'walnut porcelian': 'walnut porcelain',
'40 airens mulching kit': '40 ariens mulching kit',
'porcelaine cleaner': 'porcelain cleaner',
'monococcon 8x8 ceramic azuvi tile': 'monococcion 8x8 ceramic azuvi tile',
'black patioo set': 'black patio set',
'3/8 viyl j channel': '3/8 vinyl j channel',
'5/8 j chann': '5/8 j channel',
'home alerty': 'home alert',
'linen storage cabnit': 'linen storage cabinet',
'natur gas heat': 'natural gas heat',
'repacement toilet handle': 'replacement toilet handle',
'poyurethane clear satin': 'polyurethane clear satin',
'garbage desposal': 'garbage disposal',
'fire restaint paint': 'fire resistant paint',
'bathroom floting ball': 'bathroom floating ball',
'kitchen aid processer': 'kitchenaid processor',
'fire extinguishhers': 'fire extinguishers',
'trex fenc': 'trex fence',
'circular sawshop vac': 'circular saw shop vac',
'arylic wood paint': 'acrylic wood paint',
'appache mills plush tiles': 'apache mills plush tiles',
'phillips tuvpl-l 36': 'philips tuv pl-l 36',
'framed inerior door': 'framed interior door',
'end squicky floor': 'end squeaky floor',
'hoover prower scub deluxe': 'hoover power scrub deluxe',
'pernennial grass seed': 'perennial grass seed',
'phone linesplice connectors': 'phone line splice connectors',
'grow boz and pots': 'grow box and pots',
'organic leafgrow soil': 'organic leaf grow soil',
'6 foot pation table': '6 foot patio table',
'replacement patio unbrella pole': 'replacement patio umbrella pole',
'exteriro door 30 * 80': 'exterior door 30 * 80',
'oilrubbed bronze 3/8in riser': 'oil rubbed bronze 3/8in riser',
'latge storage containers': 'large storage containers',
'fridgidaire water filter': 'frigidaire water filter',
'sheeking for log cabin': 'seeking for log cabin',
'modern shower facuet': 'modern shower faucet',
'mirror, brushed nichel': 'mirror, brushed nickel',
'antic brass chandelier': 'antique brass chandelier',
'bufflo box wrench': 'buffalo box wrench',
'armstrong hardwood flooring422250z5p': 'armstrong hardwood flooring 422250z5p',
'mixet math faucet': 'mixet bath faucet',
'24 port patch pane': '24 port patch panel',
'black postlantern': 'black post lantern',
'needel valve': 'needle valve',
'wood ballusters': 'wood balusters',
'sharkbite sprinler': 'sharkbite sprinkler',
'1/2 hp genie screw drive garage door openner': '1/2 hp genie screw drive garage door opener',
'black dimmable gimble lights': 'black dimmable gimbal lights',
'power gable mount attic fac': 'power gable mount attic fan',
'door threshholds': 'door thresholds',
'rubber office chair sweel': 'rubber office chair wheel',
'16x7 garage door sandtone': '16x7 garage door sandstone',
'dal tile 12x24 porcelaine black tile': 'daltile 12x24 porcelain black tile',
'non ferroue saw blade': 'non ferrous saw blade',
'aluminum three way swich': 'aluminum three way switch',
'racheting wrench': 'ratcheting wrench',
'shower wal hook': 'shower wall hook',
'inflatable pool pumper': 'inflatable pool pump',
'cub cadet 46 balde': 'cub cadet 46 blade',
'spade terminalsnylon insulated': 'spade terminals nylon insulated',
'jimmyproof lock': 'jimmy proof lock',
'braSS pie fittings': 'braSS pipe fittings',
'brushed nichol hanging lights': 'brushed nickel hanging lights',
'lockbox keydoor lock': 'lockbox key door lock',
'white cabnet 30 inch base': 'white cabinet 30 inch base',
'ryobi replacemet batteries': 'ryobi replacement batteries',
'bath bord': 'bath board',
'aerp garden': 'aerogarden',
'white sign lettters': 'white sign letters',
'sqaure vessel sink': 'square vessel sink',
'i beam brackest': 'i beam brackets',
'paint for aluminun siding': 'paint for aluminum siding',
'digital temp monotor': 'digital temp monitor',
'floatinf shelving': 'floating shelving',
'light buld for stinger zapper': 'light bulb for stinger zapper',
'custom counterto': 'custom countertop',
'replacement delta faucet cartrigdge': 'replacement delta faucet cartridge',
'laundry bnasket': 'laundry basket',
'air conditon cooper soft': 'air conditioner copper soft',
'wood qwik bolts': 'wood kwik bolts',
'bolt conrete anchors': 'bolt concrete anchors',
'outdoor dining se?': 'outdoor dining set?',
'glass sheet mosiacs': 'glass sheet mosaics',
'whites parkle': 'white sparkle',
'fiskers titanium 1 1/2 loppers': 'fiskars titanium 1 1/2 loppers',
'cement mason bit': 'cement masonry bit',
'bananna leaves plant': 'banana leaves plant',
'fi nish screws': 'finish screws',
'tolet handle left hand': 'toilet handle left hand',
'sika repair shp': 'sika repair shop',
'murry circuit breakers 20 amps': 'murray circuit breakers 20 amps',
'hand pipe theader': 'hand pipe threader',
'powermate walkbehind trimmer': 'powermate walk behind trimmer',
'metal clothes handing carts': 'metal clothes hanging carts',
'electric radiatior heat': 'electric radiator heat',
'shopvac filter hepa': 'shop vac filter hepa',
'hampton bay fenving': 'hampton bay fencing',
'knife sharppener': 'knife sharpener',
'atttic heat barrier': 'attic heat barrier',
'wondow curtains': 'window curtains',
'american standard town square widespread facet': 'american standard town square widespread faucet',
'5.0 chest freezerz': '5.0 chest freezers',
'20 amp surger protector': '20 amp surge protector',
'f 30 flourescent light fixture': 'f30 fluorescent light fixture',
'1/2 inch rubber lep tips': '1/2 inch rubber leg tips',
'threader rod end coupler': 'threaded rod end coupler',
'lamated counter tops': 'laminate countertops',
'railing kit system round ballusters': 'railing kit system round balusters',
'sintetic grass': 'synthetic grass',
'landry sink': 'laundry sink',
'solar led light dust to dawn': 'solar led light dusk to dawn',
'pegro xp coffee step': 'pergo xp coffee step',
'maytag two door refridgerator': 'maytag two door refrigerator',
'reprobramable combination lock': 'programmable combination lock',
'pnematic flooring nails 16 gauge': 'pneumatic flooring nailer 16 gauge',
'outide dog kennel': 'outside dog kennel',
'6 incn door knocker': '6 inch door knocker',
'non programmable vertical thermost': 'non programmable vertical thermostat',
'windser light coco': 'windsor light coco',
'cooling towes': 'cooling towers',
'glacier bay shower catridge': 'glacier bay shower cartridge',
'ge discontinnued top freezers': 'ge discontinued top freezers',
'security camaras': 'security cameras',
'toiles partes': 'toilet parts',
'pegasus ntique brass': 'pegasus antique brass',
'water pic shower head chrome': 'waterpik shower head chrome',
'85 gall tall 4500': '85 gal tall 4500',
'contempery ceiling fans': 'contemporary ceiling fans',
'toile seat lid': 'toilet seat lid',
'milwaukee noncontact tester': 'milwaukee non contact tester',
'emser ocuntry': 'emser country',
'front screen for a gazeebo': 'front screen for a gazebo',
'fatpack 18v': 'fat pack 18v',
'bathroom kraft made': 'bathroom kraftmaid',
'1/4 qk connect x 1/8 mip': '1/4 quick connect x 1/8 mip',
'plate for faucet stoper': 'plate for faucet stopper',
'femaie gas fitting quick disonnect': 'female gas fitting quick disconnect',
'recesse light bulbs': 'recessed light bulbs',
'3m 60926 vapor catridges': '3m 60926 vapor cartridges',
'weather strip for commerial door': 'weather strip for commercial door',
'arcadia mettal locks': 'arcadia metal locks',
'gekko gauges': 'gecko gauges',
'frigidaire water firlters': 'frigidaire water filters',
'30 par haolgen bulbs': '30 par halogen bulbs',
'red devil scraperreplacement bldes': 'red devil scraper replacement blades',
'gcfi outlet': 'gfci outlet',
'mohawk oak wood fllors': 'mohawk oak wood floors',
'all porpose stools': 'all purpose stools',
'primered floor molding': 'primed floor molding',
'glass cleaner concintrete': 'glass cleaner concentrate',
'30 amp surface mount recepticle': '30 amp surface mount receptacle',
'60 x 100 aluminun mesh': '60 x 100 aluminum mesh',
'tile border black and whit': 'tile border black and white',
'peir mount black': 'pier mount black',
'xtra wide baby gates': 'extra wide baby gates',
'roffing caulk': 'roofing caulk',
'1/2 inc pvc treaded connector': '1/2 inch pvc threaded connector',
'electric hock for lift': 'electric shock for lift',
'greak': 'greek',
'airfilter 20x24': 'air filter 20x24',
'extenion cord storage': 'extension cord storage',
'shluter': 'schluter',
'circular saw rrip fence': 'circular saw rip fence',
'HEATED TOLIET SEAT': 'HEATED TOILET SEAT',
'rount magnet': 'round magnet',
'handi cap sink faucett': 'handicap sink faucet',
'arc fault circute breaker 1pole 15 amp': 'arc fault circuit breaker 1 pole 15 amp',
'oreck full reease carpet cleaner': 'oreck full release carpet cleaner',
'min split mounting brackets': 'mini split mounting brackets',
'kholer sink 20x17': 'kohler sink 20x17',
'heavy duty extensoion cordyellow only': 'heavy duty extension cord yellow only',
'3 newll post': '3 newel post',
'veraluz 4 light bathroom vanity': 'varaluz 4 light bathroom vanity',
'anual combo': 'annual combo',
'ciling pan': 'ceiling pan',
'syllicone lube': 'silicone lube',
'hdx 20\' hight velocity floor fan': 'hdx 20\' high velocity floor fan',
'30 inch kitchenaide cooktops': '30 inch kitchenaid cooktops',
'kusshuln concrete mixer': 'kushlan concrete mixer',
'roles of concreate mesh': 'roles of concrete mesh',
'hardward for pull out waste bin': 'hardware for pull out waste bin',
'glass towel bar braket': 'glass towel bar bracket',
'living room cabnets': 'living room cabinets',
'1-1/4 extention pvc': '1-1/4 extension pvc',
'metal double gain boxes': 'metal double gang boxes',
'fabric umbella': 'fabric umbrella',
'club cadet 46 belt': 'cub cadet 46 belt',
'window air conditionerriding lawn mowers': 'window air conditioner riding lawn mowers',
'digital cammera': 'digital camera',
'prppane pan': 'propane pan',
'oride plant': 'pride plant',
'home decorator outoddor patio cordless shades': 'home decorator outdoor patio cordless shades',
'1x1 square tubeing': '1x1 square tubing',
'water filter for frigidaire refrigirator': 'water filter for frigidaire refrigerator',
'linier track pendant': 'linear track pendant',
'medal stud finder': 'metal stud finder',
'mke m12 heated hoddie kit': 'mke m12 heated hoodie kit',
'bilt in pool': 'built in pool',
'buit in shower base': 'built in shower base',
'grohsafe roughin valve 35015': 'grohsafe rough in valve 35015',
'tank insualation': 'tank insulation',
'khols double toilet bowl': 'kohl\'s double toilet bowl',
'atlantiic can racks': 'atlantic can racks',
'skylites': 'skylights',
'kwikset passive door knob': 'kwikset passage door knob',
'loadspeaker': 'loudspeaker',
'koehler enamel cast iron sink': 'kohler enameled cast iron sink',
'tood handle lock': 'todd handle lock',
'sable brow grout': 'sable brown grout',
'rewd bird feeder': 'red bird feeder',
'lilac aera rug': 'lilac area rug',
'lightsavannah 3-light burnished ing fixtures': 'light savannah 3-light burnished ing fixtures',
'clear vynil for patio': 'clear vinyl for patio',
'intersate battery': 'interstate battery',
'jeldewen prairie mission door': 'jeld wen prairie mission door',
'honey oak tmolding': 'honey oak t molding',
'COMPLET SHOWER KIT': 'COMPLETE SHOWER KIT',
'36\' florescent light bulb': '36\' fluorescent light bulb',
'melon sunbrellap': 'melon sunbrella',
'28 kg washign machine': '28 kg washing machine',
'metal trash cas': 'metal trash cans',
'front door with side transome': 'front door with side transom',
'tribecia': 'tribeca',
'exterior shutters byrgundy': 'exterior shutters burgundy',
'light switchvers for little girls': 'light switches for little girls',
'miraposa whirlpool tub': 'mariposa whirlpool tub',
'schoolhouse pendqnt light': 'schoolhouse pendant light',
'cablrail': 'cable rail',
'vinly seat cleaner': 'vinyl seat cleaner',
'metal 3 tiertrolley': 'metal 3 tier trolley',
'white pendant uplight': 'white pendant light',
'lbathroom vanity lights chrome 3': 'bathroom vanity lights chrome 3',
'brushed nickel knobw': 'brushed nickel knobs',
'Renassaince': 'Renaissance',
'simpon strong tie wedge': 'simpson strong tie wedge',
'silocone repairs': 'silicone repairs',
'chocolate brown blackspash': 'chocolate brown backsplash',
'portabel tabel, plastic': 'portable table, plastic',
'safavieh courtyard dark biege area rug': 'safavieh courtyard dark beige area rug',
'theromometer smart': 'thermometer smart',
'hummngbird feeders': 'hummingbird feeders',
'diverter handels': 'diverter handles',
'dynamic desighn planters': 'dynamic design planters',
'pri meld flush bi fold doors': 'primed flush bifold doors',
'fisher and penkel': 'fisher and paykel',
'price of 1 gal beher marquee paint': 'price of 1 gal behr marquee paint',
'makersbot': 'makerbot',
'shelter logic sun sahde': 'shelterlogic sun shade',
'moen 4 port pex vavle': 'moen 4 port pex valve',
'ceiling fan extension wre': 'ceiling fan extension wire',
'single knobreplacement for shower kohler': 'single knob replacement for shower kohler',
'high gloss waterborne acrylic enamal': 'high gloss waterborne acrylic enamel',
'cattale': 'cattle',
'double deountable': 'double demountable',
'fantsastic': 'fantastic',
'milwaulkee battery charger': 'milwaukee battery charger',
'tandom 30 20': 'tandem 30 20',
'schluter kurdie': 'schluter kerdi',
'square buckes': 'square buckets',
'pro series vinal post': 'pro series vinyl post',
'krud cutter rust': 'krud kutter rust',
'warm espresso distresed': 'warm espresso distressed',
'levinton phone tv combo': 'leviton phone tv combo',
'makita planner knives': 'makita planer knives',
'barictric walk in tubs': 'bariatric walk in tubs',
'woper blades': 'wiper blades',
'kidcraft 18 doll furniture': 'kidkraft 18 doll furniture',
'stickon shower wall tower': 'stick on shower wall tower',
'riding lawn mower accesores': 'riding lawn mower accessories',
'towel bar nickel gracier 18\'': 'towel bar nickel glacier 18\'',
'compreshion repair kit': 'compression repair kit',
'huskie air compressors accessories': 'husky air compressors accessories',
'36 inch neo angle glass doooors': '36 inch neo angle glass doors',
'gerber cohort fine edg knife': 'gerber cohort fine edge knife',
'work force prpane heatr': 'workforce propane heater',
'progress lighting nottingdon': 'progress lighting nottington',
'dog leash atachments': 'dog leash attachments',
'elaphent ear': 'elephant ear',
'veeneer wood tape': 'veneer wood tape',
'siccsers': 'scissors',
'klien folding 6ft ruler': 'klein folding 6ft ruler',
'wall socket covedrs': 'wall socket covers',
'klein 8 inch plies': 'klein 8 inch pliers',
'screen doors: screen tight doors 32 in. unfinished wood t-ba': 'screen doors: screen tight doors 32 in. unfinished wood t-bar',
'g e dishwaaher': 'g e dishwasher',
'white semigloass': 'white semi gloss',
'shop swiming pools': 'shop swimming pools',
'rectangular baulaster': 'rectangular baluster',
'cedar 0roofing shingles': 'cedar roofing shingles',
'prehung door fanlite': 'prehung door fan lite',
'martha suart carpet tobacco leaf': 'martha stewart carpet tobacco leaf',
'furnance gas upflow': 'furnace gas upflow',
'spalted m aple': 'spalted maple',
'crimpling pleirs': 'crimping pliers',
'cold stem for glacer bay faucets': 'cold stem for glacier bay faucets',
'holegen flood light 35w': 'halogen flood light 35w',
'ridgid ipact wrench': 'rigid impact wrench',
'twin wsher dryer gas': 'twin washer dryer gas',
'Diamond HArd Acrylic Enamal': 'Diamond HArd Acrylic Enamel',
'stainless steel wall pannels': 'stainless steel wall panels',
'perenial bulb': 'perennial bulb',
'caroilne avenue 36 in single vanity in white marble top in l': 'caroline avenue 36 in single vanity in white marble top in l',
'broadway collectionchrome vanity fixture': 'broadway collection chrome vanity fixture',
'vogoro flower': 'vigoro flower',
'guarge parnel': 'gauge panel',
'sweeep pan': 'sweep pan',
'dewalt magnetic drive quide': 'dewalt magnetic drive guide',
'milwuakee magnetic drive guide': 'milwaukee magnetic drive guide',
'stainlss steel wire wheels': 'stainless steel wire wheels',
'deltile 3x6 ceramic blue': 'daltile 3x6 ceramic blue',
'discontinuedbrown and tan area rug': 'discontinued brown and tan area rug',
'frost protectionm': 'frost protection',
'5 tier chandalier': '5 tier chandelier',
'perry hickory laminte': 'perry hickory laminate',
'carpet chessnut': 'carpet chestnut',
'midnight blue irridecent': 'midnight blue iridescent',
'under cabinet black flourescent': 'under cabinet black fluorescent',
'concord charcole runner': 'concord charcoal runner',
'gibrallar post series cedar post': 'gibraltar post series cedar post',
'jefrrey court 3x12': 'jeffrey court 3x12',
'baking panb': 'baking pan',
'dustless ginder': 'dustless grinder',
'paw print doorbe;;': 'paw print doorbell;;',
'rustolium paint american accesnts': 'rustoleum paint american accents',
'costum key': 'custom key',
'halh circle glass shelf': 'half circle glass shelf',
'pedestial snk': 'pedestal sink',
'cordless celullar': 'cordless cellular',
'scounces wall light outside': 'sconces wall light outside',
'gas powere wood chipper': 'gas powered wood chipper',
'hampton bay brillant maple laminate': 'hampton bay brilliant maple laminate',
't8 flourescent bulbs 4 ft 2 pack': 't8 fluorescent bulbs 4 ft 2 pack',
'leminate floor alexandrea': 'laminate floor alexandria',
'reflector 50w flurecent': 'reflector 50w fluorescent',
'he xl 44 range': 'ge xl44 range',
'branch protctor paint': 'branch protector paint',
'rehargeable aa batteries for landscape lighting': 'rechargeable aa batteries for landscape lighting',
'msa safet work hat': 'msa safety work hat',
'conemporary hanging outdoor light fixture': 'contemporary hanging outdoor light fixture',
'piano door hing': 'piano door hinge',
'kohler whole houser generator': 'kohler whole house generator',
'dynasty collecion': 'dynasty collection',
'chesapeke nightstand in cherry': 'chesapeake nightstand in cherry',
'kohler glas shower door 4ft': 'kohler glass shower door 4ft',
'apartment size refreidgerator': 'apartment size refrigerator',
'centerpise': 'centerprise',
'motar for large tilw': 'mortar for large tile',
'bathroom lightning 48 inch': 'bathroom lighting 48 inch',
'panle clamp': 'panel clamp',
'roll up door fo shed': 'roll up door for shed',
'oil rubbed bronze airgap for dishwasher': 'oil rubbed bronze air gap for dishwasher',
'multi plub adapter': 'multi plug adapter',
'decorative clarance': 'decorative clarence',
'tamper resistant combo outet black': 'tamper resistant combo outlet black',
'polyurethane collors': 'polyurethane colors',
'scrool lever': 'scroll lever',
'gentec smoke detector': 'gentex smoke detector',
'kohler claxton biscuit sink': 'kohler caxton biscuit sink',
'strapping for cielings': 'strapping for ceilings',
'wall mounteddrop leaf table': 'wall mounted drop leaf table',
'chamberlain intercomm': 'chamberlain intercom',
'sumpter oask': 'sumpter oak',
'torino chandler 5 light bn': 'torino chandelier 5 light bn',
'allure red mahoghany': 'allure red mahogany',
'ge personal eletrical home security': 'ge personal electric home security',
'for rent sighn': 'for rent sign',
'coper clad aluminum': 'copper clad aluminum',
'homeywell cool moisture humidifier filters': 'honeywell cool moisture humidifier filters',
'hdc fairlawm jasper cane': 'hdc fairlawn jasper cane',
'wire fen c e': 'wire fence',
'cap screww everbilt 1/4in x2in': 'cap screw everbilt 1/4in x2in',
'metal urathane': 'metal urethane',
'blitz colth': 'blitz cloth',
'commercial accunts': 'commercial accounts',
'electic chainsaw worx': 'electric chainsaw worx',
'power toll accesories': 'power tool accessories',
'leviton - decora 3 gang midway nylon wall plate - light almo': 'leviton - decora 3 gang midway nylon wall plate - light almond',
'pond filter mediumpond filter pads': 'pond filter media pond filter pads',
'tall wine cabnet': 'tall wine cabinet',
'bulk calking': 'bulk caulking',
'insolated cooler with a strap': 'insulated cooler with a strap',
'concete placer': 'concrete placer',
'transmissin leak stopper': 'transmission leak stopper',
'toilet in buisk': 'toilet in buick',
'black wire hidder': 'black wire hider',
'braid trim ceramic title molding': 'braid trim ceramic tile molding',
'laundry tub fosets valves': 'laundry tub faucets valves',
'schlage plymoth orbit oil rubbed bronze': 'schlage plymouth orbit oil rubbed bronze',
'romanic poetry flat interior paint': 'romantic poetry flat interior paint',
'worklight 500 watt bullbs': 'worklight 500 watt bulbs',
'elvies ornament': 'elvis ornament',
'dpcam camera': 'dropcam camera',
'clorine tabs for septic': 'chlorine tabs for septic',
'interor door framed': 'interior door frame',
'hot dipped galvanized screwes': 'hot dipped galvanized screws',
'14 ft. w x29 ft. l x 14 ft.h': '14 ft. w x 29 ft. x 14 ft.h',
'water resistent top': 'water resistant top',
'galvinize 2 in box of screws': 'galvanized 2 in box of screws',
'taupe teasure carpet': 'taupe treasure carpet',
'nickle vanity lighting mosaics': 'nickel vanity lighting mosaics',
'heat circualtor': 'heat circulator',
'flexible pvc joing': 'flexible pvc joint',
'14 metal abresive blade': '14 metal abrasive blade',
'foldin g patio doors': 'folding patio doors',
'primeline mirror sliding doors': 'prime line mirror sliding doors',
'sanora maple flooring': 'sonora maple flooring',
'plastic paint containwes with lid': 'plastic paint containers with lid',
'deck fasting systems': 'deck fastening systems',
'long handled squeege window cleaning': 'long handled squeegee window cleaning',
'lsnd scape trim edger': 'landscape trim edger',
'rust oleum aged iron': 'rustoleum aged iron',
'redi ledge cooner': 'redi ledge corner',
'milwakee work radio': 'milwaukee work radio',
'progress piedmot': 'progress piedmont',
'home security camera cablee': 'home security camera cable',
'white rock daltale': 'white rock daltile',
'japenes lilacs': 'japanese lilacs',
'thickrubber mat': 'thick rubber mat',
'topdown bottom up shades': 'top down bottom up shades',
'locktite 9oz 2in1 premium sealant': 'loctite 9oz 2in1 premium sealant',
'evaporative thermstate': 'evaporative thermostat',
'red devil paint cleanaer': 'red devil paint cleaner',
'beer wine refrigeratr': 'beer wine refrigerator',
'forced air vents covrs': 'forced air vents covers',
'ew drops marquee paint': 'dew drops marquee paint',
'kitchen sink and fawcet black dual mount': 'kitchen sink and faucet black dual mount',
'dimmable fluoreecent': 'dimmable fluorescent',
'textured 6 pannel hollow core primed composite prehung inter': 'textured 6 panel hollow core primed composite prehung inter',
'dakato 4 light': 'dakota 4 light',
'playset handels': 'playset handles',
'vauhhan hammers': 'vaughan hammers',
'sterling frosted glass shower ath doors': 'sterling frosted glass shower bath doors',
'autom tic drawer lite': 'automatic drawer light',
'all trellisses': 'all trellises',
'american standard 5324.019 enlongate toilet seat': 'american standard 5324.019 elongated toilet seat',
'15 in built in maytag trash compactorr': '15 in built in maytag trash compactor',
'3 butto pico pj-3b': '3 button pico pj-3b',
'ligth': 'light',
'sissors': 'scissors'
}
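# Hedged usage sketch (editor's addition, not part of the original file):
# the mapping above is assumed to be bound to a module-level name such as
# `typo_dict` earlier in this file. A minimal lookup helper would then be:
def correct_query(query, corrections):
    # Return the known correction for a raw query, or the query unchanged.
    return corrections.get(query, query)
# e.g. correct_query('sissors', typo_dict) -> 'scissors'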
|
{
"content_hash": "14c0cebd6064d3ff6359359d13cfe475",
"timestamp": "",
"source": "github",
"line_count": 3366,
"max_line_length": 133,
"avg_line_length": 54.69815805109923,
"alnum_prop": 0.6907187937908035,
"repo_name": "dnc1994/Kaggle-Playground",
"id": "dac7dbc945ac1ce6c195944b3ae731363a7e36da",
"size": "184114",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "typo_dict.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "605424"
},
{
"name": "Python",
"bytes": "205420"
}
],
"symlink_target": ""
}
|
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.6.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V2alpha1CrossVersionObjectReference(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, api_version=None, kind=None, name=None):
"""
V2alpha1CrossVersionObjectReference - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'api_version': 'str',
'kind': 'str',
'name': 'str'
}
self.attribute_map = {
'api_version': 'apiVersion',
'kind': 'kind',
'name': 'name'
}
self._api_version = api_version
self._kind = kind
self._name = name
@property
def api_version(self):
"""
Gets the api_version of this V2alpha1CrossVersionObjectReference.
API version of the referent
:return: The api_version of this V2alpha1CrossVersionObjectReference.
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""
Sets the api_version of this V2alpha1CrossVersionObjectReference.
API version of the referent
:param api_version: The api_version of this V2alpha1CrossVersionObjectReference.
:type: str
"""
self._api_version = api_version
@property
def kind(self):
"""
Gets the kind of this V2alpha1CrossVersionObjectReference.
Kind of the referent; More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds
:return: The kind of this V2alpha1CrossVersionObjectReference.
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""
Sets the kind of this V2alpha1CrossVersionObjectReference.
Kind of the referent; More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds
:param kind: The kind of this V2alpha1CrossVersionObjectReference.
:type: str
"""
if kind is None:
raise ValueError("Invalid value for `kind`, must not be `None`")
self._kind = kind
@property
def name(self):
"""
Gets the name of this V2alpha1CrossVersionObjectReference.
Name of the referent; More info: http://kubernetes.io/docs/user-guide/identifiers#names
:return: The name of this V2alpha1CrossVersionObjectReference.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""
Sets the name of this V2alpha1CrossVersionObjectReference.
Name of the referent; More info: http://kubernetes.io/docs/user-guide/identifiers#names
:param name: The name of this V2alpha1CrossVersionObjectReference.
:type: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`")
self._name = name
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
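# Hedged usage sketch (editor's addition, not part of the generated client);
# the apiVersion/kind/name values below are illustrative only.
if __name__ == "__main__":
    ref = V2alpha1CrossVersionObjectReference(
        api_version="apps/v1beta1", kind="Deployment", name="my-app")
    # to_dict() walks swagger_types and serializes each attribute.
    print(ref.to_dict())  # {'api_version': 'apps/v1beta1', 'kind': 'Deployment', 'name': 'my-app'}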
|
{
"content_hash": "43eecf3abf35c81bbc783fc052669118",
"timestamp": "",
"source": "github",
"line_count": 168,
"max_line_length": 112,
"avg_line_length": 29.333333333333332,
"alnum_prop": 0.5657467532467533,
"repo_name": "skuda/client-python",
"id": "9bf180ead63f3a99d312d331280ccf9ce34cdd97",
"size": "4945",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kubernetes/client/models/v2alpha1_cross_version_object_reference.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "5907789"
},
{
"name": "Shell",
"bytes": "8195"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations
import share.robot
class Migration(migrations.Migration):
dependencies = [
('share', '0001_initial'),
('djcelery', '0001_initial'),
]
operations = [
migrations.RunPython(
code=share.robot.RobotUserMigration('edu.huskiecommons'),
),
migrations.RunPython(
code=share.robot.RobotOauthTokenMigration('edu.huskiecommons'),
),
migrations.RunPython(
code=share.robot.RobotScheduleMigration('edu.huskiecommons'),
),
]
|
{
"content_hash": "cce41dbc126686059a36081f3124b643",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 75,
"avg_line_length": 25.166666666666668,
"alnum_prop": 0.6225165562913907,
"repo_name": "zamattiac/SHARE",
"id": "656796955d1e041e2de128dc38117037706e5861",
"size": "676",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "providers/edu/huskiecommons/migrations/0001_initial.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "3690"
},
{
"name": "HTML",
"bytes": "1582"
},
{
"name": "Python",
"bytes": "1517988"
},
{
"name": "Shell",
"bytes": "633"
}
],
"symlink_target": ""
}
|
import base64
import datetime
import uuid
from oslo.config import cfg
from oslo.serialization import jsonutils
from nova.api.openstack.compute import plugins
from nova.api.openstack.compute.plugins.v3 import servers
from nova.api.openstack.compute.plugins.v3 import user_data
from nova.compute import api as compute_api
from nova.compute import flavors
from nova import db
from nova import exception
from nova.network import manager
from nova import test
from nova.tests.api.openstack import fakes
from nova.tests import fake_instance
from nova.tests.image import fake
CONF = cfg.CONF
FAKE_UUID = fakes.FAKE_UUID
def fake_gen_uuid():
return FAKE_UUID
def return_security_group(context, instance_id, security_group_id):
pass
class ServersControllerCreateTest(test.TestCase):
def setUp(self):
"""Shared implementation for tests below that create instance."""
super(ServersControllerCreateTest, self).setUp()
self.flags(verbose=True,
enable_instance_password=True)
self.instance_cache_num = 0
self.instance_cache_by_id = {}
self.instance_cache_by_uuid = {}
ext_info = plugins.LoadedExtensionInfo()
self.controller = servers.ServersController(extension_info=ext_info)
CONF.set_override('extensions_blacklist', 'os-user-data',
'osapi_v3')
self.no_user_data_controller = servers.ServersController(
extension_info=ext_info)
def instance_create(context, inst):
inst_type = flavors.get_flavor_by_flavor_id(3)
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
def_image_ref = 'http://localhost/images/%s' % image_uuid
self.instance_cache_num += 1
instance = fake_instance.fake_db_instance(**{
'id': self.instance_cache_num,
'display_name': inst['display_name'] or 'test',
'uuid': FAKE_UUID,
'instance_type': inst_type,
'access_ip_v4': '1.2.3.4',
'access_ip_v6': 'fead::1234',
'image_ref': inst.get('image_ref', def_image_ref),
'user_id': 'fake',
'project_id': 'fake',
'reservation_id': inst['reservation_id'],
"created_at": datetime.datetime(2010, 10, 10, 12, 0, 0),
"updated_at": datetime.datetime(2010, 11, 11, 11, 0, 0),
user_data.ATTRIBUTE_NAME: None,
"progress": 0,
"fixed_ips": [],
"task_state": "",
"vm_state": "",
"root_device_name": inst.get('root_device_name', 'vda'),
})
self.instance_cache_by_id[instance['id']] = instance
self.instance_cache_by_uuid[instance['uuid']] = instance
return instance
def instance_get(context, instance_id):
"""Stub for compute/api create() pulling in instance after
scheduling
"""
return self.instance_cache_by_id[instance_id]
def instance_update(context, uuid, values):
instance = self.instance_cache_by_uuid[uuid]
instance.update(values)
return instance
def server_update(context, instance_uuid, params):
inst = self.instance_cache_by_uuid[instance_uuid]
inst.update(params)
return (inst, inst)
def fake_method(*args, **kwargs):
pass
def project_get_networks(context, user_id):
return dict(id='1', host='localhost')
def queue_get_for(context, *args):
return 'network_topic'
fakes.stub_out_rate_limiting(self.stubs)
fakes.stub_out_key_pair_funcs(self.stubs)
fake.stub_out_image_service(self.stubs)
fakes.stub_out_nw_api(self.stubs)
self.stubs.Set(uuid, 'uuid4', fake_gen_uuid)
self.stubs.Set(db, 'instance_add_security_group',
return_security_group)
self.stubs.Set(db, 'project_get_networks',
project_get_networks)
self.stubs.Set(db, 'instance_create', instance_create)
self.stubs.Set(db, 'instance_system_metadata_update',
fake_method)
self.stubs.Set(db, 'instance_get', instance_get)
self.stubs.Set(db, 'instance_update', instance_update)
self.stubs.Set(db, 'instance_update_and_get_original',
server_update)
self.stubs.Set(manager.VlanManager, 'allocate_fixed_ip',
fake_method)
def _test_create_extra(self, params, no_image=False,
override_controller=None):
image_uuid = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
server = dict(name='server_test', imageRef=image_uuid, flavorRef=2)
if no_image:
server.pop('imageRef', None)
server.update(params)
body = dict(server=server)
req = fakes.HTTPRequestV3.blank('/servers')
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers["content-type"] = "application/json"
if override_controller:
server = override_controller.create(req, body=body).obj['server']
else:
server = self.controller.create(req, body=body).obj['server']
return server
def test_create_instance_with_user_data_disabled(self):
params = {user_data.ATTRIBUTE_NAME: base64.b64encode('fake')}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertNotIn('user_data', kwargs)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(
params,
override_controller=self.no_user_data_controller)
def test_create_instance_with_user_data_enabled(self):
params = {user_data.ATTRIBUTE_NAME: base64.b64encode('fake')}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertIn('user_data', kwargs)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_user_data(self):
value = base64.b64encode("A random string")
params = {user_data.ATTRIBUTE_NAME: value}
server = self._test_create_extra(params)
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_with_bad_user_data(self):
value = "A random string"
params = {user_data.ATTRIBUTE_NAME: value}
self.assertRaises(exception.ValidationError,
self._test_create_extra, params)
|
{
"content_hash": "eb4e84d5e4a97e907c741b267afcc5b6",
"timestamp": "",
"source": "github",
"line_count": 179,
"max_line_length": 77,
"avg_line_length": 37.88826815642458,
"alnum_prop": 0.6001179593040401,
"repo_name": "badock/nova",
"id": "9ee2c5022a2671a74dfaf3246372ebb965a41f97",
"size": "7445",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "nova/tests/api/openstack/compute/plugins/v3/test_user_data.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Groff",
"bytes": "112"
},
{
"name": "PLpgSQL",
"bytes": "2958"
},
{
"name": "Python",
"bytes": "15441440"
},
{
"name": "Shell",
"bytes": "20796"
},
{
"name": "Smarty",
"bytes": "693857"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import division
import argparse
from tabulate import tabulate
from pyafm.roll_up import transaction_to_student_step
from pyafm.models import afm
from pyafm.models import afms
def read_datashop_student_step(step_file, model_id=None):
header = {v: i for i, v in enumerate(
step_file.readline().rstrip().split('\t'))}
kc_mods = [v[4:-1] for v in header if v[0:2] == "KC"]
kc_mods.sort()
if model_id is None:
print()
print('Found these KC models:')
for i, val in enumerate(kc_mods):
print(" (%i) %s" % (i+1, val))
print()
model_id = int(input("Enter the number of which one you want to use: "))-1
model = "KC (%s)" % (kc_mods[model_id])
opp = "Opportunity (%s)" % (kc_mods[model_id])
kcs = []
opps = []
y = []
stu = []
student_label = []
item_label = []
for line in step_file:
data = line.rstrip().split('\t')
kc_labels = [kc for kc in data[header[model]].split("~~") if kc != ""]
if not kc_labels:
continue
kcs.append({kc: 1 for kc in kc_labels})
kc_opps = [o for o in data[header[opp]].split("~~") if o != ""]
opps.append({kc: int(kc_opps[i])-1 for i, kc in enumerate(kc_labels)})
if data[header['First Attempt']] == "correct":
y.append(1)
else:
y.append(0)
student = data[header['Anon Student Id']]
stu.append({student: 1})
student_label.append(student)
item = data[header['Problem Name']] + "##" + data[header['Step Name']]
item_label.append(item)
return (kcs, opps, y, stu, student_label, item_label)
def main():
parser = argparse.ArgumentParser(description='Process datashop file.')
parser.add_argument('-ft', choices=["student_step", "transaction"],
help='the type of file to load (default="student_step")',
default="student_step")
parser.add_argument('student_data', type=argparse.FileType('r'),
help="the student data file in datashop format")
parser.add_argument('-m', choices=["AFM", "AFM+S"],
help='the model to use (default="AFM+S")',
default="AFM+S")
parser.add_argument('-nfolds', type=int, default=3,
help="the number of cross validation folds, when using cv (default=3).")
parser.add_argument('-seed', type=int, default=None,
help='the seed used for shuffling in cross validation to ensure comparable '
'folds between runs (default=None).')
parser.add_argument('-report', choices=['all', 'cv', 'kcs', 'kcs+stu'], default='all',
help='model values to report after fitting (default=all).')
args = parser.parse_args()
if args.ft == "transaction":
ssr_file = transaction_to_student_step(args.student_data)
ssr_file = open(ssr_file, 'r')
else:
ssr_file = args.student_data
kcs, opps, y, stu, student_label, item_label = read_datashop_student_step(
ssr_file)
if args.m == "AFM":
scores, kc_vals, coef_s = afm(kcs, opps, y, stu,
student_label, item_label, args.nfolds, args.seed)
print()
if args.report in ['all', 'cv']:
print(tabulate([scores], ['Unstratified CV', 'Stratified CV', 'Student CV', 'Item CV'],
floatfmt=".3f"))
print()
if args.report in ['all', 'kcs', 'kcs+stu']:
print(tabulate(sorted(kc_vals), ['KC Name', 'Intercept (logit)',
'Intercept (prob)', 'Slope'],
floatfmt=".3f"))
print()
if args.report in ['all', 'kcs+stu']:
print(tabulate(sorted(coef_s), ['Anon Student Id', 'Intercept (logit)',
'Intercept (prob)'],
floatfmt=".3f"))
elif args.m == "AFM+S":
scores, kc_vals, coef_s = afms(kcs, opps, y, stu,
student_label, item_label, args.nfolds, args.seed)
print()
if args.report in ['all', 'cv']:
print(tabulate([scores], ['Unstratified CV', 'Stratified CV', 'Student CV', 'Item CV'],
floatfmt=".3f"))
print()
if args.report in ['all', 'kcs', 'kcs+stu']:
print(tabulate(sorted(kc_vals), ['KC Name', 'Intercept (logit)',
'Intercept (prob)', 'Slope'],
floatfmt=".3f"))
print()
if args.report in ['all', 'kcs+stu']:
print(tabulate(sorted(coef_s), ['Anon Student Id', 'Intercept (logit)',
'Intercept (prob)'],
floatfmt=".3f"))
else:
raise ValueError("Model type not supported")
if __name__ == "__main__":
main()
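# Hedged usage sketch (editor's addition): a minimal in-memory DataShop
# student-step export fed through `read_datashop_student_step`. The column
# set below is an assumption about the smallest header the parser accepts.
def _example_read_student_step():
    from io import StringIO
    header = ("Anon Student Id\tProblem Name\tStep Name\t"
              "First Attempt\tKC (Default)\tOpportunity (Default)\n")
    rows = ("s1\tp1\tstep1\tcorrect\tkc1\t1\n"
            "s1\tp1\tstep2\tincorrect\tkc1\t2\n")
    kcs, opps, y, stu, students, items = read_datashop_student_step(
        StringIO(header + rows), model_id=0)
    # y == [1, 0]; opps == [{'kc1': 0}, {'kc1': 1}]; stu == [{'s1': 1}] * 2
    return kcs, opps, y, stu, students, items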
|
{
"content_hash": "5d4b5ee6c9bf40830010145110cf0b11",
"timestamp": "",
"source": "github",
"line_count": 140,
"max_line_length": 99,
"avg_line_length": 37.01428571428571,
"alnum_prop": 0.5189116171362408,
"repo_name": "cmaclell/pyAFM",
"id": "441c087f365b6372f63425f864124db58507bd6c",
"size": "5182",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyafm/process_datashop.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "39676"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals, division, absolute_import
import logging
from flexget import plugin
from flexget.entry import Entry
from flexget.event import event
from flexget.manager import Session
from flexget.utils.imdb import make_url as make_imdb_url
try:
from flexget.plugins.filter.movie_queue import queue_get
except ImportError:
raise plugin.DependencyError(issued_by='emit_movie_queue', missing='movie_queue')
log = logging.getLogger('emit_movie_queue')
class EmitMovieQueue(object):
"""Use your movie queue as an input by emitting the content of it"""
schema = {
'oneOf': [
{'type': 'boolean'},
{
'type': 'object',
'properties': {
'year': {'type': 'boolean'},
'quality': {'type': 'boolean'},
'queue_name': {'type': 'string'}
},
'additionalProperties': False
}
]
}
def prepare_config(self, config):
if isinstance(config, bool):
config = {}
config.setdefault('year', True)
config.setdefault('quality', False)
config.setdefault('queue_name', 'default')
return config
def on_task_input(self, task, config):
if not config:
return
config = self.prepare_config(config)
entries = []
queue_name = config.get('queue_name')
with Session() as session:
for queue_item in queue_get(session=session, downloaded=False, queue_name=queue_name):
entry = Entry()
# make sure the entry has IMDB fields filled
entry['url'] = ''
if queue_item.imdb_id:
entry['imdb_id'] = queue_item.imdb_id
entry['imdb_url'] = make_imdb_url(queue_item.imdb_id)
if queue_item.tmdb_id:
entry['tmdb_id'] = queue_item.tmdb_id
plugin.get_plugin_by_name('tmdb_lookup').instance.lookup(entry)
# check if the title is an imdb url (leftovers from an old database?)
# TODO: maybe this should be fixed at the queue_get ...
if 'http://' in queue_item.title:
log.debug('queue contains url instead of title')
if entry.get('movie_name'):
entry['title'] = entry['movie_name']
else:
log.error('Found imdb url in imdb queue, but lookup failed: %s' % entry['title'])
continue
else:
# normal title
entry['title'] = queue_item.title
# Add the year and quality if configured to (make sure not to double it up)
if config.get('year') and entry.get('movie_year') \
and unicode(entry['movie_year']) not in entry['title']:
entry['title'] += ' %s' % entry['movie_year']
# TODO: qualities can now be ranges.. how should we handle this?
if config.get('quality') and queue_item.quality != 'ANY':
log.info('quality option of emit_movie_queue is disabled while we figure out how to handle ranges')
# entry['title'] += ' %s' % queue_item.quality
entries.append(entry)
log.debug('Added title and IMDB id to new entry: %s - %s' %
(entry['title'], entry['imdb_id']))
return entries
@event('plugin.register')
def register_plugin():
plugin.register(EmitMovieQueue, 'emit_movie_queue', api_ver=2)
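# Hedged configuration sketch (editor's addition): per the schema above, a
# task may enable this input with a bare boolean or an options object, e.g.
#
#   tasks:
#     from-queue:
#       emit_movie_queue: yes            # same as the defaults below
#     from-named-queue:
#       emit_movie_queue:
#         year: yes
#         quality: no
#         queue_name: kids               # hypothetical queue name
#
# prepare_config() normalizes the boolean form, so `True` behaves like
# {'year': True, 'quality': False, 'queue_name': 'default'}.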
|
{
"content_hash": "dc0b9b3f319915ad40f92806db0e8c58",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 119,
"avg_line_length": 39.09574468085106,
"alnum_prop": 0.5376870748299319,
"repo_name": "cvium/Flexget",
"id": "65cc3438f2cb7d03b5d7ce1f7de809896444261c",
"size": "3675",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "flexget/plugins/input/emit_movie_queue.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4878"
},
{
"name": "HTML",
"bytes": "28181"
},
{
"name": "JavaScript",
"bytes": "44914"
},
{
"name": "Python",
"bytes": "2383289"
}
],
"symlink_target": ""
}
|
from unittest import TestCase
from mock import Mock
from pyVim.connect import SmartConnect, Disconnect
from pyVmomi import vim
from cloudshell.cp.vcenter.commands.connect_dvswitch import VirtualSwitchConnectCommand
from cloudshell.cp.vcenter.common.vcenter.task_waiter import SynchronousTaskWaiter
from cloudshell.cp.vcenter.common.vcenter.vmomi_service import pyVmomiService
from cloudshell.cp.vcenter.network.dvswitch.creator import DvPortGroupCreator
from cloudshell.cp.vcenter.network.dvswitch.name_generator import DvPortGroupNameGenerator
from cloudshell.cp.vcenter.network.vlan.factory import VlanSpecFactory
from cloudshell.cp.vcenter.network.vlan.range_parser import VLanIdRangeParser
from cloudshell.cp.vcenter.network.vnic.vnic_service import VNicService
from cloudshell.cp.vcenter.vm.dvswitch_connector import VmNetworkMapping, VirtualSwitchToMachineConnector
from cloudshell.cp.vcenter.vm.portgroup_configurer import VirtualMachinePortGroupConfigurer
from cloudshell.cp.vcenter.vm.vnic_to_network_mapper import VnicToNetworkMapper
from cloudshell.tests.utils.testing_credentials import TestCredentials
class VirtualSwitchToMachineCommandIntegrationTest(TestCase):
def integration_test_connect_A(self):
py_vmomi_service = pyVmomiService(SmartConnect, Disconnect)
cred = TestCredentials()
si = py_vmomi_service.connect(cred.host, cred.username, cred.password, cred.port)
synchronous_task_waiter = SynchronousTaskWaiter()
mapper = VnicToNetworkMapper(DvPortGroupNameGenerator())
dv_port_group_creator = DvPortGroupCreator(py_vmomi_service, synchronous_task_waiter)
port_group_name_generator = DvPortGroupNameGenerator()
virtual_machine_port_group_configurer = VirtualMachinePortGroupConfigurer(py_vmomi_service,
synchronous_task_waiter,
mapper,
VNicService(),
port_group_name_generator)
mapping = VmNetworkMapping()
mapping.vlan_id = [vim.NumericRange(start=65, end=65)]
mapping.dv_port_name = DvPortGroupNameGenerator().generate_port_group_name(65, 'Trunk')
mapping.dv_switch_name = 'dvSwitch'
mapping.dv_switch_path = 'QualiSB'
mapping.vlan_spec = vim.dvs.VmwareDistributedVirtualSwitch.TrunkVlanSpec()
connector = VirtualSwitchToMachineConnector(
dv_port_group_creator,
virtual_machine_port_group_configurer)
vm = py_vmomi_service.find_vm_by_name(si, 'QualiSB/Raz', '2')
# Act
connector.connect_by_mapping(si, vm, [mapping], None, [], Mock(), 'True')
pass
def integration_test_connect_B(self):
py_vmomi_service = pyVmomiService(SmartConnect, Disconnect)
cred = TestCredentials()
si = py_vmomi_service.connect(cred.host, cred.username, cred.password, cred.port)
vm_uuid = py_vmomi_service.find_vm_by_name(si, 'QualiSB/Boris', 'Boris2-win7').config.uuid
mapping = VmNetworkMapping()
mapping.vlan_id = '114'
# mapping.dv_port_name = 65
mapping.dv_switch_name = 'dvSwitch'
mapping.dv_switch_path = 'QualiSB'
mapping.vlan_spec = 'Trunk'
vlan_spec = VlanSpecFactory()
range_fac = VLanIdRangeParser()
synchronous_task_waiter = SynchronousTaskWaiter()
name_gen = DvPortGroupNameGenerator()
mapper = VnicToNetworkMapper(name_gen)
dv_port_group_creator = DvPortGroupCreator(py_vmomi_service, synchronous_task_waiter)
virtual_machine_port_group_configurer = VirtualMachinePortGroupConfigurer(py_vmomi_service,
synchronous_task_waiter,
mapper,
VNicService())
connector = VirtualSwitchToMachineConnector(dv_port_group_creator, virtual_machine_port_group_configurer)
command = VirtualSwitchConnectCommand(py_vmomi_service, connector, name_gen, vlan_spec, range_fac, Mock())
command.connect_to_networks(si, vm_uuid, [mapping], 'QualiSB/anetwork', [], 'True')
def test_integration(self):
self.integration_test_connect_A()
self.integration_test_connect_B()
def test_disconnect_bulk(self):
py_vmomi_service = pyVmomiService(SmartConnect, Disconnect)
cred = TestCredentials()
si = py_vmomi_service.connect(cred.host, cred.username, cred.password, cred.port)
vm = py_vmomi_service.find_vm_by_name(si, 'QualiSB/Alex', 'test_25bf07ee')
mac_address = '00:50:56:a2:5f:43'
vnics = [device.backing.network for device in vm.config.hardware.device
if isinstance(device, vim.vm.device.VirtualEthernetCard)
and hasattr(device.backing, 'network')
and hasattr(device, 'macAddress')
and device.macAddress == mac_address]
pass
|
{
"content_hash": "9c8f13d18e82ef69d7682a8d72d60e3c",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 114,
"avg_line_length": 53.95918367346939,
"alnum_prop": 0.6467473524962178,
"repo_name": "QualiSystems/vCenterShell",
"id": "0db028a8fed59ae9bff8e82e94ef95af12dcb2a7",
"size": "5288",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "integration/integration_commands/integration_test_connect.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "8339"
},
{
"name": "Makefile",
"bytes": "7672"
},
{
"name": "Python",
"bytes": "629506"
},
{
"name": "Shell",
"bytes": "646"
}
],
"symlink_target": ""
}
|
c = get_config()
#------------------------------------------------------------------------------
# NbConvertApp configuration
#------------------------------------------------------------------------------
# This application is used to convert notebook files (*.ipynb) to various other
# formats.
#
# WARNING: THE COMMANDLINE INTERFACE MAY CHANGE IN FUTURE RELEASES.
# NbConvertApp will inherit config from: BaseIPythonApplication, Application
# The IPython profile to use.
# c.NbConvertApp.profile = u'default'
# The export format to be used.
# c.NbConvertApp.export_format = 'html'
# List of notebooks to convert. Wildcards are supported. Filenames passed
# positionally will be added to the list.
# c.NbConvertApp.notebooks = []
# PostProcessor class used to write the results of the conversion
# c.NbConvertApp.postprocessor_class = u''
# Writer class used to write the results of the conversion
# c.NbConvertApp.writer_class = 'FilesWriter'
# Set the log level by value or name.
# c.NbConvertApp.log_level = 30
# Path to an extra config file to load.
#
# If specified, load this config file in addition to any other IPython config.
# c.NbConvertApp.extra_config_file = u''
# Whether to create profile dir if it doesn't exist
# c.NbConvertApp.auto_create = False
# Overwrite the base name used for output files. Can only be used when
# converting one notebook at a time.
# c.NbConvertApp.output_base = ''
# The name of the IPython directory. This directory is used for logging
# configuration (through profiles), history storage, etc. The default is usually
# $HOME/.ipython. This option can also be specified through the environment
# variable IPYTHONDIR.
# c.NbConvertApp.ipython_dir = u''
# Whether to install the default config files into the profile dir. If a new
# profile is being created, and IPython contains config files for that profile,
# then they will be staged into the new directory. Otherwise, default config
# files will be automatically generated.
# c.NbConvertApp.copy_config_files = False
# The date format used by logging formatters for %(asctime)s
# c.NbConvertApp.log_datefmt = '%Y-%m-%d %H:%M:%S'
# The Logging format template
# c.NbConvertApp.log_format = '[%(name)s]%(highlevel)s %(message)s'
# Create a massive crash report when IPython encounters what may be an internal
# error. The default is to append a short message to the usual traceback
# c.NbConvertApp.verbose_crash = False
# Whether to apply a suffix prior to the extension (only relevant when
# converting to notebook format). The suffix is determined by the exporter, and
# is usually '.nbconvert'.
# c.NbConvertApp.use_output_suffix = True
# Whether to overwrite existing config files when copying
# c.NbConvertApp.overwrite = False
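# Hedged example (editor's addition): to always export HTML for a fixed set
# of notebooks, the commented defaults above could be overridden like so
# (values are illustrative):
#
# c.NbConvertApp.export_format = 'html'
# c.NbConvertApp.notebooks = ['notebooks/*.ipynb']
# c.NbConvertApp.writer_class = 'FilesWriter'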
#------------------------------------------------------------------------------
# NbConvertBase configuration
#------------------------------------------------------------------------------
# Global configurable class for shared config
#
# Useful for display data priority that might be used by many transformers
# An ordered list of preferred output types; the first one encountered will be
# used when converting, discarding the others.
# c.NbConvertBase.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain']
# DEPRECATED default highlight language, please use language_info metadata
# instead
# c.NbConvertBase.default_language = 'ipython'
#------------------------------------------------------------------------------
# ProfileDir configuration
#------------------------------------------------------------------------------
# An object to manage the profile directory and its resources.
#
# The profile directory is used by all IPython applications, to manage
# configuration, logging and security.
#
# This object knows how to find, create and manage these directories. This
# should be used by any code that wants to handle profiles.
# Set the profile location directly. This overrides the logic used by the
# `profile` option.
# c.ProfileDir.location = u''
#------------------------------------------------------------------------------
# Exporter configuration
#------------------------------------------------------------------------------
# Class containing methods that sequentially run a list of preprocessors on a
# NotebookNode object and then return the modified NotebookNode object and
# accompanying resources dict.
# Extension of the file that should be written to disk
# c.Exporter.file_extension = '.txt'
# List of preprocessors, by name or namespace, to enable.
# c.Exporter.preprocessors = []
# List of preprocessors available by default, by name, namespace, instance, or
# type.
# c.Exporter.default_preprocessors = ['IPython.nbconvert.preprocessors.coalesce_streams', 'IPython.nbconvert.preprocessors.SVG2PDFPreprocessor', 'IPython.nbconvert.preprocessors.ExtractOutputPreprocessor', 'IPython.nbconvert.preprocessors.CSSHTMLHeaderPreprocessor', 'IPython.nbconvert.preprocessors.RevealHelpPreprocessor', 'IPython.nbconvert.preprocessors.LatexPreprocessor', 'IPython.nbconvert.preprocessors.ClearOutputPreprocessor', 'IPython.nbconvert.preprocessors.ExecutePreprocessor', 'IPython.nbconvert.preprocessors.HighlightMagicsPreprocessor']
#------------------------------------------------------------------------------
# HTMLExporter configuration
#------------------------------------------------------------------------------
# Exports a basic HTML document. This exporter assists with the export of HTML.
# Inherit from it if you are writing your own HTML template and need custom
# preprocessors/filters. If you don't need custom preprocessors/filters, just
# change the 'template_file' config option.
# HTMLExporter will inherit config from: TemplateExporter, Exporter
#
# c.HTMLExporter.jinja_variable_block_start = ''
#
# c.HTMLExporter.jinja_variable_block_end = ''
# formats of raw cells to be included in this Exporter's output.
# c.HTMLExporter.raw_mimetypes = []
# Name of the template file to use
# c.HTMLExporter.template_file = u'default'
# List of preprocessors available by default, by name, namespace, instance, or
# type.
# c.HTMLExporter.default_preprocessors = ['IPython.nbconvert.preprocessors.coalesce_streams', 'IPython.nbconvert.preprocessors.SVG2PDFPreprocessor', 'IPython.nbconvert.preprocessors.ExtractOutputPreprocessor', 'IPython.nbconvert.preprocessors.CSSHTMLHeaderPreprocessor', 'IPython.nbconvert.preprocessors.RevealHelpPreprocessor', 'IPython.nbconvert.preprocessors.LatexPreprocessor', 'IPython.nbconvert.preprocessors.ClearOutputPreprocessor', 'IPython.nbconvert.preprocessors.ExecutePreprocessor', 'IPython.nbconvert.preprocessors.HighlightMagicsPreprocessor']
#
# c.HTMLExporter.template_path = ['.']
# Extension of the file that should be written to disk
# c.HTMLExporter.file_extension = '.txt'
#
# c.HTMLExporter.jinja_comment_block_end = ''
# Dictionary of filters, by name and namespace, to add to the Jinja environment.
# c.HTMLExporter.filters = {}
#
# c.HTMLExporter.jinja_comment_block_start = ''
#
# c.HTMLExporter.jinja_logic_block_end = ''
#
# c.HTMLExporter.jinja_logic_block_start = ''
#
# c.HTMLExporter.template_extension = '.tpl'
# List of preprocessors, by name or namespace, to enable.
# c.HTMLExporter.preprocessors = []
#------------------------------------------------------------------------------
# LatexExporter configuration
#------------------------------------------------------------------------------
# Exports to a LaTeX template. Inherit from this class if your template is
# LaTeX based and you need custom transformers/filters. If you don't need
# custom transformers/filters, just change the 'template_file' config option.
# Place your template in the special "/latex" subfolder of the
# "../templates" folder.
# LatexExporter will inherit config from: TemplateExporter, Exporter
#
# c.LatexExporter.jinja_variable_block_start = '((('
#
# c.LatexExporter.jinja_variable_block_end = ')))'
# formats of raw cells to be included in this Exporter's output.
# c.LatexExporter.raw_mimetypes = []
# Name of the template file to use
# c.LatexExporter.template_file = u'default'
# List of preprocessors available by default, by name, namespace, instance, or
# type.
# c.LatexExporter.default_preprocessors = ['IPython.nbconvert.preprocessors.coalesce_streams', 'IPython.nbconvert.preprocessors.SVG2PDFPreprocessor', 'IPython.nbconvert.preprocessors.ExtractOutputPreprocessor', 'IPython.nbconvert.preprocessors.CSSHTMLHeaderPreprocessor', 'IPython.nbconvert.preprocessors.RevealHelpPreprocessor', 'IPython.nbconvert.preprocessors.LatexPreprocessor', 'IPython.nbconvert.preprocessors.ClearOutputPreprocessor', 'IPython.nbconvert.preprocessors.ExecutePreprocessor', 'IPython.nbconvert.preprocessors.HighlightMagicsPreprocessor']
#
# c.LatexExporter.template_path = ['.']
# Extension of the file that should be written to disk
# c.LatexExporter.file_extension = '.txt'
#
# c.LatexExporter.jinja_comment_block_end = '=))'
# Dictionary of filters, by name and namespace, to add to the Jinja environment.
# c.LatexExporter.filters = {}
#
# c.LatexExporter.jinja_comment_block_start = '((='
#
# c.LatexExporter.jinja_logic_block_end = '*))'
#
# c.LatexExporter.jinja_logic_block_start = '((*'
#
# c.LatexExporter.template_extension = '.tplx'
# List of preprocessors, by name or namespace, to enable.
# c.LatexExporter.preprocessors = []
#------------------------------------------------------------------------------
# MarkdownExporter configuration
#------------------------------------------------------------------------------
# Exports to a markdown document (.md)
# MarkdownExporter will inherit config from: TemplateExporter, Exporter
#
# c.MarkdownExporter.jinja_variable_block_start = ''
#
# c.MarkdownExporter.jinja_variable_block_end = ''
# formats of raw cells to be included in this Exporter's output.
# c.MarkdownExporter.raw_mimetypes = []
# Name of the template file to use
# c.MarkdownExporter.template_file = u'default'
# List of preprocessors available by default, by name, namespace, instance, or
# type.
# c.MarkdownExporter.default_preprocessors = ['IPython.nbconvert.preprocessors.coalesce_streams', 'IPython.nbconvert.preprocessors.SVG2PDFPreprocessor', 'IPython.nbconvert.preprocessors.ExtractOutputPreprocessor', 'IPython.nbconvert.preprocessors.CSSHTMLHeaderPreprocessor', 'IPython.nbconvert.preprocessors.RevealHelpPreprocessor', 'IPython.nbconvert.preprocessors.LatexPreprocessor', 'IPython.nbconvert.preprocessors.ClearOutputPreprocessor', 'IPython.nbconvert.preprocessors.ExecutePreprocessor', 'IPython.nbconvert.preprocessors.HighlightMagicsPreprocessor']
#
# c.MarkdownExporter.template_path = ['.']
# Extension of the file that should be written to disk
# c.MarkdownExporter.file_extension = '.txt'
#
# c.MarkdownExporter.jinja_comment_block_end = ''
# Dictionary of filters, by name and namespace, to add to the Jinja environment.
# c.MarkdownExporter.filters = {}
#
# c.MarkdownExporter.jinja_comment_block_start = ''
#
# c.MarkdownExporter.jinja_logic_block_end = ''
#
# c.MarkdownExporter.jinja_logic_block_start = ''
#
# c.MarkdownExporter.template_extension = '.tpl'
# List of preprocessors, by name or namespace, to enable.
# c.MarkdownExporter.preprocessors = []
#------------------------------------------------------------------------------
# NotebookExporter configuration
#------------------------------------------------------------------------------
# Exports to an IPython notebook.
# NotebookExporter will inherit config from: Exporter
# The nbformat version to write. Use this to downgrade notebooks.
# c.NotebookExporter.nbformat_version = 4
# Extension of the file that should be written to disk
# c.NotebookExporter.file_extension = '.txt'
# List of preprocessors, by name or namespace, to enable.
# c.NotebookExporter.preprocessors = []
# List of preprocessors available by default, by name, namespace, instance, or
# type.
# c.NotebookExporter.default_preprocessors = ['IPython.nbconvert.preprocessors.coalesce_streams', 'IPython.nbconvert.preprocessors.SVG2PDFPreprocessor', 'IPython.nbconvert.preprocessors.ExtractOutputPreprocessor', 'IPython.nbconvert.preprocessors.CSSHTMLHeaderPreprocessor', 'IPython.nbconvert.preprocessors.RevealHelpPreprocessor', 'IPython.nbconvert.preprocessors.LatexPreprocessor', 'IPython.nbconvert.preprocessors.ClearOutputPreprocessor', 'IPython.nbconvert.preprocessors.ExecutePreprocessor', 'IPython.nbconvert.preprocessors.HighlightMagicsPreprocessor']
#------------------------------------------------------------------------------
# PDFExporter configuration
#------------------------------------------------------------------------------
# Writer designed to write to PDF files
# PDFExporter will inherit config from: LatexExporter, TemplateExporter,
# Exporter
# File extensions of temp files to remove after running.
# c.PDFExporter.temp_file_exts = ['.aux', '.bbl', '.blg', '.idx', '.log', '.out']
#
# c.PDFExporter.jinja_variable_block_start = '((('
#
# c.PDFExporter.jinja_logic_block_start = '((*'
# Whether to display the output of latex commands.
# c.PDFExporter.verbose = False
# Formats of raw cells to be included in this Exporter's output.
# c.PDFExporter.raw_mimetypes = []
# Shell command used to run bibtex.
# c.PDFExporter.bib_command = [u'bibtex', u'{filename}']
# List of preprocessors available by default, by name, namespace, instance, or
# type.
# c.PDFExporter.default_preprocessors = ['IPython.nbconvert.preprocessors.coalesce_streams', 'IPython.nbconvert.preprocessors.SVG2PDFPreprocessor', 'IPython.nbconvert.preprocessors.ExtractOutputPreprocessor', 'IPython.nbconvert.preprocessors.CSSHTMLHeaderPreprocessor', 'IPython.nbconvert.preprocessors.RevealHelpPreprocessor', 'IPython.nbconvert.preprocessors.LatexPreprocessor', 'IPython.nbconvert.preprocessors.ClearOutputPreprocessor', 'IPython.nbconvert.preprocessors.ExecutePreprocessor', 'IPython.nbconvert.preprocessors.HighlightMagicsPreprocessor']
#
# c.PDFExporter.template_path = ['.']
# Extension of the file that should be written to disk
# c.PDFExporter.file_extension = '.txt'
#
# c.PDFExporter.jinja_comment_block_end = '=))'
#
# c.PDFExporter.jinja_variable_block_end = ')))'
#
# c.PDFExporter.template_extension = '.tplx'
# List of preprocessors, by name or namespace, to enable.
# c.PDFExporter.preprocessors = []
# Dictionary of filters, by name and namespace, to add to the Jinja environment.
# c.PDFExporter.filters = {}
#
# c.PDFExporter.jinja_comment_block_start = '((='
# Name of the template file to use
# c.PDFExporter.template_file = u'default'
# How many times latex will be called.
# c.PDFExporter.latex_count = 3
#
# c.PDFExporter.jinja_logic_block_end = '*))'
# Shell command used to compile latex.
# c.PDFExporter.latex_command = [u'pdflatex', u'{filename}']
#------------------------------------------------------------------------------
# PythonExporter configuration
#------------------------------------------------------------------------------
# Exports a Python code file.
# PythonExporter will inherit config from: TemplateExporter, Exporter
#
# c.PythonExporter.jinja_variable_block_start = ''
#
# c.PythonExporter.jinja_variable_block_end = ''
# Formats of raw cells to be included in this Exporter's output.
# c.PythonExporter.raw_mimetypes = []
# Name of the template file to use
# c.PythonExporter.template_file = u'default'
# List of preprocessors available by default, by name, namespace, instance, or
# type.
# c.PythonExporter.default_preprocessors = ['IPython.nbconvert.preprocessors.coalesce_streams', 'IPython.nbconvert.preprocessors.SVG2PDFPreprocessor', 'IPython.nbconvert.preprocessors.ExtractOutputPreprocessor', 'IPython.nbconvert.preprocessors.CSSHTMLHeaderPreprocessor', 'IPython.nbconvert.preprocessors.RevealHelpPreprocessor', 'IPython.nbconvert.preprocessors.LatexPreprocessor', 'IPython.nbconvert.preprocessors.ClearOutputPreprocessor', 'IPython.nbconvert.preprocessors.ExecutePreprocessor', 'IPython.nbconvert.preprocessors.HighlightMagicsPreprocessor']
#
# c.PythonExporter.template_path = ['.']
# Extension of the file that should be written to disk
# c.PythonExporter.file_extension = '.txt'
#
# c.PythonExporter.jinja_comment_block_end = ''
# Dictionary of filters, by name and namespace, to add to the Jinja environment.
# c.PythonExporter.filters = {}
#
# c.PythonExporter.jinja_comment_block_start = ''
#
# c.PythonExporter.jinja_logic_block_end = ''
#
# c.PythonExporter.jinja_logic_block_start = ''
#
# c.PythonExporter.template_extension = '.tpl'
# List of preprocessors, by name or namespace, to enable.
# c.PythonExporter.preprocessors = []
#------------------------------------------------------------------------------
# RSTExporter configuration
#------------------------------------------------------------------------------
# Exports restructured text documents.
# RSTExporter will inherit config from: TemplateExporter, Exporter
#
# c.RSTExporter.jinja_variable_block_start = ''
#
# c.RSTExporter.jinja_variable_block_end = ''
# Formats of raw cells to be included in this Exporter's output.
# c.RSTExporter.raw_mimetypes = []
# Name of the template file to use
# c.RSTExporter.template_file = u'default'
# List of preprocessors available by default, by name, namespace, instance, or
# type.
# c.RSTExporter.default_preprocessors = ['IPython.nbconvert.preprocessors.coalesce_streams', 'IPython.nbconvert.preprocessors.SVG2PDFPreprocessor', 'IPython.nbconvert.preprocessors.ExtractOutputPreprocessor', 'IPython.nbconvert.preprocessors.CSSHTMLHeaderPreprocessor', 'IPython.nbconvert.preprocessors.RevealHelpPreprocessor', 'IPython.nbconvert.preprocessors.LatexPreprocessor', 'IPython.nbconvert.preprocessors.ClearOutputPreprocessor', 'IPython.nbconvert.preprocessors.ExecutePreprocessor', 'IPython.nbconvert.preprocessors.HighlightMagicsPreprocessor']
#
# c.RSTExporter.template_path = ['.']
# Extension of the file that should be written to disk
# c.RSTExporter.file_extension = '.txt'
#
# c.RSTExporter.jinja_comment_block_end = ''
# Dictionary of filters, by name and namespace, to add to the Jinja environment.
# c.RSTExporter.filters = {}
#
# c.RSTExporter.jinja_comment_block_start = ''
#
# c.RSTExporter.jinja_logic_block_end = ''
#
# c.RSTExporter.jinja_logic_block_start = ''
#
# c.RSTExporter.template_extension = '.tpl'
# List of preprocessors, by name or namespace, to enable.
# c.RSTExporter.preprocessors = []
#------------------------------------------------------------------------------
# SlidesExporter configuration
#------------------------------------------------------------------------------
# Exports HTML slides with reveal.js
# SlidesExporter will inherit config from: HTMLExporter, TemplateExporter,
# Exporter
#
# c.SlidesExporter.jinja_variable_block_start = ''
#
# c.SlidesExporter.jinja_variable_block_end = ''
# Formats of raw cells to be included in this Exporter's output.
# c.SlidesExporter.raw_mimetypes = []
# Name of the template file to use
# c.SlidesExporter.template_file = u'default'
# List of preprocessors available by default, by name, namespace, instance, or
# type.
# c.SlidesExporter.default_preprocessors = ['IPython.nbconvert.preprocessors.coalesce_streams', 'IPython.nbconvert.preprocessors.SVG2PDFPreprocessor', 'IPython.nbconvert.preprocessors.ExtractOutputPreprocessor', 'IPython.nbconvert.preprocessors.CSSHTMLHeaderPreprocessor', 'IPython.nbconvert.preprocessors.RevealHelpPreprocessor', 'IPython.nbconvert.preprocessors.LatexPreprocessor', 'IPython.nbconvert.preprocessors.ClearOutputPreprocessor', 'IPython.nbconvert.preprocessors.ExecutePreprocessor', 'IPython.nbconvert.preprocessors.HighlightMagicsPreprocessor']
#
# c.SlidesExporter.template_path = ['.']
# Extension of the file that should be written to disk
# c.SlidesExporter.file_extension = '.txt'
#
# c.SlidesExporter.jinja_comment_block_end = ''
# Dictionary of filters, by name and namespace, to add to the Jinja environment.
# c.SlidesExporter.filters = {}
#
# c.SlidesExporter.jinja_comment_block_start = ''
#
# c.SlidesExporter.jinja_logic_block_end = ''
#
# c.SlidesExporter.jinja_logic_block_start = ''
#
# c.SlidesExporter.template_extension = '.tpl'
# List of preprocessors, by name or namespace, to enable.
# c.SlidesExporter.preprocessors = []
#------------------------------------------------------------------------------
# TemplateExporter configuration
#------------------------------------------------------------------------------
# Exports notebooks into other file formats. Uses Jinja 2 templating engine to
# output new formats. Inherit from this class if you are creating a new
# template type along with new filters/preprocessors. If the filters/
# preprocessors provided by default suffice, there is no need to inherit from
# this class. Instead, override the template_file and file_extension traits via
# a config file.
#
# - citation2latex - highlight2html - filter_data_type - markdown2html -
# markdown2rst - get_lines - ansi2latex - strip_ansi - add_prompts -
# comment_lines - ascii_only - markdown2latex - escape_latex - add_anchor -
# ipython2python - posix_path - highlight2latex - path2url - prevent_list_blocks
# - ansi2html - wrap_text - indent - strip_dollars - html2text -
# strip_files_prefix
# TemplateExporter will inherit config from: Exporter
#
# c.TemplateExporter.jinja_variable_block_start = ''
#
# c.TemplateExporter.jinja_variable_block_end = ''
# Formats of raw cells to be included in this Exporter's output.
# c.TemplateExporter.raw_mimetypes = []
# Name of the template file to use
# c.TemplateExporter.template_file = u'default'
# List of preprocessors available by default, by name, namespace, instance, or
# type.
# c.TemplateExporter.default_preprocessors = ['IPython.nbconvert.preprocessors.coalesce_streams', 'IPython.nbconvert.preprocessors.SVG2PDFPreprocessor', 'IPython.nbconvert.preprocessors.ExtractOutputPreprocessor', 'IPython.nbconvert.preprocessors.CSSHTMLHeaderPreprocessor', 'IPython.nbconvert.preprocessors.RevealHelpPreprocessor', 'IPython.nbconvert.preprocessors.LatexPreprocessor', 'IPython.nbconvert.preprocessors.ClearOutputPreprocessor', 'IPython.nbconvert.preprocessors.ExecutePreprocessor', 'IPython.nbconvert.preprocessors.HighlightMagicsPreprocessor']
#
# c.TemplateExporter.template_path = ['.']
# Extension of the file that should be written to disk
# c.TemplateExporter.file_extension = '.txt'
#
# c.TemplateExporter.jinja_comment_block_end = ''
# Dictionary of filters, by name and namespace, to add to the Jinja environment.
# c.TemplateExporter.filters = {}
#
# c.TemplateExporter.jinja_comment_block_start = ''
#
# c.TemplateExporter.jinja_logic_block_end = ''
#
# c.TemplateExporter.jinja_logic_block_start = ''
#
# c.TemplateExporter.template_extension = '.tpl'
# List of preprocessors, by name or namespace, to enable.
# c.TemplateExporter.preprocessors = []
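#
# Example (illustrative; the template name and extra path are hypothetical) of
# pointing this exporter at a custom template, as the docstring above suggests:
# c.TemplateExporter.template_path = ['.', '/path/to/templates']
# c.TemplateExporter.template_file = 'mytemplate'
# c.TemplateExporter.file_extension = '.md'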
#------------------------------------------------------------------------------
# CSSHTMLHeaderPreprocessor configuration
#------------------------------------------------------------------------------
# Preprocessor used to pre-process notebook for HTML output. Adds IPython
# notebook front-end CSS and Pygments CSS to HTML output.
# CSSHTMLHeaderPreprocessor will inherit config from: Preprocessor,
# NbConvertBase
# DEPRECATED default highlight language, please use language_info metadata
# instead
# c.CSSHTMLHeaderPreprocessor.default_language = 'ipython'
# CSS highlight class identifier
# c.CSSHTMLHeaderPreprocessor.highlight_class = '.highlight'
#
# c.CSSHTMLHeaderPreprocessor.enabled = False
# An ordered list of preferred output types; the first one encountered will
# usually be used when converting, discarding the others.
# c.CSSHTMLHeaderPreprocessor.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain']
#------------------------------------------------------------------------------
# ClearOutputPreprocessor configuration
#------------------------------------------------------------------------------
# Removes the output from all code cells in a notebook.
# ClearOutputPreprocessor will inherit config from: Preprocessor, NbConvertBase
# DEPRECATED default highlight language, please use language_info metadata
# instead
# c.ClearOutputPreprocessor.default_language = 'ipython'
#
# c.ClearOutputPreprocessor.enabled = False
# An ordered list of preferred output types; the first one encountered will
# usually be used when converting, discarding the others.
# c.ClearOutputPreprocessor.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain']
#------------------------------------------------------------------------------
# ConvertFiguresPreprocessor configuration
#------------------------------------------------------------------------------
# Converts all of the outputs in a notebook from one format to another.
# ConvertFiguresPreprocessor will inherit config from: Preprocessor,
# NbConvertBase
# DEPRECATED default highlight language, please use language_info metadata
# instead
# c.ConvertFiguresPreprocessor.default_language = 'ipython'
# Format the converter writes
# c.ConvertFiguresPreprocessor.to_format = u''
#
# c.ConvertFiguresPreprocessor.enabled = False
# Format the converter accepts
# c.ConvertFiguresPreprocessor.from_format = u''
# An ordered list of preferred output types; the first one encountered will
# usually be used when converting, discarding the others.
# c.ConvertFiguresPreprocessor.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain']
#------------------------------------------------------------------------------
# ExecutePreprocessor configuration
#------------------------------------------------------------------------------
# Executes all the cells in a notebook
# ExecutePreprocessor will inherit config from: Preprocessor, NbConvertBase
# DEPRECATED default highlight language, please use language_info metadata
# instead
# c.ExecutePreprocessor.default_language = 'ipython'
# If execution of a cell times out, interrupt the kernel and continue executing
# other cells rather than throwing an error and stopping.
# c.ExecutePreprocessor.interrupt_on_timeout = False
#
# c.ExecutePreprocessor.enabled = False
# The time to wait (in seconds) for output from executions.
# c.ExecutePreprocessor.timeout = 30
# An ordered list of preferred output types; the first one encountered will
# usually be used when converting, discarding the others.
# c.ExecutePreprocessor.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain']
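#
# Example (illustrative): execute every cell with a 60-second per-cell budget,
# skipping cells that time out instead of aborting the whole conversion:
# c.ExecutePreprocessor.enabled = True
# c.ExecutePreprocessor.timeout = 60
# c.ExecutePreprocessor.interrupt_on_timeout = True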
#------------------------------------------------------------------------------
# ExtractOutputPreprocessor configuration
#------------------------------------------------------------------------------
# Extracts all of the outputs from the notebook file. The extracted outputs
# are returned in the 'resources' dictionary.
# ExtractOutputPreprocessor will inherit config from: Preprocessor,
# NbConvertBase
# DEPRECATED default highlight language, please use language_info metadata
# instead
# c.ExtractOutputPreprocessor.default_language = 'ipython'
#
# c.ExtractOutputPreprocessor.output_filename_template = '{unique_key}_{cell_index}_{index}{extension}'
#
# c.ExtractOutputPreprocessor.extract_output_types = set(['image/png', 'application/pdf', 'image/jpeg', 'image/svg+xml'])
#
# c.ExtractOutputPreprocessor.enabled = False
# An ordered list of preferred output types; the first one encountered will
# usually be used when converting, discarding the others.
# c.ExtractOutputPreprocessor.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain']
#------------------------------------------------------------------------------
# HighlightMagicsPreprocessor configuration
#------------------------------------------------------------------------------
# Detects and tags code cells that use a language other than Python.
# HighlightMagicsPreprocessor will inherit config from: Preprocessor,
# NbConvertBase
# Syntax highlighting for magic's extension languages. Each item associates a
# language magic extension such as %%R with a pygments lexer such as r.
# c.HighlightMagicsPreprocessor.languages = {}
#
# c.HighlightMagicsPreprocessor.enabled = False
# DEPRECATED default highlight language, please use language_info metadata
# instead
# c.HighlightMagicsPreprocessor.default_language = 'ipython'
# An ordered list of preferred output types; the first one encountered will
# usually be used when converting, discarding the others.
# c.HighlightMagicsPreprocessor.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain']
#------------------------------------------------------------------------------
# LatexPreprocessor configuration
#------------------------------------------------------------------------------
# Preprocessor for latex destined documents.
#
# Mainly populates the `latex` key in the resources dict, adding definitions for
# pygments highlight styles.
# LatexPreprocessor will inherit config from: Preprocessor, NbConvertBase
# DEPRECATED default highlight language, please use language_info metadata
# instead
# c.LatexPreprocessor.default_language = 'ipython'
#
# c.LatexPreprocessor.enabled = False
# An ordered list of preferred output types; the first one encountered will
# usually be used when converting, discarding the others.
# c.LatexPreprocessor.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain']
#------------------------------------------------------------------------------
# Preprocessor configuration
#------------------------------------------------------------------------------
# A configurable preprocessor
#
# Inherit from this class if you wish to have configurability for your
# preprocessor.
#
# Any configurable traitlets this class exposed will be configurable in profiles
# using c.SubClassName.attribute = value
#
# You can overwrite :meth:`preprocess_cell` to apply a transformation
# independently on each cell, or :meth:`preprocess` if you prefer your own logic.
# See the corresponding docstrings for more information.
#
# Disabled by default and can be enabled via the config by
# 'c.YourPreprocessorName.enabled = True'
# Preprocessor will inherit config from: NbConvertBase
# DEPRECATED default highlight language, please use language_info metadata
# instead
# c.Preprocessor.default_language = 'ipython'
#
# c.Preprocessor.enabled = False
# An ordered list of preferred output types; the first one encountered will
# usually be used when converting, discarding the others.
# c.Preprocessor.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain']
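#
# Minimal sketch (illustrative; the module and class names are hypothetical) of
# a custom preprocessor built by subclassing Preprocessor, as described above:
# from IPython.nbconvert.preprocessors import Preprocessor
#
# class UpperCasePreprocessor(Preprocessor):
#     def preprocess_cell(self, cell, resources, index):
#         # Runs once per cell; upper-cases markdown cells as a demonstration.
#         if cell.cell_type == 'markdown':
#             cell.source = cell.source.upper()
#         return cell, resources
#
# It could then be enabled for an exporter with:
# c.MarkdownExporter.preprocessors = ['mymodule.UpperCasePreprocessor']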
#------------------------------------------------------------------------------
# RevealHelpPreprocessor configuration
#------------------------------------------------------------------------------
# RevealHelpPreprocessor will inherit config from: Preprocessor, NbConvertBase
# The URL prefix for reveal.js. This can be a relative URL for a local copy of
# reveal.js, or point to a CDN.
#
# For speaker notes to work, a local reveal.js prefix must be used.
# c.RevealHelpPreprocessor.url_prefix = 'reveal.js'
# DEPRECATED default highlight language, please use language_info metadata
# instead
# c.RevealHelpPreprocessor.default_language = 'ipython'
#
# c.RevealHelpPreprocessor.enabled = False
# An ordered list of preferred output types; the first one encountered will
# usually be used when converting, discarding the others.
# c.RevealHelpPreprocessor.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain']
#------------------------------------------------------------------------------
# SVG2PDFPreprocessor configuration
#------------------------------------------------------------------------------
# Converts all of the outputs in a notebook from SVG to PDF.
# SVG2PDFPreprocessor will inherit config from: ConvertFiguresPreprocessor,
# Preprocessor, NbConvertBase
# Format the converter accepts
# c.SVG2PDFPreprocessor.from_format = u''
# DEPRECATED default highlight language, please use language_info metadata
# instead
# c.SVG2PDFPreprocessor.default_language = 'ipython'
#
# c.SVG2PDFPreprocessor.enabled = False
# Format the converter writes
# c.SVG2PDFPreprocessor.to_format = u''
# The command to use for converting SVG to PDF
#
# This string is a template, which will be formatted with the keys to_filename
# and from_filename.
#
# The conversion call must read the SVG from {from_filename}, and write a PDF to
# {to_filename}.
# c.SVG2PDFPreprocessor.command = u''
# An ordered list of preferred output types; the first one encountered will
# usually be used when converting, discarding the others.
# c.SVG2PDFPreprocessor.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain']
# The path to Inkscape, if necessary
# c.SVG2PDFPreprocessor.inkscape = u''
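#
# Example (illustrative) command template using Inkscape; the {from_filename}
# and {to_filename} keys are filled in by the preprocessor as described above:
# c.SVG2PDFPreprocessor.command = u'inkscape --without-gui --export-pdf={to_filename} {from_filename}'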
#------------------------------------------------------------------------------
# FilesWriter configuration
#------------------------------------------------------------------------------
# Consumes nbconvert output and produces files.
# FilesWriter will inherit config from: WriterBase, NbConvertBase
# List of the files that the notebook references. Files will be included with
# written output.
# c.FilesWriter.files = []
# An ordered list of preferred output types; the first one encountered will
# usually be used when converting, discarding the others.
# c.FilesWriter.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain']
# Directory to write output to. Leave blank to output to the current directory
# c.FilesWriter.build_directory = ''
# DEPRECATED default highlight language, please use language_info metadata
# instead
# c.FilesWriter.default_language = 'ipython'
# When copying files that the notebook depends on, copy them in relation to this
# path, such that the destination filename will be os.path.relpath(filename,
# relpath). If FilesWriter is operating on a notebook that already exists
# elsewhere on disk, then the default will be the directory containing that
# notebook.
# c.FilesWriter.relpath = ''
#------------------------------------------------------------------------------
# StdoutWriter configuration
#------------------------------------------------------------------------------
# Consumes output from nbconvert export...() methods and writes to the stdout
# stream.
# StdoutWriter will inherit config from: WriterBase, NbConvertBase
# List of the files that the notebook references. Files will be included with
# written output.
# c.StdoutWriter.files = []
# An ordered list of preferred output types; the first one encountered will
# usually be used when converting, discarding the others.
# c.StdoutWriter.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain']
# DEPRECATED default highlight language, please use language_info metadata
# instead
# c.StdoutWriter.default_language = 'ipython'
#------------------------------------------------------------------------------
# WriterBase configuration
#------------------------------------------------------------------------------
# Consumes output from nbconvert export...() methods and writes to a useful
# location.
# WriterBase will inherit config from: NbConvertBase
# List of the files that the notebook references. Files will be included with
# written output.
# c.WriterBase.files = []
# An ordered list of preferred output types; the first one encountered will
# usually be used when converting, discarding the others.
# c.WriterBase.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain']
# DEPRECATED default highlight language, please use language_info metadata
# instead
# c.WriterBase.default_language = 'ipython'
#------------------------------------------------------------------------------
# PostProcessorBase configuration
#------------------------------------------------------------------------------
# PostProcessorBase will inherit config from: NbConvertBase
# An ordered list of preferred output types; the first one encountered will
# usually be used when converting, discarding the others.
# c.PostProcessorBase.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain']
# DEPRECATED default highlight language, please use language_info metadata
# instead
# c.PostProcessorBase.default_language = 'ipython'
#------------------------------------------------------------------------------
# ServePostProcessor configuration
#------------------------------------------------------------------------------
# Post processor designed to serve files
#
# Proxies reveal.js requests to a CDN if no local reveal.js is present
# ServePostProcessor will inherit config from: PostProcessorBase, NbConvertBase
# The IP address to listen on.
# c.ServePostProcessor.ip = '127.0.0.1'
# URL prefix for reveal.js
# c.ServePostProcessor.reveal_prefix = 'reveal.js'
# DEPRECATED default highlight language, please use language_info metadata
# instead
# c.ServePostProcessor.default_language = 'ipython'
# Port for the server to listen on.
# c.ServePostProcessor.port = 8000
# An ordered list of preferred output types; the first one encountered will
# usually be used when converting, discarding the others.
# c.ServePostProcessor.display_data_priority = ['text/html', 'application/pdf', 'text/latex', 'image/svg+xml', 'image/png', 'image/jpeg', 'text/plain']
# Should the browser be opened automatically?
# c.ServePostProcessor.open_in_browser = True
# URL for reveal.js CDN.
# c.ServePostProcessor.reveal_cdn = 'https://cdn.jsdelivr.net/reveal.js/2.6.2'
|
{
"content_hash": "51de30a30dca1249db03f54288cce303",
"timestamp": "",
"source": "github",
"line_count": 969,
"max_line_length": 562,
"avg_line_length": 39.99484004127967,
"alnum_prop": 0.6820539285253515,
"repo_name": "scollis/high_resolution_hydrology",
"id": "cea881056555adf208b57995b271029b300ea356",
"size": "38800",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "cluster/profile_mpi0/ipython_nbconvert_config.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "567244"
},
{
"name": "Python",
"bytes": "230279"
},
{
"name": "Shell",
"bytes": "140"
},
{
"name": "TeX",
"bytes": "267113"
}
],
"symlink_target": ""
}
|
import argparse
import threading
import socket
import queue
import enum
DEFAULT_LOCAL_IP_ADDRESS = '0.0.0.0'
DEFAULT_LOCAL_PORT_NUMBER = 8888
DEFAULT_REMOTE_IP_ADDRESS = 'localhost'
DEFAULT_REMOTE_PORT_NUMBER = 9999
DEFAULT_BUFFER_SIZE = 1024
class LocalStatus(enum.Enum):
SERVER_INITIALIZED = 'Local socket initialized.'
SERVER_LISTENING = 'Local socket listening.'
SERVER_SHUTDOWN = 'Local socket shutdown.'
class RemoteStatus(enum.Enum):
HANDSHAKE_INITIALIZED = 'Handshake initialized.'
HANDSHAKE_SUCCESSFUL = 'Handshake successful.'
class Connection:
def __init__(self, local_ip_address, local_port_number, remote_ip_address,
remote_port_number, buffer_size):
self.local_socket = None
self.local_ip_address = local_ip_address
self.local_port_number = local_port_number
self.remote_ip_address = remote_ip_address
self.remote_port_number = remote_port_number
self.buffer_size = buffer_size
self.local_queue = queue.Queue()
self.local_status = None
self.local_thread = threading.Thread(name='localThread',
target=self.open_local_socket)
self.local_thread.daemon = True
self.local_thread.start()
self.remote_status = None
self.handshake_thread = threading.Thread(name='handshakeThread',
target=self.initiate_handshake)
self.handshake_thread.daemon = True
self.handshake_thread.start()
def initiate_handshake(self):
self.remote_status = RemoteStatus.HANDSHAKE_INITIALIZED
print (self.remote_status)
while True:
remote_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
remote_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
try:
                remote_socket.connect((self.remote_ip_address, self.remote_port_number))
                # Socket payloads must be bytes in Python 3.
                remote_socket.send(b'SYN\n')
                if remote_socket.recv(self.buffer_size) == b'ACK\n':
                    remote_socket.send(b'SYN-ACK\n')
remote_socket.shutdown(socket.SHUT_WR)
remote_socket.close()
self.remote_status = RemoteStatus.HANDSHAKE_SUCCESSFUL
print (self.remote_status)
else:
pass
break
except socket.error:
continue
def send(self, message):
remote_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
remote_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
try:
remote_socket.connect((self.remote_ip_address, self.remote_port_number))
            print('Remote socket sent:', message)
            remote_socket.send((message + '\n').encode())
remote_socket.shutdown(socket.SHUT_WR)
remote_socket.close()
except socket.error:
pass
def get_message(self):
if not self.local_queue.empty():
return self.local_queue.get()
else:
return None
def open_local_socket(self):
self.local_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.local_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.local_socket.bind((self.local_ip_address, self.local_port_number))
self.local_socket.listen(1)
self.local_status = LocalStatus.SERVER_LISTENING
print (self.local_status)
while True:
try:
connection, address = self.local_socket.accept()
                message = connection.recv(self.buffer_size).decode()
                if message == 'SYN\n':
                    connection.send(b'ACK\n')
                else:
                    print('Local socket received:', message.rstrip())
self.local_queue.put(message)
except socket.error:
break
def close_server_socket(self):
try:
self.local_socket.shutdown(socket.SHUT_RD)
self.local_socket.close()
self.local_status = LocalStatus.SERVER_SHUTDOWN
print (self.local_status)
except socket.error:
pass
        # The worker threads are daemons; closing the listening socket above
        # unblocks accept() so the local thread exits, and no explicit stop
        # flag is needed.
def main(local_ip_address, local_port_number,
remote_ip_address, remote_port_number,
buffer_size):
return Connection(local_ip_address, local_port_number,
remote_ip_address, remote_port_number,
buffer_size)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-lip', '--localIPAddress', help='local IP address',
required=False, default=DEFAULT_LOCAL_IP_ADDRESS)
    parser.add_argument('-lpn', '--localPortNumber', help='local port number',
                        required=False, type=int, default=DEFAULT_LOCAL_PORT_NUMBER)
    parser.add_argument('-rip', '--remoteIPAddress', help='remote IP address',
                        required=False, default=DEFAULT_REMOTE_IP_ADDRESS)
    parser.add_argument('-rpn', '--remotePortNumber', help='remote port number',
                        required=False, type=int, default=DEFAULT_REMOTE_PORT_NUMBER)
    parser.add_argument('-bs', '--buffer_size', help='buffer size',
                        required=False, type=int, default=DEFAULT_BUFFER_SIZE)
args = parser.parse_args()
    print('localIPAddress:', args.localIPAddress)
    print('localPortNumber:', args.localPortNumber)
    print('remoteIPAddress:', args.remoteIPAddress)
    print('remotePortNumber:', args.remotePortNumber)
    print('buffer_size:', args.buffer_size)
main(args.localIPAddress, args.localPortNumber,
args.remoteIPAddress, args.remotePortNumber,
args.buffer_size)
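    # Illustrative interactive session (assumes a second instance running with
    # the local/remote ports swapped):
    #   conn = main('0.0.0.0', 8888, 'localhost', 9999, 1024)
    #   conn.send('hello')
    #   conn.get_message()  # returns the oldest queued message, or None
    #   conn.close_server_socket()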
|
{
"content_hash": "d10ea4b8a854459901c53b5e13951b4d",
"timestamp": "",
"source": "github",
"line_count": 156,
"max_line_length": 90,
"avg_line_length": 38.07051282051282,
"alnum_prop": 0.6033002188920694,
"repo_name": "MSU-NASA-RMC/client",
"id": "1463022f5ee8e656d8bc595a46d7b4ad25cc25df",
"size": "5939",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "connection.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "27405"
}
],
"symlink_target": ""
}
|
import hashlib
import json
from keystone.common import controller
from keystone import exception
class CredentialV3(controller.V3Controller):
collection_name = 'credentials'
member_name = 'credential'
def __init__(self):
super(CredentialV3, self).__init__()
self.get_member_from_driver = self.credential_api.get_credential
def _assign_unique_id(self, ref):
        # Generates and assigns a unique identifier to
# a credential reference.
if ref.get('type', '').lower() == 'ec2':
try:
blob = json.loads(ref.get('blob'))
except (ValueError, TypeError):
raise exception.ValidationError(
message=_('Invalid blob in credential'))
if not blob or not isinstance(blob, dict):
raise exception.ValidationError(attribute='blob',
target='credential')
if blob.get('access') is None:
raise exception.ValidationError(attribute='access',
target='blob')
ref = ref.copy()
ref['id'] = hashlib.sha256(blob['access']).hexdigest()
return ref
else:
return super(CredentialV3, self)._assign_unique_id(ref)
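    # Illustrative example (hypothetical values): a credential posted as
    #   {'type': 'ec2', 'blob': '{"access": "abc123", "secret": "s3cr3t"}'}
    # gets ref['id'] = hashlib.sha256('abc123').hexdigest(), so the same EC2
    # access key always maps to the same credential id.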
@controller.protected()
def create_credential(self, context, credential):
ref = self._assign_unique_id(self._normalize_dict(credential))
ref = self.credential_api.create_credential(ref['id'], ref)
return CredentialV3.wrap_member(context, ref)
@controller.protected()
def list_credentials(self, context):
refs = self.credential_api.list_credentials()
return CredentialV3.wrap_collection(context, refs)
@controller.protected()
def get_credential(self, context, credential_id):
ref = self.credential_api.get_credential(credential_id)
return CredentialV3.wrap_member(context, ref)
@controller.protected()
def update_credential(self, context, credential_id, credential):
self._require_matching_id(credential_id, credential)
ref = self.credential_api.update_credential(credential_id, credential)
return CredentialV3.wrap_member(context, ref)
@controller.protected()
def delete_credential(self, context, credential_id):
return self.credential_api.delete_credential(credential_id)
|
{
"content_hash": "d5cdaaa8d9ef97093c4521d549bfc32a",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 78,
"avg_line_length": 39.11290322580645,
"alnum_prop": 0.6251546391752577,
"repo_name": "cloudbau/keystone",
"id": "bddd64e7ab17db5db28e1cec0c1342e9fcdacc97",
"size": "3056",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "keystone/credential/controllers.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "16002"
},
{
"name": "JavaScript",
"bytes": "7403"
},
{
"name": "Python",
"bytes": "2383366"
},
{
"name": "Shell",
"bytes": "11206"
}
],
"symlink_target": ""
}
|
class Bot(object):
pass
|
{
"content_hash": "983ff36eb9e2f4c8d52c78d3c3aebd35",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 18,
"avg_line_length": 14,
"alnum_prop": 0.6428571428571429,
"repo_name": "instagrambot/instapro",
"id": "087b317fadc6ecec6c1480db5b84e7ee2acd1119",
"size": "28",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "instabot/bot/bot.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "34613"
}
],
"symlink_target": ""
}
|
from cocos.actions import *
import cocos
import pyglet
class TextLayer(cocos.layer.Layer):
"""
    Layer that sets up text labels and applies some actions to them
"""
def __init__(self):
super(TextLayer, self).__init__()
window_width, window_height = cocos.director.director.get_window_size()
label = cocos.text.Label("WoW Python Vigo!",
font_name='Comic Sans MS',
font_size=48,
color=(255, 255, 255, 255),
anchor_x='center',
anchor_y='center')
label.position = window_width*0.5, window_height*0.2
label.do(Repeat(JumpTo(label.position, height=100, duration=2)))
self.add(label, z=1)
label2 = cocos.text.Label("Dat Scene",
font_name='Comic Sans MS',
font_size=52,
color=(255, 0, 255, 255),
anchor_x='center',
anchor_y='center')
label2.position = window_width*0.8, window_height*0.8
label2.do(Repeat(ScaleBy(1.1, 0.2) + Reverse(ScaleBy(1.1, 0.2))))
self.add(label2, z=1)
label3 = cocos.text.Label("Such effects",
font_name='Comic Sans MS',
font_size=40,
color=(0, 255, 255, 255),
anchor_x='center',
anchor_y='center')
label3.position = window_width*0.25, window_height*0.6
label3.do(Repeat(FadeTo(0, 1) + FadeTo(255, 1)))
self.add(label3, z=1)
class DogeLayer(cocos.layer.Layer):
"""
Layer that represents Doge and his jumping sunglasses!
"""
def __init__(self):
super(DogeLayer, self).__init__()
window_width, window_height = cocos.director.director.get_window_size()
self.position = 0, 100
sprite = cocos.sprite.Sprite('images/sunglasses.png')
sprite.scale = 2.5
sprite.rotation = 6
sprite.position = window_width*0.5, window_height
sprite.do(JumpTo((window_width*0.4, window_height*0.6), duration=1) +
Repeat(RotateBy(10, 0.6) + Reverse(RotateBy(10, 0.6))))
self.add(sprite, z=1)
sprite = cocos.sprite.Sprite('images/doge.jpg')
sprite.position = window_width*0.5, window_height*0.4
self.add(sprite, z=0)
if __name__ == '__main__':
# We configure the base assets dir
pyglet.resource.path.append('../../../assets/')
pyglet.resource.reindex()
# Scenes and layers configuration
cocos.director.director.init(width=1024, height=768)
main_scene = cocos.scene.Scene()
main_scene.add(TextLayer(), z=1)
main_scene.add(DogeLayer(), z=0)
cocos.director.director.run(main_scene)
|
{
"content_hash": "2dd4b6c5b1875c8b21593dcfa9d49777",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 79,
"avg_line_length": 36.93827160493827,
"alnum_prop": 0.5220588235294118,
"repo_name": "adoankim/python-vigo-gamedev",
"id": "ecfc41a0c6e71c091f6622bec08237a3c0fc2b03",
"size": "4325",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/pyvigo/structure_and_effects/actions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "208391"
},
{
"name": "HTML",
"bytes": "122806"
},
{
"name": "JavaScript",
"bytes": "381504"
},
{
"name": "Python",
"bytes": "26343"
}
],
"symlink_target": ""
}
|
from django.shortcuts import render
from rest_framework import viewsets
from .models import Job
from .serializers import JobSerializer
class JobViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows jobs to be viewed or edited.
"""
queryset = Job.objects.all()
serializer_class = JobSerializer
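# Illustrative (assumed) wiring for this viewset in a urls.py:
# from rest_framework import routers
# router = routers.DefaultRouter()
# router.register(r'jobs', JobViewSet)
# urlpatterns would then include router.urls, exposing the usual
# list/detail/create/update/delete endpoints.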
|
{
"content_hash": "2b7bd617bb58f273388d3d947910a5df",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 57,
"avg_line_length": 24.846153846153847,
"alnum_prop": 0.7492260061919505,
"repo_name": "hamster-dev/hamster-core",
"id": "ba48492a7d4a6f37478a9d45998c930590820690",
"size": "323",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "hamster/jobs/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Puppet",
"bytes": "4258"
},
{
"name": "Python",
"bytes": "15598"
},
{
"name": "Shell",
"bytes": "6716"
}
],
"symlink_target": ""
}
|
from django.conf.urls import patterns, url
from .views import EventList, EventDetail
from .feeds import EventFeed
urlpatterns = patterns('',
url(r'^$', view=EventList.as_view(), name="event_list"),
url(r'^ical/$', view=EventFeed(), name="event_list_ical"),
url(r'^(?P<slug>[\w\d-]+)-(?P<pk>\d+)/$', view=EventDetail.as_view(), name="event_detail"),
)
|
{
"content_hash": "4561a0ca6298f7bf7fc5366cd96f4a64",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 95,
"avg_line_length": 33.27272727272727,
"alnum_prop": 0.6502732240437158,
"repo_name": "rva-data/connector-events",
"id": "52d9670721572c29bde1b43f0f7c00ae63b3f010",
"size": "366",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "events/urls.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "34612"
},
{
"name": "Shell",
"bytes": "6466"
}
],
"symlink_target": ""
}
|
DOCUMENTATION = '''
---
module: xattr
version_added: "1.3"
short_description: set/retrieve extended attributes
description:
- Manages filesystem user defined extended attributes, requires that they are enabled
on the target filesystem and that the setfattr/getfattr utilities are present.
options:
name:
required: true
default: None
aliases: ['path']
description:
- The full path of the file/object to get the facts of
key:
required: false
default: None
description:
- The name of a specific Extended attribute key to set/retrieve
value:
required: false
default: None
description:
      - The value to set the named key to; providing a value automatically sets C(state) to 'present'
state:
required: false
    default: read
choices: [ 'read', 'present', 'all', 'keys', 'absent' ]
description:
      - defines the operation to perform.
C(read) retrieves the current value for a C(key) (default)
C(present) sets C(name) to C(value), default if value is set
C(all) dumps all data
C(keys) retrieves all keys
C(absent) deletes the key
follow:
required: false
default: yes
choices: [ 'yes', 'no' ]
description:
- if yes, dereferences symlinks and sets/gets attributes on symlink target,
otherwise acts on symlink itself.
author: Brian Coca
'''
EXAMPLES = '''
# Obtain the extended attributes of /etc/foo.conf
- xattr: name=/etc/foo.conf
# Sets the key 'foo' to value 'bar'
- xattr: path=/etc/foo.conf key=user.foo value=bar
# Removes the key 'foo'
- xattr: name=/etc/foo.conf key=user.foo state=absent
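
# Lists all extended attribute keys on /etc/foo.conf
- xattr: name=/etc/foo.conf state=keys

# Dumps all extended attributes and their values
- xattr: name=/etc/foo.conf state=all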
'''
import operator
def get_xattr_keys(module,path,follow):
cmd = [ module.get_bin_path('getfattr', True) ]
    # prevents a warning; not sure why it's not the default
cmd.append('--absolute-names')
if not follow:
cmd.append('-h')
cmd.append(path)
return _run_xattr(module,cmd)
def get_xattr(module,path,key,follow):
cmd = [ module.get_bin_path('getfattr', True) ]
    # prevents a warning; not sure why it's not the default
cmd.append('--absolute-names')
if not follow:
cmd.append('-h')
if key is None:
cmd.append('-d')
else:
cmd.append('-n %s' % key)
cmd.append(path)
return _run_xattr(module,cmd,False)
def set_xattr(module,path,key,value,follow):
cmd = [ module.get_bin_path('setfattr', True) ]
if not follow:
cmd.append('-h')
cmd.append('-n %s' % key)
cmd.append('-v %s' % value)
cmd.append(path)
return _run_xattr(module,cmd)
def rm_xattr(module,path,key,follow):
cmd = [ module.get_bin_path('setfattr', True) ]
if not follow:
cmd.append('-h')
cmd.append('-x %s' % key)
cmd.append(path)
return _run_xattr(module,cmd,False)
def _run_xattr(module,cmd,check_rc=True):
try:
(rc, out, err) = module.run_command(' '.join(cmd), check_rc=check_rc)
except Exception, e:
module.fail_json(msg="%s!" % e.strerror)
#result = {'raw': out}
result = {}
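    # getfattr output looks like (illustrative):
    #   # file: /etc/foo.conf
    #   user.foo="bar"
    # Comment and blank lines are skipped; key=value lines populate the dict,
    # and bare lines (keys without values) map to ''.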
for line in out.splitlines():
if re.match("^#", line) or line == "":
pass
elif re.search('=', line):
(key, val) = line.split("=")
result[key] = val.strip('"')
else:
result[line] = ''
return result
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(required=True, aliases=['path']),
key = dict(required=False, default=None),
value = dict(required=False, default=None),
state = dict(required=False, default='read', choices=[ 'read', 'present', 'all', 'keys', 'absent' ], type='str'),
follow = dict(required=False, type='bool', default=True),
),
supports_check_mode=True,
)
path = module.params.get('name')
key = module.params.get('key')
value = module.params.get('value')
state = module.params.get('state')
follow = module.params.get('follow')
if not os.path.exists(path):
module.fail_json(msg="path not found or not accessible!")
changed=False
msg = ""
res = {}
if key is None and state in ['present','absent']:
module.fail_json(msg="%s needs a key parameter" % state)
# All xattr must begin in user namespace
if key is not None and not re.match('^user\.',key):
key = 'user.%s' % key
if (state == 'present' or value is not None):
current=get_xattr(module,path,key,follow)
if current is None or not key in current or value != current[key]:
if not module.check_mode:
res = set_xattr(module,path,key,value,follow)
changed=True
res=current
msg="%s set to %s" % (key, value)
elif state == 'absent':
current=get_xattr(module,path,key,follow)
if current is not None and key in current:
if not module.check_mode:
res = rm_xattr(module,path,key,follow)
changed=True
res=current
msg="%s removed" % (key)
elif state == 'keys':
res=get_xattr_keys(module,path,follow)
msg="returning all keys"
elif state == 'all':
res=get_xattr(module,path,None,follow)
msg="dumping all"
else:
res=get_xattr(module,path,key,follow)
msg="returning %s" % key
module.exit_json(changed=changed, msg=msg, xattr=res)
# import module snippets
from ansible.module_utils.basic import *
main()
|
{
"content_hash": "d1aa197ae4d92657e5cfadcf33b40d47",
"timestamp": "",
"source": "github",
"line_count": 190,
"max_line_length": 125,
"avg_line_length": 29.026315789473685,
"alnum_prop": 0.6054397098821396,
"repo_name": "marcusramberg/dotfiles",
"id": "94115ae3b51fb6f200bb81c5f42a8e279f4ef765",
"size": "6186",
"binary": false,
"copies": "60",
"ref": "refs/heads/main",
"path": "bin/.venv-ansible-venv/lib/python2.6/site-packages/ansible/modules/core/files/xattr.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "5939"
},
{
"name": "CSS",
"bytes": "4704"
},
{
"name": "Emacs Lisp",
"bytes": "66056"
},
{
"name": "JavaScript",
"bytes": "11846"
},
{
"name": "Jinja",
"bytes": "285"
},
{
"name": "Lua",
"bytes": "136578"
},
{
"name": "Nix",
"bytes": "9136"
},
{
"name": "Perl",
"bytes": "8914"
},
{
"name": "PowerShell",
"bytes": "51840"
},
{
"name": "Python",
"bytes": "9699218"
},
{
"name": "Ruby",
"bytes": "24218"
},
{
"name": "Shell",
"bytes": "416759"
},
{
"name": "Vim Script",
"bytes": "4033"
}
],
"symlink_target": ""
}
|
import sys
from PyQt4 import QtGui, QtCore, Qt
import math
class QtGauge(QtGui.QWidget):
value = 1
def __init__(self):
super(QtGauge, self).__init__()
self.initUI()
def setValue(self,value):
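        # value is a fraction in [0, 1]; paintEvent maps it onto the gauge's
        # 270-degree sweep (drawPie spans -270 * value degrees).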
self.value = value
def getValue(self):
return self.value
def initUI(self):
hbox = QtGui.QHBoxLayout(self)
lbl = QtGui.QLabel(self)
hbox.addWidget(lbl)
self.setLayout(hbox)
self.setGeometry(0, 0,600,600)
self.move(300, 200)
        self.setWindowTitle('Dial Gauge')
self.show()
def paintEvent(self, e):
painter = QtGui.QPainter()
painter.begin(self)
dial = QtGui.QPixmap("bg.png")
#painter.drawPixmap(50, 50, 600, 600, dial)
painter.setRenderHint(painter.Antialiasing)
rect = e.rect()
gauge_rect = QtCore.QRect(rect)
size = gauge_rect.size()
pos = gauge_rect.center()
gauge_rect.moveCenter( QtCore.QPoint(pos.x()-size.width(), pos.y()-size.height()) )
gauge_rect.setSize(size*.9)
gauge_rect.moveCenter(pos)
refill_rect = QtCore.QRect(gauge_rect)
size = refill_rect.size()
pos = refill_rect.center()
refill_rect.moveCenter( QtCore.QPoint(pos.x()-size.width(), pos.y()-size.height()) )
# smaller than .9 == thicker gauge
refill_rect.setSize(size*.9)
refill_rect.moveCenter(pos)
painter.setPen(QtCore.Qt.NoPen)
painter.drawPixmap(rect, dial)
painter.save()
grad = QtGui.QConicalGradient(QtCore.QPointF(gauge_rect.center()), 270.0)
grad.setColorAt(.75, QtCore.Qt.green)
grad.setColorAt(.5, QtCore.Qt.yellow)
grad.setColorAt(.1, QtCore.Qt.red)
painter.setBrush(grad)
#painter.drawPie(gauge_rect, 225.0*16, self._value*16)
painter.drawPie(gauge_rect, 225.0*16, -270*self.value*16)
painter.restore()
painter.setBrush(QtGui.QBrush(dial.scaled(rect.size())))
painter.drawEllipse(refill_rect)
super(QtGauge,self).paintEvent(e)
painter.end()
def main():
app = QtGui.QApplication(sys.argv)
ex = QtGauge()
ex.setValue(0.6)
print ex.getValue()
sys.exit(app.exec_())
if __name__ == '__main__':
main()
|
{
"content_hash": "f506473112273c568ac82eccacd36ffb",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 92,
"avg_line_length": 27.46511627906977,
"alnum_prop": 0.5829805249788315,
"repo_name": "3WiseMen/python",
"id": "eb26f8c06bdfb0e1c6be6890318f8bf112866e3b",
"size": "2362",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "14. PyQT/ProgressBar2.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "5638580"
}
],
"symlink_target": ""
}
|
import os
import shutil
import hashlib
import json
import stat
import six
from git.repo import Repo
from lockfile import LockFile
from st2actions.runners.pythonrunner import Action
from st2common.util.green import shell
ALL_PACKS = '*'
PACK_REPO_ROOT = 'packs'
MANIFEST_FILE = 'pack.yaml'
CONFIG_FILE = 'config.yaml'
GITINFO_FILE = '.gitinfo'
PACK_RESERVE_CHARACTER = '.'
STACKSTORM_CONTRIB_REPOS = [
'st2contrib',
'st2incubator'
]
#####
# !!!!!!!!!!!!!!
# !!! README !!!
# !!!!!!!!!!!!!!
#
# This NEEDS a rewrite. Too many features and far too many assumptions
# to keep this implementation straight any longer. If you only want to read
# code, do so at your own peril.
#
# If you are here to fix a bug or add a feature answer these questions -
# 1. Am I fixing a broken feature?
# 2. Is this the only module in which to fix the bug?
# 3. Am I sure this is a bug fix and not a feature?
#
# Only if you can emphatically answer 'YES' to all of the above questions should
# you touch this file. Else, be warned: you might lose a part of your soul or sanity.
#####
PACK_GROUP_CFG_KEY = 'pack_group'
class DownloadGitRepoAction(Action):
def __init__(self, config=None):
super(DownloadGitRepoAction, self).__init__(config=config)
self._subtree = None
self._repo_url = None
def run(self, packs, repo_url, abs_repo_base, verifyssl=True, branch='master', subtree=False):
cached_repo_url, cached_branch, cached_subtree = self._lookup_cached_gitinfo(
abs_repo_base, packs)
if not repo_url:
repo_url = cached_repo_url
if not branch:
branch = cached_branch
        # Making the assumption that if no repo_url change was required,
        # the subtree nature should be inferred from the cached value.
if repo_url == cached_repo_url:
subtree = cached_subtree
self._subtree = self._eval_subtree(repo_url, subtree)
self._repo_url = self._eval_repo_url(repo_url)
repo_name = self._eval_repo_name(self._repo_url)
lock_name = hashlib.md5(repo_name).hexdigest() + '.lock'
with LockFile('/tmp/%s' % (lock_name)):
abs_local_path = self._clone_repo(repo_url=self._repo_url, verifyssl=verifyssl,
branch=branch)
try:
if self._subtree:
                    # st2-contrib style repos have a top-level 'packs' folder
                    # that actually contains the packs.
pack_abs_local_path = os.path.join(abs_local_path, PACK_REPO_ROOT)
# resolve ALL_PACK here to avoid wild-cards
if ALL_PACKS in packs:
packs = os.listdir(pack_abs_local_path)
else:
pack_abs_local_path = abs_local_path
self._tag_pack(pack_abs_local_path, packs, self._subtree)
result = self._move_packs(abs_repo_base, packs, pack_abs_local_path, self._subtree)
finally:
self._cleanup_repo(abs_local_path)
return self._validate_result(result=result, packs=packs, repo_url=self._repo_url)
@staticmethod
def _clone_repo(repo_url, verifyssl=True, branch='master'):
user_home = os.path.expanduser('~')
# Assuming git url is of form git@github.com:user/git-repo.git
repo_name = DownloadGitRepoAction._eval_repo_name(repo_url)
abs_local_path = os.path.join(user_home, repo_name)
        # Disable SSL cert checking if explicitly asked
if not verifyssl:
os.environ['GIT_SSL_NO_VERIFY'] = 'true'
# Shallow clone the repo to avoid getting all the metadata. We only need HEAD of a
# specific branch so save some download time.
Repo.clone_from(repo_url, abs_local_path, branch=branch, depth=1)
return abs_local_path
def _move_packs(self, abs_repo_base, packs, abs_local_path, subtree):
result = {}
for pack in packs:
if subtree:
abs_pack_temp_location = os.path.join(abs_local_path, pack)
else:
abs_pack_temp_location = abs_local_path
desired, message = DownloadGitRepoAction._is_desired_pack(abs_pack_temp_location, pack)
if desired:
to = abs_repo_base
dest_pack_path = os.path.join(abs_repo_base, pack)
if os.path.exists(dest_pack_path):
self.logger.debug('Removing existing pack %s in %s to replace.', pack,
dest_pack_path)
# Ensure to preserve any existing configuration
old_config_file = os.path.join(dest_pack_path, CONFIG_FILE)
new_config_file = os.path.join(abs_pack_temp_location, CONFIG_FILE)
if os.path.isfile(old_config_file):
shutil.move(old_config_file, new_config_file)
shutil.rmtree(dest_pack_path)
self.logger.debug('Moving pack from %s to %s.', abs_pack_temp_location, to)
shutil.move(abs_pack_temp_location, to)
# post move fix all permissions.
self._apply_pack_permissions(pack_path=dest_pack_path)
message = 'Success.'
elif message:
message = 'Failure : %s' % message
result[pack] = (desired, message)
return result
def _apply_pack_permissions(self, pack_path):
"""
        Will recursively apply permission 775 to the pack and its contents.
"""
        # 1. switch owner group to the configured group
pack_group = self.config.get(PACK_GROUP_CFG_KEY, None)
if pack_group:
shell.run_command(['sudo', 'chgrp', '-R', pack_group, pack_path])
# 2. Setup the right permissions and group ownership
        # This mask is the same as mode 0775
mode = stat.S_IRWXU | stat.S_IRWXG | stat.S_IROTH | stat.S_IXOTH
os.chmod(pack_path, mode)
# Yuck! Since os.chmod does not support chmod -R walk manually.
for root, dirs, files in os.walk(pack_path):
for d in dirs:
os.chmod(os.path.join(root, d), mode)
for f in files:
os.chmod(os.path.join(root, f), mode)
@staticmethod
def _is_desired_pack(abs_pack_path, pack_name):
# path has to exists.
if not os.path.exists(abs_pack_path):
return (False, 'Pack "%s" not found or it\'s missing a "pack.yaml" file.' %
(pack_name))
# Must be a dir.
if not os.path.isdir(abs_pack_path):
return (False, '%s is not a expected directory structure.' % (pack_name))
# should not include reserve characters
if PACK_RESERVE_CHARACTER in pack_name:
return (False, 'Pack name "%s" contains reserve character "%s"' %
(pack_name, PACK_RESERVE_CHARACTER))
# must contain a manifest file. Empty file is ok for now.
if not os.path.isfile(os.path.join(abs_pack_path, MANIFEST_FILE)):
return (False, 'Pack is missing a manifest file (%s).' % (MANIFEST_FILE))
return (True, '')
@staticmethod
def _cleanup_repo(abs_local_path):
# basic lock checking etc?
if os.path.isdir(abs_local_path):
shutil.rmtree(abs_local_path)
@staticmethod
def _validate_result(result, packs, repo_url):
atleast_one_success = False
sanitized_result = {}
for k, v in six.iteritems(result):
atleast_one_success |= v[0]
sanitized_result[k] = v[1]
if not atleast_one_success:
message_list = []
message_list.append('No packs were downloaded from repository "%s".\n' % (repo_url))
message_list.append('Errors:')
for pack, value in result.items():
success, error = value
if success:
continue
message_list.append(' - %s: %s' % (pack, error))
message = '\n'.join(message_list)
raise Exception(message)
return sanitized_result
@staticmethod
def _eval_subtree(repo_url, subtree):
match = False
for stackstorm_repo_name in STACKSTORM_CONTRIB_REPOS:
if stackstorm_repo_name in repo_url:
match = True
break
return subtree | match
@staticmethod
def _eval_repo_url(repo_url):
"""Allow passing short GitHub style URLs"""
if not repo_url:
            raise Exception('No valid repo_url was provided or could be inferred.')
has_git_extension = repo_url.endswith('.git')
if len(repo_url.split('/')) == 2 and "git@" not in repo_url:
url = "https://github.com/{}".format(repo_url)
else:
url = repo_url
return url if has_git_extension else "{}.git".format(url)
@staticmethod
def _lookup_cached_gitinfo(abs_repo_base, packs):
"""
This method will try to lookup the repo_url from the first pack in the list
of packs. It works under some strict assumptions -
1. repo_url was not originally specified
2. all packs from from same repo
3. gitinfo was originally added by this action
"""
repo_url = None
branch = None
subtree = False
if len(packs) < 1:
raise Exception('No packs specified.')
gitinfo_location = os.path.join(abs_repo_base, packs[0], GITINFO_FILE)
if not os.path.exists(gitinfo_location):
return repo_url, branch, subtree
with open(gitinfo_location, 'r') as gitinfo_fp:
gitinfo = json.load(gitinfo_fp)
repo_url = gitinfo.get('repo_url', None)
branch = gitinfo.get('branch', None)
subtree = gitinfo.get('subtree', False)
return repo_url, branch, subtree
@staticmethod
def _eval_repo_name(repo_url):
"""
Evaluate the name of the repo.
https://github.com/StackStorm/st2contrib.git -> st2contrib
https://github.com/StackStorm/st2contrib -> st2contrib
git@github.com:StackStorm/st2contrib.git -> st2contrib
git@github.com:StackStorm/st2contrib -> st2contrib
"""
last_forward_slash = repo_url.rfind('/')
next_dot = repo_url.find('.', last_forward_slash)
# If dot does not follow last_forward_slash return till the end
if next_dot < last_forward_slash:
return repo_url[last_forward_slash + 1:]
return repo_url[last_forward_slash + 1:next_dot]
def _tag_pack(self, pack_root, packs, subtree):
"""Add git information to pack directory for retrieval later"""
repo = Repo(pack_root)
payload = {
'repo_url': repo.remotes[0].url,
'branch': repo.active_branch.name,
'ref': repo.head.commit.hexsha,
'subtree': subtree
}
for pack in packs:
pack_dir = os.path.join(pack_root, pack) if subtree else pack_root
if not os.path.exists(pack_dir):
self.logger.warn('%s is missing. Expected location "%s".', pack, pack_dir)
continue
info_file = os.path.join(pack_dir, GITINFO_FILE)
with open(info_file, "w") as gitinfo:
gitinfo.write(json.dumps(payload))
|
{
"content_hash": "0d21cd30761c0d19334fcc9c4e801f94",
"timestamp": "",
"source": "github",
"line_count": 298,
"max_line_length": 99,
"avg_line_length": 38.32214765100671,
"alnum_prop": 0.5854640980735552,
"repo_name": "alfasin/st2",
"id": "a8a6f33312cf391f132927432ae2633ca2655798",
"size": "12200",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "contrib/packs/actions/pack_mgmt/download.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "198"
},
{
"name": "Makefile",
"bytes": "36110"
},
{
"name": "PowerShell",
"bytes": "299"
},
{
"name": "Python",
"bytes": "2907491"
},
{
"name": "Shell",
"bytes": "16363"
},
{
"name": "Slash",
"bytes": "677"
}
],
"symlink_target": ""
}
|
import logging
LOG = logging.getLogger(__name__)
try:
from spark.job_server_api import get_api as get_spark_api
except ImportError, e:
LOG.exception('Spark is not enabled')
from notebook.connectors.base import Api
class SparkBatchApi(Api):
def execute(self, notebook, snippet):
api = get_spark_api(self.user)
if snippet['type'] == 'jar':
properties = {
'file': snippet['properties'].get('app_jar'),
'className': snippet['properties'].get('class'),
'args': snippet['properties'].get('arguments'),
}
elif snippet['type'] == 'py':
properties = {
'file': snippet['properties'].get('py_file'),
'args': snippet['properties'].get('argument'),
}
else:
properties = {
'file': snippet['properties'].get('app_jar'),
'className': snippet['properties'].get('class'),
'args': snippet['properties'].get('arguments'),
'pyFiles': snippet['properties'].get('py_file'),
'files': snippet['properties'].get('files'),
# driverMemory
# driverCores
# executorMemory
# executorCores
# archives
}
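        # submit_batch is expected to return a JSON dict containing at least
        # the batch 'id', reused by the status/log/close calls below.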
response = api.submit_batch(properties)
return {
'id': response['id'],
'has_result_set': True,
'properties': []
}
def check_status(self, notebook, snippet):
api = get_spark_api(self.user)
state = api.get_batch_status(snippet['result']['handle']['id'])
return {
'status': state,
}
def get_log(self, notebook, snippet, startFrom=0, size=None):
api = get_spark_api(self.user)
return api.get_batch_log(snippet['result']['handle']['id'], startFrom=startFrom, size=size)
def close_statement(self, snippet):
api = get_spark_api(self.user)
session_id = snippet['result']['handle']['id']
if session_id is not None:
api.close_batch(session_id)
return {
'session': session_id,
'status': 0
}
else:
return {'status': -1} # skipped
def cancel(self, notebook, snippet):
# Batch jobs do not support interruption, so close statement instead.
return self.close_statement(snippet)
def progress(self, snippet, logs):
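        # The batch API exposes no granular progress here, so a fixed
        # midpoint value is reported while the job runs.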
return 50
|
{
"content_hash": "e45c7301a28de98c31353488628e2864",
"timestamp": "",
"source": "github",
"line_count": 82,
"max_line_length": 95,
"avg_line_length": 27.951219512195124,
"alnum_prop": 0.5815881326352531,
"repo_name": "xq262144/hue",
"id": "01ed31cee89667623efb9f0cc58047d519dbd5b3",
"size": "3084",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "desktop/libs/notebook/src/notebook/connectors/spark_batch.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "3096"
},
{
"name": "Batchfile",
"bytes": "41710"
},
{
"name": "C",
"bytes": "2692409"
},
{
"name": "C++",
"bytes": "199897"
},
{
"name": "CSS",
"bytes": "521820"
},
{
"name": "Emacs Lisp",
"bytes": "11704"
},
{
"name": "Genshi",
"bytes": "946"
},
{
"name": "Go",
"bytes": "6671"
},
{
"name": "Groff",
"bytes": "16669"
},
{
"name": "HTML",
"bytes": "24188238"
},
{
"name": "Java",
"bytes": "575404"
},
{
"name": "JavaScript",
"bytes": "4987047"
},
{
"name": "M4",
"bytes": "1377"
},
{
"name": "Makefile",
"bytes": "144341"
},
{
"name": "Mako",
"bytes": "3052598"
},
{
"name": "Myghty",
"bytes": "936"
},
{
"name": "PLSQL",
"bytes": "13774"
},
{
"name": "PLpgSQL",
"bytes": "3646"
},
{
"name": "Perl",
"bytes": "3499"
},
{
"name": "PigLatin",
"bytes": "328"
},
{
"name": "Python",
"bytes": "44291483"
},
{
"name": "Shell",
"bytes": "44147"
},
{
"name": "Smarty",
"bytes": "130"
},
{
"name": "Thrift",
"bytes": "278712"
},
{
"name": "Visual Basic",
"bytes": "2884"
},
{
"name": "XSLT",
"bytes": "518588"
}
],
"symlink_target": ""
}
|
from AlgorithmImports import *
### <summary>
### This algorithm shows how to set a custom security initializer.
### A security initializer is run immediately after a new security object
### has been created and can be used to set security models and other settings,
### such as data normalization mode
### </summary>
### <meta name="tag" content="using data" />
### <meta name="tag" content="securities and portfolio" />
### <meta name="tag" content="trading and orders" />
class CustomSecurityInitializerAlgorithm(QCAlgorithm):
def Initialize(self):
# set our initializer to our custom type
self.SetBrokerageModel(BrokerageName.InteractiveBrokersBrokerage)
func_security_seeder = FuncSecuritySeeder(Func[Security, BaseData](self.custom_seed_function))
self.SetSecurityInitializer(CustomSecurityInitializer(self.BrokerageModel, func_security_seeder, DataNormalizationMode.Raw))
self.SetStartDate(2013,10,1)
self.SetEndDate(2013,11,1)
self.AddEquity("SPY", Resolution.Hour)
def OnData(self, data):
if not self.Portfolio.Invested:
self.SetHoldings("SPY", 1)
def custom_seed_function(self, security):
resolution = Resolution.Hour
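        # History returns a pandas DataFrame; request the single most
        # recent bar at the chosen resolution.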
df = self.History(security.Symbol, 1, resolution)
if df.empty:
return None
last_bar = df.unstack(level=0).iloc[-1]
date_time = last_bar.name.to_pydatetime()
open = last_bar.open.values[0]
high = last_bar.high.values[0]
low = last_bar.low.values[0]
close = last_bar.close.values[0]
volume = last_bar.volume.values[0]
return TradeBar(date_time, security.Symbol, open, high, low, close, volume, Extensions.ToTimeSpan(resolution))
class CustomSecurityInitializer(BrokerageModelSecurityInitializer):
'''Our custom initializer that will set the data normalization mode.
We sub-class the BrokerageModelSecurityInitializer so we can also
take advantage of the default model/leverage setting behaviors'''
def __init__(self, brokerageModel, securitySeeder, dataNormalizationMode):
'''Initializes a new instance of the CustomSecurityInitializer class with the specified normalization mode
brokerageModel -- The brokerage model used to get fill/fee/slippage/settlement models
securitySeeder -- The security seeder to be used
dataNormalizationMode -- The desired data normalization mode'''
self.base = BrokerageModelSecurityInitializer(brokerageModel, securitySeeder)
self.dataNormalizationMode = dataNormalizationMode
def Initialize(self, security):
'''Initializes the specified security by setting up the models
        security -- The security to be initialized'''
# first call the default implementation
self.base.Initialize(security)
# now apply our data normalization mode
security.SetDataNormalizationMode(self.dataNormalizationMode)
|
{
"content_hash": "e7ca1647675f202e22f779345d74d54e",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 132,
"avg_line_length": 43.628571428571426,
"alnum_prop": 0.7066142763588736,
"repo_name": "jameschch/Lean",
"id": "366972507cc0540db954100071cf04e59c15ed35",
"size": "3742",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "Algorithm.Python/CustomSecurityInitializerAlgorithm.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "2540"
},
{
"name": "C#",
"bytes": "15402085"
},
{
"name": "Dockerfile",
"bytes": "1226"
},
{
"name": "F#",
"bytes": "1723"
},
{
"name": "HTML",
"bytes": "2607907"
},
{
"name": "Java",
"bytes": "852"
},
{
"name": "Jupyter Notebook",
"bytes": "16348"
},
{
"name": "Python",
"bytes": "654580"
},
{
"name": "Shell",
"bytes": "2307"
},
{
"name": "Visual Basic",
"bytes": "2448"
}
],
"symlink_target": ""
}
|
class shapes_index():
SPHERE = 0
ELLIPSOID = 1
BOX = 2
CYLINDER = 3
CONVEXHULL = 4
TRIANGLEMESH = 5
BARREL = 6
CAPSULE = 7
CONE = 8
ROUNDEDBOX = 9
ROUNDEDCYL = 10
ROUNDEDCONE = 11
BEZIER = 12
def import_shapes(filepath):
shapes_data = []
with open(filepath, 'r') as FH:
lines = FH.readlines()
# Read number of bodies, visual assets, joints, and TSDA elements (first line)
line = lines[0].strip().split(',')
num_bodies = int(line[0])
num_assets = int(line[1])
##print ("File: ", filepath, " Num bodies: ", num_bodies, " Num assets: ", num_assets)
# Read only visual assets
for il in range(1 + num_bodies, 1 + num_bodies + num_assets):
line = lines[il].strip().split(',')
# Extract information common to all assets
body_id, active, x, y, z, e0, e1, e2, e3, r, g, b, shape_type = line[:13:]
data = [int(shape_type), int(body_id), active, float(x), float(y), float(z), float(e0), float(e1), float(e2), float(e3), float(r), float(g), float(b)]
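        # Assumed per-asset record layout (comma separated, matching the
        # unpacking above):
        #   body_id, active, x, y, z, e0, e1, e2, e3, r, g, b, shape_type, <shape-specific fields>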
# Read asset-specific data
if int(shape_type) == shapes_index.TRIANGLEMESH:
try:
path = line[13]
data.extend([path.strip('"')])
            except Exception:
print("Record: ", lines[il])
raise Exception("Failed while trying to parse trimesh data.")
elif int(shape_type) == shapes_index.SPHERE:
try:
rad = line[13]
data.extend([float(rad)])
            except Exception:
print("Record: ", lines[il])
raise Exception("Failed while trying to parse sphere data.")
elif int(shape_type) == shapes_index.BOX:
try:
size_x, size_y, size_z = line[13:16:]
data.extend([float(size_x), float(size_y), float(size_z)])
            except Exception:
print("Record: ", lines[il])
raise Exception("Failed while trying to parse box data.")
elif int(shape_type) == shapes_index.ELLIPSOID:
try:
size_x, size_y, size_z = line[13:16:]
data.extend([float(size_x), float(size_y), float(size_z)])
            except Exception:
print("Record: ", lines[il])
raise Exception("Failed while trying to parse ellipsoid data.")
elif int(shape_type) == shapes_index.CYLINDER:
try:
rad, p1x, p1y, p1z, p2x, p2y, p2z = line[13:20:]
data.extend([float(rad), float(p1x), float(p1y), float(p1z), float(p2x), float(p2y), float(p2z)])
            except Exception:
                print("Record: ", lines[il])
                raise Exception("Failed while trying to parse cylinder data.")
else:
print('Unsupported shape type')
continue
# Append to list of shape data
shapes_data.append(data)
return shapes_data
if __name__ == "__main__":
data = import_shapes('data1.dat')
print(data)
|
{
"content_hash": "fd3ed02d1a4c07e68ee5e1532acfbd09",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 163,
"avg_line_length": 38.174418604651166,
"alnum_prop": 0.4907097167225099,
"repo_name": "rserban/chrono",
"id": "e3ce65790506fb57725a725af9e1be0d5270e80b",
"size": "3283",
"binary": false,
"copies": "3",
"ref": "refs/heads/main",
"path": "src/demos/tools/blender_py/reader.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "7528"
},
{
"name": "C",
"bytes": "2409870"
},
{
"name": "C++",
"bytes": "30022151"
},
{
"name": "CMake",
"bytes": "735935"
},
{
"name": "CSS",
"bytes": "170326"
},
{
"name": "Cuda",
"bytes": "1232062"
},
{
"name": "Dockerfile",
"bytes": "3279"
},
{
"name": "Forth",
"bytes": "169197"
},
{
"name": "GLSL",
"bytes": "4925"
},
{
"name": "HTML",
"bytes": "7922"
},
{
"name": "Inno Setup",
"bytes": "24125"
},
{
"name": "JavaScript",
"bytes": "4731"
},
{
"name": "Lex",
"bytes": "3433"
},
{
"name": "Lua",
"bytes": "651"
},
{
"name": "MATLAB",
"bytes": "35942"
},
{
"name": "POV-Ray SDL",
"bytes": "44795"
},
{
"name": "PowerShell",
"bytes": "115"
},
{
"name": "Python",
"bytes": "833451"
},
{
"name": "SWIG",
"bytes": "316928"
},
{
"name": "Shell",
"bytes": "4782"
}
],
"symlink_target": ""
}
|
"""
Nearly identical to xrange.py, by Dan Crosta, from
https://github.com/dcrosta/xrange.git
This is included here in the ``future`` package rather than pointed to as
a dependency because there is no package for ``xrange`` on PyPI. It is
also tweaked to appear like a regular Python 3 ``range`` object rather
than a Python 2 xrange.
From Dan Crosta's README:
"A pure-Python implementation of Python 2.7's xrange built-in, with
some features backported from the Python 3.x range built-in (which
replaced xrange) in that version."
Read more at
https://late.am/post/2012/06/18/what-the-heck-is-an-xrange
"""
from math import ceil
try:
    from collections.abc import Sequence, Iterator
except ImportError:  # Python 2
    from collections import Sequence, Iterator
from future.utils import PY3
class newrange(Sequence):
"""
Pure-Python backport of Python 3's range object. See `the CPython
documentation for details:
<http://docs.python.org/py3k/library/functions.html#range>`_
"""
def __init__(self, *args):
if len(args) == 1:
start, stop, step = 0, args[0], 1
elif len(args) == 2:
start, stop, step = args[0], args[1], 1
elif len(args) == 3:
start, stop, step = args
else:
raise TypeError('range() requires 1-3 int arguments')
try:
start, stop, step = int(start), int(stop), int(step)
except ValueError:
raise TypeError('an integer is required')
if step == 0:
raise ValueError('range() arg 3 must not be zero')
elif step < 0:
stop = min(stop, start)
else:
stop = max(stop, start)
self._start = start
self._stop = stop
self._step = step
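        # Integer arithmetic for ceil((stop - start) / step), i.e. the
        # number of values this range yields.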
self._len = (stop - start) // step + bool((stop - start) % step)
def __repr__(self):
if self._step == 1:
return 'range(%d, %d)' % (self._start, self._stop)
return 'range(%d, %d, %d)' % (self._start, self._stop, self._step)
def __eq__(self, other):
return isinstance(other, newrange) and \
self._start == other._start and \
self._stop == other._stop and \
self._step == other._step
def __len__(self):
return self._len
def index(self, value):
"""Return the 0-based position of integer `value` in
the sequence this range represents."""
diff = value - self._start
quotient, remainder = divmod(diff, self._step)
if remainder == 0 and 0 <= quotient < self._len:
return abs(quotient)
raise ValueError('%r is not in range' % value)
def count(self, value):
"""Return the number of ocurrences of integer `value`
in the sequence this range represents."""
# a value can occur exactly zero or one times
return int(value in self)
def __contains__(self, value):
"""Return ``True`` if the integer `value` occurs in
the sequence this range represents."""
try:
self.index(value)
return True
except ValueError:
return False
def __reversed__(self):
"""Return a range which represents a sequence whose
contents are the same as the sequence this range
represents, but in the opposite order."""
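        # e.g. reversed(newrange(0, 10, 2)) -> newrange(8, -1, -2)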
        sign = self._step // abs(self._step)
last = self._start + ((self._len - 1) * self._step)
return newrange(last, self._start - sign, -1 * self._step)
def __getitem__(self, index):
"""Return the element at position ``index`` in the sequence
this range represents, or raise :class:`IndexError` if the
position is out of range."""
if isinstance(index, slice):
return self.__getitem_slice(index)
if index < 0:
# negative indexes access from the end
index = self._len + index
if index < 0 or index >= self._len:
raise IndexError('range object index out of range')
return self._start + index * self._step
    def __getitem_slice(self, slce):
        """Return a range which represents the requested slce
        of the sequence represented by this range.
        """
        # Normalize the slice against our length (slice.indices also
        # rejects a zero step), then scale the resulting positions back
        # into the values this range represents. The previous version
        # mixed up positions and values for nonzero starts.
        scaled_indices = (self._step * n for n in slce.indices(self._len))
        start_offset, stop_offset, new_step = scaled_indices
        return newrange(self._start + start_offset,
                        self._start + stop_offset,
                        new_step)
def __iter__(self):
"""Return an iterator which enumerates the elements of the
sequence this range represents."""
return rangeiterator(self)
class rangeiterator(Iterator):
"""An iterator for a :class:`range`.
"""
def __init__(self, rangeobj):
self._range = rangeobj
        # Initialize the "last outputted value" to the value
# just before the first value; this simplifies next()
self._last = self._range._start - self._range._step
self._count = 0
def __iter__(self):
"""An iterator is already an iterator, so return ``self``.
"""
return self
    def __next__(self):
        """Return the next element in the sequence represented
        by the range we are iterating, or raise StopIteration
        if we have passed the end of the sequence."""
        self._last += self._range._step
        self._count += 1
        if self._count > self._range._len:
            raise StopIteration()
        return self._last
    next = __next__  # Python 2 iterator protocol uses next()
__all__ = ['newrange']
|
{
"content_hash": "6c064b536d16483ae452d3e03fb8e697",
"timestamp": "",
"source": "github",
"line_count": 176,
"max_line_length": 74,
"avg_line_length": 32.77840909090909,
"alnum_prop": 0.5768764083896689,
"repo_name": "krischer/python-future",
"id": "2438d20536355f1b0199fcd99aaa5c9f1677e6be",
"size": "5769",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/future/types/newrange.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2917437"
},
{
"name": "Shell",
"bytes": "539"
}
],
"symlink_target": ""
}
|
import binascii
from sqlalchemy import event
from sqlalchemy import Table
from sqlalchemy.sql import expression
from sqlalchemy.sql import func
from sqlalchemy.sql import select
from sqlalchemy.sql import type_coerce
from sqlalchemy.types import UserDefinedType
# Python datatypes
class GisElement(object):
"""Represents a geometry value."""
def __str__(self):
return self.desc
def __repr__(self):
return "<%s at 0x%x; %r>" % (
self.__class__.__name__,
id(self),
self.desc,
)
class BinaryGisElement(GisElement, expression.Function):
"""Represents a Geometry value expressed as binary."""
def __init__(self, data):
self.data = data
expression.Function.__init__(
self, "ST_GeomFromEWKB", data, type_=Geometry(coerce_="binary")
)
@property
def desc(self):
return self.as_hex
@property
def as_hex(self):
return binascii.hexlify(self.data)
class TextualGisElement(GisElement, expression.Function):
"""Represents a Geometry value expressed as text."""
def __init__(self, desc, srid=-1):
self.desc = desc
expression.Function.__init__(
self, "ST_GeomFromText", desc, srid, type_=Geometry
)
# SQL datatypes.
class Geometry(UserDefinedType):
"""Base PostGIS Geometry column type."""
name = "GEOMETRY"
def __init__(self, dimension=None, srid=-1, coerce_="text"):
self.dimension = dimension
self.srid = srid
self.coerce = coerce_
class comparator_factory(UserDefinedType.Comparator):
"""Define custom operations for geometry types."""
# override the __eq__() operator
def __eq__(self, other):
return self.op("~=")(other)
# add a custom operator
def intersects(self, other):
return self.op("&&")(other)
# any number of GIS operators can be overridden/added here
# using the techniques above.
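        # Example (mirroring the demo under __main__ below):
        #   select([road_table]).where(
        #       road_table.c.road_geom.intersects(other_geom))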
def _coerce_compared_value(self, op, value):
return self
def get_col_spec(self):
return self.name
def bind_expression(self, bindvalue):
if self.coerce == "text":
return TextualGisElement(bindvalue)
elif self.coerce == "binary":
return BinaryGisElement(bindvalue)
else:
assert False
def column_expression(self, col):
if self.coerce == "text":
return func.ST_AsText(col, type_=self)
elif self.coerce == "binary":
return func.ST_AsBinary(col, type_=self)
else:
assert False
def bind_processor(self, dialect):
def process(value):
if isinstance(value, GisElement):
return value.desc
else:
return value
return process
def result_processor(self, dialect, coltype):
if self.coerce == "text":
fac = TextualGisElement
elif self.coerce == "binary":
fac = BinaryGisElement
else:
assert False
def process(value):
if value is not None:
return fac(value)
else:
return value
return process
def adapt(self, impltype):
return impltype(
dimension=self.dimension, srid=self.srid, coerce_=self.coerce
)
# other datatypes can be added as needed.
class Point(Geometry):
name = "POINT"
class Curve(Geometry):
name = "CURVE"
class LineString(Curve):
name = "LINESTRING"
# ... etc.
# DDL integration
# PostGIS historically has required AddGeometryColumn/DropGeometryColumn
# and other management methods in order to create PostGIS columns. Newer
# versions don't appear to require these special steps anymore. However,
# here we illustrate how to set up these features in any case.
def setup_ddl_events():
@event.listens_for(Table, "before_create")
def before_create(target, connection, **kw):
dispatch("before-create", target, connection)
@event.listens_for(Table, "after_create")
def after_create(target, connection, **kw):
dispatch("after-create", target, connection)
@event.listens_for(Table, "before_drop")
def before_drop(target, connection, **kw):
dispatch("before-drop", target, connection)
@event.listens_for(Table, "after_drop")
def after_drop(target, connection, **kw):
dispatch("after-drop", target, connection)
def dispatch(event, table, bind):
if event in ("before-create", "before-drop"):
regular_cols = [
c for c in table.c if not isinstance(c.type, Geometry)
]
gis_cols = set(table.c).difference(regular_cols)
table.info["_saved_columns"] = table.c
# temporarily patch a set of columns not including the
# Geometry columns
table.columns = expression.ColumnCollection(*regular_cols)
if event == "before-drop":
for c in gis_cols:
bind.execute(
select(
[
func.DropGeometryColumn(
"public", table.name, c.name
)
],
autocommit=True,
)
)
elif event == "after-create":
table.columns = table.info.pop("_saved_columns")
for c in table.c:
if isinstance(c.type, Geometry):
bind.execute(
select(
[
func.AddGeometryColumn(
table.name,
c.name,
c.type.srid,
c.type.name,
c.type.dimension,
)
],
autocommit=True,
)
)
elif event == "after-drop":
table.columns = table.info.pop("_saved_columns")
setup_ddl_events()
# illustrate usage
if __name__ == "__main__":
from sqlalchemy import (
create_engine,
MetaData,
Column,
Integer,
String,
func,
select,
)
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
engine = create_engine(
"postgresql://scott:tiger@localhost/test", echo=True
)
metadata = MetaData(engine)
Base = declarative_base(metadata=metadata)
class Road(Base):
__tablename__ = "roads"
road_id = Column(Integer, primary_key=True)
road_name = Column(String)
road_geom = Column(Geometry(2))
metadata.drop_all()
metadata.create_all()
session = sessionmaker(bind=engine)()
# Add objects. We can use strings...
session.add_all(
[
Road(
road_name="Jeff Rd",
road_geom="LINESTRING(191232 243118,191108 243242)",
),
Road(
road_name="Geordie Rd",
road_geom="LINESTRING(189141 244158,189265 244817)",
),
Road(
road_name="Paul St",
road_geom="LINESTRING(192783 228138,192612 229814)",
),
Road(
road_name="Graeme Ave",
road_geom="LINESTRING(189412 252431,189631 259122)",
),
Road(
road_name="Phil Tce",
road_geom="LINESTRING(190131 224148,190871 228134)",
),
]
)
# or use an explicit TextualGisElement
# (similar to saying func.GeomFromText())
r = Road(
road_name="Dave Cres",
road_geom=TextualGisElement(
"LINESTRING(198231 263418,198213 268322)", -1
),
)
session.add(r)
# pre flush, the TextualGisElement represents the string we sent.
assert str(r.road_geom) == "LINESTRING(198231 263418,198213 268322)"
session.commit()
# after flush and/or commit, all the TextualGisElements
# become PersistentGisElements.
assert str(r.road_geom) == "LINESTRING(198231 263418,198213 268322)"
r1 = session.query(Road).filter(Road.road_name == "Graeme Ave").one()
# illustrate the overridden __eq__() operator.
# strings come in as TextualGisElements
r2 = (
session.query(Road)
.filter(Road.road_geom == "LINESTRING(189412 252431,189631 259122)")
.one()
)
r3 = session.query(Road).filter(Road.road_geom == r1.road_geom).one()
assert r1 is r2 is r3
# core usage just fine:
road_table = Road.__table__
stmt = select([road_table]).where(
road_table.c.road_geom.intersects(r1.road_geom)
)
print(session.execute(stmt).fetchall())
# TODO: for some reason the auto-generated labels have the internal
# replacement strings exposed, even though PG doesn't complain
# look up the hex binary version, using SQLAlchemy casts
as_binary = session.scalar(
select([type_coerce(r.road_geom, Geometry(coerce_="binary"))])
)
assert as_binary.as_hex == (
"01020000000200000000000000b832084100000000"
"e813104100000000283208410000000088601041"
)
# back again, same method !
as_text = session.scalar(
select([type_coerce(as_binary, Geometry(coerce_="text"))])
)
assert as_text.desc == "LINESTRING(198231 263418,198213 268322)"
session.rollback()
metadata.drop_all()
|
{
"content_hash": "5262f458f8a758f95511f1c31f589738",
"timestamp": "",
"source": "github",
"line_count": 347,
"max_line_length": 76,
"avg_line_length": 28.00864553314121,
"alnum_prop": 0.5563329560654389,
"repo_name": "cloudera/hue",
"id": "868d3d055da030ae30173c7a6253b5a29219c850",
"size": "9719",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "desktop/core/ext-py/SQLAlchemy-1.3.17/examples/postgis/postgis.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ABAP",
"bytes": "962"
},
{
"name": "ActionScript",
"bytes": "1133"
},
{
"name": "Ada",
"bytes": "99"
},
{
"name": "Assembly",
"bytes": "2347"
},
{
"name": "AutoHotkey",
"bytes": "720"
},
{
"name": "BASIC",
"bytes": "2884"
},
{
"name": "Batchfile",
"bytes": "143575"
},
{
"name": "C",
"bytes": "5129166"
},
{
"name": "C#",
"bytes": "83"
},
{
"name": "C++",
"bytes": "718011"
},
{
"name": "COBOL",
"bytes": "4"
},
{
"name": "CSS",
"bytes": "680715"
},
{
"name": "Cirru",
"bytes": "520"
},
{
"name": "Clojure",
"bytes": "794"
},
{
"name": "Closure Templates",
"bytes": "1072"
},
{
"name": "CoffeeScript",
"bytes": "403"
},
{
"name": "ColdFusion",
"bytes": "86"
},
{
"name": "Common Lisp",
"bytes": "632"
},
{
"name": "Cython",
"bytes": "1016963"
},
{
"name": "D",
"bytes": "324"
},
{
"name": "Dart",
"bytes": "489"
},
{
"name": "Dockerfile",
"bytes": "13576"
},
{
"name": "EJS",
"bytes": "752"
},
{
"name": "Eiffel",
"bytes": "375"
},
{
"name": "Elixir",
"bytes": "692"
},
{
"name": "Elm",
"bytes": "487"
},
{
"name": "Emacs Lisp",
"bytes": "411907"
},
{
"name": "Erlang",
"bytes": "487"
},
{
"name": "Forth",
"bytes": "979"
},
{
"name": "FreeMarker",
"bytes": "1017"
},
{
"name": "G-code",
"bytes": "521"
},
{
"name": "GAP",
"bytes": "29873"
},
{
"name": "GLSL",
"bytes": "512"
},
{
"name": "Genshi",
"bytes": "946"
},
{
"name": "Gherkin",
"bytes": "699"
},
{
"name": "Go",
"bytes": "641"
},
{
"name": "Groovy",
"bytes": "1080"
},
{
"name": "HTML",
"bytes": "28328425"
},
{
"name": "Haml",
"bytes": "920"
},
{
"name": "Handlebars",
"bytes": "173"
},
{
"name": "Haskell",
"bytes": "512"
},
{
"name": "Haxe",
"bytes": "447"
},
{
"name": "HiveQL",
"bytes": "43"
},
{
"name": "Io",
"bytes": "140"
},
{
"name": "Java",
"bytes": "457398"
},
{
"name": "JavaScript",
"bytes": "39181239"
},
{
"name": "Jinja",
"bytes": "356"
},
{
"name": "Julia",
"bytes": "210"
},
{
"name": "LSL",
"bytes": "2080"
},
{
"name": "Lean",
"bytes": "213"
},
{
"name": "Less",
"bytes": "396102"
},
{
"name": "Lex",
"bytes": "218764"
},
{
"name": "Liquid",
"bytes": "1883"
},
{
"name": "LiveScript",
"bytes": "5747"
},
{
"name": "Lua",
"bytes": "78382"
},
{
"name": "M4",
"bytes": "1751"
},
{
"name": "MATLAB",
"bytes": "203"
},
{
"name": "Makefile",
"bytes": "1025937"
},
{
"name": "Mako",
"bytes": "3644004"
},
{
"name": "Mask",
"bytes": "597"
},
{
"name": "Myghty",
"bytes": "936"
},
{
"name": "Nix",
"bytes": "2212"
},
{
"name": "OCaml",
"bytes": "539"
},
{
"name": "Objective-C",
"bytes": "2672"
},
{
"name": "OpenSCAD",
"bytes": "333"
},
{
"name": "PHP",
"bytes": "662"
},
{
"name": "PLSQL",
"bytes": "29403"
},
{
"name": "PLpgSQL",
"bytes": "6006"
},
{
"name": "Pascal",
"bytes": "84273"
},
{
"name": "Perl",
"bytes": "4327"
},
{
"name": "PigLatin",
"bytes": "371"
},
{
"name": "PowerShell",
"bytes": "6235"
},
{
"name": "Procfile",
"bytes": "47"
},
{
"name": "Pug",
"bytes": "584"
},
{
"name": "Python",
"bytes": "92881549"
},
{
"name": "R",
"bytes": "2445"
},
{
"name": "Roff",
"bytes": "484108"
},
{
"name": "Ruby",
"bytes": "1098"
},
{
"name": "Rust",
"bytes": "495"
},
{
"name": "SCSS",
"bytes": "78508"
},
{
"name": "Sass",
"bytes": "770"
},
{
"name": "Scala",
"bytes": "1541"
},
{
"name": "Scheme",
"bytes": "559"
},
{
"name": "Shell",
"bytes": "249165"
},
{
"name": "Smarty",
"bytes": "130"
},
{
"name": "SourcePawn",
"bytes": "948"
},
{
"name": "Stylus",
"bytes": "682"
},
{
"name": "Tcl",
"bytes": "899"
},
{
"name": "TeX",
"bytes": "165743"
},
{
"name": "Thrift",
"bytes": "341963"
},
{
"name": "Twig",
"bytes": "761"
},
{
"name": "TypeScript",
"bytes": "1241396"
},
{
"name": "VBScript",
"bytes": "938"
},
{
"name": "VHDL",
"bytes": "830"
},
{
"name": "Vala",
"bytes": "485"
},
{
"name": "Verilog",
"bytes": "274"
},
{
"name": "Vim Snippet",
"bytes": "226931"
},
{
"name": "Vue",
"bytes": "350385"
},
{
"name": "XQuery",
"bytes": "114"
},
{
"name": "XSLT",
"bytes": "522199"
},
{
"name": "Yacc",
"bytes": "1070437"
},
{
"name": "jq",
"bytes": "4"
}
],
"symlink_target": ""
}
|
from collections import OrderedDict
import cms
from .subcommands.base import SubcommandsCommand
from .subcommands.check import CheckInstallation
from .subcommands.copy import CopyCommand
from .subcommands.delete_orphaned_plugins import DeleteOrphanedPluginsCommand
from .subcommands.list import ListCommand
from .subcommands.publisher_publish import PublishCommand
from .subcommands.tree import FixTreeCommand
from .subcommands.uninstall import UninstallCommand
class Command(SubcommandsCommand):
command_name = 'cms'
subcommands = OrderedDict((
('check', CheckInstallation),
('copy', CopyCommand),
('delete-orphaned-plugins', DeleteOrphanedPluginsCommand),
('fix-tree', FixTreeCommand),
('list', ListCommand),
('publisher-publish', PublishCommand),
('uninstall', UninstallCommand),
))
missing_args_message = 'one of the available sub commands must be provided'
subcommand_dest = 'cmd'
def get_version(self):
return cms.__version__
def add_arguments(self, parser):
parser.add_argument('--version', action='version', version=self.get_version())
super().add_arguments(parser)
|
{
"content_hash": "6ce9fe298cb9e7de0f84f6979bc65b06",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 86,
"avg_line_length": 33.97142857142857,
"alnum_prop": 0.7266610597140454,
"repo_name": "rsalmaso/django-cms",
"id": "864f657d8e13d85190ee29f18815e27707ec0fcc",
"size": "1189",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "cms/management/commands/cms.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "204223"
},
{
"name": "JavaScript",
"bytes": "1250281"
},
{
"name": "Python",
"bytes": "2386268"
},
{
"name": "SCSS",
"bytes": "137693"
},
{
"name": "Shell",
"bytes": "22511"
}
],
"symlink_target": ""
}
|
"""Test Home Assistant Cast."""
from unittest.mock import patch
from homeassistant.components.cast import home_assistant_cast
from homeassistant.config import async_process_ha_core_config
from tests.common import MockConfigEntry, async_mock_signal
async def test_service_show_view(hass, mock_zeroconf):
"""Test we don't set app id in prod."""
await async_process_ha_core_config(
hass,
{"external_url": "https://example.com"},
)
await home_assistant_cast.async_setup_ha_cast(hass, MockConfigEntry())
calls = async_mock_signal(hass, home_assistant_cast.SIGNAL_HASS_CAST_SHOW_VIEW)
await hass.services.async_call(
"cast",
"show_lovelace_view",
{"entity_id": "media_player.kitchen", "view_path": "mock_path"},
blocking=True,
)
assert len(calls) == 1
controller, entity_id, view_path, url_path = calls[0]
assert controller.hass_url == "https://example.com"
assert controller.client_id is None
# Verify user did not accidentally submit their dev app id
assert controller.supporting_app_id == "A078F6B0"
assert entity_id == "media_player.kitchen"
assert view_path == "mock_path"
assert url_path is None
async def test_service_show_view_dashboard(hass, mock_zeroconf):
"""Test casting a specific dashboard."""
await async_process_ha_core_config(
hass,
{"external_url": "https://example.com"},
)
await home_assistant_cast.async_setup_ha_cast(hass, MockConfigEntry())
calls = async_mock_signal(hass, home_assistant_cast.SIGNAL_HASS_CAST_SHOW_VIEW)
await hass.services.async_call(
"cast",
"show_lovelace_view",
{
"entity_id": "media_player.kitchen",
"view_path": "mock_path",
"dashboard_path": "mock-dashboard",
},
blocking=True,
)
assert len(calls) == 1
_controller, entity_id, view_path, url_path = calls[0]
assert entity_id == "media_player.kitchen"
assert view_path == "mock_path"
assert url_path == "mock-dashboard"
async def test_use_cloud_url(hass, mock_zeroconf):
"""Test that we fall back to cloud url."""
await async_process_ha_core_config(
hass,
{"internal_url": "http://example.local:8123"},
)
hass.config.components.add("cloud")
await home_assistant_cast.async_setup_ha_cast(hass, MockConfigEntry())
calls = async_mock_signal(hass, home_assistant_cast.SIGNAL_HASS_CAST_SHOW_VIEW)
with patch(
"homeassistant.components.cloud.async_remote_ui_url",
return_value="https://something.nabu.casa",
):
await hass.services.async_call(
"cast",
"show_lovelace_view",
{"entity_id": "media_player.kitchen", "view_path": "mock_path"},
blocking=True,
)
assert len(calls) == 1
controller = calls[0][0]
assert controller.hass_url == "https://something.nabu.casa"
async def test_remove_entry(hass, mock_zeroconf):
"""Test removing config entry removes user."""
entry = MockConfigEntry(
data={},
domain="cast",
title="Google Cast",
)
entry.add_to_hass(hass)
with patch(
"pychromecast.discovery.discover_chromecasts", return_value=(True, None)
), patch("pychromecast.discovery.stop_discovery"):
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert "cast" in hass.config.components
user_id = entry.data.get("user_id")
assert await hass.auth.async_get_user(user_id)
assert await hass.config_entries.async_remove(entry.entry_id)
assert not await hass.auth.async_get_user(user_id)
|
{
"content_hash": "d55f5be2597fc71df95f855782ad439a",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 83,
"avg_line_length": 32.86725663716814,
"alnum_prop": 0.6470113085621971,
"repo_name": "jawilson/home-assistant",
"id": "67b5454b6e1391a4ff38f9251b3691ba2d6ac5db",
"size": "3714",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/components/cast/test_home_assistant_cast.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2782"
},
{
"name": "Python",
"bytes": "40129467"
},
{
"name": "Shell",
"bytes": "4910"
}
],
"symlink_target": ""
}
|
"""
This module is pending deprecation as of Django 1.6 and will be removed in
version 1.8.
"""
from importlib import import_module
import json
import re
import unittest as real_unittest
import warnings
from django.apps import apps
from django.test import _doctest as doctest
from django.test import runner
from django.test.utils import compare_xml, strip_quotes
# django.utils.unittest is deprecated, but so is django.test.simple,
# and the latter will be removed before the former.
from django.utils import unittest
from django.utils.deprecation import RemovedInDjango18Warning
from django.utils.module_loading import module_has_submodule
__all__ = ('DjangoTestSuiteRunner',)
warnings.warn(
"The django.test.simple module and DjangoTestSuiteRunner are deprecated; "
"use django.test.runner.DiscoverRunner instead.",
RemovedInDjango18Warning)
# The module name for tests outside models.py
TEST_MODULE = 'tests'
normalize_long_ints = lambda s: re.sub(r'(?<![\w])(\d+)L(?![\w])', '\\1', s)
normalize_decimals = lambda s: re.sub(r"Decimal\('(\d+(\.\d*)?)'\)",
lambda m: "Decimal(\"%s\")" % m.groups()[0], s)
class OutputChecker(doctest.OutputChecker):
def check_output(self, want, got, optionflags):
"""
The entry method for doctest output checking. Defers to a sequence of
child checkers
"""
checks = (self.check_output_default,
self.check_output_numeric,
self.check_output_xml,
self.check_output_json)
for check in checks:
if check(want, got, optionflags):
return True
return False
def check_output_default(self, want, got, optionflags):
"""
The default comparator provided by doctest - not perfect, but good for
most purposes
"""
return doctest.OutputChecker.check_output(self, want, got, optionflags)
def check_output_numeric(self, want, got, optionflags):
"""Doctest does an exact string comparison of output, which means that
some numerically equivalent values aren't equal. This check normalizes
* long integers (22L) so that they equal normal integers. (22)
* Decimals so that they are comparable, regardless of the change
made to __repr__ in Python 2.6.
"""
return doctest.OutputChecker.check_output(self,
normalize_decimals(normalize_long_ints(want)),
normalize_decimals(normalize_long_ints(got)),
optionflags)
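    # e.g. "22L" and "22", or Decimal('5.0') and Decimal("5.0"), are
    # normalized to the same text before being compared.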
def check_output_xml(self, want, got, optionsflags):
try:
return compare_xml(want, got)
except Exception:
return False
def check_output_json(self, want, got, optionsflags):
"""
Tries to compare want and got as if they were JSON-encoded data
"""
want, got = strip_quotes(want, got)
try:
want_json = json.loads(want)
got_json = json.loads(got)
except Exception:
return False
return want_json == got_json
class DocTestRunner(doctest.DocTestRunner):
def __init__(self, *args, **kwargs):
doctest.DocTestRunner.__init__(self, *args, **kwargs)
self.optionflags = doctest.ELLIPSIS
doctestOutputChecker = OutputChecker()
def get_tests(app_config):
try:
test_module = import_module('%s.%s' % (app_config.name, TEST_MODULE))
except ImportError:
# Couldn't import tests.py. Was it due to a missing file, or
# due to an import error in a tests.py that actually exists?
if not module_has_submodule(app_config.module, TEST_MODULE):
test_module = None
else:
# The module exists, so there must be an import error in the test
# module itself.
raise
return test_module
def make_doctest(module):
return doctest.DocTestSuite(module,
checker=doctestOutputChecker,
runner=DocTestRunner)
def build_suite(app_config):
"""
Create a complete Django test suite for the provided application module.
"""
suite = unittest.TestSuite()
# Load unit and doctests in the models.py module. If module has
# a suite() method, use it. Otherwise build the test suite ourselves.
models_module = app_config.models_module
if models_module:
if hasattr(models_module, 'suite'):
suite.addTest(models_module.suite())
else:
suite.addTest(unittest.defaultTestLoader.loadTestsFromModule(
models_module))
try:
suite.addTest(make_doctest(models_module))
except ValueError:
# No doc tests in models.py
pass
# Check to see if a separate 'tests' module exists parallel to the
# models module
tests_module = get_tests(app_config)
if tests_module:
# Load unit and doctests in the tests.py module. If module has
# a suite() method, use it. Otherwise build the test suite ourselves.
if hasattr(tests_module, 'suite'):
suite.addTest(tests_module.suite())
else:
suite.addTest(unittest.defaultTestLoader.loadTestsFromModule(
tests_module))
try:
suite.addTest(make_doctest(tests_module))
except ValueError:
# No doc tests in tests.py
pass
return suite
def build_test(label):
"""
Construct a test case with the specified label. Label should be of the
form app_label.TestClass or app_label.TestClass.test_method. Returns an
instantiated test or test suite corresponding to the label provided.
"""
parts = label.split('.')
if len(parts) < 2 or len(parts) > 3:
raise ValueError("Test label '%s' should be of the form app.TestCase "
"or app.TestCase.test_method" % label)
app_config = apps.get_app_config(parts[0])
models_module = app_config.models_module
tests_module = get_tests(app_config)
test_modules = []
if models_module:
test_modules.append(models_module)
if tests_module:
test_modules.append(tests_module)
TestClass = None
for module in test_modules:
TestClass = getattr(module, parts[1], None)
if TestClass is not None:
break
try:
if issubclass(TestClass, (unittest.TestCase, real_unittest.TestCase)):
if len(parts) == 2: # label is app.TestClass
try:
return unittest.TestLoader().loadTestsFromTestCase(
TestClass)
except TypeError:
raise ValueError(
"Test label '%s' does not refer to a test class"
% label)
else: # label is app.TestClass.test_method
return TestClass(parts[2])
except TypeError:
# TestClass isn't a TestClass - it must be a method or normal class
pass
#
# If there isn't a TestCase, look for a doctest that matches
#
tests = []
for module in test_modules:
try:
doctests = make_doctest(module)
# Now iterate over the suite, looking for doctests whose name
# matches the pattern that was given
for test in doctests:
if test._dt_test.name in (
'%s.%s' % (module.__name__, '.'.join(parts[1:])),
'%s.__test__.%s' % (
module.__name__, '.'.join(parts[1:]))):
tests.append(test)
except ValueError:
# No doctests found.
pass
# If no tests were found, then we were given a bad test label.
if not tests:
raise ValueError("Test label '%s' does not refer to a test" % label)
# Construct a suite out of the tests that matched.
return unittest.TestSuite(tests)
class DjangoTestSuiteRunner(runner.DiscoverRunner):
def build_suite(self, test_labels, extra_tests=None, **kwargs):
suite = unittest.TestSuite()
if test_labels:
for label in test_labels:
if '.' in label:
suite.addTest(build_test(label))
else:
app_config = apps.get_app_config(label)
suite.addTest(build_suite(app_config))
else:
for app_config in apps.get_app_configs():
suite.addTest(build_suite(app_config))
if extra_tests:
for test in extra_tests:
suite.addTest(test)
return runner.reorder_suite(suite, (unittest.TestCase,))
|
{
"content_hash": "0dc247ad7395f5f24f6075b892711c2e",
"timestamp": "",
"source": "github",
"line_count": 252,
"max_line_length": 79,
"avg_line_length": 34.63492063492063,
"alnum_prop": 0.6043767186067828,
"repo_name": "wfxiang08/django178",
"id": "a60735de0ad9d9341a284e6f20d1faadb6b1cf7c",
"size": "8751",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django/test/simple.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "42829"
},
{
"name": "HTML",
"bytes": "169506"
},
{
"name": "JavaScript",
"bytes": "75783"
},
{
"name": "Makefile",
"bytes": "125"
},
{
"name": "Python",
"bytes": "9164014"
},
{
"name": "Shell",
"bytes": "809"
},
{
"name": "Smarty",
"bytes": "130"
}
],
"symlink_target": ""
}
|