text stringlengths 4 1.02M | meta dict |
|---|---|
from django.conf.urls import url
from djangoql import views
class DjangoQLFavoriteQueryMixin(object):
    """Admin mixin that wires the favorite-query widget into a ModelAdmin.

    Adds the favorite-query JS/CSS assets to the admin media and exposes
    two extra admin URLs (list/create and update/destroy) for favorite
    queries scoped to the admin's model.
    """

    @property
    def media(self):
        # Extend whatever media the parent admin class already declares.
        combined = super(DjangoQLFavoriteQueryMixin, self).media
        combined.add_js((
            'djangoql/js/favorite.js',
        ))
        combined.add_css({
            '': (
                'djangoql/css/favorite.css',
            ),
        })
        return combined

    def get_urls(self):
        """Prepend the favorite-query endpoints to the default admin URLs."""
        opts = (self.model._meta.app_label, self.model._meta.model_name)
        create_list_view = views.FavoriteQueryCreateListView.as_view(
            model=self.model,
        )
        destroy_update_view = views.FavoriteQueryDestroyUpdateView.as_view()
        custom_urls = [
            url(
                r'^favorite_queries/$',
                self.admin_site.admin_view(create_list_view),
                name='%s_%s_create_list_favorite_queries' % opts,
            ),
            url(
                r'^favorite_queries/(?P<pk>\d+)/$',
                self.admin_site.admin_view(destroy_update_view),
                name='%s_%s_destroy_update_favorite_queries' % opts,
            ),
        ]
        return custom_urls + super(DjangoQLFavoriteQueryMixin, self).get_urls()
| {
"content_hash": "cbca3a80959aec23f1516ff99936c98c",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 91,
"avg_line_length": 27.708333333333332,
"alnum_prop": 0.48721804511278194,
"repo_name": "artinnok/djangoql",
"id": "21d211209e96169a0ef6856f3c8140330b834157",
"size": "1330",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "djangoql/admin/favorite.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2806"
},
{
"name": "HTML",
"bytes": "17420"
},
{
"name": "JavaScript",
"bytes": "58944"
},
{
"name": "Python",
"bytes": "86349"
}
],
"symlink_target": ""
} |
import unittest
from datetime import datetime, timedelta
from trac.test import locale_en
from trac.ticket.query import QueryModule
from trac.ticket.report import ReportModule
from trac.ticket.roadmap import RoadmapModule
from trac.ticket.model import Milestone, Ticket
from trac.util.datefmt import format_datetime, pretty_timedelta, utc
from trac.wiki.tests import formatter
# Formatter test cases for ticket TracLinks: wiki input, expected rendered
# HTML, and (optionally) expected oneliner output, separated by the
# '====...' / '----...' marker lines understood by formatter.suite().
TICKET_TEST_CASES = u"""
============================== ticket: link resolver
ticket:1
ticket:12
ticket:abc
------------------------------
<p>
<a class="new ticket" href="/ticket/1" title="This is the summary (new)">ticket:1</a>
<a class="missing ticket">ticket:12</a>
<a class="missing ticket">ticket:abc</a>
</p>
------------------------------
============================== ticket: link resolver + arguments
ticket:1?format=csv
ticket:1#comment:3
------------------------------
<p>
<a class="new ticket" href="/ticket/1?format=csv" title="This is the summary (new)">ticket:1?format=csv</a>
<a class="new ticket" href="/ticket/1#comment:3" title="This is the summary (new)">ticket:1#comment:3</a>
</p>
------------------------------
============================== ticket: link resolver with ranges
ticket:12-14,33
ticket:12,33?order=created
------------------------------
<p>
<a href="/query?id=12-14%2C33" title="Tickets 12-14, 33">ticket:12-14,33</a>
<a href="/query?id=12%2C33&order=created" title="Tickets 12, 33">ticket:12,33?order=created</a>
</p>
------------------------------
============================== ticket link shorthand form
#1, #2
#12, #abc
------------------------------
<p>
<a class="new ticket" href="/ticket/1" title="This is the summary (new)">#1</a>, <a class="missing ticket">#2</a>
<a class="missing ticket">#12</a>, #abc
</p>
------------------------------
============================== ticket link shorthand form with ranges
#1-5,42
#1,3,5,7
------------------------------
<p>
<a href="/query?id=1-5%2C42" title="Tickets 1-5, 42">#1-5,42</a>
<a href="/query?id=1%2C3%2C5%2C7" title="Tickets 1, 3, 5, 7">#1,3,5,7</a>
</p>
------------------------------
============================== ticket link shorthand form with long ranges (#10111 regression)
#1-123456789012345678901234
------------------------------
<p>
<a href="/query?id=1-123456789012345678901234" title="Tickets 1-123456789012345678901234">#1-123456789012345678901234</a>
</p>
------------------------------
============================== escaping the above
!#1
------------------------------
<p>
#1
</p>
------------------------------
#1
============================== InterTrac for tickets
trac:ticket:2041
[trac:ticket:2041 Trac #2041]
#T2041
#trac2041
------------------------------
<p>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/ticket%3A2041" title="ticket:2041 in Trac's Trac"><span class="icon"></span>trac:ticket:2041</a>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/ticket%3A2041" title="ticket:2041 in Trac's Trac"><span class="icon"></span>Trac #2041</a>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/ticket%3A2041" title="ticket:2041 in Trac's Trac"><span class="icon"></span>#T2041</a>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/ticket%3A2041" title="ticket:2041 in Trac's Trac"><span class="icon"></span>#trac2041</a>
</p>
------------------------------
============================== Ticket InterTrac shorthands
T:#2041
trac:#2041
------------------------------
<p>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/%232041" title="#2041 in Trac's Trac"><span class="icon"></span>T:#2041</a>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/%232041" title="#2041 in Trac's Trac"><span class="icon"></span>trac:#2041</a>
</p>
------------------------------
============================== ticket syntax with unicode digits
#⁴²
#1-⁵,42
#1,³,5,7
#T²⁰⁴¹
#trac²⁰⁴¹
------------------------------
<p>
#⁴²
<a class="new ticket" href="/ticket/1" title="This is the summary (new)">#1</a>-⁵,42
<a class="new ticket" href="/ticket/1" title="This is the summary (new)">#1</a>,³,5,7
#T²⁰⁴¹
#trac²⁰⁴¹
</p>
------------------------------
""" # "
def ticket_setup(tc):
    """Register a custom ticket field and create the single ticket that the
    ticket-link test cases refer to as #1."""
    cfg = tc.env.config
    cfg.set('ticket-custom', 'custom1', 'text')
    cfg.save()
    fields = {
        'reporter': 'santa',
        'summary': 'This is the summary',
        'status': 'new',
    }
    tkt = Ticket(tc.env)
    tkt.populate(fields)
    tkt.insert()
def ticket_teardown(tc):
    """Undo ticket_setup: drop the custom field and reset the database."""
    cfg = tc.env.config
    cfg.remove('ticket-custom', 'custom1')
    cfg.save()
    tc.env.reset_db()
REPORT_TEST_CASES = u"""
============================== report link shorthand form
{1}, {2}
{12}, {abc}
------------------------------
<p>
<a class="report" href="/report/1">{1}</a>, <a class="report" href="/report/2">{2}</a>
<a class="missing report" title="report does not exist">{12}</a>, {abc}
</p>
------------------------------
============================== escaping the above
!{1}
------------------------------
<p>
{1}
</p>
------------------------------
{1}
============================== ticket shorthands, not numerical HTML entities
 
------------------------------
<p>
&#1; &#23;
</p>
------------------------------
&#1; &#23;
============================== report link with non-digits
report:blah
------------------------------
<p>
<a class="missing report" title="report does not exist">report:blah</a>
</p>
------------------------------
<a class="missing report" title="report does not exist">report:blah</a>
============================== InterTrac for reports
trac:report:1
[trac:report:1 Trac r1]
{T1}
{trac1}
{trac 1}
------------------------------
<p>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/report%3A1" title="report:1 in Trac's Trac"><span class="icon"></span>trac:report:1</a>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/report%3A1" title="report:1 in Trac's Trac"><span class="icon"></span>Trac r1</a>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/report%3A1" title="report:1 in Trac's Trac"><span class="icon"></span>{T1}</a>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/report%3A1" title="report:1 in Trac's Trac"><span class="icon"></span>{trac1}</a>
<a class="ext-link" href="http://trac.edgewall.org/intertrac/report%3A1" title="report:1 in Trac's Trac"><span class="icon"></span>{trac 1}</a>
</p>
------------------------------
============================== report syntax with unicode digits
{⁴²} !{⁴²}
{T⁴²}
{trac⁴²}
------------------------------
<p>
{⁴²} !{⁴²}
{T⁴²}
{trac⁴²}
</p>
------------------------------
""" # '
def report_setup(tc):
    """Insert the two reports ({1} and {2}) used by the report link tests."""
    def insert_report(report_id):
        # Title follows the 'Report <id>' convention checked by the cases.
        tc.env.db_transaction("""
INSERT INTO report (id,title,query,description)
VALUES (%s,%s,'SELECT 1','')""", (report_id, 'Report %s' % report_id))
    for report_id in (1, 2):
        insert_report(report_id)
# Reference timestamps one day in the past/future, used to exercise the
# milestone "Completed ... ago" / "Due in ..." title rendering below.
dt_past = datetime.now(utc) - timedelta(days=1)
dt_future = datetime.now(utc) + timedelta(days=1)
# Formatter test cases for milestone TracLinks. The expected titles embed the
# dynamically computed dt_past/dt_future values via the % substitution below.
MILESTONE_TEST_CASES = u"""
============================== milestone: link resolver
milestone:foo
[milestone:boo Milestone Boo]
[milestone:roo Milestone Roo]
[milestone:woo Milestone Woo]
[milestone:zoo Milestone Zoo]
------------------------------
<p>
<a class="missing milestone" href="/milestone/foo" rel="nofollow">milestone:foo</a>
<a class="milestone" href="/milestone/boo" title="No date set">Milestone Boo</a>
<a class="closed milestone" href="/milestone/roo" title="Completed %(dt_past)s ago (%(datestr_past)s)">Milestone Roo</a>
<a class="milestone" href="/milestone/woo" title="Due in %(dt_future)s (%(datestr_future)s)">Milestone Woo</a>
<a class="milestone" href="/milestone/zoo" title="%(dt_past)s late (%(datestr_past)s)">Milestone Zoo</a>
</p>
------------------------------
============================== milestone: link resolver + arguments
milestone:?action=new
[milestone:boo#KnownIssues Known Issues for 1.0]
------------------------------
<p>
<a class="missing milestone" href="/milestone/?action=new" rel="nofollow">milestone:?action=new</a>
<a class="milestone" href="/milestone/boo#KnownIssues" title="No date set">Known Issues for 1.0</a>
</p>
------------------------------
""" % {'dt_past': pretty_timedelta(dt_past),
       'dt_future': pretty_timedelta(dt_future),
       'datestr_past': format_datetime(dt_past, locale=locale_en, tzinfo=utc),
       'datestr_future': format_datetime(dt_future, locale=locale_en,
                                         tzinfo=utc)} #"
def milestone_setup(tc):
    """Create the four milestones referenced by MILESTONE_TEST_CASES:
    boo (no dates), roo (completed), woo (due later), zoo (overdue)."""
    specs = [
        # (name, completed, due)
        ('boo', None, None),
        ('roo', dt_past, None),
        ('woo', None, dt_future),
        ('zoo', None, dt_past),
    ]
    for name, completed, due in specs:
        milestone = Milestone(tc.env)
        milestone.name = name
        milestone.completed = completed
        milestone.due = due
        milestone.insert()
def milestone_teardown(tc):
    """Wipe the test database after the milestone link tests."""
    tc.env.reset_db()
# Formatter test cases for query: TracLinks and the [[TicketQuery]] macro with
# zero or one matching ticket (the ticket is created by ticket_setup).
QUERY_TEST_CASES = u"""
============================== query: link resolver
query:?order=priority
query:?order=priority&owner=me
query:?type=résumé
query:status=new|reopened
query:reporter!=
query:reporter=joe|jack&owner=me
query:group=owner
query:verbose=1
query:summary=résumé
------------------------------
<p>
<a class="query" href="/query?order=priority">query:?order=priority</a>
</p>
<p>
<a class="query" href="/query?order=priority&owner=me">query:?order=priority&owner=me</a>
</p>
<p>
<a class="query" href="/query?type=r%C3%A9sum%C3%A9">query:?type=résumé</a>
</p>
<p>
<a class="query" href="/query?status=new&status=reopened&order=priority">query:status=new|reopened</a>
</p>
<p>
<a class="query" href="/query?reporter=!&order=priority">query:reporter!=</a>
</p>
<p>
<a class="query" href="/query?owner=me&reporter=joe&reporter=jack&order=priority">query:reporter=joe|jack&owner=me</a>
</p>
<p>
<a class="query" href="/query?group=owner&order=priority">query:group=owner</a>
</p>
<p>
<a class="query" href="/query?order=priority&row=description">query:verbose=1</a>
</p>
<p>
<a class="query" href="/query?summary=r%C3%A9sum%C3%A9&order=priority">query:summary=résumé</a>
</p>
------------------------------
============================== TicketQuery macro: no results, list form
Reopened tickets: [[TicketQuery(status=reopened)]]
------------------------------
<p>
Reopened tickets: <span class="query_no_results">No results</span>
</p>
------------------------------
============================== TicketQuery macro: no results, count 0 (raw)
Reopened tickets: [[TicketQuery(status=reopened, format=rawcount)]]
------------------------------
<p>
Reopened tickets: <span class="query_count" title="0 tickets matching status=reopened, max=0, order=id">0</span>
</p>
------------------------------
============================== TicketQuery macro: no results, count 0
Reopened tickets: [[TicketQuery(status=reopened, format=count)]]
------------------------------
<p>
Reopened tickets: <a href="/query?status=reopened&max=0&order=id" title="0 tickets matching status=reopened, max=0, order=id">0</a>
</p>
------------------------------
============================== TicketQuery macro: no results, compact form
Reopened tickets: [[TicketQuery(status=reopened, format=compact)]]
------------------------------
<p>
Reopened tickets: <span class="query_no_results">No results</span>
</p>
------------------------------
============================== TicketQuery macro: one result, list form
New tickets: [[TicketQuery(status=new)]]
------------------------------
<p>
New tickets: </p><div><dl class="wiki compact"><dt><a class="new" href="/ticket/1" title="This is the summary">#1</a></dt><dd>This is the summary</dd></dl></div><p>
</p>
------------------------------
============================== TicketQuery macro: one result, count 1 (raw)
New tickets: [[TicketQuery(status=new, format=rawcount)]]
------------------------------
<p>
New tickets: <span class="query_count" title="1 ticket matching status=new, max=0, order=id">1</span>
</p>
------------------------------
============================== TicketQuery macro: one result, count 1
New tickets: [[TicketQuery(status=new, format=count)]]
------------------------------
<p>
New tickets: <a href="/query?status=new&max=0&order=id" title="1 ticket matching status=new, max=0, order=id">1</a>
</p>
------------------------------
============================== TicketQuery macro: one result, compact form
New tickets: [[TicketQuery(status=new, format=compact)]]
------------------------------
<p>
New tickets: <span><a class="new" href="/ticket/1" title="This is the summary">#1</a></span>
</p>
------------------------------
============================== TicketQuery macro: duplicated fields
New tickets: [[TicketQuery(status=new, format=compact, col=summary|status|status)]]
------------------------------
<p>
New tickets: <span><a class="new" href="/ticket/1" title="This is the summary">#1</a></span>
</p>
------------------------------
============================== TicketQuery macro: duplicated custom fields
New tickets: [[TicketQuery(status=new, format=compact, col=summary|custom1|custom1)]]
------------------------------
<p>
New tickets: <span><a class="new" href="/ticket/1" title="This is the summary">#1</a></span>
</p>
------------------------------
"""
# [[TicketQuery]] macro test cases with two matching tickets (created by
# query2_setup); ordering by reporter puts ticket #2 (claus) before #1 (santa).
QUERY2_TEST_CASES = u"""
============================== TicketQuery macro: two results, list form
New tickets: [[TicketQuery(status=new, order=reporter)]]
------------------------------
<p>
New tickets: </p><div><dl class="wiki compact"><dt><a class="new" href="/ticket/2" title="This is another summary">#2</a></dt><dd>This is another summary</dd><dt><a class="new" href="/ticket/1" title="This is the summary">#1</a></dt><dd>This is the summary</dd></dl></div><p>
</p>
------------------------------
============================== TicketQuery macro: two results, count 2 (raw)
New tickets: [[TicketQuery(status=new, order=reporter, format=rawcount)]]
------------------------------
<p>
New tickets: <span class="query_count" title="2 tickets matching status=new, max=0, order=reporter">2</span>
</p>
------------------------------
============================== TicketQuery macro: two results, count 2
New tickets: [[TicketQuery(status=new, order=reporter, format=count)]]
------------------------------
<p>
New tickets: <a href="/query?status=new&max=0&order=reporter" title="2 tickets matching status=new, max=0, order=reporter">2</a>
</p>
------------------------------
============================== TicketQuery macro: two results, compact form
New tickets: [[TicketQuery(status=new, order=reporter, format=compact)]]
------------------------------
<p>
New tickets: <span><a class="new" href="/ticket/2" title="This is another summary">#2</a>, <a class="new" href="/ticket/1" title="This is the summary">#1</a></span>
</p>
------------------------------
"""
def query2_setup(tc):
    """Insert two new-status tickets so TicketQuery returns multiple rows."""
    for reporter, summary in (('santa', 'This is the summary'),
                              ('claus', 'This is another summary')):
        tkt = Ticket(tc.env)
        tkt.populate({'reporter': reporter,
                      'summary': summary,
                      'status': 'new'})
        tkt.insert()
def query2_teardown(tc):
    """Wipe the test database after the two-ticket query tests."""
    tc.env.reset_db()
# Formatter test cases for comment: TracLinks, both the deprecated
# comment:ticket:<tid>:<cnum> form and the current comment:<cnum>[:ticket:<tid>]
# form. Rendered in the context of ticket 2 (see the ('ticket', 2) argument
# passed to formatter.suite() below).
COMMENT_TEST_CASES = u"""
============================== comment: link resolver (deprecated)
comment:ticket:1:1 (deprecated)
[comment:ticket:1:1 see above] (deprecated)
comment:ticket:1:description (deprecated)
[comment:ticket:1:description see descr] (deprecated)
comment:ticket:2:1 (deprecated)
comment:ticket:2:3 (deprecated)
comment:ticket:3:1 (deprecated)
comment:tiket:2:1 (deprecated)
comment:ticket:two:1 (deprecated)
comment:ticket:2:1a (deprecated)
comment:ticket:2:one (deprecated)
comment:ticket:1: (deprecated)
comment:ticket::2 (deprecated)
comment:ticket:: (deprecated)
------------------------------
<p>
<a class="new ticket" href="/ticket/1#comment:1" title="Comment 1 for Ticket #1">comment:ticket:1:1</a> (deprecated)
<a class="new ticket" href="/ticket/1#comment:1" title="Comment 1 for Ticket #1">see above</a> (deprecated)
<a class="new ticket" href="/ticket/1#comment:description" title="Description for Ticket #1">comment:ticket:1:description</a> (deprecated)
<a class="new ticket" href="/ticket/1#comment:description" title="Description for Ticket #1">see descr</a> (deprecated)
<a class="ticket" href="/ticket/2#comment:1" title="Comment 1">comment:ticket:2:1</a> (deprecated)
<a class="missing ticket" title="ticket comment does not exist">comment:ticket:2:3</a> (deprecated)
<a class="missing ticket" title="ticket does not exist">comment:ticket:3:1</a> (deprecated)
comment:tiket:2:1 (deprecated)
comment:ticket:two:1 (deprecated)
comment:ticket:2:1a (deprecated)
comment:ticket:2:one (deprecated)
comment:ticket:1: (deprecated)
comment:ticket::2 (deprecated)
comment:ticket:: (deprecated)
</p>
------------------------------
============================== comment: link resolver
comment:1
[comment:1 see above]
comment:description
[comment:description see descr]
comment:
comment:one
comment:1a
------------------------------
<p>
<a class="ticket" href="/ticket/2#comment:1" title="Comment 1">comment:1</a>
<a class="ticket" href="/ticket/2#comment:1" title="Comment 1">see above</a>
<a class="ticket" href="/ticket/2#comment:description" title="Description">comment:description</a>
<a class="ticket" href="/ticket/2#comment:description" title="Description">see descr</a>
comment:
comment:one
comment:1a
</p>
------------------------------
============================== comment: link resolver with ticket number
comment:1:ticket:1
[comment:1:ticket:1 see above]
comment:description:ticket:1
[comment:description:ticket:1 see descr]
comment:1:ticket:2
comment:3:ticket:2
comment:1:ticket:3
comment:2:tiket:1
comment:1:ticket:two
comment:one:ticket:1
comment:1a:ticket:1
comment:ticket:1
comment:2:ticket:
comment::ticket:
------------------------------
<p>
<a class="new ticket" href="/ticket/1#comment:1" title="Comment 1 for Ticket #1">comment:1:ticket:1</a>
<a class="new ticket" href="/ticket/1#comment:1" title="Comment 1 for Ticket #1">see above</a>
<a class="new ticket" href="/ticket/1#comment:description" title="Description for Ticket #1">comment:description:ticket:1</a>
<a class="new ticket" href="/ticket/1#comment:description" title="Description for Ticket #1">see descr</a>
<a class="ticket" href="/ticket/2#comment:1" title="Comment 1">comment:1:ticket:2</a>
<a class="missing ticket" title="ticket comment does not exist">comment:3:ticket:2</a>
<a class="missing ticket" title="ticket does not exist">comment:1:ticket:3</a>
comment:2:tiket:1
comment:1:ticket:two
comment:one:ticket:1
comment:1a:ticket:1
comment:ticket:1
comment:2:ticket:
comment::ticket:
</p>
------------------------------
""" # "
# NOTE: the following test cases:
#
# comment:2
# [comment:2 see above]
#
# would trigger an error in the workaround code ../api.py, line 235
# As it's a problem with a temp workaround, I think there's no need
# to fix it for now.
def comment_setup(tc):
    """Create two tickets, each with exactly one comment, for the
    comment-link test cases (#1 new, #2 closed)."""
    for reporter, summary, status, comment in (
            ('santa', 'This is the summary for ticket 1', 'new',
             'This is the comment for ticket 1'),
            ('claws', 'This is the summary for ticket 2', 'closed',
             'This is the comment for ticket 2')):
        tkt = Ticket(tc.env)
        tkt.populate({'reporter': reporter,
                      'summary': summary,
                      'status': status})
        tkt.insert()
        tkt.save_changes(comment=comment)
def comment_teardown(tc):
    """Wipe the test database after the comment link tests."""
    tc.env.reset_db()
def suite():
    """Aggregate every wiki-syntax formatter suite defined in this module."""
    # Use a distinct local name so the function name is not shadowed.
    tests = unittest.TestSuite()
    tests.addTest(formatter.suite(TICKET_TEST_CASES, ticket_setup, __file__,
                                  ticket_teardown))
    tests.addTest(formatter.suite(REPORT_TEST_CASES, report_setup, __file__))
    tests.addTest(formatter.suite(MILESTONE_TEST_CASES, milestone_setup,
                                  __file__, milestone_teardown))
    tests.addTest(formatter.suite(QUERY_TEST_CASES, ticket_setup, __file__,
                                  ticket_teardown))
    tests.addTest(formatter.suite(QUERY2_TEST_CASES, query2_setup, __file__,
                                  query2_teardown))
    # Comment cases render in the context of ticket 2.
    tests.addTest(formatter.suite(COMMENT_TEST_CASES, comment_setup, __file__,
                                  comment_teardown, ('ticket', 2)))
    return tests
if __name__ == '__main__':
    # Allow running this module directly: executes the aggregated suite().
    unittest.main(defaultTest='suite')
| {
"content_hash": "f746b501d959e9685d1717051fc09bf6",
"timestamp": "",
"source": "github",
"line_count": 564,
"max_line_length": 275,
"avg_line_length": 36.709219858156025,
"alnum_prop": 0.5751545595054096,
"repo_name": "pkdevbox/trac",
"id": "fec42222898d0cc700219f8850fc8e3ebd0b874d",
"size": "21269",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "trac/ticket/tests/wikisyntax.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "3268"
},
{
"name": "CSS",
"bytes": "71129"
},
{
"name": "HTML",
"bytes": "356464"
},
{
"name": "JavaScript",
"bytes": "85641"
},
{
"name": "Makefile",
"bytes": "18957"
},
{
"name": "Python",
"bytes": "3830196"
},
{
"name": "Shell",
"bytes": "9573"
}
],
"symlink_target": ""
} |
# Script metadata: copyright holder, license URL and release version.
__copyright__ = "Copyright 2015-2018 Altova GmbH"
__license__ = 'http://www.apache.org/licenses/LICENSE-2.0'
__version__ = '5.1.0'
# This script implements additional data quality validation rules as specified by the XBRL US Data Quality Committee (http://xbrl.us/data-quality/rules-guidance/).
# This script is designed to be used standalone or in conjunction with the EDGAR Filer Manual (EFM) rules implemented in script efm_validation.py. When using the efm_validation.py script, the DQC validation rules can be enabled with the enableDqcValidation option.
#
# The following script parameters can be additionally specified:
#
# suppressErrors A list of DQC.US.nnnn.mmm error codes separated by | characters.
#
# Example invocations
#
# Validate a single filing
# raptorxmlxbrl valxbrl --script=dqc_validation.py instance.xml
# Suppress a specific error
# raptorxmlxbrl valxbrl --script=dqc_validation.py --script-param=suppressErrors:DQC.US.0004.16 instance.xml
# Validate a single filing using EFM and DQC rules
# raptorxmlxbrl valxbrl --script=efm_validation.py --script-param=enableDqcValidation:true instance.xml
#
# Using Altova RaptorXML+XBRL Server with XMLSpy client:
#
# 1a. Copy efm_validation.py and all dqc_* files to the Altova RaptorXML Server script directory /etc/scripts/sec-edgar-tools/ (default C:\Program Files\Altova\RaptorXMLXBRLServer2016\etc\scripts\sec-edgar-tools\) or
# 1b. Edit the <server.script-root-dir> tag in /etc/server_config.xml
# 2. Start Altova RaptorXML+XBRL server.
# 3. Start Altova XMLSpy, open Tools|Manage Raptor Servers... and connect to the running server
# 4. Create a new configuration and rename it to e.g. "DQC CHECKS"
# 5. Select the XBRL Instance property page and then set the script property to sec-edgar-tools/dqc_validation.py
# 6. Select the new "DQC CHECKS" configuration in Tools|Raptor Servers and Configurations
# 7. Open a SEC instance file
# 8. Validate instance file with XML|Validate XML on Server (Ctrl+F8)
import collections
import datetime
import decimal
import json
import operator
import os
import re
import sys
import altova_api.v2.xml as xml
import altova_api.v2.xsd as xsd
import altova_api.v2.xbrl as xbrl
# Metadata for a single DQC rule: its version string, release date, and the
# URL of the official rule documentation. The typename passed to namedtuple
# matches the bound name (it previously was 'ruleInfo'), so repr() and
# pickling report the conventional 'RuleInfo' class name.
RuleInfo = collections.namedtuple('RuleInfo', ['ruleVersion', 'releaseDate', 'url'])
# Regexes matching the namespace URIs of the standard SEC/FASB taxonomies,
# keyed by conventional prefix. The three capture groups are the (year,
# month, day) components of the taxonomy release date embedded in the URI.
re_namespaces = {
    'country': re.compile(r'http://xbrl\.(?:us|sec\.gov)/country/([0-9]{4})-([0-9]{2})-([0-9]{2})'),
    'currency': re.compile(r'http://xbrl\.(?:us|sec\.gov)/currency/([0-9]{4})-([0-9]{2})-([0-9]{2})'),
    'dei': re.compile(r'http://xbrl\.(?:us|sec\.gov)/dei/([0-9]{4})-([0-9]{2})-([0-9]{2})'),
    'exch': re.compile(r'http://xbrl\.(?:us|sec\.gov)/exch/([0-9]{4})-([0-9]{2})-([0-9]{2})'),
    'invest': re.compile(r'http://xbrl\.(?:us|sec\.gov)/invest/([0-9]{4})-([0-9]{2})-([0-9]{2})'),
    'naics': re.compile(r'http://xbrl\.(?:us|sec\.gov)/naics/([0-9]{4})-([0-9]{2})-([0-9]{2})'),
    'sic': re.compile(r'http://xbrl\.(?:us|sec\.gov)/sic/([0-9]{4})-([0-9]{2})-([0-9]{2})'),
    'stpr': re.compile(r'http://xbrl\.(?:us|sec\.gov)/stpr/([0-9]{4})-([0-9]{2})-([0-9]{2})'),
    'us-gaap': re.compile(r'http://(?:xbrl\.us|fasb\.org)/us-gaap/([0-9]{4})-([0-9]{2})-([0-9]{2})')
}
# Matches any namespace hosted on xbrl.us, sec.gov or fasb.org, i.e. a
# standard (non-extension) taxonomy namespace.
re_standard_ns = re.compile(r'http://(xbrl\.(us|sec\.gov)|fasb\.org)/')
# Boilerplate lines appended to DQC error messages describing the primary
# fact's properties; the ${...} placeholders are resolved when the error
# object is constructed (see handle_param below).
msg_template_properties = [
    'The properties of this ${fact1.name} fact are:',
    'Period: ${fact1.period}',
    'Dimensions: ${fact1.dimensions}',
    'Unit: ${fact1.unit}',
    'Rule version: ${ruleVersion}',
]
# Directory containing the JSON data files bundled with the DQC rules.
dqc_data_dir = os.path.join(os.path.dirname(__file__), 'dqc_data')


def _load_dqc_json(filename):
    """Parse a JSON data file from dqc_data_dir and return its content.

    Uses a context manager so the file handle is closed deterministically;
    the previous json.load(open(...)) pattern leaked the handle until
    garbage collection (ResourceWarning). DQC data files are UTF-8 (JSON).
    """
    with open(os.path.join(dqc_data_dir, filename), encoding='utf-8') as f:
        return json.load(f)


# Per-rule reference data shipped alongside this script.
msg_templates = _load_dqc_json('dqc_msg_templates.json')
dqc_0001_axis_members = _load_dqc_json('dqc_0001_axis_members.json')
dqc_0006_period_focus_durations = _load_dqc_json('dqc_0006_period_focus_durations.json')
dqc_0008_calculations = _load_dqc_json('dqc_0008_calculations.json')
dqc_0009_facts = _load_dqc_json('dqc_0009_facts.json')
dqc_0011_facts = _load_dqc_json('dqc_0011_facts.json')
dqc_0013_facts = _load_dqc_json('dqc_0013_facts.json')
dqc_0013_preconditions = _load_dqc_json('dqc_0013_preconditions.json')
dqc_0014_facts = _load_dqc_json('dqc_0014_facts.json')
dqc_0015_facts = _load_dqc_json('dqc_0015_facts.json')
dqc_0015_member_exclusions = _load_dqc_json('dqc_0015_member_exclusions.json')
dqc_0018_concepts = _load_dqc_json('dqc_0018_concepts.json')
dqc_0041_default_members = _load_dqc_json('dqc_0041_default_members.json')
dqc_0043_data = _load_dqc_json('dqc_0043_data.json')
dqc_0044_data = _load_dqc_json('dqc_0044_data.json')
dqc_0045_data = _load_dqc_json('dqc_0045_data.json')
dqc_0046_data = _load_dqc_json('dqc_0046_data.json')
dqc_0048_roots = _load_dqc_json('dqc_0048_roots.json')
dqc_0049_roots = _load_dqc_json('dqc_0049_roots.json')
dqc_0051_data = _load_dqc_json('dqc_0051_data.json')
dqc_0052_data = _load_dqc_json('dqc_0052_data.json')
dqc_0053_data = _load_dqc_json('dqc_0053_data.json')
dqc_0054_data = _load_dqc_json('dqc_0054_data.json')
dqc_0055_data = _load_dqc_json('dqc_0055_data.json')
dqc_0057_data = _load_dqc_json('dqc_0057_data.json')
dqc_0060_data = _load_dqc_json('dqc_0060_data.json')
dqc_0061_data = _load_dqc_json('dqc_0061_data.json')
dqc_0062_data = _load_dqc_json('dqc_0062_data.json')

# XBRL 2.1 standard arcroles used when traversing calculation/presentation
# linkbases.
arcrole_summation_item = 'http://www.xbrl.org/2003/arcrole/summation-item'
arcrole_parent_child = 'http://www.xbrl.org/2003/arcrole/parent-child'
# Label roles identifying period-start (opening) and period-end (closing)
# balance labels.
opening_label_roles = {'http://www.xbrl.org/2003/role/periodStartLabel'}
closing_label_roles = {'http://www.xbrl.org/2003/role/periodEndLabel'}
def get_namespace(namespaces, prefix):
    """Return the namespace URI mapped to *prefix*, or None if absent.

    *namespaces* maps prefixes to (namespace, year) pairs.
    """
    return namespaces.get(prefix, (None, None))[0]
def get_namespace_and_year(namespaces, prefix):
    """Return the (namespace URI, year) pair for *prefix*, or (None, 0)."""
    missing = (None, 0)
    return namespaces.get(prefix, missing)
def prefixed_name(x):
    """Given a fact or concept, return its name formatted as [prefix:]name."""
    qname = x.qname
    if qname.prefix:
        return '%s:%s' % (qname.prefix, qname.local_name)
    return qname.local_name
def label(x):
    """Given a fact or concept, return the text of its first English
    standard label, falling back to the prefixed name if none exists."""
    concept = x if isinstance(x, xbrl.taxonomy.Concept) else x.concept
    english = list(concept.labels(lang='en', label_role=xbrl.taxonomy.ROLE_LABEL))
    if english:
        return english[0].text
    return prefixed_name(x)
def period_end(fact):
    """Return the end of a fact's period: the end date for duration periods,
    the instant date for instant periods, datetime.max otherwise (forever)."""
    period = fact.period_aspect_value
    kind = period.period_type
    if kind == xbrl.PeriodType.INSTANT:
        return period.instant
    if kind == xbrl.PeriodType.START_END:
        return period.end
    return datetime.datetime.max
def period_duration(fact):
    """Return the fact's period length in whole days: 0 for instants,
    sys.maxsize for forever periods."""
    period = fact.period_aspect_value
    kind = period.period_type
    if kind == xbrl.PeriodType.INSTANT:
        return 0
    if kind == xbrl.PeriodType.START_END:
        delta = period.end - period.start
        return delta.days
    return sys.maxsize
def format_date(val, is_end=False):
    """Format a datetime as a date string.

    Values with a non-midnight time component are rendered with the full
    timestamp. For midnight values only the date is rendered; when *is_end*
    is true the value marks the end of a day — XBRL 2.1 treats an end date
    as midnight of the following day — so one day is subtracted first.
    """
    if val.time() != datetime.time.min:
        return val.strftime('%Y-%m-%d %H:%M:%S')
    day = val - datetime.timedelta(days=1) if is_end else val
    return day.strftime('%Y-%m-%d')
def handle_param(msg_parts, msg_params, param_parts, param, param_value):
    """Handle a single message-template parameter while building an xbrl.Error
    (helper for create_error).

    Appends the formatted placeholder text to *msg_parts* and records the
    corresponding xbrl.Error.Param object(s) in *msg_params* under the key
    *param*. *param_parts* is the dotted path of the template placeholder
    (e.g. ['fact1', 'period', 'endDate']) and selects which property of
    *param_value* is rendered. Supported value types: xbrl.Fact,
    xbrl.taxonomy.Concept, RuleInfo, xbrl.Error.Param, and anything else
    (rendered via str()).
    """
    if isinstance(param_value, xbrl.Fact):
        fact = param_value
        # An optional '.fact' path segment is just an explicit type tag; drop it.
        if len(param_parts) > 1 and param_parts[1] == 'fact':
            del param_parts[1]
        if len(param_parts) == 1 or param_parts[1] == 'name':
            msg_parts.append('{%s}' % param)
            msg_params[param] = xbrl.Error.Param(prefixed_name(fact), tooltip=str(fact.qname), location=fact, quotes=False)
        elif param_parts[1] == 'localName':
            msg_parts.append('{%s}' % param)
            msg_params[param] = xbrl.Error.Param(fact.local_name, tooltip=str(fact.qname), location=fact, quotes=False)
        elif param_parts[1] == 'label':
            msg_parts.append('{%s}' % param)
            msg_params[param] = xbrl.Error.Param(label(fact), tooltip=str(fact.qname), location=fact, deflocation=fact.concept, quotes=False)
        elif param_parts[1] == 'value':
            # Render the fact value: 'nil' for xsi:nil facts, thousands-separated
            # for numeric facts, the normalized string value otherwise.
            msg_parts.append('{%s:value}' % param)
            if fact.xsi_nil:
                msg_params[param] = xbrl.Error.Param('nil', location=fact.element.find_attribute(('nil', xsd.NAMESPACE_XSI)), quotes=False)
            elif fact.concept.is_numeric():
                msg_params[param] = xbrl.Error.Param('{:,}'.format(fact.numeric_value), location=fact, quotes=False)
            else:
                msg_params[param] = xbrl.Error.Param(fact.normalized_value, location=fact, quotes=False)
        elif param_parts[1] == 'period':
            period = fact.context.period
            if len(param_parts) > 2:
                # A specific period property was requested.
                if param_parts[2] == 'startDate':
                    msg_parts.append('{%s:value}' % param)
                    msg_params[param] = xbrl.Error.Param(format_date(period.start_date.value), location=period.start_date, quotes=False)
                elif param_parts[2] == 'endDate':
                    end_date = period.instant if period.type == xbrl.PeriodType.INSTANT else period.end_date
                    msg_parts.append('{%s:value}' % param)
                    msg_params[param] = xbrl.Error.Param(format_date(end_date.value, is_end=True), location=end_date, quotes=False)
                elif param_parts[2] == 'instant':
                    msg_parts.append('{%s:value}' % param)
                    msg_params[param] = xbrl.Error.Param(format_date(period.instant.value, is_end=True), location=period.instant, quotes=False)
                elif param_parts[2] == 'durationDays':
                    msg_parts.append('{%s}' % param)
                    msg_params[param] = xbrl.Error.Param(str(period_duration(fact)), quotes=False)
                else:
                    raise KeyError('Unknown period property '+param_parts[2])
            else:
                # No sub-property: render the whole period in a form matching its type.
                if period.type == xbrl.PeriodType.INSTANT:
                    msg_parts.append('{%s.instant:value}' % param)
                    msg_params[param+'.instant'] = xbrl.Error.Param(format_date(period.instant.value, is_end=True), location=period.instant, quotes=False)
                elif period.type == xbrl.PeriodType.START_END:
                    msg_parts.append('{%s.startDate:value} - {%s.endDate:value}' % (param, param))
                    msg_params[param+'.startDate'] = xbrl.Error.Param(format_date(period.start_date.value), location=period.start_date, quotes=False)
                    msg_params[param+'.endDate'] = xbrl.Error.Param(format_date(period.end_date.value, is_end=True), location=period.end_date, quotes=False)
                else:
                    msg_parts.append('forever')
        elif param_parts[1] == 'dimensions':
            # Render all dimension/member pairs of the fact's context, one
            # placeholder pair per dimension aspect.
            dimension_aspects = list(fact.context.dimension_aspect_values)
            if dimension_aspects:
                msg_parts.append(', '.join('{%s.dim%d} = {%s.member%d}' % (param, i, param, i) for i, aspect in enumerate(dimension_aspects)))
                for i, aspect in enumerate(dimension_aspects):
                    msg_params['%s.dim%d' % (param, i)] = xbrl.Error.Param(prefixed_name(aspect.dimension), tooltip=str(aspect.dimension.qname), deflocation=aspect.dimension, quotes=False)
                    msg_params['%s.member%d' % (param, i)] = xbrl.Error.Param(prefixed_name(aspect.value), tooltip=str(aspect.value.qname), deflocation=aspect.value, quotes=False)
            else:
                msg_parts.append('none')
        elif param_parts[1] == 'unit':
            # Render the unit as numerator measures, optionally followed by
            # ' / ' and denominator measures.
            if fact.unit:
                numerator = list(fact.unit.numerator_measures)
                denominator = list(fact.unit.denominator_measures)
                msg_parts.append(' '.join('{%s.num%d:value}' % (param, i) for i, measure in enumerate(numerator)))
                for i, measure in enumerate(numerator):
                    msg_params['%s.num%d' % (param, i)] = xbrl.Error.Param(measure.value.local_name, tooltip=str(measure.value), location=measure, quotes=False)
                if len(denominator):
                    msg_parts.append(' / ')
                    msg_parts.append(' '.join('{%s.denom%d:value}' % (param, i) for i, measure in enumerate(denominator)))
                    for i, measure in enumerate(denominator):
                        msg_params['%s.denom%d' % (param, i)] = xbrl.Error.Param(measure.value.local_name, tooltip=str(measure.value), location=measure, quotes=False)
            else:
                msg_parts.append('none')
        elif param_parts[1] == 'decimals':
            msg_parts.append('{%s}' % param)
            msg_params[param] = xbrl.Error.Param(str(fact.decimals), location=fact.element.find_attribute('decimals'), quotes=False)
        else:
            raise KeyError('Unknown fact property '+param_parts[1])
    elif isinstance(param_value, xbrl.taxonomy.Concept):
        concept = param_value
        if len(param_parts) == 1 or param_parts[1] == 'name':
            msg_parts.append('{%s}' % param)
            msg_params[param] = xbrl.Error.Param(prefixed_name(concept), tooltip=str(concept.qname), deflocation=concept, quotes=False)
        elif param_parts[1] == 'localName':
            msg_parts.append('{%s}' % param)
            msg_params[param] = xbrl.Error.Param(concept.name, tooltip=str(concept.qname), deflocation=concept, quotes=False)
        elif param_parts[1] == 'label':
            msg_parts.append('{%s}' % param)
            msg_params[param] = xbrl.Error.Param(label(concept), tooltip=str(concept.qname), deflocation=concept, quotes=False)
    elif isinstance(param_value, RuleInfo):
        # Rule metadata renders as an external hyperlink to the rule's documentation.
        ruleVersion = param_value
        msg_parts.append('{%s}' % param)
        msg_params[param] = xbrl.Error.ExternalLinkParam(ruleVersion.url, title=ruleVersion.ruleVersion, tooltip=ruleVersion.releaseDate, quotes=False)
    elif isinstance(param_value, xbrl.Error.Param):
        # Pre-built parameter objects pass through unchanged.
        msg_parts.append('{%s}' % param)
        msg_params[param] = param_value
    else:
        # Fallback: render any other value through str().
        msg_parts.append('{%s}' % param)
        msg_params[param] = xbrl.Error.Param(str(param_value), quotes=False)
def create_error(msg, location, severity, children, **kargs):
    """Create an xbrl.Error from the message template *msg*.

    Template placeholders have the form ${name[.property...]}; each is resolved
    against the keyword arguments and expanded by handle_param. A keyword value
    that is a dict is indexed by the placeholder's second path segment; a list
    expands into a comma-separated sequence of parameters. Raises KeyError when
    a placeholder has no matching keyword argument.
    """
    msg_parts = []
    msg_params = {}
    text_start = 0
    # Scan msg left to right, copying literal text and expanding each ${...}.
    while True:
        param_start = msg.find('${', text_start)
        if param_start == -1:
            msg_parts.append(msg[text_start:])
            break
        if text_start < param_start:
            msg_parts.append(msg[text_start:param_start])
        param_start += 2
        param_end = msg.find('}', param_start)
        param = msg[param_start:param_end]
        param_parts = param.split('.')
        # ':' is not allowed in xbrl.Error parameter names; map it to '_'.
        param = param.replace(':', '_')
        param_values = kargs
        if param_parts[0] not in param_values:
            raise KeyError('Missing value for parameter '+param_parts[0])
        if isinstance(param_values[param_parts[0]], dict):
            # Dict values are indexed by the second path segment, which then
            # becomes the head of the remaining property path.
            param_subvalues = param_values[param_parts[0]]
            if param_parts[1] not in param_subvalues:
                raise KeyError('Missing value for parameter '+'.'.join(param_parts[:2]))
            param_parts = param_parts[1:]
            handle_param(msg_parts, msg_params, param_parts, param, param_subvalues[param_parts[0]])
        elif isinstance(param_values[param_parts[0]], list):
            # List values expand into a comma-separated run of parameters,
            # suffixed _1, _2, ... to keep names unique.
            param_index = 1
            for param_value in param_values[param_parts[0]]:
                if param_index > 1:
                    msg_parts.append(", ")
                handle_param(msg_parts, msg_params, param_parts, "%s_%d" % (param, param_index), param_value)
                param_index += 1
        else:
            handle_param(msg_parts, msg_params, param_parts, param, param_values[param_parts[0]])
        text_start = param_end+1
    return xbrl.Error.create(''.join(msg_parts), location=location, severity=severity, children=children, **msg_params)
def report_error(error_log, suppress_errors, rule_id, location=None, variation=None, **kargs):
    """Construct and report an error for DQC rule *rule_id*.

    Looks up the message template in the module-level msg_templates table
    (falling back to the rule id without its trailing test-case number),
    expands it via create_error, attaches property/hint child messages, and
    adds the result to *error_log*. Rules listed in *suppress_errors* (by full
    id or by id without the test-case suffix) are silently skipped.
    *variation* selects an alternative message from the template's
    'variations' table; remaining keyword arguments supply placeholder values.
    """
    if rule_id in suppress_errors or rule_id.rsplit('.', 1)[0] in suppress_errors:
        return
    if rule_id in msg_templates:
        msg = msg_templates[rule_id]
    else:
        # Remove test case number
        msg = msg_templates[rule_id.rsplit('.', 1)[0]]
    # Make the rule version/link available to every template as ${ruleVersion}.
    kargs['ruleVersion'] = RuleInfo(*msg['version'])
    if variation is not None:
        msg = msg['variations'][variation]
    # Property lines mentioning fact1 are only rendered when a fact1 value exists.
    property_lines = []
    for line in msg_template_properties[1:]:
        if 'fact1' not in line or 'fact1' in kargs:
            property_lines.append(create_error(line, None, xml.ErrorSeverity.OTHER, None, **kargs))
    child_lines = []
    # Templates may declare 'content' and 'hint' as a single string or a list.
    content = msg.get('content', [])
    if not isinstance(content, list):
        content = [content]
    hints = msg.get('hint', [])
    if not isinstance(hints, list):
        hints = [hints]
    for submsg in content:
        child_lines.append(create_error(submsg, None, xml.ErrorSeverity.OTHER, None, **kargs))
    for hint in hints:
        child_lines.append(create_error(hint, None, xml.ErrorSeverity.INFO, None, **kargs))
    if 'fact1' in kargs:
        # Anchor the error at the offending fact and nest the property lines
        # under a summary line.
        location = kargs['fact1']
        child_lines.append(create_error(msg_template_properties[0], None, xml.ErrorSeverity.OTHER, property_lines, **kargs))
    elif property_lines:
        child_lines.extend(property_lines)
    msg_text = '[%s] %s' % (rule_id, msg['msg'])
    error_log.report(create_error(msg_text, location, xml.ErrorSeverity.ERROR, child_lines, **kargs))
def decimal_comparison(fact1, fact2, cmp):
    """Compare two numeric facts at their least accurate common precision.

    Per XBRL, numbers are compared after rounding both to the lowest decimals
    value of the two facts (round half to even): e.g. 532,300,000 with
    decimals -5 is rounded to the million before being compared against
    532,000,000 with decimals -6. When both facts are exact (infinite
    decimals), *cmp* is called with the raw numeric values; otherwise it is
    called with the two rounded values plus the common decimals value.
    """
    precision = min(fact1.inferred_decimals, fact2.inferred_decimals)
    if precision == float('inf'):
        return cmp(fact1.numeric_value, fact2.numeric_value)
    return cmp(fact1.round_numeric_value(precision),
               fact2.round_numeric_value(precision), precision)
def equal_within_tolerance(val1, val2, decimals=None):
    """Return True if val1 equals val2 within the tolerance implied by *decimals*.

    The DQC rules allow a rounding tolerance of 2 at the scale of the reported
    values: e.g. values reported in millions (decimals=-6) tolerate a
    difference of 2 million. Without a decimals value the comparison is exact.
    """
    if decimals is None:
        return val1 == val2
    tolerance = decimal.Decimal(2).scaleb(-decimals)
    return abs(val1 - val2) <= tolerance
def less_or_equal(val1, val2, decimals=None):
    """Return True if val1 is at most val2.

    *decimals* is accepted only for signature compatibility with the other
    comparison callbacks used by decimal_comparison; it is ignored.
    """
    return val1 <= val2
def dimension_value(fact, dim):
    """Return the domain member of *fact* for dimension *dim*.

    Returns None when the fact's context does not carry that dimension aspect.
    """
    aspect = fact.dimension_aspect_value(dim)
    if not aspect:
        return None
    return aspect.value
def reporting_period_ends(instance, dei_namespace):
    """Return a dict mapping each legal-entity domain member to a
    (DocumentPeriodEndDate fact, period end date) tuple.

    For each legal entity only the fact with the latest context period end is
    kept; facts without a LegalEntityAxis aspect are keyed under None.
    """
    reporting_period_end_for_legal_entity = {}
    dim_LegalEntityAxis = instance.dts.resolve_concept(xml.QName('LegalEntityAxis', dei_namespace))
    concept_DocumentPeriodEndDate = instance.dts.resolve_concept(xml.QName('DocumentPeriodEndDate', dei_namespace))
    for fact in instance.facts.filter(concept_DocumentPeriodEndDate):
        # Amendment: Use the period end date of the context and not the DocumentPeriodEndDate value!
        end_date = fact.period_aspect_value.end
        legal_entity = dimension_value(fact, dim_LegalEntityAxis)
        if legal_entity not in reporting_period_end_for_legal_entity or reporting_period_end_for_legal_entity[legal_entity][1] < end_date:
            reporting_period_end_for_legal_entity[legal_entity] = (fact, end_date)
    return reporting_period_end_for_legal_entity
def textblock_facts(instance):
    """Return an xbrl.FactSet of facts whose concept's item type is, or is
    derived from, the DTR textBlockItemType.

    Returns an empty set when the DTS does not define textBlockItemType.
    """
    facts = xbrl.FactSet()
    type_textBlockItemType = instance.dts.schema.resolve_type_definition(xml.QName('textBlockItemType', 'http://www.xbrl.org/dtr/type/non-numeric'))
    if type_textBlockItemType:
        # Cache the per-concept derivation check; many facts share a concept.
        is_textblock_cache = {}
        for fact in instance.facts:
            is_textblock = is_textblock_cache.get(fact.concept, None)
            if is_textblock is None:
                is_textblock = fact.concept.type_definition.is_derived_from(type_textBlockItemType)
                is_textblock_cache[fact.concept] = is_textblock
            if is_textblock:
                facts.add(fact)
    return facts
def facts_in_namespace(instance, namespace, ignored):
    """Return an xbrl.FactSet of all facts whose concept lives in *namespace*,
    skipping concepts whose local name appears in *ignored*."""
    matches = xbrl.FactSet()
    for fact in instance.facts:
        name = fact.qname
        if name.namespace_name != namespace:
            continue
        if name.local_name in ignored:
            continue
        matches.add(fact)
    return matches
def is_extension(namespace):
    """Return True when *namespace* is a filer extension namespace, i.e. it does
    not match the module-level pattern of standard US-GAAP/SEC namespaces."""
    return not re_standard_ns.match(namespace)
def _subtree_children_iterate(network, concept, children):
for rel in network.relationships_from(concept):
children.append(rel)
_subtree_children_iterate(network, rel.target, children)
def _subtree_children(network, concept):
    """Return the list of all relationships in the subtree rooted at *concept*."""
    collected = []
    _subtree_children_iterate(network, concept, collected)
    return collected
def _get_dimension_values_iterate(network, concept, dims):
    """Walk the presentation tree below *concept*; for each XDT dimension found,
    record the relationships of its subtree (the candidate members) in *dims*.

    Recursion stops at dimensions — their subtrees are captured wholesale.
    """
    if not isinstance(concept, xbrl.xdt.Dimension):
        for rel in network.relationships_from(concept):
            _get_dimension_values_iterate(network, rel.target, dims)
        return
    dims[concept] = list(_subtree_children(network, concept))
def _get_dimension_values(network):
    """Return a dict mapping every dimension in *network* to the list of
    relationships forming its member subtree."""
    dims = {}
    for top in network.roots:
        _get_dimension_values_iterate(network, top, dims)
    return dims
def dqc_0001(instance, error_log, suppress_errors, namespaces):
    """DQC_0001 Axis with Inappropriate Members

    For each dimension in every presentation network, checks the members used
    against the allowed/disallowed/extension member lists configured in
    dqc_0001_axis_members and reports each offending (axis, member) pair once.
    """
    handled = set()
    for role in instance.dts.presentation_link_roles():
        for dim, rels in _get_dimension_values(instance.dts.presentation_network(role)).items():
            rule = dqc_0001_axis_members.get(dim.target_namespace, {}).get(dim.name)
            if rule:
                for rel in rels:
                    member = rel.target
                    # The dimension's default member is always permitted.
                    if dim.default_member == member:
                        continue
                    ext = is_extension(member.target_namespace)
                    if ext:
                        # Extension members: rule['extensions'] is either a blanket
                        # bool or an explicit whitelist of member names.
                        valid = rule['extensions'] if isinstance(rule['extensions'], bool) else member.name in rule['extensions']
                    elif rule['disallowed']:
                        valid = member.name not in rule['disallowed']
                    else:
                        valid = rule['allowed'] if isinstance(rule['allowed'], bool) else member.name in rule['allowed']
                    if not valid and (dim, member) not in handled:
                        # Mimick Arelle's behaviour of only reporting the first occurrence of each type of error
                        handled.add((dim, member))
                        rule_id = 'DQC.US.0001.'+rule['id'].split('.')[-1]
                        cs = xbrl.ConstraintSet()
                        cs[dim] = member
                        facts = instance.facts.filter(cs)
                        # Report once per fact using the pair; if no fact uses it,
                        # report the bare axis/member combination instead.
                        for fact in facts:
                            report_error(error_log, suppress_errors, rule_id, rel.arc, 'ext' if ext else 'std', Rule={'axis': dim, 'member': member}, fact1=fact)
                        if len(facts) == 0:
                            report_error(error_log, suppress_errors, rule_id, rel.arc, 'nofact', Rule={'axis': dim,
                                                                                                      'member': member}, group=xbrl.Error.Param(instance.dts.role_definition(role), tooltip=role))
def _dqc_0004(instance, error_log, suppress_errors, rule_id, concept1, concept2):
    """Report *rule_id* for each pair of non-nil facts of *concept1*/*concept2*
    in equivalent dimensions whose values are not equal within tolerance."""
    for fact1 in instance.facts.filter(concept1, allow_nil=False):
        # All comparisons between fact values occur between facts of equivalent dimensions. A rule will produce a message for each occurrence of the compared facts in equivalent dimensions.
        cs = xbrl.ConstraintSet(fact1)
        cs[xbrl.Aspect.CONCEPT] = concept2
        for fact2 in instance.facts.filter(cs, allow_nil=False, allow_additional_dimensions=False):
            if not decimal_comparison(fact1, fact2, equal_within_tolerance):
                report_error(error_log, suppress_errors, rule_id, fact1=fact1, fact2=fact2)
def dqc_0004_16(instance, error_log, suppress_errors, namespaces):
    """DQC_0004.16 Element Values Are Equal: Assets must equal
    LiabilitiesAndStockholdersEquity (the balance sheet equation)."""
    us_gaap_ns = get_namespace(namespaces, 'us-gaap')
    assets = instance.dts.resolve_concept(xml.QName('Assets', us_gaap_ns))
    liabilities_equity = instance.dts.resolve_concept(
        xml.QName('LiabilitiesAndStockholdersEquity', us_gaap_ns))
    # Both concepts must exist in the DTS for the check to apply.
    if assets and liabilities_equity:
        _dqc_0004(instance, error_log, suppress_errors, 'DQC.US.0004.16',
                  assets, liabilities_equity)
def dqc_0004(instance, error_log, suppress_errors, namespaces):
    """DQC_0004 Element Values Are Equal

    Entry point; currently only sub-check 16 (Assets ==
    LiabilitiesAndStockholdersEquity) is implemented.
    """
    dqc_0004_16(instance, error_log, suppress_errors, namespaces)
def _dqc_0005(instance, error_log, suppress_errors, rule_id, namespaces, facts, reporting_period_ends, cmp, additional_params=None):
    """Report *rule_id* for each fact in *facts* whose period end fails *cmp*
    against the reporting period end of its legal entity.

    *reporting_period_ends* maps legal-entity members to (DocumentPeriodEndDate
    fact, end date) tuples; entities without an entry of their own fall back to
    the LegalEntityAxis default member. *additional_params* (optional dict) is
    merged into the error message parameters.

    Fixes: removed an unused local (concept_EntityCommonStockSharesOutstanding
    was resolved but never used) and replaced the mutable default argument
    ``additional_params={}`` with a None sentinel.
    """
    if additional_params is None:
        additional_params = {}
    dei_ns = get_namespace(namespaces, 'dei')
    dim_LegalEntityAxis = instance.dts.resolve_concept(xml.QName('LegalEntityAxis', dei_ns))
    for fact1 in facts:
        reporting_period_end = reporting_period_ends.get(dimension_value(fact1, dim_LegalEntityAxis))
        if not reporting_period_end:
            # Fall back to the reporting period of the default (consolidated) entity.
            reporting_period_end = reporting_period_ends.get(dim_LegalEntityAxis.default_member)
        if reporting_period_end and not cmp(period_end(fact1), reporting_period_end[1]):
            params = {'fact1': fact1, 'dei:DocumentPeriodEndDate': reporting_period_end[0]}
            params.update(additional_params)
            report_error(error_log, suppress_errors, rule_id, **params)
def dqc_0005_17(instance, error_log, suppress_errors, namespaces, reporting_period_ends):
    """DQC_0005.17 Entity Common Stock, Shares Outstanding

    EntityCommonStockSharesOutstanding facts must be reported as of a date on
    or after the reporting period end (operator.ge is the passing condition).
    """
    dei_ns = get_namespace(namespaces, 'dei')
    concept_EntityCommonStockSharesOutstanding = instance.dts.resolve_concept(xml.QName('EntityCommonStockSharesOutstanding', dei_ns))
    facts = instance.facts.filter(concept_EntityCommonStockSharesOutstanding)
    _dqc_0005(instance, error_log, suppress_errors, 'DQC.US.0005.17', namespaces, facts, reporting_period_ends, operator.ge)
def dqc_0005_48(instance, error_log, suppress_errors, namespaces, reporting_period_ends):
    """DQC_0005.48 Subsequent events

    Facts carrying any SubsequentEventTypeAxis member must have a period end
    strictly after the reporting period end (operator.gt is the passing
    condition).
    """
    us_gaap_ns = get_namespace(namespaces, 'us-gaap')
    dim_SubsequentEventTypeAxis = instance.dts.resolve_concept(xml.QName('SubsequentEventTypeAxis', us_gaap_ns))
    if dim_SubsequentEventTypeAxis:
        # Select facts that DO have a SubsequentEventTypeAxis aspect by
        # subtracting those with an absent (None) value for that dimension.
        cs = xbrl.ConstraintSet()
        cs[dim_SubsequentEventTypeAxis] = xbrl.ExplicitDimensionAspectValue(dim_SubsequentEventTypeAxis, None)
        facts = instance.facts - instance.facts.filter(cs)
        _dqc_0005(instance, error_log, suppress_errors, 'DQC.US.0005.48', namespaces, facts, reporting_period_ends, operator.gt, {'us-gaap:SubsequentEventTypeAxis': dim_SubsequentEventTypeAxis})
def dqc_0005_49(instance, error_log, suppress_errors, namespaces, reporting_period_ends):
    """DQC_0005.49 Subsequent events

    Facts tagged with StatementScenarioAxis = ScenarioForecastMember must have
    a period end strictly after the reporting period end (operator.gt is the
    passing condition).
    """
    us_gaap_ns = get_namespace(namespaces, 'us-gaap')
    dim_StatementScenarioAxis = instance.dts.resolve_concept(xml.QName('StatementScenarioAxis', us_gaap_ns))
    if dim_StatementScenarioAxis:
        member_ScenarioForecastMember = instance.dts.resolve_concept(xml.QName('ScenarioForecastMember', us_gaap_ns))
        cs = xbrl.ConstraintSet()
        cs[dim_StatementScenarioAxis] = member_ScenarioForecastMember
        facts = instance.facts.filter(cs)
        _dqc_0005(instance, error_log, suppress_errors, 'DQC.US.0005.49', namespaces, facts, reporting_period_ends, operator.gt, {
            'us-gaap:StatementScenarioAxis': dim_StatementScenarioAxis, 'us-gaap:ScenarioForecastMember': member_ScenarioForecastMember})
def dqc_0005(instance, error_log, suppress_errors, namespaces):
    """DQC_0005 Context Dates After Period End Date

    Entry point: skips registration-statement form types (Appendix A), then
    runs sub-checks 17, 48 and 49 against the per-entity reporting period ends.
    """
    dei_ns = get_namespace(namespaces, 'dei')
    concept_DocumentType = instance.dts.resolve_concept(xml.QName('DocumentType', dei_ns))
    facts_DocumentType = instance.facts.filter(concept_DocumentType)
    if len(facts_DocumentType) != 1 or facts_DocumentType[0].normalized_value in ('S-1', 'S-3', 'S-4', 'S-6', 'S-8', 'S-11', 'S-20', 'S-1/A', 'S-3/A', 'S-4/A', 'S-6/A', 'S-8/A', 'S-11/A', 'S-20/A'):
        # Appendix A
        # Exclusions from the rule: S-1, S-3, S-4, S-6, S-8, S-11, S-20, S-1/A, S-3/A, S-4/A, S-6/A, S-8/A, S-11/A and S-20/A
        return
    reporting_periods = reporting_period_ends(instance, dei_ns)
    dqc_0005_17(instance, error_log, suppress_errors, namespaces, reporting_periods)
    dqc_0005_48(instance, error_log, suppress_errors, namespaces, reporting_periods)
    dqc_0005_49(instance, error_log, suppress_errors, namespaces, reporting_periods)
def _dqc_0006(instance, error_log, suppress_errors, dim_LegalEntityAxis, period_focus_for_legal_entity, facts):
    """Report DQC.US.0006.14 for each fact whose period length falls outside the
    day range allowed for its entity's DocumentFiscalPeriodFocus value.

    *period_focus_for_legal_entity* maps legal-entity members to their
    DocumentFiscalPeriodFocus fact; entities without an own entry fall back to
    the default member. Allowed (min, max) day ranges come from the
    module-level dqc_0006_period_focus_durations table.
    """
    for fact1 in facts:
        period_focus = period_focus_for_legal_entity.get(dimension_value(fact1, dim_LegalEntityAxis))
        if not period_focus:
            period_focus = period_focus_for_legal_entity.get(dim_LegalEntityAxis.default_member)
        if period_focus and period_focus.normalized_value in dqc_0006_period_focus_durations:
            duration = dqc_0006_period_focus_durations.get(period_focus.normalized_value)
            if not duration[0] <= period_duration(fact1) <= duration[1]:
                report_error(error_log, suppress_errors, 'DQC.US.0006.14', **{'fact1': fact1, 'dei:DocumentFiscalPeriodFocus': period_focus})
def dqc_0006(instance, error_log, suppress_errors, namespaces):
    """DQC_0006 DEI and Block Tag Date Contexts

    Checks that the DEI facts listed below and all text-block facts have a
    context duration consistent with the entity's DocumentFiscalPeriodFocus.
    Transition-period filings (form type ending in 'T' or 'T/A') are skipped.
    """
    dei_ns = get_namespace(namespaces, 'dei')
    concept_DocumentType = instance.dts.resolve_concept(xml.QName('DocumentType', dei_ns))
    facts_DocumentType = instance.facts.filter(concept_DocumentType)
    if len(facts_DocumentType) != 1 or facts_DocumentType[0].normalized_value.endswith('T') or facts_DocumentType[0].normalized_value.endswith('T/A'):
        # This rule also does not test any transition period filings, which are identified by the letter "T" in the form name.
        # Transition period filings are submitted when a filer changes their fiscal year.
        # Transition period filings may cover periods which are different from the general quarter or annual length.
        return
    dim_LegalEntityAxis = instance.dts.resolve_concept(xml.QName('LegalEntityAxis', dei_ns))
    concept_DocumentFiscalPeriodFocus = instance.dts.resolve_concept(xml.QName('DocumentFiscalPeriodFocus', dei_ns))
    # Map each legal entity to its DocumentFiscalPeriodFocus fact.
    period_focus_for_legal_entity = {}
    for fact in instance.facts.filter(concept_DocumentFiscalPeriodFocus):
        period_focus_for_legal_entity[dimension_value(fact, dim_LegalEntityAxis)] = fact
    # DEI concepts whose contexts are subject to the duration check.
    fact_names = [
        'AmendmentDescription',
        'AmendmentFlag',
        'CurrentFiscalYearEndDate',
        'DocumentPeriodEndDate',
        'DocumentFiscalYearFocus',
        'DocumentFiscalPeriodFocus',
        'DocumentType',
        'EntityRegistrantName',
        'EntityCentralIndexKey',
        'EntityFilerCategory',
    ]
    for name in fact_names:
        concept = instance.dts.resolve_concept(xml.QName(name, dei_ns))
        if concept:
            _dqc_0006(instance, error_log, suppress_errors, dim_LegalEntityAxis, period_focus_for_legal_entity, instance.facts.filter(concept))
    # Text-block facts are checked as well.
    _dqc_0006(instance, error_log, suppress_errors, dim_LegalEntityAxis, period_focus_for_legal_entity, textblock_facts(instance))
def dqc_0008(instance, error_log, suppress_errors, namespaces):
    """DQC_0008 Reversed Calculation

    Reports extension calculation relationships that run opposite to the
    direction defined in the US-GAAP taxonomy (per the dqc_0008_calculations
    table: target name -> list of expected source names).
    """
    dts = instance.dts
    ns = get_namespace(namespaces, 'us-gaap')
    us_gaap_calc = dqc_0008_calculations.get(ns)
    if us_gaap_calc:
        for linkrole in dts.calculation_link_roles(arcrole_summation_item):
            nw = dts.calculation_network(linkrole, arcrole_summation_item)
            for rel in nw.relationships:
                # If the filing computes target from source but US-GAAP defines
                # source as computed from target, the calculation is reversed.
                us_gaap_items = us_gaap_calc.get(rel.target_concept.name, [])
                if rel.source_concept.name in us_gaap_items:
                    report_error(error_log, suppress_errors, 'DQC.US.0008.6819', extCalcTarget=rel.target_concept, extCalcSource=rel.source_concept)
def dqc_0009(instance, error_log, suppress_errors, namespaces):
    """DQC_0009 Element A must be less than or equal to Element B

    For each (rule id, concept pair) in dqc_0009_facts, compares facts of the
    two concepts in equivalent dimensions and reports when the first value
    exceeds the second (after rounding to common precision).
    """
    for rule_id, prefix1, name1, prefix2, name2 in dqc_0009_facts:
        concept1 = instance.dts.resolve_concept(xml.QName(name1, get_namespace(namespaces, prefix1)))
        concept2 = instance.dts.resolve_concept(xml.QName(name2, get_namespace(namespaces, prefix2)))
        if concept1 and concept2:
            for fact1 in instance.facts.filter(concept1, allow_nil=False):
                # All comparisons between fact values occur between facts of equivalent dimensions. A rule will produce a message for each occurrence of the compared facts in equivalent dimensions.
                cs = xbrl.ConstraintSet(fact1)
                cs[xbrl.Aspect.CONCEPT] = concept2
                for fact2 in instance.facts.filter(cs, allow_nil=False, allow_additional_dimensions=False):
                    if not decimal_comparison(fact1, fact2, less_or_equal):
                        report_error(error_log, suppress_errors, rule_id, fact1=fact1, fact2=fact2)
def dqc_0011(instance, error_log, suppress_errors, namespaces):
    """DQC_0011 Dimensional Equivalents

    For each configured tuple, checks that a dimensionally-qualified fact
    (dimItemName with axisName=memberName) equals the corresponding
    non-dimensional line item (lineItemName) after applying *weight*.
    """
    ns = get_namespace(namespaces, 'us-gaap')
    for rule_id, lineItemName, dimItemName, axisName, memberName, weight in dqc_0011_facts:
        lineConcept = instance.dts.resolve_concept(xml.QName(lineItemName, ns))
        dimConcept = instance.dts.resolve_concept(xml.QName(dimItemName, ns))
        axisConcept = instance.dts.resolve_concept(xml.QName(axisName, ns))
        memberConcept = instance.dts.resolve_concept(xml.QName(memberName, ns))
        if lineConcept is None or dimConcept is None or axisConcept is None or memberConcept is None:
            continue
        # select all facts with name lineItemName and no value for explicit dimension axisName
        lineItemConstraintSet = xbrl.ConstraintSet()
        lineItemConstraintSet.add(xbrl.ConceptAspectValue(lineConcept))
        lineItemConstraintSet.add(xbrl.ExplicitDimensionAspectValue(axisConcept))
        lineFacts = instance.facts.filter(lineItemConstraintSet, allow_nil=False)
        for lineFact in lineFacts:
            if not isinstance(lineFact, xbrl.Item):
                continue
            # select all facts with name dimItemName and explicit dimension axisName=memberName and all other aspect values equal to their respective value of lineFact
            dimItemConstraintSet = lineFact.aspect_values
            dimItemConstraintSet.add(xbrl.ConceptAspectValue(dimConcept))
            dimItemConstraintSet.add(xbrl.ExplicitDimensionAspectValue(axisConcept, memberConcept))
            dimFacts = instance.facts.filter(dimItemConstraintSet, allow_nil=False, allow_additional_dimensions=False)
            lineValue = lineFact.effective_numeric_value
            for dimFact in dimFacts:
                if not isinstance(dimFact, xbrl.Item):
                    continue
                dimValue = dimFact.effective_numeric_value
                # The dimensional value times the configured weight must
                # reproduce the line item value exactly.
                if dimValue * weight != lineValue:
                    report_error(error_log, suppress_errors, rule_id, fact1=lineFact, fact2=dimFact, weight=weight)
def _dqc_0013_precondition_check(instance, namespaces, context):
    """Evaluate the DQC_0013 preconditions for *context*.

    For each precondition concept in dqc_0013_preconditions, if a fact of that
    concept exists in this context and the summation of the associated concept
    list is positive, returns the first precondition fact; otherwise None.
    """
    cs = xbrl.ConstraintSet(context)
    us_gaap_ns = get_namespace(namespaces, 'us-gaap')
    for name, summation in dqc_0013_preconditions.items():
        cs[xbrl.Aspect.CONCEPT] = instance.dts.resolve_concept(xml.QName(name, us_gaap_ns))
        precondition_facts = instance.facts.filter(cs, allow_nil=False, allow_additional_dimensions=False)
        if precondition_facts:
            val = 0
            # NOTE: the loop variable 'name' deliberately shadows the outer
            # loop variable here; it is not used again after this point.
            for name in summation:
                cs[xbrl.Aspect.CONCEPT] = instance.dts.resolve_concept(xml.QName(name, us_gaap_ns))
                for fact in instance.facts.filter(cs, allow_nil=False, allow_additional_dimensions=False):
                    val += fact.numeric_value
            if val > 0:
                return precondition_facts[0]
    return None
def dqc_0013(instance, error_log, suppress_errors, namespaces):
    """DQC_0013 Negative Values with Dependence

    Reports negative facts of the concepts in dqc_0013_facts, but only when
    the per-context precondition (see _dqc_0013_precondition_check) holds and
    no member-exclusion rule applies. Precondition results are cached per
    context since many facts share contexts.
    """
    cache = {}
    for rule_id, prefix, name in dqc_0013_facts:
        concept = instance.dts.resolve_concept(xml.QName(name, get_namespace(namespaces, prefix)))
        if concept:
            for fact1 in instance.facts.filter(concept, allow_nil=False):
                if fact1.numeric_value < 0 and not _dqc_0015_member_exclusions_check(fact1):
                    if fact1.context in cache:
                        precondition_fact = cache[fact1.context]
                    else:
                        precondition_fact = _dqc_0013_precondition_check(instance, namespaces, fact1.context)
                        cache[fact1.context] = precondition_fact
                    if precondition_fact:
                        report_error(error_log, suppress_errors, rule_id, fact1=fact1, preconditionfact=precondition_fact)
def has_dimensions(context):
    """Return True if *context* carries at least one dimension aspect value.

    Uses a for-loop probe instead of next()/StopIteration so it works with any
    iterable `dimension_aspect_values` (the previous implementation required
    an iterator object and would raise TypeError on a plain sequence).
    """
    for _ in context.dimension_aspect_values:
        return True
    return False
def dqc_0014(instance, error_log, suppress_errors, namespaces):
    """DQC_0014 Negative Values with No Dimensions

    Reports negative, non-nil facts of the concepts listed in dqc_0014_facts
    whose contexts carry no dimension aspects at all.
    """
    for rule_id, prefix, name in dqc_0014_facts:
        concept = instance.dts.resolve_concept(xml.QName(name, get_namespace(namespaces, prefix)))
        if concept:
            for fact1 in instance.facts.filter(concept, allow_nil=False):
                if fact1.numeric_value < 0 and not has_dimensions(fact1.context):
                    report_error(error_log, suppress_errors, rule_id, fact1=fact1)
def _dqc_0015_member_exclusions_test_contains(rule, dim_aspect):
name = dim_aspect.value.name if rule['dim'] == 'member' else dim_aspect.dimension.name
return re.search(rule['text'], name, re.IGNORECASE)
def _dqc_0015_member_exclusions_test_equals(rule, dim_aspect):
name = dim_aspect.value.name if rule['dim'] == 'member' else dim_aspect.dimension.name
return name == rule['name']
def _dqc_0015_member_exclusions_test(rule, dim_aspect):
    """Recursively evaluate a member-exclusion rule tree against *dim_aspect*.

    Leaf rules use 'contains' (regex) or 'equals' tests; composite rules
    combine two sub-rules with 'and'/'or'. Unknown test kinds raise
    RuntimeError.
    """
    kind = rule['test']
    if kind == 'contains':
        return _dqc_0015_member_exclusions_test_contains(rule, dim_aspect)
    if kind == 'equals':
        return _dqc_0015_member_exclusions_test_equals(rule, dim_aspect)
    if kind == 'and':
        return _dqc_0015_member_exclusions_test(rule['arg1'], dim_aspect) and \
            _dqc_0015_member_exclusions_test(rule['arg2'], dim_aspect)
    if kind == 'or':
        return _dqc_0015_member_exclusions_test(rule['arg1'], dim_aspect) or \
            _dqc_0015_member_exclusions_test(rule['arg2'], dim_aspect)
    raise RuntimeError('Unknown member exclusion test '+rule['test'])
def _dqc_0015_member_exclusions_check(fact):
    """True when any dimension aspect of *fact* matches any rule in the
    module-level dqc_0015_member_exclusions table."""
    return any(
        _dqc_0015_member_exclusions_test(rule, dim_aspect)
        for dim_aspect in fact.context.dimension_aspect_values
        for rule in dqc_0015_member_exclusions)
def dqc_0015(instance, error_log, suppress_errors, namespaces):
    """DQC_0015 Negative Values

    Reports negative, non-nil facts of the concepts listed in dqc_0015_facts,
    unless one of the member-exclusion rules applies to the fact's context.
    """
    for rule_id, prefix, name in dqc_0015_facts:
        concept = instance.dts.resolve_concept(xml.QName(name, get_namespace(namespaces, prefix)))
        if concept:
            for fact1 in instance.facts.filter(concept, allow_nil=False):
                if fact1.numeric_value < 0 and not _dqc_0015_member_exclusions_check(fact1):
                    report_error(error_log, suppress_errors, rule_id, fact1=fact1)
def _dqc_0018(error_log, suppress_errors, us_gaap, deprecated_concepts, network, rels):
    """Recursively walk the presentation relationships *rels*, reporting
    DQC.US.0018.34 for every target concept found in *deprecated_concepts*."""
    for rel in rels:
        if rel.target.target_namespace == us_gaap and rel.target.name in deprecated_concepts:
            report_error(error_log, suppress_errors, 'DQC.US.0018.34', rel.arc, element=rel.target, deprecatedlabel=deprecated_concepts[rel.target.name])
        _dqc_0018(error_log, suppress_errors, us_gaap, deprecated_concepts, network, network.relationships_from(rel.target))
def dqc_0018(instance, error_log, suppress_errors, namespaces):
    """DQC_0018 Deprecated Element is Used in the Filing

    Walks every presentation network and reports any US-GAAP concept found in
    the dqc_0018_concepts deprecation table (roots checked explicitly; the
    rest via _dqc_0018 recursion).
    """
    us_gaap = get_namespace(namespaces, 'us-gaap')
    deprecated_concepts = dqc_0018_concepts.get(us_gaap)
    if deprecated_concepts:
        for role in instance.dts.presentation_link_roles():
            network = instance.dts.presentation_network(role)
            for root in network.roots:
                # Roots have no incoming relationship, so check them directly.
                if root.target_namespace == us_gaap and root.name in deprecated_concepts:
                    report_error(error_log, suppress_errors, 'DQC.US.0018.34', element=root, deprecatedlabel=deprecated_concepts[root.name])
                _dqc_0018(error_log, suppress_errors, us_gaap, deprecated_concepts, network, network.relationships_from(root))
def dqc_0033(instance, error_log, suppress_errors, namespaces):
    """DQC_0033 Document Period End Date Context

    Checks that all DEI facts (except the exempt concepts listed below) use a
    context whose period end matches the DocumentPeriodEndDate context of
    their legal entity. Entities whose DocumentPeriodEndDate value disagrees
    with its own context end by more than 3 days are skipped (is_valid False),
    since that situation is reported separately by DQC_0036.
    """
    dei_namespace = get_namespace(namespaces, 'dei')
    dim_LegalEntityAxis = instance.dts.resolve_concept(xml.QName('LegalEntityAxis', dei_namespace))
    reporting_periods = {}
    concept_DocumentPeriodEndDate = instance.dts.resolve_concept(xml.QName('DocumentPeriodEndDate', dei_namespace))
    for fact1 in instance.facts.filter(concept_DocumentPeriodEndDate):
        # XBRL end dates are exclusive, hence the +1 day on the fact value.
        end_date = datetime.datetime.combine(fact1.element.schema_actual_value.value, datetime.time()) + datetime.timedelta(days=1)
        is_valid = abs((end_date - fact1.period_aspect_value.end).days) <= 3
        legal_entity = dimension_value(fact1, dim_LegalEntityAxis)
        reporting_periods[legal_entity] = (fact1, is_valid)
    for fact1 in facts_in_namespace(instance, dei_namespace, ('EntityCommonStockSharesOutstanding', 'EntityPublicFloat', 'DocumentPeriodEndDate', 'EntityNumberOfEmployees', 'EntityListingDepositoryReceiptRatio')):
        reporting_period = reporting_periods.get(dimension_value(fact1, dim_LegalEntityAxis))
        if not reporting_period:
            reporting_period = reporting_periods.get(dim_LegalEntityAxis.default_member)
        if reporting_period and reporting_period[1] and period_end(fact1) != period_end(reporting_period[0]):
            report_error(error_log, suppress_errors, 'DQC.US.0033.2', **{'fact1': fact1, 'dei:DocumentPeriodEndDate': reporting_period[0]})
def dqc_0036(instance, error_log, suppress_errors, namespaces):
    """DQC_0036 Document Period End Date Context / Fact Value Check

    Reports each DocumentPeriodEndDate fact whose context period end is more
    than 3 days away from the fact's own reported date value.
    """
    dei_ns = get_namespace(namespaces, 'dei')
    concept = instance.dts.resolve_concept(xml.QName('DocumentPeriodEndDate', dei_ns))
    one_day = datetime.timedelta(days=1)
    for fact1 in instance.facts.filter(concept):
        # Day after the reported date value, compared against the context end.
        reported = datetime.datetime.combine(fact1.element.schema_actual_value.value, datetime.time()) + one_day
        delta_days = (reported - fact1.period_aspect_value.end).days
        if abs(delta_days) > 3:
            report_error(error_log, suppress_errors, 'DQC.US.0036.1', fact1=fact1)
def dqc_0041(instance, error_log, suppress_errors, namespaces):
    """DQC_0041 Axis with a Default Member that Differs from the US GAAP Taxonomy

    Flags explicit dimensions whose default member differs from the default
    recorded for that axis in dqc_0041_default_members.
    """
    for dim in instance.dts.dimensions:
        if dim.is_explicit():
            default_member = dim.default_member
            if not default_member:
                # Axis declares no default member at all; nothing to compare.
                continue
            usgaap_default_member = dqc_0041_default_members.get(dim.target_namespace, {}).get(dim.name)
            if usgaap_default_member and default_member.name != usgaap_default_member:
                report_error(error_log, suppress_errors, 'DQC.US.0041.73', axis=dim, axis_default=instance.dts.resolve_concept(
                    xml.QName(usgaap_default_member, dim.target_namespace)), default=default_member)
def _dqc_0043_recurse(instance, rule_id, error_log, suppress_errors, exclude, ncf, ocf, nw, child, effective_weight):
    """Recursively checks calculation weights below an operating cash flow total.

    ``effective_weight`` is the product of arc weights along the path from the
    operating cash flow element; a debit concept reached with negative weight,
    or a credit concept reached with positive weight, is reported. Concepts in
    ``exclude`` are skipped (but their subtrees are still traversed).
    """
    if child.name not in exclude:
        if child.balance == xbrl.taxonomy.Balance.DEBIT:
            if effective_weight < 0:
                report_error(error_log, suppress_errors, rule_id, child, 'debit', fact1=child, NetCashProvidedByUsedInOperatingActivities=ncf,
                             NetCashProvidedByUsedInOperatingActivitiesContinuingOperations=ocf)
        elif child.balance == xbrl.taxonomy.Balance.CREDIT:
            if effective_weight > 0:
                report_error(error_log, suppress_errors, rule_id, child, 'credit', fact1=child, NetCashProvidedByUsedInOperatingActivities=ncf,
                             NetCashProvidedByUsedInOperatingActivitiesContinuingOperations=ocf)
    # Multiply the child arc weight into the running effective weight.
    for rel in nw.relationships_from(child):
        _dqc_0043_recurse(instance, rule_id, error_log, suppress_errors, exclude, ncf, ocf, nw, rel.target_concept, effective_weight*rel.weight)
def dqc_0043(instance, error_log, suppress_errors, namespaces):
    """DQC_0043 Incorrect Calculation Weights in Operating Cash Flows

    For each configured operating cash flow element, walks its calculation
    subtree in the first link role where it appears and checks arc weight
    signs via _dqc_0043_recurse.
    """
    dts = instance.dts
    ns = get_namespace(namespaces, 'us-gaap')
    ncf = dts.resolve_concept(xml.QName('NetCashProvidedByUsedInOperatingActivities', ns))
    exclude = set(dqc_0043_data['exclude'])
    for rule_id, ocf_name in dqc_0043_data['rules']:
        ocf = dts.resolve_concept(xml.QName(ocf_name, ns))
        if not ocf:
            # Element not present in this DTS; try the next rule.
            continue
        ocf_found = False
        for linkrole in dts.calculation_link_roles(arcrole_summation_item):
            nw = dts.calculation_network(linkrole, arcrole_summation_item)
            for rel in nw.relationships_from(ocf):
                ocf_found = True
                _dqc_0043_recurse(instance, rule_id, error_log, suppress_errors, exclude, ncf, ocf, nw, rel.target_concept, rel.weight)
            # stop at first operating cash flow element used in calculation linkbase
            if ocf_found:
                break
def traverse_calc_multi(instance, error_log, suppress_errors, parents, level, function, *args):
    """Walks calculation networks depth-first below each concept in *parents*.

    For every summation-item relationship reachable from a parent, calls
    ``function(instance, error_log, suppress_errors, rel, parent, *args)``.
    ``level`` bounds how many relationships for which *function* returns True
    may be traversed along a path; a negative ``level`` means unlimited depth.
    Each concept is visited at most once per link role.
    """
    dts = instance.dts
    for linkrole in dts.calculation_link_roles(arcrole_summation_item):
        nw = dts.calculation_network(linkrole, arcrole_summation_item)
        visited = set()
        for parent in parents:
            if not parent:
                # Unresolved concept (resolve_concept returned None).
                continue
            todo = [(parent, level)]
            while todo:
                concept, cur_level = todo.pop()
                if concept in visited:
                    continue
                visited.add(concept)
                for rel in nw.relationships_from(concept):
                    if function(instance, error_log, suppress_errors, rel, parent, *args):
                        cur_level = cur_level - 1
                    # BUG FIX: with a negative starting level (used by several
                    # rules to mean "whole subtree", cf. the check callbacks'
                    # "continue traversing" comments) the old `cur_level > 0`
                    # test alone never queued children, so only the direct
                    # children of parent were ever examined. Positive-level
                    # behavior is unchanged.
                    if cur_level > 0 or level < 0:
                        todo.append((rel.target_concept, cur_level))
def traverse_calc_single(instance, error_log, suppress_errors, parent, level, function, *args):
    """Convenience wrapper around traverse_calc_multi for a single parent concept."""
    return traverse_calc_multi(instance, error_log, suppress_errors, [parent], level, function, *args)
def _dqc_0044_check_item(instance, error_log, suppress_errors, rel, parent, rule_id, accrual_concepts):
    """Reports every fact of an accrual concept found below a cash flow total."""
    item = rel.target_concept
    if item.name in accrual_concepts:
        # [CHECK] should we report nil valued facts?
        for fact in instance.facts.filter(item, allow_nil=True):
            report_error(error_log, suppress_errors, rule_id, fact, fact1=fact, sum=parent)
    return True # continue traversing the subtree of item
def dqc_0044(instance, error_log, suppress_errors, namespaces):
    """DQC_0044 Accrual Items used in Investing/Financing Cash flow Reconciliation

    For each configured investing/financing total, reports facts of accrual
    concepts found anywhere in its calculation subtree.
    """
    dts = instance.dts
    ns = get_namespace(namespaces, 'us-gaap')
    dqc_0044_concepts = dqc_0044_data['concepts']
    dqc_0044_rules = dqc_0044_data['rules']
    if ns in dqc_0044_concepts:
        accrual_concepts = set(dqc_0044_concepts[ns])
        for rule_id, parent_name in dqc_0044_rules:
            parent_concept = dts.resolve_concept(xml.QName(parent_name, ns))
            if not parent_concept:
                # Total not present in this DTS; try the next rule.
                continue
            # level=-1: examine the whole calculation subtree.
            traverse_calc_single(instance, error_log, suppress_errors, parent_concept, -1, _dqc_0044_check_item, rule_id, accrual_concepts)
def _dqc_0045_check_item(instance, error_log, suppress_errors, rel, extension_parent, rule_id, us_gaap_parent, us_gaap_items):
    """Reports a concept found below an extension total when it is expected below the us-gaap total."""
    child = rel.target_concept
    if child.name in us_gaap_items:
        # [CHECK] should we report facts?
        # for fact in instance.facts.filter(child, allow_nil=False):
        #    report_error(error_log, suppress_errors, rule_id, fact, Result_item=fact, item2=extension_parent, item1=us_gaap_parent)
        # Currently the arc itself is reported, not the individual facts.
        report_error(error_log, suppress_errors, rule_id, rel.arc, Result_item=child, item2=extension_parent, item1=us_gaap_parent)
    return True # continue traversing the subtree of item
def dqc_0045(instance, error_log, suppress_errors, namespaces):
    """DQC_0045 Movement of Concepts between Calculation Trees

    For each configured rule, flags listed concepts that appear in the
    calculation subtree of an extension total instead of the corresponding
    us-gaap total.
    """
    dts = instance.dts
    ns = get_namespace(namespaces, 'us-gaap')
    rules = dqc_0045_data.get(ns)
    if not rules:
        return
    for rule_id, rule_data in rules.items():
        gaap_concept = dts.resolve_concept(xml.QName(rule_data['us-gaap'], ns))
        if not gaap_concept:
            continue
        ext_concept = dts.resolve_concept(xml.QName(rule_data['extension'], ns))
        if not ext_concept:
            continue
        item_names = set(rule_data['items'])
        if not item_names:
            continue
        # level=-1: examine the extension total's whole calculation subtree.
        traverse_calc_single(instance, error_log, suppress_errors, ext_concept, -1,
                             _dqc_0045_check_item, rule_id, gaap_concept, item_names)
def _dqc_0046_check_item(instance, error_log, suppress_errors, rel, parent, rules):
    """Reports rel's target when its name is listed in *rules*; always keeps traversing."""
    descendant = rel.target_concept
    rule_id = rules.get(descendant.name)
    if rule_id is not None:
        report_error(error_log, suppress_errors, rule_id, descendant, parentElement=parent, childElement=descendant)
    return True # continue traversing the subtree
def dqc_0046(instance, error_log, suppress_errors, namespaces):
    """DQC_0046 Inappropriate Calculation Descendent

    Checks each configured parent's calculation subtree (to the configured
    depth) for descendants that should not appear below it.
    """
    dts = instance.dts
    ns = get_namespace(namespaces, 'us-gaap')
    for parent_name, level, rules in dqc_0046_data:
        parent_concept = dts.resolve_concept(xml.QName(parent_name, ns))
        if not parent_concept:
            # Parent element not present in this DTS.
            continue
        traverse_calc_single(instance, error_log, suppress_errors, parent_concept, level, _dqc_0046_check_item, rules)
def _dqc_0047_check_item(instance, error_log, suppress_errors, rel, parent):
    """Reports standard (non-extension) descendants that lack a balance attribute."""
    child = rel.target_concept
    # The continuing-operations element is exempted by name.
    if not child.balance and not is_extension(child.target_namespace) and child.name != 'NetCashProvidedByUsedInOperatingActivitiesContinuingOperations':
        report_error(error_log, suppress_errors, "DQC.US.0047.7481", child, parentElement=parent, childElement=child)
    return True # continue traversing the subtree of item
def dqc_0047(instance, error_log, suppress_errors, namespaces):
    """DQC_0047 Calculation Descendants with No Balance Type

    Checks the whole calculation subtree below both operating cash flow
    totals for standard concepts missing a balance attribute.
    """
    dts = instance.dts
    ns = get_namespace(namespaces, 'us-gaap')
    parent_concepts = [dts.resolve_concept(xml.QName(_, ns)) for _ in ['NetCashProvidedByUsedInOperatingActivities', 'NetCashProvidedByUsedInOperatingActivitiesContinuingOperations']]
    traverse_calc_multi(instance, error_log, suppress_errors, parent_concepts, -1, _dqc_0047_check_item)
def _get_cashflow_linkroles(dts, ns):
    """Returns the linkroles of cash flow presentation trees.

    A link role qualifies when its role definition contains '- Statement', it
    is not a parenthetical statement, and it either mentions 'cashflow' in the
    role URI or presents StatementOfCashFlowsAbstract.
    """
    linkroles = set()
    statementOfCashFlowsAbstract = dts.resolve_concept(xml.QName('StatementOfCashFlowsAbstract', ns))
    for linkrole in dts.presentation_link_roles(arcrole_parent_child):
        linkrole_lower = linkrole.lower()
        roleDef = dts.role_definition(linkrole)
        # Idiom fix: `in`/`not in` instead of str.find() compared against -1.
        if roleDef and '- Statement' in roleDef and 'parenthetical' not in linkrole_lower:
            nw = dts.presentation_network(linkrole, arcrole_parent_child)
            if 'cashflow' in linkrole_lower or (statementOfCashFlowsAbstract is not None and any(nw.relationships_from(statementOfCashFlowsAbstract))):
                linkroles.add(linkrole)
    return linkroles
def dqc_0048(instance, error_log, suppress_errors, namespaces):
    """DQC_0048 Required Calculation Parent Element in the Cash Flow Statement

    Collects the calculation roots of all cash flow link roles and verifies
    that at least one configured root combination is fully present.
    """
    dts = instance.dts
    ns = get_namespace(namespaces, 'us-gaap')
    presentation_linkroles = _get_cashflow_linkroles(dts, ns)
    calculation_linkroles = []
    calculation_roots = set()
    for linkrole in presentation_linkroles:
        nw = dts.calculation_network(linkrole, arcrole_summation_item)
        if nw:
            calculation_linkroles.append(linkrole)
            calculation_roots.update(nw.roots)
    if calculation_linkroles:
        valid = False
        # Any single complete root set from dqc_0048_roots satisfies the rule.
        for roots in dqc_0048_roots:
            root_concepts = set(dts.resolve_concept(xml.QName(_, ns)) for _ in roots)
            if root_concepts.issubset(calculation_roots):
                valid = True
                break
        if not valid:
            report_error(error_log, suppress_errors, "DQC.US.0048.7482", None, networkRole=sorted(calculation_linkroles), elementNames=sorted(calculation_roots))
def _dqc_0049_root_filter(concept):
    """True for standard (non-extension) duration items, excluding the named
    noncash acquisition element."""
    return (isinstance(concept, xbrl.taxonomy.Item) and
            concept.period_type == xbrl.taxonomy.PeriodType.DURATION and
            not is_extension(concept.target_namespace) and
            concept.name != 'NoncashOrPartNoncashAcquisitionNetNonmonetaryAssetsAcquiredLiabilitiesAssumed1')
def dqc_0049(instance, error_log, suppress_errors, namespaces):
    """DQC_0049 Single Calculation Tree for Change in Cash Flows

    Reports cash flow link roles whose calculation network has more than one
    qualifying root while containing one of the required change-in-cash roots.
    """
    dts = instance.dts
    ns = get_namespace(namespaces, 'us-gaap')
    presentation_linkroles = _get_cashflow_linkroles(dts, ns)
    requiredRoots = set([dts.resolve_concept(xml.QName(_, ns)) for _ in dqc_0049_roots])
    for linkrole in presentation_linkroles:
        nw = dts.calculation_network(linkrole, arcrole_summation_item)
        if nw:
            # Only standard duration items count as competing roots.
            roots = set(filter(_dqc_0049_root_filter, nw.roots))
            if len(roots) > 1 and not roots.isdisjoint(requiredRoots):
                report_error(error_log, suppress_errors, "DQC.US.0049.7483", None, networkRole=linkrole, elementNames=sorted(roots))
def _dqc_0051_check_instance(instance, parent, child):
    """Checks if both parent and child are present in the instance with the same aspect values."""
    for child_fact in instance.facts.filter(child, allow_nil=False):
        # Look for a parent fact sharing every non-concept aspect of child_fact.
        constraints = xbrl.ConstraintSet(child_fact)
        constraints.add(xbrl.ConceptAspectValue(parent))
        matching_parents = instance.facts.filter(constraints, allow_nil=False)
        if len(matching_parents) > 0:
            return True
    return False
def _dqc_0051_check_item(instance, error_log, suppress_errors, rel, parent, tax_items):
    """Reports a tax item found in the calculation subtree of a before-tax total."""
    child = rel.target_concept
    if child.target_namespace == parent.target_namespace and child.name in tax_items:
        # Only report when both concepts actually co-occur in the instance.
        if _dqc_0051_check_instance(instance, parent, child):
            report_error(error_log, suppress_errors, dqc_0051_data['rules'][parent.name], rel.arc, element=parent, childElement=child, networkRole=rel.role)
            return False # only report the first tax child item
    return True # continue traversing the subtree of item
def dqc_0051(instance, error_log, suppress_errors, namespaces):
    """DQC_0051 Before Tax Items

    Checks the whole calculation subtree of each configured before-tax total
    for income tax items that should not contribute to it.
    """
    dts = instance.dts
    ns = get_namespace(namespaces, 'us-gaap')
    income_tax_items = set(dqc_0051_data['tax_items'])
    parent_concepts = [dts.resolve_concept(xml.QName(_, ns)) for _ in dqc_0051_data['rules'].keys()]
    traverse_calc_multi(instance, error_log, suppress_errors, parent_concepts, -1, _dqc_0051_check_item, income_tax_items)
def dqc_0052(instance, error_log, suppress_errors, namespaces):
    """DQC_0052 Member Values

    Reports every non-nil fact dimensioned with a configured axis/member
    pair. Only applies to us-gaap taxonomies from 2017 on.
    """
    dts = instance.dts
    ns, year = get_namespace_and_year(namespaces, 'us-gaap')
    if int(year) < 2017:
        return
    for rule, dim_name, member_name in dqc_0052_data:
        dimension = dts.resolve_concept(xml.QName(dim_name, ns))
        member = dts.resolve_concept(xml.QName(member_name, ns))
        if dimension is not None and member is not None:
            constraintSet = xbrl.ConstraintSet()
            constraintSet.add(xbrl.ExplicitDimensionAspectValue(dimension, member))
            for fact in instance.facts.filter(constraintSet, allow_nil=False):
                report_error(error_log, suppress_errors, rule, fact, fact1=fact, axis=dimension, member=member)
def dqc_0053(instance, error_log, suppress_errors, namespaces):
    """DQC_0053 Excluded Members from an Axis

    For each configured axis/member pair, walks the dimensional relationship
    set; when the excluded member is reachable from the axis, reports either
    every fact using that combination or, if none exist, the arc itself.
    Only applies to us-gaap taxonomies from 2017 on.
    """
    dts = instance.dts
    ns, year = get_namespace_and_year(namespaces, 'us-gaap')
    if int(year) < 2017:
        return
    for rule, dim_name, member_name in dqc_0053_data:
        dimension = dts.resolve_concept(xml.QName(dim_name, ns))
        member = dts.resolve_concept(xml.QName(member_name, ns))
        if dimension is not None and member is not None:
            drs = dts.dimensional_relationship_set()
            for linkrole in drs.link_roles():
                # Depth-first search of the dimension-domain tree for `member`.
                todo = list(drs.dimension_domain_relationships(dimension, linkrole))
                while todo:
                    rel = todo.pop()
                    if rel.target_concept == member:
                        constraintSet = xbrl.ConstraintSet()
                        constraintSet.add(xbrl.ExplicitDimensionAspectValue(dimension, member))
                        errorReported = False
                        for fact in instance.facts.filter(constraintSet, allow_nil=False):
                            report_error(error_log, suppress_errors, rule, fact, 'fact', fact1=fact, axis=dimension, member=member)
                            errorReported = True
                        if not errorReported:
                            # No facts use the combination; flag the modelling itself.
                            report_error(error_log, suppress_errors, rule, rel.arc, 'nofact', member=member, axis=dimension, group=dts.role_definition(linkrole))
                    else:
                        todo.extend(drs.consecutive_relationships(rel))
def dqc_0054(instance, error_log, suppress_errors, namespaces):
    """DQC_0054 Excluded Dimensions from a Table

    Reports hypercube-dimension arcs that attach a configured dimension to a
    table where it must not appear.
    """
    dts = instance.dts
    ns = get_namespace(namespaces, 'us-gaap')
    for rule, hc_name, dim_name in dqc_0054_data:
        hc = dts.resolve_concept(xml.QName(hc_name, ns))
        dimension = dts.resolve_concept(xml.QName(dim_name, ns))
        if dimension is not None and hc is not None:
            drs = dts.dimensional_relationship_set()
            for linkrole in drs.link_roles():
                for rel in drs.hypercube_dimension_relationships(hc, linkrole):
                    if rel.target_concept == dimension:
                        report_error(error_log, suppress_errors, rule, rel.arc, axis=dimension, table=hc)
                        # One report per link role is enough.
                        break
def dqc_0055(instance, error_log, suppress_errors, namespaces):
    """DQC_0055 Required Member on An Axis

    For each configured axis, checks every presentation subtree below it:
    if none of the required members appear (and the axis presents more than
    just its domain), an error is reported. Applies from us-gaap 2017 on.
    """
    dts = instance.dts
    ns, year = get_namespace_and_year(namespaces, 'us-gaap')
    if int(year) < 2017:
        return
    for rule, axis_name, domain_name, member_names in dqc_0055_data:
        axis = dts.resolve_concept(xml.QName(axis_name, ns))
        domain = dts.resolve_concept(xml.QName(domain_name, ns))
        members = set(dts.resolve_concept(xml.QName(_, ns)) for _ in member_names)
        if axis is not None and domain is not None and all(_ is not None for _ in members):
            for linkrole in dts.presentation_link_roles():
                rels = _subtree_children(dts.presentation_network(linkrole, arcrole_parent_child), axis)
                children = set(_.target_concept for _ in rels)
                if children and children.isdisjoint(members):
                    # A lone domain child is acceptable; anything else is not.
                    if domain not in children or len(children) > 1:
                        report_error(error_log, suppress_errors, rule, rels[0].from_locator, axis=axis, members=sorted(members), networkRole=linkrole)
def dqc_0057(instance, error_log, suppress_errors, namespaces):
    """DQC_0057 Cash Flow Opening and Closing Balances

    Verifies that each cash flow statement presents at least one of the
    expected balance elements with an opening or closing (periodStart /
    periodEnd) preferred label.
    """
    dts = instance.dts
    ns = get_namespace(namespaces, 'us-gaap')
    # Concepts from dqc_0057_data that exist in this DTS (set comprehension
    # replaces the original filter(lambda ...) construction).
    expectedBalanceElements = {c for c in (dts.resolve_concept(xml.QName(_, ns)) for _ in dqc_0057_data) if c is not None}
    for linkrole in _get_cashflow_linkroles(dts, ns):
        nw = dts.presentation_network(linkrole, arcrole_parent_child)
        balanceElements = set()
        for root in nw.roots:
            for rel in _subtree_children(nw, root):
                # Collect every concept presented with an opening/closing
                # balance label. (Removed the unused `location = rel.arc`
                # dead store from the original.)
                if rel.preferred_label in opening_label_roles or rel.preferred_label in closing_label_roles:
                    balanceElements.add(rel.target_concept)
        if expectedBalanceElements.isdisjoint(balanceElements):
            report_error(error_log, suppress_errors, 'DQC.US.0057.7494', None, elementNames=dqc_0057_data, balanceElements=sorted(balanceElements), networkRole=linkrole)
def dqc_0060(instance, error_log, suppress_errors, namespaces):
    """DQC_0060 Element Dependence for Specific Elements

    Every reported fact of the rule's primary element must be accompanied, in
    the same context, by a fact of at least one of its dependent elements.
    """
    dts = instance.dts
    ns = get_namespace(namespaces, 'us-gaap')
    for rule, reported_name, dependent_names, general_name in dqc_0060_data:
        reported_concept = dts.resolve_concept(xml.QName(reported_name, ns))
        general_concept = dts.resolve_concept(xml.QName(general_name, ns))
        dependent_concepts = [dts.resolve_concept(xml.QName(_, ns)) for _ in dependent_names]
        # Idiom fix: generator expression instead of any(filter(lambda ...)).
        if reported_concept is None or general_concept is None or any(_ is None for _ in dependent_concepts):
            continue
        reported_concept_constraint_set = xbrl.ConstraintSet()
        reported_concept_constraint_set.add(xbrl.ConceptAspectValue(reported_concept))
        for fact in instance.facts.filter(reported_concept_constraint_set, allow_nil=True, allow_additional_dimensions=False):
            # Swap only the concept aspect; all other aspects stay as on `fact`.
            constraintSet = xbrl.ConstraintSet(fact)
            dependent_fact_found = False
            for dependent_concept in dependent_concepts:
                constraintSet.add(xbrl.ConceptAspectValue(dependent_concept))
                if len(instance.facts.filter(constraintSet, allow_nil=True, allow_additional_dimensions=False)) > 0:
                    dependent_fact_found = True
                    break
            if not dependent_fact_found:
                report_error(error_log, suppress_errors, rule, fact, fact1=fact, DependentElements=dependent_concepts, GeneralElement=general_concept)
def _dqc_0061_is_ancestor_of(nw, ancestor, descendant):
"""Returns True if ancestor is an ancestor of descendant within relationship network nw"""
if ancestor == descendant:
return True
for rel in nw.relationships_to(descendant):
if _dqc_0061_is_ancestor_of(nw, ancestor, rel.source_concept):
return True
return False
def dqc_0061(instance, error_log, suppress_errors, namespaces):
    """DQC_0061 Cash Flow Continuing Operations Elements not Used

    Reports when a change-in-cash root reaches the child element without
    going through the expected continuing-operations parent.
    """
    dts = instance.dts
    ns = get_namespace(namespaces, 'us-gaap')
    for rule, parent_name, child_name in dqc_0061_data:
        parent_concept = dts.resolve_concept(xml.QName(parent_name, ns))
        child_concept = dts.resolve_concept(xml.QName(child_name, ns))
        if parent_concept is None or child_concept is None:
            continue
        for linkrole in dts.calculation_link_roles(arcrole_summation_item):
            nw = dts.calculation_network(linkrole, arcrole_summation_item)
            # Candidate roots: direct calculation parents of parent_concept.
            for root_concept in [_.source_concept for _ in nw.relationships_to(parent_concept)]:
                if _dqc_0061_is_ancestor_of(nw, root_concept, child_concept) and not _dqc_0061_is_ancestor_of(nw, parent_concept, child_concept):
                    report_error(error_log, suppress_errors, rule, None, ParentElement=parent_concept, ChildElement=child_concept, changeInCashElement=root_concept)
def dqc_0062(instance, error_log, suppress_errors, namespaces):
    """DQC_0062 No Fact Value for Change in Cash

    If the filing has a cash flow statement, at least one of the configured
    change-in-cash elements must have a non-nil, undimensioned fact.
    """
    dts = instance.dts
    ns = get_namespace(namespaces, 'us-gaap')
    cashflow_linkroles = _get_cashflow_linkroles(dts, ns)
    if cashflow_linkroles:
        for fact_name in dqc_0062_data:
            concept = dts.resolve_concept(xml.QName(fact_name, ns))
            if concept is None:
                continue
            constraintSet = xbrl.ConstraintSet()
            constraintSet.add(xbrl.ConceptAspectValue(concept))
            if len(instance.facts.filter(constraintSet, allow_nil=False, allow_additional_dimensions=False)) > 0:
                return # at least one fact found => no error
        report_error(error_log, suppress_errors, "DQC.US.0062.7501")
def dqc_0065(instance, error_log, suppress_errors, namespaces):
    """DQC_0065 Interest Paid Net (Operating) Not on Cash Flow

    If the supplemental cash flow section presents InterestPaid but neither
    InterestPaidNet nor InterestPaidCapitalized, any non-zero undimensioned
    InterestPaid fact is reported.
    """
    dts = instance.dts
    ns = get_namespace(namespaces, 'us-gaap')
    cashflow_concept = dts.resolve_concept(xml.QName('SupplementalCashFlowInformationAbstract', ns))
    interestPaid_concept = dts.resolve_concept(xml.QName('InterestPaid', ns))
    interestPaidNet_concept = dts.resolve_concept(xml.QName('InterestPaidNet', ns))
    interestPaidCapitalized_concept = dts.resolve_concept(xml.QName('InterestPaidCapitalized', ns))
    if cashflow_concept is None or interestPaid_concept is None:
        return
    for linkrole in dts.presentation_link_roles(arcrole_parent_child):
        nw = dts.presentation_network(linkrole, arcrole_parent_child)
        # Reuse `nw`; the original rebuilt the identical network a second
        # time on the next line and left this variable unused.
        children = set(_.target_concept for _ in _subtree_children(nw, cashflow_concept))
        if interestPaid_concept in children and interestPaidNet_concept not in children and interestPaidCapitalized_concept not in children:
            constraintSet = xbrl.ConstraintSet()
            constraintSet.add(xbrl.ConceptAspectValue(interestPaid_concept))
            for interestPaid_fact in instance.facts.filter(constraintSet, allow_nil=False, allow_additional_dimensions=False):
                if interestPaid_fact.effective_numeric_value != 0:
                    report_error(error_log, suppress_errors, "DQC.US.0065.7502", interestPaid_fact, fact1=interestPaid_fact)
                    break
def standard_namespaces(dts):
    """Returns a dict of prefix and (namespace,year) key/value pairs for standard namespaces."""
    namespaces = {}
    for taxonomy in dts.taxonomy_schemas:
        if taxonomy.target_namespace:
            # Renamed loop variable (was `re`): it shadowed the stdlib `re`
            # module. The values are pre-compiled namespace patterns whose
            # first group captures the taxonomy year.
            for prefix, pattern in re_namespaces.items():
                m = pattern.fullmatch(taxonomy.target_namespace)
                if m:
                    namespaces[prefix] = (taxonomy.target_namespace, m.group(1))
    return namespaces
def parse_suppress_errors(params):
    """Returns a list with suppressed error codes.

    Reads the '|'-separated `suppressErrors` script parameter; a missing or
    empty value yields an empty list.
    """
    codes = params.get('suppressErrors')
    return codes.split('|') if codes else []
def validate(instance, error_log, **params):
    """Performs additional validation of xBRL instance according to DQC rules.

    Runs every implemented DQC rule in order when the filing uses the dei
    taxonomy. Rules report through `error_log`; codes listed in the
    `suppressErrors` script parameter are skipped inside report_error.
    """
    if instance:
        # Informational banner linking to the DQC rules guidance.
        error_log.report(xbrl.Error.create(
            'Verified {DQC} with Altova RaptorXML+XBRL',
            severity=xml.ErrorSeverity.INFO,
            location=instance,
            DQC=xbrl.Error.ExternalLinkParam('http://xbrl.us/data-quality/rules-guidance/', title='DQC validation rules', quotes=False)
        ))
        suppress_errors = set(code.strip() for code in parse_suppress_errors(params))
        namespaces = standard_namespaces(instance.dts)
        if 'dei' in namespaces:
            try:
                dqc_0001(instance, error_log, suppress_errors, namespaces)
                dqc_0004(instance, error_log, suppress_errors, namespaces)
                dqc_0005(instance, error_log, suppress_errors, namespaces)
                dqc_0006(instance, error_log, suppress_errors, namespaces)
                dqc_0008(instance, error_log, suppress_errors, namespaces)
                dqc_0009(instance, error_log, suppress_errors, namespaces)
                dqc_0011(instance, error_log, suppress_errors, namespaces)
                dqc_0013(instance, error_log, suppress_errors, namespaces)
                dqc_0014(instance, error_log, suppress_errors, namespaces)
                dqc_0015(instance, error_log, suppress_errors, namespaces)
                dqc_0018(instance, error_log, suppress_errors, namespaces)
                dqc_0033(instance, error_log, suppress_errors, namespaces)
                dqc_0036(instance, error_log, suppress_errors, namespaces)
                dqc_0041(instance, error_log, suppress_errors, namespaces)
                # dqc v5 checks
                dqc_0043(instance, error_log, suppress_errors, namespaces)
                dqc_0044(instance, error_log, suppress_errors, namespaces)
                dqc_0045(instance, error_log, suppress_errors, namespaces)
                dqc_0046(instance, error_log, suppress_errors, namespaces)
                dqc_0047(instance, error_log, suppress_errors, namespaces)
                dqc_0048(instance, error_log, suppress_errors, namespaces)
                dqc_0049(instance, error_log, suppress_errors, namespaces)
                dqc_0051(instance, error_log, suppress_errors, namespaces)
                dqc_0052(instance, error_log, suppress_errors, namespaces)
                dqc_0053(instance, error_log, suppress_errors, namespaces)
                dqc_0054(instance, error_log, suppress_errors, namespaces)
                dqc_0055(instance, error_log, suppress_errors, namespaces)
                dqc_0057(instance, error_log, suppress_errors, namespaces)
                dqc_0060(instance, error_log, suppress_errors, namespaces)
                dqc_0061(instance, error_log, suppress_errors, namespaces)
                dqc_0062(instance, error_log, suppress_errors, namespaces)
                dqc_0065(instance, error_log, suppress_errors, namespaces)
            except RuntimeError as e:
                # An exceeded error limit simply stops the rule run; anything
                # else is a genuine failure and is propagated.
                if str(e) != "Error limit exceeded":
                    raise
# Main script callback entry points. These functions will be called by RaptorXML after the XBRL instance validation job has finished.
def on_xbrl_finished_dts(job, dts):
    # RaptorXML callback after DTS-only validation; no DQC checks apply here.
    pass
def on_xbrl_finished(job, instance):
    # RaptorXML callback after instance validation: run the DQC rules.
    # instance object will be None if XBRL 2.1 validation was not successful.
    validate(instance, job.error_log, **job.script_params)
| {
"content_hash": "48c622f0f0fa093aac5bdc4f669ae559",
"timestamp": "",
"source": "github",
"line_count": 1409,
"max_line_length": 585,
"avg_line_length": 52.2278211497516,
"alnum_prop": 0.6645558439440677,
"repo_name": "altova/sec-edgar-tools",
"id": "6706b735da2f47bca2d76f8233f4dec5272bc2f8",
"size": "74169",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dqc_validation.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "384368"
}
],
"symlink_target": ""
} |
from ez_setup import use_setuptools
use_setuptools()
import os
from setuptools import setup, find_packages
import djangologdb
def read_file(name):
    """Return the contents of file *name* located next to this setup.py.

    Uses a context manager so the handle is closed deterministically; the
    original `open(...).read()` leaked the handle until garbage collection.
    """
    with open(os.path.join(os.path.dirname(__file__), name)) as f:
        return f.read()
# Long description shown on PyPI: README followed by the changelog.
readme = read_file('README.rst')
changes = read_file('CHANGES.rst')
setup(
    name='django-logdb',
    version='.'.join(map(str, djangologdb.__version__)),
    description='Django-logdb enables you to log entries to a database and aggregate them periodically.',
    long_description='\n\n'.join([readme, changes]),
    author='Joeri Bekker',
    author_email='joeri@maykinmedia.nl',
    license='MIT',
    platforms=['any'],
    url='http://github.com/joeribekker/django-logdb',
    #install_requires=[
    #    'Django>=1.1',
    #],
    include_package_data=True,
    packages=['djangologdb'],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        # Fixed: this classifier previously claimed a BSD license, which
        # contradicted license='MIT' above.
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Software Development',
    ],
    zip_safe=False,
)
| {
"content_hash": "22bf03576d0697bb1cc096db2b4d7f85",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 105,
"avg_line_length": 30.634146341463413,
"alnum_prop": 0.6226114649681529,
"repo_name": "joeribekker/django-logdb",
"id": "e9ef03b46df7e2d7480983fc30e5985e46af9df4",
"size": "1256",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "56511"
}
],
"symlink_target": ""
} |
"""Tests for t2t_trainer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensor2tensor.bin import t2t_trainer
from tensor2tensor.utils import trainer_lib_test
import tensorflow.compat.v1 as tf
# Shared absl/TF flag registry; the test below mutates it directly.
FLAGS = tf.flags.FLAGS
class TrainerTest(tf.test.TestCase):
  """Smoke test: t2t_trainer.main runs end-to-end on a tiny problem."""
  @classmethod
  def setUpClass(cls):
    # Delegate to trainer_lib_test's class-level setup.
    trainer_lib_test.TrainerLibTest.setUpClass()
  def testTrain(self):
    # Minimal configuration: tiny problem/model/hparams, a single train and
    # eval step, temp dirs for data and output.
    FLAGS.problem = "tiny_algo"
    FLAGS.model = "transformer"
    FLAGS.hparams_set = "transformer_tiny"
    FLAGS.train_steps = 1
    FLAGS.eval_steps = 1
    FLAGS.output_dir = tf.test.get_temp_dir()
    FLAGS.data_dir = tf.test.get_temp_dir()
    t2t_trainer.main(None)
if __name__ == "__main__":
  tf.test.main()
| {
"content_hash": "c91b766e46c69b89723be8bc421da348",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 48,
"avg_line_length": 23.78125,
"alnum_prop": 0.7003942181340341,
"repo_name": "tensorflow/tensor2tensor",
"id": "64cc7bc2c2c6e5358e70fb8c1f0527b2b93ce749",
"size": "1367",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensor2tensor/bin/t2t_trainer_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "32015"
},
{
"name": "HTML",
"bytes": "34684"
},
{
"name": "JavaScript",
"bytes": "78408"
},
{
"name": "Jupyter Notebook",
"bytes": "2859453"
},
{
"name": "Python",
"bytes": "5109255"
},
{
"name": "Shell",
"bytes": "11941"
}
],
"symlink_target": ""
} |
"""Test environment for client library tests.
This module has functions for creating keyspaces, tablets for the client
library test.
"""
#!/usr/bin/env python
# coding: utf-8
# NOTE(review): the shebang and coding declaration above are ineffective in
# this position; per PEP 263 they must appear on the first lines of the file
# (before the module docstring) to be honored.
import hashlib
import random
import struct
import threading
import time
import traceback
import unittest
import environment
import tablet
import utils
from clientlib_tests import topo_schema
from clientlib_tests import db_class_unsharded
from clientlib_tests import db_class_sharded
from clientlib_tests import db_class_lookup
from vtdb import database_context
from vtdb import db_object
from vtdb import keyrange
from vtdb import keyrange_constants
from vtdb import keyspace
from vtdb import dbexceptions
from vtdb import shard_constants
from vtdb import vtdb_logger
from vtdb import vtgatev2
from vtdb import vtgate_cursor
from zk import zkocc
# Client library used for all vtgate connections in this module.
conn_class = vtgatev2
# Lazily-populated list of all tablets created by setup_topology().
__tablets = None
# Shard names for the range-sharded keyspaces.
shard_names = ['-80', '80-']
# Precomputed keyspace ids that hash into each shard's key range.
shard_kid_map = {'-80': [527875958493693904, 626750931627689502,
                         345387386794260318, 332484755310826578,
                         1842642426274125671, 1326307661227634652,
                         1761124146422844620, 1661669973250483744,
                         3361397649937244239, 2444880764308344533],
                 '80-': [9767889778372766922, 9742070682920810358,
                         10296850775085416642, 9537430901666854108,
                         10440455099304929791, 11454183276974683945,
                         11185910247776122031, 10460396697869122981,
                         13379616110062597001, 12826553979133932576],
                 }
# Packs a keyspace id into the big-endian uint64 wire format.
pack_kid = struct.Struct('!Q').pack
def setUpModule():
  """Brings up topology, mysql instances, tablets and vtgate for the tests."""
  try:
    environment.topo_server().setup()
    setup_topology()
    # start mysql instance external to the test
    global __tablets
    # Renamed loop variable (was `tablet`): it shadowed the imported
    # `tablet` module; `t` matches the rest of this file.
    setup_procs = []
    for t in __tablets:
      setup_procs.append(t.init_mysql())
    utils.wait_procs(setup_procs)
    create_db()
    start_tablets()
    utils.VtGate().start()
  except:
    # Tear down whatever already came up, then re-raise the original error.
    tearDownModule()
    raise
def tearDownModule():
  """Kills tablets and mysql, tears down topology and removes temp state."""
  global __tablets
  if utils.options.skip_teardown:
    # Useful for debugging: leave all processes and files in place.
    return
  if __tablets is not None:
    tablet.kill_tablets(__tablets)
    teardown_procs = []
    for t in __tablets:
      teardown_procs.append(t.teardown_mysql())
    # Best-effort: teardown failures must not mask the test result.
    utils.wait_procs(teardown_procs, raise_on_error=False)
  environment.topo_server().teardown()
  utils.kill_sub_processes()
  utils.remove_tmp_files()
  if __tablets is not None:
    for t in __tablets:
      t.remove_tree()
def setup_topology():
  """Creates the keyspaces from topo_schema and one master/replica tablet pair
  per shard, appending every tablet to the module-level __tablets list."""
  global __tablets
  if __tablets is None:
    __tablets = []
  keyspaces = topo_schema.keyspaces
  for ks in keyspaces:
    ks_name = ks[0]
    ks_type = ks[1]
    utils.run_vtctl(['CreateKeyspace', ks_name])
    if ks_type == shard_constants.UNSHARDED:
      # Single shard '0' with one master and one replica.
      shard_master = tablet.Tablet()
      shard_replica = tablet.Tablet()
      shard_master.init_tablet('master', keyspace=ks_name, shard='0')
      __tablets.append(shard_master)
      shard_replica.init_tablet('replica', keyspace=ks_name, shard='0')
      __tablets.append(shard_replica)
    elif ks_type == shard_constants.RANGE_SHARDED:
      # Range-sharded keyspaces hash on the uint64 keyspace_id column.
      utils.run_vtctl(['SetKeyspaceShardingInfo', '-force', ks_name,
                       'keyspace_id', 'uint64'])
      for shard_name in shard_names:
        shard_master = tablet.Tablet()
        shard_replica = tablet.Tablet()
        shard_master.init_tablet('master', keyspace=ks_name, shard=shard_name)
        __tablets.append(shard_master)
        shard_replica.init_tablet('replica', keyspace=ks_name, shard=shard_name)
        __tablets.append(shard_replica)
    utils.run_vtctl(['RebuildKeyspaceGraph', ks_name], auto_log=True)
def create_db():
  """Creates each tablet's database and the tables defined for its keyspace."""
  global __tablets
  for t in __tablets:
    t.create_db(t.dbname)
    ks_name = t.keyspace
    # Each table tuple carries (name, CREATE TABLE statement).
    for table_tuple in topo_schema.keyspace_table_map[ks_name]:
      t.mquery(t.dbname, table_tuple[1])
def start_tablets():
  """Starts all vttablets, elects masters and rebuilds the serving graph."""
  global __tablets
  # start tablets
  for t in __tablets:
    t.start_vttablet(wait_for_state=None)
  # wait for them to come in serving state
  for t in __tablets:
    t.wait_for_vttablet_state('SERVING')
  # InitShardMaster for master tablets
  for t in __tablets:
    if t.tablet_type == 'master':
      utils.run_vtctl(['InitShardMaster', t.keyspace+'/'+t.shard,
                       t.tablet_alias], auto_log=True)
  for ks in topo_schema.keyspaces:
    ks_name = ks[0]
    ks_type = ks[1]
    utils.run_vtctl(['RebuildKeyspaceGraph', ks_name],
                    auto_log=True)
    if ks_type == shard_constants.RANGE_SHARDED:
      # Verify the serving graph advertises both shards for every tablet type.
      utils.check_srv_keyspace('test_nj', ks_name,
                               'Partitions(master): -80 80-\n' +
                               'Partitions(rdonly): -80 80-\n' +
                               'Partitions(replica): -80 80-\n')
def get_connection(user=None, password=None):
  """Opens a vtgate connection with a 10s timeout and optional credentials.

  Removed the dead `conn = None` pre-assignments from the original; the
  value was unconditionally overwritten before use.
  """
  timeout = 10.0
  vtgate_addrs = {"vt": [utils.vtgate.addr(),]}
  return conn_class.connect(vtgate_addrs, timeout,
                            user=user, password=password)
def get_keyrange(shard_name):
  """Builds a KeyRange for shard_name; shard '0' maps to the full keyrange."""
  if shard_name == keyrange_constants.SHARD_ZERO:
    return keyrange.KeyRange(keyrange_constants.NON_PARTIAL_KEYRANGE)
  return keyrange.KeyRange(shard_name)
def _delete_all(keyspace, shard_name, table_name):
  """Delete every row of table_name in the given keyspace/shard.

  Used to reset state between tests. The delete goes straight through a
  raw connection (note the private _execute) rather than the DB-class
  layer, inside its own begin/commit transaction.
  """
  vtgate_conn = get_connection()
  # This write is to set up the test with fresh insert
  # and hence performing it directly on the connection.
  vtgate_conn.begin()
  vtgate_conn._execute("delete from %s" % table_name, {},
                       keyspace, 'master',
                       keyranges=[get_keyrange(shard_name)])
  vtgate_conn.commit()
def restart_vtgate(extra_args=None):
  """Kill the running vtgate and restart it on the same port.

  Args:
    extra_args: optional dict of extra arguments forwarded to
      VtGate.start(). Defaults to an empty dict.

  Fix: the original used a mutable default argument (extra_args={}),
  which is shared across calls; a None sentinel avoids that pitfall.
  """
  if extra_args is None:
    extra_args = {}
  port = utils.vtgate.port
  utils.vtgate.kill()
  utils.VtGate(port=port).start(extra_args=extra_args)
def populate_table():
  """Clear and re-seed vt_unsharded with ten rows (ids 0..9)."""
  keyspace = "KS_UNSHARDED"
  # Wipe any rows left over from a previous run first.
  _delete_all(keyspace, keyrange_constants.SHARD_ZERO, 'vt_unsharded')
  vtgate_conn = get_connection()
  cursor = vtgate_conn.cursor(keyspace, 'master', keyranges=[get_keyrange(keyrange_constants.SHARD_ZERO),],writable=True)
  cursor.begin()
  for x in xrange(10):
    # NOTE(review): the second interpolated value produces an unquoted
    # `msg` token, not the string literal 'msg' -- confirm this is what
    # the test intends (it depends on how vtgate parses the statement).
    cursor.execute('insert into vt_unsharded (id, msg) values (%s, %s)' % (str(x), 'msg'), {})
  cursor.commit()
class TestUnshardedTable(unittest.TestCase):
  """CRUD and aggregate reads against the unsharded keyspace via DB classes."""

  def setUp(self):
    """Insert 20 rows through the DB-class layer; remember the returned ids."""
    self.vtgate_addrs = {"vt": [utils.vtgate.addr(),]}
    self.dc = database_context.DatabaseContext(self.vtgate_addrs)
    self.all_ids = []
    with database_context.WriteTransaction(self.dc) as context:
      for x in xrange(20):
        ret_id = db_class_unsharded.VtUnsharded.insert(context.get_cursor(),
                                                       msg="test message")
        self.all_ids.append(ret_id)

  def tearDown(self):
    """Remove every row so tests stay independent."""
    _delete_all("KS_UNSHARDED", "0", 'vt_unsharded')

  def test_read(self):
    """select_by_id returns exactly the one row that was inserted."""
    id_val = self.all_ids[0]
    with database_context.ReadFromMaster(self.dc) as context:
      rows = db_class_unsharded.VtUnsharded.select_by_id(
          context.get_cursor(), id_val)
      expected = 1
      self.assertEqual(len(rows), expected, "wrong number of rows fetched %d, expected %d" % (len(rows), expected))
      self.assertEqual(rows[0].id, id_val, "wrong row fetched")

  def test_update_and_read(self):
    """update_columns changes msg and the change is visible on read."""
    id_val = self.all_ids[0]
    where_column_value_pairs = [('id', id_val)]
    with database_context.WriteTransaction(self.dc) as context:
      update_cols = [('msg', "test update"),]
      db_class_unsharded.VtUnsharded.update_columns(context.get_cursor(),
                                                    where_column_value_pairs,
                                                    update_column_value_pairs=update_cols)
    with database_context.ReadFromMaster(self.dc) as context:
      rows = db_class_unsharded.VtUnsharded.select_by_id(context.get_cursor(), id_val)
      self.assertEqual(len(rows), 1, "wrong number of rows fetched")
      self.assertEqual(rows[0].msg, "test update", "wrong row fetched")

  def test_delete_and_read(self):
    """delete_by_columns removes the row; a following read finds nothing."""
    id_val = self.all_ids[-1]
    where_column_value_pairs = [('id', id_val)]
    with database_context.WriteTransaction(self.dc) as context:
      db_class_unsharded.VtUnsharded.delete_by_columns(context.get_cursor(),
                                                       where_column_value_pairs)
    with database_context.ReadFromMaster(self.dc) as context:
      rows = db_class_unsharded.VtUnsharded.select_by_id(context.get_cursor(), id_val)
      self.assertEqual(len(rows), 0, "wrong number of rows fetched")
    # Keep the local bookkeeping in sync with the deletion above.
    self.all_ids = self.all_ids[:-1]

  def test_count(self):
    """get_count with the common msg matches the number of inserted rows."""
    with database_context.ReadFromMaster(self.dc) as context:
      count = db_class_unsharded.VtUnsharded.get_count(
          context.get_cursor(), msg="test message")
      expected = len(self.all_ids)
      self.assertEqual(count, expected, "wrong count fetched; expected %d got %d" % (expected, count))

  def test_min_id(self):
    """get_min returns the smallest inserted id."""
    with database_context.ReadFromMaster(self.dc) as context:
      min_id = db_class_unsharded.VtUnsharded.get_min(
          context.get_cursor())
      expected = min(self.all_ids)
      self.assertEqual(min_id, expected, "wrong min value fetched; expected %d got %d" % (expected, min_id))

  def test_max_id(self):
    """get_max returns the largest inserted id."""
    with database_context.ReadFromMaster(self.dc) as context:
      max_id = db_class_unsharded.VtUnsharded.get_max(
          context.get_cursor())
      # NOTE(review): sorting is unnecessary before max() -- looks like a
      # leftover; harmless, left as-is.
      self.all_ids.sort()
      expected = max(self.all_ids)
      self.assertEqual(max_id, expected, "wrong max value fetched; expected %d got %d" % (expected, max_id))
class TestRangeSharded(unittest.TestCase):
  """Reads and writes against range-sharded keyspaces via the DB-class layer.

  Exercises sharding-key routing, lookup (entity-id) routing, IN-clause
  reads, explicit keyrange reads, scatter/streaming reads and aggregates.
  """

  def populate_tables(self):
    """Insert users, their email rows, and a random number of songs each."""
    self.user_id_list = []
    self.song_id_list = []
    self.user_song_map = {}
    r = random.Random()
    # This should create the lookup entries and sharding key.
    with database_context.WriteTransaction(self.dc) as context:
      for x in xrange(20):
        # vt_user - EntityRangeSharded; creates username:user_id lookup
        user_id = db_class_sharded.VtUser.insert(context.get_cursor(),
                                                 username="user%s" % x, msg="test message")
        self.user_id_list.append(user_id)
        # vt_user_email - RangeSharded; references user_id:keyspace_id hash
        email = 'user%s@google.com' % x
        m = hashlib.md5()
        m.update(email)
        email_hash = m.digest()
        entity_id_map={'user_id':user_id}
        db_class_sharded.VtUserEmail.insert(
            context.get_cursor(entity_id_map=entity_id_map),
            user_id=user_id, email=email,
            email_hash=email_hash)
        # vt_song - EntityRangeSharded; creates song_id:user_id lookup
        num_songs_for_user = r.randint(1, 5)
        for i in xrange(num_songs_for_user):
          song_id = db_class_sharded.VtSong.insert(context.get_cursor(),
                                                   user_id=user_id, title="Test Song")
          self.song_id_list.append(song_id)
          self.user_song_map.setdefault(user_id, []).append(song_id)
          # vt_song_detail - RangeSharded; references song_id:user_id lookup
          entity_id_map = {'song_id':song_id}
          db_class_sharded.VtSongDetail.insert(context.get_cursor(entity_id_map=entity_id_map),
                                               song_id=song_id, album_name="Test album",
                                               artist="Test artist")

  def setUp(self):
    self.vtgate_addrs = {"vt": [utils.vtgate.addr(),]}
    self.dc = database_context.DatabaseContext(self.vtgate_addrs)
    self.populate_tables()

  def tearDown(self):
    """Delete all rows created for each user, tolerating missing rows."""
    with database_context.WriteTransaction(self.dc) as context:
      for uid in self.user_id_list:
        try:
          db_class_sharded.VtUser.delete_by_columns(context.get_cursor(entity_id_map={'id':uid}),
                                                    [('id', uid),])
          db_class_sharded.VtUserEmail.delete_by_columns(context.get_cursor(entity_id_map={'user_id':uid}),
                                                         [('user_id', uid),])
          db_class_sharded.VtSong.delete_by_columns(context.get_cursor(entity_id_map={'user_id':uid}),
                                                    [('user_id', uid),])
          song_id_list = self.user_song_map[uid]
          for sid in song_id_list:
            db_class_sharded.VtSongDetail.delete_by_columns(context.get_cursor(entity_id_map={'song_id':sid}),
                                                            [('song_id', sid),])
        except dbexceptions.DatabaseError as e:
          # "DB Row not found" just means a test already deleted the row;
          # any other DatabaseError propagates.
          if str(e) == "DB Row not found":
            pass

  def test_sharding_key_read(self):
    """Route reads by the sharding key (user id) for all three tables."""
    user_id = self.user_id_list[0]
    with database_context.ReadFromMaster(self.dc) as context:
      where_column_value_pairs = [('id', user_id),]
      entity_id_map = dict(where_column_value_pairs)
      rows = db_class_sharded.VtUser.select_by_columns(
          context.get_cursor(entity_id_map=entity_id_map),
          where_column_value_pairs)
      self.assertEqual(len(rows), 1, "wrong number of rows fetched")
      where_column_value_pairs = [('user_id', user_id),]
      entity_id_map = dict(where_column_value_pairs)
      rows = db_class_sharded.VtUserEmail.select_by_columns(
          context.get_cursor(entity_id_map=entity_id_map),
          where_column_value_pairs)
      self.assertEqual(len(rows), 1, "wrong number of rows fetched")
      where_column_value_pairs = [('user_id', user_id),]
      entity_id_map = dict(where_column_value_pairs)
      rows = db_class_sharded.VtSong.select_by_columns(
          context.get_cursor(entity_id_map=entity_id_map),
          where_column_value_pairs)
      self.assertEqual(len(rows), len(self.user_song_map[user_id]), "wrong number of rows fetched")

  def test_entity_id_read(self):
    """Route reads via lookup entities (username, song id) instead of the key."""
    user_id = self.user_id_list[0]
    with database_context.ReadFromMaster(self.dc) as context:
      entity_id_map = {'username': 'user0'}
      rows = db_class_sharded.VtUser.select_by_columns(
          context.get_cursor(entity_id_map=entity_id_map),
          [('id', user_id),])
      self.assertEqual(len(rows), 1, "wrong number of rows fetched")
      where_column_value_pairs = [('id', self.user_song_map[user_id][0]),]
      entity_id_map = dict(where_column_value_pairs)
      rows = db_class_sharded.VtSong.select_by_columns(
          context.get_cursor(entity_id_map=entity_id_map),
          where_column_value_pairs)
      self.assertEqual(len(rows), 1, "wrong number of rows fetched")
      where_column_value_pairs = [('song_id', self.user_song_map[user_id][0]),]
      entity_id_map = dict(where_column_value_pairs)
      rows = db_class_sharded.VtSongDetail.select_by_columns(
          context.get_cursor(entity_id_map=entity_id_map),
          where_column_value_pairs)
      self.assertEqual(len(rows), 1, "wrong number of rows fetched")

  def test_in_clause_read(self):
    """IN-clause reads by id, username, user_id and song_id round-trip."""
    with database_context.ReadFromMaster(self.dc) as context:
      user_id_list = [self.user_id_list[0], self.user_id_list[1]]
      where_column_value_pairs = (('id', user_id_list),)
      entity_id_map = dict(where_column_value_pairs)
      rows = db_class_sharded.VtUser.select_by_ids(
          context.get_cursor(entity_id_map=entity_id_map),
          where_column_value_pairs)
      self.assertEqual(len(rows), 2, "wrong number of rows fetched")
      got = [row.id for row in rows]
      got.sort()
      self.assertEqual(user_id_list, got, "wrong rows fetched; expected %s got %s" % (user_id_list, got))
      username_list = [row.username for row in rows]
      username_list.sort()
      where_column_value_pairs = (('username', username_list),)
      entity_id_map = dict(where_column_value_pairs)
      rows = db_class_sharded.VtUser.select_by_ids(
          context.get_cursor(entity_id_map=entity_id_map),
          where_column_value_pairs)
      self.assertEqual(len(rows), 2, "wrong number of rows fetched")
      got = [row.username for row in rows]
      got.sort()
      self.assertEqual(username_list, got, "wrong rows fetched; expected %s got %s" % (username_list, got))
      where_column_value_pairs = (('user_id', user_id_list),)
      entity_id_map = dict(where_column_value_pairs)
      rows = db_class_sharded.VtUserEmail.select_by_ids(
          context.get_cursor(entity_id_map=entity_id_map),
          where_column_value_pairs)
      self.assertEqual(len(rows), 2, "wrong number of rows fetched")
      got = [row.user_id for row in rows]
      got.sort()
      self.assertEqual(user_id_list, got, "wrong rows fetched; expected %s got %s" % (user_id_list, got))
      song_id_list = []
      for user_id in user_id_list:
        song_id_list.extend(self.user_song_map[user_id])
      song_id_list.sort()
      where_column_value_pairs = [('id', song_id_list),]
      entity_id_map = dict(where_column_value_pairs)
      rows = db_class_sharded.VtSong.select_by_columns(
          context.get_cursor(entity_id_map=entity_id_map),
          where_column_value_pairs)
      got = [row.id for row in rows]
      got.sort()
      self.assertEqual(song_id_list, got, "wrong rows fetched %s got %s" % (song_id_list, got))
      where_column_value_pairs = [('song_id', song_id_list),]
      entity_id_map = dict(where_column_value_pairs)
      rows = db_class_sharded.VtSongDetail.select_by_columns(
          context.get_cursor(entity_id_map=entity_id_map),
          where_column_value_pairs)
      got = [row.song_id for row in rows]
      got.sort()
      self.assertEqual(song_id_list, got, "wrong rows fetched %s got %s" % (song_id_list, got))

  def test_keyrange_read(self):
    """Reads over the two shard halves (-80, 80-) cover all users."""
    where_column_value_pairs = []
    with database_context.ReadFromMaster(self.dc) as context:
      rows1 = db_class_sharded.VtUser.select_by_columns(
          context.get_cursor(keyrange='-80'), where_column_value_pairs)
      rows2 = db_class_sharded.VtUser.select_by_columns(
          context.get_cursor(keyrange='80-'), where_column_value_pairs)
      fetched_rows = len(rows1) + len(rows2)
      expected = len(self.user_id_list)
      self.assertEqual(fetched_rows, expected, "wrong number of rows fetched expected:%d got:%d" % (expected, fetched_rows))

  def test_scatter_read(self):
    """A non-partial keyrange read scatters across all shards."""
    where_column_value_pairs = []
    with database_context.ReadFromMaster(self.dc) as context:
      rows = db_class_sharded.VtUser.select_by_columns(
          context.get_cursor(keyrange=keyrange_constants.NON_PARTIAL_KEYRANGE),
          where_column_value_pairs)
      self.assertEqual(len(rows), len(self.user_id_list), "wrong number of rows fetched, expecting %d got %d" % (len(self.user_id_list), len(rows)))

  def test_streaming_read(self):
    """The streaming variant of a scatter read yields every user row."""
    where_column_value_pairs = []
    with database_context.ReadFromMaster(self.dc) as context:
      rows = db_class_sharded.VtUser.select_by_columns_streaming(
          context.get_cursor(keyrange=keyrange_constants.NON_PARTIAL_KEYRANGE),
          where_column_value_pairs)
      got_user_id_list = []
      for r in rows:
        got_user_id_list.append(r.id)
      self.assertEqual(len(got_user_id_list), len(self.user_id_list), "wrong number of rows fetched")

  def update_columns(self):
    """Update username and email, then verify the new values.

    NOTE(review): not prefixed with test_, so unittest never runs this --
    confirm whether it is intentionally disabled.
    """
    with database_context.WriteTransaction(self.dc) as context:
      user_id = self.user_id_list[1]
      where_column_value_pairs = [('id', user_id),]
      entity_id_map = {'id': user_id}
      new_username = 'new_user%s' % user_id
      update_cols = [('username', new_username),]
      db_class_sharded.VtUser.update_columns(context.get_cursor(entity_id_map=entity_id_map),
                                             where_column_value_pairs,
                                             update_column_value_pairs=update_cols)
      # verify the updated value.
      where_column_value_pairs = [('id', user_id),]
      rows = db_class_sharded.VtUser.select_by_columns(
          context.get_cursor(entity_id_map={'id': user_id}),
          where_column_value_pairs)
      self.assertEqual(len(rows), 1, "wrong number of rows fetched")
      self.assertEqual(new_username, rows[0].username)
      where_column_value_pairs = [('user_id', user_id),]
      entity_id_map = {'user_id': user_id}
      new_email = 'new_user%s@google.com' % user_id
      m = hashlib.md5()
      m.update(new_email)
      email_hash = m.digest()
      update_cols = [('email', new_email), ('email_hash', email_hash)]
      db_class_sharded.VtUserEmail.update_columns(context.get_cursor(entity_id_map={'user_id':user_id}),
                                                  where_column_value_pairs,
                                                  update_column_value_pairs=update_cols)
    # verify the updated value.
    with database_context.ReadFromMaster(self.dc) as context:
      where_column_value_pairs = [('user_id', user_id),]
      entity_id_map = dict(where_column_value_pairs)
      rows = db_class_sharded.VtUserEmail.select_by_ids(
          context.get_cursor(entity_id_map=entity_id_map),
          where_column_value_pairs)
      self.assertEqual(len(rows), 1, "wrong number of rows fetched")
      self.assertEqual(new_email, rows[0].email)
    self.user_id_list.sort()

  def delete_columns(self):
    """Delete a user and its email row, then verify both are gone.

    NOTE(review): not prefixed with test_, so unittest never runs this --
    confirm whether it is intentionally disabled. The verification read on
    VtUser reuses the VtUserEmail where-clause ('user_id'), which looks
    like it selects by the wrong column -- confirm before re-enabling.
    """
    user_id = self.user_id_list[-1]
    with database_context.WriteTransaction(self.dc) as context:
      where_column_value_pairs = [('id', user_id),]
      entity_id_map = {'id': user_id}
      db_class_sharded.VtUser.delete_by_columns(context.get_cursor(entity_id_map=entity_id_map),
                                                where_column_value_pairs)
      where_column_value_pairs = [('user_id', user_id),]
      entity_id_map = {'user_id': user_id}
      db_class_sharded.VtUserEmail.delete_by_columns(context.get_cursor(entity_id_map=entity_id_map),
                                                     where_column_value_pairs)
    with database_context.ReadFromMaster(self.dc) as context:
      rows = db_class_sharded.VtUser.select_by_columns(
          context.get_cursor(entity_id_map=entity_id_map),
          where_column_value_pairs)
      self.assertEqual(len(rows), 0, "wrong number of rows fetched")
      rows = db_class_sharded.VtUserEmail.select_by_ids(
          context.get_cursor(entity_id_map=entity_id_map),
          where_column_value_pairs)
      self.assertEqual(len(rows), 0, "wrong number of rows fetched")
    self.user_id_list = self.user_id_list[:-1]
    self.user_id_list.sort()

  def test_count(self):
    """Scatter get_count over the shared msg equals the number of users."""
    with database_context.ReadFromMaster(self.dc) as context:
      count = db_class_sharded.VtUser.get_count(
          context.get_cursor(keyrange=keyrange_constants.NON_PARTIAL_KEYRANGE),
          msg="test message")
      expected = len(self.user_id_list)
      self.assertEqual(count, expected, "wrong count fetched; expected %d got %d" % (expected, count))

  def test_min_id(self):
    """Scatter get_min equals the smallest inserted user id."""
    with database_context.ReadFromMaster(self.dc) as context:
      min_id = db_class_sharded.VtUser.get_min(
          context.get_cursor(keyrange=keyrange_constants.NON_PARTIAL_KEYRANGE))
      self.user_id_list.sort()
      expected = min(self.user_id_list)
      # NOTE(review): rows1/id_list below are unused -- leftover debugging?
      rows1 = db_class_sharded.VtUser.select_by_columns(
          context.get_cursor(keyrange=keyrange_constants.NON_PARTIAL_KEYRANGE), [])
      id_list = [row.id for row in rows1]
      self.assertEqual(min_id, expected, "wrong min value fetched; expected %d got %d" % (expected, min_id))

  def test_max_id(self):
    """Scatter get_max equals the largest inserted user id."""
    with database_context.ReadFromMaster(self.dc) as context:
      max_id = db_class_sharded.VtUser.get_max(
          context.get_cursor(keyrange=keyrange_constants.NON_PARTIAL_KEYRANGE))
      expected = max(self.user_id_list)
      self.assertEqual(max_id, expected, "wrong max value fetched; expected %d got %d" % (expected, max_id))

  def test_batch_read(self):
    """Batch-read two select queries through one routed cursor (disabled)."""
    # TODO(sougou): fix
    return
    # 1. Create select queries using DB classes.
    query_list = []
    bv_list = []
    user_id_list = [self.user_id_list[0], self.user_id_list[1]]
    where_column_value_pairs = (('id', user_id_list),)
    entity_id_map = dict(where_column_value_pairs)
    q, bv = db_class_sharded.VtUser.create_select_query(where_column_value_pairs)
    query_list.append(q)
    bv_list.append(bv)
    where_column_value_pairs = (('user_id', user_id_list),)
    q, bv = db_class_sharded.VtUserEmail.create_select_query(where_column_value_pairs)
    query_list.append(q)
    bv_list.append(bv)
    with database_context.ReadFromMaster(self.dc) as context:
      # 2. Cursor Creation using one of the DB classes.
      cursor = context.get_cursor(entity_id_map=entity_id_map)(db_class_sharded.VtUser)
      # 3. Batch execution of reads.
      results = db_object.execute_batch_read(
          cursor, query_list, bv_list)
      self.assertEqual(len(results), len(query_list))
      res_ids = [row.id for row in results[0]]
      res_user_ids = [row.user_id for row in results[1]]
      self.assertEqual(res_ids, user_id_list)
      self.assertEqual(res_user_ids, user_id_list)

  def test_batch_write(self):
    """Batch two updates and a delete in one routed transaction (disabled)."""
    # TODO(sougou): fix
    return
    # 1. Create DMLs using DB Classes.
    query_list = []
    bv_list = []
    # Update VtUser table.
    user_id = self.user_id_list[1]
    where_column_value_pairs = (('id', user_id),)
    entity_id_map = dict(where_column_value_pairs)
    new_username = 'new_user%s' % user_id
    update_cols = [('username', new_username),]
    q, bv = db_class_sharded.VtUser.create_update_query(
        where_column_value_pairs, update_column_value_pairs=update_cols)
    query_list.append(q)
    bv_list.append(bv)
    # Update VtUserEmail table.
    where_column_value_pairs = [('user_id', user_id),]
    new_email = 'new_user%s@google.com' % user_id
    m = hashlib.md5()
    m.update(new_email)
    email_hash = m.digest()
    update_cols = [('email', new_email), ('email_hash', email_hash)]
    q, bv = db_class_sharded.VtUserEmail.create_update_query(
        where_column_value_pairs, update_column_value_pairs=update_cols)
    query_list.append(q)
    bv_list.append(bv)
    # Delete a VtSong row
    where_column_value_pairs = [('user_id', user_id),]
    q, bv = db_class_sharded.VtSong.create_delete_query(where_column_value_pairs)
    query_list.append(q)
    bv_list.append(bv)
    with database_context.WriteTransaction(self.dc) as context:
      # 2. Routing for query_list is done by associating
      # the common entity_id to the cursor.
      # NOTE: cursor creation needs binding to a particular db class,
      # so we create a writable cursor using the common entity (user_id).
      # This entity_id is used to derive the keyspace_id for routing the dmls.
      entity_id_map = {'id': user_id}
      cursor = context.get_cursor(entity_id_map=entity_id_map)(db_class_sharded.VtUser)
      # 3. Execute the writable batch query.
      results = db_object.execute_batch_write(
          cursor, query_list, bv_list)
      # 4. Verify results
      self.assertEqual(len(results), len(query_list))
      self.assertEqual(results[0]['rowcount'], 1, "VtUser update didn't update 1 row")
      self.assertEqual(results[1]['rowcount'], 1, "VtUserEmail update didn't update 1 row")
      self.assertEqual(results[2]['rowcount'], len(self.user_song_map[user_id]),
                       "VtSong deleted '%d' rows, expected '%d'" % (results[2]['rowcount'], len(self.user_song_map[user_id])))
if __name__ == '__main__':
  # Standard vitess test entry point: parses flags and runs the suite.
  utils.main()
| {
"content_hash": "744c8de53c5e104acc53bc3256edbb8b",
"timestamp": "",
"source": "github",
"line_count": 650,
"max_line_length": 148,
"avg_line_length": 42.06461538461539,
"alnum_prop": 0.634591470997001,
"repo_name": "SDHM/vitess",
"id": "9666339d182134f455c3b09d6b62f177e3d1f1c1",
"size": "27342",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/client_test.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "40319"
},
{
"name": "CSS",
"bytes": "80183"
},
{
"name": "Go",
"bytes": "4309168"
},
{
"name": "HTML",
"bytes": "65751"
},
{
"name": "Java",
"bytes": "166847"
},
{
"name": "JavaScript",
"bytes": "58128"
},
{
"name": "Liquid",
"bytes": "15617"
},
{
"name": "Makefile",
"bytes": "7039"
},
{
"name": "PHP",
"bytes": "7167"
},
{
"name": "PLpgSQL",
"bytes": "8933"
},
{
"name": "Protocol Buffer",
"bytes": "58096"
},
{
"name": "Python",
"bytes": "924744"
},
{
"name": "Ruby",
"bytes": "465"
},
{
"name": "Shell",
"bytes": "51739"
},
{
"name": "Yacc",
"bytes": "18460"
}
],
"symlink_target": ""
} |
__author__ = 'anna'
from PetersScheme.Vertex import Vertex_DooSabin
from PetersScheme.Shape import Shape_DooSabin
def DooSabin(vertices, faces, alpha, iter):
    """Perform one Doo-Sabin subdivision pass over a mesh.

    Each original face spawns one new vertex per corner, blended between the
    corner and the face centroid by `alpha` (new = corner*(1-alpha) +
    centroid*alpha). New faces are then built in three families: one per
    original face ("center"), one per original vertex ("vertex"), and one
    quad per original edge ("edge"). On iterations 1 and 2 (checked via
    `iter`, which shadows the builtin -- presumably the subdivision pass
    number; confirm with callers) extra parent/child bookkeeping for the
    Peters scheme is recorded on the new faces and original grid.

    Args:
        vertices: list of Vertex objects of the input mesh.
        faces: list of Shape faces of the input mesh.
        alpha: blend factor toward each face centroid, in the coordinate
            formula above.
        iter: pass number controlling the bookkeeping branches.

    Returns:
        [vertices_refined, faces_refined] describing the subdivided mesh.
    """
    vertices_refined = []
    faces_refined = []
    # One child list per original vertex id: entries are [parent_face, new_vertex].
    vertices_children = [ [] for _ in range(len(vertices))]#[None]*len(vertices)
    edges = []
    #get list of edges
    # NOTE(review): the duplicate test below is a linear scan per edge, so
    # this loop is quadratic in the edge count -- fine for small meshes.
    faces_total = faces.__len__()
    face_count = 0
    for face in faces:
        face_count += 1
        if face_count % 100 == 0:
            print "getting list of edges: face %d of %d."%(face_count,faces_total)
        for edge in face.getEdges():
            # Store each undirected edge once, whichever orientation we saw first.
            if not ((edge in edges) or ([edge[1], edge[0]] in edges)):
                edges.append(edge)
    # One child list per edge: entries are [new_vertex, position, face, orientation].
    edges_children = [ [] for _ in range(len(edges))]
    faces_total = faces.__len__()
    face_count = 0
    for face in faces:
        face_count += 1
        if face_count % 100 == 0:
            print "face %d of %d."%(face_count,faces_total)
        F = face.centroid
        numberOfVertices = len(face.vertex_ids)
        newVertices = []
        for j in range(numberOfVertices):
            # v = Vertex(len(vertices_refined),[face.vertices[j]._coordinates[l]*(1 - alpha) + F[l]*alpha for l in range(3)])
            newVertex_xCoord = face._vertices[j]._coordinates[0]*(1 - alpha) + F[0]*alpha
            newVertex_yCoord = face._vertices[j]._coordinates[1]*(1 - alpha) + F[1]*alpha
            newVertex_zCoord = face._vertices[j]._coordinates[2]*(1 - alpha) + F[2]*alpha
            v = Vertex_DooSabin(len(vertices_refined), newVertex_xCoord, newVertex_yCoord, newVertex_zCoord)
            vertices_children[face._vertices[j]._id].append([face, v])
            vertices_refined.append(v)
            # Record this child on both edges incident to the corner, noting
            # whether the stored edge orientation matches this face's.
            for edge in face.adjacentEdges(face._vertices[j]):
                if edge in edges:
                    edges_children[edges.index(edge)].append([v, edge.index(face._vertices[j]), face, "asIs"])
                else:
                    #positioning is assigned with respect to the orientation of the edge in the list
                    edges_children[edges.index([edge[1], edge[0]])].append([v, 1-edge.index(face._vertices[j]), face, "reversed"])
            newVertices.append(v)
        # The "center" face: the shrunken copy of the original face.
        new_face = Shape_DooSabin(len(faces_refined), newVertices)
        if iter == 1:
            new_face.type = "center"
            new_face.parents.append(face)
        if iter == 2:
            new_face.type = "center"
            new_face.parent_type = face.type
            # Map the new corners into the 4x4 ordered_refined_vertices grid
            # of the original (grandparent) quad, per the parent face's type.
            if face.type == "center":
                globalIndicesInOrderedOriginalQuad = [5, 6, 10, 9]
                for i in range(len(face.parents[0]._vertices)):
                    face.parents[0].ordered_refined_vertices[globalIndicesInOrderedOriginalQuad[i]] = newVertices[i]
                    newVertices[i].parentOrigGrid = face.parents[0]
                    face.parents[0]._vertices[i].A.append([face.parents[0], newVertices[i]])
            if face.type == "vertex":
                globalIndicesInOrderedOriginalQuad = [0, 3, 15, 12]
                parent_faces = face.parents
                for i in range(numberOfVertices):
                    face.parent_vertex.C.append([face.parents[i], newVertices[i]])
                    ind = face.parents[i]._vertices.index(face.parent_vertex)
                    face.parents[i].ordered_refined_vertices[globalIndicesInOrderedOriginalQuad[ind]] = newVertices[i]
                    newVertices[i].parentOrigGrid = face.parents[i]
            if face.type == "edge":
                globalIndicesInOrderedOriginalQuad = [[1, 7, 14, 8], [2, 11, 13, 4]]
                parent_edge = face.parent_edge
                positioning = face.edge_face_positioning
                for parent_local_id in range(2):
                    if parent_edge in face.parents[parent_local_id].edges:
                        #ids in the face parent_local_id*2, parent_local_id*2+1
                        edge_id = face.parents[parent_local_id].edges.index(parent_edge)
                        face.parents[parent_local_id].ordered_refined_vertices[globalIndicesInOrderedOriginalQuad[positioning[parent_local_id*2]][edge_id]] = newVertices[parent_local_id*2]
                        newVertices[parent_local_id*2].parentOrigGrid = face.parents[parent_local_id]
                        face.parents[parent_local_id].ordered_refined_vertices[globalIndicesInOrderedOriginalQuad[positioning[parent_local_id*2+1]][edge_id]] = newVertices[parent_local_id*2+1]
                        newVertices[parent_local_id*2+1].parentOrigGrid = face.parents[parent_local_id]
                    else:
                        # The parent stores the edge reversed: flip positioning.
                        edge_id = face.parents[parent_local_id].edges.index([parent_edge[1], parent_edge[0]])
                        face.parents[parent_local_id].ordered_refined_vertices[globalIndicesInOrderedOriginalQuad[1-positioning[parent_local_id*2]][edge_id]] = newVertices[parent_local_id*2]
                        newVertices[parent_local_id*2].parentOrigGrid = face.parents[parent_local_id]
                        face.parents[parent_local_id].ordered_refined_vertices[globalIndicesInOrderedOriginalQuad[1-positioning[parent_local_id*2+1]][edge_id]] = newVertices[parent_local_id*2+1]
                        newVertices[parent_local_id*2+1].parentOrigGrid = face.parents[parent_local_id]
                #get the neighbouring "vertex" faces with respect to our current "edge" face
                neighbouringVertexFaces = [face.parent_edge[i].childFace for i in range(2)]
                for i in range(2):
                    #for each of the neighbouring faces find the shared edge
                    sharedEdge = neighbouringVertexFaces[i].isAdjacent(face)
                    indexOfSharedEdge = neighbouringVertexFaces[i].edges.index(sharedEdge)
                    #ordered original faces containing the vertices of the "vertex" face
                    for vert in sharedEdge:
                        #the local id of the face in original grid, containing the current vertex
                        localFaceId = neighbouringVertexFaces[i]._vertices.index(vert)
                        grandParentFace = neighbouringVertexFaces[i].parents[localFaceId]
                        # if parent_edge in grandParentFace.edges:
                        #     positioning = face.edge_face_positioning
                        #     localInd = face._vertices.index(vert)
                        #     grandParentFace.ordered_refined_vertices[globalIndicesInOrderedOriginalQuad[positioning[localInd]][grandParentFace.edges.index(parent_edge)]]
                        # else:
                        #     positioning = face.edge_face_positioning
                        #     localInd = face._vertices.index(vert)
                        #     grandParentFace.ordered_refined_vertices[globalIndicesInOrderedOriginalQuad[1-positioning[localInd]][grandParentFace.edges.index([parent_edge[1], parent_edge[0]])]]
                        if localFaceId == indexOfSharedEdge:
                            face.parent_edge[i].B2.append([grandParentFace, newVertices[face._vertices.index(vert)], face.parent_edge])
                        else:
                            face.parent_edge[i].B1.append([grandParentFace, newVertices[face._vertices.index(vert)], face.parent_edge])
        for vertex in newVertices:
            vertex.addNeighbouringFace(new_face)
        faces_refined.append(new_face)
    vertices_total = vertices.__len__()
    vertex_count = 0
    #Loop through vertices, getting the faces of the type "vertex"
    for vert in vertices:
        vertex_count += 1
        if vertex_count % 100 == 0:
            print "vertex %d of %d."%(vertex_count,vertices_total)
        n = len(vertices_children[vert._id])
        new_face_vertices = [vertices_children[vert._id][i][1] for i in range(n)]
        parent_faces = [vertices_children[vert._id][i][0] for i in range(n)]
        # Order the children so consecutive ones come from adjacent parent faces.
        face_ordered = [vertices_children[vert._id][0][1]]
        parent_faces_ordered = [vertices_children[vert._id][0][0]]
        current_face = parent_faces[0]
        for i in range(1, n, 1):
            j = 0
            while (not current_face.isAdjacent(parent_faces[j])) or (new_face_vertices[j] in face_ordered):
                j += 1
            face_ordered.append(new_face_vertices[j])
            parent_faces_ordered.append(parent_faces[j])
            current_face = parent_faces[j]
        face_object = Shape_DooSabin(len(faces_refined), face_ordered)
        vert.childFace = face_object
        if iter == 1:
            face_object.type = "vertex"
            for i in range(len(parent_faces_ordered)):
                face_object.parents.append(parent_faces_ordered[i])
            face_object.parent_vertex = vert
        for vertex in new_face_vertices:
            vertex.addNeighbouringFace(face_object)
        faces_refined.append(face_object)
    edges_total = edges.__len__()
    #Loop through edges, getting the faces of the type "edge"
    for i in range(len(edges)):
        if i % 100 == 0:
            print "edge %d of %d."%(i, edges_total)
        n = 4 #edge always has four children!
        new_face_vertices_positioning = [edges_children[i][j][1] for j in range(n)]
        new_face_vertices = [edges_children[i][0][0], edges_children[i][1][0]]
        # Order the last two children (and their positioning) so the quad's
        # winding is consistent with the first two.
        if new_face_vertices_positioning[2] == new_face_vertices_positioning[1]:
            new_face_vertices.append(edges_children[i][2][0])
            new_face_vertices.append(edges_children[i][3][0])
        else:
            new_face_vertices.append(edges_children[i][3][0])
            new_face_vertices.append(edges_children[i][2][0])
            temp = new_face_vertices_positioning[3]
            new_face_vertices_positioning[3] = new_face_vertices_positioning[2]
            new_face_vertices_positioning[2] = temp
        face_object = Shape_DooSabin(len(faces_refined), new_face_vertices)
        face_object.edge_face_positioning = new_face_vertices_positioning
        face_object.parent_edge = edges[i]
        face_object.parents = [edges_children[i][j][2] for j in range(0,4, 2)]
        if iter == 1:
            face_object.type = "edge"
        faces_refined.append(face_object)
    # NOTE(review): the trailing "| {" below looks like two-column dataset
    # markup (text | meta) fused onto this line, not Python source --
    # confirm against the upstream repository file.
    return [vertices_refined, faces_refined] | {
"content_hash": "cf9b4b6a27a2f81c8610f3ff792bffcd",
"timestamp": "",
"source": "github",
"line_count": 208,
"max_line_length": 194,
"avg_line_length": 49.31730769230769,
"alnum_prop": 0.5923181906804446,
"repo_name": "BGCECSE2015/CADO",
"id": "4cc6f13255cffccaa4deffe152945fd1f7643c5f",
"size": "10258",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "PYTHON/NURBSReconstruction/DooSabin/DooSabin.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "92113"
},
{
"name": "CMake",
"bytes": "1257"
},
{
"name": "Makefile",
"bytes": "554"
},
{
"name": "Python",
"bytes": "208324"
},
{
"name": "QMake",
"bytes": "514"
},
{
"name": "Shell",
"bytes": "479"
}
],
"symlink_target": ""
} |
import os
import datetime
import mimetypes
from webskewer.common import http_status
from webskewer.common import time_util, range_util
from webskewer.common.exceptions import RangePastEOFError, BadRangeSpecError
from webskewer.common.util import decodeurl
from webskewer.wsgi.slash import with_slash, without_slash
from webskewer.wsgi.http import (NotFound, NotModified,
RangeNotSatisfiable)
def read_range(fobj, span, bufsize=8192):
    """Yield the byte range *span* = (start, end) of *fobj* in chunks.

    *end* may be None to read from *start* through EOF. Chunks are at most
    *bufsize* bytes. Raises RangePastEOFError if the file ends before a
    finite *end* is reached.

    Fixes: the original used a Python-2-only tuple parameter
    ``(start, end)`` and shadowed the ``file`` builtin; the tuple is now
    unpacked inside the body, so existing positional call sites
    (``read_range(f, (a, b))``) are unchanged and the code parses on
    Python 3.
    """
    start, end = span
    fobj.seek(start)
    # With an open-ended range, loop until EOF; otherwise count down bytes.
    remain = True if end is None else end - start
    while remain:
        data = fobj.read(bufsize if end is None else min(remain, bufsize))
        if not data:
            if end is None:
                return
            raise RangePastEOFError()
        yield data
        if end is not None:
            remain -= len(data)
class static_files(object):
    """WSGI application serving files (and optional listings) under *root*.

    Transparently prefers a pre-compressed ``<name>.gz`` sibling when one
    exists, honours If-Modified-Since, and parses (but currently does not
    serve) byte ranges.
    """
    read_buf = 8192
    # HTML template for directory listings; filled via `% locals()`.
    dir_listing = (u'<html><head><title>Directory listing for'
                   u'%(url_path)s</title></head><body><h1>Directory '
                   u'listing for %(url_path)s</h1><ul>%(items)s</ul>'
                   u'</body></html>')

    def __init__(self, root, index_files=None,
                 default_charset=None, listing=True):
        """root: directory to serve; index_files: names tried for '/' requests
        (default ['index.html']); default_charset: appended to text/* types;
        listing: whether to render directory listings."""
        self.root = os.path.abspath(root)
        if index_files is None:
            index_files = ['index.html']
        self.index_files = index_files
        self.default_charset = default_charset
        self.listing = listing

    def send_file(self, fn_):
        """Return a WSGI app that serves the file at path fn_."""
        def send_file(environ, start_response):
            fn = fn_
            # Prefer a pre-gzipped sibling if present.
            gzfn = fn + '.gz'
            if not fn.endswith('.gz') and os.path.isfile(gzfn):
                fn = gzfn
            st = os.stat(fn)
            mt = time_util.timestamp_to_dt(st.st_mtime, time_util.localtime)
            if not time_util.check_if_modified_since(environ, mt):
                return NotModified()(environ, start_response)
            clen = st.st_size
            ctype, cenc = mimetypes.guess_type(fn)
            if ctype:
                if self.default_charset is not None and ctype.startswith('text/'):
                    ctype += '; charset=' + self.default_charset
            ranges = environ.get('HTTP_RANGE', None)
            if environ['REQUEST_METHOD'] == 'GET' and ranges:
                try:
                    ranges = list(range_util.canon(
                        range_util.parse_ranges(ranges), clen))
                except BadRangeSpecError:
                    # Malformed Range header: ignore it and serve the whole file.
                    ranges = None
                else:
                    if not ranges:
                        return RangeNotSatisfiable(clen)(environ, start_response)
            headers = [
                ('Last-modified', time_util.dt_to_1123(mt)),
                ('Accept-ranges', 'bytes'),
            ]
            if environ['REQUEST_METHOD'] == 'GET':
                f = open(fn, 'rb')
            else:
                # NOTE(review): for non-GET (e.g. HEAD) f stays None but the
                # else-branch below still returns read_range(None, ...), which
                # would fail when the server iterates it -- confirm intended.
                f = None
            # NOTE(review): dead branch (guarded by `if False`); it references
            # PartialContent and ent_headers, neither of which is defined
            # anywhere visible -- it would raise NameError if re-enabled.
            if False and environ['REQUEST_METHOD'] == 'GET' and ranges:
                # TODO: wsgify this
                def partial_start_response(status, headers, exc_info=None):
                    pass
                resp = PartialContent(
                    ((start, end, read_range(f, (start, end)))
                     for start, end in ranges))
                content_headers = {}
                if ctype:
                    content_headers['content-type'] = ctype
                if cenc:
                    content_headers['content-encoding'] = cenc
                if content_headers:
                    for part in resp['entity']['parts']:
                        part['headers'].update(content_headers)
                resp['entity']['headers'].update(ent_headers)
            else:
                if ctype:
                    headers.append(('Content-type', ctype))
                if cenc:
                    headers.append(('Content-encoding', cenc))
                # NOTE(review): clen is an int; PEP 3333 requires header values
                # to be native str (cf. str(len(body)) in list_dir) -- confirm
                # whether the server layer tolerates ints here.
                headers.append(('Content-length', clen))
                start_response(http_status.OK, headers)
                return read_range(f, (0, None))
        return send_file

    def list_dir(self, path):
        """Return a WSGI app that renders an HTML listing of *path*."""
        def list_dir(environ, start_response):
            url_path = environ['PATH_INFO']
            files = [u'../']
            for fn in sorted(os.listdir(path)):
                if os.path.isdir(os.path.join(path, fn)):
                    files.append(fn + u'/')
                else:
                    files.append(fn)
            # NOTE(review): file names are interpolated into HTML without
            # escaping (potential markup injection), and the utf-8-encoded
            # bytes are mixed back into a unicode template -- confirm both.
            items = u''.join(u'<li><a href="%s">%s</a></li>' % (fn, fn)
                             for fn in files).encode('utf-8')
            body = self.dir_listing % locals()
            start_response(http_status.OK,
                           [('Content-type', 'text/html; charset=utf-8'),
                            ('Content-length', str(len(body)))])
            return [body]
        return list_dir

    def __call__(self, environ, start_response):
        """Dispatch: file -> send_file, dir -> index file or listing, else 404."""
        rel_path = environ['PATH_INFO'].lstrip('/').decode('utf-8')
        fn = os.path.normpath(os.path.join(self.root, rel_path))
        # NOTE(review): plain startswith is a weak traversal guard -- a root
        # of "/var/www" also matches "/var/www2"; a separator-aware check
        # (e.g. comparing against root + os.sep) would be stricter.
        if fn.startswith(self.root):
            if os.path.isdir(fn):
                for f in self.index_files:
                    nfn = os.path.join(fn, f)
                    if os.path.isfile(nfn):
                        return with_slash(self.send_file(nfn))(environ, start_response)
                if self.listing:
                    return with_slash(self.list_dir(fn))(environ, start_response)
            elif os.path.isfile(fn):
                return without_slash(self.send_file(fn))(environ, start_response)
        return NotFound()(environ, start_response)
| {
"content_hash": "da548ddc66b145b6db33de64a56eaa2a",
"timestamp": "",
"source": "github",
"line_count": 140,
"max_line_length": 87,
"avg_line_length": 40.81428571428572,
"alnum_prop": 0.5042002100105005,
"repo_name": "dhain/webskewer",
"id": "5b0af22f313dab5cbd541466c22349267421539f",
"size": "5714",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "webskewer.wsgi/webskewer/wsgi/static_files.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "60675"
}
],
"symlink_target": ""
} |
from yowsup.structs import ProtocolTreeNode
from .iq_sync import SyncIqProtocolEntity
class ResultSyncIqProtocolEntity(SyncIqProtocolEntity):
    '''
    <iq type="result" from="491632092557@s.whatsapp.net" id="1417046561-4">
        <sync index="0" wait="166952" last="true" version="1417046548593182" sid="1.30615237617e+17">
            <in>
                <user jid="{{jid}}">{{number}}</user>
            </in>
            <out>
                <user jid="{{jid}}">
                    {{number}}
                </user>
            </out>
            <invalid>
                <user>
                    abcdefgh
                </user>
            </invalid>
        </sync>
    </iq>
    '''
    def __init__(self,_id, sid, index, last, version, inNumbers, outNumbers, invalidNumbers, wait = None):
        """Build a sync "result" iq.

        in/out numbers are dicts mapping phone number -> jid; invalid
        numbers is a plain list of number strings.
        """
        super(ResultSyncIqProtocolEntity, self).__init__("result", _id, sid, index, last)
        # BUG FIX: arguments were previously passed as
        # (wait, version, inNumbers, outNumbers, invalidNumbers), shifting
        # every value one position left of the setResultSyncProps signature
        # and tripping its type assertions.  Pass them in declared order.
        self.setResultSyncProps(version, inNumbers, outNumbers, invalidNumbers, wait)
    def setResultSyncProps(self, version, inNumbers, outNumbers, invalidNumbers, wait = None):
        """Validate and store the sync result payload."""
        assert type(inNumbers) is dict, "in numbers must be a dict {number -> jid}"
        assert type(outNumbers) is dict, "out numbers must be a dict {number -> jid}"
        assert type(invalidNumbers) is list, "invalid numbers must be a list"
        self.inNumbers = inNumbers
        self.outNumbers = outNumbers
        self.invalidNumbers = invalidNumbers
        # wait is optional and arrives as a string attribute from the wire.
        self.wait = int(wait) if wait is not None else None
        self.version = version
    def __str__(self):
        # NOTE(review): this deliberately(?) skips SyncIqProtocolEntity.__str__
        # by naming it as the super() target -- confirm the sid/index/last
        # lines are meant to be omitted from the string form.
        out = super(SyncIqProtocolEntity, self).__str__()
        if self.wait is not None:
            out += "Wait: %s\n" % self.wait
        out += "Version: %s\n" % self.version
        out += "In Numbers: %s\n" % (",".join(self.inNumbers))
        out += "Out Numbers: %s\n" % (",".join(self.outNumbers))
        out += "Invalid Numbers: %s\n" % (",".join(self.invalidNumbers))
        return out
    def toProtocolTreeNode(self):
        """Serialize to a ProtocolTreeNode, adding in/out/invalid children
        only when non-empty."""
        outUsers = [ProtocolTreeNode("user", {"jid": jid}, None, number) for number, jid in self.outNumbers.items()]
        inUsers = [ProtocolTreeNode("user", {"jid": jid}, None, number) for number, jid in self.inNumbers.items()]
        invalidUsers = [ProtocolTreeNode("user", {}, None, number) for number in self.invalidNumbers]
        node = super(ResultSyncIqProtocolEntity, self).toProtocolTreeNode()
        syncNode = node.getChild("sync")
        syncNode.setAttribute("version", self.version)
        if self.wait is not None:
            syncNode.setAttribute("wait", str(self.wait))
        if len(outUsers):
            syncNode.addChild(ProtocolTreeNode("out", children = outUsers))
        if len(inUsers):
            syncNode.addChild(ProtocolTreeNode("in", children = inUsers))
        if len(invalidUsers):
            syncNode.addChildren([ProtocolTreeNode("invalid", children = invalidUsers)])
        return node
    @staticmethod
    def fromProtocolTreeNode(node):
        """Parse a sync result node; missing in/out/invalid children are
        treated as empty."""
        syncNode = node.getChild("sync")
        outNode = syncNode.getChild("out")
        inNode = syncNode.getChild("in")
        invalidNode = syncNode.getChild("invalid")
        outUsers = outNode.getAllChildren() if outNode else []
        inUsers = inNode.getAllChildren() if inNode else []
        invalidUsers = [inode.data for inode in invalidNode.getAllChildren()] if invalidNode else []
        outUsersDict = {}
        for u in outUsers:
            outUsersDict[u.data] = u.getAttributeValue("jid")
        inUsersDict = {}
        for u in inUsers:
            inUsersDict[u.data] = u.getAttributeValue("jid")
        # Reuse the base-class parser, then rebrand and attach result props.
        entity = SyncIqProtocolEntity.fromProtocolTreeNode(node)
        entity.__class__ = ResultSyncIqProtocolEntity
        entity.setResultSyncProps(syncNode.getAttributeValue("version"),
                                  inUsersDict,
                                  outUsersDict,
                                  invalidUsers,
                                  syncNode.getAttributeValue("wait")
                                  )
        return entity
| {
"content_hash": "96b056d2dca5b63f009210c2cd039f42",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 116,
"avg_line_length": 38.03809523809524,
"alnum_prop": 0.6014021031547321,
"repo_name": "felix-dumit/campusbot",
"id": "f1f16bcfd8585ce3101a7567fe69f5ded5a47b82",
"size": "3994",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "yowsup2/yowsup/layers/protocol_contacts/protocolentities/iq_sync_result.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "13787"
},
{
"name": "Python",
"bytes": "584218"
},
{
"name": "Shell",
"bytes": "254"
}
],
"symlink_target": ""
} |
# Resolve the MKL-provided LAPACK libraries once at import time; they are
# linked into the Fortran extension declared in configuration() below.
import numpy.distutils.system_info as sysinfo
lib=sysinfo.lapack_mkl_info().get_info()['libraries']
def configuration(parent_package='', top_path=None):
    """Describe the ``futils`` package for numpy.distutils.

    Returns a Configuration containing the ``fmodule`` extension, built
    from ``fysics.f90`` and linked against the MKL LAPACK libraries
    resolved at module import time.
    """
    from numpy.distutils.misc_util import Configuration
    cfg = Configuration('futils', parent_package, top_path)
    cfg.add_extension('fmodule', sources=['fysics.f90'], libraries=lib)
    return cfg
if __name__ == '__main__':
    # Standard numpy.distutils entry point: hand the configuration factory
    # defined above to setup().
    from numpy.distutils.core import setup
    setup(configuration=configuration)
| {
"content_hash": "26e799b43e7e845b036043bd2a234629",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 64,
"avg_line_length": 38.416666666666664,
"alnum_prop": 0.737527114967462,
"repo_name": "GiggleLiu/tridmat",
"id": "6bd29755af9daaa2077e1d895c097ebdf1617600",
"size": "461",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "futils/setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "159783"
},
{
"name": "FORTRAN",
"bytes": "20523"
},
{
"name": "Makefile",
"bytes": "817"
},
{
"name": "Python",
"bytes": "67711"
}
],
"symlink_target": ""
} |
import copy
import fixtures
import mock
from oslo_utils.fixture import uuidsentinel as uuids
from nova import block_device
from nova import context
from nova import exception
from nova import objects
from nova.objects import fields as obj_fields
from nova import test
from nova.tests.unit import fake_block_device
import nova.tests.unit.image.fake
from nova.tests.unit.virt import fakelibosinfo
from nova.virt import block_device as driver_block_device
from nova.virt import driver
from nova.virt.libvirt import blockinfo
class LibvirtBlockInfoTest(test.NoDBTestCase):
    def setUp(self):
        """Build shared fixtures: admin context, stubbed image service, a
        test instance dict and an m1.micro-like Flavor attached to it."""
        super(LibvirtBlockInfoTest, self).setUp()
        self.user_id = 'fake'
        self.project_id = 'fake'
        self.context = context.get_admin_context()
        nova.tests.unit.image.fake.stub_out_image_service(self)
        self.test_instance = {
            'uuid': '32dfcb37-5af1-552b-357c-be8c3aa38310',
            'memory_kb': '1024000',
            'basepath': '/some/path',
            'bridge_name': 'br100',
            'vcpus': 2,
            'project_id': 'fake',
            'bridge': 'br101',
            'image_ref': '155d900f-4e14-4e4c-a73d-069cbf4541e6',
            'root_gb': 10,
            'ephemeral_gb': 20,
            'instance_type_id': 2,  # m1.tiny
            'config_drive': None,
            'launched_at': None,
            'system_metadata': {},
        }
        self.test_image_meta = {
            'disk_format': 'raw',
        }
        flavor = objects.Flavor(memory_mb=128,
                                root_gb=0,
                                name='m1.micro',
                                ephemeral_gb=0,
                                vcpus=1,
                                swap=0,
                                rxtx_factor=1.0,
                                flavorid='1',
                                vcpu_weight=None,
                                id=2)
        self.test_instance['flavor'] = flavor
        self.test_instance['old_flavor'] = None
        self.test_instance['new_flavor'] = None
    def test_volume_in_mapping(self):
        """volume_in_mapping() matches swap, ephemeral and volume device
        names (without the /dev/ prefix) and rejects unknown ones."""
        swap = {'device_name': '/dev/sdb',
                'swap_size': 1}
        ephemerals = [{'device_type': 'disk', 'guest_format': 'ext4',
                       'device_name': '/dev/sdc1', 'size': 10},
                      {'disk_bus': 'ide', 'guest_format': None,
                       'device_name': '/dev/sdd', 'size': 10}]
        block_device_mapping = [{'mount_device': '/dev/sde',
                                 'device_path': 'fake_device'},
                                {'mount_device': '/dev/sdf',
                                 'device_path': 'fake_device'}]
        block_device_info = {
                'root_device_name': '/dev/sda',
                'swap': swap,
                'ephemerals': ephemerals,
                'block_device_mapping': block_device_mapping}
        def _assert_volume_in_mapping(device_name, true_or_false):
            self.assertEqual(
                true_or_false,
                block_device.volume_in_mapping(device_name,
                                               block_device_info))
        # root is not a "volume"; everything else listed above is.
        _assert_volume_in_mapping('sda', False)
        _assert_volume_in_mapping('sdb', True)
        _assert_volume_in_mapping('sdc1', True)
        _assert_volume_in_mapping('sdd', True)
        _assert_volume_in_mapping('sde', True)
        _assert_volume_in_mapping('sdf', True)
        _assert_volume_in_mapping('sdg', False)
        _assert_volume_in_mapping('sdh1', False)
    def test_find_disk_dev(self):
        """find_disk_dev_for_disk_bus() returns the first unused device
        name for the requested bus prefix."""
        mapping = {
            "disk.local": {
                'dev': 'sda',
                'bus': 'scsi',
                'type': 'disk',
                },
            "disk.swap": {
                'dev': 'sdc',
                'bus': 'scsi',
                'type': 'disk',
                },
            }
        # sda and sdc are taken, so the next free scsi name is sdb.
        dev = blockinfo.find_disk_dev_for_disk_bus(mapping, 'scsi')
        self.assertEqual('sdb', dev)
        dev = blockinfo.find_disk_dev_for_disk_bus(mapping, 'virtio')
        self.assertEqual('vda', dev)
        dev = blockinfo.find_disk_dev_for_disk_bus(mapping, 'fdc')
        self.assertEqual('fda', dev)
    @mock.patch('nova.virt.libvirt.blockinfo.has_disk_dev', return_value=True)
    def test_find_disk_dev_for_disk_bus_no_free_error(self, has_disk_dev_mock):
        """A NovaException is raised when every device name for a given
        prefix is already reserved (has_disk_dev mocked to always True)."""
        mapping = {
            'disk': {
                'bus': 'ide',
                'dev': 'hda',
                'type': 'cdrom',
                'boot_index': '1',
            }
        }
        self.assertRaises(exception.NovaException,
                          blockinfo.find_disk_dev_for_disk_bus,
                          mapping, 'ide')
    def test_get_next_disk_dev(self):
        """get_next_disk_info() hands out consecutive device names per bus
        and honours an explicit device type (cdrom on ide -> hda)."""
        mapping = {}
        mapping['disk.local'] = blockinfo.get_next_disk_info(mapping,
                                                             'virtio')
        self.assertEqual({'dev': 'vda', 'bus': 'virtio', 'type': 'disk'},
                         mapping['disk.local'])
        mapping['disk.swap'] = blockinfo.get_next_disk_info(mapping,
                                                            'virtio')
        self.assertEqual({'dev': 'vdb', 'bus': 'virtio', 'type': 'disk'},
                         mapping['disk.swap'])
        mapping['disk.config'] = blockinfo.get_next_disk_info(mapping,
                                                              'ide',
                                                              'cdrom')
        self.assertEqual({'dev': 'hda', 'bus': 'ide', 'type': 'cdrom'},
                         mapping['disk.config'])
    def test_get_next_disk_dev_boot_index(self):
        """A negative boot_index is omitted from the result; a non-negative
        one is carried through as a string."""
        info = blockinfo.get_next_disk_info({}, 'virtio', boot_index=-1)
        self.assertEqual({'dev': 'vda', 'bus': 'virtio', 'type': 'disk'}, info)
        info = blockinfo.get_next_disk_info({}, 'virtio', boot_index=2)
        self.assertEqual({'dev': 'vda', 'bus': 'virtio',
                          'type': 'disk', 'boot_index': '2'},
                         info)
    def test_get_disk_mapping_simple(self):
        """The simplest possible disk mapping setup, all defaults."""
        instance_ref = objects.Instance(**self.test_instance)
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
        with mock.patch.object(instance_ref, 'get_flavor',
                               return_value=instance_ref.flavor) as get_flavor:
            mapping = blockinfo.get_disk_mapping("kvm", instance_ref,
                                                 "virtio", "ide",
                                                 image_meta)
        # Since there was no block_device_info passed to get_disk_mapping we
        # expect to get the swap info from the flavor in the instance.
        get_flavor.assert_called_once_with()
        expect = {
            'disk': {'bus': 'virtio', 'dev': 'vda',
                     'type': 'disk', 'boot_index': '1'},
            'disk.local': {'bus': 'virtio', 'dev': 'vdb', 'type': 'disk'},
            'root': {'bus': 'virtio', 'dev': 'vda',
                     'type': 'disk', 'boot_index': '1'}
            }
        self.assertEqual(expect, mapping)
    def test_get_disk_mapping_simple_rootdev(self):
        """A simple disk mapping setup, but with a custom root device name;
        the root keeps that name while the ephemeral falls back to vda."""
        instance_ref = objects.Instance(**self.test_instance)
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
        block_device_info = {
            'root_device_name': '/dev/sda'
            }
        mapping = blockinfo.get_disk_mapping("kvm", instance_ref,
                                             "virtio", "ide",
                                             image_meta,
                                             block_device_info)
        expect = {
            'disk': {'bus': 'virtio', 'dev': 'sda',
                     'type': 'disk', 'boot_index': '1'},
            'disk.local': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk'},
            'root': {'bus': 'virtio', 'dev': 'sda',
                     'type': 'disk', 'boot_index': '1'}
            }
        self.assertEqual(expect, mapping)
    def test_get_disk_mapping_rescue(self):
        """A simple disk mapping setup, but in rescue mode: the rescue image
        becomes the boot disk and the original root moves to vdb."""
        instance_ref = objects.Instance(**self.test_instance)
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
        mapping = blockinfo.get_disk_mapping("kvm", instance_ref,
                                             "virtio", "ide",
                                             image_meta,
                                             rescue=True)
        expect = {
            'disk.rescue': {'bus': 'virtio', 'dev': 'vda',
                            'type': 'disk', 'boot_index': '1'},
            'disk': {'bus': 'virtio', 'dev': 'vdb', 'type': 'disk'},
            'root': {'bus': 'virtio', 'dev': 'vda',
                     'type': 'disk', 'boot_index': '1'},
            }
        self.assertEqual(expect, mapping)
    def test_get_disk_mapping_rescue_with_config(self):
        """A simple disk mapping setup, but in rescue mode with a config
        drive; on aarch64 the config drive lands on scsi instead of ide."""
        test_instance_with_config = self.test_instance
        test_instance_with_config['config_drive'] = True
        instance_ref = objects.Instance(**test_instance_with_config)
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
        mapping = blockinfo.get_disk_mapping("kvm", instance_ref,
                                             "virtio", "ide",
                                             image_meta,
                                             rescue=True)
        expect_disk_config_rescue = {
            'bus': 'ide', 'dev': 'hda', 'type': 'cdrom'}
        if blockinfo.libvirt_utils.get_arch({}) == 'aarch64':
            expect_disk_config_rescue['bus'] = 'scsi'
            expect_disk_config_rescue['dev'] = 'sda'
        expect = {
            'disk.rescue': {'bus': 'virtio', 'dev': 'vda',
                            'type': 'disk', 'boot_index': '1'},
            'disk': {'bus': 'virtio', 'dev': 'vdb', 'type': 'disk'},
            'disk.config.rescue': expect_disk_config_rescue,
            'root': {'bus': 'virtio', 'dev': 'vda',
                     'type': 'disk', 'boot_index': '1'},
            }
        self.assertEqual(expect, mapping)
    def test_get_disk_mapping_lxc(self):
        """A simple disk mapping setup, but for lxc: no device names are
        assigned and no ephemeral disk appears."""
        self.test_instance['ephemeral_gb'] = 0
        instance_ref = objects.Instance(**self.test_instance)
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
        mapping = blockinfo.get_disk_mapping("lxc", instance_ref,
                                             "lxc", "lxc",
                                             image_meta)
        expect = {
            'disk': {'bus': 'lxc', 'dev': None,
                     'type': 'disk', 'boot_index': '1'},
            'root': {'bus': 'lxc', 'dev': None,
                     'type': 'disk', 'boot_index': '1'},
            }
        self.assertEqual(expect, mapping)
    def test_get_disk_mapping_simple_iso(self):
        """A simple disk mapping setup, but with an ISO root device: the
        root becomes a cdrom on the cdrom bus (ide/hda)."""
        instance_ref = objects.Instance(**self.test_instance)
        image_meta = objects.ImageMeta.from_dict({'disk_format': 'iso'})
        mapping = blockinfo.get_disk_mapping("kvm", instance_ref,
                                             "virtio", "ide",
                                             image_meta)
        expect = {
            'disk': {'bus': 'ide', 'dev': 'hda',
                     'type': 'cdrom', 'boot_index': '1'},
            'disk.local': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk'},
            'root': {'bus': 'ide', 'dev': 'hda',
                     'type': 'cdrom', 'boot_index': '1'},
            }
        self.assertEqual(expect, mapping)
    def test_get_disk_mapping_simple_swap(self):
        """A simple disk mapping setup, but with flavor swap added: a
        disk.swap entry is appended after root and ephemeral."""
        instance_ref = objects.Instance(**self.test_instance)
        instance_ref.flavor.swap = 5
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
        mapping = blockinfo.get_disk_mapping("kvm", instance_ref,
                                             "virtio", "ide",
                                             image_meta)
        expect = {
            'disk': {'bus': 'virtio', 'dev': 'vda',
                     'type': 'disk', 'boot_index': '1'},
            'disk.local': {'bus': 'virtio', 'dev': 'vdb', 'type': 'disk'},
            'disk.swap': {'bus': 'virtio', 'dev': 'vdc', 'type': 'disk'},
            'root': {'bus': 'virtio', 'dev': 'vda',
                     'type': 'disk', 'boot_index': '1'},
            }
        self.assertEqual(expect, mapping)
    def test_get_disk_mapping_volumes_swap(self):
        """A disk mapping setup with volumes attached, then a flavor swap
        device added after them."""
        instance_ref = objects.Instance(**self.test_instance)
        instance_ref.root_device_name = '/dev/vda'
        instance_ref.ephemeral_gb = 0
        block_dev_info = {'swap': None, 'root_device_name': u'/dev/vda',
                          'ephemerals': [],
                          'block_device_mapping': [{'boot_index': None,
                                                    'mount_device':
                                                        u'/dev/vdb',
                                                    'connection_info': {},
                                                    'disk_bus': None,
                                                    'device_type': None},
                                                   {'boot_index': 0,
                                                    'mount_device':
                                                        u'/dev/vda',
                                                    'connection_info': {},
                                                    'disk_bus': u'virtio',
                                                    'device_type': u'disk'}]}
        instance_ref.flavor.swap = 5
        # NOTE(review): sibling tests wrap this in objects.ImageMeta; here a
        # bare dict is passed -- presumably accepted too, confirm intentional.
        image_meta = {}
        mapping = blockinfo.get_disk_mapping("kvm", instance_ref,
                                             "virtio", "ide",
                                             image_meta,
                                             block_device_info=block_dev_info)
        expect = {
            '/dev/vda': {'bus': 'virtio', 'dev': 'vda',
                         'type': 'disk', 'boot_index': '1'},
            '/dev/vdb': {'bus': 'virtio', 'dev': 'vdb', 'type': 'disk'},
            'disk.swap': {'bus': 'virtio', 'dev': 'vdc', 'type': 'disk'},
            'root': {'bus': 'virtio', 'dev': 'vda',
                     'type': 'disk', 'boot_index': '1'},
            }
        self.assertEqual(expect, mapping)
    def test_get_disk_mapping_simple_configdrive(self):
        """A simple disk mapping setup, but with a config drive added.

        Power and aarch64 have no IDE support, so libvirt translates all
        ide requests to scsi on those architectures; the expected config
        drive bus/dev therefore depends on the host arch.
        """
        self.flags(force_config_drive=True)
        instance_ref = objects.Instance(**self.test_instance)
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
        mapping = blockinfo.get_disk_mapping("kvm", instance_ref,
                                             "virtio", "ide",
                                             image_meta)
        # Pick the first drive letter on the bus that is available
        # as the config drive. Delete the last device hardcode as
        # the config drive here.
        bus_ppc = ("scsi", "sda")
        bus_aarch64 = ("scsi", "sda")
        expect_bus = {"ppc": bus_ppc, "ppc64": bus_ppc,
                      "ppc64le": bus_ppc, "aarch64": bus_aarch64}
        bus, dev = expect_bus.get(blockinfo.libvirt_utils.get_arch({}),
                                  ("ide", "hda"))
        expect = {
            'disk': {'bus': 'virtio', 'dev': 'vda',
                     'type': 'disk', 'boot_index': '1'},
            'disk.local': {'bus': 'virtio', 'dev': 'vdb', 'type': 'disk'},
            'disk.config': {'bus': bus, 'dev': dev, 'type': 'cdrom'},
            'root': {'bus': 'virtio', 'dev': 'vda',
                     'type': 'disk', 'boot_index': '1'}
            }
        self.assertEqual(expect, mapping)
    def test_get_disk_mapping_cdrom_configdrive(self):
        """A simple disk mapping setup, with the config drive explicitly
        requested as iso9660, so it is exposed as a cdrom.

        Power and aarch64 have no IDE support, so libvirt translates all
        ide requests to scsi on those architectures.
        """
        self.flags(force_config_drive=True)
        self.flags(config_drive_format='iso9660')
        instance_ref = objects.Instance(**self.test_instance)
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
        mapping = blockinfo.get_disk_mapping("kvm", instance_ref,
                                             "virtio", "ide",
                                             image_meta)
        bus_ppc = ("scsi", "sda")
        bus_aarch64 = ("scsi", "sda")
        expect_bus = {"ppc": bus_ppc, "ppc64": bus_ppc,
                      "ppc64le": bus_ppc, "aarch64": bus_aarch64}
        bus, dev = expect_bus.get(blockinfo.libvirt_utils.get_arch({}),
                                  ("ide", "hda"))
        expect = {
            'disk': {'bus': 'virtio', 'dev': 'vda',
                     'type': 'disk', 'boot_index': '1'},
            'disk.local': {'bus': 'virtio', 'dev': 'vdb', 'type': 'disk'},
            'disk.config': {'bus': bus, 'dev': dev, 'type': 'cdrom'},
            'root': {'bus': 'virtio', 'dev': 'vda',
                     'type': 'disk', 'boot_index': '1'}
            }
        self.assertEqual(expect, mapping)
    def test_get_disk_mapping_disk_configdrive(self):
        """A simple disk mapping setup, with the config drive requested as
        vfat, so it is exposed as a plain disk on the disk bus."""
        self.flags(force_config_drive=True)
        self.flags(config_drive_format='vfat')
        instance_ref = objects.Instance(**self.test_instance)
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
        mapping = blockinfo.get_disk_mapping("kvm", instance_ref,
                                             "virtio", "ide",
                                             image_meta)
        expect = {
            'disk': {'bus': 'virtio', 'dev': 'vda',
                     'type': 'disk', 'boot_index': '1'},
            'disk.local': {'bus': 'virtio', 'dev': 'vdb', 'type': 'disk'},
            'disk.config': {'bus': 'virtio', 'dev': 'vdc', 'type': 'disk'},
            'root': {'bus': 'virtio', 'dev': 'vda',
                     'type': 'disk', 'boot_index': '1'},
            }
        self.assertEqual(expect, mapping)
    def test_get_disk_mapping_ephemeral(self):
        """A disk mapping with several ephemeral devices: explicit
        guest_format/disk_bus/device_type on each BDM are honoured and the
        swap device slots in after them."""
        instance_ref = objects.Instance(**self.test_instance)
        instance_ref.flavor.swap = 5
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
        block_device_info = {
            'ephemerals': [
                {'device_type': 'disk', 'guest_format': 'ext4',
                 'device_name': '/dev/vdb', 'size': 10},
                {'disk_bus': 'ide', 'guest_format': None,
                 'device_name': '/dev/vdc', 'size': 10},
                {'device_type': 'floppy',
                 'device_name': '/dev/vdd', 'size': 10},
                ]
            }
        mapping = blockinfo.get_disk_mapping("kvm", instance_ref,
                                             "virtio", "ide",
                                             image_meta,
                                             block_device_info)
        expect = {
            'disk': {'bus': 'virtio', 'dev': 'vda',
                     'type': 'disk', 'boot_index': '1'},
            'disk.eph0': {'bus': 'virtio', 'dev': 'vdb',
                          'type': 'disk', 'format': 'ext4'},
            'disk.eph1': {'bus': 'ide', 'dev': 'vdc', 'type': 'disk'},
            'disk.eph2': {'bus': 'virtio', 'dev': 'vdd', 'type': 'floppy'},
            'disk.swap': {'bus': 'virtio', 'dev': 'vde', 'type': 'disk'},
            'root': {'bus': 'virtio', 'dev': 'vda',
                     'type': 'disk', 'boot_index': '1'},
            }
        self.assertEqual(expect, mapping)
    def test_get_disk_mapping_custom_swap(self):
        """A disk mapping with a swap device at position vdb. This
        should cause disk.local to be removed."""
        instance_ref = objects.Instance(**self.test_instance)
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
        block_device_info = {
            'swap': {'device_name': '/dev/vdb',
                     'swap_size': 10},
            }
        mapping = blockinfo.get_disk_mapping("kvm", instance_ref,
                                             "virtio", "ide",
                                             image_meta,
                                             block_device_info)
        expect = {
            'disk': {'bus': 'virtio', 'dev': 'vda',
                     'type': 'disk', 'boot_index': '1'},
            'disk.swap': {'bus': 'virtio', 'dev': 'vdb', 'type': 'disk'},
            'root': {'bus': 'virtio', 'dev': 'vda',
                     'type': 'disk', 'boot_index': '1'},
            }
        self.assertEqual(expect, mapping)
    def test_get_disk_mapping_blockdev_root(self):
        """A disk mapping with a block device replacing the default root:
        the BDM's mount_device becomes the boot disk."""
        instance_ref = objects.Instance(**self.test_instance)
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
        block_device_info = {
            'block_device_mapping': [
                {'connection_info': "fake",
                 'mount_device': "/dev/vda",
                 'boot_index': 0,
                 'device_type': 'disk',
                 'delete_on_termination': True},
                ]
            }
        mapping = blockinfo.get_disk_mapping("kvm", instance_ref,
                                             "virtio", "ide",
                                             image_meta,
                                             block_device_info)
        expect = {
            '/dev/vda': {'bus': 'virtio', 'dev': 'vda',
                         'type': 'disk', 'boot_index': '1'},
            'disk.local': {'bus': 'virtio', 'dev': 'vdb', 'type': 'disk'},
            'root': {'bus': 'virtio', 'dev': 'vda',
                     'type': 'disk', 'boot_index': '1'},
            }
        self.assertEqual(expect, mapping)
    def test_get_disk_mapping_blockdev_root_on_spawn(self):
        """A disk mapping with a block device initializing the default root:
        the BDM has no mount_device/device_type yet (spawn time) and the
        defaults (/dev/vda, disk) are filled in."""
        instance_ref = objects.Instance(**self.test_instance)
        # NOTE(review): sibling tests pass objects.ImageMeta here; a bare
        # dict appears to be accepted too -- confirm intentional.
        image_meta = {}
        block_device_info = {
            'block_device_mapping': [
                {'connection_info': None,
                 'mount_device': None,
                 'boot_index': 0,
                 'device_type': None,
                 'delete_on_termination': True},
                ]
            }
        mapping = blockinfo.get_disk_mapping("kvm", instance_ref,
                                             "virtio", "ide",
                                             image_meta,
                                             block_device_info)
        expect = {
            '/dev/vda': {'bus': 'virtio', 'dev': 'vda',
                         'type': 'disk', 'boot_index': '1'},
            'disk.local': {'bus': 'virtio', 'dev': 'vdb', 'type': 'disk'},
            'root': {'bus': 'virtio', 'dev': 'vda',
                     'type': 'disk', 'boot_index': '1'},
            }
        self.assertEqual(expect, mapping)
    def test_get_disk_mapping_blockdev_eph(self):
        """A disk mapping with a block device replacing the ephemeral
        device: no disk.local entry is produced for /dev/vdb."""
        instance_ref = objects.Instance(**self.test_instance)
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
        block_device_info = {
            'block_device_mapping': [
                {'connection_info': "fake",
                 'mount_device': "/dev/vdb",
                 'boot_index': -1,
                 'delete_on_termination': True},
                ]
            }
        mapping = blockinfo.get_disk_mapping("kvm", instance_ref,
                                             "virtio", "ide",
                                             image_meta,
                                             block_device_info)
        expect = {
            'disk': {'bus': 'virtio', 'dev': 'vda',
                     'type': 'disk', 'boot_index': '1'},
            '/dev/vdb': {'bus': 'virtio', 'dev': 'vdb', 'type': 'disk'},
            'root': {'bus': 'virtio', 'dev': 'vda',
                     'type': 'disk', 'boot_index': '1'},
            }
        self.assertEqual(expect, mapping)
    def test_get_disk_mapping_blockdev_many(self):
        """A disk mapping with block devices replacing all local disks:
        per-BDM disk_bus and device_type overrides are honoured."""
        instance_ref = objects.Instance(**self.test_instance)
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
        block_device_info = {
            'block_device_mapping': [
                {'connection_info': "fake",
                 'mount_device': "/dev/vda",
                 'boot_index': 0,
                 'disk_bus': 'scsi',
                 'delete_on_termination': True},
                {'connection_info': "fake",
                 'mount_device': "/dev/vdb",
                 'boot_index': -1,
                 'delete_on_termination': True},
                {'connection_info': "fake",
                 'mount_device': "/dev/vdc",
                 'boot_index': -1,
                 'device_type': 'cdrom',
                 'delete_on_termination': True},
                ]
            }
        mapping = blockinfo.get_disk_mapping("kvm", instance_ref,
                                             "virtio", "ide",
                                             image_meta,
                                             block_device_info)
        expect = {
            '/dev/vda': {'bus': 'scsi', 'dev': 'vda',
                         'type': 'disk', 'boot_index': '1'},
            '/dev/vdb': {'bus': 'virtio', 'dev': 'vdb', 'type': 'disk'},
            '/dev/vdc': {'bus': 'virtio', 'dev': 'vdc', 'type': 'cdrom'},
            'root': {'bus': 'scsi', 'dev': 'vda',
                     'type': 'disk', 'boot_index': '1'},
            }
        self.assertEqual(expect, mapping)
    def test_get_disk_mapping_complex(self):
        """The strangest possible disk mapping setup: custom root name,
        custom swap position, two ephemerals and an attached volume with a
        non-zero boot index."""
        instance_ref = objects.Instance(**self.test_instance)
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
        block_device_info = {
            'root_device_name': '/dev/vdf',
            'swap': {'device_name': '/dev/vdy',
                     'swap_size': 10},
            'ephemerals': [
                {'device_type': 'disk', 'guest_format': 'ext4',
                 'device_name': '/dev/vdb', 'size': 10},
                {'disk_bus': 'ide', 'guest_format': None,
                 'device_name': '/dev/vdc', 'size': 10},
                ],
            'block_device_mapping': [
                {'connection_info': "fake",
                 'mount_device': "/dev/vda",
                 'boot_index': 1,
                 'delete_on_termination': True},
                ]
            }
        mapping = blockinfo.get_disk_mapping("kvm", instance_ref,
                                             "virtio", "ide",
                                             image_meta,
                                             block_device_info)
        expect = {
            'disk': {'bus': 'virtio', 'dev': 'vdf',
                     'type': 'disk', 'boot_index': '1'},
            '/dev/vda': {'bus': 'virtio', 'dev': 'vda',
                         'type': 'disk', 'boot_index': '2'},
            'disk.eph0': {'bus': 'virtio', 'dev': 'vdb',
                          'type': 'disk', 'format': 'ext4'},
            'disk.eph1': {'bus': 'ide', 'dev': 'vdc', 'type': 'disk'},
            'disk.swap': {'bus': 'virtio', 'dev': 'vdy', 'type': 'disk'},
            'root': {'bus': 'virtio', 'dev': 'vdf',
                     'type': 'disk', 'boot_index': '1'},
            }
        self.assertEqual(expect, mapping)
    def test_get_disk_mapping_updates_original(self):
        """get_disk_mapping() normalizes the passed block_device_info in
        place: bogus bus/type values are replaced with sane defaults and a
        missing mount_device is assigned."""
        instance_ref = objects.Instance(**self.test_instance)
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
        block_device_info = {
            'root_device_name': '/dev/vda',
            'swap': {'device_name': '/dev/vdb',
                     'device_type': 'really_lame_type',
                     'swap_size': 10},
            'ephemerals': [{'disk_bus': 'no_such_bus',
                            'device_type': 'yeah_right',
                            'device_name': '/dev/vdc', 'size': 10}],
            'block_device_mapping': [
                {'connection_info': "fake",
                 'mount_device': None,
                 'device_type': 'lawnmower',
                 'delete_on_termination': True}]
            }
        expected_swap = {'device_name': '/dev/vdb', 'disk_bus': 'virtio',
                         'device_type': 'disk', 'swap_size': 10}
        expected_ephemeral = {'disk_bus': 'virtio',
                              'device_type': 'disk',
                              'device_name': '/dev/vdc', 'size': 10}
        expected_bdm = {'connection_info': "fake",
                        'mount_device': '/dev/vdd',
                        'device_type': 'disk',
                        'disk_bus': 'virtio',
                        'delete_on_termination': True}
        with mock.patch.object(instance_ref, 'get_flavor') as get_flavor_mock:
            blockinfo.get_disk_mapping("kvm", instance_ref,
                                       "virtio", "ide",
                                       image_meta,
                                       block_device_info)
        # we should have gotten the swap info from block_device_info rather
        # than the flavor information on the instance
        self.assertFalse(get_flavor_mock.called)
        self.assertEqual(expected_swap, block_device_info['swap'])
        self.assertEqual(expected_ephemeral,
                         block_device_info['ephemerals'][0])
        self.assertEqual(expected_bdm,
                         block_device_info['block_device_mapping'][0])
    def test_get_disk_bus(self):
        """get_disk_bus_for_device_type(): default bus per (arch, device
        type), image-property overrides per hypervisor, and rejection of an
        unsupported hw_disk_bus."""
        instance = objects.Instance(**self.test_instance)
        expected = (
            (obj_fields.Architecture.X86_64, 'disk', 'virtio'),
            (obj_fields.Architecture.X86_64, 'cdrom', 'ide'),
            (obj_fields.Architecture.X86_64, 'floppy', 'fdc'),
            (obj_fields.Architecture.PPC, 'disk', 'virtio'),
            (obj_fields.Architecture.PPC, 'cdrom', 'scsi'),
            (obj_fields.Architecture.PPC64, 'disk', 'virtio'),
            (obj_fields.Architecture.PPC64, 'cdrom', 'scsi'),
            (obj_fields.Architecture.PPCLE, 'disk', 'virtio'),
            (obj_fields.Architecture.PPCLE, 'cdrom', 'scsi'),
            (obj_fields.Architecture.PPC64LE, 'disk', 'virtio'),
            (obj_fields.Architecture.PPC64LE, 'cdrom', 'scsi'),
            (obj_fields.Architecture.S390, 'disk', 'virtio'),
            (obj_fields.Architecture.S390, 'cdrom', 'scsi'),
            (obj_fields.Architecture.S390X, 'disk', 'virtio'),
            (obj_fields.Architecture.S390X, 'cdrom', 'scsi'),
            (obj_fields.Architecture.AARCH64, 'disk', 'virtio'),
            (obj_fields.Architecture.AARCH64, 'cdrom', 'scsi')
            )
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
        # Architecture-based defaults (get_arch mocked per case).
        for guestarch, dev, res in expected:
            with mock.patch.object(blockinfo.libvirt_utils,
                                   'get_arch',
                                   return_value=guestarch):
                bus = blockinfo.get_disk_bus_for_device_type(
                    instance, 'kvm', image_meta, dev)
                self.assertEqual(res, bus)
        # hw_disk_bus / hw_cdrom_bus image properties override the default.
        expected = (
            ('kvm', 'scsi', None, 'disk', 'scsi'),
            ('kvm', None, 'scsi', 'cdrom', 'scsi'),
            ('kvm', 'usb', None, 'disk', 'usb'),
            ('parallels', 'scsi', None, 'disk', 'scsi'),
            ('parallels', None, None, 'disk', 'scsi'),
            ('parallels', None, 'ide', 'cdrom', 'ide'),
            ('parallels', None, None, 'cdrom', 'ide')
            )
        for hv, dbus, cbus, dev, res in expected:
            props = {}
            if dbus is not None:
                props['hw_disk_bus'] = dbus
            if cbus is not None:
                props['hw_cdrom_bus'] = cbus
            image_meta = objects.ImageMeta.from_dict(
                {'properties': props})
            bus = blockinfo.get_disk_bus_for_device_type(
                instance, hv, image_meta, device_type=dev)
            self.assertEqual(res, bus)
        # A bus the hypervisor cannot provide is rejected outright.
        image_meta = objects.ImageMeta.from_dict(
            {'properties': {'hw_disk_bus': 'xen'}})
        self.assertRaises(exception.UnsupportedHardware,
                          blockinfo.get_disk_bus_for_device_type,
                          instance, 'kvm', image_meta)
    def test_get_disk_bus_with_osinfo(self):
        """When the image declares an os_name, the disk bus comes from the
        (faked) libosinfo database."""
        self.useFixture(fixtures.MonkeyPatch(
            'nova.virt.osinfo.libosinfo',
            fakelibosinfo))
        instance = objects.Instance(**self.test_instance)
        image_meta = {'properties': {'os_name': 'fedora22'}}
        image_meta = objects.ImageMeta.from_dict(image_meta)
        bus = blockinfo.get_disk_bus_for_device_type(instance,
                                                     'kvm', image_meta)
        self.assertEqual('virtio', bus)
    def test_success_get_disk_bus_for_disk_dev(self):
        """get_disk_bus_for_disk_dev() infers the bus from the device-name
        prefix, per virt type."""
        expected = (
            ('ide', ("kvm", "hda")),
            ('scsi', ("kvm", "sdf")),
            ('virtio', ("kvm", "vds")),
            ('fdc', ("kvm", "fdc")),
            ('uml', ("kvm", "ubd")),
            ('xen', ("xen", "sdf")),
            ('xen', ("xen", "xvdb"))
            )
        for res, args in expected:
            self.assertEqual(res, blockinfo.get_disk_bus_for_disk_dev(*args))
    def test_fail_get_disk_bus_for_disk_dev_unsupported_virt_type(self):
        """An unknown virt type ('kvm1') raises UnsupportedVirtType."""
        instance = objects.Instance(**self.test_instance)
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
        self.assertRaises(exception.UnsupportedVirtType,
                          blockinfo.get_disk_bus_for_device_type,
                          instance, 'kvm1', image_meta)
    def test_fail_get_disk_bus_for_disk_dev(self):
        """An unrecognizable device-name prefix raises NovaException."""
        self.assertRaises(exception.NovaException,
                          blockinfo.get_disk_bus_for_disk_dev, 'inv', 'val')
    @mock.patch('nova.virt.libvirt.utils.get_machine_type')
    @mock.patch('nova.virt.libvirt.utils.get_arch')
    def test_get_disk_bus_for_device_type_cdrom_with_q35_get_arch(self,
            mock_get_arch, mock_get_machine_type):
        """On an x86_64 q35 machine type (detected via get_machine_type /
        get_arch) the default cdrom bus is sata, not ide."""
        instance = objects.Instance(**self.test_instance)
        mock_get_machine_type.return_value = 'pc-q35-rhel8.0.0'
        mock_get_arch.return_value = obj_fields.Architecture.X86_64
        image_meta = {'properties': {}}
        image_meta = objects.ImageMeta.from_dict(image_meta)
        bus = blockinfo.get_disk_bus_for_device_type(instance, 'kvm',
                                                     image_meta,
                                                     device_type='cdrom')
        self.assertEqual('sata', bus)
    def test_get_disk_bus_for_device_type_cdrom_with_q35_image_meta(self):
        """A q35 machine type requested via the hw_machine_type image
        property also yields a sata cdrom bus."""
        instance = objects.Instance(**self.test_instance)
        image_meta = {'properties': {'hw_machine_type': 'pc-q35-rhel8.0.0'}}
        image_meta = objects.ImageMeta.from_dict(image_meta)
        bus = blockinfo.get_disk_bus_for_device_type(instance, 'kvm',
                                                     image_meta,
                                                     device_type='cdrom')
        self.assertEqual('sata', bus)
def test_get_config_drive_type_default(self):
config_drive_type = blockinfo.get_config_drive_type()
self.assertEqual('cdrom', config_drive_type)
def test_get_config_drive_type_cdrom(self):
self.flags(config_drive_format='iso9660')
config_drive_type = blockinfo.get_config_drive_type()
self.assertEqual('cdrom', config_drive_type)
def test_get_config_drive_type_disk(self):
self.flags(config_drive_format='vfat')
config_drive_type = blockinfo.get_config_drive_type()
self.assertEqual('disk', config_drive_type)
    def test_get_info_from_bdm(self):
        """Check BDM -> libvirt disk-info translation and defaulting.

        Covers: bus/type taken straight from the BDM, invalid ("lame")
        bus/type values replaced via the defaulting helpers, explicit
        disk_bus/dev_type arguments taking precedence, and a missing
        device name being assigned via find_disk_dev_for_disk_bus.
        """
        instance = objects.Instance(**self.test_instance)
        bdms = [{'device_name': '/dev/vds', 'device_type': 'disk',
                 'disk_bus': 'usb', 'swap_size': 4},
                {'device_type': 'disk', 'guest_format': 'ext4',
                 'device_name': '/dev/vdb', 'size': 2},
                {'disk_bus': 'ide', 'guest_format': None,
                 'device_name': '/dev/vdc', 'size': 3},
                {'connection_info': "fake",
                 'mount_device': "/dev/sdr",
                 'disk_bus': 'lame_bus',
                 'device_type': 'cdrom',
                 'boot_index': 0,
                 'delete_on_termination': True},
                {'connection_info': "fake",
                 'mount_device': "/dev/vdo",
                 'disk_bus': 'scsi',
                 'boot_index': 1,
                 'device_type': 'lame_type',
                 'delete_on_termination': True},
                {'disk_bus': 'sata', 'guest_format': None,
                 'device_name': '/dev/sda', 'size': 3}]
        expected = [{'dev': 'vds', 'type': 'disk', 'bus': 'usb'},
                    {'dev': 'vdb', 'type': 'disk',
                     'bus': 'virtio', 'format': 'ext4'},
                    {'dev': 'vdc', 'type': 'disk', 'bus': 'ide'},
                    {'dev': 'sdr', 'type': 'cdrom',
                     'bus': 'scsi', 'boot_index': '1'},
                    {'dev': 'vdo', 'type': 'disk',
                     'bus': 'scsi', 'boot_index': '2'},
                    {'dev': 'sda', 'type': 'disk', 'bus': 'sata'}]
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
        # NOTE(review): the loop target shadows the `expected` list; this
        # is safe only because the list is never referenced after the loop.
        for bdm, expected in zip(bdms, expected):
            self.assertEqual(expected,
                             blockinfo.get_info_from_bdm(instance,
                                                         'kvm',
                                                         image_meta,
                                                         bdm))
        # Test that passed bus and type are considered
        bdm = {'device_name': '/dev/vda'}
        expected = {'dev': 'vda', 'type': 'disk', 'bus': 'ide'}
        self.assertEqual(
            expected, blockinfo.get_info_from_bdm(instance,
                                                  'kvm',
                                                  image_meta,
                                                  bdm,
                                                  disk_bus='ide',
                                                  dev_type='disk'))
        # Test that lame bus values are defaulted properly
        bdm = {'disk_bus': 'lame_bus', 'device_type': 'cdrom'}
        with mock.patch.object(blockinfo,
                               'get_disk_bus_for_device_type',
                               return_value='ide') as get_bus:
            blockinfo.get_info_from_bdm(instance,
                                        'kvm',
                                        image_meta,
                                        bdm)
            get_bus.assert_called_once_with(instance, 'kvm',
                                            image_meta, 'cdrom')
        # Test that missing device is defaulted as expected
        bdm = {'disk_bus': 'ide', 'device_type': 'cdrom'}
        expected = {'dev': 'vdd', 'type': 'cdrom', 'bus': 'ide'}
        mapping = {'root': {'dev': 'vda'}}
        with mock.patch.object(blockinfo,
                               'find_disk_dev_for_disk_bus',
                               return_value='vdd') as find_dev:
            got = blockinfo.get_info_from_bdm(
                instance,
                'kvm',
                image_meta,
                bdm,
                mapping,
                assigned_devices=['vdb', 'vdc'])
            find_dev.assert_called_once_with(
                {'root': {'dev': 'vda'},
                 'vdb': {'dev': 'vdb'},
                 'vdc': {'dev': 'vdc'}}, 'ide')
            self.assertEqual(expected, got)
    def test_get_device_name(self):
        """get_device_name handles both BDM objects and driver BDM
        wrappers, and returns None once the name has been cleared."""
        bdm_obj = objects.BlockDeviceMapping(self.context,
            **fake_block_device.FakeDbBlockDeviceDict(
                {'id': 3, 'instance_uuid': uuids.instance,
                 'device_name': '/dev/vda',
                 'source_type': 'volume',
                 'destination_type': 'volume',
                 'volume_id': 'fake-volume-id-1',
                 'boot_index': 0}))
        self.assertEqual('/dev/vda', blockinfo.get_device_name(bdm_obj))
        driver_bdm = driver_block_device.DriverVolumeBlockDevice(bdm_obj)
        self.assertEqual('/dev/vda', blockinfo.get_device_name(driver_bdm))
        # Clearing the name must be visible through both object kinds.
        bdm_obj.device_name = None
        self.assertIsNone(blockinfo.get_device_name(bdm_obj))
        driver_bdm = driver_block_device.DriverVolumeBlockDevice(bdm_obj)
        self.assertIsNone(blockinfo.get_device_name(driver_bdm))
@mock.patch('nova.virt.libvirt.blockinfo.find_disk_dev_for_disk_bus',
return_value='vda')
def test_get_root_info_no_bdm(self, mock_find_dev):
instance = objects.Instance(**self.test_instance)
image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
info = blockinfo.get_root_info(instance, 'kvm', image_meta, None,
'virtio', 'ide')
mock_find_dev.assert_called_once_with({}, 'virtio')
self.assertEqual('virtio', info['bus'])
@mock.patch('nova.virt.libvirt.blockinfo.find_disk_dev_for_disk_bus',
return_value='vda')
def test_get_root_info_no_bdm_empty_image_meta(self, mock_find_dev):
# The evacuate operation passes image_ref=None to the compute node for
# rebuild which then defaults image_meta to {}, so we don't have any
# attributes in the ImageMeta object passed to get_root_info and we
# need to make sure we don't try lazy-loading anything.
instance = objects.Instance(**self.test_instance)
image_meta = objects.ImageMeta.from_dict({})
info = blockinfo.get_root_info(instance, 'kvm', image_meta, None,
'virtio', 'ide')
mock_find_dev.assert_called_once_with({}, 'virtio')
self.assertEqual('virtio', info['bus'])
    @mock.patch('nova.virt.libvirt.blockinfo.get_info_from_bdm')
    def test_get_root_info_bdm(self, mock_get_info):
        """get_root_info forwards the root BDM to get_info_from_bdm,
        synthesizing a device entry when the BDM lacks a device name."""
        instance = objects.Instance(**self.test_instance)
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
        root_bdm = {'mount_device': '/dev/vda',
                    'disk_bus': 'scsi',
                    'device_type': 'disk'}
        # No root_device_name
        blockinfo.get_root_info(instance, 'kvm', image_meta, root_bdm,
                                'virtio', 'ide')
        mock_get_info.assert_called_once_with(instance, 'kvm', image_meta,
                                              root_bdm, {}, 'virtio')
        mock_get_info.reset_mock()
        # Both device names
        blockinfo.get_root_info(instance, 'kvm', image_meta, root_bdm,
                                'virtio', 'ide', root_device_name='sda')
        mock_get_info.assert_called_once_with(instance, 'kvm', image_meta,
                                              root_bdm, {}, 'virtio')
        mock_get_info.reset_mock()
        # Missing device names
        del root_bdm['mount_device']
        blockinfo.get_root_info(instance, 'kvm', image_meta, root_bdm,
                                'virtio', 'ide', root_device_name='sda')
        mock_get_info.assert_called_once_with(instance, 'kvm',
                                              image_meta,
                                              {'device_name': 'sda',
                                               'disk_bus': 'scsi',
                                               'device_type': 'disk'},
                                              {}, 'virtio')
        mock_get_info.reset_mock()
        # xen with incompatible root_device_name/disk_bus combination
        root_bdm['disk_bus'] = 'xen'
        blockinfo.get_root_info(instance, 'xen', image_meta, root_bdm,
                                'xen', 'ide', root_device_name='sda')
        mock_get_info.assert_called_once_with(instance, 'xen', image_meta,
                                              {'device_name': 'xvda',
                                               'disk_bus': 'xen',
                                               'device_type': 'disk'},
                                              {}, 'xen')
def test_get_boot_order_simple(self):
disk_info = {
'disk_bus': 'virtio',
'cdrom_bus': 'ide',
'mapping': {
'disk': {'bus': 'virtio', 'dev': 'vda',
'type': 'disk', 'boot_index': '1'},
'root': {'bus': 'virtio', 'dev': 'vda',
'type': 'disk', 'boot_index': '1'},
}
}
expected_order = ['hd']
self.assertEqual(expected_order, blockinfo.get_boot_order(disk_info))
    def test_get_boot_order_complex(self):
        """Boot targets are ordered by boot_index (1=hd, 2=fd, 3=cdrom);
        entries without a boot_index do not appear in the order."""
        disk_info = {
            'disk_bus': 'virtio',
            'cdrom_bus': 'ide',
            'mapping': {
            'disk': {'bus': 'virtio', 'dev': 'vdf',
                     'type': 'disk', 'boot_index': '1'},
            '/dev/hda': {'bus': 'ide', 'dev': 'hda',
                         'type': 'cdrom', 'boot_index': '3'},
            '/dev/fda': {'bus': 'fdc', 'dev': 'fda',
                         'type': 'floppy', 'boot_index': '2'},
            'disk.eph0': {'bus': 'virtio', 'dev': 'vdb',
                          'type': 'disk', 'format': 'ext4'},
            'disk.eph1': {'bus': 'ide', 'dev': 'vdc', 'type': 'disk'},
            'disk.swap': {'bus': 'virtio', 'dev': 'vdy', 'type': 'disk'},
            'root': {'bus': 'virtio', 'dev': 'vdf',
                     'type': 'disk', 'boot_index': '1'},
            }
        }
        expected_order = ['hd', 'fd', 'cdrom']
        self.assertEqual(expected_order, blockinfo.get_boot_order(disk_info))
    def test_get_boot_order_overlapping(self):
        """Devices sharing a boot target collapse to one entry in the
        resulting boot order ('hd' appears once for indexes 1 and 2)."""
        disk_info = {
            'disk_bus': 'virtio',
            'cdrom_bus': 'ide',
            'mapping': {
            '/dev/vda': {'bus': 'scsi', 'dev': 'vda',
                         'type': 'disk', 'boot_index': '1'},
            '/dev/vdb': {'bus': 'virtio', 'dev': 'vdb',
                         'type': 'disk', 'boot_index': '2'},
            '/dev/vdc': {'bus': 'virtio', 'dev': 'vdc',
                         'type': 'cdrom', 'boot_index': '3'},
            'root': {'bus': 'scsi', 'dev': 'vda',
                     'type': 'disk', 'boot_index': '1'},
            }
        }
        expected_order = ['hd', 'cdrom']
        self.assertEqual(expected_order, blockinfo.get_boot_order(disk_info))
class DefaultDeviceNamesTestCase(test.NoDBTestCase):
    """Tests for blockinfo.default_device_names.

    setUp builds an instance plus ephemeral, swap, and volume BDMs whose
    device names are then selectively blanked by each test to verify that
    default_device_names fills them back in deterministically
    (vdb=ephemeral, vdc=swap, vdd/vde=extra volumes).
    """
    def setUp(self):
        super(DefaultDeviceNamesTestCase, self).setUp()
        self.context = context.get_admin_context()
        self.instance = objects.Instance(
            uuid='32dfcb37-5af1-552b-357c-be8c3aa38310',
            memory_kb='1024000',
            basepath='/some/path',
            bridge_name='br100',
            vcpus=2,
            project_id='fake',
            bridge='br101',
            image_ref='155d900f-4e14-4e4c-a73d-069cbf4541e6',
            root_gb=10,
            ephemeral_gb=20,
            instance_type_id=2,
            config_drive=False,
            root_device_name = '/dev/vda',
            system_metadata={})
        self.image_meta = objects.ImageMeta(
            disk_format='raw',
            properties=objects.ImageMetaProps())
        self.virt_type = 'kvm'
        self.flavor = objects.Flavor(swap=4)
        self.patchers = []
        # Stub flavor lookup and BDM persistence so no DB is touched.
        self.patchers.append(mock.patch.object(self.instance, 'get_flavor',
                                               return_value=self.flavor))
        self.patchers.append(mock.patch(
            'nova.objects.block_device.BlockDeviceMapping.save'))
        for patcher in self.patchers:
            patcher.start()
        # One local ephemeral disk, initially named /dev/vdb.
        self.ephemerals = [objects.BlockDeviceMapping(
            self.context, **fake_block_device.FakeDbBlockDeviceDict(
                {'id': 1, 'instance_uuid': uuids.instance,
                 'device_name': '/dev/vdb',
                 'source_type': 'blank',
                 'destination_type': 'local',
                 'device_type': 'disk',
                 'disk_bus': 'virtio',
                 'delete_on_termination': True,
                 'guest_format': None,
                 'volume_size': 1,
                 'boot_index': -1}))]
        # One swap disk, initially named /dev/vdc.
        self.swap = [objects.BlockDeviceMapping(
            self.context, **fake_block_device.FakeDbBlockDeviceDict(
                {'id': 2, 'instance_uuid': uuids.instance,
                 'device_name': '/dev/vdc',
                 'source_type': 'blank',
                 'destination_type': 'local',
                 'device_type': 'disk',
                 'disk_bus': 'virtio',
                 'delete_on_termination': True,
                 'guest_format': 'swap',
                 'volume_size': 1,
                 'boot_index': -1}))]
        # Boot volume (vda) plus two additional volumes (vdd, vde).
        self.block_device_mapping = [
            objects.BlockDeviceMapping(self.context,
                **fake_block_device.FakeDbBlockDeviceDict(
                    {'id': 3, 'instance_uuid': uuids.instance,
                     'device_name': '/dev/vda',
                     'source_type': 'volume',
                     'destination_type': 'volume',
                     'device_type': 'disk',
                     'disk_bus': 'virtio',
                     'volume_id': 'fake-volume-id-1',
                     'boot_index': 0})),
            objects.BlockDeviceMapping(self.context,
                **fake_block_device.FakeDbBlockDeviceDict(
                    {'id': 4, 'instance_uuid': uuids.instance,
                     'device_name': '/dev/vdd',
                     'source_type': 'snapshot',
                     'device_type': 'disk',
                     'disk_bus': 'virtio',
                     'destination_type': 'volume',
                     'snapshot_id': 'fake-snapshot-id-1',
                     'boot_index': -1})),
            objects.BlockDeviceMapping(self.context,
                **fake_block_device.FakeDbBlockDeviceDict(
                    {'id': 5, 'instance_uuid': uuids.instance,
                     'device_name': '/dev/vde',
                     'source_type': 'blank',
                     'device_type': 'disk',
                     'disk_bus': 'virtio',
                     'destination_type': 'volume',
                     'boot_index': -1}))]
    def tearDown(self):
        """Stop every patcher started in setUp."""
        super(DefaultDeviceNamesTestCase, self).tearDown()
        for patcher in self.patchers:
            patcher.stop()
    def _test_default_device_names(self, eph, swap, bdm):
        """Run default_device_names over the combined BDM lists."""
        bdms = eph + swap + bdm
        bdi = driver.get_block_device_info(self.instance, bdms)
        blockinfo.default_device_names(self.virt_type,
                                       self.context,
                                       self.instance,
                                       bdi,
                                       self.image_meta)
    def test_only_block_device_mapping(self):
        """Existing names are preserved; blanked ones are re-derived."""
        # Test no-op
        original_bdm = copy.deepcopy(self.block_device_mapping)
        self._test_default_device_names([], [], self.block_device_mapping)
        for original, defaulted in zip(
                original_bdm, self.block_device_mapping):
            self.assertEqual(original.device_name, defaulted.device_name)
        # Assert it defaults the missing one as expected
        self.block_device_mapping[1]['device_name'] = None
        self.block_device_mapping[2]['device_name'] = None
        self._test_default_device_names([], [], self.block_device_mapping)
        self.assertEqual('/dev/vdd',
                         self.block_device_mapping[1]['device_name'])
        self.assertEqual('/dev/vde',
                         self.block_device_mapping[2]['device_name'])
    def test_with_ephemerals(self):
        """Blanked ephemeral and volume names are filled independently."""
        # Test ephemeral gets assigned
        self.ephemerals[0]['device_name'] = None
        self._test_default_device_names(self.ephemerals, [],
                                        self.block_device_mapping)
        self.assertEqual('/dev/vdb', self.ephemerals[0]['device_name'])
        self.block_device_mapping[1]['device_name'] = None
        self.block_device_mapping[2]['device_name'] = None
        self._test_default_device_names(self.ephemerals, [],
                                        self.block_device_mapping)
        self.assertEqual('/dev/vdd',
                         self.block_device_mapping[1]['device_name'])
        self.assertEqual('/dev/vde',
                         self.block_device_mapping[2]['device_name'])
    def test_with_swap(self):
        """Blanked swap names are filled, alone or alongside volumes."""
        # Test swap only
        self.swap[0]['device_name'] = None
        self._test_default_device_names([], self.swap, [])
        self.assertEqual('/dev/vdc', self.swap[0]['device_name'])
        # Test swap and block_device_mapping
        self.swap[0]['device_name'] = None
        self.block_device_mapping[1]['device_name'] = None
        self.block_device_mapping[2]['device_name'] = None
        self._test_default_device_names([], self.swap,
                                        self.block_device_mapping)
        self.assertEqual('/dev/vdc', self.swap[0]['device_name'])
        self.assertEqual('/dev/vdd',
                         self.block_device_mapping[1]['device_name'])
        self.assertEqual('/dev/vde',
                         self.block_device_mapping[2]['device_name'])
    def test_all_together(self):
        """Progressively blank swap, ephemeral, and volume names and
        check every combination is re-derived to the expected devices."""
        # Test swap missing
        self.swap[0]['device_name'] = None
        self._test_default_device_names(self.ephemerals,
                                        self.swap, self.block_device_mapping)
        self.assertEqual('/dev/vdc', self.swap[0]['device_name'])
        # Test swap and eph missing
        self.swap[0]['device_name'] = None
        self.ephemerals[0]['device_name'] = None
        self._test_default_device_names(self.ephemerals,
                                        self.swap, self.block_device_mapping)
        self.assertEqual('/dev/vdb', self.ephemerals[0]['device_name'])
        self.assertEqual('/dev/vdc', self.swap[0]['device_name'])
        # Test all missing
        self.swap[0]['device_name'] = None
        self.ephemerals[0]['device_name'] = None
        self.block_device_mapping[1]['device_name'] = None
        self.block_device_mapping[2]['device_name'] = None
        self._test_default_device_names(self.ephemerals,
                                        self.swap, self.block_device_mapping)
        self.assertEqual('/dev/vdb', self.ephemerals[0]['device_name'])
        self.assertEqual('/dev/vdc', self.swap[0]['device_name'])
        self.assertEqual('/dev/vdd',
                         self.block_device_mapping[1]['device_name'])
        self.assertEqual('/dev/vde',
                         self.block_device_mapping[2]['device_name'])
| {
"content_hash": "a8242ebf8b4b9e0e64a9d0342dc4ca87",
"timestamp": "",
"source": "github",
"line_count": 1246,
"max_line_length": 79,
"avg_line_length": 44.951845906902086,
"alnum_prop": 0.4719157293340475,
"repo_name": "rahulunair/nova",
"id": "ca31258fc2fe5c948ed5e59ce76123b41887a150",
"size": "56666",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nova/tests/unit/virt/libvirt/test_blockinfo.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PHP",
"bytes": "3325"
},
{
"name": "Python",
"bytes": "22804450"
},
{
"name": "Shell",
"bytes": "41649"
},
{
"name": "Smarty",
"bytes": "472764"
}
],
"symlink_target": ""
} |
"""Read and write FLAC Vorbis comments and stream information.
Read more about FLAC at http://flac.sourceforge.net.
FLAC supports arbitrary metadata blocks. The two most interesting ones
are the FLAC stream information block, and the Vorbis comment block;
these are also the only ones Mutagen can currently read.
This module does not handle Ogg FLAC files.
Based off documentation available at
http://flac.sourceforge.net/format.html
"""
__all__ = ["FLAC", "Open", "delete"]
import struct
from cStringIO import StringIO
from _vorbis import VCommentDict
from mutagen import FileType
from mutagen._util import insert_bytes
from mutagen.id3 import BitPaddedInt
# Base class for all FLAC errors; subclasses IOError for backwards compat.
class error(IOError): pass
# Raised when the stream info block (or 'fLaC' marker) cannot be found.
class FLACNoHeaderError(error): pass
# Raised for invalid Vorbis comment situations (e.g. duplicate blocks).
class FLACVorbisError(ValueError, error): pass
def to_int_be(string):
    """Convert an arbitrarily-long string to a long using big-endian
    byte order."""
    # Python 2 only: the 0L seed makes the result a long even when it
    # would fit an int, and reduce() is the py2 builtin.
    return reduce(lambda a, b: (a << 8) + ord(b), string, 0L)
class MetadataBlock(object):
    """A generic block of FLAC metadata.

    This class is used as an ancestor for more specific blocks, and
    also as a container for data blobs of unknown blocks.

    Attributes:
    data -- raw binary data for this block
    """
    def __init__(self, data):
        """Parse the given data string or file-like as a metadata block.
        The metadata header should not be included."""
        if data is not None:
            if isinstance(data, str): data = StringIO(data)
            elif not hasattr(data, 'read'):
                raise TypeError(
                    "StreamInfo requires string data or a file-like")
            self.load(data)
    def load(self, data): self.data = data.read()
    def write(self): return self.data
    # Written py2-style: defined as a plain function, then wrapped with
    # staticmethod() below (predates decorator syntax).
    def writeblocks(blocks):
        """Render metadata block as a byte string."""
        data = []
        codes = [[block.code, block.write()] for block in blocks]
        # The last metadata block in a FLAC file has its high bit set.
        codes[-1][0] |= 128
        for code, datum in codes:
            byte = chr(code)
            if len(datum) > 2**24:
                # Block lengths are stored in 24 bits.
                raise error("block is too long to write")
            length = struct.pack(">I", len(datum))[-3:]
            data.append(byte + length + datum)
        return "".join(data)
    writeblocks = staticmethod(writeblocks)
    def group_padding(blocks):
        """Consolidate FLAC padding metadata blocks.

        The overall size of the rendered blocks does not change, so
        this adds several bytes of padding for each merged block."""
        paddings = filter(lambda x: isinstance(x, Padding), blocks)
        map(blocks.remove, paddings)
        padding = Padding()
        # total padding size is the sum of padding sizes plus 4 bytes
        # per removed header.
        size = sum([padding.length for padding in paddings])
        padding.length = size + 4 * (len(paddings) - 1)
        blocks.append(padding)
    group_padding = staticmethod(group_padding)
class StreamInfo(MetadataBlock):
"""FLAC stream information.
This contains information about the audio data in the FLAC file.
Unlike most stream information objects in Mutagen, changes to this
one will rewritten to the file when it is saved. Unless you are
actually changing the audio stream itself, don't change any
attributes of this block.
Attributes:
min_blocksize -- minimum audio block size
max_blocksize -- maximum audio block size
sample_rate -- audio sample rate in Hz
channels -- audio channels (1 for mono, 2 for stereo)
bits_per_sample -- bits per sample
total_samples -- total samples in file
length -- audio length in seconds
"""
code = 0
def __eq__(self, other):
try: return (self.min_blocksize == other.min_blocksize and
self.max_blocksize == other.max_blocksize and
self.sample_rate == other.sample_rate and
self.channels == other.channels and
self.bits_per_sample == other.bits_per_sample and
self.total_samples == other.total_samples)
except: return False
def load(self, data):
self.min_blocksize = int(to_int_be(data.read(2)))
self.max_blocksize = int(to_int_be(data.read(2)))
self.min_framesize = int(to_int_be(data.read(3)))
self.max_framesize = int(to_int_be(data.read(3)))
# first 16 bits of sample rate
sample_first = to_int_be(data.read(2))
# last 4 bits of sample rate, 3 of channels, first 1 of bits/sample
sample_channels_bps = to_int_be(data.read(1))
# last 4 of bits/sample, 36 of total samples
bps_total = to_int_be(data.read(5))
sample_tail = sample_channels_bps >> 4
self.sample_rate = int((sample_first << 4) + sample_tail)
self.channels = int(((sample_channels_bps >> 1) & 7) + 1)
bps_tail = bps_total >> 36
bps_head = (sample_channels_bps & 1) << 4
self.bits_per_sample = int(bps_head + bps_tail + 1)
self.total_samples = bps_total & 0xFFFFFFFFFL
self.length = self.total_samples / float(self.sample_rate)
self.md5_signature = to_int_be(data.read(16))
def write(self):
f = StringIO()
f.write(struct.pack(">I", self.min_blocksize)[-2:])
f.write(struct.pack(">I", self.max_blocksize)[-2:])
f.write(struct.pack(">I", self.min_framesize)[-3:])
f.write(struct.pack(">I", self.max_framesize)[-3:])
# first 16 bits of sample rate
f.write(struct.pack(">I", self.sample_rate >> 4)[-2:])
# 4 bits sample, 3 channel, 1 bps
byte = (self.sample_rate & 0xF) << 4
byte += ((self.channels - 1) & 3) << 1
byte += ((self.bits_per_sample - 1) >> 4) & 1
f.write(chr(byte))
# 4 bits of bps, 4 of sample count
byte = ((self.bits_per_sample - 1) & 0xF) << 4
byte += (self.total_samples >> 32) & 0xF
f.write(chr(byte))
# last 32 of sample count
f.write(struct.pack(">I", self.total_samples & 0xFFFFFFFFL))
# MD5 signature
sig = self.md5_signature
f.write(struct.pack(
">4I", (sig >> 96) & 0xFFFFFFFFL, (sig >> 64) & 0xFFFFFFFFL,
(sig >> 32) & 0xFFFFFFFFL, sig & 0xFFFFFFFFL))
return f.getvalue()
def pprint(self):
return "FLAC, %.2f seconds, %d Hz" % (self.length, self.sample_rate)
class SeekPoint(tuple):
    """A single seek point in a FLAC file.

    Placeholder seek points have first_sample of 0xFFFFFFFFFFFFFFFFL,
    and byte_offset and num_samples undefined. Seek points must be
    sorted in ascending order by first_sample number. Seek points must
    be unique by first_sample number, except for placeholder
    points. Placeholder points must occur last in the table and there
    may be any number of them.

    Attributes:
    first_sample -- sample number of first sample in the target frame
    byte_offset -- offset from first frame to target frame
    num_samples -- number of samples in target frame
    """
    def __new__(cls, first_sample, byte_offset, num_samples):
        # FIX: super() arguments were previously swapped
        # (super(cls, SeekPoint)), which raises TypeError for any
        # subclass of SeekPoint; the correct order is (type, cls).
        return super(SeekPoint, cls).__new__(cls, (first_sample,
            byte_offset, num_samples))
    first_sample = property(lambda self: self[0])
    byte_offset = property(lambda self: self[1])
    num_samples = property(lambda self: self[2])
class SeekTable(MetadataBlock):
    """Read and write FLAC seek tables.

    Attributes:
    seekpoints -- list of SeekPoint objects
    """
    # Each seek point is sample number (u64), byte offset (u64),
    # samples-in-frame (u16), big-endian.
    __SEEKPOINT_FORMAT = '>QQH'
    __SEEKPOINT_SIZE = struct.calcsize(__SEEKPOINT_FORMAT)
    code = 3
    def __init__(self, data):
        self.seekpoints = []
        super(SeekTable, self).__init__(data)
    def __eq__(self, other):
        try: return (self.seekpoints == other.seekpoints)
        except (AttributeError, TypeError): return False
    def load(self, data):
        """Parse fixed-size seek points until the data runs out."""
        self.seekpoints = []
        sp = data.read(self.__SEEKPOINT_SIZE)
        while len(sp) == self.__SEEKPOINT_SIZE:
            self.seekpoints.append(SeekPoint(
                *struct.unpack(self.__SEEKPOINT_FORMAT, sp)))
            sp = data.read(self.__SEEKPOINT_SIZE)
    def write(self):
        """Render the seek points back to their packed binary form."""
        f = StringIO()
        for seekpoint in self.seekpoints:
            packed = struct.pack(self.__SEEKPOINT_FORMAT,
                seekpoint.first_sample, seekpoint.byte_offset,
                seekpoint.num_samples)
            f.write(packed)
        return f.getvalue()
    def __repr__(self):
        return "<%s seekpoints=%r>" % (type(self).__name__, self.seekpoints)
class VCFLACDict(VCommentDict):
    """Read and write FLAC Vorbis comments.

    FLACs don't use the framing bit at the end of the comment block.
    So this extends VCommentDict to not use the framing bit.
    """
    code = 4
    def load(self, data, errors='replace', framing=False):
        # framing defaults to False here, unlike the Ogg Vorbis case.
        super(VCFLACDict, self).load(data, errors=errors, framing=framing)
    def write(self, framing=False):
        return super(VCFLACDict, self).write(framing=framing)
class CueSheetTrackIndex(tuple):
    """Index for a track in a cuesheet.

    For CD-DA, an index_number of 0 corresponds to the track
    pre-gap. The first index in a track must have a number of 0 or 1,
    and subsequently, index_numbers must increase by 1. Index_numbers
    must be unique within a track. And index_offset must be evenly
    divisible by 588 samples.

    Attributes:
    index_number -- index point number
    index_offset -- offset in samples from track start
    """
    def __new__(cls, index_number, index_offset):
        # FIX: super() arguments were previously swapped
        # (super(cls, CueSheetTrackIndex)), which raises TypeError for
        # any subclass; the correct order is (type, cls).
        return super(CueSheetTrackIndex, cls).__new__(cls,
            (index_number, index_offset))
    index_number = property(lambda self: self[0])
    index_offset = property(lambda self: self[1])
class CueSheetTrack(object):
    """A single track inside a FLAC cue sheet.

    For CD-DA, track_numbers must be 1-99, or 170 for the lead-out;
    they must be unique within a cue sheet. Every track except the
    lead-out (which must have none) needs at least one index point.

    Attributes:
    track_number -- track number
    start_offset -- track offset in samples from start of FLAC stream
    isrc -- ISRC code
    type -- 0 for audio, 1 for digital data
    pre_emphasis -- true if the track is recorded with pre-emphasis
    indexes -- list of CueSheetTrackIndex objects
    """
    def __init__(self, track_number, start_offset, isrc='', type_=0,
                 pre_emphasis=False):
        self.track_number = track_number
        self.start_offset = start_offset
        self.isrc = isrc
        self.type = type_
        self.pre_emphasis = pre_emphasis
        self.indexes = []
    def __eq__(self, other):
        # Compare attribute by attribute; any object lacking one of
        # them simply compares unequal.
        try:
            for name in ('track_number', 'start_offset', 'isrc', 'type',
                         'pre_emphasis', 'indexes'):
                if getattr(self, name) != getattr(other, name):
                    return False
        except (AttributeError, TypeError):
            return False
        return True
    def __repr__(self):
        return ("<%s number=%r, offset=%d, isrc=%r, type=%r, "
                "pre_emphasis=%r, indexes=%r)>") % (
            type(self).__name__, self.track_number, self.start_offset,
            self.isrc, self.type, self.pre_emphasis, self.indexes)
class CueSheet(MetadataBlock):
    """Read and write FLAC embedded cue sheets.

    Number of tracks should be from 1 to 100. There should always be
    exactly one lead-out track and that track must be the last track
    in the cue sheet.

    Attributes:
    media_catalog_number -- media catalog number in ASCII
    lead_in_samples -- number of lead-in samples
    compact_disc -- true if the cuesheet corresponds to a compact disc
    tracks -- list of CueSheetTrack objects
    lead_out -- lead-out as CueSheetTrack or None if lead-out was not found
    """
    # Big-endian binary layouts; the 'x' pad bytes are reserved fields.
    __CUESHEET_FORMAT = '>128sQB258xB'
    __CUESHEET_SIZE = struct.calcsize(__CUESHEET_FORMAT)
    __CUESHEET_TRACK_FORMAT = '>QB12sB13xB'
    __CUESHEET_TRACK_SIZE = struct.calcsize(__CUESHEET_TRACK_FORMAT)
    __CUESHEET_TRACKINDEX_FORMAT = '>QB3x'
    __CUESHEET_TRACKINDEX_SIZE = struct.calcsize(__CUESHEET_TRACKINDEX_FORMAT)
    code = 5
    media_catalog_number = ''
    lead_in_samples = 88200
    compact_disc = True
    def __init__(self, data):
        self.tracks = []
        super(CueSheet, self).__init__(data)
    def __eq__(self, other):
        try:
            return (self.media_catalog_number == other.media_catalog_number and
                    self.lead_in_samples == other.lead_in_samples and
                    self.compact_disc == other.compact_disc and
                    self.tracks == other.tracks)
        except (AttributeError, TypeError): return False
    def load(self, data):
        """Parse the cue sheet header, its tracks, and their indexes."""
        header = data.read(self.__CUESHEET_SIZE)
        media_catalog_number, lead_in_samples, flags, num_tracks = \
            struct.unpack(self.__CUESHEET_FORMAT, header)
        self.media_catalog_number = media_catalog_number.rstrip('\0')
        self.lead_in_samples = lead_in_samples
        # Bit 7 of the sheet flags marks a compact-disc cue sheet.
        self.compact_disc = bool(flags & 0x80)
        self.tracks = []
        for i in range(num_tracks):
            track = data.read(self.__CUESHEET_TRACK_SIZE)
            start_offset, track_number, isrc_padded, flags, num_indexes = \
                struct.unpack(self.__CUESHEET_TRACK_FORMAT, track)
            isrc = isrc_padded.rstrip('\0')
            # Track flags: bit 7 = data track, bit 6 = pre-emphasis.
            type_ = (flags & 0x80) >> 7
            pre_emphasis = bool(flags & 0x40)
            val = CueSheetTrack(
                track_number, start_offset, isrc, type_, pre_emphasis)
            for j in range(num_indexes):
                index = data.read(self.__CUESHEET_TRACKINDEX_SIZE)
                index_offset, index_number = struct.unpack(
                    self.__CUESHEET_TRACKINDEX_FORMAT, index)
                val.indexes.append(
                    CueSheetTrackIndex(index_number, index_offset))
            self.tracks.append(val)
    def write(self):
        """Render the cue sheet back to its packed binary form."""
        f = StringIO()
        flags = 0
        if self.compact_disc: flags |= 0x80
        packed = struct.pack(
            self.__CUESHEET_FORMAT, self.media_catalog_number,
            self.lead_in_samples, flags, len(self.tracks))
        f.write(packed)
        for track in self.tracks:
            track_flags = 0
            track_flags |= (track.type & 1) << 7
            if track.pre_emphasis: track_flags |= 0x40
            track_packed = struct.pack(
                self.__CUESHEET_TRACK_FORMAT, track.start_offset,
                track.track_number, track.isrc, track_flags,
                len(track.indexes))
            f.write(track_packed)
            for index in track.indexes:
                index_packed = struct.pack(
                    self.__CUESHEET_TRACKINDEX_FORMAT,
                    index.index_offset, index.index_number)
                f.write(index_packed)
        return f.getvalue()
    def __repr__(self):
        return ("<%s media_catalog_number=%r, lead_in=%r, compact_disc=%r, "
                "tracks=%r>") % (
            type(self).__name__, self.media_catalog_number,
            self.lead_in_samples, self.compact_disc, self.tracks)
class Picture(MetadataBlock):
    """Read and write FLAC embed pictures.

    Attributes:
    type -- picture type (same as types for ID3 APIC frames)
    mime -- MIME type of the picture
    desc -- picture's description
    width -- width in pixels
    height -- height in pixels
    depth -- color depth in bits-per-pixel
    colors -- number of colors for indexed palettes (like GIF),
              0 for non-indexed
    data -- picture data
    """
    code = 6
    def __init__(self, data=None):
        # Defaults used when constructing an empty Picture to fill in.
        self.type = 0
        self.mime = u''
        self.desc = u''
        self.width = 0
        self.height = 0
        self.depth = 0
        self.colors = 0
        self.data = ''
        super(Picture, self).__init__(data)
    def __eq__(self, other):
        try: return (self.type == other.type and
                     self.mime == other.mime and
                     self.desc == other.desc and
                     self.width == other.width and
                     self.height == other.height and
                     self.depth == other.depth and
                     self.colors == other.colors and
                     self.data == other.data)
        except (AttributeError, TypeError): return False
    def load(self, data):
        """Parse a picture block: each variable-length field (mime,
        description, image data) is preceded by a 32-bit BE length."""
        self.type, length = struct.unpack('>2I', data.read(8))
        self.mime = data.read(length).decode('UTF-8', 'replace')
        length, = struct.unpack('>I', data.read(4))
        self.desc = data.read(length).decode('UTF-8', 'replace')
        (self.width, self.height, self.depth,
         self.colors, length) = struct.unpack('>5I', data.read(20))
        self.data = data.read(length)
    def write(self):
        """Render the picture back to its packed binary form."""
        f = StringIO()
        mime = self.mime.encode('UTF-8')
        f.write(struct.pack('>2I', self.type, len(mime)))
        f.write(mime)
        desc = self.desc.encode('UTF-8')
        f.write(struct.pack('>I', len(desc)))
        f.write(desc)
        f.write(struct.pack('>5I', self.width, self.height, self.depth,
                            self.colors, len(self.data)))
        f.write(self.data)
        return f.getvalue()
    def __repr__(self):
        return "<%s '%s' (%d bytes)>" % (type(self).__name__, self.mime,
                                         len(self.data))
class Padding(MetadataBlock):
    """Empty padding space for metadata blocks.

    To avoid rewriting the entire FLAC file when editing comments,
    metadata is often padded. Padding should occur at the end, and no
    more than one padding block should be in any FLAC file. Mutagen
    handles this with MetadataBlock.group_padding.
    """
    code = 1
    def __init__(self, data=""): super(Padding, self).__init__(data)
    # Only the byte count matters; the content is always NUL bytes.
    def load(self, data): self.length = len(data.read())
    def write(self):
        try: return "\x00" * self.length
        # On some 64 bit platforms this won't generate a MemoryError
        # or OverflowError since you might have enough RAM, but it
        # still generates a ValueError. On other 64 bit platforms,
        # this will still succeed for extremely large values.
        # Those should never happen in the real world, and if they
        # do, writeblocks will catch it.
        except (OverflowError, ValueError, MemoryError):
            raise error("cannot write %d bytes" % self.length)
    def __eq__(self, other):
        return isinstance(other, Padding) and self.length == other.length
    def __repr__(self):
        return "<%s (%d bytes)>" % (type(self).__name__, self.length)
class FLAC(FileType):
"""A FLAC audio file.
Attributes:
info -- stream information (length, bitrate, sample rate)
tags -- metadata tags, if any
cuesheet -- CueSheet object, if any
seektable -- SeekTable object, if any
pictures -- list of embedded pictures
"""
_mimes = ["audio/x-flac", "application/x-flac"]
METADATA_BLOCKS = [StreamInfo, Padding, None, SeekTable, VCFLACDict,
CueSheet, Picture]
"""Known metadata block types, indexed by ID."""
    def score(filename, fileobj, header):
        """Filetype detection score: FLAC streams begin with 'fLaC'."""
        return header.startswith("fLaC")
    score = staticmethod(score)  # py2-style staticmethod wrapping
    def __read_metadata_block(self, file):
        """Read one metadata block from the file object.

        Appends the parsed block (or a generic MetadataBlock for unknown
        block codes) to self.metadata_blocks, records the first Vorbis
        comment / cue sheet / seek table seen, and returns 1 while more
        blocks follow, 0 after the last block.
        """
        byte = ord(file.read(1))
        size = to_int_be(file.read(3))
        try:
            data = file.read(size)
            if len(data) != size:
                raise error(
                    "file said %d bytes, read %d bytes" % (size, len(data)))
            # IndexError: code beyond the known list; TypeError: the
            # None placeholder for application blocks.
            block = self.METADATA_BLOCKS[byte & 0x7F](data)
        except (IndexError, TypeError):
            block = MetadataBlock(data)
            block.code = byte & 0x7F
            self.metadata_blocks.append(block)
        else:
            self.metadata_blocks.append(block)
            if block.code == VCFLACDict.code:
                if self.tags is None: self.tags = block
                else: raise FLACVorbisError("> 1 Vorbis comment block found")
            elif block.code == CueSheet.code:
                if self.cuesheet is None: self.cuesheet = block
                else: raise error("> 1 CueSheet block found")
            elif block.code == SeekTable.code:
                if self.seektable is None: self.seektable = block
                else: raise error("> 1 SeekTable block found")
        # The high bit of the first byte marks the last metadata block.
        return (byte >> 7) ^ 1
    def add_tags(self):
        """Add a Vorbis comment block to the file.

        Raises FLACVorbisError if the file already has one.
        """
        if self.tags is None:
            self.tags = VCFLACDict()
            self.metadata_blocks.append(self.tags)
        else: raise FLACVorbisError("a Vorbis comment already exists")
    add_vorbiscomment = add_tags  # historical alias
    def delete(self, filename=None):
        """Remove Vorbis comments from a file.

        If no filename is given, the one most recently loaded is used.
        """
        if filename is None: filename = self.filename
        # At most one VCFLACDict can exist (enforced at load time), so
        # stop after the first match; iterate over a copy while removing.
        for s in list(self.metadata_blocks):
            if isinstance(s, VCFLACDict):
                self.metadata_blocks.remove(s)
                self.tags = None
                self.save()
                break
    vc = property(lambda s: s.tags, doc="Alias for tags; don't use this.")
    def load(self, filename):
        """Load file information from a filename.

        Raises FLACNoHeaderError if the first metadata block is not a
        stream info block.
        """
        self.metadata_blocks = []
        self.tags = None
        self.cuesheet = None
        self.seektable = None
        self.filename = filename
        fileobj = file(filename, "rb")  # py2 builtin file()
        try:
            self.__check_header(fileobj)
            while self.__read_metadata_block(fileobj): pass
        finally:
            fileobj.close()
        # A 'length' attribute on the first block distinguishes a parsed
        # StreamInfo from a generic/unknown MetadataBlock.
        try: self.metadata_blocks[0].length
        except (AttributeError, IndexError):
            raise FLACNoHeaderError("Stream info block not found")
    info = property(lambda s: s.metadata_blocks[0])
    def add_picture(self, picture):
        """Add a new picture to the file.

        *picture* is appended as-is to the metadata block list.
        """
        self.metadata_blocks.append(picture)
def clear_pictures(self):
"""Delete all pictures from the file."""
self.metadata_blocks = filter(lambda b: b.code != Picture.code,
self.metadata_blocks)
def __get_pictures(self):
return filter(lambda b: b.code == Picture.code, self.metadata_blocks)
pictures = property(__get_pictures, doc="List of embedded pictures")
    def save(self, filename=None, deleteid3=False):
        """Save metadata blocks to a file.
        If no filename is given, the one most recently loaded is used.

        With deleteid3=True, leading ID3v2 and trailing ID3v1 tags are
        removed as a side effect.
        """
        if filename is None: filename = self.filename
        # NOTE(review): f is never explicitly closed; this relies on
        # refcounting / GC to close the handle.
        f = open(filename, 'rb+')
        # Ensure we've got padding at the end, and only at the end.
        # If adding makes it too large, we'll scale it down later.
        self.metadata_blocks.append(Padding('\x00' * 1020))
        MetadataBlock.group_padding(self.metadata_blocks)
        header = self.__check_header(f)
        # Bytes available for metadata between the "fLaC" magic and the
        # start of the audio frames.
        available = self.__find_audio_offset(f) - header # "fLaC" and maybe ID3
        data = MetadataBlock.writeblocks(self.metadata_blocks)
        # Delete ID3v2
        if deleteid3 and header > 4:
            available += header - 4
            header = 4
        if len(data) > available:
            # If we have too much data, see if we can reduce padding.
            padding = self.metadata_blocks[-1]
            newlength = padding.length - (len(data) - available)
            if newlength > 0:
                padding.length = newlength
                data = MetadataBlock.writeblocks(self.metadata_blocks)
                assert len(data) == available
        elif len(data) < available:
            # If we have too little data, increase padding.
            self.metadata_blocks[-1].length += (available - len(data))
            data = MetadataBlock.writeblocks(self.metadata_blocks)
            assert len(data) == available
        if len(data) != available:
            # We couldn't reduce the padding enough.
            diff = (len(data) - available)
            insert_bytes(f, diff, header)
        f.seek(header - 4)
        f.write("fLaC" + data)
        # Delete ID3v1
        if deleteid3:
            try: f.seek(-128, 2)
            except IOError: pass
            else:
                if f.read(3) == "TAG":
                    f.seek(-128, 2)
                    f.truncate()
def __find_audio_offset(self, fileobj):
byte = 0x00
while not (byte >> 7) & 1:
byte = ord(fileobj.read(1))
size = to_int_be(fileobj.read(3))
fileobj.read(size)
return fileobj.tell()
    def __check_header(self, fileobj):
        """Verify the stream begins with the "fLaC" magic.

        Returns the offset at which the magic ends: 4 for a plain FLAC
        stream, larger when an ID3v2 tag precedes it. Raises
        FLACNoHeaderError when no FLAC magic can be found.
        """
        size = 4
        header = fileobj.read(4)
        if header != "fLaC":
            size = None
            if header[:3] == "ID3":
                # Skip a leading ID3v2 tag: 10-byte tag header plus its
                # length field (presumably synchsafe — see BitPaddedInt),
                # plus 4 for the "fLaC" magic itself.
                size = 14 + BitPaddedInt(fileobj.read(6)[2:])
                fileobj.seek(size - 4)
                if fileobj.read(4) != "fLaC": size = None
        if size is None:
            raise FLACNoHeaderError(
                "%r is not a valid FLAC file" % fileobj.name)
        return size
# Module-level alias so callers can use this module's type generically.
Open = FLAC

def delete(filename):
    """Remove tags from a file."""
    FLAC(filename).delete()
| {
"content_hash": "ca4146ac4faade71061077b455a22da9",
"timestamp": "",
"source": "github",
"line_count": 680,
"max_line_length": 79,
"avg_line_length": 37.32205882352941,
"alnum_prop": 0.5947830883801568,
"repo_name": "tonyg/erlang-jukebox",
"id": "1669a029b61ba41b80d9a99b6fc1cbec467afde8",
"size": "25635",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "priv/metadata/mutagen/flac.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "10959"
},
{
"name": "Erlang",
"bytes": "130795"
},
{
"name": "JavaScript",
"bytes": "33082"
},
{
"name": "Python",
"bytes": "271242"
}
],
"symlink_target": ""
} |
"""
Copyright 2015 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ref: https://github.com/swagger-api/swagger-codegen
"""
from datetime import datetime
from pprint import pformat
from six import iteritems
class ProductMilestoneReleaseRest(object):
    """Swagger model describing a product milestone release.

    NOTE: originally produced by the swagger code generator; keep manual
    edits to a minimum.
    """

    def __init__(self):
        """Create an empty ProductMilestoneReleaseRest.

        ``swagger_types`` maps each attribute name to its swagger type;
        ``attribute_map`` maps each attribute name to its JSON key.
        """
        self.swagger_types = {
            'id': 'int',
            'status': 'str',
            'log': 'str',
            'end_date': 'datetime',
            'starting_date': 'datetime'
        }
        self.attribute_map = {
            'id': 'id',
            'status': 'status',
            'log': 'log',
            'end_date': 'endDate',
            'starting_date': 'startingDate'
        }
        self._id = None
        self._status = None
        self._log = None
        self._end_date = None
        self._starting_date = None

    @property
    def id(self):
        """Return the id of this ProductMilestoneReleaseRest (int)."""
        return self._id

    @id.setter
    def id(self, id):
        """Set the id of this ProductMilestoneReleaseRest (int)."""
        self._id = id

    @property
    def status(self):
        """Return the status of this ProductMilestoneReleaseRest (str)."""
        return self._status

    @status.setter
    def status(self, status):
        """Set the status; only documented workflow states are accepted."""
        allowed_values = ["IN_PROGRESS", "FAILED", "SUCCEEDED", "SYSTEM_ERROR"]
        if status not in allowed_values:
            raise ValueError(
                "Invalid value for `status`, must be one of {0}"
                .format(allowed_values)
            )
        self._status = status

    @property
    def log(self):
        """Return the log of this ProductMilestoneReleaseRest (str)."""
        return self._log

    @log.setter
    def log(self, log):
        """Set the log of this ProductMilestoneReleaseRest (str)."""
        self._log = log

    @property
    def end_date(self):
        """Return the end_date of this ProductMilestoneReleaseRest (datetime)."""
        return self._end_date

    @end_date.setter
    def end_date(self, end_date):
        """Set the end_date of this ProductMilestoneReleaseRest (datetime)."""
        self._end_date = end_date

    @property
    def starting_date(self):
        """Return the starting_date of this ProductMilestoneReleaseRest (datetime)."""
        return self._starting_date

    @starting_date.setter
    def starting_date(self, starting_date):
        """Set the starting_date of this ProductMilestoneReleaseRest (datetime)."""
        self._starting_date = starting_date

    def to_dict(self):
        """Return the model properties as a plain dict."""
        serialized = {}
        for name in self.swagger_types:
            value = getattr(self, name)
            if isinstance(value, list):
                serialized[name] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                serialized[name] = value.to_dict()
            elif isinstance(value, datetime):
                # Datetimes serialize as their ISO date portion only.
                serialized[name] = str(value.date())
            else:
                serialized[name] = value
        return serialized

    def to_str(self):
        """Return the pretty-printed string form of the model."""
        return pformat(self.to_dict())

    def __repr__(self):
        """Used by both `print` and `pprint`."""
        return self.to_str()
| {
"content_hash": "13dcfbaaae7bdb2e14f2acd0ace3ad22",
"timestamp": "",
"source": "github",
"line_count": 208,
"max_line_length": 84,
"avg_line_length": 26.576923076923077,
"alnum_prop": 0.5691027496382055,
"repo_name": "thauser/pnc-cli",
"id": "176a76bff4bf1b601a1f6ab176efb4d0f46a9d6c",
"size": "5545",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pnc_cli/swagger_client/models/product_milestone_release_rest.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "44313"
},
{
"name": "Python",
"bytes": "1688077"
},
{
"name": "Shell",
"bytes": "720"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
import froide.account.models
import froide.helper.storage
class Migration(migrations.Migration):
    # Adds two optional profile fields (photo + free text) to the custom
    # User model defined in the froide `account` app.
    dependencies = [
        ("account", "0012_application"),
    ]
    operations = [
        migrations.AddField(
            model_name="user",
            name="profile_photo",
            # Storage/upload path come from project helpers; the names
            # suggest hash-derived filenames — see froide.helper.storage.
            field=models.ImageField(
                blank=True,
                null=True,
                storage=froide.helper.storage.HashedFilenameStorage(),
                upload_to=froide.account.models.profile_photo_path,
            ),
        ),
        migrations.AddField(
            model_name="user",
            name="profile_text",
            field=models.TextField(blank=True),
        ),
    ]
| {
"content_hash": "ec361f7694934526cee0b8e5c9b35326",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 70,
"avg_line_length": 25.966666666666665,
"alnum_prop": 0.5673940949935815,
"repo_name": "fin/froide",
"id": "c71464c118e77b7bf6f2d3bf58bc0d39581e6dbb",
"size": "853",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "froide/account/migrations/0013_auto_20180417_1113.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "302838"
},
{
"name": "JavaScript",
"bytes": "47357"
},
{
"name": "Makefile",
"bytes": "535"
},
{
"name": "Python",
"bytes": "1706123"
},
{
"name": "SCSS",
"bytes": "39397"
},
{
"name": "TypeScript",
"bytes": "57910"
},
{
"name": "Vue",
"bytes": "218866"
}
],
"symlink_target": ""
} |
import sys
# always use shared/modules version
SHARED_MODULE_PATH = "../../../../shared/modules"
# Prepend (path is relative to the CWD) so the shared copy of
# testcheck_module wins over any same-named module on sys.path.
sys.path.insert(0, SHARED_MODULE_PATH)
import testcheck_module
if __name__ == "__main__":
    testcheck_module.main()
| {
"content_hash": "5038344442bb87f2f54401f1223d4dd5",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 49,
"avg_line_length": 24.11111111111111,
"alnum_prop": 0.6774193548387096,
"repo_name": "ykhodorkovskiy/clip",
"id": "b31fc3e21dd081d8a4dc5044600fb783211c8151",
"size": "236",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "packages/scap-security-guide/scap-security-guide-0.1.20/RHEL/7/input/checks/testcheck.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Awk",
"bytes": "209"
},
{
"name": "C",
"bytes": "13809"
},
{
"name": "Groff",
"bytes": "246662"
},
{
"name": "HTML",
"bytes": "1333"
},
{
"name": "Makefile",
"bytes": "88495"
},
{
"name": "Python",
"bytes": "95048"
},
{
"name": "Shell",
"bytes": "17539"
}
],
"symlink_target": ""
} |
from flask import request
from framework.auth import Auth, decorators
from framework.utils import iso8601format
from website.registries import utils
def _view_registries_landing_page(campaign=None, **kwargs):
    """Landing page for the various registrations.

    Injects an ``Auth`` object into ``kwargs`` and returns the template
    context: login state, whether the user has resumable drafts for
    *campaign*, and whether they admin any project.
    """
    auth = kwargs['auth'] = Auth.from_kwargs(request.args.to_dict(), kwargs)
    is_logged_in = auth.logged_in
    if is_logged_in:
        # any() short-circuits on the first admin project instead of
        # materializing the full list of registerable nodes.
        has_projects = any(
            node.has_permission(user=auth.user, permission='admin')
            for node in auth.user.contributor_to
        )
    else:
        has_projects = False
    # NOTE(review): drafts_for_user is also called for anonymous users
    # (auth.user may be None) — presumably handled downstream; confirm.
    return {
        'is_logged_in': is_logged_in,
        'has_draft_registrations': bool(utils.drafts_for_user(auth.user, campaign)),
        'has_projects': has_projects,
        'campaign_long': utils.REG_CAMPAIGNS.get(campaign),
        'campaign_short': campaign
    }
def registered_reports_landing(**kwargs):
    """Landing page view bound to the 'registered_report' campaign."""
    return _view_registries_landing_page('registered_report', **kwargs)
@decorators.must_be_logged_in
def draft_registrations(auth, **kwargs):
    """API endpoint; lists draft registrations the user can resume."""
    campaign = kwargs.get('campaign', None)
    serialized_drafts = []
    for draft in utils.drafts_for_user(auth.user, campaign):
        serialized_drafts.append({
            'dateUpdated': iso8601format(draft.datetime_updated),
            'dateInitiated': iso8601format(draft.datetime_initiated),
            'node': {
                'title': draft.branched_from.title,
            },
            'initiator': {
                'name': draft.initiator.fullname,
            },
            'url': draft.branched_from.web_url_for(
                'edit_draft_registration_page',
                draft_id=draft._id,
            ),
        })
    return {'draftRegistrations': serialized_drafts}
def registries_landing_page(**kwargs):
    # Placeholder for developers who don't have the ember app set up.
    return {}
| {
"content_hash": "0dd28a9f800ec4a603e46fc3600fffd8",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 91,
"avg_line_length": 33.12698412698413,
"alnum_prop": 0.5975083852419741,
"repo_name": "icereval/osf.io",
"id": "ada0b6e53b0748bc86c300799e9083a8e2cd355b",
"size": "2111",
"binary": false,
"copies": "7",
"ref": "refs/heads/develop",
"path": "website/registries/views.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "108526"
},
{
"name": "HTML",
"bytes": "261937"
},
{
"name": "JavaScript",
"bytes": "1856123"
},
{
"name": "Mako",
"bytes": "691640"
},
{
"name": "Python",
"bytes": "8331919"
},
{
"name": "VCL",
"bytes": "13885"
}
],
"symlink_target": ""
} |
# Distribution metadata for pyramid_cli (name, version, author, license),
# kept in one module as a single source of truth.
__title__ = 'pyramid_cli'
__summary__ = 'Tools for managing pyramid applications!'
__uri__ = 'http://github.com/surveymonkey/pyramid_cli'
__version__ = '0.0.2'
__author__ = 'John Anderson'
__email__ = 'johna@surveymonkey.com'
__license__ = 'MIT'
__copyright__ = '2015 SurveyMonkey'
| {
"content_hash": "ea5918a1401610561415a561e13fb90c",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 56,
"avg_line_length": 35.25,
"alnum_prop": 0.648936170212766,
"repo_name": "SurveyMonkey/pyramid_cli",
"id": "8293bcf4586052117352f63c5eb7440016ccc435",
"size": "282",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyramid_cli/__about__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "17749"
}
],
"symlink_target": ""
} |
import sys
from setuptools.command.test import test as TestCommand
from setuptools import setup, find_packages
class PyTest(TestCommand):
    """`python setup.py test` command that delegates to pytest.

    Extra pytest arguments can be supplied via --pytest-args.
    """
    user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]
    def initialize_options(self):
        TestCommand.initialize_options(self)
        self.pytest_args = []
    def finalize_options(self):
        TestCommand.finalize_options(self)
        self.test_args = []
        self.test_suite = True
    def run_tests(self):
        # Import here, because outside the eggs aren't loaded
        import pytest
        # Exit with pytest's return code so CI sees failures.
        errno = pytest.main(self.pytest_args)
        sys.exit(errno)
# Packaging metadata; `test` is wired to the pytest runner above and the
# console script entry point exposes pattern_matcher.main as a CLI.
setup(
    name='pattern-matcher',
    version='0.1.0',
    author='Damon Kelley',
    author_email='damon.kelley@gmail.com',
    url='https://github.com/damonkelley/pattern-matching-paths',
    license='MIT',
    packages=find_packages(exclude=["tests.*", "tests"]),
    include_package_data=True,
    description='Matching paths to patterns.',
    tests_require=['pytest'],
    cmdclass={'test': PyTest},
    entry_points={
        'console_scripts': ['pattern-matcher=pattern_matcher:main']
    }
)
| {
"content_hash": "a52192625dc5687619f59b8361f100f3",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 74,
"avg_line_length": 28.6,
"alnum_prop": 0.6520979020979021,
"repo_name": "damonkelley/pattern-matcher",
"id": "6f54f774f61e845d80f36d9c5baee1f3df3a670b",
"size": "1168",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "22939"
}
],
"symlink_target": ""
} |
from .window import window
from strict_functions import strict_globals
@strict_globals(window=window)
def all_substrings(s):
    ''' yields every contiguous substring of s, shortest first '''
    concat = ''.join
    for size in range(1, len(s) + 1):
        for piece in window(s, size):
            yield concat(piece)
# Drop the imported helpers from the module namespace; the decorated
# function keeps its own reference to `window` via strict_globals.
del window
del strict_globals
| {
"content_hash": "9cfdc4f3697908ae914662620689ff08",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 45,
"avg_line_length": 22.785714285714285,
"alnum_prop": 0.6520376175548589,
"repo_name": "CodyKochmann/generators",
"id": "1c984c2b4c78d16a7ac42b11120b1030862e206d",
"size": "450",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "generators/all_substrings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "85464"
}
],
"symlink_target": ""
} |
"""
Copies the Unity Firestore testapp from the Firebase Unity SDK GitHub repository
into another directory and tweaks its structure and configuration to suit local
development. For example, it copies the "framework" files into their correct
locations even though they are located in a different directory tree in the Git
repository.
"""
from collections.abc import Sequence
import dataclasses
import json
import os
import pathlib
import re
import shutil
import sys
from typing import Optional
from xml.dom import minidom

from absl import app
from absl import flags
from absl import logging
# Per-user file of flag default values (see FlagsParser for its format).
DEFAULTS_FILE = pathlib.Path.home() / ".cp_unity_testapp.flags.txt"
# This script lives three directories below the repository root.
DEFAULT_GIT_REPO_DIR = pathlib.Path(__file__).parent.parent.parent
FLAG_DEFAULTS_FILE = flags.DEFINE_string(
    name="defaults_file",
    default=None,
    help="The file from which to load the default values for flags that are "
    "not explicitly specified. This is a text file where each line is stripped "
    "of leading and trailing whitespace and each line is then treated as a "
    f"single command-line flag. (default: {DEFAULTS_FILE})",
)
FLAG_GIT_REPO_DIR = flags.DEFINE_string(
    name="git_repo_dir",
    default=None,
    help="The directory of the Unity Git repository whose Firestore testapp "
    "to copy. If not specified, this directory is inferred from the location "
    f"of this file, which, in this case, is {DEFAULT_GIT_REPO_DIR}",
)
FLAG_DEST_DIR_2017 = flags.DEFINE_string(
    name="dest_dir_2017",
    default=None,
    help="The directory to which to assemble the Unity application, modified for "
    "support in Unity 2017. This directory will be deleted if it exists.",
)
FLAG_DEST_DIR_2020 = flags.DEFINE_string(
    name="dest_dir_2020",
    default=None,
    help="The directory to which to assemble the Unity application, modified for "
    "support in Unity 2020. This directory will be deleted if it exists.",
)
FLAG_GOOGLE_SERVICES_JSON_FILE = flags.DEFINE_string(
    name="google_services_json",
    default=None,
    help="The google-services.json file to use in the Unity application. "
    "This file will be copied into the destination directory and the Android "
    "package name will be read from it. The Unity project will then be edited "
    "to use the parsed Android package name. If this flag is not specified "
    "then these steps will need to be performed manually.",
)
FLAG_GOOGLE_SERVICE_INFO_PLIST_FILE = flags.DEFINE_string(
    name="google_service_info_plist",
    default=None,
    help="The GoogleService-Info.plist file to use in the Unity application. "
    "This file will be copied into the destination directory and the Bundle ID "
    "will be read from it. The Unity project will then be edited to use the "
    "parsed Bundle ID. If this flag is not specified then these steps will "
    "need to be performed manually if targeting iOS.",
)
FLAG_ANDROID_PACKAGE_NAME = flags.DEFINE_string(
    name="android_package_name",
    default=None,
    help="The Android package name to use; must be one of the package names "
    "listed in google-services.json. If this flag is not specified then the "
    "first Android package name found in the google-services.json will be used.",
)
FLAG_APPLE_DEVELOPER_TEAM_ID = flags.DEFINE_string(
    name="apple_developer_team_id",
    default=None,
    help="The Apple developer team ID to use. The Unity project in the "
    "destination directory will be edited to use this value in the generated "
    "Xcode project. If this flag is not specified then the Developer Team ID "
    "will need to be manually set in Xcode.",
)
FLAG_HARDLINK_CS_FILES = flags.DEFINE_boolean(
    name="hardlink",
    default=False,
    help="Instead of copying the .cs source files, hardlink them. This can be "
    "useful when developing the C# code for the testapp itself, as changes "
    "to those files will be instantly reflected both in the destination "
    "Unity project and the GitHub repository."
)
def main(argv: Sequence[str]) -> None:
  """Entry point: resolve flags, then copy and configure the testapp(s).

  Exits with status 1 on a defaults-file parse error or a copy failure
  and status 2 on any other flag error.
  """
  if len(argv) > 1:
    raise app.UsageError(f"unexpected argument: {argv[1]}")
  flags_parser = FlagsParser()
  try:
    # Named `parsed_flags` (not `flags`) to avoid shadowing the absl
    # `flags` module imported at the top of the file.
    parsed_flags = flags_parser.parse()
  except flags_parser.DefaultsFileParseError as e:
    print("ERROR: loading flag default values from "
        f"{flags_parser.defaults_file} failed: {e}", file=sys.stderr)
    sys.exit(1)
  except flags_parser.Error as e:
    print(f"ERROR: {e}", file=sys.stderr)
    sys.exit(2)
  copier = UnityTestappCopier(
      git_repo_dir=parsed_flags.git_repo_dir,
      dest_dir_2017=parsed_flags.dest_dir_2017,
      dest_dir_2020=parsed_flags.dest_dir_2020,
      google_services_json_file=parsed_flags.google_services_json_file,
      google_service_info_plist_file=parsed_flags.google_service_info_plist_file,
      android_package_name=parsed_flags.android_package_name,
      apple_developer_team_id=parsed_flags.apple_developer_team_id,
      hardlink_cs_files=parsed_flags.hardlink_cs_files,
  )
  try:
    copier.run()
  except copier.Error as e:
    print(f"ERROR: {e}", file=sys.stderr)
    sys.exit(1)
class FlagsParser:
  """Resolves flag values by combining a defaults file with the command line.

  The defaults file (when present) is parsed first; any flag given
  explicitly on the command line then overrides the loaded default.
  """

  def __init__(self) -> None:
    # --defaults_file itself can only come from the command line.
    if FLAG_DEFAULTS_FILE.value is not None:
      self.defaults_file = pathlib.Path(FLAG_DEFAULTS_FILE.value)
    else:
      self.defaults_file = DEFAULTS_FILE
    self.git_repo_dir = DEFAULT_GIT_REPO_DIR
    self.dest_dir_2017: Optional[pathlib.Path] = None
    self.dest_dir_2020: Optional[pathlib.Path] = None
    self.google_services_json_file: Optional[pathlib.Path] = None
    self.google_service_info_plist_file: Optional[pathlib.Path] = None
    self.android_package_name: Optional[str] = None
    self.apple_developer_team_id: Optional[str] = None

  @dataclasses.dataclass(frozen=True)
  class ParsedFlags:
    """Immutable snapshot of the fully-resolved flag values."""
    git_repo_dir: pathlib.Path
    dest_dir_2017: Optional[pathlib.Path]
    dest_dir_2020: Optional[pathlib.Path]
    google_services_json_file: Optional[pathlib.Path]
    google_service_info_plist_file: Optional[pathlib.Path]
    android_package_name: Optional[str]
    apple_developer_team_id: Optional[str]
    hardlink_cs_files: bool

  def parse(self) -> ParsedFlags:
    """Load defaults, apply command-line overrides, and return the result."""
    self._load_defaults_file()
    self._load_flag_values()
    return self._to_parsed_flags()

  def _to_parsed_flags(self) -> ParsedFlags:
    return self.ParsedFlags(
        git_repo_dir = self.git_repo_dir,
        dest_dir_2017 = self.dest_dir_2017,
        dest_dir_2020 = self.dest_dir_2020,
        google_services_json_file = self.google_services_json_file,
        google_service_info_plist_file = self.google_service_info_plist_file,
        android_package_name = self.android_package_name,
        apple_developer_team_id = self.apple_developer_team_id,
        hardlink_cs_files = FLAG_HARDLINK_CS_FILES.value,
    )

  def _load_defaults_file(self) -> None:
    # The file format is alternating lines: a `--flag_name` line followed
    # by one line holding that flag's value.
    if not self.defaults_file.is_file():
      return
    logging.info("Loading flag default values from file: %s", self.defaults_file)
    with self.defaults_file.open("rt", encoding="utf8") as f:
      current_flag = None
      for line_number, line in enumerate(f, start=1):
        line = line.strip()
        if current_flag is None:
          # Expecting a `--flag_name` line.
          if not line.startswith("--"):
            raise self.DefaultsFileParseError(
                f"line {line_number}: should start with --: {line}")
          flag_name = line[2:]
          current_flag = self._flag_from_flag_name(flag_name)
          if current_flag is None:
            raise self.DefaultsFileParseError(
                f"line {line_number}: unknown flag: {line}")
        else:
          # The line after a flag-name line is that flag's value.
          self._set_flag_value(current_flag, line)
          current_flag = None
      if current_flag is not None:
        # A trailing `--flag_name` line had no value line after it.
        raise self.DefaultsFileParseError(
            f"line {line_number}: expected line after this line: {line}")

  def _load_flag_values(self) -> None:
    # Explicit command-line values override anything loaded from the
    # defaults file.
    if FLAG_GIT_REPO_DIR.value:
      self._log_using_flag_from_command_line(FLAG_GIT_REPO_DIR)
      self.git_repo_dir = pathlib.Path(FLAG_GIT_REPO_DIR.value)
    if FLAG_DEST_DIR_2017.value:
      self._log_using_flag_from_command_line(FLAG_DEST_DIR_2017)
      self.dest_dir_2017 = pathlib.Path(FLAG_DEST_DIR_2017.value)
    if FLAG_DEST_DIR_2020.value:
      self._log_using_flag_from_command_line(FLAG_DEST_DIR_2020)
      self.dest_dir_2020 = pathlib.Path(FLAG_DEST_DIR_2020.value)
    if FLAG_GOOGLE_SERVICES_JSON_FILE.value:
      self._log_using_flag_from_command_line(FLAG_GOOGLE_SERVICES_JSON_FILE)
      self.google_services_json_file = pathlib.Path(FLAG_GOOGLE_SERVICES_JSON_FILE.value)
    if FLAG_GOOGLE_SERVICE_INFO_PLIST_FILE.value:
      self._log_using_flag_from_command_line(FLAG_GOOGLE_SERVICE_INFO_PLIST_FILE)
      self.google_service_info_plist_file = pathlib.Path(FLAG_GOOGLE_SERVICE_INFO_PLIST_FILE.value)
    if FLAG_ANDROID_PACKAGE_NAME.value:
      self._log_using_flag_from_command_line(FLAG_ANDROID_PACKAGE_NAME)
      self.android_package_name = FLAG_ANDROID_PACKAGE_NAME.value
    if FLAG_APPLE_DEVELOPER_TEAM_ID.value:
      self._log_using_flag_from_command_line(FLAG_APPLE_DEVELOPER_TEAM_ID)
      self.apple_developer_team_id = FLAG_APPLE_DEVELOPER_TEAM_ID.value
    # The boolean flag always takes its command-line (or built-in) value.
    self._log_using_flag_from_command_line(FLAG_HARDLINK_CS_FILES)

  @classmethod
  def _log_using_flag_from_command_line(cls, flag: flags.Flag) -> None:
    logging.info("Using flag from command line: --%s=%s", flag.name, flag.value)

  @classmethod
  def _flag_from_flag_name(cls, flag_name: str) -> Optional[flags.Flag]:
    """Map a bare flag name to its Flag object, or None if unknown."""
    known_flags = (
        FLAG_GIT_REPO_DIR,
        FLAG_DEST_DIR_2017,
        FLAG_DEST_DIR_2020,
        FLAG_GOOGLE_SERVICES_JSON_FILE,
        FLAG_GOOGLE_SERVICE_INFO_PLIST_FILE,
        FLAG_ANDROID_PACKAGE_NAME,
        FLAG_APPLE_DEVELOPER_TEAM_ID,
    )
    for known_flag in known_flags:
      if known_flag.name == flag_name:
        return known_flag
    else:
      return None

  def _set_flag_value(self, flag: flags.Flag, value: str) -> None:
    """Record *value* (from the defaults file) for *flag* on this parser."""
    if flag is FLAG_GIT_REPO_DIR:
      self.git_repo_dir = pathlib.Path(value)
    elif flag is FLAG_DEST_DIR_2017:
      self.dest_dir_2017 = pathlib.Path(value)
    elif flag is FLAG_DEST_DIR_2020:
      self.dest_dir_2020 = pathlib.Path(value)
    elif flag is FLAG_GOOGLE_SERVICES_JSON_FILE:
      self.google_services_json_file = pathlib.Path(value)
    elif flag is FLAG_GOOGLE_SERVICE_INFO_PLIST_FILE:
      self.google_service_info_plist_file = pathlib.Path(value)
    elif flag is FLAG_ANDROID_PACKAGE_NAME:
      self.android_package_name = value
    elif flag is FLAG_APPLE_DEVELOPER_TEAM_ID:
      self.apple_developer_team_id = value
    else:
      raise RuntimeError(f"unknown flag: {flag.value}")
    logging.info("Loaded flag from %s: --%s=%s", self.defaults_file, flag.name, value)

  class Error(Exception):
    pass

  class DefaultsFileParseError(Error):
    pass
class UnityTestappCopier:
def __init__(
self,
*,
git_repo_dir: pathlib.Path,
dest_dir_2017: Optional[pathlib.Path],
dest_dir_2020: Optional[pathlib.Path],
google_services_json_file: Optional[pathlib.Path],
google_service_info_plist_file: Optional[pathlib.Path],
android_package_name: Optional[str],
apple_developer_team_id: Optional[str],
hardlink_cs_files: bool,
) -> None:
self.git_repo_dir = git_repo_dir
self.dest_dir_2017 = dest_dir_2017
self.dest_dir_2020 = dest_dir_2020
self.google_services_json_file = google_services_json_file
self.google_service_info_plist_file = google_service_info_plist_file
self.android_package_name = android_package_name
self.apple_developer_team_id = apple_developer_team_id
self.hardlink_cs_files = hardlink_cs_files
def run(self) -> None:
something_done = False
if self.dest_dir_2017 is not None:
self._run(self.dest_dir_2017, 2017)
something_done = True
if self.dest_dir_2020 is not None:
self._run(self.dest_dir_2020, 2020)
something_done = True
if not something_done:
raise self.Error("Nothing to do; no destination directories specified")
def _run(self, dest_dir: pathlib.Path, unity_version: int) -> None:
if dest_dir.exists():
self._rmtree(dest_dir)
testapp_dir = self.git_repo_dir / "firestore" / "testapp"
self._copy_tree(testapp_dir, dest_dir)
# Delete the nunit tests, since they are not maintained.
self._rmtree(dest_dir / "Assets" / "Tests")
# Copy AutomatedTestRunner.cs
automated_test_runner_cs_src = self.git_repo_dir / "scripts" / "gha" / \
"integration_testing" / "automated_testapp" / "AutomatedTestRunner.cs"
automated_test_runner_cs_dest = dest_dir / "Assets" / "Firebase" / \
"Sample" / "AutomatedTestRunner.cs"
self._copy_file(automated_test_runner_cs_src, automated_test_runner_cs_dest)
# Copy ftl_testapp_files directory.
ftl_testapp_files_src = self.git_repo_dir / "scripts" / "gha" / \
"integration_testing" / "automated_testapp" / "ftl_testapp_files"
ftl_testapp_files_dest = dest_dir / "Assets" / "Firebase" / \
"Sample" / "FirebaseTestLab"
self._copy_tree(ftl_testapp_files_src, ftl_testapp_files_dest)
# Delete Builder.cs in Unity 2017 since it doesn't compile
if unity_version == 2017:
builder_cs_file = dest_dir / "Assets" / "Firebase" / "Editor" / "Builder.cs"
builder_cs_file.unlink()
if self.google_services_json_file is None:
android_package_name = None
else:
google_services_json_dest_file = dest_dir / "Assets" / "Firebase" / \
"Sample" / "Firestore" / "google-services.json"
self._copy_file(self.google_services_json_file, google_services_json_dest_file)
android_package_name = self._load_android_package_name(google_services_json_dest_file)
if self.google_service_info_plist_file is None:
bundle_id = None
else:
google_service_info_plist_dest_file = dest_dir / "Assets" / "Firebase" / \
"Sample" / "Firestore" / "GoogleService-Info.plist"
self._copy_file(self.google_service_info_plist_file, google_service_info_plist_dest_file)
bundle_id = self._load_bundle_id(google_service_info_plist_dest_file)
if android_package_name is not None or bundle_id is not None:
project_settings_file = dest_dir / "ProjectSettings" / "ProjectSettings.asset"
self._update_unity_app_info(project_settings_file, android_package_name, bundle_id)
# A drop-in replacement for `shutil.copy()` that creates hard links for some files
# if hardlink_cs_files=True was specified to __init__().
def _copy(self, src, dst, *, follow_symlinks=True):
if self.hardlink_cs_files and str(src).endswith(".cs"):
src_file = pathlib.Path(src)
dst_file = pathlib.Path(dst)
src_file.link_to(dst_file)
else:
shutil.copy(src, dst, follow_symlinks=follow_symlinks)
def _copy_file(self, src_file: pathlib.Path, dest_file: pathlib.Path) -> None:
logging.info("Copying %s to %s", src_file, dest_file)
self._copy(src_file, dest_file)
def _copy_tree(self, src_dir: pathlib.Path, dest_dir: pathlib.Path) -> None:
logging.info("Copying %s to %s", src_dir, dest_dir)
shutil.copytree(src_dir, dest_dir, copy_function=self._copy)
@classmethod
def _rmtree(cls, dir_path: pathlib.Path) -> None:
logging.info("Deleting %s", dir_path)
shutil.rmtree(dir_path)
def _load_android_package_name(self, file_path: pathlib.Path) -> str:
logging.info("Loading Android package name from %s", file_path)
with file_path.open("rb") as f:
data = json.load(f)
package_names = []
clients = data.get("client", [])
for client in clients:
client_info = client.get("client_info")
if client_info is None:
continue
android_client_info = client_info.get("android_client_info")
if android_client_info is None:
continue
package_name = android_client_info.get("package_name")
if package_name is not None:
logging.debug(f"Found package name in {file_path}: {package_name}")
package_names.append(package_name)
if len(package_names) == 0:
raise self.Error(f"No Android package names found in {file_path}")
if self.android_package_name is None:
package_name = package_names[0]
elif self.android_package_name not in package_names:
raise self.Error(
f"Android package name {self.android_package_name} not found in {file_path}; "
f"consider instead using one of the following {len(package_names)} package "
f"names that were found: "
+ ", ".join(sorted(package_names)))
else:
package_name = self.android_package_name
logging.info("Loaded Android package name from %s: %s", file_path, package_name)
return package_name
@classmethod
def _load_bundle_id(self, file_path: pathlib.Path) -> str:
logging.info("Loading bundle ID from %s", file_path)
with file_path.open("rb") as f:
root = minidom.parse(f)
next_element_is_bundle_id = False
for dict_node in root.documentElement.getElementsByTagName("dict"):
for child_node in dict_node.childNodes:
if child_node.nodeType != child_node.ELEMENT_NODE:
continue
elif next_element_is_bundle_id:
bundle_id = child_node.firstChild.nodeValue
logging.info("Loaded bundle ID from %s: %s", file_path, bundle_id)
return bundle_id
elif child_node.tagName == "key":
if child_node.firstChild.nodeValue == "BUNDLE_ID":
next_element_is_bundle_id = True
raise self.Error("No bundle ID found in {file_path}")
  def _update_unity_app_info(
      self,
      file_path: pathlib.Path,
      android_package_name: Optional[str],
      bundle_id: Optional[str],
  ) -> None:
    """Rewrite app identifiers in place in the text file at ``file_path``.

    Lines of the form ``<indent><Key>: <identifier>`` are edited: keys
    ``Android``/``Standalone`` get ``android_package_name`` and keys
    ``iOS``/``iPhone``/``tvOS`` get ``bundle_id`` (a None value leaves the
    line untouched). A bare ``appleDeveloperTeamID:`` line additionally gets
    ``self.apple_developer_team_id`` appended when that attribute is set.
    """
    if android_package_name is not None:
      logging.info("Setting Android package name to %s in %s",
                   android_package_name, file_path)
    if bundle_id is not None:
      logging.info("Setting Bundle ID to %s in %s", bundle_id, file_path)

    with file_path.open("rt", encoding="utf8") as f:
      lines = list(f)

    # Matches "<whitespace><Key>: <identifier>" lines, capturing the key and
    # the dotted identifier value.
    app_id_expr = re.compile(r"\s+(\w+):\s*([a-zA-Z][a-zA-Z0-9.]+)\s*$")
    for i in range(len(lines)):
      line = lines[i]

      apple_developer_team_id_token = "appleDeveloperTeamID:"
      if line.strip() == apple_developer_team_id_token:
        if self.apple_developer_team_id is not None:
          # Splice the team ID between the token and the line terminator,
          # preserving the original indentation and EOL characters.
          token_index = line.index(apple_developer_team_id_token)
          eol_index = len(line.rstrip())
          lines[i] = line[:token_index] + apple_developer_team_id_token \
              + " " + self.apple_developer_team_id + line[eol_index:]
        continue

      match = app_id_expr.match(line)
      if not match:
        continue

      key = match.group(1)
      value = match.group(2)  # NOTE(review): unused; kept for clarity/debugging.
      if key in ("Android", "Standalone"):
        new_value = android_package_name
      elif key in ("iOS", "iPhone", "tvOS"):
        new_value = bundle_id
      else:
        new_value = None

      if new_value is not None:
        # Replace only the captured identifier, keeping everything around it.
        lines[i] = line[:match.start(2)] + new_value + line[match.end(2):]

    with file_path.open("wt", encoding="utf8") as f:
      f.writelines(lines)
  class Error(Exception):
    """Error raised by the enclosing class's helpers (e.g. missing IDs)."""
    pass
if __name__ == "__main__":
  # Script entry point; `app` is imported above (presumably absl — confirm).
  app.run(main)
| {
"content_hash": "2401002c46b5c3bcf3278fd4ece1222d",
"timestamp": "",
"source": "github",
"line_count": 501,
"max_line_length": 99,
"avg_line_length": 37.936127744510976,
"alnum_prop": 0.6784699568557297,
"repo_name": "firebase/firebase-unity-sdk",
"id": "08fea64304eb0f198b3eea0c087403cece4ebb88",
"size": "19029",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "firestore/scripts/cp_unity_testapp.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "2040323"
},
{
"name": "C++",
"bytes": "107768"
},
{
"name": "CMake",
"bytes": "138433"
},
{
"name": "Java",
"bytes": "3844"
},
{
"name": "JavaScript",
"bytes": "1567"
},
{
"name": "Kotlin",
"bytes": "3277"
},
{
"name": "Objective-C",
"bytes": "6604"
},
{
"name": "Python",
"bytes": "400764"
},
{
"name": "SWIG",
"bytes": "306817"
},
{
"name": "Shell",
"bytes": "12811"
},
{
"name": "Swift",
"bytes": "8627"
}
],
"symlink_target": ""
} |
from lib.test import Shaman
# Each entry is a (question, expected answer, fixture file) triple; the
# fixture file holds the recorded WolframAlpha XML response for the question.
queries = [
    (
        "What is the tallest building in the world?",
        "Burj Khalifa (2717 feet)", "tallest_building.dat"
    ), (
        "Where was George Washington born?",
        "Westmoreland County, Virginia", "george_washington_birthplace.dat"
    ), (
        "When is Easter?",
        "Sunday, March 31, 2013", "easter_date.dat"
    ), (
        "How many cups are in a gallon?",
        "16 cups", "cups_in_gallon.dat"
    ), (
        "How much is 15 miles in feet?",
        "79200 feet", "15miles_in_feet.dat"
    ), (
        "15 USD in RMB",
        "yuan93.44 (Chinese yuan)", "15USD_in_RMB.dat"
    )
]
class WolframAlpha(Shaman):
    """Tests for the WolframAlpha spell, answered from recorded fixtures."""

    def setUp(self):
        """Register a canned API route for every entry in ``queries``."""
        # Register URLs
        self.config['WolframAlpha.AppID'] = 'test123'
        # The loop variable was renamed from ``file`` so it no longer
        # shadows the builtin of the same name.
        for question, answer, fixture in queries:
            self.web.route(
                url='http://api.wolframalpha.com/v2/query',
                get={
                    'input': question,
                    'appid': self.config['WolframAlpha.AppID']
                },
                format='xml',
                file=fixture
            )

    @Shaman.generate((query[:2] for query in queries))
    def test(self, question, answer):
        """Each question should yield its recorded expected answer."""
        self.assertLooksLike(
            self.query(question),
            answer
        )
| {
"content_hash": "d80151727328489de7647c9b35af553d",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 75,
"avg_line_length": 27.102040816326532,
"alnum_prop": 0.5143072289156626,
"repo_name": "dvrasp/TheTroz",
"id": "94be2372c9b6569963e1345ba5e3c4281ad4233a",
"size": "1328",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spells/wolframAlpha/test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "65975"
}
],
"symlink_target": ""
} |
""" formcontrol.py - The helper class to control form transition
and helper decorators
$Id: formcontrol.py 639 2010-08-10 04:08:49Z ats $
"""
__author__ = 'Atsushi Shibata <shibata@webcore.co.jp>'
__docformat__ = 'plaintext'
__licence__ = 'BSD'
__all__ = ('FormControl', 'handle_state', 'validate')
class FormControl(object):
    """
    A form controller managing the transition of a form between states.

    Every state is stored internally as a ``(processor, validator)`` pair,
    where the validator may be ``None``.
    """

    INITIAL = 'initial'
    PROCESSING = 'processing'
    FAILURE = 'failure'
    SUCCESS = 'success'

    def __init__(self, states=None):
        """
        Initialize the controller from an optional state table.

        The states argument should be like::

            {STATE1: METHOD1,
             STATE2: (METHOD2, VALIDATOR2(opt.))}

        :raises KeyError: if a non-empty table lacks the initial state.
        :raises ValueError: if a processor or validator is not callable.
        """
        if states is None:
            states = {}
        # A non-empty state table must define the entry state.
        if states != {} and self.INITIAL not in states:
            raise KeyError('The initial state is required in states.')
        self._states = states
        # Normalize each value into a (processor, validator) pair and make
        # sure each component is callable.
        for k, v in self._states.items():
            cc = v[0] if isinstance(v, tuple) else v
            if not callable(cc):
                raise ValueError(("""The first tuple item for state %s """
                                  """should be method(callable)""") % k)
            if not isinstance(v, tuple) or len(v) == 1:
                self._states[k] = (cc, None)
            elif not callable(v[1]):
                raise ValueError(("""The second tuple item for state %s """
                                  """should be method(callable)""") % k)

    def add_state(self, state, c, v=None):
        """
        Register a new state with processor ``c`` and optional validator ``v``.

        :raises KeyError: if the state is already defined.
        :raises ValueError: if ``c`` is not callable.
        """
        if state in self._states:
            raise KeyError("""A key '%s' is already defined""" % state)
        if not callable(c):
            raise ValueError("The second argument should be method(callable)")
        self._states[state] = (c, v)

    def add_method(self, state, c):
        """
        Set the processor for ``state``, creating the state if necessary.

        :raises ValueError: if ``c`` is not callable.
        """
        if not callable(c):
            raise ValueError("The second argument should be method(callable)")
        if state not in self._states:
            # Installing a brand-new state: no validator yet.
            self._states[state] = (c, None)
        else:
            # Changing an existing state: keep its validator.
            self._states[state] = (c, self._states[state][1])

    def add_validator(self, state, v):
        """
        Set the validator for ``state``, creating the state if necessary.

        :raises ValueError: if ``v`` is neither None nor callable.
        """
        if v is not None and not callable(v):
            raise ValueError("The second argument should be method(callable)")
        if state not in self._states:
            self._states[state] = (None, v)
        else:
            self._states[state] = (self._states[state][0], v)

    def check_state(self, state):
        """Raise KeyError if ``state`` has not been registered."""
        if state not in self.get_states():
            raise KeyError("The state '%s' is not available" % state)

    def get_states(self):
        """Return the names of all registered states."""
        return self._states.keys()

    def get_processor(self, state):
        """Return the processor callable registered for ``state``."""
        self.check_state(state)
        return self._states[state][0]

    def get_validator(self, state):
        """Return the validator callable for ``state``, or None."""
        self.check_state(state)
        return self._states[state][1]

    def validate(self, state, *params, **kwd):
        """
        Run the validator for ``state`` and return the resulting state.

        When no validator is registered, ``state`` is returned unchanged;
        otherwise the validator is called with ``state=state`` plus any
        extra arguments and its return value is used.
        """
        v = self.get_validator(state)
        if v is None:
            return state
        # Process validator.
        return v(state=state, *params, **kwd)

    def process(self, state, *params, **kwd):
        """Run the processor registered for ``state`` and return its result."""
        p = self.get_processor(state)
        return p(*params, **kwd)

    def handle_state(self, *states):
        """Return a decorator that registers a function as the processor
        for each of the given states."""
        def set_states(func):
            for s in states:
                self.add_method(s, func)
            return func
        return set_states

    def handle_validate(self, *states):
        """Return a decorator that registers a function as the validator
        for each of the given states."""
        def set_validators(func):
            for s in states:
                self.add_validator(s, func)
            return func
        return set_validators
#
# Decorators
#
class handle_state(object):
    """Decorator that registers the wrapped function as the processor for
    one state or a tuple of states on the given controller."""

    def __init__(self, klass, state):
        self.klass = klass
        self.state = state

    def __call__(self, func):
        # Accept either a single state or a tuple of states.
        targets = self.state if isinstance(self.state, tuple) else (self.state,)
        for target in targets:
            self.klass.add_method(target, func)
        return func
class validate(object):
    """Decorator that registers the wrapped function as the validator for
    one state or a tuple of states on the given controller."""

    def __init__(self, klass, state):
        self.klass = klass
        self.state = state

    def __call__(self, func):
        # Accept either a single state or a tuple of states.
        targets = self.state if isinstance(self.state, tuple) else (self.state,)
        for target in targets:
            self.klass.add_validator(target, func)
        return func
| {
"content_hash": "30ab33f1802e0213285da59223d79c99",
"timestamp": "",
"source": "github",
"line_count": 190,
"max_line_length": 79,
"avg_line_length": 29.094736842105263,
"alnum_prop": 0.5300289435600579,
"repo_name": "Letractively/aha-gae",
"id": "f96bec6ac9ec897f2f5c9b653eeb5faa41b51d40",
"size": "5862",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aha/modelcontroller/formcontrol.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "51818"
},
{
"name": "HTML",
"bytes": "29371"
},
{
"name": "JavaScript",
"bytes": "39684"
},
{
"name": "Makefile",
"bytes": "50"
},
{
"name": "Python",
"bytes": "417917"
}
],
"symlink_target": ""
} |
def hello(name):
    """Print a greeting addressed to *name*."""
    greeting = 'Hello ' + name
    print(greeting)


# Greet each sample person once.
for person in ('Alice', 'Bob'):
    hello(person)
| {
"content_hash": "d0ebc41c02a25eb4a2e28c4cf127960d",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 26,
"avg_line_length": 15.6,
"alnum_prop": 0.5641025641025641,
"repo_name": "jakdept/pythonbook",
"id": "7b7b309020d5a98c6f5bb485523ffb777c7641ca",
"size": "78",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "book_materials/helloFunc2.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "324"
},
{
"name": "Python",
"bytes": "207695"
}
],
"symlink_target": ""
} |
"""Tests for debugger functionalities in tf.Session."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import functools
import glob
import os
import shutil
import tempfile
import threading
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.core.protobuf import config_pb2
from tensorflow.core.protobuf import rewriter_config_pb2
from tensorflow.core.util import event_pb2
from tensorflow.python.client import session
from tensorflow.python.debug.lib import debug_data
from tensorflow.python.debug.lib import debug_graphs
from tensorflow.python.debug.lib import debug_utils
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import parsing_ops
from tensorflow.python.ops import rnn
from tensorflow.python.ops import rnn_cell_impl
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variables
import tensorflow.python.ops.tensor_array_grad # pylint: disable=unused-import
from tensorflow.python.platform import googletest
from tensorflow.python.platform import test
from tensorflow.python.training import gradient_descent
def no_rewrite_session_config():
  """Return a ConfigProto whose Grappler rewrites (model pruning, arithmetic
  and dependency optimization) are disabled, so watched nodes survive."""
  rewrites = rewriter_config_pb2.RewriterConfig(
      disable_model_pruning=True,
      arithmetic_optimization=rewriter_config_pb2.RewriterConfig.OFF,
      dependency_optimization=rewriter_config_pb2.RewriterConfig.OFF)
  return config_pb2.ConfigProto(
      graph_options=config_pb2.GraphOptions(rewrite_options=rewrites))
class _RNNCellForTest(rnn_cell_impl.RNNCell):
  """Minimal RNN cell for testing: scales the input by a trainable scalar."""

  def __init__(self, input_output_size, state_size):
    self._input_output_size = input_output_size
    self._state_size = state_size
    # The graph-level variable keeps its original name "w".
    self._weight = variables.VariableV1(1.0, dtype=dtypes.float32, name="w")

  @property
  def state_size(self):
    return self._state_size

  @property
  def output_size(self):
    return self._input_output_size

  def __call__(self, input_, state, scope=None):
    # Output is the scaled input; the state passes through unchanged.
    output = math_ops.multiply(self._weight, input_)
    return (output, state)
@test_util.run_v1_only("b/120545219")
class SessionDebugTestBase(test_util.TensorFlowTestCase):
"""Base class for unit tests of tfdbg running with tf.Session."""
  @classmethod
  def setUpClass(cls):
    """Record expected device and partition-graph counts (GPU vs CPU-only)."""
    if test.is_gpu_available():
      cls._expected_partition_graph_count = 2
      cls._expected_num_devices = 2
      gpu_name = test_util.gpu_device_name()
      cls._main_device = "/job:localhost/replica:0/task:0" + gpu_name
    else:
      cls._expected_partition_graph_count = 1
      cls._expected_num_devices = 1
      cls._main_device = "/job:localhost/replica:0/task:0/device:CPU:0"
  @classmethod
  def tearDownClass(cls):
    # No class-level resources to release.
    pass
  def setUp(self):
    # Fresh per-test dump directory; removed again in tearDown().
    self._dump_root = tempfile.mkdtemp()
  def tearDown(self):
    """Reset the default graph and remove this test's dump directory."""
    ops.reset_default_graph()
    # Tear down temporary dump directory.
    if os.path.isdir(self._dump_root):
      shutil.rmtree(self._dump_root)
  def _debug_urls(self, run_number=None):
    """Return the debug URL(s) to dump to; overridden by concrete subclasses."""
    raise NotImplementedError(
        "_debug_urls() method is not implemented in the base test class.")
  def _debug_dump_dir(self, run_number=None):
    """Return the dump directory for a run; overridden by concrete subclasses."""
    raise NotImplementedError(
        "_debug_dump_dir() method is not implemented in the base test class.")
  def _debug_run_and_get_dump(self,
                              sess,
                              fetches,
                              feed_dict=None,
                              debug_ops="DebugIdentity",
                              tolerate_debug_op_creation_failures=False,
                              global_step=-1,
                              validate=True,
                              expected_partition_graph_count=None):
    """Run fetches with debugging and obtain DebugDumpDir.

    Args:
      sess: the tf.compat.v1.Session to be used.
      fetches: fetches of the Session.run().
      feed_dict: feed dict for the Session.run().
      debug_ops: name(s) of the debug ops to be used.
      tolerate_debug_op_creation_failures: whether to tolerate debug op
        creation failures.
      global_step: Optional global step.
      validate: whether to validate dumped tensors against graph.
      expected_partition_graph_count: optional count of partition graphs to
        assert on.

    Returns:
      1. Return values of the Session.run().
      2. The DebugDumpDir object from the debugged run().
    """
    run_options = config_pb2.RunOptions(output_partition_graphs=True)
    # Attach debug watches for every node in the graph to run_options.
    debug_utils.watch_graph(
        run_options,
        sess.graph,
        debug_ops=debug_ops,
        debug_urls=self._debug_urls(),
        tolerate_debug_op_creation_failures=tolerate_debug_op_creation_failures,
        global_step=global_step)

    run_metadata = config_pb2.RunMetadata()
    run_output = sess.run(fetches,
                          feed_dict=feed_dict,
                          options=run_options,
                          run_metadata=run_metadata)

    if expected_partition_graph_count is not None:
      self.assertEqual(expected_partition_graph_count,
                       len(run_metadata.partition_graphs))
    # Load the dump directory produced by the watched run.
    return run_output, debug_data.DebugDumpDir(
        self._dump_root, partition_graphs=run_metadata.partition_graphs,
        validate=validate)
  def _generate_dump_from_simple_addition_graph(self):
    """Build w = matmul(u, v), watch u/read and v/read, run and return dump."""
    with session.Session(config=no_rewrite_session_config()) as sess:
      u_init_val = np.array([[5.0, 3.0], [-1.0, 0.0]])
      v_init_val = np.array([[2.0], [-1.0]])

      # Use node names with overlapping namespace (i.e., parent directory) to
      # test concurrent, non-racing directory creation.
      u_name = "u"
      v_name = "v"
      w_name = "w"

      u_init = constant_op.constant(u_init_val, shape=[2, 2])
      u = variables.VariableV1(u_init, name=u_name)
      v_init = constant_op.constant(v_init_val, shape=[2, 1])
      v = variables.VariableV1(v_init, name=v_name)
      w = math_ops.matmul(u, v, name=w_name)

      u.initializer.run()
      v.initializer.run()

      run_options = config_pb2.RunOptions(output_partition_graphs=True)
      debug_urls = "file://%s" % self._dump_root

      # Add debug tensor watch for u.
      debug_utils.add_debug_tensor_watch(
          run_options, "%s/read" % u_name, 0, debug_urls=debug_urls)
      # Add debug tensor watch for v.
      debug_utils.add_debug_tensor_watch(
          run_options, "%s/read" % v_name, 0, debug_urls=debug_urls)

      run_metadata = config_pb2.RunMetadata()

      # Invoke Session.run().
      sess.run(w, options=run_options, run_metadata=run_metadata)

      self.assertEqual(self._expected_partition_graph_count,
                       len(run_metadata.partition_graphs))

      dump = debug_data.DebugDumpDir(
          self._dump_root, partition_graphs=run_metadata.partition_graphs)

      # Bundle everything callers may want to assert on into one namedtuple.
      simple_add_results = collections.namedtuple("SimpleAddResults", [
          "u_init_val", "v_init_val", "u", "v", "w", "u_name", "v_name", "w_name",
          "dump"
      ])
      return simple_add_results(u_init_val, v_init_val, u, v, w, u_name, v_name,
                                w_name, dump)
  def testCopyNodesHaveCorrectDebugOpsAndURLsAttributeValues(self):
    """Generated __copy_* nodes carry the expected debug_ops_spec entries."""
    with session.Session() as sess:
      u = variables.VariableV1(2.1, name="u")
      v = variables.VariableV1(20.0, name="v")
      w = math_ops.multiply(u, v, name="w")

      sess.run(variables.global_variables_initializer())

      run_options = config_pb2.RunOptions(output_partition_graphs=True)
      debug_urls = self._debug_urls()
      debug_utils.add_debug_tensor_watch(
          run_options,
          "u",
          0, ["DebugNumericSummary(gated_grpc=True)", "DebugIdentity"],
          debug_urls=debug_urls)
      debug_utils.add_debug_tensor_watch(
          run_options, "v", 0, ["DebugNumericSummary"], debug_urls=debug_urls)

      run_metadata = config_pb2.RunMetadata()
      r = sess.run(w, options=run_options, run_metadata=run_metadata)
      self.assertAllClose(42.0, r)

      # Locate the Copy nodes inserted for u and v in the partition graphs.
      u_copy_node_def = None
      v_copy_node_def = None
      for partition_graph in run_metadata.partition_graphs:
        for node_def in partition_graph.node:
          if debug_graphs.is_copy_node(node_def.name):
            if node_def.name == "__copy_u_0":
              u_copy_node_def = node_def
            elif node_def.name == "__copy_v_0":
              v_copy_node_def = node_def

      self.assertIsNotNone(u_copy_node_def)
      debug_ops_spec = u_copy_node_def.attr["debug_ops_spec"].list.s
      self.assertEqual(2, len(debug_ops_spec))
      # Spec entries follow "<debug_op>;<debug_url>;<gated_grpc flag>".
      self.assertEqual("DebugNumericSummary;%s;1" % debug_urls[0],
                       debug_ops_spec[0].decode("utf-8"))
      self.assertEqual("DebugIdentity;%s;0" % debug_urls[0],
                       debug_ops_spec[1].decode("utf-8"))

      self.assertIsNotNone(v_copy_node_def)
      debug_ops_spec = v_copy_node_def.attr["debug_ops_spec"].list.s
      self.assertEqual(1, len(debug_ops_spec))
      self.assertEqual("DebugNumericSummary;%s;0" % debug_urls[0],
                       debug_ops_spec[0].decode("utf-8"))
  def testConcurrentDumpingToPathsWithOverlappingParentDirsWorks(self):
    """Dumps for u and v (overlapping parent dirs) are complete and valid."""
    results = self._generate_dump_from_simple_addition_graph()
    self.assertTrue(results.dump.loaded_partition_graphs())

    # Since global_step is not explicitly specified, it should take its default
    # value: -1.
    self.assertEqual(-1, results.dump.core_metadata.global_step)
    self.assertGreaterEqual(results.dump.core_metadata.session_run_index, 0)
    self.assertGreaterEqual(results.dump.core_metadata.executor_step_index, 0)
    self.assertEqual([], results.dump.core_metadata.input_names)
    self.assertEqual([results.w.name], results.dump.core_metadata.output_names)
    self.assertEqual([], results.dump.core_metadata.target_nodes)

    # Verify the dumped tensor values for u and v.
    self.assertEqual(2, results.dump.size)

    self.assertAllClose([results.u_init_val],
                        results.dump.get_tensors("%s/read" % results.u_name, 0,
                                                 "DebugIdentity"))
    self.assertAllClose([results.v_init_val],
                        results.dump.get_tensors("%s/read" % results.v_name, 0,
                                                 "DebugIdentity"))

    # Timestamps are relative, so only non-negativity can be asserted.
    self.assertGreaterEqual(
        results.dump.get_rel_timestamps("%s/read" % results.u_name, 0,
                                        "DebugIdentity")[0], 0)
    self.assertGreaterEqual(
        results.dump.get_rel_timestamps("%s/read" % results.v_name, 0,
                                        "DebugIdentity")[0], 0)

    self.assertGreater(
        results.dump.get_dump_sizes_bytes("%s/read" % results.u_name, 0,
                                          "DebugIdentity")[0], 0)
    self.assertGreater(
        results.dump.get_dump_sizes_bytes("%s/read" % results.v_name, 0,
                                          "DebugIdentity")[0], 0)
  def testGetOpTypeWorks(self):
    """node_op_type() returns op types and rejects nonexistent node names."""
    results = self._generate_dump_from_simple_addition_graph()

    self.assertEqual(results.u.op.type,
                     results.dump.node_op_type(results.u_name))
    self.assertIn(results.v.op.type, results.dump.node_op_type(results.v_name))
    self.assertIn(results.w.op.type, results.dump.node_op_type(results.w_name))

    with self.assertRaisesRegexp(
        ValueError, r"None of the .* device\(s\) has a node named "):
      results.dump.node_op_type("foo_bar")
  def testDumpStringTensorsWorks(self):
    """String tensors are dumped with values, timestamps and dump sizes."""
    with session.Session(config=no_rewrite_session_config()) as sess:
      str1_init_val = np.array(b"abc")
      str2_init_val = np.array(b"def")

      str1_init = constant_op.constant(str1_init_val)
      str2_init = constant_op.constant(str2_init_val)

      str1_name = "str1"
      str2_name = "str2"
      str1 = variables.VariableV1(str1_init, name=str1_name)
      str2 = variables.VariableV1(str2_init, name=str2_name)
      # Concatenate str1 and str2
      str_concat = math_ops.add(str1, str2, name="str_concat")

      str1.initializer.run()
      str2.initializer.run()

      run_options = config_pb2.RunOptions(output_partition_graphs=True)
      debug_urls = self._debug_urls()

      # Add debug tensor watch for u.
      debug_utils.add_debug_tensor_watch(
          run_options, "%s/read" % str1_name, 0, debug_urls=debug_urls)
      # Add debug tensor watch for v.
      debug_utils.add_debug_tensor_watch(
          run_options, "%s/read" % str2_name, 0, debug_urls=debug_urls)

      run_metadata = config_pb2.RunMetadata()
      sess.run(str_concat, options=run_options, run_metadata=run_metadata)

      # String ops are located on CPU.
      self.assertEqual(1, len(run_metadata.partition_graphs))

      dump = debug_data.DebugDumpDir(
          self._dump_root, partition_graphs=run_metadata.partition_graphs)

      self.assertIn(str1_name, dump.nodes())
      self.assertIn(str2_name, dump.nodes())

      self.assertEqual(2, dump.size)

      self.assertEqual([str1_init_val],
                       dump.get_tensors("%s/read" % str1_name, 0,
                                        "DebugIdentity"))
      self.assertEqual([str2_init_val],
                       dump.get_tensors("%s/read" % str2_name, 0,
                                        "DebugIdentity"))

      self.assertGreaterEqual(
          dump.get_rel_timestamps("%s/read" % str1_name, 0, "DebugIdentity")[0],
          0)
      self.assertGreaterEqual(
          dump.get_rel_timestamps("%s/read" % str2_name, 0, "DebugIdentity")[0],
          0)

      self.assertGreater(
          dump.get_dump_sizes_bytes("%s/read" % str1_name, 0,
                                    "DebugIdentity")[0], 0)
      self.assertGreater(
          dump.get_dump_sizes_bytes("%s/read" % str2_name, 0,
                                    "DebugIdentity")[0], 0)
  def testDumpUninitializedVariable(self):
    """Uninitialized variables dump as InconvertibleTensorProto placeholders."""
    op_namespace = "testDumpUninitializedVariable"
    with session.Session() as sess:
      u_init_val = np.array([[5.0, 3.0], [-1.0, 0.0]])
      s_init_val = b"str1"

      u_name = "%s/u" % op_namespace
      s_name = "%s/s" % op_namespace

      u_init = constant_op.constant(u_init_val, shape=[2, 2])
      u = variables.VariableV1(u_init, name=u_name)
      s_init = constant_op.constant(s_init_val)
      s = variables.VariableV1(s_init, name=s_name)

      run_options = config_pb2.RunOptions(output_partition_graphs=True)
      debug_urls = self._debug_urls()

      # Add debug tensor watch for u.
      debug_utils.add_debug_tensor_watch(
          run_options, u_name, 0, debug_urls=debug_urls)
      debug_utils.add_debug_tensor_watch(
          run_options, s_name, 0, debug_urls=debug_urls)

      run_metadata = config_pb2.RunMetadata()

      # Initialize u and s.
      sess.run(variables.global_variables_initializer(),
               options=run_options,
               run_metadata=run_metadata)

      # Verify the dump file for the uninitialized value of u.
      dump = debug_data.DebugDumpDir(
          self._dump_root, partition_graphs=run_metadata.partition_graphs)
      self.assertEqual(2, dump.size)
      self.assertEqual(self._expected_partition_graph_count,
                       len(run_metadata.partition_graphs))

      # Verify that the variable is properly initialized by the run() call.
      u_vals = dump.get_tensors(u_name, 0, "DebugIdentity")
      s_vals = dump.get_tensors(s_name, 0, "DebugIdentity")
      self.assertEqual(1, len(u_vals))
      self.assertIsInstance(u_vals[0], debug_data.InconvertibleTensorProto)
      self.assertFalse(u_vals[0].initialized)
      self.assertEqual(1, len(s_vals))
      self.assertIsInstance(s_vals[0], debug_data.InconvertibleTensorProto)
      self.assertFalse(s_vals[0].initialized)

      # Call run() again, to check that u is initialized properly.
      self.assertAllClose(u_init_val, sess.run(u))
      self.assertEqual(s_init_val, sess.run(s))
  def testDebugWhileLoopGeneratesMultipleDumps(self):
    """A watched tensor inside tf.while_loop dumps once per loop iteration."""
    with session.Session(config=no_rewrite_session_config()) as sess:
      num_iter = 10

      # "u" is the Variable being updated in the loop.
      u_name = "testDumpToFileWhileLoop/u"
      u_namespace = u_name.split("/")[0]

      u_init_val = np.array(11.0)
      u_init = constant_op.constant(u_init_val)
      u = variables.VariableV1(u_init, name=u_name)

      # "v" is the increment.
      v_name = "testDumpToFileWhileLoop/v"
      v_namespace = v_name.split("/")[0]

      v_init_val = np.array(2.0)
      v_init = constant_op.constant(v_init_val)
      v = variables.VariableV1(v_init, name=v_name)

      u.initializer.run()
      v.initializer.run()

      i = constant_op.constant(0, name="testDumpToFileWhileLoop/i")

      def cond(i):
        return math_ops.less(i, num_iter)

      def body(i):
        # Each iteration adds v into u, then increments the loop counter
        # with a control dependency on the assignment.
        new_u = state_ops.assign_add(u, v)
        new_i = math_ops.add(i, 1)
        op = control_flow_ops.group(new_u)
        new_i = control_flow_ops.with_dependencies([op], new_i)
        return [new_i]

      loop = control_flow_ops.while_loop(
          cond, body, [i], parallel_iterations=10)

      # Create RunOptions for debug-watching tensors
      run_options = config_pb2.RunOptions(output_partition_graphs=True)
      debug_urls = self._debug_urls()

      # Add debug tensor watch for u.
      debug_utils.add_debug_tensor_watch(
          run_options, u_name, 0, debug_urls=debug_urls)
      # Add debug tensor watch for v.
      debug_utils.add_debug_tensor_watch(
          run_options, "%s/read" % v_name, 0, debug_urls=debug_urls)
      # Add debug tensor watch for while/Identity.
      debug_utils.add_debug_tensor_watch(
          run_options, "while/Identity", 0, debug_urls=debug_urls)
      # Add debug tensor watch for while/Add/y.
      debug_utils.add_debug_tensor_watch(
          run_options, "while/Add/y", 0, debug_urls=debug_urls)

      run_metadata = config_pb2.RunMetadata()
      r = sess.run(loop, options=run_options, run_metadata=run_metadata)

      self.assertEqual(self._expected_partition_graph_count,
                       len(run_metadata.partition_graphs))

      self.assertEqual(num_iter, r)

      u_val_final = sess.run(u)
      self.assertAllClose(u_init_val + num_iter * v_init_val, u_val_final)

      # Verify dump files
      self.assertTrue(os.path.isdir(self._dump_root))

      u_glob_out = glob.glob(os.path.join(self._dump_root, "*", u_namespace))
      v_glob_out = glob.glob(os.path.join(
          self._dump_root, "*", v_namespace, "v"))
      self.assertTrue(os.path.isdir(u_glob_out[0]))
      self.assertTrue(os.path.isdir(v_glob_out[0]))

      dump = debug_data.DebugDumpDir(
          self._dump_root, partition_graphs=run_metadata.partition_graphs)

      # Expected dumped tensors: u, v/read, 10 iterations of while/Identity,
      # and 10 iterations of while/Add/y.
      self.assertEqual(1 + 1 + num_iter + num_iter, dump.size)

      # Verify tensor values.
      self.assertAllClose([u_init_val],
                          dump.get_tensors(u_name, 0, "DebugIdentity"))
      self.assertAllClose([v_init_val],
                          dump.get_tensors("%s/read" % v_name, 0,
                                           "DebugIdentity"))

      # The loop counter value is dumped once per iteration: 0, 1, ..., 9.
      while_id_tensors = dump.get_tensors("while/Identity", 0, "DebugIdentity")
      self.assertEqual(10, len(while_id_tensors))
      for k in xrange(len(while_id_tensors)):
        self.assertAllClose(np.array(k), while_id_tensors[k])

      # Verify ascending timestamps from the while loops.
      while_id_rel_timestamps = dump.get_rel_timestamps("while/Identity", 0,
                                                        "DebugIdentity")
      while_id_dump_sizes_bytes = dump.get_dump_sizes_bytes("while/Identity", 0,
                                                            "DebugIdentity")
      self.assertEqual(10, len(while_id_rel_timestamps))
      prev_rel_time = 0
      prev_dump_size_bytes = while_id_dump_sizes_bytes[0]
      for rel_time, dump_size_bytes in zip(while_id_rel_timestamps,
                                           while_id_dump_sizes_bytes):
        self.assertGreaterEqual(rel_time, prev_rel_time)
        self.assertEqual(dump_size_bytes, prev_dump_size_bytes)
        prev_rel_time = rel_time
        prev_dump_size_bytes = dump_size_bytes

      # Test querying debug watch keys from node name.
      watch_keys = dump.debug_watch_keys("while/Identity")
      self.assertEqual(["while/Identity:0:DebugIdentity"], watch_keys)

      # Test querying debug datum instances from debug watch key.
      self.assertEqual(10, len(dump.watch_key_to_data(watch_keys[0])))
      self.assertEqual([], dump.watch_key_to_data("foo"))
  def testDebugWhileLoopWatchingWholeGraphWorks(self):
    """Whole-graph watching captures Enter and every NextIteration value."""
    with session.Session() as sess:
      loop_body = lambda i: math_ops.add(i, 2)
      loop_cond = lambda i: math_ops.less(i, 16)

      i = constant_op.constant(10, name="i")
      loop = control_flow_ops.while_loop(loop_cond, loop_body, [i])

      loop_result, dump = self._debug_run_and_get_dump(sess, loop)
      self.assertEqual(16, loop_result)

      self.assertEqual(
          [[10]], dump.get_tensors("while/Enter", 0, "DebugIdentity"))
      # One dumped value per iteration: 12, 14, 16.
      self.assertEqual(
          [[12], [14], [16]],
          dump.get_tensors("while/NextIteration", 0, "DebugIdentity"))
  def testDebugTrainingDynamicRNNWorks(self):
    """Watching a dynamic_rnn training step (with blacklists) dumps cleanly."""
    with session.Session() as sess:
      input_size = 3
      state_size = 2
      time_steps = 4
      batch_size = 2

      input_values = np.random.randn(time_steps, batch_size, input_size)
      sequence_length = np.random.randint(0, time_steps, size=batch_size)
      concat_inputs = array_ops.placeholder(
          dtypes.float32, shape=(time_steps, batch_size, input_size))

      outputs_dynamic, _ = rnn.dynamic_rnn(
          _RNNCellForTest(input_size, state_size),
          inputs=concat_inputs,
          sequence_length=sequence_length,
          time_major=True,
          dtype=dtypes.float32)
      toy_loss = math_ops.reduce_sum(outputs_dynamic * outputs_dynamic)
      train_op = gradient_descent.GradientDescentOptimizer(
          learning_rate=0.1).minimize(toy_loss, name="train_op")

      sess.run(variables.global_variables_initializer())

      run_options = config_pb2.RunOptions(output_partition_graphs=True)
      debug_utils.watch_graph_with_blacklists(
          run_options,
          sess.graph,
          node_name_regex_blacklist="(.*rnn/while/.*|.*TensorArray.*)",
          debug_urls=self._debug_urls())
      # b/36870549: Nodes with these name patterns need to be excluded from
      # tfdbg in order to prevent MSAN warnings of uninitialized Tensors
      # under both file:// and grpc:// debug URL schemes.

      run_metadata = config_pb2.RunMetadata()
      sess.run(train_op, feed_dict={concat_inputs: input_values},
               options=run_options, run_metadata=run_metadata)

      # Constructing the DebugDumpDir validates the dump; no further asserts.
      debug_data.DebugDumpDir(
          self._dump_root, partition_graphs=run_metadata.partition_graphs)
  def testDebugCondWatchingWholeGraphWorks(self):
    """Whole-graph watching captures the Merge output of tf.cond."""
    with session.Session() as sess:
      x = variables.VariableV1(10.0, name="x")
      y = variables.VariableV1(20.0, name="y")
      cond = control_flow_ops.cond(
          x > y, lambda: math_ops.add(x, 1), lambda: math_ops.add(y, 1))

      sess.run(variables.global_variables_initializer())

      cond_result, dump = self._debug_run_and_get_dump(sess, cond)
      # x <= y, so the false branch (y + 1 == 21) is taken.
      self.assertEqual(21, cond_result)

      self.assertAllClose(
          [21.0], dump.get_tensors("cond/Merge", 0, "DebugIdentity"))
  def testFindNodesWithBadTensorValues(self):
    """dump.find() locates tensors containing inf or nan, in graph order."""
    with session.Session() as sess:
      u_name = "testFindNodesWithBadTensorValues/u"
      v_name = "testFindNodesWithBadTensorValues/v"
      w_name = "testFindNodesWithBadTensorValues/w"
      x_name = "testFindNodesWithBadTensorValues/x"
      y_name = "testFindNodesWithBadTensorValues/y"
      z_name = "testFindNodesWithBadTensorValues/z"

      u_init = constant_op.constant([2.0, 4.0])
      u = variables.VariableV1(u_init, name=u_name)
      v_init = constant_op.constant([2.0, 1.0])
      v = variables.VariableV1(v_init, name=v_name)

      # Expected output: [0.0, 3.0]
      w = math_ops.subtract(u, v, name=w_name)

      # Expected output: [inf, 1.3333]
      x = math_ops.div(u, w, name=x_name)

      # Expected output: [nan, 4.0]
      y = math_ops.multiply(w, x, name=y_name)

      z = math_ops.multiply(y, y, name=z_name)

      u.initializer.run()
      v.initializer.run()

      _, dump = self._debug_run_and_get_dump(
          sess, z,
          expected_partition_graph_count=self._expected_partition_graph_count)

      def has_bad_value(_, tensor):
        return np.any(np.isnan(tensor)) or np.any(np.isinf(tensor))

      # Find all "offending tensors".
      bad_data = dump.find(has_bad_value)

      # Verify that the nodes with bad values are caught through running find
      # on the debug dump.
      self.assertEqual(3, len(bad_data))
      self.assertEqual(x_name, bad_data[0].node_name)
      self.assertEqual(y_name, bad_data[1].node_name)
      self.assertEqual(z_name, bad_data[2].node_name)

      # Test first_n kwarg of find(): Find the first offending tensor.
      first_bad_datum = dump.find(has_bad_value, first_n=1)

      self.assertEqual(1, len(first_bad_datum))
      self.assertEqual(x_name, first_bad_datum[0].node_name)
  def testFindInfOrNanWithOpNameExclusion(self):
    """dump.find() honors exclude_node_names when locating inf/nan tensors."""
    with session.Session() as sess:
      u_name = "testFindInfOrNanWithOpNameExclusion/u"
      v_name = "testFindInfOrNanWithOpNameExclusion/v"
      w_name = "testFindInfOrNanWithOpNameExclusion/w"
      x_name = "testFindInfOrNanWithOpNameExclusion/x"
      y_name = "testFindInfOrNanWithOpNameExclusion/y"
      z_name = "testFindInfOrNanWithOpNameExclusion/z"

      u_init = constant_op.constant([2.0, 4.0])
      u = variables.VariableV1(u_init, name=u_name)
      v_init = constant_op.constant([2.0, 1.0])
      v = variables.VariableV1(v_init, name=v_name)

      # Expected output: [0.0, 3.0]
      w = math_ops.subtract(u, v, name=w_name)

      # Expected output: [inf, 1.3333]
      x = math_ops.div(u, w, name=x_name)

      # Expected output: [nan, 4.0]
      y = math_ops.multiply(w, x, name=y_name)

      z = math_ops.multiply(y, y, name=z_name)

      u.initializer.run()
      v.initializer.run()

      _, dump = self._debug_run_and_get_dump(
          sess, z,
          expected_partition_graph_count=self._expected_partition_graph_count)

      # Find all "offending tensors".
      bad_data = dump.find(debug_data.has_inf_or_nan,
                           exclude_node_names=".*/x$")

      # Verify that the nodes with bad values are caught through running find
      # on the debug dump.
      self.assertEqual(2, len(bad_data))
      # Assert that the node `x` should have been excluded.
      self.assertEqual(y_name, bad_data[0].node_name)
      self.assertEqual(z_name, bad_data[1].node_name)

      first_bad_datum = dump.find(
          debug_data.has_inf_or_nan, first_n=1, exclude_node_names=".*/x$")

      self.assertEqual(1, len(first_bad_datum))
      self.assertEqual(y_name, first_bad_datum[0].node_name)
def _session_run_for_graph_structure_lookup(self):
with session.Session(config=no_rewrite_session_config()) as sess:
u_name = "testDumpGraphStructureLookup/u"
v_name = "testDumpGraphStructureLookup/v"
w_name = "testDumpGraphStructureLookup/w"
u_init = constant_op.constant([2.0, 4.0])
u = variables.VariableV1(u_init, name=u_name)
v = math_ops.add(u, u, name=v_name)
w = math_ops.add(v, v, name=w_name)
u.initializer.run()
_, dump = self._debug_run_and_get_dump(
sess, w,
expected_partition_graph_count=self._expected_partition_graph_count)
return u_name, v_name, w_name, dump
def testGraphStructureLookupGivesDevicesAndNodesInfo(self):
u_name, _, _, dump = self._session_run_for_graph_structure_lookup()
# Test num_devices().
self.assertEqual(self._expected_num_devices, len(dump.devices()))
# Test node_device().
self.assertEqual(self._main_device, dump.node_device(u_name))
with self.assertRaisesRegexp(ValueError,
"does not exist in partition graphs"):
dump.node_device(u_name + "foo")
# Test node_exists().
self.assertTrue(dump.node_exists(u_name))
self.assertTrue(dump.node_exists(u_name + "/read"))
self.assertFalse(dump.node_exists(u_name + "/read" + "/foo"))
def testGraphStructureLookupGivesNodesAndAttributes(self):
u_name, _, _, dump = self._session_run_for_graph_structure_lookup()
u_read_name = u_name + "/read"
# Test node name list lookup of the DebugDumpDir object.
if test_util.gpu_device_name():
node_names = dump.nodes(
device_name="/job:localhost/replica:0/task:0/device:GPU:0")
else:
node_names = dump.nodes()
self.assertTrue(u_name in node_names)
self.assertTrue(u_read_name in node_names)
# Test querying node attributes.
u_attr = dump.node_attributes(u_name)
self.assertEqual(dtypes.float32, u_attr["dtype"].type)
self.assertEqual(1, len(u_attr["shape"].shape.dim))
self.assertEqual(2, u_attr["shape"].shape.dim[0].size)
with self.assertRaisesRegexp(
ValueError, r"None of the .* device\(s\) has a node named "):
dump.node_attributes("foo")
def testGraphStructureLookupGivesDebugWatchKeys(self):
u_name, v_name, w_name, dump = (
self._session_run_for_graph_structure_lookup())
# Test querying the debug watch keys with node names.
self.assertEqual(["%s:0:DebugIdentity" % u_name],
dump.debug_watch_keys(u_name))
self.assertEqual(["%s:0:DebugIdentity" % v_name],
dump.debug_watch_keys(v_name))
self.assertEqual(["%s:0:DebugIdentity" % w_name],
dump.debug_watch_keys(w_name))
self.assertEqual([], dump.debug_watch_keys("foo"))
# Test querying debug datum instances from debug watch.
u_data = dump.watch_key_to_data(dump.debug_watch_keys(u_name)[0])
self.assertEqual(1, len(u_data))
self.assertEqual(u_name, u_data[0].node_name)
self.assertEqual(0, u_data[0].output_slot)
self.assertEqual("DebugIdentity", u_data[0].debug_op)
self.assertGreaterEqual(u_data[0].timestamp, 0)
self.assertEqual([], dump.watch_key_to_data("foo"))
  def testGraphStructureLookupGivesNodeInputsAndRecipients(self):
    """Input/recipient/transitive-input lookups reflect the u -> v -> w graph."""
    u_name, v_name, w_name, dump = (
        self._session_run_for_graph_structure_lookup())
    u_read_name = u_name + "/read"
    # Test the inputs lookup of the DebugDumpDir object.
    self.assertEqual([], dump.node_inputs(u_name))
    self.assertEqual([u_name], dump.node_inputs(u_read_name))
    # v = u + u and w = v + v, so each consumes its input twice.
    self.assertEqual([u_read_name] * 2, dump.node_inputs(v_name))
    self.assertEqual([v_name] * 2, dump.node_inputs(w_name))
    # None of the nodes has control inputs.
    self.assertEqual([], dump.node_inputs(u_name, is_control=True))
    self.assertEqual([], dump.node_inputs(u_read_name, is_control=True))
    self.assertEqual([], dump.node_inputs(v_name, is_control=True))
    self.assertEqual([], dump.node_inputs(w_name, is_control=True))
    # Test the outputs recipient lookup of the DebugDumpDir object.
    self.assertTrue(u_read_name in dump.node_recipients(u_name))
    self.assertEqual(2, dump.node_recipients(u_read_name).count(v_name))
    self.assertEqual(2, dump.node_recipients(v_name).count(w_name))
    self.assertEqual([], dump.node_recipients(u_name, is_control=True))
    self.assertEqual([], dump.node_recipients(u_read_name, is_control=True))
    self.assertEqual([], dump.node_recipients(v_name, is_control=True))
    self.assertEqual([], dump.node_recipients(w_name, is_control=True))
    # Test errors raised on invalid node names.
    with self.assertRaisesRegexp(
        ValueError, r"None of the .* device\(s\) has a node named "):
      dump.node_inputs(u_name + "foo")
    with self.assertRaisesRegexp(
        ValueError, r"None of the .* device\(s\) has a node named "):
      dump.node_recipients(u_name + "foo")
    # Test transitive_inputs().
    self.assertEqual([], dump.transitive_inputs(u_name))
    self.assertEqual([u_name], dump.transitive_inputs(u_read_name))
    self.assertEqual(
        set([u_name, u_read_name]), set(dump.transitive_inputs(v_name)))
    self.assertEqual(
        set([u_name, u_read_name, v_name]), set(dump.transitive_inputs(w_name)))
    with self.assertRaisesRegexp(
        ValueError, r"None of the .* device\(s\) has a node named "):
      dump.transitive_inputs(u_name + "foo")
def testGraphStructureLookupWithoutPartitionGraphsDoesNotErrorOut(self):
_, _, _, dump = self._session_run_for_graph_structure_lookup()
# Now load the dump again, without the partition graphs, so we can check
# errors are not raised because the partition graphs are loaded from the
# dump directory.
dump = debug_data.DebugDumpDir(self._dump_root, validate=False)
self.assertTrue(dump.loaded_partition_graphs())
def testGraphPathFindingOnControlEdgesWorks(self):
with session.Session(config=no_rewrite_session_config()) as sess:
v1 = variables.VariableV1(1.0, name="v1")
v2 = variables.VariableV1(2.0, name="v2")
v3 = variables.VariableV1(3.0, name="v3")
a = math_ops.add(v1, v2, name="a")
with ops.control_dependencies([a]):
c = math_ops.subtract(v3, v3, name="c")
sess.run(variables.global_variables_initializer())
_, dump = self._debug_run_and_get_dump(sess, c)
self.assertEqual(["v1", "v1/read", "a", "c"],
dump.find_some_path("v1", "c"))
self.assertIsNone(dump.find_some_path("v1", "c", include_control=False))
def testGraphPathFindingReverseRefEdgeWorks(self):
with session.Session(config=no_rewrite_session_config()) as sess:
v = variables.VariableV1(10.0, name="v")
delta = variables.VariableV1(1.0, name="delta")
inc_v = state_ops.assign_add(v, delta, name="inc_v")
sess.run(variables.global_variables_initializer())
_, dump = self._debug_run_and_get_dump(sess, inc_v)
self.assertEqual(
["delta", "delta/read", "inc_v", "v"],
dump.find_some_path("delta", "v", include_reversed_ref=True))
self.assertIsNone(dump.find_some_path("delta", "v"))
  def testCausalityCheckOnDumpsDetectsWrongTemporalOrder(self):
    """Validation flags dump files whose timestamps violate graph causality."""
    with session.Session(config=no_rewrite_session_config()) as sess:
      u_name = "testDumpCausalityCheck/u"
      v_name = "testDumpCausalityCheck/v"
      w_name = "testDumpCausalityCheck/w"
      u_init = constant_op.constant([2.0, 4.0])
      u = variables.VariableV1(u_init, name=u_name)
      v = math_ops.add(u, u, name=v_name)
      w = math_ops.add(v, v, name=w_name)
      u.initializer.run()
      run_options = config_pb2.RunOptions(output_partition_graphs=True)
      debug_utils.watch_graph(
          run_options,
          sess.graph,
          debug_ops=["DebugIdentity"],
          debug_urls=self._debug_urls())
      run_metadata = config_pb2.RunMetadata()
      sess.run(w, options=run_options, run_metadata=run_metadata)
      self.assertEqual(self._expected_partition_graph_count,
                       len(run_metadata.partition_graphs))
      # First, loading the original dump without supplying the
      # partition_graphs should not cause a LookupError, validation occurs
      # only with partition_graphs loaded.
      debug_data.DebugDumpDir(self._dump_root)
      # Now, loading the original dump with partition graphs supplied should
      # succeed. The validation should pass quietly.
      dump = debug_data.DebugDumpDir(
          self._dump_root, partition_graphs=run_metadata.partition_graphs)
      # Get the dump file names and compute their timestamps.
      # The timestamp is encoded as the suffix after the final "_" in the
      # dump file name.
      self.assertEqual(
          1, len(dump.get_tensor_file_paths(v_name, 0, "DebugIdentity")))
      v_file_path = dump.get_tensor_file_paths(v_name, 0, "DebugIdentity")[0]
      self.assertEqual(
          1, len(dump.get_tensor_file_paths(w_name, 0, "DebugIdentity")))
      w_file_path = dump.get_tensor_file_paths(w_name, 0, "DebugIdentity")[0]
      v_timestamp = int(v_file_path[v_file_path.rindex("_") + 1:])
      w_timestamp = int(w_file_path[w_file_path.rindex("_") + 1:])
      # Swap and slightly shift the time stamps of the last two dumped tensors,
      # to simulate "causality violation", which can happen if the dump
      # directory contains incomplete data and/or mixes data from different
      # Session.run() calls.
      v_file_path_1 = v_file_path[:v_file_path.rindex(
          "_")] + "_%d" % w_timestamp
      w_file_path_1 = w_file_path[:w_file_path.rindex("_")] + "_%d" % (
          v_timestamp - 1)
      os.rename(v_file_path, v_file_path_1)
      os.rename(w_file_path, w_file_path_1)
      # Load the dump directory again. Now a ValueError is expected to be
      # raised due to the timestamp swap.
      with self.assertRaisesRegexp(ValueError, "Causality violated"):
        dump = debug_data.DebugDumpDir(
            self._dump_root, partition_graphs=run_metadata.partition_graphs)
      # Loading the dump directory with kwarg "validate" set explicitly to
      # False should get rid of the error.
      dump = debug_data.DebugDumpDir(
          self._dump_root,
          partition_graphs=run_metadata.partition_graphs,
          validate=False)
      # Next, set the two times stamps to be the same, which should be fine.
      v_file_path_2 = v_file_path[:v_file_path.rindex(
          "_")] + "_%d" % w_timestamp
      w_file_path_2 = w_file_path[:w_file_path.rindex(
          "_")] + "_%d" % w_timestamp
      os.rename(v_file_path_1, v_file_path_2)
      os.rename(w_file_path_1, w_file_path_2)
      debug_data.DebugDumpDir(
          self._dump_root, partition_graphs=run_metadata.partition_graphs)
  def testWatchingOnlyOneOfTwoOutputSlotsDoesNotLeadToCausalityFailure(self):
    """Watching only a subset of a node's output slots passes validation."""
    with session.Session() as sess:
      x_name = "oneOfTwoSlots/x"
      u_name = "oneOfTwoSlots/u"
      v_name = "oneOfTwoSlots/v"
      w_name = "oneOfTwoSlots/w"
      y_name = "oneOfTwoSlots/y"
      x = variables.VariableV1([1, 3, 3, 7], dtype=dtypes.int32, name=x_name)
      sess.run(x.initializer)
      # unique_with_counts gives node u multiple output slots.
      unique_x, indices, _ = array_ops.unique_with_counts(x, name=u_name)
      v = math_ops.add(unique_x, unique_x, name=v_name)
      w = math_ops.add(indices, indices, name=w_name)
      y = math_ops.add(w, w, name=y_name)
      run_options = config_pb2.RunOptions(output_partition_graphs=True)
      # Watch only the first output slot of u, even though it has two output
      # slots.
      debug_utils.add_debug_tensor_watch(
          run_options, u_name, 0, debug_urls=self._debug_urls())
      debug_utils.add_debug_tensor_watch(
          run_options, w_name, 0, debug_urls=self._debug_urls())
      debug_utils.add_debug_tensor_watch(
          run_options, y_name, 0, debug_urls=self._debug_urls())
      run_metadata = config_pb2.RunMetadata()
      sess.run([v, y], options=run_options, run_metadata=run_metadata)
      # validate=True must not raise despite the partially-watched node u.
      dump = debug_data.DebugDumpDir(
          self._dump_root,
          partition_graphs=run_metadata.partition_graphs,
          validate=True)
      # unique of [1, 3, 3, 7] is [1, 3, 7].
      self.assertAllClose([1, 3, 7],
                          dump.get_tensors(u_name, 0, "DebugIdentity")[0])
def testOutputSlotWithoutOutgoingEdgeCanBeWatched(self):
"""Test watching output slots not attached to any outgoing edges."""
with session.Session(config=no_rewrite_session_config()) as sess:
u_init_val = np.array([[5.0, 3.0], [-1.0, 0.0]])
u = constant_op.constant(u_init_val, shape=[2, 2], name="u")
# Create a control edge from a node with an output: From u to z.
# Node u will get executed only because of the control edge. The output
# tensor u:0 is not attached to any outgoing edge in the graph. This test
# checks that the debugger can watch such a tensor.
with ops.control_dependencies([u]):
z = control_flow_ops.no_op(name="z")
_, dump = self._debug_run_and_get_dump(sess, z)
# Assert that the DebugIdentity watch on u works properly.
self.assertEqual(1, len(dump.dumped_tensor_data))
datum = dump.dumped_tensor_data[0]
self.assertEqual("u", datum.node_name)
self.assertEqual(0, datum.output_slot)
self.assertEqual("DebugIdentity", datum.debug_op)
self.assertAllClose([[5.0, 3.0], [-1.0, 0.0]], datum.get_tensor())
  def testWatchingVariableUpdateOpsSeesUpdatedValues(self):
    """Watch output slots on Variable-updating ops, with no emitted edges."""
    with session.Session(config=no_rewrite_session_config()) as sess:
      u_init = constant_op.constant(10.0)
      u = variables.VariableV1(u_init, name="gdo/u")
      v_init = constant_op.constant(20.0)
      v = variables.VariableV1(v_init, name="gdo/v")
      w = math_ops.multiply(u, v, name="gdo/w")
      # gdo stands for GradientDescentOptimizer.
      train_op = gradient_descent.GradientDescentOptimizer(
          learning_rate=0.1).minimize(
              w, name="gdo/train")
      u.initializer.run()
      v.initializer.run()
      _, dump = self._debug_run_and_get_dump(sess, train_op)
      update_u_data = dump.watch_key_to_data(
          "gdo/train/update_gdo/u/ApplyGradientDescent:0:DebugIdentity")
      self.assertEqual(1, len(update_u_data))
      # Gradient descent on u: w = u * v, so dw / du = v.
      # Updated value of u should be:
      #   10.0 - learning_rate * v = 10.0 - 0.1 * 20.0 = 8.0
      self.assertAllClose(8.0, update_u_data[0].get_tensor())
      update_v_data = dump.watch_key_to_data(
          "gdo/train/update_gdo/v/ApplyGradientDescent:0:DebugIdentity")
      self.assertEqual(1, len(update_v_data))
      # Gradient descent on v: w = u * v, so dw / dv = u.
      # Updated value of v should be:
      #   20.0 - learning_rate * u = 20.0 - 0.1 * 10.0 = 19.0
      self.assertAllClose(19.0, update_v_data[0].get_tensor())
      # Verify that the Variables u and v are updated properly.
      self.assertAllClose(8.0, sess.run(u))
      self.assertAllClose(19.0, sess.run(v))
  def testAllowsWatchingUnconnectedOutputTensor(self):
    """Watch an output slot not emitting any edges.

    (Not even control edges from the node.)
    """
    with session.Session() as sess:
      x_init = constant_op.constant([2, 2, 3, 5, 5])
      x = variables.VariableV1(x_init, name="unconnected/x")
      # The UniqueOp (tf.unique) has two output slots. Use only slot 0 in the
      # graph. Let the debugger watch the unused slot 1.
      unique_x, _ = array_ops.unique(x, name="unconnected/unique_x")
      y = math_ops.add(unique_x, [0, 1, 2], name="unconnected/y")
      x.initializer.run()
      # Verify that only slot 0 of unique_x has recipients, while slot 1 of the
      # same node does not have recipients.
      unique_x_slot_0_recipients = []
      unique_x_slot_1_recipients = []
      for op in sess.graph.get_operations():
        for inp in op.inputs:
          if inp.name == "unconnected/unique_x:0":
            unique_x_slot_0_recipients.append(op.name)
          elif inp.name == "unconnected/unique_x:1":
            unique_x_slot_1_recipients.append(op.name)
      self.assertEqual(["unconnected/y"], unique_x_slot_0_recipients)
      self.assertEqual([], unique_x_slot_1_recipients)
      y_result, dump = self._debug_run_and_get_dump(sess, y)
      # unique of [2, 2, 3, 5, 5] is [2, 3, 5]; adding [0, 1, 2] gives
      # [2, 4, 7].
      self.assertAllClose([2, 4, 7], y_result)
      # Assert that the connected slot (slot 0) is dumped properly.
      unique_x_slot_0_dumps = dump.watch_key_to_data(
          "unconnected/unique_x:0:DebugIdentity")
      self.assertEqual(1, len(unique_x_slot_0_dumps))
      self.assertEqual("unconnected/unique_x",
                       unique_x_slot_0_dumps[0].node_name)
      self.assertEqual(0, unique_x_slot_0_dumps[0].output_slot)
      self.assertAllClose([2, 3, 5], unique_x_slot_0_dumps[0].get_tensor())
      # Assert that the unconnected slot (slot 1) is dumped properly.
      unique_x_slot_1_dumps = dump.watch_key_to_data(
          "unconnected/unique_x:1:DebugIdentity")
      self.assertEqual(1, len(unique_x_slot_1_dumps))
      self.assertEqual("unconnected/unique_x",
                       unique_x_slot_1_dumps[0].node_name)
      self.assertEqual(1, unique_x_slot_1_dumps[0].output_slot)
      # Slot 1 of unique holds, for each input element, its index into the
      # unique-value output.
      self.assertAllClose([0, 0, 1, 2, 2],
                          unique_x_slot_1_dumps[0].get_tensor())
  def testSuccessiveDebuggingRunsIncreasesCounters(self):
    """Test repeated Session.run() calls with debugger increments counters."""
    with session.Session() as sess:
      ph = array_ops.placeholder(dtypes.float32, name="successive/ph")
      x = array_ops.transpose(ph, name="mismatch/x")
      y = array_ops.squeeze(ph, name="mismatch/y")
      _, dump1 = self._debug_run_and_get_dump(
          sess, x, feed_dict={ph: np.array([[7.0, 8.0]])}, global_step=1)
      self.assertEqual(1, dump1.core_metadata.global_step)
      self.assertGreaterEqual(dump1.core_metadata.session_run_index, 0)
      self.assertEqual(0, dump1.core_metadata.executor_step_index)
      self.assertEqual([ph.name], dump1.core_metadata.input_names)
      self.assertEqual([x.name], dump1.core_metadata.output_names)
      self.assertEqual([], dump1.core_metadata.target_nodes)
      shutil.rmtree(self._dump_root)
      # Calling run() with the same feed, same output and same debug watch
      # options should increment both session_run_index and
      # executor_step_index.
      _, dump2 = self._debug_run_and_get_dump(
          sess, x, feed_dict={ph: np.array([[7.0, 8.0]])}, global_step=2)
      self.assertEqual(2, dump2.core_metadata.global_step)
      self.assertEqual(dump1.core_metadata.session_run_index + 1,
                       dump2.core_metadata.session_run_index)
      self.assertEqual(dump1.core_metadata.executor_step_index + 1,
                       dump2.core_metadata.executor_step_index)
      self.assertEqual([ph.name], dump2.core_metadata.input_names)
      self.assertEqual([x.name], dump2.core_metadata.output_names)
      self.assertEqual([], dump2.core_metadata.target_nodes)
      shutil.rmtree(self._dump_root)
      # NOTE(review): the run_options built on the next two statements appear
      # unused — the subsequent _debug_run_and_get_dump() call is not passed
      # them. Presumably vestigial; confirm before removing.
      run_options = config_pb2.RunOptions(output_partition_graphs=True)
      debug_utils.watch_graph(
          run_options, sess.graph, debug_urls=self._debug_urls(), global_step=3)
      # Calling run() with a different output should increment
      # session_run_index, but not executor_step_index.
      _, dump3 = self._debug_run_and_get_dump(
          sess, y, feed_dict={ph: np.array([[7.0, 8.0]])}, global_step=3)
      self.assertEqual(3, dump3.core_metadata.global_step)
      self.assertEqual(dump2.core_metadata.session_run_index + 1,
                       dump3.core_metadata.session_run_index)
      self.assertEqual(0, dump3.core_metadata.executor_step_index)
      self.assertEqual([ph.name], dump3.core_metadata.input_names)
      self.assertEqual([y.name], dump3.core_metadata.output_names)
      self.assertEqual([], dump3.core_metadata.target_nodes)
  def testDebuggingDuringOpError(self):
    """Test the debug tensor dumping when error occurs in graph runtime."""
    with session.Session() as sess:
      ph = array_ops.placeholder(dtypes.float32, name="mismatch/ph")
      x = array_ops.transpose(ph, name="mismatch/x")
      m = constant_op.constant(
          np.array(
              [[1.0, 2.0]], dtype=np.float32), name="mismatch/m")
      y = math_ops.matmul(m, x, name="mismatch/y")
      run_options = config_pb2.RunOptions(output_partition_graphs=True)
      debug_utils.watch_graph(
          run_options,
          sess.graph,
          debug_ops=["DebugIdentity"],
          debug_urls=self._debug_urls())
      # The 2x1 feed transposes to 1x2, which cannot be matmul'ed with the
      # 1x2 constant m, so the run errors out at graph-execution time.
      with self.assertRaises(errors.OpError):
        sess.run(y,
                 options=run_options,
                 feed_dict={ph: np.array([[-3.0], [0.0]])})
      dump = debug_data.DebugDumpDir(self._dump_root)
      self.assertGreaterEqual(dump.core_metadata.session_run_index, 0)
      self.assertGreaterEqual(dump.core_metadata.executor_step_index, 0)
      self.assertEqual([ph.name], dump.core_metadata.input_names)
      self.assertEqual([y.name], dump.core_metadata.output_names)
      self.assertEqual([], dump.core_metadata.target_nodes)
      # Despite the fact that the run() call errored out and partition_graphs
      # are not available via run_metadata, the partition graphs should still
      # have been loaded from the dump directory.
      self.assertTrue(dump.loaded_partition_graphs())
      m_dumps = dump.watch_key_to_data("mismatch/m:0:DebugIdentity")
      self.assertEqual(1, len(m_dumps))
      self.assertAllClose(np.array([[1.0, 2.0]]), m_dumps[0].get_tensor())
      x_dumps = dump.watch_key_to_data("mismatch/x:0:DebugIdentity")
      self.assertEqual(1, len(x_dumps))
      self.assertAllClose(np.array([[-3.0, 0.0]]), x_dumps[0].get_tensor())
  def testDebugNumericSummaryOnInitializedTensorGivesCorrectResult(self):
    """DebugNumericSummary reports correct stats for an initialized tensor."""
    with session.Session(config=no_rewrite_session_config()) as sess:
      # 18 elements: 4 NaN, 2 -inf, 2 negative, 3 zero, 2 positive, 5 +inf.
      a = variables.VariableV1(
          [
              np.nan, np.nan, 0.0, 0.0, 0.0, -1.0, -3.0, 3.0, 7.0, -np.inf,
              -np.inf, np.inf, np.inf, np.inf, np.inf, np.inf, np.nan, np.nan
          ],
          dtype=np.float32,
          name="numeric_summary/a")
      b = variables.VariableV1(
          [0.0] * 18, dtype=np.float32, name="numeric_summary/b")
      c = math_ops.add(a, b, name="numeric_summary/c")
      sess.run(variables.global_variables_initializer())
      _, dump = self._debug_run_and_get_dump(
          sess, c, debug_ops=["DebugNumericSummary"])
      self.assertTrue(dump.loaded_partition_graphs())
      # Expected summary: initialized flag, total/NaN/-inf/negative/zero/
      # positive/+inf counts, then min, max, mean, variance of the finite
      # values, then dtype enum, ndims and dimension size(s).
      self.assertAllClose([[
          1.0, 18.0, 4.0, 2.0, 2.0, 3.0, 2.0, 5.0, -3.0, 7.0, 0.85714286,
          8.97959184, 1.0, 1.0, 18.0
      ]], dump.get_tensors("numeric_summary/a/read", 0, "DebugNumericSummary"))
  def testDebugNumericSummaryOnUninitializedTensorGivesCorrectResult(self):
    """DebugNumericSummary on an uninitialized variable reports it as such."""
    with session.Session() as sess:
      a = variables.VariableV1(
          [42], dtype=np.float32, name="numeric_summary_uninit/a")
      # Run the variable's own initializer, so the watched value is still
      # uninitialized at the time it is observed.
      _, dump = self._debug_run_and_get_dump(
          sess, a.initializer, debug_ops=["DebugNumericSummary"])
      self.assertTrue(dump.loaded_partition_graphs())
      # DebugNumericSummary output should reflect the uninitialized state of
      # the watched tensor: all leading counts (indices 0-7) are zero.
      numeric_summary = dump.get_tensors("numeric_summary_uninit/a", 0,
                                         "DebugNumericSummary")[0]
      self.assertAllClose([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                          numeric_summary[0:8])
      # Check dtype (index 12), ndims (index 13) and dimension sizes (index
      # 14+).
      self.assertAllClose([1.0, 1.0, 1.0], numeric_summary[12:])
      # min/max/mean/variance are degenerate for an empty value set:
      # +inf min (index 8), -inf max (9), NaN mean (10) and variance (11).
      self.assertTrue(np.isinf(numeric_summary[8]))
      self.assertGreater(numeric_summary[8], 0.0)
      self.assertTrue(np.isinf(numeric_summary[9]))
      self.assertLess(numeric_summary[9], 0.0)
      self.assertTrue(np.isnan(numeric_summary[10]))
      self.assertTrue(np.isnan(numeric_summary[11]))
  def testDebugNumericSummaryFailureIsToleratedWhenOrdered(self):
    """tolerate_debug_op_creation_failures skips un-debuggable tensors."""
    with session.Session() as sess:
      a = variables.VariableV1("1", name="a")
      b = variables.VariableV1("3", name="b")
      c = variables.VariableV1("2", name="c")
      d = math_ops.add(a, b, name="d")
      e = math_ops.add(d, c, name="e")
      n = parsing_ops.string_to_number(e, name="n")
      m = math_ops.add(n, n, name="m")
      sess.run(variables.global_variables_initializer())
      # Using DebugNumericSummary on sess.run(m) with the default
      # tolerate_debug_op_creation_failures=False should error out due to the
      # presence of string-dtype Tensors in the graph.
      run_metadata = config_pb2.RunMetadata()
      run_options = config_pb2.RunOptions(output_partition_graphs=True)
      debug_utils.watch_graph(
          run_options,
          sess.graph,
          debug_ops=["DebugNumericSummary"],
          debug_urls=self._debug_urls())
      with self.assertRaises(errors.FailedPreconditionError):
        sess.run(m, options=run_options, run_metadata=run_metadata)
      # Using tolerate_debug_op_creation_failures=True should get rid of the
      # error.
      m_result, dump = self._debug_run_and_get_dump(
          sess, m, debug_ops=["DebugNumericSummary"],
          tolerate_debug_op_creation_failures=True)
      # "1" + "3" + "2" -> "132"; parsed to a number and doubled: 264.
      self.assertEqual(264, m_result)
      # The integer-dtype Tensors in the graph should have been dumped
      # properly.
      self.assertIn("n:0:DebugNumericSummary", dump.debug_watch_keys("n"))
      self.assertIn("m:0:DebugNumericSummary", dump.debug_watch_keys("m"))
def testDebugNumericSummaryInvalidAttributesStringAreCaught(self):
with session.Session(config=no_rewrite_session_config()) as sess:
a = variables.VariableV1(10.0, name="a")
b = variables.VariableV1(0.0, name="b")
c = variables.VariableV1(0.0, name="c")
x = math_ops.divide(a, b, name="x")
y = math_ops.multiply(x, c, name="y")
sess.run(variables.global_variables_initializer())
run_metadata = config_pb2.RunMetadata()
run_options = config_pb2.RunOptions(output_partition_graphs=True)
debug_utils.watch_graph(
run_options,
sess.graph,
debug_ops=["DebugNumericSummary(foo=1.0)"],
debug_urls=self._debug_urls())
with self.assertRaisesRegexp(
errors.FailedPreconditionError,
r"1 attribute key\(s\) were not valid for debug node "
r"__dbg_.:0_0_DebugNumericSummary: foo"):
sess.run(y, options=run_options, run_metadata=run_metadata)
run_options = config_pb2.RunOptions(output_partition_graphs=True)
debug_utils.watch_graph(
run_options,
sess.graph,
debug_ops=["DebugNumericSummary(foo=1.0; bar=false)"],
debug_urls=self._debug_urls())
with self.assertRaisesRegexp(
errors.FailedPreconditionError,
r"2 attribute key\(s\) were not valid for debug node "
r"__dbg_.:0_0_DebugNumericSummary:"):
sess.run(y, options=run_options, run_metadata=run_metadata)
run_options = config_pb2.RunOptions(output_partition_graphs=True)
debug_utils.watch_graph(
run_options,
sess.graph,
debug_ops=["DebugNumericSummary(foo=1.0; mute_if_healthy=true)"],
debug_urls=self._debug_urls())
with self.assertRaisesRegexp(
errors.FailedPreconditionError,
r"1 attribute key\(s\) were not valid for debug node "
r"__dbg_.:0_0_DebugNumericSummary: foo"):
sess.run(y, options=run_options, run_metadata=run_metadata)
  def testDebugNumericSummaryMuteOnHealthyMutesOnlyHealthyTensorDumps(self):
    """mute_if_healthy=true dumps only tensors that contain inf or nan."""
    with session.Session(config=no_rewrite_session_config()) as sess:
      a = variables.VariableV1(10.0, name="a")
      b = variables.VariableV1(0.0, name="b")
      c = variables.VariableV1(0.0, name="c")
      x = math_ops.divide(a, b, name="x")
      y = math_ops.multiply(x, c, name="y")
      sess.run(variables.global_variables_initializer())
      # Here, validate=False is necessary to avoid causality check error.
      # TODO(cais): Maybe let DebugDumpDir constructor automatically ignore
      # debug ops with mute_if_healthy=false attribute during validation.
      _, dump = self._debug_run_and_get_dump(
          sess, y, debug_ops=["DebugNumericSummary(mute_if_healthy=true)"],
          validate=False)
      # Only the two unhealthy tensors (x: inf from 10/0; y: nan from inf*0)
      # are dumped.
      self.assertEqual(2, dump.size)
      self.assertAllClose([[
          1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, np.inf, -np.inf, np.nan,
          np.nan, 1.0, 0.0
      ]], dump.get_tensors("x", 0, "DebugNumericSummary"))
      self.assertAllClose([[
          1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, np.inf, -np.inf, np.nan,
          np.nan, 1.0, 0.0
      ]], dump.get_tensors("y", 0, "DebugNumericSummary"))
      # Another run with the default mute_if_healthy (false) value should
      # dump all the tensors.
      shutil.rmtree(self._dump_root)
      _, dump = self._debug_run_and_get_dump(
          sess, y, debug_ops=["DebugNumericSummary()"])
      self.assertEqual(8, dump.size)
  def testDebugNumericSummaryMuteOnHealthyAndCustomBoundsWork(self):
    """mute_if_healthy with a custom upper_bound flags out-of-range values."""
    with session.Session() as sess:
      a = variables.VariableV1([10.0, 10.0], name="a")
      b = variables.VariableV1([10.0, 2.0], name="b")
      x = math_ops.add(a, b, name="x")  # [20.0, 12.0]
      y = math_ops.divide(x, b, name="y")  # [2.0, 6.0]
      sess.run(variables.global_variables_initializer())
      # Here, validate=False is necessary to avoid causality check error.
      # TODO(cais): Maybe let DebugDumpDir constructor automatically ignore
      # debug ops with mute_if_healthy=false attribute during validation.
      _, dump = self._debug_run_and_get_dump(
          sess, y, debug_ops=[
              "DebugNumericSummary(mute_if_healthy=true; upper_bound=11.0)"],
          validate=False)
      # Only x ([20.0, 12.0]) exceeds upper_bound=11.0, so only x is dumped.
      self.assertEqual(1, dump.size)
      self.assertAllClose([[
          1.0, 2.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2.0, 12.0, 20.0, 16.0, 16.0, 1.0,
          1.0, 2.0]], dump.get_tensors("x", 0, "DebugNumericSummary"))
  # NOTE(review): method name has a typo ("DoesNoto" -> "DoesNot"); left
  # unchanged here to avoid altering the externally visible test name.
  def testDebugQueueOpsDoesNotoErrorOut(self):
    """Watching FIFOQueue ops yields inconvertible-proto dumps, no errors."""
    with session.Session() as sess:
      q = data_flow_ops.FIFOQueue(3, "float", name="fifo_queue")
      q_init = q.enqueue_many(([101.0, 202.0, 303.0],), name="enqueue_many")
      _, dump = self._debug_run_and_get_dump(sess, q_init)
      self.assertTrue(dump.loaded_partition_graphs())
      # The queue-handle tensor cannot be converted to a numpy array; the
      # dump represents it as an InconvertibleTensorProto instead.
      fifo_queue_tensor = dump.get_tensors("fifo_queue", 0, "DebugIdentity")[0]
      self.assertIsInstance(fifo_queue_tensor,
                            debug_data.InconvertibleTensorProto)
      self.assertTrue(fifo_queue_tensor.initialized)
      # The enqueued component values are dumped as ordinary tensors.
      self.assertAllClose(
          [101.0, 202.0, 303.0],
          dump.get_tensors("enqueue_many/component_0", 0, "DebugIdentity")[0])
def testLookUpNodePythonTracebackWorks(self):
with session.Session() as sess:
u_init = constant_op.constant(10.0)
u = variables.VariableV1(u_init, name="traceback/u")
v_init = constant_op.constant(20.0)
v = variables.VariableV1(v_init, name="traceback/v")
w = math_ops.multiply(u, v, name="traceback/w")
sess.run(variables.global_variables_initializer())
_, dump = self._debug_run_and_get_dump(sess, w)
# Prior to setting the Python graph, attempts to do traceback lookup
# should lead to exceptions.
with self.assertRaisesRegexp(
LookupError, "Python graph is not available for traceback lookup"):
dump.node_traceback("traceback/w")
dump.set_python_graph(sess.graph)
# After setting the Python graph, attempts to look up nonexistent nodes
# should lead to exceptions.
with self.assertRaisesRegexp(KeyError,
r"Cannot find node \"foo\" in Python graph"):
dump.node_traceback("foo")
# Lookup should work with node name input.
traceback = dump.node_traceback("traceback/w")
self.assertIsInstance(traceback, list)
self.assertGreater(len(traceback), 0)
for trace in traceback:
self.assertIsInstance(trace, tuple)
# Lookup should also work with tensor name input.
traceback = dump.node_traceback("traceback/w:0")
self.assertIsInstance(traceback, list)
self.assertGreater(len(traceback), 0)
for trace in traceback:
self.assertIsInstance(trace, tuple)
class DebugConcurrentRunCallsTest(test_util.TensorFlowTestCase):
  """Test for debugging concurrent Session.run() calls.

  Subclasses supply per-run debug URLs via _get_concurrent_debug_urls().
  """

  def _get_concurrent_debug_urls(self):
    """Abstract method to generate debug URLs for concurrent debugged runs.

    Returns:
      A list with one debug-URL entry per concurrent run.

    Raises:
      NotImplementedError: Always, in this base class.
    """
    raise NotImplementedError(
        "_get_concurrent_debug_urls is not implemented in the base test class")

  def testDebugConcurrentVariableUpdates(self):
    """Concurrent debugged run() calls yield consistent dumps and metadata."""
    if test.is_gpu_available():
      self.skipTest("No testing concurrent runs on a single GPU.")

    with session.Session() as sess:
      v = variables.VariableV1(30.0, name="v")
      constants = []
      for i in xrange(self._num_concurrent_runs):
        constants.append(constant_op.constant(1.0, name="c%d" % i))
      incs = [
          state_ops.assign_add(
              v, c, use_locking=True, name=("inc%d" % i))
          for (i, c) in enumerate(constants)
      ]
      sess.run(v.initializer)

      concurrent_debug_urls = self._get_concurrent_debug_urls()

      def inc_job(index):
        # Each job repeatedly increments v, dumping to its own debug URL.
        run_options = config_pb2.RunOptions(output_partition_graphs=True)
        debug_utils.watch_graph(
            run_options, sess.graph, debug_urls=concurrent_debug_urls[index])
        for _ in xrange(100):
          sess.run(incs[index], options=run_options)

      inc_threads = []
      for index in xrange(self._num_concurrent_runs):
        inc_thread = threading.Thread(target=functools.partial(inc_job, index))
        inc_thread.start()
        inc_threads.append(inc_thread)
      for inc_thread in inc_threads:
        inc_thread.join()

      # Every increment must have landed (assign_add uses use_locking=True).
      self.assertAllClose(30.0 + 1.0 * self._num_concurrent_runs * 100,
                          sess.run(v))

      all_session_run_indices = []
      for index in xrange(self._num_concurrent_runs):
        dump = debug_data.DebugDumpDir(self._dump_roots[index])
        self.assertTrue(dump.loaded_partition_graphs())

        # Each of the 100 debugged run() calls dumped the value of v once.
        v_data = dump.get_tensors("v", 0, "DebugIdentity")
        self.assertEqual(100, len(v_data))

        # Examine all the core metadata files
        core_metadata_files = glob.glob(
            os.path.join(self._dump_roots[index], "_tfdbg_core*"))

        timestamps = []
        session_run_indices = []
        executor_step_indices = []
        for core_metadata_file in core_metadata_files:
          with open(core_metadata_file, "rb") as f:
            event = event_pb2.Event()
            event.ParseFromString(f.read())
            core_metadata = (
                debug_data.extract_core_metadata_from_event_proto(event))
            timestamps.append(event.wall_time)
            session_run_indices.append(core_metadata.session_run_index)
            executor_step_indices.append(core_metadata.executor_step_index)

        all_session_run_indices.extend(session_run_indices)

        # Assert that executor_step_index increases by one at a time, in
        # wall-time order. (Was assertEquals; the deprecated alias of
        # assertEqual used everywhere else in this file.)
        executor_step_indices = zip(timestamps, executor_step_indices)
        executor_step_indices = sorted(
            executor_step_indices, key=lambda x: x[0])
        for i in xrange(len(executor_step_indices) - 1):
          self.assertEqual(executor_step_indices[i][1] + 1,
                           executor_step_indices[i + 1][1])

        # Assert that session_run_index increase monotonically.
        session_run_indices = zip(timestamps, session_run_indices)
        session_run_indices = sorted(session_run_indices, key=lambda x: x[0])
        for i in xrange(len(session_run_indices) - 1):
          self.assertGreater(session_run_indices[i + 1][1],
                             session_run_indices[i][1])

      # Assert that the session_run_indices from the concurrent run() calls
      # are all unique.
      self.assertEqual(len(all_session_run_indices),
                       len(set(all_session_run_indices)))
# Run all test cases in this module when executed as a script.
if __name__ == "__main__":
  googletest.main()
| {
"content_hash": "a069721d101c5c9b0d92ebfc49a18951",
"timestamp": "",
"source": "github",
"line_count": 1557,
"max_line_length": 80,
"avg_line_length": 41.04303147077714,
"alnum_prop": 0.6384576865297947,
"repo_name": "alsrgv/tensorflow",
"id": "b438b6500ae2432307720d80b7d957c6bf24aabc",
"size": "64593",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tensorflow/python/debug/lib/session_debug_testlib.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "3568"
},
{
"name": "Batchfile",
"bytes": "15317"
},
{
"name": "C",
"bytes": "755360"
},
{
"name": "C#",
"bytes": "8446"
},
{
"name": "C++",
"bytes": "68001148"
},
{
"name": "CMake",
"bytes": "204596"
},
{
"name": "Dockerfile",
"bytes": "73602"
},
{
"name": "Go",
"bytes": "1627121"
},
{
"name": "HTML",
"bytes": "4680118"
},
{
"name": "Java",
"bytes": "842866"
},
{
"name": "Jupyter Notebook",
"bytes": "1665584"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "101157"
},
{
"name": "Objective-C",
"bytes": "104061"
},
{
"name": "Objective-C++",
"bytes": "175222"
},
{
"name": "PHP",
"bytes": "17570"
},
{
"name": "Pascal",
"bytes": "3239"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "Python",
"bytes": "48843099"
},
{
"name": "RobotFramework",
"bytes": "891"
},
{
"name": "Ruby",
"bytes": "4733"
},
{
"name": "Shell",
"bytes": "488241"
},
{
"name": "Smarty",
"bytes": "27495"
},
{
"name": "Swift",
"bytes": "56155"
},
{
"name": "TSQL",
"bytes": "921"
}
],
"symlink_target": ""
} |
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: add the ``userid`` column to ``SteamProfile``.

    Auto-generated by South's ``schemamigration`` command.  The ``models``
    attribute below is a frozen snapshot of the ORM state at generation
    time and is not meant to be edited by hand.
    """

    def forwards(self, orm):
        # Adding field 'SteamProfile.userid'
        db.add_column(u'profiles_steamprofile', 'userid',
                      self.gf('django.db.models.fields.CharField')(max_length=25, null=True, blank=True),
                      keep_default=False)

    def backwards(self, orm):
        # Deleting field 'SteamProfile.userid'
        db.delete_column(u'profiles_steamprofile', 'userid')

    # Frozen ORM state: {model label: {field name: (field class path, args, kwargs)}}.
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'profiles.battlefieldfourprofile': {
            'Meta': {'object_name': 'BattlefieldFourProfile'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
        },
        u'profiles.blizzardprofile': {
            'Meta': {'object_name': 'BlizzardProfile'},
            'email': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'realid': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
        },
        u'profiles.eveonlineprofile': {
            'Meta': {'object_name': 'EveOnlineProfile'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
        },
        u'profiles.leagueoflegendsprofile': {
            'Meta': {'object_name': 'LeagueOfLegendsProfile'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
        },
        u'profiles.minecraftprofile': {
            'Meta': {'object_name': 'MinecraftProfile'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
        },
        u'profiles.nintendoprofile': {
            'Meta': {'object_name': 'NintendoProfile'},
            'friendcode': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
        },
        u'profiles.playstationnetworkprofile': {
            'Meta': {'object_name': 'PlaystationNetworkProfile'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
        },
        u'profiles.somethingawfulprofile': {
            'Meta': {'object_name': 'SomethingAwfulProfile'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'postcount': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'regdate': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
            'userid': ('django.db.models.fields.TextField', [], {}),
            'username': ('django.db.models.fields.TextField', [], {})
        },
        u'profiles.steamprofile': {
            'Meta': {'object_name': 'SteamProfile'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'userid': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
        },
        u'profiles.userprofile': {
            'Meta': {'object_name': 'UserProfile'},
            'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'bf4': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['profiles.BattlefieldFourProfile']", 'null': 'True', 'blank': 'True'}),
            'blizzard': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['profiles.BlizzardProfile']", 'null': 'True', 'blank': 'True'}),
            'eveonline': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['profiles.EveOnlineProfile']", 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'leagueoflegends': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['profiles.LeagueOfLegendsProfile']", 'null': 'True', 'blank': 'True'}),
            'minecraft': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['profiles.MinecraftProfile']", 'null': 'True', 'blank': 'True'}),
            'nintendo': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['profiles.NintendoProfile']", 'null': 'True', 'blank': 'True'}),
            'psn': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['profiles.PlaystationNetworkProfile']", 'null': 'True', 'blank': 'True'}),
            'somethingawful': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['profiles.SomethingAwfulProfile']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
            'steam': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['profiles.SteamProfile']", 'null': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True'}),
            'verification_code': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'worldoftanks': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['profiles.WorldOfTanksProfile']", 'null': 'True', 'blank': 'True'}),
            'xbl': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['profiles.XboxLiveProfile']", 'null': 'True', 'blank': 'True'})
        },
        u'profiles.worldoftanksprofile': {
            'Meta': {'object_name': 'WorldOfTanksProfile'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
        },
        u'profiles.xboxliveprofile': {
            'Meta': {'object_name': 'XboxLiveProfile'},
            'gamertag': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        }
    }

    # South applies this migration's frozen models to the 'profiles' app.
    complete_apps = ['profiles']
"content_hash": "1ea8d4357557502c22aa915d45278b72",
"timestamp": "",
"source": "github",
"line_count": 142,
"max_line_length": 195,
"avg_line_length": 73.69014084507042,
"alnum_prop": 0.5560970948012233,
"repo_name": "Ell/goonauth",
"id": "098e52bdb246369cdc68958f8f968fcdf45e94dd",
"size": "10488",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "profiles/migrations/0010_auto__add_field_steamprofile_userid.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "18938"
},
{
"name": "Python",
"bytes": "189132"
}
],
"symlink_target": ""
} |
"""
WSGI config for atmsite project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "atmsite.settings")
application = get_wsgi_application()
| {
"content_hash": "eea435196b6ca391602f1c96484dbf76",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 78,
"avg_line_length": 24.4375,
"alnum_prop": 0.7698209718670077,
"repo_name": "andkononykhin/atmsite",
"id": "113d75de6403fd635221aa278d9da8d6635e3ee9",
"size": "391",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "atmsite/wsgi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1917"
},
{
"name": "HTML",
"bytes": "13330"
},
{
"name": "JavaScript",
"bytes": "45242"
},
{
"name": "Makefile",
"bytes": "1772"
},
{
"name": "Python",
"bytes": "22043"
}
],
"symlink_target": ""
} |
import os
import sys
import sphinx_bootstrap_theme
# import your package to obtain the version info to display on the docs website
sys.path.insert(0, os.path.abspath("../"))
import spaghetti

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [ #'sphinx_gallery.gen_gallery',
    "sphinx.ext.autodoc",
    "sphinx.ext.autosummary",
    "sphinx.ext.viewcode",
    "sphinxcontrib.bibtex",
    "sphinx.ext.mathjax",
    "sphinx.ext.doctest",
    "sphinx.ext.intersphinx",
    "numpydoc",
    "nbsphinx",
]

# Bibliography file used by sphinxcontrib-bibtex.
bibtex_bibfiles = ["_static/references.bib"]

# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"

# The master toctree document.
master_doc = "index"

# General information about the project.
project = "spaghetti"
copyright = "2017-, pysal developers"
author = "pysal developers"

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version.
# NOTE(review): both `version` and `release` are set to the full version
# string; the short X.Y form Sphinx normally uses for `version` is not used.
version = spaghetti.__version__
release = spaghetti.__version__

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = "en"

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", "tests/*"]

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
# html_theme = "alabaster"
html_theme = "bootstrap"
html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
html_title = "%s v%s Manual" % (project, version)

# (Optional) Logo of your package. Should be small enough to fit the navbar (ideally 24x24).
# Path should be relative to the ``_static`` files directory.
# html_logo = "_static/images/package_logo.jpg"

# (Optional) PySAL favicon
html_favicon = "_static/images/pysal_favicon.ico"

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
    # Navigation bar title. (Default: ``project`` value)
    "navbar_title": project,
    # Render the next and previous page links in navbar. (Default: true)
    "navbar_sidebarrel": False,
    # Render the current pages TOC in the navbar. (Default: true)
    #'navbar_pagenav': True,
    #'navbar_pagenav': False,
    # No sidebar
    "nosidebar": True,
    # Tab name for the current pages TOC. (Default: "Page")
    #'navbar_pagenav_name': "Page",
    # Global TOC depth for "site" navbar tab. (Default: 1)
    # Switching to -1 shows all levels.
    "globaltoc_depth": 2,
    # Include hidden TOCs in Site navbar?
    #
    # Note: If this is "false", you cannot have mixed ``:hidden:`` and
    # non-hidden ``toctree`` directives in the same page, or else the build
    # will break.
    #
    # Values: "true" (default) or "false"
    "globaltoc_includehidden": "true",
    # HTML navbar class (Default: "navbar") to attach to <div> element.
    # For black navbar, do "navbar navbar-inverse"
    #'navbar_class': "navbar navbar-inverse",
    # Fix navigation bar to top of page?
    # Values: "true" (default) or "false"
    "navbar_fixed_top": "true",
    # Location of link to source.
    # Options are "nav" (default), "footer" or anything else to exclude.
    "source_link_position": "footer",
    # Bootswatch (http://bootswatch.com/) theme.
    #
    # Options are nothing (default) or the name of a valid theme
    # such as "amelia" or "cosmo", "yeti", "flatly".
    "bootswatch_theme": "yeti",
    # Choose Bootstrap version.
    # Values: "3" (default) or "2" (in quotes)
    "bootstrap_version": "3",
    # Navigation bar menu
    "navbar_links": [
        ("Installation", "installation"),
        ("Tutorials", "tutorials"),
        ("API", "api"),
        ("References", "references"),
    ],
}

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]

# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# html_sidebars = {'sidebar': ['localtoc.html', 'sourcelink.html', 'searchbox.html']}

# -- Options for HTMLHelp output ------------------------------------------

# Output file base name for HTML help builder.
htmlhelp_basename = project + "doc"
# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (
        master_doc,
        "%s.tex" % project,
        "%s Documentation" % project,
        "pysal developers",
        "manual",
    )
]

# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, "%s" % project, "%s Documentation" % project, [author], 1)]

# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (
        master_doc,
        "%s" % project,
        "%s Documentation" % project,
        author,
        "%s" % project,
        "SPAtial GrapHs: nETworks, Topology, & Inference",
        "Miscellaneous",
    )
]
# -----------------------------------------------------------------------------
# Autosummary
# -----------------------------------------------------------------------------

# Generate the API documentation when building
autosummary_generate = True

# avoid showing members twice
numpydoc_show_class_members = False
numpydoc_use_plots = True
class_members_toctree = True
numpydoc_show_inherited_class_members = True
numpydoc_xref_param_type = True

# automatically document class members
autodoc_default_options = {"members": True, "undoc-members": True}

# display the source code for Plot directive
plot_include_source = True


def setup(app):
    """Sphinx extension hook: register the custom PySAL stylesheet."""
    app.add_css_file("pysal-styles.css")
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
"esda": ("https://pysal.org/esda/", None),
"geopandas": ("https://geopandas.org/en/latest/", None),
"libpysal": ("https://pysal.org/libpysal/", None),
"matplotlib": ("https://matplotlib.org/stable/", None),
"networkx": ("https://networkx.org/documentation/stable/", None),
"numpy": ("https://numpy.org/doc/stable/", None),
"pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None),
"pointpats": ("https://pointpats.readthedocs.io/en/latest/", None),
"python": ("https://docs.python.org/3.11/", None),
"scipy": ("https://docs.scipy.org/doc/scipy/", None),
}
# This is processed by Jinja2 and inserted before each notebook
nbsphinx_prolog = r"""
{% set docname = env.doc2path(env.docname, base=None) %}
.. only:: html
.. role:: raw-html(raw)
:format: html
.. nbinfo::
This page was generated from `{{ docname }}`__.
Interactive online version:
:raw-html:`<a href="https://mybinder.org/v2/gh/pysal/spaghetti/main?filepath={{ docname }}"><img alt="Binder badge" src="https://mybinder.org/badge_logo.svg" style="vertical-align:text-bottom"></a>`
__ https://github.com/pysal/spaghetti/blob/main/{{ docname }}
.. raw:: latex
\nbsphinxstartnotebook{\scriptsize\noindent\strut
\textcolor{gray}{The following section was generated from
\sphinxcode{\sphinxupquote{\strut {{ docname | escape_latex }}}} \dotfill}}
"""
# This is processed by Jinja2 and inserted after each notebook
nbsphinx_epilog = r"""
.. raw:: latex
\nbsphinxstopnotebook{\scriptsize\noindent\strut
\textcolor{gray}{\dotfill\ \sphinxcode{\sphinxupquote{\strut
{{ env.doc2path(env.docname, base='doc') | escape_latex }}}} ends here.}}
"""
# List of arguments to be passed to the kernel that executes the notebooks:
nbsphinx_execute_arguments = [
"--InlineBackend.figure_formats={'svg', 'pdf'}",
"--InlineBackend.rc={'figure.dpi': 96}",
]
mathjax3_config = {
"TeX": {"equationNumbers": {"autoNumber": "AMS", "useLabelIds": True}},
}
| {
"content_hash": "f9fcda08fed2a918cf6363fcabf840b7",
"timestamp": "",
"source": "github",
"line_count": 297,
"max_line_length": 206,
"avg_line_length": 33.08080808080808,
"alnum_prop": 0.645089058524173,
"repo_name": "pysal/spaghetti",
"id": "35a5f57e3df0367b8005ac8d461af65b1ab5fd99",
"size": "10431",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "docs/conf.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "27718660"
},
{
"name": "TeX",
"bytes": "7917"
}
],
"symlink_target": ""
} |
import _plotly_utils.basevalidators
class NameValidator(_plotly_utils.basevalidators.StringValidator):
    """String validator for the ``parcoords.name`` property."""

    def __init__(self, plotly_name="name", parent_name="parcoords", **kwargs):
        # Callers may override the edit type; the default mirrors the schema.
        edit_type = kwargs.pop("edit_type", "style")
        super(NameValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=edit_type,
            **kwargs,
        )
| {
"content_hash": "4bab0bd4931e5d316daef94ac8cdf8b0",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 78,
"avg_line_length": 35.54545454545455,
"alnum_prop": 0.6163682864450127,
"repo_name": "plotly/plotly.py",
"id": "014cc51124093bd58d1c7d8a23d449ab732ae298",
"size": "391",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/parcoords/_name.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "545"
},
{
"name": "JavaScript",
"bytes": "2074"
},
{
"name": "PostScript",
"bytes": "565328"
},
{
"name": "Python",
"bytes": "31506317"
},
{
"name": "TypeScript",
"bytes": "71337"
}
],
"symlink_target": ""
} |
'''
Created on Aug 30, 2015
@author: Amol
'''
from pydelicious import get_tagposts, get_userposts, get_urlposts
from time import sleep
from os.path import exists
from pickle import load, dump
from itertools import groupby
from recommendations import getRecommendations, topMatches
import random
#Note: This reco syste, doesn't work very well. For speed up, lookups are cached. In case a fresh lookup
#is needed, delete the .p files and then try running again. I personally havent had lock in find a positive correlation
#between two individuals
#Convenience method that wraps getting the user details and hand exceptions and retry
def get_userpost_details(user, num_retries = 3, sleep_time=10):
for i in range(num_retries):
try:
posts = get_userposts(user)
break
except:
print "Exception caught while retrying for user ", user, " retry number ", i
sleep(sleep_time)
return posts
#Uses Pickle to avoid frequent API calls and get result reproducabilty. To fetch the data delete the .p pickle filles
#Initilizes an empty user dict with user name as the key and an empty map as the value
def initializeUserDict(tag, count=5, cache_dict = True):
#get_popular doesn't work any more. Getting by tag
#return [url_post['user'] for item in get_tagposts(tag=tag)[0:count] for url_post in get_urlposts(item['url'])]
#from hashlib import md5
#print md5('http://usingtechnologybetter.com/chrome-extensions-for-teachers/').hexdigest()
user_dict_cache_file = "user_dict.p"
if cache_dict and exists(user_dict_cache_file):
print "Reading the user_dict from cache file", user_dict_cache_file
cache_file = open(user_dict_cache_file, "rb")
user_dict = load(cache_file)
cache_file.close()
else:
user_dict = dict(
[(url_info['user'], {})
for item in get_tagposts(tag=tag)[0:count]
for url_info in get_urlposts(item['url']) if url_info['user'] != '']
)
if cache_dict:
print "Writing the user_dict to cache file", user_dict_cache_file
cache_file = open(user_dict_cache_file, "wb")
dump(user_dict, cache_file)
cache_file.close()
return user_dict
def fillItems(user_dict, cache_dict = True):
    """Fill every user's dict with {url: 0 or 1} flags over the shared url
    universe (all urls posted by anyone in user_dict): 1 when the user
    bookmarked the url, 0 otherwise.

    The (user, url) lookups are pickled to items_cache.p; delete the file
    to refetch from the API.
    """
    items_cache_file = "items_cache.p"
    if cache_dict and exists(items_cache_file):
        print "Reading the items from cache file", items_cache_file
        cache_file = open(items_cache_file, "rb")
        user_url_tuple = load(cache_file)
        cache_file.close()
    else:
        # Sorted (user, url) pairs -- sorting by user is required for the
        # itertools.groupby call below to produce one group per user.
        user_url_tuple = sorted([(user, post["url"], ) for user in user_dict for post in get_userpost_details(user)])
        if cache_dict:
            print "Saving to cache file"
            cache_file = open(items_cache_file, "wb")
            dump(user_url_tuple, cache_file)
            cache_file.close()
    #Very Imperative
    # Python 2 tuple-unpacking lambdas: group pairs by user, and build the
    # url universe initialized to 0 (duplicates collapse in dict() below,
    # so despite its name the sequence itself is not deduplicated).
    grouped_by_values = groupby(user_url_tuple, lambda (user, _): user)
    distinct_url_sequence = map(lambda (_, url) : (url, 0), user_url_tuple)
    for user in user_dict:
        user_dict[user] = dict(distinct_url_sequence)
    # Flip the flag to 1 for every url the user actually bookmarked.
    for user, grouped_values in grouped_by_values:
        for _, url in grouped_values:
            user_dict[user][url] = 1
# Demo driver: build the user/bookmark matrix for the 'technology' tag,
# then print matches and recommendations for a randomly chosen user.
user_dict = initializeUserDict('technology', count=10)
fillItems(user_dict)
user = user_dict.keys()[random.randint(0, len(user_dict) - 1)]
print "Top matches for ", user, " are ", topMatches(user_dict, user)
print "Recommendations for user ", user, " are ", getRecommendations(user_dict, user)
"content_hash": "aac015b2fdba70ab6b8d9502ef876b9d",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 119,
"avg_line_length": 41.449438202247194,
"alnum_prop": 0.6427216047709406,
"repo_name": "amolnayak311/Programming-Collective-Intelligence",
"id": "8cc6fa6f3e6196b9604584c94d92f52ee1b14f84",
"size": "3689",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/ch2/deliciousrec.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "OpenEdge ABL",
"bytes": "6860"
},
{
"name": "Python",
"bytes": "44944"
}
],
"symlink_target": ""
} |
from kivy.factory import Factory
# Shorthand: each call below lazily registers a class name with the Factory,
# recording the module it can be imported from when first instantiated.
r = Factory.register

# Adapters
r('Adapter', module='kivy.adapters.adapter')
r('ListAdapter', module='kivy.adapters.listadapter')
r('SimpleListAdapter', module='kivy.adapters.simplelistadapter')
r('DictAdapter', module='kivy.adapters.dictadapter')
r('SelectableDataItem', module='kivy.adapters.models')

# Core framework classes
r('Animation', module='kivy.animation')
r('AnimationTransition', module='kivy.animation')
r('ExceptionHandler', module='kivy.base')
r('Cache', module='kivy.cache')
r('ClockBase', module='kivy.clock')
r('ColorPicker', module='kivy.uix.colorpicker')
r('ColorWheel', module='kivy.uix.colorpicker')
r('ConfigParser', module='kivy.config')
r('EventDispatcher', module='kivy.event')
r('Observable', module='kivy.event')
r('FactoryException', module='kivy.factory')
r('Gesture', module='kivy.gesture')
r('GestureDatabase', module='kivy.gesture')
r('GesturePoint', module='kivy.gesture')
r('GestureStroke', module='kivy.gesture')
r('Parser', module='kivy.lang')
r('LoaderBase', module='kivy.loader')
r('ProxyImage', module='kivy.loader')
r('LoggerHistory', module='kivy.logger')

# Properties
r('NumericProperty', module='kivy.properties')
r('StringProperty', module='kivy.properties')
r('ListProperty', module='kivy.properties')
r('ObjectProperty', module='kivy.properties')
r('BooleanProperty', module='kivy.properties')
r('BoundedNumericProperty', module='kivy.properties')
r('OptionProperty', module='kivy.properties')
r('ReferenceListProperty', module='kivy.properties')
r('AliasProperty', module='kivy.properties')
r('NumericProperty', module='kivy.properties')
r('Property', module='kivy.properties')
r('SafeList', module='kivy.utils')
r('Vector', module='kivy.vector')

# Graphics: context instructions, FBO, canvas, shaders, textures
r('Color', module='kivy.graphics.context_instructions')
r('BindTexture', module='kivy.graphics.context_instructions')
r('PushMatrix', module='kivy.graphics.context_instructions')
r('PopMatrix', module='kivy.graphics.context_instructions')
r('Rotate', module='kivy.graphics.context_instructions')
r('Scale', module='kivy.graphics.context_instructions')
r('Translate', module='kivy.graphics.context_instructions')
r('MatrixInstruction', module='kivy.graphics.context_instructions')
r('Fbo', module='kivy.graphics.fbo')
r('Instruction', module='kivy.graphics.instructions')
r('InstructionGroup', module='kivy.graphics.instructions')
r('ContextInstruction', module='kivy.graphics.instructions')
r('VertexInstruction', module='kivy.graphics.instructions')
r('Canvas', module='kivy.graphics.instructions')
r('CanvasBase', module='kivy.graphics.instructions')
r('RenderContext', module='kivy.graphics.instructions')
r('Shader', module='kivy.graphics.shader')
r('Texture', module='kivy.graphics.texture')
r('TextureRegion', module='kivy.graphics.texture')
r('Matrix', module='kivy.graphics.transformation')
r('VBO', module='kivy.graphics.vbo')
r('VertexBatch', module='kivy.graphics.vbo')
r('StencilPush', module='kivy.graphics.stencil_instructions')
r('StencilPop', module='kivy.graphics.stencil_instructions')
r('StencilUse', module='kivy.graphics.stencil_instructions')
r('StencilUnUse', module='kivy.graphics.stencil_instructions')

# Graphics: vertex instructions and SVG
r('Triangle', module='kivy.graphics.vertex_instructions')
r('Quad', module='kivy.graphics.vertex_instructions')
r('Rectangle', module='kivy.graphics.vertex_instructions')
r('BorderImage', module='kivy.graphics.vertex_instructions')
r('Ellipse', module='kivy.graphics.vertex_instructions')
r('Line', module='kivy.graphics.vertex_instructions')
r('SmoothLine', module='kivy.graphics.vertex_instructions')
r('Point', module='kivy.graphics.vertex_instructions')
r('Bezier', module='kivy.graphics.vertex_instructions')
r('Mesh', module='kivy.graphics.vertex_instructions')
r('Svg', module='kivy.graphics.svg')

# Input handling
r('MotionEventFactory', module='kivy.input.factory')
r('MotionEventProvider', module='kivy.input.provider')
r('Shape', module='kivy.input.shape')
r('ShapeRect', module='kivy.input.shape')

# Widgets (kivy.uix)
r('ActionBar', module='kivy.uix.actionbar')
r('ActionItem', module='kivy.uix.actionbar')
r('ActionButton', module='kivy.uix.actionbar')
r('ActionToggleButton', module='kivy.uix.actionbar')
r('ActionCheck', module='kivy.uix.actionbar')
r('ActionSeparator', module='kivy.uix.actionbar')
r('ActionDropDown', module='kivy.uix.actionbar')
r('ActionGroup', module='kivy.uix.actionbar')
r('ActionOverflow', module='kivy.uix.actionbar')
r('ActionView', module='kivy.uix.actionbar')
r('ContextualActionView', module='kivy.uix.actionbar')
r('AnchorLayout', module='kivy.uix.anchorlayout')
r('BoxLayout', module='kivy.uix.boxlayout')
r('GridLayout', module='kivy.uix.gridlayout')
r('PageLayout', module='kivy.uix.pagelayout')
r('Accordion', module='kivy.uix.accordion')
r('AccordionItem', module='kivy.uix.accordion')
r('Button', module='kivy.uix.button')
r('ButtonBehavior', module='kivy.uix.behaviors')
r('ToggleButtonBehavior', module='kivy.uix.behaviors')
r('DragBehavior', module='kivy.uix.behaviors')
r('FocusBehavior', module='kivy.uix.behaviors')
r('CompoundSelectionBehavior', module='kivy.uix.behaviors')
r('Bubble', module='kivy.uix.bubble')
r('BubbleButton', module='kivy.uix.bubble')
r('Camera', module='kivy.uix.camera')
r('Carousel', module='kivy.uix.carousel')
r('CodeInput', module='kivy.uix.codeinput')
r('CheckBox', module='kivy.uix.checkbox')
r('DropDown', module='kivy.uix.dropdown')
r('EffectWidget', module='kivy.uix.effectwidget')
r('FloatLayout', module='kivy.uix.floatlayout')
r('RelativeLayout', module='kivy.uix.relativelayout')
r('ScatterLayout', module='kivy.uix.scatterlayout')
r('ScatterPlaneLayout', module='kivy.uix.scatterlayout')
r('FileChooserListView', module='kivy.uix.filechooser')
r('FileChooserIconView', module='kivy.uix.filechooser')
r('FileChooser', module='kivy.uix.filechooser')
r('Image', module='kivy.uix.image')
r('AsyncImage', module='kivy.uix.image')
r('Label', module='kivy.uix.label')
r('Layout', module='kivy.uix.layout')
r('AbstractView', module='kivy.uix.abstractview')
r('CompositeListItem', module='kivy.uix.listview')
r('ListItemButton', module='kivy.uix.listview')
r('ListItemLabel', module='kivy.uix.listview')
r('ListView', module='kivy.uix.listview')
r('SelectableView', module='kivy.uix.listview')
r('ModalView', module='kivy.uix.modalview')
r('ProgressBar', module='kivy.uix.progressbar')
r('Popup', module='kivy.uix.popup')
r('Scatter', module='kivy.uix.scatter')
r('ScatterPlane', module='kivy.uix.scatter')
r('ScrollView', module='kivy.uix.scrollview')
r('Settings', module='kivy.uix.settings')
r('Slider', module='kivy.uix.slider')
r('Screen', module='kivy.uix.screenmanager')
r('ScreenManager', module='kivy.uix.screenmanager')
r('Spinner', module='kivy.uix.spinner')
r('Splitter', module='kivy.uix.splitter')
r('StackLayout', module='kivy.uix.stacklayout')
r('StencilView', module='kivy.uix.stencilview')
r('Switch', module='kivy.uix.switch')
r('TabbedPanel', module='kivy.uix.tabbedpanel')
r('TabbedPanelHeader', module='kivy.uix.tabbedpanel')
r('TextInput', module='kivy.uix.textinput')
r('ToggleButton', module='kivy.uix.togglebutton')
r('TreeView', module='kivy.uix.treeview')
r('TreeViewLabel', module='kivy.uix.treeview')
r('TreeViewNode', module='kivy.uix.treeview')
r('ShaderTransition', module='kivy.uix.screenmanager')
r('SlideTransition', module='kivy.uix.screenmanager')
r('SwapTransition', module='kivy.uix.screenmanager')
r('WipeTransition', module='kivy.uix.screenmanager')
r('FadeTransition', module='kivy.uix.screenmanager')
r('Sandbox', module='kivy.uix.sandbox')
r('Video', module='kivy.uix.video')
r('VideoPlayer', module='kivy.uix.videoplayer')
r('VideoPlayerVolume', module='kivy.uix.videoplayer')
r('VideoPlayerStop', module='kivy.uix.videoplayer')
r('VideoPlayerPlayPause', module='kivy.uix.videoplayer')
r('VideoPlayerProgressBar', module='kivy.uix.videoplayer')
r('VKeyboard', module='kivy.uix.vkeyboard')
r('Widget', module='kivy.uix.widget')
r('WidgetException', module='kivy.uix.widget')
r('RstDocument', module='kivy.uix.rst')

# Scroll effects
r('KineticEffect', module='kivy.effects.kinetic')
r('ScrollEffect', module='kivy.effects.scroll')
r('DampedScrollEffect', module='kivy.effects.dampedscroll')
r('OpacityScrollEffect', module='kivy.effects.opacityscroll')

# Multistroke gesture recognition
r('Recognizer', module='kivy.multistroke')
r('MultistrokeGesture', module='kivy.multistroke')
r('UnistrokeTemplate', module='kivy.multistroke')
r('ProgressTracker', module='kivy.multistroke')
r('GestureSurface', module='kivy.uix.gesturesurface')
r('GestureContainer', module='kivy.uix.gesturesurface')
| {
"content_hash": "030123d3f9b8aa9aed8da6e0b23354d4",
"timestamp": "",
"source": "github",
"line_count": 177,
"max_line_length": 67,
"avg_line_length": 47.69491525423729,
"alnum_prop": 0.7527837005448946,
"repo_name": "Farkal/kivy",
"id": "72bd3e6815322c8e330a017b71305f71b6bc9191",
"size": "8491",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "kivy/factory_registers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "2902"
},
{
"name": "C",
"bytes": "326681"
},
{
"name": "Emacs Lisp",
"bytes": "9695"
},
{
"name": "GLSL",
"bytes": "1572"
},
{
"name": "HTML",
"bytes": "19384"
},
{
"name": "Makefile",
"bytes": "5261"
},
{
"name": "Objective-C",
"bytes": "14779"
},
{
"name": "Python",
"bytes": "3700124"
},
{
"name": "Shell",
"bytes": "8829"
},
{
"name": "VimL",
"bytes": "1123"
}
],
"symlink_target": ""
} |
import pygame
import os
import pygame.event
import xml.etree.cElementTree as ET
from pygame.locals import *
from pprint import pprint
def load_png(name):
    """
    Load image and return image object

    Looks the file up under the "data" directory, converts it to the
    display pixel format (keeping per-pixel alpha only when present),
    and returns ``(image, rect)``.  Exits the program if loading fails.
    """
    fullname = os.path.join('data', name)
    try:
        image = pygame.image.load(fullname)
        # Convert once at load time so later blits are fast; preserve the
        # alpha channel only if the source image actually has one.
        if image.get_alpha() is None:
            image = image.convert()
        else:
            image = image.convert_alpha()
    except pygame.error, message:
        print 'Cannot load image:', fullname
        raise SystemExit(message)
    return image, image.get_rect()
def blurSurf(surface, amt):
    """
    Blur the given surface by the given 'amount'. Only values 1 and greater are valid. Value 1 = no blur.

    The blur is approximated by scaling the surface down by ``1/amt`` and
    back up with smooth (bilinear) filtering.

    :param surface: pygame Surface to blur (not modified in place)
    :param amt: blur strength; 1.0 means no blur
    :return: a new, blurred Surface of the same size
    :raises ValueError: if amt is below 1.0
    """
    if amt < 1.0:
        # Message fixed: 1.0 itself is accepted (it just produces no blur),
        # so the valid range is "1.0 or greater", not "greater than 1.0".
        raise ValueError("Arg 'amt' must be 1.0 or greater, passed in value is %s" % amt)
    scale = 1.0/float(amt)
    surf_size = surface.get_size()
    scale_size = (int(surf_size[0]*scale), int(surf_size[1]*scale))
    surf = pygame.transform.smoothscale(surface, scale_size)
    surf = pygame.transform.smoothscale(surf, surf_size)
    return surf
def isPointInsideRect(x, y, rect):
    """
    Checks if a point lies strictly inside a rectangle (Rect object).

    Points exactly on an edge are NOT considered inside.

    :param x: horizontal coordinate
    :param y: vertical coordinate
    :param rect: rect to check inside
    :return: bool
    """
    return rect.left < x < rect.right and rect.top < y < rect.bottom
def doRectsOverlap(rect1, rect2):
    """
    Checks if rectangles (Rect objects) overlap.

    Overlap is exclusive of edges (matching isPointInsideRect): rectangles
    that merely touch along an edge or corner do not count.

    :param rect1: first Rect object
    :param rect2: second Rect object
    :return: bool
    """
    # Standard axis-aligned interval test.  The previous corner-in-rect
    # approach missed "cross" overlaps where the rectangles intersect
    # without either one's corner lying inside the other.
    return (rect1.left < rect2.right and rect2.left < rect1.right and
            rect1.top < rect2.bottom and rect2.top < rect1.bottom)
class Tileset():
    """The tileset image plus its current on-screen scroll offset."""

    def __init__(self, name):
        self.name = name
        self.image, self.rect = load_png(self.name)
        # Tracked offset of the tileset within the window.
        self.pos_x = 0
        self.pos_y = 0

    def change_pos_rel(self, x, y):
        """Scroll the tileset by (x, y) pixels, keeping rect and offset in sync."""
        self.rect.x += x
        self.rect.y += y
        self.pos_x += x
        self.pos_y += y
        print(self.rect, self.pos_x, self.pos_y)
class SelectedTile():
    """The currently selected 32x32 tile, previewed in the window corner."""

    def __init__(self, x, y):
        # Preview image of the selection (blank until a tile is picked).
        self.image = pygame.Surface((32, 32))
        # Where the preview is drawn on screen.
        self.pos_x = x
        self.pos_y = y
        # Source coordinates of the selection within the tileset image.
        self.pos_tileset_x = 0
        self.pos_tileset_y = 0
        # Metadata stamped onto painted tiles.
        self.collision = False
        self.event = None

    def select_tile(self, postx, posty):
        """Remember which tileset cell is currently selected."""
        self.pos_tileset_x = postx
        self.pos_tileset_y = posty

    def mutate(self, t):
        """Copy this selection's metadata and image onto map tile ``t``."""
        t.collision = self.collision
        t.event = self.event
        t.pos_tileset_x = self.pos_tileset_x
        t.pos_tileset_y = self.pos_tileset_y
        t.image.blit(self.image, (0, 0))
class Tile():
    """A single map cell: a 32x32 image plus tileset/collision metadata."""

    def __init__(self):
        # Rendered image for this cell; starts out blank (black surface).
        self.image = pygame.Surface((32, 32))
        # Source position within the tileset; None until a tile is painted.
        self.pos_tileset_x = None
        self.pos_tileset_y = None
        # Gameplay metadata.
        self.collision = False
        self.event = None
class Playfield():
    """The editable map: an h-by-w grid of Tile objects sharing one tileset."""
    def __init__(self, w, h, tileset):
        # mapp[y][x] -> Tile; row-major, one row per vertical tile.
        self.mapp = []
        self.tileset = tileset
        for y in xrange(h):
            wline = []
            for x in xrange(w):
                wline += [Tile()]
            self.mapp += [wline]
        pprint(self.mapp)
class App:
    """Main map-editor application: event loop, two-screen UI, XML export.

    The editor has two screens toggled with TAB (``self.switch``):
    True  -> tileset view (pick a tile with the mouse),
    False -> map view (paint the picked tile onto the grid).
    """
    def __init__(self):
        self._running = True
        self._display_surf = None
        # Window size in pixels; the grid is sized in 32px tiles below.
        self.size = self.width, self.height = 640, 448
        # Vertical scroll offset of the tileset view (pixels).
        self.offset = 0
        # True = tileset screen, False = map screen.
        self.switch = True
        self.tilesetfile = "tileset.png"
        # Viewport offset of the map view (pixels).
        self.vx = 0
        self.vy = 0
        # Map dimensions in tiles (NB: integer division under Python 2).
        self.vxmax = self.width/32
        self.vymax = self.height/32
    def on_init(self):
        """Initialize pygame, the window, and the editor's model objects."""
        pygame.init()
        pygame.display.set_caption('mapEditor')
        self._display_surf = pygame.display.set_mode(self.size, pygame.HWSURFACE | pygame.DOUBLEBUF)
        pygame.display.set_icon(pygame.Surface((32,32), pygame.SRCALPHA, 32).convert_alpha())
        self._running = True
        self.Tileset = Tileset(self.tilesetfile)
        self.Playfield = Playfield(self.vxmax, self.vymax, self.Tileset)
        # Selection preview lives in the bottom-right corner of the window.
        self.SelectedTile = SelectedTile(self.width-32, self.height-32)
        #print(type(self.Playfield.mapp[0]))
        return True
    def save(self):
        """Serialize the map grid to testmap.xml (one <tile> per cell)."""
        #2015-07-30
        # NOTE(review): the <size> and <tileset> elements built here are
        # never attached to the tree that gets written, so only <map> is
        # saved -- presumably intended; confirm against the loader.
        size = ET.Element("size")
        size_x = ET.SubElement(size, "x")
        size_y = ET.SubElement(size, "y")
        size_x.text = str(self.vxmax)
        size_y.text = str(self.vymax)
        tileset = ET.Element("tileset")
        tileset.text = str(self.tilesetfile)
        mapp = ET.Element("map")
        for y in xrange(len(self.Playfield.mapp)):
            for x in xrange(len(self.Playfield.mapp[y])):
                tile = ET.SubElement(mapp, "tile")
                pos_tile_x = ET.SubElement(tile, "pos_tile_x")
                pos_tile_y = ET.SubElement(tile, "pos_tile_y")
                collision = ET.SubElement(tile, "collision")
                event = ET.SubElement(tile, "event")
                pos_tile_x.text = str(self.Playfield.mapp[y][x].pos_tileset_x)
                pos_tile_y.text = str(self.Playfield.mapp[y][x].pos_tileset_y)
                collision.text = str(self.Playfield.mapp[y][x].collision)
                event.text = str(self.Playfield.mapp[y][x].event)
        tree = ET.ElementTree(mapp)
        try:
            tree.write("testmap.xml")
            print("[SUCCES] Wrote to %s successfully!" % "testmap.xml")
        except:
            print("[ERROR] Couldn't write!")
    def on_event(self, event):
        """Dispatch a single pygame event depending on the active screen."""
        if event.type == pygame.QUIT:
            self._running = False
        if event.type == pygame.KEYDOWN:
            #Tileset screen
            if self.switch:
                # PageUp/PageDown scroll the tileset vertically by one row.
                if event.key == K_PAGEUP:
                    self.Tileset.change_pos_rel(0, 32)
                    self.offset += 32
                    print(K_PAGEUP)
                if event.key == K_PAGEDOWN:
                    self.Tileset.change_pos_rel(0, -32)
                    self.offset += -32
                    print(K_PAGEDOWN)
                if event.key == K_TAB:
                    if self.switch:
                        self.switch = False
                    elif not self.switch:
                        self.switch = True
                    print(self.switch)
            #Map screen
            elif not self.switch:
                if event.key == K_TAB:
                    if self.switch:
                        self.switch = False
                    elif not self.switch:
                        self.switch = True
                    print(self.switch)
                # Arrow keys pan the map viewport one tile at a time.
                if event.key == K_UP:
                    self.vy += 32
                    print(self.vy)
                if event.key == K_DOWN:
                    self.vy -= 32
                    print(self.vy)
                if event.key == K_LEFT:
                    self.vx += 32
                    print(self.vx)
                if event.key == K_RIGHT:
                    self.vx -= 32
                    print(self.vx)
                # 'r' toggles the collision flag on the current selection.
                if event.key == K_r:
                    if self.SelectedTile.collision:
                        self.SelectedTile.collision = False
                    elif not self.SelectedTile.collision:
                        self.SelectedTile.collision = True
                # Ctrl+S exports the map to XML.
                if event.key == K_s and pygame.key.get_mods() & KMOD_LCTRL:
                    self.save()
        if event.type == pygame.MOUSEBUTTONDOWN:
            #Tileset screen
            if self.switch:
                if pygame.mouse.get_pressed()[0]:
                    print(pygame.mouse.get_pos())
                    if isPointInsideRect(pygame.mouse.get_pos()[0], pygame.mouse.get_pos()[1], self.Tileset.rect):
                        x, y = pygame.mouse.get_pos()
                        #MARCIN LOVE
                        # Snap the click to the 32px tile grid; compensate
                        # for the tileset's vertical scroll offset.
                        nx = x - (x % 32)
                        ny = y - (y % 32) - self.offset
                        print("Formatted: %i, %i" % (nx, ny))
                        self.SelectedTile.image.fill((255, 255, 255))
                        self.SelectedTile.image.blit(self.Tileset.image, (0, 0), (nx, ny, 32, 32))
                        self.SelectedTile.select_tile(nx, ny)
                        print("Clicked on tileset")
            elif not self.switch:
                # Left click paints the selected tile onto the map cell.
                if pygame.mouse.get_pressed()[0]:
                    #pygame.event.pool()
                    print(pygame.mouse.get_pos())
                    #paint selected tile
                    x, y = pygame.mouse.get_pos()
                    #MARCIN LOVE
                    nx = x - (x % 32) - self.vx
                    ny = y - (y % 32) - self.vy
                    print("Formatted: %i, %i" % (nx, ny))
                    try:
                        self.SelectedTile.mutate(self.Playfield.mapp[ny/32][nx/32])
                        print(self.SelectedTile.collision)
                    except IndexError:
                        pass
                # Right click clears the cell by stamping a fresh selection.
                elif pygame.mouse.get_pressed()[2]:
                    print(pygame.mouse.get_pos())
                    #paint selected tile
                    x, y = pygame.mouse.get_pos()
                    #MARCIN LOVE
                    nx = x - (x % 32) - self.vx
                    ny = y - (y % 32) - self.vy
                    print("Formatted: %i, %i" % (nx, ny))
                    try:
                        self.SelectedTile = SelectedTile(self.SelectedTile.pos_x, self.SelectedTile.pos_y)
                        self.SelectedTile.mutate(self.Playfield.mapp[ny/32][nx/32])
                    except IndexError:
                        pass
    def on_loop(self):
        # No per-frame simulation; the editor is purely event-driven.
        pass
    def on_render(self):
        """Draw the active screen (tileset picker or map grid) and flip."""
        if self.switch:
            self._display_surf.fill((255, 255, 255))
            self._display_surf.blit(self.Tileset.image, (self.Tileset.pos_x, self.Tileset.pos_y))
            self._display_surf.blit(self.SelectedTile.image, (self.SelectedTile.pos_x, self.SelectedTile.pos_y))
        if not self.switch:
            self._display_surf.fill((255, 255, 255))
            for y in xrange(len(self.Playfield.mapp)):
                for x in xrange(len(self.Playfield.mapp[y])):
                    self._display_surf.blit(self.Playfield.mapp[y][x].image, (32*x + self.vx, 32*y + self.vy))
                    #Draw rectangle to show collision
                    if self.Playfield.mapp[y][x].collision:
                        s = pygame.Surface((32, 32), pygame.SRCALPHA)
                        s.fill((255, 0, 0, 100))
                        self._display_surf.blit(s, (32*x + self.vx, 32*y + self.vy))
                        pygame.draw.rect(self._display_surf,
                                         (255, 0, 0),
                                         Rect(32*x + self.vx, 32*y + self.vy, 32, 32),
                                         2)
            self._display_surf.blit(self.SelectedTile.image, (self.SelectedTile.pos_x, self.SelectedTile.pos_y))
        pygame.display.flip()
    def on_cleanup(self):
        pygame.quit()
    def on_execute(self):
        """Run the main loop: init, then event/update/render until quit."""
        if not self.on_init():
            self._running = False
        while self._running:
            for event in pygame.event.get():
                self.on_event(event)
            self.on_loop()
            self.on_render()
        self.on_cleanup()
if __name__ == "__main__":
theApp = App()
theApp.on_execute() | {
"content_hash": "93726f94de87061e9ca0d943265908e3",
"timestamp": "",
"source": "github",
"line_count": 322,
"max_line_length": 114,
"avg_line_length": 37.76086956521739,
"alnum_prop": 0.48934945308002303,
"repo_name": "Reyuu/yiisu",
"id": "cd5a78fa6347518fc40d021b84c1dec8d034ef90",
"size": "12159",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "old/map - 2015-07-30.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "123426"
}
],
"symlink_target": ""
} |
import requests
from ubersmith_client.exceptions import UbersmithException, get_exception_for
def init(url, user, password):
    """Return an :class:`UbersmithApi` bound to the given endpoint and credentials."""
    return UbersmithApi(url, user, password)
class UbersmithApi(object):
    """Entry point to the Ubersmith API.

    Attribute access (``api.client``, ``api.device``, ...) yields an
    :class:`UbersmithRequest` scoped to that Ubersmith module.
    """

    def __init__(self, url, user, password):
        self.url = url
        self.user = user
        self.password = password

    def __getattr__(self, module):
        # Any attribute not found on the instance is treated as an
        # Ubersmith module name and turned into a request builder.
        return UbersmithRequest(self.url, self.user, self.password, module)
class UbersmithRequest(object):
    """Builds and executes one Ubersmith API call.

    Chained attribute access accumulates the method path, e.g.
    ``api.client.get(client_id=1)`` calls ``client.get``.
    """

    def __init__(self, url, user, password, module):
        self.url = url
        self.user = user
        self.password = password
        self.module = module
        self.methods = []
        self.http_methods = {'GET': 'get', 'POST': 'post'}

    def __getattr__(self, function):
        # Each missing attribute extends the dotted method path.
        self.methods.append(function)
        return self

    def __call__(self, **kwargs):
        # Calling the request object defaults to an HTTP GET.
        return self.http_get(**kwargs)

    def process_request(self, http_method, **kwargs):
        """Perform the HTTP call and unwrap the Ubersmith envelope.

        :raises: an exception from ``get_exception_for`` on HTTP errors,
            or :class:`UbersmithException` when the API reports failure.
        :return: the ``data`` payload of the response.
        """
        callable_http_method = getattr(requests, http_method)
        response = callable_http_method(self.url, auth=(self.user, self.password), **kwargs)

        if response.status_code < 200 or response.status_code >= 400:
            raise get_exception_for(status_code=response.status_code)

        response_json = response.json()
        if not response_json['status']:
            raise UbersmithException(
                500,
                "error {0}, {1}".format(response_json['error_code'], response_json['error_message'])
            )

        # Reuse the already-parsed payload instead of re-parsing the body
        # with a second response.json() call.
        return response_json["data"]

    def http_get(self, **kwargs):
        """Issue the accumulated call as an HTTP GET (query parameters)."""
        self._build_request_params(kwargs)
        return self.process_request(self.http_methods.get('GET'), params=kwargs)

    def http_post(self, **kwargs):
        """Issue the accumulated call as an HTTP POST (form data)."""
        self._build_request_params(kwargs)
        return self.process_request(self.http_methods.get('POST'), data=kwargs)

    def _build_request_params(self, kwargs):
        # Ubersmith expects the dotted path in the 'method' parameter.
        _methods = ".".join(self.methods)
        kwargs['method'] = "{0}.{1}".format(self.module, _methods)
| {
"content_hash": "ece9e8f934fc510d2d12659b963b37c4",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 100,
"avg_line_length": 30.470588235294116,
"alnum_prop": 0.6172779922779923,
"repo_name": "Marx314/python-ubersmithclient",
"id": "5796838026ceeaadaecd48f644ac28f93c74e34e",
"size": "2645",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ubersmith_client/api.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "16396"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import os.path
from typing import Optional
from typing import Sequence
def main(argv=None):  # type: (Optional[Sequence[str]]) -> int
    """Report broken symlinks among the given filenames.

    Prints one line per dangling symlink and returns 1 if any were
    found, 0 otherwise.
    """
    parser = argparse.ArgumentParser(description='Checks for broken symlinks.')
    parser.add_argument('filenames', nargs='*', help='Filenames to check')
    args = parser.parse_args(argv)

    # A symlink whose target does not exist: islink() is true but
    # exists() (which follows the link) is false.
    broken = [
        filename for filename in args.filenames
        if os.path.islink(filename) and not os.path.exists(filename)
    ]  # pragma: no cover (symlink support required)
    for filename in broken:
        print('{}: Broken symlink'.format(filename))
    return 1 if broken else 0
if __name__ == '__main__':
    # Allow running as a standalone script; exit code 1 means broken links.
    exit(main())
| {
"content_hash": "911b7eac2a2008c2caf9f879e33cfbae",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 79,
"avg_line_length": 27.1,
"alnum_prop": 0.6396063960639606,
"repo_name": "Harwood/pre-commit-hooks",
"id": "736bf99c0edb76202fc663f10c071567ab84f39b",
"size": "813",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pre_commit_hooks/check_symlinks.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "114801"
}
],
"symlink_target": ""
} |
import logging
from botocore.exceptions import ClientError
from concurrent.futures import as_completed
from c7n.actions import ActionRegistry, BaseAction
from c7n.filters import FilterRegistry, AgeFilter, OPERATORS
import c7n.filters.vpc as net_filters
from c7n.manager import resources
from c7n.query import QueryResourceManager
from c7n.utils import (
type_schema, local_session, snapshot_identifier, chunks)
log = logging.getLogger('custodian.rds-cluster')
filters = FilterRegistry('rds-cluster.filters')
actions = ActionRegistry('rds-cluster.actions')
@resources.register('rds-cluster')
class RDSCluster(QueryResourceManager):
    """Resource manager for RDS clusters.
    """
    class Meta(object):
        # Declarative description of how to enumerate and identify
        # RDS clusters through the AWS API.
        service = 'rds'
        type = 'rds-cluster'
        enum_spec = ('describe_db_clusters', 'DBClusters', None)
        name = id = 'DBClusterIdentifier'
        filter_name = None
        filter_type = None
        dimension = 'DBClusterIdentifier'
        date = None

    resource_type = Meta

    # Registries declared at module scope; actions/filters below attach here.
    filter_registry = filters
    action_registry = actions
@filters.register('security-group')
class SecurityGroupFilter(net_filters.SecurityGroupFilter):
    """Filter RDS clusters by their attached VPC security groups."""
    RelatedIdsExpression = "VpcSecurityGroups[].VpcSecurityGroupId"
@actions.register('delete')
class Delete(BaseAction):
    """Delete RDS clusters (optionally their member instances first).

    Options:
      skip-snapshot    -- do not take a final snapshot (default False)
      delete-instances -- delete member DB instances first (default True)
    """
    schema = type_schema(
        'delete', **{'skip-snapshot': {'type': 'boolean'},
                     'delete-instances': {'type': 'boolean'}})
    def process(self, clusters):
        skip = self.data.get('skip-snapshot', False)
        delete_instances = self.data.get('delete-instances', True)
        client = local_session(self.manager.session_factory).client('rds')
        for cluster in clusters:
            # Member instances must go first or the cluster delete fails.
            if delete_instances:
                for instance in cluster.get('DBClusterMembers', []):
                    client.delete_db_instance(
                        DBInstanceIdentifier=instance['DBInstanceIdentifier'],
                        SkipFinalSnapshot=True)
                    self.log.info(
                        'Deleted RDS instance: %s',
                        instance['DBInstanceIdentifier'])
            params = {'DBClusterIdentifier': cluster['DBClusterIdentifier']}
            if skip:
                params['SkipFinalSnapshot'] = True
            else:
                # Take a final snapshot unless explicitly skipped.
                params['FinalDBSnapshotIdentifier'] = snapshot_identifier(
                    'Final', cluster['DBClusterIdentifier'])
            try:
                client.delete_db_cluster(**params)
            except ClientError as e:
                # Clusters mid-transition can't be deleted; log and move on.
                if e.response['Error']['Code'] == 'InvalidDBClusterStateFault':
                    self.log.info(
                        'RDS cluster in invalid state: %s',
                        cluster['DBClusterIdentifier'])
                    continue
                raise
            self.log.info(
                'Deleted RDS cluster: %s',
                cluster['DBClusterIdentifier'])
@actions.register('retention')
class RetentionWindow(BaseAction):
    """Ensure each cluster's backup retention is at least ``days`` days.

    Retention is only ever increased, never reduced.
    """
    date_attribute = "BackupRetentionPeriod"
    # Tag copy not yet available for Aurora:
    #   https://forums.aws.amazon.com/thread.jspa?threadID=225812
    schema = type_schema(
        'retention',
        **{'days': {'type': 'number'}})
    def process(self, clusters):
        with self.executor_factory(max_workers=2) as w:
            futures = []
            for cluster in clusters:
                futures.append(w.submit(
                    self.process_snapshot_retention,
                    cluster))
            for f in as_completed(futures):
                if f.exception():
                    self.log.error(
                        "Exception setting RDS cluster retention \n %s",
                        f.exception())
    def process_snapshot_retention(self, cluster):
        """Raise the cluster's retention if below the configured value."""
        current_retention = int(cluster.get('BackupRetentionPeriod', 0))
        new_retention = self.data['days']
        if current_retention < new_retention:
            # NOTE(review): inside this guard max(...) always equals
            # new_retention, so the max() is redundant but harmless.
            self.set_retention_window(
                cluster,
                max(current_retention, new_retention))
            return cluster
    def set_retention_window(self, cluster, retention):
        # Preferred windows must be re-sent or the API call is rejected.
        c = local_session(self.manager.session_factory).client('rds')
        c.modify_db_cluster(
            DBClusterIdentifier=cluster['DBClusterIdentifier'],
            BackupRetentionPeriod=retention,
            PreferredBackupWindow=cluster['PreferredBackupWindow'],
            PreferredMaintenanceWindow=cluster['PreferredMaintenanceWindow'])
@actions.register('snapshot')
class Snapshot(BaseAction):
    """Create a manual snapshot of each RDS cluster."""
    schema = type_schema('snapshot')
    def process(self, clusters):
        with self.executor_factory(max_workers=3) as w:
            futures = []
            for cluster in clusters:
                futures.append(w.submit(
                    self.process_cluster_snapshot,
                    cluster))
            for f in as_completed(futures):
                if f.exception():
                    self.log.error(
                        "Exception creating RDS cluster snapshot  \n %s",
                        f.exception())
        return clusters
    def process_cluster_snapshot(self, cluster):
        """Create one snapshot named Backup-<cluster>-<date>."""
        c = local_session(self.manager.session_factory).client('rds')
        c.create_db_cluster_snapshot(
            DBClusterSnapshotIdentifier=snapshot_identifier(
                'Backup',
                cluster['DBClusterIdentifier']),
            DBClusterIdentifier=cluster['DBClusterIdentifier'])
@resources.register('rds-cluster-snapshot')
class RDSClusterSnapshot(QueryResourceManager):
    """Resource manager for RDS cluster snapshots.
    """
    class Meta(object):
        # Declarative description of how to enumerate and identify
        # cluster snapshots through the AWS API.
        service = 'rds'
        type = 'rds-cluster-snapshot'
        enum_spec = (
            'describe_db_cluster_snapshots', 'DBClusterSnapshots', None)
        name = id = 'DBClusterSnapshotIdentifier'
        filter_name = None
        filter_type = None
        dimension = None
        date = 'SnapshotCreateTime'

    resource_type = Meta

    # Snapshots get their own registries, separate from clusters.
    filter_registry = FilterRegistry('rdscluster-snapshot.filters')
    action_registry = ActionRegistry('rdscluster-snapshot.actions')
@RDSClusterSnapshot.filter_registry.register('age')
class RDSSnapshotAge(AgeFilter):
    """Filter cluster snapshots by age (days since SnapshotCreateTime)."""
    schema = type_schema(
        'age', days={'type': 'number'},
        op={'type': 'string', 'enum': OPERATORS.keys()})
    date_attribute = 'SnapshotCreateTime'
@RDSClusterSnapshot.action_registry.register('delete')
class RDSClusterSnapshotDelete(BaseAction):
    """Delete the filtered RDS cluster snapshots, in parallel batches."""
    def process(self, snapshots):
        log.info("Deleting %d RDS cluster snapshots", len(snapshots))
        with self.executor_factory(max_workers=3) as w:
            futures = []
            # Batch in chunks of 50; reversed so oldest-listed go last.
            for snapshot_set in chunks(reversed(snapshots), size=50):
                futures.append(
                    w.submit(self.process_snapshot_set, snapshot_set))
            for f in as_completed(futures):
                if f.exception():
                    self.log.error(
                        "Exception deleting snapshot set \n %s",
                        f.exception())
        return snapshots
    def process_snapshot_set(self, snapshots_set):
        """Delete one batch of snapshots with a shared client."""
        c = local_session(self.manager.session_factory).client('rds')
        for s in snapshots_set:
            c.delete_db_cluster_snapshot(
                DBClusterSnapshotIdentifier=s['DBClusterSnapshotIdentifier'])
| {
"content_hash": "74580422a508718cc63c183cc4cabe72",
"timestamp": "",
"source": "github",
"line_count": 214,
"max_line_length": 79,
"avg_line_length": 34.61214953271028,
"alnum_prop": 0.6034831915755366,
"repo_name": "jeffastorey/cloud-custodian",
"id": "8a01fcb17126a0a45707e722a13a4eba5b8126b7",
"size": "7992",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "c7n/resources/rdscluster.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "1183"
},
{
"name": "Python",
"bytes": "802050"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, unicode_literals
from . import forms, views
from .models import (Application, ConnectedAccount, Charge, Customer, Plan,
StandardAccount, Subscription)
from copy import copy
from django.conf.urls import patterns, url
from django.core.urlresolvers import reverse
from django.contrib import admin
try:
from django.contrib.admin.utils import model_ngettext
except ImportError:
# Django 1.6
from django.contrib.admin.util import model_ngettext
from django.http import HttpResponseRedirect
class StripeAPIActionMixin(object):
    """Admin mixin adding a bulk action that clears cached Stripe API data."""

    actions = ['clear_api_cache']

    def clear_api_cache(self, request, queryset):
        """Admin action: drop each selected object's cached API data."""
        for obj in queryset:
            obj.api(clear=True)
        count = len(queryset)
        message = 'Cleared API cache for {count} {items}.'.format(
            count=count, items=model_ngettext(self.opts, count))
        self.message_user(request, message)

    clear_api_cache.short_description = ('Clear API cache for selected '
                                         '%(verbose_name_plural)s')
class ApplicationAdmin(StripeAPIActionMixin, admin.ModelAdmin):
    """Admin for Stripe Connect applications, exposing the OAuth2 callback URI."""
    form = forms.ApplicationForm
    list_display = ['__str__', 'account', 'callback_uri']
    readonly_fields = []
    def _get_callback_uri(self, request):
        # Absolute URI of this admin site's OAuth2 callback view.
        url = reverse(
            '{0}:stripe_connect_oauth2_callback'.format(self.admin_site.name))
        return request.build_absolute_uri(url)
    @admin.options.csrf_protect_m
    def changelist_view(self, request, *args, **kwargs):
        # Stash the request-derived URI so the callback_uri column can
        # render it (list columns don't receive the request).
        self._callback_uri = self._get_callback_uri(request)
        return super(ApplicationAdmin, self).changelist_view(request, *args,
                                                             **kwargs)
    def callback_uri(self, *args, **kwargs):
        # List column; value is computed per-request in changelist_view.
        return self._callback_uri
    callback_uri.short_description = 'Callback URI'
    def get_readonly_fields(self, request, obj=None, **kwargs):
        # Account cannot be changed once the application exists.
        readonly_fields = copy(
            super(ApplicationAdmin, self).get_readonly_fields(
                request, obj, **kwargs))
        if obj is not None:
            readonly_fields += ['account']
        return readonly_fields
admin.site.register(Application, ApplicationAdmin)
class ChargeAdmin(StripeAPIActionMixin, admin.ModelAdmin):
    """Read-only admin for Stripe charges."""

    list_display = ['__str__', 'stripe_id', 'account', 'application']
    list_filter = ['account', 'account__application']
    readonly_fields = ['stripe_id']

    def application(self, obj):
        """List column: application owning the charge's account."""
        return obj.account.application

    def get_queryset(self, *args, **kwargs):
        # Prefetch the FK chain used by the list columns.
        queryset = super(ChargeAdmin, self).get_queryset(*args, **kwargs)
        queryset = queryset.select_related('account', 'account__application')
        return queryset

    def has_add_permission(self, *args, **kwargs):
        # Charges are created via the Stripe API, never in the admin.
        return False


admin.site.register(Charge, ChargeAdmin)
class ConnectedAccountAdmin(StripeAPIActionMixin, admin.ModelAdmin):
    """Admin for Stripe Connect accounts, wiring in the OAuth2 flow."""
    exclude = ['_secret_key', '_publishable_key']
    form = forms.AccountForm
    list_display = ['__str__', 'stripe_id', 'application']
    readonly_fields = ['stripe_id']
    def get_readonly_fields(self, request, obj=None, **kwargs):
        # Application cannot be changed once the account exists.
        readonly_fields = copy(
            super(ConnectedAccountAdmin, self).get_readonly_fields(
                request, obj, **kwargs))
        if obj is not None:
            readonly_fields += ['application']
        return readonly_fields
    def get_urls(self):
        # Custom OAuth2 endpoints are mounted before the default admin URLs.
        urlpatterns = patterns(
            'thecut.stripe.views',
            url(r'^(?P<pk>\d+)/oauth2/request$',
                views.OAuth2RequestTokenView.as_view(),
                name='stripe_connect_oauth2_request_token'),
            url(r'^oauth2/callback$',
                views.OAuth2CallbackView.as_view(),
                name='stripe_connect_oauth2_callback'),
        )
        urlpatterns += super(ConnectedAccountAdmin, self).get_urls()
        return urlpatterns
    def response_add(self, request, obj, **kwargs):
        # Redirect to request oauth2 credentials
        return HttpResponseRedirect(
            request.build_absolute_uri('../{0}/oauth2/request'.format(obj.pk)))
admin.site.register(ConnectedAccount, ConnectedAccountAdmin)
class SubscriptionInline(StripeAPIActionMixin, admin.StackedInline):
    """Read-only inline listing a customer's Stripe subscriptions."""

    model = Subscription
    extra = 0
    readonly_fields = ['stripe_id']

    def has_add_permission(self, *args, **kwargs):
        # Subscriptions are managed through the Stripe API, not the admin.
        return False

    def has_delete_permission(self, *args, **kwargs):
        return False
class CustomerAdmin(StripeAPIActionMixin, admin.ModelAdmin):
    """Read-only admin for Stripe customers with inline subscriptions."""

    inlines = [SubscriptionInline]
    list_display = ['__str__', 'stripe_id', 'account', 'application']
    list_filter = ['account', 'account__application']
    readonly_fields = ['stripe_id']

    def application(self, obj):
        """List column: application owning the customer's account."""
        return obj.account.application

    def get_queryset(self, *args, **kwargs):
        # Prefetch the FK chain used by the list columns.
        queryset = super(CustomerAdmin, self).get_queryset(*args, **kwargs)
        queryset = queryset.select_related('account', 'account__application')
        return queryset

    def has_add_permission(self, *args, **kwargs):
        # Customers are created via the Stripe API, never in the admin.
        return False


admin.site.register(Customer, CustomerAdmin)
class PlanAdmin(StripeAPIActionMixin, admin.ModelAdmin):
    """Read-only admin for Stripe plans."""

    list_display = ['__str__', 'stripe_id', 'account', 'application']
    list_filter = ['account', 'account__application']
    readonly_fields = ['stripe_id']

    def application(self, obj):
        """List column: application owning the plan's account."""
        return obj.account.application

    def get_queryset(self, *args, **kwargs):
        # Prefetch the FK chain used by the list columns.
        queryset = super(PlanAdmin, self).get_queryset(*args, **kwargs)
        queryset = queryset.select_related('account', 'account__application')
        return queryset

    def has_add_permission(self, *args, **kwargs):
        # Plans are created via the Stripe API, never in the admin.
        return False


admin.site.register(Plan, PlanAdmin)
class StandardAccountAdmin(StripeAPIActionMixin, admin.ModelAdmin):
    """Admin for standard (non-Connect) Stripe accounts."""
    exclude = ['application']
    form = forms.AccountForm
    list_display = ['__str__', 'stripe_id']
    readonly_fields = ['stripe_id']
admin.site.register(StandardAccount, StandardAccountAdmin)
class SubscriptionAdmin(StripeAPIActionMixin, admin.ModelAdmin):
    """Read-only admin for Stripe subscriptions."""
    list_display = ['__str__', 'stripe_id', 'customer', 'account',
                    'application']
    list_filter = ['plan', 'customer', 'account', 'account__application']
    readonly_fields = ['stripe_id']
    def application(self, obj):
        # List column: application owning the subscription's account.
        return obj.account.application
    def get_queryset(self, *args, **kwargs):
        # Prefetch the FK chain used by the list columns.
        queryset = super(SubscriptionAdmin, self).get_queryset(*args, **kwargs)
        return queryset.select_related('plan', 'customer', 'account',
                                       'account__application')
    def has_add_permission(self, *args, **kwargs):
        # Subscriptions are created via the Stripe API, never in the admin.
        return False
admin.site.register(Subscription, SubscriptionAdmin)
| {
"content_hash": "9ad3e71d78a5f5897d580a4ca5bdc4de",
"timestamp": "",
"source": "github",
"line_count": 219,
"max_line_length": 79,
"avg_line_length": 30.945205479452056,
"alnum_prop": 0.6468939058580493,
"repo_name": "thecut/thecut-stripe",
"id": "a8046275a468efaaab4728f031b27e460e562553",
"size": "6801",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "thecut/stripe/admin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "984"
},
{
"name": "Python",
"bytes": "71057"
}
],
"symlink_target": ""
} |
from setuptools import setup
from setuptools import find_packages
version = "0.1"


def _read(name):
    # Read a top-level text file without leaking the file handle
    # (used to assemble the long description).
    with open(name) as f:
        return f.read()


setup(
    name="rapidsms",
    # Use the module-level constant so the version is defined in one place.
    version=version,
    maintainer="RapidSMS development community",
    maintainer_email="rapidsms@googlegroups.com",
    description="A framework for building messaging applications",
    url="http://rapidsms.org/",
    package_dir={'': 'lib'},
    packages=find_packages('lib'),
    package_data={'rapidsms': [
        'skeleton/project/*.ini',
        'skeleton/project/manage.py' , 'skeleton/app/*.py']},
    scripts=["rapidsms"],
    install_requires=[
        "setuptools",
        "setuptools-git",
        "pytz",
        "Django",
    ],
    long_description="\n\n".join(
        (_read("README.txt"), _read("CHANGES.txt"))),
    test_suite="rapidsms.tests",
)
| {
"content_hash": "d8d127c57eaea0cacd1e523122342ab2",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 66,
"avg_line_length": 28.642857142857142,
"alnum_prop": 0.6097256857855362,
"repo_name": "genova/rapidsms-senegal",
"id": "bcccab6cce94e9d2a6c3ae5a881c79522afc1d2d",
"size": "825",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
"""Google Cloud Bigtable HappyBase pool module."""
import contextlib
import threading
import six
from gcloud.bigtable.happybase.connection import Connection
from gcloud.bigtable.happybase.connection import _get_instance
_MIN_POOL_SIZE = 1
"""Minimum allowable size of a connection pool."""
class NoConnectionsAvailable(RuntimeError):
    """Exception raised when no connections are available.

    This happens if a timeout was specified when obtaining a connection,
    and no connection became available within the specified timeout.
    """
    # No extra behavior; the type itself is the signal.
class ConnectionPool(object):
    """Thread-safe connection pool.

    .. note::

        All keyword arguments are passed unmodified to the
        :class:`Connection <.happybase.connection.Connection>` constructor
        **except** for ``autoconnect``. This is because the ``open`` /
        ``closed`` status of a connection is managed by the pool. In addition,
        if ``instance`` is not passed, the default / inferred instance is
        determined by the pool and then passed to each
        :class:`Connection <.happybase.connection.Connection>` that is created.

    :type size: int
    :param size: The maximum number of concurrently open connections.

    :type kwargs: dict
    :param kwargs: Keyword arguments passed to
                   :class:`Connection <.happybase.Connection>`
                   constructor.

    :raises: :class:`TypeError <exceptions.TypeError>` if ``size``
             is non an integer.
             :class:`ValueError <exceptions.ValueError>` if ``size``
             is not positive.
    """
    def __init__(self, size, **kwargs):
        if not isinstance(size, six.integer_types):
            raise TypeError('Pool size arg must be an integer')

        if size < _MIN_POOL_SIZE:
            raise ValueError('Pool size must be positive')

        self._lock = threading.Lock()
        # LIFO so the most recently used (likely still warm) connection
        # is handed out first.
        self._queue = six.moves.queue.LifoQueue(maxsize=size)
        self._thread_connections = threading.local()

        connection_kwargs = kwargs
        connection_kwargs['autoconnect'] = False
        if 'instance' not in connection_kwargs:
            connection_kwargs['instance'] = _get_instance(
                timeout=kwargs.get('timeout'))

        # All connections are created eagerly and share the same kwargs.
        for _ in six.moves.range(size):
            connection = Connection(**connection_kwargs)
            self._queue.put(connection)

    def _acquire_connection(self, timeout=None):
        """Acquire a connection from the pool.

        :type timeout: int
        :param timeout: (Optional) Time (in seconds) to wait for a connection
                        to open.

        :rtype: :class:`Connection <.happybase.Connection>`
        :returns: An active connection from the queue stored on the pool.
        :raises: :class:`NoConnectionsAvailable` if ``Queue.get`` fails
                 before the ``timeout`` (only if a timeout is specified).
        """
        try:
            return self._queue.get(block=True, timeout=timeout)
        except six.moves.queue.Empty:
            raise NoConnectionsAvailable('No connection available from pool '
                                         'within specified timeout')

    @contextlib.contextmanager
    def connection(self, timeout=None):
        """Obtain a connection from the pool.

        Must be used as a context manager, for example::

            with pool.connection() as connection:
                pass  # do something with the connection

        If ``timeout`` is omitted, this method waits forever for a connection
        to become available from the local queue.

        Yields an active :class:`Connection <.happybase.connection.Connection>`
        from the pool.

        :type timeout: int
        :param timeout: (Optional) Time (in seconds) to wait for a connection
                        to open.

        :raises: :class:`NoConnectionsAvailable` if no connection can be
                 retrieved from the pool before the ``timeout`` (only if
                 a timeout is specified).
        """
        connection = getattr(self._thread_connections, 'current', None)

        retrieved_new_cnxn = False
        if connection is None:
            # In this case we need to actually grab a connection from the
            # pool. After retrieval, the connection is stored on a thread
            # local so that nested connection requests from the same
            # thread can re-use the same connection instance.
            #
            # NOTE: This code acquires a lock before assigning to the
            #       thread local; see
            #       ('https://emptysqua.re/blog/'
            #        'another-thing-about-pythons-threadlocals/')
            retrieved_new_cnxn = True
            connection = self._acquire_connection(timeout)
            with self._lock:
                self._thread_connections.current = connection

        # This is a no-op for connections that have already been opened
        # since they just call Client.start().
        connection.open()
        yield connection

        # Remove thread local reference after the outermost 'with' block
        # ends. Afterwards the thread no longer owns the connection.
        if retrieved_new_cnxn:
            del self._thread_connections.current
            self._queue.put(connection)
| {
"content_hash": "b18effaa594bc5715a6ca41a0d68781d",
"timestamp": "",
"source": "github",
"line_count": 140,
"max_line_length": 79,
"avg_line_length": 37.857142857142854,
"alnum_prop": 0.6252830188679245,
"repo_name": "elibixby/gcloud-python",
"id": "f670065fb049d67fe5ac54a9f7b441c927b1b3a7",
"size": "5897",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gcloud/bigtable/happybase/pool.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3366"
},
{
"name": "PowerShell",
"bytes": "7195"
},
{
"name": "Protocol Buffer",
"bytes": "92545"
},
{
"name": "Python",
"bytes": "2806467"
},
{
"name": "Shell",
"bytes": "4717"
}
],
"symlink_target": ""
} |
"""Solvers of systems of polynomial equations."""
import collections
from ..domains import EX
from ..matrices import Matrix
from ..polys import groebner, poly, sring
from ..polys.polyerrors import ComputationFailed, PolificationFailed
from ..polys.polytools import parallel_poly_from_expr
from ..polys.solvers import solve_lin_sys
from ..simplify import simplify
from ..utilities import default_sort_key, numbered_symbols
from .utils import checksol
__all__ = ('solve_linear_system', 'solve_poly_system',
'solve_surd_system')
def solve_linear_system(system, *symbols, **flags):
    r"""Solve a system of linear equations given in augmented-matrix form.

    Handles square as well as under- and overdetermined systems; the
    solution set may therefore be empty, a single point, or infinite.

    Parameters
    ==========

    system : Matrix
        Nx(M+1) matrix, which means it has to be in augmented
        form.  This matrix will not be modified.
    \*symbols : list
        List of M Symbol's

    Returns
    =======

    solution: dict or None
        None if the system is inconsistent, otherwise a dictionary of
        solutions.  For underdetermined systems all arbitrary
        parameters are skipped, which may leave the dictionary empty;
        in that case every symbol may take an arbitrary value.

    Examples
    ========

    Solve the following system::

           x + 4 y ==  2
        -2 x +   y == 14

    >>> system = Matrix(((1, 4, 2), (-2, 1, 14)))
    >>> solve_linear_system(system, x, y)
    {x: -6, y: 2}

    A degenerate system returns an empty dictionary.

    >>> system = Matrix(((0, 0, 0), (0, 0, 0)))
    >>> solve_linear_system(system, x, y)
    {}

    See Also
    ========

    diofant.matrices.matrices.MatrixBase.rref
    """
    do_simplify = flags.get('simplify', True)
    # Build the residual expressions A*x - b from the augmented matrix.
    residuals = system*Matrix(symbols + (-1,))
    ring, ring_eqs = sring(residuals.transpose().tolist()[0], *symbols,
                           field=True)
    sol = solve_lin_sys(ring_eqs, ring)
    if sol is None:
        return
    # Translate the ring generators in the result back to the caller's
    # symbols, converting each value to an ordinary expression.
    for key in list(sol):
        sym = ring.symbols[ring.index(key)]
        sol[sym] = sol[key].as_expr()
        del sol[key]
        if do_simplify:
            sol[sym] = simplify(sol[sym])
    return sol
def solve_poly_system(eqs, *gens, **args):
    """
    Solve a system of polynomial equations.
    Polynomial system may have finite number of solutions or
    infinitely many (positive-dimensional systems).
    References
    ==========
    * :cite:`Cox2015ideals`, p. 98
    Examples
    ========
    >>> solve_poly_system([x*y - 2*y, 2*y**2 - x**2], x, y)
    [{x: 0, y: 0}, {x: 2, y: -sqrt(2)}, {x: 2, y: sqrt(2)}]
    >>> solve_poly_system([x*y], x, y)
    [{x: 0}, {y: 0}]
    """
    try:
        args['extension'] = False
        polys, opt = parallel_poly_from_expr(eqs, *gens, **args)
        # Work over an exact coefficient domain; inexact (float) input is
        # converted here and evaluated back to floats at the very end.
        polys = [p.to_exact() for p in polys]
    except PolificationFailed as exc:
        raise ComputationFailed('solve_poly_system', len(eqs), exc)
    def _solve_reduced_system(system, gens):
        """Recursively solves reduced polynomial systems."""
        basis = groebner(system, gens, polys=True, extension=False)
        dim = basis.dimension
        solutions = []
        if dim is None:
            # Groebner basis is trivial: the system is inconsistent.
            return []
        elif dim > 0:
            # Positive-dimensional case: drop a maximal independent set of
            # variables and re-solve the now zero-dimensional system.
            max_iset = max(basis.independent_sets, key=len)
            new_gens = [g for g in gens if g not in max_iset]
            # After removing variables from the maximal set of independent
            # variables for the given ideal - the new ideal is of dimension
            # zero with the independent variables as parameters in the
            # coefficient domain.
            solutions.extend(_solve_reduced_system(system, new_gens))
            # Now we should examine cases when leading coefficient of
            # some polynomial in the system is zero.
            for p in basis.polys:
                lc = poly(p, *new_gens).LC(order=basis.order)
                for special in _solve_reduced_system(system + [lc], gens):
                    # This heuristic wipes out some redundant special
                    # solutions which are already present in solutions after
                    # solving the system with the new set of generators.
                    if all(any((_.subs(s) - _).subs(special).simplify()
                               for _ in gens) for s in solutions):
                        solutions.insert(0, special)
        else:
            # By the elimination property, the last polynomial should
            # be univariate in the last variable.
            f = basis[-1]
            gen = gens[-1]
            zeros = {k.doit() for k in f.exclude().all_roots()}
            if len(basis) == 1:
                return [{gen: zero} for zero in zeros]
            new_basis = [b.set_domain(EX) for b in basis[:-1]]
            # Now substitute zeros for the last variable and
            # solve recursively new obtained zero-dimensional systems.
            for zero in zeros:
                new_system = []
                new_gens = gens[:-1]
                for b in new_basis:
                    eq = b.eval(gen, zero)
                    if not eq.is_zero:
                        new_system.append(eq)
                for solution in _solve_reduced_system(new_system, new_gens):
                    solution[gen] = zero
                    solutions.append(solution)
        return solutions
    result = _solve_reduced_system(polys, opt.gens)
    if not opt.domain.is_Exact:
        # Input had floating-point coefficients: evaluate the exact
        # solutions back at the original precision.
        result = [{k: r[k].evalf(opt.domain.dps) for k in r} for r in result]
    return sorted(result, key=default_sort_key)
def solve_surd_system(eqs, *gens, **args):
    """
    Solve a system of algebraic equations.
    Examples
    ========
    >>> solve_surd_system([x + sqrt(x + 1) - 2])
    [{x: -sqrt(13)/2 + 5/2}]
    """
    eqs = list(eqs)
    if not gens:
        # Default to every free symbol appearing in the equations.
        gens = set().union(*[_.free_symbols for _ in eqs])
        gens = sorted(gens, key=default_sort_key)
    else:
        gens = list(gens)
    # Fresh auxiliary symbols, one per distinct radical encountered.
    aux = numbered_symbols('a')
    neqs = len(eqs)
    orig_eqs = eqs[:]
    ngens = len(gens)
    # bases[radicand] maps auxiliary symbol -> root degree already used.
    bases = collections.defaultdict(dict)
    def q_surd(e):
        # Match non-integer rational powers, i.e. surds like x**(1/2).
        return e.is_Pow and e.exp.is_Rational and not e.exp.is_Integer
    def tr_surd(e):
        # Replace base**(n/d) with v**n, where v is an auxiliary symbol
        # constrained by the appended polynomial equation v**d - base.
        n, d = e.exp.as_numer_denom()
        # Reuse an existing auxiliary symbol whose degree is a multiple
        # of the required one (largest degree first).
        for v2, d2 in sorted(bases.get(e.base, {}).items(),
                             key=lambda _: -_[1]):
            if not d2 % d:
                return v2**(d2 // d)
        v = next(aux)
        bases[e.base][v] = d
        gens.append(v)
        eqs.append(v**d - e.base)
        return v**n
    for i in range(neqs):
        eqs[i] = eqs[i].replace(q_surd, tr_surd)
    # Clear denominators, remembering the non-constant ones so spurious
    # solutions that make them vanish can be rejected below.
    denoms = []
    for i, e in enumerate(eqs):
        eqs[i], d = e.as_numer_denom()
        if not d.is_constant(*gens):
            denoms.insert(0, d)
    # Solve the radical-free polynomial system; some "weak" solutions may
    # come from the wrong root branch and are filtered out afterwards.
    weaksols = solve_poly_system(eqs, *gens, **args)
    for i in range(len(weaksols) - 1, -1, -1):
        if any(checksol(_, weaksols[i], warn=True) for _ in denoms):
            # A denominator vanishes: not a solution of the original system.
            del weaksols[i]
        elif any(checksol(_, weaksols[i], warn=True) is False for _ in orig_eqs):
            # Fails the original (surd) equations: wrong root branch.
            del weaksols[i]
        else:
            # Drop the auxiliary radical symbols from the solution dict.
            for g in gens[ngens:]:
                del weaksols[i][g]
    return weaksols
| {
"content_hash": "400a2dbefc05a271599cbb72980db4ec",
"timestamp": "",
"source": "github",
"line_count": 245,
"max_line_length": 81,
"avg_line_length": 29.616326530612245,
"alnum_prop": 0.5587100330760749,
"repo_name": "skirpichev/omg",
"id": "b312f7fcdf3a80ca93bbef354c3635e0bae45220",
"size": "7256",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "diofant/solvers/polysys.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "10305079"
}
],
"symlink_target": ""
} |
"""
==============================
Asynchronous stochastic solver
==============================
This example illustrates the convergence speed of the asynchronous version of
SVRG and SAGA solvers. These solvers, respectively called KroMagnon and ASAGA,
have been introduced in
* Mania, H., Pan, X., Papailiopoulos, D., Recht, B., Ramchandran, K. and Jordan, M.I., 2015.
Perturbed iterate analysis for asynchronous stochastic optimization.
`arXiv preprint arXiv:1507.06970.`_.
* R. Leblond, F. Pedregosa, and S. Lacoste-Julien: Asaga: Asynchronous
Parallel Saga, `(AISTATS) 2017`_.
.. _arXiv preprint arXiv:1507.06970.: https://arxiv.org/abs/1507.06970
.. _(AISTATS) 2017: https://hal.inria.fr/hal-01665255/document
To obtain a good speedup in a relatively short example we have designed a very
sparse and ill-conditioned problem.
"""
from scipy import sparse
import matplotlib.pyplot as plt
from tick.plot import plot_history
import numpy as np
from tick.linear_model import SimuLogReg, ModelLogReg
from tick.simulation import weights_sparse_gauss
from tick.solver import SVRG, SAGA
from tick.prox import ProxElasticNet
# Fix the RNG seed so the simulated problem is reproducible.
seed = 1398
np.random.seed(seed)
# A large, very sparse and ill-conditioned logistic regression problem,
# designed so the parallel speedup becomes visible quickly.
n_samples = 40000
n_features = 20000
sparsity = 1e-4
penalty_strength = 1e-5
weights = weights_sparse_gauss(n_features, nnz=1000)
intercept = 0.2
features = sparse.rand(n_samples, n_features, density=sparsity, format='csr')
simulator = SimuLogReg(weights, n_samples=n_samples, features=features,
                       verbose=False, intercept=intercept)
features, labels = simulator.simulate()
model = ModelLogReg(fit_intercept=True)
model.fit(features, labels)
prox = ProxElasticNet(penalty_strength, ratio=0.5, range=(0, n_features))
# Constant step size derived from the model's maximal Lipschitz constant.
svrg_step = 1. / model.get_lip_max()
test_n_threads = [1, 2, 4]
fig, axes = plt.subplots(1, 2, figsize=(8, 4))
# One subplot per solver family: SVRG (KroMagnon) and SAGA (ASAGA).
for ax, SolverClass in zip(axes, [SVRG, SAGA]):
    solver_list = []
    solver_labels = []
    for n_threads in test_n_threads:
        solver = SolverClass(step=svrg_step, seed=seed, max_iter=50,
                             verbose=False, n_threads=n_threads, tol=0,
                             record_every=3)
        solver.set_model(model).set_prox(prox)
        solver.solve()
        solver_list += [solver]
        # Label asynchronous (multi-threaded) runs with an 'A' prefix.
        if n_threads == 1:
            solver_labels += [solver.name]
        else:
            solver_labels += ['A{} {}'.format(solver.name, n_threads)]
    plot_history(solver_list, x="time", dist_min=True, log_scale=True,
                 labels=solver_labels, ax=ax)
    ax.set_ylabel('log distance to optimal objective', fontsize=14)
fig.tight_layout()
plt.show()
| {
"content_hash": "61e621be8170582ebe661649049bf05f",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 92,
"avg_line_length": 32.20987654320987,
"alnum_prop": 0.6742046761211192,
"repo_name": "Dekken/tick",
"id": "d3447be84bd275e623145daec49123804e37e3ec",
"size": "2609",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "examples/plot_asynchronous_stochastic_solver.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "6660"
},
{
"name": "C++",
"bytes": "1181742"
},
{
"name": "CMake",
"bytes": "22073"
},
{
"name": "Dockerfile",
"bytes": "2017"
},
{
"name": "Python",
"bytes": "1450866"
},
{
"name": "Shell",
"bytes": "33446"
}
],
"symlink_target": ""
} |
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# Make the project's tools/ directory importable so autodoc can find it.
sys.path.insert(0, os.path.abspath('../../tools'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
    'sphinx.ext.autodoc',
    # NOTE(review): sphinx.ext.pngmath was deprecated in Sphinx 1.4 in
    # favor of sphinx.ext.imgmath -- confirm the Sphinx version in use.
    'sphinx.ext.pngmath',
    'sphinx.ext.intersphinx',
    # Create links to Python source code for the module.
    # 'sphinx.ext.viewcode',
    'sphinx.ext.autosummary',
    'sphinx.ext.inheritance_diagram',
]
# Add any locations and names of other projects that should be linked to in this documentation.
intersphinx_mapping = {
    'python': ('http://docs.python.org', None),
}
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'MPipe'
copyright = u'2014, Velimir Mlaker'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = ''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = ['wm5.', 'wm5', ]
# Set this to 'both' to append the __init__(self) docstring to the class docstring.
autoclass_content = 'both'
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# Custom project theme, looked up under html_theme_path below.
html_theme = 'mpipe'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_themes']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = '{0} Documentation'.format(project)
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# All sidebars are disabled here; alternatives are kept for reference.
html_sidebars = {
    '**' : [],
    # '**' : ['localtoc.html'],
    # '**' : ['globaltoc.html'],
    # '**' : ['searchbox.html', 'search.html'],
    # '**' : ['searchbox.html'],
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
html_additional_pages = { 'search' : 'search.html' }
# If false, no module index is generated.
html_domain_indices = False
# If false, no index is generated.
html_use_index = True
# If true, the index is split into individual pages for each letter.
html_split_index = False
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'MPipedoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
    ('index', 'MPipe.tex', u'MPipe Documentation',
     u'Velimir Mlaker', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
latex_domain_indices = False
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'mpipe', u'MPipe Documentation',
     [u'Velimir Mlaker'], 1)
]
# Prepended to every reST source file before parsing.
rst_prolog = '''
.. |NAME| replace:: MPipe
'''
# End of file.
| {
"content_hash": "34eaa544074f584a53b3d8c56cae8eb5",
"timestamp": "",
"source": "github",
"line_count": 233,
"max_line_length": 95,
"avg_line_length": 31.98283261802575,
"alnum_prop": 0.6906870638754696,
"repo_name": "vmlaker/mpipe",
"id": "214fa6cbf133d87ed0ebb81aae0a225000f4cd26",
"size": "7581",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "doc/source/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "47434"
}
],
"symlink_target": ""
} |
# Runtime globals for PETRARCH: shared dictionaries, file names and
# coding options.  Values are populated at startup from the config file.
VerbDict = {'verbs':{}, 'phrases':{}, 'transformations' : {}} # verb dictionary
ActorDict = {} # actor dictionary
ActorCodes = [] # actor code list
AgentDict = {} # agent dictionary
DiscardList = {} # discard list
IssueList = []
IssueCodes = []
ConfigFileName = "PETR_config.ini"
VerbFileName = "" # verb dictionary
ActorFileList = [] # actor dictionary
AgentFileName = "" # agent dictionary
DiscardFileName = "" # discard list
TextFileList = [] # current text or validation file
EventFileName = "" # event output file
IssueFileName = "" # issues list
# element followed by attribute and content pairs for XML line
AttributeList = []
# NULL CODING OPTIONS
NullVerbs = False # Only get verb phrases that are not in the dictionary but are associated with coded noun phrases
NullActors = False # Only get actor phrases that are not in the dictionary but associated with coded verb phrases
NewActorLength = 0 # Maximum length for new actors extracted from noun phrases
# CODING OPTIONS
# Defaults are more or less equivalent to TABARI
RequireDyad = True # Events require a non-null source and target
StoponError = False # Raise stop exception on errors rather than recovering
# OUTPUT OPTIONS
WriteActorRoot = False # Include actor root in event record
WriteActorText = False # Include actor text in event record
WriteEventText = False # Include event text in event record
RunTimeString = '' # used in error and debugging files -- just set it once
# INTERFACE OPTIONS: these can be changed in config.ini
# The default -- all false -- is equivalent to an A)utocode in TABARI
CodeBySentence = False
PauseBySentence = False
PauseByStory = False
# COMMA OPTION : These adjust the length (in words) of comma-delimited clauses
# that are eliminated from the parse. To deactivate, set the max to zero.
# Defaults, based on TABARI, are in ()
# comma_min : internal clause minimum length [2]
# comma_max : internal clause maximum length [8]
# comma_bmin : initial ("begin") clause minimum length [0]
# comma_bmax : initial clause maximum length [0 : deactivated by default]
# comma_emin : terminal ("end") clause minimum length [2]
# comma_emax : terminal clause maximum length [8]
CommaMin = 2
CommaMax = 8
CommaBMin = 0
CommaBMax = 0
CommaEMin = 2
CommaEMax = 8
# Handle to the Stanford CoreNLP instance; set up elsewhere at startup.
stanfordnlp = ''
# TEMPORARY VARIABLES
# <14.11.20> Temporary in the sense that these won't be needed when we eventually
# refactor so that codes are some sort of structure other than a string
CodePrimer = '=#=' # separates actor code from root and text strings
RootPrimer = CodePrimer + ':' # start of root string
TextPrimer = CodePrimer + '+' # start of text string
NounLeafLabels = ['JJ', 'DT', 'NN', 'NR']
# Convert Chinese treebank tags to their English-treebank equivalents.
LabelsConverter = {
    'MD': 'VB',
    'DNP': 'NP',
    'VV': 'VB',
    'VA': 'AV',
    'PN': 'PRP',
    'IP': 'S',
    'LB': 'VLB'
}
# phrase tags
NPLabels = ['NP']
VPLabels = ['VP']
PPLabels = ['PP']
TPLabels = ['TP']
EXLabels = ['EX']
| {
"content_hash": "feae56161b34c33259136ae9c777231c",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 117,
"avg_line_length": 35.62352941176471,
"alnum_prop": 0.6984808454425363,
"repo_name": "opcoder/zh_ptr",
"id": "b2318043b32f41dad459a4c0c75cc651869e6399",
"size": "4184",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "petrarch2/PETRglobals.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "256498"
},
{
"name": "TeX",
"bytes": "37231"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from pychron.entry.export.base_irradiation_exporter import BaseIrradiationExporter
class YAMLIrradiationExporter(BaseIrradiationExporter):
    """Export irradiations from a pychron database to a YAML file.

    NOTE(review): no export logic is implemented here yet; behavior is
    presumably inherited from BaseIrradiationExporter -- confirm.
    """
# ============= EOF =============================================
| {
"content_hash": "9945d6ee9a908228d70a5190047098f0",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 82,
"avg_line_length": 29.636363636363637,
"alnum_prop": 0.6349693251533742,
"repo_name": "USGSDenverPychron/pychron",
"id": "10d3c94fb5d8bf3e3b4900a520d3165a3c23d27f",
"size": "1258",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "pychron/entry/export/yaml_irradiation_exporter.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "128"
},
{
"name": "C++",
"bytes": "3706"
},
{
"name": "CSS",
"bytes": "263"
},
{
"name": "Cython",
"bytes": "1692"
},
{
"name": "Fortran",
"bytes": "455875"
},
{
"name": "HTML",
"bytes": "46796"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Processing",
"bytes": "11421"
},
{
"name": "Python",
"bytes": "10773692"
},
{
"name": "Shell",
"bytes": "1003"
}
],
"symlink_target": ""
} |
import base64
import binascii
import logging
import lxml.html
import re
import time
from typing import Any, Dict, List, Optional, Tuple, TYPE_CHECKING, Union
from django.conf import settings
from django.db import IntegrityError, transaction
from django.db.models import F
from django.utils.timezone import now as timezone_now
from django.utils.translation import ugettext as _
import gcm
import requests
import ujson
from zerver.decorator import statsd_increment
from zerver.lib.avatar import absolute_avatar_url
from zerver.lib.exceptions import JsonableError
from zerver.lib.message import access_message, \
bulk_access_messages_expect_usermessage, huddle_users
from zerver.lib.queue import retry_event
from zerver.lib.remote_server import send_to_push_bouncer, send_json_to_push_bouncer
from zerver.lib.timestamp import datetime_to_timestamp
from zerver.models import PushDeviceToken, Message, Recipient, \
UserMessage, UserProfile, \
get_display_recipient, receives_offline_push_notifications, \
receives_online_notifications, get_user_profile_by_id, \
ArchivedMessage
# Import apns2 types only for type checking, keeping runtime imports cheap.
if TYPE_CHECKING:
    from apns2.client import APNsClient
logger = logging.getLogger(__name__)
# RemotePushDeviceToken only exists when this server acts as a push
# bouncer (zilencer); otherwise substitute a Mock so references resolve.
if settings.ZILENCER_ENABLED:
    from zilencer.models import RemotePushDeviceToken
else: # nocoverage -- Not convenient to add test for this.
    from mock import Mock
    RemotePushDeviceToken = Mock() # type: ignore # https://github.com/JukkaL/mypy/issues/1188
# A device token row from either the local or the remote (bouncer) table.
DeviceToken = Union[PushDeviceToken, RemotePushDeviceToken]
# We store the token as b64, but apns-client wants hex strings
def b64_to_hex(data: bytes) -> str:
    """Translate a base64-encoded token into its lowercase hex form.

    We store tokens base64-encoded, but apns-client wants hex strings.
    """
    return base64.b64decode(data).hex()
def hex_to_b64(data: str) -> bytes:
    """Translate a hex-encoded token into its base64 representation."""
    raw = binascii.unhexlify(data.encode('utf-8'))
    return base64.b64encode(raw)
#
# Sending to APNs, for iOS
#
# Lazily-created shared APNs client; see get_apns_client().
_apns_client = None # type: Optional[APNsClient]
_apns_client_initialized = False
def get_apns_client() -> 'Optional[APNsClient]':
    """Return the shared APNs client, creating it on first call.

    Returns None when APNS_CERT_FILE is not configured.
    """
    # We lazily do this import as part of optimizing Zulip's base
    # import time.
    from apns2.client import APNsClient
    global _apns_client, _apns_client_initialized
    if _apns_client_initialized:
        return _apns_client
    # NB if called concurrently, this will make excess connections.
    # That's a little sloppy, but harmless unless a server gets
    # hammered with a ton of these all at once after startup.
    if settings.APNS_CERT_FILE is not None:
        _apns_client = APNsClient(credentials=settings.APNS_CERT_FILE,
                                  use_sandbox=settings.APNS_SANDBOX)
    _apns_client_initialized = True
    return _apns_client
def apns_enabled() -> bool:
    """True when an APNs client could be constructed from settings."""
    return get_apns_client() is not None
def modernize_apns_payload(data: Dict[str, Any]) -> Dict[str, Any]:
    """Normalize a payload from any Zulip server version to the current format.

    TODO: this isn't super robust as is -- if a buggy remote server
    sends a malformed payload, we are likely to raise an exception.
    """
    if 'message_ids' not in data:
        # Something already compatible with the current format.
        # `alert` may be a string, or a dict with `title` and `body`.
        # In 1.7.0 and 1.7.1, before 0912b5ba8 pre-1.8.0, the only
        # item in `custom.zulip` is `message_ids`.
        return data
    # The format sent by 1.6.0, from the earliest pre-1.6.0
    # version with bouncer support up until 613d093d7 pre-1.7.0:
    #   'alert': str,              # just sender, and text about PM/group-PM/mention
    #   'message_ids': List[int],  # always just one
    return {
        'alert': data['alert'],
        'badge': 0,
        'custom': {
            'zulip': {
                'message_ids': data['message_ids'],
            },
        },
    }
APNS_MAX_RETRIES = 3
@statsd_increment("apple_push_notification")
def send_apple_push_notification(user_id: int, devices: List[DeviceToken],
                                 payload_data: Dict[str, Any], remote: bool=False) -> None:
    """Send one notification to all of a user's iOS devices via APNs.

    Tokens reported as invalid/expired by APNs are deleted.  When
    remote=True the tokens come from RemotePushDeviceToken (this server
    is acting as the push bouncer).
    """
    # We lazily do the APNS imports as part of optimizing Zulip's base
    # import time; since these are only needed in the push
    # notification queue worker, it's best to only import them in the
    # code that needs them.
    from apns2.payload import Payload as APNsPayload
    from hyper.http20.exceptions import HTTP20Error
    client = get_apns_client()
    if client is None:
        logger.debug("APNs: Dropping a notification because nothing configured. "
                     "Set PUSH_NOTIFICATION_BOUNCER_URL (or APNS_CERT_FILE).")
        return
    if remote:
        DeviceTokenClass = RemotePushDeviceToken
    else:
        DeviceTokenClass = PushDeviceToken
    logger.info("APNs: Sending notification for user %d to %d devices",
                user_id, len(devices))
    payload = APNsPayload(**modernize_apns_payload(payload_data))
    # Ask APNs to keep trying to deliver for up to 24 hours.
    expiration = int(time.time() + 24 * 3600)
    # NOTE(review): this retry budget is shared across all devices in the
    # loop below, not per-device -- confirm that's intended.
    retries_left = APNS_MAX_RETRIES
    for device in devices:
        # TODO obviously this should be made to actually use the async
        def attempt_send() -> Optional[str]:
            # Returns the APNs result string, or None on a transport error.
            try:
                stream_id = client.send_notification_async(
                    device.token, payload, topic='org.zulip.Zulip',
                    expiration=expiration)
                return client.get_notification_result(stream_id)
            except HTTP20Error as e:
                logger.warning("APNs: HTTP error sending for user %d to device %s: %s",
                               user_id, device.token, e.__class__.__name__)
                return None
            except BrokenPipeError as e:
                logger.warning("APNs: BrokenPipeError sending for user %d to device %s: %s",
                               user_id, device.token, e.__class__.__name__)
                return None
            except ConnectionError as e: # nocoverage
                logger.warning("APNs: ConnectionError sending for user %d to device %s: %s",
                               user_id, device.token, e.__class__.__name__)
                return None
        result = attempt_send()
        while result is None and retries_left > 0:
            retries_left -= 1
            result = attempt_send()
        if result is None:
            result = "HTTP error, retries exhausted"
        if result[0] == "Unregistered":
            # For some reason, "Unregistered" result values have a
            # different format, as a tuple of the pair ("Unregistered", 12345132131).
            result = result[0]
        if result == 'Success':
            logger.info("APNs: Success sending for user %d to device %s",
                        user_id, device.token)
        elif result in ["Unregistered", "BadDeviceToken", "DeviceTokenNotForTopic"]:
            logger.info("APNs: Removing invalid/expired token %s (%s)" % (device.token, result))
            # We remove all entries for this token (There
            # could be multiple for different Zulip servers).
            DeviceTokenClass.objects.filter(token=device.token, kind=DeviceTokenClass.APNS).delete()
        else:
            logger.warning("APNs: Failed to send for user %d to device %s: %s",
                           user_id, device.token, result)
#
# Sending to GCM, for Android
#
def make_gcm_client() -> gcm.GCM:  # nocoverage
    """Build a GCM client, pointed at the FCM endpoint."""
    # From GCM upstream's doc for migrating to FCM:
    #
    #   FCM supports HTTP and XMPP protocols that are virtually
    #   identical to the GCM server protocols, so you don't need to
    #   update your sending logic for the migration.
    #
    # https://developers.google.com/cloud-messaging/android/android-migrate-fcm
    #
    # The one thing we're required to change on the server is the URL of
    # the endpoint.  So we get to keep using the GCM client library we've
    # been using (as long as we're happy with it) -- just monkey-patch in
    # that one change, because the library's API doesn't anticipate that
    # as a customization point.
    gcm.gcm.GCM_URL = 'https://fcm.googleapis.com/fcm/send'
    return gcm.GCM(settings.ANDROID_GCM_API_KEY)
# Build the module-level GCM client once at import time, if configured.
if settings.ANDROID_GCM_API_KEY: # nocoverage
    gcm_client = make_gcm_client()
else:
    gcm_client = None
def gcm_enabled() -> bool: # nocoverage
    """True when a GCM client was configured at import time."""
    return gcm_client is not None
def send_android_push_notification_to_user(user_profile: UserProfile, data: Dict[str, Any],
                                           options: Dict[str, Any]) -> None:
    """Send a GCM notification to every Android device the user registered."""
    user_devices = PushDeviceToken.objects.filter(user=user_profile,
                                                  kind=PushDeviceToken.GCM)
    send_android_push_notification(list(user_devices), data, options)
def parse_gcm_options(options: Dict[str, Any], data: Dict[str, Any]) -> str:
    """
    Parse GCM options, supplying defaults, and raising an error if invalid.
    The options permitted here form part of the Zulip notification
    bouncer's API.  They are:
    `priority`: Passed through to GCM; see upstream doc linked below.
        Zulip servers should always set this; when unset, we guess a value
        based on the behavior of old server versions.
    Including unrecognized options is an error.
    For details on options' semantics, see this GCM upstream doc:
    https://developers.google.com/cloud-messaging/http-server-ref
    Returns `priority`.
    """
    priority = options.pop('priority', None)
    if priority is None:
        # An older server.  Identify if this seems to be an actual
        # notification (`'event': 'message'`) vs. a removal.
        priority = 'high' if data.get('event') == 'message' else 'normal'
    if priority not in ('normal', 'high'):
        raise JsonableError(_("Invalid GCM option to bouncer: priority %r")
                            % (priority,))
    if options:
        # We're strict about the API; there is no use case for a newer Zulip
        # server talking to an older bouncer, so we only need to provide
        # one-way compatibility.
        raise JsonableError(_("Invalid GCM options to bouncer: %s")
                            % (ujson.dumps(options),))
    return priority  # when this grows a second option, can make it a tuple
@statsd_increment("android_push_notification")
def send_android_push_notification(devices: List[DeviceToken], data: Dict[str, Any],
                                   options: Dict[str, Any], remote: bool=False) -> None:
    """
    Send a GCM message to the given devices.
    See https://developers.google.com/cloud-messaging/http-server-ref
    for the GCM upstream API which this talks to.
    data: The JSON object (decoded) to send as the 'data' parameter of
    the GCM message.
    options: Additional options to control the GCM message sent.
    For details, see `parse_gcm_options`.
    When remote=True, tokens belong to RemotePushDeviceToken (this
    server is acting as the push bouncer).
    """
    if not gcm_client:
        logger.debug("Skipping sending a GCM push notification since "
                     "PUSH_NOTIFICATION_BOUNCER_URL and ANDROID_GCM_API_KEY are both unset")
        return
    reg_ids = [device.token for device in devices]
    priority = parse_gcm_options(options, data)
    try:
        # See https://developers.google.com/cloud-messaging/http-server-ref .
        # Two kwargs `retries` and `session` get eaten by `json_request`;
        # the rest pass through to the GCM server.
        res = gcm_client.json_request(registration_ids=reg_ids,
                                      priority=priority,
                                      data=data,
                                      retries=10)
    except IOError as e:
        logger.warning(str(e))
        return
    if res and 'success' in res:
        for reg_id, msg_id in res['success'].items():
            logger.info("GCM: Sent %s as %s" % (reg_id, msg_id))
    if remote:
        DeviceTokenClass = RemotePushDeviceToken
    else:
        DeviceTokenClass = PushDeviceToken
    # res.canonical will contain results when there are duplicate registrations for the same
    # device. The "canonical" registration is the latest registration made by the device.
    # Ref: http://developer.android.com/google/gcm/adv.html#canonical
    if 'canonical' in res:
        for reg_id, new_reg_id in res['canonical'].items():
            if reg_id == new_reg_id:
                # I'm not sure if this should happen. In any case, not really actionable.
                logger.warning("GCM: Got canonical ref but it already matches our ID %s!" % (reg_id,))
            elif not DeviceTokenClass.objects.filter(token=new_reg_id,
                                                     kind=DeviceTokenClass.GCM).count():
                # This case shouldn't happen; any time we get a canonical ref it should have been
                # previously registered in our system.
                #
                # That said, recovery is easy: just update the current PDT object to use the new ID.
                logger.warning(
                    "GCM: Got canonical ref %s replacing %s but new ID not registered! Updating." %
                    (new_reg_id, reg_id))
                DeviceTokenClass.objects.filter(
                    token=reg_id, kind=DeviceTokenClass.GCM).update(token=new_reg_id)
            else:
                # Since we know the new ID is registered in our system we can just drop the old one.
                logger.info("GCM: Got canonical ref %s, dropping %s" % (new_reg_id, reg_id))
                DeviceTokenClass.objects.filter(token=reg_id, kind=DeviceTokenClass.GCM).delete()
    if 'errors' in res:
        for error, reg_ids in res['errors'].items():
            if error in ['NotRegistered', 'InvalidRegistration']:
                # GCM says these tokens are dead; clean them up.
                for reg_id in reg_ids:
                    logger.info("GCM: Removing %s" % (reg_id,))
                    # We remove all entries for this token (There
                    # could be multiple for different Zulip servers).
                    DeviceTokenClass.objects.filter(token=reg_id, kind=DeviceTokenClass.GCM).delete()
            else:
                for reg_id in reg_ids:
                    logger.warning("GCM: Delivery to %s failed: %s" % (reg_id, error))
    # python-gcm handles retrying of the unsent messages.
    # Ref: https://github.com/geeknam/python-gcm/blob/master/gcm/gcm.py#L497
#
# Sending to a bouncer
#
def uses_notification_bouncer() -> bool:
    """Return True when this server relays pushes through a configured bouncer."""
    bouncer_url = settings.PUSH_NOTIFICATION_BOUNCER_URL
    return bouncer_url is not None
def send_notifications_to_bouncer(user_profile_id: int,
                                  apns_payload: Dict[str, Any],
                                  gcm_payload: Dict[str, Any],
                                  gcm_options: Dict[str, Any]) -> None:
    """Forward one user's APNs/GCM payloads to the push bouncer.

    Calls zilencer.views.remote_server_notify_push on the bouncer side.
    """
    send_json_to_push_bouncer('POST', 'push/notify', {
        'user_id': user_profile_id,
        'apns_payload': apns_payload,
        'gcm_payload': gcm_payload,
        'gcm_options': gcm_options,
    })
#
# Managing device tokens
#
def num_push_devices_for_user(user_profile: UserProfile, kind: Optional[int]=None) -> int:
    """Count the registered push device tokens for a user.

    :param user_profile: the user whose tokens are counted
    :param kind: optional PushDeviceToken kind (e.g. APNS/GCM) to filter by
    :returns: the number of matching tokens

    Fix: the return annotation previously claimed ``PushDeviceToken``, but
    ``QuerySet.count()`` returns an int.
    """
    query = PushDeviceToken.objects.filter(user=user_profile)
    if kind is not None:
        query = query.filter(kind=kind)
    return query.count()
def add_push_device_token(user_profile: UserProfile,
                          token_str: bytes,
                          kind: int,
                          ios_app_id: Optional[str]=None) -> None:
    """Register a push device token for a user, locally or via the bouncer.

    :param token_str: the raw device token from APNs/GCM
    :param kind: PushDeviceToken kind constant (APNS or GCM)
    :param ios_app_id: app bundle id, only meaningful for APNS tokens
    """
    logger.info("Registering push device: %d %r %d %r",
                user_profile.id, token_str, kind, ios_app_id)
    # If we're sending things to the push notification bouncer
    # register this user with them here
    if uses_notification_bouncer():
        post_data = {
            'server_uuid': settings.ZULIP_ORG_ID,
            'user_id': user_profile.id,
            'token': token_str,
            'token_kind': kind,
        }
        if kind == PushDeviceToken.APNS:
            post_data['ios_app_id'] = ios_app_id
        logger.info("Sending new push device to bouncer: %r", post_data)
        # Calls zilencer.views.register_remote_push_device
        send_to_push_bouncer('POST', 'push/register', post_data)
        return
    try:
        with transaction.atomic():
            PushDeviceToken.objects.create(
                user_id=user_profile.id,
                kind=kind,
                token=token_str,
                ios_app_id=ios_app_id,
                # last_updated is to be renamed to date_created.
                last_updated=timezone_now())
    except IntegrityError:
        # Duplicate registration for the same token; safe to ignore.
        pass
def remove_push_device_token(user_profile: UserProfile, token_str: bytes, kind: int) -> None:
    """Unregister a push device token, locally or via the bouncer.

    :raises JsonableError: if no matching local token exists.
    """
    # If we're sending things to the push notification bouncer
    # unregister this user with them here
    if uses_notification_bouncer():
        # TODO: Make this a remove item
        post_data = {
            'server_uuid': settings.ZULIP_ORG_ID,
            'user_id': user_profile.id,
            'token': token_str,
            'token_kind': kind,
        }
        # Calls zilencer.views.unregister_remote_push_device
        send_to_push_bouncer("POST", "push/unregister", post_data)
        return
    try:
        token = PushDeviceToken.objects.get(token=token_str, kind=kind, user=user_profile)
        token.delete()
    except PushDeviceToken.DoesNotExist:
        raise JsonableError(_("Token does not exist"))
#
# Push notifications in general
#
def push_notifications_enabled() -> bool:
    """Return True if this server has some configured way to send pushes."""
    bouncer_ready = (
        uses_notification_bouncer()
        and settings.ZULIP_ORG_KEY is not None
        and settings.ZULIP_ORG_ID is not None
    )
    if bouncer_ready:  # nocoverage
        # We have the needed configuration to send push notifications through
        # the bouncer.  Better yet would be to confirm that this config
        # actually works -- e.g., that we have ever successfully sent to the
        # bouncer -- but this is a good start.
        return True
    if apns_enabled() and gcm_enabled():  # nocoverage
        # We have the needed configuration to send through APNs and GCM
        # directly (i.e., we are the bouncer, presumably.)  Again, assume it
        # actually works.
        return True
    return False
def initialize_push_notifications() -> None:
    """At startup, warn the admin when mobile pushes cannot be delivered."""
    if not push_notifications_enabled():
        if settings.DEVELOPMENT and not settings.TEST_SUITE:  # nocoverage
            # Avoid unnecessary spam on development environment startup
            return
        logger.warning("Mobile push notifications are not configured.\n  "
                       "See https://zulip.readthedocs.io/en/latest/"
                       "production/mobile-push-notifications.html")
def get_gcm_alert(message: Message) -> str:
    """
    Determine what alert string to display based on the missed messages.
    """
    sender_str = message.sender.full_name
    trigger = message.trigger
    recipient_type = message.recipient.type
    if trigger == 'private_message' and recipient_type == Recipient.HUDDLE:
        return "New private group message from %s" % (sender_str,)
    if trigger == 'private_message' and recipient_type == Recipient.PERSONAL:
        return "New private message from %s" % (sender_str,)
    if trigger == 'mentioned' and message.is_stream_message():
        return "New mention from %s" % (sender_str,)
    # message.is_stream_message() and trigger == 'stream_push_notify'
    return "New stream message from %s in %s" % (sender_str, get_display_recipient(message.recipient),)
def get_mobile_push_content(rendered_content: str) -> str:
    """Flatten a message's rendered HTML into plain text for a push payload.

    Emoji spans become their unicode characters, images collapse to their
    alt text, and blockquotes are re-rendered with "> " line prefixes.
    """
    def get_text(elem: lxml.html.HtmlElement) -> str:
        # Text contributed by a single element (children and tail handled
        # by process()).
        # Convert default emojis to their unicode equivalent.
        classes = elem.get("class", "")
        if "emoji" in classes:
            match = re.search(r"emoji-(?P<emoji_code>\S+)", classes)
            if match:
                emoji_code = match.group('emoji_code')
                char_repr = ""
                # Emoji codes are dash-separated hex codepoints.
                for codepoint in emoji_code.split('-'):
                    char_repr += chr(int(codepoint, 16))
                return char_repr
        # Handles realm emojis, avatars etc.
        if elem.tag == "img":
            return elem.get("alt", "")
        if elem.tag == 'blockquote':
            return ''  # To avoid empty line before quote text
        return elem.text or ''

    def format_as_quote(quote_text: str) -> str:
        # Render already-flattened quote text in "> line" convention.
        quote_text_list = filter(None, quote_text.split('\n'))  # Remove empty lines
        quote_text = '\n'.join(map(lambda x: "> "+x, quote_text_list))
        quote_text += '\n'
        return quote_text

    def process(elem: lxml.html.HtmlElement) -> str:
        # Depth-first walk: own text, then children, then tail text.
        plain_text = get_text(elem)
        sub_text = ''
        for child in elem:
            sub_text += process(child)
        if elem.tag == 'blockquote':
            sub_text = format_as_quote(sub_text)
        plain_text += sub_text
        plain_text += elem.tail or ""
        return plain_text

    if settings.PUSH_NOTIFICATION_REDACT_CONTENT:
        return "***REDACTED***"
    elem = lxml.html.fromstring(rendered_content)
    plain_text = process(elem)
    return plain_text
def truncate_content(content: str) -> Tuple[str, bool]:
    """Clip content to at most 200 characters.

    Returns the (possibly clipped) text and whether clipping happened.  We
    append unicode 'HORIZONTAL ELLIPSIS' (U+2026) instead of three dots, as
    this saves two extra characters for textual content.  This will need
    updating to handle unicode combining characters and tags once we start
    supporting them.
    """
    if len(content) > 200:
        return content[:200] + "…", True
    return content, False
def get_base_payload(user_profile: UserProfile) -> Dict[str, Any]:
    """Common fields for all notification payloads.

    These will let the app support logging into multiple realms and servers.
    """
    realm = user_profile.realm
    return {
        'server': settings.EXTERNAL_HOST,
        'realm_id': realm.id,
        'realm_uri': realm.uri,
        'user_id': user_profile.id,
    }
def get_message_payload(user_profile: UserProfile, message: Message) -> Dict[str, Any]:
    """Common fields for `message` payloads, for all platforms."""
    data = get_base_payload(user_profile)
    # `sender_id` is preferred, but some existing versions use `sender_email`.
    data['sender_id'] = message.sender.id
    data['sender_email'] = message.sender.email
    recipient = message.recipient
    if recipient.type == Recipient.STREAM:
        data['recipient_type'] = "stream"
        data['stream'] = get_display_recipient(recipient)
        data['topic'] = message.topic_name()
    elif recipient.type == Recipient.HUDDLE:
        data['recipient_type'] = "private"
        data['pm_users'] = huddle_users(recipient.id)
    else:  # Recipient.PERSONAL
        data['recipient_type'] = "private"
    return data
def get_apns_alert_title(message: Message) -> str:
    """
    On an iOS notification, this is the first bolded line.
    """
    recipient = message.recipient
    if recipient.type == Recipient.HUDDLE:
        recipients = get_display_recipient(recipient)
        assert isinstance(recipients, list)
        names = sorted(r['full_name'] for r in recipients)
        return ', '.join(names)
    if message.is_stream_message():
        return "#%s > %s" % (get_display_recipient(recipient), message.topic_name(),)
    # For personal PMs, we just show the sender name.
    return message.sender.full_name
def get_apns_alert_subtitle(message: Message) -> str:
    """
    On an iOS notification, this is the second bolded line.
    """
    sender_name = message.sender.full_name
    if message.trigger == "mentioned":
        return sender_name + " mentioned you:"
    if message.recipient.type == Recipient.PERSONAL:
        return ""
    # For group PMs, or regular messages to a stream, just use a colon to indicate this is the sender.
    return sender_name + ":"
def get_message_payload_apns(user_profile: UserProfile, message: Message) -> Dict[str, Any]:
    """A `message` payload for iOS, via APNs."""
    zulip_data = get_message_payload(user_profile, message)
    zulip_data['message_ids'] = [message.id]
    content, _ = truncate_content(get_mobile_push_content(message.rendered_content))
    return {
        'alert': {
            'title': get_apns_alert_title(message),
            'subtitle': get_apns_alert_subtitle(message),
            'body': content,
        },
        'sound': 'default',
        'badge': 0,  # TODO: set badge count in a better way
        'custom': {'zulip': zulip_data},
    }
def get_message_payload_gcm(
        user_profile: UserProfile, message: Message,
) -> Tuple[Dict[str, Any], Dict[str, Any]]:
    """A `message` payload + options, for Android via GCM/FCM."""
    data = get_message_payload(user_profile, message)
    content, truncated = truncate_content(get_mobile_push_content(message.rendered_content))
    data['event'] = 'message'
    data['alert'] = get_gcm_alert(message)
    data['zulip_message_id'] = message.id  # message_id is reserved for CCS
    data['time'] = datetime_to_timestamp(message.pub_date)
    data['content'] = content
    data['content_truncated'] = truncated
    data['sender_full_name'] = message.sender.full_name
    data['sender_avatar_url'] = absolute_avatar_url(message.sender)
    gcm_options = {'priority': 'high'}
    return data, gcm_options
def get_remove_payload_gcm(
        user_profile: UserProfile, message_ids: List[int],
) -> Tuple[Dict[str, Any], Dict[str, Any]]:
    """A `remove` payload + options, for Android via GCM/FCM."""
    gcm_payload = get_base_payload(user_profile)
    gcm_payload['event'] = 'remove'
    gcm_payload['zulip_message_ids'] = ','.join(str(id) for id in message_ids)
    # Older clients (all clients older than 2019-02-13) look only at
    # `zulip_message_id` and ignore `zulip_message_ids`.  Do our best.
    gcm_payload['zulip_message_id'] = message_ids[0]
    gcm_options = {'priority': 'normal'}
    return gcm_payload, gcm_options
def handle_remove_push_notification(user_profile_id: int, message_ids: List[int]) -> None:
    """This should be called when a message that previously triggered a
    mobile push notification is read.  It pushes a `remove` event to the
    mobile app so the notification is removed from the device, and clears
    the active-notification flag on the affected UserMessage rows.
    """
    user_profile = get_user_profile_by_id(user_profile_id)
    # Restrict to messages the user actually has a UserMessage row for.
    message_ids = bulk_access_messages_expect_usermessage(user_profile_id, message_ids)
    gcm_payload, gcm_options = get_remove_payload_gcm(user_profile, message_ids)
    if uses_notification_bouncer():
        try:
            # APNs payload is empty: only an Android `remove` event is sent.
            send_notifications_to_bouncer(user_profile_id,
                                          {},
                                          gcm_payload,
                                          gcm_options)
        except requests.ConnectionError:  # nocoverage
            # NOTE(review): failure_processor is defined but never used --
            # unlike handle_push_notification, no retry_event call follows,
            # so this event appears to be silently dropped on connection
            # errors.  Confirm whether a retry_event call is missing here.
            def failure_processor(event: Dict[str, Any]) -> None:
                logger.warning(
                    "Maximum retries exceeded for trigger:%s event:push_notification" % (
                        event['user_profile_id'],))
    else:
        android_devices = list(PushDeviceToken.objects.filter(
            user=user_profile, kind=PushDeviceToken.GCM))
        if android_devices:
            send_android_push_notification(android_devices, gcm_payload, gcm_options)
    # Clear the flag so we don't try to revoke these messages again.
    UserMessage.objects.filter(
        user_profile_id=user_profile_id,
        message_id__in=message_ids,
    ).update(
        flags=F('flags').bitand(
            ~UserMessage.flags.active_mobile_push_notification))
@statsd_increment("push_notifications")
def handle_push_notification(user_profile_id: int, missed_message: Dict[str, Any]) -> None:
    """Send mobile push notifications (APNs/GCM) for one missed message.

    missed_message is the event received by the
    zerver.worker.queue_processors.PushNotificationWorker.consume function.

    Fix: the original fetched user_profile twice in a row; the redundant
    second database query was removed.
    """
    if not push_notifications_enabled():
        return
    user_profile = get_user_profile_by_id(user_profile_id)
    if not (receives_offline_push_notifications(user_profile) or
            receives_online_notifications(user_profile)):
        return
    try:
        (message, user_message) = access_message(user_profile, missed_message['message_id'])
    except JsonableError:
        if ArchivedMessage.objects.filter(id=missed_message['message_id']).exists():
            # If the cause is a race with the message being deleted,
            # that's normal and we have no need to log an error.
            return
        logging.error("Unexpected message access failure handling push notifications: %s %s" % (
            user_profile.id, missed_message['message_id']))
        return

    if user_message is not None:
        # If the user has read the message already, don't push-notify.
        #
        # TODO: It feels like this is already handled when things are
        # put in the queue; maybe we should centralize this logic with
        # the `zerver/tornado/event_queue.py` logic?
        if user_message.flags.read:
            return
        # Otherwise, we mark the message as having an active mobile
        # push notification, so that we can send revocation messages
        # later.
        user_message.flags.active_mobile_push_notification = True
        user_message.save(update_fields=["flags"])
    else:
        # Users should only be getting push notifications into this
        # queue for messages they haven't received if they're
        # long-term idle; anything else is likely a bug.
        if not user_profile.long_term_idle:
            logger.error("Could not find UserMessage with message_id %s and user_id %s" % (
                missed_message['message_id'], user_profile_id))
            return
    message.trigger = missed_message['trigger']
    apns_payload = get_message_payload_apns(user_profile, message)
    gcm_payload, gcm_options = get_message_payload_gcm(user_profile, message)
    logger.info("Sending push notifications to mobile clients for user %s" % (user_profile_id,))
    if uses_notification_bouncer():
        try:
            send_notifications_to_bouncer(user_profile_id,
                                          apns_payload,
                                          gcm_payload,
                                          gcm_options)
        except requests.ConnectionError:
            # Requeue the event so delivery is retried; give up loudly
            # once the retry limit is exhausted.
            def failure_processor(event: Dict[str, Any]) -> None:
                logger.warning(
                    "Maximum retries exceeded for trigger:%s event:push_notification" % (
                        event['user_profile_id'],))
            retry_event('missedmessage_mobile_notifications', missed_message,
                        failure_processor)
        return
    android_devices = list(PushDeviceToken.objects.filter(user=user_profile,
                                                          kind=PushDeviceToken.GCM))
    apple_devices = list(PushDeviceToken.objects.filter(user=user_profile,
                                                        kind=PushDeviceToken.APNS))
    if apple_devices:
        send_apple_push_notification(user_profile.id, apple_devices,
                                     apns_payload)
    if android_devices:
        send_android_push_notification(android_devices, gcm_payload, gcm_options)
| {
"content_hash": "076c333149e8eecae7fac861063dbe0a",
"timestamp": "",
"source": "github",
"line_count": 741,
"max_line_length": 107,
"avg_line_length": 42.472334682860996,
"alnum_prop": 0.6217590238942552,
"repo_name": "tommyip/zulip",
"id": "da65e42375bbf059bc81c42f7406090e14b54199",
"size": "31499",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "zerver/lib/push_notifications.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "400301"
},
{
"name": "Dockerfile",
"bytes": "2939"
},
{
"name": "Emacs Lisp",
"bytes": "157"
},
{
"name": "HTML",
"bytes": "718599"
},
{
"name": "JavaScript",
"bytes": "3092201"
},
{
"name": "Perl",
"bytes": "398763"
},
{
"name": "Puppet",
"bytes": "71123"
},
{
"name": "Python",
"bytes": "6889539"
},
{
"name": "Ruby",
"bytes": "6110"
},
{
"name": "Shell",
"bytes": "119898"
},
{
"name": "TypeScript",
"bytes": "14645"
}
],
"symlink_target": ""
} |
from pythonic_testcase import *
from soapfish import xsdspec
class XSDSpecSchemaTest(PythonicTestCase):
    """Parsing of XSD element references ("ref" attributes)."""
    def test_can_parse_references(self):
        # Schema where element "job" nests a reference to element "person".
        xml = ('<xs:schema targetNamespace="http://site.example/ws/spec" \n'
               '     xmlns:example="http://site.example/ws/spec" \n'
               '     xmlns:xs="http://www.w3.org/2001/XMLSchema" \n'
               '     elementFormDefault="qualified">\n'
               '    <xs:element name="person">\n'
               '        <xs:complexType>\n'
               '            <xs:sequence>\n'
               '                <xs:element name="name" type="xs:string" />\n'
               '            </xs:sequence>\n'
               '        </xs:complexType>\n'
               '    </xs:element>\n'
               '    <xs:element name="job">\n'
               '        <xs:complexType>\n'
               '            <xs:sequence>\n'
               '                <xs:element ref="example:person" />\n'
               '            </xs:sequence>\n'
               '        </xs:complexType>\n'
               '    </xs:element>\n'
               '</xs:schema>')
        parsed_schema = xsdspec.Schema.parsexml(xml)
        person_element = parsed_schema.elements[0]
        assert_equals('person', person_element.name)
        job_element = parsed_schema.elements[1]
        assert_equals('job', job_element.name)
        # The nested element carries only a ref, no name of its own.
        reference = job_element.complexType.sequence.elements[0]
        assert_none(reference.name)
        assert_equals('example:person', reference.ref)
        # TODO: check that the reference points to person
| {
"content_hash": "d633da4ee967d3c30f19a626a80b6789",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 76,
"avg_line_length": 40.38461538461539,
"alnum_prop": 0.5073015873015873,
"repo_name": "vpistis/soapfish",
"id": "3aefaa4e67139de763c5f5fc06130f363453186c",
"size": "1576",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/xsdspec_schema_test.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "264918"
}
],
"symlink_target": ""
} |
import terminal
class MyCommand(terminal.Command):
    """Command subclass that colorizes help section titles."""
    def print_title(self, title):
        # Option headings print magenta, command headings green; other
        # titles are suppressed, matching the original behavior.
        for keyword, colorize in (('Option', terminal.magenta),
                                  ('Command', terminal.green)):
            if keyword in title:
                print(colorize(title))
                break
        return self
# Build the example CLI program using the colorizing command class.
program = MyCommand('terminal')
# add actions
subcommand = terminal.Command('build', 'build the site')
program.action(subcommand)
@program.action
def log(verbose=False):
    """
    print a log test

    :param verbose: show more logs
    """
    terminal.log.config(verbose=verbose)
    terminal.log.info('this is a info message')
    terminal.log.verbose.info('this is a verbose message')
# add options
program.option('-f, --force', 'force to do something')
program.option('-o, --output <output>', 'output directory')
# Parse sys.argv and dispatch to the selected action.
program.parse()
if program.output:
    print('output is %s' % program.output)
| {
"content_hash": "81025f14a374521e4b3612560cdef57e",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 59,
"avg_line_length": 20.571428571428573,
"alnum_prop": 0.6620370370370371,
"repo_name": "lepture/terminal",
"id": "15de404185ff7baa5f5603ed4f0416b5468ea392",
"size": "881",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/command.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "62569"
}
],
"symlink_target": ""
} |
from __future__ import division
import numpy as np
from numpy.testing import assert_almost_equal
import pytest
from statsmodels.datasets import heart
from statsmodels.tools import add_constant
from statsmodels.emplike.aft_el import emplikeAFT
from .results.el_results import AFTRes
class GenRes(object):
    """Shared fixture: fit an empirical-likelihood AFT model on the heart data."""
    @classmethod
    def setup_class(cls):
        dataset = heart.load(as_pandas=False)
        design = add_constant(dataset.exog)
        log_durations = np.log10(dataset.endog)
        cls.mod1 = emplikeAFT(log_durations, design, dataset.censors)
        cls.res1 = cls.mod1.fit()
        cls.res2 = AFTRes()
class Test_AFTModel(GenRes):
    """Compare emplikeAFT estimates and tests against stored reference values."""
    def test_params(self):
        assert_almost_equal(self.res1.params(), self.res2.test_params,
                            decimal=4)

    def test_beta0(self):
        observed = self.res1.test_beta([4], [0])
        assert_almost_equal(observed, self.res2.test_beta0, decimal=4)

    def test_beta1(self):
        observed = self.res1.test_beta([-.04], [1])
        assert_almost_equal(observed, self.res2.test_beta1, decimal=4)

    def test_beta_vect(self):
        observed = self.res1.test_beta([3.5, -.035], [0, 1])
        assert_almost_equal(observed, self.res2.test_joint, decimal=4)

    @pytest.mark.slow
    def test_betaci(self):
        # Both endpoints of the confidence interval should sit at p == .05.
        ci = self.res1.ci_beta(1, -.06, 0)
        lower, upper = ci[0], ci[1]
        lower_pval = self.res1.test_beta([lower], [1])[1]
        upper_pval = self.res1.test_beta([upper], [1])[1]
        assert_almost_equal(upper_pval, .050000, decimal=4)
        assert_almost_equal(lower_pval, .05000, decimal=4)
| {
"content_hash": "315ac799f16d1fe96d83c390f9ad2888",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 70,
"avg_line_length": 31.32,
"alnum_prop": 0.5983397190293742,
"repo_name": "ChadFulton/statsmodels",
"id": "e0a04a409863e3df4cac5b2bc998e8c9ee74a2d7",
"size": "1566",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "statsmodels/emplike/tests/test_aft.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AGS Script",
"bytes": "457842"
},
{
"name": "Assembly",
"bytes": "10035"
},
{
"name": "Batchfile",
"bytes": "3469"
},
{
"name": "C",
"bytes": "381"
},
{
"name": "HTML",
"bytes": "148470"
},
{
"name": "MATLAB",
"bytes": "2609"
},
{
"name": "Python",
"bytes": "11749760"
},
{
"name": "R",
"bytes": "90986"
},
{
"name": "Rebol",
"bytes": "123"
},
{
"name": "Shell",
"bytes": "8181"
},
{
"name": "Smarty",
"bytes": "1014"
},
{
"name": "Stata",
"bytes": "65045"
}
],
"symlink_target": ""
} |
# Start (or fetch) the MyRobotLab 'speech' service via the Runtime singleton.
speech = Runtime.start('speech','Speech')
| {
"content_hash": "a58bf92fc5a5430d7f12c379a8103862",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 41,
"avg_line_length": 42,
"alnum_prop": 0.7142857142857143,
"repo_name": "MyRobotLab/pyrobotlab",
"id": "1b9162fb8d013704131cd4e34bfc9ed94033a27c",
"size": "275",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "service/Speech.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1827"
},
{
"name": "C",
"bytes": "126258"
},
{
"name": "C++",
"bytes": "373018"
},
{
"name": "Java",
"bytes": "156911"
},
{
"name": "Processing",
"bytes": "17022"
},
{
"name": "Python",
"bytes": "3309101"
},
{
"name": "Shell",
"bytes": "4635"
},
{
"name": "VBA",
"bytes": "11115"
}
],
"symlink_target": ""
} |
import os
import sys
from neutron_lib import constants
from oslo_config import cfg
from oslo_log import log as logging
import oslo_messaging
from oslo_service import service
from neutron._i18n import _LE, _LI
from neutron.agent.linux import ip_lib
from neutron.agent.linux import utils
from neutron.agent import securitygroups_rpc as sg_rpc
from neutron.common import config as common_config
from neutron.common import topics
from neutron.common import utils as n_utils
from neutron.plugins.common import constants as p_constants
from neutron.plugins.ml2.drivers.agent import _agent_manager_base as amb
from neutron.plugins.ml2.drivers.agent import _common_agent as ca
from neutron.plugins.ml2.drivers.macvtap.agent import config # noqa
from neutron.plugins.ml2.drivers.macvtap import macvtap_common
LOG = logging.getLogger(__name__)
MACVTAP_AGENT_BINARY = "neutron-macvtap-agent"
MACVTAP_FS = "/sys/class/net/"
EXTENSION_DRIVER_TYPE = 'macvtap'
class MacvtapRPCCallBack(sg_rpc.SecurityGroupAgentRpcCallbackMixin,
                         amb.CommonAgentManagerRpcCallBackBase):
    """RPC callbacks the Neutron server invokes on this macvtap agent."""
    # Set RPC API version to 1.0 by default.
    # history
    #   1.1 Support Security Group RPC
    #   1.3 Added param devices_to_update to security_groups_provider_updated
    #   1.4 Added support for network_update
    target = oslo_messaging.Target(version='1.4')

    def network_delete(self, context, **kwargs):
        # On network delete, remove the local vlan device the agent created
        # for a VLAN segment (flat networks leave nothing behind).
        LOG.debug("network_delete received")
        network_id = kwargs.get('network_id')
        if network_id not in self.network_map:
            LOG.error(_LE("Network %s is not available."), network_id)
            return
        segment = self.network_map.get(network_id)
        if segment and segment.network_type == p_constants.TYPE_VLAN:
            if_mappings = self.agent.mgr.interface_mappings
            vlan_device_name = macvtap_common.get_vlan_device_name(
                if_mappings[segment.physical_network],
                str(segment.segmentation_id))
            ip_dev = ip_lib.IPDevice(vlan_device_name)
            if ip_dev.exists():
                LOG.debug("Delete %s", ip_dev.name)
                ip_dev.link.delete()
            else:
                LOG.debug("Cannot delete vlan device %s; it does not exist",
                          vlan_device_name)

    def port_update(self, context, **kwargs):
        # Queue the port's MAC for re-processing by the common agent loop.
        port = kwargs['port']
        LOG.debug("port_update received for port %s ", port)
        mac = port['mac_address']
        # Put the device name in the updated_devices set.
        # Do not store port details, as if they're used for processing
        # notifications there is no guarantee the notifications are
        # processed in the same order as the relevant API requests.
        self.updated_devices.add(mac)
class MacvtapManager(amb.CommonAgentManagerBase):
    """CommonAgentManagerBase implementation for macvtap devices.

    Devices are identified by their MAC address; the manager keeps a
    MAC -> macvtap device name mapping refreshed by get_all_devices().
    """

    def __init__(self, interface_mappings):
        # physical network name -> host interface name (from config)
        self.interface_mappings = interface_mappings
        self.validate_interface_mappings()
        self.mac_device_name_mappings = dict()

    def validate_interface_mappings(self):
        # Abort agent startup if any configured interface is missing.
        for physnet, interface in self.interface_mappings.items():
            if not ip_lib.device_exists(interface):
                LOG.error(_LE("Interface %(intf)s for physical network "
                              "%(net)s does not exist. Agent terminated!"),
                          {'intf': interface, 'net': physnet})
                sys.exit(1)

    def ensure_port_admin_state(self, device, admin_state_up):
        # `device` is a MAC; translate to the local macvtap device name.
        LOG.debug("Setting admin_state_up to %s for device %s",
                  admin_state_up, device)
        dev = ip_lib.IPDevice(self.mac_device_name_mappings[device])
        if admin_state_up:
            dev.link.set_up()
        else:
            dev.link.set_down()

    def get_agent_configurations(self):
        return {'interface_mappings': self.interface_mappings}

    def get_agent_id(self):
        # Derive a unique agent id from the MAC of the first host device.
        devices = ip_lib.IPWrapper().get_devices(True)
        if devices:
            mac = utils.get_interface_mac(devices[0].name)
            return 'macvtap%s' % mac.replace(":", "")
        else:
            LOG.error(_LE("Unable to obtain MAC address for unique ID. "
                          "Agent terminated!"))
            sys.exit(1)

    def get_devices_modified_timestamps(self, devices):
        # TODO(kevinbenton): this should be implemented to detect
        # rapid Nova instance rebuilds.
        return {}

    def get_all_devices(self):
        # Scan sysfs for macvtap devices; return the set of their MACs and
        # rebuild the MAC -> device name mapping as a side effect.
        devices = set()
        all_device_names = os.listdir(MACVTAP_FS)
        # Refresh the mac_device_name mapping
        self.mac_device_name_mappings = dict()
        for device_name in all_device_names:
            if device_name.startswith(constants.MACVTAP_DEVICE_PREFIX):
                mac = utils.get_interface_mac(device_name)
                self.mac_device_name_mappings[mac] = device_name
                devices.add(mac)
        return devices

    def get_extension_driver_type(self):
        return EXTENSION_DRIVER_TYPE

    def get_rpc_callbacks(self, context, agent, sg_agent):
        return MacvtapRPCCallBack(context, agent, sg_agent)

    def get_rpc_consumers(self):
        consumers = [[topics.PORT, topics.UPDATE],
                     [topics.NETWORK, topics.DELETE],
                     [topics.SECURITY_GROUP, topics.UPDATE]]
        return consumers

    def plug_interface(self, network_id, network_segment, device,
                       device_owner):
        # Setting ALLMULTICAST Flag on macvtap device to allow the guest
        # receiving traffic for arbitrary multicast addresses.
        # The alternative would be to let libvirt instantiate the macvtap
        # device with the 'trustGuestRxFilters' option. But doing so, the guest
        # would be able to change its mac address and therefore the mac
        # address of the macvtap device.
        dev = ip_lib.IPDevice(self.mac_device_name_mappings[device])
        dev.link.set_allmulticast_on()
        return True

    def setup_arp_spoofing_protection(self, device, device_details):
        # ARP spoofing protection is not implemented for macvtap; no-op.
        pass

    def delete_arp_spoofing_protection(self, devices):
        pass

    def delete_unreferenced_arp_protection(self, current_devices):
        pass
def parse_interface_mappings():
    """Parse physnet:interface mappings from config; exit the agent on error."""
    try:
        mappings = n_utils.parse_mappings(
            cfg.CONF.macvtap.physical_interface_mappings)
    except ValueError as e:
        LOG.error(_LE("Parsing physical_interface_mappings failed: %s. "
                      "Agent terminated!"), e)
        sys.exit(1)
    LOG.info(_LI("Interface mappings: %s"), mappings)
    return mappings
def validate_firewall_driver():
    """Exit unless the configured firewall driver is the supported noop one."""
    fw_driver = cfg.CONF.SECURITYGROUP.firewall_driver
    if fw_driver in ('neutron.agent.firewall.NoopFirewallDriver', 'noop'):
        return
    LOG.error(_LE('Unsupported configuration option for "SECURITYGROUP.'
                  'firewall_driver"! Only the NoopFirewallDriver is '
                  'supported by macvtap agent, but "%s" is configured. '
                  'Set the firewall_driver to "noop" and start the '
                  'agent again. Agent terminated!'),
              fw_driver)
    sys.exit(1)
def main():
    """Entry point for the macvtap L2 agent: configure, build, and run."""
    common_config.init(sys.argv[1:])
    common_config.setup_logging()
    validate_firewall_driver()
    interface_mappings = parse_interface_mappings()
    agent = ca.CommonAgentLoop(MacvtapManager(interface_mappings),
                               cfg.CONF.AGENT.polling_interval,
                               cfg.CONF.AGENT.quitting_rpc_timeout,
                               constants.AGENT_TYPE_MACVTAP,
                               MACVTAP_AGENT_BINARY)
    LOG.info(_LI("Agent initialized successfully, now running... "))
    launcher = service.launch(cfg.CONF, agent)
    launcher.wait()
| {
"content_hash": "d03bdddeae6b821a7dfca36b37c9aa1e",
"timestamp": "",
"source": "github",
"line_count": 203,
"max_line_length": 79,
"avg_line_length": 39.33990147783251,
"alnum_prop": 0.6344853493613825,
"repo_name": "igor-toga/local-snat",
"id": "41115f8d3945301993c71c71e2dd1410b790cd2b",
"size": "8617",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "neutron/plugins/ml2/drivers/macvtap/agent/macvtap_neutron_agent.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1047"
},
{
"name": "Python",
"bytes": "9636936"
},
{
"name": "Shell",
"bytes": "14072"
}
],
"symlink_target": ""
} |
import collections
from ..error import Error
from ..language import ast
def is_input_type(type):
    """True if the underlying named type can be used as an input (scalar,
    enum, or input object)."""
    input_types = (GraphQLScalarType, GraphQLEnumType, GraphQLInputObjectType)
    return isinstance(get_named_type(type), input_types)
def is_composite_type(type):
    """True if the underlying named type has sub-fields (object, interface,
    or union)."""
    composite_types = (GraphQLObjectType, GraphQLInterfaceType, GraphQLUnionType)
    return isinstance(get_named_type(type), composite_types)
def is_leaf_type(type):
    """True if the underlying named type is a leaf (scalar or enum)."""
    leaf_types = (GraphQLScalarType, GraphQLEnumType)
    return isinstance(get_named_type(type), leaf_types)
def get_named_type(type):
    """Strip every List/NonNull wrapper and return the underlying named type."""
    unwrapped = type
    while isinstance(unwrapped, (GraphQLList, GraphQLNonNull)):
        unwrapped = unwrapped.of_type
    return unwrapped
def get_nullable_type(type):
    """Unwrap a single NonNull wrapper, if present."""
    return type.of_type if isinstance(type, GraphQLNonNull) else type
class GraphQLType(object):
    """Base class for all GraphQL types; renders as and compares by name."""
    def __str__(self):
        return self.name

    def is_same_type(self, other):
        # Two types are the same iff they share both class and name.
        same_class = self.__class__ is other.__class__
        return same_class and self.name == other.name
class GraphQLScalarType(GraphQLType):
    """Scalar Type Definition

    The leaf values of any request and input values to arguments are
    Scalars (or Enums), defined by a name and a set of coercion functions
    used to ensure validity.

    Example:

        def coerce_odd(value):
            if value % 2 == 1:
                return value
            return None

        OddType = GraphQLScalarType(name='Odd', serialize=coerce_odd)
    """
    def __init__(self, name, description=None, serialize=None, parse_value=None, parse_literal=None):
        assert name, 'Type must be named.'
        self.name = name
        self.description = description
        # serialize is mandatory; the two parse functions come as a pair.
        assert callable(serialize)
        if parse_value or parse_literal:
            assert callable(parse_value) and callable(parse_literal)
        self._serialize = serialize
        self._parse_value = parse_value
        self._parse_literal = parse_literal

    def serialize(self, value):
        return self._serialize(value)

    def parse_value(self, value):
        parser = self._parse_value
        return parser(value) if parser else None

    def parse_literal(self, value_ast):
        parser = self._parse_literal
        return parser(value_ast) if parser else None

    def __str__(self):
        return self.name
class GraphQLObjectType(GraphQLType):
    """Object Type Definition

    Almost all of the GraphQL types you define will be object types.
    Object types have a name, but most importantly describe their fields.

    Example:

        AddressType = GraphQLObjectType('Address', {
            'street': GraphQLField(GraphQLString),
            'number': GraphQLField(GraphQLInt),
            'formatted': GraphQLField(GraphQLString,
                resolver=lambda obj, *_: obj.number + ' ' + obj.street),
        })

    When two types need to refer to each other, or a type needs to refer
    to itself in a field, supply the fields lazily as a callable:

        PersonType = GraphQLObjectType('Person', lambda: {
            'name': GraphQLField(GraphQLString),
            'bestFriend': GraphQLField(PersonType)
        })
    """
    def __init__(self, name, fields, interfaces=None, is_type_of=None, description=None):
        assert name, 'Type must be named.'
        self.name = name
        self.description = description
        self._fields = fields
        self._field_map = None
        self._interfaces = interfaces or []
        self._is_type_of = is_type_of
        add_impl_to_interfaces(self)

    def get_fields(self):
        # Resolved lazily so mutually-recursive types can be declared.
        if self._field_map is None:
            self._field_map = define_field_map(self._fields)
        return self._field_map

    def get_interfaces(self):
        return self._interfaces

    def is_type_of(self, value):
        checker = self._is_type_of
        if checker:
            return checker(value)
def define_field_map(fields):
    """Resolve a field map (calling it if given as a thunk) and stamp each
    field with its key as the field's name."""
    field_map = fields() if callable(fields) else fields
    for field_name, field in field_map.items():
        field.name = field_name
    return field_map
def add_impl_to_interfaces(impl):
    """Register an object type with every interface it implements."""
    for interface in impl.get_interfaces():
        interface._impls.append(impl)
class GraphQLField(object):
    """An output field: its type, named arguments, resolver, and metadata."""
    def __init__(self, type, args=None, resolver=None,
                 deprecation_reason=None, description=None):
        self.type = type
        # Stamp each argument with its key as its name, preserving order.
        named_args = []
        for arg_name, arg in (args or {}).items():
            arg.name = arg_name
            named_args.append(arg)
        self.args = named_args
        self.resolver = resolver
        self.deprecation_reason = deprecation_reason
        self.description = description
class GraphQLArgument(object):
    """Definition of a single argument accepted by a field."""
    def __init__(self, type, default_value=None, description=None):
        self.description = description
        self.default_value = default_value
        self.type = type
class GraphQLInterfaceType(GraphQLType):
    """Interface Type Definition.

    Describes fields shared by a heterogeneous set of object types,
    together with a way to decide which concrete type a resolved value
    belongs to. Implementing object types register themselves here at
    construction time (see ``add_impl_to_interfaces``).
    """
    def __init__(self, name, fields=None, resolve_type=None, description=None):
        assert name, 'Type must be named.'
        self.name = name
        self.description = description
        self._fields = fields or {}
        self._resolver = resolve_type
        # Populated externally by add_impl_to_interfaces().
        self._impls = []
        self._field_map = None
        self._possible_type_names = None

    def get_fields(self):
        """Resolve and memoize the field map on first access."""
        field_map = self._field_map
        if field_map is None:
            field_map = self._field_map = define_field_map(self._fields)
        return field_map

    def get_possible_types(self):
        """Return the object types registered as implementations."""
        return self._impls

    def is_possible_type(self, type):
        """True when *type* (by name) is a registered implementation."""
        names = self._possible_type_names
        if names is None:
            names = self._possible_type_names = {
                t.name for t in self.get_possible_types()
            }
        return type.name in names

    def resolve_type(self, value):
        """Pick the concrete type for *value*, preferring the user resolver."""
        if self._resolver:
            return self._resolver(value)
        return get_type_of(value, self)
def get_type_of(value, abstract_type):
    """Determine which possible type of *abstract_type* claims *value*.

    Asks each candidate's ``is_type_of``; returns the first candidate that
    answers truthily, or None when none claims the value. Raises Error
    when a candidate cannot decide (``is_type_of`` returns None).
    """
    for candidate in abstract_type.get_possible_types():
        claim = candidate.is_type_of(value)
        if claim is None:
            raise Error(
                'Non-Object Type {} does not implement resolve_type and '
                'Object Type {} does not implement is_type_of. '
                'There is no way to determine if a value is of this type.'
                .format(abstract_type.name, candidate.name)
            )
        if claim:
            return candidate
class GraphQLUnionType(GraphQLType):
    """Union Type Definition.

    A union names a closed set of possible object types for a field,
    together with a way to determine which member type a resolved value
    actually is. Unlike interfaces, members need not share any fields.
    Only GraphQLObjectType members are permitted.
    """
    def __init__(self, name, types=None, resolve_type=None, description=None):
        assert name, 'Type must be named.'
        assert types, \
            'Must provide types for Union {}.'.format(name)
        self.name = name
        self.description = description
        self._possible_type_names = None
        # Reject any member that is not a plain object type.
        bad_members = [t for t in types
                       if not isinstance(t, GraphQLObjectType)]
        if bad_members:
            raise Error(
                'Union {} may only contain object types, it cannot '
                'contain: {}.'.format(
                    self.name,
                    ', '.join(str(t) for t in bad_members)
                )
            )
        self._types = types
        self._resolve_type = resolve_type

    def get_possible_types(self):
        """Return the union's member object types."""
        return self._types

    def is_possible_type(self, type):
        """True when *type* (by name) is a member of this union."""
        names = self._possible_type_names
        if names is None:
            names = self._possible_type_names = {
                t.name for t in self.get_possible_types()
            }
        return type.name in names

    def resolve_type(self, value):
        """Pick the member type for *value*, preferring the user resolver."""
        if self._resolve_type:
            return self._resolve_type(value)
        return get_type_of(value, self)
class GraphQLEnumType(GraphQLType):
    """Enum Type Definition.

    Enum values are serialized as their names; internally each name maps
    to an arbitrary value, defaulting to the name itself when no value is
    supplied in the definition. Lookup tables are built lazily and cached.
    """
    def __init__(self, name, values, description=None):
        self.name = name
        self.description = description
        self._values = values
        # Lazily-built caches.
        self._value_map = None
        self._value_lookup = None
        self._name_lookup = None

    def get_values(self):
        """Return the normalized name -> GraphQLEnumValue mapping."""
        value_map = self._value_map
        if value_map is None:
            value_map = self._value_map = self._define_value_map()
        return value_map

    def serialize(self, value):
        """Map an internal value to its enum name (or None)."""
        return self._name_for_value(value)

    def parse_value(self, value):
        """Map a client-provided value to its enum name (or None)."""
        return self._name_for_value(value)

    def _name_for_value(self, value):
        # Unhashable values cannot be dict keys, so they can never be
        # enum members.
        if not isinstance(value, collections.Hashable):
            return None
        enum_value = self._get_value_lookup().get(value)
        return enum_value.name if enum_value else None

    def parse_literal(self, value_ast):
        """Resolve an EnumValue AST node to the enum's internal value."""
        if not isinstance(value_ast, ast.EnumValue):
            return None
        enum_value = self._get_name_lookup().get(value_ast.value)
        return enum_value.value if enum_value else None

    def _define_value_map(self):
        """Normalize the raw definition into GraphQLEnumValue objects."""
        value_map = {}
        for value_name, raw in self._values.items():
            entry = raw
            if not isinstance(entry, GraphQLEnumValue):
                entry = GraphQLEnumValue(entry)
            entry.name = value_name
            if entry.value is None:
                # Default the internal value to the enum name.
                entry.value = value_name
            value_map[value_name] = entry
        return value_map

    def _get_value_lookup(self):
        """Memoized internal-value -> GraphQLEnumValue index."""
        if self._value_lookup is None:
            self._value_lookup = {
                v.value: v for v in self.get_values().values()
            }
        return self._value_lookup

    def _get_name_lookup(self):
        """Memoized name -> GraphQLEnumValue index."""
        if self._name_lookup is None:
            self._name_lookup = {
                v.name: v for v in self.get_values().values()
            }
        return self._name_lookup
class GraphQLEnumValue(object):
    """One member of an enum: its internal value plus metadata."""
    def __init__(self, value=None, deprecation_reason=None,
                 description=None):
        self.description = description
        self.deprecation_reason = deprecation_reason
        self.value = value
class GraphQLInputObjectType(GraphQLType):
    """Input Object Type Definition.

    A named, structured collection of input fields that can be supplied
    as a field argument. Wrap individual fields in ``GraphQLNonNull`` to
    make them mandatory.
    """
    def __init__(self, name, fields, description=None):
        assert name, 'Type must be named.'
        self.name = name
        self.description = description
        self._fields = fields
        self._field_map = None

    def get_fields(self):
        """Resolve and memoize the input-field map on first access."""
        field_map = self._field_map
        if field_map is None:
            field_map = self._field_map = define_field_map(self._fields)
        return field_map
class GraphQLInputObjectField(object):
    """Definition of a single field on an input object type."""
    def __init__(self, type, default_value=None, description=None):
        self.description = description
        self.default_value = default_value
        self.type = type
class GraphQLList(GraphQLType):
    """List Modifier.

    A wrapping type marking that a field yields a list of its inner
    ``of_type``. Typically created inline while declaring fields.
    """
    def __init__(self, type):
        self.of_type = type

    def __str__(self):
        return '[{}]'.format(self.of_type)

    def is_same_type(self, other):
        """Two list types match when their element types match."""
        if not isinstance(other, GraphQLList):
            return False
        return self.of_type.is_same_type(other.of_type)
class GraphQLNonNull(GraphQLType):
    """Non-Null Modifier.

    Wraps another type to declare that its values may never be null; the
    executor enforces this at resolution time. Nesting one non-null
    directly inside another is disallowed.
    """
    def __init__(self, type):
        assert not isinstance(type, GraphQLNonNull), \
            'Cannot nest NonNull inside NonNull.'
        self.of_type = type

    def __str__(self):
        return '{}!'.format(self.of_type)

    def is_same_type(self, other):
        """Two non-null types match when their inner types match."""
        if not isinstance(other, GraphQLNonNull):
            return False
        return self.of_type.is_same_type(other.of_type)
| {
"content_hash": "03b1d106a0a2dd1818a3d2988d1a2ac8",
"timestamp": "",
"source": "github",
"line_count": 476,
"max_line_length": 142,
"avg_line_length": 31.24579831932773,
"alnum_prop": 0.603173535937605,
"repo_name": "gabriel-laet/graphql-py",
"id": "61b4cd5a71a98834b7239e53f212217d31fee310",
"size": "14873",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "graphql/core/type/definition.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "468582"
}
],
"symlink_target": ""
} |
import os
import sys
import textwrap
from os.path import abspath, dirname, join as pjoin
from distutils.command import build
#-----------------------------------------------------------------------------
# Prefer setuptools, which provides the extra `develop` and `bdist_egg`
# commands; fall back to plain distutils when setuptools is unavailable.
try:
    from setuptools import setup, Extension
    from setuptools.command import bdist_egg, develop
except ImportError:
    from distutils.core import setup, Extension
    from distutils.command import build
    # Sentinels: checked later before wrapping the setuptools-only commands.
    develop, bdist_egg = None, None
# Absolute path of the directory containing this setup.py.
here = abspath(dirname(__file__))
#-----------------------------------------------------------------------------
# Trove classifiers describing the package on PyPI.
classifiers = [
    'Development Status :: 5 - Production/Stable',
    'Programming Language :: Python :: 2.7',
    'Programming Language :: Python :: 3',
    'Programming Language :: Python :: 3.3',
    'Programming Language :: Python :: 3.4',
    'Operating System :: POSIX :: Linux',
    'Intended Audience :: Developers',
    'Topic :: Software Development :: Libraries',
    'License :: OSI Approved :: BSD License',
    'Programming Language :: Python :: Implementation :: CPython',
]
#-----------------------------------------------------------------------------
# Compiler flags shared by all three C extension modules.
cflags = ['-std=c99', '-Wno-error=declaration-after-statement']
input_c = Extension('evdev._input', sources=['evdev/input.c'], extra_compile_args=cflags)
uinput_c = Extension('evdev._uinput', sources=['evdev/uinput.c'], extra_compile_args=cflags)
ecodes_c = Extension('evdev._ecodes', sources=['evdev/ecodes.c'], extra_compile_args=cflags)
#-----------------------------------------------------------------------------
# Keyword arguments for setup(); `cmdclass` is populated further below so
# that ecodes.c gets regenerated before each build.
kw = {
    'name': 'evdev',
    'version': '0.4.7',
    'description': 'Bindings to the Linux input handling subsystem',
    'long_description': open(pjoin(here, 'README.rst')).read(),
    'author': 'Georgi Valkov',
    'author_email': 'georgi.t.valkov@gmail.com',
    'license': 'Revised BSD License',
    'keywords': 'evdev input uinput',
    'url': 'https://github.com/gvalkov/python-evdev',
    'classifiers': classifiers,
    'packages': ['evdev'],
    'ext_modules': [input_c, uinput_c, ecodes_c],
    'include_package_data': False,
    'zip_safe': True,
    'cmdclass': {},
}
#-----------------------------------------------------------------------------
def create_ecodes():
    """Generate evdev/ecodes.c from the kernel's linux/input.h header.

    Exits the process with status 1 (after printing installation hints to
    stderr) when the kernel headers are not installed.
    """
    header = '/usr/include/linux/input.h'
    if not os.path.isfile(header):
        msg = '''\
        The linux/input.h header file is missing. You will have to
        install the headers for your kernel in order to continue:
        yum install kernel-headers-$(uname -r)
        apt-get install linux-headers-$(uname -r)
        pacman -S kernel-headers\n\n'''
        sys.stderr.write(textwrap.dedent(msg))
        sys.exit(1)
    from subprocess import check_call
    print('writing ecodes.c (using %s)' % header)
    # Build the argument vector directly instead of interpolating paths
    # into a `shell=True` command line: the call keeps working (and is
    # not shell-injectable) even when sys.executable or `here` contains
    # spaces or shell metacharacters. The previous shell redirection
    # (`> ecodes.c`) is replaced by redirecting stdout ourselves.
    workdir = pjoin(here, 'evdev')
    with open(pjoin(workdir, 'ecodes.c'), 'w') as out:
        check_call([sys.executable, 'genecodes.py', header],
                   cwd=workdir, stdout=out)
def cmdfactory(cmd):
    """Wrap a distutils/setuptools command class so that ecodes.c is
    regenerated immediately before the wrapped command runs."""
    class WrappedCommand(cmd):
        def run(self):
            create_ecodes()
            cmd.run(self)
    return WrappedCommand
#-----------------------------------------------------------------------------
# Regenerate ecodes.c as part of every build.
kw['cmdclass']['build'] = cmdfactory(build.build)
# These commands exist only when setuptools was imported successfully above.
if develop and bdist_egg:
    kw['cmdclass']['develop'] = cmdfactory(develop.develop)
    kw['cmdclass']['bdist_egg'] = cmdfactory(bdist_egg.bdist_egg)
#-----------------------------------------------------------------------------
if __name__ == '__main__':
    setup(**kw)
| {
"content_hash": "e7183c201ddc260ab45b1c7b46679335",
"timestamp": "",
"source": "github",
"line_count": 102,
"max_line_length": 92,
"avg_line_length": 36.23529411764706,
"alnum_prop": 0.5043290043290043,
"repo_name": "ev3dev/python-evdev",
"id": "76edd6511f7372920c7eddf723cbc099b954e13e",
"size": "3737",
"binary": false,
"copies": "1",
"ref": "refs/heads/ev3dev-stretch",
"path": "setup.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "50784"
},
{
"name": "Python",
"bytes": "38662"
},
{
"name": "Shell",
"bytes": "1068"
}
],
"symlink_target": ""
} |
from django.db import models
from django.core.urlresolvers import reverse
from django.utils.text import slugify
from django.utils.datastructures import OrderedSet
from polymorphic.models import PolymorphicModel
from edtf.fields import EDTFField
from icekit.content_collections.abstract_models import TitleSlugMixin, \
PluralTitleSlugMixin
from icekit.mixins import FluentFieldsMixin, ListableMixin
from icekit.plugins.image.abstract_models import ImageLinkMixin
from icekit.models import ICEkitContentsMixin
from icekit.utils.strings import is_empty
from glamkit_collections.contrib.work_creator.managers import \
WorkCreatorQuerySet, WorkImageQuerySet
from glamkit_collections.models import GeographicLocation
class _WorkCreatorMetaDataMixin(models.Model):
    """ Basic metadata fields shared by all collection models """
    # Identifier in the external system of record; lets importers match
    # existing rows instead of creating duplicates.
    external_ref = models.CharField(
        max_length=255,
        blank=True,
        null=True,
        help_text="Unique identifier from an external system, such as from"
                  " an external system from which records are imported."
    )
    # Creation/modification timestamps, maintained automatically by Django.
    dt_created = models.DateTimeField(
        editable=False,
        auto_now_add=True,
    )
    dt_modified = models.DateTimeField(
        editable=False,
        auto_now=True,
    )
    class Meta:
        abstract = True
class CreatorBase(
    PolymorphicModel,
    FluentFieldsMixin,
    ICEkitContentsMixin,
    ListableMixin,
    _WorkCreatorMetaDataMixin,
):
    """A person or organisation that creates works.

    Polymorphic base class: concrete creator types subclass this. Only
    ``name_full`` is truly required; the other name fields and ``slug``
    are derived from it at save time when left blank (see
    ``derive_and_set_name_fields_and_slug``).
    """
    # Primary, definitive, and one truly required name field
    name_full = models.CharField(
        max_length=255,
        help_text='A public "label" for the creator, from which all other '
                  'name values will be derived unless they are also provided. '
                  'E.g. for Person, composed of the Prefix, First Names, Last '
                  'Name Prefix, Last Name, Suffix, and Variant Name fields'
    )
    # Other name fields that will be derived from `name_full` unless provided
    name_display = models.CharField(
        max_length=255,
        help_text='The commonly known or generally recognized name of the '
                  'creator, for display, publication and reproduction purposes, '
                  'e.g., "Rembrandt" or "Guercino" as opposed to the full name '
                  '"Rembrandt Harmenszoon Van Rijn" or "Giovanni Francesco '
                  'Barbieri."'
    )
    name_sort = models.CharField(
        max_length=255,
        help_text='For searching and organizing, the name or sequence of names '
                  'which determines the position of the creator in the list of '
                  'creators, so that he or she may be found where expected, '
                  'e.g. "Rembrandt" under "R" or "Guercino" under "G"'
    )
    #for URLs
    slug = models.CharField(
        max_length=255,
        db_index=True,
    )  # Alt slug redirects to it.
    alt_slug = models.SlugField(
        max_length=255,
        blank=True,
        db_index=True,
    )  # use unidecode + slugify for alt slug.
    # Alt slug matches should redirect to the canonical view.
    portrait = models.ForeignKey(
        'icekit_plugins_image.Image',
        blank=True,
        null=True,
        on_delete=models.SET_NULL,
    )
    website = models.CharField(
        blank=True,
        max_length=255,
    )
    wikipedia_link = models.URLField(blank=True, help_text="e.g. 'https://en.wikipedia.org/wiki/Pablo_Picasso'")
    birth_date_display = models.CharField(
        "Date of birth (display)",
        blank=True,
        max_length=255,
        help_text='Displays date as formatted for display, rather '
                  'than sorting.'
    )
    # BUGFIX: verbose name previously read "Date of creation (EDTF)" —
    # a copy-paste from WorkBase.date_edtf; this is the *birth* date.
    birth_date_edtf = EDTFField(
        "Date of birth (EDTF)",
        natural_text_field='birth_date_display',
        lower_strict_field='birth_date_earliest',
        upper_strict_field='birth_date_latest',
        lower_fuzzy_field='birth_date_sort_ascending',
        upper_fuzzy_field='birth_date_sort_descending',
        blank=True,
        null=True,
        help_text="an <a href='http://www.loc.gov/standards/datetime/"
                  "implementations.html'>EDTF</a>-formatted "
                  "date, parsed from the display date, e.g. "
                  "'1855/1860-06-04'",
    )
    # Strict/fuzzy bounds derived from the EDTF value by EDTFField.
    birth_date_earliest = models.DateField(
        "Earliest birth date",
        blank=True,
        null=True,
    )
    birth_date_latest = models.DateField(
        "Latest birth date",
        blank=True,
        null=True,
    )
    birth_date_sort_ascending = models.DateField(
        "Ascending sort by birth",
        blank=True,
        null=True,
    )
    birth_date_sort_descending = models.DateField(
        "Descending sort by birth",
        blank=True,
        null=True,
    )
    death_date_display = models.CharField(
        "Date of death (display)",
        blank=True,
        max_length=255,
        help_text='Displays date as formatted for display, rather '
                  'than sorting.'
    )
    death_date_edtf = EDTFField(
        "Date of death (EDTF)",
        natural_text_field='death_date_display',
        lower_strict_field='death_date_earliest',
        upper_strict_field='death_date_latest',
        lower_fuzzy_field='death_date_sort_ascending',
        upper_fuzzy_field='death_date_sort_descending',
        blank=True,
        null=True,
        help_text="an <a href='http://www.loc.gov/standards/datetime/"
                  "implementations.html'>EDTF</a>-formatted "
                  "date, parsed from the display date, e.g. "
                  "'1855/1860-06-04'",
    )
    death_date_earliest = models.DateField(
        "Earliest death date",
        blank=True,
        null=True,
    )
    death_date_latest = models.DateField(
        "Latest death date",
        blank=True,
        null=True,
    )
    death_date_sort_ascending = models.DateField(
        "Ascending sort by death",
        blank=True,
        null=True,
    )
    death_date_sort_descending = models.DateField(
        "Descending sort by death",
        blank=True,
        null=True,
    )
    class Meta:
        verbose_name = "creator"
        ordering = ('name_sort', 'slug', 'publishing_is_draft')
        unique_together = ('slug', 'publishing_is_draft',)
    def __unicode__(self):
        return self.name_display
    def save(self, *args, **kwargs):
        # Fill in name_display/name_sort/slug from name_full when blank.
        self.derive_and_set_name_fields_and_slug()
        return super(CreatorBase, self).save(*args, **kwargs)
    def derive_and_set_name_fields_and_slug(
        self, set_name_sort=True, set_slug=True
    ):
        """
        Derive subordinate name_* field values from the `name_full` field
        unless these fields are set in their own right.
        This method is called during `save()`
        """
        # name_full is the primary required name field. It must be set.
        if is_empty(self.name_full):
            raise ValueError(
                u"%s.name_full cannot be empty at save" % type(self).__name__)
        # if empty, `name_display` == `name_full`
        if is_empty(self.name_display):
            self.name_display = self.name_full
        # if empty, `name_sort` == `name_full`
        if set_name_sort and is_empty(self.name_sort):
            self.name_sort = self.name_full
        # if empty, `slug` is set to slugified `name_full`
        if set_slug and is_empty(self.slug):
            self.slug = slugify(self.name_full)
    def get_absolute_url(self):
        return reverse("gk_collections_creator", kwargs={'slug': self.slug})
    def get_works(self):
        """
        :return: The works that should be presented as visible on the front
        end. If self is draft, show visible related items. If self is
        published, show published related items.
        Normal behaviour is to return published works if possible
        AND draft works if they haven't been published. Draft works are
        to be shown without links.
        """
        qs = self.get_draft().works
        # only return works that don't have an equivalent published version
        # (ie items that are themselves published, and unpublished drafts)
        return qs.filter(publishing_linked=None)
    def get_works_count(self):
        """To be used in Admin listings"""
        return self.get_works().count()
    def get_hero_image(self):
        if self.portrait:
            return self.portrait
    def get_list_image(self):
        # Prefer an explicit list image (from ListableMixin); fall back to
        # the portrait's underlying image.
        if self.portrait:
            return self.list_image or self.portrait.image
    def get_title(self):
        return self.name_display
    def get_type(self):
        return "creator"
    def get_roles(self):
        """Return the m2m relations connecting me to works"""
        work_ids = self.get_works().values_list('id', flat=True)
        return self.works.through.objects.filter(
            creator=self.get_draft(),
            work_id__in=work_ids,
        ).select_related('role')
    def get_primary_roles(self):
        """Return the m2m relations connecting me to works as primary creator"""
        return self.get_roles().filter(is_primary=True)
class WorkOrigin(models.Model):
    """Ordered through model relating a work to a geographic origin."""
    work = models.ForeignKey('WorkBase')
    geographic_location = models.ForeignKey(GeographicLocation)
    # Position of this origin within the work's list of origins.
    order = models.PositiveIntegerField(default=0)
    def __unicode__(self):
        return u"{0} originates from {1}".format(self.work, self.geographic_location)
    class Meta:
        ordering = ('order',)
class WorkBase(
    PolymorphicModel,
    FluentFieldsMixin,
    ICEkitContentsMixin,
    ListableMixin,
    _WorkCreatorMetaDataMixin,
):
    """A work in the collection (artwork, object, etc.).

    Polymorphic base class: concrete work types subclass this. ``slug``
    is derived from ``title`` at save time when left blank (see
    ``derive_and_set_slug``).
    """
    # meta
    slug = models.CharField(max_length=255, db_index=True)
    # using accession number (URL-encoded) for canonical slug
    alt_slug = models.SlugField(max_length=255, blank=True, db_index=True)
    # using slugified, no-hyphens. Alt slug matches should redirect to the
    # canonical view.
    # what's it called
    title = models.CharField(
        max_length=511,  # 511? Good question: aping `subtitle` & `oneliner`
        help_text='The official title of this object. Includes series title '
                  'when appropriate.'
    )
    subtitle = models.CharField(max_length=511, blank=True)
    oneliner = models.CharField("One-liner", max_length=511, blank=True,
                                help_text="A pithy description of the work")
    # who made it
    creators = models.ManyToManyField(
        'CreatorBase', through='WorkCreator', related_name='works'
    )
    date_display = models.CharField(
        "Date of creation (display)",
        blank=True,
        max_length=255,
        help_text='Displays date as formatted for display, rather '
                  'than sorting.'
    )  # used on 'Explore Modern Art' 53841 records
    date_edtf = EDTFField(
        "Date of creation (EDTF)",
        natural_text_field='date_display',
        lower_strict_field='date_earliest',
        upper_strict_field='date_latest',
        lower_fuzzy_field='date_sort_ascending',
        upper_fuzzy_field='date_sort_descending',
        blank=True,
        null=True,
        help_text="an <a href='http://www.loc.gov/standards/datetime/"
                  "implementations.html'>EDTF</a>-formatted "
                  "date, parsed from the display date, e.g. "
                  "'1855/1860-06-04'",
    )
    # Strict/fuzzy bounds derived from the EDTF value by EDTFField.
    date_earliest = models.DateField(
        "Earliest date",
        blank=True,
        null=True,
    )
    date_latest = models.DateField(
        "Latest date",
        blank=True,
        null=True,
    )
    date_sort_ascending = models.DateField(
        "Ascending sort",
        blank=True,
        null=True,
    )
    date_sort_descending = models.DateField(
        "Descending sort",
        blank=True,
        null=True,
    )
    # where was it made
    origin_locations = models.ManyToManyField(GeographicLocation, through=WorkOrigin)
    credit_line = models.TextField(
        blank=True,
        help_text="A formal public credit statement about a transfer of "
                  "ownership, acquisition, source, or sponsorship of an "
                  "item suitable for use in a display, label or publication"
        # "The full text of lengthy credit statements may be "
        # "accessed by visitors to the collection through the "
        # "scrolling list of Notes & Histories on page 4 of the "
        # "Object Info layout."
    )
    # how we got it
    accession_number = models.CharField(
        blank=True,
        max_length=255,
        help_text="The five components of the Accession number concatenated "
                  " in a single string for efficiency of display and retrieval."
    )
    department = models.CharField(
        blank=True,
        max_length=255,
        help_text='The curatorial unit responsible for the object, '
                  'e.g., "Western Painting."'
    )
    website = models.URLField(
        help_text="A URL at which to view this work, if available online",
        blank=True,
    )
    wikipedia_link = models.URLField(blank=True, help_text="e.g. 'https://en.wikipedia.org/wiki/Beauty_and_the_Beast_(2014_film)'")
    images = models.ManyToManyField('icekit_plugins_image.Image', through="WorkImage")
    class Meta:
        verbose_name = "work"
        ordering = ('slug', 'publishing_is_draft', )
        unique_together = ('slug', 'publishing_is_draft',)
    def __unicode__(self):
        if self.date_display:
            return u"%s (%s)" % (self.title, self.date_display)
        return self.title
    def save(self, *args, **kwargs):
        # Fill in `slug` from `title` when blank, then persist.
        self.derive_and_set_slug()
        return super(WorkBase, self).save(*args, **kwargs)
    def derive_and_set_slug(self, set_name_sort=True, set_slug=True):
        """
        Derive `slug` field from `title` unless it is set in its own right.
        This method is called during `save()`
        """
        # `title` is the primary required name field. It must be set.
        if is_empty(self.title):
            raise ValueError(
                u"%s.title cannot be empty at save" % type(self).__name__)
        # if empty, `slug` is set to slugified `title`
        if set_slug and is_empty(self.slug):
            self.slug = slugify(self.title)
    def get_absolute_url(self):
        return reverse("gk_collections_work", kwargs={'slug': self.slug})
    def get_images(self, **kwargs):
        # order images by the order given in WorkImage.
        return self.images.filter(**kwargs).order_by('workimage')
    def get_hero_image(self):
        # First image in WorkImage order; memoized per instance.
        if not hasattr(self, "_hero_image"):
            try:
                self._hero_image = self.get_images()[0]
            except IndexError:
                self._hero_image = None
        if self._hero_image:
            return self._hero_image
    def get_subtitle(self):
        return self.subtitle
    def get_oneliner(self):
        return self.oneliner
    def get_type(self):
        return "work"
    def get_creators(self):
        """
        :return: The creators that should be presented as visible on the front
        end.
        Normal behaviour is to return published creators if possible
        AND draft creators if they haven't been published. Draft creators are
        to be shown without links.
        """
        qs = self.get_draft().creators
        # only return creators that don't have an equivalent published version
        # (ie items that are themselves published, and unpublished drafts)
        return qs.filter(publishing_linked=None)
    def get_roles(self):
        """
        Return the m2m relations connecting me to creators.
        There's some publishing-related complexity here. The role relations
        (self.creators.through) connect to draft objects, which then need to
        be modified to point to visible() objects.
        """
        creator_ids = self.get_creators().values_list('id', flat=True)
        return self.creators.through.objects.filter(
            work=self.get_draft(),
            creator_id__in=creator_ids,
        ).select_related('role')
    def get_primary_roles(self):
        """Return the m2m relations connecting me to creators as primary creator"""
        return self.get_roles().filter(is_primary=True)
    def get_title(self):
        if self.date_display:
            return u"{0} ({1})".format(self.title, self.date_display)
        return self.title
    def get_origin_countries(self):
        # Preserve origin ordering while de-duplicating countries.
        countries = OrderedSet()
        for o in WorkOrigin.objects.filter(work=self):
            if o.geographic_location.country:
                countries.add(o.geographic_location.country)
        return countries
class Role(PluralTitleSlugMixin):
    """A creator's role in producing a work (e.g. 'foundry')."""
    # Used to render sentences like "<work>, forged by <creator>".
    past_tense = models.CharField(max_length=255, help_text="If the role is 'foundry', the past tense should be 'forged'. Use lower case.")
class WorkCreator(models.Model):
    """Through model linking a work to a creator, with an optional role."""
    creator = models.ForeignKey(CreatorBase, on_delete=models.CASCADE)
    work = models.ForeignKey(WorkBase, on_delete=models.CASCADE)
    role = models.ForeignKey(Role, blank=True, null=True, on_delete=models.SET_NULL,)
    is_primary = models.BooleanField("Primary?", default=True)
    order = models.PositiveIntegerField(help_text="Which order to show this creator in the list of creators.", default=0)
    objects = WorkCreatorQuerySet.as_manager()
    class Meta:
        unique_together = ('creator', 'work', 'role')
        ordering = ('order', '-is_primary')
        verbose_name = "Work-Creator relation"
    def __unicode__(self):
        # NOTE(review): uses the Python 2-only `unicode` builtin, consistent
        # with the `__unicode__` protocol used throughout this module.
        if self.role:
            return "%s, %s by %s" % (unicode(self.work), self.role.past_tense, unicode(self.creator))
        else:
            return "%s, created by %s" % (unicode(self.work), unicode(self.creator))
class WorkImageType(TitleSlugMixin):
    """A named category for work images (fields come from TitleSlugMixin)."""
    class Meta:
        verbose_name = "Image type"
class WorkImage(ImageLinkMixin):
    """Ordered image attached to a work, with an optional image type."""
    work = models.ForeignKey(WorkBase, on_delete=models.CASCADE)
    type = models.ForeignKey(WorkImageType, blank=True, null=True, on_delete=models.SET_NULL)
    order = models.PositiveIntegerField(
        help_text="Which order to show this image in the set of images.",
        default=0)
    objects = WorkImageQuerySet.as_manager()
    class Meta:
        ordering = ('order',)
        verbose_name = "Image"
| {
"content_hash": "6e1c5e74c6fb89d4ad8554b2215b6958",
"timestamp": "",
"source": "github",
"line_count": 528,
"max_line_length": 139,
"avg_line_length": 34.75189393939394,
"alnum_prop": 0.6169818518720366,
"repo_name": "ic-labs/glamkit-collections",
"id": "6c1543e5200cc9838a64b4348f70584a3eb7e458",
"size": "18349",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "glamkit_collections/contrib/work_creator/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "561"
},
{
"name": "Python",
"bytes": "171835"
}
],
"symlink_target": ""
} |
"""The tests for the MQTT light platform.
Configuration for RGB Version with brightness:
light:
platform: mqtt
name: "Office Light RGB"
state_topic: "office/rgb1/light/status"
command_topic: "office/rgb1/light/switch"
brightness_state_topic: "office/rgb1/brightness/status"
brightness_command_topic: "office/rgb1/brightness/set"
rgb_state_topic: "office/rgb1/rgb/status"
rgb_command_topic: "office/rgb1/rgb/set"
qos: 0
payload_on: "on"
payload_off: "off"
Configuration for XY Version with brightness:
light:
platform: mqtt
name: "Office Light XY"
state_topic: "office/xy1/light/status"
command_topic: "office/xy1/light/switch"
brightness_state_topic: "office/xy1/brightness/status"
brightness_command_topic: "office/xy1/brightness/set"
xy_state_topic: "office/xy1/xy/status"
xy_command_topic: "office/xy1/xy/set"
qos: 0
payload_on: "on"
payload_off: "off"
config without RGB:
light:
platform: mqtt
name: "Office Light"
state_topic: "office/rgb1/light/status"
command_topic: "office/rgb1/light/switch"
brightness_state_topic: "office/rgb1/brightness/status"
brightness_command_topic: "office/rgb1/brightness/set"
qos: 0
payload_on: "on"
payload_off: "off"
config without RGB and brightness:
light:
platform: mqtt
name: "Office Light"
state_topic: "office/rgb1/light/status"
command_topic: "office/rgb1/light/switch"
qos: 0
payload_on: "on"
payload_off: "off"
config for RGB Version with brightness and scale:
light:
platform: mqtt
name: "Office Light RGB"
state_topic: "office/rgb1/light/status"
command_topic: "office/rgb1/light/switch"
brightness_state_topic: "office/rgb1/brightness/status"
brightness_command_topic: "office/rgb1/brightness/set"
brightness_scale: 99
rgb_state_topic: "office/rgb1/rgb/status"
rgb_command_topic: "office/rgb1/rgb/set"
rgb_scale: 99
qos: 0
payload_on: "on"
payload_off: "off"
config with brightness and color temp
light:
platform: mqtt
name: "Office Light Color Temp"
state_topic: "office/rgb1/light/status"
command_topic: "office/rgb1/light/switch"
brightness_state_topic: "office/rgb1/brightness/status"
brightness_command_topic: "office/rgb1/brightness/set"
brightness_scale: 99
color_temp_state_topic: "office/rgb1/color_temp/status"
color_temp_command_topic: "office/rgb1/color_temp/set"
qos: 0
payload_on: "on"
payload_off: "off"
config with brightness and effect
light:
platform: mqtt
    name: "Office Light Effect"
state_topic: "office/rgb1/light/status"
command_topic: "office/rgb1/light/switch"
brightness_state_topic: "office/rgb1/brightness/status"
brightness_command_topic: "office/rgb1/brightness/set"
brightness_scale: 99
effect_state_topic: "office/rgb1/effect/status"
effect_command_topic: "office/rgb1/effect/set"
effect_list:
- rainbow
- colorloop
qos: 0
payload_on: "on"
payload_off: "off"
config for RGB Version with white value and scale:
light:
platform: mqtt
name: "Office Light RGB"
state_topic: "office/rgb1/light/status"
command_topic: "office/rgb1/light/switch"
white_value_state_topic: "office/rgb1/white_value/status"
white_value_command_topic: "office/rgb1/white_value/set"
white_value_scale: 99
rgb_state_topic: "office/rgb1/rgb/status"
rgb_command_topic: "office/rgb1/rgb/set"
rgb_scale: 99
qos: 0
payload_on: "on"
payload_off: "off"
config for RGB Version with RGB command template:
light:
platform: mqtt
name: "Office Light RGB"
state_topic: "office/rgb1/light/status"
command_topic: "office/rgb1/light/switch"
rgb_state_topic: "office/rgb1/rgb/status"
rgb_command_topic: "office/rgb1/rgb/set"
rgb_command_template: "{{ '#%02x%02x%02x' | format(red, green, blue)}}"
qos: 0
payload_on: "on"
payload_off: "off"
Configuration for HS Version with brightness:
light:
platform: mqtt
name: "Office Light HS"
state_topic: "office/hs1/light/status"
command_topic: "office/hs1/light/switch"
brightness_state_topic: "office/hs1/brightness/status"
brightness_command_topic: "office/hs1/brightness/set"
hs_state_topic: "office/hs1/hs/status"
hs_command_topic: "office/hs1/hs/set"
qos: 0
payload_on: "on"
payload_off: "off"
"""
import json
from unittest import mock
from unittest.mock import ANY, patch
from homeassistant.components import light, mqtt
from homeassistant.components.mqtt.discovery import async_start
from homeassistant.const import (
ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON, STATE_UNAVAILABLE)
import homeassistant.core as ha
from homeassistant.setup import async_setup_component
from tests.common import (
MockConfigEntry, assert_setup_component, async_fire_mqtt_message,
async_mock_mqtt_component, mock_coro, mock_registry)
from tests.components.light import common
async def test_fail_setup_if_no_command_topic(hass, mqtt_mock):
    """Test that setup without a command topic creates no entity."""
    config = {
        light.DOMAIN: {
            'platform': 'mqtt',
            'name': 'test',
        }
    }
    # Component setup itself succeeds, but the invalid platform config
    # is rejected, so no entity appears.
    assert await async_setup_component(hass, light.DOMAIN, config)
    assert hass.states.get('light.test') is None
async def test_no_color_brightness_color_temp_hs_white_xy_if_no_topics(
        hass, mqtt_mock):
    """Test if there is no color and brightness if no topic."""
    assert await async_setup_component(hass, light.DOMAIN, {
        light.DOMAIN: {
            'platform': 'mqtt',
            'name': 'test',
            'state_topic': 'test_light_rgb/status',
            'command_topic': 'test_light_rgb/set',
        }
    })

    optional_attrs = ('rgb_color', 'brightness', 'color_temp',
                      'hs_color', 'white_value', 'xy_color')

    # Without the dedicated topics none of the optional attributes may
    # be exposed while off...
    state = hass.states.get('light.test')
    assert state.state == STATE_OFF
    for attr in optional_attrs:
        assert state.attributes.get(attr) is None

    async_fire_mqtt_message(hass, 'test_light_rgb/status', 'ON')
    await hass.async_block_till_done()

    # ...nor after the light reports on.
    state = hass.states.get('light.test')
    assert state.state == STATE_ON
    for attr in optional_attrs:
        assert state.attributes.get(attr) is None
async def test_controlling_state_via_topic(hass, mqtt_mock):
    """Test the controlling of the state via topic."""
    # Every supported attribute gets its own state/command topic pair so
    # each one can be driven independently over MQTT.
    config = {light.DOMAIN: {
        'platform': 'mqtt',
        'name': 'test',
        'state_topic': 'test_light_rgb/status',
        'command_topic': 'test_light_rgb/set',
        'brightness_state_topic': 'test_light_rgb/brightness/status',
        'brightness_command_topic': 'test_light_rgb/brightness/set',
        'rgb_state_topic': 'test_light_rgb/rgb/status',
        'rgb_command_topic': 'test_light_rgb/rgb/set',
        'color_temp_state_topic': 'test_light_rgb/color_temp/status',
        'color_temp_command_topic': 'test_light_rgb/color_temp/set',
        'effect_state_topic': 'test_light_rgb/effect/status',
        'effect_command_topic': 'test_light_rgb/effect/set',
        'hs_state_topic': 'test_light_rgb/hs/status',
        'hs_command_topic': 'test_light_rgb/hs/set',
        'white_value_state_topic': 'test_light_rgb/white_value/status',
        'white_value_command_topic': 'test_light_rgb/white_value/set',
        'xy_state_topic': 'test_light_rgb/xy/status',
        'xy_command_topic': 'test_light_rgb/xy/set',
        'qos': '0',
        'payload_on': 1,
        'payload_off': 0
    }}
    assert await async_setup_component(hass, light.DOMAIN, config)
    # Initial state: off, no optional attributes known yet.
    state = hass.states.get('light.test')
    assert STATE_OFF == state.state
    assert state.attributes.get('rgb_color') is None
    assert state.attributes.get('brightness') is None
    assert state.attributes.get('color_temp') is None
    assert state.attributes.get('effect') is None
    assert state.attributes.get('hs_color') is None
    assert state.attributes.get('white_value') is None
    assert state.attributes.get(ATTR_ASSUMED_STATE) is None if False else state.attributes.get('xy_color') is None
    assert not state.attributes.get(ATTR_ASSUMED_STATE)
    # Turning on via the state topic populates defaults for every
    # configured attribute.
    async_fire_mqtt_message(hass, 'test_light_rgb/status', '1')
    await hass.async_block_till_done()
    state = hass.states.get('light.test')
    assert STATE_ON == state.state
    assert (255, 255, 255) == state.attributes.get('rgb_color')
    assert 255 == state.attributes.get('brightness')
    assert 150 == state.attributes.get('color_temp')
    assert 'none' == state.attributes.get('effect')
    assert (0, 0) == state.attributes.get('hs_color')
    assert 255 == state.attributes.get('white_value')
    assert (0.323, 0.329) == state.attributes.get('xy_color')
    async_fire_mqtt_message(hass, 'test_light_rgb/status', '0')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get('light.test')
    assert STATE_OFF == state.state
    async_fire_mqtt_message(hass, 'test_light_rgb/status', '1')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    # Each per-attribute state topic updates just that attribute.
    async_fire_mqtt_message(hass, 'test_light_rgb/brightness/status', '100')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    light_state = hass.states.get('light.test')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    assert 100 == \
        light_state.attributes['brightness']
    async_fire_mqtt_message(hass, 'test_light_rgb/color_temp/status', '300')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    light_state = hass.states.get('light.test')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    assert 300 == light_state.attributes['color_temp']
    async_fire_mqtt_message(hass, 'test_light_rgb/effect/status', 'rainbow')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    light_state = hass.states.get('light.test')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    assert 'rainbow' == light_state.attributes['effect']
    async_fire_mqtt_message(hass, 'test_light_rgb/white_value/status',
                            '100')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    light_state = hass.states.get('light.test')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    assert 100 == \
        light_state.attributes['white_value']
    async_fire_mqtt_message(hass, 'test_light_rgb/status', '1')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    # NOTE(review): the reported rgb_color appears to stay normalized to
    # full brightness, hence (255, 255, 255) for a grey payload —
    # confirm against the light platform's color handling.
    async_fire_mqtt_message(hass, 'test_light_rgb/rgb/status',
                            '125,125,125')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    light_state = hass.states.get('light.test')
    assert (255, 255, 255) == \
        light_state.attributes.get('rgb_color')
    async_fire_mqtt_message(hass, 'test_light_rgb/hs/status',
                            '200,50')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    light_state = hass.states.get('light.test')
    assert (200, 50) == \
        light_state.attributes.get('hs_color')
    # NOTE(review): xy looks round-tripped through hs internally, which
    # explains the small drift from the published 0.675,0.322 — confirm.
    async_fire_mqtt_message(hass, 'test_light_rgb/xy/status',
                            '0.675,0.322')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    light_state = hass.states.get('light.test')
    assert (0.672, 0.324) == \
        light_state.attributes.get('xy_color')
async def test_brightness_controlling_scale(hass, mqtt_mock):
    """Test the brightness controlling scale."""
    # brightness_scale maps the device's 0..99 range onto HA's 0..255.
    with assert_setup_component(1, light.DOMAIN):
        assert await async_setup_component(hass, light.DOMAIN, {
            light.DOMAIN: {
                'platform': 'mqtt',
                'name': 'test',
                'state_topic': 'test_scale/status',
                'command_topic': 'test_scale/set',
                'brightness_state_topic': 'test_scale/brightness/status',
                'brightness_command_topic': 'test_scale/brightness/set',
                'brightness_scale': '99',
                'qos': 0,
                'payload_on': 'on',
                'payload_off': 'off'
            }
        })
    state = hass.states.get('light.test')
    assert STATE_OFF == state.state
    assert state.attributes.get('brightness') is None
    assert not state.attributes.get(ATTR_ASSUMED_STATE)
    async_fire_mqtt_message(hass, 'test_scale/status', 'on')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get('light.test')
    assert STATE_ON == state.state
    # Without a brightness report yet, turning on defaults to full scale.
    assert 255 == state.attributes.get('brightness')
    async_fire_mqtt_message(hass, 'test_scale/status', 'off')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get('light.test')
    assert STATE_OFF == state.state
    async_fire_mqtt_message(hass, 'test_scale/status', 'on')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    # Device-side maximum (99) must translate to HA's maximum (255).
    async_fire_mqtt_message(hass, 'test_scale/brightness/status', '99')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    light_state = hass.states.get('light.test')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    assert 255 == \
        light_state.attributes['brightness']
async def test_brightness_from_rgb_controlling_scale(hass, mqtt_mock):
    """Test that brightness is derived from the received RGB payload."""
    config = {
        light.DOMAIN: {
            'platform': 'mqtt',
            'name': 'test',
            'state_topic': 'test_scale_rgb/status',
            'command_topic': 'test_scale_rgb/set',
            'rgb_state_topic': 'test_scale_rgb/rgb/status',
            'rgb_command_topic': 'test_scale_rgb/rgb/set',
            'qos': 0,
            'payload_on': 'on',
            'payload_off': 'off'
        }
    }
    with assert_setup_component(1, light.DOMAIN):
        assert await async_setup_component(hass, light.DOMAIN, config)

    state = hass.states.get('light.test')
    assert state.state == STATE_OFF
    assert state.attributes.get('brightness') is None
    assert not state.attributes.get(ATTR_ASSUMED_STATE)

    # Full red -> full brightness.
    async_fire_mqtt_message(hass, 'test_scale_rgb/status', 'on')
    async_fire_mqtt_message(hass, 'test_scale_rgb/rgb/status', '255,0,0')
    await hass.async_block_till_done()
    state = hass.states.get('light.test')
    assert state.attributes.get('brightness') == 255

    # Half red -> half brightness.
    async_fire_mqtt_message(hass, 'test_scale_rgb/rgb/status', '127,0,0')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get('light.test')
    assert state.attributes.get('brightness') == 127
async def test_white_value_controlling_scale(hass, mqtt_mock):
    """Test the white_value controlling scale."""
    # white_value_scale maps the device's 0..99 range onto HA's 0..255.
    with assert_setup_component(1, light.DOMAIN):
        assert await async_setup_component(hass, light.DOMAIN, {
            light.DOMAIN: {
                'platform': 'mqtt',
                'name': 'test',
                'state_topic': 'test_scale/status',
                'command_topic': 'test_scale/set',
                'white_value_state_topic': 'test_scale/white_value/status',
                'white_value_command_topic': 'test_scale/white_value/set',
                'white_value_scale': '99',
                'qos': 0,
                'payload_on': 'on',
                'payload_off': 'off'
            }
        })
    state = hass.states.get('light.test')
    assert STATE_OFF == state.state
    assert state.attributes.get('white_value') is None
    assert not state.attributes.get(ATTR_ASSUMED_STATE)
    async_fire_mqtt_message(hass, 'test_scale/status', 'on')
    await hass.async_block_till_done()
    state = hass.states.get('light.test')
    assert STATE_ON == state.state
    # Without a white_value report yet, turning on defaults to full scale.
    assert 255 == state.attributes.get('white_value')
    async_fire_mqtt_message(hass, 'test_scale/status', 'off')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get('light.test')
    assert STATE_OFF == state.state
    async_fire_mqtt_message(hass, 'test_scale/status', 'on')
    await hass.async_block_till_done()
    # Device-side maximum (99) must translate to HA's maximum (255).
    async_fire_mqtt_message(hass, 'test_scale/white_value/status', '99')
    await hass.async_block_till_done()
    light_state = hass.states.get('light.test')
    await hass.async_block_till_done()
    assert 255 == \
        light_state.attributes['white_value']
async def test_controlling_state_via_topic_with_templates(hass, mqtt_mock):
    """Test the setting of the state with a template."""
    # Every attribute extracts its value from the JSON payload's "hello"
    # key via a value template; list-valued attributes are joined with a
    # comma so the platform can parse them.
    config = {light.DOMAIN: {
        'platform': 'mqtt',
        'name': 'test',
        'state_topic': 'test_light_rgb/status',
        'command_topic': 'test_light_rgb/set',
        'brightness_command_topic': 'test_light_rgb/brightness/set',
        'rgb_command_topic': 'test_light_rgb/rgb/set',
        'color_temp_command_topic': 'test_light_rgb/color_temp/set',
        'effect_command_topic': 'test_light_rgb/effect/set',
        'hs_command_topic': 'test_light_rgb/hs/set',
        'white_value_command_topic': 'test_light_rgb/white_value/set',
        'xy_command_topic': 'test_light_rgb/xy/set',
        'brightness_state_topic': 'test_light_rgb/brightness/status',
        'color_temp_state_topic': 'test_light_rgb/color_temp/status',
        'effect_state_topic': 'test_light_rgb/effect/status',
        'hs_state_topic': 'test_light_rgb/hs/status',
        'rgb_state_topic': 'test_light_rgb/rgb/status',
        'white_value_state_topic': 'test_light_rgb/white_value/status',
        'xy_state_topic': 'test_light_rgb/xy/status',
        'state_value_template': '{{ value_json.hello }}',
        'brightness_value_template': '{{ value_json.hello }}',
        'color_temp_value_template': '{{ value_json.hello }}',
        'effect_value_template': '{{ value_json.hello }}',
        'hs_value_template': '{{ value_json.hello | join(",") }}',
        'rgb_value_template': '{{ value_json.hello | join(",") }}',
        'white_value_template': '{{ value_json.hello }}',
        'xy_value_template': '{{ value_json.hello | join(",") }}',
    }}
    assert await async_setup_component(hass, light.DOMAIN, config)
    state = hass.states.get('light.test')
    assert STATE_OFF == state.state
    assert state.attributes.get('brightness') is None
    assert state.attributes.get('rgb_color') is None
    async_fire_mqtt_message(hass, 'test_light_rgb/rgb/status',
                            '{"hello": [1, 2, 3]}')
    async_fire_mqtt_message(hass, 'test_light_rgb/status',
                            '{"hello": "ON"}')
    async_fire_mqtt_message(hass, 'test_light_rgb/brightness/status',
                            '{"hello": "50"}')
    async_fire_mqtt_message(hass, 'test_light_rgb/color_temp/status',
                            '{"hello": "300"}')
    async_fire_mqtt_message(hass, 'test_light_rgb/effect/status',
                            '{"hello": "rainbow"}')
    async_fire_mqtt_message(hass, 'test_light_rgb/white_value/status',
                            '{"hello": "75"}')
    await hass.async_block_till_done()
    state = hass.states.get('light.test')
    assert STATE_ON == state.state
    assert 50 == state.attributes.get('brightness')
    # NOTE(review): the received [1, 2, 3] RGB value is apparently
    # re-normalized (hue kept, scaled back to full brightness), yielding
    # (84, 169, 255) — confirm against the platform's color handling.
    assert (84, 169, 255) == state.attributes.get('rgb_color')
    assert 300 == state.attributes.get('color_temp')
    assert 'rainbow' == state.attributes.get('effect')
    assert 75 == state.attributes.get('white_value')
    async_fire_mqtt_message(hass, 'test_light_rgb/hs/status',
                            '{"hello": [100,50]}')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get('light.test')
    assert (100, 50) == state.attributes.get('hs_color')
    # NOTE(review): xy appears round-tripped through hs internally,
    # which explains the drift from 0.123,0.123 — confirm.
    async_fire_mqtt_message(hass, 'test_light_rgb/xy/status',
                            '{"hello": [0.123,0.123]}')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get('light.test')
    assert (0.14, 0.131) == state.attributes.get('xy_color')
async def test_sending_mqtt_commands_and_optimistic(hass, mqtt_mock):
    """Test the sending of command in optimistic mode."""
    # No state topics are configured, so the light is optimistic: it
    # assumes commands succeed and updates its state immediately.
    config = {light.DOMAIN: {
        'platform': 'mqtt',
        'name': 'test',
        'command_topic': 'test_light_rgb/set',
        'brightness_command_topic': 'test_light_rgb/brightness/set',
        'rgb_command_topic': 'test_light_rgb/rgb/set',
        'color_temp_command_topic': 'test_light_rgb/color_temp/set',
        'effect_command_topic': 'test_light_rgb/effect/set',
        'hs_command_topic': 'test_light_rgb/hs/set',
        'white_value_command_topic': 'test_light_rgb/white_value/set',
        'xy_command_topic': 'test_light_rgb/xy/set',
        'effect_list': ['colorloop', 'random'],
        'qos': 2,
        'payload_on': 'on',
        'payload_off': 'off'
    }}
    # Pretend the entity had this state before a restart; RestoreEntity
    # is patched to hand it back during setup.
    fake_state = ha.State('light.test', 'on', {'brightness': 95,
                                               'hs_color': [100, 100],
                                               'effect': 'random',
                                               'color_temp': 100,
                                               'white_value': 50})
    with patch('homeassistant.helpers.restore_state.RestoreEntity'
               '.async_get_last_state',
               return_value=mock_coro(fake_state)):
        with assert_setup_component(1, light.DOMAIN):
            assert await async_setup_component(hass, light.DOMAIN, config)
    # Restored attributes are visible and the state is marked assumed.
    state = hass.states.get('light.test')
    assert STATE_ON == state.state
    assert 95 == state.attributes.get('brightness')
    assert (100, 100) == state.attributes.get('hs_color')
    assert 'random' == state.attributes.get('effect')
    assert 100 == state.attributes.get('color_temp')
    assert 50 == state.attributes.get('white_value')
    assert state.attributes.get(ATTR_ASSUMED_STATE)
    common.async_turn_on(hass, 'light.test')
    await hass.async_block_till_done()
    mqtt_mock.async_publish.assert_called_once_with(
        'test_light_rgb/set', 'on', 2, False)
    mqtt_mock.async_publish.reset_mock()
    state = hass.states.get('light.test')
    assert STATE_ON == state.state
    common.async_turn_off(hass, 'light.test')
    await hass.async_block_till_done()
    mqtt_mock.async_publish.assert_called_once_with(
        'test_light_rgb/set', 'off', 2, False)
    mqtt_mock.async_publish.reset_mock()
    state = hass.states.get('light.test')
    assert STATE_OFF == state.state
    mqtt_mock.reset_mock()
    # Three turn_on calls with different color arguments; the published
    # messages from all of them are checked together below.
    common.async_turn_on(hass, 'light.test',
                         brightness=50, xy_color=[0.123, 0.123])
    common.async_turn_on(hass, 'light.test',
                         brightness=50, hs_color=[359, 78])
    common.async_turn_on(hass, 'light.test', rgb_color=[255, 128, 0],
                         white_value=80)
    await hass.async_block_till_done()
    mqtt_mock.async_publish.assert_has_calls([
        mock.call('test_light_rgb/set', 'on', 2, False),
        mock.call('test_light_rgb/rgb/set', '255,128,0', 2, False),
        mock.call('test_light_rgb/brightness/set', 50, 2, False),
        mock.call('test_light_rgb/hs/set', '359.0,78.0', 2, False),
        mock.call('test_light_rgb/white_value/set', 80, 2, False),
        mock.call('test_light_rgb/xy/set', '0.14,0.131', 2, False),
    ], any_order=True)
    # Final optimistic state reflects the last call (rgb / white_value).
    state = hass.states.get('light.test')
    assert STATE_ON == state.state
    assert (255, 128, 0) == state.attributes['rgb_color']
    assert 50 == state.attributes['brightness']
    assert (30.118, 100) == state.attributes['hs_color']
    assert 80 == state.attributes['white_value']
    assert (0.611, 0.375) == state.attributes['xy_color']
async def test_sending_mqtt_rgb_command_with_template(hass, mqtt_mock):
    """Test that the RGB command template formats the outgoing payload."""
    config = {light.DOMAIN: {
        'platform': 'mqtt',
        'name': 'test',
        'command_topic': 'test_light_rgb/set',
        'rgb_command_topic': 'test_light_rgb/rgb/set',
        'rgb_command_template': '{{ "#%02x%02x%02x" | '
                                'format(red, green, blue)}}',
        'payload_on': 'on',
        'payload_off': 'off',
        'qos': 0
    }}
    assert await async_setup_component(hass, light.DOMAIN, config)

    state = hass.states.get('light.test')
    assert state.state == STATE_OFF

    common.async_turn_on(hass, 'light.test', rgb_color=[255, 128, 64])
    await hass.async_block_till_done()

    # The template renders the color as a hex string on the wire.
    expected_calls = [
        mock.call('test_light_rgb/set', 'on', 0, False),
        mock.call('test_light_rgb/rgb/set', '#ff803f', 0, False),
    ]
    mqtt_mock.async_publish.assert_has_calls(expected_calls, any_order=True)

    # 64 comes back as 63 — presumably rounding in the internal color
    # conversion.
    state = hass.states.get('light.test')
    assert state.state == STATE_ON
    assert state.attributes['rgb_color'] == (255, 128, 63)
async def test_sending_mqtt_color_temp_command_with_template(hass, mqtt_mock):
    """Test that the color temp command template formats the payload."""
    config = {light.DOMAIN: {
        'platform': 'mqtt',
        'name': 'test',
        'command_topic': 'test_light_color_temp/set',
        'color_temp_command_topic': 'test_light_color_temp/color_temp/set',
        'color_temp_command_template': '{{ (1000 / value) | round(0) }}',
        'payload_on': 'on',
        'payload_off': 'off',
        'qos': 0
    }}
    assert await async_setup_component(hass, light.DOMAIN, config)

    state = hass.states.get('light.test')
    assert state.state == STATE_OFF

    common.async_turn_on(hass, 'light.test', color_temp=100)
    await hass.async_block_till_done()

    # The template publishes 1000 / 100 -> '10'.
    expected_calls = [
        mock.call('test_light_color_temp/set', 'on', 0, False),
        mock.call('test_light_color_temp/color_temp/set', '10', 0, False),
    ]
    mqtt_mock.async_publish.assert_has_calls(expected_calls, any_order=True)

    state = hass.states.get('light.test')
    assert state.state == STATE_ON
    assert state.attributes['color_temp'] == 100
async def test_show_brightness_if_only_command_topic(hass, mqtt_mock):
    """Test the brightness if only a command topic is present."""
    config = {light.DOMAIN: {
        'platform': 'mqtt',
        'name': 'test',
        'brightness_command_topic': 'test_light_rgb/brightness/set',
        'command_topic': 'test_light_rgb/set',
        'state_topic': 'test_light_rgb/status',
    }}
    assert await async_setup_component(hass, light.DOMAIN, config)

    # Off: brightness not yet known.
    state = hass.states.get('light.test')
    assert state.state == STATE_OFF
    assert state.attributes.get('brightness') is None

    async_fire_mqtt_message(hass, 'test_light_rgb/status', 'ON')
    await hass.async_block_till_done()

    # On: defaults to full brightness even without a brightness
    # state topic.
    state = hass.states.get('light.test')
    assert state.state == STATE_ON
    assert state.attributes.get('brightness') == 255
async def test_show_color_temp_only_if_command_topic(hass, mqtt_mock):
    """Test the color temp only if a command topic is present."""
    # NOTE(review): the command topic string says 'brightness' — looks
    # like a copy/paste; harmless since only the option's presence
    # matters here.
    config = {light.DOMAIN: {
        'platform': 'mqtt',
        'name': 'test',
        'color_temp_command_topic': 'test_light_rgb/brightness/set',
        'command_topic': 'test_light_rgb/set',
        'state_topic': 'test_light_rgb/status'
    }}
    assert await async_setup_component(hass, light.DOMAIN, config)

    state = hass.states.get('light.test')
    assert state.state == STATE_OFF
    assert state.attributes.get('color_temp') is None

    async_fire_mqtt_message(hass, 'test_light_rgb/status', 'ON')
    await hass.async_block_till_done()

    # Once on, color_temp defaults to 150.
    state = hass.states.get('light.test')
    assert state.state == STATE_ON
    assert state.attributes.get('color_temp') == 150
async def test_show_effect_only_if_command_topic(hass, mqtt_mock):
    """Test the effect attribute only if a command topic is present."""
    config = {light.DOMAIN: {
        'platform': 'mqtt',
        'name': 'test',
        'effect_command_topic': 'test_light_rgb/effect/set',
        'command_topic': 'test_light_rgb/set',
        'state_topic': 'test_light_rgb/status'
    }}
    assert await async_setup_component(hass, light.DOMAIN, config)

    state = hass.states.get('light.test')
    assert state.state == STATE_OFF
    assert state.attributes.get('effect') is None

    async_fire_mqtt_message(hass, 'test_light_rgb/status', 'ON')
    await hass.async_block_till_done()

    # Once on, the effect defaults to 'none'.
    state = hass.states.get('light.test')
    assert state.state == STATE_ON
    assert state.attributes.get('effect') == 'none'
async def test_show_hs_if_only_command_topic(hass, mqtt_mock):
    """Test the hs if only a command topic is present."""
    config = {light.DOMAIN: {
        'platform': 'mqtt',
        'name': 'test',
        'hs_command_topic': 'test_light_rgb/hs/set',
        'command_topic': 'test_light_rgb/set',
        'state_topic': 'test_light_rgb/status',
    }}
    assert await async_setup_component(hass, light.DOMAIN, config)

    state = hass.states.get('light.test')
    assert state.state == STATE_OFF
    assert state.attributes.get('hs_color') is None

    async_fire_mqtt_message(hass, 'test_light_rgb/status', 'ON')
    await hass.async_block_till_done()

    # Once on, hs_color defaults to (0, 0).
    state = hass.states.get('light.test')
    assert state.state == STATE_ON
    assert state.attributes.get('hs_color') == (0, 0)
async def test_show_white_value_if_only_command_topic(hass, mqtt_mock):
    """Test the white_value if only a command topic is present."""
    config = {light.DOMAIN: {
        'platform': 'mqtt',
        'name': 'test',
        'white_value_command_topic': 'test_light_rgb/white_value/set',
        'command_topic': 'test_light_rgb/set',
        'state_topic': 'test_light_rgb/status',
    }}
    assert await async_setup_component(hass, light.DOMAIN, config)

    state = hass.states.get('light.test')
    assert state.state == STATE_OFF
    assert state.attributes.get('white_value') is None

    async_fire_mqtt_message(hass, 'test_light_rgb/status', 'ON')
    await hass.async_block_till_done()

    # Once on, white_value defaults to the maximum.
    state = hass.states.get('light.test')
    assert state.state == STATE_ON
    assert state.attributes.get('white_value') == 255
async def test_show_xy_if_only_command_topic(hass, mqtt_mock):
    """Test the xy if only a command topic is present."""
    config = {light.DOMAIN: {
        'platform': 'mqtt',
        'name': 'test',
        'xy_command_topic': 'test_light_rgb/xy/set',
        'command_topic': 'test_light_rgb/set',
        'state_topic': 'test_light_rgb/status',
    }}
    assert await async_setup_component(hass, light.DOMAIN, config)

    state = hass.states.get('light.test')
    assert state.state == STATE_OFF
    assert state.attributes.get('xy_color') is None

    async_fire_mqtt_message(hass, 'test_light_rgb/status', 'ON')
    await hass.async_block_till_done()

    # Once on, xy defaults to (0.323, 0.329).
    state = hass.states.get('light.test')
    assert state.state == STATE_ON
    assert state.attributes.get('xy_color') == (0.323, 0.329)
async def test_on_command_first(hass, mqtt_mock):
    """Test on command being sent before brightness."""
    config = {light.DOMAIN: {
        'platform': 'mqtt',
        'name': 'test',
        'command_topic': 'test_light/set',
        'brightness_command_topic': 'test_light/bright',
        'on_command_type': 'first',
    }}
    assert await async_setup_component(hass, light.DOMAIN, config)

    state = hass.states.get('light.test')
    assert state.state == STATE_OFF

    common.async_turn_on(hass, 'light.test', brightness=50)
    await hass.async_block_till_done()

    # 'ON' is published first, then the brightness payload.
    expected_calls = [
        mock.call('test_light/set', 'ON', 0, False),
        mock.call('test_light/bright', 50, 0, False),
    ]
    mqtt_mock.async_publish.assert_has_calls(expected_calls, any_order=True)
    mqtt_mock.async_publish.reset_mock()

    common.async_turn_off(hass, 'light.test')
    await hass.async_block_till_done()

    mqtt_mock.async_publish.assert_called_once_with(
        'test_light/set', 'OFF', 0, False)
async def test_on_command_last(hass, mqtt_mock):
    """Test on command being sent after brightness."""
    config = {light.DOMAIN: {
        'platform': 'mqtt',
        'name': 'test',
        'command_topic': 'test_light/set',
        'brightness_command_topic': 'test_light/bright',
    }}
    assert await async_setup_component(hass, light.DOMAIN, config)

    state = hass.states.get('light.test')
    assert state.state == STATE_OFF

    common.async_turn_on(hass, 'light.test', brightness=50)
    await hass.async_block_till_done()

    # Brightness goes out first, 'ON' last (the default ordering).
    expected_calls = [
        mock.call('test_light/bright', 50, 0, False),
        mock.call('test_light/set', 'ON', 0, False),
    ]
    mqtt_mock.async_publish.assert_has_calls(expected_calls, any_order=True)
    mqtt_mock.async_publish.reset_mock()

    common.async_turn_off(hass, 'light.test')
    await hass.async_block_till_done()

    mqtt_mock.async_publish.assert_called_once_with(
        'test_light/set', 'OFF', 0, False)
async def test_on_command_brightness(hass, mqtt_mock):
    """Test on command being sent as only brightness."""
    # on_command_type 'brightness': turning on publishes a brightness
    # payload instead of a separate 'ON' message.
    config = {light.DOMAIN: {
        'platform': 'mqtt',
        'name': 'test',
        'command_topic': 'test_light/set',
        'brightness_command_topic': 'test_light/bright',
        'rgb_command_topic': "test_light/rgb",
        'on_command_type': 'brightness',
    }}
    assert await async_setup_component(hass, light.DOMAIN, config)
    state = hass.states.get('light.test')
    assert STATE_OFF == state.state
    # Turn on w/ no brightness - should set to max
    common.async_turn_on(hass, 'light.test')
    await hass.async_block_till_done()
    # Should get the following MQTT messages.
    #    test_light/bright: 255
    mqtt_mock.async_publish.assert_called_once_with(
        'test_light/bright', 255, 0, False)
    mqtt_mock.async_publish.reset_mock()
    common.async_turn_off(hass, 'light.test')
    await hass.async_block_till_done()
    # Turning off still uses the regular command topic.
    mqtt_mock.async_publish.assert_called_once_with(
        'test_light/set', 'OFF', 0, False)
    mqtt_mock.async_publish.reset_mock()
    # Turn on w/ brightness
    common.async_turn_on(hass, 'light.test', brightness=50)
    await hass.async_block_till_done()
    mqtt_mock.async_publish.assert_called_once_with(
        'test_light/bright', 50, 0, False)
    mqtt_mock.async_publish.reset_mock()
    common.async_turn_off(hass, 'light.test')
    await hass.async_block_till_done()
    # Turn on w/ just a color to insure brightness gets
    # added and sent.
    common.async_turn_on(hass, 'light.test', rgb_color=[255, 128, 0])
    await hass.async_block_till_done()
    # The previously set brightness (50) is re-sent with the color.
    mqtt_mock.async_publish.assert_has_calls([
        mock.call('test_light/rgb', '255,128,0', 0, False),
        mock.call('test_light/bright', 50, 0, False)
    ], any_order=True)
async def test_on_command_rgb(hass, mqtt_mock):
    """Test on command in RGB brightness mode."""
    config = {light.DOMAIN: {
        'platform': 'mqtt',
        'name': 'test',
        'command_topic': 'test_light/set',
        'rgb_command_topic': "test_light/rgb",
    }}
    assert await async_setup_component(hass, light.DOMAIN, config)

    state = hass.states.get('light.test')
    assert state.state == STATE_OFF

    common.async_turn_on(hass, 'light.test', brightness=127)
    await hass.async_block_till_done()

    # The brightness is encoded into the RGB payload, alongside 'ON'.
    expected_calls = [
        mock.call('test_light/rgb', '127,127,127', 0, False),
        mock.call('test_light/set', 'ON', 0, False),
    ]
    mqtt_mock.async_publish.assert_has_calls(expected_calls, any_order=True)
    mqtt_mock.async_publish.reset_mock()

    common.async_turn_off(hass, 'light.test')
    await hass.async_block_till_done()

    mqtt_mock.async_publish.assert_called_once_with(
        'test_light/set', 'OFF', 0, False)
async def test_default_availability_payload(hass, mqtt_mock):
    """Test availability by default payload with defined topic."""
    config = {light.DOMAIN: {
        'platform': 'mqtt',
        'name': 'test',
        'command_topic': 'test_light/set',
        'brightness_command_topic': 'test_light/bright',
        'rgb_command_topic': "test_light/rgb",
        'availability_topic': 'availability-topic'
    }}
    assert await async_setup_component(hass, light.DOMAIN, config)

    # Unavailable until the device announces itself.
    state = hass.states.get('light.test')
    assert state.state == STATE_UNAVAILABLE

    # Default 'online' payload marks the light available...
    async_fire_mqtt_message(hass, 'availability-topic', 'online')
    await hass.async_block_till_done()
    state = hass.states.get('light.test')
    assert state.state != STATE_UNAVAILABLE

    # ...and the default 'offline' payload marks it unavailable again.
    async_fire_mqtt_message(hass, 'availability-topic', 'offline')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get('light.test')
    assert state.state == STATE_UNAVAILABLE
async def test_custom_availability_payload(hass, mqtt_mock):
    """Test availability by custom payload with defined topic."""
    config = {light.DOMAIN: {
        'platform': 'mqtt',
        'name': 'test',
        'command_topic': 'test_light/set',
        'brightness_command_topic': 'test_light/bright',
        'rgb_command_topic': "test_light/rgb",
        'availability_topic': 'availability-topic',
        'payload_available': 'good',
        'payload_not_available': 'nogood'
    }}
    assert await async_setup_component(hass, light.DOMAIN, config)

    # Unavailable until the device announces itself.
    state = hass.states.get('light.test')
    assert state.state == STATE_UNAVAILABLE

    # Custom 'good' payload marks the light available...
    async_fire_mqtt_message(hass, 'availability-topic', 'good')
    await hass.async_block_till_done()
    state = hass.states.get('light.test')
    assert state.state != STATE_UNAVAILABLE

    # ...and the custom 'nogood' payload marks it unavailable again.
    async_fire_mqtt_message(hass, 'availability-topic', 'nogood')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get('light.test')
    assert state.state == STATE_UNAVAILABLE
async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock):
    """Test the setting of attribute via MQTT with JSON payload."""
    config = {light.DOMAIN: {
        'platform': 'mqtt',
        'name': 'test',
        'command_topic': 'test-topic',
        'json_attributes_topic': 'attr-topic'
    }}
    assert await async_setup_component(hass, light.DOMAIN, config)

    # Keys of the JSON payload become entity attributes.
    async_fire_mqtt_message(hass, 'attr-topic', '{ "val": "100" }')
    await hass.async_block_till_done()

    state = hass.states.get('light.test')
    assert state.attributes.get('val') == '100'
async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog):
    """Test attributes get extracted from a JSON result."""
    config = {light.DOMAIN: {
        'platform': 'mqtt',
        'name': 'test',
        'command_topic': 'test-topic',
        'json_attributes_topic': 'attr-topic'
    }}
    assert await async_setup_component(hass, light.DOMAIN, config)

    # A JSON list is rejected and logged; no attribute is set.
    async_fire_mqtt_message(hass, 'attr-topic', '[ "list", "of", "things"]')
    await hass.async_block_till_done()

    state = hass.states.get('light.test')
    assert state.attributes.get('val') is None
    assert 'JSON result was not a dictionary' in caplog.text
async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog):
    """Test attributes get extracted from a JSON result."""
    config = {light.DOMAIN: {
        'platform': 'mqtt',
        'name': 'test',
        'command_topic': 'test-topic',
        'json_attributes_topic': 'attr-topic'
    }}
    assert await async_setup_component(hass, light.DOMAIN, config)

    # A payload that fails JSON parsing is logged and ignored.
    async_fire_mqtt_message(hass, 'attr-topic', 'This is not JSON')
    await hass.async_block_till_done()

    state = hass.states.get('light.test')
    assert state.attributes.get('val') is None
    assert 'Erroneous JSON: This is not JSON' in caplog.text
async def test_discovery_update_attr(hass, mqtt_mock, caplog):
    """Test update of discovered MQTTAttributes."""
    entry = MockConfigEntry(domain=mqtt.DOMAIN)
    await async_start(hass, 'homeassistant', {}, entry)
    # Two discovery payloads that differ only in json_attributes_topic.
    data1 = (
        '{ "name": "Beer",'
        ' "command_topic": "test_topic",'
        ' "json_attributes_topic": "attr-topic1" }'
    )
    data2 = (
        '{ "name": "Beer",'
        ' "command_topic": "test_topic",'
        ' "json_attributes_topic": "attr-topic2" }'
    )
    async_fire_mqtt_message(hass, 'homeassistant/light/bla/config',
                            data1)
    await hass.async_block_till_done()
    async_fire_mqtt_message(hass, 'attr-topic1', '{ "val": "100" }')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get('light.beer')
    assert '100' == state.attributes.get('val')
    # Change json_attributes_topic
    async_fire_mqtt_message(hass, 'homeassistant/light/bla/config',
                            data2)
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    # Verify we are no longer subscribing to the old topic
    # (the attribute must keep its previous value).
    async_fire_mqtt_message(hass, 'attr-topic1', '{ "val": "50" }')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get('light.beer')
    assert '100' == state.attributes.get('val')
    # Verify we are subscribing to the new topic
    async_fire_mqtt_message(hass, 'attr-topic2', '{ "val": "75" }')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get('light.beer')
    assert '75' == state.attributes.get('val')
async def test_unique_id(hass):
    """Test unique id option only creates one light per unique_id."""
    await async_mock_mqtt_component(hass)
    # Two configs differing only in 'name' but sharing a unique_id.
    common = {
        'platform': 'mqtt',
        'state_topic': 'test-topic',
        'command_topic': 'test_topic',
        'unique_id': 'TOTALLY_UNIQUE',
    }
    assert await async_setup_component(hass, light.DOMAIN, {
        light.DOMAIN: [
            dict(common, name='Test 1'),
            dict(common, name='Test 2'),
        ]
    })
    async_fire_mqtt_message(hass, 'test-topic', 'payload')
    await hass.async_block_till_done()
    # Only the first config may have produced an entity.
    assert len(hass.states.async_entity_ids(light.DOMAIN)) == 1
async def test_discovery_removal_light(hass, mqtt_mock, caplog):
    """Test removal of discovered light."""
    entry = MockConfigEntry(domain=mqtt.DOMAIN)
    await async_start(hass, 'homeassistant', {}, entry)
    config_topic = 'homeassistant/light/bla/config'
    payload = (
        '{ "name": "Beer", "state_topic": "test_topic",'
        ' "command_topic": "test_topic" }')

    # Discovery creates the entity.
    async_fire_mqtt_message(hass, config_topic, payload)
    await hass.async_block_till_done()
    state = hass.states.get('light.beer')
    assert state is not None
    assert state.name == 'Beer'

    # An empty payload on the same discovery topic removes it again.
    async_fire_mqtt_message(hass, config_topic, '')
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    assert hass.states.get('light.beer') is None
async def test_discovery_deprecated(hass, mqtt_mock, caplog):
    """Test discovery of mqtt light with deprecated platform option."""
    entry = MockConfigEntry(domain=mqtt.DOMAIN)
    await async_start(hass, 'homeassistant', {'mqtt': {}}, entry)
    # Payload still carries the deprecated 'platform' key.
    payload = (
        '{ "name": "Beer", "platform": "mqtt",'
        ' "command_topic": "test_topic"}')

    async_fire_mqtt_message(hass, 'homeassistant/light/bla/config', payload)
    await hass.async_block_till_done()

    state = hass.states.get('light.beer')
    assert state is not None
    assert state.name == 'Beer'
async def test_discovery_update_light(hass, mqtt_mock, caplog):
    """Test update of discovered light."""
    entry = MockConfigEntry(domain=mqtt.DOMAIN)
    await async_start(hass, 'homeassistant', {}, entry)
    config_topic = 'homeassistant/light/bla/config'
    beer_payload = (
        '{ "name": "Beer", "state_topic": "test_topic",'
        ' "command_topic": "test_topic" }')
    milk_payload = (
        '{ "name": "Milk", "state_topic": "test_topic",'
        ' "command_topic": "test_topic" }')

    async_fire_mqtt_message(hass, config_topic, beer_payload)
    await hass.async_block_till_done()
    state = hass.states.get('light.beer')
    assert state is not None
    assert state.name == 'Beer'

    # A changed payload updates the existing entity in place: the
    # entity_id stays light.beer, only the friendly name changes.
    async_fire_mqtt_message(hass, config_topic, milk_payload)
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get('light.beer')
    assert state is not None
    assert state.name == 'Milk'
    assert hass.states.get('light.milk') is None
async def test_discovery_broken(hass, mqtt_mock, caplog):
    """Test handling of bad discovery message."""
    entry = MockConfigEntry(domain=mqtt.DOMAIN)
    await async_start(hass, 'homeassistant', {}, entry)
    config_topic = 'homeassistant/light/bla/config'
    incomplete_payload = '{ "name": "Beer" }'
    valid_payload = (
        '{ "name": "Milk", "state_topic": "test_topic",'
        ' "command_topic": "test_topic" }')

    # A payload missing the required topics must not create an entity.
    async_fire_mqtt_message(hass, config_topic, incomplete_payload)
    await hass.async_block_till_done()
    assert hass.states.get('light.beer') is None

    # A subsequent valid payload on the same discovery topic still works.
    async_fire_mqtt_message(hass, config_topic, valid_payload)
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get('light.milk')
    assert state is not None
    assert state.name == 'Milk'
    assert hass.states.get('light.beer') is None
async def test_entity_device_info_with_identifier(hass, mqtt_mock):
    """Test MQTT light device registry integration."""
    entry = MockConfigEntry(domain=mqtt.DOMAIN)
    entry.add_to_hass(hass)
    await async_start(hass, 'homeassistant', {}, entry)
    registry = await hass.helpers.device_registry.async_get_registry()

    payload = json.dumps({
        'platform': 'mqtt',
        'name': 'Test 1',
        'state_topic': 'test-topic',
        'command_topic': 'test-topic',
        'device': {
            'identifiers': ['helloworld'],
            'connections': [
                ["mac", "02:5b:26:a8:dc:12"],
            ],
            'manufacturer': 'Whatever',
            'name': 'Beer',
            'model': 'Glass',
            'sw_version': '0.1-beta',
        },
        'unique_id': 'veryunique'
    })
    async_fire_mqtt_message(hass, 'homeassistant/light/bla/config', payload)
    await hass.async_block_till_done()
    await hass.async_block_till_done()

    device = registry.async_get_device({('mqtt', 'helloworld')}, set())
    assert device is not None
    # Check every registry field produced from the discovery payload.
    expected = {
        'identifiers': {('mqtt', 'helloworld')},
        'connections': {('mac', '02:5b:26:a8:dc:12')},
        'manufacturer': 'Whatever',
        'name': 'Beer',
        'model': 'Glass',
        'sw_version': '0.1-beta',
    }
    for attr, value in expected.items():
        assert getattr(device, attr) == value
async def test_entity_device_info_update(hass, mqtt_mock):
    """Test device registry update."""
    entry = MockConfigEntry(domain=mqtt.DOMAIN)
    entry.add_to_hass(hass)
    await async_start(hass, 'homeassistant', {}, entry)
    registry = await hass.helpers.device_registry.async_get_registry()
    discovery_topic = 'homeassistant/light/bla/config'

    config = {
        'platform': 'mqtt',
        'name': 'Test 1',
        'state_topic': 'test-topic',
        'command_topic': 'test-command-topic',
        'device': {
            'identifiers': ['helloworld'],
            'connections': [
                ["mac", "02:5b:26:a8:dc:12"],
            ],
            'manufacturer': 'Whatever',
            'name': 'Beer',
            'model': 'Glass',
            'sw_version': '0.1-beta',
        },
        'unique_id': 'veryunique'
    }

    def publish_config():
        async_fire_mqtt_message(hass, discovery_topic, json.dumps(config))

    publish_config()
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    device = registry.async_get_device({('mqtt', 'helloworld')}, set())
    assert device is not None
    assert device.name == 'Beer'

    # Renaming the device in the discovery payload updates the registry.
    config['device']['name'] = 'Milk'
    publish_config()
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    device = registry.async_get_device({('mqtt', 'helloworld')}, set())
    assert device is not None
    assert device.name == 'Milk'
async def test_entity_id_update(hass, mqtt_mock):
    """Test MQTT subscriptions are managed when entity_id is updated."""
    registry = mock_registry(hass, {})
    mock_mqtt = await async_mock_mqtt_component(hass)
    assert await async_setup_component(hass, light.DOMAIN, {
        light.DOMAIN: [{
            'platform': 'mqtt',
            'name': 'beer',
            'state_topic': 'test-topic',
            'command_topic': 'command-topic',
            'availability_topic': 'avty-topic',
            'unique_id': 'TOTALLY_UNIQUE'
        }]
    })

    def assert_subscriptions():
        # The light subscribes to exactly its state and availability topics.
        assert mock_mqtt.async_subscribe.call_count == 2
        mock_mqtt.async_subscribe.assert_any_call('test-topic', ANY, 0, 'utf-8')
        mock_mqtt.async_subscribe.assert_any_call('avty-topic', ANY, 0, 'utf-8')

    assert hass.states.get('light.beer') is not None
    assert_subscriptions()
    mock_mqtt.async_subscribe.reset_mock()

    registry.async_update_entity('light.beer', new_entity_id='light.milk')
    await hass.async_block_till_done()
    await hass.async_block_till_done()

    # The old id is gone, the new one exists, and the same two topics
    # were re-subscribed under the new entity_id.
    assert hass.states.get('light.beer') is None
    assert hass.states.get('light.milk') is not None
    assert_subscriptions()
| {
"content_hash": "1dc30d6d52e0e3384bfc4fec18ec45ce",
"timestamp": "",
"source": "github",
"line_count": 1429,
"max_line_length": 78,
"avg_line_length": 35.32680195941218,
"alnum_prop": 0.623568796798859,
"repo_name": "nugget/home-assistant",
"id": "cfb0d75d1c721009156f7da668e42b5eb2993d35",
"size": "50482",
"binary": false,
"copies": "6",
"ref": "refs/heads/dev",
"path": "tests/components/mqtt/test_light.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1175"
},
{
"name": "Dockerfile",
"bytes": "1081"
},
{
"name": "HCL",
"bytes": "826"
},
{
"name": "Python",
"bytes": "14492390"
},
{
"name": "Ruby",
"bytes": "745"
},
{
"name": "Shell",
"bytes": "17526"
}
],
"symlink_target": ""
} |
import os
from py2neo import Graph
import config
def create_p2n_driver():
    """Build a py2neo Graph handle; env vars override config-file values."""
    def _setting(env_key, config_key):
        # Environment variables win over the 'neo4j' section of the config.
        return os.environ.get(env_key) or config.get('neo4j', config_key)

    uri = "http://{}:{}@{}:7474/db/data/".format(
        _setting('neo4juser', 'user'),
        _setting('neo4jpass', 'pass'),
        _setting('neo4jhost', 'host'))
    return Graph(uri)
| {
"content_hash": "b2893fb3fe2960a87223d02541dd6797",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 68,
"avg_line_length": 29.75,
"alnum_prop": 0.6246498599439776,
"repo_name": "nikitamarchenko/open-kilda",
"id": "0f7d793311fee8878b5078a03b66c7aeeecfa9fb",
"size": "961",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "services/topology-engine/queue-engine/topologylistener/db.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "43545"
},
{
"name": "Gherkin",
"bytes": "57157"
},
{
"name": "Groovy",
"bytes": "243"
},
{
"name": "HTML",
"bytes": "61822"
},
{
"name": "Java",
"bytes": "2674140"
},
{
"name": "JavaScript",
"bytes": "105371"
},
{
"name": "Makefile",
"bytes": "22000"
},
{
"name": "Python",
"bytes": "359953"
},
{
"name": "Ruby",
"bytes": "1185"
},
{
"name": "Shell",
"bytes": "77403"
}
],
"symlink_target": ""
} |
"""
.. module:: livefyre
:platform: Unix, Windows
:synopsis: Defines a REST API wrapper for Livefyre's (livefyre.com) v3
commenting system.
.. moduleauthor:: Jason Novinger <jnovinger@gmail.com>
"""
import hashlib
import json
import os
from urllib import urlencode
import urlparse
import requests
from .utils import create_auth_token, jid, jwt_encode, validate_url
# Fallback credentials/identifiers pulled from the process environment;
# explicit constructor arguments take precedence over these.
DEFAULT_LIVEFYRE_NETWORK = os.environ.get('LIVEFYRE_NETWORK')
DEFAULT_LIVEFYRE_NETWORK_SECRET = os.environ.get('LIVEFYRE_NETWORK_SECRET')
DEFAULT_LIVEFYRE_SITE_ID = os.environ.get('LIVEFYRE_SITE_ID')
DEFAULT_LIVEFYRE_SITE_SECRET = os.environ.get('LIVEFYRE_SITE_SECRET')

# API endpoint templates ({network}/{site_id} are filled in at runtime).
LIVEFYRE_API_BASE = 'http://quill.{network}.fyre.co/api/v3.0/site/{site_id}'
LIVEFYRE_API_USER_BASE = 'http://{network}.fyre.co/api/v3_0'
LIVEFYRE_API_BARE = 'http://{network}.fyre.co/'

# Supported HTTP methods for Livefyre.send_data().
HTTP_METHODS = ['GET', 'POST']
class Livefyre(object):
    """A Livefyre v3 API client.

    Wraps the Quill REST endpoints for collections and user-profile
    synchronization. Credentials default to the LIVEFYRE_* environment
    variables (see the module-level DEFAULT_* constants).
    """

    # NOTE(review): this table is not referenced by any method below —
    # it looks like documentation of the endpoint layout; confirm before use.
    ENDPOINTS = {
        'COLLECTIONS': {
            '_base': 'collection',
            'create': 'create',
        },
        'USER': {
            '_base': 'user',
            'ping_to_pull': '{user_id}/refresh',
        },
    }

    def __init__(self,
                 network=None,
                 network_secret=None,
                 site_id=None,
                 site_secret=None,
                 system_token=None):
        """Initializes a Livefyre API wrapper instance.

        :param str network:
            Required, the Livefyre "network_" assigned to you.
            A trailing ".fyre.co" suffix is stripped if present.
        :param str network_secret:
            Required, super secret API key associated with your network.
        :param str site_id:
            Optional, but required for some operations like
            :meth:`create_collection`.
        :param str site_secret:
            Optional, but required for any operations that also require
            :param:`site_id`.
        :param str system_token:
            Optional, provides the ability to supply a pre-generated
            `system@{network}` JWT auth token.
        :returns:
            None

        .. _network: https://github.com/Livefyre/livefyre-docs/wiki/Livefyre-concepts#data-model
        """
        super(Livefyre, self).__init__()
        self.network = network or DEFAULT_LIVEFYRE_NETWORK
        if ".fyre.co" in self.network:
            self.network = self.network.replace(".fyre.co", "")
        self.network_secret = network_secret or DEFAULT_LIVEFYRE_NETWORK_SECRET
        self.site_id = site_id or DEFAULT_LIVEFYRE_SITE_ID
        self.site_secret = site_secret or DEFAULT_LIVEFYRE_SITE_SECRET
        # Pre-render the three endpoint roots used by send_data().
        self.base_api = LIVEFYRE_API_BASE.format(network=self.network,
                                                 site_id=self.site_id)
        self.bare_api = LIVEFYRE_API_BARE.format(network=self.network)
        self.user_api = LIVEFYRE_API_USER_BASE.format(network=self.network)
        self._system_token = system_token
        # One shared requests session, reused across all calls.
        self.session = requests.session()

    def _get_system_token(self):
        """Returns the JWT for the `system@{network}` user, generating one
        on demand if none was supplied at construction time."""
        if self._system_token:
            return self._system_token
        # if one was not passed in, create one for the "system" user
        system_token = self._create_auth_token(user_id='system')
        return system_token

    # Exposed as a read-only property: `client.token`.
    token = property(_get_system_token)

    def _make_jid(self, user_id):
        # Jabber-style id built from the user id and this network.
        return jid(user_id, self.network)

    def _create_auth_token(self, user_id='system', display_name="", expires=None):
        """Generate a JSON Web Token for a user id.

        NOTE(review): the `expires` argument is accepted but never
        forwarded to create_auth_token — confirm whether token expiry
        should be honored here.
        """
        return create_auth_token(
            user_id=user_id,
            network=self.network,
            network_secret=self.network_secret,
            display_name=display_name,
        )

    def list_sites(self):
        """Lists the network's sites; returns (raw body, full response)."""
        params = {'actor_token': self.token}
        params = urlencode(params)
        response = self.send_data(
            endpoint='/sites?{}'.format(params),
            payload={},
            api='http://quill.{}.fyre.co/'.format(self.network),
        )
        return response.content, response

    def create_collection(self, title, url, article_id, stream_type, tags):
        """Creates a StreamHub collection; returns (Collection, response)."""
        collection = Collection(
            title, url, article_id, stream_type, tags, self.site_secret)
        response = self.send_data(
            endpoint='/collection/create',
            payload=collection.payload()
        )
        return collection, response

    def ping_to_pull(self, user_id, token=None):
        """Calls to the Livefyre Ping To Pull API

        Tells Livefyre that a user profile has changed and that they should
        hit our pre-registered API point to grab new information about the
        profile.

        If :param:`user_id` is not a string, it is assumed to be an object with
        a :var:`livefyre_id` attribute. See:
        [django-coversate](https://github.com/dailydot/django-conversate).

        If no token is passed, then one will be generated based on exisiting
        known credentials.
        """
        if not isinstance(user_id, basestring):
            user_id = user_id.livefyre_id
        # use a token if presented, else one presented at object instantiation
        # else one generated from the user_id
        token = (token or
                 self.token or
                 self._create_auth_token())
        endpoint = '/user/{user_id}/refresh'.format(user_id=user_id)
        payload = {'lftoken': token}
        return self.send_data(endpoint, payload, api=self.user_api)

    def register_profile_pull_interface(self, url, token=None):
        """Registers `url` as the profile-pull endpoint for this network."""
        assert 'http' in url, "The Pull URL must be a valide HTTP(s) URL."
        params = {
            'actor_token': token or self.token,
            'pull_profile_url': url,
        }
        endpoint = '?{}'.format(urlencode(params))
        return self.send_data(endpoint, payload={}, api=self.bare_api)

    def send_data(self, endpoint, payload, method="POST", api=None):
        """Sends `payload` to `api` + `endpoint` via the shared session and
        returns the requests Response. `method` must be in HTTP_METHODS."""
        if api is None:
            api = self.base_api
        url = '{}{}'.format(api, endpoint)
        assert method in HTTP_METHODS, "Sorry, we only support {} as HTTP methods".format(HTTP_METHODS)
        method_ = getattr(self.session, method.lower())
        return method_(url, data=payload)
class Collection(object):
    """Represents a Livefyre StreamHub Collection."""

    TYPES = ['livecomments', 'liveblog', 'livechat']

    def __init__(self,
                 title,
                 url,
                 article_id,
                 stream_type='livecomments',
                 tags=None,
                 site_secret=None):
        assert title, 'title may not be empty.'
        assert article_id, 'article_id may not be empty'
        assert 'http' in urlparse.urlparse(url).scheme, (
            'The URL must be a fully qualified url whose scheme is either '
            '"http" or "https".')
        assert stream_type in self.TYPES, (
            'stream_type must be one of {}'.format(self.TYPES))

        def clip(text):
            # Livefyre caps these fields at 255 characters.
            return text if len(text) < 256 else text[:255]

        self.collection = {
            'title': clip(title),
            'url': url,
            'articleId': clip(article_id),
            'stream_type': stream_type,
            'tags': self._tagify(tags),
        }
        self.site_secret = site_secret

    def _tagify(self, tags):
        # Comma-separated string -> list; falsy input -> empty list.
        return tags.split(',') if tags else []

    def meta(self):
        """JWT-encoded collection metadata, signed with the site secret."""
        return jwt_encode(self.collection, self.site_secret)

    def checksum(self):
        """MD5 hex digest of the encoded metadata."""
        return hashlib.md5(self.meta()).hexdigest()

    def payload(self):
        """JSON body expected by the /collection/create endpoint."""
        return json.dumps({
            'collectionMeta': self.meta(),
            'type': self.collection['stream_type'],
            'checksum': self.checksum(),
        })
| {
"content_hash": "6086a4702414264ec18dbafc107f75dd",
"timestamp": "",
"source": "github",
"line_count": 242,
"max_line_length": 119,
"avg_line_length": 32.48347107438016,
"alnum_prop": 0.5997964635542552,
"repo_name": "jnovinger/python-livefyre",
"id": "b23ff032a1aa90a3f8166e6b63b5363dae783083",
"size": "7886",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "livefyre/livefyre.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "21588"
},
{
"name": "Shell",
"bytes": "6466"
}
],
"symlink_target": ""
} |
import os
import pickle
import pandas as pd
import numpy as np
import xgboost as xgb
import lightgbm as lgbm
from utils import *
def xgboost(XY_train, XY_validate, test_df, features, XY_all=None, restore=False):
    """Train (or restore) an XGBoost multiclass model and predict on test_df.

    Args:
        XY_train, XY_validate: DataFrames holding the feature columns plus
            'y' (label) and 'weight' columns.
        test_df: DataFrame with the feature columns to predict on.
        features: ordered collection of feature column names.
        XY_all: optional DataFrame; when given, the model is retrained on it
            with a proportionally scaled tree count before predicting.
        restore: when True, load the previously pickled model instead of
            training.

    Returns:
        Array of shape (len(test_df), num_classes) with class probabilities.
    """
    param = {
        'objective': 'multi:softprob',
        'eta': 0.1,
        'min_child_weight': 10,
        'max_depth': 8,
        'silent': 1,
        # 'nthread': 16,
        'eval_metric': 'mlogloss',
        'colsample_bytree': 0.8,
        'colsample_bylevel': 0.9,
        'num_class': len(products),
    }

    def _dmatrix(df):
        # DataFrame.as_matrix() was removed in pandas 1.0; selecting the
        # columns and taking .values is the supported equivalent.
        return xgb.DMatrix(df[list(features)].values,
                           label=df[["y"]].values,
                           feature_names=features,
                           weight=df[["weight"]].values)

    if not restore:
        train = _dmatrix(XY_train)
        validate = _dmatrix(XY_validate)
        with Timer("train"):
            print(param)
            evallist = [(train, 'train'), (validate, 'eval')]
            model = xgb.train(param, train, 1000, evals=evallist,
                              early_stopping_rounds=20)
            # Use a context manager so the pickle file handle is closed
            # deterministically (the original leaked it).
            with open("next_multi.pickle", "wb") as f:
                pickle.dump(model, f)
    else:
        with Timer("restore model"):
            with open("next_multi.pickle", "rb") as f:
                model = pickle.load(f)

    best_ntree_limit = model.best_ntree_limit

    if XY_all is not None:
        all_data = _dmatrix(XY_all)
        evallist = [(all_data, 'all_data')]
        # Scale the tree count up in proportion to the larger training set.
        best_ntree_limit = int(
            best_ntree_limit * (len(XY_train) + len(XY_validate)) / len(XY_train))
        model = xgb.train(param, all_data, best_ntree_limit, evals=evallist)

    print("Feature importance:")
    for kv in sorted(model.get_fscore().items(),
                     key=lambda kv: kv[1], reverse=True):
        print(kv)

    test = xgb.DMatrix(test_df[list(features)].values, feature_names=features)
    return model.predict(test, ntree_limit=best_ntree_limit)
def lightgbm(XY_train, XY_validate, test_df, features, XY_all=None, restore=False):
    """Train (or restore) a LightGBM multiclass model and predict on test_df.

    Args:
        XY_train, XY_validate: DataFrames with the feature columns plus
            'y' (label) and 'weight' columns.
        test_df: DataFrame with the feature columns to predict on.
        features: ordered collection of feature column names.
        XY_all: optional DataFrame; when given, the model is retrained on it
            with a proportionally scaled iteration count before predicting.
        restore: when True, reload the previously saved model instead of
            training.

    Returns:
        Array of shape (len(test_df), 24) with class probabilities.
    """
    train = lgbm.Dataset(XY_train[list(features)], label=XY_train["y"],
                         weight=XY_train["weight"], feature_name=features)
    validate = lgbm.Dataset(XY_validate[list(features)], label=XY_validate["y"],
                            weight=XY_validate["weight"], feature_name=features,
                            reference=train)

    params = {
        'task': 'train',
        'boosting_type': 'gbdt',
        'objective': 'multiclass',
        'num_class': 24,
        'metric': {'multi_logloss'},
        'is_training_metric': True,
        'max_bin': 255,
        'num_leaves': 64,
        'learning_rate': 0.1,
        'feature_fraction': 0.8,
        'min_data_in_leaf': 10,
        'min_sum_hessian_in_leaf': 5,
        # 'num_threads': 16,
    }
    print(params)

    if not restore:
        with Timer("train lightgbm_lib"):
            model = lgbm.train(params, train, num_boost_round=1000,
                               valid_sets=validate, early_stopping_rounds=20)
            best_iteration = model.best_iteration
            model.save_model("tmp/lgbm.model.txt")
            # Close the metadata file deterministically (the original leaked
            # the handle returned by open()).
            with open("tmp/lgbm.model.meta", "wb") as f:
                pickle.dump(best_iteration, f)
    else:
        with Timer("restore lightgbm_lib model"):
            model = lgbm.Booster(model_file="tmp/lgbm.model.txt")
            with open("tmp/lgbm.model.meta", "rb") as f:
                best_iteration = pickle.load(f)

    if XY_all is not None:
        # Scale the iteration count in proportion to the larger training set.
        best_iteration = int(best_iteration * len(XY_all) / len(XY_train))
        all_train = lgbm.Dataset(XY_all[list(features)], label=XY_all["y"],
                                 weight=XY_all["weight"], feature_name=features)
        with Timer("retrain lightgbm_lib with all data"):
            model = lgbm.train(params, all_train, num_boost_round=best_iteration)
            model.save_model("tmp/lgbm.all.model.txt")

    print("Feature importance by split:")
    for kv in sorted(zip(features, model.feature_importance("split")),
                     key=lambda kv: kv[1], reverse=True):
        print(kv)
    print("Feature importance by gain:")
    for kv in sorted(zip(features, model.feature_importance("gain")),
                     key=lambda kv: kv[1], reverse=True):
        print(kv)

    return model.predict(test_df[list(features)], num_iteration=best_iteration)
| {
"content_hash": "d162ed00f03f324be491006650f719b6",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 150,
"avg_line_length": 40.21052631578947,
"alnum_prop": 0.6154013961605584,
"repo_name": "yaxinus/santander-product-recommendation-8th-place",
"id": "8edef46081b10f949747495b97d00954c62053a9",
"size": "4584",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "engines.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "22344"
},
{
"name": "Shell",
"bytes": "43"
}
],
"symlink_target": ""
} |
"""Creates grid social games from ASCII art diagrams."""
from __future__ import print_function
import collections
from gym import spaces
import numpy as np
# Careful with convention 'up' is displayed as -1 on the grid plot.
MOVING_ACTIONS = {
'stand': np.array([0, 0]),
'up': np.array([0, -1]),
'down': np.array([0, 1]),
'left': np.array([-1, 0]),
'right': np.array([1, 0])
}
class Element(object):
    """Base entity on the grid: either an item or a player.

    Every element carries a display color and three flags: whether it is
    drawn (visible), whether agents can shove it (pushable), and whether
    agents may share its cell (passable). In future (by adding physics),
    it will also have a speed and will be bouncing or not.
    """

    def __init__(self, color=(254, 254, 254), visible=True, pushable=False,
                 passable=False):
        # Store the configuration verbatim.
        (self.color, self.visible, self.pushable, self.passable) = (
            color, visible, pushable, passable)
class Item(Element):
    """An item "on the floor" that players can collect or push."""

    def __init__(self, color=(254, 254, 254), visible=True, pushable=True,
                 passable=True, force_collect=True):
        super(Item, self).__init__(color=color,
                                   visible=visible,
                                   pushable=pushable,
                                   passable=passable)
        # When True, the item is collected automatically as soon as an
        # agent runs into it.
        self.force_collect = force_collect
class Player(Element):
    """A player agent (solid and unpushable by default)."""

    def __init__(self, color=(254, 254, 254), visible=True, pushable=False,
                 passable=False):
        super(Player, self).__init__(color=color,
                                     visible=visible,
                                     pushable=pushable,
                                     passable=passable)
class Game(object):
"""Generates a Gym-based multi-player grid world markov game.
Attributes:
ascii_art: a list of strings, representing the map of the game. Ex:
art = ['#####',
'# #',
'#a=A#',
'#####',]
This represents a room surrounded by absolute walls ('#')
containing one item ('a'), one player ('A') and one player
wall ('=') that only stops players (items can pass).
items: items is a dictionary mapping items to their names (lowercase)
in the ascii-art ex, we would have:
items = {Item():'a'}.
players: a dictionary mapping players to their names (uppercase) in the
ascii-art: players = {Player():'A'}.
tabular: a boolean that specifies if observations are images (False)
or integers (True).
max_steps: an integer that represents the maximal number of steps
before the game ends.
actions: dictionary mapping integers (representing actions)
to strings (textual descriptions of actions)
last_events: list of strings describing events. Events are textual
descriptions of what occurs in the game.
The default events that can appear are:
- 'A_moves' (when player A takes a moving action (1,2,3 or 4))
- 'A_goes_to_the_wall' (player A is blocked by a wall)
- 'A_is_blocked_by_X' (when player A is blocked by another player or item)
- 'A_lost_the_drawn' (when several players try to reach the same cell,
one player will be picked randomly)
- 'A_collects_x' (when player A colletcs an item)
rewards: dictionary mapping events (strings) to rewards (floats)
terminaisons: list of events (strings) that causes the end of the game.
effects: dictionary mapping events (strings) with functions that
modifies the game's attributes.
steps: counts the number of steps done during an episode.
done: boolean specifying if the game has ended.
height: vertical dimensions of the game map.
width: horizontal dimensions of the game map.
players_cells: dictionary mapping players names to their
(x,y) positions in the game.
players_items: dictionary mapping players names to dictionaries mapping
item names to integers, representing the quantity
of each item collected by each player.
items_cells: dictionary mapping items names to their
(x,y) positions in the game.
player_walls: dictionary mapping player walls to their (x,y) positions.
absolute_walls: dictionary mapping absolute walls
to their (x,y) positions.
content: 2d table of dictionaries mapping elements
(player or items names) to integers (their quantities)
at each (x,y) position.
players_order: sorted list of players names,
used to attribute indexes (intergers) to players.
items_order: sorted list of items names,
used to attribute indexes (interger) to items.
players_idx: dictionary mapping players names to players indexes (int).
items_idx: dictionary mapping items names to their indexes (int).
num_players: number of players.
num_items: number of items.
num_actions: number of actions for each player.
num_states: number of states (upper bound of the number
of combinations of players positions).
action_space: gym-like action space (a MultiDiscrete space).
"""
def __init__(self, ascii_art, items, players, tabular=False, max_steps=100):
    """Init a game and read the ascii-art map."""
    # Arguments:
    self.ascii_art = ascii_art
    self.items = items
    self.players = players
    self.tabular = tabular
    self.max_steps = max_steps
    # Default:
    self.actions = {0: 'stand', 1: 'up', 2: 'down', 3: 'left', 4: 'right'}
    # Registries filled by add_action(). They were previously never
    # initialized, so calling add_action()/player_acts() on a fresh Game
    # raised AttributeError.
    self.actions_conditions = {}
    self.actions_consequences = {}
    self.rewards = {}
    self.terminaisons = []
    self.effects = {}
    self.steps = 0
    self.done = False
    self.last_events = []
    # To be determined from the ascii-art:
    self.height = None
    self.width = None
    self.players_cells = None
    self.players_items = None
    self.items_cells = None
    self.player_walls = None
    self.absolute_walls = None
    self.content = None
    # Initialize attributes from the ascii-art game map:
    self.read_ascii_art()
    # Define ordering indexes for players and items:
    self.players_order = sorted(self.players.keys())
    self.players_idx = {
        player: i for i, player in enumerate(self.players_order)}
    self.items_order = sorted(self.items.keys())
    self.items_idx = {item: i for i, item in enumerate(self.items_order)}
def read_ascii_art(self):
    """Generates the list of items and player and the map from the ascii art.

    Populates, from self.ascii_art: height/width, players_cells,
    players_items, items_cells, the wall masks ('#' absolute, '=' player)
    and the per-cell content counters.
    """
    self.height = len(self.ascii_art)
    self.width = max([len(line) for line in self.ascii_art])
    self.players_cells = {}
    self.players_items = {}
    self.items_cells = {}
    # Wall masks and `content` are indexed [x][y] (column-major).
    self.player_walls = np.zeros((self.width, self.height))
    self.absolute_walls = np.zeros((self.width, self.height))
    self.content = []
    for _ in range(self.width):
        cell_content = []
        for _ in range(self.height):
            cell_content.append(collections.defaultdict(int))
        self.content.append(cell_content)
    for y, line in enumerate(self.ascii_art):
        for x, char in enumerate(line):
            if char == '#':
                self.absolute_walls[x, y] = 1
            elif char == '=':
                self.player_walls[x, y] = 1
            else:
                # NOTE(review): every non-wall character is counted here,
                # including plain spaces — confirm that is intended.
                self.content[x][y][char] += 1
                if char.isupper():
                    # Uppercase letters are players.
                    self.players_cells[char] = np.array([x, y])
                    self.players_items[char] = collections.defaultdict(int)
                if char.islower():
                    # Lowercase letters are items; several copies may exist.
                    if char in self.items_cells:
                        self.items_cells[char].append(np.array([x, y]))
                    else:
                        self.items_cells[char] = [np.array([x, y])]
    assert set(self.players.keys()) >= set(self.players_cells.keys()), (
        'some players may have no description')
    assert set(self.items.keys()) >= set(self.items_cells.keys()), (
        'some items may have no description')
def display(self):
    """Print the ASCII map to stdout."""
    print('game map:')
    print('---------')
    for row in self.ascii_art:
        print(row)
def reset(self):
    """Restore the initial map state and return the first observations."""
    self.read_ascii_art()
    self.steps = 0
    self.last_events = []
    self.done = False
    return self.generate_observations()
def add_action(self, action_name, conditions=None, consequences=None):
    """Add an action, its conditions to be doable and its consequences.

    Eventual associated rewards should be defined using the method
    'add_reward'.

    Args:
        action_name: name of the action (str).
        conditions: event (str) or list of events. All must hold, can be empty.
        consequences: event (str) or list of events, can be empty.
    """
    action_idx = len(self.actions)
    self.actions[action_idx] = action_name
    # Create the registries on first use: older __init__ versions did not
    # define them, which made this method raise AttributeError.
    if not hasattr(self, 'actions_conditions'):
        self.actions_conditions = {}
    if not hasattr(self, 'actions_consequences'):
        self.actions_consequences = {}
    # Normalize to lists so player_acts() can iterate them safely
    # (None previously leaked through and broke iteration).
    if conditions is None:
        conditions = []
    elif not isinstance(conditions, list):
        conditions = [conditions]
    if consequences is None:
        consequences = []
    elif not isinstance(consequences, list):
        consequences = [consequences]
    self.actions_conditions[action_name] = conditions
    self.actions_consequences[action_name] = consequences
def add_effect(self, event, effect):
    """Add a function (effect) that modifies the game if event is held.

    Args:
        event: the event that causes the effect (str).
        effect: function that modifies the game. It should take a
            game as argument and modify its public attributes
            or call its public methods.
            For example,
            reaching a new game level could be implemented as follow:
            def my_effect(my_game):
                my_game.art = new_ascii_art
                my_game.read_ascii_art()
                return my_game.reset() # return the new initial state
    """
    # Only one effect per event: a later registration overwrites an
    # earlier one for the same event.
    self.effects[event] = effect
def add_reward(self, event, targets_rewards):
    """Register (player, reward) pairs to grant whenever `event` occurs."""
    for target_player, reward in targets_rewards.items():
        self.rewards.setdefault(event, []).append((target_player, reward))
def add_terminaison(self, conditions=None):
    """Add a list of conditions (events) that defines a terminal state.

    Args:
        conditions: event (str) or list of events. All must hold, can be empty.
    """
    if conditions is not None:
        # Promote a single event to a one-element list.
        if not isinstance(conditions, list):
            conditions = [conditions]
    self.terminaisons.append(conditions)
@property
def num_players(self):
    # Count of players, derived from the sorted name list built in __init__.
    return len(self.players_order)
@property
def num_actions(self):
    # Five built-in moves plus any custom actions registered via add_action().
    return len(self.actions)
@property
def action_space(self):
    # Gym MultiDiscrete: one discrete action head per player.
    return spaces.MultiDiscrete([self.num_actions] * self.num_players)
@property
def num_states(self):
    # num_states is an upper bound of the number of possible
    # tuples containing each player's position: (cells ** players).
    # So far, this number does not take collectable objects into account,
    # so a game with collectable objects is partially observed.
    return (self.height * self.width) ** self.num_players
@property
def num_items(self):
    # Number of distinct item *types*, not item instances on the map.
    return len(self.items)
def step(self, actions):
    """Applies a gym-based environment step.

    Args:
        actions: list of integers (or numpy array)
            containing the actions of each agent.

    Returns:
        (observations, rewards, done, infos) in the usual gym format;
        observations are an image or a list of integers depending on
        the `tabular` setting.
    """
    events = []
    # If you want an order that depends on some fitness,
    # you must add a condition here.
    actions, conflict_events = self.solve_conflicts(actions)
    events += conflict_events
    # Players act one at a time, in a fresh random order every step.
    random_order = np.random.permutation(self.num_players)
    for player_idx in random_order:
        player = self.players_order[player_idx]
        action = actions[player_idx]
        step_events = self.single_player_step(player, action)
        events += step_events
    self.last_events = events
    self.steps += 1
    if self.terminal_step():
        self.done = True
    # NOTE: terminal_step/apply_effects/apply_physics/generate_observations/
    # reward_events/is_done/infos are defined later in this file.
    self.apply_effects()
    self.apply_physics()  # Using players/items speed and directions attributes.
    observations = self.generate_observations()
    rewards = self.reward_events()
    done = self.is_done()
    infos = self.infos()
    return observations, rewards, done, infos
def single_player_step(self, player, action):
    """Process separately the step of one player.

    Args:
        player: (str) name of player doing the action.
        action: (int) action number.

    Returns:
        events: list of events caused by the player's action.
    """
    events = []
    if action not in self.actions:
        return events
    action_name = self.actions[action]
    if action_name not in MOVING_ACTIONS:
        # Custom (non-moving) action: delegate to the condition/consequence
        # machinery.
        events += self.player_acts(action)
        return events
    new_position = self.players_cells[player] + MOVING_ACTIONS[action_name]
    # Iterate over a snapshot: the push branch below deletes the pushed
    # player from this very cell dict, and mutating a dict while iterating
    # it raises RuntimeError.
    for element in list(self.content[new_position[0]][new_position[1]]):
        if element in self.items:
            if self.items[element].force_collect:
                events += self.player_collects_item(player, element)
        # If element is a pushable player -> push it.
        # TODO(alexisjacq): if it is a pushable item, must affect item_cells.
        elif element in self.players and self.players[element].pushable:
            other_new_position = (
                self.players_cells[element] + MOVING_ACTIONS[action_name])
            for other_element in list(
                    self.content[other_new_position[0]][other_new_position[1]]):
                if other_element in self.items:
                    if self.items[other_element].force_collect:
                        # Pass the pushed player's *name* (not the Player
                        # object): players_items and the event string are
                        # keyed by name.
                        events += self.player_collects_item(
                            element, other_element)
            del self.content[self.players_cells[element][0]][
                self.players_cells[element][1]][element]
            self.players_cells[element] = other_new_position
            self.content[other_new_position[0]][
                other_new_position[1]][element] = 1
    # Move the acting player into the target cell.
    del self.content[self.players_cells[player][0]][
        self.players_cells[player][1]][player]
    self.players_cells[player] = new_position
    self.content[new_position[0]][new_position[1]][player] = 1
    return events
def player_collects_item(self, player, item):
"""add the picked item to the list of player's possesions."""
self.players_items[player][item] += 1
# TODO(alexisjacq): if item limited quantity, must affect item_cells:
# Add argument 'new_position' to this function.
# Then add something like:
# self.content[new_position[0]][new_position[1]][item] -= 1
# if self.content[new_position[0]][new_position[1]][item] == 0:
# del self.content[new_position[0]][new_position[1]][item]
events = [player+'_collects_'+item]
return events
def player_acts(self, action):
"""Check if action meets conditions for being possible.
If action is possible, return consequences.
Args:
action: action of one agent.
Returns:
events: consequences of action if possible.
"""
conditions = self.actions_conditions[action]
ok = True
for condition in conditions:
if condition not in self.last_events:
ok = False
break
events = []
if ok: # All conditions are in self.last_events.
consequences = self.action_consequences[action]
for consequence in consequences:
events.append(consequence)
return events
  def solve_conflicts(self, actions):
    """This method removes forbidden/conflictual actions.
    For example: if two or more agents agents reach the same cell,
    only one -- randomly chosen -- does the move.
    Args:
      actions: list of integers (or numpy array)
        containing the actions of each agent
    Returns:
      actions: corrected list of actions.
      events: describes the conflicts encountered.
    """
    events = []
    future_cells = collections.defaultdict(list)
    # When player runs into walls and blocking items:
    for player in self.players.keys():
      player_idx = self.players_idx[player]
      player_passable = self.players[player].passable
      action = actions[player_idx]
      if action > 0:
        events.append(player+'_moves')
      action_name = self.actions[action]
      if action_name in MOVING_ACTIONS and action > 0 and not player_passable:
        new_position = self.players_cells[player] + MOVING_ACTIONS[action_name]
        # Check if no wall or (unpassable + unpushable) object:
        player_wall = self.player_walls[new_position[0]][new_position[1]]
        absolute_wall = self.absolute_walls[new_position[0]][new_position[1]]
        if player_wall + absolute_wall > 0:
          actions[player_idx] = 0  # stand
          events.append(player+'_goes_in_walls')
        else:
          for element in self.content[new_position[0]][new_position[1]]:
            # When player runs into unpassable/pushable item:
            if element in self.items:
              passable = self.items[element].passable
              pushable = self.items[element].pushable
              if not passable and not pushable:
                actions[player_idx] = 0
                events.append(player+'_blocked_by_'+element)
    # When players cross each others, both are stoped:
    for player in self.players.keys():
      player_idx = self.players_idx[player]
      player_passable = self.players[player].passable
      action = actions[player_idx]
      action_name = self.actions[action]
      if action_name in MOVING_ACTIONS and action > 0 and not player_passable:
        new_position = self.players_cells[player] + MOVING_ACTIONS[action_name]
        for element in self.content[new_position[0]][new_position[1]]:
          if element in self.players:
            other_action = actions[self.players_idx[element]]
            passable = self.players[element].passable
            pushable = self.players[element].pushable
            other_action_name = self.actions[other_action]
            # Opposite direction vectors sum to zero => head-on swap attempt.
            cross = ((MOVING_ACTIONS[action_name] +
                      MOVING_ACTIONS[other_action_name])**2).sum()
            if cross == 0 and not passable and not pushable:
              actions[player_idx] = 0
              actions[self.players_idx[element]] = 0
              events.append(player+'_blocked_by_'+element)
              events.append(element+'_blocked_by_'+player)
    # When players blocked by standing player:
    for player in self.players.keys():
      player_idx = self.players_idx[player]
      player_passable = self.players[player].passable
      action = actions[player_idx]
      action_name = self.actions[action]
      if action_name in MOVING_ACTIONS and action > 0 and not player_passable:
        new_position = self.players_cells[player] + MOVING_ACTIONS[action_name]
        for element in self.content[new_position[0]][new_position[1]]:
          if element in self.players:
            other_action = actions[self.players_idx[element]]
            passable = self.players[element].passable
            pushable = self.players[element].pushable
            # Check if it is an unpassable/unpushable player who stays:
            if other_action == 0 and not passable and not pushable:
              actions[player_idx] = 0
              events.append(player+'_blocked_by_'+element)
    # Predict futur positions:
    for player in self.players.keys():
      player_idx = self.players_idx[player]
      player_passable = self.players[player].passable
      action = actions[player_idx]
      action_name = self.actions[action]
      if action_name in MOVING_ACTIONS and action > 0 and not player_passable:
        new_position = self.players_cells[player] + MOVING_ACTIONS[action_name]
        future_cells[tuple(new_position)].append(player)
    # When several players try to reach the same cell,
    # one player is picked randomly:
    for content in future_cells.values():
      if len(content) > 1:
        losers = np.random.choice(content, len(content)-1, replace=False)
        for player in losers:
          player_idx = self.players_idx[player]
          actions[player_idx] = 0
          # NOTE(review): reads '_lost_the_drawn'; probably meant
          # '_lost_the_draw', but renaming would break event consumers.
          events.append(player+'_lost_the_drawn')
    return actions, events
def terminal_step(self):
"""Detect if the state is terminal."""
if self.max_steps is not None and self.steps > self.max_steps:
return True
for conditions in self.terminaisons:
ok = True
for condition in conditions:
if condition not in self.last_events:
ok = False
break
if ok:
return True
return False
  def is_done(self):
    """Expose the terminal flag computed during step()."""
    return self.done
def reward_events(self):
rewards = np.zeros(self.num_players)
for event in self.last_events:
if event in self.rewards:
for player_target, reward in self.rewards[event]:
target_idx = self.players_idx[player_target]
rewards[target_idx] += reward
return rewards
  def apply_physics(self):
    # TODO(alexisjacq)
    # Placeholder: speed/direction attributes are not simulated yet, no-op.
    pass
def apply_effects(self):
for event in self.last_events:
if event in self.effects:
self.effects[event](self)
def render(self):
"""Returns the image of the map with elements and wall colors.
By default, walls > agents > items
and alphabetical order between superposed agents or item
"""
image = np.zeros((self.height, self.width, 3), dtype='uint8')
for item_name in self.items_order:
item = self.items[item_name]
item_color = np.array(item.color, dtype=int)
for x, y in self.items_cells[item_name]:
image[x, y, :] = item_color
for player_name in self.players_order:
player = self.players[player_name]
player_color = np.array(player.color, dtype='uint8')
x, y = self.players_cells[player_name]
for channel in range(3):
image[x, y, channel] = player_color[channel]
for channel in range(3):
image[:, :, channel][self.player_walls > 0] = 100
for channel in range(3):
image[:, :, channel][self.absolute_walls > 0] = 150
return image
def discrete_state(self, obs):
"""Converts an x,y position into a discrete state.
Args:
obs: list of discrete (x,y) positions of players.
Returns:
state: a unique discrete number associated with the list of positions.
"""
state = 0
for i, (x, y) in enumerate(obs):
state += (x * self.width + y) * ((self.width * self.height) ** i)
return state
def one_hot_state(self, obs):
"""Converts a list of x,y positions into a "one-hot" vector.
Args:
obs: list of discrete (x,y) positions of players.
Returns:
state: numpy array of size (1, (width + height) * num_players).
The first 'width' elements encode the column for the first player.
They are all zeros except the x-th which is 1.
(similar for second part about encoding the row for the first player
and then for all other players).
This is not exactly a one-hot encoding since multiple ones are
set (two by player).
Ex: in a 2-players 3x3 grid, obs = ((x1, y1), (x2, y2)) = ((2, 3), (1, 1))
one_hot_state(obs) = ((0,1,0 , 0,0,1 , 1,0,0 , 1,0,0))
"""
state = np.zeros((1, (self.width + self.height) * self.num_players))
for i, (x, y) in enumerate(obs):
state[0, i * (self.width + self.height) + x] = 1
state[0, i * (self.width + self.height) + self.width + y] = 1
return state
def generate_observations(self):
if self.tabular:
obs = []
for player in self.players_order:
x, y = self.players_cells[player]
obs.append((x, y))
return obs
else:
return self.render()
def infos(self):
infos = {'event_list': str(self.last_events)}
return infos
| {
"content_hash": "c8d3281398d2a60ac7d1ec713b6cc97d",
"timestamp": "",
"source": "github",
"line_count": 690,
"max_line_length": 80,
"avg_line_length": 34.857971014492755,
"alnum_prop": 0.6264343921503409,
"repo_name": "google-research/google-research",
"id": "86ded45238df7a992c2e17a1db33f6a686521397",
"size": "24660",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "psycholab/game.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "9817"
},
{
"name": "C++",
"bytes": "4166670"
},
{
"name": "CMake",
"bytes": "6412"
},
{
"name": "CSS",
"bytes": "27092"
},
{
"name": "Cuda",
"bytes": "1431"
},
{
"name": "Dockerfile",
"bytes": "7145"
},
{
"name": "Gnuplot",
"bytes": "11125"
},
{
"name": "HTML",
"bytes": "77599"
},
{
"name": "ImageJ Macro",
"bytes": "50488"
},
{
"name": "Java",
"bytes": "487585"
},
{
"name": "JavaScript",
"bytes": "896512"
},
{
"name": "Julia",
"bytes": "67986"
},
{
"name": "Jupyter Notebook",
"bytes": "71290299"
},
{
"name": "Lua",
"bytes": "29905"
},
{
"name": "MATLAB",
"bytes": "103813"
},
{
"name": "Makefile",
"bytes": "5636"
},
{
"name": "NASL",
"bytes": "63883"
},
{
"name": "Perl",
"bytes": "8590"
},
{
"name": "Python",
"bytes": "53790200"
},
{
"name": "R",
"bytes": "101058"
},
{
"name": "Roff",
"bytes": "1208"
},
{
"name": "Rust",
"bytes": "2389"
},
{
"name": "Shell",
"bytes": "730444"
},
{
"name": "Smarty",
"bytes": "5966"
},
{
"name": "Starlark",
"bytes": "245038"
}
],
"symlink_target": ""
} |
''' This program will generate a list of all subdirectories
and files within the current working directory'''
import os

# Grab current directory and create a file name based upon that.
currentdirectory = os.getcwd()
# "<basename-of-cwd>.path" -- os.path.basename is portable, unlike
# splitting on '/'.
pathfile = os.path.basename(currentdirectory) + ".path"

# Walk the directory tree and write one path per line. The context manager
# guarantees the file is closed even if the walk raises; mode 'a' preserves
# the original append semantics (repeated runs accumulate entries).
with open(pathfile, 'a') as f:
    # Renamed loop variable: the original reused 'dirname', shadowing the
    # earlier split result.
    for dirpath, dirnames, filenames in os.walk(currentdirectory):
        # print path to all subdirectories first.
        for subdirname in dirnames:
            f.write(os.path.join(dirpath, subdirname) + "\n")
        # print path to all filenames.
        for filename in filenames:
            f.write(os.path.join(dirpath, filename) + "\n")
"content_hash": "7598d8270737a4c47669715866d49e4d",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 64,
"avg_line_length": 29.333333333333332,
"alnum_prop": 0.7059659090909091,
"repo_name": "amikiri/path_tree",
"id": "52d633b658562feae660f5dfc9f1ca248b7ce290",
"size": "728",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "path_tree.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "728"
}
],
"symlink_target": ""
} |
"""conll2002 dataset."""
from tensorflow_datasets.text.conll2002.conll2002 import Conll2002
| {
"content_hash": "3edbde0f33b9d70f833659722a47eb70",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 66,
"avg_line_length": 31,
"alnum_prop": 0.8064516129032258,
"repo_name": "tensorflow/datasets",
"id": "a3aebc466b90d1bd11f4a8d6439bf1ec24af7204",
"size": "705",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow_datasets/text/conll2002/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Gherkin",
"bytes": "728"
},
{
"name": "JavaScript",
"bytes": "13369"
},
{
"name": "NewLisp",
"bytes": "13940"
},
{
"name": "Perl",
"bytes": "520"
},
{
"name": "Python",
"bytes": "5398856"
},
{
"name": "Roff",
"bytes": "22095"
},
{
"name": "Ruby",
"bytes": "25669"
},
{
"name": "Shell",
"bytes": "3895"
},
{
"name": "Smalltalk",
"bytes": "20604"
},
{
"name": "TeX",
"bytes": "759"
}
],
"symlink_target": ""
} |
import _plotly_utils.basevalidators
class WidthsrcValidator(_plotly_utils.basevalidators.SrcValidator):
    """Validator for the `bar.widthsrc` source-column attribute."""

    def __init__(self, plotly_name="widthsrc", parent_name="bar", **kwargs):
        # Pull the edit type out of kwargs first so the remainder can be
        # forwarded untouched to the base validator.
        edit_type = kwargs.pop("edit_type", "none")
        super(WidthsrcValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=edit_type,
            **kwargs,
        )
| {
"content_hash": "03269a7d5618462e88ea6e686cb1be72",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 76,
"avg_line_length": 35.72727272727273,
"alnum_prop": 0.6183206106870229,
"repo_name": "plotly/plotly.py",
"id": "00296c11f36a7927b10d9d6137b63931b2cf8460",
"size": "393",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/bar/_widthsrc.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "545"
},
{
"name": "JavaScript",
"bytes": "2074"
},
{
"name": "PostScript",
"bytes": "565328"
},
{
"name": "Python",
"bytes": "31506317"
},
{
"name": "TypeScript",
"bytes": "71337"
}
],
"symlink_target": ""
} |
from datetime import datetime
import email.parser
import hashlib
import locale
import random
import six
from six.moves import urllib
import time
import unittest2
import uuid
from copy import deepcopy
import eventlet
from unittest2 import SkipTest
from swift.common.http import is_success, is_client_error
from email.utils import parsedate
import mock
from test.functional import normalized_urls, load_constraint, cluster_info
from test.functional import check_response, retry
import test.functional as tf
from test.functional.swift_test_client import Account, Connection, File, \
ResponseError
def setUpModule():
    # Initialize the functional-test package (config, auth, cluster info)
    # once before any test in this module runs.
    tf.setup_package()
def tearDownModule():
    # Release module-wide resources acquired by setUpModule.
    tf.teardown_package()
class Utils(object):
    """Helpers generating random resource names for the functional tests."""

    @classmethod
    def create_ascii_name(cls, length=None):
        # `length` is accepted only for signature parity with
        # create_utf8_name; a uuid4 hex digest is always 32 characters.
        return uuid.uuid4().hex

    @classmethod
    def create_utf8_name(cls, length=None):
        """Return `length` random multi-byte characters, UTF-8 encoded."""
        length = 15 if length is None else int(length)
        utf8_chars = (u'\uF10F\uD20D\uB30B\u9409\u8508\u5605\u3703\u1801'
                      u'\u0900\uF110\uD20E\uB30C\u940A\u8509\u5606\u3704'
                      u'\u1802\u0901\uF111\uD20F\uB30D\u940B\u850A\u5607'
                      u'\u3705\u1803\u0902\uF112\uD210\uB30E\u940C\u850B'
                      u'\u5608\u3706\u1804\u0903\u03A9\u2603')
        chosen = [random.choice(utf8_chars) for _ in range(length)]
        return ''.join(chosen).encode('utf-8')

    # Default naming scheme; Base2 swaps this to create_utf8_name.
    create_name = create_ascii_name
class BaseEnv(object):
    """Base fixture: authenticates a connection and empties the account."""
    account = conn = None
    @classmethod
    def setUp(cls):
        cls.conn = Connection(tf.config)
        cls.conn.authenticate()
        cls.account = Account(cls.conn, tf.config.get('account',
                                                      tf.config['username']))
        # Start from a clean slate so listings in the tests are predictable.
        cls.account.delete_containers()
    @classmethod
    def tearDown(cls):
        # Subclasses override when they create resources needing cleanup.
        pass
class Base(unittest2.TestCase):
    """Common harness: binds a test class to an env fixture and provides
    assertions against the shared connection's last response."""
    # subclasses may override env class
    env = BaseEnv
    @classmethod
    def setUpClass(cls):
        cls.env.setUp()
    @classmethod
    def tearDownClass(cls):
        cls.env.tearDown()
    def assert_body(self, body):
        """Assert the last response body equals `body` exactly."""
        response_body = self.env.conn.response.read()
        self.assertEqual(response_body, body,
                         'Body returned: %s' % (response_body))
    def assert_status(self, status_or_statuses):
        """Assert the last status equals, or is contained in,
        `status_or_statuses` (an int or an iterable of ints)."""
        self.assertTrue(
            self.env.conn.response.status == status_or_statuses or
            (hasattr(status_or_statuses, '__iter__') and
             self.env.conn.response.status in status_or_statuses),
            'Status returned: %d Expected: %s' %
            (self.env.conn.response.status, status_or_statuses))
    def assert_header(self, header_name, expected_value):
        """Assert the last response carried `header_name` with exactly
        `expected_value`."""
        try:
            actual_value = self.env.conn.response.getheader(header_name)
        except KeyError:
            self.fail(
                'Expected header name %r not found in response.' % header_name)
        self.assertEqual(expected_value, actual_value)
class Base2(object):
    """Mixin re-running a Base subclass's tests with UTF-8 resource names."""
    def setUp(self):
        Utils.create_name = Utils.create_utf8_name
        super(Base2, self).setUp()
    def tearDown(self):
        # Restore the default so later test classes get ASCII names again.
        Utils.create_name = Utils.create_ascii_name
class TestAccountEnv(BaseEnv):
    """Account fixture pre-populated with ten randomly named containers."""
    @classmethod
    def setUp(cls):
        super(TestAccountEnv, cls).setUp()
        cls.containers = []
        for i in range(10):
            cont = cls.account.container(Utils.create_name())
            if not cont.create():
                raise ResponseError(cls.conn.response)
            cls.containers.append(cont)
class TestAccountDev(Base):
    """Account tests run against the ten-container fixture (ASCII names)."""
    env = TestAccountEnv
class TestAccountDevUTF8(Base2, TestAccountDev):
    """Same as TestAccountDev but with UTF-8 resource names."""
    pass
class TestAccount(Base):
    """Functional tests for account-level GET/HEAD/PUT and listings."""
    env = TestAccountEnv
    def testNoAuthToken(self):
        """Requests without a token are rejected (401, or 412 pre-auth)."""
        self.assertRaises(ResponseError, self.env.account.info,
                          cfg={'no_auth_token': True})
        self.assert_status([401, 412])
        self.assertRaises(ResponseError, self.env.account.containers,
                          cfg={'no_auth_token': True})
        self.assert_status([401, 412])
    def testInvalidUTF8Path(self):
        """A non-UTF-8 container path is refused with 412."""
        invalid_utf8 = Utils.create_utf8_name()[::-1]
        container = self.env.account.container(invalid_utf8)
        self.assertFalse(container.create(cfg={'no_path_quote': True}))
        self.assert_status(412)
        self.assert_body('Invalid UTF8 or contains NULL')
    def testVersionOnlyPath(self):
        """A URL containing only the API version is a bad request."""
        self.env.account.conn.make_request('PUT',
                                           cfg={'version_only_path': True})
        self.assert_status(412)
        self.assert_body('Bad URL')
    def testInvalidPath(self):
        """A malformed storage path yields 404; the URL is then restored."""
        was_url = self.env.account.conn.storage_url
        if (normalized_urls):
            self.env.account.conn.storage_url = '/'
        else:
            self.env.account.conn.storage_url = "/%s" % was_url
        self.env.account.conn.make_request('GET')
        try:
            self.assert_status(404)
        finally:
            # Always restore so later tests see a valid storage URL.
            self.env.account.conn.storage_url = was_url
    def testPUTError(self):
        """Account PUT is denied when account management is disabled."""
        if load_constraint('allow_account_management'):
            raise SkipTest("Allow account management is enabled")
        self.env.account.conn.make_request('PUT')
        self.assert_status([403, 405])
    def testAccountHead(self):
        """HEAD reports non-negative stats; retries until counts settle."""
        try_count = 0
        while try_count < 5:
            try_count += 1
            info = self.env.account.info()
            for field in ['object_count', 'container_count', 'bytes_used']:
                self.assertGreaterEqual(info[field], 0)
            if info['container_count'] == len(self.env.containers):
                break
            if try_count < 5:
                # Container stats are eventually consistent; wait and retry.
                time.sleep(1)
        self.assertEqual(info['container_count'], len(self.env.containers))
        self.assert_status(204)
    def testContainerSerializedInfo(self):
        """json/xml listings carry counts/bytes and the right content-type."""
        container_info = {}
        for container in self.env.containers:
            info = {'bytes': 0}
            info['count'] = random.randint(10, 30)
            for i in range(info['count']):
                file_item = container.file(Utils.create_name())
                bytes = random.randint(1, 32768)
                file_item.write_random(bytes)
                info['bytes'] += bytes
            container_info[container.name] = info
        for format_type in ['json', 'xml']:
            for a in self.env.account.containers(
                    parms={'format': format_type}):
                self.assertGreaterEqual(a['count'], 0)
                self.assertGreaterEqual(a['bytes'], 0)
            headers = dict(self.env.conn.response.getheaders())
            if format_type == 'json':
                self.assertEqual(headers['content-type'],
                                 'application/json; charset=utf-8')
            elif format_type == 'xml':
                self.assertEqual(headers['content-type'],
                                 'application/xml; charset=utf-8')
    def testListingLimit(self):
        """Listings at or under the server limit succeed; beyond it, 412."""
        limit = load_constraint('account_listing_limit')
        for l in (1, 100, limit / 2, limit - 1, limit, limit + 1, limit * 2):
            p = {'limit': l}
            if l <= limit:
                self.assertLessEqual(len(self.env.account.containers(parms=p)),
                                     l)
                self.assert_status(200)
            else:
                self.assertRaises(ResponseError,
                                  self.env.account.containers, parms=p)
                self.assert_status(412)
    def testContainerListing(self):
        """All formats return the same, sorted set of container names."""
        a = sorted([c.name for c in self.env.containers])
        for format_type in [None, 'json', 'xml']:
            b = self.env.account.containers(parms={'format': format_type})
            if isinstance(b[0], dict):
                b = [x['name'] for x in b]
            self.assertEqual(a, b)
    def testListDelimiter(self):
        """Delimiter rolls up names sharing a prefix, forward and reversed."""
        delimiter = '-'
        containers = ['test', delimiter.join(['test', 'bar']),
                      delimiter.join(['test', 'foo'])]
        for c in containers:
            cont = self.env.account.container(c)
            self.assertTrue(cont.create())
        results = self.env.account.containers(parms={'delimiter': delimiter})
        expected = ['test', 'test-']
        results = [r for r in results if r in expected]
        self.assertEqual(expected, results)
        results = self.env.account.containers(parms={'delimiter': delimiter,
                                                     'reverse': 'yes'})
        expected.reverse()
        results = [r for r in results if r in expected]
        self.assertEqual(expected, results)
    def testListDelimiterAndPrefix(self):
        """Delimiter combined with prefix narrows, then rolls up, names."""
        delimiter = 'a'
        containers = ['bar', 'bazar']
        for c in containers:
            cont = self.env.account.container(c)
            self.assertTrue(cont.create())
        results = self.env.account.containers(parms={'delimiter': delimiter,
                                                     'prefix': 'ba'})
        expected = ['bar', 'baza']
        results = [r for r in results if r in expected]
        self.assertEqual(expected, results)
        results = self.env.account.containers(parms={'delimiter': delimiter,
                                                     'prefix': 'ba',
                                                     'reverse': 'yes'})
        expected.reverse()
        results = [r for r in results if r in expected]
        self.assertEqual(expected, results)
    def testContainerListingLastModified(self):
        """json/xml listings expose last_modified matching container HEAD."""
        expected = {}
        for container in self.env.containers:
            res = container.info()
            expected[container.name] = time.mktime(
                parsedate(res['last_modified']))
        for format_type in ['json', 'xml']:
            actual = {}
            containers = self.env.account.containers(
                parms={'format': format_type})
            if isinstance(containers[0], dict):
                for container in containers:
                    self.assertIn('name', container)  # sanity
                    self.assertIn('last_modified', container)  # sanity
                    # ceil by hand (wants easier way!)
                    datetime_str, micro_sec_str = \
                        container['last_modified'].split('.')
                    timestamp = time.mktime(
                        time.strptime(datetime_str,
                                      "%Y-%m-%dT%H:%M:%S"))
                    if int(micro_sec_str):
                        timestamp += 1
                    actual[container['name']] = timestamp
            self.assertEqual(expected, actual)
    def testInvalidAuthToken(self):
        """A bogus auth token yields 401."""
        hdrs = {'X-Auth-Token': 'bogus_auth_token'}
        self.assertRaises(ResponseError, self.env.account.info, hdrs=hdrs)
        self.assert_status(401)
    def testLastContainerMarker(self):
        """Marker set to the final name returns an empty listing."""
        for format_type in [None, 'json', 'xml']:
            containers = self.env.account.containers({'format': format_type})
            self.assertEqual(len(containers), len(self.env.containers))
            self.assert_status(200)
            containers = self.env.account.containers(
                parms={'format': format_type, 'marker': containers[-1]})
            self.assertEqual(len(containers), 0)
            if format_type is None:
                self.assert_status(204)
            else:
                self.assert_status(200)
    def testMarkerLimitContainerList(self):
        """marker+limit listings honor the limit and start past the marker."""
        for format_type in [None, 'json', 'xml']:
            for marker in ['0', 'A', 'I', 'R', 'Z', 'a', 'i', 'r', 'z',
                           'abc123', 'mnop', 'xyz']:
                limit = random.randint(2, 9)
                containers = self.env.account.containers(
                    parms={'format': format_type,
                           'marker': marker,
                           'limit': limit})
                self.assertLessEqual(len(containers), limit)
                if containers:
                    if isinstance(containers[0], dict):
                        containers = [x['name'] for x in containers]
                    self.assertGreater(locale.strcoll(containers[0], marker),
                                       0)
    def testContainersOrderedByName(self):
        """Listings come back sorted by name (locale collation)."""
        for format_type in [None, 'json', 'xml']:
            containers = self.env.account.containers(
                parms={'format': format_type})
            if isinstance(containers[0], dict):
                containers = [x['name'] for x in containers]
            # NOTE: sorted(..., cmp=...) is Python 2 only.
            self.assertEqual(sorted(containers, cmp=locale.strcoll),
                             containers)
    def testQuotedWWWAuthenticateHeader(self):
        # check that the www-authenticate header value with the swift realm
        # is correctly quoted.
        conn = Connection(tf.config)
        conn.authenticate()
        # Account name embedding a header-injection attempt; the realm must
        # come back with this value percent-quoted, not raw.
        inserted_html = '<b>Hello World'
        hax = 'AUTH_haxx"\nContent-Length: %d\n\n%s' % (len(inserted_html),
                                                        inserted_html)
        quoted_hax = urllib.parse.quote(hax)
        conn.connection.request('GET', '/v1/' + quoted_hax, None, {})
        resp = conn.connection.getresponse()
        resp_headers = dict(resp.getheaders())
        self.assertIn('www-authenticate', resp_headers)
        actual = resp_headers['www-authenticate']
        expected = 'Swift realm="%s"' % quoted_hax
        # other middleware e.g. auth_token may also set www-authenticate
        # headers in which case actual values will be a comma separated list.
        # check that expected value is among the actual values
        self.assertIn(expected, actual)
class TestAccountUTF8(Base2, TestAccount):
    """Re-runs TestAccount with UTF-8 resource names."""
    pass
class TestAccountNoContainers(Base):
    """Listing behaviour for an account that holds no containers."""
    def testGetRequest(self):
        """Empty listings: 204 for plain text, 200 for json/xml formats."""
        for format_type in [None, 'json', 'xml']:
            self.assertFalse(self.env.account.containers(
                parms={'format': format_type}))
            if format_type is None:
                self.assert_status(204)
            else:
                self.assert_status(200)
class TestAccountNoContainersUTF8(Base2, TestAccountNoContainers):
    """Re-runs TestAccountNoContainers with UTF-8 resource names."""
    pass
class TestAccountSortingEnv(BaseEnv):
    """Fixture with container names chosen to exercise byte-order sorting."""
    @classmethod
    def setUp(cls):
        super(TestAccountSortingEnv, cls).setUp()
        postfix = Utils.create_name()
        # Mixed case and digits: byte-order sorting differs from natural or
        # case-insensitive ordering (e.g. 'A3' < 'a1' and 'a10' < 'a2').
        cls.cont_items = ('a1', 'a2', 'A3', 'b1', 'B2', 'a10', 'b10', 'zz')
        cls.cont_items = ['%s%s' % (x, postfix) for x in cls.cont_items]
        for container in cls.cont_items:
            c = cls.account.container(container)
            if not c.create():
                raise ResponseError(cls.conn.response)
class TestAccountSorting(Base):
    """Container listing order and `reverse`/marker parameter handling."""
    env = TestAccountSortingEnv
    def testAccountContainerListSorting(self):
        """Unrecognized `reverse` values fall back to forward byte order."""
        # name (byte order) sorting.
        cont_list = sorted(self.env.cont_items)
        for reverse in ('false', 'no', 'off', '', 'garbage'):
            cont_listing = self.env.account.containers(
                parms={'reverse': reverse})
            self.assert_status(200)
            self.assertEqual(cont_list, cont_listing,
                             'Expected %s but got %s with reverse param %r'
                             % (cont_list, cont_listing, reverse))
    def testAccountContainerListSortingReverse(self):
        """All truthy `reverse` spellings invert the byte-order listing."""
        # name (byte order) sorting.
        cont_list = sorted(self.env.cont_items)
        cont_list.reverse()
        for reverse in ('true', '1', 'yes', 'on', 't', 'y'):
            cont_listing = self.env.account.containers(
                parms={'reverse': reverse})
            self.assert_status(200)
            self.assertEqual(cont_list, cont_listing,
                             'Expected %s but got %s with reverse param %r'
                             % (cont_list, cont_listing, reverse))
    def testAccountContainerListSortingByPrefix(self):
        """Reverse listing still honors the `prefix` filter."""
        cont_list = sorted(c for c in self.env.cont_items if c.startswith('a'))
        cont_list.reverse()
        cont_listing = self.env.account.containers(parms={
            'reverse': 'on', 'prefix': 'a'})
        self.assert_status(200)
        self.assertEqual(cont_list, cont_listing)
    def testAccountContainerListSortingByMarkersExclusive(self):
        """Reversed marker/end_marker bounds are exclusive."""
        first_item = self.env.cont_items[3]  # 'b1' + postfix
        last_item = self.env.cont_items[4]  # 'B2' + postfix
        cont_list = sorted(c for c in self.env.cont_items
                           if last_item < c < first_item)
        cont_list.reverse()
        cont_listing = self.env.account.containers(parms={
            'reverse': 'on', 'marker': first_item, 'end_marker': last_item})
        self.assert_status(200)
        self.assertEqual(cont_list, cont_listing)
    def testAccountContainerListSortingByMarkersInclusive(self):
        """Nudging each marker by one byte makes the bounds inclusive."""
        first_item = self.env.cont_items[3]  # 'b1' + postfix
        last_item = self.env.cont_items[4]  # 'B2' + postfix
        cont_list = sorted(c for c in self.env.cont_items
                           if last_item <= c <= first_item)
        cont_list.reverse()
        cont_listing = self.env.account.containers(parms={
            'reverse': 'on', 'marker': first_item + '\x00',
            'end_marker': last_item[:-1] + chr(ord(last_item[-1]) - 1)})
        self.assert_status(200)
        self.assertEqual(cont_list, cont_listing)
    def testAccountContainerListSortingByReversedMarkers(self):
        """Markers bounding an empty reversed range return 204 and []."""
        cont_listing = self.env.account.containers(parms={
            'reverse': 'on', 'marker': 'B', 'end_marker': 'b1'})
        self.assert_status(204)
        self.assertEqual([], cont_listing)
class TestContainerEnv(BaseEnv):
    """Fixture with one container holding ten 128-byte random files."""
    @classmethod
    def setUp(cls):
        super(TestContainerEnv, cls).setUp()
        cls.container = cls.account.container(Utils.create_name())
        if not cls.container.create():
            raise ResponseError(cls.conn.response)
        cls.file_count = 10
        cls.file_size = 128
        cls.files = list()
        for x in range(cls.file_count):
            file_item = cls.container.file(Utils.create_name())
            file_item.write_random(cls.file_size)
            cls.files.append(file_item.name)
class TestContainerDev(Base):
    """Container tests run against the ten-file fixture (ASCII names)."""
    env = TestContainerEnv
class TestContainerDevUTF8(Base2, TestContainerDev):
    """Re-runs TestContainerDev with UTF-8 resource names."""
    pass
class TestContainer(Base):
env = TestContainerEnv
def testContainerNameLimit(self):
limit = load_constraint('max_container_name_length')
for l in (limit - 100, limit - 10, limit - 1, limit,
limit + 1, limit + 10, limit + 100):
cont = self.env.account.container('a' * l)
if l <= limit:
self.assertTrue(cont.create())
self.assert_status(201)
else:
self.assertFalse(cont.create())
self.assert_status(400)
def testFileThenContainerDelete(self):
cont = self.env.account.container(Utils.create_name())
self.assertTrue(cont.create())
file_item = cont.file(Utils.create_name())
self.assertTrue(file_item.write_random())
self.assertTrue(file_item.delete())
self.assert_status(204)
self.assertNotIn(file_item.name, cont.files())
self.assertTrue(cont.delete())
self.assert_status(204)
self.assertNotIn(cont.name, self.env.account.containers())
def testFileListingLimitMarkerPrefix(self):
cont = self.env.account.container(Utils.create_name())
self.assertTrue(cont.create())
files = sorted([Utils.create_name() for x in range(10)])
for f in files:
file_item = cont.file(f)
self.assertTrue(file_item.write_random())
for i in range(len(files)):
f = files[i]
for j in range(1, len(files) - i):
self.assertEqual(cont.files(parms={'limit': j, 'marker': f}),
files[i + 1: i + j + 1])
self.assertEqual(cont.files(parms={'marker': f}), files[i + 1:])
self.assertEqual(cont.files(parms={'marker': f, 'prefix': f}), [])
self.assertEqual(cont.files(parms={'prefix': f}), [f])
def testPrefixAndLimit(self):
load_constraint('container_listing_limit')
cont = self.env.account.container(Utils.create_name())
self.assertTrue(cont.create())
prefix_file_count = 10
limit_count = 2
prefixs = ['alpha/', 'beta/', 'kappa/']
prefix_files = {}
for prefix in prefixs:
prefix_files[prefix] = []
for i in range(prefix_file_count):
file_item = cont.file(prefix + Utils.create_name())
file_item.write()
prefix_files[prefix].append(file_item.name)
for format_type in [None, 'json', 'xml']:
for prefix in prefixs:
files = cont.files(parms={'prefix': prefix,
'format': format_type})
if isinstance(files[0], dict):
files = [x.get('name', x.get('subdir')) for x in files]
self.assertEqual(files, sorted(prefix_files[prefix]))
for format_type in [None, 'json', 'xml']:
for prefix in prefixs:
files = cont.files(parms={'limit': limit_count,
'prefix': prefix,
'format': format_type})
if isinstance(files[0], dict):
files = [x.get('name', x.get('subdir')) for x in files]
self.assertEqual(len(files), limit_count)
for file_item in files:
self.assertTrue(file_item.startswith(prefix))
def testListDelimiter(self):
cont = self.env.account.container(Utils.create_name())
self.assertTrue(cont.create())
delimiter = '-'
files = ['test', delimiter.join(['test', 'bar']),
delimiter.join(['test', 'foo'])]
for f in files:
file_item = cont.file(f)
self.assertTrue(file_item.write_random())
for format_type in [None, 'json', 'xml']:
results = cont.files(parms={'format': format_type})
if isinstance(results[0], dict):
results = [x.get('name', x.get('subdir')) for x in results]
self.assertEqual(results, ['test', 'test-bar', 'test-foo'])
results = cont.files(parms={'delimiter': delimiter,
'format': format_type})
if isinstance(results[0], dict):
results = [x.get('name', x.get('subdir')) for x in results]
self.assertEqual(results, ['test', 'test-'])
results = cont.files(parms={'delimiter': delimiter,
'format': format_type,
'reverse': 'yes'})
if isinstance(results[0], dict):
results = [x.get('name', x.get('subdir')) for x in results]
self.assertEqual(results, ['test-', 'test'])
def testListDelimiterAndPrefix(self):
cont = self.env.account.container(Utils.create_name())
self.assertTrue(cont.create())
delimiter = 'a'
files = ['bar', 'bazar']
for f in files:
file_item = cont.file(f)
self.assertTrue(file_item.write_random())
results = cont.files(parms={'delimiter': delimiter, 'prefix': 'ba'})
self.assertEqual(results, ['bar', 'baza'])
results = cont.files(parms={'delimiter': delimiter,
'prefix': 'ba',
'reverse': 'yes'})
self.assertEqual(results, ['baza', 'bar'])
def testLeadingDelimiter(self):
cont = self.env.account.container(Utils.create_name())
self.assertTrue(cont.create())
delimiter = '/'
files = ['test', delimiter.join(['', 'test', 'bar']),
delimiter.join(['', 'test', 'bar', 'foo'])]
for f in files:
file_item = cont.file(f)
self.assertTrue(file_item.write_random())
results = cont.files(parms={'delimiter': delimiter})
self.assertEqual(results, [delimiter, 'test'])
def testCreate(self):
cont = self.env.account.container(Utils.create_name())
self.assertTrue(cont.create())
self.assert_status(201)
self.assertIn(cont.name, self.env.account.containers())
def testContainerFileListOnContainerThatDoesNotExist(self):
for format_type in [None, 'json', 'xml']:
container = self.env.account.container(Utils.create_name())
self.assertRaises(ResponseError, container.files,
parms={'format': format_type})
self.assert_status(404)
def testUtf8Container(self):
valid_utf8 = Utils.create_utf8_name()
invalid_utf8 = valid_utf8[::-1]
container = self.env.account.container(valid_utf8)
self.assertTrue(container.create(cfg={'no_path_quote': True}))
self.assertIn(container.name, self.env.account.containers())
self.assertEqual(container.files(), [])
self.assertTrue(container.delete())
container = self.env.account.container(invalid_utf8)
self.assertFalse(container.create(cfg={'no_path_quote': True}))
self.assert_status(412)
self.assertRaises(ResponseError, container.files,
cfg={'no_path_quote': True})
self.assert_status(412)
def testCreateOnExisting(self):
cont = self.env.account.container(Utils.create_name())
self.assertTrue(cont.create())
self.assert_status(201)
self.assertTrue(cont.create())
self.assert_status(202)
    def testSlashInName(self):
        """A container name containing '/' must be rejected.

        The slash is sent un-quoted (no_path_quote) so the server treats
        it as a path separator; the PUT must fail and the name must not
        appear in the account listing.
        """
        if Utils.create_name == Utils.create_utf8_name:
            # decode to unicode first so the slash replaces a whole
            # character rather than one byte of a multi-byte sequence
            cont_name = list(six.text_type(Utils.create_name(), 'utf-8'))
        else:
            cont_name = list(Utils.create_name())
        # drop a '/' somewhere in the middle of the name
        cont_name[random.randint(2, len(cont_name) - 2)] = '/'
        cont_name = ''.join(cont_name)
        if Utils.create_name == Utils.create_utf8_name:
            # re-encode to the byte form the client sends on the wire
            cont_name = cont_name.encode('utf-8')
        cont = self.env.account.container(cont_name)
        self.assertFalse(cont.create(cfg={'no_path_quote': True}),
                         'created container with name %s' % (cont_name))
        self.assert_status(404)
        self.assertNotIn(cont.name, self.env.account.containers())
def testDelete(self):
cont = self.env.account.container(Utils.create_name())
self.assertTrue(cont.create())
self.assert_status(201)
self.assertTrue(cont.delete())
self.assert_status(204)
self.assertNotIn(cont.name, self.env.account.containers())
def testDeleteOnContainerThatDoesNotExist(self):
cont = self.env.account.container(Utils.create_name())
self.assertFalse(cont.delete())
self.assert_status(404)
def testDeleteOnContainerWithFiles(self):
cont = self.env.account.container(Utils.create_name())
self.assertTrue(cont.create())
file_item = cont.file(Utils.create_name())
file_item.write_random(self.env.file_size)
self.assertIn(file_item.name, cont.files())
self.assertFalse(cont.delete())
self.assert_status(409)
def testFileCreateInContainerThatDoesNotExist(self):
file_item = File(self.env.conn, self.env.account, Utils.create_name(),
Utils.create_name())
self.assertRaises(ResponseError, file_item.write)
self.assert_status(404)
def testLastFileMarker(self):
for format_type in [None, 'json', 'xml']:
files = self.env.container.files({'format': format_type})
self.assertEqual(len(files), len(self.env.files))
self.assert_status(200)
files = self.env.container.files(
parms={'format': format_type, 'marker': files[-1]})
self.assertEqual(len(files), 0)
if format_type is None:
self.assert_status(204)
else:
self.assert_status(200)
def testContainerFileList(self):
for format_type in [None, 'json', 'xml']:
files = self.env.container.files(parms={'format': format_type})
self.assert_status(200)
if isinstance(files[0], dict):
files = [x['name'] for x in files]
for file_item in self.env.files:
self.assertIn(file_item, files)
for file_item in files:
self.assertIn(file_item, self.env.files)
def _testContainerFormattedFileList(self, format_type):
expected = {}
for name in self.env.files:
expected[name] = self.env.container.file(name).info()
file_list = self.env.container.files(parms={'format': format_type})
self.assert_status(200)
for actual in file_list:
name = actual['name']
self.assertIn(name, expected)
self.assertEqual(expected[name]['etag'], actual['hash'])
self.assertEqual(
expected[name]['content_type'], actual['content_type'])
self.assertEqual(
expected[name]['content_length'], actual['bytes'])
expected.pop(name)
self.assertFalse(expected) # sanity check
    def testContainerJsonFileList(self):
        """Validate the json container listing against per-object HEADs."""
        self._testContainerFormattedFileList('json')
    def testContainerXmlFileList(self):
        """Validate the xml container listing against per-object HEADs."""
        self._testContainerFormattedFileList('xml')
def testMarkerLimitFileList(self):
for format_type in [None, 'json', 'xml']:
for marker in ['0', 'A', 'I', 'R', 'Z', 'a', 'i', 'r', 'z',
'abc123', 'mnop', 'xyz']:
limit = random.randint(2, self.env.file_count - 1)
files = self.env.container.files(parms={'format': format_type,
'marker': marker,
'limit': limit})
if not files:
continue
if isinstance(files[0], dict):
files = [x['name'] for x in files]
self.assertLessEqual(len(files), limit)
if files:
if isinstance(files[0], dict):
files = [x['name'] for x in files]
self.assertGreater(locale.strcoll(files[0], marker), 0)
def testFileOrder(self):
for format_type in [None, 'json', 'xml']:
files = self.env.container.files(parms={'format': format_type})
if isinstance(files[0], dict):
files = [x['name'] for x in files]
self.assertEqual(sorted(files, cmp=locale.strcoll), files)
def testContainerInfo(self):
info = self.env.container.info()
self.assert_status(204)
self.assertEqual(info['object_count'], self.env.file_count)
self.assertEqual(info['bytes_used'],
self.env.file_count * self.env.file_size)
def testContainerInfoOnContainerThatDoesNotExist(self):
container = self.env.account.container(Utils.create_name())
self.assertRaises(ResponseError, container.info)
self.assert_status(404)
def testContainerFileListWithLimit(self):
for format_type in [None, 'json', 'xml']:
files = self.env.container.files(parms={'format': format_type,
'limit': 2})
self.assertEqual(len(files), 2)
def testTooLongName(self):
cont = self.env.account.container('x' * 257)
self.assertFalse(cont.create(),
'created container with name %s' % (cont.name))
self.assert_status(400)
    def testContainerExistenceCachingProblem(self):
        """A create right after a failed request must not be masked by
        stale existence caching.

        Two paths are exercised: a container listing immediately after
        create, and an object PUT immediately after create; both must see
        the freshly created container.
        """
        cont = self.env.account.container(Utils.create_name())
        # listing fails before the container exists ...
        self.assertRaises(ResponseError, cont.files)
        self.assertTrue(cont.create())
        # ... but must succeed immediately after creation
        cont.files()
        cont = self.env.account.container(Utils.create_name())
        self.assertRaises(ResponseError, cont.files)
        self.assertTrue(cont.create())
        # object PUT must also work right after the create
        file_item = cont.file(Utils.create_name())
        file_item.write_random()
    def testContainerLastModified(self):
        """Container Last-Modified changes on container POST/PUT but not
        on object PUT/POST.

        One-second sleeps separate the steps because the header only has
        one-second resolution.
        """
        container = self.env.account.container(Utils.create_name())
        self.assertTrue(container.create())
        info = container.info()
        t0 = info['last_modified']
        # last modified header is in date format which supports in second
        # so we need to wait to increment a sec in the header.
        eventlet.sleep(1)
        # POST container change last modified timestamp
        self.assertTrue(
            container.update_metadata({'x-container-meta-japan': 'mitaka'}))
        info = container.info()
        t1 = info['last_modified']
        self.assertNotEqual(t0, t1)
        eventlet.sleep(1)
        # PUT container (overwrite) also change last modified
        self.assertTrue(container.create())
        info = container.info()
        t2 = info['last_modified']
        self.assertNotEqual(t1, t2)
        eventlet.sleep(1)
        # PUT object doesn't change container last modified timestamp
        obj = container.file(Utils.create_name())
        self.assertTrue(
            obj.write("aaaaa", hdrs={'Content-Type': 'text/plain'}))
        info = container.info()
        t3 = info['last_modified']
        self.assertEqual(t2, t3)
        # POST object also doesn't change container last modified timestamp
        self.assertTrue(
            obj.sync_metadata({'us': 'austin'}))
        info = container.info()
        t4 = info['last_modified']
        self.assertEqual(t2, t4)
class TestContainerUTF8(Base2, TestContainer):
    """Re-run the TestContainer suite under the Base2 configuration."""
    pass
class TestContainerSortingEnv(BaseEnv):
    """Environment: one container holding mixed-case object names."""
    @classmethod
    def setUp(cls):
        super(TestContainerSortingEnv, cls).setUp()
        cls.container = cls.account.container(Utils.create_name())
        if not cls.container.create():
            raise ResponseError(cls.conn.response)
        # mixed-case names chosen to expose case-sensitive sort order
        cls.file_items = ('a1', 'a2', 'A3', 'b1', 'B2', 'a10', 'b10', 'zz')
        cls.file_size = 128
        cls.files = []
        for item in cls.file_items:
            obj = cls.container.file(item)
            obj.write_random(cls.file_size)
            cls.files.append(obj.name)
class TestContainerSorting(Base):
    """Object listings are name-sorted; ``reverse`` flips the order."""
    env = TestContainerSortingEnv
    def testContainerFileListSortingReversed(self):
        """Every truthy spelling of ``reverse`` yields a descending list."""
        file_list = list(sorted(self.env.file_items))
        file_list.reverse()
        for reverse in ('true', '1', 'yes', 'on', 't', 'y'):
            cont_files = self.env.container.files(parms={'reverse': reverse})
            self.assert_status(200)
            self.assertEqual(file_list, cont_files,
                             'Expected %s but got %s with reverse param %r'
                             % (file_list, cont_files, reverse))
    def testContainerFileSortingByPrefixReversed(self):
        """``reverse`` combines with ``prefix`` filtering."""
        cont_list = sorted(c for c in self.env.file_items if c.startswith('a'))
        cont_list.reverse()
        cont_listing = self.env.container.files(parms={
            'reverse': 'on', 'prefix': 'a'})
        self.assert_status(200)
        self.assertEqual(cont_list, cont_listing)
    def testContainerFileSortingByMarkersExclusiveReversed(self):
        """Reversed marker/end_marker listing excludes both endpoints."""
        first_item = self.env.file_items[3]  # 'b1' + postfix
        last_item = self.env.file_items[4]  # 'B2' + postfix
        cont_list = sorted(c for c in self.env.file_items
                           if last_item < c < first_item)
        cont_list.reverse()
        cont_listing = self.env.container.files(parms={
            'reverse': 'on', 'marker': first_item, 'end_marker': last_item})
        self.assert_status(200)
        self.assertEqual(cont_list, cont_listing)
    def testContainerFileSortingByMarkersInclusiveReversed(self):
        """Nudging each marker by one byte makes the bounds inclusive."""
        first_item = self.env.file_items[3]  # 'b1' + postfix
        last_item = self.env.file_items[4]  # 'B2' + postfix
        cont_list = sorted(c for c in self.env.file_items
                           if last_item <= c <= first_item)
        cont_list.reverse()
        # appending '\x00' to marker and decrementing the last byte of
        # end_marker shifts the exclusive bounds just past the endpoints
        cont_listing = self.env.container.files(parms={
            'reverse': 'on', 'marker': first_item + '\x00',
            'end_marker': last_item[:-1] + chr(ord(last_item[-1]) - 1)})
        self.assert_status(200)
        self.assertEqual(cont_list, cont_listing)
    def testContainerFileSortingByReversedMarkersReversed(self):
        """Markers given in the wrong order for ``reverse`` match nothing."""
        cont_listing = self.env.container.files(parms={
            'reverse': 'on', 'marker': 'B', 'end_marker': 'b1'})
        self.assert_status(204)
        self.assertEqual([], cont_listing)
    def testContainerFileListSorting(self):
        """Default order is ascending; bogus ``reverse`` values are off."""
        file_list = list(sorted(self.env.file_items))
        cont_files = self.env.container.files()
        self.assert_status(200)
        self.assertEqual(file_list, cont_files)
        # Lets try again but with reverse is specifically turned off
        cont_files = self.env.container.files(parms={'reverse': 'off'})
        self.assert_status(200)
        self.assertEqual(file_list, cont_files)
        cont_files = self.env.container.files(parms={'reverse': 'false'})
        self.assert_status(200)
        self.assertEqual(file_list, cont_files)
        cont_files = self.env.container.files(parms={'reverse': 'no'})
        self.assert_status(200)
        self.assertEqual(file_list, cont_files)
        cont_files = self.env.container.files(parms={'reverse': ''})
        self.assert_status(200)
        self.assertEqual(file_list, cont_files)
        # Lets try again but with a incorrect reverse values
        cont_files = self.env.container.files(parms={'reverse': 'foo'})
        self.assert_status(200)
        self.assertEqual(file_list, cont_files)
        cont_files = self.env.container.files(parms={'reverse': 'hai'})
        self.assert_status(200)
        self.assertEqual(file_list, cont_files)
        cont_files = self.env.container.files(parms={'reverse': 'o=[]::::>'})
        self.assert_status(200)
        self.assertEqual(file_list, cont_files)
class TestContainerPathsEnv(BaseEnv):
    """Environment that builds a pseudo directory tree for ``path`` tests.

    Names ending in '/' are written as zero-byte directory markers; both
    absolute-style ('/...') and relative-style names are created.
    """
    @classmethod
    def setUp(cls):
        super(TestContainerPathsEnv, cls).setUp()
        cls.file_size = 8
        cls.container = cls.account.container(Utils.create_name())
        if not cls.container.create():
            raise ResponseError(cls.conn.response)
        cls.files = [
            '/file1',
            '/file A',
            '/dir1/',
            '/dir2/',
            '/dir1/file2',
            '/dir1/subdir1/',
            '/dir1/subdir2/',
            '/dir1/subdir1/file2',
            '/dir1/subdir1/file3',
            '/dir1/subdir1/file4',
            '/dir1/subdir1/subsubdir1/',
            '/dir1/subdir1/subsubdir1/file5',
            '/dir1/subdir1/subsubdir1/file6',
            '/dir1/subdir1/subsubdir1/file7',
            '/dir1/subdir1/subsubdir1/file8',
            '/dir1/subdir1/subsubdir2/',
            '/dir1/subdir1/subsubdir2/file9',
            '/dir1/subdir1/subsubdir2/file0',
            'file1',
            'dir1/',
            'dir2/',
            'dir1/file2',
            'dir1/subdir1/',
            'dir1/subdir2/',
            'dir1/subdir1/file2',
            'dir1/subdir1/file3',
            'dir1/subdir1/file4',
            'dir1/subdir1/subsubdir1/',
            'dir1/subdir1/subsubdir1/file5',
            'dir1/subdir1/subsubdir1/file6',
            'dir1/subdir1/subsubdir1/file7',
            'dir1/subdir1/subsubdir1/file8',
            'dir1/subdir1/subsubdir2/',
            'dir1/subdir1/subsubdir2/file9',
            'dir1/subdir1/subsubdir2/file0',
            'dir1/subdir with spaces/',
            'dir1/subdir with spaces/file B',
            'dir1/subdir+with{whatever/',
            'dir1/subdir+with{whatever/file D',
        ]
        stored_files = set()
        for f in cls.files:
            file_item = cls.container.file(f)
            if f.endswith('/'):
                # zero-byte directory marker
                file_item.write(hdrs={'Content-Type': 'application/directory'})
            else:
                # NOTE(review): regular files are also written with an
                # application/directory Content-Type here — looks
                # unintentional; confirm before relying on it
                file_item.write_random(cls.file_size,
                                       hdrs={'Content-Type':
                                             'application/directory'})
            if (normalized_urls):
                # when the proxy normalizes URLs, duplicate slashes are
                # collapsed, so record the name the server will report
                nfile = '/'.join(filter(None, f.split('/')))
                if (f[-1] == '/'):
                    nfile += '/'
                stored_files.add(nfile)
            else:
                stored_files.add(f)
        cls.stored_files = sorted(stored_files)
class TestContainerPaths(Base):
    """Tests for the legacy ``path`` listing parameter, which emulates
    directory browsing over the marker objects created by
    TestContainerPathsEnv."""
    env = TestContainerPathsEnv
    def testTraverseContainer(self):
        """Recursively walking via ``path`` visits exactly the stored names.

        The walk runs twice — rooted at '' (relative-style names) and at
        '/' (absolute-style names) — and each run must see only its own
        family of names.
        """
        found_files = []
        found_dirs = []
        def recurse_path(path, count=0):
            # the fixture tree is shallow; deeper recursion means a loop
            if count > 10:
                raise ValueError('too deep recursion')
            for file_item in self.env.container.files(parms={'path': path}):
                self.assertTrue(file_item.startswith(path))
                if file_item.endswith('/'):
                    recurse_path(file_item, count + 1)
                    found_dirs.append(file_item)
                else:
                    found_files.append(file_item)
        recurse_path('')
        for file_item in self.env.stored_files:
            if file_item.startswith('/'):
                self.assertNotIn(file_item, found_dirs)
                self.assertNotIn(file_item, found_files)
            elif file_item.endswith('/'):
                self.assertIn(file_item, found_dirs)
                self.assertNotIn(file_item, found_files)
            else:
                self.assertIn(file_item, found_files)
                self.assertNotIn(file_item, found_dirs)
        found_files = []
        found_dirs = []
        recurse_path('/')
        for file_item in self.env.stored_files:
            if not file_item.startswith('/'):
                self.assertNotIn(file_item, found_dirs)
                self.assertNotIn(file_item, found_files)
            elif file_item.endswith('/'):
                self.assertIn(file_item, found_dirs)
                self.assertNotIn(file_item, found_files)
            else:
                self.assertIn(file_item, found_files)
                self.assertNotIn(file_item, found_dirs)
    def testContainerListing(self):
        """Plain listings return all names; json/xml entries carry bytes,
        last_modified and (for markers) the directory content type."""
        for format_type in (None, 'json', 'xml'):
            files = self.env.container.files(parms={'format': format_type})
            if isinstance(files[0], dict):
                files = [str(x['name']) for x in files]
            self.assertEqual(files, self.env.stored_files)
        for format_type in ('json', 'xml'):
            for file_item in self.env.container.files(parms={'format':
                                                             format_type}):
                self.assertGreaterEqual(int(file_item['bytes']), 0)
                self.assertIn('last_modified', file_item)
                if file_item['name'].endswith('/'):
                    self.assertEqual(file_item['content_type'],
                                     'application/directory')
    def testStructure(self):
        """Spot-check ``path`` listings at several levels of the tree."""
        from functools import cmp_to_key
        def assert_listing(path, file_list):
            files = self.env.container.files(parms={'path': path})
            # sorted(cmp=...) is Python 2 only; cmp_to_key gives the same
            # collation-based ordering on both interpreters
            self.assertEqual(
                sorted(file_list, key=cmp_to_key(locale.strcoll)), files)
        if not normalized_urls:
            assert_listing('/', ['/dir1/', '/dir2/', '/file1', '/file A'])
            assert_listing('/dir1',
                           ['/dir1/file2', '/dir1/subdir1/', '/dir1/subdir2/'])
            assert_listing('/dir1/',
                           ['/dir1/file2', '/dir1/subdir1/', '/dir1/subdir2/'])
            assert_listing('/dir1/subdir1',
                           ['/dir1/subdir1/subsubdir2/', '/dir1/subdir1/file2',
                            '/dir1/subdir1/file3', '/dir1/subdir1/file4',
                            '/dir1/subdir1/subsubdir1/'])
            assert_listing('/dir1/subdir2', [])
            assert_listing('', ['file1', 'dir1/', 'dir2/'])
        else:
            assert_listing('', ['file1', 'dir1/', 'dir2/', 'file A'])
        assert_listing('dir1', ['dir1/file2', 'dir1/subdir1/',
                                'dir1/subdir2/', 'dir1/subdir with spaces/',
                                'dir1/subdir+with{whatever/'])
        assert_listing('dir1/subdir1',
                       ['dir1/subdir1/file4', 'dir1/subdir1/subsubdir2/',
                        'dir1/subdir1/file2', 'dir1/subdir1/file3',
                        'dir1/subdir1/subsubdir1/'])
        assert_listing('dir1/subdir1/subsubdir1',
                       ['dir1/subdir1/subsubdir1/file7',
                        'dir1/subdir1/subsubdir1/file5',
                        'dir1/subdir1/subsubdir1/file8',
                        'dir1/subdir1/subsubdir1/file6'])
        assert_listing('dir1/subdir1/subsubdir1/',
                       ['dir1/subdir1/subsubdir1/file7',
                        'dir1/subdir1/subsubdir1/file5',
                        'dir1/subdir1/subsubdir1/file8',
                        'dir1/subdir1/subsubdir1/file6'])
        assert_listing('dir1/subdir with spaces/',
                       ['dir1/subdir with spaces/file B'])
class TestFileEnv(BaseEnv):
    """Environment for object tests: one container plus a second account.

    The second connection/account enables the account-to-account copy
    tests; both accounts get a metadata POST so the project domain id is
    persisted before any ACL-dependent tests run.
    """
    @classmethod
    def setUp(cls):
        super(TestFileEnv, cls).setUp()
        # creating another account and connection
        # for account to account copy tests
        config2 = deepcopy(tf.config)
        config2['account'] = tf.config['account2']
        config2['username'] = tf.config['username2']
        config2['password'] = tf.config['password2']
        cls.conn2 = Connection(config2)
        cls.conn2.authenticate()
        cls.account2 = cls.conn2.get_account()
        cls.account2.delete_containers()
        cls.container = cls.account.container(Utils.create_name())
        if not cls.container.create():
            raise ResponseError(cls.conn.response)
        cls.file_size = 128
        # With keystoneauth we need the accounts to have had the project
        # domain id persisted as sysmeta prior to testing ACLs. This may
        # not be the case if, for example, the account was created using
        # a request with reseller_admin role, when project domain id may
        # not have been known. So we ensure that the project domain id is
        # in sysmeta by making a POST to the accounts using an admin role.
        cls.account.update_metadata()
        cls.account2.update_metadata()
class TestFileDev(Base):
    """Object (file) tests bound to the TestFileEnv environment."""
    env = TestFileEnv
class TestFileDevUTF8(Base2, TestFileDev):
    """Re-run the TestFileDev suite under the Base2 configuration."""
    pass
class TestFile(Base):
env = TestFileEnv
    def testGetResponseHeaders(self):
        """GET of an object returns the expected response headers.

        Writes an object, reads it back, and verifies the full list of
        response header (name, value) pairs: required headers present
        with the right values, unexpected ones absent.  Run once with a
        plain GET and once with an explicit keep-alive Connection header.
        """
        obj_data = 'test_body'
        def do_test(put_hdrs, get_hdrs, expected_hdrs, unexpected_hdrs):
            filename = Utils.create_name()
            file_item = self.env.container.file(filename)
            resp = file_item.write(
                data=obj_data, hdrs=put_hdrs, return_resp=True)
            # put then get an object
            resp.read()
            read_data = file_item.read(hdrs=get_hdrs)
            self.assertEqual(obj_data, read_data) # sanity check
            resp_headers = file_item.conn.response.getheaders()
            # check the *list* of all header (name, value) pairs rather than
            # constructing a dict in case of repeated names in the list
            errors = []
            for k, v in resp_headers:
                if k.lower() in unexpected_hdrs:
                    errors.append('Found unexpected header %s: %s' % (k, v))
            for k, v in expected_hdrs.items():
                matches = [hdr for hdr in resp_headers if hdr[0] == k]
                if not matches:
                    errors.append('Missing expected header %s' % k)
                for (got_k, got_v) in matches:
                    if got_v != v:
                        errors.append('Expected %s but got %s for %s' %
                                      (v, got_v, k))
            if errors:
                self.fail(
                    'Errors in response headers:\n %s' % '\n '.join(errors))
        put_headers = {'X-Object-Meta-Fruit': 'Banana',
                       'X-Delete-After': '10000',
                       'Content-Type': 'application/test'}
        # the PUT's X-Delete-After must come back as x-delete-at (see
        # unexpected_headers below), never echoed verbatim
        expected_headers = {'content-length': str(len(obj_data)),
                            'x-object-meta-fruit': 'Banana',
                            'accept-ranges': 'bytes',
                            'content-type': 'application/test',
                            'etag': hashlib.md5(obj_data).hexdigest(),
                            'last-modified': mock.ANY,
                            'date': mock.ANY,
                            'x-delete-at': mock.ANY,
                            'x-trans-id': mock.ANY,
                            'x-openstack-request-id': mock.ANY}
        unexpected_headers = ['connection', 'x-delete-after']
        do_test(put_headers, {}, expected_headers, unexpected_headers)
        get_headers = {'Connection': 'keep-alive'}
        # with an explicit keep-alive request the Connection header is
        # expected in the response
        expected_headers['connection'] = 'keep-alive'
        unexpected_headers = ['x-delete-after']
        do_test(put_headers, get_headers, expected_headers, unexpected_headers)
    def testCopy(self):
        """COPY propagates data, merges metadata, and honors header updates.

        Three successive copies of one source object are checked:
        1) plain copy — data, metadata and sundry headers carry over and
           new metadata is merged with the existing;
        2) copy with changed Content-Type/Encoding/Disposition — the
           changed values must win;
        3) copy with X-Fresh-Metadata — existing user metadata must be
           replaced rather than merged.
        Each is exercised within the source container and into a second
        container, with and without a leading slash on the source path.
        """
        # makes sure to test encoded characters
        source_filename = 'dealde%2Fl04 011e%204c8df/flash.png'
        file_item = self.env.container.file(source_filename)
        metadata = {}
        metadata[Utils.create_ascii_name()] = Utils.create_name()
        put_headers = {'Content-Type': 'application/test',
                       'Content-Encoding': 'gzip',
                       'Content-Disposition': 'attachment; filename=myfile'}
        file_item.metadata = metadata
        data = file_item.write_random(hdrs=put_headers)
        # the allowed headers are configurable in object server, so we cannot
        # assert that content-encoding and content-disposition get *copied*
        # unless they were successfully set on the original PUT, so populate
        # expected_headers by making a HEAD on the original object
        file_item.initialize()
        self.assertEqual('application/test', file_item.content_type)
        resp_headers = dict(file_item.conn.response.getheaders())
        expected_headers = {}
        for k, v in put_headers.items():
            if k.lower() in resp_headers:
                expected_headers[k] = v
        dest_cont = self.env.account.container(Utils.create_name())
        self.assertTrue(dest_cont.create())
        # copy both from within and across containers
        for cont in (self.env.container, dest_cont):
            # copy both with and without initial slash
            for prefix in ('', '/'):
                dest_filename = Utils.create_name()
                extra_hdrs = {'X-Object-Meta-Extra': 'fresh'}
                self.assertTrue(file_item.copy(
                    '%s%s' % (prefix, cont), dest_filename, hdrs=extra_hdrs))
                # verify container listing for copy
                listing = cont.files(parms={'format': 'json'})
                for obj in listing:
                    if obj['name'] == dest_filename:
                        break
                else:
                    self.fail('Failed to find %s in listing' % dest_filename)
                self.assertEqual(file_item.size, obj['bytes'])
                self.assertEqual(file_item.etag, obj['hash'])
                self.assertEqual(file_item.content_type, obj['content_type'])
                file_copy = cont.file(dest_filename)
                self.assertEqual(data, file_copy.read())
                self.assertTrue(file_copy.initialize())
                expected_metadata = dict(metadata)
                # new metadata should be merged with existing
                expected_metadata['extra'] = 'fresh'
                self.assertDictEqual(expected_metadata, file_copy.metadata)
                resp_headers = dict(file_copy.conn.response.getheaders())
                for k, v in expected_headers.items():
                    self.assertIn(k.lower(), resp_headers)
                    self.assertEqual(v, resp_headers[k.lower()])
                # repeat copy with updated content-type, content-encoding and
                # content-disposition, which should get updated
                extra_hdrs = {
                    'X-Object-Meta-Extra': 'fresher',
                    'Content-Type': 'application/test-changed',
                    'Content-Encoding': 'not_gzip',
                    'Content-Disposition': 'attachment; filename=notmyfile'}
                self.assertTrue(file_item.copy(
                    '%s%s' % (prefix, cont), dest_filename, hdrs=extra_hdrs))
                self.assertIn(dest_filename, cont.files())
                file_copy = cont.file(dest_filename)
                self.assertEqual(data, file_copy.read())
                self.assertTrue(file_copy.initialize())
                expected_metadata['extra'] = 'fresher'
                self.assertDictEqual(expected_metadata, file_copy.metadata)
                resp_headers = dict(file_copy.conn.response.getheaders())
                # if k is in expected_headers then we can assert its new value
                for k, v in expected_headers.items():
                    v = extra_hdrs.get(k, v)
                    self.assertIn(k.lower(), resp_headers)
                    self.assertEqual(v, resp_headers[k.lower()])
                # verify container listing for copy
                listing = cont.files(parms={'format': 'json'})
                for obj in listing:
                    if obj['name'] == dest_filename:
                        break
                else:
                    self.fail('Failed to find %s in listing' % dest_filename)
                self.assertEqual(file_item.size, obj['bytes'])
                self.assertEqual(file_item.etag, obj['hash'])
                self.assertEqual(
                    'application/test-changed', obj['content_type'])
                # repeat copy with X-Fresh-Metadata header - existing user
                # metadata should not be copied, new completely replaces it.
                extra_hdrs = {'Content-Type': 'application/test-updated',
                              'X-Object-Meta-Extra': 'fresher',
                              'X-Fresh-Metadata': 'true'}
                self.assertTrue(file_item.copy(
                    '%s%s' % (prefix, cont), dest_filename, hdrs=extra_hdrs))
                self.assertIn(dest_filename, cont.files())
                file_copy = cont.file(dest_filename)
                self.assertEqual(data, file_copy.read())
                self.assertTrue(file_copy.initialize())
                self.assertEqual('application/test-updated',
                                 file_copy.content_type)
                expected_metadata = {'extra': 'fresher'}
                self.assertDictEqual(expected_metadata, file_copy.metadata)
                resp_headers = dict(file_copy.conn.response.getheaders())
                for k in ('Content-Disposition', 'Content-Encoding'):
                    self.assertNotIn(k.lower(), resp_headers)
                # verify container listing for copy
                listing = cont.files(parms={'format': 'json'})
                for obj in listing:
                    if obj['name'] == dest_filename:
                        break
                else:
                    self.fail('Failed to find %s in listing' % dest_filename)
                self.assertEqual(file_item.size, obj['bytes'])
                self.assertEqual(file_item.etag, obj['hash'])
                self.assertEqual(
                    'application/test-updated', obj['content_type'])
    def testCopyRange(self):
        """COPY with a Range header copies only the selected bytes.

        ``bytes=100-200`` is inclusive, so the copy must be 101 bytes
        long with a matching etag; user metadata still carries over.
        """
        # makes sure to test encoded characters
        source_filename = 'dealde%2Fl04 011e%204c8df/flash.png'
        file_item = self.env.container.file(source_filename)
        metadata = {Utils.create_ascii_name(): Utils.create_name()}
        data = file_item.write_random(1024)
        file_item.sync_metadata(metadata)
        file_item.initialize()
        dest_cont = self.env.account.container(Utils.create_name())
        self.assertTrue(dest_cont.create())
        expected_body = data[100:201]
        expected_etag = hashlib.md5(expected_body)
        # copy both from within and across containers
        for cont in (self.env.container, dest_cont):
            # copy both with and without initial slash
            for prefix in ('', '/'):
                dest_filename = Utils.create_name()
                file_item.copy('%s%s' % (prefix, cont), dest_filename,
                               hdrs={'Range': 'bytes=100-200'})
                self.assertEqual(201, file_item.conn.response.status)
                # verify container listing for copy
                listing = cont.files(parms={'format': 'json'})
                for obj in listing:
                    if obj['name'] == dest_filename:
                        break
                else:
                    self.fail('Failed to find %s in listing' % dest_filename)
                self.assertEqual(101, obj['bytes'])
                self.assertEqual(expected_etag.hexdigest(), obj['hash'])
                self.assertEqual(file_item.content_type, obj['content_type'])
                # verify copy object
                copy_file_item = cont.file(dest_filename)
                self.assertEqual(expected_body, copy_file_item.read())
                self.assertTrue(copy_file_item.initialize())
                self.assertEqual(metadata, copy_file_item.metadata)
    def testCopyAccount(self):
        """COPY works within one account and into an ACL-writable second
        account (copy_account), preserving data and metadata."""
        # makes sure to test encoded characters
        source_filename = 'dealde%2Fl04 011e%204c8df/flash.png'
        file_item = self.env.container.file(source_filename)
        metadata = {Utils.create_ascii_name(): Utils.create_name()}
        data = file_item.write_random()
        file_item.sync_metadata(metadata)
        dest_cont = self.env.account.container(Utils.create_name())
        self.assertTrue(dest_cont.create())
        acct = self.env.conn.account_name
        # copy both from within and across containers
        for cont in (self.env.container, dest_cont):
            # copy both with and without initial slash
            for prefix in ('', '/'):
                dest_filename = Utils.create_name()
                file_item = self.env.container.file(source_filename)
                file_item.copy_account(acct,
                                       '%s%s' % (prefix, cont),
                                       dest_filename)
                self.assertIn(dest_filename, cont.files())
                file_item = cont.file(dest_filename)
                self.assertEqual(data, file_item.read())
                self.assertTrue(file_item.initialize())
                self.assertEqual(metadata, file_item.metadata)
        # cross-account: the second account's container grants this user
        # write access via X-Container-Write
        dest_cont = self.env.account2.container(Utils.create_name())
        self.assertTrue(dest_cont.create(hdrs={
            'X-Container-Write': self.env.conn.user_acl
        }))
        acct = self.env.conn2.account_name
        # copy both with and without initial slash
        for prefix in ('', '/'):
            dest_filename = Utils.create_name()
            file_item = self.env.container.file(source_filename)
            file_item.copy_account(acct,
                                   '%s%s' % (prefix, dest_cont),
                                   dest_filename)
            self.assertIn(dest_filename, dest_cont.files())
            file_item = dest_cont.file(dest_filename)
            self.assertEqual(data, file_item.read())
            self.assertTrue(file_item.initialize())
            self.assertEqual(metadata, file_item.metadata)
    def testCopy404s(self):
        """COPY with a missing source or destination must fail with 404."""
        source_filename = Utils.create_name()
        file_item = self.env.container.file(source_filename)
        file_item.write_random()
        dest_cont = self.env.account.container(Utils.create_name())
        self.assertTrue(dest_cont.create())
        for prefix in ('', '/'):
            # invalid source container
            source_cont = self.env.account.container(Utils.create_name())
            file_item = source_cont.file(source_filename)
            self.assertFalse(file_item.copy(
                '%s%s' % (prefix, self.env.container),
                Utils.create_name()))
            self.assert_status(404)
            self.assertFalse(file_item.copy('%s%s' % (prefix, dest_cont),
                                            Utils.create_name()))
            self.assert_status(404)
            # invalid source object
            file_item = self.env.container.file(Utils.create_name())
            self.assertFalse(file_item.copy(
                '%s%s' % (prefix, self.env.container),
                Utils.create_name()))
            self.assert_status(404)
            self.assertFalse(file_item.copy('%s%s' % (prefix, dest_cont),
                                            Utils.create_name()))
            self.assert_status(404)
            # invalid destination container
            file_item = self.env.container.file(source_filename)
            self.assertFalse(file_item.copy(
                '%s%s' % (prefix, Utils.create_name()),
                Utils.create_name()))
            # NOTE(review): unlike the cases above there is no
            # assert_status() after this last copy — presumably a 404 as
            # well; confirm before adding one
    def testCopyAccount404s(self):
        """Cross-account COPY with bad source/destination fails 404/403.

        For the foreign account the invalid-destination case yields 403
        (no write grant can exist on a container that does not exist);
        for the user's own account it is a plain 404.
        """
        acct = self.env.conn.account_name
        acct2 = self.env.conn2.account_name
        source_filename = Utils.create_name()
        file_item = self.env.container.file(source_filename)
        file_item.write_random()
        dest_cont = self.env.account.container(Utils.create_name())
        self.assertTrue(dest_cont.create(hdrs={
            'X-Container-Read': self.env.conn2.user_acl
        }))
        dest_cont2 = self.env.account2.container(Utils.create_name())
        self.assertTrue(dest_cont2.create(hdrs={
            'X-Container-Write': self.env.conn.user_acl,
            'X-Container-Read': self.env.conn.user_acl
        }))
        for acct, cont in ((acct, dest_cont), (acct2, dest_cont2)):
            for prefix in ('', '/'):
                # invalid source container
                source_cont = self.env.account.container(Utils.create_name())
                file_item = source_cont.file(source_filename)
                self.assertFalse(file_item.copy_account(
                    acct,
                    '%s%s' % (prefix, self.env.container),
                    Utils.create_name()))
                # there is no such source container but user has
                # permissions to do a GET (done internally via COPY) for
                # objects in his own account.
                self.assert_status(404)
                self.assertFalse(file_item.copy_account(
                    acct,
                    '%s%s' % (prefix, cont),
                    Utils.create_name()))
                self.assert_status(404)
                # invalid source object
                file_item = self.env.container.file(Utils.create_name())
                self.assertFalse(file_item.copy_account(
                    acct,
                    '%s%s' % (prefix, self.env.container),
                    Utils.create_name()))
                # there is no such source container but user has
                # permissions to do a GET (done internally via COPY) for
                # objects in his own account.
                self.assert_status(404)
                self.assertFalse(file_item.copy_account(
                    acct,
                    '%s%s' % (prefix, cont),
                    Utils.create_name()))
                self.assert_status(404)
                # invalid destination container
                file_item = self.env.container.file(source_filename)
                self.assertFalse(file_item.copy_account(
                    acct,
                    '%s%s' % (prefix, Utils.create_name()),
                    Utils.create_name()))
                if acct == acct2:
                    # there is no such destination container
                    # and foreign user can have no permission to write there
                    self.assert_status(403)
                else:
                    self.assert_status(404)
def testCopyNoDestinationHeader(self):
source_filename = Utils.create_name()
file_item = self.env.container.file(source_filename)
file_item.write_random()
file_item = self.env.container.file(source_filename)
self.assertFalse(file_item.copy(Utils.create_name(),
Utils.create_name(),
cfg={'no_destination': True}))
self.assert_status(412)
def testCopyDestinationSlashProblems(self):
source_filename = Utils.create_name()
file_item = self.env.container.file(source_filename)
file_item.write_random()
# no slash
self.assertFalse(file_item.copy(Utils.create_name(),
Utils.create_name(),
cfg={'destination': Utils.create_name()}))
self.assert_status(412)
# too many slashes
self.assertFalse(file_item.copy(Utils.create_name(),
Utils.create_name(),
cfg={'destination': '//%s' % Utils.create_name()}))
self.assert_status(412)
def testCopyFromHeader(self):
source_filename = Utils.create_name()
file_item = self.env.container.file(source_filename)
metadata = {}
for i in range(1):
metadata[Utils.create_ascii_name()] = Utils.create_name()
file_item.metadata = metadata
data = file_item.write_random()
dest_cont = self.env.account.container(Utils.create_name())
self.assertTrue(dest_cont.create())
# copy both from within and across containers
for cont in (self.env.container, dest_cont):
# copy both with and without initial slash
for prefix in ('', '/'):
dest_filename = Utils.create_name()
file_item = cont.file(dest_filename)
file_item.write(hdrs={'X-Copy-From': '%s%s/%s' % (
prefix, self.env.container.name, source_filename)})
self.assertIn(dest_filename, cont.files())
file_item = cont.file(dest_filename)
self.assertEqual(data, file_item.read())
self.assertTrue(file_item.initialize())
self.assertEqual(metadata, file_item.metadata)
def testCopyFromAccountHeader(self):
acct = self.env.conn.account_name
src_cont = self.env.account.container(Utils.create_name())
self.assertTrue(src_cont.create(hdrs={
'X-Container-Read': self.env.conn2.user_acl
}))
source_filename = Utils.create_name()
file_item = src_cont.file(source_filename)
metadata = {}
for i in range(1):
metadata[Utils.create_ascii_name()] = Utils.create_name()
file_item.metadata = metadata
data = file_item.write_random()
dest_cont = self.env.account.container(Utils.create_name())
self.assertTrue(dest_cont.create())
dest_cont2 = self.env.account2.container(Utils.create_name())
self.assertTrue(dest_cont2.create(hdrs={
'X-Container-Write': self.env.conn.user_acl
}))
for cont in (src_cont, dest_cont, dest_cont2):
# copy both with and without initial slash
for prefix in ('', '/'):
dest_filename = Utils.create_name()
file_item = cont.file(dest_filename)
file_item.write(hdrs={'X-Copy-From-Account': acct,
'X-Copy-From': '%s%s/%s' % (
prefix,
src_cont.name,
source_filename)})
self.assertIn(dest_filename, cont.files())
file_item = cont.file(dest_filename)
self.assertEqual(data, file_item.read())
self.assertTrue(file_item.initialize())
self.assertEqual(metadata, file_item.metadata)
    def testCopyFromHeader404s(self):
        """PUT with X-Copy-From 404s when the copy source container,
        source object, or destination container does not exist."""
        source_filename = Utils.create_name()
        file_item = self.env.container.file(source_filename)
        file_item.write_random()
        # exercise both with and without a leading slash in the source path
        for prefix in ('', '/'):
            # invalid source container
            file_item = self.env.container.file(Utils.create_name())
            copy_from = ('%s%s/%s'
                         % (prefix, Utils.create_name(), source_filename))
            self.assertRaises(ResponseError, file_item.write,
                              hdrs={'X-Copy-From': copy_from})
            self.assert_status(404)
            # invalid source object
            copy_from = ('%s%s/%s'
                         % (prefix, self.env.container.name,
                            Utils.create_name()))
            file_item = self.env.container.file(Utils.create_name())
            self.assertRaises(ResponseError, file_item.write,
                              hdrs={'X-Copy-From': copy_from})
            self.assert_status(404)
            # invalid destination container
            dest_cont = self.env.account.container(Utils.create_name())
            file_item = dest_cont.file(Utils.create_name())
            copy_from = ('%s%s/%s'
                         % (prefix, self.env.container.name, source_filename))
            self.assertRaises(ResponseError, file_item.write,
                              hdrs={'X-Copy-From': copy_from})
            self.assert_status(404)
    def testCopyFromAccountHeader404s(self):
        """Cross-account X-Copy-From PUTs fail (404, or 403 for the
        unauthorized-container case) when the source or destination is
        missing."""
        acct = self.env.conn2.account_name
        src_cont = self.env.account2.container(Utils.create_name())
        self.assertTrue(src_cont.create(hdrs={
            'X-Container-Read': self.env.conn.user_acl
        }))
        source_filename = Utils.create_name()
        file_item = src_cont.file(source_filename)
        file_item.write_random()
        dest_cont = self.env.account.container(Utils.create_name())
        self.assertTrue(dest_cont.create())
        for prefix in ('', '/'):
            # invalid source container
            file_item = dest_cont.file(Utils.create_name())
            self.assertRaises(ResponseError, file_item.write,
                              hdrs={'X-Copy-From-Account': acct,
                                    'X-Copy-From': '%s%s/%s' %
                                    (prefix,
                                     Utils.create_name(),
                                     source_filename)})
            # looks like cached responses leak "not found"
            # to un-authorized users, not going to fix it now, but...
            self.assert_status([403, 404])
            # invalid source object
            # NOTE(review): src_cont (the container object, not
            # src_cont.name) is interpolated into the path here and below;
            # presumably its string form still yields a missing path and a
            # 404 -- confirm against the Container class's __str__.
            file_item = self.env.container.file(Utils.create_name())
            self.assertRaises(ResponseError, file_item.write,
                              hdrs={'X-Copy-From-Account': acct,
                                    'X-Copy-From': '%s%s/%s' %
                                    (prefix,
                                     src_cont,
                                     Utils.create_name())})
            self.assert_status(404)
            # invalid destination container
            dest_cont = self.env.account.container(Utils.create_name())
            file_item = dest_cont.file(Utils.create_name())
            self.assertRaises(ResponseError, file_item.write,
                              hdrs={'X-Copy-From-Account': acct,
                                    'X-Copy-From': '%s%s/%s' %
                                    (prefix,
                                     src_cont,
                                     source_filename)})
            self.assert_status(404)
def testNameLimit(self):
limit = load_constraint('max_object_name_length')
for l in (1, 10, limit / 2, limit - 1, limit, limit + 1, limit * 2):
file_item = self.env.container.file('a' * l)
if l <= limit:
self.assertTrue(file_item.write())
self.assert_status(201)
else:
self.assertRaises(ResponseError, file_item.write)
self.assert_status(400)
def testQuestionMarkInName(self):
if Utils.create_name == Utils.create_ascii_name:
file_name = list(Utils.create_name())
file_name[random.randint(2, len(file_name) - 2)] = '?'
file_name = "".join(file_name)
else:
file_name = Utils.create_name(6) + '?' + Utils.create_name(6)
file_item = self.env.container.file(file_name)
self.assertTrue(file_item.write(cfg={'no_path_quote': True}))
self.assertNotIn(file_name, self.env.container.files())
self.assertIn(file_name.split('?')[0], self.env.container.files())
def testDeleteThen404s(self):
file_item = self.env.container.file(Utils.create_name())
self.assertTrue(file_item.write_random())
self.assert_status(201)
self.assertTrue(file_item.delete())
self.assert_status(204)
file_item.metadata = {Utils.create_ascii_name(): Utils.create_name()}
for method in (file_item.info,
file_item.read,
file_item.sync_metadata,
file_item.delete):
self.assertRaises(ResponseError, method)
self.assert_status(404)
def testBlankMetadataName(self):
file_item = self.env.container.file(Utils.create_name())
file_item.metadata = {'': Utils.create_name()}
self.assertRaises(ResponseError, file_item.write_random)
self.assert_status(400)
    def testMetadataNumberLimit(self):
        """At most max_meta_count metadata items are accepted per object;
        more than that gets 400 on both PUT and POST."""
        number_limit = load_constraint('max_meta_count')
        size_limit = load_constraint('max_meta_overall_size')
        for i in (number_limit - 10, number_limit - 1, number_limit,
                  number_limit + 1, number_limit + 10, number_limit + 100):
            # cap key/value lengths so i items stay under the overall
            # metadata size limit and only the item *count* is tested
            j = size_limit / (i * 2)
            metadata = {}
            while len(metadata.keys()) < i:
                key = Utils.create_ascii_name()
                val = Utils.create_name()
                if len(key) > j:
                    key = key[:j]
                    val = val[:j]
                metadata[key] = val
            file_item = self.env.container.file(Utils.create_name())
            file_item.metadata = metadata
            if i <= number_limit:
                self.assertTrue(file_item.write())
                self.assert_status(201)
                self.assertTrue(file_item.sync_metadata())
                self.assert_status((201, 202))
            else:
                self.assertRaises(ResponseError, file_item.write)
                self.assert_status(400)
                # a PUT without metadata then a POST with too much
                # metadata must also 400
                file_item.metadata = {}
                self.assertTrue(file_item.write())
                self.assert_status(201)
                file_item.metadata = metadata
                self.assertRaises(ResponseError, file_item.sync_metadata)
                self.assert_status(400)
def testContentTypeGuessing(self):
file_types = {'wav': 'audio/x-wav', 'txt': 'text/plain',
'zip': 'application/zip'}
container = self.env.account.container(Utils.create_name())
self.assertTrue(container.create())
for i in file_types.keys():
file_item = container.file(Utils.create_name() + '.' + i)
file_item.write('', cfg={'no_content_type': True})
file_types_read = {}
for i in container.files(parms={'format': 'json'}):
file_types_read[i['name'].split('.')[1]] = i['content_type']
self.assertEqual(file_types, file_types_read)
    def testRangedGets(self):
        """Single-range GETs: explicit ranges, suffix ranges, open-ended
        ranges, plus unsatisfiable and syntactically invalid ones."""
        # We set the file_length to a strange multiple here. This is to check
        # that ranges still work in the EC case when the requested range
        # spans EC segment boundaries. The 1 MiB base value is chosen because
        # that's a common EC segment size. The 1.33 multiple is to ensure we
        # aren't aligned on segment boundaries
        file_length = int(1048576 * 1.33)
        range_size = file_length / 10
        file_item = self.env.container.file(Utils.create_name())
        data = file_item.write_random(file_length)
        for i in range(0, file_length, range_size):
            # explicit first-pos/last-pos range
            range_string = 'bytes=%d-%d' % (i, i + range_size - 1)
            hdrs = {'Range': range_string}
            self.assertEqual(
                data[i: i + range_size], file_item.read(hdrs=hdrs),
                range_string)
            # suffix range: last i bytes
            range_string = 'bytes=-%d' % (i)
            hdrs = {'Range': range_string}
            if i == 0:
                # RFC 2616 14.35.1
                # "If a syntactically valid byte-range-set includes ... at
                # least one suffix-byte-range-spec with a NON-ZERO
                # suffix-length, then the byte-range-set is satisfiable.
                # Otherwise, the byte-range-set is unsatisfiable.
                self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
                self.assert_status(416)
                self.assert_header('content-range', 'bytes */%d' % file_length)
            else:
                self.assertEqual(file_item.read(hdrs=hdrs), data[-i:])
                self.assert_header('content-range', 'bytes %d-%d/%d' % (
                    file_length - i, file_length - 1, file_length))
                self.assert_header('etag', file_item.md5)
                self.assert_header('accept-ranges', 'bytes')
            # open-ended range: from i to the end
            range_string = 'bytes=%d-' % (i)
            hdrs = {'Range': range_string}
            self.assertEqual(
                file_item.read(hdrs=hdrs), data[i - file_length:],
                range_string)
        # range entirely past the end of the object: 416
        range_string = 'bytes=%d-%d' % (file_length + 1000, file_length + 2000)
        hdrs = {'Range': range_string}
        self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
        self.assert_status(416)
        self.assert_header('content-range', 'bytes */%d' % file_length)
        self.assert_header('etag', file_item.md5)
        self.assert_header('accept-ranges', 'bytes')
        # range overlapping the end: truncated to what exists
        range_string = 'bytes=%d-%d' % (file_length - 1000, file_length + 2000)
        hdrs = {'Range': range_string}
        self.assertEqual(file_item.read(hdrs=hdrs), data[-1000:], range_string)
        # '0-4' lacks the "bytes=" prefix, so it is not a valid Range
        # header; the server ignores it and returns the whole object
        hdrs = {'Range': '0-4'}
        self.assertEqual(file_item.read(hdrs=hdrs), data, '0-4')
        # RFC 2616 14.35.1
        # "If the entity is shorter than the specified suffix-length, the
        # entire entity-body is used."
        range_string = 'bytes=-%d' % (file_length + 10)
        hdrs = {'Range': range_string}
        self.assertEqual(file_item.read(hdrs=hdrs), data, range_string)
    def testMultiRangeGets(self):
        """Multi-range GETs return multipart/byteranges bodies; partially
        satisfiable requests drop the bad ranges, and wholly unsatisfiable
        ones get 416."""
        file_length = 10000
        range_size = file_length / 10
        subrange_size = range_size / 10
        file_item = self.env.container.file(Utils.create_name())
        data = file_item.write_random(
            file_length, hdrs={"Content-Type":
                               "lovecraft/rugose; squamous=true"})
        for i in range(0, file_length, range_size):
            # three disjoint, satisfiable subranges per request
            range_string = 'bytes=%d-%d,%d-%d,%d-%d' % (
                i, i + subrange_size - 1,
                i + 2 * subrange_size, i + 3 * subrange_size - 1,
                i + 4 * subrange_size, i + 5 * subrange_size - 1)
            hdrs = {'Range': range_string}
            fetched = file_item.read(hdrs=hdrs)
            self.assert_status(206)
            content_type = file_item.content_type
            self.assertTrue(content_type.startswith("multipart/byteranges"))
            self.assertIsNone(file_item.content_range)
            # email.parser.FeedParser wants a message with headers on the
            # front, then two CRLFs, and then a body (like emails have but
            # HTTP response bodies don't). We fake it out by constructing a
            # one-header preamble containing just the Content-Type, then
            # feeding in the response body.
            parser = email.parser.FeedParser()
            parser.feed("Content-Type: %s\r\n\r\n" % content_type)
            parser.feed(fetched)
            root_message = parser.close()
            self.assertTrue(root_message.is_multipart())
            byteranges = root_message.get_payload()
            self.assertEqual(len(byteranges), 3)
            # each part carries the object's Content-Type, the right
            # Content-Range, and the matching slice of the data
            self.assertEqual(byteranges[0]['Content-Type'],
                             "lovecraft/rugose; squamous=true")
            self.assertEqual(
                byteranges[0]['Content-Range'],
                "bytes %d-%d/%d" % (i, i + subrange_size - 1, file_length))
            self.assertEqual(
                byteranges[0].get_payload(),
                data[i:(i + subrange_size)])
            self.assertEqual(byteranges[1]['Content-Type'],
                             "lovecraft/rugose; squamous=true")
            self.assertEqual(
                byteranges[1]['Content-Range'],
                "bytes %d-%d/%d" % (i + 2 * subrange_size,
                                    i + 3 * subrange_size - 1, file_length))
            self.assertEqual(
                byteranges[1].get_payload(),
                data[(i + 2 * subrange_size):(i + 3 * subrange_size)])
            self.assertEqual(byteranges[2]['Content-Type'],
                             "lovecraft/rugose; squamous=true")
            self.assertEqual(
                byteranges[2]['Content-Range'],
                "bytes %d-%d/%d" % (i + 4 * subrange_size,
                                    i + 5 * subrange_size - 1, file_length))
            self.assertEqual(
                byteranges[2].get_payload(),
                data[(i + 4 * subrange_size):(i + 5 * subrange_size)])
        # The first two ranges are satisfiable but the third is not; the
        # result is a multipart/byteranges response containing only the two
        # satisfiable byteranges.
        range_string = 'bytes=%d-%d,%d-%d,%d-%d' % (
            0, subrange_size - 1,
            2 * subrange_size, 3 * subrange_size - 1,
            file_length, file_length + subrange_size - 1)
        hdrs = {'Range': range_string}
        fetched = file_item.read(hdrs=hdrs)
        self.assert_status(206)
        content_type = file_item.content_type
        self.assertTrue(content_type.startswith("multipart/byteranges"))
        self.assertIsNone(file_item.content_range)
        parser = email.parser.FeedParser()
        parser.feed("Content-Type: %s\r\n\r\n" % content_type)
        parser.feed(fetched)
        root_message = parser.close()
        self.assertTrue(root_message.is_multipart())
        byteranges = root_message.get_payload()
        self.assertEqual(len(byteranges), 2)
        self.assertEqual(byteranges[0]['Content-Type'],
                         "lovecraft/rugose; squamous=true")
        self.assertEqual(
            byteranges[0]['Content-Range'],
            "bytes %d-%d/%d" % (0, subrange_size - 1, file_length))
        self.assertEqual(byteranges[0].get_payload(), data[:subrange_size])
        self.assertEqual(byteranges[1]['Content-Type'],
                         "lovecraft/rugose; squamous=true")
        self.assertEqual(
            byteranges[1]['Content-Range'],
            "bytes %d-%d/%d" % (2 * subrange_size, 3 * subrange_size - 1,
                                file_length))
        self.assertEqual(
            byteranges[1].get_payload(),
            data[(2 * subrange_size):(3 * subrange_size)])
        # The first range is satisfiable but the second is not; the
        # result is either a multipart/byteranges response containing one
        # byterange or a normal, non-MIME 206 response.
        range_string = 'bytes=%d-%d,%d-%d' % (
            0, subrange_size - 1,
            file_length, file_length + subrange_size - 1)
        hdrs = {'Range': range_string}
        fetched = file_item.read(hdrs=hdrs)
        self.assert_status(206)
        content_type = file_item.content_type
        if content_type.startswith("multipart/byteranges"):
            self.assertIsNone(file_item.content_range)
            parser = email.parser.FeedParser()
            parser.feed("Content-Type: %s\r\n\r\n" % content_type)
            parser.feed(fetched)
            root_message = parser.close()
            self.assertTrue(root_message.is_multipart())
            byteranges = root_message.get_payload()
            self.assertEqual(len(byteranges), 1)
            self.assertEqual(byteranges[0]['Content-Type'],
                             "lovecraft/rugose; squamous=true")
            self.assertEqual(
                byteranges[0]['Content-Range'],
                "bytes %d-%d/%d" % (0, subrange_size - 1, file_length))
            self.assertEqual(byteranges[0].get_payload(), data[:subrange_size])
        else:
            self.assertEqual(
                file_item.content_range,
                "bytes %d-%d/%d" % (0, subrange_size - 1, file_length))
            self.assertEqual(content_type, "lovecraft/rugose; squamous=true")
            self.assertEqual(fetched, data[:subrange_size])
        # No byterange is satisfiable, so we get a 416 response.
        range_string = 'bytes=%d-%d,%d-%d' % (
            file_length, file_length + 2,
            file_length + 100, file_length + 102)
        hdrs = {'Range': range_string}
        self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
        self.assert_status(416)
        self.assert_header('content-range', 'bytes */%d' % file_length)
def testRangedGetsWithLWSinHeader(self):
file_length = 10000
file_item = self.env.container.file(Utils.create_name())
data = file_item.write_random(file_length)
for r in ('BYTES=0-999', 'bytes = 0-999', 'BYTES = 0 - 999',
'bytes = 0 - 999', 'bytes=0 - 999', 'bytes=0-999 '):
self.assertEqual(file_item.read(hdrs={'Range': r}), data[0:1000])
    def testFileSizeLimit(self):
        """PUTs whose Content-Length exceeds max_file_size are rejected
        promptly; within-limit claims (with no body sent) leave the server
        waiting, so the client-side timeout firing is the 'success' case."""
        limit = load_constraint('max_file_size')
        tsecs = 3
        def timeout(seconds, method, *args, **kwargs):
            # True iff *method* did not return within *seconds*
            try:
                with eventlet.Timeout(seconds):
                    method(*args, **kwargs)
            except eventlet.Timeout:
                return True
            else:
                return False
        # This loop will result in fallocate calls for 4x the limit
        # (minus 111 bytes). With fallocate turned on in the object servers,
        # this may fail if you don't have 4x the limit available on your
        # data drives.
        # Note that this test does not actually send any data to the system.
        # All it does is ensure that a response (success or failure) comes
        # back within 3 seconds. For the successful tests (size smaller
        # than limit), the cluster will log a 499.
        for i in (limit - 100, limit - 10, limit - 1, limit, limit + 1,
                  limit + 10, limit + 100):
            file_item = self.env.container.file(Utils.create_name())
            if i <= limit:
                self.assertTrue(timeout(tsecs, file_item.write,
                                        cfg={'set_content_length': i}))
            else:
                self.assertRaises(ResponseError, timeout, tsecs,
                                  file_item.write,
                                  cfg={'set_content_length': i})
def testNoContentLengthForPut(self):
file_item = self.env.container.file(Utils.create_name())
self.assertRaises(ResponseError, file_item.write, 'testing',
cfg={'no_content_length': True})
self.assert_status(411)
def testDelete(self):
file_item = self.env.container.file(Utils.create_name())
file_item.write_random(self.env.file_size)
self.assertIn(file_item.name, self.env.container.files())
self.assertTrue(file_item.delete())
self.assertNotIn(file_item.name, self.env.container.files())
    def testBadHeaders(self):
        """Malformed request headers: non-numeric Content-Length, missing
        Content-Length, unsupported transfer-encoding, unknown verbs, and
        a Range header with a bogus unit."""
        file_length = 100
        # no content type on puts should be ok
        file_item = self.env.container.file(Utils.create_name())
        file_item.write_random(file_length, cfg={'no_content_type': True})
        self.assert_status(201)
        # content length x
        self.assertRaises(ResponseError, file_item.write_random, file_length,
                          hdrs={'Content-Length': 'X'},
                          cfg={'no_content_length': True})
        self.assert_status(400)
        # no content-length
        self.assertRaises(ResponseError, file_item.write_random, file_length,
                          cfg={'no_content_length': True})
        self.assert_status(411)
        # unsupported transfer-encoding value: 501 Not Implemented
        self.assertRaises(ResponseError, file_item.write_random, file_length,
                          hdrs={'transfer-encoding': 'gzip,chunked'},
                          cfg={'no_content_length': True})
        self.assert_status(501)
        # bad request types
        # for req in ('LICK', 'GETorHEAD_base', 'container_info',
        #             'best_response'):
        for req in ('LICK', 'GETorHEAD_base'):
            self.env.account.conn.make_request(req)
            self.assert_status(405)
        # bad range headers
        # 'parsecs' is not a valid range unit, so the header is ignored
        # and the whole object comes back with a 200
        self.assertEqual(
            len(file_item.read(hdrs={'Range': 'parsecs=8-12'})),
            file_length)
        self.assert_status(200)
    def testMetadataLengthLimits(self):
        """Metadata keys/values up to max_meta_name_length /
        max_meta_value_length are accepted; anything longer 400s on both
        PUT and POST."""
        key_limit = load_constraint('max_meta_name_length')
        value_limit = load_constraint('max_meta_value_length')
        # pairs of (key length, value length) straddling the limits
        lengths = [[key_limit, value_limit], [key_limit, value_limit + 1],
                   [key_limit + 1, value_limit], [key_limit, 0],
                   [key_limit, value_limit * 10],
                   [key_limit * 10, value_limit]]
        for l in lengths:
            metadata = {'a' * l[0]: 'b' * l[1]}
            file_item = self.env.container.file(Utils.create_name())
            file_item.metadata = metadata
            if l[0] <= key_limit and l[1] <= value_limit:
                self.assertTrue(file_item.write())
                self.assert_status(201)
                self.assertTrue(file_item.sync_metadata())
            else:
                self.assertRaises(ResponseError, file_item.write)
                self.assert_status(400)
                # a clean PUT then an oversized POST must also 400
                file_item.metadata = {}
                self.assertTrue(file_item.write())
                self.assert_status(201)
                file_item.metadata = metadata
                self.assertRaises(ResponseError, file_item.sync_metadata)
                self.assert_status(400)
def testEtagWayoff(self):
file_item = self.env.container.file(Utils.create_name())
hdrs = {'etag': 'reallylonganddefinitelynotavalidetagvalue'}
self.assertRaises(ResponseError, file_item.write_random, hdrs=hdrs)
self.assert_status(422)
def testFileCreate(self):
for i in range(10):
file_item = self.env.container.file(Utils.create_name())
data = file_item.write_random()
self.assert_status(201)
self.assertEqual(data, file_item.read())
self.assert_status(200)
def testHead(self):
file_name = Utils.create_name()
content_type = Utils.create_name()
file_item = self.env.container.file(file_name)
file_item.content_type = content_type
file_item.write_random(self.env.file_size)
md5 = file_item.md5
file_item = self.env.container.file(file_name)
info = file_item.info()
self.assert_status(200)
self.assertEqual(info['content_length'], self.env.file_size)
self.assertEqual(info['etag'], md5)
self.assertEqual(info['content_type'], content_type)
self.assertIn('last_modified', info)
def testDeleteOfFileThatDoesNotExist(self):
# in container that exists
file_item = self.env.container.file(Utils.create_name())
self.assertRaises(ResponseError, file_item.delete)
self.assert_status(404)
# in container that does not exist
container = self.env.account.container(Utils.create_name())
file_item = container.file(Utils.create_name())
self.assertRaises(ResponseError, file_item.delete)
self.assert_status(404)
def testHeadOnFileThatDoesNotExist(self):
# in container that exists
file_item = self.env.container.file(Utils.create_name())
self.assertRaises(ResponseError, file_item.info)
self.assert_status(404)
# in container that does not exist
container = self.env.account.container(Utils.create_name())
file_item = container.file(Utils.create_name())
self.assertRaises(ResponseError, file_item.info)
self.assert_status(404)
    def testMetadataOnPost(self):
        """Metadata sent via POST replaces the object's metadata and is
        visible on a subsequent read."""
        file_item = self.env.container.file(Utils.create_name())
        file_item.write_random(self.env.file_size)
        for i in range(10):
            metadata = {}
            for j in range(10):
                metadata[Utils.create_ascii_name()] = Utils.create_name()
            file_item.metadata = metadata
            self.assertTrue(file_item.sync_metadata())
            self.assert_status((201, 202))
            # re-read through a fresh handle so we get server state
            file_item = self.env.container.file(file_item.name)
            self.assertTrue(file_item.initialize())
            self.assert_status(200)
            self.assertEqual(file_item.metadata, metadata)
def testGetContentType(self):
file_name = Utils.create_name()
content_type = Utils.create_name()
file_item = self.env.container.file(file_name)
file_item.content_type = content_type
file_item.write_random()
file_item = self.env.container.file(file_name)
file_item.read()
self.assertEqual(content_type, file_item.content_type)
def testGetOnFileThatDoesNotExist(self):
# in container that exists
file_item = self.env.container.file(Utils.create_name())
self.assertRaises(ResponseError, file_item.read)
self.assert_status(404)
# in container that does not exist
container = self.env.account.container(Utils.create_name())
file_item = container.file(Utils.create_name())
self.assertRaises(ResponseError, file_item.read)
self.assert_status(404)
def testPostOnFileThatDoesNotExist(self):
# in container that exists
file_item = self.env.container.file(Utils.create_name())
file_item.metadata['Field'] = 'Value'
self.assertRaises(ResponseError, file_item.sync_metadata)
self.assert_status(404)
# in container that does not exist
container = self.env.account.container(Utils.create_name())
file_item = container.file(Utils.create_name())
file_item.metadata['Field'] = 'Value'
self.assertRaises(ResponseError, file_item.sync_metadata)
self.assert_status(404)
    def testMetadataOnPut(self):
        """Metadata supplied at PUT time is stored with the object and
        returned on a subsequent read."""
        for i in range(10):
            metadata = {}
            for j in range(10):
                metadata[Utils.create_ascii_name()] = Utils.create_name()
            file_item = self.env.container.file(Utils.create_name())
            file_item.metadata = metadata
            file_item.write_random(self.env.file_size)
            # re-read through a fresh handle so we get server state
            file_item = self.env.container.file(file_item.name)
            self.assertTrue(file_item.initialize())
            self.assert_status(200)
            self.assertEqual(file_item.metadata, metadata)
    def testSerialization(self):
        """json and xml container listings agree with each object's actual
        name, content type, size and last-modified time, and carry the
        right listing Content-Type themselves."""
        container = self.env.account.container(Utils.create_name())
        self.assertTrue(container.create())
        files = []
        for i in (0, 1, 10, 100, 1000, 10000):
            files.append({'name': Utils.create_name(),
                          'content_type': Utils.create_name(), 'bytes': i})
        write_time = time.time()
        for f in files:
            file_item = container.file(f['name'])
            file_item.content_type = f['content_type']
            file_item.write_random(f['bytes'])
            f['hash'] = file_item.md5
            # flags flipped to True once the file shows up in each listing
            f['json'] = False
            f['xml'] = False
        write_time = time.time() - write_time
        for format_type in ['json', 'xml']:
            for file_item in container.files(parms={'format': format_type}):
                found = False
                for f in files:
                    if f['name'] != file_item['name']:
                        continue
                    self.assertEqual(file_item['content_type'],
                                     f['content_type'])
                    self.assertEqual(int(file_item['bytes']), f['bytes'])
                    # listings report fractional-second ISO timestamps;
                    # compare at whole-second resolution
                    d = datetime.strptime(
                        file_item['last_modified'].split('.')[0],
                        "%Y-%m-%dT%H:%M:%S")
                    lm = time.mktime(d.timetuple())
                    if 'last_modified' in f:
                        self.assertEqual(f['last_modified'], lm)
                    else:
                        f['last_modified'] = lm
                    f[format_type] = True
                    found = True
                self.assertTrue(
                    found, 'Unexpected file %s found in '
                    '%s listing' % (file_item['name'], format_type))
            headers = dict(self.env.conn.response.getheaders())
            if format_type == 'json':
                self.assertEqual(headers['content-type'],
                                 'application/json; charset=utf-8')
            elif format_type == 'xml':
                self.assertEqual(headers['content-type'],
                                 'application/xml; charset=utf-8')
        lm_diff = max([f['last_modified'] for f in files]) -\
            min([f['last_modified'] for f in files])
        self.assertLess(lm_diff, write_time + 1,
                        'Diff in last modified times '
                        'should be less than time to write files')
        for f in files:
            for format_type in ['json', 'xml']:
                self.assertTrue(
                    f[format_type], 'File %s not found in %s listing'
                    % (f['name'], format_type))
def testStackedOverwrite(self):
file_item = self.env.container.file(Utils.create_name())
for i in range(1, 11):
data = file_item.write_random(512)
file_item.write(data)
self.assertEqual(file_item.read(), data)
def testTooLongName(self):
file_item = self.env.container.file('x' * 1025)
self.assertRaises(ResponseError, file_item.write)
self.assert_status(400)
def testZeroByteFile(self):
file_item = self.env.container.file(Utils.create_name())
self.assertTrue(file_item.write(''))
self.assertIn(file_item.name, self.env.container.files())
self.assertEqual(file_item.read(), '')
    def testEtagResponse(self):
        """The Etag response header matches the MD5 of the uploaded body."""
        file_item = self.env.container.file(Utils.create_name())
        data = six.StringIO(file_item.write_random(512))
        etag = File.compute_md5sum(data)
        headers = dict(self.env.conn.response.getheaders())
        self.assertIn('etag', headers.keys())
        # servers may quote the etag; strip quotes before comparing
        header_etag = headers['etag'].strip('"')
        self.assertEqual(etag, header_etag)
def testChunkedPut(self):
if (tf.web_front_end == 'apache2'):
raise SkipTest("Chunked PUT can only be tested with apache2 web"
" front end")
def chunks(s, length=3):
i, j = 0, length
while i < len(s):
yield s[i:j]
i, j = j, j + length
data = File.random_data(10000)
etag = File.compute_md5sum(data)
for i in (1, 10, 100, 1000):
file_item = self.env.container.file(Utils.create_name())
for j in chunks(data, i):
file_item.chunked_write(j)
self.assertTrue(file_item.chunked_write())
self.assertEqual(data, file_item.read())
info = file_item.info()
self.assertEqual(etag, info['etag'])
    def test_POST(self):
        """POST updates content-type and user metadata without touching
        the body, and the container listing reflects the change."""
        # verify consistency between object and container listing metadata
        file_name = Utils.create_name()
        file_item = self.env.container.file(file_name)
        file_item.content_type = 'text/foobar'
        file_item.write_random(1024)
        # sanity check
        file_item = self.env.container.file(file_name)
        file_item.initialize()
        self.assertEqual('text/foobar', file_item.content_type)
        self.assertEqual(1024, file_item.size)
        etag = file_item.etag
        # check container listing is consistent
        listing = self.env.container.files(parms={'format': 'json'})
        for f_dict in listing:
            if f_dict['name'] == file_name:
                break
        else:
            self.fail('Failed to find file %r in listing' % file_name)
        self.assertEqual(1024, f_dict['bytes'])
        self.assertEqual('text/foobar', f_dict['content_type'])
        self.assertEqual(etag, f_dict['hash'])
        # now POST updated content-type to each file
        file_item = self.env.container.file(file_name)
        file_item.content_type = 'image/foobarbaz'
        file_item.sync_metadata({'Test': 'blah'})
        # sanity check object metadata
        file_item = self.env.container.file(file_name)
        file_item.initialize()
        self.assertEqual(1024, file_item.size)
        self.assertEqual('image/foobarbaz', file_item.content_type)
        self.assertEqual(etag, file_item.etag)
        self.assertIn('test', file_item.metadata)
        # check for consistency between object and container listing
        listing = self.env.container.files(parms={'format': 'json'})
        for f_dict in listing:
            if f_dict['name'] == file_name:
                break
        else:
            self.fail('Failed to find file %r in listing' % file_name)
        self.assertEqual(1024, f_dict['bytes'])
        self.assertEqual('image/foobarbaz', f_dict['content_type'])
        self.assertEqual(etag, f_dict['hash'])
class TestFileUTF8(Base2, TestFile):
    """Repeat the TestFile suite under the Base2 variant (presumably
    switching to non-ASCII / UTF-8 names -- confirm against Base2)."""
    pass
class TestFileComparisonEnv(BaseEnv):
    """Fixture for conditional-request tests: a container of random
    objects, plus timestamps one day in the past (in the three HTTP-date
    formats of RFC 2616 3.3.1) and one day in the future."""
    @classmethod
    def setUp(cls):
        super(TestFileComparisonEnv, cls).setUp()
        cls.container = cls.account.container(Utils.create_name())
        if not cls.container.create():
            raise ResponseError(cls.conn.response)
        cls.file_count = 20
        cls.file_size = 128
        cls.files = list()
        for x in range(cls.file_count):
            file_item = cls.container.file(Utils.create_name())
            file_item.write_random(cls.file_size)
            cls.files.append(file_item)
        # yesterday as RFC 1123, RFC 850 and asctime dates respectively
        cls.time_old_f1 = time.strftime("%a, %d %b %Y %H:%M:%S GMT",
                                        time.gmtime(time.time() - 86400))
        cls.time_old_f2 = time.strftime("%A, %d-%b-%y %H:%M:%S GMT",
                                        time.gmtime(time.time() - 86400))
        cls.time_old_f3 = time.strftime("%a %b %d %H:%M:%S %Y",
                                        time.gmtime(time.time() - 86400))
        # tomorrow, RFC 1123
        cls.time_new = time.strftime("%a, %d %b %Y %H:%M:%S GMT",
                                     time.gmtime(time.time() + 86400))
class TestFileComparison(Base):
    """Conditional GET/HEAD tests: If-Match, If-None-Match,
    If-Modified-Since and If-Unmodified-Since, using the fixture files
    and canned dates from TestFileComparisonEnv."""
    env = TestFileComparisonEnv
    def testIfMatch(self):
        """GET with a matching If-Match succeeds; a bogus etag gets 412."""
        for file_item in self.env.files:
            hdrs = {'If-Match': file_item.md5}
            self.assertTrue(file_item.read(hdrs=hdrs))
            hdrs = {'If-Match': 'bogus'}
            self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
            self.assert_status(412)
            self.assert_header('etag', file_item.md5)
    def testIfMatchMultipleEtags(self):
        """If-Match with an etag list succeeds when any entry matches."""
        for file_item in self.env.files:
            hdrs = {'If-Match': '"bogus1", "%s", "bogus2"' % file_item.md5}
            self.assertTrue(file_item.read(hdrs=hdrs))
            hdrs = {'If-Match': '"bogus1", "bogus2", "bogus3"'}
            self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
            self.assert_status(412)
            self.assert_header('etag', file_item.md5)
    def testIfNoneMatch(self):
        """If-None-Match returns 304 when the etag does match."""
        for file_item in self.env.files:
            hdrs = {'If-None-Match': 'bogus'}
            self.assertTrue(file_item.read(hdrs=hdrs))
            hdrs = {'If-None-Match': file_item.md5}
            self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
            self.assert_status(304)
            self.assert_header('etag', file_item.md5)
            self.assert_header('accept-ranges', 'bytes')
    def testIfNoneMatchMultipleEtags(self):
        """If-None-Match returns 304 when any etag in the list matches."""
        for file_item in self.env.files:
            hdrs = {'If-None-Match': '"bogus1", "bogus2", "bogus3"'}
            self.assertTrue(file_item.read(hdrs=hdrs))
            hdrs = {'If-None-Match':
                    '"bogus1", "bogus2", "%s"' % file_item.md5}
            self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
            self.assert_status(304)
            self.assert_header('etag', file_item.md5)
            self.assert_header('accept-ranges', 'bytes')
    def testIfModifiedSince(self):
        """If-Modified-Since: a past date serves the object, a future
        date 304s (both GET and HEAD)."""
        for file_item in self.env.files:
            hdrs = {'If-Modified-Since': self.env.time_old_f1}
            self.assertTrue(file_item.read(hdrs=hdrs))
            self.assertTrue(file_item.info(hdrs=hdrs))
            hdrs = {'If-Modified-Since': self.env.time_new}
            self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
            self.assert_status(304)
            self.assert_header('etag', file_item.md5)
            self.assert_header('accept-ranges', 'bytes')
            self.assertRaises(ResponseError, file_item.info, hdrs=hdrs)
            self.assert_status(304)
            self.assert_header('etag', file_item.md5)
            self.assert_header('accept-ranges', 'bytes')
    def testIfUnmodifiedSince(self):
        """If-Unmodified-Since: a future date serves the object, a past
        date (RFC 850 format here) 412s (both GET and HEAD)."""
        for file_item in self.env.files:
            hdrs = {'If-Unmodified-Since': self.env.time_new}
            self.assertTrue(file_item.read(hdrs=hdrs))
            self.assertTrue(file_item.info(hdrs=hdrs))
            hdrs = {'If-Unmodified-Since': self.env.time_old_f2}
            self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
            self.assert_status(412)
            self.assert_header('etag', file_item.md5)
            self.assertRaises(ResponseError, file_item.info, hdrs=hdrs)
            self.assert_status(412)
            self.assert_header('etag', file_item.md5)
    def testIfMatchAndUnmodified(self):
        """Combined If-Match + If-Unmodified-Since: both must hold for
        the GET to succeed."""
        for file_item in self.env.files:
            hdrs = {'If-Match': file_item.md5,
                    'If-Unmodified-Since': self.env.time_new}
            self.assertTrue(file_item.read(hdrs=hdrs))
            hdrs = {'If-Match': 'bogus',
                    'If-Unmodified-Since': self.env.time_new}
            self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
            self.assert_status(412)
            self.assert_header('etag', file_item.md5)
            hdrs = {'If-Match': file_item.md5,
                    'If-Unmodified-Since': self.env.time_old_f3}
            self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
            self.assert_status(412)
            self.assert_header('etag', file_item.md5)
    def testLastModified(self):
        """The Last-Modified from PUT round-trips through HEAD and drives
        If-Modified-Since / If-Unmodified-Since as expected."""
        file_name = Utils.create_name()
        content_type = Utils.create_name()
        file_item = self.env.container.file(file_name)
        file_item.content_type = content_type
        resp = file_item.write_random_return_resp(self.env.file_size)
        put_last_modified = resp.getheader('last-modified')
        etag = file_item.md5
        file_item = self.env.container.file(file_name)
        info = file_item.info()
        self.assertIn('last_modified', info)
        last_modified = info['last_modified']
        self.assertEqual(put_last_modified, info['last_modified'])
        hdrs = {'If-Modified-Since': last_modified}
        self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
        self.assert_status(304)
        self.assert_header('etag', etag)
        self.assert_header('accept-ranges', 'bytes')
        hdrs = {'If-Unmodified-Since': last_modified}
        self.assertTrue(file_item.read(hdrs=hdrs))
class TestFileComparisonUTF8(Base2, TestFileComparison):
    """Repeat the TestFileComparison suite under the Base2 variant
    (presumably non-ASCII / UTF-8 names -- confirm against Base2)."""
    pass
class TestServiceToken(unittest2.TestCase):
    def setUp(self):
        """Skip unless service tokens are configured; work out whether
        this cluster lets a PUT create an account."""
        if tf.skip_service_tokens:
            raise SkipTest
        # markers used by prepare_request() to choose which token goes in
        # which header
        self.SET_TO_USERS_TOKEN = 1
        self.SET_TO_SERVICE_TOKEN = 2
        # keystoneauth and tempauth differ in allowing PUT account
        # Even if keystoneauth allows it, the proxy-server uses
        # allow_account_management to decide if accounts can be created
        self.put_account_expect = is_client_error
        if tf.swift_test_auth_version != '1':
            if cluster_info.get('swift').get('allow_account_management'):
                self.put_account_expect = is_success
def _scenario_generator(self):
paths = ((None, None), ('c', None), ('c', 'o'))
for path in paths:
for method in ('PUT', 'POST', 'HEAD', 'GET', 'OPTIONS'):
yield method, path[0], path[1]
for path in reversed(paths):
yield 'DELETE', path[0], path[1]
def _assert_is_authed_response(self, method, container, object, resp):
resp.read()
expect = is_success
if method == 'DELETE' and not container:
expect = is_client_error
if method == 'PUT' and not container:
expect = self.put_account_expect
self.assertTrue(expect(resp.status), 'Unexpected %s for %s %s %s'
% (resp.status, method, container, object))
def _assert_not_authed_response(self, method, container, object, resp):
resp.read()
expect = is_client_error
if method == 'OPTIONS':
expect = is_success
self.assertTrue(expect(resp.status), 'Unexpected %s for %s %s %s'
% (resp.status, method, container, object))
def prepare_request(self, method, use_service_account=False,
container=None, obj=None, body=None, headers=None,
x_auth_token=None,
x_service_token=None, dbg=False):
"""
Setup for making the request
When retry() calls the do_request() function, it calls it the
test user's token, the parsed path, a connection and (optionally)
a token from the test service user. We save options here so that
do_request() can make the appropriate request.
:param method: The operation (e.g. 'HEAD')
:param use_service_account: Optional. Set True to change the path to
be the service account
:param container: Optional. Adds a container name to the path
:param obj: Optional. Adds an object name to the path
:param body: Optional. Adds a body (string) in the request
:param headers: Optional. Adds additional headers.
:param x_auth_token: Optional. Default is SET_TO_USERS_TOKEN. One of:
SET_TO_USERS_TOKEN Put the test user's token in
X-Auth-Token
SET_TO_SERVICE_TOKEN Put the service token in X-Auth-Token
:param x_service_token: Optional. Default is to not set X-Service-Token
to any value. If specified, is one of following:
SET_TO_USERS_TOKEN Put the test user's token in
X-Service-Token
SET_TO_SERVICE_TOKEN Put the service token in
X-Service-Token
:param dbg: Optional. Set true to check request arguments
"""
self.method = method
self.use_service_account = use_service_account
self.container = container
self.obj = obj
self.body = body
self.headers = headers
if x_auth_token:
self.x_auth_token = x_auth_token
else:
self.x_auth_token = self.SET_TO_USERS_TOKEN
self.x_service_token = x_service_token
self.dbg = dbg
def do_request(self, url, token, parsed, conn, service_token=''):
if self.use_service_account:
path = self._service_account(parsed.path)
else:
path = parsed.path
if self.container:
path += '/%s' % self.container
if self.obj:
path += '/%s' % self.obj
headers = {}
if self.body:
headers.update({'Content-Length': len(self.body)})
if self.x_auth_token == self.SET_TO_USERS_TOKEN:
headers.update({'X-Auth-Token': token})
elif self.x_auth_token == self.SET_TO_SERVICE_TOKEN:
headers.update({'X-Auth-Token': service_token})
if self.x_service_token == self.SET_TO_USERS_TOKEN:
headers.update({'X-Service-Token': token})
elif self.x_service_token == self.SET_TO_SERVICE_TOKEN:
headers.update({'X-Service-Token': service_token})
if self.dbg:
print('DEBUG: conn.request: method:%s path:%s'
' body:%s headers:%s' % (self.method, path, self.body,
headers))
conn.request(self.method, path, self.body, headers=headers)
return check_response(conn)
def _service_account(self, path):
parts = path.split('/', 3)
account = parts[2]
try:
project_id = account[account.index('_') + 1:]
except ValueError:
project_id = account
parts[2] = '%s%s' % (tf.swift_test_service_prefix, project_id)
return '/'.join(parts)
def test_user_access_own_auth_account(self):
# This covers ground tested elsewhere (tests a user doing HEAD
# on own account). However, if this fails, none of the remaining
# tests will work
self.prepare_request('HEAD')
resp = retry(self.do_request)
resp.read()
self.assertIn(resp.status, (200, 204))
def test_user_cannot_access_service_account(self):
for method, container, obj in self._scenario_generator():
self.prepare_request(method, use_service_account=True,
container=container, obj=obj)
resp = retry(self.do_request)
self._assert_not_authed_response(method, container, obj, resp)
def test_service_user_denied_with_x_auth_token(self):
for method, container, obj in self._scenario_generator():
self.prepare_request(method, use_service_account=True,
container=container, obj=obj,
x_auth_token=self.SET_TO_SERVICE_TOKEN)
resp = retry(self.do_request, service_user=5)
self._assert_not_authed_response(method, container, obj, resp)
def test_service_user_denied_with_x_service_token(self):
for method, container, obj in self._scenario_generator():
self.prepare_request(method, use_service_account=True,
container=container, obj=obj,
x_auth_token=self.SET_TO_SERVICE_TOKEN,
x_service_token=self.SET_TO_SERVICE_TOKEN)
resp = retry(self.do_request, service_user=5)
self._assert_not_authed_response(method, container, obj, resp)
def test_user_plus_service_can_access_service_account(self):
for method, container, obj in self._scenario_generator():
self.prepare_request(method, use_service_account=True,
container=container, obj=obj,
x_auth_token=self.SET_TO_USERS_TOKEN,
x_service_token=self.SET_TO_SERVICE_TOKEN)
resp = retry(self.do_request, service_user=5)
self._assert_is_authed_response(method, container, obj, resp)
if __name__ == '__main__':
    # Allow running this functional test module directly.
    unittest2.main()
| {
"content_hash": "0a87ca9db8f3ef90771b2a0b89410ec7",
"timestamp": "",
"source": "github",
"line_count": 2866,
"max_line_length": 79,
"avg_line_length": 40.76587578506629,
"alnum_prop": 0.55547567081782,
"repo_name": "clayg/swift",
"id": "4af65418dda651da934d1115f9537ad4dd3e14a9",
"size": "117451",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "test/functional/tests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "248"
},
{
"name": "PHP",
"bytes": "377"
},
{
"name": "Python",
"bytes": "8555598"
},
{
"name": "Shell",
"bytes": "1804"
}
],
"symlink_target": ""
} |
print """
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
!! !!
!! Flatten files of current folder, recursively. !!
!! .eg. moves all files under all sub-folders to !!
!! current folder. !!
!! !!
!! NOTICE! this might MESS UP your files! !!
!! !!
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
"""
secret = raw_input("Enter 123abc to continue:")
if secret != "123abc":
print "you Entered " + secret, ", WRONG answer!"
print "Aborted!!!"
import sys
sys.exit()
import os
from os.path import join, getsize
for root, dirs, files in os.walk('.'):
if root == ".": continue
for file in files:
cmd = 'mv "' + join(root,file) + '" "' + file + '"'
print cmd
os.system(cmd)
for dir in os.listdir('.'):
if not os.path.isdir(dir): continue
cmd = "rm -rf " + dir
print cmd
os.system(cmd)
| {
"content_hash": "cbb44e4c0de617edef756fc9c0e4cef5",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 59,
"avg_line_length": 32.029411764705884,
"alnum_prop": 0.3948576675849403,
"repo_name": "realfun/handyscripts",
"id": "1972b80604939f620156f7f1a4fbf74f258562b1",
"size": "1137",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "flat.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "5404"
},
{
"name": "Shell",
"bytes": "343"
}
],
"symlink_target": ""
} |
import nltk
import re
from nltk.stem.snowball import SnowballStemmer
from nltk.corpus import stopwords
def identificaTitulo(cedula, tituloAnterior):
    """Return cedula[0] if it looks like a chapter title, else tituloAnterior.

    A token counts as a title when, after dropping its first character, the
    remainder is entirely upper-case and longer than two characters (the
    shortest chapter name is "JON", so this keeps stray garbage out).
    """
    candidate = cedula[0][1:]
    if candidate == candidate.upper() and len(candidate) > 2:
        return cedula[0]
    return tituloAnterior
def melhoraListaFrases(listaFrasesRuim):
    """Split every sentence on newlines and drop the empty fragments."""
    return [segment
            for sentence in listaFrasesRuim
            for segment in sentence.split('\n')
            if segment != '']
def criaListaTags(listaFrasesBoa):
    """POS-tag every token of the text and return mutable [word, tag] pairs.

    Each sentence is tokenized individually, the tokens are flattened into
    one stream, and NLTK's tagger labels them.  The (word, tag) tuples are
    converted to [str, str] lists so callers can rewrite the tags in place.
    """
    all_tokens = [token
                  for sentence in listaFrasesBoa
                  for token in nltk.word_tokenize(sentence)]
    tagged = nltk.pos_tag(all_tokens)
    return [[str(word), str(tag)] for word, tag in tagged]
def GeraVetorDaRNSP(matrizFull, nomeDoArquivoTre, nomeDoArquivoTes):
    """Binarize the sentence matrix into RNSP (WiSARD) input vectors.

    Each sentence becomes a 0/1 vector over the vocabulary of unique
    entries (stemmed words or tags) found in matrizFull.  Currently only
    the first 90% of the rows (the training split) are printed to stdout;
    writing the train/test files named by nomeDoArquivoTre/nomeDoArquivoTes
    is still commented out below, so those two parameters are presently
    unused.
    """
    listaPalavrasUnicas=[]
    matrizDeEntrada=[]
    # Build the vocabulary of the text (using the stemmed/primitive forms).
    for frase in matrizFull:
        for palavra in frase:
            if palavra not in listaPalavrasUnicas:
                listaPalavrasUnicas.append(palavra)
    #print listaPalavrasUnicas
    # Build the binary RNSP input matrix: one row per sentence, one column
    # per vocabulary entry.
    for frase in matrizFull:
        palavrasNaFrase=[0]*len(listaPalavrasUnicas)
        for palavra in frase:
            index=listaPalavrasUnicas.index(palavra)
            palavrasNaFrase[index]=1
        matrizDeEntrada.append(palavrasNaFrase)
    ## DEBUG
    # for frase in matrizDeEntrada:
    #     print frase
    # TODO: decide the on-disk format before feeding the data to the C++
    # RNSP implementation.
    # 90% of the rows will be used for training, 10% for testing.
    qtdFrases=len(matrizDeEntrada)
    #print qtdFrases
    limite=int(0.9*qtdFrases)
    #print limite
    cont=0
    while cont<limite:
        print matrizDeEntrada[cont]
        cont+=1
    # # Write the binary training rows to file (labels file still missing).
    # treino=open(nomeDoArquivoTre,'w')
    # cont=0
    # while cont<limite:
    #     treino.write(matrizDeEntrada[cont])
    #     cont+=1
    # treino.close()
    #
    # # Write the binary test rows to file (labels file still missing).
    # teste=open(nomeDoArquivoTes,'w')
    # while cont<qtdFrases:
    #     teste.write(matrizDeEntrada[cont])
    #     cont+=1
    # teste.close()
def GeraLabels(capCompleto, frasesSelecionadas):
    """Build the supervision labels for the RNSP (WiSARD) classifier.

    :param capCompleto: iterable of all sentences of the chapter.
    :param frasesSelecionadas: container of the sentences picked for the
        reference summary.
    :return: list of '1'/'0' strings, one per sentence in capCompleto:
        '1' when the sentence belongs to the selected summary, '0' otherwise.

    Bug fix: the original computed the label vector and then discarded it
    (no return statement; the print was commented out), so calling it had
    no effect at all.  The vector is now returned; existing callers that
    ignore the result are unaffected.
    """
    return ['1' if frase in frasesSelecionadas else '0'
            for frase in capCompleto]
# Use NLTK to tokenize the text, remove stopwords and apply stemming.
############ MAIN ##############
# Open the input file.
#arquivo = open('base de dados concatenada.txt')
arquivoEntrada = open('dados/TodasAsFrases.txt')
# Read the whole file.
raw=arquivoEntrada.read()
# Load the pre-trained sentence segmenter.
sent_tokenizer=nltk.data.load("tokenizers/punkt/english.pickle")
# Split the text into sentences.
listaFrasesRuim = sent_tokenizer.tokenize(raw)
# Clean up the sentence list (split on newlines, drop empty fragments).
listaFrasesBoa = melhoraListaFrases(listaFrasesRuim)
# Build the list of [word, POS-tag] pairs.
listaTagsBoa=criaListaTags(listaFrasesBoa)
# Re-label some words with custom tags, following rules similar to the
# companion program.
prepLugarList=['above', 'across', 'after', 'against', 'along', 'among',
               'around', 'at', 'behind', 'below', 'beside', 'between', 'beyond the',
               'close to', 'down', 'from', 'in front of',
               'inside', 'in', 'into', 'near', 'next to', 'off', 'on',
               'onto', 'opposite', 'out of', 'outside', 'over',
               'past', 'round', 'through', 'to', 'towards', 'under', 'up']
familialist=['of the', 'from the']
blacklist=['in','on','at', 'beyond', 'as', '[',']', 'and', 'are', 'for', 'from','while','with',
           'under', 'to', 'does','into','is','again','angrily',
           'was','then','that','during','tells','takes','calls',
           'back','if','before','through','by','about','atop', 'until', 'since']
pessoaInicio = ['king', 'queen', 'prince', 'princess', 'lord', 'lady', 'ser', 'commander', 'Young']
cont=0
while cont < len(listaTagsBoa):
    if listaTagsBoa[cont][1]=='NNP' or listaTagsBoa[cont][1]=='NNPS':
        # Proper nouns are re-tagged as named entities.
        listaTagsBoa[cont][1]='ENOMEADA'
    # A ',' immediately before a closing quote ends a quoted speech: in the
    # PDF source, character speech ends with ',' instead of '.'.
    elif listaTagsBoa[cont][1]==',':
        if (cont < (len(listaTagsBoa)-1)) and (listaTagsBoa[cont+1][1]=="''"):  # the "''" token renders as a closing quote in the PDF format
            listaTagsBoa[cont][1]='PONTOFALA'
    cont+=1
for word in listaTagsBoa:
    if (word[0].lower() in blacklist) or (word[0].lower() in prepLugarList) or (word[0].lower() in familialist):
        word[1]='LNEGRA'
    if (word[0].lower() in pessoaInicio):
        word[1]='TITULO'
# Build the matrix of sentences.
fraseTemp = []
matrizDeFrases = []
cont=0
titulo=''
while cont < len(listaTagsBoa):
    titulo=identificaTitulo(listaTagsBoa[cont], titulo)
    # Collect tokens sentence by sentence (chapter-title tokens are skipped).
    if listaTagsBoa[cont][1] != '.' and listaTagsBoa[cont][0]!=titulo:
        fraseTemp.append(listaTagsBoa[cont])
    else:
        if listaTagsBoa[cont][0]!=titulo:
            fraseTemp.append(listaTagsBoa[cont])
        # Keep quoted speech together: consume the trailing closing quote so
        # a quotation is classified from its opening " to its closing ".
        if cont<(len(listaTagsBoa)-1) and listaTagsBoa[cont+1][1]=="''":
            cont+=1
            fraseTemp.append(listaTagsBoa[cont])
        # End of sentence: store it; later scoring decides whether the
        # sentence is a summary candidate.
        matrizDeFrases.append(fraseTemp)
        fraseTemp=[]
    cont+=1
# GENERATE THE LABELS FILE
frasesSelecionadas = open('dados/FrasesSelecionadas.txt').read()
vec=frasesSelecionadas.split('\n')
# # ONE-OFF SNIPPET USED TO REFORMAT THE ORIGINAL FILE
# oi=sent_tokenizer.tokenize(frasesSelecionadas)
# tokensPorFrase = []
# for frase in oi:
#     tokensPorFrase.append(nltk.word_tokenize(frase))
# for frase in tokensPorFrase:
#     for el in frase:
#         print el,
#     print
todasAsFrases = open('dados/TodasAsFrases.txt').read()
vecT=todasAsFrases.split('\n')
# The last element is empty (trailing newline), so remove it.
del vecT[-1]
GeraLabels(vecT,vec)
# Stemming and stopword removal.
stemmer = SnowballStemmer("english")
stop = set(stopwords.words('english'))
myStopList = [ '``', ',', "''", '.' ]
matrizDeFrasesFinalTag = []
matrizDeFrasesFinalPalavra = []
for frase in matrizDeFrases:
    # Tag sequence that will feed the RNSP input builder.
    fraseSemStopTag=[]
    # Word sequence that will feed the RNSP input builder.
    fraseSemStopPalavra=[]
    for elemento in frase:
        elemento[0]=stemmer.stem(elemento[0])
        if (elemento[0] not in stop) and (elemento[1] not in myStopList):
            fraseSemStopTag.append(elemento[1])
            fraseSemStopPalavra.append(elemento[0])
    # Matrix that will be binarized using tags.
    matrizDeFrasesFinalTag.append(fraseSemStopTag)
    # Matrix that will be binarized using words.
    matrizDeFrasesFinalPalavra.append(fraseSemStopPalavra)
### DEBUG ###
# for frase in matrizDeFrasesFinalPalavra:
#     for elemento in frase:
#         print elemento,
#     print
GeraVetorDaRNSP(matrizDeFrasesFinalPalavra, '/home/rgaio/Desktop/treino-palavras', '/home/rgaio/Desktop/teste-palavras')
GeraVetorDaRNSP(matrizDeFrasesFinalTag, '/home/rgaio/Desktop/treino-tags', '/home/rgaio/Desktop/teste-tags')
| {
"content_hash": "a9822a162d7176c4c1ced593be0d5c42",
"timestamp": "",
"source": "github",
"line_count": 247,
"max_line_length": 131,
"avg_line_length": 31.097165991902834,
"alnum_prop": 0.7212602525712798,
"repo_name": "jonnyguio/GOT-WiSARD",
"id": "80f6ae63c594301496e82247842003b875f22a0c",
"size": "7682",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "RNSP-got.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "10345"
},
{
"name": "Makefile",
"bytes": "1248"
},
{
"name": "Python",
"bytes": "8752"
},
{
"name": "Shell",
"bytes": "1782"
}
],
"symlink_target": ""
} |
from unittest.mock import patch
from django.test import TestCase
from waldur_core.core import utils as core_utils
from waldur_core.structure import models as structure_models
from waldur_core.structure.tests import factories as structure_factories
from waldur_openstack.openstack import apps
from .. import factories
@patch('waldur_core.core.tasks.BackendMethodTask.delay')
class SshKeysHandlersTest(TestCase):
    """SSH-key cleanup tasks must be scheduled for a tenant when the key's
    owner loses access to it, is deleted, or deletes the key itself."""

    def setUp(self):
        self.user = structure_factories.UserFactory()
        self.ssh_key = structure_factories.SshPublicKeyFactory(user=self.user)
        self.tenant = factories.TenantFactory()

    def _grant_admin_role(self):
        # Connect the test user to the tenant's project as administrator.
        project = self.tenant.project
        project.add_user(self.user, structure_models.ProjectRole.ADMINISTRATOR)
        return project

    def _assert_removal_scheduled(self, task_mock):
        # The backend task must be scheduled exactly once with the key data.
        task_mock.assert_called_once_with(
            core_utils.serialize_instance(self.tenant),
            'remove_ssh_key_from_tenant',
            self.ssh_key.name,
            self.ssh_key.fingerprint,
        )

    def test_ssh_key_will_be_removed_if_user_lost_connection_to_tenant(
        self, mocked_task_call
    ):
        project = self._grant_admin_role()
        project.remove_user(self.user)
        self._assert_removal_scheduled(mocked_task_call)

    def test_ssh_key_will_not_be_removed_if_user_still_has_connection_to_tenant(
        self, mocked_task_call
    ):
        # An owner role on the customer keeps the user connected, so no
        # cleanup task may be scheduled when the project role is dropped.
        project = self._grant_admin_role()
        project.customer.add_user(self.user, structure_models.CustomerRole.OWNER)
        project.remove_user(self.user)
        self.assertEqual(mocked_task_call.call_count, 0)

    def test_ssh_key_will_be_deleted_from_tenant_on_user_deletion(
        self, mocked_task_call
    ):
        self._grant_admin_role()
        self.user.delete()
        self._assert_removal_scheduled(mocked_task_call)

    def test_ssh_key_will_be_deleted_from_tenant_on_ssh_key_deletion(
        self, mocked_task_call
    ):
        self._grant_admin_role()
        self.ssh_key.delete()
        self._assert_removal_scheduled(mocked_task_call)
class LogTenantQuotaUpdateTest(TestCase):
    """Quota-limit changes on a tenant must emit an event-log record that
    reports the old and new limits (vcpu as integers, ram in GB)."""

    def _update_limit(self, quota_name, compute_new_limit):
        # Create a fresh tenant, change the named quota's limit (the new
        # value may depend on the old one) and return what assertions need.
        tenant = factories.TenantFactory()
        quota = tenant.quotas.get(name=quota_name)
        old_limit = quota.limit
        quota.limit = compute_new_limit(old_limit)
        quota.save()
        return tenant, quota, old_limit

    def _event_context(self, tenant, quota, old_limit):
        # Context payload the handler is expected to attach to the event.
        return {
            'quota': quota,
            'tenant': tenant,
            'limit': float(quota.limit),
            'old_limit': float(old_limit),
        }

    @patch('waldur_openstack.openstack.handlers.event_logger')
    def test_logger_called_on_quota_limit_update(self, logger_mock):
        tenant, quota, old_limit = self._update_limit('vcpu', lambda old: old + 1)
        logger_mock.openstack_tenant_quota.info.assert_called_once_with(
            '{quota_name} quota limit has been changed from %s to %s for tenant {tenant_name}.'
            % (int(old_limit), int(quota.limit)),
            event_type='openstack_tenant_quota_limit_updated',
            event_context=self._event_context(tenant, quota, old_limit),
        )

    @patch('waldur_openstack.openstack.handlers.event_logger')
    def test_vcpu_limit_quota_update_logged_as_integer(self, logger_mock):
        tenant, quota, old_limit = self._update_limit('vcpu', lambda old: 12.00)
        logger_mock.openstack_tenant_quota.info.assert_called_once_with(
            '{quota_name} quota limit has been changed from %s to 12 for tenant {tenant_name}.'
            % int(old_limit),
            event_type='openstack_tenant_quota_limit_updated',
            event_context=self._event_context(tenant, quota, old_limit),
        )

    @patch('waldur_openstack.openstack.handlers.event_logger')
    def test_ram_limit_quota_update_logged_with_units(self, logger_mock):
        tenant, quota, old_limit = self._update_limit('ram', lambda old: 63 * 1024)
        logger_mock.openstack_tenant_quota.info.assert_called_once_with(
            '{quota_name} quota limit has been changed from %s GB to 63 GB for tenant {tenant_name}.'
            % int(old_limit / 1024),
            event_type='openstack_tenant_quota_limit_updated',
            event_context=self._event_context(tenant, quota, old_limit),
        )
class UpdateServiceSettingsNameHandlerTest(TestCase):
    """Renaming a tenant must propagate to the service settings scoped to it."""

    def test_settings_name_is_update_when_tenant_is_renamed(self):
        tenant = factories.TenantFactory()
        settings = structure_factories.ServiceSettingsFactory(
            type=apps.OpenStackConfig.service_name,
            name=tenant.name,
            scope=tenant,
        )
        # Rename the tenant; a handler should rename the settings as well.
        tenant.name = 'new name'
        tenant.save()
        settings.refresh_from_db()
        self.assertEqual(settings.name, tenant.name)
| {
"content_hash": "f13f7c7b93fe03c0fa50087fa05e8fe4",
"timestamp": "",
"source": "github",
"line_count": 152,
"max_line_length": 101,
"avg_line_length": 36.57236842105263,
"alnum_prop": 0.6292498650836481,
"repo_name": "opennode/nodeconductor-assembly-waldur",
"id": "0a6d4096aabaa8858421384f7f46578d566f7c27",
"size": "5559",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "src/waldur_openstack/openstack/tests/unittests/test_handlers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1624"
},
{
"name": "Python",
"bytes": "412263"
},
{
"name": "Shell",
"bytes": "2031"
}
],
"symlink_target": ""
} |
from django.db import migrations, models
class Migration(migrations.Migration):
    """Relax Cluster name uniqueness: a cluster name must now only be
    unique within its site and within its group (per the unique_together
    constraints below), rather than across all clusters."""

    dependencies = [
        ('dcim', '0136_device_airflow'),
        ('virtualization', '0023_virtualmachine_natural_ordering'),
    ]
    operations = [
        # Redefine Cluster.name with no per-field constraint beyond length.
        migrations.AlterField(
            model_name='cluster',
            name='name',
            field=models.CharField(max_length=100),
        ),
        # Enforce per-site and per-group uniqueness instead.
        migrations.AlterUniqueTogether(
            name='cluster',
            unique_together={('site', 'name'), ('group', 'name')},
        ),
    ]
| {
"content_hash": "cb1e6b3d07b191630f0d0f8c1a0adbb8",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 67,
"avg_line_length": 25.904761904761905,
"alnum_prop": 0.5588235294117647,
"repo_name": "digitalocean/netbox",
"id": "5ff214d295880c06348c9c38ef2ea939af665231",
"size": "544",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "netbox/virtualization/migrations/0024_cluster_relax_uniqueness.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "189339"
},
{
"name": "HTML",
"bytes": "570800"
},
{
"name": "JavaScript",
"bytes": "326125"
},
{
"name": "Python",
"bytes": "1815170"
},
{
"name": "Shell",
"bytes": "2786"
}
],
"symlink_target": ""
} |
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Initial South migration for the snippet app: creates the Snippet
    and Referer tables."""

    def forwards(self, orm):
        """Create the snippet_snippet and snippet_referer tables."""
        # Adding model 'Snippet'
        db.create_table('snippet_snippet', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('code', self.gf('django.db.models.fields.TextField')()),
            ('description', self.gf('django.db.models.fields.TextField')()),
            ('slug', self.gf('django.db.models.fields.SlugField')(max_length=50, db_index=True)),
            ('lexer', self.gf('django.db.models.fields.TextField')()),
            ('key', self.gf('django.db.models.fields.TextField')()),
            ('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
            ('public', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('tags', self.gf('tagging.fields.TagField')(default='')),
        ))
        db.send_create_signal('snippet', ['Snippet'])
        # Adding model 'Referer'
        db.create_table('snippet_referer', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('url', self.gf('django.db.models.fields.URLField')(max_length=200)),
            ('snippet', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['snippet.Snippet'])),
        ))
        db.send_create_signal('snippet', ['Referer'])

    def backwards(self, orm):
        """Drop both tables (reverse of forwards)."""
        # Deleting model 'Snippet'
        db.delete_table('snippet_snippet')
        # Deleting model 'Referer'
        db.delete_table('snippet_referer')

    # Frozen ORM state used by South to build the `orm` object passed to
    # forwards()/backwards(); auto-generated — do not edit by hand.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'snippet.referer': {
            'Meta': {'object_name': 'Referer'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'snippet': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['snippet.Snippet']"}),
            'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
        },
        'snippet.snippet': {
            'Meta': {'object_name': 'Snippet'},
            'code': ('django.db.models.fields.TextField', [], {}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'key': ('django.db.models.fields.TextField', [], {}),
            'lexer': ('django.db.models.fields.TextField', [], {}),
            'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'}),
            'tags': ('tagging.fields.TagField', [], {'default': "''"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        }
    }
    complete_apps = ['snippet']
| {
"content_hash": "1f654b57fd0ac9fb973e57fab7427ac8",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 163,
"avg_line_length": 61.23762376237624,
"alnum_prop": 0.5535974130962005,
"repo_name": "nicksergeant/snipt-old",
"id": "8ae0b8232434a5d7e18e2a19c825a4af87933b91",
"size": "6203",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "snippet/migrations/0001_initial.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "40716"
},
{
"name": "Perl",
"bytes": "73"
},
{
"name": "Python",
"bytes": "126248"
},
{
"name": "Shell",
"bytes": "131"
}
],
"symlink_target": ""
} |
import unittest
from pbxproj.XcodeProject import *
class ProjectGroupsTest(unittest.TestCase):
    """Tests for group lookup, creation and removal in XcodeProject.

    The fixture below is a minimal pbxproj object graph: 'root' is the
    top-level group; '1'/'1p' are children of 'root'; '4'/'4p' are
    orphaned parents (not reachable from 'root'); the *p variants carry a
    'path' attribute; 'broken' references a child id ('broken2') that does
    not exist; 'a' -> 'b' -> 'c' is a group chain ending in a file
    reference.
    """
    def setUp(self):
        self.obj = {
            'objects': {
                'root': {'isa': 'PBXGroup', 'children': ['1', '1p']},
                '1': {'isa': 'PBXGroup', 'name': 'root', 'children': ['2', '3']},
                '2': {'isa': 'PBXGroup', 'name': 'app', 'children': []},
                '3': {'isa': 'PBXGroup', 'name': 'app', 'children': []},
                '4': {'isa': 'PBXGroup', 'name': 'root', 'children': ['5', '6']},
                '5': {'isa': 'PBXGroup', 'name': 'app', 'children': []},
                '6': {'isa': 'PBXGroup', 'name': 'app', 'children': []},
                '1p': {'isa': 'PBXGroup', 'name': 'root', 'children': ['2p', '3p']},
                '2p': {'isa': 'PBXGroup', 'name': 'app', 'path': '..', 'children': []},
                '3p': {'isa': 'PBXGroup', 'name': 'app', 'path': '..', 'children': []},
                '4p': {'isa': 'PBXGroup', 'name': 'root', 'children': ['5p', '6p']},
                '5p': {'isa': 'PBXGroup', 'name': 'app', 'path': '..', 'children': []},
                '6p': {'isa': 'PBXGroup', 'name': 'app', 'path': '..', 'children': []},
                'broken': {'isa': 'PBXGroup', 'name': 'broken', 'path': '..', 'children': ['broken1']},
                'broken1': {'isa': 'PBXGroup', 'name': 'b1', 'path': '..', 'children': ['broken2']},
                'a': {'isa': 'PBXGroup', 'name': 'app', 'path': '..', 'children': ['b']},
                'b': {'isa': 'PBXGroup', 'name': 'app', 'path': '..', 'children': ['c']},
                'c': {'isa': 'PBXFileReference', 'name': 'app'},
            },
        }
    def testInit(self):
        # ProjectGroups is a namespace/mixin class, not directly usable.
        with self.assertRaisesRegexp(EnvironmentError, '^This class cannot be instantiated directly'):
            ProjectGroups()
    def testGetGroupsByNameNoParent(self):
        # Without a parent filter, name lookup finds groups everywhere,
        # including those not reachable from 'root'.
        project = XcodeProject(self.obj)
        groups = project.get_groups_by_name('app')
        self.assertIn(project.objects['2'], groups)
        self.assertIn(project.objects['3'], groups)
        self.assertIn(project.objects['5'], groups)
        self.assertIn(project.objects['6'], groups)
    def testGetGroupsByNameFromParent(self):
        # With a parent filter, only that parent's children match.
        project = XcodeProject(self.obj)
        groups = project.get_groups_by_name('app', parent=project.objects['1'])
        self.assertIn(project.objects['2'], groups)
        self.assertIn(project.objects['3'], groups)
        self.assertNotIn(project.objects['5'], groups)
        self.assertNotIn(project.objects['6'], groups)
    def testGetGroupsByPathNoParent(self):
        project = XcodeProject(self.obj)
        groups = project.get_groups_by_path('..')
        self.assertIn(project.objects['2p'], groups)
        self.assertIn(project.objects['3p'], groups)
        self.assertIn(project.objects['5p'], groups)
        self.assertIn(project.objects['6p'], groups)
    def testGetGroupsByPathFromParent(self):
        project = XcodeProject(self.obj)
        groups = project.get_groups_by_path('..', parent=project.objects['1p'])
        self.assertIn(project.objects['2p'], groups)
        self.assertIn(project.objects['3p'], groups)
        self.assertNotIn(project.objects['5p'], groups)
        self.assertNotIn(project.objects['6p'], groups)
    def testAddGroupNoParent(self):
        # New groups default to the root group as parent.
        project = XcodeProject(self.obj)
        group = project.add_group("my_group")
        self.assertTrue(project.objects['root'].has_child(group))
    def testAddGroupToParent(self):
        project = XcodeProject(self.obj)
        group = project.add_group("my_group", parent=project.objects['1'])
        self.assertTrue(project.objects['1'].has_child(group))
    def testRemoveByIdNotFound(self):
        project = XcodeProject(self.obj)
        self.assertFalse(project.remove_group_by_id('xxx'))
    def testRemoveByIdRecursive(self):
        # Recursive removal deletes the group and all of its descendants.
        project = XcodeProject(self.obj)
        group1 = project.objects['1']
        result = project.remove_group_by_id('1', recursive=True)
        self.assertTrue(result)
        self.assertFalse(project.objects['root'].has_child(group1))
        self.assertIsNone(project.objects['1'])
        self.assertIsNone(project.objects['2'])
        self.assertIsNone(project.objects['3'])
    def testRemoveByIdNonRecursive(self):
        # Non-recursive removal leaves the children objects in place.
        project = XcodeProject(self.obj)
        group = project.objects['1']
        result = project.remove_group_by_id('1', recursive=False)
        self.assertTrue(result)
        self.assertFalse(project.objects['root'].has_child(group))
        self.assertIsNone(project.objects['1'])
        self.assertIsNotNone(project.objects['2'])
        self.assertIsNotNone(project.objects['3'])
    def testRemoveByNameNotFound(self):
        project = XcodeProject(self.obj)
        self.assertFalse(project.remove_group_by_name('xxx'))
    def testRemoveByNameRecursive(self):
        # Name-based removal hits every group with that name ('1' and '1p').
        project = XcodeProject(self.obj)
        group1 = project.objects['1']
        group1p = project.objects['1p']
        result = project.remove_group_by_name('root', recursive=True)
        self.assertTrue(result)
        self.assertFalse(project.objects['root'].has_child(group1))
        self.assertFalse(project.objects['root'].has_child(group1p))
        self.assertIsNone(project.objects['1'])
        self.assertIsNone(project.objects['2'])
        self.assertIsNone(project.objects['3'])
        self.assertIsNone(project.objects['1p'])
        self.assertIsNone(project.objects['2p'])
        self.assertIsNone(project.objects['3p'])
    def testRemoveByNameNonRecursive(self):
        project = XcodeProject(self.obj)
        group1 = project.objects['1']
        group1p = project.objects['1p']
        result = project.remove_group_by_name('root', recursive=False)
        self.assertTrue(result)
        self.assertFalse(project.objects['root'].has_child(group1))
        self.assertFalse(project.objects['root'].has_child(group1p))
        self.assertIsNone(project.objects['1'])
        self.assertIsNotNone(project.objects['2'])
        self.assertIsNotNone(project.objects['3'])
        self.assertIsNone(project.objects['1p'])
        self.assertIsNotNone(project.objects['2p'])
        self.assertIsNotNone(project.objects['3p'])
    def testRemoveByIdRecursivelyWithFiles(self):
        # Recursive removal also deletes file references inside the groups.
        project = XcodeProject(self.obj)
        result = project.remove_group_by_id('a')
        self.assertTrue(result)
        self.assertIsNone(project.objects['a'])
        self.assertIsNone(project.objects['b'])
        self.assertIsNone(project.objects['c'])
    def testRemoveBrokenGroups(self):
        # A dangling child id ('broken2' does not exist) makes removal fail.
        project = XcodeProject(self.obj)
        result = project.remove_group_by_id('broken')
        self.assertFalse(result)
    def testRemoveBrokenGroupsByName(self):
        project = XcodeProject(self.obj)
        result = project.remove_group_by_name('broken')
        self.assertFalse(result)
    def testGetOrCreateGroupNoName(self):
        project = XcodeProject(self.obj)
        group = project.get_or_create_group(None)
        self.assertIsNone(group)
    def testGetOrCreateGroupNotFound(self):
        # NOTE(review): the created group is not registered in 'objects' —
        # confirmed expected behavior per the assertion below.
        project = XcodeProject(self.obj)
        group = project.get_or_create_group('whatever')
        self.assertIsNotNone(group)
        self.assertNotIn(group.get_id(), self.obj['objects'])
    def testGetOrCreateGroupFound(self):
        project = XcodeProject(self.obj)
        group = project.get_or_create_group('root')
        self.assertIsNotNone(group)
        self.assertIn(group.get_id(), self.obj['objects'])
    def testGetParentGroupCreateDefault(self):
        # With no project object and no id, a default parent group is
        # created and registered.
        project = XcodeProject({'objects': {}})
        group = project._get_parent_group(None)
        self.assertIsNotNone(group)
        self.assertEqual(project.objects[group.get_id()], group)
    def testGetParentGroupFromMainGroup(self):
        # With a PBXProject present, its mainGroup is used as the parent.
        project = XcodeProject(
            {
                'objects': {
                    'project': {'isa': 'PBXProject', 'mainGroup': 'group'},
                    'group': {'isa': 'PBXGroup', 'name': 'group1'}
                },
                'rootObject': 'project'
            })
        group = project._get_parent_group(None)
        self.assertIsNotNone(group)
        self.assertEqual(project.objects[project.objects['project'].mainGroup], group)
    def testGetParentGroupWithID(self):
        project = XcodeProject(self.obj)
        parent = project._get_parent_group('5p')
        self.assertEqual(parent, project.objects['5p'])
| {
"content_hash": "d957878aff24dba2c352684c0d913d37",
"timestamp": "",
"source": "github",
"line_count": 212,
"max_line_length": 103,
"avg_line_length": 40.15094339622642,
"alnum_prop": 0.5977443609022557,
"repo_name": "dayongxie/mod-pbxproj",
"id": "c9e9b813f6419141e800caa1a20e8e6c145b7849",
"size": "8512",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/pbxextensions/TestProjectGroups.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "62008"
}
],
"symlink_target": ""
} |
import datetime
from django import http
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from django_openstack import api
from django_openstack import utils
from django_openstack.dash.views.floating_ips import FloatingIpAssociate
from django_openstack.tests.view_tests import base
from mox import IsA, IgnoreArg
from novaclient import exceptions as novaclient_exceptions
class FloatingIpViewTests(base.BaseViewTests):
    """View tests for the dashboard floating-IP pages.

    Every api.* call a view makes is stubbed with mox record/replay;
    VerifyAll() at the end of each test checks that the view issued
    exactly the recorded call sequence.  NOTE(review): the recording
    order matters to mox -- do not reorder the stub lines.
    """
    def setUp(self):
        """Create one mock server and one mock floating IP shared by all tests."""
        super(FloatingIpViewTests, self).setUp()
        server = self.mox.CreateMock(api.Server)
        server.id = 1
        server.name = 'serverName'
        self.server = server
        self.servers = (server, )
        floating_ip = self.mox.CreateMock(api.FloatingIp)
        floating_ip.id = 1
        floating_ip.fixed_ip = '10.0.0.4'
        floating_ip.instance_id = 1
        floating_ip.ip = '58.58.58.58'
        self.floating_ip = floating_ip
        self.floating_ips = [floating_ip, ]
    def test_index(self):
        """Index page lists the tenant's floating IPs."""
        self.mox.StubOutWithMock(api, 'tenant_floating_ip_list')
        api.tenant_floating_ip_list(IsA(http.HttpRequest)).\
                AndReturn(self.floating_ips)
        self.mox.ReplayAll()
        res = self.client.get(reverse('dash_floating_ips',
                                      args=[self.TEST_TENANT]))
        self.assertTemplateUsed(res,
                'django_openstack/dash/floating_ips/index.html')
        self.assertItemsEqual(res.context['floating_ips'], self.floating_ips)
        self.mox.VerifyAll()
    def test_associate(self):
        """GET on the associate page renders the association form."""
        self.mox.StubOutWithMock(api, 'server_list')
        api.server_list = self.mox.CreateMockAnything()
        api.server_list(IsA(http.HttpRequest)).AndReturn(self.servers)
        self.mox.StubOutWithMock(api, 'tenant_floating_ip_get')
        api.tenant_floating_ip_get = self.mox.CreateMockAnything()
        api.tenant_floating_ip_get(IsA(http.HttpRequest), str(1)).\
                AndReturn(self.floating_ip)
        self.mox.ReplayAll()
        res = self.client.get(reverse('dash_floating_ips_associate',
                                      args=[self.TEST_TENANT, 1]))
        self.assertTemplateUsed(res,
                'django_openstack/dash/floating_ips/associate.html')
        self.mox.VerifyAll()
    def test_associate_post(self):
        """POST associates the floating IP with an instance and redirects back."""
        server = self.server
        self.mox.StubOutWithMock(api, 'server_list')
        api.server_list = self.mox.CreateMockAnything()
        api.server_list(IsA(http.HttpRequest)).AndReturn(self.servers)
        self.mox.StubOutWithMock(api, 'tenant_floating_ip_list')
        api.tenant_floating_ip_list(IsA(http.HttpRequest)).\
                AndReturn(self.floating_ips)
        self.mox.StubOutWithMock(api, 'server_add_floating_ip')
        api.server_add_floating_ip = self.mox.CreateMockAnything()
        api.server_add_floating_ip(IsA(http.HttpRequest), IsA(unicode),
                                                          IsA(unicode)).\
                                   AndReturn(None)
        self.mox.StubOutWithMock(messages, 'info')
        messages.info(IsA(http.HttpRequest), IsA(unicode))
        self.mox.StubOutWithMock(api, 'tenant_floating_ip_get')
        api.tenant_floating_ip_get = self.mox.CreateMockAnything()
        api.tenant_floating_ip_get(IsA(http.HttpRequest), str(1)).\
                AndReturn(self.floating_ip)
        self.mox.ReplayAll()
        res = self.client.post(reverse('dash_floating_ips_associate',
                                       args=[self.TEST_TENANT, 1]),
                               {'instance_id': 1,
                                'floating_ip_id': self.floating_ip.id,
                                'floating_ip': self.floating_ip.ip,
                                'method': 'FloatingIpAssociate'})
        self.assertRedirects(res, reverse('dash_floating_ips',
                                          args=[self.TEST_TENANT]))
        self.mox.VerifyAll()
    def test_associate_post_with_exception(self):
        """An API failure during association shows an error and still redirects."""
        server = self.server
        self.mox.StubOutWithMock(api, 'server_list')
        api.server_list = self.mox.CreateMockAnything()
        api.server_list(IsA(http.HttpRequest)).AndReturn(self.servers)
        self.mox.StubOutWithMock(api, 'tenant_floating_ip_list')
        api.tenant_floating_ip_list(IsA(http.HttpRequest)).\
                AndReturn(self.floating_ips)
        self.mox.StubOutWithMock(api, 'server_add_floating_ip')
        api.server_add_floating_ip = self.mox.CreateMockAnything()
        exception = novaclient_exceptions.ClientException('ClientException',
                                                    message='clientException')
        api.server_add_floating_ip(IsA(http.HttpRequest), IsA(unicode),
                                                          IsA(unicode)).\
                                   AndRaise(exception)
        self.mox.StubOutWithMock(messages, 'error')
        messages.error(IsA(http.HttpRequest), IsA(basestring))
        self.mox.StubOutWithMock(api, 'tenant_floating_ip_get')
        api.tenant_floating_ip_get = self.mox.CreateMockAnything()
        api.tenant_floating_ip_get(IsA(http.HttpRequest), IsA(unicode)).\
                AndReturn(self.floating_ip)
        self.mox.ReplayAll()
        res = self.client.post(reverse('dash_floating_ips_associate',
                                       args=[self.TEST_TENANT, 1]),
                               {'instance_id': 1,
                                'floating_ip_id': self.floating_ip.id,
                                'floating_ip': self.floating_ip.ip,
                                'method': 'FloatingIpAssociate'})
        self.assertRaises(novaclient_exceptions.ClientException)
        self.assertRedirects(res, reverse('dash_floating_ips',
                                          args=[self.TEST_TENANT]))
        self.mox.VerifyAll()
    def test_disassociate(self):
        """GET on the disassociate page renders the confirmation form."""
        res = self.client.get(reverse('dash_floating_ips_disassociate',
                                      args=[self.TEST_TENANT, 1]))
        self.assertTemplateUsed(res,
                'django_openstack/dash/floating_ips/associate.html')
        self.mox.VerifyAll()
    def test_disassociate_post(self):
        """POST detaches the floating IP from its instance and redirects back."""
        self.mox.StubOutWithMock(api, 'tenant_floating_ip_list')
        api.tenant_floating_ip_list(IsA(http.HttpRequest)).\
                AndReturn(self.floating_ips)
        self.mox.StubOutWithMock(api, 'server_remove_floating_ip')
        api.server_remove_floating_ip = self.mox.CreateMockAnything()
        api.server_remove_floating_ip(IsA(http.HttpRequest), IsA(int),
                                                             IsA(int)).\
                                      AndReturn(None)
        self.mox.StubOutWithMock(messages, 'info')
        messages.info(IsA(http.HttpRequest), IsA(unicode))
        self.mox.StubOutWithMock(api, 'tenant_floating_ip_get')
        api.tenant_floating_ip_get = self.mox.CreateMockAnything()
        api.tenant_floating_ip_get(IsA(http.HttpRequest), IsA(unicode)).\
                AndReturn(self.floating_ip)
        self.mox.ReplayAll()
        res = self.client.post(reverse('dash_floating_ips_disassociate',
                                       args=[self.TEST_TENANT, 1]),
                               {'floating_ip_id': self.floating_ip.id,
                                'method': 'FloatingIpDisassociate'})
        self.assertRedirects(res, reverse('dash_floating_ips',
                                          args=[self.TEST_TENANT]))
        self.mox.VerifyAll()
    def test_disassociate_post_with_exception(self):
        """An API failure during disassociation shows an error and still redirects."""
        self.mox.StubOutWithMock(api, 'tenant_floating_ip_list')
        api.tenant_floating_ip_list(IsA(http.HttpRequest)).\
                AndReturn(self.floating_ips)
        self.mox.StubOutWithMock(api, 'server_remove_floating_ip')
        exception = novaclient_exceptions.ClientException('ClientException',
                                                    message='clientException')
        api.server_remove_floating_ip(IsA(http.HttpRequest), IsA(int),
                                                             IsA(int)).\
                                      AndRaise(exception)
        self.mox.StubOutWithMock(messages, 'error')
        messages.error(IsA(http.HttpRequest), IsA(basestring))
        self.mox.StubOutWithMock(api, 'tenant_floating_ip_get')
        api.tenant_floating_ip_get = self.mox.CreateMockAnything()
        api.tenant_floating_ip_get(IsA(http.HttpRequest), IsA(unicode)).\
                AndReturn(self.floating_ip)
        self.mox.ReplayAll()
        res = self.client.post(reverse('dash_floating_ips_disassociate',
                                       args=[self.TEST_TENANT, 1]),
                               {'floating_ip_id': self.floating_ip.id,
                                'method': 'FloatingIpDisassociate'})
        self.assertRaises(novaclient_exceptions.ClientException)
        self.assertRedirects(res, reverse('dash_floating_ips',
                                          args=[self.TEST_TENANT]))
        self.mox.VerifyAll()
| {
"content_hash": "831f557cb7371332e4e81c010b98551e",
"timestamp": "",
"source": "github",
"line_count": 202,
"max_line_length": 80,
"avg_line_length": 47.2970297029703,
"alnum_prop": 0.5576721791919614,
"repo_name": "ntt-pf-lab/horizon",
"id": "cd926ca1112eaf4f839b6c76f76b04be5a755bea",
"size": "10363",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "django-openstack/django_openstack/tests/view_tests/dash/floating_ip_tests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "108904"
},
{
"name": "Python",
"bytes": "448080"
},
{
"name": "Shell",
"bytes": "4828"
}
],
"symlink_target": ""
} |
from setuptools import setup, find_packages
import mosql
with open('README.rst', 'rb') as f:
    README = f.read()
# We want README to be a str, no matter it is byte or text. 'rb' reads bytes,
# so we need extra conversion on Python 3. On Python 2 bytes is synonym to str,
# and we're good.
if not isinstance(README, str):
    README = README.decode('utf-8')
# Package metadata. The version is taken from mosql.__version__ so it is
# maintained in a single place; README.rst doubles as the PyPI long
# description.
setup(
    name='mosql',
    version=mosql.__version__,
    description='Build SQL with native Python data structure smoothly.',
    long_description=README,
    author='Mosky',
    author_email='mosky.tw@gmail.com',
    url='http://mosql.mosky.tw/',
    license='MIT',
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Topic :: Database :: Front-Ends',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    # 'oldtests' is excluded from the distributed packages on purpose.
    packages=find_packages(exclude=['oldtests']),
    zip_safe=True,
)
| {
"content_hash": "348f29b0f526ac341afee702d07a4583",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 79,
"avg_line_length": 32.09756097560975,
"alnum_prop": 0.6268996960486323,
"repo_name": "uranusjr/mosql",
"id": "3d7e7e80c3bdce7cd3ca164309e99aee796eb0e0",
"size": "1363",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "219"
},
{
"name": "Makefile",
"bytes": "315"
},
{
"name": "Python",
"bytes": "142614"
}
],
"symlink_target": ""
} |
from swgpy.object import *
def create(kernel):
    """Template factory: return the configured prototype TIE fighter PCD intangible."""
    obj = Intangible()
    obj.template = "object/intangible/ship/shared_prototype_tiefighter_pcd.iff"
    obj.attribute_template_id = 8
    obj.stfName("space_item_n","tiefighter_pcd")
    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####
    return obj
"content_hash": "d6601c9683f60e235a7ef052a326265e",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 79,
"avg_line_length": 24.46153846153846,
"alnum_prop": 0.7075471698113207,
"repo_name": "obi-two/Rebelion",
"id": "118e9d76155331c962c757f7ee08b9fee7251384",
"size": "463",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "data/scripts/templates/object/intangible/ship/shared_prototype_tiefighter_pcd.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11818"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2293610"
},
{
"name": "CMake",
"bytes": "39727"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7499185"
},
{
"name": "SQLPL",
"bytes": "41864"
}
],
"symlink_target": ""
} |
try:
import json
except ImportError:
import simplejson as json
from Base import materializerBase
class materializer(materializerBase):
    """Materializer that emits a deployment capture for a two-node Gluster
    3.4.2.1 cluster as indented JSON on stdout."""
    def buildCapture(self):
        """
        Build the deployment capture based on the parsed input.

        The capture contains: a pre-delete script that releases the DHCP
        lease and powers each node off, the two install nodes, two install
        steps (step0 installs the gluster packages, step1 configures gluster
        with the ';;'-joined peer list), and one exposed access point per
        node.  The finished capture is printed as indented JSON.
        """
        stepsCount = 2
        nodeCount = 2
        nodeNamePrefix = 'gluster'
        capture = self.createCaptureTemplate()
        capture['preDeleteNodesScript']['sections'].append(self.createPreDeleteNodesScript(self.createNodelist(nodeNamePrefix, nodeCount, False),
                                                            'dhclient -v -r -lf /var/lib/dhclient/dhclient-eth0.leases -B -d -H `hostname` eth0; poweroff -f'))
        nodes = capture['installNodes']['nodes']
        for i in range(0,nodeCount):
            node = self.createNode(self.createVmName(nodeNamePrefix + str(i), False))
            node['postConfiguration'] = True
            nodes.append(node)
        steps = capture['installModules']
        for i in range(0, stepsCount):
            step = dict()
            modules = list()
            if i == 0:
                # step0: install the gluster packages on every node
                gluster = self.createModule('gluster',
                                            '3.4.2.1',
                                            self.createNodelist(nodeNamePrefix, nodeCount, False)
                                            )
                modules.append(gluster)
            elif i == 1:
                # step1: configure gluster, passing the peer list as a
                # ';;'-separated glusterfs::peers value
                gluster_configure = self.createModule('gluster_configure',
                                            '3.4.2.1',
                                            self.createNodelist(nodeNamePrefix, nodeCount, False),
                                            file=self.createFile([],
                                                self.createAdditionalValues(['glusterfs::peers'],
                                                    [';;'.join(self.createNodelist(nodeNamePrefix, nodeCount, False))]))
                                            )
                modules.append(gluster_configure)
            step['modules'] = modules
            steps['step' + str(i)] = step
        exposeAccessPoints = capture['exposeAccessPoints']
        accessPoints = []
        # Fix: the original also computed an unused nodeName per iteration;
        # that dead assignment has been removed.
        for i in range(0,nodeCount):
            accessPoints.append(self.createAccessPoint('glusterFS-' + str(i), '****'))
        exposeAccessPoints['accessPoints'] = accessPoints
        print(json.dumps(capture,indent=2))
def main():
    """Entry point: parse the materializer input, then print the capture."""
    mat = materializer()
    mat.parseInput()
    mat.buildCapture()
if __name__ == '__main__':
    main()
| {
"content_hash": "cb6c75a6f01311955801910d0bd1baa7",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 163,
"avg_line_length": 29.802197802197803,
"alnum_prop": 0.49336283185840707,
"repo_name": "foundation-runtime/orchestration",
"id": "34a0a995a7be1ef17bf657de70e87fca6889ed2b",
"size": "2712",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/rpm/products/Gluster-3.4.2.1/materializer/materializerExample.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Augeas",
"bytes": "2190"
},
{
"name": "CSS",
"bytes": "10024"
},
{
"name": "JavaScript",
"bytes": "30701"
},
{
"name": "Pascal",
"bytes": "1080"
},
{
"name": "Puppet",
"bytes": "7634"
},
{
"name": "Python",
"bytes": "14256"
},
{
"name": "Ruby",
"bytes": "206266"
},
{
"name": "Scala",
"bytes": "390380"
},
{
"name": "Shell",
"bytes": "25374"
}
],
"symlink_target": ""
} |
import hf
from sqlalchemy import *
from urllib2 import Request, urlopen, URLError
from decimal import *
import json
class DDM(hf.module.ModuleBase):
    """HappyFace module: ATLAS DDM transfer quality for a single site.

    Downloads the DDM dashboard transfer matrices for transfers INTO
    (destination) and OUT OF (source) the configured cloud/site, stores
    totals and per-space-token statistics, and derives the module status
    from the per-token efficiencies and the configured thresholds.
    """
    config_keys = {
        'site_name': ('Site name', 'GOEGRID'),
        'cloud': ('Cloud', 'DE'),
        'time_interval': ('Time interval in minutes', '120'),
        'url_destination_space_tokens': ('URL for the destination space tokens', 'local||http://dashb-atlas-data.cern.ch/dashboard/request.py/matrix.json?activity=0&activity=1&activity=2&activity=3&activity=4&activity=5&activity=6&activity=7&src_grouping=cloud&dst_cloud=%%22%s%%22&dst_site=%%22%s%%22&dst_grouping=cloud&dst_grouping=site&dst_grouping=token&interval=%s'),
        'url_source_space_tokens': ('URL for the source space tokens', 'local||http://dashb-atlas-data.cern.ch/dashboard/request.py/matrix.json?activity=0&activity=1&activity=2&activity=3&activity=4&activity=5&activity=6&activity=7&src_cloud=%%22%s%%22&src_site=%%22%s%%22&src_grouping=cloud&src_grouping=site&src_grouping=token&dst_grouping=cloud&interval=%s'),
        'url_destination_failed_transfers': ('URL for failed transfers from destination', 'http://dashb-atlas-data.cern.ch/dashboard/request.py/details.json?activity=0&activity=1&activity=2&activity=3&activity=4&activity=5&activity=6&activity=7&dst_cloud=%%22%s%%22&dst_site=%%22%s%%22&state=FAILED_TRANSFER&error_code=&offset=0&limit=1000&from_date=%sT%s%%3A%s%%3A%s&to_date=%sT%s%%3A%s%%3A%s'),
        'url_source_failed_transfers': ('URL for failed transfers from source', 'http://dashb-atlas-data.cern.ch/dashboard/request.py/details.json?activity=0&activity=1&activity=2&activity=3&activity=4&activity=5&activity=6&activity=7&state=FAILED_TRANSFER&error_code=&offset=0&limit=1000&src_cloud=%%22%s%%22&src_site=%%22%s%%22&from_date=%sT%s%%3A%s%%3A%s&to_date=%sT%s%%3A%s%%3A%s'),
        'destination_warning_threshold': ('Below this efficiency for destination transfers, the status of the module will be warning (ok, if above).', '0.8'),
        'source_warning_threshold': ('Below this efficiency for destination transfers, the status of the module will be critical.', '0.8'),
        'destination_critical_threshold': ('Below this efficiency for source transfers, the status of the module will be warning (ok, if above).', '0.5'),
        'source_critical_threshold': ('Below this efficiency for source transfers, the status of the module will be critical.', '0.5'),
    }
    config_hint = 'Adjust the parameters site_name, cloud, and time_interval to your needs, as well as the thresholds for different statuses.'
    # Fix: the original listed Column('url_source_space_tokens', TEXT) twice
    # (copy-paste error); the duplicate has been removed so every column
    # appears exactly once in the module table schema.
    table_columns = [
        Column('site_name', TEXT),
        Column('cloud', TEXT),
        Column('time_interval', INT),
        Column('url_destination_space_tokens', TEXT),
        Column('url_source_space_tokens', TEXT),
        Column('url_destination_failed_transfers', TEXT),
        Column('url_source_failed_transfers', TEXT),
        Column('destination_successful_transfers_total', INT),
        Column('source_successful_transfers_total', INT),
        Column('destination_failed_transfers_total', INT),
        Column('source_failed_transfers_total', INT),
        Column('destination_throughput_total', INT),
        Column('source_throughput_total', INT),
        Column('destination_failures_total', INT),
        Column('source_failures_total', INT),
        Column('destination_efficiency_total', FLOAT),
        Column('source_efficiency_total', FLOAT),
    ], []
    subtable_columns = {
        'destination_details_table': ([
            Column('token', TEXT),
            Column('successful', INT),
            Column('failed', INT),
            Column('failed_reason_destination', INT),
            Column('throughput', INT),
            Column('efficiency', FLOAT),
        ], []),
        'source_details_table': ([
            Column('token', TEXT),
            Column('successful', INT),
            Column('failed', INT),
            Column('failed_reason_source', INT),
            Column('throughput', INT),
            Column('efficiency', FLOAT),
        ], []),
    }
    def prepareAcquisition(self):
        """Expand the configured URL templates and register the downloads."""
        # NOTE(review): source_url looks like a placeholder; the real data
        # comes from the two downloads registered below -- confirm.
        self.source_url = 'www.google.com'
        self.site_name = self.config['site_name']
        self.cloud = self.config['cloud']
        self.time_interval = self.config['time_interval']
        self.url_destination_space_tokens = self.config['url_destination_space_tokens']%(str(self.cloud),str(self.site_name),self.time_interval)
        self.url_source_space_tokens = self.config['url_source_space_tokens']%(str(self.cloud),str(self.site_name),self.time_interval)
        self.url_destination_failed_transfers = self.config['url_destination_failed_transfers']
        self.url_source_failed_transfers = self.config['url_source_failed_transfers']
        # prepare downloads
        self.source_destination_space_tokens = hf.downloadService.addDownload(self.url_destination_space_tokens)
        self.source_source_space_tokens = hf.downloadService.addDownload(self.url_source_space_tokens)
        self.destination_details_table_db_value_list = []
        self.source_details_table_db_value_list = []
    def extractData(self):
        """Parse the downloaded matrices and compute totals, efficiencies and status."""
        data = {
            'site_name': self.site_name,
            'cloud': self.cloud,
            'time_interval': int(self.time_interval),
            'url_destination_space_tokens': self.url_destination_space_tokens,
            'url_source_space_tokens': self.url_source_space_tokens,
        }
        # read the downloaded files
        content_destination_space_tokens = open(self.source_destination_space_tokens.getTmpPath()).read()
        content_source_space_tokens = open(self.source_source_space_tokens.getTmpPath()).read()
        # parse the source; due to the fact that some download links are created from other downloaded files, some downloads still have to take place here
        ddm_info = ddm_parser(self.cloud,self.site_name,content_destination_space_tokens,content_source_space_tokens,self.url_destination_failed_transfers,self.url_source_failed_transfers)
        data['destination_successful_transfers_total'] = ddm_info.destination_successful_transfers_total
        data['source_successful_transfers_total'] = ddm_info.source_successful_transfers_total
        data['destination_failed_transfers_total'] = ddm_info.destination_failed_transfers_total
        data['source_failed_transfers_total'] = ddm_info.source_failed_transfers_total
        data['destination_throughput_total'] = ddm_info.destination_throughput_total
        data['source_throughput_total'] = ddm_info.source_throughput_total
        data['destination_failures_total'] = ddm_info.destination_failures_total
        data['source_failures_total'] = ddm_info.source_failures_total
        # overall efficiency = successful / (successful + failed), guarded
        # against a zero denominator
        if ddm_info.destination_successful_transfers_total + ddm_info.destination_failed_transfers_total != 0:
            data['destination_efficiency_total'] = ddm_info.destination_successful_transfers_total / (ddm_info.destination_successful_transfers_total + ddm_info.destination_failed_transfers_total)
        else:
            data['destination_efficiency_total'] = 0
        if ddm_info.source_successful_transfers_total + ddm_info.source_failed_transfers_total != 0:
            data['source_efficiency_total'] = ddm_info.source_successful_transfers_total / (ddm_info.source_successful_transfers_total + ddm_info.source_failed_transfers_total)
        else:
            data['source_efficiency_total'] = 0
        self.destination_details_table_db_value_list = [
            {
                'token': token,
                'successful': (ddm_info.destination_space_tokens[token])['successful'],
                'failed': (ddm_info.destination_space_tokens[token])['failed'],
                'failed_reason_destination': (ddm_info.destination_space_tokens[token])['failed_reason_destination'],
                'throughput': (ddm_info.destination_space_tokens[token])['throughput'],
                'efficiency': (ddm_info.destination_space_tokens[token])['efficiency'],
            }
            for token in ddm_info.destination_space_tokens
        ]
        self.source_details_table_db_value_list = [
            {
                'token': token,
                'successful': (ddm_info.source_space_tokens[token])['successful'],
                'failed': (ddm_info.source_space_tokens[token])['failed'],
                'failed_reason_source': (ddm_info.source_space_tokens[token])['failed_reason_source'],
                'throughput': (ddm_info.source_space_tokens[token])['throughput'],
                'efficiency': (ddm_info.source_space_tokens[token])['efficiency'],
            }
            for token in ddm_info.source_space_tokens
        ]
        # status: 1 = ok, 0.5 = warning, 0 = critical; the worst token wins
        data['status'] = 1
        for token in ddm_info.destination_space_tokens:
            if float(self.config['destination_warning_threshold']) <= (ddm_info.destination_space_tokens[token])['efficiency'] <= 1:
                data['status'] = min(data['status'], 1)
            elif float(self.config['destination_critical_threshold']) <= (ddm_info.destination_space_tokens[token])['efficiency'] < float(self.config['destination_warning_threshold']):
                data['status'] = min(data['status'], 0.5)
            elif 0 <= (ddm_info.destination_space_tokens[token])['efficiency'] < float(self.config['destination_critical_threshold']) and (ddm_info.destination_space_tokens[token])['successful'] + (ddm_info.destination_space_tokens[token])['failed'] > 0:
                data['status'] = min(data['status'], 0)
            else:
                data['status'] = min(data['status'], 0)
        for token in ddm_info.source_space_tokens:
            if float(self.config['source_warning_threshold']) <= (ddm_info.source_space_tokens[token])['efficiency'] <= 1:
                data['status'] = min(data['status'], 1)
            elif float(self.config['source_critical_threshold']) <= (ddm_info.source_space_tokens[token])['efficiency'] < float(self.config['source_warning_threshold']):
                data['status'] = min(data['status'], 0.5)
            elif 0 <= (ddm_info.source_space_tokens[token])['efficiency'] < float(self.config['source_critical_threshold']) and (ddm_info.source_space_tokens[token])['successful'] + (ddm_info.source_space_tokens[token])['failed'] > 0:
                data['status'] = min(data['status'], 0)
            else:
                data['status'] = min(data['status'], 0)
        return data
    def fillSubtables(self, parent_id):
        """Insert the per-token detail rows collected by extractData()."""
        self.subtables['destination_details_table'].insert().execute([dict(parent_id=parent_id, **row) for row in self.destination_details_table_db_value_list])
        self.subtables['source_details_table'].insert().execute([dict(parent_id=parent_id, **row) for row in self.source_details_table_db_value_list])
    def getTemplateData(self):
        """Return the template context, including detail rows and thresholds."""
        data = hf.module.ModuleBase.getTemplateData(self)
        destination_details = self.subtables['destination_details_table'].select().where(self.subtables['destination_details_table'].c.parent_id==self.dataset['id']).execute().fetchall()
        source_details = self.subtables['source_details_table'].select().where(self.subtables['source_details_table'].c.parent_id==self.dataset['id']).execute().fetchall()
        data['destination_warning_threshold'] = self.config['destination_warning_threshold']
        data['source_warning_threshold'] = self.config['source_warning_threshold']
        data['destination_critical_threshold'] = self.config['destination_critical_threshold']
        data['source_critical_threshold'] = self.config['source_critical_threshold']
        data['destination_details'] = map(dict, destination_details)
        data['source_details'] = map(dict, source_details)
        return data
class ddm_parser:
    def __init__(self, cloud, site_name, content_destination, content_source, link_destination_failed_transfers, link_source_failed_transfers):
        """Store the inputs and immediately parse both transfer matrices.

        NOTE: the constructor calls parse_destination()/parse_source(),
        which perform additional HTTP requests to the dashboard.
        """
        self.cloud = cloud
        self.site_name = site_name
        self.content_destination = content_destination
        self.content_source = content_source
        self.link_destination_failed_transfers = link_destination_failed_transfers
        self.link_source_failed_transfers = link_source_failed_transfers
        # per-space-token accumulators, filled by the parse_* methods below
        self.destination_space_tokens = {}
        self.source_space_tokens = {}
        # site-wide totals
        self.destination_successful_transfers_total = 0
        self.source_successful_transfers_total = 0
        self.destination_failed_transfers_total = 0
        self.destination_throughput_total = 0
        self.source_throughput_total = 0
        self.source_failed_transfers_total = 0
        self.destination_failures_total = 0
        self.source_failures_total = 0
        self.parse_destination()
        self.parse_source()
def __get_times(self,json_content):
from_time = "n/a"
from_time_dict = {}
to_time = "n/a"
to_time_dict = {}
from_time = json_content['params']['from_date']
from_time_dict['date'] = from_time.split("T")[0]
from_time_dict['hh'] = from_time.split("T")[1].split(":")[0]
from_time_dict['mm'] = from_time.split("T")[1].split(":")[1]
from_time_dict['ss'] = from_time.split("T")[1].split(":")[2]
to_time = json_content['params']['to_date']
to_time_dict['date'] = to_time.split("T")[0]
to_time_dict['hh'] = to_time.split("T")[1].split(":")[0]
to_time_dict['mm'] = to_time.split("T")[1].split(":")[1]
to_time_dict['ss'] = to_time.split("T")[1].split(":")[2]
return from_time_dict, to_time_dict
    def parse_destination(self):
        """Fill the destination-side totals and per-token statistics.

        Accumulates successful/failed transfer counts and throughput per
        destination space token from the matrix JSON, then queries the
        dashboard details endpoint to count failures whose error text
        contains "DESTINATION".  Returns "n/a" on download errors.
        """
        content = self.content_destination
        json_content = json.loads(content)
        from_time, to_time = self.__get_times(json_content)
        # NOTE(review): __get_times returns dicts, so this "n/a" comparison
        # can never be true -- dead guard, kept for compatibility.
        if from_time == "n/a" or to_time == "n/a":
            print "No times defined, please check the __get_times() method"
            return "n/a"
        else:
            # matrix row layout (by index): 4 = throughput, 5 = successful,
            # 6 = failed; 2 = destination site, 3 = destination space token
            for transfer in json_content['transfers']['rows']:
                self.destination_successful_transfers_total += float(transfer[5])
                self.destination_failed_transfers_total += float(transfer[6])
                self.destination_throughput_total += float(transfer[4])
                if transfer[2] == self.site_name and transfer[3] in self.destination_space_tokens:
                    self.destination_space_tokens[transfer[3]] = {'successful': (self.destination_space_tokens[transfer[3]])['successful'] + float(transfer[5]), 'failed': (self.destination_space_tokens[transfer[3]])['failed'] + float(transfer[6]), 'failed_reason_destination': 0, 'throughput': (self.destination_space_tokens[transfer[3]])['throughput'] + float(transfer[4])}
                if transfer[2] == self.site_name and transfer[3] not in self.destination_space_tokens:
                    self.destination_space_tokens[transfer[3]] = {'successful': float(transfer[5]), 'failed': float(transfer[6]), 'failed_reason_destination': 0, 'throughput': float(transfer[4])}
            destination_failure_info_link = self.link_destination_failed_transfers%(self.cloud, self.site_name, from_time['date'], from_time['hh'], from_time['mm'], from_time['ss'], to_time['date'], to_time['hh'], to_time['mm'], to_time['ss'])
            req = Request(destination_failure_info_link)
            try:
                response = urlopen(req)
            except URLError as e:
                if hasattr(e,'reason'):
                    print "Impossible to reach the server"
                    print "Reason: ", e.reason
                    return "n/a"
                elif hasattr(e, 'code'):
                    print "The server couldn't fulfill the request."
                    print "Error Code: ", e.code
                    return "n/a"
            else:
                # count site-wide failures blamed on the destination
                destination_source = urlopen(destination_failure_info_link)
                destination_failures_json_content = json.load(destination_source)
                for transfer_detail in destination_failures_json_content['details']:
                    if "DESTINATION" in str(transfer_detail['transfer_error']):
                        self.destination_failures_total += 1
                    else:
                        continue
            for token in self.destination_space_tokens:
                if (self.destination_space_tokens[token])['successful'] + (self.destination_space_tokens[token])['failed'] != 0:
                    (self.destination_space_tokens[token])['efficiency'] = (self.destination_space_tokens[token])['successful'] / ((self.destination_space_tokens[token])['successful'] + (self.destination_space_tokens[token])['failed'])
                else:
                    (self.destination_space_tokens[token])['efficiency'] = 0
                destination_failure_info_link_token = self.link_destination_failed_transfers%(self.cloud, self.site_name, from_time['date'], from_time['hh'], from_time['mm'], from_time['ss'], to_time['date'], to_time['hh'], to_time['mm'], to_time['ss'])
                destination_failure_info_link_token += '&dst_token="' + token + '"'
                # NOTE(review): the reachability probe below uses the
                # un-tokenized link while the data fetch uses the tokenized
                # one -- looks like a copy-paste slip; confirm intent.
                req = Request(destination_failure_info_link)
                try:
                    response = urlopen(req)
                except URLError as e:
                    if hasattr(e,'reason'):
                        print "Impossible to reach the server"
                        print "Reason: ", e.reason
                        return "n/a"
                    elif hasattr(e, 'code'):
                        print "The server couldn't fulfill the request."
                        print "Error Code: ", e.code
                        return "n/a"
                else:
                    destination_source_token = urlopen(destination_failure_info_link_token)
                    destination_failure_json_content_token = json.load(destination_source_token)
                    for transfer_detail in destination_failure_json_content_token['details']:
                        if "DESTINATION" in str(transfer_detail['transfer_error']):
                            self.destination_space_tokens[token]['failed_reason_destination'] += 1
                        else:
                            continue
def parse_source(self):
content = self.content_source
json_content = json.loads(content)
from_time, to_time = self.__get_times(json_content)
if from_time == "n/a" or to_time == "n/a":
print "No times defined, please check the __get_times() method"
return "n/a"
else:
for transfer in json_content['transfers']['rows']:
self.source_successful_transfers_total += float(transfer[5])
self.source_failed_transfers_total += float(transfer[6])
self.source_throughput_total += float(transfer[4])
if transfer[1] == self.site_name and transfer[2] in self.source_space_tokens:
self.source_space_tokens[transfer[2]] = {'successful': (self.source_space_tokens[transfer[2]])['successful'] + float(transfer[5]), 'failed': (self.source_space_tokens[transfer[2]])['failed'] + float(transfer[6]), 'failed_reason_source': 0, 'throughput': (self.source_space_tokens[transfer[2]])['throughput'] + float(transfer[4])}
if transfer[1] == self.site_name and transfer[2] not in self.source_space_tokens:
self.source_space_tokens[transfer[2]] = {'successful': float(transfer[5]), 'failed': float(transfer[6]), 'failed_reason_source': 0, 'throughput': float(transfer[4])}
source_failure_info_link = self.link_source_failed_transfers%(self.cloud, self.site_name, from_time['date'], from_time['hh'], from_time['mm'], from_time['ss'], to_time['date'], to_time['hh'], to_time['mm'], to_time['ss'])
req = Request(source_failure_info_link)
try:
response = urlopen(req)
except URLError as e:
if hasattr(e,'reason'):
print "Impossible to reach the server"
print "Reason: ", e.reason
return "n/a"
elif hasattr(e, 'code'):
print "The server couldn't fulfill the request."
print "Error Code: ", e.code
return "n/a"
else:
source_source = urlopen(source_failure_info_link)
source_failures_json_content = json.load(source_source)
for transfer_detail in source_failures_json_content['details']:
if "SOURCE" in str(transfer_detail['transfer_error']):
self.source_failures_total += 1
else:
continue
for token in self.source_space_tokens:
if (self.source_space_tokens[token])['successful'] + (self.source_space_tokens[token])['failed'] != 0:
(self.source_space_tokens[token])['efficiency'] = (self.source_space_tokens[token])['successful'] / ((self.source_space_tokens[token])['successful'] + (self.source_space_tokens[token])['failed'])
else:
(self.source_space_tokens[token])['efficiency'] = 0
source_failure_info_link_token = self.link_source_failed_transfers%(self.cloud, self.site_name, from_time['date'], from_time['hh'], from_time['mm'], from_time['ss'], to_time['date'], to_time['hh'], to_time['mm'], to_time['ss'])
source_failure_info_link_token += '&src_token="' + token + '"'
req = Request(source_failure_info_link)
try:
response = urlopen(req)
except URLError as e:
if hasattr(e,'reason'):
print "Impossible to reach the server"
print "Reason: ", e.reason
return "n/a"
elif hasattr(e, 'code'):
print "The server couldn't fulfill the request."
print "Error Code: ", e.code
return "n/a"
else:
source_source_token = urlopen(source_failure_info_link_token)
source_failure_json_content_token = json.load(source_source_token)
for transfer_detail in source_failure_json_content_token['details']:
if "SOURCE" in str(transfer_detail['transfer_error']):
self.source_space_tokens[token]['failed_reason_source'] += 1
else:
continue
| {
"content_hash": "216f10b76a090ea0354bac9ddd436e5f",
"timestamp": "",
"source": "github",
"line_count": 349,
"max_line_length": 396,
"avg_line_length": 64.79083094555874,
"alnum_prop": 0.6122412878117813,
"repo_name": "HappyFaceGoettingen/HappyFaceATLASModules",
"id": "5578a5ef1b62e72670745e6f444a6af33ebc477f",
"size": "23324",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modules/DDM.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "65744"
},
{
"name": "Python",
"bytes": "137521"
}
],
"symlink_target": ""
} |
import os
from setuptools import setup, find_packages
# Absolute directory containing this setup.py; used to locate files that
# ship next to it.
here = os.path.abspath(os.path.dirname(__file__))
# with open(os.path.join(here, 'README.md')) as f:
# README = f.read()
# CHANGES.txt is read here and was intended for long_description (see the
# commented-out lines above and below).
with open(os.path.join(here, 'CHANGES.txt')) as f:
    CHANGES = f.read()
# Runtime dependencies installed together with the package.
requires = [
    'pyramid',
    'pyramid_jinja2',
    'pyramid_debugtoolbar',
    'pyramid_tm',
    'SQLAlchemy',
    'transaction',
    'zope.sqlalchemy',
    'waitress',
    'google-api-python-client',
    'psycopg2',
    'passlib',
    ]
# Extra dependencies pulled in via `pip install ".[testing]"` (see
# extras_require below).
tests_require = [
    'WebTest >= 1.3.1',  # py3 compat
    'pytest',  # includes virtualenv
    'pytest-cov',
    'pytest-watch',
    'tox',
    ]
setup(name='Elections-R-Us',
      version='0.0',
      description='Elections-R-Us',
      # long_description=README + '\n\n' + CHANGES,
      classifiers=[
          "Programming Language :: Python",
          "Framework :: Pyramid",
          "Topic :: Internet :: WWW/HTTP",
          "Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
      ],
      author='',
      author_email='',
      url='',
      keywords='web wsgi bfg pylons pyramid',
      packages=find_packages(),
      include_package_data=True,
      zip_safe=False,
      extras_require={
          'testing': tests_require,
      },
      install_requires=requires,
      # Entry points: the Pyramid WSGI app factory and the DB bootstrap
      # console script (`init_db`).
      entry_points="""\
      [paste.app_factory]
      main = elections_r_us:main
      [console_scripts]
      init_db = elections_r_us.scripts.initializedb:main
      """,
      )
| {
"content_hash": "2a7fa9f4690c4b2b822062cb4f328c98",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 65,
"avg_line_length": 24.147540983606557,
"alnum_prop": 0.5661914460285132,
"repo_name": "Elections-R-Us/Elections-R-Us",
"id": "34430253340dec46322431105a13f7502ef81e46",
"size": "1473",
"binary": false,
"copies": "1",
"ref": "refs/heads/development",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "580052"
},
{
"name": "HTML",
"bytes": "539"
},
{
"name": "JavaScript",
"bytes": "930231"
},
{
"name": "Python",
"bytes": "119264"
},
{
"name": "Shell",
"bytes": "60"
}
],
"symlink_target": ""
} |
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class server(base_resource) :
""" Configuration for server resource. """
	def __init__(self) :
		ur"""Initialize every server field to its empty default.

		The NITRO payload formatter overwrites these fields when a
		response is deserialized; user code reads/writes them through the
		properties defined below.
		"""
		# Writable configuration fields (mirrored by read/write properties).
		self._name = ""
		self._ipaddress = ""
		self._domain = ""
		self._translationip = ""
		self._translationmask = ""
		self._domainresolveretry = 0
		self._state = ""
		self._ipv6address = ""
		self._comment = ""
		self._td = 0
		self._domainresolvenow = False
		self._delay = 0
		self._graceful = ""
		self._Internal = False
		self._newname = ""
		# Appliance-reported fields (exposed via read-only properties below).
		self._statechangetimesec = ""
		self._tickssincelaststatechange = 0
		self._autoscale = ""
		self._customserverid = ""
		self._monthreshold = 0
		self._maxclient = 0
		self._maxreq = 0
		self._maxbandwidth = 0
		self._usip = ""
		self._cka = ""
		self._tcpb = ""
		self._cmp = ""
		self._clttimeout = 0
		self._svrtimeout = 0
		self._cipheader = ""
		self._cip = ""
		self._cacheable = ""
		self._sc = ""
		self._sp = ""
		self._downstateflush = ""
		self._appflowlog = ""
		self._boundtd = 0
		# Result-count slot used by count()/count_filtered().
		self.___count = 0
@property
def name(self) :
ur"""Name for the server.
Must begin with an ASCII alphabetic or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at (@), equals (=), and hyphen (-) characters.
Can be changed after the name is created.<br/>Minimum length = 1.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
ur"""Name for the server.
Must begin with an ASCII alphabetic or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at (@), equals (=), and hyphen (-) characters.
Can be changed after the name is created.<br/>Minimum length = 1
"""
try :
self._name = name
except Exception as e:
raise e
@property
def ipaddress(self) :
ur"""IPv4 or IPv6 address of the server. If you create an IP address based server, you can specify the name of the server, instead of its IP address, when creating a service. Note: If you do not create a server entry, the server IP address that you enter when you create a service becomes the name of the server.
"""
try :
return self._ipaddress
except Exception as e:
raise e
@ipaddress.setter
def ipaddress(self, ipaddress) :
ur"""IPv4 or IPv6 address of the server. If you create an IP address based server, you can specify the name of the server, instead of its IP address, when creating a service. Note: If you do not create a server entry, the server IP address that you enter when you create a service becomes the name of the server.
"""
try :
self._ipaddress = ipaddress
except Exception as e:
raise e
@property
def domain(self) :
ur"""Domain name of the server. For a domain based configuration, you must create the server first.<br/>Minimum length = 1.
"""
try :
return self._domain
except Exception as e:
raise e
@domain.setter
def domain(self, domain) :
ur"""Domain name of the server. For a domain based configuration, you must create the server first.<br/>Minimum length = 1
"""
try :
self._domain = domain
except Exception as e:
raise e
@property
def translationip(self) :
ur"""IP address used to transform the server's DNS-resolved IP address.
"""
try :
return self._translationip
except Exception as e:
raise e
@translationip.setter
def translationip(self, translationip) :
ur"""IP address used to transform the server's DNS-resolved IP address.
"""
try :
self._translationip = translationip
except Exception as e:
raise e
@property
def translationmask(self) :
ur"""The netmask of the translation ip.
"""
try :
return self._translationmask
except Exception as e:
raise e
@translationmask.setter
def translationmask(self, translationmask) :
ur"""The netmask of the translation ip.
"""
try :
self._translationmask = translationmask
except Exception as e:
raise e
@property
def domainresolveretry(self) :
ur"""Time, in seconds, for which the NetScaler appliance must wait, after DNS resolution fails, before sending the next DNS query to resolve the domain name.<br/>Default value: 5<br/>Minimum length = 5<br/>Maximum length = 20939.
"""
try :
return self._domainresolveretry
except Exception as e:
raise e
@domainresolveretry.setter
def domainresolveretry(self, domainresolveretry) :
ur"""Time, in seconds, for which the NetScaler appliance must wait, after DNS resolution fails, before sending the next DNS query to resolve the domain name.<br/>Default value: 5<br/>Minimum length = 5<br/>Maximum length = 20939
"""
try :
self._domainresolveretry = domainresolveretry
except Exception as e:
raise e
@property
def state(self) :
ur"""Initial state of the server.<br/>Default value: ENABLED<br/>Possible values = ENABLED, DISABLED.
"""
try :
return self._state
except Exception as e:
raise e
@state.setter
def state(self, state) :
ur"""Initial state of the server.<br/>Default value: ENABLED<br/>Possible values = ENABLED, DISABLED
"""
try :
self._state = state
except Exception as e:
raise e
@property
def ipv6address(self) :
ur"""Support IPv6 addressing mode. If you configure a server with the IPv6 addressing mode, you cannot use the server in the IPv4 addressing mode.<br/>Default value: NO<br/>Possible values = YES, NO.
"""
try :
return self._ipv6address
except Exception as e:
raise e
@ipv6address.setter
def ipv6address(self, ipv6address) :
ur"""Support IPv6 addressing mode. If you configure a server with the IPv6 addressing mode, you cannot use the server in the IPv4 addressing mode.<br/>Default value: NO<br/>Possible values = YES, NO
"""
try :
self._ipv6address = ipv6address
except Exception as e:
raise e
@property
def comment(self) :
ur"""Any information about the server.
"""
try :
return self._comment
except Exception as e:
raise e
@comment.setter
def comment(self, comment) :
ur"""Any information about the server.
"""
try :
self._comment = comment
except Exception as e:
raise e
@property
def td(self) :
ur"""Integer value that uniquely identifies the traffic domain in which you want to configure the entity. If you do not specify an ID, the entity becomes part of the default traffic domain, which has an ID of 0.<br/>Maximum length = 4094.
"""
try :
return self._td
except Exception as e:
raise e
@td.setter
def td(self, td) :
ur"""Integer value that uniquely identifies the traffic domain in which you want to configure the entity. If you do not specify an ID, the entity becomes part of the default traffic domain, which has an ID of 0.<br/>Maximum length = 4094
"""
try :
self._td = td
except Exception as e:
raise e
@property
def domainresolvenow(self) :
ur"""Immediately send a DNS query to resolve the server's domain name.
"""
try :
return self._domainresolvenow
except Exception as e:
raise e
@domainresolvenow.setter
def domainresolvenow(self, domainresolvenow) :
ur"""Immediately send a DNS query to resolve the server's domain name.
"""
try :
self._domainresolvenow = domainresolvenow
except Exception as e:
raise e
@property
def delay(self) :
ur"""Time, in seconds, after which all the services configured on the server are disabled.
"""
try :
return self._delay
except Exception as e:
raise e
@delay.setter
def delay(self, delay) :
ur"""Time, in seconds, after which all the services configured on the server are disabled.
"""
try :
self._delay = delay
except Exception as e:
raise e
@property
def graceful(self) :
ur"""Shut down gracefully, without accepting any new connections, and disabling each service when all of its connections are closed.<br/>Default value: NO<br/>Possible values = YES, NO.
"""
try :
return self._graceful
except Exception as e:
raise e
@graceful.setter
def graceful(self, graceful) :
ur"""Shut down gracefully, without accepting any new connections, and disabling each service when all of its connections are closed.<br/>Default value: NO<br/>Possible values = YES, NO
"""
try :
self._graceful = graceful
except Exception as e:
raise e
@property
def Internal(self) :
ur"""Display names of the servers that have been created for internal use.
"""
try :
return self._Internal
except Exception as e:
raise e
@Internal.setter
def Internal(self, Internal) :
ur"""Display names of the servers that have been created for internal use.
"""
try :
self._Internal = Internal
except Exception as e:
raise e
@property
def newname(self) :
ur"""New name for the server. Must begin with an ASCII alphabetic or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at (@), equals (=), and hyphen (-) characters.<br/>Minimum length = 1.
"""
try :
return self._newname
except Exception as e:
raise e
@newname.setter
def newname(self, newname) :
ur"""New name for the server. Must begin with an ASCII alphabetic or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at (@), equals (=), and hyphen (-) characters.<br/>Minimum length = 1
"""
try :
self._newname = newname
except Exception as e:
raise e
@property
def statechangetimesec(self) :
ur"""Time when last state change happened. Seconds part.
"""
try :
return self._statechangetimesec
except Exception as e:
raise e
@property
def tickssincelaststatechange(self) :
ur"""Time in 10 millisecond ticks since the last state change.
"""
try :
return self._tickssincelaststatechange
except Exception as e:
raise e
@property
def autoscale(self) :
ur"""Auto scale option for a servicegroup.<br/>Default value: DISABLED<br/>Possible values = DISABLED, DNS, POLICY.
"""
try :
return self._autoscale
except Exception as e:
raise e
@property
def customserverid(self) :
ur"""A positive integer to identify the service. Used when the persistency type is set to Custom Server ID.<br/>Default value: "None".
"""
try :
return self._customserverid
except Exception as e:
raise e
@property
def monthreshold(self) :
ur"""Minimum sum of weights of the monitors that are bound to this service. Used to determine whether to mark a service as UP or DOWN.<br/>Minimum value = 0<br/>Maximum value = 65535.
"""
try :
return self._monthreshold
except Exception as e:
raise e
@property
def maxclient(self) :
ur"""Maximum number of simultaneous open connections for the service group.<br/>Minimum value = 0<br/>Maximum value = 4294967294.
"""
try :
return self._maxclient
except Exception as e:
raise e
@property
def maxreq(self) :
ur"""Maximum number of requests that can be sent on a persistent connection to the service group.
Note: Connection requests beyond this value are rejected.<br/>Minimum value = 0<br/>Maximum value = 65535.
"""
try :
return self._maxreq
except Exception as e:
raise e
@property
def maxbandwidth(self) :
ur"""Maximum bandwidth, in Kbps, allocated for all the services in the service group.<br/>Minimum value = 0<br/>Maximum value = 4294967287.
"""
try :
return self._maxbandwidth
except Exception as e:
raise e
@property
def usip(self) :
ur"""Use the client's IP address as the source IP address when initiating a connection to the server. When creating a service, if you do not set this parameter, the service inherits the global Use Source IP setting (available in the enable ns mode and disable ns mode CLI commands, or in the System > Settings > Configure modes > Configure Modes dialog box). However, you can override this setting after you create the service.<br/>Possible values = YES, NO.
"""
try :
return self._usip
except Exception as e:
raise e
@property
def cka(self) :
ur"""Enable client keep-alive for the service group.<br/>Possible values = YES, NO.
"""
try :
return self._cka
except Exception as e:
raise e
@property
def tcpb(self) :
ur"""Enable TCP buffering for the service group.<br/>Possible values = YES, NO.
"""
try :
return self._tcpb
except Exception as e:
raise e
@property
def cmp(self) :
ur"""Enable compression for the specified service.<br/>Possible values = YES, NO.
"""
try :
return self._cmp
except Exception as e:
raise e
@property
def clttimeout(self) :
ur"""Time, in seconds, after which to terminate an idle client connection.<br/>Minimum value = 0<br/>Maximum value = 31536000.
"""
try :
return self._clttimeout
except Exception as e:
raise e
@property
def svrtimeout(self) :
ur"""Time, in seconds, after which to terminate an idle server connection.<br/>Minimum value = 0<br/>Maximum value = 31536000.
"""
try :
return self._svrtimeout
except Exception as e:
raise e
@property
def cipheader(self) :
ur"""Name of the HTTP header whose value must be set to the IP address of the client. Used with the Client IP parameter. If client IP insertion is enabled, and the client IP header is not specified, the value of Client IP Header parameter or the value set by the set ns config command is used as client's IP header name.<br/>Minimum length = 1.
"""
try :
return self._cipheader
except Exception as e:
raise e
@property
def cip(self) :
ur"""Before forwarding a request to the service, insert an HTTP header with the client's IPv4 or IPv6 address as its value. Used if the server needs the client's IP address for security, accounting, or other purposes, and setting the Use Source IP parameter is not a viable option.<br/>Possible values = ENABLED, DISABLED.
"""
try :
return self._cip
except Exception as e:
raise e
@property
def cacheable(self) :
ur"""Use the transparent cache redirection virtual server to forward the request to the cache server.<br/>Default value: NO<br/>Possible values = YES, NO.
"""
try :
return self._cacheable
except Exception as e:
raise e
@property
def sc(self) :
ur"""State of the SureConnect feature for the service group.<br/>Default value: OFF<br/>Possible values = ON, OFF.
"""
try :
return self._sc
except Exception as e:
raise e
@property
def sp(self) :
ur"""Enable surge protection for the service group.<br/>Default value: OFF<br/>Possible values = ON, OFF.
"""
try :
return self._sp
except Exception as e:
raise e
@property
def downstateflush(self) :
ur"""Perform delayed clean-up of connections to all services in the service group.<br/>Default value: ENABLED<br/>Possible values = ENABLED, DISABLED.
"""
try :
return self._downstateflush
except Exception as e:
raise e
@property
def appflowlog(self) :
ur"""Enable logging of AppFlow information for the specified service group.<br/>Default value: ENABLED<br/>Possible values = ENABLED, DISABLED.
"""
try :
return self._appflowlog
except Exception as e:
raise e
@property
def boundtd(self) :
ur"""Integer value that uniquely identifies the traffic domain in which you want to configure the entity. If you do not specify an ID, the entity becomes part of the default traffic domain, which has an ID of 0.<br/>Minimum value = 0<br/>Maximum value = 4094.
"""
try :
return self._boundtd
except Exception as e:
raise e
	def _get_nitro_response(self, service, response) :
		ur""" converts nitro response into object and returns the object array in case of get request.
		"""
		try :
			# Deserialize the raw response into a server_response envelope.
			result = service.payload_formatter.string_to_resource(server_response, response, self.__class__.__name__)
			if(result.errorcode != 0) :
				# NOTE(review): errorcode 444 appears to mean the session is
				# no longer valid, so it is dropped to force re-login on the
				# next call — confirm against the NITRO error-code reference.
				if (result.errorcode == 444) :
					service.clear_session(self)
				if result.severity :
					# Only severity "ERROR" is fatal; other severities fall
					# through without raising.
					if (result.severity == "ERROR") :
						raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
				else :
					# Non-zero errorcode with no severity is also fatal.
					raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
			return result.server
		except Exception as e :
			raise e
def _get_object_name(self) :
ur""" Returns the value of object identifier argument
"""
try :
if self.name is not None :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def add(cls, client, resource) :
ur""" Use this API to add server.
"""
try :
if type(resource) is not list :
addresource = server()
addresource.name = resource.name
addresource.ipaddress = resource.ipaddress
addresource.domain = resource.domain
addresource.translationip = resource.translationip
addresource.translationmask = resource.translationmask
addresource.domainresolveretry = resource.domainresolveretry
addresource.state = resource.state
addresource.ipv6address = resource.ipv6address
addresource.comment = resource.comment
addresource.td = resource.td
return addresource.add_resource(client)
else :
if (resource and len(resource) > 0) :
addresources = [ server() for _ in range(len(resource))]
for i in range(len(resource)) :
addresources[i].name = resource[i].name
addresources[i].ipaddress = resource[i].ipaddress
addresources[i].domain = resource[i].domain
addresources[i].translationip = resource[i].translationip
addresources[i].translationmask = resource[i].translationmask
addresources[i].domainresolveretry = resource[i].domainresolveretry
addresources[i].state = resource[i].state
addresources[i].ipv6address = resource[i].ipv6address
addresources[i].comment = resource[i].comment
addresources[i].td = resource[i].td
result = cls.add_bulk_request(client, addresources)
return result
except Exception as e :
raise e
@classmethod
def delete(cls, client, resource) :
ur""" Use this API to delete server.
"""
try :
if type(resource) is not list :
deleteresource = server()
if type(resource) != type(deleteresource):
deleteresource.name = resource
else :
deleteresource.name = resource.name
return deleteresource.delete_resource(client)
else :
if type(resource[0]) != cls :
if (resource and len(resource) > 0) :
deleteresources = [ server() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].name = resource[i]
else :
if (resource and len(resource) > 0) :
deleteresources = [ server() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].name = resource[i].name
result = cls.delete_bulk_request(client, deleteresources)
return result
except Exception as e :
raise e
@classmethod
def update(cls, client, resource) :
ur""" Use this API to update server.
"""
try :
if type(resource) is not list :
updateresource = server()
updateresource.name = resource.name
updateresource.ipaddress = resource.ipaddress
updateresource.domainresolveretry = resource.domainresolveretry
updateresource.translationip = resource.translationip
updateresource.translationmask = resource.translationmask
updateresource.domainresolvenow = resource.domainresolvenow
updateresource.comment = resource.comment
return updateresource.update_resource(client)
else :
if (resource and len(resource) > 0) :
updateresources = [ server() for _ in range(len(resource))]
for i in range(len(resource)) :
updateresources[i].name = resource[i].name
updateresources[i].ipaddress = resource[i].ipaddress
updateresources[i].domainresolveretry = resource[i].domainresolveretry
updateresources[i].translationip = resource[i].translationip
updateresources[i].translationmask = resource[i].translationmask
updateresources[i].domainresolvenow = resource[i].domainresolvenow
updateresources[i].comment = resource[i].comment
result = cls.update_bulk_request(client, updateresources)
return result
except Exception as e :
raise e
@classmethod
def unset(cls, client, resource, args) :
ur""" Use this API to unset the properties of server resource.
Properties that need to be unset are specified in args array.
"""
try :
if type(resource) is not list :
unsetresource = server()
if type(resource) != type(unsetresource):
unsetresource.name = resource
else :
unsetresource.name = resource.name
return unsetresource.unset_resource(client, args)
else :
if type(resource[0]) != cls :
if (resource and len(resource) > 0) :
unsetresources = [ server() for _ in range(len(resource))]
for i in range(len(resource)) :
unsetresources[i].name = resource[i]
else :
if (resource and len(resource) > 0) :
unsetresources = [ server() for _ in range(len(resource))]
for i in range(len(resource)) :
unsetresources[i].name = resource[i].name
result = cls.unset_bulk_request(client, unsetresources, args)
return result
except Exception as e :
raise e
@classmethod
def enable(cls, client, resource) :
ur""" Use this API to enable server.
"""
try :
if type(resource) is not list :
enableresource = server()
if type(resource) != type(enableresource):
enableresource.name = resource
else :
enableresource.name = resource.name
return enableresource.perform_operation(client,"enable")
else :
if type(resource[0]) != cls :
if (resource and len(resource) > 0) :
enableresources = [ server() for _ in range(len(resource))]
for i in range(len(resource)) :
enableresources[i].name = resource[i]
else :
if (resource and len(resource) > 0) :
enableresources = [ server() for _ in range(len(resource))]
for i in range(len(resource)) :
enableresources[i].name = resource[i].name
result = cls.perform_operation_bulk_request(client, enableresources,"enable")
return result
except Exception as e :
raise e
@classmethod
def disable(cls, client, resource) :
ur""" Use this API to disable server.
"""
try :
if type(resource) is not list :
disableresource = server()
if type(resource) != type(disableresource):
disableresource.name = resource
else :
disableresource.name = resource.name
disableresource.delay = resource.delay
disableresource.graceful = resource.graceful
return disableresource.perform_operation(client,"disable")
else :
if type(resource[0]) != cls :
if (resource and len(resource) > 0) :
disableresources = [ server() for _ in range(len(resource))]
for i in range(len(resource)) :
disableresources[i].name = resource[i]
else :
if (resource and len(resource) > 0) :
disableresources = [ server() for _ in range(len(resource))]
for i in range(len(resource)) :
disableresources[i].name = resource[i].name
disableresources[i].delay = resource[i].delay
disableresources[i].graceful = resource[i].graceful
result = cls.perform_operation_bulk_request(client, disableresources,"disable")
return result
except Exception as e :
raise e
@classmethod
def rename(cls, client, resource, new_name) :
ur""" Use this API to rename a server resource.
"""
try :
renameresource = server()
if type(resource) == cls :
renameresource.name = resource.name
else :
renameresource.name = resource
return renameresource.rename_resource(client,new_name)
except Exception as e :
raise e
	@classmethod
	def get(cls, client, name="", option_="") :
		ur""" Use this API to fetch all the server resources that are configured on netscaler.
		"""
		try :
			if not name :
				# No name: fetch every configured server.
				obj = server()
				response = obj.get_resources(client, option_)
			else :
				if type(name) != cls :
					if type(name) is not list :
						# Single name string: fetch that one server.
						obj = server()
						obj.name = name
						response = obj.get_resource(client, option_)
					else :
						# List of names: fetch each one with its own request.
						if name and len(name) > 0 :
							response = [server() for _ in range(len(name))]
							obj = [server() for _ in range(len(name))]
							for i in range(len(name)) :
								obj[i] = server()
								obj[i].name = name[i]
								response[i] = obj[i].get_resource(client, option_)
			# NOTE(review): if `name` is a server instance (type(name) == cls)
			# no branch assigns `response`, so this raises NameError — confirm
			# whether that input is ever expected.
			return response
		except Exception as e :
			raise e
@classmethod
def get_args(cls, client, args) :
ur""" Use this API to fetch all the server resources that are configured on netscaler.
# This uses server_args which is a way to provide additional arguments while fetching the resources.
"""
try :
obj = server()
option_ = options()
option_.args = nitro_util.object_to_string_withoutquotes(args)
response = obj.get_resources(client, option_)
return response
except Exception as e :
raise e
@classmethod
def get_filtered(cls, client, filter_) :
ur""" Use this API to fetch filtered set of server resources.
filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = server()
option_ = options()
option_.filter = filter_
response = obj.getfiltered(client, option_)
return response
except Exception as e :
raise e
@classmethod
def count(cls, client) :
ur""" Use this API to count the server resources configured on NetScaler.
"""
try :
obj = server()
option_ = options()
option_.count = True
response = obj.get_resources(client, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
@classmethod
def count_filtered(cls, client, filter_) :
ur""" Use this API to count filtered the set of server resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = server()
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(client, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
	# Constant namespaces: the allowed string values for the corresponding
	# server attributes (e.g. server.Sp.ON for the 'sp' field).
	class Sp:
		ON = "ON"
		OFF = "OFF"
	class Sc:
		ON = "ON"
		OFF = "OFF"
	class State:
		ENABLED = "ENABLED"
		DISABLED = "DISABLED"
	class Downstateflush:
		ENABLED = "ENABLED"
		DISABLED = "DISABLED"
	class Ipv6address:
		YES = "YES"
		NO = "NO"
	class Usip:
		YES = "YES"
		NO = "NO"
	class Cacheable:
		YES = "YES"
		NO = "NO"
	class Autoscale:
		DISABLED = "DISABLED"
		DNS = "DNS"
		POLICY = "POLICY"
	class Tcpb:
		YES = "YES"
		NO = "NO"
	class Cip:
		ENABLED = "ENABLED"
		DISABLED = "DISABLED"
	class Cka:
		YES = "YES"
		NO = "NO"
	class Appflowlog:
		ENABLED = "ENABLED"
		DISABLED = "DISABLED"
	class Cmp:
		YES = "YES"
		NO = "NO"
	class Graceful:
		YES = "YES"
		NO = "NO"
class server_response(base_response) :
	""" Response envelope for server API calls; the payload formatter
	fills `server` with the deserialized resources. """
	def __init__(self, length=1) :
		self.errorcode = 0
		self.message = ""
		self.severity = ""
		self.sessionid = ""
		# Pre-allocate `length` empty server slots for the formatter to fill.
		# (Removed the original's dead `self.server = []`, which was
		# immediately overwritten by this assignment.)
		self.server = [server() for _ in range(length)]
| {
"content_hash": "69adc2d9331d57e5812929e8b8dfd5e2",
"timestamp": "",
"source": "github",
"line_count": 906,
"max_line_length": 460,
"avg_line_length": 30.221854304635762,
"alnum_prop": 0.6858040246886528,
"repo_name": "atopuzov/nitro-python",
"id": "dd792e40ed039e7b54c1def20cb300aa2c8068c8",
"size": "27995",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "nssrc/com/citrix/netscaler/nitro/resource/config/basic/server.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "10881939"
},
{
"name": "Shell",
"bytes": "513"
}
],
"symlink_target": ""
} |
import unittest
from got_1 import contains_palindrome
class TestGOTOne(unittest.TestCase):
    """Unit tests for got_1.contains_palindrome."""
    def test_given(self):
        """Check the sample cases supplied with the problem statement."""
        self.assertTrue(contains_palindrome('aaabbbb'))
        self.assertFalse(contains_palindrome('cdefghmnopqrstuvw'))
        self.assertTrue(contains_palindrome('cdcdcdcdeeeef'))
if __name__ == '__main__':
    # Run the test suite when executed directly.
    unittest.main()
"content_hash": "7c07e9fd2fbc881087dbdc7751108f61",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 66,
"avg_line_length": 27.076923076923077,
"alnum_prop": 0.7073863636363636,
"repo_name": "Goyatuzo/Challenges",
"id": "65c1a4054797ed69977211b1f41d21fd0b095140",
"size": "352",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "HackerRank/Algorithms/Strings/Game of Thrones - 1/test_got_1.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "5153"
},
{
"name": "C++",
"bytes": "13232"
},
{
"name": "OCaml",
"bytes": "109"
},
{
"name": "Python",
"bytes": "34880"
},
{
"name": "TSQL",
"bytes": "671"
},
{
"name": "TypeScript",
"bytes": "17962"
}
],
"symlink_target": ""
} |
"""Support for IKEA Tradfri lights."""
from __future__ import annotations
from collections.abc import Callable
from typing import Any, cast
from pytradfri.command import Command
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_HS_COLOR,
ATTR_TRANSITION,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_COLOR_TEMP,
LightEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
import homeassistant.util.color as color_util
from .base_class import TradfriBaseClass, TradfriBaseDevice
from .const import (
ATTR_DIMMER,
ATTR_HUE,
ATTR_SAT,
ATTR_TRANSITION_TIME,
CONF_GATEWAY_ID,
CONF_IMPORT_GROUPS,
DEVICES,
DOMAIN,
GROUPS,
KEY_API,
SUPPORTED_GROUP_FEATURES,
SUPPORTED_LIGHT_FEATURES,
)
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Load Tradfri lights based on a config entry."""
    gateway_id = config_entry.data[CONF_GATEWAY_ID]
    tradfri_data = hass.data[DOMAIN][config_entry.entry_id]
    api = tradfri_data[KEY_API]

    # Register one light entity per device that exposes a light control.
    bulbs = [dev for dev in tradfri_data[DEVICES] if dev.has_light_control]
    if bulbs:
        async_add_entities(
            TradfriLight(bulb, api, gateway_id) for bulb in bulbs
        )

    # Group entities are only imported when the config entry opted in.
    if config_entry.data[CONF_IMPORT_GROUPS]:
        groups = tradfri_data[GROUPS]
        if groups:
            async_add_entities(
                TradfriGroup(group, api, gateway_id) for group in groups
            )
class TradfriGroup(TradfriBaseClass, LightEntity):
    """The platform class for light groups required by hass."""

    _attr_supported_features = SUPPORTED_GROUP_FEATURES

    def __init__(
        self,
        device: Command,
        api: Callable[[Command | list[Command]], Any],
        gateway_id: str,
    ) -> None:
        """Initialize a Group."""
        super().__init__(device, api, gateway_id)
        self._attr_unique_id = f"group-{gateway_id}-{device.id}"
        self._attr_should_poll = True
        self._refresh(device)

    async def async_update(self) -> None:
        """Fetch new state data for the group.

        This method is required for groups to update properly.
        """
        await self._api(self._device.update())

    @property
    def is_on(self) -> bool:
        """Return true if group lights are on."""
        return cast(bool, self._device.state)

    @property
    def brightness(self) -> int | None:
        """Return the brightness of the group lights."""
        return cast(int, self._device.dimmer)

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Instruct the group lights to turn off."""
        await self._api(self._device.set_state(0))

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Instruct the group lights to turn on, or dim."""
        extra: dict[str, int] = {}
        if ATTR_TRANSITION in kwargs:
            # HA transitions are seconds; the gateway expects tenths.
            extra["transition_time"] = int(kwargs[ATTR_TRANSITION]) * 10

        if ATTR_BRIGHTNESS not in kwargs:
            await self._api(self._device.set_state(1))
            return

        # The gateway dimmer range tops out at 254.
        brightness = kwargs[ATTR_BRIGHTNESS]
        if brightness == 255:
            brightness = 254
        await self._api(self._device.set_dimmer(brightness, **extra))
class TradfriLight(TradfriBaseDevice, LightEntity):
    """The platform class required by Home Assistant."""

    def __init__(
        self,
        device: Command,
        api: Callable[[Command | list[Command]], Any],
        gateway_id: str,
    ) -> None:
        """Initialize a Light."""
        super().__init__(device, api, gateway_id)
        self._attr_unique_id = f"light-{gateway_id}-{device.id}"
        self._hs_color = None

        # Calculate supported features from the device's capabilities.
        _features = SUPPORTED_LIGHT_FEATURES
        if device.light_control.can_set_dimmer:
            _features |= SUPPORT_BRIGHTNESS
        if device.light_control.can_set_color:
            _features |= SUPPORT_COLOR | SUPPORT_COLOR_TEMP
        if device.light_control.can_set_temp:
            _features |= SUPPORT_COLOR_TEMP
        self._attr_supported_features = _features

        # _refresh() populates _device_control / _device_data below.
        self._refresh(device)
        if self._device_control:
            self._attr_min_mireds = self._device_control.min_mireds
            self._attr_max_mireds = self._device_control.max_mireds

    @property
    def is_on(self) -> bool:
        """Return true if light is on."""
        if not self._device_data:
            return False
        return cast(bool, self._device_data.state)

    @property
    def brightness(self) -> int | None:
        """Return the brightness of the light."""
        if not self._device_data:
            return None
        return cast(int, self._device_data.dimmer)

    @property
    def color_temp(self) -> int | None:
        """Return the color temp value in mireds."""
        if not self._device_data:
            return None
        return cast(int, self._device_data.color_temp)

    @property
    def hs_color(self) -> tuple[float, float] | None:
        """HS color of the light."""
        if not self._device_control or not self._device_data:
            return None
        if self._device_control.can_set_color:
            hsbxy = self._device_data.hsb_xy_color
            # Rescale the gateway's native ranges (0..max_hue,
            # 0..max_saturation) into HA's 0..360 / 0..100.
            hue = hsbxy[0] / (self._device_control.max_hue / 360)
            sat = hsbxy[1] / (self._device_control.max_saturation / 100)
            # NOTE(review): hue/sat are plain numbers at this point, so
            # this None-check looks defensive rather than reachable.
            if hue is not None and sat is not None:
                return hue, sat
        return None

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Instruct the light to turn off."""
        # This allows transitioning to off, but resets the brightness
        # to 1 for the next set_state(True) command
        if not self._device_control:
            return
        transition_time = None
        if ATTR_TRANSITION in kwargs:
            # HA transitions are in seconds; the gateway expects tenths.
            transition_time = int(kwargs[ATTR_TRANSITION]) * 10

            dimmer_data = {ATTR_DIMMER: 0, ATTR_TRANSITION_TIME: transition_time}
            await self._api(self._device_control.set_dimmer(**dimmer_data))
        else:
            await self._api(self._device_control.set_state(False))

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Instruct the light to turn on."""
        if not self._device_control:
            return
        # transition_time is "consumed" by the first command that carries
        # it and then cleared so it is not applied twice.
        transition_time = None
        if ATTR_TRANSITION in kwargs:
            transition_time = int(kwargs[ATTR_TRANSITION]) * 10

        dimmer_command = None
        if ATTR_BRIGHTNESS in kwargs:
            brightness = kwargs[ATTR_BRIGHTNESS]
            # Gateway dimmer values max out at 254.
            brightness = min(brightness, 254)
            dimmer_data = {
                ATTR_DIMMER: brightness,
                ATTR_TRANSITION_TIME: transition_time,
            }
            dimmer_command = self._device_control.set_dimmer(**dimmer_data)
            transition_time = None
        else:
            dimmer_command = self._device_control.set_state(True)

        color_command = None
        if ATTR_HS_COLOR in kwargs and self._device_control.can_set_color:
            # Rescale HA hue/sat back into the gateway's native ranges.
            hue = int(kwargs[ATTR_HS_COLOR][0] * (self._device_control.max_hue / 360))
            sat = int(
                kwargs[ATTR_HS_COLOR][1] * (self._device_control.max_saturation / 100)
            )
            color_data = {
                ATTR_HUE: hue,
                ATTR_SAT: sat,
                ATTR_TRANSITION_TIME: transition_time,
            }
            color_command = self._device_control.set_hsb(**color_data)
            transition_time = None

        temp_command = None
        if ATTR_COLOR_TEMP in kwargs and (
            self._device_control.can_set_temp or self._device_control.can_set_color
        ):
            temp = kwargs[ATTR_COLOR_TEMP]
            # White Spectrum bulb
            if self._device_control.can_set_temp:
                # Clamp to the device's supported mired range.
                if temp > self.max_mireds:
                    temp = self.max_mireds
                elif temp < self.min_mireds:
                    temp = self.min_mireds

                temp_data = {
                    ATTR_COLOR_TEMP: temp,
                    ATTR_TRANSITION_TIME: transition_time,
                }
                temp_command = self._device_control.set_color_temp(**temp_data)
                transition_time = None
            # Color bulb (CWS)
            # color_temp needs to be set with hue/saturation
            elif self._device_control.can_set_color:
                temp_k = color_util.color_temperature_mired_to_kelvin(temp)
                hs_color = color_util.color_temperature_to_hs(temp_k)
                hue = int(hs_color[0] * (self._device_control.max_hue / 360))
                sat = int(hs_color[1] * (self._device_control.max_saturation / 100))
                color_data = {
                    ATTR_HUE: hue,
                    ATTR_SAT: sat,
                    ATTR_TRANSITION_TIME: transition_time,
                }
                color_command = self._device_control.set_hsb(**color_data)
                transition_time = None

        # HSB can always be set, but color temp + brightness is bulb dependent
        # NOTE(review): dimmer_command is always non-None here, and
        # color_command may be None; this relies on pytradfri Command
        # addition tolerating a None right operand -- confirm upstream.
        if (command := dimmer_command) is not None:
            command += color_command
        else:
            command = color_command

        if self._device_control.can_combine_commands:
            await self._api(command + temp_command)
        else:
            if temp_command is not None:
                await self._api(temp_command)
            if command is not None:
                await self._api(command)

    def _refresh(self, device: Command) -> None:
        """Refresh the light data."""
        super()._refresh(device)

        # Caching of LightControl and light object
        self._device_control = device.light_control
        self._device_data = device.light_control.lights[0]
| {
"content_hash": "712a9896038d8514807ddc3f20f9c7a1",
"timestamp": "",
"source": "github",
"line_count": 279,
"max_line_length": 86,
"avg_line_length": 35.516129032258064,
"alnum_prop": 0.5898677969522657,
"repo_name": "aronsky/home-assistant",
"id": "53309e144ffee30eff9118f4a0a5c057c417c083",
"size": "9909",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "homeassistant/components/tradfri/light.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2443"
},
{
"name": "Python",
"bytes": "38448521"
},
{
"name": "Shell",
"bytes": "4910"
}
],
"symlink_target": ""
} |
from devilry.restful import restful_modelapi, ModelRestfulView, RestfulManager
from devilry.apps.extjshelpers import extjs_restful_modelapi
from devilry.apps.administrator.restful import (RestfulSimplifiedAssignment,
RestfulSimplifiedDelivery)
from ..simplified.administrator import (SimplifiedConfig,
SimplifiedFeedbackDraft)
from examiner import RestfulSimplifiedFeedbackDraftCommon
__all__ = ('RestfulSimplifiedConfig', 'RestfulSimplifiedFeedbackDraft')
# Registry collecting the restful views exposed by this module.
administrator_restful = RestfulManager()
@administrator_restful.register
@extjs_restful_modelapi
@restful_modelapi
class RestfulSimplifiedConfig(ModelRestfulView):
    """Restful administrator view backed by SimplifiedConfig."""

    class Meta:
        simplified = SimplifiedConfig
        # Resolve the `assignment` foreign key through its own restful view.
        foreignkey_fields = {'assignment': RestfulSimplifiedAssignment}
@administrator_restful.register
@extjs_restful_modelapi
@restful_modelapi
class RestfulSimplifiedFeedbackDraft(RestfulSimplifiedFeedbackDraftCommon, ModelRestfulView):
    """Restful administrator view backed by SimplifiedFeedbackDraft.

    Shared draft behaviour comes from RestfulSimplifiedFeedbackDraftCommon.
    """

    class Meta:
        simplified = SimplifiedFeedbackDraft
        # Resolve the `delivery` foreign key through its own restful view.
        foreignkey_fields = {'delivery': RestfulSimplifiedDelivery}
| {
"content_hash": "4b3ac12b3994d4f66f558e3fcd602cb9",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 93,
"avg_line_length": 40.607142857142854,
"alnum_prop": 0.762532981530343,
"repo_name": "vegarang/devilry-django",
"id": "88e4280b21b36b38f419e54aaab3d822b65e1066",
"size": "1137",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "devilry/apps/gradeeditors/restful/administrator.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "697906"
},
{
"name": "Python",
"bytes": "931589"
}
],
"symlink_target": ""
} |
import inspect
from typing import Any, MutableMapping
from ._kernel.types import JSClass, Referenceable
# Registries mapping a jsii FQN string to the Python construct registered
# for it; populated by the register_* functions below.
_types: MutableMapping[str, Any] = {}
_data_types: MutableMapping[str, Any] = {}
_enums: MutableMapping[str, Any] = {}
_interfaces: MutableMapping[str, Any] = {}
def register_type(klass: JSClass):
    """Register a jsii class under its FQN (__jsii_type__)."""
    _types[klass.__jsii_type__] = klass
def register_data_type(data_type: Any):
    """Register a jsii data (struct) type under its FQN."""
    _data_types[data_type.__jsii_type__] = data_type
def register_enum(enum_type: Any):
    """Register a jsii enum type under its FQN."""
    _enums[enum_type.__jsii_type__] = enum_type
def register_interface(iface: Any):
    """Register a jsii interface type under its FQN."""
    _interfaces[iface.__jsii_type__] = iface
class _FakeReference:
def __init__(self, ref: str) -> None:
self.__jsii_ref__ = ref
class _ReferenceMap:
    """Maps jsii reference ids ("fqn@id") to live Python objects,
    materializing new wrappers for references seen for the first time."""

    def __init__(self, types):
        # We are using a real dictionary here instead of a WeakValueDictionary because
        # the nature of the JSII is such that we can never free the memory of JSII
        # objects ever, because we have no idea how many references exist on the *other*
        # side.
        self._refs = {}
        self._types = types

    def register(self, inst: Referenceable):
        # Index the instance by its raw reference id for later resolution.
        self._refs[inst.__jsii_ref__.ref] = inst

    def resolve(self, kernel, ref):
        # First we need to check our reference map to see if we have any instance that
        # already matches this reference.
        try:
            return self._refs[ref.ref]
        except KeyError:
            pass

        # If we got to this point, then we didn't have a reference for this, in that
        # case we want to create a new instance, but we need to create it in such a way
        # that we don't try to recreate the type inside of the JSII interface.
        # NOTE(review): freshly materialized instances are not cached back
        # into self._refs here.
        class_fqn = ref.ref.rsplit("@", 1)[0]

        if class_fqn in _types:
            klass = _types[class_fqn]

            # If this class is an abstract class, then we'll use the generated proxy
            # class instead of the abstract class to handle return values for this type.
            if inspect.isabstract(klass):
                klass = klass.__jsii_proxy_class__()

            # Create our instance, bypassing __init__ by directly calling __new__, and
            # then assign our reference to __jsii_ref__
            inst = klass.__new__(klass)
            inst.__jsii_ref__ = ref
        elif class_fqn in _data_types:
            # Data types have been serialized by-reference (see aws/jsii#400).
            # We retrieve all of its properties right now and then construct a value
            # object from it. This will be slow :(.

            # Ugly delayed import here because I can't solve the cyclic
            # package dependency right now :(.
            from ._runtime import python_jsii_mapping

            data_type = _data_types[class_fqn]
            remote_struct = _FakeReference(ref)

            python_props = {python_name: kernel.get(remote_struct, jsii_name)
                            for python_name, jsii_name in python_jsii_mapping(data_type).items()}

            return data_type(**python_props)
        elif class_fqn in _enums:
            inst = _enums[class_fqn]
        elif class_fqn in _interfaces:
            # Get our proxy class by finding our interface, then asking it to give us
            # the proxy class.
            iface = _interfaces[class_fqn]
            klass = iface.__jsii_proxy_class__()

            # Create our instance, bypassing __init__ by directly calling __new__, and
            # then assign our reference to __jsii_ref__
            inst = klass.__new__(klass)
            inst.__jsii_ref__ = ref
        else:
            raise ValueError(f"Unknown type: {class_fqn}")

        return inst

    def resolve_id(self, id):
        # Direct lookup by raw reference id; raises KeyError if unknown.
        return self._refs[id]
# Module-level singleton reference map, plus the function aliases that the
# rest of the runtime imports.
_refs = _ReferenceMap(_types)

register_reference = _refs.register
resolve_reference = _refs.resolve
resolve_id = _refs.resolve_id
| {
"content_hash": "fc1642bf412be173748b52de23c2efcc",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 89,
"avg_line_length": 34.017699115044245,
"alnum_prop": 0.6061394380853278,
"repo_name": "randyzingle/tools",
"id": "e183cc8d49d48f1e2dc7233b40adfea824d13afb",
"size": "3927",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kub/services/archive/cdk/python/sample-app/.env/lib/python3.6/site-packages/jsii/_reference_map.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "773"
},
{
"name": "Go",
"bytes": "118058"
},
{
"name": "Groovy",
"bytes": "1555"
},
{
"name": "HTML",
"bytes": "4288"
},
{
"name": "Java",
"bytes": "818301"
},
{
"name": "JavaScript",
"bytes": "605048"
},
{
"name": "Makefile",
"bytes": "5063"
},
{
"name": "Python",
"bytes": "10438760"
},
{
"name": "Scala",
"bytes": "37406"
},
{
"name": "Shell",
"bytes": "8611"
}
],
"symlink_target": ""
} |
import sys
import os
import pdb
import girder_client
import json
import urllib2
import numpy as np
import scipy
import cv2
import math
def ensure_path(dir_path):
    """Recursively create dir_path and any missing ancestor directories."""
    parent, leaf = os.path.split(dir_path)
    if leaf == '':
        # Reached the root (or a trailing separator): nothing left to make.
        return
    ensure_path(parent)
    ensure_dir(dir_path)
def ensure_dir(dir_path):
    """Create dir_path (including parents) unless it already exists."""
    if os.path.exists(dir_path):
        return
    os.makedirs(dir_path)
def dump_item(gc, item_obj, out_path):
    """Download a girder item's first file and save it as a PNG.

    out_path is the directory on disk to save the images; it must be
    created before this method is called.  item_obj is an object returned
    by the girder api get/item call.
    """
    item_id = item_obj['_id']
    # Derive a PNG file name from the last '_'-separated piece of the
    # item name.
    item_name = item_obj['name']
    item_name = item_name.split('_')[-1]
    item_root_name = item_name.split('.')[0]
    item_name = '%s.png' % item_root_name
    file_path_root = os.path.join(out_path, item_name)
    # Look up the item's first file and build an authenticated download URL.
    file_resp = gc.get("item/%s/files" % item_id)
    file_id = file_resp[0]['_id']
    tile_url = gc.urlBase + "file/%s/download" % file_id
    req = urllib2.Request(tile_url)
    req.add_header('Girder-Token', gc.token)
    resp = urllib2.urlopen(req)
    image = np.asarray(bytearray(resp.read()), dtype="uint8")
    # Decode with flag 2 (cv2.IMREAD_ANYDEPTH) to preserve bit depth.
    # Fixed: removed a leftover pdb.set_trace() debugger breakpoint that
    # halted every download.
    image = cv2.imdecode(image, 2)
    print(item_name)
    print(image.shape)
    # Save the decoded image as a PNG.
    cv2.imwrite(file_path_root, image)
def dump_folder(gc, folder_obj, out_path):
    """Download every item of a girder folder into out_path.

    The out_path directory must already exist.
    """
    folder_id = folder_obj['_id']
    items = gc.get("item?folderId=%s&limit=5000" % folder_id)
    for item in items:
        dump_item(gc, item, out_path)
def print_usage():
    """Print command-line usage to stdout."""
    for line in ("usage:", "python %s serverName girder_id" % sys.argv[0]):
        print(line)
if __name__ == '__main__':
    # Per-server girder API keys and endpoints.
    # NOTE(review): the API keys are blank here -- presumably they must be
    # filled in (or injected) before authentication can succeed.
    keys = {'lemon':'', \
            'wsi2': ''}
    urls = {'lemon':'http://lemon/api/v1', \
            'wsi2': 'http://wsi2.slide-atlas.org:8080/api/v1'}

    # Expect exactly: script serverName girder_id
    if len(sys.argv) != 3:
        print_usage()
        exit()

    server_name = sys.argv[1]
    if not server_name in keys:
        print("Unknown server %s"%server_name)
        exit()

    gc = girder_client.GirderClient(apiUrl=urls[server_name])
    gc.authenticate('law12019', apiKey=keys[server_name])

    # can be a folder id or an item id.
    girder_id = sys.argv[2]
    #out_path = sys.argv[3]
    # Images are written into the current working directory.
    out_path = os.path.realpath('./')
    ensure_dir(out_path)

    try:
        # Get the folder object
        folder_obj = gc.get("folder/%s"%girder_id)
        # now save to disk
        dump_folder(gc, folder_obj, out_path)
    except Exception as inst:
        # Fall-through when the id does not name a folder; the item branch
        # below is commented out, so non-folder ids are effectively ignored.
        print("not a folder")
        #try:
        # Get the item object
        #item_obj = gc.get("item/%s"%girder_id)
        # now save to disk
        #dump_item(gc, item_obj, out_path)
        #except Exception as inst:
        #print("not an item")
| {
"content_hash": "01044e657784834d67b7618f7f6083ba",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 65,
"avg_line_length": 23.045801526717558,
"alnum_prop": 0.60947333554157,
"repo_name": "law12019/deep_learning",
"id": "2bbb9d2549c1d95865f912e94fc5503865969083",
"size": "3161",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/download_salsa_images.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "49"
},
{
"name": "JavaScript",
"bytes": "177639"
},
{
"name": "MATLAB",
"bytes": "2585"
},
{
"name": "Python",
"bytes": "106848"
}
],
"symlink_target": ""
} |
from datetime import datetime
import dateutil.parser
import tornado.ioloop
import tornado.web
from edb_converter import converter
from starcharts import sky_area
from starcharts.coord_calc import CoordCalc
from starcharts.diagram import Diagram
from starcharts.input_file import InputFile
class MainHandler(tornado.web.RequestHandler):
    """Renders a star-chart SVG for the requested time, place and style."""

    def get(self):
        """Handle GET /: build the chart and write it out as SVG."""
        (
            date,
            lat,
            lon,
            elevation,
            mag_min,
            mag_max,
            skyculture,
            color,
            frame_enabled,
            frame_width,
            frame_color,
        ) = self.get_query_strings()

        # Build a star catalog for the observer's time/location, then
        # select stars within the requested magnitude window.
        new_catalog = converter.convert(date, lat, lon, elevation)
        input_file = InputFile(new_catalog)
        area = sky_area.SKY_AREA_CUSTOM
        area.mag_min = mag_min
        area.mag_max = mag_max
        star_data_list = input_file.get_stars(area)

        # Project star coordinates onto the diagram (size parameter 500).
        cc = CoordCalc(star_data_list, area, 500)
        cc.process()

        d = Diagram(
            area,
            star_data_list,
            skyculture,
            color,
            frame_enabled,
            frame_width,
            frame_color,
        )
        # Add every computed curve; list() forces the lazy map.
        list(map(d.add_curve, cc.calc_curves()))
        svg_file = d.get_svg()

        self.set_header("Content-Type", "image/svg+xml")
        self.write("".join(svg_file))

    def get_query_strings(self):
        """Parse all supported query parameters, applying defaults.

        The date accepts anything dateutil can parse and is normalized
        to "%Y/%m/%d %H:%M:%S"; missing values fall back to the current
        UTC time and the fixed default coordinates/styling below.
        """
        today_utc = datetime.utcnow().isoformat()
        date_argument = str(self.get_argument("date", today_utc, strip=True))
        parsed_date = dateutil.parser.parse(date_argument)
        date = parsed_date.strftime("%Y/%m/%d %H:%M:%S")

        lat = str(self.get_argument("lat", "41.015137", strip=True))
        lon = str(self.get_argument("lon", "28.979530", strip=True))
        elevation = int(self.get_argument("elevation", 0, strip=True))
        mag_min = int(self.get_argument("mag_min", 4, strip=True))
        mag_max = int(self.get_argument("mag_max", 0, strip=True))
        skyculture = str(self.get_argument("skyculture", "western", strip=True))
        color = str(self.get_argument("color", "black", strip=True))
        # Booleans arrive as the literal string "True".
        frame_enabled = self.get_argument("frame_enabled", "True", strip=True) == "True"
        frame_width = int(self.get_argument("frame_width", 4, strip=True))
        frame_color = str(self.get_argument("frame_color", "black", strip=True))

        return (
            date,
            lat,
            lon,
            elevation,
            mag_min,
            mag_max,
            skyculture,
            color,
            frame_enabled,
            frame_width,
            frame_color,
        )
def make_app():
    """Build the Tornado application serving the star chart at '/'."""
    routes = [
        (r"/", MainHandler),
    ]
    return tornado.web.Application(routes)
if __name__ == "__main__":
    # Serve on port 8888 until interrupted.
    app = make_app()
    app.listen(8888)
    tornado.ioloop.IOLoop.current().start()
| {
"content_hash": "7ea5e388524f4e7a6e48e26a8728fb3a",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 88,
"avg_line_length": 28.949494949494948,
"alnum_prop": 0.5565247732030705,
"repo_name": "ogun/starmap",
"id": "8272ff4400fb95fd7659b76947269ccb02434fbc",
"size": "2866",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "run.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "16824"
}
],
"symlink_target": ""
} |
'''
Copyright (c) 2017, 2018 Chongzhi Zang, Zhenjia Wang <zhenjia@virginia.edu>
This code is free software; you can redistribute it and/or modify it
under the terms of the BSD License.
@status: release candidate
@version: $Id$
@author: Chongzhi Zang, Zhenjia Wang
@contact: zhenjia@virginia.edu
'''
import sys,io,re
import gzip,struct
from struct import unpack
# Pre-compiled matchers for the strand column (BED column 6).
plus = re.compile('\+')
minus = re.compile('\-')

# Chromosome names retained for each supported genome assembly; tags on
# any other contig are ignored.
hg38_chroms = ['chr1','chr2','chr3','chr4','chr5','chr6','chr7','chr8','chr9',
               'chr10','chr11','chr12','chr13','chr14','chr15','chr16','chr17',
               'chr18','chr19','chr20','chr21','chr22','chrX','chrY', 'chrM'];
mm10_chroms = ['chr1','chr2','chr3','chr4','chr5','chr6','chr7','chr8','chr9',
               'chr10','chr11','chr12','chr13','chr14','chr15','chr16','chr17',
               'chr18','chr19','chrX','chrY', 'chrM']
def add_region(chrom, outer, inner, regions):
    """Record a unique (outer, inner) tag coordinate pair under chrom.

    regions maps chrom -> {inner position -> set of outer positions};
    the set keeps reads unique.  Returns the (mutated) mapping.
    """
    per_chrom = regions.setdefault(chrom, {})
    per_chrom.setdefault(inner, set()).add(outer)
    return regions
def get_bed_regions(bedfile,chroms):
    '''
    Get tag regions from BED files.

    Returns (regions1, regions2): plus-strand and minus-strand mappings of
    chrom -> {inner position -> set(outer positions)} (see add_region).
    Exits the process with status 1 when no usable tags are found.
    '''
    infile = open(bedfile,'r')
    #
    try:
        line = infile.readline()
    except:
        # NOTE(review): bare except -- any read error is reported as a
        # BED-format problem.
        sys.stderr.write('Not a valid BED format of file: {} ! \n\n'.format(bedfile))
        sys.exit(1)
    regions1 = {} # plus strand
    regions2 = {} # minus strand
    while line:
        line = line.strip().split()
        # NOTE(review): a blank line mid-file would make line[0] raise
        # IndexError -- this assumes no blank lines before EOF.
        if line[0] in chroms and len(line)>=6:
            chrom = line[0]
            start = int(line[1])
            end = int(line[2])
            strand = line[5]
            #tag = BED(line[0],line[1],line[2],line[3],line[4],line[5])
            if plus.match(strand):
                #all start(outer) positions with same end(inner) are in a same set/list in plus strand
                regions1 = add_region(chrom,start,end,regions1)
            elif minus.match(strand):
                #all end(outer) positions with same start(inner) are in a same set/list in minus strand
                regions2 = add_region(chrom,end,start,regions2)
            else: #if the line dose not match the bed format
                pass
        line = infile.readline()
    infile.close()
    # check if tag info is read into regions
    if len(regions1)+len(regions2) == 0:
        sys.stderr.write('File <{}> is not of a valid BED format! \n'.format(bedfile))
        sys.exit(1)
    return regions1, regions2
def bam_binary_parse(data):
    '''
    Parse one raw BAM alignment record.
    Refer to : https://github.com/taoliu/MACS/blob/master/MACS2/IO/Parser.pyx
    Returns (ref_id, position, tag_size, strand) where strand is 0 for
    plus and 1 for minus; returns (-1, -1, -1, -1) for records that must
    be skipped.
    The bitwise flag is made like this:
    dec    meaning
    ---    -------
    1    paired read
    2    proper pair
    4    query unmapped
    8    mate unmapped
    16    strand of the query (1 -> reverse)
    32    strand of the mate
    64    first read in pair
    128    second read in pair
    256    alignment is not primary
    512    does not pass quality check
    1024    PCR or optical duplicate
    2048    supplementary alignment
    '''
    skipped = (-1, -1, -1, -1)
    if not data:
        return skipped
    ref_id = unpack('<i', data[0:4])[0]
    pos = unpack('<i', data[4:8])[0]
    tag_size = unpack('<i', data[16:20])[0]
    n_cigar_op, bwflag = unpack('<HH', data[12:16])
    # Drop unmapped / QC-fail / secondary / supplementary alignments.
    if bwflag & (4 | 512 | 256 | 2048):
        return skipped
    if bwflag & 1:
        # Paired read: keep only a proper pair with a mapped mate, and
        # only the first read of the pair (patch from Benjamin Schiller,
        # https://github.com/benjschiller).  The surviving left mate is
        # then treated exactly like a single-end read.
        if not bwflag & 2 or bwflag & 8 or bwflag & 128:
            return skipped
    if not bwflag & 16:
        # Forward strand: fields can be used as-is.
        return (ref_id, pos, tag_size, 0)
    # Reverse strand: walk the CIGAR string to shift the start to the
    # 3' end.  Note tag_size ends up as the LAST reference-consuming op
    # length while pos accumulates all of them (original behaviour).
    l_read_name = unpack('<B', data[8:9])[0]
    cigar_off = 32 + l_read_name
    cigar_ops = unpack('<%dI' % n_cigar_op,
                       data[cigar_off:cigar_off + n_cigar_op * 4])
    for op in cigar_ops:
        if op & 15 in (0, 2, 3, 7, 8):  # CIGAR op M/D/N/=/X
            pos += op >> 4
            tag_size = op >> 4
    return (ref_id, pos, tag_size, 1)
def get_bam_regions(bamfile,chroms):
    '''
    Get tag regions from BAM files.
    Refer to : https://github.com/taoliu/MACS/blob/master/MACS2/IO/Parser.pyx
    File is gzip-compatible and binary.

    Returns (regions1, regions2): plus-strand and minus-strand mappings of
    chrom -> {inner position -> set(outer positions)} (see add_region).
    Exits the process with status 1 when no usable tags are found.
    '''
    gzipped = True
    #try gzip first
    gfile = gzip.open(bamfile)
    try:
        gfile.read(10)
    except IOError:
        gzipped = False
    gfile.close()
    if gzipped:
        # open with gzip.open, then wrap it with BufferedReader
        infile = io.BufferedReader(gzip.open(bamfile,mode='rb'))
    else:
        infile = io.open(bamfile,mode='rb')
    # Bind the bound methods to locals for speed in the record loop below.
    fseek = infile.seek
    fread = infile.read
    ftell = infile.tell
    # check the first 3 bytes of BAM file
    if fread(3).decode('utf-8') == 'BAM':
        fseek(0)
    else:
        sys.stderr.write('Not a valid BAM format of file: {} ! \n\n'.format(bamfile))
        sys.exit(1)
    # move to pos 4: skip magic, read the SAM-header length, skip the header
    fseek(4)
    header_len = unpack('<i',fread(4))[0]
    fseek(header_len + ftell())
    references = []
    # get the number of chromosome
    nc = unpack('<i',fread(4))[0]
    for x in range(nc):
        #read each chromosome name (NUL-terminated, hence [:-1])
        nlength = unpack('<i',fread(4))[0]
        refname = fread(nlength)[:-1].decode('utf-8')
        #print(x,refname)
        references.append(refname)
        # a: jump over chromosome size?
        fseek(ftell()+4)
        # b: or read the chromosome size ? of same fun of forward 4 pos?
        #len_refname = unpack('<i',fread(4))[0]
        #print(ftell(),len_refname)
    #print(references)
    regions1 = {} # plus strand
    regions2 = {} # minus strand
    # Read alignment records until EOF (struct.error on a short read).
    while True:
        try:
            entrylength = unpack('<i',fread(4))[0]
        except struct.error:
            break
        (chromref,tagpos,tagsize,strand) = bam_binary_parse(fread(entrylength))
        #print(chromref,tagpos,tagsize,strand)
        # NOTE(review): for skipped records chromref is -1, which indexes
        # the LAST reference here; harmless only because tagpos is also -1
        # and both branches below require tagpos >= 0.
        if references[chromref] in chroms:
            #strand -- 0: plus, 1: minus
            if tagpos >= 0 and strand == 0:
                #all start(outer) positions with same end(inner) are in a same set/list in plus strand
                regions1 = add_region(references[chromref],tagpos,tagpos+tagsize,regions1)
            elif tagpos >= 0 and strand == 1:
                #all end(outer) positions with same start(inner) are in a same set/list in minus strand
                regions2 = add_region(references[chromref],tagpos,tagpos-tagsize,regions2)
    infile.close()
    # check if tag info is read into regions
    if len(regions1)+len(regions2) == 0:
        sys.stderr.write('File <{}> is not of a valid BAM format! \n'.format(bamfile))
        sys.exit(1)
    return regions1, regions2
def get_tag_regions(species, format, infile):
    '''
    Get all the unique start-end regions in the input file, separated by
    chrom and strand.

    species: 'hg38' selects the human chromosome list; anything else
    falls back to mm10.  format: 'bed' or 'bam'.  Returns the
    (plus_strand, minus_strand) region mappings.

    Raises ValueError for an unsupported format (previously the function
    fell through and returned None implicitly).
    '''
    if format not in ("bed", "bam"):
        raise ValueError('Unsupported file format: {}'.format(format))
    chroms = hg38_chroms if species == 'hg38' else mm10_chroms
    if format == "bed":
        return get_bed_regions(infile, chroms)
    return get_bam_regions(infile, chroms)
| {
"content_hash": "c529035d380f613d391fc23d06a43457",
"timestamp": "",
"source": "github",
"line_count": 240,
"max_line_length": 119,
"avg_line_length": 34.295833333333334,
"alnum_prop": 0.5757502126108613,
"repo_name": "zhenjiawang157/BART_Py2",
"id": "9f0cb914037f21fd42f329ef26079f642a155244",
"size": "8258",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "BART/IOparser.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "39326"
}
],
"symlink_target": ""
} |
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# adding PhpLexer
from sphinx.highlighting import lexers
from pygments.lexers.web import PhpLexer
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.8.5'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sensio.sphinx.codeblock', 'sensio.sphinx.configurationblock', 'sensio.sphinx.phpcode'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'SonataExporter'
copyright = u'2010-2021, Thomas Rabaix'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
#version = '0.0.1'
# The full version, including alpha/beta/rc tags.
#release = '0.0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# This will be used when using the shorthand notation
highlight_language = 'php'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Settings for symfony doc extension ---------------------------------------------------
# enable highlighting for PHP code not between ``<?php ... ?>`` by default
# (startinline=True makes Pygments treat snippets as inline PHP); the
# extra aliases cover the language names used by the doc extensions above.
lexers['php'] = PhpLexer(startinline=True)
lexers['php-annotations'] = PhpLexer(startinline=True)
lexers['php-attributes'] = PhpLexer(startinline=True)
lexers['php-standalone'] = PhpLexer(startinline=True)
lexers['php-symfony'] = PhpLexer(startinline=True)
# -- Options for HTML output ---------------------------------------------------
import sphinx_rtd_theme
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# The Read the Docs theme package exposes its own theme directory.
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will override the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'doc'
# -- Options for LaTeX output --------------------------------------------------
# All entries below are commented out, so Sphinx's defaults apply.
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
#latex_documents = [
# ('index', 'PythonElement.tex', u'Python Documentation',
# u'Thomas Rabaix', 'manual'),
#]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
#(source start file, name, description, authors, manual section).
#man_pages = [
# ('index', 'ioc', u'IoC Documentation',
# [u'Thomas Rabaix'], 1)
#]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
#texinfo_documents = [
# ('index', 'IoC', u'IoC Documentation',
# u'Thomas Rabaix', 'IoC', 'One line description of project.',
# 'Miscellaneous'),
#]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
| {
"content_hash": "999532af2e6c86c2d182d042a47a01f2",
"timestamp": "",
"source": "github",
"line_count": 247,
"max_line_length": 91,
"avg_line_length": 32.73279352226721,
"alnum_prop": 0.700803957946815,
"repo_name": "sonata-project/exporter",
"id": "db2c84b3dd848bdec7ba89fe5bd23f60424d8ee1",
"size": "8583",
"binary": false,
"copies": "1",
"ref": "refs/heads/3.x",
"path": "docs/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2214"
},
{
"name": "PHP",
"bytes": "163815"
}
],
"symlink_target": ""
} |
import unittest
from swift.common.swob import Request, Response
from informant import middleware
class FakeApp(object):
    """Minimal WSGI stand-in that replays canned (status, headers, body) triples.

    Each call consumes the next triple from ``status_headers_body_iter`` and
    records the received request on ``self.request`` for later inspection.
    """

    def __init__(self, status_headers_body_iter=None):
        self.calls = 0
        self.status_headers_body_iter = status_headers_body_iter
        if not self.status_headers_body_iter:
            # Default to a single canned 404 response with test headers.
            canned = [('404 Not Found',
                       {'x-test-header-one-a': 'value1',
                        'x-test-header-two-a': 'value2',
                        'x-test-header-two-b': 'value3'},
                       '')]
            self.status_headers_body_iter = iter(canned)
        self.request = None

    def __call__(self, env, start_response):
        self.calls += 1
        self.request = Request.blank('', environ=env)
        status, headers, body = self.status_headers_body_iter.next()
        resp = Response(status=status, headers=headers, body=body)
        return resp(env, start_response)
def start_response(*args):
    """No-op WSGI start_response stub; accepts and ignores any arguments."""
    return None
class Mocked(object):
    """Recording stubs for the Informant middleware's network-facing internals."""

    def __init__(self):
        # Holds only the most recent _send_events call (as an (args, kwargs)
        # tuple); tests always inspect index [0].
        self._send_events_calls = []

    def fake_send_events(self, *args, **kwargs):
        # Deliberately *replaces* rather than appends, so [0] is always the
        # latest batch of events even when a test fires multiple requests.
        self._send_events_calls = [(args, kwargs)]

    def fake_send_sampled_event(self, *args, **kwargs):
        # Pretend every event passes the sampling check.
        return True

    def fake_time(self, *args, **kwargs):
        # Fixed clock for deterministic timings.
        return 1331098500.00
class TestInformant(unittest.TestCase):
    """Exercise Informant.statsd_event path/method/status classification.

    Each test builds a faked request whose environ already carries the
    ``informant.*`` keys the middleware records while proxying, runs
    ``statsd_event``, and checks the statsd event strings handed to the
    mocked ``_send_events`` (counter, timer, srt, tfer, and optionally
    per-account counter/timer).
    """

    # Fixed timestamps used for every faked request.
    START_TIME = 1331098000.00
    START_RESPONSE_TIME = 1331099000.00

    def setUp(self):
        self.mock = Mocked()
        self.app = middleware.Informant(FakeApp(),
                                        {'prefix_accounts': 'AUTH_omgtests'})
        # Swap the network-touching internals for recording stubs.
        self.orig_send_events = self.app._send_events
        self.orig_send_sampled_event = self.app._send_sampled_event
        self.app._send_events = self.mock.fake_send_events
        self.app._send_sampled_event = self.mock.fake_send_sampled_event

    def tearDown(self):
        self.app._send_events = self.orig_send_events
        self.app._send_sampled_event = self.orig_send_sampled_event

    def _fire(self, path, method='GET', status=200, include_srt=True,
              client_disconnect=False, bytes_transferred="500",
              swift_source=None):
        """Run statsd_event for a faked request and return recorded events.

        Returns the list of statsd event strings from the most recent
        _send_events call (fake_send_events keeps only the latest call at
        index 0).
        """
        req = Request.blank(path, environ={'REQUEST_METHOD': method})
        req.environ['informant.status'] = status
        req.environ['informant.start_time'] = self.START_TIME
        if include_srt:
            req.environ['informant.start_response_time'] = \
                self.START_RESPONSE_TIME
        if swift_source is not None:
            req.environ['swift.source'] = swift_source
        req.client_disconnect = client_disconnect
        req.bytes_transferred = bytes_transferred
        self.app.statsd_event(req.environ, req)
        return self.mock._send_events_calls[0][0][0]

    def _assert_prefixes(self, events, base, tfer_suffix=''):
        """Assert the four standard events all carry the expected prefix."""
        counter, timer, srt, tfer = events[0], events[1], events[2], events[3]
        self.assertTrue(counter.startswith(base), counter)
        self.assertTrue(timer.startswith(base), timer)
        self.assertTrue(srt.startswith('srt.' + base), srt)
        self.assertTrue(tfer.startswith('tfer.' + base + tfer_suffix), tfer)

    def test_informant_invalid(self):
        # Paths outside the API namespace are classified "invalid".
        events = self._fire('/invalidrandomness')
        self._assert_prefixes(events, 'invalid.GET.200')

    def test_informant_slash(self):
        events = self._fire('/')
        self._assert_prefixes(events, 'invalid.GET.200')

    def test_informant_healthcheck(self):
        # BUGFIX: the srt assertion previously passed `timer` as its
        # failure message instead of `srt`.
        events = self._fire('/healthcheck')
        self._assert_prefixes(events, 'healthcheck.GET.200')

    def test_informant_bad_method(self):
        # Unknown HTTP verbs are normalized to BAD_METHOD.
        events = self._fire('/invalidrandomness', method='WTFMONKEYS')
        self._assert_prefixes(events, 'invalid.BAD_METHOD.200')

    def test_informant_client_disconnect(self):
        # A client disconnect is reported as status 499 regardless of the
        # recorded informant.status.
        events = self._fire('/invalidrandomness', client_disconnect=True)
        self._assert_prefixes(events, 'invalid.GET.499')

    def test_informant_empty_transferred(self):
        # "-" (unknown byte count) must be reported as a 0-byte transfer.
        events = self._fire('/invalidrandomness', bytes_transferred="-")
        self.assertTrue(events[3].startswith('tfer.invalid.GET.200:0'),
                        events[3])

    def test_informant_acct_op_v1_0(self):
        events = self._fire('/v1.0/someaccount')
        self._assert_prefixes(events, 'acct.GET.200', tfer_suffix=':500')

    def test_informant_acct_op(self):
        events = self._fire('/v1/someaccount')
        self._assert_prefixes(events, 'acct.GET.200', tfer_suffix=':500')

    def test_informant_container_op(self):
        events = self._fire('/v1/someaccount/somecontainer')
        self._assert_prefixes(events, 'cont.GET.200', tfer_suffix=':500')

    def test_informant_object_op(self):
        events = self._fire('/v1/someaccount/somecontainer/someobj')
        self._assert_prefixes(events, 'obj.GET.200', tfer_suffix=':500')

    def test_informant_pseudodirs(self):
        # Anything deeper than /v1/acct/cont/obj still counts as the object.
        events = self._fire('/v1/theact/thecont/theobj/with/extras')
        self._assert_prefixes(events, 'obj.GET.200', tfer_suffix=':500')

    def test_informant_account_prefix(self):
        # Accounts listed in prefix_accounts additionally get per-account
        # counter/timer events at indexes 4 and 5.
        events = self._fire('/v1/AUTH_omgtests/thecont/theobj/with/extras')
        self._assert_prefixes(events, 'obj.GET.200', tfer_suffix=':500')
        acct_counter = events[4]
        acct_timer = events[5]
        self.assertTrue(
            acct_counter.startswith('AUTH_omgtests.obj.GET.200'), acct_counter)
        # BUGFIX: the original asserted acct_counter twice and never checked
        # the per-account timer event.
        self.assertTrue(
            acct_timer.startswith('AUTH_omgtests.obj.GET.200'), acct_timer)

    def test_informant_sos_op(self):
        # swift.source (e.g. SOS) overrides path-based classification.
        events = self._fire('/something', swift_source='SOS')
        self._assert_prefixes(events, 'SOS.GET.200', tfer_suffix=':500')

    def test_informant_no_srt(self):
        # Without informant.start_response_time the srt timing must be 0.
        events = self._fire('/something', swift_source='SOS',
                            include_srt=False)
        self._assert_prefixes(events, 'SOS.GET.200', tfer_suffix=':500')
        self.assertTrue(events[2].startswith('srt.SOS.GET.200:0'), events[2])

    def test_informant_methods(self):
        for method in [
                'GET', 'HEAD', 'PUT', 'POST', 'DELETE', 'OPTIONS', '-JUNK']:
            events = self._fire('/v1/someaccount', method=method)
            # BUGFIX: the original compared strings with "is not", which
            # relies on CPython string interning; use != for value equality.
            if method != '-JUNK':
                base = 'acct.%s.200' % method
            else:
                base = 'acct.BAD_METHOD.200'
            self._assert_prefixes(events, base, tfer_suffix=':500')
# Allow running this test module directly (python test_informant.py).
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "b1f8bd31ce1680c2ebc330b8d83b0451",
"timestamp": "",
"source": "github",
"line_count": 375,
"max_line_length": 91,
"avg_line_length": 50.074666666666666,
"alnum_prop": 0.6049100010650762,
"repo_name": "rovanleeuwen/informant",
"id": "be3b7356a804bda35b83e8e06b4d12b76720e554",
"size": "18778",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "test/test_informant.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "27213"
}
],
"symlink_target": ""
} |
# Package version, kept as a tuple of strings and joined into the usual
# dotted form below.
__version_info__ = ('0', '0', '3')
__version__ = '.'.join(__version_info__)
# Re-export the package's public names at the top level.
from .heap import heap
from .autovivify import AutovivifiedDict
from .objectify import ObjectifiedDict
| {
"content_hash": "7250ae9bf13d0283b05b2c570ff83f27",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 40,
"avg_line_length": 30,
"alnum_prop": 0.6888888888888889,
"repo_name": "Ceasar/trees",
"id": "961caeb1194a7cec1b89058b5cd7acb03ae93937",
"size": "180",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "trees/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "8196"
}
],
"symlink_target": ""
} |
from gui.options.abstractOption import AbstractOption
__author__ = 'andres'
class PlayOption(AbstractOption):
    """Option that calls ``dspPlay.start()`` when accepted."""
    def accept(self, file_name):
        # NOTE(review): `dspPlay` is neither imported nor defined in this
        # module, so calling accept() will raise NameError at runtime —
        # confirm where it is supposed to come from. `file_name` is accepted
        # (presumably to satisfy the AbstractOption interface) but unused.
        dspPlay.start()
| {
"content_hash": "6da126b24871ca6c340be5abb635b756",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 53,
"avg_line_length": 20.125,
"alnum_prop": 0.7577639751552795,
"repo_name": "andimarafioti/intercomunicador",
"id": "c0f1c08e46f021e49d3228379b9cd2c40ca90e22",
"size": "200",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gui/options/playOption.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "32761"
},
{
"name": "Shell",
"bytes": "297"
}
],
"symlink_target": ""
} |
import datetime
import os
import sys
import weakref
from distutils.version import StrictVersion
import jinja2
from flask import Flask, Request
from flask.helpers import safe_join
from flask_migrate import upgrade
from jinja2 import FileSystemLoader
from jinja2.sandbox import SandboxedEnvironment
from werkzeug.middleware.proxy_fix import ProxyFix
from werkzeug.utils import cached_property
import CTFd.utils.config
from CTFd import utils
from CTFd.constants.themes import ADMIN_THEME, DEFAULT_THEME
from CTFd.plugins import init_plugins
from CTFd.utils.crypto import sha256
from CTFd.utils.initialization import (
init_events,
init_logs,
init_request_processors,
init_template_filters,
init_template_globals,
)
from CTFd.utils.migrations import create_database, migrations, stamp_latest_revision
from CTFd.utils.sessions import CachingSessionInterface
from CTFd.utils.updates import update_check
# CTFd release version and distribution channel; attached to the app in
# create_app() as app.VERSION / app.CHANNEL.
__version__ = "3.4.0"
__channel__ = "oss"
class CTFdRequest(Request):
    """Request subclass whose ``path`` includes the application root."""

    @cached_property
    def path(self):
        """Return the full browser-visible path for this request.

        Flask's ``Request.path`` does not include the application root,
        which is unintuitive for subdirectory deployments; prepending
        ``script_root`` yields e.g. ``/subdirectory/path/route`` instead
        of ``/path/route``.

        :return: string
        """
        flask_path = super(CTFdRequest, self).path
        return self.script_root + flask_path
class CTFdFlask(Flask):
    """Flask subclass wired up with CTFd's request/session/Jinja machinery."""

    def __init__(self, *args, **kwargs):
        """Install CTFd-specific components before normal Flask initialization."""
        # Sandboxed Jinja, cached server-side sessions, subdir-aware requests.
        self.jinja_environment = SandboxedBaseEnvironment
        self.session_interface = CachingSessionInterface(key_prefix="session")
        self.request_class = CTFdRequest

        # Record when this server process started...
        self.start_time = datetime.datetime.utcnow()
        # ...and derive a short, generally unique identifier for this run.
        self.run_id = sha256(str(self.start_time))[:8]

        super(CTFdFlask, self).__init__(*args, **kwargs)

    def create_jinja_environment(self):
        """Delegate to Flask; the environment class was swapped in __init__."""
        return super(CTFdFlask, self).create_jinja_environment()
class SandboxedBaseEnvironment(SandboxedEnvironment):
    """SandboxEnvironment that mimics the Flask BaseEnvironment.

    Templates render inside a Jinja2 sandbox, and the template cache key is
    made theme-aware so templates with the same name in different themes do
    not collide in the LRU cache.
    """

    def __init__(self, app, **options):
        if "loader" not in options:
            # Mirror Flask's BaseEnvironment: default to the app's global loader
            options["loader"] = app.create_global_jinja_loader()
        SandboxedEnvironment.__init__(self, **options)
        self.app = app

    def _load_template(self, name, globals):
        if self.loader is None:
            raise TypeError("no loader for this environment specified")
        # Add theme to the LRUCache cache key (admin templates keep their
        # plain name; everything else is prefixed with the active theme).
        cache_name = name
        # BUGFIX(idiom): was `name.startswith("admin/") is False` — avoid
        # identity comparison against booleans (PEP 8).
        if not name.startswith("admin/"):
            theme = str(utils.get_config("ctf_theme"))
            cache_name = theme + "/" + name
        # Rest of this code is copied from Jinja
        # https://github.com/pallets/jinja/blob/master/src/jinja2/environment.py#L802-L815
        cache_key = (weakref.ref(self.loader), cache_name)
        if self.cache is not None:
            template = self.cache.get(cache_key)
            if template is not None and (
                not self.auto_reload or template.is_up_to_date
            ):
                return template
        template = self.loader.load(self, name, globals)
        if self.cache is not None:
            self.cache[cache_key] = template
        return template
class ThemeLoader(FileSystemLoader):
    """FileSystemLoader that resolves template names inside a theme directory.

    When ``theme_name`` is None, the active theme is looked up from the
    ``ctf_theme`` config at load time.
    """

    DEFAULT_THEMES_PATH = os.path.join(os.path.dirname(__file__), "themes")
    _ADMIN_THEME_PREFIX = ADMIN_THEME + "/"

    def __init__(
        self,
        searchpath=DEFAULT_THEMES_PATH,
        theme_name=None,
        encoding="utf-8",
        followlinks=False,
    ):
        super(ThemeLoader, self).__init__(searchpath, encoding, followlinks)
        self.theme_name = theme_name

    def get_source(self, environment, template):
        prefix = self._ADMIN_THEME_PREFIX
        if template.startswith(prefix):
            # Refuse to serve `admin/*` from any loader that is not dedicated
            # to the admin theme: with a single template loader, themes can
            # provide files for other themes, and a malicious theme could
            # otherwise supply an admin template the admin theme lacks.
            if self.theme_name != ADMIN_THEME:
                raise jinja2.TemplateNotFound(template)
            template = template[len(prefix):]
        active_theme = self.theme_name or str(utils.get_config("ctf_theme"))
        themed_path = safe_join(active_theme, "templates", template)
        return super(ThemeLoader, self).get_source(environment, themed_path)
def confirm_upgrade():
    """Ask the operator to confirm running database migrations.

    Non-interactive sessions (stdin is not a TTY) auto-confirm so that
    containerized/scripted deployments proceed without prompting.

    :return: True to run migrations, False to skip them
    """
    if not sys.stdin.isatty():
        return True
    print("/*\\ CTFd has updated and must update the database! /*\\")
    print("/*\\ Please backup your database before proceeding! /*\\")
    print("/*\\ CTFd maintainers are not responsible for any data loss! /*\\")
    answer = input("Run database migrations (Y/N)").lower().strip()  # nosec B322
    if answer == "y":
        return True
    print("/*\\ Ignored database migrations... /*\\")
    return False
def run_upgrade():
    """Apply pending database migrations and record the new CTFd version."""
    upgrade()
    utils.set_config("ctf_version", __version__)
def create_app(config="CTFd.config.Config"):
    """Application factory: build, configure, and initialize the CTFd app.

    Sets up template loaders, the database (creating/migrating as needed),
    caching, reverse-proxy support, version upgrade handling, blueprints,
    error handlers, logging, events, and plugins, then returns the app.

    :param config: import path of the configuration object to load
    :return: a fully initialized CTFdFlask application
    """
    app = CTFdFlask(__name__)
    with app.app_context():
        app.config.from_object(config)

        loaders = []
        # We provide a `DictLoader` which may be used to override templates
        app.overridden_templates = {}
        loaders.append(jinja2.DictLoader(app.overridden_templates))
        # A `ThemeLoader` with no `theme_name` will load from the current theme
        loaders.append(ThemeLoader())
        # If `THEME_FALLBACK` is set and true, we add another loader which will
        # load from the `DEFAULT_THEME` - this mirrors the order implemented by
        # `config.ctf_theme_candidates()`
        if bool(app.config.get("THEME_FALLBACK")):
            loaders.append(ThemeLoader(theme_name=DEFAULT_THEME))
        # All themes including admin can be accessed by prefixing their name
        prefix_loader_dict = {ADMIN_THEME: ThemeLoader(theme_name=ADMIN_THEME)}
        for theme_name in CTFd.utils.config.get_themes():
            prefix_loader_dict[theme_name] = ThemeLoader(theme_name=theme_name)
        loaders.append(jinja2.PrefixLoader(prefix_loader_dict))
        # Plugin templates are also accessed via prefix but we just point a
        # normal `FileSystemLoader` at the plugin tree rather than validating
        # each plugin here (that happens later in `init_plugins()`). We
        # deliberately don't add this to `prefix_loader_dict` defined above
        # because to do so would break template loading from a theme called
        # `prefix` (even though that'd be weird).
        plugin_loader = jinja2.FileSystemLoader(
            searchpath=os.path.join(app.root_path, "plugins"), followlinks=True
        )
        loaders.append(jinja2.PrefixLoader({"plugins": plugin_loader}))
        # Use a choice loader to find the first match from our list of loaders
        app.jinja_loader = jinja2.ChoiceLoader(loaders)

        from CTFd.models import (  # noqa: F401
            db,
            Teams,
            Solves,
            Challenges,
            Fails,
            Flags,
            Tags,
            Files,
            Tracking,
        )

        url = create_database()

        # This allows any changes to the SQLALCHEMY_DATABASE_URI to get pushed back in
        # This is mostly so we can force MySQL's charset
        app.config["SQLALCHEMY_DATABASE_URI"] = str(url)

        # Register database
        db.init_app(app)

        # Register Flask-Migrate
        migrations.init_app(app, db)

        # Alembic sqlite support is lacking so we should just create_all anyway
        if url.drivername.startswith("sqlite"):
            # Enable foreign keys for SQLite. This must be before the
            # db.create_all call because tests use the in-memory SQLite
            # database (each connection, including db creation, is a new db).
            # https://docs.sqlalchemy.org/en/13/dialects/sqlite.html#foreign-key-support
            from sqlalchemy.engine import Engine
            from sqlalchemy import event

            @event.listens_for(Engine, "connect")
            def set_sqlite_pragma(dbapi_connection, connection_record):
                cursor = dbapi_connection.cursor()
                cursor.execute("PRAGMA foreign_keys=ON")
                cursor.close()

            db.create_all()
            stamp_latest_revision()
        else:
            # This creates tables instead of db.create_all()
            # Allows migrations to happen properly
            upgrade()

        from CTFd.models import ma

        ma.init_app(app)

        app.db = db
        app.VERSION = __version__
        app.CHANNEL = __channel__

        from CTFd.cache import cache

        cache.init_app(app)
        app.cache = cache

        reverse_proxy = app.config.get("REVERSE_PROXY")
        if reverse_proxy:
            if type(reverse_proxy) is str and "," in reverse_proxy:
                proxyfix_args = [int(i) for i in reverse_proxy.split(",")]
                app.wsgi_app = ProxyFix(app.wsgi_app, *proxyfix_args)
            else:
                app.wsgi_app = ProxyFix(
                    app.wsgi_app, x_for=1, x_proto=1, x_host=1, x_port=1, x_prefix=1
                )

        version = utils.get_config("ctf_version")

        # Upgrading from an older version of CTFd
        if version and (StrictVersion(version) < StrictVersion(__version__)):
            if confirm_upgrade():
                run_upgrade()
            else:
                exit()

        if not version:
            utils.set_config("ctf_version", __version__)

        if not utils.get_config("ctf_theme"):
            utils.set_config("ctf_theme", DEFAULT_THEME)

        update_check(force=True)

        init_request_processors(app)
        init_template_filters(app)
        init_template_globals(app)

        # Importing here allows tests to use sensible names (e.g. api instead of api_bp)
        from CTFd.views import views
        from CTFd.teams import teams
        from CTFd.users import users
        from CTFd.challenges import challenges
        from CTFd.scoreboard import scoreboard
        from CTFd.auth import auth
        from CTFd.admin import admin
        from CTFd.api import api
        from CTFd.events import events
        from CTFd.errors import render_error

        app.register_blueprint(views)
        app.register_blueprint(teams)
        app.register_blueprint(users)
        app.register_blueprint(challenges)
        app.register_blueprint(scoreboard)
        app.register_blueprint(auth)
        app.register_blueprint(api)
        app.register_blueprint(events)
        app.register_blueprint(admin)

        # Use a single themed error handler for the common HTTP error codes
        for code in {403, 404, 500, 502}:
            app.register_error_handler(code, render_error)

        init_logs(app)
        init_events(app)
        init_plugins(app)

        return app
| {
"content_hash": "d76057f2b858030f71a5363e61041787",
"timestamp": "",
"source": "github",
"line_count": 307,
"max_line_length": 119,
"avg_line_length": 36.71661237785016,
"alnum_prop": 0.6324520936834634,
"repo_name": "isislab/CTFd",
"id": "cdda23588fa098d8ca74b4cfee0517080769f68f",
"size": "11272",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "CTFd/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "13062"
},
{
"name": "HTML",
"bytes": "156198"
},
{
"name": "JavaScript",
"bytes": "38944"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "146388"
},
{
"name": "Shell",
"bytes": "611"
}
],
"symlink_target": ""
} |
"""Node namespace"""
# pylint: disable=unused-import
from __future__ import absolute_import
import ctypes
import sys
from .. import _api_internal
from .node_generic import NodeGeneric, convert_to_node, const
from .base import _LIB, check_call, c_str, py_str, _FFI_MODE
IMPORT_EXCEPT = RuntimeError if _FFI_MODE == "cython" else ImportError
try:
# pylint: disable=wrong-import-position
if _FFI_MODE == "ctypes":
raise ImportError()
if sys.version_info >= (3, 0):
from ._cy3.core import _register_node, NodeBase as _NodeBase
else:
from ._cy2.core import _register_node, NodeBase as _NodeBase
except IMPORT_EXCEPT:
# pylint: disable=wrong-import-position
from ._ctypes.node import _register_node, NodeBase as _NodeBase
def _new_object(cls):
"""Helper function for pickle"""
return cls.__new__(cls)
class NodeBase(_NodeBase):
    """Base class of all TVM language AST objects."""

    def __repr__(self):
        # Delegate pretty-printing to the C++ runtime.
        return _api_internal._format_str(self)

    def __dir__(self):
        # Ask the runtime for this node's attribute names.
        attr_names = ctypes.POINTER(ctypes.c_char_p)()
        count = ctypes.c_uint()
        check_call(_LIB.TVMNodeListAttrNames(
            self.handle, ctypes.byref(count), ctypes.byref(attr_names)))
        return [py_str(attr_names[i]) for i in range(count.value)]

    def __hash__(self):
        # Hash on the underlying node pointer, i.e. object identity.
        return _api_internal._raw_ptr(self)

    def __eq__(self, other):
        return self.same_as(other)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __reduce__(self):
        cls = type(self)
        return (_new_object, (cls,), self.__getstate__())

    def __getstate__(self):
        # Serialize the node as JSON via the runtime; None-handle nodes
        # round-trip as None.
        if self.handle is None:
            return {'handle': None}
        return {'handle': _api_internal._save_json(self)}

    def __setstate__(self, state):
        # pylint: disable=assigning-non-slot
        handle = state['handle']
        if handle is None:
            self.handle = None
        else:
            # Rebuild the node from JSON, then steal its handle.
            restored = _api_internal._load_json(handle)
            self.handle = restored.handle
            restored.handle = None

    def same_as(self, other):
        """Check object identity equality (same underlying node pointer)."""
        return isinstance(other, NodeBase) and self.__hash__() == other.__hash__()
def register_node(type_key=None):
    """Register a node type with the runtime.

    Parameters
    ----------
    type_key : str or cls
        The type key of the node.  Given a string, a class decorator is
        returned; given a class, it is registered directly under its
        ``__name__``.
    """
    if isinstance(type_key, str):
        node_name = type_key
    else:
        node_name = type_key.__name__

    def _do_register(cls):
        """Register *cls* under ``node_name`` if the runtime knows the key."""
        tindex = ctypes.c_int()
        status = _LIB.TVMNodeTypeKey2Index(c_str(node_name), ctypes.byref(tindex))
        if status == 0:
            _register_node(tindex.value, cls)
        return cls

    return _do_register if isinstance(type_key, str) else _do_register(type_key)
| {
"content_hash": "84e943517381f94c8f771a8d7f27ad40",
"timestamp": "",
"source": "github",
"line_count": 102,
"max_line_length": 79,
"avg_line_length": 29.294117647058822,
"alnum_prop": 0.5953815261044176,
"repo_name": "mlperf/training_results_v0.6",
"id": "98ece19f77f2048e582e5ff13f473784112c0582",
"size": "2988",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Fujitsu/benchmarks/resnet/implementations/mxnet/3rdparty/tvm/python/tvm/_ffi/node.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1731"
},
{
"name": "Batchfile",
"bytes": "13941"
},
{
"name": "C",
"bytes": "208630"
},
{
"name": "C++",
"bytes": "10999411"
},
{
"name": "CMake",
"bytes": "129712"
},
{
"name": "CSS",
"bytes": "64767"
},
{
"name": "Clojure",
"bytes": "396764"
},
{
"name": "Cuda",
"bytes": "2272433"
},
{
"name": "Dockerfile",
"bytes": "67820"
},
{
"name": "Groovy",
"bytes": "62557"
},
{
"name": "HTML",
"bytes": "19753082"
},
{
"name": "Java",
"bytes": "166294"
},
{
"name": "JavaScript",
"bytes": "71846"
},
{
"name": "Julia",
"bytes": "408765"
},
{
"name": "Jupyter Notebook",
"bytes": "2713169"
},
{
"name": "Lua",
"bytes": "4430"
},
{
"name": "MATLAB",
"bytes": "34903"
},
{
"name": "Makefile",
"bytes": "115694"
},
{
"name": "Perl",
"bytes": "1535873"
},
{
"name": "Perl 6",
"bytes": "7280"
},
{
"name": "PowerShell",
"bytes": "6150"
},
{
"name": "Python",
"bytes": "24905683"
},
{
"name": "R",
"bytes": "351865"
},
{
"name": "Roff",
"bytes": "293052"
},
{
"name": "Scala",
"bytes": "1189019"
},
{
"name": "Shell",
"bytes": "794096"
},
{
"name": "Smalltalk",
"bytes": "3497"
},
{
"name": "TypeScript",
"bytes": "361164"
}
],
"symlink_target": ""
} |
def create_mqb_steering_control(packer, bus, apply_steer, idx, lkas_enabled):
  """Assemble the HCA_01 (Heading Control Assist) steering CAN message."""
  signal_values = dict(
      SET_ME_0X3=0x3,
      Assist_Torque=abs(apply_steer),
      Assist_Requested=lkas_enabled,
      Assist_VZ=1 if apply_steer < 0 else 0,  # torque sign bit
      HCA_Available=1,
      HCA_Standby=not lkas_enabled,
      HCA_Active=lkas_enabled,
      SET_ME_0XFE=0xFE,
      SET_ME_0X07=0x07,
  )
  return packer.make_can_msg("HCA_01", bus, signal_values, idx)
def create_mqb_hud_control(packer, bus, hca_enabled, steering_pressed, hud_alert, leftLaneVisible, rightLaneVisible):
  """Assemble the LDW_02 lane/HUD status CAN message."""
  # Lane display value: 3 when assist is active, 2 when merely available,
  # 1 when the lane line is not visible.
  visible_value = 3 if hca_enabled else 2
  left_lane_status = visible_value if leftLaneVisible else 1
  right_lane_status = visible_value if rightLaneVisible else 1
  values = {
      "LDW_Unknown": 2,  # FIXME: possible speed or attention relationship
      "Kombi_Lamp_Orange": 1 if hca_enabled and steering_pressed else 0,
      "Kombi_Lamp_Green": 1 if hca_enabled and not steering_pressed else 0,
      "Left_Lane_Status": left_lane_status,
      "Right_Lane_Status": right_lane_status,
      "Alert_Message": hud_alert,
  }
  return packer.make_can_msg("LDW_02", bus, values)
def create_mqb_acc_buttons_control(packer, bus, buttonStatesToSend, CS, idx):
  """Assemble the GRA_ACC_01 cruise-control button CAN message.

  Forwards the requested button presses while mirroring identification
  fields taken from the car's own state (CS).
  """
  # Signal name -> key in buttonStatesToSend.
  button_signals = {
      "GRA_Abbrechen": "cancel",
      "GRA_Tip_Setzen": "setCruise",
      "GRA_Tip_Hoch": "accelCruise",
      "GRA_Tip_Runter": "decelCruise",
      "GRA_Tip_Wiederaufnahme": "resumeCruise",
  }
  values = {signal: buttonStatesToSend[key] for signal, key in button_signals.items()}
  values.update({
      "GRA_Hauptschalter": CS.graHauptschalter,
      "GRA_Verstellung_Zeitluecke": 3 if buttonStatesToSend["gapAdjustCruise"] else 0,
      "GRA_Typ_Hauptschalter": CS.graTypHauptschalter,
      "GRA_Codierung": 2,
      "GRA_Tip_Stufe_2": CS.graTipStufe2,
      "GRA_ButtonTypeInfo": CS.graButtonTypeInfo,
  })
  return packer.make_can_msg("GRA_ACC_01", bus, values, idx)
| {
"content_hash": "2892330f1afc060adc957ea476610ec6",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 117,
"avg_line_length": 39.729166666666664,
"alnum_prop": 0.6953329837441007,
"repo_name": "vntarasov/openpilot",
"id": "3cd47a0957ee12bcda407f1bd04af9fe79b1e58a",
"size": "2026",
"binary": false,
"copies": "1",
"ref": "refs/heads/eon-0710",
"path": "selfdrive/car/volkswagen/volkswagencan.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "44441"
},
{
"name": "C",
"bytes": "7910872"
},
{
"name": "C++",
"bytes": "2526252"
},
{
"name": "CMake",
"bytes": "7454"
},
{
"name": "Cap'n Proto",
"bytes": "55667"
},
{
"name": "Dockerfile",
"bytes": "3141"
},
{
"name": "Groovy",
"bytes": "2620"
},
{
"name": "Makefile",
"bytes": "49988"
},
{
"name": "NSIS",
"bytes": "7977"
},
{
"name": "Objective-C",
"bytes": "72319"
},
{
"name": "Python",
"bytes": "1188467"
},
{
"name": "Shell",
"bytes": "13772"
}
],
"symlink_target": ""
} |
"""
Each new term in the Fibonacci sequence is generated by adding the previous two terms. By starting with 1 and 2, the first 10 terms will be:
1, 2, 3, 5, 8, 13, 21, 34, 55, 89, ...
By considering the terms in the Fibonacci sequence whose values do not exceed four million, find the sum of the even-valued terms.
"""
def fib(n):
    """Yield the even Fibonacci numbers strictly below *n* (starting at 0)."""
    current, following = 0, 1
    while current < n:
        if current % 2 == 0:
            yield current
        current, following = following, current + following
def run_calc():
    """Return the sum of the even Fibonacci terms below four million."""
    limit = 4000000
    return sum(fib(limit))
def test_function():
    """Sanity-check run_calc against the known Project Euler answer."""
    expected = 4613732
    assert run_calc() == expected
if __name__ == '__main__':
    # Compute the answer when run as a script (result is returned, not printed).
    run_calc()
| {
"content_hash": "5b7c3e01db3b5a25bac1efda7a3a0c55",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 140,
"avg_line_length": 21.51851851851852,
"alnum_prop": 0.6006884681583476,
"repo_name": "marshallhumble/Coding_Challenges",
"id": "d781c7892a9084dd050d3244f7feaf57b2c51de9",
"size": "604",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Project-Euler/python/2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Groovy",
"bytes": "5348"
},
{
"name": "Java",
"bytes": "4719"
},
{
"name": "Python",
"bytes": "119677"
}
],
"symlink_target": ""
} |
import struct
import decimal
D = decimal.Decimal
import logging
from . import (util, config, exceptions, worldcoin, util, blockchain)
FORMAT = '>QQQQHQ'
LENGTH = 8 + 8 + 8 + 8 + 2 + 8
ID = 10
def exact_penalty (db, address, block_index, order_match_id):
    """Penalize an address that failed to make a WDC payment.

    Expires all of *address*'s open sell-WDC orders; before the protocol
    change at block 314250 (mainnet only), also expires that address's
    pending order matches involving WDC.
    """
    cursor = db.cursor()
    # Expire every open order in which this address is giving WDC.
    open_wdc_orders = list(cursor.execute('''SELECT * FROM orders \
                                             WHERE (source = ? AND give_asset = ? AND status = ?)''',
                                          (address, config.WDC, 'open')))
    for open_order in open_wdc_orders:
        cancel_order(db, open_order, 'expired', block_index)
    if not (block_index >= 314250 or config.TESTNET):   # Protocol change.
        # Also expire this address's pending order matches involving WDC.
        pending_matches = list(cursor.execute('''SELECT * FROM order_matches \
                                                 WHERE ((tx0_address = ? AND forward_asset = ?) OR (tx1_address = ? AND backward_asset = ?)) AND (status = ?)''',
                                              (address, config.WDC, address, config.WDC, 'pending')))
        for pending_match in pending_matches:
            cancel_order_match(db, pending_match, 'expired', block_index)
    cursor.close()
def cancel_order (db, order, status, block_index):
    """Close an open order, refunding its escrowed give_remaining.

    *status* becomes the order's terminal status ('cancelled' or
    'expired'); expired orders additionally get an order_expirations row.
    """
    cursor = db.cursor()
    # Mark the order with its terminal status.
    status_bindings = {
        'status': status,
        'tx_hash': order['tx_hash'],
    }
    cursor.execute('update orders set status = :status where tx_hash = :tx_hash',
                   status_bindings)
    util.message(db, block_index, 'update', 'orders', status_bindings)
    # Refund the escrowed amount.  WDC is never escrowed (can't credit WDC).
    if order['give_asset'] != config.WDC:
        util.credit(db, block_index, order['source'], order['give_asset'], order['give_remaining'], action='cancel order', event=order['tx_hash'])
    if status == 'expired':
        # Record offer expiration.
        expiration_bindings = {
            'order_index': order['tx_index'],
            'order_hash': order['tx_hash'],
            'source': order['source'],
            'block_index': block_index,
        }
        cursor.execute('insert into order_expirations values(:order_index, :order_hash, :source, :block_index)',
                       expiration_bindings)
    cursor.close()
def cancel_order_match (db, order_match, status, block_index):
    '''Close a pending order match and return the escrow to both orders.

    May only be cancelled by callbacks.  For each side: if its order is
    already dead (expired/cancelled), the escrowed asset is credited back
    to the address directly; otherwise the order's give/get/fee-required
    remainings are replenished.'''
    cursor = db.cursor()
    # Skip order matches just expired as a penalty. (Not very efficient.)
    if not (block_index >= 314250 or config.TESTNET): # Protocol change.
        order_matches = list(cursor.execute('''SELECT * FROM order_matches \
                             WHERE (id = ? AND status = ?)''',
                             (order_match['id'], 'expired')))
        if order_matches:
            cursor.close()
            return
    # Update status of order match.
    bindings = {
        'status': status,
        'order_match_id': order_match['id']
    }
    sql='update order_matches set status = :status where id = :order_match_id'
    cursor.execute(sql, bindings)
    util.message(db, block_index, 'update', 'order_matches', bindings)
    order_match_id = order_match['tx0_hash'] + order_match['tx1_hash']
    # If tx0 is dead, credit address directly; if not, replenish give remaining, get remaining, and fee required remaining.
    orders = list(cursor.execute('''SELECT * FROM orders \
                                 WHERE tx_index = ?''',
                                 (order_match['tx0_index'],)))
    assert len(orders) == 1
    tx0_order = orders[0]
    if tx0_order['status'] in ('expired', 'cancelled'):
        tx0_order_status = tx0_order['status']
        # Order already closed: return the escrowed forward asset directly.
        if order_match['forward_asset'] != config.WDC:
            util.credit(db, block_index, order_match['tx0_address'],
                        order_match['forward_asset'],
                        order_match['forward_quantity'], action='order {}'.format(tx0_order_status), event=order_match['id'])
    else:
        # Order still open: replenish its remainings instead.
        tx0_give_remaining = tx0_order['give_remaining'] + order_match['forward_quantity']
        tx0_get_remaining = tx0_order['get_remaining'] + order_match['backward_quantity']
        if tx0_order['get_asset'] == config.WDC and (block_index >= 297000 or config.TESTNET): # Protocol change.
            tx0_fee_required_remaining = tx0_order['fee_required_remaining'] + order_match['fee_paid']
        else:
            tx0_fee_required_remaining = tx0_order['fee_required_remaining']
        tx0_order_status = tx0_order['status']
        bindings = {
            'give_remaining': tx0_give_remaining,
            'get_remaining': tx0_get_remaining,
            'status': tx0_order_status,
            'fee_required_remaining': tx0_fee_required_remaining,
            'tx_hash': order_match['tx0_hash']
        }
        sql='update orders set give_remaining = :give_remaining, get_remaining = :get_remaining, fee_required_remaining = :fee_required_remaining where tx_hash = :tx_hash'
        cursor.execute(sql, bindings)
        util.message(db, block_index, 'update', 'orders', bindings)
    # If tx1 is dead, credit address directly; if not, replenish give remaining, get remaining, and fee required remaining.
    orders = list(cursor.execute('''SELECT * FROM orders \
                                 WHERE tx_index = ?''',
                                 (order_match['tx1_index'],)))
    assert len(orders) == 1
    tx1_order = orders[0]
    if tx1_order['status'] in ('expired', 'cancelled'):
        tx1_order_status = tx1_order['status']
        # Order already closed: return the escrowed backward asset directly.
        if order_match['backward_asset'] != config.WDC:
            util.credit(db, block_index, order_match['tx1_address'],
                        order_match['backward_asset'],
                        order_match['backward_quantity'], action='order {}'.format(tx1_order_status), event=order_match['id'])
    else:
        # Order still open: replenish its remainings instead.
        tx1_give_remaining = tx1_order['give_remaining'] + order_match['backward_quantity']
        tx1_get_remaining = tx1_order['get_remaining'] + order_match['forward_quantity']
        if tx1_order['get_asset'] == config.WDC and (block_index >= 297000 or config.TESTNET): # Protocol change.
            tx1_fee_required_remaining = tx1_order['fee_required_remaining'] + order_match['fee_paid']
        else:
            tx1_fee_required_remaining = tx1_order['fee_required_remaining']
        tx1_order_status = tx1_order['status']
        bindings = {
            'give_remaining': tx1_give_remaining,
            'get_remaining': tx1_get_remaining,
            'status': tx1_order_status,
            'fee_required_remaining': tx1_fee_required_remaining,
            'tx_hash': order_match['tx1_hash']
        }
        sql='update orders set give_remaining = :give_remaining, get_remaining = :get_remaining, fee_required_remaining = :fee_required_remaining where tx_hash = :tx_hash'
        cursor.execute(sql, bindings)
        util.message(db, block_index, 'update', 'orders', bindings)
    if block_index < 286500:    # Protocol change.
        # Sanity check: one of the two must have expired.
        tx0_order_time_left = tx0_order['expire_index'] - block_index
        tx1_order_time_left = tx1_order['expire_index'] - block_index
        assert tx0_order_time_left or tx1_order_time_left
    # Penalize tardiness.
    if block_index >= 313900 or config.TESTNET: # Protocol change.
        if tx0_order['status'] == 'expired' and order_match['forward_asset'] == config.WDC:
            exact_penalty(db, order_match['tx0_address'], block_index, order_match['id'])
        if tx1_order['status'] == 'expired' and order_match['backward_asset'] == config.WDC:
            exact_penalty(db, order_match['tx1_address'], block_index, order_match['id'])
    # Re‐match the two underlying orders against the book.
    if block_index >= 310000 or config.TESTNET: # Protocol change.
        if not (block_index >= 315000 or config.TESTNET): # Protocol change.
            cursor.execute('''SELECT * FROM transactions\
                              WHERE tx_hash = ?''', (tx0_order['tx_hash'],))
            match(db, list(cursor)[0], block_index)
            cursor.execute('''SELECT * FROM transactions\
                              WHERE tx_hash = ?''', (tx1_order['tx_hash'],))
            match(db, list(cursor)[0], block_index)
    if status == 'expired':
        # Record order match expiration.
        bindings = {
            'order_match_id': order_match['id'],
            'tx0_address': order_match['tx0_address'],
            'tx1_address': order_match['tx1_address'],
            'block_index': block_index
        }
        sql='insert into order_match_expirations values(:order_match_id, :tx0_address, :tx1_address, :block_index)'
        cursor.execute(sql, bindings)
    cursor.close()
def validate (db, source, give_asset, give_quantity, get_asset, get_quantity, expiration, fee_required, block_index):
    """Sanity-check order parameters.

    Returns
    -------
    list of str
        Human-readable problems; an empty list means the order is
        acceptable at *block_index*.
    """
    problems = []
    cursor = db.cursor()
    try:
        if give_asset == config.WDC and get_asset == config.WDC:
            problems.append('cannot trade {} for itself'.format(config.WDC))
        # Type errors abort validation immediately: later checks assume ints.
        if not isinstance(give_quantity, int):
            problems.append('give_quantity must be in satoshis')
            return problems
        if not isinstance(get_quantity, int):
            problems.append('get_quantity must be in satoshis')
            return problems
        if not isinstance(fee_required, int):
            problems.append('fee_required must be in satoshis')
            return problems
        if not isinstance(expiration, int):
            problems.append('expiration must be expressed as an integer block delta')
            return problems
        if give_quantity <= 0: problems.append('non‐positive give quantity')
        if get_quantity <= 0: problems.append('non‐positive get quantity')
        if fee_required < 0: problems.append('negative fee_required')
        if expiration < 0: problems.append('negative expiration')
        if expiration == 0 and not (block_index >= 317500 or config.TESTNET):   # Protocol change.
            problems.append('zero expiration')
        if not give_quantity or not get_quantity:
            problems.append('zero give or zero get')
        # Both assets must exist (WDC and XBJ are built in, never issued).
        cursor.execute('select * from issuances where (status = ? and asset = ?)', ('valid', give_asset))
        if give_asset not in (config.WDC, config.XBJ) and not cursor.fetchall():
            problems.append('no such asset to give ({})'.format(give_asset))
        cursor.execute('select * from issuances where (status = ? and asset = ?)', ('valid', get_asset))
        if get_asset not in (config.WDC, config.XBJ) and not cursor.fetchall():
            problems.append('no such asset to get ({})'.format(get_asset))
        if expiration > config.MAX_EXPIRATION:
            problems.append('expiration overflow')
        # For SQLite3
        if give_quantity > config.MAX_INT or get_quantity > config.MAX_INT or fee_required > config.MAX_INT:
            problems.append('integer overflow')
        return problems
    finally:
        # Bug fix: the original leaked the cursor on the early type-check
        # returns; closing in a finally covers every exit path.
        cursor.close()
def compose (db, source, give_asset, give_quantity, get_asset, get_quantity, expiration, fee_required):
    """Construct the (source, destinations, data) tuple for a new order.

    Warns (non-fatally) if the source looks short of WDC; raises
    OrderError on insufficient non-WDC balance or invalid parameters.
    """
    cursor = db.cursor()
    # Balance check.  WDC is held on-chain, so only warn; other assets are
    # escrowed from the balances table, so a shortfall is fatal.
    if give_asset == config.WDC:
        unspent_total = sum(out['amount'] for out in worldcoin.get_unspent_txouts(source))
        if unspent_total * config.UNIT < give_quantity:
            print('WARNING: insufficient funds for {}pay.'.format(config.WDC))
    else:
        balances = list(cursor.execute('''SELECT * FROM balances WHERE (address = ? AND asset = ?)''', (source, give_asset)))
        if not balances or balances[0]['quantity'] < give_quantity:
            raise exceptions.OrderError('insufficient funds')
    current_block_index = util.last_block(db)['block_index']
    problems = validate(db, source, give_asset, give_quantity, get_asset, get_quantity, expiration, fee_required, current_block_index)
    if problems:
        raise exceptions.OrderError(problems)
    # Serialize: message-type ID followed by the packed order fields.
    data = struct.pack(config.TXTYPE_FORMAT, ID)
    data += struct.pack(FORMAT, util.asset_id(give_asset), give_quantity,
                        util.asset_id(get_asset), get_quantity,
                        expiration, fee_required)
    cursor.close()
    return (source, [], data)
def parse (db, tx, message):
    """Parse an order transaction: unpack the message, clamp the order to
    the source's balance, validate, escrow the give asset, record the
    order, and attempt to match it."""
    order_parse_cursor = db.cursor()
    # Unpack message.
    try:
        if len(message) != LENGTH:
            raise exceptions.UnpackError
        give_id, give_quantity, get_id, get_quantity, expiration, fee_required = struct.unpack(FORMAT, message)
        give_asset = util.asset_name(give_id)
        get_asset = util.asset_name(get_id)
        status = 'open'
    except (exceptions.UnpackError, exceptions.AssetNameError, struct.error) as e:
        give_asset, give_quantity, get_asset, get_quantity, expiration, fee_required = 0, 0, 0, 0, 0, 0
        status = 'invalid: could not unpack'
    price = 0
    if status == 'open':
        try:
            price = util.price(get_quantity, give_quantity, tx['block_index'])
        except ZeroDivisionError:
            price = 0
        # Overorder: clamp give_quantity to the source's available balance,
        # scaling get_quantity down at the same price.
        order_parse_cursor.execute('''SELECT * FROM balances \
                                      WHERE (address = ? AND asset = ?)''', (tx['source'], give_asset))
        balances = list(order_parse_cursor)
        if give_asset != config.WDC:
            if not balances:
                give_quantity = 0
            else:
                balance = balances[0]['quantity']
                if balance < give_quantity:
                    give_quantity = balance
                    get_quantity = int(price * give_quantity)
        problems = validate(db, tx['source'], give_asset, give_quantity, get_asset, get_quantity, expiration, fee_required, tx['block_index'])
        if problems: status = 'invalid: ' + '; '.join(problems)
    # Debit give quantity. (Escrow.)
    if status == 'open':
        if give_asset != config.WDC: # No need (or way) to debit WDC.
            util.debit(db, tx['block_index'], tx['source'], give_asset, give_quantity, action='open order', event=tx['tx_hash'])
    # Add parsed transaction to message-type–specific table.
    bindings = {
        'tx_index': tx['tx_index'],
        'tx_hash': tx['tx_hash'],
        'block_index': tx['block_index'],
        'source': tx['source'],
        'give_asset': give_asset,
        'give_quantity': give_quantity,
        'give_remaining': give_quantity,
        'get_asset': get_asset,
        'get_quantity': get_quantity,
        'get_remaining': get_quantity,
        'expiration': expiration,
        'expire_index': tx['block_index'] + expiration,
        'fee_required': fee_required,
        'fee_required_remaining': fee_required,
        'fee_provided': tx['fee'],
        'fee_provided_remaining': tx['fee'],
        'status': status,
    }
    sql='insert into orders values(:tx_index, :tx_hash, :block_index, :source, :give_asset, :give_quantity, :give_remaining, :get_asset, :get_quantity, :get_remaining, :expiration, :expire_index, :fee_required, :fee_required_remaining, :fee_provided, :fee_provided_remaining, :status)'
    order_parse_cursor.execute(sql, bindings)
    # Match (skipped for mempool transactions).
    if status == 'open' and tx['block_index'] != config.MEMPOOL_BLOCK_INDEX:
        match(db, tx)
    order_parse_cursor.close()
def match (db, tx, block_index=None):
    """Attempt to match the open order in *tx* against opposing open orders.

    Opposing orders (give/get assets swapped) are considered cheapest-first,
    oldest-first as a tie-breaker (post-block-284500).  Each successful
    pairing records an order_matches row and updates both orders'
    remainings; asset-for-asset trades are credited immediately
    ('completed'), while trades involving WDC await payment ('pending').
    Behavior varies with *block_index* through a ladder of protocol
    changes — statement order here is consensus-critical.
    """
    cursor = db.cursor()
    # Get order in question.
    orders = list(cursor.execute('''SELECT * FROM orders\
                                 WHERE (tx_index = ? AND status = ?)''', (tx['tx_index'], 'open')))
    if not orders:
        cursor.close()
        return
    else:
        assert len(orders) == 1
        tx1 = orders[0]
    # Fetch the opposing side of the book.
    cursor.execute('''SELECT * FROM orders \
                   WHERE (give_asset=? AND get_asset=? AND status=? AND tx_hash != ?)''',
                   (tx1['get_asset'], tx1['give_asset'], 'open', tx1['tx_hash']))
    tx1_give_remaining = tx1['give_remaining']
    tx1_get_remaining = tx1['get_remaining']
    order_matches = cursor.fetchall()
    if tx['block_index'] > 284500 or config.TESTNET: # Protocol change.
        order_matches = sorted(order_matches, key=lambda x: x['tx_index'])  # Sort by tx index second.
        order_matches = sorted(order_matches, key=lambda x: util.price(x['get_quantity'], x['give_quantity'], tx1['block_index']))  # Sort by price first.
    # Get fee remaining.
    tx1_fee_required_remaining = tx1['fee_required_remaining']
    tx1_fee_provided_remaining = tx1['fee_provided_remaining']
    tx1_status = tx1['status']
    for tx0 in order_matches:
        order_match_id = tx0['tx_hash'] + tx1['tx_hash']
        if not block_index:
            block_index = max(tx0['block_index'], tx1['block_index'])
        if tx1_status != 'open': break
        logging.debug('Considering: ' + tx0['tx_hash'])
        tx0_give_remaining = tx0['give_remaining']
        tx0_get_remaining = tx0['get_remaining']
        # Ignore previous matches. (Both directions, just to be sure.)
        cursor.execute('''SELECT * FROM order_matches
                       WHERE id = ? ''', (tx0['tx_hash'] + tx1['tx_hash'], ))
        if list(cursor):
            logging.debug('Skipping: previous match')
            continue
        cursor.execute('''SELECT * FROM order_matches
                       WHERE id = ? ''', (tx1['tx_hash'] + tx0['tx_hash'], ))
        if list(cursor):
            logging.debug('Skipping: previous match')
            continue
        # Get fee provided remaining.
        tx0_fee_required_remaining = tx0['fee_required_remaining']
        tx0_fee_provided_remaining = tx0['fee_provided_remaining']
        # Make sure that both orders still have funds remaining (if order involves WDC, and so cannot be ‘filled’).
        if tx0['give_asset'] == config.WDC or tx0['get_asset'] == config.WDC: # Gratuitous
            if tx0_give_remaining <= 0 or tx1_give_remaining <= 0:
                logging.debug('Skipping: negative give quantity remaining')
                continue
            if block_index >= 292000 and block_index <= 310500 and not config.TESTNET:  # Protocol changes
                if tx0_get_remaining <= 0 or tx1_get_remaining <= 0:
                    logging.debug('Skipping: negative get quantity remaining')
                    continue
        if block_index >= 294000 or config.TESTNET:  # Protocol change.
            if tx0['fee_required_remaining'] < 0:
                logging.debug('Skipping: negative tx0 fee required remaining')
                continue
            if tx0['fee_provided_remaining'] < 0:
                logging.debug('Skipping: negative tx0 fee provided remaining')
                continue
            if tx1_fee_provided_remaining < 0:
                logging.debug('Skipping: negative tx1 fee provided remaining')
                continue
            if tx1_fee_required_remaining < 0:
                logging.debug('Skipping: negative tx1 fee required remaining')
                continue
        # If the prices agree, make the trade. The found order sets the price,
        # and they trade as much as they can.
        tx0_price = util.price(tx0['get_quantity'], tx0['give_quantity'], block_index)
        tx1_price = util.price(tx1['get_quantity'], tx1['give_quantity'], block_index)
        tx1_inverse_price = util.price(tx1['give_quantity'], tx1['get_quantity'], block_index)
        # Protocol change.
        if tx['block_index'] < 286000: tx1_inverse_price = util.price(1, tx1_price, block_index)
        logging.debug('Tx0 Price: {}; Tx1 Inverse Price: {}'.format(float(tx0_price), float(tx1_inverse_price)))
        if tx0_price > tx1_inverse_price:
            logging.debug('Skipping: price mismatch.')
        else:
            logging.debug('Potential forward quantities: {}, {}'.format(tx0_give_remaining, int(util.price(tx1_give_remaining, tx0_price, block_index))))
            forward_quantity = int(min(tx0_give_remaining, int(util.price(tx1_give_remaining, tx0_price, block_index))))
            logging.debug('Forward Quantity: {}'.format(forward_quantity))
            backward_quantity = round(forward_quantity * tx0_price)
            logging.debug('Backward Quantity: {}'.format(backward_quantity))
            if not forward_quantity:
                logging.debug('Skipping: zero forward quantity.')
                continue
            if block_index >= 286500 or config.TESTNET:    # Protocol change.
                if not backward_quantity:
                    logging.debug('Skipping: zero backward quantity.')
                    continue
            forward_asset, backward_asset = tx1['get_asset'], tx1['give_asset']
            if block_index >= 313900 or config.TESTNET: # Protocol change.
                min_wdc_quantity = 0.001 * config.UNIT  # 0.001 WDC
                if (forward_asset == config.WDC and forward_quantity <= min_wdc_quantity) or (backward_asset == config.WDC and backward_quantity <= min_wdc_quantity):
                    logging.debug('Skipping: below minimum {} quantity'.format(config.WDC))
                    continue
            # Check and update fee remainings.
            fee = 0
            if block_index >= 286500 or config.TESTNET: # Protocol change. Deduct fee_required from fee_provided_remaining, etc., if possible (else don’t match).
                if tx1['get_asset'] == config.WDC:
                    if block_index >= 310500 or config.TESTNET:  # Protocol change.
                        fee = int(tx1['fee_required'] * util.price(backward_quantity, tx1['give_quantity'], block_index))
                    else:
                        fee = int(tx1['fee_required_remaining'] * util.price(forward_quantity, tx1_get_remaining, block_index))
                    logging.debug('Tx0 fee provided remaining: {}; required fee: {}'.format(tx0_fee_provided_remaining / config.UNIT, fee / config.UNIT))
                    if tx0_fee_provided_remaining < fee:
                        logging.debug('Skipping: tx0 fee provided remaining is too low.')
                        continue
                    else:
                        tx0_fee_provided_remaining -= fee
                        if block_index >= 287800 or config.TESTNET: # Protocol change.
                            tx1_fee_required_remaining -= fee
                elif tx1['give_asset'] == config.WDC:
                    if block_index >= 310500 or config.TESTNET:  # Protocol change.
                        fee = int(tx0['fee_required'] * util.price(backward_quantity, tx0['give_quantity'], block_index))
                    else:
                        fee = int(tx0['fee_required_remaining'] * util.price(backward_quantity, tx0_get_remaining, block_index))
                    logging.debug('Tx1 fee provided remaining: {}; required fee: {}'.format(tx1_fee_provided_remaining / config.UNIT, fee / config.UNIT))
                    if tx1_fee_provided_remaining < fee:
                        logging.debug('Skipping: tx1 fee provided remaining is too low.')
                        continue
                    else:
                        tx1_fee_provided_remaining -= fee
                        if block_index >= 287800 or config.TESTNET: # Protocol change.
                            tx0_fee_required_remaining -= fee
            else:   # Don’t deduct.
                if tx1['get_asset'] == config.WDC:
                    if tx0_fee_provided_remaining < tx1['fee_required']: continue
                elif tx1['give_asset'] == config.WDC:
                    if tx1_fee_provided_remaining < tx0['fee_required']: continue
            # WDC-involving matches wait for payment; asset-for-asset
            # matches settle immediately.
            if config.WDC in (tx1['give_asset'], tx1['get_asset']):
                status = 'pending'
            else:
                status = 'completed'
                # Credit.
                util.credit(db, tx['block_index'], tx1['source'], tx1['get_asset'],
                            forward_quantity, action='order match', event=order_match_id)
                util.credit(db, tx['block_index'], tx0['source'], tx0['get_asset'],
                            backward_quantity, action='order match', event=order_match_id)
            # Debit the order, even if it involves giving worldcoins, and so one
            # can't debit the sending account.
            # Get remainings may be negative.
            tx0_give_remaining -= forward_quantity
            tx0_get_remaining -= backward_quantity
            tx1_give_remaining -= backward_quantity
            tx1_get_remaining -= forward_quantity
            # Update give_remaining, get_remaining.
            # tx0
            tx0_status = 'open'
            if tx0_give_remaining <= 0 or (tx0_get_remaining <= 0 and (block_index >= 292000 or config.TESTNET)): # Protocol change
                if tx0['give_asset'] != config.WDC and tx0['get_asset'] != config.WDC:
                    # Fill order, and recredit give_remaining.
                    tx0_status = 'filled'
                    util.credit(db, block_index, tx0['source'], tx0['give_asset'], tx0_give_remaining, event=tx1['tx_hash'], action='filled')
            bindings = {
                'give_remaining': tx0_give_remaining,
                'get_remaining': tx0_get_remaining,
                'fee_required_remaining': tx0_fee_required_remaining,
                'fee_provided_remaining': tx0_fee_provided_remaining,
                'status': tx0_status,
                'tx_hash': tx0['tx_hash']
            }
            sql='update orders set give_remaining = :give_remaining, get_remaining = :get_remaining, fee_required_remaining = :fee_required_remaining, fee_provided_remaining = :fee_provided_remaining, status = :status where tx_hash = :tx_hash'
            cursor.execute(sql, bindings)
            util.message(db, block_index, 'update', 'orders', bindings)
            # tx1
            if tx1_give_remaining <= 0 or (tx1_get_remaining <= 0 and (block_index >= 292000 or config.TESTNET)): # Protocol change
                if tx1['give_asset'] != config.WDC and tx1['get_asset'] != config.WDC:
                    # Fill order, and recredit give_remaining.
                    tx1_status = 'filled'
                    util.credit(db, block_index, tx1['source'], tx1['give_asset'], tx1_give_remaining, event=tx0['tx_hash'], action='filled')
            bindings = {
                'give_remaining': tx1_give_remaining,
                'get_remaining': tx1_get_remaining,
                'fee_required_remaining': tx1_fee_required_remaining,
                'fee_provided_remaining': tx1_fee_provided_remaining,
                'status': tx1_status,
                'tx_hash': tx1['tx_hash']
            }
            sql='update orders set give_remaining = :give_remaining, get_remaining = :get_remaining, fee_required_remaining = :fee_required_remaining, fee_provided_remaining = :fee_provided_remaining, status = :status where tx_hash = :tx_hash'
            cursor.execute(sql, bindings)
            util.message(db, block_index, 'update', 'orders', bindings)
            # Calculate when the match will expire.
            if block_index >= 308000 or config.TESTNET: # Protocol change.
                match_expire_index = block_index + 20
            elif block_index >= 286500 or config.TESTNET:   # Protocol change.
                match_expire_index = block_index + 10
            else:
                match_expire_index = min(tx0['expire_index'], tx1['expire_index'])
            # Record order match.
            bindings = {
                'id': tx0['tx_hash'] + tx['tx_hash'],
                'tx0_index': tx0['tx_index'],
                'tx0_hash': tx0['tx_hash'],
                'tx0_address': tx0['source'],
                'tx1_index': tx1['tx_index'],
                'tx1_hash': tx1['tx_hash'],
                'tx1_address': tx1['source'],
                'forward_asset': forward_asset,
                'forward_quantity': forward_quantity,
                'backward_asset': backward_asset,
                'backward_quantity': backward_quantity,
                'tx0_block_index': tx0['block_index'],
                'tx1_block_index': tx1['block_index'],
                'block_index': block_index,
                'tx0_expiration': tx0['expiration'],
                'tx1_expiration': tx1['expiration'],
                'match_expire_index': match_expire_index,
                'fee_paid': fee,
                'status': status,
            }
            sql='insert into order_matches values(:id, :tx0_index, :tx0_hash, :tx0_address, :tx1_index, :tx1_hash, :tx1_address, :forward_asset, :forward_quantity, :backward_asset, :backward_quantity, :tx0_block_index, :tx1_block_index, :block_index, :tx0_expiration, :tx1_expiration, :match_expire_index, :fee_paid, :status)'
            cursor.execute(sql, bindings)
            if tx1_status == 'filled':
                break
    cursor.close()
    return
def expire (db, block_index):
    """Expire open orders and pending order matches as of *block_index*.

    Refund logic lives inside cancel_order()/cancel_order_match(); this
    function only selects the expired rows and delegates.  After protocol
    block 315000 (or always on testnet) both counterparty orders of each
    expired match are fed back through match() so any still-open remainder
    can pair with other open orders.
    """
    cursor = db.cursor()

    # Expire orders and give refunds for the quantity give_remaining (if non-zero; if not WDC).
    cursor.execute('''SELECT * FROM orders \
                      WHERE (status = ? AND expire_index < ?)''', ('open', block_index))
    # Materialise the result set before mutating the same tables below.
    orders = list(cursor)
    for order in orders:
        cancel_order(db, order, 'expired', block_index)

    # Expire order_matches for WDC with no WDC.
    cursor.execute('''SELECT * FROM order_matches \
                      WHERE (status = ? and match_expire_index < ?)''', ('pending', block_index))
    order_matches = list(cursor)
    for order_match in order_matches:
        cancel_order_match(db, order_match, 'expired', block_index)

    if block_index >= 315000 or config.TESTNET: # Protocol change.
        # Re-match: re-run matching for both sides of every expired match.
        for order_match in order_matches:
            cursor.execute('''SELECT * FROM transactions\
                              WHERE tx_hash = ?''', (order_match['tx0_hash'],))
            match(db, list(cursor)[0], block_index)
            cursor.execute('''SELECT * FROM transactions\
                              WHERE tx_hash = ?''', (order_match['tx1_hash'],))
            match(db, list(cursor)[0], block_index)

    cursor.close()
    return
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| {
"content_hash": "34394f19b877475f15b33114ad1e3416",
"timestamp": "",
"source": "github",
"line_count": 608,
"max_line_length": 326,
"avg_line_length": 49.82401315789474,
"alnum_prop": 0.5797048823160467,
"repo_name": "Bluejudy/bluejudyd",
"id": "6d54b2136a7ca8fae9c7ad1341769eaeaa761abd",
"size": "30476",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/order.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "5576"
},
{
"name": "Python",
"bytes": "403533"
},
{
"name": "Shell",
"bytes": "5102"
}
],
"symlink_target": ""
} |
import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import batma
class Game(batma.Scene):
    """Minimal batma scene that renders a centered "Hello World!" label."""

    def initialize(self):
        """No setup is needed before content loading."""
        pass

    def load_content(self):
        """Create the label drawn each frame, anchored at the display center."""
        anchor = batma.display.center
        self.label = batma.Text(u'Hello World!', anchor)

    def unload_content(self):
        """Nothing to release."""
        pass

    def update(self, tick):
        """Static scene: no per-frame state changes."""
        pass

    def draw(self):
        """Render the label."""
        self.label.draw()
# Instantiate the scene and hand control to batma's main loop.
game = Game()
batma.run(game)
"content_hash": "517ff4a61e9a02840fffdf68fde1fb2a",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 70,
"avg_line_length": 18.083333333333332,
"alnum_prop": 0.6036866359447005,
"repo_name": "renatopp/batma",
"id": "2c7f204ddc99c779a48f921ebf06d9adc478018c",
"size": "458",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "samples/hello_world.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "165387"
}
],
"symlink_target": ""
} |
"""Definition of bifrost operator strategy."""
# pylint: disable=invalid-name,unused-argument,wildcard-import,unused-wildcard-import
import re
from tvm import topi
from .generic import *
from .. import op as _op
@conv2d_strategy.register("bifrost")
def conv2d_strategy_bifrost(attrs, inputs, out_type, target):
    """conv2d mali(bifrost) strategy.

    Selects a compute/schedule pair based on data layout, kernel layout,
    strides and dilation, distinguishing plain, depthwise and grouped
    conv2d.  Only NCHW layouts are supported on Mali (Bifrost).
    """
    strategy = _op.OpStrategy()
    data, kernel = inputs
    dilation_h, dilation_w = attrs.get_int_tuple("dilation")
    stride_h, stride_w = attrs.get_int_tuple("strides")
    groups = attrs.groups
    layout = attrs.data_layout
    kernel_layout = attrs.kernel_layout
    if dilation_h < 1 or dilation_w < 1:
        raise ValueError("dilation should be positive value")
    if groups == 1:
        if layout == "NCHW":
            if kernel_layout == "OIHW":
                strategy.add_implementation(
                    wrap_compute_conv2d(topi.bifrost.conv2d_nchw_spatial_pack),
                    wrap_topi_schedule(topi.bifrost.schedule_conv2d_nchw_spatial_pack),
                    name="conv2d_nchw_spatial_pack.bifrost",
                )
                # Winograd applies only to unit-stride, undilated 3x3 kernels;
                # registered at plevel=5 so it is preferred over spatial pack
                # when both are applicable.
                _, _, kh, kw = get_const_tuple(kernel.shape)
                if (
                    kh == 3
                    and kw == 3
                    and stride_h == 1
                    and stride_w == 1
                    and dilation_h == 1
                    and dilation_w == 1
                ):
                    strategy.add_implementation(
                        wrap_compute_conv2d(topi.bifrost.conv2d_nchw_winograd),
                        wrap_topi_schedule(topi.bifrost.schedule_conv2d_nchw_winograd),
                        name="conv2d_nchw_winograd.bifrost",
                        plevel=5,
                    )
            elif re.match(r"OIHW\d*o", kernel_layout):
                # Pre-packed kernel layouts (e.g. OIHW4o) reuse spatial pack.
                strategy.add_implementation(
                    wrap_compute_conv2d(topi.bifrost.conv2d_nchw_spatial_pack),
                    wrap_topi_schedule(topi.bifrost.schedule_conv2d_nchw_spatial_pack),
                    name="conv2d_nchw_spatial_pack.bifrost",
                )
        else:
            raise RuntimeError("Unsupported conv2d layout {} for Mali(Bifrost)".format(layout))
    elif is_depthwise_conv2d(data.shape, layout, kernel.shape, kernel_layout, groups):
        if layout == "NCHW":
            assert kernel_layout == "OIHW"
            strategy.add_implementation(
                wrap_compute_conv2d(topi.nn.depthwise_conv2d_nchw),
                wrap_topi_schedule(topi.bifrost.schedule_depthwise_conv2d_nchw),
                name="depthwise_conv2d_nchw.bifrost",
            )
        else:
            raise RuntimeError(
                "Unsupported depthwise_conv2d layout {} for Mali(Bifrost)".format(layout)
            )
    else:  # group_conv2d
        raise RuntimeError("group_conv2d is not supported for Mali(Bifrost)")
    return strategy
@conv2d_winograd_without_weight_transfrom_strategy.register("bifrost")
def conv2d_winograd_without_weight_transfrom_strategy_bifrost(attrs, inputs, out_type, target):
    """conv2d_winograd_without_weight_transfrom mali(bifrost) strategy.

    Only NCHW layout with unit strides, unit dilation and a single group is
    supported.  (The "transfrom" spelling is kept deliberately: it matches
    the generic strategy this function is registered against.)
    """
    dilation = attrs.get_int_tuple("dilation")
    groups = attrs.get_int("groups")
    layout = attrs.data_layout
    strides = attrs.get_int_tuple("strides")
    assert dilation == (1, 1), "Do not support dilate now"
    assert strides == (1, 1), "Do not support strides now"
    # Fixed typo in the assertion message ("supoort" -> "support").
    assert groups == 1, "Do not support arbitrary group number"
    strategy = _op.OpStrategy()
    if layout == "NCHW":
        strategy.add_implementation(
            wrap_compute_conv2d(topi.bifrost.conv2d_nchw_winograd),
            wrap_topi_schedule(topi.bifrost.schedule_conv2d_nchw_winograd),
            name="conv2d_nchw_winograd.bifrost",
        )
    else:
        raise RuntimeError(
            "Unsupported conv2d_winograd_without_weight_transfrom layout {}".format(layout)
        )
    return strategy
@dense_strategy.register("bifrost")
def dense_strategy_bifrost(attrs, inputs, out_type, target):
    """dense mali(bifrost) strategy.

    Registers the single Bifrost dense compute/schedule pair.
    """
    bifrost_strategy = _op.OpStrategy()
    compute = wrap_compute_dense(topi.bifrost.dense)
    schedule = wrap_topi_schedule(topi.bifrost.schedule_dense)
    bifrost_strategy.add_implementation(
        compute,
        schedule,
        name="dense.bifrost",
    )
    return bifrost_strategy
| {
"content_hash": "2f01523bfb3caff7594ce92537eb8305",
"timestamp": "",
"source": "github",
"line_count": 104,
"max_line_length": 95,
"avg_line_length": 41.5,
"alnum_prop": 0.5998609823911029,
"repo_name": "sxjscience/tvm",
"id": "24e68a47bbeb2364bb3d1e56dd46a61afea4ca79",
"size": "5101",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "python/tvm/relay/op/strategy/bifrost.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "6056"
},
{
"name": "C",
"bytes": "95567"
},
{
"name": "C++",
"bytes": "5565032"
},
{
"name": "CMake",
"bytes": "67305"
},
{
"name": "Go",
"bytes": "112376"
},
{
"name": "HTML",
"bytes": "8625"
},
{
"name": "Java",
"bytes": "173219"
},
{
"name": "JavaScript",
"bytes": "49801"
},
{
"name": "Makefile",
"bytes": "50818"
},
{
"name": "Objective-C",
"bytes": "15264"
},
{
"name": "Objective-C++",
"bytes": "46673"
},
{
"name": "Python",
"bytes": "6763729"
},
{
"name": "Rust",
"bytes": "182027"
},
{
"name": "Scala",
"bytes": "184105"
},
{
"name": "Shell",
"bytes": "96967"
},
{
"name": "Tcl",
"bytes": "53645"
},
{
"name": "Verilog",
"bytes": "30605"
}
],
"symlink_target": ""
} |
import re
import sys
import eventlet
eventlet.monkey_patch()
from oslo.config import cfg
from oslo import messaging
from neutron.agent.common import config
from neutron.agent.linux import ip_lib
from neutron.agent.linux import utils
from neutron.common import config as common_cfg
from neutron.common import rpc
from neutron.common import utils as neutron_utils
from neutron.db import agents_db
from neutron.i18n import _LE, _LI
from neutron import manager
from neutron.openstack.common import lockutils
from neutron.openstack.common import log as logging
from neutron.openstack.common import periodic_task
from neutron.openstack.common import service as svc
from neutron.plugins.ml2.drivers.cisco.apic import mechanism_apic as ma
from neutron.plugins.ml2.drivers import type_vlan # noqa
from neutron import service
ACI_PORT_DESCR_FORMATS = [
'topology/pod-1/node-(\d+)/sys/conng/path-\[eth(\d+)/(\d+)\]',
'topology/pod-1/paths-(\d+)/pathep-\[eth(\d+)/(\d+)\]',
]
AGENT_FORCE_UPDATE_COUNT = 100
BINARY_APIC_SERVICE_AGENT = 'neutron-cisco-apic-service-agent'
BINARY_APIC_HOST_AGENT = 'neutron-cisco-apic-host-agent'
TOPIC_APIC_SERVICE = 'apic-service'
TYPE_APIC_SERVICE_AGENT = 'cisco-apic-service-agent'
TYPE_APIC_HOST_AGENT = 'cisco-apic-host-agent'
LOG = logging.getLogger(__name__)
class ApicTopologyService(manager.Manager):
    """RPC server side of APIC topology tracking.

    Consumes fan-out ``update_link`` casts from host agents on the
    ``apic-service`` topic and reconciles them against an in-memory
    ``peers`` cache and the APIC manager's hostlink table.
    """

    target = messaging.Target(version='1.1')

    def __init__(self, host=None):
        # Default to this machine's hostname when none is supplied.
        if host is None:
            host = neutron_utils.get_hostname()
        super(ApicTopologyService, self).__init__(host=host)
        self.conf = cfg.CONF.ml2_cisco_apic
        self.conn = None
        # peers maps (host, interface) -> full link tuple.
        self.peers = {}
        self.invalid_peers = []
        self.dispatcher = None
        self.state = None
        self.state_agent = None
        self.topic = TOPIC_APIC_SERVICE
        self.apic_manager = ma.APICMechanismDriver.get_apic_manager(False)

    def init_host(self):
        """Build the agent-state report and start consuming RPC messages."""
        LOG.info(_LI("APIC service agent starting ..."))
        self.state = {
            'binary': BINARY_APIC_SERVICE_AGENT,
            'host': self.host,
            'topic': self.topic,
            'configurations': {},
            'start_flag': True,
            'agent_type': TYPE_APIC_SERVICE_AGENT,
        }

        self.conn = rpc.create_connection(new=True)
        # Dispatch both topology updates (self) and agents_db heartbeats.
        self.dispatcher = [self, agents_db.AgentExtRpcCallback()]
        self.conn.create_consumer(
            self.topic, self.dispatcher, fanout=True)
        self.conn.consume_in_threads()

    def after_start(self):
        LOG.info(_LI("APIC service agent started"))

    def report_send(self, context):
        """Send the periodic agent-state report, tolerating old servers."""
        if not self.state_agent:
            return
        LOG.debug("APIC service agent: sending report state")

        try:
            self.state_agent.report_state(context, self.state)
            # Only the first successful report should carry start_flag.
            self.state.pop('start_flag', None)
        except AttributeError:
            # This means the server does not support report_state
            # ignore it
            return
        except Exception:
            LOG.exception(_LE("APIC service agent: failed in reporting state"))

    @lockutils.synchronized('apic_service')
    def update_link(self, context,
                    host, interface, mac,
                    switch, module, port):
        """Apply one link report; ``switch == 0`` encodes a deletion.

        NOTE(review): ApicTopologyServiceNotifierApi also casts a
        'delete_link' method which this class does not implement; deletion
        appears to rely solely on the switch == 0 convention here --
        confirm against the RPC dispatcher behavior.
        """
        LOG.debug("APIC service agent: received update_link: %s",
                  ", ".join(map(str,
                                [host, interface, mac, switch, module, port])))

        nlink = (host, interface, mac, switch, module, port)
        clink = self.peers.get((host, interface), None)

        if switch == 0:
            # this is a link delete, remove it
            if clink is not None:
                self.apic_manager.remove_hostlink(*clink)
                self.peers.pop((host, interface))
        else:
            if clink is None:
                # add new link to database
                self.apic_manager.add_hostlink(*nlink)
                self.peers[(host, interface)] = nlink
            elif clink != nlink:
                # delete old link and add new one (don't update in place)
                self.apic_manager.remove_hostlink(*clink)
                self.peers.pop((host, interface))
                self.apic_manager.add_hostlink(*nlink)
                self.peers[(host, interface)] = nlink
class ApicTopologyServiceNotifierApi(object):
    """Fan-out RPC notifier for host-link changes on the APIC service topic."""

    def __init__(self):
        rpc_target = messaging.Target(topic=TOPIC_APIC_SERVICE, version='1.0')
        self.client = rpc.get_client(rpc_target)

    def update_link(self, context, host, interface, mac, switch, module, port):
        """Broadcast a link update to every listening service agent."""
        ctx = self.client.prepare(version='1.1', fanout=True)
        ctx.cast(context, 'update_link', host=host, interface=interface,
                 mac=mac, switch=switch, module=module, port=port)

    def delete_link(self, context, host, interface):
        """Broadcast a link deletion (an update with all-zero switch data)."""
        ctx = self.client.prepare(version='1.1', fanout=True)
        ctx.cast(context, 'delete_link', host=host, interface=interface,
                 mac=None, switch=0, module=0, port=0)
class ApicTopologyAgent(manager.Manager):
    """Host-side agent: polls LLDP neighbor data on uplink interfaces and
    reports host<->switch link tuples to the topology service over RPC.
    """

    def __init__(self, host=None):
        if host is None:
            host = neutron_utils.get_hostname()
        super(ApicTopologyAgent, self).__init__(host=host)

        self.conf = cfg.CONF.ml2_cisco_apic
        self.count_current = 0
        # Every count_force_send polls, re-send all links even if unchanged
        # so the service can recover from missed updates.
        self.count_force_send = AGENT_FORCE_UPDATE_COUNT
        # interface name -> cached MAC address
        self.interfaces = {}
        self.lldpcmd = None
        self.peers = {}
        # NOTE(review): under Python 3 map() returns a one-shot iterator,
        # which would be exhausted after the first _get_peers() call; this
        # relies on Python 2 map() returning a list -- confirm runtime.
        self.port_desc_re = map(re.compile, ACI_PORT_DESCR_FORMATS)
        self.root_helper = self.conf.root_helper
        self.service_agent = ApicTopologyServiceNotifierApi()
        self.state = None
        self.state_agent = None
        self.topic = TOPIC_APIC_SERVICE
        self.uplink_ports = []
        self.invalid_peers = []

    def init_host(self):
        """Build the agent-state report and the lldpctl command line from
        the configured uplink ports that actually exist on this host."""
        LOG.info(_LI("APIC host agent: agent starting on %s"), self.host)
        self.state = {
            'binary': BINARY_APIC_HOST_AGENT,
            'host': self.host,
            'topic': self.topic,
            'configurations': {},
            'start_flag': True,
            'agent_type': TYPE_APIC_HOST_AGENT,
        }

        self.uplink_ports = []
        for inf in self.conf.apic_host_uplink_ports:
            if ip_lib.device_exists(inf):
                self.uplink_ports.append(inf)
            else:
                # ignore unknown interfaces
                LOG.error(_LE("No such interface (ignored): %s"), inf)
        self.lldpcmd = ['lldpctl', '-f', 'keyvalue'] + self.uplink_ports

    def after_start(self):
        LOG.info(_LI("APIC host agent: started on %s"), self.host)

    @periodic_task.periodic_task
    def _check_for_new_peers(self, context):
        """Periodic task: diff current LLDP peers against the last poll and
        notify the service of additions, changes and removals."""
        LOG.debug("APIC host agent: _check_for_new_peers")

        if not self.lldpcmd:
            return
        try:
            # Check if we must send update even if there is no change
            force_send = False
            self.count_current += 1
            if self.count_current >= self.count_force_send:
                force_send = True
                self.count_current = 0

            # Check for new peers
            new_peers = self._get_peers()
            new_peers = self._valid_peers(new_peers)

            # Make a copy of current interfaces
            curr_peers = {}
            for interface in self.peers:
                curr_peers[interface] = self.peers[interface]

            # Based curr -> new updates, add the new interfaces
            self.peers = {}
            for interface in new_peers:
                peer = new_peers[interface]
                self.peers[interface] = peer
                # A changed peer is first reported as deleted (all-zero
                # switch data) before its new tuple is sent.
                if (interface in curr_peers and
                        curr_peers[interface] != peer):
                    self.service_agent.update_link(
                        context, peer[0], peer[1], None, 0, 0, 0)
                if (interface not in curr_peers or
                        curr_peers[interface] != peer or
                        force_send):
                    self.service_agent.update_link(context, *peer)
                if interface in curr_peers:
                    curr_peers.pop(interface)

            # Any interface still in curr_peers need to be deleted
            for peer in curr_peers.values():
                self.service_agent.update_link(
                    context, peer[0], peer[1], None, 0, 0, 0)

        except Exception:
            LOG.exception(_LE("APIC service agent: exception in LLDP parsing"))

    def _get_peers(self):
        """Run lldpctl and parse its key=value output into candidate link
        tuples, keyed by local interface name."""
        peers = {}
        lldpkeys = utils.execute(self.lldpcmd, self.root_helper)
        for line in lldpkeys.splitlines():
            if '=' not in line:
                continue
            fqkey, value = line.split('=', 1)
            lldp, interface, key = fqkey.split('.', 2)
            if key == 'port.descr':
                # Match ACI port descriptions to extract switch/module/port.
                for regexp in self.port_desc_re:
                    match = regexp.match(value)
                    if match:
                        mac = self._get_mac(interface)
                        switch, module, port = match.group(1, 2, 3)
                        peer = (self.host, interface, mac,
                                switch, module, port)
                        if interface not in peers:
                            peers[interface] = []
                        peers[interface].append(peer)
        return peers

    def _valid_peers(self, peers):
        # Reduce the peers array to one valid peer per interface
        # NOTE:
        # There is a bug in lldpd daemon that it keeps reporting
        # old peers even after their updates have stopped
        # we keep track of that report remove them from peers

        valid_peers = {}
        invalid_peers = []
        for interface in peers:
            curr_peer = None
            for peer in peers[interface]:
                # Keep the first peer not previously marked invalid; any
                # extra peers on the same interface are marked invalid.
                if peer in self.invalid_peers or curr_peer:
                    invalid_peers.append(peer)
                else:
                    curr_peer = peer
            if curr_peer is not None:
                valid_peers[interface] = curr_peer

        self.invalid_peers = invalid_peers
        return valid_peers

    def _get_mac(self, interface):
        """Return the MAC address of *interface*, caching lookups.

        Returns None implicitly if the address cannot be read.
        """
        if interface in self.interfaces:
            return self.interfaces[interface]
        try:
            mac = ip_lib.IPDevice(interface).link.address
            self.interfaces[interface] = mac
            return mac
        except Exception:
            # we can safely ignore it, it is only needed for debugging
            LOG.exception(
                _LE("APIC service agent: can not get MACaddr for %s"),
                interface)

    def report_send(self, context):
        """Send the periodic agent-state report, tolerating old servers."""
        if not self.state_agent:
            return
        LOG.debug("APIC host agent: sending report state")

        try:
            self.state_agent.report_state(context, self.state)
            # Only the first successful report should carry start_flag.
            self.state.pop('start_flag', None)
        except AttributeError:
            # This means the server does not support report_state
            # ignore it
            return
        except Exception:
            LOG.exception(_LE("APIC host agent: failed in reporting state"))
def launch(binary, manager, topic=None):
    """Initialise neutron configuration/logging and run *manager* as a
    long-lived service under the given binary name."""
    cfg.CONF(project='neutron')
    common_cfg.init(sys.argv[1:])
    config.setup_logging()

    apic_conf = cfg.CONF.ml2_cisco_apic
    report_seconds = apic_conf.apic_agent_report_interval
    poll_seconds = apic_conf.apic_agent_poll_interval

    agent_service = service.Service.create(
        binary=binary, manager=manager, topic=topic,
        report_interval=report_seconds, periodic_interval=poll_seconds)
    svc.launch(agent_service).wait()
def service_main():
    """Entry point for the APIC topology *service* agent."""
    manager_path = ('neutron.plugins.ml2.drivers.'
                    'cisco.apic.apic_topology.ApicTopologyService')
    launch(BINARY_APIC_SERVICE_AGENT, manager_path, TOPIC_APIC_SERVICE)
def agent_main():
    """Entry point for the APIC topology *host* agent."""
    manager_path = ('neutron.plugins.ml2.drivers.'
                    'cisco.apic.apic_topology.ApicTopologyAgent')
    launch(BINARY_APIC_HOST_AGENT, manager_path)
| {
"content_hash": "e990524015f137ce3ab41d21de9a2227",
"timestamp": "",
"source": "github",
"line_count": 330,
"max_line_length": 79,
"avg_line_length": 36.236363636363635,
"alnum_prop": 0.579110219100184,
"repo_name": "CiscoSystems/neutron",
"id": "b747bb9a5422d40947033fdb91388e08ddc7a442",
"size": "12596",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "neutron/plugins/ml2/drivers/cisco/apic/apic_topology.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "21914"
},
{
"name": "JavaScript",
"bytes": "60527"
},
{
"name": "Makefile",
"bytes": "3295"
},
{
"name": "Python",
"bytes": "8821734"
},
{
"name": "Shell",
"bytes": "12988"
},
{
"name": "XSLT",
"bytes": "50907"
}
],
"symlink_target": ""
} |
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
sys.path.insert(0, os.path.abspath('../../../')) # include project root dir for autodoc
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.todo',
'sphinx.ext.viewcode',
'sphinx.ext.inheritance_diagram'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Kn0ckKn0ck'
copyright = u'2016, Valentijn Harmers'
author = u'Valentijn Harmers'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'0.1'
# The full version, including alpha/beta/rc tags.
release = u'0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# extension options
# NOTE: a missing comma previously fused 'inherited-members' and
# 'show-inheritance' into one invalid flag via implicit string
# concatenation ('inherited-membersshow-inheritance'), silently
# disabling both autodoc options.
autodoc_default_flags = ['members',
                         'undoc-members',
                         'inherited-members',
                         'show-inheritance']
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
'show_related': True,
'description': 'Stress testing tool',
'github_user': 'valentijn1995',
'github_repo': 'Kn0ckKn0ck',
'github_button': True
}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Kn0ckKn0ckdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Kn0ckKn0ck.tex', u'Kn0ckKn0ck Documentation',
u'Valentijn', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'kn0ckkn0ck', u'Kn0ckKn0ck Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Kn0ckKn0ck', u'Kn0ckKn0ck Documentation',
author, 'Kn0ckKn0ck', 'Stress testing tool',
'Software'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| {
"content_hash": "0268e59d3d43d15d787b912ed7e6be97",
"timestamp": "",
"source": "github",
"line_count": 290,
"max_line_length": 87,
"avg_line_length": 32.16896551724138,
"alnum_prop": 0.6971808339586236,
"repo_name": "Valentijn1995/Kn0ckKn0ck",
"id": "e14bd9a48c877b53d6786efb7b33b23a2704aee4",
"size": "9752",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Docs/Sphinx/source/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "7266"
},
{
"name": "Makefile",
"bytes": "7681"
},
{
"name": "Python",
"bytes": "76482"
}
],
"symlink_target": ""
} |
import sys
from sqlQueries import dropQuery, createQuery, insertEmptyRacerQuery, createRaceQuery, dropRaceQuery, incrementRaceQuery, getRaceNumQuery, getNumberOfRacersQuery
from PySide import QtCore
from PySide.QtSql import *
from PySide.QtGui import *
from PySide.QtDeclarative import QDeclarativeView
from track import Track
from racerCalculator import racerCalculator
class MainWindow(QMainWindow):
def __init__(self, track):
super(MainWindow, self).__init__()
self.track = track
self.initUI()
def initUI(self):
self.db = QSqlDatabase.addDatabase("QSQLITE")
self.db.setDatabaseName("piwood.db")
self.db.open()
# Show if there is a problem with the database connection
print("Database Connection", self.db.isValid())
self.model = QSqlTableModel(None, self.db)
self.model.setTable("racers")
self.model.setEditStrategy(QSqlTableModel.EditStrategy.OnFieldChange)
# TODO: Remove ID row
# self.model.removeColumn(0)
self.model.setHeaderData(0, QtCore.Qt.Orientation.Horizontal, "ID")
self.model.setHeaderData(1, QtCore.Qt.Orientation.Horizontal, "Name")
self.model.setHeaderData(2, QtCore.Qt.Orientation.Horizontal, "Race 1")
self.model.setHeaderData(3, QtCore.Qt.Orientation.Horizontal, "Race 2")
self.model.setHeaderData(4, QtCore.Qt.Orientation.Horizontal, "Race 3")
self.model.setHeaderData(5, QtCore.Qt.Orientation.Horizontal, "Average")
self.model.setHeaderData(6, QtCore.Qt.Orientation.Horizontal, "Scale Speed")
# Populates the table, needed to show data
self.model.select()
projectView = QTableView()
projectView.setModel(self.model)
# TODO: Throw into seperate loop and test
controls = QWidget()
controlsLayout = QVBoxLayout()
testButton = QPushButton("Test Gate, Sensors and Displays")
testButton.setToolTip("Run a test on the track's gate, displays and sensors. After the displays clear waivin your hand under the sensors will change the displays for about ten seconds, then the test is done.")
testButton.clicked.connect(self.track.test)
insertButton = QPushButton("&Insert Racer")
insertButton.setToolTip("Adds racer to table")
insertButton.clicked.connect(self.insertRacer)
clearButton = QPushButton("Clear the table")
clearButton.setToolTip("Clear the table")
# TODO: Make it kill the whole table.
clearButton.clicked.connect(lambda _: None)
startRaceButton = QPushButton("Start Race")
startRaceButton.clicked.connect(self.startRace)
goBackOneRaceButton = QPushButton("Go Back One Race")
goBackOneRaceButton.clicked.connect(self.goBackOneRace)
stopRaceButton = QPushButton("Stop Race")
stopRaceButton.clicked.connect(self.stopRace)
controlsLayout.addWidget(testButton)
controlsLayout.addWidget(insertButton)
controlsLayout.addWidget(clearButton)
controlsLayout.addWidget(startRaceButton)
controlsLayout.addWidget(goBackOneRaceButton)
controlsLayout.addWidget(stopRaceButton)
controls.setLayout(controlsLayout)
rightDockWidget = QDockWidget()
rightDockWidget.setWidget(controls)
rightDockWidget.setFeatures(QDockWidget.NoDockWidgetFeatures)
self.setCentralWidget(projectView)
self.addDockWidget(QtCore.Qt.DockWidgetArea.RightDockWidgetArea,
rightDockWidget)
# &E allows users to use Alt E
exitAction = QAction('&Exit', self)
exitAction.setShortcut('Ctrl+Q')
exitAction.setStatusTip('Exit application')
exitAction.triggered.connect(self.close)
menubar = self.menuBar()
fileMenu = menubar.addMenu('&File')
fileMenu.addAction(exitAction)
self.statusBar().showMessage('Ready')
# In case the windows is shrunk
self.setGeometry(300,300,250,150)
self.showMaximized()
self.setWindowTitle('PiWood-Derby')
self.show()
def insertRacer(self):
# -1 Puts the record at the end of the database
if QSqlQuery(insertEmptyRacerQuery, self.db) == False:
print(self.model.lastError())
# Populates the table, needed to show data
self.model.select()
def startRace(self):
QSqlQuery(incrementRaceQuery)
raceNum = QSqlQuery(getRaceNumQuery)
raceNum.first()
raceNum = raceNum.record().field(0).value()
numCars = QSqlQuery(getNumberOfRacersQuery)
numCars.first()
numCars = numCars.record().field(0).value()
self.racers = racerCalculator(raceNum, numCars)
msgBox = QMessageBox()
msgBox.setText("Place car numbered: " +
str(self.racers[0] + 1) + " " +
str(self.racers[1] + 1) + " " +
str(self.racers[2] + 1))
msgBox.exec_()
self.track.startRace()
def goBackOneRace(self):
QSqlQuery("UPDATE race_num SET num = num - 1;")
def stopRace(self):
times = self.track.stopRace()
print(times)
print("racers " + str(self.racers))
whichRace = self.getWhichRace(self.racers[0] + 1)
QSqlQuery("UPDATE racers SET time_" + str(whichRace) + " = " + str(times[0]) + " WHERE id = " + str(self.racers[0] + 1))
whichRace = self.getWhichRace(self.racers[1] + 1)
QSqlQuery("UPDATE racers SET time_" + str(whichRace) + " = " + str(times[1]) + " WHERE id = " + str(self.racers[1] + 1))
whichRace = self.getWhichRace(self.racers[2] + 1)
QSqlQuery("UPDATE racers SET time_" + str(whichRace) + " = " + str(times[2]) + " WHERE id = " + str(self.racers[2] + 1))
self.model.select()
def getWhichRace(self, id):
times = QSqlQuery("SELECT (time_1, time_2, time_3) FROM racers WHERE id = " + str(id))
times.first()
print(times.record().field(0).value())
if times.record().field(0).value() is None:
return 1
if times.record().field(1).value() is None:
return 2
if times.record().field(2).value() is None:
return 3
# Save all changes to the table
def saveTable(self):
if self.model.submitAll() == False:
print(self.model.lastError())
def clearTable(self):
QSqlQuery(dropQuery, self.db)
QSqlQuery(createQuery, self.db)
    def close(self):
        """Close the database connection, then close the window itself."""
        self.db.close()
        print("DB connection closed")
        super(MainWindow, self).close()
class PiWood:
    """Application entry point: builds the Qt application, the track
    interface, and the main window, then runs the Qt event loop.

    NOTE: the constructor blocks in ``app.exec_()`` and exits the
    process when the GUI closes.
    """

    def __init__(self):
        self.app = QApplication(sys.argv)
        self.track = Track()
        self.setUpGui(self.track)
        print('Setting up displays')
        # Run the Qt event loop; sys.exit propagates its return code.
        sys.exit(self.app.exec_())

    def setUpGui(self, track):
        # Keep a reference so the window is not garbage-collected.
        self.mw = MainWindow(track)
# Launch the GUI when run as a script (PiWood.__init__ blocks in the Qt loop).
if __name__ == '__main__':
    PiWood()
| {
"content_hash": "3165278e11187242d733f02bdc6c16aa",
"timestamp": "",
"source": "github",
"line_count": 188,
"max_line_length": 217,
"avg_line_length": 37.1968085106383,
"alnum_prop": 0.6406406406406406,
"repo_name": "Sam-Gram/PiWood-Derby",
"id": "d58fbe4eb2504fd3de954b988d9caf4246bdbebd",
"size": "7012",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "piwood.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "20756"
},
{
"name": "Shell",
"bytes": "546"
}
],
"symlink_target": ""
} |
__author__ = "Matteo Turilli, Andre Merzky"
__copyright__ = "Copyright 2013, AIMES project"
__license__ = "MIT"
| {
"content_hash": "caab78e97486581f0e83422dbc867308",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 47,
"avg_line_length": 22.8,
"alnum_prop": 0.6491228070175439,
"repo_name": "mturilli/aimes.emanager",
"id": "fdbd086688b2db5a12c36160efe9f83531928126",
"size": "115",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aimes/emanager/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "6790"
},
{
"name": "Python",
"bytes": "57072"
},
{
"name": "Shell",
"bytes": "7233"
}
],
"symlink_target": ""
} |
from .market import Market, TradeException
import time
import base64
import hmac
import urllib.request
import urllib.parse
import urllib.error
import urllib.request
import urllib.error
import urllib.parse
import hashlib
import sys
import json
import config
class PrivateBitstampUSD(Market):
    """Authenticated Bitstamp USD market wrapper (balance / buy / sell).

    Credentials are read from ``config`` and sent in the POST body, as
    the legacy Bitstamp API required.
    """

    balance_url = "https://www.bitstamp.net/api/balance/"
    buy_url = "https://www.bitstamp.net/api/buy/"
    sell_url = "https://www.bitstamp.net/api/sell/"

    def __init__(self):
        super().__init__()
        self.username = config.bitstamp_username
        self.password = config.bitstamp_password
        self.currency = "USD"
        self.get_info()

    def _send_request(self, url, params=None, extra_headers=None):
        """POST *params* plus credentials to *url*.

        Returns the decoded JSON payload on HTTP 200, otherwise None.
        """
        headers = {
            'Content-type': 'application/json',
            'Accept': 'application/json, text/javascript, */*; q=0.01',
            'User-Agent': 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'
        }
        if extra_headers is not None:
            headers.update(extra_headers)
        # BUG FIX: the original used a mutable default (`params={}`) and
        # mutated it with the credentials, sharing state across calls and
        # clobbering the caller's dict.  Copy instead.
        params = dict(params) if params else {}
        params['user'] = self.username
        params['password'] = self.password
        postdata = urllib.parse.urlencode(params).encode("utf-8")
        req = urllib.request.Request(url, postdata, headers=headers)
        response = urllib.request.urlopen(req)
        if response.getcode() == 200:
            return json.loads(response.read().decode('utf-8'))
        return None

    def _buy(self, amount, price):
        """Create a buy limit order"""
        params = {"amount": amount, "price": price}
        response = self._send_request(self.buy_url, params)
        if "error" in response:
            raise TradeException(response["error"])

    def _sell(self, amount, price):
        """Create a sell limit order"""
        params = {"amount": amount, "price": price}
        response = self._send_request(self.sell_url, params)
        if "error" in response:
            raise TradeException(response["error"])

    def get_info(self):
        """Refresh available BTC/USD balances from the balance endpoint."""
        response = self._send_request(self.balance_url)
        if response:
            self.btc_balance = float(response["btc_available"])
            self.usd_balance = float(response["usd_available"])
| {
"content_hash": "6687b43718359cf29283da74ec09a9d7",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 74,
"avg_line_length": 33.34782608695652,
"alnum_prop": 0.6101694915254238,
"repo_name": "taariq/btcmarketdata",
"id": "cf95a310adf1ee36dd06a9ce983fd624c9f5e56b",
"size": "2356",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "btcdata/private_markets/bitstampusd.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
from __future__ import absolute_import, unicode_literals
import sys
from functools import partial
from billiard.einfo import ExceptionInfo
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from django.test.testcases import TestCase as DjangoTestCase
from django.template import TemplateDoesNotExist
from anyjson import deserialize
from celery import current_app
from celery import states
from celery.task import task
from celery.utils import gen_unique_id, get_full_cls_name
from djcelery.views import task_webhook
from djcelery.tests.req import MockRequest
def reversestar(name, **kwargs):
    """``reverse`` variant that forwards keyword arguments as URL kwargs."""
    url_kwargs = dict(kwargs)
    return reverse(name, kwargs=url_kwargs)
class MyError(Exception):
    """Test exception with an id-free repr.

    On Py2.4 ``repr(exc)`` includes the object id, so comparing texts is
    pointless when the id of the "same" KeyError does not match.
    """

    def __repr__(self):
        return '<%s: %r>' % (type(self).__name__, self.args)
class MyRetryTaskError(MyError):
    """Variant of MyError used by tests exercising the RETRY state."""
    pass
# URL helpers bound to the djcelery view names used throughout these tests.
task_is_successful = partial(reversestar, 'celery-is_task_successful')
task_status = partial(reversestar, 'celery-task_status')
task_apply = partial(reverse, 'celery-apply')
registered_tasks = partial(reverse, 'celery-tasks')
# Shared scratchpad written by tasks so tests can observe that they ran.
scratch = {}
@task()
def mytask(x, y):
    # Record the product in the module-level `scratch` dict so tests can
    # observe that the task really executed (see test_task_apply).
    ret = scratch['result'] = int(x) * int(y)
    return ret
def create_exception(name, base=Exception):
    """Dynamically build and return a new exception class named *name*
    deriving from *base*."""
    namespace = {}
    return type(name, (base,), namespace)
def catch_exception(exception):
    # Raise and immediately catch *exception* so that a genuine traceback
    # exists, then return the backend-serialized exception together with
    # the formatted traceback text.
    try:
        raise exception
    except exception.__class__, exc:
        exc = current_app.backend.prepare_exception(exc)
        return exc, ExceptionInfo(sys.exc_info()).traceback
class ViewTestCase(DjangoTestCase):
    """TestCase with a JSON-comparison helper plus backports of assert
    methods missing from older Django/unittest versions."""

    def assertJSONEqual(self, json, py):
        # Accept either a raw JSON string or an HttpResponse whose
        # content is JSON.
        json = isinstance(json, HttpResponse) and json.content or json
        try:
            self.assertEqual(deserialize(json), py)
        except TypeError, exc:
            raise TypeError('{0}: {1}'.format(exc, json))

    def assertIn(self, expected, source, *args):
        # Fall back to a plain membership check when the parent class
        # lacks assertIn.
        try:
            DjangoTestCase.assertIn(self, expected, source, *args)
        except AttributeError:
            self.assertTrue(expected in source)

    def assertDictContainsSubset(self, a, b, *args):
        # Same backport strategy: verify each expected key/value by hand.
        try:
            DjangoTestCase.assertDictContainsSubset(self, a, b, *args)
        except AttributeError:
            for key, value in a.items():
                self.assertTrue(key in b)
                self.assertEqual(b[key], value)
class test_task_apply(ViewTestCase):
    """Exercise the apply view with eager task execution enabled."""

    def test_apply(self):
        current_app.conf.CELERY_ALWAYS_EAGER = True
        try:
            url = task_apply(kwargs={'task_name': mytask.name}) + '?x=4&y=4'
            self.client.get(url)
            self.assertEqual(scratch['result'], 16)
        finally:
            # Always restore the eager flag for other tests.
            current_app.conf.CELERY_ALWAYS_EAGER = False

    def test_apply_raises_404_on_unregistered_task(self):
        current_app.conf.CELERY_ALWAYS_EAGER = True
        try:
            missing = 'xxx.does.not.exist'
            url = task_apply(kwargs={'task_name': missing}) + '?x=4&y=4'
            self.assertRaises(TemplateDoesNotExist, self.client.get, url)
        finally:
            current_app.conf.CELERY_ALWAYS_EAGER = False
class test_registered_tasks(ViewTestCase):
    """The tasks view should list built-in regular tasks."""

    def test_list_registered_tasks(self):
        response = self.client.get(registered_tasks())
        listed = deserialize(response.content)
        self.assertIn('celery.backend_cleanup', listed['regular'])
class test_webhook_task(ViewTestCase):
    """Exercise the task_webhook decorator's success and failure paths."""

    def test_successful_request(self):
        @task_webhook
        def add_webhook(request):
            first = int(request.GET['x'])
            second = int(request.GET['y'])
            return first + second

        request = MockRequest().get('/tasks/add', dict(x=10, y=10))
        payload = deserialize(add_webhook(request).content)
        self.assertDictContainsSubset({'status': 'success', 'retval': 20},
                                      payload)

    def test_failed_request(self):
        @task_webhook
        def error_webhook(request):
            first = int(request.GET['x'])
            second = int(request.GET['y'])
            raise MyError(first + second)

        request = MockRequest().get('/tasks/error', dict(x=10, y=10))
        payload = deserialize(error_webhook(request).content)
        self.assertDictContainsSubset({'status': 'failure',
                                       'reason': '<MyError: (20,)>'},
                                      payload)
class test_task_status(ViewTestCase):
    """Exercise the task_status view for each relevant result state."""

    def assertStatusForIs(self, status, res, traceback=None):
        # Seed the result backend directly, then check the view's JSON echo.
        uuid = gen_unique_id()
        current_app.backend.store_result(uuid, res, status,
                                         traceback=traceback)
        json = self.client.get(task_status(task_id=uuid))
        expect = dict(id=uuid, status=status, result=res)
        if status in current_app.backend.EXCEPTION_STATES:
            # Exceptions are serialized by the backend: compare the stored
            # args, then expect repr/class-name/traceback in the payload.
            instore = current_app.backend.get_result(uuid)
            self.assertEqual(str(instore.args[0]), str(res.args[0]))
            expect['result'] = repr(res)
            expect['exc'] = get_full_cls_name(res.__class__)
            expect['traceback'] = traceback
        self.assertJSONEqual(json, dict(task=expect))

    def test_success(self):
        self.assertStatusForIs(states.SUCCESS, 'The quick brown fox')

    def test_failure(self):
        exc, tb = catch_exception(MyError('foo'))
        self.assertStatusForIs(states.FAILURE, exc, tb)

    def test_retry(self):
        oexc, _ = catch_exception(MyError('Resource not available'))
        exc, tb = catch_exception(MyRetryTaskError(str(oexc), oexc))
        self.assertStatusForIs(states.RETRY, exc, tb)
class test_task_is_successful(ViewTestCase):
    """Only the SUCCESS state should report a task as executed."""

    def assertStatusForIs(self, status, outcome):
        uuid = gen_unique_id()
        stored = gen_unique_id()
        current_app.backend.store_result(uuid, stored, status)
        response = self.client.get(task_is_successful(task_id=uuid))
        self.assertJSONEqual(response, {'task': {'id': uuid,
                                                 'executed': outcome}})

    def test_success(self):
        self.assertStatusForIs(states.SUCCESS, True)

    def test_pending(self):
        self.assertStatusForIs(states.PENDING, False)

    def test_failure(self):
        self.assertStatusForIs(states.FAILURE, False)

    def test_retry(self):
        self.assertStatusForIs(states.RETRY, False)
| {
"content_hash": "424a29fabce2c4c72d2f79990b6846be",
"timestamp": "",
"source": "github",
"line_count": 199,
"max_line_length": 74,
"avg_line_length": 32.19095477386934,
"alnum_prop": 0.627536684358414,
"repo_name": "alexhayes/django-celery",
"id": "c858de3e8ad522c5444f067bb7a8726e347fe18a",
"size": "6406",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "djcelery/tests/test_views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "207537"
},
{
"name": "Shell",
"bytes": "2065"
}
],
"symlink_target": ""
} |
import theano
import theano.tensor as T
import numpy as np
from collections import OrderedDict
from functools import reduce
from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
from lasagne.utils import floatX
__all__ = [
'softmin',
'join',
'lsum',
'joinc',
'ldot',
'lmean',
'log_barrier',
'make_copy',
'as_shared',
'make_uniform',
'make_normal',
'get_srng',
'border_mask'
]
def join(xs):
    """Sum an iterable of addable objects via a left fold with ``+``.

    Replaces the original ``lambda`` assignments (PEP 8 E731) with named,
    documented functions; call signatures are unchanged.
    """
    return reduce(lambda a, b: a + b, xs)


# Backwards-compatible alias: callers use ``lsum`` interchangeably.
lsum = join


def ldot(xs, ys):
    """Symbolic dot product of two tensor lists: sum over elementwise sums."""
    return join([T.sum(x * y) for x, y in zip(xs, ys)])
def joinc(xs, cs=None):
    """Sum *xs*, optionally scaling each element by the matching *cs*."""
    if cs is not None:
        return join([x * c for x, c in zip(xs, cs)])
    if len(xs) == 1:
        # Single element: return it directly, skipping the fold.
        return xs[0]
    return join(xs)
def lmean(xs, cs=None):
    """Mean of *xs*; with weights *cs*, a weighted sum is returned
    (weights are assumed to sum to 1 — TODO confirm against callers).
    """
    # BUG FIX: the original used `len(xs) is 1`, an identity comparison
    # with an int literal that only works due to CPython small-int
    # interning; use equality.
    if len(xs) == 1:
        return xs[0]
    if cs is None:
        return join(xs) / len(xs)
    return joinc(xs, cs)
def get_srng(srng):
    """Return *srng* unchanged, or a freshly seeded MRG random stream."""
    if srng is not None:
        return srng
    # from theano.sandbox.cuda.rng_curand import CURAND_RandomStreams as RandomStreams
    return RandomStreams(seed=np.random.randint(2**30))
def softmin(xs, alpha=1.0):
    """Softmin weights of *xs* at inverse temperature *alpha*.

    For a list input, returns a list of normalized ``exp(-x * alpha)``
    weights; for a tensor, returns ``softmax(-xs * alpha)``.
    """
    alpha = np.float32(alpha)
    if hasattr(xs, '__len__'):
        exp_xs = [T.exp(-x * alpha) for x in xs]
        norm = join(exp_xs)
        return [ex / norm for ex in exp_xs]
    else:
        # BUG FIX: the tensor branch previously computed the softmax but
        # dropped the result (missing `return`), so callers got None.
        return T.nnet.softmax(-xs * alpha)
def log_barrier(v, bounds):
    """Symbolic log-barrier penalty keeping *v* strictly inside *bounds*."""
    lower, upper = bounds
    return -(T.log(v - lower) + T.log(upper - v))
def make_copy(shared):
    """Allocate a zero-filled shared variable shaped like *shared*."""
    template = shared.get_value(borrow=True)
    zeros = np.zeros(template.shape, dtype=template.dtype)
    return theano.shared(zeros, broadcastable=shared.broadcastable)
def as_shared(var):
    """Create an empty shared variable mirroring *var*'s dtype, rank and
    broadcast pattern (zero-length along every axis)."""
    empty = np.zeros(shape=(0,) * var.ndim, dtype=var.dtype)
    return theano.shared(empty, broadcastable=var.broadcastable)
def make_uniform(shared, a, b, srng=None):
    """Symbolic uniform(a, b) sample with the same shape as *shared*."""
    srng = get_srng(srng)
    target_shape = shared.get_value(borrow=True).shape
    return srng.uniform(low=a, high=b, size=target_shape,
                        ndim=shared.ndim, dtype=shared.dtype)
def make_normal(shared, srng):
    """Symbolic standard-normal sample with the same shape as *shared*."""
    srng = get_srng(srng)
    target_shape = shared.get_value(borrow=True).shape
    return srng.normal(size=target_shape,
                       ndim=shared.ndim, dtype=shared.dtype)
def border_mask(exclude_borders, img_shape, dtype='float32'):
    """2D mask of ones whose outer frame of width *exclude_borders* is zero.

    Uses the last two entries of *img_shape* as the mask's height/width.
    """
    if img_shape is None:
        raise Exception('With non-zero border exclusion `img_shape` argument must be defined!')
    mask = np.ones(
        shape=tuple(img_shape[-2:]),
        dtype=dtype
    )
    n = exclude_borders
    # BUG FIX: with n == 0 the old unconditional slices used `[-0:]`,
    # which selects the WHOLE axis and zeroed the entire mask.
    if n > 0:
        mask[:n, :] = 0
        mask[-n:, :] = 0
        mask[:, :n] = 0
        mask[:, -n:] = 0
    return mask
def masked(exclude_borders, img_shape, dtype='float32'):
    """Return a function that multiplies (batch, channel, H, W) inputs by
    the border mask; identity when *exclude_borders* is not positive."""
    if exclude_borders <= 0:
        return lambda X: X

    frame = border_mask(exclude_borders, img_shape, dtype)

    def apply_mask(X):
        return X * frame[None, None, :, :]

    return apply_mask
"content_hash": "a80b89d67b99cb4f720e72020f7c7b72",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 91,
"avg_line_length": 21.244094488188978,
"alnum_prop": 0.636767976278725,
"repo_name": "maxim-borisyak/craynn",
"id": "04da861352381168909561a1b32dacb3558f5aaf",
"size": "2698",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "craynn/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "111648"
}
],
"symlink_target": ""
} |
import random
from datetime import datetime, timedelta
from urllib.parse import urlencode
import pytest
from django.urls import reverse
from django.utils import timezone
from rest_framework.test import APIClient, APITestCase
from metaci.conftest import PlanFactory, StaffSuperuserFactory, TestResultFactory
# Deterministic RNG so the randomized field sampling below is reproducible.
rand = random.Random()
rand.seed("xyzzy")
class _TestingHelpers:
    """Shared helpers for exercising the testmethod_perf endpoint."""

    route = reverse("testmethod_perf-list")

    def debugmsg(self, *args):
        # Pytest captures stdout and shows it on failure — more useful
        # here than logger data.
        print(*args)

    @classmethod
    def make_user_and_client(cls):
        """Build a staff superuser plus an API client authenticated as them."""
        superuser = StaffSuperuserFactory()
        api_client = APIClient()
        api_client.force_authenticate(superuser)
        probe = api_client.get("/api/")
        assert probe.status_code == 200, probe.content
        return api_client, superuser

    def api_url(self, **kwargs):
        """Render the endpoint URL with *kwargs* encoded as a query string."""
        query = urlencode(kwargs, True)
        self.debugmsg("QueryParams", query)
        return self.route + "?" + query

    def find_first(self, fieldname, objs, value):
        """ Find objects in JSON result sets that match a value """
        if type(objs) is dict:
            objs = objs.get("results", objs)
        matches = (row for row in objs if row[fieldname] == value)
        return next(matches, None)

    def get_api_results(self, **kwargs):
        """GET the endpoint via both renderers and return the JSON rows."""
        self.debugmsg("Request", kwargs)
        browsable = self.client.get(self.api_url(**kwargs, format="api"))
        self.assertEqual(browsable.status_code, 200)
        plain = self.client.get(self.api_url(**kwargs))
        self.assertEqual(plain.status_code, 200)
        payload = plain.json()
        self.debugmsg("Response", payload)
        return payload["results"]

    def insert_identical_tests(self, count, method_name="GenericMethod", **fields):
        """Insert *count* results that all share a single test method."""
        first = TestResultFactory(
            build_flow__tests_total=1, method__name=method_name, **fields
        )
        for _ in range(count - 1):
            TestResultFactory(**fields, build_flow__tests_total=1, method=first.method)
@pytest.mark.filterwarnings("ignore: Using the add method")
class TestTestMethodPerfRESTAPI(APITestCase, _TestingHelpers):
    """Test the testmethodperf REST API"""

    @classmethod
    def setUpClass(cls):
        cls.client, cls.user = cls.make_user_and_client()
        super().setUpClass()

    def setUp(self):
        # Baseline fixture shared by most tests:
        # "Foo" ran twice (durations 10 and 2) and "Bar" twice (3 and 5).
        self.client.force_authenticate(self.user)
        t1 = TestResultFactory(
            method__name="Foo", duration=10, build_flow__tests_total=1, outcome="Pass"
        )
        TestResultFactory(
            duration=2, build_flow__tests_total=1, method=t1.method, outcome="Pass"
        )
        TestResultFactory(method__name="Bar", duration=3, build_flow__tests_total=1)
        TestResultFactory(method__name="Bar", duration=5, build_flow__tests_total=1)

    def test_counting(self):
        """Test counting of method invocations"""
        objs = self.get_api_results(include_fields="count")
        self.assertEqual(self.find_first("method_name", objs, "Foo")["count"], 2)
        self.assertEqual(self.find_first("method_name", objs, "Bar")["count"], 2)

    def test_averaging(self):
        """Test averaging of methods"""
        objs = self.get_api_results(include_fields="duration_average")
        self.assertEqual(
            self.find_first("method_name", objs, "Foo")["duration_average"], 6
        )
        self.assertEqual(
            self.find_first("method_name", objs, "Bar")["duration_average"], 4
        )

    # Every field the endpoint accepts in `include_fields`.
    includable_fields = [
        "duration_average",
        "duration_slow",
        "duration_fast",
        "cpu_usage_average",
        "cpu_usage_low",
        "cpu_usage_high",
        "count",
        "failures",
        "assertion_failures",
        "DML_failures",
        "other_failures",
        "success_percentage",
    ]

    def test_all_included_fields(self):
        # Randomly sample 1-, 2- and 3-field combinations (deterministic:
        # `rand` is seeded at module level) and check the response rows
        # contain exactly the requested fields plus method_name.
        def _test_fields(fields):
            response = self.client.get(self.api_url(include_fields=fields))
            self.assertEqual(response.status_code, 200)
            rows = response.json()["results"]
            for row in rows:
                self.assertSetEqual(set(fields + ["method_name"]), set(row))

        for i in range(10):
            field = rand.sample(self.includable_fields, 1)
            _test_fields(field)
        for i in range(10):
            field1, field2 = rand.sample(self.includable_fields, 2)
            _test_fields([field1, field2])
        for i in range(10):
            field1, field2, field3 = rand.sample(self.includable_fields, 3)
            _test_fields([field1, field2, field3])
        _test_fields(self.includable_fields)

    def test_duration_slow(self):
        """Test counting high durations"""
        # 20 identical slow runs plus one outlier; the "slow" statistic
        # should stay near the bulk value, not the outlier.
        self.insert_identical_tests(method_name="Foo", count=20, duration=10)
        _outlier = TestResultFactory(method__name="Foo", duration=11)  # noqa
        rows = self.get_api_results(include_fields=["duration_slow", "count"])
        self.assertEqual(
            round(self.find_first("method_name", rows, "Foo")["duration_slow"]), 10
        )

    def test_duration_fast(self):
        """Test counting high durations"""
        self.insert_identical_tests(method_name="FooBar", count=20, duration=2)
        _outlier = TestResultFactory(method__name="FooBar", duration=1)  # noqa
        rows = self.get_api_results(include_fields=["duration_slow", "count"])
        self.assertEqual(
            round(self.find_first("method_name", rows, "FooBar")["duration_slow"]), 2
        )

    def test_count_failures(self):
        """Test counting failed tests"""
        self.insert_identical_tests(method_name="FailingTest", count=15, outcome="Fail")
        self.insert_identical_tests(method_name="FailingTest", count=10, outcome="Pass")
        rows = self.get_api_results(
            include_fields=["failures", "success_percentage", "other_failures"]
        )
        self.assertEqual(
            self.find_first("method_name", rows, "FailingTest")["failures"], 15
        )
        self.assertEqual(
            self.find_first("method_name", rows, "FailingTest")["success_percentage"],
            (10 / 25) * 100,
        )
        self.assertEqual(
            self.find_first("method_name", rows, "FailingTest")["other_failures"], 15
        )
        self.assertEqual(
            self.find_first("method_name", rows, "Foo")["other_failures"], 0
        )

    def test_split_by_repo(self):
        """Test Splitting on repo"""
        self.insert_identical_tests(
            method_name="HedaTest",
            count=15,
            build_flow__build__planrepo__repo__name="HEDA",
        )
        self.insert_identical_tests(
            method_name="NPSPTest",
            count=20,
            build_flow__build__planrepo__repo__name="Cumulus",
        )
        rows = self.get_api_results(include_fields=["count", "repo"])
        self.assertEqual(self.find_first("method_name", rows, "HedaTest")["count"], 15)
        self.assertEqual(
            self.find_first("method_name", rows, "HedaTest")["repo"], "HEDA"
        )
        self.assertEqual(self.find_first("method_name", rows, "NPSPTest")["count"], 20)
        self.assertEqual(
            self.find_first("method_name", rows, "NPSPTest")["repo"], "Cumulus"
        )

    def test_split_by_plan(self):
        """Test splitting on plan regardless of the rest"""
        plan1 = PlanFactory(name="plan1")
        plan2 = PlanFactory(name="plan2")
        self.insert_identical_tests(
            count=3,
            build_flow__build__planrepo__repo__name="HEDA",
            build_flow__build__planrepo__plan=plan1,
        )
        self.insert_identical_tests(
            count=5,
            build_flow__build__planrepo__repo__name="HEDA",
            build_flow__build__planrepo__plan=plan2,
        )
        self.insert_identical_tests(
            count=7,
            build_flow__build__planrepo__repo__name="Cumulus",
            build_flow__build__planrepo__plan=plan1,
        )
        self.insert_identical_tests(
            count=9,
            build_flow__build__planrepo__repo__name="Cumulus",
            build_flow__build__planrepo__plan=plan2,
        )
        rows = self.get_api_results(include_fields=["count", "plan"])
        # Counts aggregate across repos: plan1 = 3 + 7, plan2 = 5 + 9.
        self.assertEqual(self.find_first("plan", rows, "plan1")["count"], 10)
        self.assertEqual(self.find_first("plan", rows, "plan2")["count"], 14)

    def test_order_by_count_desc(self):
        """Test ordering by count"""
        TestResultFactory(method__name="Bar", duration=3, build_flow__tests_total=1)
        rows = self.get_api_results(o="-count")
        self.assertEqual(rows[0]["method_name"], "Bar")
        self.assertEqual(rows[1]["method_name"], "Foo")

    def test_order_by_count_asc(self):
        """Test ordering by count"""
        TestResultFactory(method__name="Bar", duration=3, build_flow__tests_total=1)
        rows = self.get_api_results(o="count")
        self.assertEqual(rows[0]["method_name"], "Foo")
        self.assertEqual(rows[1]["method_name"], "Bar")

    def test_order_by_method_name_asc(self):
        rows = self.get_api_results(o="method_name")
        self.assertTrue(rows[0]["method_name"] < rows[-1]["method_name"])

    def test_order_by_method_name_desc(self):
        rows = self.get_api_results(o="-method_name")
        self.assertTrue(rows[0]["method_name"] > rows[-1]["method_name"])

    def test_order_by_success_percentage(self):
        TestResultFactory(
            method__name="Foo2", outcome="Fail", build_flow__tests_total=1
        )
        TestResultFactory(
            method__name="Bar2", outcome="Pass", build_flow__tests_total=1
        )
        rows = self.get_api_results(o="success_percentage")
        self.assertTrue(rows[0]["success_percentage"] < rows[-1]["success_percentage"])

    def test_order_by_success_percentage_desc(self):
        TestResultFactory(
            method__name="FailingTest", outcome="Fail", build_flow__tests_total=1
        )
        TestResultFactory(
            method__name="PassingTest", outcome="Pass", build_flow__tests_total=1
        )
        rows = self.get_api_results(o="-success_percentage")
        self.assertTrue(rows[0]["success_percentage"] > rows[-1]["success_percentage"])

    def test_order_by_unknown_field(self):
        response = self.client.get(self.api_url(o="fjioesjfoi"))
        self.assertEqual(response.status_code, 400)
        response.json()  # should still be able to parse it

    def test_include_unknown_field(self):
        response = self.client.get(self.api_url(include_fields=["fjioesjfofi"]))
        self.assertEqual(response.status_code, 400)
        response.json()  # should still be able to parse it

    def test_group_by_unknown_field(self):
        response = self.client.get(self.api_url(include_fields=["fesafs"]))
        self.assertEqual(response.status_code, 400)
        response.json()  # should still be able to parse it

    def test_cannot_specify_two_kinds_of_dates(self):
        # `recentdate` and explicit `daterange_*` are mutually exclusive.
        response = self.client.get(
            self.api_url(recentdate="today", daterange_after="2019-03-07")
        )
        self.assertEqual(response.status_code, 400)
        response.json()  # should still be able to parse it

    def make_date(self, strdate):
        # Parse YYYY-MM-DD into an aware datetime (current timezone).
        return timezone.make_aware(datetime.strptime(strdate, r"%Y-%m-%d"))

    def test_filter_by_before_and_after_date(self):
        d = self.make_date
        TestResultFactory(method__name="Bar1", build_flow__time_end=d("2018-03-08"))
        TestResultFactory(method__name="Bar2", build_flow__time_end=d("2018-04-08"))
        TestResultFactory(method__name="Bar3", build_flow__time_end=d("2018-05-08"))
        TestResultFactory(method__name="Bar4", build_flow__time_end=d("2018-06-08"))
        rows = self.get_api_results(
            daterange_after="2018-04-01", daterange_before="2018-06-01"
        )
        self.assertEqual(len(rows), 2)
        for row in rows:
            self.assertIn(row["method_name"], ["Bar2", "Bar3"])
            self.assertNotIn(row["method_name"], ["Bar1", "Bar4"])

    @pytest.mark.filterwarnings("ignore:DateTimeField")
    @pytest.mark.skip(reason="feature current turned off")
    def test_filter_by_recent_date(self):
        yesterday = timezone.make_aware(datetime.today() - timedelta(1))
        day_before = timezone.make_aware(datetime.today() - timedelta(2))
        long_ago = timezone.make_aware(datetime.today() - timedelta(10))
        long_long_ago = timezone.make_aware(datetime.today() - timedelta(12))
        TestResultFactory(method__name="Bar1", build_flow__time_end=yesterday)
        TestResultFactory(method__name="Bar2", build_flow__time_end=day_before)
        TestResultFactory(method__name="Bar3", build_flow__time_end=long_ago)
        TestResultFactory(method__name="Bar4", build_flow__time_end=long_long_ago)
        rows = self.get_api_results(recentdate="week")
        self.assertEqual(len(rows), 2)
        for row in rows:
            self.assertIn(row["method_name"], ["Bar1", "Bar2"])

    def test_api_view(self):
        # The browsable API renderer should answer with HTML.
        response = self.client.get(self.api_url(format="api"))
        self.debugmsg(response)
        self.assertEqual(response.status_code, 200)
        self.assertIn("text/html", response["content-type"])

    def test_filter_by_count(self):
        TestResultFactory(method__name="Bar1")
        TestResultFactory(method__name="Bar1")
        TestResultFactory(method__name="Bar1")
        TestResultFactory(method__name="Bar1")
        rows = self.get_api_results(count_gt=3, count_lt=5)
        self.assertEqual(len(rows), 1)
        for row in rows:
            self.assertEqual(row["method_name"], "Bar1")

    def test_default_fields(self):
        rows = self.get_api_results()
        self.assertIn("duration_average", rows[0].keys())
        self.assertIn("method_name", rows[0].keys())

    def test_default_fields_repo_only(self):
        TestResultFactory(
            method__name="Bar1", build_flow__build__planrepo__repo__name="myrepo"
        )
        rows = self.get_api_results(repo="myrepo")
        self.assertIn("duration_average", rows[0].keys())
        self.assertIn("method_name", rows[0].keys())

    def test_filter_by_methodname(self):
        rows = self.get_api_results(method_name="Foo")
        self.assertTrue(rows)

    def test_filter_by_methodname_subset(self):
        # Substring matching: "Fo" should still match "Foo".
        rows = self.get_api_results(method_name="Fo")
        self.assertTrue(rows)
| {
"content_hash": "8c4e948d22d637ca53d795163250c9ce",
"timestamp": "",
"source": "github",
"line_count": 372,
"max_line_length": 88,
"avg_line_length": 38.876344086021504,
"alnum_prop": 0.6127783155856728,
"repo_name": "SalesforceFoundation/mrbelvedereci",
"id": "c1034444d4f59fa0a2b9659dceb919981b99ec8e",
"size": "14462",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "metaci/api/tests/test_testmethod_perf.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "2069"
},
{
"name": "HTML",
"bytes": "123214"
},
{
"name": "JavaScript",
"bytes": "3993"
},
{
"name": "Python",
"bytes": "245560"
},
{
"name": "Shell",
"bytes": "4590"
}
],
"symlink_target": ""
} |
"""
====================
SimpleFitter wrapper
====================
Adds a variable height (background) component to any model
Module API
^^^^^^^^^^
"""
import numpy
from pyspeckit.mpfit import mpfit
from numpy.ma import median
from pyspeckit.spectrum.moments import moments
class SimpleFitter(object):
    """Minimal fitter base class exposing spectral moment estimation."""

    def __init__(self):
        # BUG FIX: the original declared `def __init__():` without `self`,
        # so instantiating SimpleFitter() raised a TypeError.
        pass

    def moments(self, *args, **kwargs):
        """
        Get the spectral moments from the moments package
        """
        return moments(*args, **kwargs)
def vheightmodel(zeroheightmodel):
    """Wrap *zeroheightmodel* so its first parameter becomes an additive
    height (background) term.

    The returned callable accepts ``vheight=False`` to skip adding the
    height and call the underlying model directly.
    """
    def vhm(xax, *pars, **kwargs):
        """
        Wrapper function vhm to set variable height.
        Parameter order: height, amplitude, shift, width
        """
        # pop() with a default replaces the original `in`-then-pop dance.
        vheight = kwargs.pop('vheight', True)
        if vheight:
            return zeroheightmodel(xax, *pars[1:], **kwargs) + pars[0]
        else:
            return zeroheightmodel(xax, *pars[1:], **kwargs)
    # BUG FIX: guard against models without a docstring — `+=` with a
    # None __doc__ raised a TypeError in the original.
    if zeroheightmodel.__doc__:
        vhm.__doc__ += zeroheightmodel.__doc__
    return vhm
| {
"content_hash": "53a2b5e8093ae72587653d756baf4c8a",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 69,
"avg_line_length": 25,
"alnum_prop": 0.5853658536585366,
"repo_name": "jpinedaf/pyspeckit",
"id": "529eec1bcef85b8fd28e82d209882084bb1c3e9f",
"size": "1025",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "pyspeckit/spectrum/models/fitter.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1249041"
},
{
"name": "Shell",
"bytes": "313"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.