Dataset columns (name, type, observed length range):

- commit: string, length 40 (a full git SHA)
- old_file: string, length 4 to 118
- new_file: string, length 4 to 118
- old_contents: string, length 0 to 2.94k
- new_contents: string, length 1 to 4.43k
- subject: string, length 15 to 444
- message: string, length 16 to 3.45k
- lang: string, 1 distinct value
- license: string, 13 distinct values
- repos: string, length 5 to 43.2k
- prompt: string, length 17 to 4.58k
- response: string, length 1 to 4.43k
- prompt_tagged: string, length 58 to 4.62k
- response_tagged: string, length 1 to 4.43k
- text: string, length 132 to 7.29k
- text_tagged: string, length 173 to 7.33k

Sample rows follow. Each row lists the sixteen fields in the order above, one field per block, with a line containing only `|` closing each field.
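For orientation, rows shaped like the samples below can be iterated with the Hugging Face `datasets` library. A minimal sketch; the dataset id `user/commit-dataset` is a placeholder, not the real repository name:

```python
# Stream a commit dataset and peek at one row; the id is hypothetical.
from datasets import load_dataset

ds = load_dataset("user/commit-dataset", split="train", streaming=True)
for row in ds:
    # Each row carries the pre-change file, the post-change file, and the
    # commit message describing the transformation between them.
    print(row["old_file"], "->", row["subject"])
    break
```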
668f5f588998040cadc320eccc2689551d348bc3
|
anki/statsbg.py
|
anki/statsbg.py
|
# from subtlepatterns.com
bg = """\
iVBORw0KGgoAAAANSUhEUgAAABIAAAANCAMAAACTkM4rAAAAM1BMVEXy8vLz8/P5+fn19fXt7e329vb4+Pj09PTv7+/u7u739/fw8PD7+/vx8fHr6+v6+vrs7Oz2LjW2AAAAkUlEQVR42g3KyXHAQAwDQYAQj12ItvOP1qqZZwMMPVnd06XToQvz4L2HDQ2iRgkvA7yPPB+JD+OUPnfzZ0JNZh6kkQus5NUmR7g4Jpxv5XN6nYWNmtlq9o3zuK6w3XRsE1pQIEGPIsdtTP3m2cYwlPv6MbL8/QASsKppZefyDmJPbxvxa/NrX1TJ1yp20fhj9D+SiAWWLU8myQAAAABJRU5ErkJggg==
"""
|
# from subtlepatterns.com
#
# The lines are too long.
# flake8: noqa
bg = """\
iVBORw0KGgoAAAANSUhEUgAAABIAAAANCAMAAACTkM4rAAAAM1BMVEXy8vLz8/P5+fn19fXt7e329vb4+Pj09PTv7+/u7u739/fw8PD7+/vx8fHr6+v6+vrs7Oz2LjW2AAAAkUlEQVR42g3KyXHAQAwDQYAQj12ItvOP1qqZZwMMPVnd06XToQvz4L2HDQ2iRgkvA7yPPB+JD+OUPnfzZ0JNZh6kkQus5NUmR7g4Jpxv5XN6nYWNmtlq9o3zuK6w3XRsE1pQIEGPIsdtTP3m2cYwlPv6MbL8/QASsKppZefyDmJPbxvxa/NrX1TJ1yp20fhj9D+SiAWWLU8myQAAAABJRU5ErkJggg==
"""
|
Make flake8 ignore the bg image.
|
Make flake8 ignore the bg image.
|
Python
|
agpl-3.0
|
ospalh/libanki3
|
# from subtlepatterns.com
bg = """\
iVBORw0KGgoAAAANSUhEUgAAABIAAAANCAMAAACTkM4rAAAAM1BMVEXy8vLz8/P5+fn19fXt7e329vb4+Pj09PTv7+/u7u739/fw8PD7+/vx8fHr6+v6+vrs7Oz2LjW2AAAAkUlEQVR42g3KyXHAQAwDQYAQj12ItvOP1qqZZwMMPVnd06XToQvz4L2HDQ2iRgkvA7yPPB+JD+OUPnfzZ0JNZh6kkQus5NUmR7g4Jpxv5XN6nYWNmtlq9o3zuK6w3XRsE1pQIEGPIsdtTP3m2cYwlPv6MbL8/QASsKppZefyDmJPbxvxa/NrX1TJ1yp20fhj9D+SiAWWLU8myQAAAABJRU5ErkJggg==
"""
Make flake8 ignore the bg image.
|
# from subtlepatterns.com
#
# The lines are too long.
# flake8: noqa
bg = """\
iVBORw0KGgoAAAANSUhEUgAAABIAAAANCAMAAACTkM4rAAAAM1BMVEXy8vLz8/P5+fn19fXt7e329vb4+Pj09PTv7+/u7u739/fw8PD7+/vx8fHr6+v6+vrs7Oz2LjW2AAAAkUlEQVR42g3KyXHAQAwDQYAQj12ItvOP1qqZZwMMPVnd06XToQvz4L2HDQ2iRgkvA7yPPB+JD+OUPnfzZ0JNZh6kkQus5NUmR7g4Jpxv5XN6nYWNmtlq9o3zuK6w3XRsE1pQIEGPIsdtTP3m2cYwlPv6MbL8/QASsKppZefyDmJPbxvxa/NrX1TJ1yp20fhj9D+SiAWWLU8myQAAAABJRU5ErkJggg==
"""
|
<commit_before># from subtlepatterns.com
bg = """\
iVBORw0KGgoAAAANSUhEUgAAABIAAAANCAMAAACTkM4rAAAAM1BMVEXy8vLz8/P5+fn19fXt7e329vb4+Pj09PTv7+/u7u739/fw8PD7+/vx8fHr6+v6+vrs7Oz2LjW2AAAAkUlEQVR42g3KyXHAQAwDQYAQj12ItvOP1qqZZwMMPVnd06XToQvz4L2HDQ2iRgkvA7yPPB+JD+OUPnfzZ0JNZh6kkQus5NUmR7g4Jpxv5XN6nYWNmtlq9o3zuK6w3XRsE1pQIEGPIsdtTP3m2cYwlPv6MbL8/QASsKppZefyDmJPbxvxa/NrX1TJ1yp20fhj9D+SiAWWLU8myQAAAABJRU5ErkJggg==
"""
<commit_msg>Make flake8 ignore the bg image.<commit_after>
|
# from subtlepatterns.com
#
# The lines are too long.
# flake8: noqa
bg = """\
iVBORw0KGgoAAAANSUhEUgAAABIAAAANCAMAAACTkM4rAAAAM1BMVEXy8vLz8/P5+fn19fXt7e329vb4+Pj09PTv7+/u7u739/fw8PD7+/vx8fHr6+v6+vrs7Oz2LjW2AAAAkUlEQVR42g3KyXHAQAwDQYAQj12ItvOP1qqZZwMMPVnd06XToQvz4L2HDQ2iRgkvA7yPPB+JD+OUPnfzZ0JNZh6kkQus5NUmR7g4Jpxv5XN6nYWNmtlq9o3zuK6w3XRsE1pQIEGPIsdtTP3m2cYwlPv6MbL8/QASsKppZefyDmJPbxvxa/NrX1TJ1yp20fhj9D+SiAWWLU8myQAAAABJRU5ErkJggg==
"""
|
# from subtlepatterns.com
bg = """\
iVBORw0KGgoAAAANSUhEUgAAABIAAAANCAMAAACTkM4rAAAAM1BMVEXy8vLz8/P5+fn19fXt7e329vb4+Pj09PTv7+/u7u739/fw8PD7+/vx8fHr6+v6+vrs7Oz2LjW2AAAAkUlEQVR42g3KyXHAQAwDQYAQj12ItvOP1qqZZwMMPVnd06XToQvz4L2HDQ2iRgkvA7yPPB+JD+OUPnfzZ0JNZh6kkQus5NUmR7g4Jpxv5XN6nYWNmtlq9o3zuK6w3XRsE1pQIEGPIsdtTP3m2cYwlPv6MbL8/QASsKppZefyDmJPbxvxa/NrX1TJ1yp20fhj9D+SiAWWLU8myQAAAABJRU5ErkJggg==
"""
Make flake8 ignore the bg image.# from subtlepatterns.com
#
# The lines are too long.
# flake8: noqa
bg = """\
iVBORw0KGgoAAAANSUhEUgAAABIAAAANCAMAAACTkM4rAAAAM1BMVEXy8vLz8/P5+fn19fXt7e329vb4+Pj09PTv7+/u7u739/fw8PD7+/vx8fHr6+v6+vrs7Oz2LjW2AAAAkUlEQVR42g3KyXHAQAwDQYAQj12ItvOP1qqZZwMMPVnd06XToQvz4L2HDQ2iRgkvA7yPPB+JD+OUPnfzZ0JNZh6kkQus5NUmR7g4Jpxv5XN6nYWNmtlq9o3zuK6w3XRsE1pQIEGPIsdtTP3m2cYwlPv6MbL8/QASsKppZefyDmJPbxvxa/NrX1TJ1yp20fhj9D+SiAWWLU8myQAAAABJRU5ErkJggg==
"""
|
<commit_before># from subtlepatterns.com
bg = """\
iVBORw0KGgoAAAANSUhEUgAAABIAAAANCAMAAACTkM4rAAAAM1BMVEXy8vLz8/P5+fn19fXt7e329vb4+Pj09PTv7+/u7u739/fw8PD7+/vx8fHr6+v6+vrs7Oz2LjW2AAAAkUlEQVR42g3KyXHAQAwDQYAQj12ItvOP1qqZZwMMPVnd06XToQvz4L2HDQ2iRgkvA7yPPB+JD+OUPnfzZ0JNZh6kkQus5NUmR7g4Jpxv5XN6nYWNmtlq9o3zuK6w3XRsE1pQIEGPIsdtTP3m2cYwlPv6MbL8/QASsKppZefyDmJPbxvxa/NrX1TJ1yp20fhj9D+SiAWWLU8myQAAAABJRU5ErkJggg==
"""
<commit_msg>Make flake8 ignore the bg image.<commit_after># from subtlepatterns.com
#
# The lines are too long.
# flake8: noqa
bg = """\
iVBORw0KGgoAAAANSUhEUgAAABIAAAANCAMAAACTkM4rAAAAM1BMVEXy8vLz8/P5+fn19fXt7e329vb4+Pj09PTv7+/u7u739/fw8PD7+/vx8fHr6+v6+vrs7Oz2LjW2AAAAkUlEQVR42g3KyXHAQAwDQYAQj12ItvOP1qqZZwMMPVnd06XToQvz4L2HDQ2iRgkvA7yPPB+JD+OUPnfzZ0JNZh6kkQus5NUmR7g4Jpxv5XN6nYWNmtlq9o3zuK6w3XRsE1pQIEGPIsdtTP3m2cYwlPv6MbL8/QASsKppZefyDmJPbxvxa/NrX1TJ1yp20fhj9D+SiAWWLU8myQAAAABJRU5ErkJggg==
"""
|
ec96669641c9b753c3ce74ce432213a17b0403fe
|
tests/aggregate_tests.py
|
tests/aggregate_tests.py
|
#!/usr/bin/env python
"""
<Program Name>
  aggregate_tests.py
<Author>
  Konstantin Andrianov.
  Zane Fisher.
<Started>
  January 26, 2013.
  August 2013.
  Modified previous behavior that explicitly imported individual
  unit tests. -Zane Fisher
<Copyright>
  See LICENSE for licensing information.
<Purpose>
  Run all the unit tests from every .py file beginning with "test_" in
  'securesystemslib/tests'.
"""

# Help with Python 3 compatibility, where the print statement is a function, an
# implicit relative import is invalid, and the '/' operator performs true
# division. Example: print 'hello world' raises a 'SyntaxError' exception.
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals

import sys
import unittest

if __name__ == '__main__':
  suite = unittest.TestLoader().discover("tests", top_level_dir=".")
  all_tests_passed = unittest.TextTestRunner(verbosity=1).run(suite).wasSuccessful()
  if not all_tests_passed:
    sys.exit(1)
|
#!/usr/bin/env python
"""
<Program Name>
  aggregate_tests.py
<Author>
  Konstantin Andrianov.
  Zane Fisher.
<Started>
  January 26, 2013.
  August 2013.
  Modified previous behavior that explicitly imported individual
  unit tests. -Zane Fisher
<Copyright>
  See LICENSE for licensing information.
<Purpose>
  Run all the unit tests from every .py file beginning with "test_" in
  'securesystemslib/tests'.
"""

# Help with Python 3 compatibility, where the print statement is a function, an
# implicit relative import is invalid, and the '/' operator performs true
# division. Example: print 'hello world' raises a 'SyntaxError' exception.
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals

import os
import sys
import unittest
import subprocess


def check_usable_gpg():
  """Set `TEST_SKIP_GPG` environment variable if neither gpg2 nor gpg is
  available. """
  os.environ["TEST_SKIP_GPG"] = "1"
  for gpg in ["gpg2", "gpg"]:
    try:
      subprocess.check_call([gpg, "--version"])
    except OSError:
      pass
    else:
      # If one of the two exists, we can unset the skip envvar and ...
      os.environ.pop("TEST_SKIP_GPG", None)
      # ... abort the availability check.:
      break


if __name__ == '__main__':
  check_usable_gpg()
  suite = unittest.TestLoader().discover("tests", top_level_dir=".")
  all_tests_passed = unittest.TextTestRunner(verbosity=1).run(suite).wasSuccessful()
  if not all_tests_passed:
    sys.exit(1)
|
Copy and call in-toto's check_usable_gpg function
|
Copy and call in-toto's check_usable_gpg function
Set environment variable in test aggregate script that may be
used to skip tests if gpg is not available on the test system.
|
Python
|
mit
|
secure-systems-lab/securesystemslib,secure-systems-lab/securesystemslib
|
#!/usr/bin/env python
"""
<Program Name>
  aggregate_tests.py
<Author>
  Konstantin Andrianov.
  Zane Fisher.
<Started>
  January 26, 2013.
  August 2013.
  Modified previous behavior that explicitly imported individual
  unit tests. -Zane Fisher
<Copyright>
  See LICENSE for licensing information.
<Purpose>
  Run all the unit tests from every .py file beginning with "test_" in
  'securesystemslib/tests'.
"""

# Help with Python 3 compatibility, where the print statement is a function, an
# implicit relative import is invalid, and the '/' operator performs true
# division. Example: print 'hello world' raises a 'SyntaxError' exception.
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals

import sys
import unittest

if __name__ == '__main__':
  suite = unittest.TestLoader().discover("tests", top_level_dir=".")
  all_tests_passed = unittest.TextTestRunner(verbosity=1).run(suite).wasSuccessful()
  if not all_tests_passed:
    sys.exit(1)
Copy and call in-toto's check_usable_gpg function
Set environment variable in test aggregate script that may be
used to skip tests if gpg is not available on the test system.
|
#!/usr/bin/env python
"""
<Program Name>
  aggregate_tests.py
<Author>
  Konstantin Andrianov.
  Zane Fisher.
<Started>
  January 26, 2013.
  August 2013.
  Modified previous behavior that explicitly imported individual
  unit tests. -Zane Fisher
<Copyright>
  See LICENSE for licensing information.
<Purpose>
  Run all the unit tests from every .py file beginning with "test_" in
  'securesystemslib/tests'.
"""

# Help with Python 3 compatibility, where the print statement is a function, an
# implicit relative import is invalid, and the '/' operator performs true
# division. Example: print 'hello world' raises a 'SyntaxError' exception.
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals

import os
import sys
import unittest
import subprocess


def check_usable_gpg():
  """Set `TEST_SKIP_GPG` environment variable if neither gpg2 nor gpg is
  available. """
  os.environ["TEST_SKIP_GPG"] = "1"
  for gpg in ["gpg2", "gpg"]:
    try:
      subprocess.check_call([gpg, "--version"])
    except OSError:
      pass
    else:
      # If one of the two exists, we can unset the skip envvar and ...
      os.environ.pop("TEST_SKIP_GPG", None)
      # ... abort the availability check.:
      break


if __name__ == '__main__':
  check_usable_gpg()
  suite = unittest.TestLoader().discover("tests", top_level_dir=".")
  all_tests_passed = unittest.TextTestRunner(verbosity=1).run(suite).wasSuccessful()
  if not all_tests_passed:
    sys.exit(1)
|
<commit_before>#!/usr/bin/env python
"""
<Program Name>
  aggregate_tests.py
<Author>
  Konstantin Andrianov.
  Zane Fisher.
<Started>
  January 26, 2013.
  August 2013.
  Modified previous behavior that explicitly imported individual
  unit tests. -Zane Fisher
<Copyright>
  See LICENSE for licensing information.
<Purpose>
  Run all the unit tests from every .py file beginning with "test_" in
  'securesystemslib/tests'.
"""

# Help with Python 3 compatibility, where the print statement is a function, an
# implicit relative import is invalid, and the '/' operator performs true
# division. Example: print 'hello world' raises a 'SyntaxError' exception.
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals

import sys
import unittest

if __name__ == '__main__':
  suite = unittest.TestLoader().discover("tests", top_level_dir=".")
  all_tests_passed = unittest.TextTestRunner(verbosity=1).run(suite).wasSuccessful()
  if not all_tests_passed:
    sys.exit(1)
<commit_msg>Copy and call in-toto's check_usable_gpg function
Set environment variable in test aggregate script that may be
used to skip tests if gpg is not available on the test system.<commit_after>
|
#!/usr/bin/env python
"""
<Program Name>
  aggregate_tests.py
<Author>
  Konstantin Andrianov.
  Zane Fisher.
<Started>
  January 26, 2013.
  August 2013.
  Modified previous behavior that explicitly imported individual
  unit tests. -Zane Fisher
<Copyright>
  See LICENSE for licensing information.
<Purpose>
  Run all the unit tests from every .py file beginning with "test_" in
  'securesystemslib/tests'.
"""

# Help with Python 3 compatibility, where the print statement is a function, an
# implicit relative import is invalid, and the '/' operator performs true
# division. Example: print 'hello world' raises a 'SyntaxError' exception.
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals

import os
import sys
import unittest
import subprocess


def check_usable_gpg():
  """Set `TEST_SKIP_GPG` environment variable if neither gpg2 nor gpg is
  available. """
  os.environ["TEST_SKIP_GPG"] = "1"
  for gpg in ["gpg2", "gpg"]:
    try:
      subprocess.check_call([gpg, "--version"])
    except OSError:
      pass
    else:
      # If one of the two exists, we can unset the skip envvar and ...
      os.environ.pop("TEST_SKIP_GPG", None)
      # ... abort the availability check.:
      break


if __name__ == '__main__':
  check_usable_gpg()
  suite = unittest.TestLoader().discover("tests", top_level_dir=".")
  all_tests_passed = unittest.TextTestRunner(verbosity=1).run(suite).wasSuccessful()
  if not all_tests_passed:
    sys.exit(1)
|
#!/usr/bin/env python
"""
<Program Name>
  aggregate_tests.py
<Author>
  Konstantin Andrianov.
  Zane Fisher.
<Started>
  January 26, 2013.
  August 2013.
  Modified previous behavior that explicitly imported individual
  unit tests. -Zane Fisher
<Copyright>
  See LICENSE for licensing information.
<Purpose>
  Run all the unit tests from every .py file beginning with "test_" in
  'securesystemslib/tests'.
"""

# Help with Python 3 compatibility, where the print statement is a function, an
# implicit relative import is invalid, and the '/' operator performs true
# division. Example: print 'hello world' raises a 'SyntaxError' exception.
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals

import sys
import unittest

if __name__ == '__main__':
  suite = unittest.TestLoader().discover("tests", top_level_dir=".")
  all_tests_passed = unittest.TextTestRunner(verbosity=1).run(suite).wasSuccessful()
  if not all_tests_passed:
    sys.exit(1)
Copy and call in-toto's check_usable_gpg function
Set environment variable in test aggregate script that may be
used to skip tests if gpg is not available on the test system.#!/usr/bin/env python
"""
<Program Name>
aggregate_tests.py
<Author>
Konstantin Andrianov.
Zane Fisher.
<Started>
January 26, 2013.
August 2013.
Modified previous behavior that explicitly imported individual
unit tests. -Zane Fisher
<Copyright>
See LICENSE for licensing information.
<Purpose>
Run all the unit tests from every .py file beginning with "test_" in
'securesystemslib/tests'.
"""
# Help with Python 3 compatibility, where the print statement is a function, an
# implicit relative import is invalid, and the '/' operator performs true
# division. Example: print 'hello world' raises a 'SyntaxError' exception.
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import sys
import unittest
import subprocess
def check_usable_gpg():
"""Set `TEST_SKIP_GPG` environment variable if neither gpg2 nor gpg is
available. """
os.environ["TEST_SKIP_GPG"] = "1"
for gpg in ["gpg2", "gpg"]:
try:
subprocess.check_call([gpg, "--version"])
except OSError:
pass
else:
# If one of the two exists, we can unset the skip envvar and ...
os.environ.pop("TEST_SKIP_GPG", None)
# ... abort the availability check.:
break
if __name__ == '__main__':
check_usable_gpg()
suite = unittest.TestLoader().discover("tests", top_level_dir=".")
all_tests_passed = unittest.TextTestRunner(verbosity=1).run(suite).wasSuccessful()
if not all_tests_passed:
sys.exit(1)
|
<commit_before>#!/usr/bin/env python
"""
<Program Name>
  aggregate_tests.py
<Author>
  Konstantin Andrianov.
  Zane Fisher.
<Started>
  January 26, 2013.
  August 2013.
  Modified previous behavior that explicitly imported individual
  unit tests. -Zane Fisher
<Copyright>
  See LICENSE for licensing information.
<Purpose>
  Run all the unit tests from every .py file beginning with "test_" in
  'securesystemslib/tests'.
"""

# Help with Python 3 compatibility, where the print statement is a function, an
# implicit relative import is invalid, and the '/' operator performs true
# division. Example: print 'hello world' raises a 'SyntaxError' exception.
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals

import sys
import unittest

if __name__ == '__main__':
  suite = unittest.TestLoader().discover("tests", top_level_dir=".")
  all_tests_passed = unittest.TextTestRunner(verbosity=1).run(suite).wasSuccessful()
  if not all_tests_passed:
    sys.exit(1)
<commit_msg>Copy and call in-toto's check_usable_gpg function
Set environment variable in test aggregate script that may be
used to skip tests if gpg is not available on the test system.<commit_after>#!/usr/bin/env python
"""
<Program Name>
aggregate_tests.py
<Author>
Konstantin Andrianov.
Zane Fisher.
<Started>
January 26, 2013.
August 2013.
Modified previous behavior that explicitly imported individual
unit tests. -Zane Fisher
<Copyright>
See LICENSE for licensing information.
<Purpose>
Run all the unit tests from every .py file beginning with "test_" in
'securesystemslib/tests'.
"""
# Help with Python 3 compatibility, where the print statement is a function, an
# implicit relative import is invalid, and the '/' operator performs true
# division. Example: print 'hello world' raises a 'SyntaxError' exception.
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import sys
import unittest
import subprocess
def check_usable_gpg():
"""Set `TEST_SKIP_GPG` environment variable if neither gpg2 nor gpg is
available. """
os.environ["TEST_SKIP_GPG"] = "1"
for gpg in ["gpg2", "gpg"]:
try:
subprocess.check_call([gpg, "--version"])
except OSError:
pass
else:
# If one of the two exists, we can unset the skip envvar and ...
os.environ.pop("TEST_SKIP_GPG", None)
# ... abort the availability check.:
break
if __name__ == '__main__':
check_usable_gpg()
suite = unittest.TestLoader().discover("tests", top_level_dir=".")
all_tests_passed = unittest.TextTestRunner(verbosity=1).run(suite).wasSuccessful()
if not all_tests_passed:
sys.exit(1)
|
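The record above revolves around one small pattern: pre-set a skip flag, then clear it as soon as any candidate binary answers `--version`. A standalone sketch of that probe, assuming only the standard library (command list and variable name mirror the commit):

```python
import os
import subprocess

def probe_any(commands, skip_var):
    """Set skip_var, then unset it if any of `commands` is runnable."""
    os.environ[skip_var] = "1"
    for cmd in commands:
        try:
            subprocess.check_call([cmd, "--version"])
        except OSError:
            # Binary not on PATH; FileNotFoundError is a subclass of OSError.
            continue
        os.environ.pop(skip_var, None)
        break

probe_any(["gpg2", "gpg"], "TEST_SKIP_GPG")
```

Catching `OSError` covers only the missing-binary case; a non-zero exit status would still propagate as `CalledProcessError`, exactly as in the committed code.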
593d152bd6eec64bc8ee504020ba0e5e2345966c
|
wafer/pages/views.py
|
wafer/pages/views.py
|
from django.http import Http404
from django.core.exceptions import PermissionDenied
from django.views.generic import DetailView, TemplateView, UpdateView

from wafer.pages.models import Page
from wafer.pages.forms import PageForm


class ShowPage(DetailView):
    template_name = 'wafer.pages/page.html'
    model = Page


class EditPage(UpdateView):
    template_name = 'wafer.pages/page_form.html'
    model = Page
    form_class = PageForm
    fields = ['name', 'content']


def slug(request, url):
    """Look up a page by url (which is a tree of slugs)"""
    page = None
    for slug in url.split('/'):
        if not slug:
            continue
        try:
            page = Page.objects.get(slug=slug, parent=page)
        except Page.DoesNotExist:
            raise Http404
    if page is None:
        try:
            page = Page.objects.get(slug='index')
        except Page.DoesNotExist:
            return TemplateView.as_view(
                template_name='wafer/index.html')(request)
    if 'edit' in request.GET.keys():
        if not request.user.has_perm('pages.change_page'):
            raise PermissionDenied
        return EditPage.as_view()(request, pk=page.id)
    return ShowPage.as_view()(request, pk=page.id)
|
from django.http import Http404
from django.core.exceptions import PermissionDenied
from django.views.generic import DetailView, TemplateView, UpdateView

from wafer.pages.models import Page
from wafer.pages.forms import PageForm


class ShowPage(DetailView):
    template_name = 'wafer.pages/page.html'
    model = Page


class EditPage(UpdateView):
    template_name = 'wafer.pages/page_form.html'
    model = Page
    form_class = PageForm


def slug(request, url):
    """Look up a page by url (which is a tree of slugs)"""
    page = None
    for slug in url.split('/'):
        if not slug:
            continue
        try:
            page = Page.objects.get(slug=slug, parent=page)
        except Page.DoesNotExist:
            raise Http404
    if page is None:
        try:
            page = Page.objects.get(slug='index')
        except Page.DoesNotExist:
            return TemplateView.as_view(
                template_name='wafer/index.html')(request)
    if 'edit' in request.GET.keys():
        if not request.user.has_perm('pages.change_page'):
            raise PermissionDenied
        return EditPage.as_view()(request, pk=page.id)
    return ShowPage.as_view()(request, pk=page.id)
|
Remove unneeded field specifier from EditPage form to make Django 1.8 happy
|
Remove unneeded field specifier from EditPage form to make Django 1.8 happy
|
Python
|
isc
|
CTPUG/wafer,CarlFK/wafer,CTPUG/wafer,CarlFK/wafer,CTPUG/wafer,CarlFK/wafer,CTPUG/wafer,CarlFK/wafer
|
from django.http import Http404
from django.core.exceptions import PermissionDenied
from django.views.generic import DetailView, TemplateView, UpdateView

from wafer.pages.models import Page
from wafer.pages.forms import PageForm


class ShowPage(DetailView):
    template_name = 'wafer.pages/page.html'
    model = Page


class EditPage(UpdateView):
    template_name = 'wafer.pages/page_form.html'
    model = Page
    form_class = PageForm
    fields = ['name', 'content']


def slug(request, url):
    """Look up a page by url (which is a tree of slugs)"""
    page = None
    for slug in url.split('/'):
        if not slug:
            continue
        try:
            page = Page.objects.get(slug=slug, parent=page)
        except Page.DoesNotExist:
            raise Http404
    if page is None:
        try:
            page = Page.objects.get(slug='index')
        except Page.DoesNotExist:
            return TemplateView.as_view(
                template_name='wafer/index.html')(request)
    if 'edit' in request.GET.keys():
        if not request.user.has_perm('pages.change_page'):
            raise PermissionDenied
        return EditPage.as_view()(request, pk=page.id)
    return ShowPage.as_view()(request, pk=page.id)
Remove unneeded field specifier from EditPage form to make Django 1.8 happy
|
from django.http import Http404
from django.core.exceptions import PermissionDenied
from django.views.generic import DetailView, TemplateView, UpdateView

from wafer.pages.models import Page
from wafer.pages.forms import PageForm


class ShowPage(DetailView):
    template_name = 'wafer.pages/page.html'
    model = Page


class EditPage(UpdateView):
    template_name = 'wafer.pages/page_form.html'
    model = Page
    form_class = PageForm


def slug(request, url):
    """Look up a page by url (which is a tree of slugs)"""
    page = None
    for slug in url.split('/'):
        if not slug:
            continue
        try:
            page = Page.objects.get(slug=slug, parent=page)
        except Page.DoesNotExist:
            raise Http404
    if page is None:
        try:
            page = Page.objects.get(slug='index')
        except Page.DoesNotExist:
            return TemplateView.as_view(
                template_name='wafer/index.html')(request)
    if 'edit' in request.GET.keys():
        if not request.user.has_perm('pages.change_page'):
            raise PermissionDenied
        return EditPage.as_view()(request, pk=page.id)
    return ShowPage.as_view()(request, pk=page.id)
|
<commit_before>from django.http import Http404
from django.core.exceptions import PermissionDenied
from django.views.generic import DetailView, TemplateView, UpdateView

from wafer.pages.models import Page
from wafer.pages.forms import PageForm


class ShowPage(DetailView):
    template_name = 'wafer.pages/page.html'
    model = Page


class EditPage(UpdateView):
    template_name = 'wafer.pages/page_form.html'
    model = Page
    form_class = PageForm
    fields = ['name', 'content']


def slug(request, url):
    """Look up a page by url (which is a tree of slugs)"""
    page = None
    for slug in url.split('/'):
        if not slug:
            continue
        try:
            page = Page.objects.get(slug=slug, parent=page)
        except Page.DoesNotExist:
            raise Http404
    if page is None:
        try:
            page = Page.objects.get(slug='index')
        except Page.DoesNotExist:
            return TemplateView.as_view(
                template_name='wafer/index.html')(request)
    if 'edit' in request.GET.keys():
        if not request.user.has_perm('pages.change_page'):
            raise PermissionDenied
        return EditPage.as_view()(request, pk=page.id)
    return ShowPage.as_view()(request, pk=page.id)
<commit_msg>Remove unneeded field specifier from EditPage form to make Django 1.8 happy<commit_after>
|
from django.http import Http404
from django.core.exceptions import PermissionDenied
from django.views.generic import DetailView, TemplateView, UpdateView

from wafer.pages.models import Page
from wafer.pages.forms import PageForm


class ShowPage(DetailView):
    template_name = 'wafer.pages/page.html'
    model = Page


class EditPage(UpdateView):
    template_name = 'wafer.pages/page_form.html'
    model = Page
    form_class = PageForm


def slug(request, url):
    """Look up a page by url (which is a tree of slugs)"""
    page = None
    for slug in url.split('/'):
        if not slug:
            continue
        try:
            page = Page.objects.get(slug=slug, parent=page)
        except Page.DoesNotExist:
            raise Http404
    if page is None:
        try:
            page = Page.objects.get(slug='index')
        except Page.DoesNotExist:
            return TemplateView.as_view(
                template_name='wafer/index.html')(request)
    if 'edit' in request.GET.keys():
        if not request.user.has_perm('pages.change_page'):
            raise PermissionDenied
        return EditPage.as_view()(request, pk=page.id)
    return ShowPage.as_view()(request, pk=page.id)
|
from django.http import Http404
from django.core.exceptions import PermissionDenied
from django.views.generic import DetailView, TemplateView, UpdateView

from wafer.pages.models import Page
from wafer.pages.forms import PageForm


class ShowPage(DetailView):
    template_name = 'wafer.pages/page.html'
    model = Page


class EditPage(UpdateView):
    template_name = 'wafer.pages/page_form.html'
    model = Page
    form_class = PageForm
    fields = ['name', 'content']


def slug(request, url):
    """Look up a page by url (which is a tree of slugs)"""
    page = None
    for slug in url.split('/'):
        if not slug:
            continue
        try:
            page = Page.objects.get(slug=slug, parent=page)
        except Page.DoesNotExist:
            raise Http404
    if page is None:
        try:
            page = Page.objects.get(slug='index')
        except Page.DoesNotExist:
            return TemplateView.as_view(
                template_name='wafer/index.html')(request)
    if 'edit' in request.GET.keys():
        if not request.user.has_perm('pages.change_page'):
            raise PermissionDenied
        return EditPage.as_view()(request, pk=page.id)
    return ShowPage.as_view()(request, pk=page.id)
Remove unneeded field specifier from EditPage form to make Django 1.8 happyfrom django.http import Http404
from django.core.exceptions import PermissionDenied
from django.views.generic import DetailView, TemplateView, UpdateView

from wafer.pages.models import Page
from wafer.pages.forms import PageForm


class ShowPage(DetailView):
    template_name = 'wafer.pages/page.html'
    model = Page


class EditPage(UpdateView):
    template_name = 'wafer.pages/page_form.html'
    model = Page
    form_class = PageForm


def slug(request, url):
    """Look up a page by url (which is a tree of slugs)"""
    page = None
    for slug in url.split('/'):
        if not slug:
            continue
        try:
            page = Page.objects.get(slug=slug, parent=page)
        except Page.DoesNotExist:
            raise Http404
    if page is None:
        try:
            page = Page.objects.get(slug='index')
        except Page.DoesNotExist:
            return TemplateView.as_view(
                template_name='wafer/index.html')(request)
    if 'edit' in request.GET.keys():
        if not request.user.has_perm('pages.change_page'):
            raise PermissionDenied
        return EditPage.as_view()(request, pk=page.id)
    return ShowPage.as_view()(request, pk=page.id)
|
<commit_before>from django.http import Http404
from django.core.exceptions import PermissionDenied
from django.views.generic import DetailView, TemplateView, UpdateView

from wafer.pages.models import Page
from wafer.pages.forms import PageForm


class ShowPage(DetailView):
    template_name = 'wafer.pages/page.html'
    model = Page


class EditPage(UpdateView):
    template_name = 'wafer.pages/page_form.html'
    model = Page
    form_class = PageForm
    fields = ['name', 'content']


def slug(request, url):
    """Look up a page by url (which is a tree of slugs)"""
    page = None
    for slug in url.split('/'):
        if not slug:
            continue
        try:
            page = Page.objects.get(slug=slug, parent=page)
        except Page.DoesNotExist:
            raise Http404
    if page is None:
        try:
            page = Page.objects.get(slug='index')
        except Page.DoesNotExist:
            return TemplateView.as_view(
                template_name='wafer/index.html')(request)
    if 'edit' in request.GET.keys():
        if not request.user.has_perm('pages.change_page'):
            raise PermissionDenied
        return EditPage.as_view()(request, pk=page.id)
    return ShowPage.as_view()(request, pk=page.id)
<commit_msg>Remove unneeded field specifier from EditPage form to make Django 1.8 happy<commit_after>from django.http import Http404
from django.core.exceptions import PermissionDenied
from django.views.generic import DetailView, TemplateView, UpdateView

from wafer.pages.models import Page
from wafer.pages.forms import PageForm


class ShowPage(DetailView):
    template_name = 'wafer.pages/page.html'
    model = Page


class EditPage(UpdateView):
    template_name = 'wafer.pages/page_form.html'
    model = Page
    form_class = PageForm


def slug(request, url):
    """Look up a page by url (which is a tree of slugs)"""
    page = None
    for slug in url.split('/'):
        if not slug:
            continue
        try:
            page = Page.objects.get(slug=slug, parent=page)
        except Page.DoesNotExist:
            raise Http404
    if page is None:
        try:
            page = Page.objects.get(slug='index')
        except Page.DoesNotExist:
            return TemplateView.as_view(
                template_name='wafer/index.html')(request)
    if 'edit' in request.GET.keys():
        if not request.user.has_perm('pages.change_page'):
            raise PermissionDenied
        return EditPage.as_view()(request, pk=page.id)
    return ShowPage.as_view()(request, pk=page.id)
|
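Background for the change above: starting with Django 1.8, a `ModelFormMixin`-based view such as `UpdateView` rejects having both `form_class` and `fields` set, so the redundant `fields` list had to go once `PageForm` supplied the field definitions. Separately, the `slug()` view's path walk is easy to isolate; a toy sketch with a dict standing in for the `Page` table:

```python
# Toy stand-in for Page.objects.get(slug=..., parent=...): keys are
# (slug, parent_id) pairs, values are page ids.
PAGES = {("about", None): 1, ("team", 1): 2}

def resolve(url):
    """Walk a tree of slugs: each segment is looked up under the page
    matched by the previous segment, so 'about/team' finds page 2."""
    page = None
    for slug in url.split("/"):
        if not slug:  # tolerate leading, trailing or doubled slashes
            continue
        page = PAGES.get((slug, page))
        if page is None:
            raise KeyError(url)  # the real view raises Http404 here
    return page

assert resolve("about/team/") == 2
```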
3e12e61144fe4cd08f755130dde23066879521d1
|
InteractiveCommandLine.UnitTest.py
|
InteractiveCommandLine.UnitTest.py
|
import unittest
import MockMockMock
from InteractiveCommandLine import *


class CommandLineCommandExecution( unittest.TestCase ):
    def setUp( self ):
        self.command = Command()
        self.optionHandler = MockMockMock.Mock( "optionHandler" )
        self.command.addOption( "option", self.optionHandler )
        self.execute = MockMockMock.Mock( "execute" )
        self.command.execute = self.execute.object
        self.program = Program()
        self.program.addCommand( "test", self.command )

    def tearDown( self ):
        self.execute.tearDown()

    def testWithoutArguments( self ):
        self.execute.expect()
        self.program.executeWithArguments( "test" )

    def testWithArguments( self ):
        self.execute.expect( "foo", "bar" )
        self.program.executeWithArguments( "test", "foo", "bar" )

    # def testWithOption( self ):
    #     self.optionHandler.expect()
    #     self.execute.expect()
    #     self.program.executeWithArguments( "test", "--option" )

unittest.main()
|
import unittest
import MockMockMock
from InteractiveCommandLine import *


class CommandLineCommandExecution( unittest.TestCase ):
    def setUp( self ):
        self.command = Command()
        self.optionHandler = MockMockMock.Mock( "optionHandler" )
        self.command.addOption( "option", self.optionHandler )
        self.execute = MockMockMock.Mock( "execute" )
        self.command.execute = self.execute.object
        self.program = Program()
        self.program.addCommand( "test", self.command )

    def tearDown( self ):
        self.execute.tearDown()

    def __executeProgram( self, *arguments ):
        CommandContainer.execute( self.program, *arguments )

    def testWithoutArguments( self ):
        self.execute.expect()
        self.__executeProgram( "test" )

    def testWithArguments( self ):
        self.execute.expect( "foo", "bar" )
        self.__executeProgram( "test", "foo", "bar" )

    # def testWithOption( self ):
    #     self.optionHandler.expect()
    #     self.execute.expect()
    #     self.program.executeWithArguments( "test", "--option" )

unittest.main()
|
Fix unit tests for CommandContainer
|
Fix unit tests for CommandContainer
|
Python
|
mit
|
jacquev6/InteractiveCommandLine
|
import unittest
import MockMockMock
from InteractiveCommandLine import *


class CommandLineCommandExecution( unittest.TestCase ):
    def setUp( self ):
        self.command = Command()
        self.optionHandler = MockMockMock.Mock( "optionHandler" )
        self.command.addOption( "option", self.optionHandler )
        self.execute = MockMockMock.Mock( "execute" )
        self.command.execute = self.execute.object
        self.program = Program()
        self.program.addCommand( "test", self.command )

    def tearDown( self ):
        self.execute.tearDown()

    def testWithoutArguments( self ):
        self.execute.expect()
        self.program.executeWithArguments( "test" )

    def testWithArguments( self ):
        self.execute.expect( "foo", "bar" )
        self.program.executeWithArguments( "test", "foo", "bar" )

    # def testWithOption( self ):
    #     self.optionHandler.expect()
    #     self.execute.expect()
    #     self.program.executeWithArguments( "test", "--option" )

unittest.main()
Fix unit tests for CommandContainer
|
import unittest
import MockMockMock
from InteractiveCommandLine import *


class CommandLineCommandExecution( unittest.TestCase ):
    def setUp( self ):
        self.command = Command()
        self.optionHandler = MockMockMock.Mock( "optionHandler" )
        self.command.addOption( "option", self.optionHandler )
        self.execute = MockMockMock.Mock( "execute" )
        self.command.execute = self.execute.object
        self.program = Program()
        self.program.addCommand( "test", self.command )

    def tearDown( self ):
        self.execute.tearDown()

    def __executeProgram( self, *arguments ):
        CommandContainer.execute( self.program, *arguments )

    def testWithoutArguments( self ):
        self.execute.expect()
        self.__executeProgram( "test" )

    def testWithArguments( self ):
        self.execute.expect( "foo", "bar" )
        self.__executeProgram( "test", "foo", "bar" )

    # def testWithOption( self ):
    #     self.optionHandler.expect()
    #     self.execute.expect()
    #     self.program.executeWithArguments( "test", "--option" )

unittest.main()
|
<commit_before>import unittest
import MockMockMock
from InteractiveCommandLine import *


class CommandLineCommandExecution( unittest.TestCase ):
    def setUp( self ):
        self.command = Command()
        self.optionHandler = MockMockMock.Mock( "optionHandler" )
        self.command.addOption( "option", self.optionHandler )
        self.execute = MockMockMock.Mock( "execute" )
        self.command.execute = self.execute.object
        self.program = Program()
        self.program.addCommand( "test", self.command )

    def tearDown( self ):
        self.execute.tearDown()

    def testWithoutArguments( self ):
        self.execute.expect()
        self.program.executeWithArguments( "test" )

    def testWithArguments( self ):
        self.execute.expect( "foo", "bar" )
        self.program.executeWithArguments( "test", "foo", "bar" )

    # def testWithOption( self ):
    #     self.optionHandler.expect()
    #     self.execute.expect()
    #     self.program.executeWithArguments( "test", "--option" )

unittest.main()
<commit_msg>Fix unit tests for CommandContainer<commit_after>
|
import unittest
import MockMockMock
from InteractiveCommandLine import *


class CommandLineCommandExecution( unittest.TestCase ):
    def setUp( self ):
        self.command = Command()
        self.optionHandler = MockMockMock.Mock( "optionHandler" )
        self.command.addOption( "option", self.optionHandler )
        self.execute = MockMockMock.Mock( "execute" )
        self.command.execute = self.execute.object
        self.program = Program()
        self.program.addCommand( "test", self.command )

    def tearDown( self ):
        self.execute.tearDown()

    def __executeProgram( self, *arguments ):
        CommandContainer.execute( self.program, *arguments )

    def testWithoutArguments( self ):
        self.execute.expect()
        self.__executeProgram( "test" )

    def testWithArguments( self ):
        self.execute.expect( "foo", "bar" )
        self.__executeProgram( "test", "foo", "bar" )

    # def testWithOption( self ):
    #     self.optionHandler.expect()
    #     self.execute.expect()
    #     self.program.executeWithArguments( "test", "--option" )

unittest.main()
|
import unittest
import MockMockMock
from InteractiveCommandLine import *


class CommandLineCommandExecution( unittest.TestCase ):
    def setUp( self ):
        self.command = Command()
        self.optionHandler = MockMockMock.Mock( "optionHandler" )
        self.command.addOption( "option", self.optionHandler )
        self.execute = MockMockMock.Mock( "execute" )
        self.command.execute = self.execute.object
        self.program = Program()
        self.program.addCommand( "test", self.command )

    def tearDown( self ):
        self.execute.tearDown()

    def testWithoutArguments( self ):
        self.execute.expect()
        self.program.executeWithArguments( "test" )

    def testWithArguments( self ):
        self.execute.expect( "foo", "bar" )
        self.program.executeWithArguments( "test", "foo", "bar" )

    # def testWithOption( self ):
    #     self.optionHandler.expect()
    #     self.execute.expect()
    #     self.program.executeWithArguments( "test", "--option" )

unittest.main()
Fix unit tests for CommandContainerimport unittest
import MockMockMock
from InteractiveCommandLine import *


class CommandLineCommandExecution( unittest.TestCase ):
    def setUp( self ):
        self.command = Command()
        self.optionHandler = MockMockMock.Mock( "optionHandler" )
        self.command.addOption( "option", self.optionHandler )
        self.execute = MockMockMock.Mock( "execute" )
        self.command.execute = self.execute.object
        self.program = Program()
        self.program.addCommand( "test", self.command )

    def tearDown( self ):
        self.execute.tearDown()

    def __executeProgram( self, *arguments ):
        CommandContainer.execute( self.program, *arguments )

    def testWithoutArguments( self ):
        self.execute.expect()
        self.__executeProgram( "test" )

    def testWithArguments( self ):
        self.execute.expect( "foo", "bar" )
        self.__executeProgram( "test", "foo", "bar" )

    # def testWithOption( self ):
    #     self.optionHandler.expect()
    #     self.execute.expect()
    #     self.program.executeWithArguments( "test", "--option" )

unittest.main()
|
<commit_before>import unittest
import MockMockMock
from InteractiveCommandLine import *


class CommandLineCommandExecution( unittest.TestCase ):
    def setUp( self ):
        self.command = Command()
        self.optionHandler = MockMockMock.Mock( "optionHandler" )
        self.command.addOption( "option", self.optionHandler )
        self.execute = MockMockMock.Mock( "execute" )
        self.command.execute = self.execute.object
        self.program = Program()
        self.program.addCommand( "test", self.command )

    def tearDown( self ):
        self.execute.tearDown()

    def testWithoutArguments( self ):
        self.execute.expect()
        self.program.executeWithArguments( "test" )

    def testWithArguments( self ):
        self.execute.expect( "foo", "bar" )
        self.program.executeWithArguments( "test", "foo", "bar" )

    # def testWithOption( self ):
    #     self.optionHandler.expect()
    #     self.execute.expect()
    #     self.program.executeWithArguments( "test", "--option" )

unittest.main()
<commit_msg>Fix unit tests for CommandContainer<commit_after>import unittest
import MockMockMock
from InteractiveCommandLine import *


class CommandLineCommandExecution( unittest.TestCase ):
    def setUp( self ):
        self.command = Command()
        self.optionHandler = MockMockMock.Mock( "optionHandler" )
        self.command.addOption( "option", self.optionHandler )
        self.execute = MockMockMock.Mock( "execute" )
        self.command.execute = self.execute.object
        self.program = Program()
        self.program.addCommand( "test", self.command )

    def tearDown( self ):
        self.execute.tearDown()

    def __executeProgram( self, *arguments ):
        CommandContainer.execute( self.program, *arguments )

    def testWithoutArguments( self ):
        self.execute.expect()
        self.__executeProgram( "test" )

    def testWithArguments( self ):
        self.execute.expect( "foo", "bar" )
        self.__executeProgram( "test", "foo", "bar" )

    # def testWithOption( self ):
    #     self.optionHandler.expect()
    #     self.execute.expect()
    #     self.program.executeWithArguments( "test", "--option" )

unittest.main()
|
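The fix above calls `CommandContainer.execute(self.program, *arguments)`, i.e. it invokes a method through what is presumably a base class while passing the instance explicitly. That is ordinary Python; a toy sketch with illustrative class names, not the library's own:

```python
class Container:
    def execute(self, *arguments):
        return ("container", arguments)

class Program(Container):
    pass

p = Program()
# Unbound-style call through the base class, mirroring the test helper:
assert Container.execute(p, "test") == ("container", ("test",))
```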
22f94c5bb08ee6ae816109bdc06eab9e1974884a
|
app/models/cnes_professional.py
|
app/models/cnes_professional.py
|
from sqlalchemy import Column, Integer, String, func

from app import db


class CnesProfessional(db.Model):
    __tablename__ = 'cnes_professional'

    year = Column(Integer, primary_key=True)
    region = Column(String(1), primary_key=True)
    mesoregion = Column(String(4), primary_key=True)
    microregion = Column(String(5), primary_key=True)
    state = Column(String(2), primary_key=True)
    municipality = Column(String(7), primary_key=True)
    cnes = Column(String(7), primary_key=True)

    @classmethod
    def dimensions(cls):
        return [
            'year',
            'region',
            'mesoregion',
            'microregion',
            'state',
            'municipality',
        ]

    @classmethod
    def aggregate(cls, value):
        return {
            'professionals': func.count(cls.cnes)
        }[value]

    @classmethod
    def values(cls):
        return ['professionals']
|
from sqlalchemy import Column, Integer, String, func

from app import db


class CnesProfessional(db.Model):
    __tablename__ = 'cnes_professional'

    year = Column(Integer, primary_key=True)
    region = Column(String(1), primary_key=True)
    mesoregion = Column(String(4), primary_key=True)
    microregion = Column(String(5), primary_key=True)
    state = Column(String(2), primary_key=True)
    municipality = Column(String(7), primary_key=True)
    cnes = Column(String(7), primary_key=True)
    cbo = Column(String(2), primary_key=True)

    @classmethod
    def dimensions(cls):
        return [
            'year',
            'region',
            'mesoregion',
            'microregion',
            'state',
            'municipality',
            'cbo',
        ]

    @classmethod
    def aggregate(cls, value):
        return {
            'professionals': func.count(cls.cnes)
        }[value]

    @classmethod
    def values(cls):
        return ['professionals']
|
Add CBO column to cnes professional
|
Add CBO column to cnes professional
|
Python
|
mit
|
daniel1409/dataviva-api,DataViva/dataviva-api
|
from sqlalchemy import Column, Integer, String, func

from app import db


class CnesProfessional(db.Model):
    __tablename__ = 'cnes_professional'

    year = Column(Integer, primary_key=True)
    region = Column(String(1), primary_key=True)
    mesoregion = Column(String(4), primary_key=True)
    microregion = Column(String(5), primary_key=True)
    state = Column(String(2), primary_key=True)
    municipality = Column(String(7), primary_key=True)
    cnes = Column(String(7), primary_key=True)

    @classmethod
    def dimensions(cls):
        return [
            'year',
            'region',
            'mesoregion',
            'microregion',
            'state',
            'municipality',
        ]

    @classmethod
    def aggregate(cls, value):
        return {
            'professionals': func.count(cls.cnes)
        }[value]

    @classmethod
    def values(cls):
        return ['professionals']
Add CBO column to cnes professional
|
from sqlalchemy import Column, Integer, String, func

from app import db


class CnesProfessional(db.Model):
    __tablename__ = 'cnes_professional'

    year = Column(Integer, primary_key=True)
    region = Column(String(1), primary_key=True)
    mesoregion = Column(String(4), primary_key=True)
    microregion = Column(String(5), primary_key=True)
    state = Column(String(2), primary_key=True)
    municipality = Column(String(7), primary_key=True)
    cnes = Column(String(7), primary_key=True)
    cbo = Column(String(2), primary_key=True)

    @classmethod
    def dimensions(cls):
        return [
            'year',
            'region',
            'mesoregion',
            'microregion',
            'state',
            'municipality',
            'cbo',
        ]

    @classmethod
    def aggregate(cls, value):
        return {
            'professionals': func.count(cls.cnes)
        }[value]

    @classmethod
    def values(cls):
        return ['professionals']
|
<commit_before>from sqlalchemy import Column, Integer, String, func

from app import db


class CnesProfessional(db.Model):
    __tablename__ = 'cnes_professional'

    year = Column(Integer, primary_key=True)
    region = Column(String(1), primary_key=True)
    mesoregion = Column(String(4), primary_key=True)
    microregion = Column(String(5), primary_key=True)
    state = Column(String(2), primary_key=True)
    municipality = Column(String(7), primary_key=True)
    cnes = Column(String(7), primary_key=True)

    @classmethod
    def dimensions(cls):
        return [
            'year',
            'region',
            'mesoregion',
            'microregion',
            'state',
            'municipality',
        ]

    @classmethod
    def aggregate(cls, value):
        return {
            'professionals': func.count(cls.cnes)
        }[value]

    @classmethod
    def values(cls):
        return ['professionals']
<commit_msg>Add CBO column to cnes professional<commit_after>
|
from sqlalchemy import Column, Integer, String, func

from app import db


class CnesProfessional(db.Model):
    __tablename__ = 'cnes_professional'

    year = Column(Integer, primary_key=True)
    region = Column(String(1), primary_key=True)
    mesoregion = Column(String(4), primary_key=True)
    microregion = Column(String(5), primary_key=True)
    state = Column(String(2), primary_key=True)
    municipality = Column(String(7), primary_key=True)
    cnes = Column(String(7), primary_key=True)
    cbo = Column(String(2), primary_key=True)

    @classmethod
    def dimensions(cls):
        return [
            'year',
            'region',
            'mesoregion',
            'microregion',
            'state',
            'municipality',
            'cbo',
        ]

    @classmethod
    def aggregate(cls, value):
        return {
            'professionals': func.count(cls.cnes)
        }[value]

    @classmethod
    def values(cls):
        return ['professionals']
|
from sqlalchemy import Column, Integer, String, func

from app import db


class CnesProfessional(db.Model):
    __tablename__ = 'cnes_professional'

    year = Column(Integer, primary_key=True)
    region = Column(String(1), primary_key=True)
    mesoregion = Column(String(4), primary_key=True)
    microregion = Column(String(5), primary_key=True)
    state = Column(String(2), primary_key=True)
    municipality = Column(String(7), primary_key=True)
    cnes = Column(String(7), primary_key=True)

    @classmethod
    def dimensions(cls):
        return [
            'year',
            'region',
            'mesoregion',
            'microregion',
            'state',
            'municipality',
        ]

    @classmethod
    def aggregate(cls, value):
        return {
            'professionals': func.count(cls.cnes)
        }[value]

    @classmethod
    def values(cls):
        return ['professionals']
Add CBO column to cnes professionalfrom sqlalchemy import Column, Integer, String, func
from app import db


class CnesProfessional(db.Model):
    __tablename__ = 'cnes_professional'

    year = Column(Integer, primary_key=True)
    region = Column(String(1), primary_key=True)
    mesoregion = Column(String(4), primary_key=True)
    microregion = Column(String(5), primary_key=True)
    state = Column(String(2), primary_key=True)
    municipality = Column(String(7), primary_key=True)
    cnes = Column(String(7), primary_key=True)
    cbo = Column(String(2), primary_key=True)

    @classmethod
    def dimensions(cls):
        return [
            'year',
            'region',
            'mesoregion',
            'microregion',
            'state',
            'municipality',
            'cbo',
        ]

    @classmethod
    def aggregate(cls, value):
        return {
            'professionals': func.count(cls.cnes)
        }[value]

    @classmethod
    def values(cls):
        return ['professionals']
|
<commit_before>from sqlalchemy import Column, Integer, String, func

from app import db


class CnesProfessional(db.Model):
    __tablename__ = 'cnes_professional'

    year = Column(Integer, primary_key=True)
    region = Column(String(1), primary_key=True)
    mesoregion = Column(String(4), primary_key=True)
    microregion = Column(String(5), primary_key=True)
    state = Column(String(2), primary_key=True)
    municipality = Column(String(7), primary_key=True)
    cnes = Column(String(7), primary_key=True)

    @classmethod
    def dimensions(cls):
        return [
            'year',
            'region',
            'mesoregion',
            'microregion',
            'state',
            'municipality',
        ]

    @classmethod
    def aggregate(cls, value):
        return {
            'professionals': func.count(cls.cnes)
        }[value]

    @classmethod
    def values(cls):
        return ['professionals']
<commit_msg>Add CBO column to cnes professional<commit_after>from sqlalchemy import Column, Integer, String, func
from app import db


class CnesProfessional(db.Model):
    __tablename__ = 'cnes_professional'

    year = Column(Integer, primary_key=True)
    region = Column(String(1), primary_key=True)
    mesoregion = Column(String(4), primary_key=True)
    microregion = Column(String(5), primary_key=True)
    state = Column(String(2), primary_key=True)
    municipality = Column(String(7), primary_key=True)
    cnes = Column(String(7), primary_key=True)
    cbo = Column(String(2), primary_key=True)

    @classmethod
    def dimensions(cls):
        return [
            'year',
            'region',
            'mesoregion',
            'microregion',
            'state',
            'municipality',
            'cbo',
        ]

    @classmethod
    def aggregate(cls, value):
        return {
            'professionals': func.count(cls.cnes)
        }[value]

    @classmethod
    def values(cls):
        return ['professionals']
|
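Every column in the model above belongs to one composite primary key, and `dimensions()`/`aggregate()` expose the metadata an API layer can use to build grouped queries. A sketch of that use, assuming SQLAlchemy 1.4+ `select()` syntax and an already-configured session:

```python
from sqlalchemy import select

def count_professionals_by(session, model, dimension):
    """Group the model's count aggregate by one of its dimensions."""
    col = getattr(model, dimension)  # e.g. model.cbo after this commit
    stmt = select(col, model.aggregate('professionals')).group_by(col)
    return session.execute(stmt).all()

# e.g. count_professionals_by(session, CnesProfessional, 'cbo')
```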
432a204f209e9470791b68297cb8453ab6ba32a8
|
python3.7/app/main.py
|
python3.7/app/main.py
|
def application(env, start_response):
    start_response('200 OK', [('Content-Type', 'text/html')])
    return [b"Hello World from a default Nginx uWSGI Python 3.6 app in a\
 Docker container (default)"]
|
def application(env, start_response):
    start_response('200 OK', [('Content-Type', 'text/html')])
    return [b"Hello World from a default Nginx uWSGI Python 3.7 app in a\
 Docker container (default)"]
|
Update the message in the default Python3.7 app
|
Update the message in the default Python3.7 app
to show Python3.7 instead of Python3.6
|
Python
|
apache-2.0
|
tiangolo/uwsgi-nginx-docker,tiangolo/uwsgi-nginx-docker
|
def application(env, start_response):
    start_response('200 OK', [('Content-Type', 'text/html')])
    return [b"Hello World from a default Nginx uWSGI Python 3.6 app in a\
 Docker container (default)"]
Update the message in the default Python3.7 app
to show Python3.7 instead of Python3.6
|
def application(env, start_response):
    start_response('200 OK', [('Content-Type', 'text/html')])
    return [b"Hello World from a default Nginx uWSGI Python 3.7 app in a\
 Docker container (default)"]
|
<commit_before>def application(env, start_response):
    start_response('200 OK', [('Content-Type', 'text/html')])
    return [b"Hello World from a default Nginx uWSGI Python 3.6 app in a\
 Docker container (default)"]
<commit_msg>Update the message in the default Python3.7 app
to show Python3.7 instead of Python3.6<commit_after>
|
def application(env, start_response):
    start_response('200 OK', [('Content-Type', 'text/html')])
    return [b"Hello World from a default Nginx uWSGI Python 3.7 app in a\
 Docker container (default)"]
|
def application(env, start_response):
    start_response('200 OK', [('Content-Type', 'text/html')])
    return [b"Hello World from a default Nginx uWSGI Python 3.6 app in a\
 Docker container (default)"]
Update the message in the default Python3.7 app
to show Python3.7 instead of Python3.6def application(env, start_response):
    start_response('200 OK', [('Content-Type', 'text/html')])
    return [b"Hello World from a default Nginx uWSGI Python 3.7 app in a\
 Docker container (default)"]
|
<commit_before>def application(env, start_response):
    start_response('200 OK', [('Content-Type', 'text/html')])
    return [b"Hello World from a default Nginx uWSGI Python 3.6 app in a\
 Docker container (default)"]
<commit_msg>Update the message in the default Python3.7 app
to show Python3.7 instead of Python3.6<commit_after>def application(env, start_response):
    start_response('200 OK', [('Content-Type', 'text/html')])
    return [b"Hello World from a default Nginx uWSGI Python 3.7 app in a\
 Docker container (default)"]
|
66e0e8e2cb202ca3f4832bf728bfd53c084d6f62
|
appengine_config.py
|
appengine_config.py
|
"""Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
app = recording.appstats_wsgi_middleware(app)
return app
# Custom Appstats path normalization.
def appstats_normalize_path(path):
if path.startswith('/user/'):
return '/user/X'
if path.startswith('/user_popup/'):
return '/user_popup/X'
if path.startswith('/rss/'):
i = path.find('/', 5)
if i > 0:
return path[:i] + '/X'
return re.sub(r'\d+', 'X', path)
# Declare the Django version we need.
from google.appengine.dist import use_library
use_library('django', '1.2')
# Fail early if we can't import Django 1.x. Log identifying information.
import django
logging.info('django.__file__ = %r, django.VERSION = %r',
django.__file__, django.VERSION)
assert django.VERSION[0] >= 1, "This Django version is too old"
# Custom Django configuration.
# NOTE: All "main" scripts must import webapp.template before django.
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from django.conf import settings
settings._target = None
|
"""Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
app = recording.appstats_wsgi_middleware(app)
return app
# Custom Appstats path normalization.
def appstats_normalize_path(path):
if path.startswith('/user/'):
return '/user/X'
if path.startswith('/user_popup/'):
return '/user_popup/X'
if '/diff/' in path:
return '/X/diff/...'
if '/diff2/' in path:
return '/X/diff2/...'
if '/patch/' in path:
return '/X/patch/...'
if path.startswith('/rss/'):
i = path.find('/', 5)
if i > 0:
return path[:i] + '/X'
return re.sub(r'\d+', 'X', path)
# Declare the Django version we need.
from google.appengine.dist import use_library
use_library('django', '1.2')
# Fail early if we can't import Django 1.x. Log identifying information.
import django
logging.info('django.__file__ = %r, django.VERSION = %r',
django.__file__, django.VERSION)
assert django.VERSION[0] >= 1, "This Django version is too old"
# Custom Django configuration.
# NOTE: All "main" scripts must import webapp.template before django.
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from django.conf import settings
settings._target = None
|
Improve custom appstats path normalization.
|
Improve custom appstats path normalization.
|
Python
|
apache-2.0
|
rietveld-codereview/rietveld,openlabs/cr.openlabs.co.in,salomon1184/rietveld,foolonhill/rietveld,andyzsf/rietveld,arg0/rietveld,google-code-export/rietveld,berkus/rietveld,aungzanbaw/rietveld,v3ss0n/rietveld,DeanHere/rietveld,draem0507/rietveld,rietveld-codereview/rietveld,andyzsf/rietveld,dushmis/rietveld,robfig/rietveld,gco/rietveld,kscharding/integral-solutions-smxq,fuzan/rietveld,andyzsf/rietveld,xtypebee/rietveld,supriyantomaftuh/rietveld,gavioto/rietveld,sajingeo/rietveld,aungzanbaw/rietveld,Koulio/rietveld,supriyantomaftuh/rietveld,robfig/rietveld,dushmis/rietveld,fuzan/rietveld,salomon1184/rietveld,dushmis/rietveld,draem0507/rietveld,xtypebee/rietveld,salomon1184/rietveld,ericmckean/rietveld,kscharding/integral-solutions-smxq,foolonhill/rietveld,gavioto/rietveld,arg0/rietveld,foolonhill/rietveld,rietveld-codereview/rietveld,aungzanbaw/rietveld,supriyantomaftuh/rietveld,Koulio/rietveld,openlabs/cr.openlabs.co.in,sajingeo/rietveld,berkus/rietveld,sajingeo/rietveld,v3ss0n/rietveld,v3ss0n/rietveld,openlabs/cr.openlabs.co.in,arg0/rietveld,Koulio/rietveld,kscharding/integral-solutions-smxq,gavioto/rietveld,xtypebee/rietveld,google-code-export/rietveld,google-code-export/rietveld,DeanHere/rietveld,draem0507/rietveld,robfig/rietveld,gco/rietveld,berkus/rietveld,gco/rietveld,ericmckean/rietveld,rietveld-codereview/rietveld,fuzan/rietveld,ericmckean/rietveld,DeanHere/rietveld
|
"""Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
app = recording.appstats_wsgi_middleware(app)
return app
# Custom Appstats path normalization.
def appstats_normalize_path(path):
if path.startswith('/user/'):
return '/user/X'
if path.startswith('/user_popup/'):
return '/user_popup/X'
if path.startswith('/rss/'):
i = path.find('/', 5)
if i > 0:
return path[:i] + '/X'
return re.sub(r'\d+', 'X', path)
# Declare the Django version we need.
from google.appengine.dist import use_library
use_library('django', '1.2')
# Fail early if we can't import Django 1.x. Log identifying information.
import django
logging.info('django.__file__ = %r, django.VERSION = %r',
django.__file__, django.VERSION)
assert django.VERSION[0] >= 1, "This Django version is too old"
# Custom Django configuration.
# NOTE: All "main" scripts must import webapp.template before django.
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from django.conf import settings
settings._target = None
Improve custom appstats path normalization.
|
"""Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
app = recording.appstats_wsgi_middleware(app)
return app
# Custom Appstats path normalization.
def appstats_normalize_path(path):
if path.startswith('/user/'):
return '/user/X'
if path.startswith('/user_popup/'):
return '/user_popup/X'
if '/diff/' in path:
return '/X/diff/...'
if '/diff2/' in path:
return '/X/diff2/...'
if '/patch/' in path:
return '/X/patch/...'
if path.startswith('/rss/'):
i = path.find('/', 5)
if i > 0:
return path[:i] + '/X'
return re.sub(r'\d+', 'X', path)
# Declare the Django version we need.
from google.appengine.dist import use_library
use_library('django', '1.2')
# Fail early if we can't import Django 1.x. Log identifying information.
import django
logging.info('django.__file__ = %r, django.VERSION = %r',
django.__file__, django.VERSION)
assert django.VERSION[0] >= 1, "This Django version is too old"
# Custom Django configuration.
# NOTE: All "main" scripts must import webapp.template before django.
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from django.conf import settings
settings._target = None
|
<commit_before>"""Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
app = recording.appstats_wsgi_middleware(app)
return app
# Custom Appstats path normalization.
def appstats_normalize_path(path):
if path.startswith('/user/'):
return '/user/X'
if path.startswith('/user_popup/'):
return '/user_popup/X'
if path.startswith('/rss/'):
i = path.find('/', 5)
if i > 0:
return path[:i] + '/X'
return re.sub(r'\d+', 'X', path)
# Declare the Django version we need.
from google.appengine.dist import use_library
use_library('django', '1.2')
# Fail early if we can't import Django 1.x. Log identifying information.
import django
logging.info('django.__file__ = %r, django.VERSION = %r',
django.__file__, django.VERSION)
assert django.VERSION[0] >= 1, "This Django version is too old"
# Custom Django configuration.
# NOTE: All "main" scripts must import webapp.template before django.
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from django.conf import settings
settings._target = None
<commit_msg>Improve custom appstats path normalization.<commit_after>
|
"""Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
app = recording.appstats_wsgi_middleware(app)
return app
# Custom Appstats path normalization.
def appstats_normalize_path(path):
if path.startswith('/user/'):
return '/user/X'
if path.startswith('/user_popup/'):
return '/user_popup/X'
if '/diff/' in path:
return '/X/diff/...'
if '/diff2/' in path:
return '/X/diff2/...'
if '/patch/' in path:
return '/X/patch/...'
if path.startswith('/rss/'):
i = path.find('/', 5)
if i > 0:
return path[:i] + '/X'
return re.sub(r'\d+', 'X', path)
# Declare the Django version we need.
from google.appengine.dist import use_library
use_library('django', '1.2')
# Fail early if we can't import Django 1.x. Log identifying information.
import django
logging.info('django.__file__ = %r, django.VERSION = %r',
django.__file__, django.VERSION)
assert django.VERSION[0] >= 1, "This Django version is too old"
# Custom Django configuration.
# NOTE: All "main" scripts must import webapp.template before django.
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from django.conf import settings
settings._target = None
|
"""Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
app = recording.appstats_wsgi_middleware(app)
return app
# Custom Appstats path normalization.
def appstats_normalize_path(path):
if path.startswith('/user/'):
return '/user/X'
if path.startswith('/user_popup/'):
return '/user_popup/X'
if path.startswith('/rss/'):
i = path.find('/', 5)
if i > 0:
return path[:i] + '/X'
return re.sub(r'\d+', 'X', path)
# Declare the Django version we need.
from google.appengine.dist import use_library
use_library('django', '1.2')
# Fail early if we can't import Django 1.x. Log identifying information.
import django
logging.info('django.__file__ = %r, django.VERSION = %r',
django.__file__, django.VERSION)
assert django.VERSION[0] >= 1, "This Django version is too old"
# Custom Django configuration.
# NOTE: All "main" scripts must import webapp.template before django.
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from django.conf import settings
settings._target = None
Improve custom appstats path normalization."""Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
app = recording.appstats_wsgi_middleware(app)
return app
# Custom Appstats path normalization.
def appstats_normalize_path(path):
if path.startswith('/user/'):
return '/user/X'
if path.startswith('/user_popup/'):
return '/user_popup/X'
if '/diff/' in path:
return '/X/diff/...'
if '/diff2/' in path:
return '/X/diff2/...'
if '/patch/' in path:
return '/X/patch/...'
if path.startswith('/rss/'):
i = path.find('/', 5)
if i > 0:
return path[:i] + '/X'
return re.sub(r'\d+', 'X', path)
# Declare the Django version we need.
from google.appengine.dist import use_library
use_library('django', '1.2')
# Fail early if we can't import Django 1.x. Log identifying information.
import django
logging.info('django.__file__ = %r, django.VERSION = %r',
django.__file__, django.VERSION)
assert django.VERSION[0] >= 1, "This Django version is too old"
# Custom Django configuration.
# NOTE: All "main" scripts must import webapp.template before django.
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from django.conf import settings
settings._target = None
|
<commit_before>"""Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
app = recording.appstats_wsgi_middleware(app)
return app
# Custom Appstats path normalization.
def appstats_normalize_path(path):
if path.startswith('/user/'):
return '/user/X'
if path.startswith('/user_popup/'):
return '/user_popup/X'
if path.startswith('/rss/'):
i = path.find('/', 5)
if i > 0:
return path[:i] + '/X'
return re.sub(r'\d+', 'X', path)
# Declare the Django version we need.
from google.appengine.dist import use_library
use_library('django', '1.2')
# Fail early if we can't import Django 1.x. Log identifying information.
import django
logging.info('django.__file__ = %r, django.VERSION = %r',
django.__file__, django.VERSION)
assert django.VERSION[0] >= 1, "This Django version is too old"
# Custom Django configuration.
# NOTE: All "main" scripts must import webapp.template before django.
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from django.conf import settings
settings._target = None
<commit_msg>Improve custom appstats path normalization.<commit_after>"""Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
app = recording.appstats_wsgi_middleware(app)
return app
# Custom Appstats path normalization.
def appstats_normalize_path(path):
if path.startswith('/user/'):
return '/user/X'
if path.startswith('/user_popup/'):
return '/user_popup/X'
if '/diff/' in path:
return '/X/diff/...'
if '/diff2/' in path:
return '/X/diff2/...'
if '/patch/' in path:
return '/X/patch/...'
if path.startswith('/rss/'):
i = path.find('/', 5)
if i > 0:
return path[:i] + '/X'
return re.sub(r'\d+', 'X', path)
# Declare the Django version we need.
from google.appengine.dist import use_library
use_library('django', '1.2')
# Fail early if we can't import Django 1.x. Log identifying information.
import django
logging.info('django.__file__ = %r, django.VERSION = %r',
django.__file__, django.VERSION)
assert django.VERSION[0] >= 1, "This Django version is too old"
# Custom Django configuration.
# NOTE: All "main" scripts must import webapp.template before django.
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from django.conf import settings
settings._target = None
|
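The normalization above is easiest to sanity-check by exercising each branch directly. The sketch below restates the function so it runs standalone; the sample paths are hypothetical, chosen only to hit each case. The dedicated /diff/, /diff2/ and /patch/ branches exist because those URLs embed issue and patchset numbers mid-path, where the trailing digit substitution alone would still leave a separate Appstats key per file name.

# Standalone sketch of the path normalization above; the sample paths
# are hypothetical and only illustrate each branch.
import re

def appstats_normalize_path(path):
    if path.startswith('/user/'):
        return '/user/X'
    if path.startswith('/user_popup/'):
        return '/user_popup/X'
    if '/diff/' in path:
        return '/X/diff/...'
    if '/diff2/' in path:
        return '/X/diff2/...'
    if '/patch/' in path:
        return '/X/patch/...'
    if path.startswith('/rss/'):
        i = path.find('/', 5)
        if i > 0:
            return path[:i] + '/X'
    return re.sub(r'\d+', 'X', path)

assert appstats_normalize_path('/user/alice') == '/user/X'
assert appstats_normalize_path('/1234/diff/5/foo.py') == '/X/diff/...'
assert appstats_normalize_path('/rss/issue/42') == '/rss/issue/X'
assert appstats_normalize_path('/show/1234') == '/show/X'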
8868cc4e8379002c62db7f69ca77ec8449930321
|
src/adhocracy_core/adhocracy_core/scripts/import_resources.py
|
src/adhocracy_core/adhocracy_core/scripts/import_resources.py
|
"""Import/create resources into the system.
This is registered as console script 'import_resources' in setup.py.
"""
# pragma: no cover
import argparse
import inspect
import logging
import sys
import transaction
from pyramid.paster import bootstrap
from adhocracy_core import scripts
def import_resources():
"""Import resources from a JSON file.
usage::
bin/import_resources etc/development.ini <filename>
"""
epilog = """The input JSON file contains the interface name of the resource
type to create and a serialization of the sheets data.
Strings having the form 'user_by_login: <username>' are resolved
to the user's path.
"""
docstring = inspect.getdoc(import_resources)
parser = argparse.ArgumentParser(description=docstring, epilog=epilog)
parser.add_argument('ini_file',
help='path to the adhocracy backend ini file')
parser.add_argument('filename',
type=str,
help='file containing the resources descriptions')
args = parser.parse_args()
env = bootstrap(args.ini_file)
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
scripts.import_resources(env['root'], env['registry'], args.filename)
transaction.commit()
env['closer']()
|
"""Import/create resources into the system.
This is registered as console script 'import_resources' in setup.py.
"""
# pragma: no cover
import argparse
import inspect
import logging
import sys
import transaction
from pyramid.paster import bootstrap
from . import import_resources as main_import_resources
def import_resources():
"""Import resources from a JSON file.
usage::
bin/import_resources etc/development.ini <filename>
"""
epilog = """The input JSON file contains the interface name of the resource
type to create and a serialization of the sheets data.
Strings having the form 'user_by_login: <username>' are resolved
to the user's path.
"""
docstring = inspect.getdoc(import_resources)
parser = argparse.ArgumentParser(description=docstring, epilog=epilog)
parser.add_argument('ini_file',
help='path to the adhocracy backend ini file')
parser.add_argument('filename',
type=str,
help='file containing the resources descriptions')
args = parser.parse_args()
env = bootstrap(args.ini_file)
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
main_import_resources(env['root'], env['registry'], args.filename)
transaction.commit()
env['closer']()
|
Fix import resources command line wrapper
|
Fix import resources command line wrapper
|
Python
|
agpl-3.0
|
liqd/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator
|
"""Import/create resources into the system.
This is registered as console script 'import_resources' in setup.py.
"""
# pragma: no cover
import argparse
import inspect
import logging
import sys
import transaction
from pyramid.paster import bootstrap
from adhocracy_core import scripts
def import_resources():
"""Import resources from a JSON file.
usage::
bin/import_resources etc/development.ini <filename>
"""
epilog = """The input JSON file contains the interface name of the resource
type to create and a serialization of the sheets data.
Strings having the form 'user_by_login: <username>' are resolved
to the user's path.
"""
docstring = inspect.getdoc(import_resources)
parser = argparse.ArgumentParser(description=docstring, epilog=epilog)
parser.add_argument('ini_file',
help='path to the adhocracy backend ini file')
parser.add_argument('filename',
type=str,
help='file containing the resources descriptions')
args = parser.parse_args()
env = bootstrap(args.ini_file)
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
scripts.import_resources(env['root'], env['registry'], args.filename)
transaction.commit()
env['closer']()
Fix import resources command line wrapper
|
"""Import/create resources into the system.
This is registered as console script 'import_resources' in setup.py.
"""
# pragma: no cover
import argparse
import inspect
import logging
import sys
import transaction
from pyramid.paster import bootstrap
from . import import_resources as main_import_resources
def import_resources():
"""Import resources from a JSON file.
usage::
bin/import_resources etc/development.ini <filename>
"""
epilog = """The input JSON file contains the interface name of the resource
type to create and a serialization of the sheets data.
Strings having the form 'user_by_login: <username>' are resolved
to the user's path.
"""
docstring = inspect.getdoc(import_resources)
parser = argparse.ArgumentParser(description=docstring, epilog=epilog)
parser.add_argument('ini_file',
help='path to the adhocracy backend ini file')
parser.add_argument('filename',
type=str,
help='file containing the resources descriptions')
args = parser.parse_args()
env = bootstrap(args.ini_file)
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
main_import_resources(env['root'], env['registry'], args.filename)
transaction.commit()
env['closer']()
|
<commit_before>"""Import/create resources into the system.
This is registered as console script 'import_resources' in setup.py.
"""
# pragma: no cover
import argparse
import inspect
import logging
import sys
import transaction
from pyramid.paster import bootstrap
from adhocracy_core import scripts
def import_resources():
"""Import resources from a JSON file.
usage::
bin/import_resources etc/development.ini <filename>
"""
epilog = """The input JSON file contains the interface name of the resource
type to create and a serialization of the sheets data.
Strings having the form 'user_by_login: <username>' are resolved
to the user's path.
"""
docstring = inspect.getdoc(import_resources)
parser = argparse.ArgumentParser(description=docstring, epilog=epilog)
parser.add_argument('ini_file',
help='path to the adhocracy backend ini file')
parser.add_argument('filename',
type=str,
help='file containing the resources descriptions')
args = parser.parse_args()
env = bootstrap(args.ini_file)
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
scripts.import_resources(env['root'], env['registry'], args.filename)
transaction.commit()
env['closer']()
<commit_msg>Fix import resources command line wrapper<commit_after>
|
"""Import/create resources into the system.
This is registered as console script 'import_resources' in setup.py.
"""
# pragma: no cover
import argparse
import inspect
import logging
import sys
import transaction
from pyramid.paster import bootstrap
from . import import_resources as main_import_resources
def import_resources():
"""Import resources from a JSON file.
usage::
bin/import_resources etc/development.ini <filename>
"""
epilog = """The input JSON file contains the interface name of the resource
type to create and a serialization of the sheets data.
Strings having the form 'user_by_login: <username>' are resolved
to the user's path.
"""
docstring = inspect.getdoc(import_resources)
parser = argparse.ArgumentParser(description=docstring, epilog=epilog)
parser.add_argument('ini_file',
help='path to the adhocracy backend ini file')
parser.add_argument('filename',
type=str,
help='file containing the resources descriptions')
args = parser.parse_args()
env = bootstrap(args.ini_file)
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
main_import_resources(env['root'], env['registry'], args.filename)
transaction.commit()
env['closer']()
|
"""Import/create resources into the system.
This is registered as console script 'import_resources' in setup.py.
"""
# pragma: no cover
import argparse
import inspect
import logging
import sys
import transaction
from pyramid.paster import bootstrap
from adhocracy_core import scripts
def import_resources():
"""Import resources from a JSON file.
usage::
bin/import_resources etc/development.ini <filename>
"""
epilog = """The input JSON file contains the interface name of the resource
type to create and a serialization of the sheets data.
Strings having the form 'user_by_login: <username>' are resolved
to the user's path.
"""
docstring = inspect.getdoc(import_resources)
parser = argparse.ArgumentParser(description=docstring, epilog=epilog)
parser.add_argument('ini_file',
help='path to the adhocracy backend ini file')
parser.add_argument('filename',
type=str,
help='file containing the resources descriptions')
args = parser.parse_args()
env = bootstrap(args.ini_file)
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
scripts.import_resources(env['root'], env['registry'], args.filename)
transaction.commit()
env['closer']()
Fix import resources command line wrapper"""Import/create resources into the system.
This is registered as console script 'import_resources' in setup.py.
"""
# pragma: no cover
import argparse
import inspect
import logging
import sys
import transaction
from pyramid.paster import bootstrap
from . import import_resources as main_import_resources
def import_resources():
"""Import resources from a JSON file.
usage::
bin/import_resources etc/development.ini <filename>
"""
epilog = """The input JSON file contains the interface name of the resource
type to create and a serialization of the sheets data.
Strings having the form 'user_by_login: <username>' are resolved
to the user's path.
"""
docstring = inspect.getdoc(import_resources)
parser = argparse.ArgumentParser(description=docstring, epilog=epilog)
parser.add_argument('ini_file',
help='path to the adhocracy backend ini file')
parser.add_argument('filename',
type=str,
help='file containing the resources descriptions')
args = parser.parse_args()
env = bootstrap(args.ini_file)
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
main_import_resources(env['root'], env['registry'], args.filename)
transaction.commit()
env['closer']()
|
<commit_before>"""Import/create resources into the system.
This is registered as console script 'import_resources' in setup.py.
"""
# pragma: no cover
import argparse
import inspect
import logging
import sys
import transaction
from pyramid.paster import bootstrap
from adhocracy_core import scripts
def import_resources():
"""Import resources from a JSON file.
usage::
bin/import_resources etc/development.ini <filename>
"""
epilog = """The input JSON file contains the interface name of the resource
type to create and a serialization of the sheets data.
Strings having the form 'user_by_login: <username>' are resolved
to the user's path.
"""
docstring = inspect.getdoc(import_resources)
parser = argparse.ArgumentParser(description=docstring, epilog=epilog)
parser.add_argument('ini_file',
help='path to the adhocracy backend ini file')
parser.add_argument('filename',
type=str,
help='file containing the resources descriptions')
args = parser.parse_args()
env = bootstrap(args.ini_file)
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
scripts.import_resources(env['root'], env['registry'], args.filename)
transaction.commit()
env['closer']()
<commit_msg>Fix import resources command line wrapper<commit_after>"""Import/create resources into the system.
This is registered as console script 'import_resources' in setup.py.
"""
# pragma: no cover
import argparse
import inspect
import logging
import sys
import transaction
from pyramid.paster import bootstrap
from . import import_resources as main_import_resources
def import_resources():
"""Import resources from a JSON file.
usage::
bin/import_resources etc/development.ini <filename>
"""
epilog = """The input JSON file contains the interface name of the resource
type to create and a serialization of the sheets data.
Strings having the form 'user_by_login: <username>' are resolved
to the user's path.
"""
docstring = inspect.getdoc(import_resources)
parser = argparse.ArgumentParser(description=docstring, epilog=epilog)
parser.add_argument('ini_file',
help='path to the adhocracy backend ini file')
parser.add_argument('filename',
type=str,
help='file containing the resources descriptions')
args = parser.parse_args()
env = bootstrap(args.ini_file)
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
main_import_resources(env['root'], env['registry'], args.filename)
transaction.commit()
env['closer']()
|
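Going by the epilog above, the input file is a JSON list of resource descriptions. The sketch below writes a hypothetical example; the field names ("content_type", "path", "data") are assumptions, since the record only specifies the resource interface name, the sheets data, and the "user_by_login: <username>" convention.

# Hypothetical input for the script above; field names beyond what the
# epilog describes (interface name, sheets data, user_by_login) are guesses.
import json

resources = [{
    "content_type": "adhocracy_core.resources.organisation.IOrganisation",
    "path": "/",
    "data": {
        "adhocracy_core.sheets.name.IName": {"name": "example"},
        # per the epilog, resolved to the user's path at import time
        "adhocracy_core.sheets.metadata.IMetadata": {
            "creator": "user_by_login: admin",
        },
    },
}]

with open("resources.json", "w") as f:
    json.dump(resources, f, indent=2)

# then: bin/import_resources etc/development.ini resources.json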
dec3aaaefe2afdf4d3ce19dc808257ea49cc2b00
|
hsml.py
|
hsml.py
|
# -*- coding: utf-8 -*-
"""A small module for computing the smoothing length of a Gadget/Arepo simulation."""
import numpy as np
def get_smooth_length(bar):
"""Figures out if the particles are from AREPO or GADGET
and computes the smoothing length.
Note the Volume array in HDF5 is comoving and this returns a comoving smoothing length
The SPH kernel definition used in Gadget (Price 2011: arxiv 1012.1885)
gives a normalisation so that rho_p = m_p / h^3
So the smoothing length for Arepo is Volume^{1/3}
For gadget the kernel is defined so that the smoothing length is 2*h.
Arguments:
Baryon particles from a simulation
Returns:
Array of smoothing lengths in code units.
"""
#Are we arepo? If we are a modern version we should have this array.
try:
radius = np.cbrt(bar["Volume"], dtype=np.float32)
except KeyError:
#If we don't have a Volume array we are gadget, and
#the SmoothingLength array is actually the smoothing length.
#There is a different kernel definition, as in gadget the kernel goes from 0 to 2,
#whereas I put it between zero and 1.
radius=np.array(bar["SmoothingLength"],dtype=np.float32)/2
return radius
|
# -*- coding: utf-8 -*-
"""A small module for computing the smoothing length of a Gadget/Arepo simulation."""
import numpy as np
def get_smooth_length(bar):
"""Figures out if the particles are from AREPO or GADGET
and computes the smoothing length.
Note the Volume array in HDF5 is comoving and this returns a comoving smoothing length
The SPH kernel definition used in Gadget (Price 2011: arxiv 1012.1885)
gives a normalisation so that rho_p = m_p / h^3
So the smoothing length for Arepo is Volume^{1/3}
For gadget the kernel is defined so that the smoothing length is 2*h.
Arguments:
Baryon particles from a simulation
Returns:
Array of smoothing lengths in code units.
"""
#Are we arepo? If we are a modern version we should have this array.
try:
radius = np.cbrt(bar["Volume"], dtype=np.float32)
except KeyError:
#If we don't have a Volume array we are gadget, and
#the SmoothingLength array is actually the smoothing length.
#There is a different kernel definition, as in gadget the kernel goes from 0 to 2,
#whereas I put it between zero and 1.
radius=np.array(bar["SmoothingLength"],dtype=np.float32)/2
except AttributeError:
#This is for really old numpy versions without cbrt
radius = np.power(bar["Volume"], 1./3, dtype=np.float32)
return radius
|
Fix for old numpy versions without cbrt
|
Fix for old numpy versions without cbrt
|
Python
|
mit
|
sbird/fake_spectra,sbird/fake_spectra,sbird/fake_spectra
|
# -*- coding: utf-8 -*-
"""A small module for computing the smoothing length of a Gadget/Arepo simulation."""
import numpy as np
def get_smooth_length(bar):
"""Figures out if the particles are from AREPO or GADGET
and computes the smoothing length.
Note the Volume array in HDF5 is comoving and this returns a comoving smoothing length
The SPH kernel definition used in Gadget (Price 2011: arxiv 1012.1885)
gives a normalisation so that rho_p = m_p / h^3
So the smoothing length for Arepo is Volume^{1/3}
For gadget the kernel is defined so that the smoothing length is 2*h.
Arguments:
Baryon particles from a simulation
Returns:
Array of smoothing lengths in code units.
"""
#Are we arepo? If we are a modern version we should have this array.
try:
radius = np.cbrt(bar["Volume"], dtype=np.float32)
except KeyError:
#If we don't have a Volume array we are gadget, and
#the SmoothingLength array is actually the smoothing length.
#There is a different kernel definition, as in gadget the kernel goes from 0 to 2,
#whereas I put it between zero and 1.
radius=np.array(bar["SmoothingLength"],dtype=np.float32)/2
return radius
Fix for old numpy versions without cbrt
|
# -*- coding: utf-8 -*-
"""A small module for computing the smoothing length of a Gadget/Arepo simulation."""
import numpy as np
def get_smooth_length(bar):
"""Figures out if the particles are from AREPO or GADGET
and computes the smoothing length.
Note the Volume array in HDF5 is comoving and this returns a comoving smoothing length
The SPH kernel definition used in Gadget (Price 2011: arxiv 1012.1885)
gives a normalisation so that rho_p = m_p / h^3
So the smoothing length for Arepo is Volume^{1/3}
For gadget the kernel is defined so that the smoothing length is 2*h.
Arguments:
Baryon particles from a simulation
Returns:
Array of smoothing lengths in code units.
"""
#Are we arepo? If we are a modern version we should have this array.
try:
radius = np.cbrt(bar["Volume"], dtype=np.float32)
except KeyError:
#If we don't have a Volume array we are gadget, and
#the SmoothingLength array is actually the smoothing length.
#There is a different kernel definition, as in gadget the kernel goes from 0 to 2,
#whereas I put it between zero and 1.
radius=np.array(bar["SmoothingLength"],dtype=np.float32)/2
except AttributeError:
#This is for really old numpy versions without cbrt
radius = np.power(bar["Volume"], 1./3, dtype=np.float32)
return radius
|
<commit_before># -*- coding: utf-8 -*-
"""A small module for computing the smoothing length of a Gadget/Arepo simulation."""
import numpy as np
def get_smooth_length(bar):
"""Figures out if the particles are from AREPO or GADGET
and computes the smoothing length.
Note the Volume array in HDF5 is comoving and this returns a comoving smoothing length
The SPH kernel definition used in Gadget (Price 2011: arxiv 1012.1885)
gives a normalisation so that rho_p = m_p / h^3
So the smoothing length for Arepo is Volume^{1/3}
For gadget the kernel is defined so that the smoothing length is 2*h.
Arguments:
Baryon particles from a simulation
Returns:
Array of smoothing lengths in code units.
"""
#Are we arepo? If we are a modern version we should have this array.
try:
radius = np.cbrt(bar["Volume"], dtype=np.float32)
except KeyError:
#If we don't have a Volume array we are gadget, and
#the SmoothingLength array is actually the smoothing length.
#There is a different kernel definition, as in gadget the kernel goes from 0 to 2,
#whereas I put it between zero and 1.
radius=np.array(bar["SmoothingLength"],dtype=np.float32)/2
return radius
<commit_msg>Fix for old numpy versions without cbrt<commit_after>
|
# -*- coding: utf-8 -*-
"""A small module for computing the smoothing length of a Gadget/Arepo simulation."""
import numpy as np
def get_smooth_length(bar):
"""Figures out if the particles are from AREPO or GADGET
and computes the smoothing length.
Note the Volume array in HDF5 is comoving and this returns a comoving smoothing length
The SPH kernel definition used in Gadget (Price 2011: arxiv 1012.1885)
gives a normalisation so that rho_p = m_p / h^3
So the smoothing length for Arepo is Volume^{1/3}
For gadget the kernel is defined so that the smoothing length is 2*h.
Arguments:
Baryon particles from a simulation
Returns:
Array of smoothing lengths in code units.
"""
#Are we arepo? If we are a modern version we should have this array.
try:
radius = np.cbrt(bar["Volume"], dtype=np.float32)
except KeyError:
#If we don't have a Volume array we are gadget, and
#the SmoothingLength array is actually the smoothing length.
#There is a different kernel definition, as in gadget the kernel goes from 0 to 2,
#whereas I put it between zero and 1.
radius=np.array(bar["SmoothingLength"],dtype=np.float32)/2
except AttributeError:
#This is for really old numpy versions without cbrt
radius = np.power(bar["Volume"], 1./3, dtype=np.float32)
return radius
|
# -*- coding: utf-8 -*-
"""A small module for computing the smoothing length of a Gadget/Arepo simulation."""
import numpy as np
def get_smooth_length(bar):
"""Figures out if the particles are from AREPO or GADGET
and computes the smoothing length.
Note the Volume array in HDF5 is comoving and this returns a comoving smoothing length
The SPH kernel definition used in Gadget (Price 2011: arxiv 1012.1885)
gives a normalisation so that rho_p = m_p / h^3
So the smoothing length for Arepo is Volume^{1/3}
For gadget the kernel is defined so that the smoothing length is 2*h.
Arguments:
Baryon particles from a simulation
Returns:
Array of smoothing lengths in code units.
"""
#Are we arepo? If we are a modern version we should have this array.
try:
radius = np.cbrt(bar["Volume"], dtype=np.float32)
except KeyError:
#If we don't have a Volume array we are gadget, and
#the SmoothingLength array is actually the smoothing length.
#There is a different kernel definition, as in gadget the kernel goes from 0 to 2,
#whereas I put it between zero and 1.
radius=np.array(bar["SmoothingLength"],dtype=np.float32)/2
return radius
Fix for old numpy versions without cbrt
# -*- coding: utf-8 -*-
"""A small module for computing the smoothing length of a Gadget/Arepo simulation."""
import numpy as np
def get_smooth_length(bar):
"""Figures out if the particles are from AREPO or GADGET
and computes the smoothing length.
Note the Volume array in HDF5 is comoving and this returns a comoving smoothing length
The SPH kernel definition used in Gadget (Price 2011: arxiv 1012.1885)
gives a normalisation so that rho_p = m_p / h^3
So the smoothing length for Arepo is Volume^{1/3}
For gadget the kernel is defined so that the smoothing length is 2*h.
Arguments:
Baryon particles from a simulation
Returns:
Array of smoothing lengths in code units.
"""
#Are we arepo? If we are a modern version we should have this array.
try:
radius = np.cbrt(bar["Volume"], dtype=np.float32)
except KeyError:
#If we don't have a Volume array we are gadget, and
#the SmoothingLength array is actually the smoothing length.
#There is a different kernel definition, as in gadget the kernel goes from 0 to 2,
#whereas I put it between zero and 1.
radius=np.array(bar["SmoothingLength"],dtype=np.float32)/2
except AttributeError:
#This is for really old numpy versions without cbrt
radius = np.power(bar["Volume"], 1./3, dtype=np.float32)
return radius
|
<commit_before># -*- coding: utf-8 -*-
"""A small module for computing the smoothing length of a Gadget/Arepo simulation."""
import numpy as np
def get_smooth_length(bar):
"""Figures out if the particles are from AREPO or GADGET
and computes the smoothing length.
Note the Volume array in HDF5 is comoving and this returns a comoving smoothing length
The SPH kernel definition used in Gadget (Price 2011: arxiv 1012.1885)
gives a normalisation so that rho_p = m_p / h^3
So the smoothing length for Arepo is Volume^{1/3}
For gadget the kernel is defined so that the smoothing length is 2*h.
Arguments:
Baryon particles from a simulation
Returns:
Array of smoothing lengths in code units.
"""
#Are we arepo? If we are a modern version we should have this array.
try:
radius = np.cbrt(bar["Volume"], dtype=np.float32)
except KeyError:
#If we don't have a Volume array we are gadget, and
#the SmoothingLength array is actually the smoothing length.
#There is a different kernel definition, as in gadget the kernel goes from 0 to 2,
#whereas I put it between zero and 1.
radius=np.array(bar["SmoothingLength"],dtype=np.float32)/2
return radius
<commit_msg>Fix for old numpy versions without cbrt<commit_after># -*- coding: utf-8 -*-
"""A small module for computing the smoothing length of a Gadget/Arepo simulation."""
import numpy as np
def get_smooth_length(bar):
"""Figures out if the particles are from AREPO or GADGET
and computes the smoothing length.
Note the Volume array in HDF5 is comoving and this returns a comoving smoothing length
The SPH kernel definition used in Gadget (Price 2011: arxiv 1012.1885)
gives a normalisation so that rho_p = m_p / h^3
So the smoothing length for Arepo is Volume^{1/3}
For gadget the kernel is defined so that the smoothing length is 2*h.
Arguments:
Baryon particles from a simulation
Returns:
Array of smoothing lengths in code units.
"""
#Are we arepo? If we are a modern version we should have this array.
try:
radius = np.cbrt(bar["Volume"], dtype=np.float32)
except KeyError:
#If we don't have a Volume array we are gadget, and
#the SmoothingLength array is actually the smoothing length.
#There is a different kernel definition, as in gadget the kernel goes from 0 to 2,
#whereas I put it between zero and 1.
radius=np.array(bar["SmoothingLength"],dtype=np.float32)/2
except AttributeError:
#This is for really old numpy versions without cbrt
radius = np.power(bar["Volume"], 1./3, dtype=np.float32)
return radius
|
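The AttributeError fallback above assumes np.power(x, 1/3) agrees with np.cbrt for the always-positive cell volumes. A minimal check of that assumption, run on a numpy new enough to have cbrt for comparison:

# Sketch checking that the old-numpy fallback matches np.cbrt;
# the volumes are made-up positive values.
import numpy as np

vol = np.array([1.0, 8.0, 27.0, 0.125], dtype=np.float32)
modern = np.cbrt(vol, dtype=np.float32)
fallback = np.power(vol, 1. / 3, dtype=np.float32)
assert np.allclose(modern, fallback)

Note the equivalence holds only for non-negative input: np.power with a fractional exponent returns NaN for negative floats, while np.cbrt handles them, which is safe here because volumes cannot be negative.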
dcfb5116ba5f068afa354d063a4ab33bce853715
|
numba/sigutils.py
|
numba/sigutils.py
|
from __future__ import print_function, division, absolute_import
from numba import types, typing
def is_signature(sig):
return isinstance(sig, (str, tuple))
def normalize_signature(sig):
if isinstance(sig, str):
return normalize_signature(parse_signature(sig))
elif isinstance(sig, tuple):
return sig, None
elif isinstance(sig, typing.Signature):
return sig.args, sig.return_type
else:
raise TypeError(type(sig))
def parse_signature(signature_str):
# Just eval signature_str using the types submodules as globals
return eval(signature_str, {}, types.__dict__)
|
from __future__ import print_function, division, absolute_import
from numba import types, typing
def is_signature(sig):
"""
Return whether *sig* is a valid signature specification (for user-facing
APIs).
"""
return isinstance(sig, (str, tuple, typing.Signature))
def normalize_signature(sig):
"""
From *sig* (a signature specification), return a ``(args, return_type)``
tuple, where ``args`` itself is a tuple of types, and ``return_type``
can be None if not specified.
"""
if isinstance(sig, str):
return normalize_signature(parse_signature(sig))
elif isinstance(sig, tuple):
return sig, None
elif isinstance(sig, typing.Signature):
return sig.args, sig.return_type
else:
raise TypeError(type(sig))
def parse_signature(signature_str):
# Just eval signature_str using the types submodules as globals
return eval(signature_str, {}, types.__dict__)
|
Add docstrings and fix failures
|
Add docstrings and fix failures
|
Python
|
bsd-2-clause
|
pitrou/numba,GaZ3ll3/numba,pitrou/numba,gdementen/numba,ssarangi/numba,gmarkall/numba,stonebig/numba,stonebig/numba,seibert/numba,GaZ3ll3/numba,gmarkall/numba,stonebig/numba,IntelLabs/numba,seibert/numba,pombredanne/numba,numba/numba,seibert/numba,jriehl/numba,pitrou/numba,numba/numba,stefanseefeld/numba,IntelLabs/numba,pombredanne/numba,ssarangi/numba,stonebig/numba,sklam/numba,stefanseefeld/numba,seibert/numba,cpcloud/numba,stuartarchibald/numba,gdementen/numba,stonebig/numba,numba/numba,stuartarchibald/numba,numba/numba,stuartarchibald/numba,ssarangi/numba,GaZ3ll3/numba,stuartarchibald/numba,cpcloud/numba,gdementen/numba,sklam/numba,GaZ3ll3/numba,jriehl/numba,pitrou/numba,IntelLabs/numba,sklam/numba,sklam/numba,jriehl/numba,pombredanne/numba,gdementen/numba,GaZ3ll3/numba,pombredanne/numba,sklam/numba,pombredanne/numba,gmarkall/numba,ssarangi/numba,IntelLabs/numba,jriehl/numba,cpcloud/numba,stefanseefeld/numba,gmarkall/numba,pitrou/numba,IntelLabs/numba,gdementen/numba,gmarkall/numba,jriehl/numba,stefanseefeld/numba,seibert/numba,ssarangi/numba,cpcloud/numba,numba/numba,stuartarchibald/numba,stefanseefeld/numba,cpcloud/numba
|
from __future__ import print_function, division, absolute_import
from numba import types, typing
def is_signature(sig):
return isinstance(sig, (str, tuple))
def normalize_signature(sig):
if isinstance(sig, str):
return normalize_signature(parse_signature(sig))
elif isinstance(sig, tuple):
return sig, None
elif isinstance(sig, typing.Signature):
return sig.args, sig.return_type
else:
raise TypeError(type(sig))
def parse_signature(signature_str):
# Just eval signature_str using the types submodules as globals
return eval(signature_str, {}, types.__dict__)
Add docstrings and fix failures
|
from __future__ import print_function, division, absolute_import
from numba import types, typing
def is_signature(sig):
"""
Return whether *sig* is a valid signature specification (for user-facing
APIs).
"""
return isinstance(sig, (str, tuple, typing.Signature))
def normalize_signature(sig):
"""
From *sig* (a signature specification), return a ``(args, return_type)``
tuple, where ``args`` itself is a tuple of types, and ``return_type``
can be None if not specified.
"""
if isinstance(sig, str):
return normalize_signature(parse_signature(sig))
elif isinstance(sig, tuple):
return sig, None
elif isinstance(sig, typing.Signature):
return sig.args, sig.return_type
else:
raise TypeError(type(sig))
def parse_signature(signature_str):
# Just eval signature_str using the types submodules as globals
return eval(signature_str, {}, types.__dict__)
|
<commit_before>from __future__ import print_function, division, absolute_import
from numba import types, typing
def is_signature(sig):
return isinstance(sig, (str, tuple))
def normalize_signature(sig):
if isinstance(sig, str):
return normalize_signature(parse_signature(sig))
elif isinstance(sig, tuple):
return sig, None
elif isinstance(sig, typing.Signature):
return sig.args, sig.return_type
else:
raise TypeError(type(sig))
def parse_signature(signature_str):
# Just eval signature_str using the types submodules as globals
return eval(signature_str, {}, types.__dict__)
<commit_msg>Add docstrings and fix failures<commit_after>
|
from __future__ import print_function, division, absolute_import
from numba import types, typing
def is_signature(sig):
"""
Return whether *sig* is a valid signature specification (for user-facing
APIs).
"""
return isinstance(sig, (str, tuple, typing.Signature))
def normalize_signature(sig):
"""
From *sig* (a signature specification), return a ``(args, return_type)``
tuple, where ``args`` itself is a tuple of types, and ``return_type``
can be None if not specified.
"""
if isinstance(sig, str):
return normalize_signature(parse_signature(sig))
elif isinstance(sig, tuple):
return sig, None
elif isinstance(sig, typing.Signature):
return sig.args, sig.return_type
else:
raise TypeError(type(sig))
def parse_signature(signature_str):
# Just eval signature_str using the types submodules as globals
return eval(signature_str, {}, types.__dict__)
|
from __future__ import print_function, division, absolute_import
from numba import types, typing
def is_signature(sig):
return isinstance(sig, (str, tuple))
def normalize_signature(sig):
if isinstance(sig, str):
return normalize_signature(parse_signature(sig))
elif isinstance(sig, tuple):
return sig, None
elif isinstance(sig, typing.Signature):
return sig.args, sig.return_type
else:
raise TypeError(type(sig))
def parse_signature(signature_str):
# Just eval signature_str using the types submodules as globals
return eval(signature_str, {}, types.__dict__)
Add docstrings and fix failures
from __future__ import print_function, division, absolute_import
from numba import types, typing
def is_signature(sig):
"""
Return whether *sig* is a valid signature specification (for user-facing
APIs).
"""
return isinstance(sig, (str, tuple, typing.Signature))
def normalize_signature(sig):
"""
From *sig* (a signature specification), return a ``(args, return_type)``
tuple, where ``args`` itself is a tuple of types, and ``return_type``
can be None if not specified.
"""
if isinstance(sig, str):
return normalize_signature(parse_signature(sig))
elif isinstance(sig, tuple):
return sig, None
elif isinstance(sig, typing.Signature):
return sig.args, sig.return_type
else:
raise TypeError(type(sig))
def parse_signature(signature_str):
# Just eval signature_str using the types submodules as globals
return eval(signature_str, {}, types.__dict__)
|
<commit_before>from __future__ import print_function, division, absolute_import
from numba import types, typing
def is_signature(sig):
return isinstance(sig, (str, tuple))
def normalize_signature(sig):
if isinstance(sig, str):
return normalize_signature(parse_signature(sig))
elif isinstance(sig, tuple):
return sig, None
elif isinstance(sig, typing.Signature):
return sig.args, sig.return_type
else:
raise TypeError(type(sig))
def parse_signature(signature_str):
# Just eval signature_str using the types submodules as globals
return eval(signature_str, {}, types.__dict__)
<commit_msg>Add docstrings and fix failures<commit_after>from __future__ import print_function, division, absolute_import
from numba import types, typing
def is_signature(sig):
"""
Return whether *sig* is a valid signature specification (for user-facing
APIs).
"""
return isinstance(sig, (str, tuple, typing.Signature))
def normalize_signature(sig):
"""
From *sig* (a signature specification), return a ``(args, return_type)``
tuple, where ``args`` itself is a tuple of types, and ``return_type``
can be None if not specified.
"""
if isinstance(sig, str):
return normalize_signature(parse_signature(sig))
elif isinstance(sig, tuple):
return sig, None
elif isinstance(sig, typing.Signature):
return sig.args, sig.return_type
else:
raise TypeError(type(sig))
def parse_signature(signature_str):
# Just eval signature_str using the types submodules as globals
return eval(signature_str, {}, types.__dict__)
|
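parse_signature above leans on Python itself as the parser: the signature string is evaluated with the types module as its namespace, so a string like 'int32(int32, int32)' becomes a call on a type object that yields a Signature. A stub illustration of the trick, runnable without numba (Signature and the namespace here are stand-ins, not numba's real classes):

# Stub illustration of the eval-as-parser trick above; _Int32 and
# Signature are stand-ins, not numba's real API.
from collections import namedtuple

Signature = namedtuple('Signature', ['return_type', 'args'])

class _Int32:
    def __call__(self, *args):
        # calling a type object builds a signature: int32(int32, int32)
        return Signature(return_type=self, args=args)

namespace = {'int32': _Int32()}
sig = eval('int32(int32, int32)', {}, namespace)
assert sig.return_type is namespace['int32']
assert len(sig.args) == 2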
14cd040eb2be66bbdc62b788a4e3da1ef1297458
|
doc/transaction_example.py
|
doc/transaction_example.py
|
from txpostgres import txpostgres
from twisted.internet import reactor
from twisted.python import log, util
# connect to the database
conn = txpostgres.Connection()
d = conn.connect('dbname=postgres')
# define a callable that will execute inside a transaction
def interaction(cur):
# the parameter is a txpostgres Cursor
d = cur.execute('create table test(x integer)')
d.addCallback(lambda _: cur.execute('insert into test values (%s)', (1, )))
return d
# run the interaction, making sure that if the insert fails, the table won't be
# left behind created but empty
d.addCallback(lambda _: conn.runInteraction(interaction))
# close the connection, log any errors and stop the reactor
d.addCallback(lambda _: conn.close())
d.addErrback(log.err)
d.addBoth(lambda _: reactor.stop())
# start the reactor to kick off connection establishment
reactor.run()
|
from txpostgres import txpostgres
from twisted.internet import reactor
from twisted.python import log
# connect to the database
conn = txpostgres.Connection()
d = conn.connect('dbname=postgres')
# define a callable that will execute inside a transaction
def interaction(cur):
# the parameter is a txpostgres Cursor
d = cur.execute('create table test(x integer)')
d.addCallback(lambda _: cur.execute('insert into test values (%s)', (1, )))
return d
# run the interaction, making sure that if the insert fails, the table won't be
# left behind created but empty
d.addCallback(lambda _: conn.runInteraction(interaction))
# close the connection, log any errors and stop the reactor
d.addCallback(lambda _: conn.close())
d.addErrback(log.err)
d.addBoth(lambda _: reactor.stop())
# start the reactor to kick off connection establishment
reactor.run()
|
Remove unused import in the example.
|
Remove unused import in the example.
|
Python
|
mit
|
wulczer/txpostgres
|
from txpostgres import txpostgres
from twisted.internet import reactor
from twisted.python import log, util
# connect to the database
conn = txpostgres.Connection()
d = conn.connect('dbname=postgres')
# define a callable that will execute inside a transaction
def interaction(cur):
# the parameter is a txpostgres Cursor
d = cur.execute('create table test(x integer)')
d.addCallback(lambda _: cur.execute('insert into test values (%s)', (1, )))
return d
# run the interaction, making sure that if the insert fails, the table won't be
# left behind created but empty
d.addCallback(lambda _: conn.runInteraction(interaction))
# close the connection, log any errors and stop the reactor
d.addCallback(lambda _: conn.close())
d.addErrback(log.err)
d.addBoth(lambda _: reactor.stop())
# start the reactor to kick off connection establishment
reactor.run()
Remove unused import in the example.
|
from txpostgres import txpostgres
from twisted.internet import reactor
from twisted.python import log
# connect to the database
conn = txpostgres.Connection()
d = conn.connect('dbname=postgres')
# define a callable that will execute inside a transaction
def interaction(cur):
# the parameter is a txpostgres Cursor
d = cur.execute('create table test(x integer)')
d.addCallback(lambda _: cur.execute('insert into test values (%s)', (1, )))
return d
# run the interaction, making sure that if the insert fails, the table won't be
# left behind created but empty
d.addCallback(lambda _: conn.runInteraction(interaction))
# close the connection, log any errors and stop the reactor
d.addCallback(lambda _: conn.close())
d.addErrback(log.err)
d.addBoth(lambda _: reactor.stop())
# start the reactor to kick off connection establishment
reactor.run()
|
<commit_before>from txpostgres import txpostgres
from twisted.internet import reactor
from twisted.python import log, util
# connect to the database
conn = txpostgres.Connection()
d = conn.connect('dbname=postgres')
# define a callable that will execute inside a transaction
def interaction(cur):
# the parameter is a txpostgres Cursor
d = cur.execute('create table test(x integer)')
d.addCallback(lambda _: cur.execute('insert into test values (%s)', (1, )))
return d
# run the interaction, making sure that if the insert fails, the table won't be
# left behind created but empty
d.addCallback(lambda _: conn.runInteraction(interaction))
# close the connection, log any errors and stop the reactor
d.addCallback(lambda _: conn.close())
d.addErrback(log.err)
d.addBoth(lambda _: reactor.stop())
# start the reactor to kick off connection establishment
reactor.run()
<commit_msg>Remove unused import in the example.<commit_after>
|
from txpostgres import txpostgres
from twisted.internet import reactor
from twisted.python import log
# connect to the database
conn = txpostgres.Connection()
d = conn.connect('dbname=postgres')
# define a callable that will execute inside a transaction
def interaction(cur):
# the parameter is a txpostgres Cursor
d = cur.execute('create table test(x integer)')
d.addCallback(lambda _: cur.execute('insert into test values (%s)', (1, )))
return d
# run the interaction, making sure that if the insert fails, the table won't be
# left behind created but empty
d.addCallback(lambda _: conn.runInteraction(interaction))
# close the connection, log any errors and stop the reactor
d.addCallback(lambda _: conn.close())
d.addErrback(log.err)
d.addBoth(lambda _: reactor.stop())
# start the reactor to kick off connection establishment
reactor.run()
|
from txpostgres import txpostgres
from twisted.internet import reactor
from twisted.python import log, util
# connect to the database
conn = txpostgres.Connection()
d = conn.connect('dbname=postgres')
# define a callable that will execute inside a transaction
def interaction(cur):
# the parameter is a txpostgres Cursor
d = cur.execute('create table test(x integer)')
d.addCallback(lambda _: cur.execute('insert into test values (%s)', (1, )))
return d
# run the interaction, making sure that if the insert fails, the table won't be
# left behind created but empty
d.addCallback(lambda _: conn.runInteraction(interaction))
# close the connection, log any errors and stop the reactor
d.addCallback(lambda _: conn.close())
d.addErrback(log.err)
d.addBoth(lambda _: reactor.stop())
# start the reactor to kick off connection establishment
reactor.run()
Remove unused import in the example.
from txpostgres import txpostgres
from twisted.internet import reactor
from twisted.python import log
# connect to the database
conn = txpostgres.Connection()
d = conn.connect('dbname=postgres')
# define a callable that will execute inside a transaction
def interaction(cur):
# the parameter is a txpostgres Cursor
d = cur.execute('create table test(x integer)')
d.addCallback(lambda _: cur.execute('insert into test values (%s)', (1, )))
return d
# run the interaction, making sure that if the insert fails, the table won't be
# left behind created but empty
d.addCallback(lambda _: conn.runInteraction(interaction))
# close the connection, log any errors and stop the reactor
d.addCallback(lambda _: conn.close())
d.addErrback(log.err)
d.addBoth(lambda _: reactor.stop())
# start the reactor to kick off connection establishment
reactor.run()
|
<commit_before>from txpostgres import txpostgres
from twisted.internet import reactor
from twisted.python import log, util
# connect to the database
conn = txpostgres.Connection()
d = conn.connect('dbname=postgres')
# define a callable that will execute inside a transaction
def interaction(cur):
# the parameter is a txpostgres Cursor
d = cur.execute('create table test(x integer)')
d.addCallback(lambda _: cur.execute('insert into test values (%s)', (1, )))
return d
# run the interaction, making sure that if the insert fails, the table won't be
# left behind created but empty
d.addCallback(lambda _: conn.runInteraction(interaction))
# close the connection, log any errors and stop the reactor
d.addCallback(lambda _: conn.close())
d.addErrback(log.err)
d.addBoth(lambda _: reactor.stop())
# start the reactor to kick off connection establishment
reactor.run()
<commit_msg>Remove unused import in the example.<commit_after>from txpostgres import txpostgres
from twisted.internet import reactor
from twisted.python import log
# connect to the database
conn = txpostgres.Connection()
d = conn.connect('dbname=postgres')
# define a callable that will execute inside a transaction
def interaction(cur):
# the parameter is a txpostgres Cursor
d = cur.execute('create table test(x integer)')
d.addCallback(lambda _: cur.execute('insert into test values (%s)', (1, )))
return d
# run the interaction, making sure that if the insert fails, the table won't be
# left behind created but empty
d.addCallback(lambda _: conn.runInteraction(interaction))
# close the connection, log any errors and stop the reactor
d.addCallback(lambda _: conn.close())
d.addErrback(log.err)
d.addBoth(lambda _: reactor.stop())
# start the reactor to kick off connection establishment
reactor.run()
|
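The comment above about the table not being "left behind created but empty" is the point of runInteraction: the CREATE and the INSERT share one transaction, so a failed insert also rolls back the table creation. The same all-or-nothing property can be sketched synchronously with stdlib sqlite3 (an illustration of the property only, not the txpostgres API):

# Synchronous sqlite3 sketch of the all-or-nothing behavior that
# runInteraction provides above; illustration only, not txpostgres.
import sqlite3

conn = sqlite3.connect(':memory:')
conn.isolation_level = None                        # manual transaction control
cur = conn.cursor()
try:
    cur.execute('begin')
    cur.execute('create table test(x integer)')
    cur.execute('insert into missing values (1)')  # deliberate failure
    cur.execute('commit')
except sqlite3.OperationalError:
    cur.execute('rollback')

# SQLite DDL is transactional, so the rollback removed the table as well.
rows = conn.execute(
    "select name from sqlite_master where name = 'test'").fetchall()
assert rows == []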
ff28ca5797c4468dbe58d78d55b5df6b8878ac36
|
test_pep438.py
|
test_pep438.py
|
#!/usr/bin/env python
import unittest
import pep438
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
import unittest
import sys
from io import StringIO
from clint.textui import core
import pep438
class patch_io(object):
streams = ('stdout', 'stdin', 'stderr')
def __init__(self):
for stream in self.streams:
setattr(self, stream, StringIO())
setattr(self, 'real_%s' % stream, getattr(sys, stream))
self.real_STDOUT = core.STDOUT
self.real_STDERR = core.STDERR
def __enter__(self):
for stream in self.streams:
setattr(sys, stream, getattr(self, stream))
core.STDOUT = self.stdout.write
core.STDERR = self.stderr.write
return self
def __exit__(self, exc_type, exc_value, traceback):
for stream in self.streams:
getattr(sys, stream).close()
setattr(sys, stream, getattr(self, 'real_%s' % stream))
core.STDOUT = self.real_STDOUT
core.STDERR = self.real_STDERR
class CommandLineTests(unittest.TestCase):
def test_version(self):
for args in (['pep438', '-v'], ['pep438', '--version']):
with patch_io() as new:
sys.argv = args
self.assertRaises(SystemExit, pep438.main)
self.assertEqual(new.stdout.getvalue(), "0.1.0\n")
if __name__ == '__main__':
unittest.main()
|
Add a basic command line test
|
Add a basic command line test
|
Python
|
mit
|
treyhunner/pep438
|
#!/usr/bin/env python
import unittest
import pep438
if __name__ == '__main__':
unittest.main()
Add a basic command line test
|
#!/usr/bin/env python
import unittest
import sys
from io import StringIO
from clint.textui import core
import pep438
class patch_io(object):
streams = ('stdout', 'stdin', 'stderr')
def __init__(self):
for stream in self.streams:
setattr(self, stream, StringIO())
setattr(self, 'real_%s' % stream, getattr(sys, stream))
self.real_STDOUT = core.STDOUT
self.real_STDERR = core.STDERR
def __enter__(self):
for stream in self.streams:
setattr(sys, stream, getattr(self, stream))
core.STDOUT = self.stdout.write
core.STDERR = self.stderr.write
return self
def __exit__(self, exc_type, exc_value, traceback):
for stream in self.streams:
getattr(sys, stream).close()
setattr(sys, stream, getattr(self, 'real_%s' % stream))
core.STDOUT = self.real_STDOUT
core.STDERR = self.real_STDERR
class CommandLineTests(unittest.TestCase):
def test_version(self):
for args in (['pep438', '-v'], ['pep438', '--version']):
with patch_io() as new:
sys.argv = args
self.assertRaises(SystemExit, pep438.main)
self.assertEqual(new.stdout.getvalue(), "0.1.0\n")
if __name__ == '__main__':
unittest.main()
|
<commit_before>#!/usr/bin/env python
import unittest
import pep438
if __name__ == '__main__':
unittest.main()
<commit_msg>Add a basic command line test<commit_after>
|
#!/usr/bin/env python
import unittest
import sys
from io import StringIO
from clint.textui import core
import pep438
class patch_io(object):
streams = ('stdout', 'stdin', 'stderr')
def __init__(self):
for stream in self.streams:
setattr(self, stream, StringIO())
setattr(self, 'real_%s' % stream, getattr(sys, stream))
self.real_STDOUT = core.STDOUT
self.real_STDERR = core.STDERR
def __enter__(self):
for stream in self.streams:
setattr(sys, stream, getattr(self, stream))
core.STDOUT = self.stdout.write
core.STDERR = self.stderr.write
return self
def __exit__(self, exc_type, exc_value, traceback):
for stream in self.streams:
getattr(sys, stream).close()
setattr(sys, stream, getattr(self, 'real_%s' % stream))
core.STDOUT = self.real_STDOUT
core.STDERR = self.real_STDERR
class CommandLineTests(unittest.TestCase):
def test_version(self):
for args in (['pep438', '-v'], ['pep438', '--version']):
with patch_io() as new:
sys.argv = args
self.assertRaises(SystemExit, pep438.main)
self.assertEqual(new.stdout.getvalue(), "0.1.0\n")
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
import unittest
import pep438
if __name__ == '__main__':
unittest.main()
Add a basic command line test
#!/usr/bin/env python
import unittest
import sys
from io import StringIO
from clint.textui import core
import pep438
class patch_io(object):
streams = ('stdout', 'stdin', 'stderr')
def __init__(self):
for stream in self.streams:
setattr(self, stream, StringIO())
setattr(self, 'real_%s' % stream, getattr(sys, stream))
self.real_STDOUT = core.STDOUT
self.real_STDERR = core.STDERR
def __enter__(self):
for stream in self.streams:
setattr(sys, stream, getattr(self, stream))
core.STDOUT = self.stdout.write
core.STDERR = self.stderr.write
return self
def __exit__(self, exc_type, exc_value, traceback):
for stream in self.streams:
getattr(sys, stream).close()
setattr(sys, stream, getattr(self, 'real_%s' % stream))
core.STDOUT = self.real_STDOUT
core.STDERR = self.real_STDERR
class CommandLineTests(unittest.TestCase):
def test_version(self):
for args in (['pep438', '-v'], ['pep438', '--version']):
with patch_io() as new:
sys.argv = args
self.assertRaises(SystemExit, pep438.main)
self.assertEqual(new.stdout.getvalue(), "0.1.0\n")
if __name__ == '__main__':
unittest.main()
|
<commit_before>#!/usr/bin/env python
import unittest
import pep438
if __name__ == '__main__':
unittest.main()
<commit_msg>Add a basic command line test<commit_after>#!/usr/bin/env python
import unittest
import sys
from io import StringIO
from clint.textui import core
import pep438
class patch_io(object):
streams = ('stdout', 'stdin', 'stderr')
def __init__(self):
for stream in self.streams:
setattr(self, stream, StringIO())
setattr(self, 'real_%s' % stream, getattr(sys, stream))
self.real_STDOUT = core.STDOUT
self.real_STDERR = core.STDERR
def __enter__(self):
for stream in self.streams:
setattr(sys, stream, getattr(self, stream))
core.STDOUT = self.stdout.write
core.STDERR = self.stderr.write
return self
def __exit__(self, exc_type, exc_value, traceback):
for stream in self.streams:
getattr(sys, stream).close()
setattr(sys, stream, getattr(self, 'real_%s' % stream))
core.STDOUT = self.real_STDOUT
core.STDERR = self.real_STDERR
class CommandLineTests(unittest.TestCase):
def test_version(self):
for args in (['pep438', '-v'], ['pep438', '--version']):
with patch_io() as new:
sys.argv = args
self.assertRaises(SystemExit, pep438.main)
self.assertEqual(new.stdout.getvalue(), "0.1.0\n")
if __name__ == '__main__':
unittest.main()
|
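The hand-rolled patch_io context manager above predates convenient stdlib helpers; for the stdout half alone, contextlib.redirect_stdout covers the same capture. A sketch of that pattern (not a drop-in replacement, since patch_io also swaps stdin, stderr and clint's stream hooks):

# Stdlib sketch of the stdout-capture half of patch_io; illustration only.
import contextlib
import io

buf = io.StringIO()
with contextlib.redirect_stdout(buf):
    print("0.1.0")

assert buf.getvalue() == "0.1.0\n"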
a026e6c82cb6391fe7f04c85fb1c09f89fefc7de
|
frigg/deployments/serializers.py
|
frigg/deployments/serializers.py
|
from rest_framework import serializers
from .models import PRDeployment
class PRDeploymentSerializer(serializers.ModelSerializer):
class Meta:
model = PRDeployment
fields = (
'id',
'image',
'log',
'port',
'succeeded',
'start_time',
'ttl',
'is_pending',
'is_alive'
)
|
from rest_framework import serializers
from .models import PRDeployment
class PRDeploymentSerializer(serializers.ModelSerializer):
class Meta:
model = PRDeployment
fields = (
'id',
'image',
'tasks',
'port',
'succeeded',
'start_time',
'ttl',
'is_pending',
'is_alive'
)
|
Add tasks and remove log from deployment serializer
|
Add tasks and remove log from deployment serializer
|
Python
|
mit
|
frigg/frigg-hq,frigg/frigg-hq,frigg/frigg-hq
|
from rest_framework import serializers
from .models import PRDeployment
class PRDeploymentSerializer(serializers.ModelSerializer):
class Meta:
model = PRDeployment
fields = (
'id',
'image',
'log',
'port',
'succeeded',
'start_time',
'ttl',
'is_pending',
'is_alive'
)
Add tasks and remove log from deployment serializer
|
from rest_framework import serializers
from .models import PRDeployment
class PRDeploymentSerializer(serializers.ModelSerializer):
class Meta:
model = PRDeployment
fields = (
'id',
'image',
'tasks',
'port',
'succeeded',
'start_time',
'ttl',
'is_pending',
'is_alive'
)
|
<commit_before>from rest_framework import serializers
from .models import PRDeployment
class PRDeploymentSerializer(serializers.ModelSerializer):
class Meta:
model = PRDeployment
fields = (
'id',
'image',
'log',
'port',
'succeeded',
'start_time',
'ttl',
'is_pending',
'is_alive'
)
<commit_msg>Add tasks and remove log from deployment serializer<commit_after>
|
from rest_framework import serializers
from .models import PRDeployment
class PRDeploymentSerializer(serializers.ModelSerializer):
class Meta:
model = PRDeployment
fields = (
'id',
'image',
'tasks',
'port',
'succeeded',
'start_time',
'ttl',
'is_pending',
'is_alive'
)
|
from rest_framework import serializers
from .models import PRDeployment
class PRDeploymentSerializer(serializers.ModelSerializer):
class Meta:
model = PRDeployment
fields = (
'id',
'image',
'log',
'port',
'succeeded',
'start_time',
'ttl',
'is_pending',
'is_alive'
)
Add tasks and remove log from deployment serializer
from rest_framework import serializers
from .models import PRDeployment
class PRDeploymentSerializer(serializers.ModelSerializer):
class Meta:
model = PRDeployment
fields = (
'id',
'image',
'tasks',
'port',
'succeeded',
'start_time',
'ttl',
'is_pending',
'is_alive'
)
|
<commit_before>from rest_framework import serializers
from .models import PRDeployment
class PRDeploymentSerializer(serializers.ModelSerializer):
class Meta:
model = PRDeployment
fields = (
'id',
'image',
'log',
'port',
'succeeded',
'start_time',
'ttl',
'is_pending',
'is_alive'
)
<commit_msg>Add tasks and remove log from deployment serializer<commit_after>from rest_framework import serializers
from .models import PRDeployment
class PRDeploymentSerializer(serializers.ModelSerializer):
class Meta:
model = PRDeployment
fields = (
'id',
'image',
'tasks',
'port',
'succeeded',
'start_time',
'ttl',
'is_pending',
'is_alive'
)
|
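For context, a hedged usage sketch of the serializer above: DRF's ModelSerializer maps concrete model fields (port, ttl, ...) automatically, and names such as is_pending and is_alive, if they are properties on PRDeployment, are exposed as read-only fields. The import paths and query below are purely illustrative assumptions.

from rest_framework.renderers import JSONRenderer

from .models import PRDeployment                     # as in the record
from .serializers import PRDeploymentSerializer     # the class above

deployment = PRDeployment.objects.first()  # assumes at least one row exists
serializer = PRDeploymentSerializer(deployment)
# serializer.data is a dict keyed by Meta.fields; render it to JSON bytes.
payload = JSONRenderer().render(serializer.data)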
b35e8fa3cde243aa444aa056d60f7a37b61e825b
|
tests/commands/test_usage.py
|
tests/commands/test_usage.py
|
import os
import subprocess
import pytest
def test_list_subcommands_has_all_scripts():
"""Tests if the output from running `fontbakery --list-subcommands` matches
the fontbakery scripts within the bin folder."""
import fontbakery.commands
commands_dir = os.path.dirname(fontbakery.commands.__file__)
scripts = [
f.rstrip(".py").replace("_", "-") for f in os.listdir(commands_dir)
if (f.endswith(".py") and not f.startswith('_'))
]
subcommands = subprocess.check_output(['fontbakery',
'--list-subcommands']).split()
assert sorted(scripts) == sorted(subcommands)
def test_command_check_googlefonts():
"""Test if `fontbakery check-googlefonts` can run successfully`."""
subprocess.check_output(["fontbakery", "check-googlefonts", "-h"])
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["fontbakery", "check-googlefonts"])
|
import os
import subprocess
import pytest
def test_list_subcommands_has_all_scripts():
"""Tests if the output from running `fontbakery --list-subcommands` matches
the fontbakery scripts within the bin folder."""
import fontbakery.commands
commands_dir = os.path.dirname(fontbakery.commands.__file__)
scripts = [
f.rstrip(".py").replace("_", "-")
for f in os.listdir(commands_dir)
if (f.endswith(".py") and not f.startswith('_'))
]
subcommands = subprocess.check_output(['fontbakery',
'--list-subcommands']).split()
assert sorted(scripts) == sorted(subcommands)
def test_command_check_googlefonts():
"""Test if `fontbakery check-googlefonts` can run successfully`."""
subprocess.check_output(["fontbakery", "check-googlefonts", "-h"])
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["fontbakery", "check-googlefonts"])
def test_command_check_specification():
"""Test if `fontbakery check-specification` can run successfully`."""
subprocess.check_output(["fontbakery", "check-specification", "-h"])
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["fontbakery", "check-specification"])
def test_command_check_ufo_sources():
"""Test if `fontbakery check-ufo-sources` can run successfully`."""
subprocess.check_output(["fontbakery", "check-ufo-sources", "-h"])
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["fontbakery", "check-ufo-sources"])
|
Add usage tests for check-ufo-sources and check-specification
|
Add usage tests for check-ufo-sources and check-specification
|
Python
|
apache-2.0
|
googlefonts/fontbakery,googlefonts/fontbakery,graphicore/fontbakery,graphicore/fontbakery,moyogo/fontbakery,graphicore/fontbakery,moyogo/fontbakery,moyogo/fontbakery,googlefonts/fontbakery
|
import os
import subprocess
import pytest
def test_list_subcommands_has_all_scripts():
"""Tests if the output from running `fontbakery --list-subcommands` matches
the fontbakery scripts within the bin folder."""
import fontbakery.commands
commands_dir = os.path.dirname(fontbakery.commands.__file__)
scripts = [
f.rstrip(".py").replace("_", "-") for f in os.listdir(commands_dir)
if (f.endswith(".py") and not f.startswith('_'))
]
subcommands = subprocess.check_output(['fontbakery',
'--list-subcommands']).split()
assert sorted(scripts) == sorted(subcommands)
def test_command_check_googlefonts():
"""Test if `fontbakery check-googlefonts` can run successfully`."""
subprocess.check_output(["fontbakery", "check-googlefonts", "-h"])
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["fontbakery", "check-googlefonts"])
Add usage tests for check-ufo-sources and check-specification
|
import os
import subprocess
import pytest
def test_list_subcommands_has_all_scripts():
"""Tests if the output from running `fontbakery --list-subcommands` matches
the fontbakery scripts within the bin folder."""
import fontbakery.commands
commands_dir = os.path.dirname(fontbakery.commands.__file__)
scripts = [
f.rstrip(".py").replace("_", "-")
for f in os.listdir(commands_dir)
if (f.endswith(".py") and not f.startswith('_'))
]
subcommands = subprocess.check_output(['fontbakery',
'--list-subcommands']).split()
assert sorted(scripts) == sorted(subcommands)
def test_command_check_googlefonts():
"""Test if `fontbakery check-googlefonts` can run successfully`."""
subprocess.check_output(["fontbakery", "check-googlefonts", "-h"])
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["fontbakery", "check-googlefonts"])
def test_command_check_specification():
"""Test if `fontbakery check-specification` can run successfully`."""
subprocess.check_output(["fontbakery", "check-specification", "-h"])
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["fontbakery", "check-specification"])
def test_command_check_ufo_sources():
"""Test if `fontbakery check-ufo-sources` can run successfully`."""
subprocess.check_output(["fontbakery", "check-ufo-sources", "-h"])
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["fontbakery", "check-ufo-sources"])
|
<commit_before>import os
import subprocess
import pytest
def test_list_subcommands_has_all_scripts():
"""Tests if the output from running `fontbakery --list-subcommands` matches
the fontbakery scripts within the bin folder."""
import fontbakery.commands
commands_dir = os.path.dirname(fontbakery.commands.__file__)
scripts = [
f.rstrip(".py").replace("_", "-") for f in os.listdir(commands_dir)
if (f.endswith(".py") and not f.startswith('_'))
]
subcommands = subprocess.check_output(['fontbakery',
'--list-subcommands']).split()
assert sorted(scripts) == sorted(subcommands)
def test_command_check_googlefonts():
"""Test if `fontbakery check-googlefonts` can run successfully`."""
subprocess.check_output(["fontbakery", "check-googlefonts", "-h"])
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["fontbakery", "check-googlefonts"])
<commit_msg>Add usage tests for check-ufo-sources and check-specification<commit_after>
|
import os
import subprocess
import pytest
def test_list_subcommands_has_all_scripts():
"""Tests if the output from running `fontbakery --list-subcommands` matches
the fontbakery scripts within the bin folder."""
import fontbakery.commands
commands_dir = os.path.dirname(fontbakery.commands.__file__)
scripts = [
f.rstrip(".py").replace("_", "-")
for f in os.listdir(commands_dir)
if (f.endswith(".py") and not f.startswith('_'))
]
subcommands = subprocess.check_output(['fontbakery',
'--list-subcommands']).split()
assert sorted(scripts) == sorted(subcommands)
def test_command_check_googlefonts():
"""Test if `fontbakery check-googlefonts` can run successfully`."""
subprocess.check_output(["fontbakery", "check-googlefonts", "-h"])
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["fontbakery", "check-googlefonts"])
def test_command_check_specification():
"""Test if `fontbakery check-specification` can run successfully`."""
subprocess.check_output(["fontbakery", "check-specification", "-h"])
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["fontbakery", "check-specification"])
def test_command_check_ufo_sources():
"""Test if `fontbakery check-ufo-sources` can run successfully`."""
subprocess.check_output(["fontbakery", "check-ufo-sources", "-h"])
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["fontbakery", "check-ufo-sources"])
|
import os
import subprocess
import pytest
def test_list_subcommands_has_all_scripts():
"""Tests if the output from running `fontbakery --list-subcommands` matches
the fontbakery scripts within the bin folder."""
import fontbakery.commands
commands_dir = os.path.dirname(fontbakery.commands.__file__)
scripts = [
f.rstrip(".py").replace("_", "-") for f in os.listdir(commands_dir)
if (f.endswith(".py") and not f.startswith('_'))
]
subcommands = subprocess.check_output(['fontbakery',
'--list-subcommands']).split()
assert sorted(scripts) == sorted(subcommands)
def test_command_check_googlefonts():
"""Test if `fontbakery check-googlefonts` can run successfully`."""
subprocess.check_output(["fontbakery", "check-googlefonts", "-h"])
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["fontbakery", "check-googlefonts"])
Add usage tests for check-ufo-sources and check-specification
import os
import subprocess
import pytest
def test_list_subcommands_has_all_scripts():
"""Tests if the output from running `fontbakery --list-subcommands` matches
the fontbakery scripts within the bin folder."""
import fontbakery.commands
commands_dir = os.path.dirname(fontbakery.commands.__file__)
scripts = [
f.rstrip(".py").replace("_", "-")
for f in os.listdir(commands_dir)
if (f.endswith(".py") and not f.startswith('_'))
]
subcommands = subprocess.check_output(['fontbakery',
'--list-subcommands']).split()
assert sorted(scripts) == sorted(subcommands)
def test_command_check_googlefonts():
"""Test if `fontbakery check-googlefonts` can run successfully`."""
subprocess.check_output(["fontbakery", "check-googlefonts", "-h"])
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["fontbakery", "check-googlefonts"])
def test_command_check_specification():
"""Test if `fontbakery check-specification` can run successfully`."""
subprocess.check_output(["fontbakery", "check-specification", "-h"])
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["fontbakery", "check-specification"])
def test_command_check_ufo_sources():
"""Test if `fontbakery check-ufo-sources` can run successfully`."""
subprocess.check_output(["fontbakery", "check-ufo-sources", "-h"])
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["fontbakery", "check-ufo-sources"])
|
<commit_before>import os
import subprocess
import pytest
def test_list_subcommands_has_all_scripts():
"""Tests if the output from running `fontbakery --list-subcommands` matches
the fontbakery scripts within the bin folder."""
import fontbakery.commands
commands_dir = os.path.dirname(fontbakery.commands.__file__)
scripts = [
f.rstrip(".py").replace("_", "-") for f in os.listdir(commands_dir)
if (f.endswith(".py") and not f.startswith('_'))
]
subcommands = subprocess.check_output(['fontbakery',
'--list-subcommands']).split()
assert sorted(scripts) == sorted(subcommands)
def test_command_check_googlefonts():
"""Test if `fontbakery check-googlefonts` can run successfully`."""
subprocess.check_output(["fontbakery", "check-googlefonts", "-h"])
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["fontbakery", "check-googlefonts"])
<commit_msg>Add usage tests for check-ufo-sources and check-specification<commit_after>import os
import subprocess
import pytest
def test_list_subcommands_has_all_scripts():
"""Tests if the output from running `fontbakery --list-subcommands` matches
the fontbakery scripts within the bin folder."""
import fontbakery.commands
commands_dir = os.path.dirname(fontbakery.commands.__file__)
scripts = [
f.rstrip(".py").replace("_", "-")
for f in os.listdir(commands_dir)
if (f.endswith(".py") and not f.startswith('_'))
]
subcommands = subprocess.check_output(['fontbakery',
'--list-subcommands']).split()
assert sorted(scripts) == sorted(subcommands)
def test_command_check_googlefonts():
"""Test if `fontbakery check-googlefonts` can run successfully`."""
subprocess.check_output(["fontbakery", "check-googlefonts", "-h"])
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["fontbakery", "check-googlefonts"])
def test_command_check_specification():
"""Test if `fontbakery check-specification` can run successfully`."""
subprocess.check_output(["fontbakery", "check-specification", "-h"])
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["fontbakery", "check-specification"])
def test_command_check_ufo_sources():
"""Test if `fontbakery check-ufo-sources` can run successfully`."""
subprocess.check_output(["fontbakery", "check-ufo-sources", "-h"])
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_output(["fontbakery", "check-ufo-sources"])
|
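Two side notes on the record above. First, f.rstrip(".py") strips trailing characters drawn from the set {'.', 'p', 'y'} rather than the literal suffix; that happens to be safe for these module names but would mangle a hypothetical happy.py into "ha". Second, the three subcommand tests repeat one pattern and could be collapsed with pytest parametrization; a sketch, with the subcommand names taken from the tests above:

import subprocess

import pytest


@pytest.mark.parametrize('subcommand', [
    'check-googlefonts',
    'check-specification',
    'check-ufo-sources',
])
def test_subcommand_usage(subcommand):
    # '-h' must print usage and exit 0 ...
    subprocess.check_output(['fontbakery', subcommand, '-h'])
    # ... while running with no font arguments must exit non-zero.
    with pytest.raises(subprocess.CalledProcessError):
        subprocess.check_output(['fontbakery', subcommand])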
ab78faab8e3536c49f829ccbd71540a93485a7cb
|
website/jdevents/models.py
|
website/jdevents/models.py
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from mezzanine.core.models import Displayable, RichText
class RepeatType(models.Model):
DAILY = 'daily'
WEEKLY = 'weekly',
MONTHLY = 'monthly'
REPEAT_CHOICES = (
(DAILY, _('Daily')),
(WEEKLY, _('Weekly')),
(MONTHLY, _('Monthly'))
)
repeat_type = models.CharField(max_length=10, choices=REPEAT_CHOICES)
class Event(Displayable, RichText):
"""
Main object for each event.
Derives from Displayable, which by default
- it is related to a certain Site object
- it has a title and a slug
- it has SEO metadata
- it gets automated timestamps when the object is updated
Besides that, it derives from RichText, which provides a WYSIWYG field.
"""
class Occurence(models.Model):
"""
Represents an occurence of an event. Can be automatically repeated
"""
start = models.DateTimeField()
end = models.DateTimeField()
repeat = models.ForeignKey(RepeatType, default=None, blank=True)
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from mezzanine.core.models import Displayable, RichText
class Event(Displayable, RichText):
"""
Main object for each event.
Derives from Displayable, which by default
- it is related to a certain Site object
- it has a title and a slug
- it has SEO metadata
- it gets automated timestamps when the object is updated
Besides that, it derives from RichText, which provides a WYSIWYG field.
"""
class Occurence(models.Model):
"""
Represents an occurence of an event. Can be automatically repeated
"""
event = models.ForeignKey(Event)
start = models.DateTimeField()
end = models.DateTimeField()
|
Delete database support for repeated events.
|
Delete database support for repeated events.
Events already support multiple occurences, and we will create an
API to automatically add occurences in a repeating manner (every week,
every month).
Including event repeat data in database would make things a lot more
complex.
|
Python
|
mit
|
jonge-democraten/website,jonge-democraten/website,jonge-democraten/website,jonge-democraten/website
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from mezzanine.core.models import Displayable, RichText
class RepeatType(models.Model):
DAILY = 'daily'
WEEKLY = 'weekly',
MONTHLY = 'monthly'
REPEAT_CHOICES = (
(DAILY, _('Daily')),
(WEEKLY, _('Weekly')),
(MONTHLY, _('Monthly'))
)
repeat_type = models.CharField(max_length=10, choices=REPEAT_CHOICES)
class Event(Displayable, RichText):
"""
Main object for each event.
Derives from Displayable, which by default
- it is related to a certain Site object
- it has a title and a slug
- it has SEO metadata
- it gets automated timestamps when the object is updated
Besides that, it derives from RichText, which provides a WYSIWYG field.
"""
class Occurence(models.Model):
"""
Represents an occurence of an event. Can be automatically repeated
"""
start = models.DateTimeField()
end = models.DateTimeField()
repeat = models.ForeignKey(RepeatType, default=None, blank=True)
Delete database support for repeated events.
Events already support multiple occurences, and we will create an
API to automatically add occurences in a repeating manner (every week,
every month).
Including event repeat data in database would make things a lot more
complex.
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from mezzanine.core.models import Displayable, RichText
class Event(Displayable, RichText):
"""
Main object for each event.
Derives from Displayable, which by default
- it is related to a certain Site object
- it has a title and a slug
- it has SEO metadata
- it gets automated timestamps when the object is updated
Besides that, it derives from RichText, which provides a WYSIWYG field.
"""
class Occurence(models.Model):
"""
Represents an occurence of an event. Can be automatically repeated
"""
event = models.ForeignKey(Event)
start = models.DateTimeField()
end = models.DateTimeField()
|
<commit_before>from django.db import models
from django.utils.translation import ugettext_lazy as _
from mezzanine.core.models import Displayable, RichText
class RepeatType(models.Model):
DAILY = 'daily'
WEEKLY = 'weekly',
MONTHLY = 'monthly'
REPEAT_CHOICES = (
(DAILY, _('Daily')),
(WEEKLY, _('Weekly')),
(MONTHLY, _('Monthly'))
)
repeat_type = models.CharField(max_length=10, choices=REPEAT_CHOICES)
class Event(Displayable, RichText):
"""
Main object for each event.
Derives from Displayable, which by default
- it is related to a certain Site object
- it has a title and a slug
- it has SEO metadata
- it gets automated timestamps when the object is updated
Besides that, it derives from RichText, which provides a WYSIWYG field.
"""
class Occurence(models.Model):
"""
Represents an occurence of an event. Can be automatically repeated
"""
start = models.DateTimeField()
end = models.DateTimeField()
repeat = models.ForeignKey(RepeatType, default=None, blank=True)
<commit_msg>Delete database support for repeated events.
Events already support multiple occurences, and we will create an
API to automatically add occurences in a repeating manner (every week,
every month).
Including event repeat data in database would make things a lot more
complex.<commit_after>
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from mezzanine.core.models import Displayable, RichText
class Event(Displayable, RichText):
"""
Main object for each event.
Derives from Displayable, which by default
- it is related to a certain Site object
- it has a title and a slug
- it has SEO metadata
- it gets automated timestamps when the object is updated
Besides that, it derives from RichText, which provides a WYSIWYG field.
"""
class Occurence(models.Model):
"""
Represents an occurence of an event. Can be automatically repeated
"""
event = models.ForeignKey(Event)
start = models.DateTimeField()
end = models.DateTimeField()
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from mezzanine.core.models import Displayable, RichText
class RepeatType(models.Model):
DAILY = 'daily'
WEEKLY = 'weekly',
MONTHLY = 'monthly'
REPEAT_CHOICES = (
(DAILY, _('Daily')),
(WEEKLY, _('Weekly')),
(MONTHLY, _('Monthly'))
)
repeat_type = models.CharField(max_length=10, choices=REPEAT_CHOICES)
class Event(Displayable, RichText):
"""
Main object for each event.
Derives from Displayable, which by default
- it is related to a certain Site object
- it has a title and a slug
- it has SEO metadata
- it gets automated timestamps when the object is updated
Besides that, it derives from RichText, which provides a WYSIWYG field.
"""
class Occurence(models.Model):
"""
Represents an occurence of an event. Can be automatically repeated
"""
start = models.DateTimeField()
end = models.DateTimeField()
repeat = models.ForeignKey(RepeatType, default=None, blank=True)
Delete database support for repeated events.
Events already support multiple occurences, and we will create an
API to automatically add occurences in a repeating manner (every week,
every month).
Including event repeat data in database would make things a lot more
complex.
from django.db import models
from django.utils.translation import ugettext_lazy as _
from mezzanine.core.models import Displayable, RichText
class Event(Displayable, RichText):
"""
Main object for each event.
Derives from Displayable, which by default
- it is related to a certain Site object
- it has a title and a slug
- it has SEO metadata
- it gets automated timestamps when the object is updated
Besides that, it derives from RichText, which provides a WYSIWYG field.
"""
class Occurence(models.Model):
"""
Represents an occurence of an event. Can be automatically repeated
"""
event = models.ForeignKey(Event)
start = models.DateTimeField()
end = models.DateTimeField()
|
<commit_before>from django.db import models
from django.utils.translation import ugettext_lazy as _
from mezzanine.core.models import Displayable, RichText
class RepeatType(models.Model):
DAILY = 'daily'
WEEKLY = 'weekly',
MONTHLY = 'monthly'
REPEAT_CHOICES = (
(DAILY, _('Daily')),
(WEEKLY, _('Weekly')),
(MONTHLY, _('Monthly'))
)
repeat_type = models.CharField(max_length=10, choices=REPEAT_CHOICES)
class Event(Displayable, RichText):
"""
Main object for each event.
Derives from Displayable, which by default
- it is related to a certain Site object
- it has a title and a slug
- it has SEO metadata
- it gets automated timestamps when the object is updated
Besides that, it derives from RichText, which provides a WYSIWYG field.
"""
class Occurence(models.Model):
"""
Represents an occurence of an event. Can be automatically repeated
"""
start = models.DateTimeField()
end = models.DateTimeField()
repeat = models.ForeignKey(RepeatType, default=None, blank=True)
<commit_msg>Delete database support for repeated events.
Events already support multiple occurences, and we will create an
API to automatically add occurences in a repeating manner (every week,
every month).
Including event repeat data in database would make things a lot more
complex.<commit_after>from django.db import models
from django.utils.translation import ugettext_lazy as _
from mezzanine.core.models import Displayable, RichText
class Event(Displayable, RichText):
"""
Main object for each event.
Derives from Displayable, which by default
- it is related to a certain Site object
- it has a title and a slug
- it has SEO metadata
- it gets automated timestamps when the object is updated
Besides that, it derives from RichText, which provides a WYSIWYG field.
"""
class Occurence(models.Model):
"""
Represents an occurence of an event. Can be automatically repeated
"""
event = models.ForeignKey(Event)
start = models.DateTimeField()
end = models.DateTimeField()
|
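The commit message above promises an API that adds occurrences in a repeating manner instead of storing repeat rules in the database. A hypothetical sketch of what such a helper could look like (the function name and signature are invented for illustration; only Event and Occurence come from the record):

from datetime import timedelta


# Assumes this helper lives alongside the Event/Occurence models above.
def add_repeating_occurences(event, start, end, interval, count):
    """Create `count` Occurence rows for `event`, each shifted by
    `interval` from the previous one, e.g. interval=timedelta(weeks=1)
    for a weekly event."""
    for i in range(count):
        Occurence.objects.create(event=event,
                                 start=start + interval * i,
                                 end=end + interval * i)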
a28f4b6562527ac09f765ff72e034b6122b2fa8b
|
yolk/__init__.py
|
yolk/__init__.py
|
"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.8a0'
|
"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.8'
|
Increment minor version to 0.8
|
Increment minor version to 0.8
|
Python
|
bsd-3-clause
|
myint/yolk,myint/yolk
|
"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.8a0'
Increment minor version to 0.8
|
"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.8'
|
<commit_before>"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.8a0'
<commit_msg>Increment minor version to 0.8<commit_after>
|
"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.8'
|
"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.8a0'
Increment minor version to 0.8"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.8'
|
<commit_before>"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.8a0'
<commit_msg>Increment minor version to 0.8<commit_after>"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.8'
|
f34a5d682832749dbf0011d162bf4c7c18892b45
|
zerver/apps.py
|
zerver/apps.py
|
import logging
from typing import Any, Dict
from django.apps import AppConfig
from django.conf import settings
from django.core.cache import cache
from django.db.models.signals import post_migrate
def flush_cache(sender: AppConfig, **kwargs: Any) -> None:
logging.info("Clearing memcached cache after migrations")
cache.clear()
class ZerverConfig(AppConfig):
name = "zerver" # type: str
def ready(self) -> None:
import zerver.signals
if settings.POST_MIGRATION_CACHE_FLUSHING:
post_migrate.connect(flush_cache, sender=self)
|
import logging
from typing import Any, Dict
from django.apps import AppConfig
from django.conf import settings
from django.core.cache import cache
from django.db.models.signals import post_migrate
def flush_cache(sender: AppConfig, **kwargs: Any) -> None:
logging.info("Clearing memcached cache after migrations")
cache.clear()
class ZerverConfig(AppConfig):
name = "zerver" # type: str
def ready(self) -> None:
# We import zerver.signals here for the side effect of
# registering the user_logged_in signal receiver. This import
# needs to be here (rather than e.g. at top-of-file) to avoid
# running that code too early in Django's setup process, but
# in any case, this is an intentionally unused import.
import zerver.signals
if settings.POST_MIGRATION_CACHE_FLUSHING:
post_migrate.connect(flush_cache, sender=self)
|
Document the weird unused import for signal registration.
|
signals: Document the weird unused import for signal registration.
|
Python
|
apache-2.0
|
timabbott/zulip,tommyip/zulip,zulip/zulip,andersk/zulip,andersk/zulip,eeshangarg/zulip,kou/zulip,eeshangarg/zulip,eeshangarg/zulip,showell/zulip,brainwane/zulip,andersk/zulip,rishig/zulip,synicalsyntax/zulip,andersk/zulip,tommyip/zulip,hackerkid/zulip,kou/zulip,zulip/zulip,showell/zulip,eeshangarg/zulip,shubhamdhama/zulip,rishig/zulip,brainwane/zulip,rht/zulip,timabbott/zulip,timabbott/zulip,shubhamdhama/zulip,andersk/zulip,rht/zulip,brainwane/zulip,punchagan/zulip,hackerkid/zulip,zulip/zulip,brainwane/zulip,hackerkid/zulip,showell/zulip,kou/zulip,kou/zulip,andersk/zulip,brainwane/zulip,rht/zulip,zulip/zulip,kou/zulip,zulip/zulip,shubhamdhama/zulip,punchagan/zulip,timabbott/zulip,brainwane/zulip,tommyip/zulip,punchagan/zulip,shubhamdhama/zulip,hackerkid/zulip,punchagan/zulip,synicalsyntax/zulip,synicalsyntax/zulip,hackerkid/zulip,synicalsyntax/zulip,showell/zulip,rht/zulip,rishig/zulip,rishig/zulip,showell/zulip,kou/zulip,synicalsyntax/zulip,punchagan/zulip,rht/zulip,eeshangarg/zulip,eeshangarg/zulip,rishig/zulip,timabbott/zulip,tommyip/zulip,shubhamdhama/zulip,zulip/zulip,timabbott/zulip,eeshangarg/zulip,hackerkid/zulip,rishig/zulip,rishig/zulip,tommyip/zulip,tommyip/zulip,shubhamdhama/zulip,zulip/zulip,hackerkid/zulip,kou/zulip,shubhamdhama/zulip,andersk/zulip,showell/zulip,timabbott/zulip,rht/zulip,rht/zulip,synicalsyntax/zulip,synicalsyntax/zulip,brainwane/zulip,tommyip/zulip,punchagan/zulip,showell/zulip,punchagan/zulip
|
import logging
from typing import Any, Dict
from django.apps import AppConfig
from django.conf import settings
from django.core.cache import cache
from django.db.models.signals import post_migrate
def flush_cache(sender: AppConfig, **kwargs: Any) -> None:
logging.info("Clearing memcached cache after migrations")
cache.clear()
class ZerverConfig(AppConfig):
name = "zerver" # type: str
def ready(self) -> None:
import zerver.signals
if settings.POST_MIGRATION_CACHE_FLUSHING:
post_migrate.connect(flush_cache, sender=self)
signals: Document the weird unused import for signal registration.
|
import logging
from typing import Any, Dict
from django.apps import AppConfig
from django.conf import settings
from django.core.cache import cache
from django.db.models.signals import post_migrate
def flush_cache(sender: AppConfig, **kwargs: Any) -> None:
logging.info("Clearing memcached cache after migrations")
cache.clear()
class ZerverConfig(AppConfig):
name = "zerver" # type: str
def ready(self) -> None:
# We import zerver.signals here for the side effect of
# registering the user_logged_in signal receiver. This import
# needs to be here (rather than e.g. at top-of-file) to avoid
# running that code too early in Django's setup process, but
# in any case, this is an intentionally unused import.
import zerver.signals
if settings.POST_MIGRATION_CACHE_FLUSHING:
post_migrate.connect(flush_cache, sender=self)
|
<commit_before>
import logging
from typing import Any, Dict
from django.apps import AppConfig
from django.conf import settings
from django.core.cache import cache
from django.db.models.signals import post_migrate
def flush_cache(sender: AppConfig, **kwargs: Any) -> None:
logging.info("Clearing memcached cache after migrations")
cache.clear()
class ZerverConfig(AppConfig):
name = "zerver" # type: str
def ready(self) -> None:
import zerver.signals
if settings.POST_MIGRATION_CACHE_FLUSHING:
post_migrate.connect(flush_cache, sender=self)
<commit_msg>signals: Document the weird unused import for signal registration.<commit_after>
|
import logging
from typing import Any, Dict
from django.apps import AppConfig
from django.conf import settings
from django.core.cache import cache
from django.db.models.signals import post_migrate
def flush_cache(sender: AppConfig, **kwargs: Any) -> None:
logging.info("Clearing memcached cache after migrations")
cache.clear()
class ZerverConfig(AppConfig):
name = "zerver" # type: str
def ready(self) -> None:
# We import zerver.signals here for the side effect of
# registering the user_logged_in signal receiver. This import
# needs to be here (rather than e.g. at top-of-file) to avoid
# running that code too early in Django's setup process, but
# in any case, this is an intentionally unused import.
import zerver.signals
if settings.POST_MIGRATION_CACHE_FLUSHING:
post_migrate.connect(flush_cache, sender=self)
|
import logging
from typing import Any, Dict
from django.apps import AppConfig
from django.conf import settings
from django.core.cache import cache
from django.db.models.signals import post_migrate
def flush_cache(sender: AppConfig, **kwargs: Any) -> None:
logging.info("Clearing memcached cache after migrations")
cache.clear()
class ZerverConfig(AppConfig):
name = "zerver" # type: str
def ready(self) -> None:
import zerver.signals
if settings.POST_MIGRATION_CACHE_FLUSHING:
post_migrate.connect(flush_cache, sender=self)
signals: Document the weird unused import for signal registration.
import logging
from typing import Any, Dict
from django.apps import AppConfig
from django.conf import settings
from django.core.cache import cache
from django.db.models.signals import post_migrate
def flush_cache(sender: AppConfig, **kwargs: Any) -> None:
logging.info("Clearing memcached cache after migrations")
cache.clear()
class ZerverConfig(AppConfig):
name = "zerver" # type: str
def ready(self) -> None:
# We import zerver.signals here for the side effect of
# registering the user_logged_in signal receiver. This import
# needs to be here (rather than e.g. at top-of-file) to avoid
# running that code too early in Django's setup process, but
# in any case, this is an intentionally unused import.
import zerver.signals
if settings.POST_MIGRATION_CACHE_FLUSHING:
post_migrate.connect(flush_cache, sender=self)
|
<commit_before>
import logging
from typing import Any, Dict
from django.apps import AppConfig
from django.conf import settings
from django.core.cache import cache
from django.db.models.signals import post_migrate
def flush_cache(sender: AppConfig, **kwargs: Any) -> None:
logging.info("Clearing memcached cache after migrations")
cache.clear()
class ZerverConfig(AppConfig):
name = "zerver" # type: str
def ready(self) -> None:
import zerver.signals
if settings.POST_MIGRATION_CACHE_FLUSHING:
post_migrate.connect(flush_cache, sender=self)
<commit_msg>signals: Document the weird unused import for signal registration.<commit_after>
import logging
from typing import Any, Dict
from django.apps import AppConfig
from django.conf import settings
from django.core.cache import cache
from django.db.models.signals import post_migrate
def flush_cache(sender: AppConfig, **kwargs: Any) -> None:
logging.info("Clearing memcached cache after migrations")
cache.clear()
class ZerverConfig(AppConfig):
name = "zerver" # type: str
def ready(self) -> None:
# We import zerver.signals here for the side effect of
# registering the user_logged_in signal receiver. This import
# needs to be here (rather than e.g. at top-of-file) to avoid
# running that code too early in Django's setup process, but
# in any case, this is an intentionally unused import.
import zerver.signals
if settings.POST_MIGRATION_CACHE_FLUSHING:
post_migrate.connect(flush_cache, sender=self)
|
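As the new comment in the record spells out, the import exists only so that the receiver decorators in zerver/signals.py run at import time. A minimal sketch of that registration side effect (the receiver body here is invented; only the user_logged_in signal is named by the record):

from django.contrib.auth.signals import user_logged_in
from django.dispatch import receiver


@receiver(user_logged_in)
def handle_login(sender, request, user, **kwargs):
    # The decorator connects this function when the module is imported,
    # which is why AppConfig.ready() merely needs to import the module.
    print('login: %s' % user)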
55e506489e93bad1d000acd747a272103e789a59
|
rml/element.py
|
rml/element.py
|
''' Representation of an element
@param element_type: type of the element
@param length: length of the element
'''
import pkg_resources
from rml.exceptions import ConfigException
pkg_resources.require('cothread')
from cothread.catools import caget
class Element(object):
def __init__(self, element_type, length, **kwargs):
self.element_type = element_type
self.length = length
self.families = set()
# Getting the pv value
self.pv = kwargs.get('pv', None)
self._field = {}
def add_to_family(self, family):
self.families.add(family)
def get_pv(self, field, handle='readback'):
"""
Get pv value for the given field.
Currently only supports readback handle
"""
if not field in self._field:
raise ConfigException("Field {0} doesn't exist.".format(field))
elif handle == 'readback':
print 'abc'
return caget(self.pv)
else:
raise ValueError("Unknown handle {0}".format(handle))
def set_pv(self, field, pv_name):
self.pv = pv_name
self._field[field] = pv_name
def get_type(self):
return self.element_type
def get_length(self):
return self.length
def get_families(self):
return self.families
|
''' Representation of an element
@param element_type: type of the element
@param length: length of the element
'''
import pkg_resources
from rml.exceptions import ConfigException
pkg_resources.require('cothread')
from cothread.catools import caget
class Element(object):
def __init__(self, element_type, length, **kwargs):
self.element_type = element_type
self.length = length
self.families = set()
# For storing the pv. Dictionary where keys are fields and
# values are pv names
self.pv = dict()
def add_to_family(self, family):
self.families.add(family)
def get_pv(self, field, handle='readback'):
"""
Get pv value for the given field.
Currently only supports readback handle
"""
if not field in self.pv:
raise ConfigException("Field {0} doesn't exist.".format(field))
elif handle == 'readback':
return caget(self.pv[field])
else:
raise ValueError("Unknown handle {0}".format(handle))
def set_pv(self, field, pv_name):
self.pv[field] = pv_name
def get_type(self):
return self.element_type
def get_length(self):
return self.length
def get_families(self):
return self.families
|
Add support for y field of a pv
|
Add support for y field of a pv
|
Python
|
apache-2.0
|
willrogers/pml,razvanvasile/RML,willrogers/pml
|
''' Representation of an element
@param element_type: type of the element
@param length: length of the element
'''
import pkg_resources
from rml.exceptions import ConfigException
pkg_resources.require('cothread')
from cothread.catools import caget
class Element(object):
def __init__(self, element_type, length, **kwargs):
self.element_type = element_type
self.length = length
self.families = set()
# Getting the pv value
self.pv = kwargs.get('pv', None)
self._field = {}
def add_to_family(self, family):
self.families.add(family)
def get_pv(self, field, handle='readback'):
"""
Get pv value for the given field.
Currently only supports readback handle
"""
if not field in self._field:
raise ConfigException("Field {0} doesn't exist.".format(field))
elif handle == 'readback':
print 'abc'
return caget(self.pv)
else:
raise ValueError("Unknown handle {0}".format(handle))
def set_pv(self, field, pv_name):
self.pv = pv_name
self._field[field] = pv_name
def get_type(self):
return self.element_type
def get_length(self):
return self.length
def get_families(self):
return self.families
Add support for y field of a pv
|
''' Representation of an element
@param element_type: type of the element
@param length: length of the element
'''
import pkg_resources
from rml.exceptions import ConfigException
pkg_resources.require('cothread')
from cothread.catools import caget
class Element(object):
def __init__(self, element_type, length, **kwargs):
self.element_type = element_type
self.length = length
self.families = set()
# For storing the pv. Dictionary where keys are fields and
# values are pv names
self.pv = dict()
def add_to_family(self, family):
self.families.add(family)
def get_pv(self, field, handle='readback'):
"""
Get pv value for the given field.
Currently only supports readback handle
"""
if not field in self.pv:
raise ConfigException("Field {0} doesn't exist.".format(field))
elif handle == 'readback':
return caget(self.pv[field])
else:
raise ValueError("Unknown handle {0}".format(handle))
def set_pv(self, field, pv_name):
self.pv[field] = pv_name
def get_type(self):
return self.element_type
def get_length(self):
return self.length
def get_families(self):
return self.families
|
<commit_before>''' Representation of an element
@param element_type: type of the element
@param length: length of the element
'''
import pkg_resources
from rml.exceptions import ConfigException
pkg_resources.require('cothread')
from cothread.catools import caget
class Element(object):
def __init__(self, element_type, length, **kwargs):
self.element_type = element_type
self.length = length
self.families = set()
# Getting the pv value
self.pv = kwargs.get('pv', None)
self._field = {}
def add_to_family(self, family):
self.families.add(family)
def get_pv(self, field, handle='readback'):
"""
Get pv value for the given field.
Currently only supports readback handle
"""
if not field in self._field:
raise ConfigException("Field {0} doesn't exist.".format(field))
elif handle == 'readback':
print 'abc'
return caget(self.pv)
else:
raise ValueError("Unknown handle {0}".format(handle))
def set_pv(self, field, pv_name):
self.pv = pv_name
self._field[field] = pv_name
def get_type(self):
return self.element_type
def get_length(self):
return self.length
def get_families(self):
return self.families
<commit_msg>Add support for y field of a pv<commit_after>
|
''' Representation of an element
@param element_type: type of the element
@param length: length of the element
'''
import pkg_resources
from rml.exceptions import ConfigException
pkg_resources.require('cothread')
from cothread.catools import caget
class Element(object):
def __init__(self, element_type, length, **kwargs):
self.element_type = element_type
self.length = length
self.families = set()
# For storing the pv. Dictionary where keys are fields and
# values are pv names
self.pv = dict()
def add_to_family(self, family):
self.families.add(family)
def get_pv(self, field, handle='readback'):
"""
Get pv value for the given field.
Currently only supports readback handle
"""
if not field in self.pv:
raise ConfigException("Field {0} doesn't exist.".format(field))
elif handle == 'readback':
return caget(self.pv[field])
else:
raise ValueError("Unknown handle {0}".format(handle))
def set_pv(self, field, pv_name):
self.pv[field] = pv_name
def get_type(self):
return self.element_type
def get_length(self):
return self.length
def get_families(self):
return self.families
|
''' Representation of an element
@param element_type: type of the element
@param length: length of the element
'''
import pkg_resources
from rml.exceptions import ConfigException
pkg_resources.require('cothread')
from cothread.catools import caget
class Element(object):
def __init__(self, element_type, length, **kwargs):
self.element_type = element_type
self.length = length
self.families = set()
# Getting the pv value
self.pv = kwargs.get('pv', None)
self._field = {}
def add_to_family(self, family):
self.families.add(family)
def get_pv(self, field, handle='readback'):
"""
Get pv value for the given field.
Currently only supports readback handle
"""
if not field in self._field:
raise ConfigException("Field {0} doesn't exist.".format(field))
elif handle == 'readback':
print 'abc'
return caget(self.pv)
else:
raise ValueError("Unknown handle {0}".format(handle))
def set_pv(self, field, pv_name):
self.pv = pv_name
self._field[field] = pv_name
def get_type(self):
return self.element_type
def get_length(self):
return self.length
def get_families(self):
return self.families
Add support for y field of a pv
''' Representation of an element
@param element_type: type of the element
@param length: length of the element
'''
import pkg_resources
from rml.exceptions import ConfigException
pkg_resources.require('cothread')
from cothread.catools import caget
class Element(object):
def __init__(self, element_type, length, **kwargs):
self.element_type = element_type
self.length = length
self.families = set()
# For storing the pv. Dictionary where keys are fields and
# values are pv names
self.pv = dict()
def add_to_family(self, family):
self.families.add(family)
def get_pv(self, field, handle='readback'):
"""
Get pv value for the given field.
Currently only supports readback handle
"""
if not field in self.pv:
raise ConfigException("Field {0} doesn't exist.".format(field))
elif handle == 'readback':
return caget(self.pv[field])
else:
raise ValueError("Unknown handle {0}".format(handle))
def set_pv(self, field, pv_name):
self.pv[field] = pv_name
def get_type(self):
return self.element_type
def get_length(self):
return self.length
def get_families(self):
return self.families
|
<commit_before>''' Representation of an element
@param element_type: type of the element
@param length: length of the element
'''
import pkg_resources
from rml.exceptions import ConfigException
pkg_resources.require('cothread')
from cothread.catools import caget
class Element(object):
def __init__(self, element_type, length, **kwargs):
self.element_type = element_type
self.length = length
self.families = set()
# Getting the pv value
self.pv = kwargs.get('pv', None)
self._field = {}
def add_to_family(self, family):
self.families.add(family)
def get_pv(self, field, handle='readback'):
"""
Get pv value for the given field.
Currently only supports readback handle
"""
if not field in self._field:
raise ConfigException("Field {0} doesn't exist.".format(field))
elif handle == 'readback':
print 'abc'
return caget(self.pv)
else:
raise ValueError("Unknown handle {0}".format(handle))
def set_pv(self, field, pv_name):
self.pv = pv_name
self._field[field] = pv_name
def get_type(self):
return self.element_type
def get_length(self):
return self.length
def get_families(self):
return self.families
<commit_msg>Add support for y field of a pv<commit_after>''' Representation of an element
@param element_type: type of the element
@param length: length of the element
'''
import pkg_resources
from rml.exceptions import ConfigException
pkg_resources.require('cothread')
from cothread.catools import caget
class Element(object):
def __init__(self, element_type, length, **kwargs):
self.element_type = element_type
self.length = length
self.families = set()
# For storing the pv. Dictionary where keys are fields and
# values are pv names
self.pv = dict()
def add_to_family(self, family):
self.families.add(family)
def get_pv(self, field, handle='readback'):
"""
Get pv value for the given field.
Currently only supports readback handle
"""
if not field in self.pv:
raise ConfigException("Field {0} doesn't exist.".format(field))
elif handle == 'readback':
return caget(self.pv[field])
else:
raise ValueError("Unknown handle {0}".format(handle))
def set_pv(self, field, pv_name):
self.pv[field] = pv_name
def get_type(self):
return self.element_type
def get_length(self):
return self.length
def get_families(self):
return self.families
|
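A hypothetical usage sketch of the reworked per-field pv mapping above, assuming the Element class is importable (the PV names are made up, and get_pv ultimately calls caget, so a live EPICS connection would be needed):

bpm = Element('BPM', 0.5)
bpm.set_pv('x', 'SR01:BPM:01:X')   # made-up PV names
bpm.set_pv('y', 'SR01:BPM:01:Y')

x_value = bpm.get_pv('x')          # caget on the x PV
y_value = bpm.get_pv('y')          # caget on the y PV
bpm.get_pv('z')                    # raises ConfigException: unknown field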
eab72cdb7e58b5398ace19c74569b1eb35ea91f8
|
toolbox/plugins/standard_object_features.py
|
toolbox/plugins/standard_object_features.py
|
from pluginsystem import object_feature_computation_plugin
import vigra
from vigra import numpy as np
class StandardObjectFeatures(object_feature_computation_plugin.ObjectFeatureComputationPlugin):
"""
Computes the standard vigra region features
"""
worksForDimensions = [2, 3]
omittedFeatures = ["Global<Maximum >", "Global<Minimum >", 'Histogram', 'Weighted<RegionCenter>']
def computeFeatures(self, rawImage, labelImage, frameNumber):
return vigra.analysis.extractRegionFeatures(rawImage.astype('float32'),
labelImage.astype('uint32'),
ignoreLabel=0)
|
from pluginsystem import object_feature_computation_plugin
import vigra
from vigra import numpy as np
class StandardObjectFeatures(object_feature_computation_plugin.ObjectFeatureComputationPlugin):
"""
Computes the standard vigra region features
"""
worksForDimensions = [2, 3]
omittedFeatures = ["Global<Maximum >", "Global<Minimum >", 'Histogram', 'Weighted<RegionCenter>']
def computeFeatures(self, rawImage, labelImage, frameNumber):
return vigra.analysis.extractRegionFeatures(rawImage.squeeze().astype('float32'),
labelImage.squeeze().astype('uint32'),
ignoreLabel=0)
|
Fix default region feature computation plugin
|
Fix default region feature computation plugin
|
Python
|
mit
|
chaubold/hytra,chaubold/hytra,chaubold/hytra
|
from pluginsystem import object_feature_computation_plugin
import vigra
from vigra import numpy as np
class StandardObjectFeatures(object_feature_computation_plugin.ObjectFeatureComputationPlugin):
"""
Computes the standard vigra region features
"""
worksForDimensions = [2, 3]
omittedFeatures = ["Global<Maximum >", "Global<Minimum >", 'Histogram', 'Weighted<RegionCenter>']
def computeFeatures(self, rawImage, labelImage, frameNumber):
return vigra.analysis.extractRegionFeatures(rawImage.astype('float32'),
labelImage.astype('uint32'),
ignoreLabel=0)
Fix default region feature computation plugin
|
from pluginsystem import object_feature_computation_plugin
import vigra
from vigra import numpy as np
class StandardObjectFeatures(object_feature_computation_plugin.ObjectFeatureComputationPlugin):
"""
Computes the standard vigra region features
"""
worksForDimensions = [2, 3]
omittedFeatures = ["Global<Maximum >", "Global<Minimum >", 'Histogram', 'Weighted<RegionCenter>']
def computeFeatures(self, rawImage, labelImage, frameNumber):
return vigra.analysis.extractRegionFeatures(rawImage.squeeze().astype('float32'),
labelImage.squeeze().astype('uint32'),
ignoreLabel=0)
|
<commit_before>from pluginsystem import object_feature_computation_plugin
import vigra
from vigra import numpy as np
class StandardObjectFeatures(object_feature_computation_plugin.ObjectFeatureComputationPlugin):
"""
Computes the standard vigra region features
"""
worksForDimensions = [2, 3]
omittedFeatures = ["Global<Maximum >", "Global<Minimum >", 'Histogram', 'Weighted<RegionCenter>']
def computeFeatures(self, rawImage, labelImage, frameNumber):
return vigra.analysis.extractRegionFeatures(rawImage.astype('float32'),
labelImage.astype('uint32'),
ignoreLabel=0)
<commit_msg>Fix default region feature computation plugin<commit_after>
|
from pluginsystem import object_feature_computation_plugin
import vigra
from vigra import numpy as np
class StandardObjectFeatures(object_feature_computation_plugin.ObjectFeatureComputationPlugin):
"""
Computes the standard vigra region features
"""
worksForDimensions = [2, 3]
omittedFeatures = ["Global<Maximum >", "Global<Minimum >", 'Histogram', 'Weighted<RegionCenter>']
def computeFeatures(self, rawImage, labelImage, frameNumber):
return vigra.analysis.extractRegionFeatures(rawImage.squeeze().astype('float32'),
labelImage.squeeze().astype('uint32'),
ignoreLabel=0)
|
from pluginsystem import object_feature_computation_plugin
import vigra
from vigra import numpy as np
class StandardObjectFeatures(object_feature_computation_plugin.ObjectFeatureComputationPlugin):
"""
Computes the standard vigra region features
"""
worksForDimensions = [2, 3]
omittedFeatures = ["Global<Maximum >", "Global<Minimum >", 'Histogram', 'Weighted<RegionCenter>']
def computeFeatures(self, rawImage, labelImage, frameNumber):
return vigra.analysis.extractRegionFeatures(rawImage.astype('float32'),
labelImage.astype('uint32'),
ignoreLabel=0)
Fix default region feature computation plugin
from pluginsystem import object_feature_computation_plugin
import vigra
from vigra import numpy as np
class StandardObjectFeatures(object_feature_computation_plugin.ObjectFeatureComputationPlugin):
"""
Computes the standard vigra region features
"""
worksForDimensions = [2, 3]
omittedFeatures = ["Global<Maximum >", "Global<Minimum >", 'Histogram', 'Weighted<RegionCenter>']
def computeFeatures(self, rawImage, labelImage, frameNumber):
return vigra.analysis.extractRegionFeatures(rawImage.squeeze().astype('float32'),
labelImage.squeeze().astype('uint32'),
ignoreLabel=0)
|
<commit_before>from pluginsystem import object_feature_computation_plugin
import vigra
from vigra import numpy as np
class StandardObjectFeatures(object_feature_computation_plugin.ObjectFeatureComputationPlugin):
"""
Computes the standard vigra region features
"""
worksForDimensions = [2, 3]
omittedFeatures = ["Global<Maximum >", "Global<Minimum >", 'Histogram', 'Weighted<RegionCenter>']
def computeFeatures(self, rawImage, labelImage, frameNumber):
return vigra.analysis.extractRegionFeatures(rawImage.astype('float32'),
labelImage.astype('uint32'),
ignoreLabel=0)
<commit_msg>Fix default region feature computation plugin<commit_after>from pluginsystem import object_feature_computation_plugin
import vigra
from vigra import numpy as np
class StandardObjectFeatures(object_feature_computation_plugin.ObjectFeatureComputationPlugin):
"""
Computes the standard vigra region features
"""
worksForDimensions = [2, 3]
omittedFeatures = ["Global<Maximum >", "Global<Minimum >", 'Histogram', 'Weighted<RegionCenter>']
def computeFeatures(self, rawImage, labelImage, frameNumber):
return vigra.analysis.extractRegionFeatures(rawImage.squeeze().astype('float32'),
labelImage.squeeze().astype('uint32'),
ignoreLabel=0)
|
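The fix above matters because arrays handed over from image pipelines often carry singleton axes (for example a trailing channel axis), and vigra's extractRegionFeatures needs the raw and label arrays to have matching dimensionality. A small numpy illustration of what squeeze() does:

import numpy as np

raw = np.zeros((100, 120, 1), dtype='float32')  # 2D image + channel axis
print(raw.shape)             # (100, 120, 1)
print(raw.squeeze().shape)   # (100, 120) -- size-1 axes removed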
0b8075e8eb8fb52a9407bfa92d61e5a5363f8861
|
src/example/dump_camera_capabilities.py
|
src/example/dump_camera_capabilities.py
|
import pysony
import fnmatch
camera = pysony.SonyAPI()
#camera = pysony.SonyAPI(QX_ADDR='http://192.168.122.1:8080/')
mode = camera.getAvailableApiList()
print "Available calls:"
for x in (mode["result"]):
for y in x:
print y
filtered = fnmatch.filter(x, "*Supported*")
print "--"
for x in filtered:
print x, ":"
function=getattr(camera, x)
params = function()
print params
print
|
import pysony
import time
import fnmatch
print "Searching for camera"
search = pysony.ControlPoint()
cameras = search.discover()
if len(cameras):
camera = pysony.SonyAPI(QX_ADDR=cameras[0])
else:
print "No camera found, aborting"
quit()
mode = camera.getAvailableApiList()
# For those cameras which need it
if 'startRecMode' in (mode['result'])[0]:
camera.startRecMode()
time.sleep(5)
# and re-read capabilities
mode = camera.getAvailableApiList()
print "Available calls:"
for x in (mode["result"]):
for y in x:
print y
filtered = fnmatch.filter(x, "*Supported*")
print "--"
for x in filtered:
print x, ":"
function=getattr(camera, x)
params = function()
print params
print
|
Add automatic searching capability. Note that camera may return different capabilities depending on its mode dial.
|
Add automatic searching capability.
Note that camera may return different capabilities depending on its mode dial.
|
Python
|
mit
|
Bloodevil/sony_camera_api,mungewell/sony_camera_api,Bloodevil/sony_camera_api
|
import pysony
import fnmatch
camera = pysony.SonyAPI()
#camera = pysony.SonyAPI(QX_ADDR='http://192.168.122.1:8080/')
mode = camera.getAvailableApiList()
print "Available calls:"
for x in (mode["result"]):
for y in x:
print y
filtered = fnmatch.filter(x, "*Supported*")
print "--"
for x in filtered:
print x, ":"
function=getattr(camera, x)
params = function()
print params
print
Add automatic searching capability.
Note that camera may return different capabilities depending on its mode dial.
|
import pysony
import time
import fnmatch
print "Searching for camera"
search = pysony.ControlPoint()
cameras = search.discover()
if len(cameras):
camera = pysony.SonyAPI(QX_ADDR=cameras[0])
else:
print "No camera found, aborting"
quit()
mode = camera.getAvailableApiList()
# For those cameras which need it
if 'startRecMode' in (mode['result'])[0]:
camera.startRecMode()
time.sleep(5)
# and re-read capabilities
mode = camera.getAvailableApiList()
print "Available calls:"
for x in (mode["result"]):
for y in x:
print y
filtered = fnmatch.filter(x, "*Supported*")
print "--"
for x in filtered:
print x, ":"
function=getattr(camera, x)
params = function()
print params
print
|
<commit_before>import pysony
import fnmatch
camera = pysony.SonyAPI()
#camera = pysony.SonyAPI(QX_ADDR='http://192.168.122.1:8080/')
mode = camera.getAvailableApiList()
print "Available calls:"
for x in (mode["result"]):
for y in x:
print y
filtered = fnmatch.filter(x, "*Supported*")
print "--"
for x in filtered:
print x, ":"
function=getattr(camera, x)
params = function()
print params
print
<commit_msg>Add automatic searching capability.
Note that camera may return different capabilities depending on its mode dial.<commit_after>
|
import pysony
import time
import fnmatch
print "Searching for camera"
search = pysony.ControlPoint()
cameras = search.discover()
if len(cameras):
camera = pysony.SonyAPI(QX_ADDR=cameras[0])
else:
print "No camera found, aborting"
quit()
mode = camera.getAvailableApiList()
# For those cameras which need it
if 'startRecMode' in (mode['result'])[0]:
camera.startRecMode()
time.sleep(5)
# and re-read capabilities
mode = camera.getAvailableApiList()
print "Available calls:"
for x in (mode["result"]):
for y in x:
print y
filtered = fnmatch.filter(x, "*Supported*")
print "--"
for x in filtered:
print x, ":"
function=getattr(camera, x)
params = function()
print params
print
|
import pysony
import fnmatch
camera = pysony.SonyAPI()
#camera = pysony.SonyAPI(QX_ADDR='http://192.168.122.1:8080/')
mode = camera.getAvailableApiList()
print "Available calls:"
for x in (mode["result"]):
for y in x:
print y
filtered = fnmatch.filter(x, "*Supported*")
print "--"
for x in filtered:
print x, ":"
function=getattr(camera, x)
params = function()
print params
print
Add automatic searching capability.
Note that camera may return different capabilities depending on its mode dial.
import pysony
import time
import fnmatch
print "Searching for camera"
search = pysony.ControlPoint()
cameras = search.discover()
if len(cameras):
camera = pysony.SonyAPI(QX_ADDR=cameras[0])
else:
print "No camera found, aborting"
quit()
mode = camera.getAvailableApiList()
# For those cameras which need it
if 'startRecMode' in (mode['result'])[0]:
camera.startRecMode()
time.sleep(5)
# and re-read capabilities
mode = camera.getAvailableApiList()
print "Available calls:"
for x in (mode["result"]):
for y in x:
print y
filtered = fnmatch.filter(x, "*Supported*")
print "--"
for x in filtered:
print x, ":"
function=getattr(camera, x)
params = function()
print params
print
|
<commit_before>import pysony
import fnmatch
camera = pysony.SonyAPI()
#camera = pysony.SonyAPI(QX_ADDR='http://192.168.122.1:8080/')
mode = camera.getAvailableApiList()
print "Available calls:"
for x in (mode["result"]):
for y in x:
print y
filtered = fnmatch.filter(x, "*Supported*")
print "--"
for x in filtered:
print x, ":"
function=getattr(camera, x)
params = function()
print params
print
<commit_msg>Add automatic searching capability.
Note that the camera may return different capabilities depending on its mode dial.<commit_after>import pysony
import time
import fnmatch
print "Searching for camera"
search = pysony.ControlPoint()
cameras = search.discover()
if len(cameras):
camera = pysony.SonyAPI(QX_ADDR=cameras[0])
else:
print "No camera found, aborting"
quit()
mode = camera.getAvailableApiList()
# For those cameras which need it
if 'startRecMode' in (mode['result'])[0]:
camera.startRecMode()
time.sleep(5)
# and re-read capabilities
mode = camera.getAvailableApiList()
print "Available calls:"
for x in (mode["result"]):
for y in x:
print y
filtered = fnmatch.filter(x, "*Supported*")
print "--"
for x in filtered:
print x, ":"
function=getattr(camera, x)
params = function()
print params
print
|
fb71c13e0995957f1879d8e8ce047f347777564e
|
testing/test_BioMagick.py
|
testing/test_BioMagick.py
|
#!/usr/bin/env python
# ----------------------------------------------------------------------------------------------
# Created by: Lee & Matt
#
# Description: Contains end-to-end conversion tests for BioMagick
# ----------------------------------------------------------------------------------------------
# ==============================================================================================
import os
import yaml
from subprocess import call
# Nose test generator to iterate conversion test cases defined in YAML
class TestConversion(object):
def tests(self):
with open("./testing/conversion_tests.yml", "rU") as tests_file:
test_cases = tests_file.read()
for test_case in yaml.safe_load(test_cases):
inputs = test_case["inputs"]
outputs = test_case["outputs"]
formats = test_case["formats"]
alphabet = test_case["alphabet"] if "alphabet" in test_case else None
yield self.check_conversion, inputs, outputs, formats, alphabet
@staticmethod
def check_conversion(input_files, expected_outputs, output_formats, alphabet):
# Set up CLI arguments
args = "-i %s -f %s -a %s" % (",".join(input_files), ",".join(output_formats), alphabet)
# Do conversion(s)
ret = call("python BioMagick.py " + args)
assert ret == 0
# Check outputs
files = os.listdir("./")
for output_file in expected_outputs:
assert output_file in files
# Clean up each output file as it's verified
os.remove(output_file)
|
#!/usr/bin/env python
# ----------------------------------------------------------------------------------------------
# Created by: Lee & Matt
#
# Description: Contains end-to-end conversion tests for BioMagick
# ----------------------------------------------------------------------------------------------
# ==============================================================================================
import os
import yaml
from subprocess import call
# Nose test generator to iterate conversion test cases defined in YAML
class TestConversion(object):
def tests(self):
with open("./testing/conversion_tests.yml", "rU") as tests_file:
test_cases = tests_file.read()
for test_case in yaml.safe_load(test_cases):
inputs = test_case["inputs"]
outputs = test_case["outputs"]
formats = test_case["formats"]
alphabet = test_case["alphabet"] if "alphabet" in test_case else None
yield self.check_conversion, inputs, outputs, formats, alphabet
@staticmethod
def check_conversion(input_files, expected_outputs, output_formats, alphabet):
# Disable for debugging
assert True
return
# Set up CLI arguments
args = "-i %s -f %s -a %s" % (",".join(input_files), ",".join(output_formats), alphabet)
# Do conversion(s)
ret = call("python BioMagick.py " + args)
assert ret == 0
# Check outputs
files = os.listdir("./")
for output_file in expected_outputs:
assert output_file in files
# Clean up each output file as it's verified
os.remove(output_file)
|
Disable end-to-end tests for debugging (will re-enable later!)
|
Disable end-to-end tests for debugging (will re-enable later!)
|
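For reference, an alternative to passing trivially and returning is to tell the runner the test was skipped, so the disablement stays visible in the report. A minimal sketch under the same nose/unittest setup (`SkipTest` is recognized by unittest and by recent nose; the message string is made up):

from unittest import SkipTest

def check_conversion(input_files, expected_outputs, output_formats, alphabet):
    # Raising SkipTest marks the test as skipped instead of passed,
    # which keeps temporarily disabled tests from looking green.
    raise SkipTest("end-to-end conversion tests disabled for debugging")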
Python
|
mit
|
LeeBergstrand/BioMagick,LeeBergstrand/BioMagick
|
#!/usr/bin/env python
# ----------------------------------------------------------------------------------------------
# Created by: Lee & Matt
#
# Description: Contains end-to-end conversion tests for BioMagick
# ----------------------------------------------------------------------------------------------
# ==============================================================================================
import os
import yaml
from subprocess import call
# Nose test generator to iterate conversion test cases defined in YAML
class TestConversion(object):
def tests(self):
with open("./testing/conversion_tests.yml", "rU") as tests_file:
test_cases = tests_file.read()
for test_case in yaml.safe_load(test_cases):
inputs = test_case["inputs"]
outputs = test_case["outputs"]
formats = test_case["formats"]
alphabet = test_case["alphabet"] if "alphabet" in test_case else None
yield self.check_conversion, inputs, outputs, formats, alphabet
@staticmethod
def check_conversion(input_files, expected_outputs, output_formats, alphabet):
# Set up CLI arguments
args = "-i %s -f %s -a %s" % (",".join(input_files), ",".join(output_formats), alphabet)
# Do conversion(s)
ret = call("python BioMagick.py " + args)
assert ret == 0
# Check outputs
files = os.listdir("./")
for output_file in expected_outputs:
assert output_file in files
# Clean up each output file as it's verified
os.remove(output_file)Disable end-to-end tests for debugging (will re-enable later!)
|
#!/usr/bin/env python
# ----------------------------------------------------------------------------------------------
# Created by: Lee & Matt
#
# Description: Contains end-to-end conversion tests for BioMagick
# ----------------------------------------------------------------------------------------------
# ==============================================================================================
import os
import yaml
from subprocess import call
# Nose test generator to iterate conversion test cases defined in YAML
class TestConversion(object):
def tests(self):
with open("./testing/conversion_tests.yml", "rU") as tests_file:
test_cases = tests_file.read()
for test_case in yaml.safe_load(test_cases):
inputs = test_case["inputs"]
outputs = test_case["outputs"]
formats = test_case["formats"]
alphabet = test_case["alphabet"] if "alphabet" in test_case else None
yield self.check_conversion, inputs, outputs, formats, alphabet
@staticmethod
def check_conversion(input_files, expected_outputs, output_formats, alphabet):
# Disable for debugging
assert True
return
# Set up CLI arguments
args = "-i %s -f %s -a %s" % (",".join(input_files), ",".join(output_formats), alphabet)
# Do conversion(s)
ret = call("python BioMagick.py " + args)
assert ret == 0
# Check outputs
files = os.listdir("./")
for output_file in expected_outputs:
assert output_file in files
# Clean up each output file as it's verified
os.remove(output_file)
|
<commit_before>#!/usr/bin/env python
# ----------------------------------------------------------------------------------------------
# Created by: Lee & Matt
#
# Description: Contains end-to-end conversion tests for BioMagick
# ----------------------------------------------------------------------------------------------
# ==============================================================================================
import os
import yaml
from subprocess import call
# Nose test generator to iterate conversion test cases defined in YAML
class TestConversion(object):
def tests(self):
with open("./testing/conversion_tests.yml", "rU") as tests_file:
test_cases = tests_file.read()
for test_case in yaml.safe_load(test_cases):
inputs = test_case["inputs"]
outputs = test_case["outputs"]
formats = test_case["formats"]
alphabet = test_case["alphabet"] if "alphabet" in test_case else None
yield self.check_conversion, inputs, outputs, formats, alphabet
@staticmethod
def check_conversion(input_files, expected_outputs, output_formats, alphabet):
# Set up CLI arguments
args = "-i %s -f %s -a %s" % (",".join(input_files), ",".join(output_formats), alphabet)
# Do conversion(s)
ret = call("python BioMagick.py " + args)
assert ret == 0
# Check outputs
files = os.listdir("./")
for output_file in expected_outputs:
assert output_file in files
# Clean up each output file as it's verified
os.remove(output_file)<commit_msg>Disable end-to-end tests for debugging (will re-enable later!)<commit_after>
|
#!/usr/bin/env python
# ----------------------------------------------------------------------------------------------
# Created by: Lee & Matt
#
# Description: Contains end-to-end conversion tests for BioMagick
# ----------------------------------------------------------------------------------------------
# ==============================================================================================
import os
import yaml
from subprocess import call
# Nose test generator to iterate conversion test cases defined in YAML
class TestConversion(object):
def tests(self):
with open("./testing/conversion_tests.yml", "rU") as tests_file:
test_cases = tests_file.read()
for test_case in yaml.safe_load(test_cases):
inputs = test_case["inputs"]
outputs = test_case["outputs"]
formats = test_case["formats"]
alphabet = test_case["alphabet"] if "alphabet" in test_case else None
yield self.check_conversion, inputs, outputs, formats, alphabet
@staticmethod
def check_conversion(input_files, expected_outputs, output_formats, alphabet):
# Disable for debugging
assert True
return
# Set up CLI arguments
args = "-i %s -f %s -a %s" % (",".join(input_files), ",".join(output_formats), alphabet)
# Do conversion(s)
ret = call("python BioMagick.py " + args)
assert ret == 0
# Check outputs
files = os.listdir("./")
for output_file in expected_outputs:
assert output_file in files
# Clean up each output file as it's verified
os.remove(output_file)
|
#!/usr/bin/env python
# ----------------------------------------------------------------------------------------------
# Created by: Lee & Matt
#
# Description: Contains end-to-end conversion tests for BioMagick
# ----------------------------------------------------------------------------------------------
# ==============================================================================================
import os
import yaml
from subprocess import call
# Nose test generator to iterate conversion test cases defined in YAML
class TestConversion(object):
def tests(self):
with open("./testing/conversion_tests.yml", "rU") as tests_file:
test_cases = tests_file.read()
for test_case in yaml.safe_load(test_cases):
inputs = test_case["inputs"]
outputs = test_case["outputs"]
formats = test_case["formats"]
alphabet = test_case["alphabet"] if "alphabet" in test_case else None
yield self.check_conversion, inputs, outputs, formats, alphabet
@staticmethod
def check_conversion(input_files, expected_outputs, output_formats, alphabet):
# Set up CLI arguments
args = "-i %s -f %s -a %s" % (",".join(input_files), ",".join(output_formats), alphabet)
# Do conversion(s)
ret = call("python BioMagick.py " + args)
assert ret == 0
# Check outputs
files = os.listdir("./")
for output_file in expected_outputs:
assert output_file in files
# Clean up each output file as it's verified
os.remove(output_file)Disable end-to-end tests for debugging (will re-enable later!)#!/usr/bin/env python
# ----------------------------------------------------------------------------------------------
# Created by: Lee & Matt
#
# Description: Contains end-to-end conversion tests for BioMagick
# ----------------------------------------------------------------------------------------------
# ==============================================================================================
import os
import yaml
from subprocess import call
# Nose test generator to iterate conversion test cases defined in YAML
class TestConversion(object):
def tests(self):
with open("./testing/conversion_tests.yml", "rU") as tests_file:
test_cases = tests_file.read()
for test_case in yaml.safe_load(test_cases):
inputs = test_case["inputs"]
outputs = test_case["outputs"]
formats = test_case["formats"]
alphabet = test_case["alphabet"] if "alphabet" in test_case else None
yield self.check_conversion, inputs, outputs, formats, alphabet
@staticmethod
def check_conversion(input_files, expected_outputs, output_formats, alphabet):
# Disable for debugging
assert True
return
# Set up CLI arguments
args = "-i %s -f %s -a %s" % (",".join(input_files), ",".join(output_formats), alphabet)
# Do conversion(s)
ret = call("python BioMagick.py " + args)
assert ret == 0
# Check outputs
files = os.listdir("./")
for output_file in expected_outputs:
assert output_file in files
# Clean up each output file as it's verified
os.remove(output_file)
|
<commit_before>#!/usr/bin/env python
# ----------------------------------------------------------------------------------------------
# Created by: Lee & Matt
#
# Description: Contains end-to-end conversion tests for BioMagick
# ----------------------------------------------------------------------------------------------
# ==============================================================================================
import os
import yaml
from subprocess import call
# Nose test generator to iterate conversion test cases defined in YAML
class TestConversion(object):
def tests(self):
with open("./testing/conversion_tests.yml", "rU") as tests_file:
test_cases = tests_file.read()
for test_case in yaml.safe_load(test_cases):
inputs = test_case["inputs"]
outputs = test_case["outputs"]
formats = test_case["formats"]
alphabet = test_case["alphabet"] if "alphabet" in test_case else None
yield self.check_conversion, inputs, outputs, formats, alphabet
@staticmethod
def check_conversion(input_files, expected_outputs, output_formats, alphabet):
# Set up CLI arguments
args = "-i %s -f %s -a %s" % (",".join(input_files), ",".join(output_formats), alphabet)
# Do conversion(s)
ret = call("python BioMagick.py " + args)
assert ret == 0
# Check outputs
files = os.listdir("./")
for output_file in expected_outputs:
assert output_file in files
# Clean up each output file as it's verified
os.remove(output_file)<commit_msg>Disable end-to-end tests for debugging (will re-enable later!)<commit_after>#!/usr/bin/env python
# ----------------------------------------------------------------------------------------------
# Created by: Lee & Matt
#
# Description: Contains end-to-end conversion tests for BioMagick
# ----------------------------------------------------------------------------------------------
# ==============================================================================================
import os
import yaml
from subprocess import call
# Nose test generator to iterate conversion test cases defined in YAML
class TestConversion(object):
def tests(self):
with open("./testing/conversion_tests.yml", "rU") as tests_file:
test_cases = tests_file.read()
for test_case in yaml.safe_load(test_cases):
inputs = test_case["inputs"]
outputs = test_case["outputs"]
formats = test_case["formats"]
alphabet = test_case["alphabet"] if "alphabet" in test_case else None
yield self.check_conversion, inputs, outputs, formats, alphabet
@staticmethod
def check_conversion(input_files, expected_outputs, output_formats, alphabet):
# Disable for debugging
assert True
return
# Set up CLI arguments
args = "-i %s -f %s -a %s" % (",".join(input_files), ",".join(output_formats), alphabet)
# Do conversion(s)
ret = call("python BioMagick.py " + args)
assert ret == 0
# Check outputs
files = os.listdir("./")
for output_file in expected_outputs:
assert output_file in files
# Clean up each output file as it's verified
os.remove(output_file)
|
3c3ceddf3c7d92e6e66017c0980102421e9bbe43
|
tests/test_integration.py
|
tests/test_integration.py
|
import os
import types
from unittest import TestCase
from yoconfigurator.base import read_config
from yoconfig import configure_services
from pycloudflare.services import CloudFlareService
app_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
conf = read_config(app_dir)
class ZonesTest(TestCase):
def setUp(self):
configure_services('cloudflare', ['cloudflare'], conf.common)
self.cloudflare = CloudFlareService()
def test_iter_zones(self):
zone = next(self.cloudflare.iter_zones())
self.assertIsInstance(zone, dict)
def test_get_zone(self):
zone_id = self.cloudflare.get_zones()[0]['id']
zone = self.cloudflare.get_zone(zone_id)
self.assertIsInstance(zone, dict)
|
import os
import types
from unittest import TestCase
from yoconfigurator.base import read_config
from yoconfig import configure_services
from pycloudflare.services import CloudFlareService
app_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
conf = read_config(app_dir)
class ZonesTest(TestCase):
def setUp(self):
configure_services('cloudflare', ['cloudflare'], conf.common)
self.cloudflare = CloudFlareService()
def test_iter_zones(self):
zone = next(self.cloudflare.iter_zones())
self.assertIsInstance(zone, dict)
def test_get_zone(self):
zone_id = next(self.cloudflare.iter_zones())['id']
zone = self.cloudflare.get_zone(zone_id)
self.assertIsInstance(zone, dict)
|
Use next to get the zone
|
Use next to get the zone
|
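The motivation for the change: `iter_zones()` is a generator, and a generator cannot be indexed the way `get_zones()[0]` indexes a list, while `next()` pulls only the first item without materialising the whole listing. A stdlib-only sketch of the difference (`iter_items` is a made-up stand-in for an API pager):

def iter_items():
    # Stand-in for a lazy API pager such as iter_zones().
    for i in range(3):
        yield {"id": i}

first = next(iter_items())   # fetches just the first item
print(first["id"])           # 0

# Indexing a generator raises TypeError, so a list-based equivalent
# would have to materialise everything first:
first_again = list(iter_items())[0]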
Python
|
mit
|
yola/pycloudflare,gnowxilef/pycloudflare
|
import os
import types
from unittest import TestCase
from yoconfigurator.base import read_config
from yoconfig import configure_services
from pycloudflare.services import CloudFlareService
app_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
conf = read_config(app_dir)
class ZonesTest(TestCase):
def setUp(self):
configure_services('cloudflare', ['cloudflare'], conf.common)
self.cloudflare = CloudFlareService()
def test_iter_zones(self):
zone = next(self.cloudflare.iter_zones())
self.assertIsInstance(zone, dict)
def test_get_zone(self):
zone_id = self.cloudflare.get_zones()[0]['id']
zone = self.cloudflare.get_zone(zone_id)
self.assertIsInstance(zone, dict)
Use next to get the zone
|
import os
import types
from unittest import TestCase
from yoconfigurator.base import read_config
from yoconfig import configure_services
from pycloudflare.services import CloudFlareService
app_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
conf = read_config(app_dir)
class ZonesTest(TestCase):
def setUp(self):
configure_services('cloudflare', ['cloudflare'], conf.common)
self.cloudflare = CloudFlareService()
def test_iter_zones(self):
zone = next(self.cloudflare.iter_zones())
self.assertIsInstance(zone, dict)
def test_get_zone(self):
zone_id = next(self.cloudflare.iter_zones())['id']
zone = self.cloudflare.get_zone(zone_id)
self.assertIsInstance(zone, dict)
|
<commit_before>import os
import types
from unittest import TestCase
from yoconfigurator.base import read_config
from yoconfig import configure_services
from pycloudflare.services import CloudFlareService
app_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
conf = read_config(app_dir)
class ZonesTest(TestCase):
def setUp(self):
configure_services('cloudflare', ['cloudflare'], conf.common)
self.cloudflare = CloudFlareService()
def test_iter_zones(self):
zone = next(self.cloudflare.iter_zones())
self.assertIsInstance(zone, dict)
def test_get_zone(self):
zone_id = self.cloudflare.get_zones()[0]['id']
zone = self.cloudflare.get_zone(zone_id)
self.assertIsInstance(zone, dict)
<commit_msg>Use next to get the zone<commit_after>
|
import os
import types
from unittest import TestCase
from yoconfigurator.base import read_config
from yoconfig import configure_services
from pycloudflare.services import CloudFlareService
app_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
conf = read_config(app_dir)
class ZonesTest(TestCase):
def setUp(self):
configure_services('cloudflare', ['cloudflare'], conf.common)
self.cloudflare = CloudFlareService()
def test_iter_zones(self):
zone = next(self.cloudflare.iter_zones())
self.assertIsInstance(zone, dict)
def test_get_zone(self):
zone_id = next(self.cloudflare.iter_zones())['id']
zone = self.cloudflare.get_zone(zone_id)
self.assertIsInstance(zone, dict)
|
import os
import types
from unittest import TestCase
from yoconfigurator.base import read_config
from yoconfig import configure_services
from pycloudflare.services import CloudFlareService
app_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
conf = read_config(app_dir)
class ZonesTest(TestCase):
def setUp(self):
configure_services('cloudflare', ['cloudflare'], conf.common)
self.cloudflare = CloudFlareService()
def test_iter_zones(self):
zone = next(self.cloudflare.iter_zones())
self.assertIsInstance(zone, dict)
def test_get_zone(self):
zone_id = self.cloudflare.get_zones()[0]['id']
zone = self.cloudflare.get_zone(zone_id)
self.assertIsInstance(zone, dict)
Use next to get the zoneimport os
import types
from unittest import TestCase
from yoconfigurator.base import read_config
from yoconfig import configure_services
from pycloudflare.services import CloudFlareService
app_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
conf = read_config(app_dir)
class ZonesTest(TestCase):
def setUp(self):
configure_services('cloudflare', ['cloudflare'], conf.common)
self.cloudflare = CloudFlareService()
def test_iter_zones(self):
zone = next(self.cloudflare.iter_zones())
self.assertIsInstance(zone, dict)
def test_get_zone(self):
zone_id = next(self.cloudflare.iter_zones())['id']
zone = self.cloudflare.get_zone(zone_id)
self.assertIsInstance(zone, dict)
|
<commit_before>import os
import types
from unittest import TestCase
from yoconfigurator.base import read_config
from yoconfig import configure_services
from pycloudflare.services import CloudFlareService
app_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
conf = read_config(app_dir)
class ZonesTest(TestCase):
def setUp(self):
configure_services('cloudflare', ['cloudflare'], conf.common)
self.cloudflare = CloudFlareService()
def test_iter_zones(self):
zone = next(self.cloudflare.iter_zones())
self.assertIsInstance(zone, dict)
def test_get_zone(self):
zone_id = self.cloudflare.get_zones()[0]['id']
zone = self.cloudflare.get_zone(zone_id)
self.assertIsInstance(zone, dict)
<commit_msg>Use next to get the zone<commit_after>import os
import types
from unittest import TestCase
from yoconfigurator.base import read_config
from yoconfig import configure_services
from pycloudflare.services import CloudFlareService
app_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
conf = read_config(app_dir)
class ZonesTest(TestCase):
def setUp(self):
configure_services('cloudflare', ['cloudflare'], conf.common)
self.cloudflare = CloudFlareService()
def test_iter_zones(self):
zone = next(self.cloudflare.iter_zones())
self.assertIsInstance(zone, dict)
def test_get_zone(self):
zone_id = next(self.cloudflare.iter_zones())['id']
zone = self.cloudflare.get_zone(zone_id)
self.assertIsInstance(zone, dict)
|
dbb15e4919c5d54d2e755cea700cc287bf164ad4
|
bom_data_parser/climate_data_online.py
|
bom_data_parser/climate_data_online.py
|
import os
import numpy as np
import pandas as pd
import zipfile
from bom_data_parser import mapper
def read_climate_data_online_csv(fname):
df = pd.read_csv(fname, parse_dates={'Date': [2,3,4]})
column_names = []
for column in df.columns:
column_names.append(mapper.convert_key(column))
df.columns = column_names
df = df.set_index('Date')
redundant_columns = [
'Product code',
'Bureau of Meteorology station number'
]
attributes = {}
for column in redundant_columns:
assert np.all(df[column] == df[column][0])
attributes[column] = df[column][0]
df = df.drop(column, axis = 1)
return df, attributes
def read_climate_data_online_zip(fname):
"""
Read data straight out of zipfile.
.. note:: Requires the filename to have been unchanged because it is used for identifying the contained data file.
"""
filename = fname.split('.')[0]
with zipfile.ZipFile('{0}.zip'.format(filename)) as zf:
#notes = zf.read('{0}_Note.txt'.format(filename))
base_name = os.path.basename(filename)
with zf.open('{0}_Data.csv'.format(base_name)) as dfile:
df, attributes = read_climate_data_online_csv(dfile)
return df, attributes
|
import os
import numpy as np
import pandas as pd
import zipfile
from bom_data_parser import mapper
def read_climate_data_online_csv(fname):
df = pd.read_csv(fname, parse_dates={'Date': [2,3,4]})
column_names = []
for column in df.columns:
column_names.append(mapper.convert_key(column))
df.columns = column_names
df = df.set_index('Date')
redundant_columns = [
'Product code',
'Bureau of Meteorology station number'
]
attributes = {}
for column in redundant_columns:
assert np.all(df[column] == df[column][0])
attributes[column] = df[column][0]
df = df.drop(column, axis = 1)
return df, attributes
def read_climate_data_online_zip(fname):
"""
Read data straight out of zipfile.
.. note:: Requires the filename to have been unchanged because it is used for identifying the contained data file.
"""
base_name = os.path.basename(fname).split('.')[0]
with zipfile.ZipFile(fname) as zf:
with zf.open('{0}_Data.csv'.format(base_name)) as dfile:
df, attributes = read_climate_data_online_csv(dfile)
return df, attributes
|
Handle just the zip file name when splitting
|
Handle just the zip file name when splitting
This fixes a bug where the zip file name isn't correctly handled when
splitting on the '.'. This causes a failure if passing a relative path
(e.g. '../'). It would also be a problem if directories had periods in their
name.
|
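The failure mode described above is easy to reproduce with plain string splitting, and `os.path` avoids it. A quick sketch (the file path is invented for illustration):

import os

fname = '../downloads/IDCJAC0009_066062_1800.zip'  # hypothetical relative path

# Old approach: splitting the whole path on '.' trips over the '..' prefix.
print(fname.split('.')[0])                         # '' -- unusable

# Fixed approach: take the basename first, then strip the extension.
base_name = os.path.basename(fname).split('.')[0]
print(base_name)                                   # 'IDCJAC0009_066062_1800'

# os.path.splitext would drop only the final extension, which also
# tolerates periods elsewhere in the file name itself.
print(os.path.splitext(os.path.basename(fname))[0])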
Python
|
bsd-3-clause
|
amacd31/bom_data_parser,amacd31/bom_data_parser
|
import os
import numpy as np
import pandas as pd
import zipfile
from bom_data_parser import mapper
def read_climate_data_online_csv(fname):
df = pd.read_csv(fname, parse_dates={'Date': [2,3,4]})
column_names = []
for column in df.columns:
column_names.append(mapper.convert_key(column))
df.columns = column_names
df = df.set_index('Date')
redundant_columns = [
'Product code',
'Bureau of Meteorology station number'
]
attributes = {}
for column in redundant_columns:
assert np.all(df[column] == df[column][0])
attributes[column] = df[column][0]
df = df.drop(column, axis = 1)
return df, attributes
def read_climate_data_online_zip(fname):
"""
Read data straight out of zipfile.
.. note:: Requires the filename to have been unchanged because it is used for identifying the contained data file.
"""
filename = fname.split('.')[0]
with zipfile.ZipFile('{0}.zip'.format(filename)) as zf:
#notes = zf.read('{0}_Note.txt'.format(filename))
base_name = os.path.basename(filename)
with zf.open('{0}_Data.csv'.format(base_name)) as dfile:
df, attributes = read_climate_data_online_csv(dfile)
return df, attributes
Handle just the zip file name when splitting
This fixes a bug where the zip file name isn't correctly handled when
splitting on the '.'. This causes a failure if passing a relative path
(e.g. '../'). It would also be a problem if directories had periods in their
name.
|
import os
import numpy as np
import pandas as pd
import zipfile
from bom_data_parser import mapper
def read_climate_data_online_csv(fname):
df = pd.read_csv(fname, parse_dates={'Date': [2,3,4]})
column_names = []
for column in df.columns:
column_names.append(mapper.convert_key(column))
df.columns = column_names
df = df.set_index('Date')
redundant_columns = [
'Product code',
'Bureau of Meteorology station number'
]
attributes = {}
for column in redundant_columns:
assert np.all(df[column] == df[column][0])
attributes[column] = df[column][0]
df = df.drop(column, axis = 1)
return df, attributes
def read_climate_data_online_zip(fname):
"""
Read data straight out of zipfile.
.. note:: Requires the filename to have been unchanged because it is used for identifying the contained data file.
"""
base_name = os.path.basename(fname).split('.')[0]
with zipfile.ZipFile(fname) as zf:
with zf.open('{0}_Data.csv'.format(base_name)) as dfile:
df, attributes = read_climate_data_online_csv(dfile)
return df, attributes
|
<commit_before>import os
import numpy as np
import pandas as pd
import zipfile
from bom_data_parser import mapper
def read_climate_data_online_csv(fname):
df = pd.read_csv(fname, parse_dates={'Date': [2,3,4]})
column_names = []
for column in df.columns:
column_names.append(mapper.convert_key(column))
df.columns = column_names
df = df.set_index('Date')
redundant_columns = [
'Product code',
'Bureau of Meteorology station number'
]
attributes = {}
for column in redundant_columns:
assert np.all(df[column] == df[column][0])
attributes[column] = df[column][0]
df = df.drop(column, axis = 1)
return df, attributes
def read_climate_data_online_zip(fname):
"""
Read data straight out of zipfile.
.. note:: Requires the filename to have been unchanged because it is used for identifying the contained data file.
"""
filename = fname.split('.')[0]
with zipfile.ZipFile('{0}.zip'.format(filename)) as zf:
#notes = zf.read('{0}_Note.txt'.format(filename))
base_name = os.path.basename(filename)
with zf.open('{0}_Data.csv'.format(base_name)) as dfile:
df, attributes = read_climate_data_online_csv(dfile)
return df, attributes
<commit_msg>Handle just the zip file name when splitting
This fixes a bug where the zip file name isn't correctly handled when
splitting on the '.'. This causes a failure if passing a relative path
(e.g. '../'). It would also be a problem if directories had periods in their
name.<commit_after>
|
import os
import numpy as np
import pandas as pd
import zipfile
from bom_data_parser import mapper
def read_climate_data_online_csv(fname):
df = pd.read_csv(fname, parse_dates={'Date': [2,3,4]})
column_names = []
for column in df.columns:
column_names.append(mapper.convert_key(column))
df.columns = column_names
df = df.set_index('Date')
redundant_columns = [
'Product code',
'Bureau of Meteorology station number'
]
attributes = {}
for column in redundant_columns:
assert np.all(df[column] == df[column][0])
attributes[column] = df[column][0]
df = df.drop(column, axis = 1)
return df, attributes
def read_climate_data_online_zip(fname):
"""
Read data straight out of zipfile.
.. note:: Requires the filename to have been unchanged because it is used for identifying the contained data file.
"""
base_name = os.path.basename(fname).split('.')[0]
with zipfile.ZipFile(fname) as zf:
with zf.open('{0}_Data.csv'.format(base_name)) as dfile:
df, attributes = read_climate_data_online_csv(dfile)
return df, attributes
|
import os
import numpy as np
import pandas as pd
import zipfile
from bom_data_parser import mapper
def read_climate_data_online_csv(fname):
df = pd.read_csv(fname, parse_dates={'Date': [2,3,4]})
column_names = []
for column in df.columns:
column_names.append(mapper.convert_key(column))
df.columns = column_names
df = df.set_index('Date')
redundant_columns = [
'Product code',
'Bureau of Meteorology station number'
]
attributes = {}
for column in redundant_columns:
assert np.all(df[column] == df[column][0])
attributes[column] = df[column][0]
df = df.drop(column, axis = 1)
return df, attributes
def read_climate_data_online_zip(fname):
"""
Read data straight out of zipfile.
.. note:: Requires the filename to have been unchanged because it is used for identifying the contained data file.
"""
filename = fname.split('.')[0]
with zipfile.ZipFile('{0}.zip'.format(filename)) as zf:
#notes = zf.read('{0}_Note.txt'.format(filename))
base_name = os.path.basename(filename)
with zf.open('{0}_Data.csv'.format(base_name)) as dfile:
df, attributes = read_climate_data_online_csv(dfile)
return df, attributes
Handle just the zip file name when splitting
This fixes a bug where the zip file name isn't correctly handled when
splitting on the '.'. This causes a failure if passing a relative path
(e.g. '../'). It would also be a problem if directories had periods in their
name.import os
import numpy as np
import pandas as pd
import zipfile
from bom_data_parser import mapper
def read_climate_data_online_csv(fname):
df = pd.read_csv(fname, parse_dates={'Date': [2,3,4]})
column_names = []
for column in df.columns:
column_names.append(mapper.convert_key(column))
df.columns = column_names
df = df.set_index('Date')
redundant_columns = [
'Product code',
'Bureau of Meteorology station number'
]
attributes = {}
for column in redundant_columns:
assert np.all(df[column] == df[column][0])
attributes[column] = df[column][0]
df = df.drop(column, axis = 1)
return df, attributes
def read_climate_data_online_zip(fname):
"""
Read data straight out of zipfile.
.. note:: Requires the filename to have been unchanged because it is used for identifying the contained data file.
"""
base_name = os.path.basename(fname).split('.')[0]
with zipfile.ZipFile(fname) as zf:
with zf.open('{0}_Data.csv'.format(base_name)) as dfile:
df, attributes = read_climate_data_online_csv(dfile)
return df, attributes
|
<commit_before>import os
import numpy as np
import pandas as pd
import zipfile
from bom_data_parser import mapper
def read_climate_data_online_csv(fname):
df = pd.read_csv(fname, parse_dates={'Date': [2,3,4]})
column_names = []
for column in df.columns:
column_names.append(mapper.convert_key(column))
df.columns = column_names
df = df.set_index('Date')
redundant_columns = [
'Product code',
'Bureau of Meteorology station number'
]
attributes = {}
for column in redundant_columns:
assert np.all(df[column] == df[column][0])
attributes[column] = df[column][0]
df = df.drop(column, axis = 1)
return df, attributes
def read_climate_data_online_zip(fname):
"""
Read data straight out of zipfile.
.. note:: Requires the filename to have been unchanged because it is used for identifying the contained data file.
"""
filename = fname.split('.')[0]
with zipfile.ZipFile('{0}.zip'.format(filename)) as zf:
#notes = zf.read('{0}_Note.txt'.format(filename))
base_name = os.path.basename(filename)
with zf.open('{0}_Data.csv'.format(base_name)) as dfile:
df, attributes = read_climate_data_online_csv(dfile)
return df, attributes
<commit_msg>Handle just the zip file name when splitting
This fixes a bug where the zip file name isn't correctly handled when
splitting on the '.'. This causes a failure if passing a relative path
(e.g. '../'). It would also be a problem if directories had periods in their
name.<commit_after>import os
import numpy as np
import pandas as pd
import zipfile
from bom_data_parser import mapper
def read_climate_data_online_csv(fname):
df = pd.read_csv(fname, parse_dates={'Date': [2,3,4]})
column_names = []
for column in df.columns:
column_names.append(mapper.convert_key(column))
df.columns = column_names
df = df.set_index('Date')
redundant_columns = [
'Product code',
'Bureau of Meteorology station number'
]
attributes = {}
for column in redundant_columns:
assert np.all(df[column] == df[column][0])
attributes[column] = df[column][0]
df = df.drop(column, axis = 1)
return df, attributes
def read_climate_data_online_zip(fname):
"""
Read data straight out of zipfile.
.. note:: Requires the filename to have been unchanged because it is used for identifying the contained data file.
"""
base_name = os.path.basename(fname).split('.')[0]
with zipfile.ZipFile(fname) as zf:
with zf.open('{0}_Data.csv'.format(base_name)) as dfile:
df, attributes = read_climate_data_online_csv(dfile)
return df, attributes
|
e8030cfb3daee6b7e467f50a215fbffc5ef90223
|
api/preprint_providers/serializers.py
|
api/preprint_providers/serializers.py
|
from rest_framework import serializers as ser
from api.base.serializers import JSONAPISerializer
class PreprintProviderSerializer(JSONAPISerializer):
filterable_fields = frozenset([
'name',
'description',
'id'
])
name = ser.CharField(required=True)
description = ser.CharField(required=False)
id = ser.CharField(max_length=200, source='_id')
class Meta:
type_ = 'preprint_providers'
|
from rest_framework import serializers as ser
from website.settings import API_DOMAIN
from api.base.settings.defaults import API_BASE
from api.base.serializers import JSONAPISerializer, LinksField
class PreprintProviderSerializer(JSONAPISerializer):
filterable_fields = frozenset([
'name',
'description',
'id'
])
name = ser.CharField(required=True)
description = ser.CharField(required=False)
id = ser.CharField(max_length=200, source='_id')
links = LinksField({
'self': 'get_absolute_url',
'preprints': 'preprint_links'
})
class Meta:
type_ = 'preprint_providers'
def get_absolute_url(self, obj):
return obj.absolute_api_v2_url
def preprint_links(self, obj):
return '{}{}preprint_providers/{}/preprints/'.format(API_DOMAIN, API_BASE, obj._id)
|
Add links field and related methods to link to a given provider's preprints from the preprint provider serializer
|
Add links field and related methods to link to a given provider's preprints from the preprint provider serializer
|
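For reference, the `preprint_links` method added below is plain string formatting; under assumed settings values it renders a URL like the one printed here (the domain, API base, and provider id are all hypothetical):

API_DOMAIN = 'https://api.osf.io/'  # hypothetical website.settings.API_DOMAIN
API_BASE = 'v2/'                    # hypothetical api.base.settings.defaults.API_BASE
provider_id = 'socarxiv'            # hypothetical provider _id

url = '{}{}preprint_providers/{}/preprints/'.format(API_DOMAIN, API_BASE, provider_id)
print(url)  # https://api.osf.io/v2/preprint_providers/socarxiv/preprints/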
Python
|
apache-2.0
|
CenterForOpenScience/osf.io,icereval/osf.io,laurenrevere/osf.io,HalcyonChimera/osf.io,cslzchen/osf.io,binoculars/osf.io,acshi/osf.io,chrisseto/osf.io,adlius/osf.io,cwisecarver/osf.io,cslzchen/osf.io,erinspace/osf.io,mluo613/osf.io,mluo613/osf.io,mluo613/osf.io,caseyrollins/osf.io,rdhyee/osf.io,aaxelb/osf.io,monikagrabowska/osf.io,cslzchen/osf.io,alexschiller/osf.io,acshi/osf.io,adlius/osf.io,monikagrabowska/osf.io,crcresearch/osf.io,binoculars/osf.io,felliott/osf.io,alexschiller/osf.io,mattclark/osf.io,TomBaxter/osf.io,mfraezz/osf.io,Nesiehr/osf.io,emetsger/osf.io,caseyrollins/osf.io,TomBaxter/osf.io,adlius/osf.io,caseyrollins/osf.io,Nesiehr/osf.io,HalcyonChimera/osf.io,chennan47/osf.io,brianjgeiger/osf.io,emetsger/osf.io,emetsger/osf.io,crcresearch/osf.io,hmoco/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,emetsger/osf.io,crcresearch/osf.io,pattisdr/osf.io,mattclark/osf.io,monikagrabowska/osf.io,samchrisinger/osf.io,monikagrabowska/osf.io,baylee-d/osf.io,Nesiehr/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io,laurenrevere/osf.io,acshi/osf.io,caneruguz/osf.io,icereval/osf.io,pattisdr/osf.io,Nesiehr/osf.io,chrisseto/osf.io,leb2dg/osf.io,Johnetordoff/osf.io,rdhyee/osf.io,leb2dg/osf.io,binoculars/osf.io,cwisecarver/osf.io,monikagrabowska/osf.io,felliott/osf.io,cwisecarver/osf.io,CenterForOpenScience/osf.io,chrisseto/osf.io,saradbowman/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,laurenrevere/osf.io,caneruguz/osf.io,mfraezz/osf.io,mluo613/osf.io,acshi/osf.io,chennan47/osf.io,sloria/osf.io,hmoco/osf.io,sloria/osf.io,mfraezz/osf.io,cwisecarver/osf.io,mluo613/osf.io,felliott/osf.io,leb2dg/osf.io,felliott/osf.io,brianjgeiger/osf.io,pattisdr/osf.io,adlius/osf.io,saradbowman/osf.io,chennan47/osf.io,rdhyee/osf.io,erinspace/osf.io,HalcyonChimera/osf.io,caneruguz/osf.io,baylee-d/osf.io,baylee-d/osf.io,caneruguz/osf.io,samchrisinger/osf.io,rdhyee/osf.io,aaxelb/osf.io,erinspace/osf.io,samchrisinger/osf.io,TomBaxter/osf.io,acshi/osf.io,mfraezz/osf.io,icereval/osf.io,hmoco/osf.io,chrisseto/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,alexschiller/osf.io,alexschiller/osf.io,sloria/osf.io,CenterForOpenScience/osf.io,hmoco/osf.io,samchrisinger/osf.io,Johnetordoff/osf.io,mattclark/osf.io,alexschiller/osf.io,leb2dg/osf.io,brianjgeiger/osf.io
|
from rest_framework import serializers as ser
from api.base.serializers import JSONAPISerializer
class PreprintProviderSerializer(JSONAPISerializer):
filterable_fields = frozenset([
'name',
'description',
'id'
])
name = ser.CharField(required=True)
description = ser.CharField(required=False)
id = ser.CharField(max_length=200, source='_id')
class Meta:
type_ = 'preprint_providers'
Add links field and related methods to link to a given provider's preprints from the preprint provider serializer
|
from rest_framework import serializers as ser
from website.settings import API_DOMAIN
from api.base.settings.defaults import API_BASE
from api.base.serializers import JSONAPISerializer, LinksField
class PreprintProviderSerializer(JSONAPISerializer):
filterable_fields = frozenset([
'name',
'description',
'id'
])
name = ser.CharField(required=True)
description = ser.CharField(required=False)
id = ser.CharField(max_length=200, source='_id')
links = LinksField({
'self': 'get_absolute_url',
'preprints': 'preprint_links'
})
class Meta:
type_ = 'preprint_providers'
def get_absolute_url(self, obj):
return obj.absolute_api_v2_url
def preprint_links(self, obj):
return '{}{}preprint_providers/{}/preprints/'.format(API_DOMAIN, API_BASE, obj._id)
|
<commit_before>from rest_framework import serializers as ser
from api.base.serializers import JSONAPISerializer
class PreprintProviderSerializer(JSONAPISerializer):
filterable_fields = frozenset([
'name',
'description',
'id'
])
name = ser.CharField(required=True)
description = ser.CharField(required=False)
id = ser.CharField(max_length=200, source='_id')
class Meta:
type_ = 'preprint_providers'
<commit_msg>Add links field and related methods to link to a given provider's preprints from the preprint provider serializer<commit_after>
|
from rest_framework import serializers as ser
from website.settings import API_DOMAIN
from api.base.settings.defaults import API_BASE
from api.base.serializers import JSONAPISerializer, LinksField
class PreprintProviderSerializer(JSONAPISerializer):
filterable_fields = frozenset([
'name',
'description',
'id'
])
name = ser.CharField(required=True)
description = ser.CharField(required=False)
id = ser.CharField(max_length=200, source='_id')
links = LinksField({
'self': 'get_absolute_url',
'preprints': 'preprint_links'
})
class Meta:
type_ = 'preprint_providers'
def get_absolute_url(self, obj):
return obj.absolute_api_v2_url
def preprint_links(self, obj):
return '{}{}preprint_providers/{}/preprints/'.format(API_DOMAIN, API_BASE, obj._id)
|
from rest_framework import serializers as ser
from api.base.serializers import JSONAPISerializer
class PreprintProviderSerializer(JSONAPISerializer):
filterable_fields = frozenset([
'name',
'description',
'id'
])
name = ser.CharField(required=True)
description = ser.CharField(required=False)
id = ser.CharField(max_length=200, source='_id')
class Meta:
type_ = 'preprint_providers'
Add links field and related methods to link to a given provider's preprints from the preprint provider serializerfrom rest_framework import serializers as ser
from website.settings import API_DOMAIN
from api.base.settings.defaults import API_BASE
from api.base.serializers import JSONAPISerializer, LinksField
class PreprintProviderSerializer(JSONAPISerializer):
filterable_fields = frozenset([
'name',
'description',
'id'
])
name = ser.CharField(required=True)
description = ser.CharField(required=False)
id = ser.CharField(max_length=200, source='_id')
links = LinksField({
'self': 'get_absolute_url',
'preprints': 'preprint_links'
})
class Meta:
type_ = 'preprint_providers'
def get_absolute_url(self, obj):
return obj.absolute_api_v2_url
def preprint_links(self, obj):
return '{}{}preprint_providers/{}/preprints/'.format(API_DOMAIN, API_BASE, obj._id)
|
<commit_before>from rest_framework import serializers as ser
from api.base.serializers import JSONAPISerializer
class PreprintProviderSerializer(JSONAPISerializer):
filterable_fields = frozenset([
'name',
'description',
'id'
])
name = ser.CharField(required=True)
description = ser.CharField(required=False)
id = ser.CharField(max_length=200, source='_id')
class Meta:
type_ = 'preprint_providers'
<commit_msg>Add links field and related methods to link to a given provider's preprints from the preprint provider serializer<commit_after>from rest_framework import serializers as ser
from website.settings import API_DOMAIN
from api.base.settings.defaults import API_BASE
from api.base.serializers import JSONAPISerializer, LinksField
class PreprintProviderSerializer(JSONAPISerializer):
filterable_fields = frozenset([
'name',
'description',
'id'
])
name = ser.CharField(required=True)
description = ser.CharField(required=False)
id = ser.CharField(max_length=200, source='_id')
links = LinksField({
'self': 'get_absolute_url',
'preprints': 'preprint_links'
})
class Meta:
type_ = 'preprint_providers'
def get_absolute_url(self, obj):
return obj.absolute_api_v2_url
def preprint_links(self, obj):
return '{}{}preprint_providers/{}/preprints/'.format(API_DOMAIN, API_BASE, obj._id)
|
9be04ea1030b423b7414dbd386ae2db2f4761f07
|
third_party/bunch/bunch/python3_compat.py
|
third_party/bunch/bunch/python3_compat.py
|
import platform
_IS_PYTHON_3 = (platform.version() >= '3')
identity = lambda x : x
# u('string') replaces the forwards-incompatible u'string'
if _IS_PYTHON_3:
u = identity
else:
import codecs
def u(string):
return codecs.unicode_escape_decode(string)[0]
# dict.iteritems(), dict.iterkeys() is also incompatible
if _IS_PYTHON_3:
iteritems = dict.items
iterkeys = dict.keys
else:
iteritems = dict.iteritems
iterkeys = dict.iterkeys
|
import sys
_IS_PYTHON_3 = (sys.version[0] >= '3')
identity = lambda x : x
# u('string') replaces the forwards-incompatible u'string'
if _IS_PYTHON_3:
u = identity
else:
import codecs
def u(string):
return codecs.unicode_escape_decode(string)[0]
# dict.iteritems(), dict.iterkeys() is also incompatible
if _IS_PYTHON_3:
iteritems = dict.items
iterkeys = dict.keys
else:
iteritems = dict.iteritems
iterkeys = dict.iterkeys
|
Fix Python 3 version detection in bunch
|
Fix Python 3 version detection in bunch
|
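The underlying mix-up: `platform.version()` reports the operating system build string, not the interpreter version, so comparing it to `'3'` never actually detected Python 3. A small sketch contrasting the checks (the commentary assumes a CPython 3 interpreter):

import platform
import sys

# OS release string, e.g. '#1 SMP ...' on Linux -- says nothing
# about the Python major version.
print(platform.version())

# Interpreter version string, e.g. '3.5.1 (default, ...)'; its first
# character is the major version, which is what the fix compares.
print(sys.version[0] >= '3')

# sys.version_info is the canonical, future-proof comparison
# (string comparison would misclassify a hypothetical Python 10).
print(sys.version_info[0] >= 3)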
Python
|
apache-2.0
|
mbrukman/cloud-launcher,mbrukman/cloud-launcher,mbrukman/cloud-launcher,mbrukman/cloud-launcher
|
import platform
_IS_PYTHON_3 = (platform.version() >= '3')
identity = lambda x : x
# u('string') replaces the forwards-incompatible u'string'
if _IS_PYTHON_3:
u = identity
else:
import codecs
def u(string):
return codecs.unicode_escape_decode(string)[0]
# dict.iteritems(), dict.iterkeys() is also incompatible
if _IS_PYTHON_3:
iteritems = dict.items
iterkeys = dict.keys
else:
iteritems = dict.iteritems
iterkeys = dict.iterkeys
Fix Python 3 version detection in bunch
|
import sys
_IS_PYTHON_3 = (sys.version[0] >= '3')
identity = lambda x : x
# u('string') replaces the forwards-incompatible u'string'
if _IS_PYTHON_3:
u = identity
else:
import codecs
def u(string):
return codecs.unicode_escape_decode(string)[0]
# dict.iteritems(), dict.iterkeys() is also incompatible
if _IS_PYTHON_3:
iteritems = dict.items
iterkeys = dict.keys
else:
iteritems = dict.iteritems
iterkeys = dict.iterkeys
|
<commit_before>import platform
_IS_PYTHON_3 = (platform.version() >= '3')
identity = lambda x : x
# u('string') replaces the forwards-incompatible u'string'
if _IS_PYTHON_3:
u = identity
else:
import codecs
def u(string):
return codecs.unicode_escape_decode(string)[0]
# dict.iteritems(), dict.iterkeys() is also incompatible
if _IS_PYTHON_3:
iteritems = dict.items
iterkeys = dict.keys
else:
iteritems = dict.iteritems
iterkeys = dict.iterkeys
<commit_msg>Fix Python 3 version detection in bunch<commit_after>
|
import sys
_IS_PYTHON_3 = (sys.version[0] >= '3')
identity = lambda x : x
# u('string') replaces the forwards-incompatible u'string'
if _IS_PYTHON_3:
u = identity
else:
import codecs
def u(string):
return codecs.unicode_escape_decode(string)[0]
# dict.iteritems(), dict.iterkeys() is also incompatible
if _IS_PYTHON_3:
iteritems = dict.items
iterkeys = dict.keys
else:
iteritems = dict.iteritems
iterkeys = dict.iterkeys
|
import platform
_IS_PYTHON_3 = (platform.version() >= '3')
identity = lambda x : x
# u('string') replaces the forwards-incompatible u'string'
if _IS_PYTHON_3:
u = identity
else:
import codecs
def u(string):
return codecs.unicode_escape_decode(string)[0]
# dict.iteritems(), dict.iterkeys() is also incompatible
if _IS_PYTHON_3:
iteritems = dict.items
iterkeys = dict.keys
else:
iteritems = dict.iteritems
iterkeys = dict.iterkeys
Fix Python 3 version detection in bunchimport sys
_IS_PYTHON_3 = (sys.version[0] >= '3')
identity = lambda x : x
# u('string') replaces the forwards-incompatible u'string'
if _IS_PYTHON_3:
u = identity
else:
import codecs
def u(string):
return codecs.unicode_escape_decode(string)[0]
# dict.iteritems(), dict.iterkeys() is also incompatible
if _IS_PYTHON_3:
iteritems = dict.items
iterkeys = dict.keys
else:
iteritems = dict.iteritems
iterkeys = dict.iterkeys
|
<commit_before>import platform
_IS_PYTHON_3 = (platform.version() >= '3')
identity = lambda x : x
# u('string') replaces the forwards-incompatible u'string'
if _IS_PYTHON_3:
u = identity
else:
import codecs
def u(string):
return codecs.unicode_escape_decode(string)[0]
# dict.iteritems(), dict.iterkeys() is also incompatible
if _IS_PYTHON_3:
iteritems = dict.items
iterkeys = dict.keys
else:
iteritems = dict.iteritems
iterkeys = dict.iterkeys
<commit_msg>Fix Python 3 version detection in bunch<commit_after>import sys
_IS_PYTHON_3 = (sys.version[0] >= '3')
identity = lambda x : x
# u('string') replaces the forwards-incompatible u'string'
if _IS_PYTHON_3:
u = identity
else:
import codecs
def u(string):
return codecs.unicode_escape_decode(string)[0]
# dict.iteritems(), dict.iterkeys() is also incompatible
if _IS_PYTHON_3:
iteritems = dict.items
iterkeys = dict.keys
else:
iteritems = dict.iteritems
iterkeys = dict.iterkeys
|
dd171296a980dcc0349cf54b2afd6d2399cfb981
|
numba/tests/matmul_usecase.py
|
numba/tests/matmul_usecase.py
|
import sys
try:
import scipy.linalg.cython_blas
has_blas = True
except ImportError:
has_blas = False
import numba.unittest_support as unittest
# The "@" operator only compiles on Python 3.5+.
has_matmul = sys.version_info >= (3, 5)
if has_matmul:
code = """if 1:
def matmul_usecase(x, y):
return x @ y
def imatmul_usecase(x, y):
x @= y
return x
"""
co = compile(code, "<string>", "exec")
ns = {}
eval(co, globals(), ns)
globals().update(ns)
del code, co, ns
else:
matmul_usecase = None
imatmul_usecase = None
needs_matmul = unittest.skipUnless(
has_matmul, "the matrix multiplication operator needs Python 3.5+")
needs_blas = unittest.skipUnless(has_blas, "BLAS needs Scipy 0.16+")
class DumbMatrix(object):
def __init__(self, value):
self.value = value
def __matmul__(self, other):
if isinstance(other, DumbMatrix):
return DumbMatrix(self.value * other.value)
return NotImplemented
def __imatmul__(self, other):
if isinstance(other, DumbMatrix):
self.value *= other.value
return self
return NotImplemented
|
import sys
try:
import scipy.linalg.cython_blas
has_blas = True
except ImportError:
has_blas = False
import numba.unittest_support as unittest
from numba.numpy_support import version as numpy_version
# The "@" operator only compiles on Python 3.5+.
# It is only supported by Numpy 1.10+.
has_matmul = sys.version_info >= (3, 5) and numpy_version >= (1, 10)
if has_matmul:
code = """if 1:
def matmul_usecase(x, y):
return x @ y
def imatmul_usecase(x, y):
x @= y
return x
"""
co = compile(code, "<string>", "exec")
ns = {}
eval(co, globals(), ns)
globals().update(ns)
del code, co, ns
else:
matmul_usecase = None
imatmul_usecase = None
needs_matmul = unittest.skipUnless(
has_matmul,
"the matrix multiplication operator needs Python 3.5+ and Numpy 1.10+")
needs_blas = unittest.skipUnless(has_blas, "BLAS needs Scipy 0.16+")
class DumbMatrix(object):
def __init__(self, value):
self.value = value
def __matmul__(self, other):
if isinstance(other, DumbMatrix):
return DumbMatrix(self.value * other.value)
return NotImplemented
def __imatmul__(self, other):
if isinstance(other, DumbMatrix):
self.value *= other.value
return self
return NotImplemented
|
Fix test failure on Numpy 1.9 and Python 3.5
|
Fix test failure on Numpy 1.9 and Python 3.5
The "@" operator between arrays is only supported by Numpy 1.10+.
|
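The guard this commit introduces is a pair of tuple comparisons; Python compares tuples element-wise, which makes this the usual idiom for gating a feature on minimum versions. A minimal sketch (`numpy_version` below is a stand-in for `numba.numpy_support.version`):

import sys

# Element-wise comparison: (3, 4, 9) >= (3, 5) is False,
# while (3, 5, 0) >= (3, 5) is True.
python_ok = sys.version_info >= (3, 5)

numpy_version = (1, 10)              # hypothetical stand-in value
numpy_ok = numpy_version >= (1, 10)

has_matmul = python_ok and numpy_ok
print(has_matmul)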
Python
|
bsd-2-clause
|
numba/numba,cpcloud/numba,stuartarchibald/numba,numba/numba,stefanseefeld/numba,gmarkall/numba,sklam/numba,stefanseefeld/numba,stefanseefeld/numba,jriehl/numba,IntelLabs/numba,seibert/numba,IntelLabs/numba,seibert/numba,stuartarchibald/numba,stonebig/numba,cpcloud/numba,sklam/numba,cpcloud/numba,stefanseefeld/numba,sklam/numba,seibert/numba,gmarkall/numba,sklam/numba,IntelLabs/numba,gmarkall/numba,jriehl/numba,stonebig/numba,stonebig/numba,stuartarchibald/numba,seibert/numba,seibert/numba,jriehl/numba,stuartarchibald/numba,stonebig/numba,numba/numba,IntelLabs/numba,jriehl/numba,numba/numba,sklam/numba,gmarkall/numba,cpcloud/numba,cpcloud/numba,stonebig/numba,jriehl/numba,stefanseefeld/numba,numba/numba,stuartarchibald/numba,IntelLabs/numba,gmarkall/numba
|
import sys
try:
import scipy.linalg.cython_blas
has_blas = True
except ImportError:
has_blas = False
import numba.unittest_support as unittest
# The "@" operator only compiles on Python 3.5+.
has_matmul = sys.version_info >= (3, 5)
if has_matmul:
code = """if 1:
def matmul_usecase(x, y):
return x @ y
def imatmul_usecase(x, y):
x @= y
return x
"""
co = compile(code, "<string>", "exec")
ns = {}
eval(co, globals(), ns)
globals().update(ns)
del code, co, ns
else:
matmul_usecase = None
imatmul_usecase = None
needs_matmul = unittest.skipUnless(
has_matmul, "the matrix multiplication operator needs Python 3.5+")
needs_blas = unittest.skipUnless(has_blas, "BLAS needs Scipy 0.16+")
class DumbMatrix(object):
def __init__(self, value):
self.value = value
def __matmul__(self, other):
if isinstance(other, DumbMatrix):
return DumbMatrix(self.value * other.value)
return NotImplemented
def __imatmul__(self, other):
if isinstance(other, DumbMatrix):
self.value *= other.value
return self
return NotImplemented
Fix test failure on Numpy 1.9 and Python 3.5
The "@" operator between arrays is only supported by Numpy 1.10+.
|
import sys
try:
import scipy.linalg.cython_blas
has_blas = True
except ImportError:
has_blas = False
import numba.unittest_support as unittest
from numba.numpy_support import version as numpy_version
# The "@" operator only compiles on Python 3.5+.
# It is only supported by Numpy 1.10+.
has_matmul = sys.version_info >= (3, 5) and numpy_version >= (1, 10)
if has_matmul:
code = """if 1:
def matmul_usecase(x, y):
return x @ y
def imatmul_usecase(x, y):
x @= y
return x
"""
co = compile(code, "<string>", "exec")
ns = {}
eval(co, globals(), ns)
globals().update(ns)
del code, co, ns
else:
matmul_usecase = None
imatmul_usecase = None
needs_matmul = unittest.skipUnless(
has_matmul,
"the matrix multiplication operator needs Python 3.5+ and Numpy 1.10+")
needs_blas = unittest.skipUnless(has_blas, "BLAS needs Scipy 0.16+")
class DumbMatrix(object):
def __init__(self, value):
self.value = value
def __matmul__(self, other):
if isinstance(other, DumbMatrix):
return DumbMatrix(self.value * other.value)
return NotImplemented
def __imatmul__(self, other):
if isinstance(other, DumbMatrix):
self.value *= other.value
return self
return NotImplemented
|
<commit_before>import sys
try:
import scipy.linalg.cython_blas
has_blas = True
except ImportError:
has_blas = False
import numba.unittest_support as unittest
# The "@" operator only compiles on Python 3.5+.
has_matmul = sys.version_info >= (3, 5)
if has_matmul:
code = """if 1:
def matmul_usecase(x, y):
return x @ y
def imatmul_usecase(x, y):
x @= y
return x
"""
co = compile(code, "<string>", "exec")
ns = {}
eval(co, globals(), ns)
globals().update(ns)
del code, co, ns
else:
matmul_usecase = None
imatmul_usecase = None
needs_matmul = unittest.skipUnless(
has_matmul, "the matrix multiplication operator needs Python 3.5+")
needs_blas = unittest.skipUnless(has_blas, "BLAS needs Scipy 0.16+")
class DumbMatrix(object):
def __init__(self, value):
self.value = value
def __matmul__(self, other):
if isinstance(other, DumbMatrix):
return DumbMatrix(self.value * other.value)
return NotImplemented
def __imatmul__(self, other):
if isinstance(other, DumbMatrix):
self.value *= other.value
return self
return NotImplemented
<commit_msg>Fix test failure on Numpy 1.9 and Python 3.5
The "@" operator between arrays is only supported by Numpy 1.10+.<commit_after>
|
import sys
try:
import scipy.linalg.cython_blas
has_blas = True
except ImportError:
has_blas = False
import numba.unittest_support as unittest
from numba.numpy_support import version as numpy_version
# The "@" operator only compiles on Python 3.5+.
# It is only supported by Numpy 1.10+.
has_matmul = sys.version_info >= (3, 5) and numpy_version >= (1, 10)
if has_matmul:
code = """if 1:
def matmul_usecase(x, y):
return x @ y
def imatmul_usecase(x, y):
x @= y
return x
"""
co = compile(code, "<string>", "exec")
ns = {}
eval(co, globals(), ns)
globals().update(ns)
del code, co, ns
else:
matmul_usecase = None
imatmul_usecase = None
needs_matmul = unittest.skipUnless(
has_matmul,
"the matrix multiplication operator needs Python 3.5+ and Numpy 1.10+")
needs_blas = unittest.skipUnless(has_blas, "BLAS needs Scipy 0.16+")
class DumbMatrix(object):
def __init__(self, value):
self.value = value
def __matmul__(self, other):
if isinstance(other, DumbMatrix):
return DumbMatrix(self.value * other.value)
return NotImplemented
def __imatmul__(self, other):
if isinstance(other, DumbMatrix):
self.value *= other.value
return self
return NotImplemented
|
import sys
try:
import scipy.linalg.cython_blas
has_blas = True
except ImportError:
has_blas = False
import numba.unittest_support as unittest
# The "@" operator only compiles on Python 3.5+.
has_matmul = sys.version_info >= (3, 5)
if has_matmul:
code = """if 1:
def matmul_usecase(x, y):
return x @ y
def imatmul_usecase(x, y):
x @= y
return x
"""
co = compile(code, "<string>", "exec")
ns = {}
eval(co, globals(), ns)
globals().update(ns)
del code, co, ns
else:
matmul_usecase = None
imatmul_usecase = None
needs_matmul = unittest.skipUnless(
has_matmul, "the matrix multiplication operator needs Python 3.5+")
needs_blas = unittest.skipUnless(has_blas, "BLAS needs Scipy 0.16+")
class DumbMatrix(object):
def __init__(self, value):
self.value = value
def __matmul__(self, other):
if isinstance(other, DumbMatrix):
return DumbMatrix(self.value * other.value)
return NotImplemented
def __imatmul__(self, other):
if isinstance(other, DumbMatrix):
self.value *= other.value
return self
return NotImplemented
Fix test failure on Numpy 1.9 and Python 3.5
The "@" operator between arrays is only supported by Numpy 1.10+.import sys
try:
import scipy.linalg.cython_blas
has_blas = True
except ImportError:
has_blas = False
import numba.unittest_support as unittest
from numba.numpy_support import version as numpy_version
# The "@" operator only compiles on Python 3.5+.
# It is only supported by Numpy 1.10+.
has_matmul = sys.version_info >= (3, 5) and numpy_version >= (1, 10)
if has_matmul:
code = """if 1:
def matmul_usecase(x, y):
return x @ y
def imatmul_usecase(x, y):
x @= y
return x
"""
co = compile(code, "<string>", "exec")
ns = {}
eval(co, globals(), ns)
globals().update(ns)
del code, co, ns
else:
matmul_usecase = None
imatmul_usecase = None
needs_matmul = unittest.skipUnless(
has_matmul,
"the matrix multiplication operator needs Python 3.5+ and Numpy 1.10+")
needs_blas = unittest.skipUnless(has_blas, "BLAS needs Scipy 0.16+")
class DumbMatrix(object):
def __init__(self, value):
self.value = value
def __matmul__(self, other):
if isinstance(other, DumbMatrix):
return DumbMatrix(self.value * other.value)
return NotImplemented
def __imatmul__(self, other):
if isinstance(other, DumbMatrix):
self.value *= other.value
return self
return NotImplemented
|
<commit_before>import sys
try:
import scipy.linalg.cython_blas
has_blas = True
except ImportError:
has_blas = False
import numba.unittest_support as unittest
# The "@" operator only compiles on Python 3.5+.
has_matmul = sys.version_info >= (3, 5)
if has_matmul:
code = """if 1:
def matmul_usecase(x, y):
return x @ y
def imatmul_usecase(x, y):
x @= y
return x
"""
co = compile(code, "<string>", "exec")
ns = {}
eval(co, globals(), ns)
globals().update(ns)
del code, co, ns
else:
matmul_usecase = None
imatmul_usecase = None
needs_matmul = unittest.skipUnless(
has_matmul, "the matrix multiplication operator needs Python 3.5+")
needs_blas = unittest.skipUnless(has_blas, "BLAS needs Scipy 0.16+")
class DumbMatrix(object):
def __init__(self, value):
self.value = value
def __matmul__(self, other):
if isinstance(other, DumbMatrix):
return DumbMatrix(self.value * other.value)
return NotImplemented
def __imatmul__(self, other):
if isinstance(other, DumbMatrix):
self.value *= other.value
return self
return NotImplemented
<commit_msg>Fix test failure on Numpy 1.9 and Python 3.5
The "@" operator between arrays is only supported by Numpy 1.10+.<commit_after>import sys
try:
import scipy.linalg.cython_blas
has_blas = True
except ImportError:
has_blas = False
import numba.unittest_support as unittest
from numba.numpy_support import version as numpy_version
# The "@" operator only compiles on Python 3.5+.
# It is only supported by Numpy 1.10+.
has_matmul = sys.version_info >= (3, 5) and numpy_version >= (1, 10)
if has_matmul:
code = """if 1:
def matmul_usecase(x, y):
return x @ y
def imatmul_usecase(x, y):
x @= y
return x
"""
co = compile(code, "<string>", "exec")
ns = {}
eval(co, globals(), ns)
globals().update(ns)
del code, co, ns
else:
matmul_usecase = None
imatmul_usecase = None
needs_matmul = unittest.skipUnless(
has_matmul,
"the matrix multiplication operator needs Python 3.5+ and Numpy 1.10+")
needs_blas = unittest.skipUnless(has_blas, "BLAS needs Scipy 0.16+")
class DumbMatrix(object):
def __init__(self, value):
self.value = value
def __matmul__(self, other):
if isinstance(other, DumbMatrix):
return DumbMatrix(self.value * other.value)
return NotImplemented
def __imatmul__(self, other):
if isinstance(other, DumbMatrix):
self.value *= other.value
return self
return NotImplemented
|
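For readers skimming the record above: the added guard compares a (major, minor) version tuple, which is what numba.numpy_support.version exposes. A minimal standalone sketch of the same gate, parsing numpy.__version__ directly (that parsing is this note's assumption, not numba's code):

import sys
import numpy

# Reduce numpy.__version__ (e.g. "1.10.4") to a comparable (major, minor) tuple.
numpy_version = tuple(int(part) for part in numpy.__version__.split(".")[:2])
has_matmul = sys.version_info >= (3, 5) and numpy_version >= (1, 10)
print(has_matmul)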
4e62f8292802aade03637dbbd05be56b9fad7d61
|
utils/snapshot_widgets.py
|
utils/snapshot_widgets.py
|
"""
Alternative interact function that creates a single static figure that can
be viewed online, e.g. on Github or nbviewer, or for use with nbconvert.
"""
from __future__ import print_function
print("Will create static figures with single value of parameters")
def interact(f, **kwargs):
fargs = {}
for key in kwargs.keys():
try:
fargs[key] = kwargs[key].value
except:
pass # if initial value not set for this parameter
f(**fargs)
|
"""
Alternative interact function that creates a single static figure that can
be viewed online, e.g. on Github or nbviewer, or for use with nbconvert.
"""
from __future__ import print_function
print("Will create static figures with single value of parameters")
def interact(f, **kwargs):
fargs = {}
for key in kwargs.keys():
try:
fargs[key] = kwargs[key].value
except:
pass # if initial value not set for this parameter
if ('t' in fargs.keys()):
if fargs['t'] == 0:
fargs['t'] = 0.2
f(**fargs)
|
Use t=0.2 instead of t=0 for static widget output.
|
Use t=0.2 instead of t=0 for static widget output.
|
Python
|
bsd-3-clause
|
maojrs/riemann_book,maojrs/riemann_book,maojrs/riemann_book
|
"""
Alternative interact function that creates a single static figure that can
be viewed online, e.g. on Github or nbviewer, or for use with nbconvert.
"""
from __future__ import print_function
print("Will create static figures with single value of parameters")
def interact(f, **kwargs):
fargs = {}
for key in kwargs.keys():
try:
fargs[key] = kwargs[key].value
except:
pass # if initial value not set for this parameter
f(**fargs)
Use t=0.2 instead of t=0 for static widget output.
|
"""
Alternative interact function that creates a single static figure that can
be viewed online, e.g. on Github or nbviewer, or for use with nbconvert.
"""
from __future__ import print_function
print("Will create static figures with single value of parameters")
def interact(f, **kwargs):
fargs = {}
for key in kwargs.keys():
try:
fargs[key] = kwargs[key].value
except:
pass # if initial value not set for this parameter
if ('t' in fargs.keys()):
if fargs['t'] == 0:
fargs['t'] = 0.2
f(**fargs)
|
<commit_before>
"""
Alternative interact function that creates a single static figure that can
be viewed online, e.g. on Github or nbviewer, or for use with nbconvert.
"""
from __future__ import print_function
print("Will create static figures with single value of parameters")
def interact(f, **kwargs):
fargs = {}
for key in kwargs.keys():
try:
fargs[key] = kwargs[key].value
except:
pass # if initial value not set for this parameter
f(**fargs)
<commit_msg>Use t=0.2 instead of t=0 for static widget output.<commit_after>
|
"""
Alternative interact function that creates a single static figure that can
be viewed online, e.g. on Github or nbviewer, or for use with nbconvert.
"""
from __future__ import print_function
print("Will create static figures with single value of parameters")
def interact(f, **kwargs):
fargs = {}
for key in kwargs.keys():
try:
fargs[key] = kwargs[key].value
except:
pass # if initial value not set for this parameter
if ('t' in fargs.keys()):
if fargs['t'] == 0:
fargs['t'] = 0.2
f(**fargs)
|
"""
Alternative interact function that creates a single static figure that can
be viewed online, e.g. on Github or nbviewer, or for use with nbconvert.
"""
from __future__ import print_function
print("Will create static figures with single value of parameters")
def interact(f, **kwargs):
fargs = {}
for key in kwargs.keys():
try:
fargs[key] = kwargs[key].value
except:
pass # if initial value not set for this parameter
f(**fargs)
Use t=0.2 instead of t=0 for static widget output.
"""
Alternative interact function that creates a single static figure that can
be viewed online, e.g. on Github or nbviewer, or for use with nbconvert.
"""
from __future__ import print_function
print("Will create static figures with single value of parameters")
def interact(f, **kwargs):
fargs = {}
for key in kwargs.keys():
try:
fargs[key] = kwargs[key].value
except:
pass # if initial value not set for this parameter
if ('t' in fargs.keys()):
if fargs['t'] == 0:
fargs['t'] = 0.2
f(**fargs)
|
<commit_before>
"""
Alternative interact function that creates a single static figure that can
be viewed online, e.g. on Github or nbviewer, or for use with nbconvert.
"""
from __future__ import print_function
print("Will create static figures with single value of parameters")
def interact(f, **kwargs):
fargs = {}
for key in kwargs.keys():
try:
fargs[key] = kwargs[key].value
except:
pass # if initial value not set for this parameter
f(**fargs)
<commit_msg>Use t=0.2 instead of t=0 for static widget output.<commit_after>
"""
Alternative interact function that creates a single static figure that can
be viewed online, e.g. on Github or nbviewer, or for use with nbconvert.
"""
from __future__ import print_function
print("Will create static figures with single value of parameters")
def interact(f, **kwargs):
fargs = {}
for key in kwargs.keys():
try:
fargs[key] = kwargs[key].value
except:
pass # if initial value not set for this parameter
if ('t' in fargs.keys()):
if fargs['t'] == 0:
fargs['t'] = 0.2
f(**fargs)
|
6e5b13859b8a795b08189dde7ce1aab4cca18827
|
address/apps.py
|
address/apps.py
|
from django.apps import AppConfig
class AddressConfig(AppConfig):
"""
Define config for the member app so that we can hook in signals.
"""
name = "address"
|
from django.apps import AppConfig
class AddressConfig(AppConfig):
"""
Define config for the member app so that we can hook in signals.
"""
name = "address"
default_auto_field = "django.db.models.AutoField"
|
Set default ID field to AutoField
|
:bug: Set default ID field to AutoField
Resolves #168
|
Python
|
bsd-3-clause
|
furious-luke/django-address,furious-luke/django-address,furious-luke/django-address
|
from django.apps import AppConfig
class AddressConfig(AppConfig):
"""
Define config for the member app so that we can hook in signals.
"""
name = "address"
:bug: Set default ID field to AutoField
Resolves #168
|
from django.apps import AppConfig
class AddressConfig(AppConfig):
"""
Define config for the member app so that we can hook in signals.
"""
name = "address"
default_auto_field = "django.db.models.AutoField"
|
<commit_before>from django.apps import AppConfig
class AddressConfig(AppConfig):
"""
Define config for the member app so that we can hook in signals.
"""
name = "address"
<commit_msg>:bug: Set default ID field to AutoField
Resolves #168<commit_after>
|
from django.apps import AppConfig
class AddressConfig(AppConfig):
"""
Define config for the member app so that we can hook in signals.
"""
name = "address"
default_auto_field = "django.db.models.AutoField"
|
from django.apps import AppConfig
class AddressConfig(AppConfig):
"""
Define config for the member app so that we can hook in signals.
"""
name = "address"
:bug: Set default ID field to AutoField
Resolves #168from django.apps import AppConfig
class AddressConfig(AppConfig):
"""
Define config for the member app so that we can hook in signals.
"""
name = "address"
default_auto_field = "django.db.models.AutoField"
|
<commit_before>from django.apps import AppConfig
class AddressConfig(AppConfig):
"""
Define config for the member app so that we can hook in signals.
"""
name = "address"
<commit_msg>:bug: Set default ID field to AutoField
Resolves #168<commit_after>from django.apps import AppConfig
class AddressConfig(AppConfig):
"""
Define config for the member app so that we can hook in signals.
"""
name = "address"
default_auto_field = "django.db.models.AutoField"
|
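Context for the record above: Django 3.2 introduced DEFAULT_AUTO_FIELD and started warning (W042) when an app leaves its implicit primary-key type unspecified; pinning default_auto_field to AutoField keeps pre-3.2 behaviour and avoids spurious migrations. A sketch of the equivalent project-wide setting (settings.py belongs to the consuming project, not to this app):

# settings.py of the consuming Django project (assumed layout)
DEFAULT_AUTO_FIELD = "django.db.models.AutoField"  # keep pre-3.2 implicit PKs project-wide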
2095b3f18926a9ab08faeae634f4f4653e4b7590
|
admin/manage.py
|
admin/manage.py
|
from freeposte import manager, db
from freeposte.admin import models
@manager.command
def flushdb():
""" Flush the database
"""
db.drop_all()
@manager.command
def initdb():
""" Initialize the database
"""
db.create_all()
@manager.command
def admin(localpart, domain_name, password):
""" Create an admin user
"""
domain = models.Domain(name=domain)
user = models.User(
localpart=localpart,
domain=domain,
global_admin=True,
password=hash.sha512_crypt.encrypt(password)
)
db.session.add(domain)
db.session.add(user)
db.session.commit()
if __name__ == "__main__":
manager.run()
|
from freeposte import manager, db
from freeposte.admin import models
from passlib import hash
@manager.command
def flushdb():
""" Flush the database
"""
db.drop_all()
@manager.command
def initdb():
""" Initialize the database
"""
db.create_all()
@manager.command
def admin(localpart, domain_name, password):
""" Create an admin user
"""
domain = models.Domain(name=domain_name)
user = models.User(
localpart=localpart,
domain=domain,
global_admin=True,
password=hash.sha512_crypt.encrypt(password)
)
db.session.add(domain)
db.session.add(user)
db.session.commit()
if __name__ == "__main__":
manager.run()
|
Fix the admin creation command
|
Fix the admin creation command
|
Python
|
mit
|
kaiyou/freeposte.io,kaiyou/freeposte.io,kaiyou/freeposte.io,kaiyou/freeposte.io
|
from freeposte import manager, db
from freeposte.admin import models
@manager.command
def flushdb():
""" Flush the database
"""
db.drop_all()
@manager.command
def initdb():
""" Initialize the database
"""
db.create_all()
@manager.command
def admin(localpart, domain_name, password):
""" Create an admin user
"""
domain = models.Domain(name=domain)
user = models.User(
localpart=localpart,
domain=domain,
global_admin=True,
password=hash.sha512_crypt.encrypt(password)
)
db.session.add(domain)
db.session.add(user)
db.session.commit()
if __name__ == "__main__":
manager.run()
Fix the admin creation command
|
from freeposte import manager, db
from freeposte.admin import models
from passlib import hash
@manager.command
def flushdb():
""" Flush the database
"""
db.drop_all()
@manager.command
def initdb():
""" Initialize the database
"""
db.create_all()
@manager.command
def admin(localpart, domain_name, password):
""" Create an admin user
"""
domain = models.Domain(name=domain_name)
user = models.User(
localpart=localpart,
domain=domain,
global_admin=True,
password=hash.sha512_crypt.encrypt(password)
)
db.session.add(domain)
db.session.add(user)
db.session.commit()
if __name__ == "__main__":
manager.run()
|
<commit_before>from freeposte import manager, db
from freeposte.admin import models
@manager.command
def flushdb():
""" Flush the database
"""
db.drop_all()
@manager.command
def initdb():
""" Initialize the database
"""
db.create_all()
@manager.command
def admin(localpart, domain_name, password):
""" Create an admin user
"""
domain = models.Domain(name=domain)
user = models.User(
localpart=localpart,
domain=domain,
global_admin=True,
password=hash.sha512_crypt.encrypt(password)
)
db.session.add(domain)
db.session.add(user)
db.session.commit()
if __name__ == "__main__":
manager.run()
<commit_msg>Fix the admin creation command<commit_after>
|
from freeposte import manager, db
from freeposte.admin import models
from passlib import hash
@manager.command
def flushdb():
""" Flush the database
"""
db.drop_all()
@manager.command
def initdb():
""" Initialize the database
"""
db.create_all()
@manager.command
def admin(localpart, domain_name, password):
""" Create an admin user
"""
domain = models.Domain(name=domain_name)
user = models.User(
localpart=localpart,
domain=domain,
global_admin=True,
password=hash.sha512_crypt.encrypt(password)
)
db.session.add(domain)
db.session.add(user)
db.session.commit()
if __name__ == "__main__":
manager.run()
|
from freeposte import manager, db
from freeposte.admin import models
@manager.command
def flushdb():
""" Flush the database
"""
db.drop_all()
@manager.command
def initdb():
""" Initialize the database
"""
db.create_all()
@manager.command
def admin(localpart, domain_name, password):
""" Create an admin user
"""
domain = models.Domain(name=domain)
user = models.User(
localpart=localpart,
domain=domain,
global_admin=True,
password=hash.sha512_crypt.encrypt(password)
)
db.session.add(domain)
db.session.add(user)
db.session.commit()
if __name__ == "__main__":
manager.run()
Fix the admin creation commandfrom freeposte import manager, db
from freeposte.admin import models
from passlib import hash
@manager.command
def flushdb():
""" Flush the database
"""
db.drop_all()
@manager.command
def initdb():
""" Initialize the database
"""
db.create_all()
@manager.command
def admin(localpart, domain_name, password):
""" Create an admin user
"""
domain = models.Domain(name=domain_name)
user = models.User(
localpart=localpart,
domain=domain,
global_admin=True,
password=hash.sha512_crypt.encrypt(password)
)
db.session.add(domain)
db.session.add(user)
db.session.commit()
if __name__ == "__main__":
manager.run()
|
<commit_before>from freeposte import manager, db
from freeposte.admin import models
@manager.command
def flushdb():
""" Flush the database
"""
db.drop_all()
@manager.command
def initdb():
""" Initialize the database
"""
db.create_all()
@manager.command
def admin(localpart, domain_name, password):
""" Create an admin user
"""
domain = models.Domain(name=domain)
user = models.User(
localpart=localpart,
domain=domain,
global_admin=True,
password=hash.sha512_crypt.encrypt(password)
)
db.session.add(domain)
db.session.add(user)
db.session.commit()
if __name__ == "__main__":
manager.run()
<commit_msg>Fix the admin creation command<commit_after>from freeposte import manager, db
from freeposte.admin import models
from passlib import hash
@manager.command
def flushdb():
""" Flush the database
"""
db.drop_all()
@manager.command
def initdb():
""" Initialize the database
"""
db.create_all()
@manager.command
def admin(localpart, domain_name, password):
""" Create an admin user
"""
domain = models.Domain(name=domain_name)
user = models.User(
localpart=localpart,
domain=domain,
global_admin=True,
password=hash.sha512_crypt.encrypt(password)
)
db.session.add(domain)
db.session.add(user)
db.session.commit()
if __name__ == "__main__":
manager.run()
|
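The fix in the record above is twofold: the missing passlib import and the name=domain typo (the function's parameter is domain_name). A hedged sketch of the hashing call in isolation; note that sha512_crypt.encrypt still works but is a deprecated alias of .hash() in passlib 1.7+:

from passlib import hash

digest = hash.sha512_crypt.encrypt("s3cret")   # deprecated alias; prefer .hash() on passlib 1.7+
assert hash.sha512_crypt.verify("s3cret", digest)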
87e5d0e5e92ed5f94e4238e73453934abc7835dd
|
src/tutorials/code/python/chat/5.py
|
src/tutorials/code/python/chat/5.py
|
from chatty import create
import config
from tornado.ioloop import PeriodicCallback, IOLoop
from functools import partial
if __name__ == "__main__":
chat = create(config)
# Tell chat to authenticate with the beam server. It'll throw
# a chatty.errors.NotAuthenticatedError if it fails.
chat.authenticate(config.CHANNEL)
# Listen for incoming messages. When they come in, just print them.
chat.on("message", partial(print, "RECEIVE:"))
# Create a timer that sends the message "Hi!" every second.
PeriodicCallback(
lambda: chat.message('Hi!'),
1000
).start()
# Start the tornado event loop.
IOLoop.instance().start()
|
from chatty import create
import config
from tornado.ioloop import PeriodicCallback, IOLoop
if __name__ == "__main__":
chat = create(config)
# Tell chat to authenticate with the beam server. It'll throw
# a chatty.errors.NotAuthenticatedError if it fails.
chat.authenticate(config.CHANNEL)
# Handle incoming messages.
def on_message(message):
print("RECEIVE:", message)
# Listen for incoming messages. When they come in, just print them.
chat.on("message", on_message)
# Create a timer that sends the message "Hi!" every second.
PeriodicCallback(
lambda: chat.message('Hi!'),
1000
).start()
# Start the tornado event loop.
IOLoop.instance().start()
|
Replace partial with a function definition
|
Replace partial with a function definition
Fix indentation, as well.
|
Python
|
mit
|
WatchBeam/developers,WatchBeam/developers,WatchBeam/developers,WatchBeam/developers,WatchBeam/developers
|
from chatty import create
import config
from tornado.ioloop import PeriodicCallback, IOLoop
from functools import partial
if __name__ == "__main__":
chat = create(config)
# Tell chat to authenticate with the beam server. It'll throw
# a chatty.errors.NotAuthenticatedError if it fails.
chat.authenticate(config.CHANNEL)
# Listen for incoming messages. When they come in, just print them.
chat.on("message", partial(print, "RECEIVE:"))
# Create a timer that sends the message "Hi!" every second.
PeriodicCallback(
lambda: chat.message('Hi!'),
1000
).start()
# Start the tornado event loop.
IOLoop.instance().start()Replace partial with a function definition
Fix indentation, as well.
|
from chatty import create
import config
from tornado.ioloop import PeriodicCallback, IOLoop
if __name__ == "__main__":
chat = create(config)
# Tell chat to authenticate with the beam server. It'll throw
# a chatty.errors.NotAuthenticatedError if it fails.
chat.authenticate(config.CHANNEL)
# Handle incoming messages.
def on_message(message):
print("RECEIVE:", message)
# Listen for incoming messages. When they come in, just print them.
chat.on("message", on_message)
# Create a timer that sends the message "Hi!" every second.
PeriodicCallback(
lambda: chat.message('Hi!'),
1000
).start()
# Start the tornado event loop.
IOLoop.instance().start()
|
<commit_before>from chatty import create
import config
from tornado.ioloop import PeriodicCallback, IOLoop
from functools import partial
if __name__ == "__main__":
chat = create(config)
# Tell chat to authenticate with the beam server. It'll throw
# a chatty.errors.NotAuthenticatedError if it fails.
chat.authenticate(config.CHANNEL)
# Listen for incoming messages. When they come in, just print them.
chat.on("message", partial(print, "RECEIVE:"))
# Create a timer that sends the message "Hi!" every second.
PeriodicCallback(
lambda: chat.message('Hi!'),
1000
).start()
# Start the tornado event loop.
IOLoop.instance().start()<commit_msg>Replace partial with a function definition
Fix indentation, as well.<commit_after>
|
from chatty import create
import config
from tornado.ioloop import PeriodicCallback, IOLoop
if __name__ == "__main__":
chat = create(config)
# Tell chat to authenticate with the beam server. It'll throw
# a chatty.errors.NotAuthenticatedError if it fails.
chat.authenticate(config.CHANNEL)
# Handle incoming messages.
def on_message(message):
print("RECEIVE:", message)
# Listen for incoming messages. When they come in, just print them.
chat.on("message", on_message)
# Create a timer that sends the message "Hi!" every second.
PeriodicCallback(
lambda: chat.message('Hi!'),
1000
).start()
# Start the tornado event loop.
IOLoop.instance().start()
|
from chatty import create
import config
from tornado.ioloop import PeriodicCallback, IOLoop
from functools import partial
if __name__ == "__main__":
chat = create(config)
# Tell chat to authenticate with the beam server. It'll throw
# a chatty.errors.NotAuthenticatedError if it fails.
chat.authenticate(config.CHANNEL)
# Listen for incoming messages. When they come in, just print them.
chat.on("message", partial(print, "RECEIVE:"))
# Create a timer that sends the message "Hi!" every second.
PeriodicCallback(
lambda: chat.message('Hi!'),
1000
).start()
# Start the tornado event loop.
IOLoop.instance().start()Replace partial with a function definition
Fix indentation, as well.from chatty import create
import config
from tornado.ioloop import PeriodicCallback, IOLoop
if __name__ == "__main__":
chat = create(config)
# Tell chat to authenticate with the beam server. It'll throw
# a chatty.errors.NotAuthenticatedError if it fails.
chat.authenticate(config.CHANNEL)
# Handle incoming messages.
def on_message(message):
print("RECEIVE:", message)
# Listen for incoming messages. When they come in, just print them.
chat.on("message", on_message)
# Create a timer that sends the message "Hi!" every second.
PeriodicCallback(
lambda: chat.message('Hi!'),
1000
).start()
# Start the tornado event loop.
IOLoop.instance().start()
|
<commit_before>from chatty import create
import config
from tornado.ioloop import PeriodicCallback, IOLoop
from functools import partial
if __name__ == "__main__":
chat = create(config)
# Tell chat to authenticate with the beam server. It'll throw
# a chatty.errors.NotAuthenticatedError if it fails.
chat.authenticate(config.CHANNEL)
# Listen for incoming messages. When they come in, just print them.
chat.on("message", partial(print, "RECEIVE:"))
# Create a timer that sends the message "Hi!" every second.
PeriodicCallback(
lambda: chat.message('Hi!'),
1000
).start()
# Start the tornado event loop.
IOLoop.instance().start()<commit_msg>Replace partial with a function definition
Fix indentation, as well.<commit_after>from chatty import create
import config
from tornado.ioloop import PeriodicCallback, IOLoop
if __name__ == "__main__":
chat = create(config)
# Tell chat to authenticate with the beam server. It'll throw
# a chatty.errors.NotAuthenticatedError if it fails.
chat.authenticate(config.CHANNEL)
# Handle incoming messages.
def on_message(message):
print("RECEIVE:", message)
# Listen for incoming messages. When they come in, just print them.
chat.on("message", on_message)
# Create a timer that sends the message "Hi!" every second.
PeriodicCallback(
lambda: chat.message('Hi!'),
1000
).start()
# Start the tornado event loop.
IOLoop.instance().start()
|
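The two handler styles in the record above are behaviourally equivalent; the named function simply reads better in a tutorial and gives learners a place to grow the handler. A dependency-free sketch (Python 3, since print must be a function for the partial form to work):

from functools import partial

receive_via_partial = partial(print, "RECEIVE:")
receive_via_partial("hello")        # prints: RECEIVE: hello

def receive_named(message):         # the form the commit switches to
    print("RECEIVE:", message)

receive_named("hello")              # prints: RECEIVE: hello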
b6e40edee998170fafe92096447a0f54e9adb86f
|
tracker/src/main/scripts/launch-workflow.py
|
tracker/src/main/scripts/launch-workflow.py
|
import sys
import os
import uuid
from time import sleep
if len(sys.argv) != 3:
print "Wrong number of args"
exit(1)
workflow_name = sys.argv[1]
num_runs = int(sys.argv[2])
for this_run in range(num_runs):
run_uuid = uuid.uuid4()
launch_command = "airflow trigger_dag -r " + run_uuid + " " + workflow_name
print("Launching workflow with command: " + launch_command)
os.system(launch_command)
print("Workflow %s launched.", run_uuid)
sleep(0.5)
|
import sys
import os
import uuid
from time import sleep
if len(sys.argv) != 3:
print "Wrong number of args"
exit(1)
workflow_name = sys.argv[1]
num_runs = int(sys.argv[2])
for this_run in range(num_runs):
run_uuid = str(uuid.uuid4())
launch_command = "airflow trigger_dag -r " + run_uuid + " " + workflow_name
print("Launching workflow with command: " + launch_command)
os.system(launch_command)
print("Workflow %s launched.", run_uuid)
sleep(0.5)
|
Convert run uuid to string
|
Convert run uuid to string
|
Python
|
mit
|
llevar/germline-regenotyper,llevar/germline-regenotyper
|
import sys
import os
import uuid
from time import sleep
if len(sys.argv) != 3:
print "Wrong number of args"
exit(1)
workflow_name = sys.argv[1]
num_runs = int(sys.argv[2])
for this_run in range(num_runs):
run_uuid = uuid.uuid4()
launch_command = "airflow trigger_dag -r " + run_uuid + " " + workflow_name
print("Launching workflow with command: " + launch_command)
os.system(launch_command)
print("Workflow %s launched.", run_uuid)
sleep(0.5)
Convert run uuid to string
|
import sys
import os
import uuid
from time import sleep
if len(sys.argv) != 3:
print "Wrong number of args"
exit(1)
workflow_name = sys.argv[1]
num_runs = int(sys.argv[2])
for this_run in range(num_runs):
run_uuid = str(uuid.uuid4())
launch_command = "airflow trigger_dag -r " + run_uuid + " " + workflow_name
print("Launching workflow with command: " + launch_command)
os.system(launch_command)
print("Workflow %s launched.", run_uuid)
sleep(0.5)
|
<commit_before>import sys
import os
import uuid
from time import sleep
if len(sys.argv) != 3:
print "Wrong number of args"
exit(1)
workflow_name = sys.argv[1]
num_runs = int(sys.argv[2])
for this_run in range(num_runs):
run_uuid = uuid.uuid4()
launch_command = "airflow trigger_dag -r " + run_uuid + " " + workflow_name
print("Launching workflow with command: " + launch_command)
os.system(launch_command)
print("Workflow %s launched.", run_uuid)
sleep(0.5)
<commit_msg>Convert run uuid to string<commit_after>
|
import sys
import os
import uuid
from time import sleep
if len(sys.argv) != 3:
print "Wrong number of args"
exit(1)
workflow_name = sys.argv[1]
num_runs = int(sys.argv[2])
for this_run in range(num_runs):
run_uuid = str(uuid.uuid4())
launch_command = "airflow trigger_dag -r " + run_uuid + " " + workflow_name
print("Launching workflow with command: " + launch_command)
os.system(launch_command)
print("Workflow %s launched.", run_uuid)
sleep(0.5)
|
import sys
import os
import uuid
from time import sleep
if len(sys.argv) != 3:
print "Wrong number of args"
exit(1)
workflow_name = sys.argv[1]
num_runs = int(sys.argv[2])
for this_run in range(num_runs):
run_uuid = uuid.uuid4()
launch_command = "airflow trigger_dag -r " + run_uuid + " " + workflow_name
print("Launching workflow with command: " + launch_command)
os.system(launch_command)
print("Workflow %s launched.", run_uuid)
sleep(0.5)
Convert run uuid to stringimport sys
import os
import uuid
from time import sleep
if len(sys.argv) != 3:
print "Wrong number of args"
exit(1)
workflow_name = sys.argv[1]
num_runs = int(sys.argv[2])
for this_run in range(num_runs):
run_uuid = str(uuid.uuid4())
launch_command = "airflow trigger_dag -r " + run_uuid + " " + workflow_name
print("Launching workflow with command: " + launch_command)
os.system(launch_command)
print("Workflow %s launched.", run_uuid)
sleep(0.5)
|
<commit_before>import sys
import os
import uuid
from time import sleep
if len(sys.argv) != 3:
print "Wrong number of args"
exit(1)
workflow_name = sys.argv[1]
num_runs = int(sys.argv[2])
for this_run in range(num_runs):
run_uuid = uuid.uuid4()
launch_command = "airflow trigger_dag -r " + run_uuid + " " + workflow_name
print("Launching workflow with command: " + launch_command)
os.system(launch_command)
print("Workflow %s launched.", run_uuid)
sleep(0.5)
<commit_msg>Convert run uuid to string<commit_after>import sys
import os
import uuid
from time import sleep
if len(sys.argv) != 3:
print "Wrong number of args"
exit(1)
workflow_name = sys.argv[1]
num_runs = int(sys.argv[2])
for this_run in range(num_runs):
run_uuid = str(uuid.uuid4())
launch_command = "airflow trigger_dag -r " + run_uuid + " " + workflow_name
print("Launching workflow with command: " + launch_command)
os.system(launch_command)
print("Workflow %s launched.", run_uuid)
sleep(0.5)
|
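The str() wrapper in the record above is needed because uuid.uuid4() returns a uuid.UUID instance, not a string, so the + concatenation on the next line raises TypeError. A minimal reproduction (exact error wording differs between Python 2 and 3):

import uuid

run_uuid = uuid.uuid4()
# "airflow trigger_dag -r " + run_uuid      # TypeError: cannot concatenate str and UUID
cmd = "airflow trigger_dag -r " + str(run_uuid) + " my_dag"   # works
print(cmd)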
d3cc8fdbad2ca6888e33b119faae68d691ab291e
|
tests/system/test_lets-do-dns_script.py
|
tests/system/test_lets-do-dns_script.py
|
import os
import subprocess
from requests import get, delete
def test_pre_authentication_hook(env):
os.environ.update({
'DO_API_KEY': env.key,
'DO_DOMAIN': env.domain,
'CERTBOT_DOMAIN': '%s.%s' % (env.hostname, env.domain),
'CERTBOT_VALIDATION': env.auth_token,
})
record_id = subprocess.check_output('lets-do-dns')
assert int(record_id) > 0
request_uri = '%s/%s/records/%s' % (
env.base_uri, env.domain, record_id)
response = get(request_uri, headers=env.auth_header)
record_data = response.json()['domain_record']
assert (record_data['type'] == 'TXT' and
record_data['name'] == env.hostname and
record_data['data'] == env.auth_token)
delete(request_uri, headers=env.auth_header)
|
import os
import subprocess
from requests import get, delete, post
def test_pre_authentication_hook(env):
os.environ.update({
'DO_API_KEY': env.key,
'DO_DOMAIN': env.domain,
'CERTBOT_DOMAIN': '%s.%s' % (env.hostname, env.domain),
'CERTBOT_VALIDATION': env.auth_token,
})
record_id = subprocess.check_output('lets-do-dns')
assert int(record_id) > 0
request_uri = '%s/%s/records/%s' % (
env.base_uri, env.domain, record_id)
response = get(request_uri, headers=env.auth_header)
record_data = response.json()['domain_record']
assert (record_data['type'] == 'TXT' and
record_data['name'] == env.hostname and
record_data['data'] == env.auth_token)
delete(request_uri, headers=env.auth_header)
def test_post_authentication_hook(env):
create_response = post(
'%s/%s/records' % (env.base_uri, env.domain),
headers=env.auth_header,
json={'type': 'TXT',
'name': env.hostname,
'data': env.auth_token})
record_id = create_response.json()['domain_record']['id']
os.environ.update({
'DO_API_KEY': env.key,
'DO_DOMAIN': env.domain,
'CERTBOT_DOMAIN': '%s.%s' % (env.hostname, env.domain),
'CERTBOT_VALIDATION': env.auth_token,
'CERTBOT_AUTH_OUTPUT': str(record_id)
})
subprocess.check_call('lets-do-dns')
request_uri = '%s/%s/records/%s' % (
env.base_uri, env.domain, record_id)
get_response = get(request_uri, headers=env.auth_header)
assert get_response.status_code == 404
|
Test Post-Authentication Hook Process Works
|
Test Post-Authentication Hook Process Works
This is my second system level test, and it's currently failing. My
next steps are to work on the integration test, followed by a slew
of unit tests to start test-driving the final design of the
program to handle this second, and possibly final, piece.
|
Python
|
apache-2.0
|
Jitsusama/lets-do-dns
|
import os
import subprocess
from requests import get, delete
def test_pre_authentication_hook(env):
os.environ.update({
'DO_API_KEY': env.key,
'DO_DOMAIN': env.domain,
'CERTBOT_DOMAIN': '%s.%s' % (env.hostname, env.domain),
'CERTBOT_VALIDATION': env.auth_token,
})
record_id = subprocess.check_output('lets-do-dns')
assert int(record_id) > 0
request_uri = '%s/%s/records/%s' % (
env.base_uri, env.domain, record_id)
response = get(request_uri, headers=env.auth_header)
record_data = response.json()['domain_record']
assert (record_data['type'] == 'TXT' and
record_data['name'] == env.hostname and
record_data['data'] == env.auth_token)
delete(request_uri, headers=env.auth_header)
Test Post-Authentication Hook Process Works
This is my second system level test, and it's currently failing. My
next steps are to work on the integration test, followed by a slew
of unit tests to start test-driving the final design of the
program to handle this second, and possibly final, piece.
|
import os
import subprocess
from requests import get, delete, post
def test_pre_authentication_hook(env):
os.environ.update({
'DO_API_KEY': env.key,
'DO_DOMAIN': env.domain,
'CERTBOT_DOMAIN': '%s.%s' % (env.hostname, env.domain),
'CERTBOT_VALIDATION': env.auth_token,
})
record_id = subprocess.check_output('lets-do-dns')
assert int(record_id) > 0
request_uri = '%s/%s/records/%s' % (
env.base_uri, env.domain, record_id)
response = get(request_uri, headers=env.auth_header)
record_data = response.json()['domain_record']
assert (record_data['type'] == 'TXT' and
record_data['name'] == env.hostname and
record_data['data'] == env.auth_token)
delete(request_uri, headers=env.auth_header)
def test_post_authentication_hook(env):
create_response = post(
'%s/%s/records' % (env.base_uri, env.domain),
headers=env.auth_header,
json={'type': 'TXT',
'name': env.hostname,
'data': env.auth_token})
record_id = create_response.json()['domain_record']['id']
os.environ.update({
'DO_API_KEY': env.key,
'DO_DOMAIN': env.domain,
'CERTBOT_DOMAIN': '%s.%s' % (env.hostname, env.domain),
'CERTBOT_VALIDATION': env.auth_token,
'CERTBOT_AUTH_OUTPUT': str(record_id)
})
subprocess.check_call('lets-do-dns')
request_uri = '%s/%s/records/%s' % (
env.base_uri, env.domain, record_id)
get_response = get(request_uri, headers=env.auth_header)
assert get_response.status_code == 404
|
<commit_before>import os
import subprocess
from requests import get, delete
def test_pre_authentication_hook(env):
os.environ.update({
'DO_API_KEY': env.key,
'DO_DOMAIN': env.domain,
'CERTBOT_DOMAIN': '%s.%s' % (env.hostname, env.domain),
'CERTBOT_VALIDATION': env.auth_token,
})
record_id = subprocess.check_output('lets-do-dns')
assert int(record_id) > 0
request_uri = '%s/%s/records/%s' % (
env.base_uri, env.domain, record_id)
response = get(request_uri, headers=env.auth_header)
record_data = response.json()['domain_record']
assert (record_data['type'] == 'TXT' and
record_data['name'] == env.hostname and
record_data['data'] == env.auth_token)
delete(request_uri, headers=env.auth_header)
<commit_msg>Test Post-Authentication Hook Process Works
This is my second system level test, and it's currently failing. My
next steps are to work on the integration test, followed by a slew
of unit tests to start test-driving the final design of the
program to handle this second, and possibly final, piece.<commit_after>
|
import os
import subprocess
from requests import get, delete, post
def test_pre_authentication_hook(env):
os.environ.update({
'DO_API_KEY': env.key,
'DO_DOMAIN': env.domain,
'CERTBOT_DOMAIN': '%s.%s' % (env.hostname, env.domain),
'CERTBOT_VALIDATION': env.auth_token,
})
record_id = subprocess.check_output('lets-do-dns')
assert int(record_id) > 0
request_uri = '%s/%s/records/%s' % (
env.base_uri, env.domain, record_id)
response = get(request_uri, headers=env.auth_header)
record_data = response.json()['domain_record']
assert (record_data['type'] == 'TXT' and
record_data['name'] == env.hostname and
record_data['data'] == env.auth_token)
delete(request_uri, headers=env.auth_header)
def test_post_authentication_hook(env):
create_response = post(
'%s/%s/records' % (env.base_uri, env.domain),
headers=env.auth_header,
json={'type': 'TXT',
'name': env.hostname,
'data': env.auth_token})
record_id = create_response.json()['domain_record']['id']
os.environ.update({
'DO_API_KEY': env.key,
'DO_DOMAIN': env.domain,
'CERTBOT_DOMAIN': '%s.%s' % (env.hostname, env.domain),
'CERTBOT_VALIDATION': env.auth_token,
'CERTBOT_AUTH_OUTPUT': str(record_id)
})
subprocess.check_call('lets-do-dns')
request_uri = '%s/%s/records/%s' % (
env.base_uri, env.domain, record_id)
get_response = get(request_uri, headers=env.auth_header)
assert get_response.status_code == 404
|
import os
import subprocess
from requests import get, delete
def test_pre_authentication_hook(env):
os.environ.update({
'DO_API_KEY': env.key,
'DO_DOMAIN': env.domain,
'CERTBOT_DOMAIN': '%s.%s' % (env.hostname, env.domain),
'CERTBOT_VALIDATION': env.auth_token,
})
record_id = subprocess.check_output('lets-do-dns')
assert int(record_id) > 0
request_uri = '%s/%s/records/%s' % (
env.base_uri, env.domain, record_id)
response = get(request_uri, headers=env.auth_header)
record_data = response.json()['domain_record']
assert (record_data['type'] == 'TXT' and
record_data['name'] == env.hostname and
record_data['data'] == env.auth_token)
delete(request_uri, headers=env.auth_header)
Test Post-Authentication Hook Process Works
This is my second system level test, and it's currently failing. My
next steps are to work on the integration test, followed by a slew
of unit tests to start test-driving the final design of the
program to handle this second, and possibly final, piece.import os
import subprocess
from requests import get, delete, post
def test_pre_authentication_hook(env):
os.environ.update({
'DO_API_KEY': env.key,
'DO_DOMAIN': env.domain,
'CERTBOT_DOMAIN': '%s.%s' % (env.hostname, env.domain),
'CERTBOT_VALIDATION': env.auth_token,
})
record_id = subprocess.check_output('lets-do-dns')
assert int(record_id) > 0
request_uri = '%s/%s/records/%s' % (
env.base_uri, env.domain, record_id)
response = get(request_uri, headers=env.auth_header)
record_data = response.json()['domain_record']
assert (record_data['type'] == 'TXT' and
record_data['name'] == env.hostname and
record_data['data'] == env.auth_token)
delete(request_uri, headers=env.auth_header)
def test_post_authentication_hook(env):
create_response = post(
'%s/%s/records' % (env.base_uri, env.domain),
headers=env.auth_header,
json={'type': 'TXT',
'name': env.hostname,
'data': env.auth_token})
record_id = create_response.json()['domain_record']['id']
os.environ.update({
'DO_API_KEY': env.key,
'DO_DOMAIN': env.domain,
'CERTBOT_DOMAIN': '%s.%s' % (env.hostname, env.domain),
'CERTBOT_VALIDATION': env.auth_token,
'CERTBOT_AUTH_OUTPUT': str(record_id)
})
subprocess.check_call('lets-do-dns')
request_uri = '%s/%s/records/%s' % (
env.base_uri, env.domain, record_id)
get_response = get(request_uri, headers=env.auth_header)
assert get_response.status_code == 404
|
<commit_before>import os
import subprocess
from requests import get, delete
def test_pre_authentication_hook(env):
os.environ.update({
'DO_API_KEY': env.key,
'DO_DOMAIN': env.domain,
'CERTBOT_DOMAIN': '%s.%s' % (env.hostname, env.domain),
'CERTBOT_VALIDATION': env.auth_token,
})
record_id = subprocess.check_output('lets-do-dns')
assert int(record_id) > 0
request_uri = '%s/%s/records/%s' % (
env.base_uri, env.domain, record_id)
response = get(request_uri, headers=env.auth_header)
record_data = response.json()['domain_record']
assert (record_data['type'] == 'TXT' and
record_data['name'] == env.hostname and
record_data['data'] == env.auth_token)
delete(request_uri, headers=env.auth_header)
<commit_msg>Test Post-Authentication Hook Process Works
This is my second system level test, and it's currently failing. My
next steps are to work on the integration test, followed by a slew
of unit tests to start test-driving the final design of the
program to handle this second, and possibly final, piece.<commit_after>import os
import subprocess
from requests import get, delete, post
def test_pre_authentication_hook(env):
os.environ.update({
'DO_API_KEY': env.key,
'DO_DOMAIN': env.domain,
'CERTBOT_DOMAIN': '%s.%s' % (env.hostname, env.domain),
'CERTBOT_VALIDATION': env.auth_token,
})
record_id = subprocess.check_output('lets-do-dns')
assert int(record_id) > 0
request_uri = '%s/%s/records/%s' % (
env.base_uri, env.domain, record_id)
response = get(request_uri, headers=env.auth_header)
record_data = response.json()['domain_record']
assert (record_data['type'] == 'TXT' and
record_data['name'] == env.hostname and
record_data['data'] == env.auth_token)
delete(request_uri, headers=env.auth_header)
def test_post_authentication_hook(env):
create_response = post(
'%s/%s/records' % (env.base_uri, env.domain),
headers=env.auth_header,
json={'type': 'TXT',
'name': env.hostname,
'data': env.auth_token})
record_id = create_response.json()['domain_record']['id']
os.environ.update({
'DO_API_KEY': env.key,
'DO_DOMAIN': env.domain,
'CERTBOT_DOMAIN': '%s.%s' % (env.hostname, env.domain),
'CERTBOT_VALIDATION': env.auth_token,
'CERTBOT_AUTH_OUTPUT': str(record_id)
})
subprocess.check_call('lets-do-dns')
request_uri = '%s/%s/records/%s' % (
env.base_uri, env.domain, record_id)
get_response = get(request_uri, headers=env.auth_header)
assert get_response.status_code == 404
|
c96000a231d5bbf60a310e091b9895bfb249c115
|
conditional/blueprints/spring_evals.py
|
conditional/blueprints/spring_evals.py
|
from flask import Blueprint
from flask import render_template
from flask import request
spring_evals_bp = Blueprint('spring_evals_bp', __name__)
@spring_evals_bp.route('/spring_evals/')
def display_spring_evals():
# get user data
user_name = request.headers.get('x-webauth-user')
members = [
{
'name': "Liam Middlebrook",
'committee_meetings': 24,
'house_meetings_missed': 0,
'house_meetings_comments': "",
'major_project': 'open_container',
'major_project_passed': True,
'comments': "please don't fail me",
'result': 'Pending'
},
{
'name': "Julien Eid",
'committee_meetings': 69,
'house_meetings_missed': 0,
'house_meetings_comments': "",
'major_project': 'wii-u shit',
'major_project_passed': True,
'comments': "imdabes",
'result': 'Passed'
},
{
'name': "James Forcier",
'committee_meetings': 3,
'house_meetings_missed': 5,
'house_meetings_comments': "",
'major_project': 'Bobby Junior',
'major_project_passed': False,
'comments': "Jazzazazazazzz",
'result': 'Failed'
}
]
# return names in 'first last (username)' format
return render_template('spring_evals.html',
username = user_name,
members = members)
|
from flask import Blueprint
from flask import render_template
from flask import request
spring_evals_bp = Blueprint('spring_evals_bp', __name__)
@spring_evals_bp.route('/spring_evals/')
def display_spring_evals():
# get user data
user_name = request.headers.get('x-webauth-user')
members = [
{
'name': "Liam Middlebrook",
'committee_meetings': 24,
'house_meetings_missed': [{'date': "aprial fools fayas ads", 'reason': "I was playing videogames"}],
'major_project': 'open_container',
'major_project_passed': True,
'comments': "please don't fail me",
'result': 'Pending'
},
{
'name': "Julien Eid",
'committee_meetings': 69,
'house_meetings_missed': [],
'major_project': 'wii-u shit',
'major_project_passed': True,
'comments': "imdabes",
'result': 'Passed'
}
]
# return names in 'first last (username)' format
return render_template('spring_evals.html',
username = user_name,
members = members)
|
Fix house meeting schema spring evals
|
Fix house meeting schema spring evals
|
Python
|
mit
|
ComputerScienceHouse/conditional,RamZallan/conditional,RamZallan/conditional,ComputerScienceHouse/conditional,RamZallan/conditional,ComputerScienceHouse/conditional
|
from flask import Blueprint
from flask import render_template
from flask import request
spring_evals_bp = Blueprint('spring_evals_bp', __name__)
@spring_evals_bp.route('/spring_evals/')
def display_spring_evals():
# get user data
user_name = request.headers.get('x-webauth-user')
members = [
{
'name': "Liam Middlebrook",
'committee_meetings': 24,
'house_meetings_missed': 0,
'house_meetings_comments': "",
'major_project': 'open_container',
'major_project_passed': True,
'comments': "please don't fail me",
'result': 'Pending'
},
{
'name': "Julien Eid",
'committee_meetings': 69,
'house_meetings_missed': 0,
'house_meetings_comments': "",
'major_project': 'wii-u shit',
'major_project_passed': True,
'comments': "imdabes",
'result': 'Passed'
},
{
'name': "James Forcier",
'committee_meetings': 3,
'house_meetings_missed': 5,
'house_meetings_comments': "",
'major_project': 'Bobby Junior',
'major_project_passed': False,
'comments': "Jazzazazazazzz",
'result': 'Failed'
}
]
# return names in 'first last (username)' format
return render_template('spring_evals.html',
username = user_name,
members = members)
Fix house meeting schema spring evals
|
from flask import Blueprint
from flask import render_template
from flask import request
spring_evals_bp = Blueprint('spring_evals_bp', __name__)
@spring_evals_bp.route('/spring_evals/')
def display_spring_evals():
# get user data
user_name = request.headers.get('x-webauth-user')
members = [
{
'name': "Liam Middlebrook",
'committee_meetings': 24,
'house_meetings_missed': [{'date': "aprial fools fayas ads", 'reason': "I was playing videogames"}],
'major_project': 'open_container',
'major_project_passed': True,
'comments': "please don't fail me",
'result': 'Pending'
},
{
'name': "Julien Eid",
'committee_meetings': 69,
'house_meetings_missed': [],
'major_project': 'wii-u shit',
'major_project_passed': True,
'comments': "imdabes",
'result': 'Passed'
}
]
# return names in 'first last (username)' format
return render_template('spring_evals.html',
username = user_name,
members = members)
|
<commit_before>from flask import Blueprint
from flask import render_template
from flask import request
spring_evals_bp = Blueprint('spring_evals_bp', __name__)
@spring_evals_bp.route('/spring_evals/')
def display_spring_evals():
# get user data
user_name = request.headers.get('x-webauth-user')
members = [
{
'name': "Liam Middlebrook",
'committee_meetings': 24,
'house_meetings_missed': 0,
'house_meetings_comments': "",
'major_project': 'open_container',
'major_project_passed': True,
'comments': "please don't fail me",
'result': 'Pending'
},
{
'name': "Julien Eid",
'committee_meetings': 69,
'house_meetings_missed': 0,
'house_meetings_comments': "",
'major_project': 'wii-u shit',
'major_project_passed': True,
'comments': "imdabes",
'result': 'Passed'
},
{
'name': "James Forcier",
'committee_meetings': 3,
'house_meetings_missed': 5,
'house_meetings_comments': "",
'major_project': 'Bobby Junior',
'major_project_passed': False,
'comments': "Jazzazazazazzz",
'result': 'Failed'
}
]
# return names in 'first last (username)' format
return render_template('spring_evals.html',
username = user_name,
members = members)
<commit_msg>Fix house meeting schema spring evals<commit_after>
|
from flask import Blueprint
from flask import render_template
from flask import request
spring_evals_bp = Blueprint('spring_evals_bp', __name__)
@spring_evals_bp.route('/spring_evals/')
def display_spring_evals():
# get user data
user_name = request.headers.get('x-webauth-user')
members = [
{
'name': "Liam Middlebrook",
'committee_meetings': 24,
'house_meetings_missed': [{'date': "aprial fools fayas ads", 'reason': "I was playing videogames"}],
'major_project': 'open_container',
'major_project_passed': True,
'comments': "please don't fail me",
'result': 'Pending'
},
{
'name': "Julien Eid",
'committee_meetings': 69,
'house_meetings_missed': [],
'major_project': 'wii-u shit',
'major_project_passed': True,
'comments': "imdabes",
'result': 'Passed'
}
]
# return names in 'first last (username)' format
return render_template('spring_evals.html',
username = user_name,
members = members)
|
from flask import Blueprint
from flask import render_template
from flask import request
spring_evals_bp = Blueprint('spring_evals_bp', __name__)
@spring_evals_bp.route('/spring_evals/')
def display_spring_evals():
# get user data
user_name = request.headers.get('x-webauth-user')
members = [
{
'name': "Liam Middlebrook",
'committee_meetings': 24,
'house_meetings_missed': 0,
'house_meetings_comments': "",
'major_project': 'open_container',
'major_project_passed': True,
'comments': "please don't fail me",
'result': 'Pending'
},
{
'name': "Julien Eid",
'committee_meetings': 69,
'house_meetings_missed': 0,
'house_meetings_comments': "",
'major_project': 'wii-u shit',
'major_project_passed': True,
'comments': "imdabes",
'result': 'Passed'
},
{
'name': "James Forcier",
'committee_meetings': 3,
'house_meetings_missed': 5,
'house_meetings_comments': "",
'major_project': 'Bobby Junior',
'major_project_passed': False,
'comments': "Jazzazazazazzz",
'result': 'Failed'
}
]
# return names in 'first last (username)' format
return render_template('spring_evals.html',
username = user_name,
members = members)
Fix house meeting schema spring evalsfrom flask import Blueprint
from flask import render_template
from flask import request
spring_evals_bp = Blueprint('spring_evals_bp', __name__)
@spring_evals_bp.route('/spring_evals/')
def display_spring_evals():
# get user data
user_name = request.headers.get('x-webauth-user')
members = [
{
'name': "Liam Middlebrook",
'committee_meetings': 24,
'house_meetings_missed': [{'date': "aprial fools fayas ads", 'reason': "I was playing videogames"}],
'major_project': 'open_container',
'major_project_passed': True,
'comments': "please don't fail me",
'result': 'Pending'
},
{
'name': "Julien Eid",
'committee_meetings': 69,
'house_meetings_missed': [],
'major_project': 'wii-u shit',
'major_project_passed': True,
'comments': "imdabes",
'result': 'Passed'
}
]
# return names in 'first last (username)' format
return render_template('spring_evals.html',
username = user_name,
members = members)
|
<commit_before>from flask import Blueprint
from flask import render_template
from flask import request
spring_evals_bp = Blueprint('spring_evals_bp', __name__)
@spring_evals_bp.route('/spring_evals/')
def display_spring_evals():
# get user data
user_name = request.headers.get('x-webauth-user')
members = [
{
'name': "Liam Middlebrook",
'committee_meetings': 24,
'house_meetings_missed': 0,
'house_meetings_comments': "",
'major_project': 'open_container',
'major_project_passed': True,
'comments': "please don't fail me",
'result': 'Pending'
},
{
'name': "Julien Eid",
'committee_meetings': 69,
'house_meetings_missed': 0,
'house_meetings_comments': "",
'major_project': 'wii-u shit',
'major_project_passed': True,
'comments': "imdabes",
'result': 'Passed'
},
{
'name': "James Forcier",
'committee_meetings': 3,
'house_meetings_missed': 5,
'house_meetings_comments': "",
'major_project': 'Bobby Junior',
'major_project_passed': False,
'comments': "Jazzazazazazzz",
'result': 'Failed'
}
]
# return names in 'first last (username)' format
return render_template('spring_evals.html',
username = user_name,
members = members)
<commit_msg>Fix house meeting schema spring evals<commit_after>from flask import Blueprint
from flask import render_template
from flask import request
spring_evals_bp = Blueprint('spring_evals_bp', __name__)
@spring_evals_bp.route('/spring_evals/')
def display_spring_evals():
# get user data
user_name = request.headers.get('x-webauth-user')
members = [
{
'name': "Liam Middlebrook",
'committee_meetings': 24,
'house_meetings_missed': [{'date': "aprial fools fayas ads", 'reason': "I was playing videogames"}],
'major_project': 'open_container',
'major_project_passed': True,
'comments': "please don't fail me",
'result': 'Pending'
},
{
'name': "Julien Eid",
'committee_meetings': 69,
'house_meetings_missed': [],
'major_project': 'wii-u shit',
'major_project_passed': True,
'comments': "imdabes",
'result': 'Passed'
}
]
# return names in 'first last (username)' format
return render_template('spring_evals.html',
username = user_name,
members = members)
|
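The schema change in the record above replaces the integer miss-count plus free-form comment with a list of dated absence records. A hedged sketch of consuming the new shape (the loop below is illustrative only — the repo's actual Jinja template is not shown in this record):

member = {
    'name': "Liam Middlebrook",
    'house_meetings_missed': [
        {'date': "april fools", 'reason': "I was playing videogames"},
    ],
}
for absence in member['house_meetings_missed']:
    print("%s missed %s: %s" % (member['name'], absence['date'], absence['reason']))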
d8cb207348a86b1bf9593f882a97eccdf48461df
|
lsv_compassion/model/invoice_line.py
|
lsv_compassion/model/invoice_line.py
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm, fields
class invoice_line(orm.Model):
_inherit = 'account.invoice.line'
def _get_child_name(self, cr, uid, ids, name, dict, context=None):
res = {}
for line in self.browse(cr, uid, ids, context):
child_name = ''
if line.contract_id and line.contract_id.child_id:
child_name = line.contract_id.child_id.name
res[line.id] = child_name
return res
_columns = {
'child_name': fields.function(
_get_child_name, string='Child name', type='char')
}
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm, fields
class invoice_line(orm.Model):
_inherit = 'account.invoice.line'
_columns = {
'child_name': fields.related(
'contract_id', 'child_name', string='Child name', type='char')
}
|
Change child_name to related field.
|
Change child_name to related field.
|
Python
|
agpl-3.0
|
Secheron/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland,CompassionCH/compassion-switzerland,CompassionCH/compassion-switzerland,MickSandoz/compassion-switzerland,ecino/compassion-switzerland,ndtran/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland,ndtran/compassion-switzerland,MickSandoz/compassion-switzerland,Secheron/compassion-switzerland
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm, fields
class invoice_line(orm.Model):
_inherit = 'account.invoice.line'
def _get_child_name(self, cr, uid, ids, name, dict, context=None):
res = {}
for line in self.browse(cr, uid, ids, context):
child_name = ''
if line.contract_id and line.contract_id.child_id:
child_name = line.contract_id.child_id.name
res[line.id] = child_name
return res
_columns = {
'child_name': fields.function(
_get_child_name, string='Child name', type='char')
}
Change child_name to related field.
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm, fields
class invoice_line(orm.Model):
_inherit = 'account.invoice.line'
_columns = {
'child_name': fields.related(
'contract_id', 'child_name', string='Child name', type='char')
}
|
<commit_before># -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm, fields
class invoice_line(orm.Model):
_inherit = 'account.invoice.line'
def _get_child_name(self, cr, uid, ids, name, dict, context=None):
res = {}
for line in self.browse(cr, uid, ids, context):
child_name = ''
if line.contract_id and line.contract_id.child_id:
child_name = line.contract_id.child_id.name
res[line.id] = child_name
return res
_columns = {
'child_name': fields.function(
_get_child_name, string='Child name', type='char')
}
<commit_msg>Change child_name to related field.<commit_after>
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm, fields
class invoice_line(orm.Model):
_inherit = 'account.invoice.line'
_columns = {
'child_name': fields.related(
'contract_id', 'child_name', string='Child name', type='char')
}
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm, fields
class invoice_line(orm.Model):
_inherit = 'account.invoice.line'
def _get_child_name(self, cr, uid, ids, name, dict, context=None):
res = {}
for line in self.browse(cr, uid, ids, context):
child_name = ''
if line.contract_id and line.contract_id.child_id:
child_name = line.contract_id.child_id.name
res[line.id] = child_name
return res
_columns = {
'child_name': fields.function(
_get_child_name, string='Child name', type='char')
}
Change child_name to related field.# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm, fields
class invoice_line(orm.Model):
_inherit = 'account.invoice.line'
_columns = {
'child_name': fields.related(
'contract_id', 'child_name', string='Child name', type='char')
}
|
<commit_before># -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm, fields
class invoice_line(orm.Model):
_inherit = 'account.invoice.line'
def _get_child_name(self, cr, uid, ids, name, dict, context=None):
res = {}
for line in self.browse(cr, uid, ids, context):
child_name = ''
if line.contract_id and line.contract_id.child_id:
child_name = line.contract_id.child_id.name
res[line.id] = child_name
return res
_columns = {
'child_name': fields.function(
_get_child_name, string='Child name', type='char')
}
<commit_msg>Change child_name to related field.<commit_after># -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm, fields
class invoice_line(orm.Model):
_inherit = 'account.invoice.line'
_columns = {
'child_name': fields.related(
'contract_id', 'child_name', string='Child name', type='char')
}
|
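A note on the pattern in the record above: in the OpenERP 7 ORM, fields.related declares a read-through proxy that follows a chain of field names at read time, which is why the function field and its compute method could be dropped. The sketch below shows the counterpart definition this relies on, assuming contract_id points at a recurring.contract model that already proxies the child's name; the model and field names are illustrative guesses, not taken from the commit.

from openerp.osv import orm, fields

class recurring_contract(orm.Model):
    _inherit = 'recurring.contract'

    _columns = {
        # Read-through proxy: each positional argument names the next hop
        # in the chain, so reads resolve child_id.name on the fly.
        'child_name': fields.related(
            'child_id', 'name', string='Child name', type='char'),
    }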
f50f892e2ad2108342f53406ea86f65f89eeaafb
|
PythonScript/Helper/Helper.py
|
PythonScript/Helper/Helper.py
|
# This Python file uses the following encoding: utf-8
def main():
try:
fileName = "MengZi_Traditional.md"
filePath = "../../source/" + fileName
content = None
with open(filePath,'r') as file:
content = file.read().decode("utf-8")
content = content.replace(u"「",u'“')
content = content.replace(u"」",u'”')
content = content.replace(u"『",u'‘')
content = content.replace(u"』",u'’')
with open(filePath,'w') as file:
file.write(content.encode("utf-8"))
print "OK"
except IOError:
print ("IOError occurs while handling the file (" + filePath + ").")
if __name__ == '__main__':
main()
|
# This Python file uses the following encoding: utf-8
def Convert(content):
    traditionalToSimplified = {
u"「":u"“",
u"」":u"”",
u"『":u"‘",
u"』":u"’",
}
    for key in traditionalToSimplified:
        content = content.replace(key, traditionalToSimplified[key])
return content
def main():
try:
fileName = "MengZi_Traditional - Test.md"
filePath = "../../source/" + fileName
content = None
with open(filePath,'r') as file:
content = file.read().decode("utf-8")
content = Convert(content)
with open(filePath,'w') as file:
file.write(content.encode("utf-8"))
print "OK"
except IOError:
print ("IOError occurs while handling the file (" + filePath + ").")
if __name__ == '__main__':
main()
|
Use Convert() to simplify code
|
Use Convert() to simplify code
|
Python
|
mit
|
fan-jiang/Dujing
|
# This Python file uses the following encoding: utf-8
def main():
try:
fileName = "MengZi_Traditional.md"
filePath = "../../source/" + fileName
content = None
with open(filePath,'r') as file:
content = file.read().decode("utf-8")
content = content.replace(u"「",u'“')
content = content.replace(u"」",u'”')
content = content.replace(u"『",u'‘')
content = content.replace(u"』",u'’')
with open(filePath,'w') as file:
file.write(content.encode("utf-8"))
print "OK"
except IOError:
print ("IOError occurs while handling the file (" + filePath + ").")
if __name__ == '__main__':
main()Use Convert() to simplify code
|
# This Python file uses the following encoding: utf-8
def Convert(content):
    traditionalToSimplified = {
u"「":u"“",
u"」":u"”",
u"『":u"‘",
u"』":u"’",
}
    for key in traditionalToSimplified:
        content = content.replace(key, traditionalToSimplified[key])
return content
def main():
try:
fileName = "MengZi_Traditional - Test.md"
filePath = "../../source/" + fileName
content = None
with open(filePath,'r') as file:
content = file.read().decode("utf-8")
content = Convert(content)
with open(filePath,'w') as file:
file.write(content.encode("utf-8"))
print "OK"
except IOError:
print ("IOError occurs while handling the file (" + filePath + ").")
if __name__ == '__main__':
main()
|
<commit_before># This Python file uses the following encoding: utf-8
def main():
try:
fileName = "MengZi_Traditional.md"
filePath = "../../source/" + fileName
content = None
with open(filePath,'r') as file:
content = file.read().decode("utf-8")
content = content.replace(u"「",u'“')
content = content.replace(u"」",u'”')
content = content.replace(u"『",u'‘')
content = content.replace(u"』",u'’')
with open(filePath,'w') as file:
file.write(content.encode("utf-8"))
print "OK"
except IOError:
print ("IOError occurs while handling the file (" + filePath + ").")
if __name__ == '__main__':
main()<commit_msg>Use Convert() to simplify code<commit_after>
|
# This Python file uses the following encoding: utf-8
def Convert(content):
    traditionalToSimplified = {
u"「":u"“",
u"」":u"”",
u"『":u"‘",
u"』":u"’",
}
    for key in traditionalToSimplified:
        content = content.replace(key, traditionalToSimplified[key])
return content
def main():
try:
fileName = "MengZi_Traditional - Test.md"
filePath = "../../source/" + fileName
content = None
with open(filePath,'r') as file:
content = file.read().decode("utf-8")
content = Convert(content)
with open(filePath,'w') as file:
file.write(content.encode("utf-8"))
print "OK"
except IOError:
print ("IOError occurs while handling the file (" + filePath + ").")
if __name__ == '__main__':
main()
|
# This Python file uses the following encoding: utf-8
def main():
try:
fileName = "MengZi_Traditional.md"
filePath = "../../source/" + fileName
content = None
with open(filePath,'r') as file:
content = file.read().decode("utf-8")
content = content.replace(u"「",u'“')
content = content.replace(u"」",u'”')
content = content.replace(u"『",u'‘')
content = content.replace(u"』",u'’')
with open(filePath,'w') as file:
file.write(content.encode("utf-8"))
print "OK"
except IOError:
print ("IOError occurs while handling the file (" + filePath + ").")
if __name__ == '__main__':
main()Use Convert() to simplify code# This Python file uses the following encoding: utf-8
def Convert(content):
    traditionalToSimplified = {
u"「":u"“",
u"」":u"”",
u"『":u"‘",
u"』":u"’",
}
    for key in traditionalToSimplified:
        content = content.replace(key, traditionalToSimplified[key])
return content
def main():
try:
fileName = "MengZi_Traditional - Test.md"
filePath = "../../source/" + fileName
content = None
with open(filePath,'r') as file:
content = file.read().decode("utf-8")
content = Convert(content)
with open(filePath,'w') as file:
file.write(content.encode("utf-8"))
print "OK"
except IOError:
print ("IOError occurs while handling the file (" + filePath + ").")
if __name__ == '__main__':
main()
|
<commit_before># This Python file uses the following encoding: utf-8
def main():
try:
fileName = "MengZi_Traditional.md"
filePath = "../../source/" + fileName
content = None
with open(filePath,'r') as file:
content = file.read().decode("utf-8")
content = content.replace(u"「",u'“')
content = content.replace(u"」",u'”')
content = content.replace(u"『",u'‘')
content = content.replace(u"』",u'’')
with open(filePath,'w') as file:
file.write(content.encode("utf-8"))
print "OK"
except IOError:
print ("IOError occurs while handling the file (" + filePath + ").")
if __name__ == '__main__':
main()<commit_msg>Use Convert() to simplify code<commit_after># This Python file uses the following encoding: utf-8
def Convert(content):
    traditionalToSimplified = {
u"「":u"“",
u"」":u"”",
u"『":u"‘",
u"』":u"’",
}
    for key in traditionalToSimplified:
        content = content.replace(key, traditionalToSimplified[key])
return content
def main():
try:
fileName = "MengZi_Traditional - Test.md"
filePath = "../../source/" + fileName
content = None
with open(filePath,'r') as file:
content = file.read().decode("utf-8")
content = Convert(content)
with open(filePath,'w') as file:
file.write(content.encode("utf-8"))
print "OK"
except IOError:
print ("IOError occurs while handling the file (" + filePath + ").")
if __name__ == '__main__':
main()
|
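An aside on the refactor above: because every entry in the table replaces a single character, Python 2's unicode.translate can apply the whole mapping in one pass over the text instead of one str.replace call per key. A rough equivalent sketch, not part of the commit:

# Maps Unicode code points of CJK corner brackets to curly quotes.
TRANSLATION = {
    ord(u"\u300c"): u"\u201c",  # left corner bracket -> left double quote
    ord(u"\u300d"): u"\u201d",  # right corner bracket -> right double quote
    ord(u"\u300e"): u"\u2018",  # left white corner bracket -> left single quote
    ord(u"\u300f"): u"\u2019",  # right white corner bracket -> right single quote
}

def convert(content):
    # unicode.translate walks the string once, looking up each code point.
    return content.translate(TRANSLATION)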
3ef3a5f4b0453af2d6853bc017fe4c44b9ee90ab
|
migrations/versions/45a35ac9bfe_create_lr_uprn_column_with_a_gin_index.py
|
migrations/versions/45a35ac9bfe_create_lr_uprn_column_with_a_gin_index.py
|
"""Create lr_uprn column with a GIN index
Revision ID: 45a35ac9bfe
Revises: 2bbd8de7dcb
Create Date: 2015-09-22 10:36:04.307515
"""
# revision identifiers, used by Alembic.
revision = '45a35ac9bfe'
down_revision = '2bbd8de7dcb'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('title_register_data', sa.Column('lr_uprns', postgresql.ARRAY(sa.String()), nullable=False))
op.create_index('idx_title_uprns', 'title_register_data', ['lr_uprns'], unique=False, postgresql_using='gin')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index('idx_title_uprns', table_name='title_register_data')
op.drop_column('title_register_data', 'lr_uprns')
### end Alembic commands ###
|
"""Create lr_uprn column with a GIN index
Revision ID: 45a35ac9bfe
Revises: 2bbd8de7dcb
Create Date: 2015-09-22 10:36:04.307515
"""
# revision identifiers, used by Alembic.
revision = '45a35ac9bfe'
down_revision = '2bbd8de7dcb'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('title_register_data', sa.Column('lr_uprns', postgresql.ARRAY(sa.String()), nullable=True))
op.create_index('idx_title_uprns', 'title_register_data', ['lr_uprns'], unique=False, postgresql_using='gin')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index('idx_title_uprns', table_name='title_register_data')
op.drop_column('title_register_data', 'lr_uprns')
### end Alembic commands ###
|
Fix not-nullable lr_uprns field to be nullable
|
Fix not-nullable lr_uprns field to be nullable
|
Python
|
mit
|
LandRegistry/digital-register-api,LandRegistry/digital-register-api
|
"""Create lr_uprn column with a GIN index
Revision ID: 45a35ac9bfe
Revises: 2bbd8de7dcb
Create Date: 2015-09-22 10:36:04.307515
"""
# revision identifiers, used by Alembic.
revision = '45a35ac9bfe'
down_revision = '2bbd8de7dcb'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('title_register_data', sa.Column('lr_uprns', postgresql.ARRAY(sa.String()), nullable=False))
op.create_index('idx_title_uprns', 'title_register_data', ['lr_uprns'], unique=False, postgresql_using='gin')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index('idx_title_uprns', table_name='title_register_data')
op.drop_column('title_register_data', 'lr_uprns')
### end Alembic commands ###
Fix not-nullable lr_uprns field to be nullable
|
"""Create lr_uprn column with a GIN index
Revision ID: 45a35ac9bfe
Revises: 2bbd8de7dcb
Create Date: 2015-09-22 10:36:04.307515
"""
# revision identifiers, used by Alembic.
revision = '45a35ac9bfe'
down_revision = '2bbd8de7dcb'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('title_register_data', sa.Column('lr_uprns', postgresql.ARRAY(sa.String()), nullable=True))
op.create_index('idx_title_uprns', 'title_register_data', ['lr_uprns'], unique=False, postgresql_using='gin')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index('idx_title_uprns', table_name='title_register_data')
op.drop_column('title_register_data', 'lr_uprns')
### end Alembic commands ###
|
<commit_before>"""Create lr_uprn column with a GIN index
Revision ID: 45a35ac9bfe
Revises: 2bbd8de7dcb
Create Date: 2015-09-22 10:36:04.307515
"""
# revision identifiers, used by Alembic.
revision = '45a35ac9bfe'
down_revision = '2bbd8de7dcb'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('title_register_data', sa.Column('lr_uprns', postgresql.ARRAY(sa.String()), nullable=False))
op.create_index('idx_title_uprns', 'title_register_data', ['lr_uprns'], unique=False, postgresql_using='gin')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index('idx_title_uprns', table_name='title_register_data')
op.drop_column('title_register_data', 'lr_uprns')
### end Alembic commands ###
<commit_msg>Fix not-nullable lr_uprns field to be nullable<commit_after>
|
"""Create lr_uprn column with a GIN index
Revision ID: 45a35ac9bfe
Revises: 2bbd8de7dcb
Create Date: 2015-09-22 10:36:04.307515
"""
# revision identifiers, used by Alembic.
revision = '45a35ac9bfe'
down_revision = '2bbd8de7dcb'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('title_register_data', sa.Column('lr_uprns', postgresql.ARRAY(sa.String()), nullable=True))
op.create_index('idx_title_uprns', 'title_register_data', ['lr_uprns'], unique=False, postgresql_using='gin')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index('idx_title_uprns', table_name='title_register_data')
op.drop_column('title_register_data', 'lr_uprns')
### end Alembic commands ###
|
"""Create lr_uprn column with a GIN index
Revision ID: 45a35ac9bfe
Revises: 2bbd8de7dcb
Create Date: 2015-09-22 10:36:04.307515
"""
# revision identifiers, used by Alembic.
revision = '45a35ac9bfe'
down_revision = '2bbd8de7dcb'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('title_register_data', sa.Column('lr_uprns', postgresql.ARRAY(sa.String()), nullable=False))
op.create_index('idx_title_uprns', 'title_register_data', ['lr_uprns'], unique=False, postgresql_using='gin')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index('idx_title_uprns', table_name='title_register_data')
op.drop_column('title_register_data', 'lr_uprns')
### end Alembic commands ###
Fix not-nullable lr_uprns field to be nullable"""Create lr_uprn column with a GIN index
Revision ID: 45a35ac9bfe
Revises: 2bbd8de7dcb
Create Date: 2015-09-22 10:36:04.307515
"""
# revision identifiers, used by Alembic.
revision = '45a35ac9bfe'
down_revision = '2bbd8de7dcb'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('title_register_data', sa.Column('lr_uprns', postgresql.ARRAY(sa.String()), nullable=True))
op.create_index('idx_title_uprns', 'title_register_data', ['lr_uprns'], unique=False, postgresql_using='gin')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index('idx_title_uprns', table_name='title_register_data')
op.drop_column('title_register_data', 'lr_uprns')
### end Alembic commands ###
|
<commit_before>"""Create lr_uprn column with a GIN index
Revision ID: 45a35ac9bfe
Revises: 2bbd8de7dcb
Create Date: 2015-09-22 10:36:04.307515
"""
# revision identifiers, used by Alembic.
revision = '45a35ac9bfe'
down_revision = '2bbd8de7dcb'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('title_register_data', sa.Column('lr_uprns', postgresql.ARRAY(sa.String()), nullable=False))
op.create_index('idx_title_uprns', 'title_register_data', ['lr_uprns'], unique=False, postgresql_using='gin')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index('idx_title_uprns', table_name='title_register_data')
op.drop_column('title_register_data', 'lr_uprns')
### end Alembic commands ###
<commit_msg>Fix not-nullable lr_uprns field to be nullable<commit_after>"""Create lr_uprn column with a GIN index
Revision ID: 45a35ac9bfe
Revises: 2bbd8de7dcb
Create Date: 2015-09-22 10:36:04.307515
"""
# revision identifiers, used by Alembic.
revision = '45a35ac9bfe'
down_revision = '2bbd8de7dcb'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('title_register_data', sa.Column('lr_uprns', postgresql.ARRAY(sa.String()), nullable=True))
op.create_index('idx_title_uprns', 'title_register_data', ['lr_uprns'], unique=False, postgresql_using='gin')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index('idx_title_uprns', table_name='title_register_data')
op.drop_column('title_register_data', 'lr_uprns')
### end Alembic commands ###
|
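For context on the migration above: a GIN index on a PostgreSQL array column mainly speeds up containment operators such as @>. The sketch below shows how a lookup against lr_uprns might be written; the DSN, output column, and UPRN value are placeholders rather than details taken from the project.

import sqlalchemy as sa

engine = sa.create_engine('postgresql:///register')  # placeholder DSN

with engine.connect() as conn:
    # "@>" asks whether lr_uprns contains the given one-element array,
    # which the GIN index created by the migration can answer quickly.
    rows = conn.execute(
        sa.text("SELECT title_number FROM title_register_data "
                "WHERE lr_uprns @> ARRAY[:uprn]::varchar[]"),
        {"uprn": "100023336956"},  # example UPRN, not from the commit
    )
    for row in rows:
        print(row)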
c2c488210b2c1ec3b1edfba1e510d228fa5e74d2
|
partner_communication_switzerland/migrations/12.0.1.1.2/post-migration.py
|
partner_communication_switzerland/migrations/12.0.1.1.2/post-migration.py
|
from openupgradelib import openupgrade
def migrate(cr, installed_version):
if not installed_version:
return
# Update data
openupgrade.load_xml(
cr, "partner_communication_switzerland", "data/onboarding_process.xml")
|
from openupgradelib import openupgrade
def migrate(cr, installed_version):
if not installed_version:
return
# Copy start_date over onboarding_start_date
cr.execute("""
UPDATE recurring_contract
SET onboarding_start_date = start_date
WHERE is_first_sponsorship = true
""")
# Update data
openupgrade.load_xml(
cr, "partner_communication_switzerland", "data/onboarding_process.xml")
|
Include migration for current running onboarding processes
|
Include migration for current running onboarding processes
|
Python
|
agpl-3.0
|
CompassionCH/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland,eicher31/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland
|
from openupgradelib import openupgrade
def migrate(cr, installed_version):
if not installed_version:
return
# Update data
openupgrade.load_xml(
cr, "partner_communication_switzerland", "data/onboarding_process.xml")
Include migration for current running onboarding processes
|
from openupgradelib import openupgrade
def migrate(cr, installed_version):
if not installed_version:
return
# Copy start_date over onboarding_start_date
cr.execute("""
UPDATE recurring_contract
SET onboarding_start_date = start_date
WHERE is_first_sponsorship = true
""")
# Update data
openupgrade.load_xml(
cr, "partner_communication_switzerland", "data/onboarding_process.xml")
|
<commit_before>from openupgradelib import openupgrade
def migrate(cr, installed_version):
if not installed_version:
return
# Update data
openupgrade.load_xml(
cr, "partner_communication_switzerland", "data/onboarding_process.xml")
<commit_msg>Include migration for current running onboarding processes<commit_after>
|
from openupgradelib import openupgrade
def migrate(cr, installed_version):
if not installed_version:
return
# Copy start_date over onboarding_start_date
cr.execute("""
UPDATE recurring_contract
SET onboarding_start_date = start_date
WHERE is_first_sponsorship = true
""")
# Update data
openupgrade.load_xml(
cr, "partner_communication_switzerland", "data/onboarding_process.xml")
|
from openupgradelib import openupgrade
def migrate(cr, installed_version):
if not installed_version:
return
# Update data
openupgrade.load_xml(
cr, "partner_communication_switzerland", "data/onboarding_process.xml")
Include migration for current running onboarding processesfrom openupgradelib import openupgrade
def migrate(cr, installed_version):
if not installed_version:
return
# Copy start_date over onboarding_start_date
cr.execute("""
UPDATE recurring_contract
SET onboarding_start_date = start_date
WHERE is_first_sponsorship = true
""")
# Update data
openupgrade.load_xml(
cr, "partner_communication_switzerland", "data/onboarding_process.xml")
|
<commit_before>from openupgradelib import openupgrade
def migrate(cr, installed_version):
if not installed_version:
return
# Update data
openupgrade.load_xml(
cr, "partner_communication_switzerland", "data/onboarding_process.xml")
<commit_msg>Include migration for current running onboarding processes<commit_after>from openupgradelib import openupgrade
def migrate(cr, installed_version):
if not installed_version:
return
# Copy start_date over onboarding_start_date
cr.execute("""
UPDATE recurring_contract
SET onboarding_start_date = start_date
WHERE is_first_sponsorship = true
""")
# Update data
openupgrade.load_xml(
cr, "partner_communication_switzerland", "data/onboarding_process.xml")
|
049e21dd2d4e90120bfe297696cffa5000028854
|
dynd/benchmarks/benchmark_arithmetic.py
|
dynd/benchmarks/benchmark_arithmetic.py
|
import numpy as np
from dynd import nd, ndt
from benchrun import Benchmark, clock
class ArithemticBenchmark(Benchmark):
parameters = ('size',)
size = [100000, 10000000]
def run(self, size):
a = nd.uniform(dst_tp = ndt.type('{} * float64'.format(size)))
b = nd.uniform(dst_tp = ndt.type('{} * float64'.format(size)))
start = clock()
a + b
stop = clock()
return stop - start
class NumPyArithemticBenchmark(Benchmark):
parameters = ('size',)
size = [100000, 10000000]
def run(self, size):
a = np.random.uniform(size = size)
b = np.random.uniform(size = size)
start = clock()
a + b
stop = clock()
return stop - start
if __name__ == '__main__':
benchmark = ArithemticBenchmark()
benchmark.print_result()
benchmark = NumPyArithemticBenchmark()
benchmark.print_result()
|
import numpy as np
from dynd import nd, ndt
from benchrun import Benchmark, clock
class ArithmeticBenchmark(Benchmark):
parameters = ('size',)
size = [100000, 10000000, 100000000]
def run(self, size):
a = nd.uniform(dst_tp = ndt.type('{} * float64'.format(size)))
b = nd.uniform(dst_tp = ndt.type('{} * float64'.format(size)))
start = clock()
a + b
stop = clock()
return stop - start
class NumPyArithmeticBenchmark(Benchmark):
parameters = ('size',)
size = [100000, 10000000, 100000000]
def run(self, size):
a = np.random.uniform(size = size)
b = np.random.uniform(size = size)
start = clock()
a + b
stop = clock()
return stop - start
if __name__ == '__main__':
benchmark = ArithmeticBenchmark()
benchmark.print_result()
benchmark = NumPyArithmeticBenchmark()
benchmark.print_result()
|
Add one bigger size to arithmetic benchmark
|
Add one bigger size to arithmetic benchmark
|
Python
|
bsd-2-clause
|
michaelpacer/dynd-python,insertinterestingnamehere/dynd-python,pombredanne/dynd-python,pombredanne/dynd-python,cpcloud/dynd-python,ContinuumIO/dynd-python,michaelpacer/dynd-python,izaid/dynd-python,michaelpacer/dynd-python,insertinterestingnamehere/dynd-python,izaid/dynd-python,mwiebe/dynd-python,izaid/dynd-python,mwiebe/dynd-python,pombredanne/dynd-python,cpcloud/dynd-python,insertinterestingnamehere/dynd-python,mwiebe/dynd-python,ContinuumIO/dynd-python,insertinterestingnamehere/dynd-python,cpcloud/dynd-python,mwiebe/dynd-python,ContinuumIO/dynd-python,cpcloud/dynd-python,ContinuumIO/dynd-python,michaelpacer/dynd-python,pombredanne/dynd-python,izaid/dynd-python
|
import numpy as np
from dynd import nd, ndt
from benchrun import Benchmark, clock
class ArithemticBenchmark(Benchmark):
parameters = ('size',)
size = [100000, 10000000]
def run(self, size):
a = nd.uniform(dst_tp = ndt.type('{} * float64'.format(size)))
b = nd.uniform(dst_tp = ndt.type('{} * float64'.format(size)))
start = clock()
a + b
stop = clock()
return stop - start
class NumPyArithemticBenchmark(Benchmark):
parameters = ('size',)
size = [100000, 10000000]
def run(self, size):
a = np.random.uniform(size = size)
b = np.random.uniform(size = size)
start = clock()
a + b
stop = clock()
return stop - start
if __name__ == '__main__':
benchmark = ArithemticBenchmark()
benchmark.print_result()
benchmark = NumPyArithemticBenchmark()
benchmark.print_result()Add one bigger size to arithmetic benchmark
|
import numpy as np
from dynd import nd, ndt
from benchrun import Benchmark, clock
class ArithmeticBenchmark(Benchmark):
parameters = ('size',)
size = [100000, 10000000, 100000000]
def run(self, size):
a = nd.uniform(dst_tp = ndt.type('{} * float64'.format(size)))
b = nd.uniform(dst_tp = ndt.type('{} * float64'.format(size)))
start = clock()
a + b
stop = clock()
return stop - start
class NumPyArithmeticBenchmark(Benchmark):
parameters = ('size',)
size = [100000, 10000000, 100000000]
def run(self, size):
a = np.random.uniform(size = size)
b = np.random.uniform(size = size)
start = clock()
a + b
stop = clock()
return stop - start
if __name__ == '__main__':
benchmark = ArithmeticBenchmark()
benchmark.print_result()
benchmark = NumPyArithmeticBenchmark()
benchmark.print_result()
|
<commit_before>import numpy as np
from dynd import nd, ndt
from benchrun import Benchmark, clock
class ArithemticBenchmark(Benchmark):
parameters = ('size',)
size = [100000, 10000000]
def run(self, size):
a = nd.uniform(dst_tp = ndt.type('{} * float64'.format(size)))
b = nd.uniform(dst_tp = ndt.type('{} * float64'.format(size)))
start = clock()
a + b
stop = clock()
return stop - start
class NumPyArithemticBenchmark(Benchmark):
parameters = ('size',)
size = [100000, 10000000]
def run(self, size):
a = np.random.uniform(size = size)
b = np.random.uniform(size = size)
start = clock()
a + b
stop = clock()
return stop - start
if __name__ == '__main__':
benchmark = ArithemticBenchmark()
benchmark.print_result()
benchmark = NumPyArithemticBenchmark()
benchmark.print_result()<commit_msg>Add one bigger size to arithmetic benchmark<commit_after>
|
import numpy as np
from dynd import nd, ndt
from benchrun import Benchmark, clock
class ArithmeticBenchmark(Benchmark):
parameters = ('size',)
size = [100000, 10000000, 100000000]
def run(self, size):
a = nd.uniform(dst_tp = ndt.type('{} * float64'.format(size)))
b = nd.uniform(dst_tp = ndt.type('{} * float64'.format(size)))
start = clock()
a + b
stop = clock()
return stop - start
class NumPyArithmeticBenchmark(Benchmark):
parameters = ('size',)
size = [100000, 10000000, 100000000]
def run(self, size):
a = np.random.uniform(size = size)
b = np.random.uniform(size = size)
start = clock()
a + b
stop = clock()
return stop - start
if __name__ == '__main__':
benchmark = ArithmeticBenchmark()
benchmark.print_result()
benchmark = NumPyArithmeticBenchmark()
benchmark.print_result()
|
import numpy as np
from dynd import nd, ndt
from benchrun import Benchmark, clock
class ArithemticBenchmark(Benchmark):
parameters = ('size',)
size = [100000, 10000000]
def run(self, size):
a = nd.uniform(dst_tp = ndt.type('{} * float64'.format(size)))
b = nd.uniform(dst_tp = ndt.type('{} * float64'.format(size)))
start = clock()
a + b
stop = clock()
return stop - start
class NumPyArithemticBenchmark(Benchmark):
parameters = ('size',)
size = [100000, 10000000]
def run(self, size):
a = np.random.uniform(size = size)
b = np.random.uniform(size = size)
start = clock()
a + b
stop = clock()
return stop - start
if __name__ == '__main__':
benchmark = ArithemticBenchmark()
benchmark.print_result()
benchmark = NumPyArithemticBenchmark()
benchmark.print_result()Add one bigger size to arithmetic benchmarkimport numpy as np
from dynd import nd, ndt
from benchrun import Benchmark, clock
class ArithmeticBenchmark(Benchmark):
parameters = ('size',)
size = [100000, 10000000, 100000000]
def run(self, size):
a = nd.uniform(dst_tp = ndt.type('{} * float64'.format(size)))
b = nd.uniform(dst_tp = ndt.type('{} * float64'.format(size)))
start = clock()
a + b
stop = clock()
return stop - start
class NumPyArithmeticBenchmark(Benchmark):
parameters = ('size',)
size = [100000, 10000000, 100000000]
def run(self, size):
a = np.random.uniform(size = size)
b = np.random.uniform(size = size)
start = clock()
a + b
stop = clock()
return stop - start
if __name__ == '__main__':
benchmark = ArithmeticBenchmark()
benchmark.print_result()
benchmark = NumPyArithmeticBenchmark()
benchmark.print_result()
|
<commit_before>import numpy as np
from dynd import nd, ndt
from benchrun import Benchmark, clock
class ArithemticBenchmark(Benchmark):
parameters = ('size',)
size = [100000, 10000000]
def run(self, size):
a = nd.uniform(dst_tp = ndt.type('{} * float64'.format(size)))
b = nd.uniform(dst_tp = ndt.type('{} * float64'.format(size)))
start = clock()
a + b
stop = clock()
return stop - start
class NumPyArithemticBenchmark(Benchmark):
parameters = ('size',)
size = [100000, 10000000]
def run(self, size):
a = np.random.uniform(size = size)
b = np.random.uniform(size = size)
start = clock()
a + b
stop = clock()
return stop - start
if __name__ == '__main__':
benchmark = ArithemticBenchmark()
benchmark.print_result()
benchmark = NumPyArithemticBenchmark()
benchmark.print_result()<commit_msg>Add one bigger size to arithmetic benchmark<commit_after>import numpy as np
from dynd import nd, ndt
from benchrun import Benchmark, clock
class ArithmeticBenchmark(Benchmark):
parameters = ('size',)
size = [100000, 10000000, 100000000]
def run(self, size):
a = nd.uniform(dst_tp = ndt.type('{} * float64'.format(size)))
b = nd.uniform(dst_tp = ndt.type('{} * float64'.format(size)))
start = clock()
a + b
stop = clock()
return stop - start
class NumPyArithmeticBenchmark(Benchmark):
parameters = ('size',)
size = [100000, 10000000, 100000000]
def run(self, size):
a = np.random.uniform(size = size)
b = np.random.uniform(size = size)
start = clock()
a + b
stop = clock()
return stop - start
if __name__ == '__main__':
benchmark = ArithmeticBenchmark()
benchmark.print_result()
benchmark = NumPyArithmeticBenchmark()
benchmark.print_result()
|
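As a side note on the benchmarks above: the start/stop clock() pattern can be reproduced without the benchrun dependency by repeating each measurement and keeping the fastest run, which damps scheduler noise. A minimal NumPy-only sketch under that assumption:

import time
import numpy as np

def best_time(size, repeats=3):
    a = np.random.uniform(size=size)
    b = np.random.uniform(size=size)
    times = []
    for _ in range(repeats):
        start = time.time()
        a + b  # the operation under test, mirroring the benchmark above
        times.append(time.time() - start)
    return min(times)  # fastest run approximates the noise-free cost

if __name__ == '__main__':
    for size in (100000, 10000000):
        print(best_time(size))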
a34a34c1bf897b5681e7a421ea53ca5e9d065ab8
|
src/zeit/cms/tagging/testing.py
|
src/zeit/cms/tagging/testing.py
|
# Copyright (c) 2011 gocept gmbh & co. kg
# See also LICENSE.txt
import mock
import zeit.cms.tagging.tag
class TaggingHelper(object):
"""Mixin for tests which need some tagging infrastrucutre."""
def get_tag(self, code):
tag = zeit.cms.tagging.tag.Tag(code, code)
tag.disabled = False
return tag
def setup_tags(self, *codes):
import stabledict
class Tags(stabledict.StableDict):
pass
tags = Tags()
for code in codes:
tags[code] = self.get_tag(code)
patcher = mock.patch('zeit.cms.tagging.interfaces.ITagger')
self.addCleanup(patcher.stop)
self.tagger = patcher.start()
self.tagger.return_value = tags
tags.updateOrder = mock.Mock()
tags.update = mock.Mock()
return tags
|
# Copyright (c) 2011 gocept gmbh & co. kg
# See also LICENSE.txt
import mock
import zeit.cms.tagging.tag
class TaggingHelper(object):
"""Mixin for tests which need some tagging infrastrucutre."""
def get_tag(self, code):
tag = zeit.cms.tagging.tag.Tag(code, code)
return tag
def setup_tags(self, *codes):
import stabledict
class Tags(stabledict.StableDict):
pass
tags = Tags()
for code in codes:
tags[code] = self.get_tag(code)
patcher = mock.patch('zeit.cms.tagging.interfaces.ITagger')
self.addCleanup(patcher.stop)
self.tagger = patcher.start()
self.tagger.return_value = tags
tags.updateOrder = mock.Mock()
tags.update = mock.Mock()
return tags
|
Remove superfluous variable ('disabled' is a concept of zeit.intrafind and doesn't belong here)
|
Remove superfluous variable ('disabled' is a concept of zeit.intrafind and doesn't belong here)
|
Python
|
bsd-3-clause
|
ZeitOnline/zeit.cms,ZeitOnline/zeit.cms,ZeitOnline/zeit.cms,ZeitOnline/zeit.cms
|
# Copyright (c) 2011 gocept gmbh & co. kg
# See also LICENSE.txt
import mock
import zeit.cms.tagging.tag
class TaggingHelper(object):
"""Mixin for tests which need some tagging infrastrucutre."""
def get_tag(self, code):
tag = zeit.cms.tagging.tag.Tag(code, code)
tag.disabled = False
return tag
def setup_tags(self, *codes):
import stabledict
class Tags(stabledict.StableDict):
pass
tags = Tags()
for code in codes:
tags[code] = self.get_tag(code)
patcher = mock.patch('zeit.cms.tagging.interfaces.ITagger')
self.addCleanup(patcher.stop)
self.tagger = patcher.start()
self.tagger.return_value = tags
tags.updateOrder = mock.Mock()
tags.update = mock.Mock()
return tags
Remove superfluous variable ('disabled' is a concept of zeit.intrafind and doesn't belong here)
|
# Copyright (c) 2011 gocept gmbh & co. kg
# See also LICENSE.txt
import mock
import zeit.cms.tagging.tag
class TaggingHelper(object):
"""Mixin for tests which need some tagging infrastrucutre."""
def get_tag(self, code):
tag = zeit.cms.tagging.tag.Tag(code, code)
return tag
def setup_tags(self, *codes):
import stabledict
class Tags(stabledict.StableDict):
pass
tags = Tags()
for code in codes:
tags[code] = self.get_tag(code)
patcher = mock.patch('zeit.cms.tagging.interfaces.ITagger')
self.addCleanup(patcher.stop)
self.tagger = patcher.start()
self.tagger.return_value = tags
tags.updateOrder = mock.Mock()
tags.update = mock.Mock()
return tags
|
<commit_before># Copyright (c) 2011 gocept gmbh & co. kg
# See also LICENSE.txt
import mock
import zeit.cms.tagging.tag
class TaggingHelper(object):
"""Mixin for tests which need some tagging infrastrucutre."""
def get_tag(self, code):
tag = zeit.cms.tagging.tag.Tag(code, code)
tag.disabled = False
return tag
def setup_tags(self, *codes):
import stabledict
class Tags(stabledict.StableDict):
pass
tags = Tags()
for code in codes:
tags[code] = self.get_tag(code)
patcher = mock.patch('zeit.cms.tagging.interfaces.ITagger')
self.addCleanup(patcher.stop)
self.tagger = patcher.start()
self.tagger.return_value = tags
tags.updateOrder = mock.Mock()
tags.update = mock.Mock()
return tags
<commit_msg>Remove superfluous variable ('disabled' is a concept of zeit.intrafind and doesn't belong here)<commit_after>
|
# Copyright (c) 2011 gocept gmbh & co. kg
# See also LICENSE.txt
import mock
import zeit.cms.tagging.tag
class TaggingHelper(object):
"""Mixin for tests which need some tagging infrastrucutre."""
def get_tag(self, code):
tag = zeit.cms.tagging.tag.Tag(code, code)
return tag
def setup_tags(self, *codes):
import stabledict
class Tags(stabledict.StableDict):
pass
tags = Tags()
for code in codes:
tags[code] = self.get_tag(code)
patcher = mock.patch('zeit.cms.tagging.interfaces.ITagger')
self.addCleanup(patcher.stop)
self.tagger = patcher.start()
self.tagger.return_value = tags
tags.updateOrder = mock.Mock()
tags.update = mock.Mock()
return tags
|
# Copyright (c) 2011 gocept gmbh & co. kg
# See also LICENSE.txt
import mock
import zeit.cms.tagging.tag
class TaggingHelper(object):
"""Mixin for tests which need some tagging infrastrucutre."""
def get_tag(self, code):
tag = zeit.cms.tagging.tag.Tag(code, code)
tag.disabled = False
return tag
def setup_tags(self, *codes):
import stabledict
class Tags(stabledict.StableDict):
pass
tags = Tags()
for code in codes:
tags[code] = self.get_tag(code)
patcher = mock.patch('zeit.cms.tagging.interfaces.ITagger')
self.addCleanup(patcher.stop)
self.tagger = patcher.start()
self.tagger.return_value = tags
tags.updateOrder = mock.Mock()
tags.update = mock.Mock()
return tags
Remove superfluous variable ('disabled' is a concept of zeit.intrafind and doesn't belong here)# Copyright (c) 2011 gocept gmbh & co. kg
# See also LICENSE.txt
import mock
import zeit.cms.tagging.tag
class TaggingHelper(object):
"""Mixin for tests which need some tagging infrastrucutre."""
def get_tag(self, code):
tag = zeit.cms.tagging.tag.Tag(code, code)
return tag
def setup_tags(self, *codes):
import stabledict
class Tags(stabledict.StableDict):
pass
tags = Tags()
for code in codes:
tags[code] = self.get_tag(code)
patcher = mock.patch('zeit.cms.tagging.interfaces.ITagger')
self.addCleanup(patcher.stop)
self.tagger = patcher.start()
self.tagger.return_value = tags
tags.updateOrder = mock.Mock()
tags.update = mock.Mock()
return tags
|
<commit_before># Copyright (c) 2011 gocept gmbh & co. kg
# See also LICENSE.txt
import mock
import zeit.cms.tagging.tag
class TaggingHelper(object):
"""Mixin for tests which need some tagging infrastrucutre."""
def get_tag(self, code):
tag = zeit.cms.tagging.tag.Tag(code, code)
tag.disabled = False
return tag
def setup_tags(self, *codes):
import stabledict
class Tags(stabledict.StableDict):
pass
tags = Tags()
for code in codes:
tags[code] = self.get_tag(code)
patcher = mock.patch('zeit.cms.tagging.interfaces.ITagger')
self.addCleanup(patcher.stop)
self.tagger = patcher.start()
self.tagger.return_value = tags
tags.updateOrder = mock.Mock()
tags.update = mock.Mock()
return tags
<commit_msg>Remove superfluous variable ('disabled' is a concept of zeit.intrafind and doesn't belong here)<commit_after># Copyright (c) 2011 gocept gmbh & co. kg
# See also LICENSE.txt
import mock
import zeit.cms.tagging.tag
class TaggingHelper(object):
"""Mixin for tests which need some tagging infrastrucutre."""
def get_tag(self, code):
tag = zeit.cms.tagging.tag.Tag(code, code)
return tag
def setup_tags(self, *codes):
import stabledict
class Tags(stabledict.StableDict):
pass
tags = Tags()
for code in codes:
tags[code] = self.get_tag(code)
patcher = mock.patch('zeit.cms.tagging.interfaces.ITagger')
self.addCleanup(patcher.stop)
self.tagger = patcher.start()
self.tagger.return_value = tags
tags.updateOrder = mock.Mock()
tags.update = mock.Mock()
return tags
|
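For orientation, the helper in the record above is a mixin meant to sit next to a regular test base class; setup_tags() patches ITagger for the duration of the test via addCleanup. A hedged usage sketch follows — the test name and tag codes are illustrative only.

import unittest

from zeit.cms.tagging.testing import TaggingHelper

class ExampleTaggingTest(TaggingHelper, unittest.TestCase):

    def test_tagger_is_patched_with_given_codes(self):
        tags = self.setup_tags('politics', 'economy')
        # StableDict should preserve the insertion order of the codes.
        self.assertEqual(['politics', 'economy'], list(tags))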
cf7086620df23d8af15f7c9898edf39f64965549
|
dbaas/workflow/steps/util/region_migration/check_instances_status.py
|
dbaas/workflow/steps/util/region_migration/check_instances_status.py
|
# -*- coding: utf-8 -*-
import logging
from util import full_stack
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0020
LOG = logging.getLogger(__name__)
class DecreaseTTL(BaseStep):
def __unicode__(self):
return "Checking instances status..."
def do(self, workflow_dict):
try:
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Running undo...")
try:
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
|
# -*- coding: utf-8 -*-
import logging
from util import full_stack
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0020
from drivers.base import ConnectionError
LOG = logging.getLogger(__name__)
class CheckInstancesStatus(BaseStep):
def __unicode__(self):
return "Checking instances status..."
def do(self, workflow_dict):
try:
databaseinfra = workflow_dict['databaseinfra']
driver = databaseinfra.get_driver()
for instance in driver.get_database_instances():
msg = "Instance({}) is down".format(instance)
exception_msg = Exception(msg)
try:
status = driver.check_status(instance)
except ConnectionError:
raise exception_msg
else:
if status is False:
raise exception_msg
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Running undo...")
try:
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
|
Add step to check instances status
|
Add step to check instances status
|
Python
|
bsd-3-clause
|
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
|
# -*- coding: utf-8 -*-
import logging
from util import full_stack
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0020
LOG = logging.getLogger(__name__)
class DecreaseTTL(BaseStep):
def __unicode__(self):
return "Checking instances status..."
def do(self, workflow_dict):
try:
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Running undo...")
try:
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
Add step to check instances status
|
# -*- coding: utf-8 -*-
import logging
from util import full_stack
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0020
from drivers.base import ConnectionError
LOG = logging.getLogger(__name__)
class CheckInstancesStatus(BaseStep):
def __unicode__(self):
return "Checking instances status..."
def do(self, workflow_dict):
try:
databaseinfra = workflow_dict['databaseinfra']
driver = databaseinfra.get_driver()
for instance in driver.get_database_instances():
msg = "Instance({}) is down".format(instance)
exception_msg = Exception(msg)
try:
status = driver.check_status(instance)
except ConnectionError:
raise exception_msg
else:
if status is False:
raise exception_msg
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Running undo...")
try:
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
|
<commit_before># -*- coding: utf-8 -*-
import logging
from util import full_stack
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0020
LOG = logging.getLogger(__name__)
class DecreaseTTL(BaseStep):
def __unicode__(self):
return "Checking instances status..."
def do(self, workflow_dict):
try:
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Running undo...")
try:
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
<commit_msg>Add step to check instances status<commit_after>
|
# -*- coding: utf-8 -*-
import logging
from util import full_stack
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0020
from drivers.base import ConnectionError
LOG = logging.getLogger(__name__)
class CheckInstancesStatus(BaseStep):
def __unicode__(self):
return "Checking instances status..."
def do(self, workflow_dict):
try:
databaseinfra = workflow_dict['databaseinfra']
driver = databaseinfra.get_driver()
for instance in driver.get_database_instances():
msg = "Instance({}) is down".format(instance)
exception_msg = Exception(msg)
try:
status = driver.check_status(instance)
except ConnectionError:
raise exception_msg
else:
if status is False:
raise exception_msg
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Running undo...")
try:
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
|
# -*- coding: utf-8 -*-
import logging
from util import full_stack
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0020
LOG = logging.getLogger(__name__)
class DecreaseTTL(BaseStep):
def __unicode__(self):
return "Checking instances status..."
def do(self, workflow_dict):
try:
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Running undo...")
try:
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
Add step to check instances status# -*- coding: utf-8 -*-
import logging
from util import full_stack
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0020
from drivers.base import ConnectionError
LOG = logging.getLogger(__name__)
class CheckInstancesStatus(BaseStep):
def __unicode__(self):
return "Checking instances status..."
def do(self, workflow_dict):
try:
databaseinfra = workflow_dict['databaseinfra']
driver = databaseinfra.get_driver()
for instance in driver.get_database_instances():
msg = "Instance({}) is down".format(instance)
exception_msg = Exception(msg)
try:
status = driver.check_status(instance)
except ConnectionError:
raise exception_msg
else:
if status is False:
raise exception_msg
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Running undo...")
try:
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
|
<commit_before># -*- coding: utf-8 -*-
import logging
from util import full_stack
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0020
LOG = logging.getLogger(__name__)
class DecreaseTTL(BaseStep):
def __unicode__(self):
return "Checking instances status..."
def do(self, workflow_dict):
try:
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Running undo...")
try:
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
<commit_msg>Add step to check instances status<commit_after># -*- coding: utf-8 -*-
import logging
from util import full_stack
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0020
from drivers.base import ConnectionError
LOG = logging.getLogger(__name__)
class CheckInstancesStatus(BaseStep):
def __unicode__(self):
return "Checking instances status..."
def do(self, workflow_dict):
try:
databaseinfra = workflow_dict['databaseinfra']
driver = databaseinfra.get_driver()
for instance in driver.get_database_instances():
msg = "Instance({}) is down".format(instance)
exception_msg = Exception(msg)
try:
status = driver.check_status(instance)
except ConnectionError:
raise exception_msg
else:
if status is False:
raise exception_msg
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Running undo...")
try:
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
|
c416c998d73e27713fd57ec97c70bacb2390f8c9
|
DashDoc.py
|
DashDoc.py
|
import sublime
import sublime_plugin
import os
import subprocess
def syntax_name(view):
syntax = os.path.basename(view.settings().get('syntax'))
syntax = os.path.splitext(syntax)[0]
return syntax
def docset_prefix(view, settings):
syntax_docset_map = settings.get('syntax_docset_map', {})
syntax = syntax_name(view)
if syntax in syntax_docset_map:
return syntax_docset_map[syntax] + ':'
return None
class DashDocCommand(sublime_plugin.TextCommand):
def run(self, edit, syntax_sensitive=False):
selection = self.view.sel()[0]
if len(selection) == 0:
selection = self.view.word(selection)
word = self.view.substr(selection)
settings = sublime.load_settings('DashDoc.sublime-settings')
if syntax_sensitive or settings.get('syntax_sensitive', False):
docset = docset_prefix(self.view, settings)
else:
docset = None
subprocess.call(["open", "dash://%s%s" % (docset or '', word)])
|
import sublime
import sublime_plugin
import os
import subprocess
def syntax_name(view):
syntax = os.path.basename(view.settings().get('syntax'))
syntax = os.path.splitext(syntax)[0]
return syntax
def camel_case(word):
return ''.join(w.capitalize() if i > 0 else w
for i, w in enumerate(word.split()))
def docset_prefix(view, settings):
syntax_docset_map = settings.get('syntax_docset_map', {})
syntax = syntax_name(view)
if syntax in syntax_docset_map:
return syntax_docset_map[syntax] + ':'
return None
class DashDocCommand(sublime_plugin.TextCommand):
def run(self, edit, syntax_sensitive=False):
selection = self.view.sel()[0]
if len(selection) == 0:
selection = self.view.word(selection)
word = self.view.substr(selection)
settings = sublime.load_settings('DashDoc.sublime-settings')
if syntax_sensitive or settings.get('syntax_sensitive', False):
docset = docset_prefix(self.view, settings)
else:
docset = None
subprocess.call(["open", "dash://%s%s" % (docset or '', camel_case(word))])
|
Use Dash's new CamelCase convention to look up words that contain whitespace
|
Use Dash's new CamelCase convention to look up words that contain whitespace
- Example: converting "create table" into "createTable" will look up "CREATE TABLE"
|
Python
|
apache-2.0
|
farcaller/DashDoc
|
import sublime
import sublime_plugin
import os
import subprocess
def syntax_name(view):
syntax = os.path.basename(view.settings().get('syntax'))
syntax = os.path.splitext(syntax)[0]
return syntax
def docset_prefix(view, settings):
syntax_docset_map = settings.get('syntax_docset_map', {})
syntax = syntax_name(view)
if syntax in syntax_docset_map:
return syntax_docset_map[syntax] + ':'
return None
class DashDocCommand(sublime_plugin.TextCommand):
def run(self, edit, syntax_sensitive=False):
selection = self.view.sel()[0]
if len(selection) == 0:
selection = self.view.word(selection)
word = self.view.substr(selection)
settings = sublime.load_settings('DashDoc.sublime-settings')
if syntax_sensitive or settings.get('syntax_sensitive', False):
docset = docset_prefix(self.view, settings)
else:
docset = None
subprocess.call(["open", "dash://%s%s" % (docset or '', word)])
Use Dash's new CamelCase convention to look up words that contain whitespace
- Example: converting "create table" into "createTable" will look up "CREATE TABLE"
|
import sublime
import sublime_plugin
import os
import subprocess
def syntax_name(view):
syntax = os.path.basename(view.settings().get('syntax'))
syntax = os.path.splitext(syntax)[0]
return syntax
def camel_case(word):
return ''.join(w.capitalize() if i > 0 else w
for i, w in enumerate(word.split()))
def docset_prefix(view, settings):
syntax_docset_map = settings.get('syntax_docset_map', {})
syntax = syntax_name(view)
if syntax in syntax_docset_map:
return syntax_docset_map[syntax] + ':'
return None
class DashDocCommand(sublime_plugin.TextCommand):
def run(self, edit, syntax_sensitive=False):
selection = self.view.sel()[0]
if len(selection) == 0:
selection = self.view.word(selection)
word = self.view.substr(selection)
settings = sublime.load_settings('DashDoc.sublime-settings')
if syntax_sensitive or settings.get('syntax_sensitive', False):
docset = docset_prefix(self.view, settings)
else:
docset = None
subprocess.call(["open", "dash://%s%s" % (docset or '', camel_case(word))])
|
<commit_before>import sublime
import sublime_plugin
import os
import subprocess
def syntax_name(view):
syntax = os.path.basename(view.settings().get('syntax'))
syntax = os.path.splitext(syntax)[0]
return syntax
def docset_prefix(view, settings):
syntax_docset_map = settings.get('syntax_docset_map', {})
syntax = syntax_name(view)
if syntax in syntax_docset_map:
return syntax_docset_map[syntax] + ':'
return None
class DashDocCommand(sublime_plugin.TextCommand):
def run(self, edit, syntax_sensitive=False):
selection = self.view.sel()[0]
if len(selection) == 0:
selection = self.view.word(selection)
word = self.view.substr(selection)
settings = sublime.load_settings('DashDoc.sublime-settings')
if syntax_sensitive or settings.get('syntax_sensitive', False):
docset = docset_prefix(self.view, settings)
else:
docset = None
subprocess.call(["open", "dash://%s%s" % (docset or '', word)])
<commit_msg>Use Dash's new CamelCase convention to look up words that contain whitespace
- Example: converting "create table" into "createTable" will look up "CREATE TABLE"<commit_after>
|
import sublime
import sublime_plugin
import os
import subprocess
def syntax_name(view):
syntax = os.path.basename(view.settings().get('syntax'))
syntax = os.path.splitext(syntax)[0]
return syntax
def camel_case(word):
return ''.join(w.capitalize() if i > 0 else w
for i, w in enumerate(word.split()))
def docset_prefix(view, settings):
syntax_docset_map = settings.get('syntax_docset_map', {})
syntax = syntax_name(view)
if syntax in syntax_docset_map:
return syntax_docset_map[syntax] + ':'
return None
class DashDocCommand(sublime_plugin.TextCommand):
def run(self, edit, syntax_sensitive=False):
selection = self.view.sel()[0]
if len(selection) == 0:
selection = self.view.word(selection)
word = self.view.substr(selection)
settings = sublime.load_settings('DashDoc.sublime-settings')
if syntax_sensitive or settings.get('syntax_sensitive', False):
docset = docset_prefix(self.view, settings)
else:
docset = None
subprocess.call(["open", "dash://%s%s" % (docset or '', camel_case(word))])
|
import sublime
import sublime_plugin
import os
import subprocess
def syntax_name(view):
syntax = os.path.basename(view.settings().get('syntax'))
syntax = os.path.splitext(syntax)[0]
return syntax
def docset_prefix(view, settings):
syntax_docset_map = settings.get('syntax_docset_map', {})
syntax = syntax_name(view)
if syntax in syntax_docset_map:
return syntax_docset_map[syntax] + ':'
return None
class DashDocCommand(sublime_plugin.TextCommand):
def run(self, edit, syntax_sensitive=False):
selection = self.view.sel()[0]
if len(selection) == 0:
selection = self.view.word(selection)
word = self.view.substr(selection)
settings = sublime.load_settings('DashDoc.sublime-settings')
if syntax_sensitive or settings.get('syntax_sensitive', False):
docset = docset_prefix(self.view, settings)
else:
docset = None
subprocess.call(["open", "dash://%s%s" % (docset or '', word)])
Use Dash's new CamelCase convention to look up words that contain whitespace
- Example: converting "create table" into "createTable" will look up "CREATE TABLE"import sublime
import sublime_plugin
import os
import subprocess
def syntax_name(view):
syntax = os.path.basename(view.settings().get('syntax'))
syntax = os.path.splitext(syntax)[0]
return syntax
def camel_case(word):
return ''.join(w.capitalize() if i > 0 else w
for i, w in enumerate(word.split()))
def docset_prefix(view, settings):
syntax_docset_map = settings.get('syntax_docset_map', {})
syntax = syntax_name(view)
if syntax in syntax_docset_map:
return syntax_docset_map[syntax] + ':'
return None
class DashDocCommand(sublime_plugin.TextCommand):
def run(self, edit, syntax_sensitive=False):
selection = self.view.sel()[0]
if len(selection) == 0:
selection = self.view.word(selection)
word = self.view.substr(selection)
settings = sublime.load_settings('DashDoc.sublime-settings')
if syntax_sensitive or settings.get('syntax_sensitive', False):
docset = docset_prefix(self.view, settings)
else:
docset = None
subprocess.call(["open", "dash://%s%s" % (docset or '', camel_case(word))])
|
<commit_before>import sublime
import sublime_plugin
import os
import subprocess
def syntax_name(view):
syntax = os.path.basename(view.settings().get('syntax'))
syntax = os.path.splitext(syntax)[0]
return syntax
def docset_prefix(view, settings):
syntax_docset_map = settings.get('syntax_docset_map', {})
syntax = syntax_name(view)
if syntax in syntax_docset_map:
return syntax_docset_map[syntax] + ':'
return None
class DashDocCommand(sublime_plugin.TextCommand):
def run(self, edit, syntax_sensitive=False):
selection = self.view.sel()[0]
if len(selection) == 0:
selection = self.view.word(selection)
word = self.view.substr(selection)
settings = sublime.load_settings('DashDoc.sublime-settings')
if syntax_sensitive or settings.get('syntax_sensitive', False):
docset = docset_prefix(self.view, settings)
else:
docset = None
subprocess.call(["open", "dash://%s%s" % (docset or '', word)])
<commit_msg>Use Dash's new CamelCase convention to look up words that contain whitespace
- Example: converting "create table" into "createTable" will look up "CREATE TABLE"<commit_after>import sublime
import sublime_plugin
import os
import subprocess
def syntax_name(view):
syntax = os.path.basename(view.settings().get('syntax'))
syntax = os.path.splitext(syntax)[0]
return syntax
def camel_case(word):
return ''.join(w.capitalize() if i > 0 else w
for i, w in enumerate(word.split()))
def docset_prefix(view, settings):
syntax_docset_map = settings.get('syntax_docset_map', {})
syntax = syntax_name(view)
if syntax in syntax_docset_map:
return syntax_docset_map[syntax] + ':'
return None
class DashDocCommand(sublime_plugin.TextCommand):
def run(self, edit, syntax_sensitive=False):
selection = self.view.sel()[0]
if len(selection) == 0:
selection = self.view.word(selection)
word = self.view.substr(selection)
settings = sublime.load_settings('DashDoc.sublime-settings')
if syntax_sensitive or settings.get('syntax_sensitive', False):
docset = docset_prefix(self.view, settings)
else:
docset = None
subprocess.call(["open", "dash://%s%s" % (docset or '', camel_case(word))])
|
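A quick standalone check of the CamelCase helper this commit introduces (a hypothetical test, not part of the plugin):

def camel_case(word):
    return ''.join(w.capitalize() if i > 0 else w
                   for i, w in enumerate(word.split()))

assert camel_case("create table") == "createTable"  # multi-word lookups
assert camel_case("select") == "select"             # single words pass through
print(camel_case("drop table if exists"))           # -> dropTableIfExists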
4983713ae15079872a8838e702b235f095f913b3
|
examples/list_people.py
|
examples/list_people.py
|
#! /usr/bin/python
# See README.txt for information and build instructions.
import addressbook_pb2
import sys
# Iterates through all people in the AddressBook and prints info about them.
def ListPeople(address_book):
for person in address_book.person:
print "Person ID:", person.id
print " Name:", person.name
if person.HasField('email'):
print " E-mail address:", person.email
for phone_number in person.phone:
if phone_number.type == addressbook_pb2.Person.MOBILE:
print " Mobile phone #: ",
elif phone_number.type == addressbook_pb2.Person.HOME:
print " Home phone #: ",
elif phone_number.type == addressbook_pb2.Person.WORK:
print " Work phone #: ",
print phone_number.number
# Main procedure: Reads the entire address book from a file and prints all
# the information inside.
if len(sys.argv) != 2:
print "Usage:", sys.argv[0], "ADDRESS_BOOK_FILE"
sys.exit(-1)
address_book = addressbook_pb2.AddressBook()
# Read the existing address book.
f = open(sys.argv[1], "rb")
address_book.ParseFromString(f.read())
f.close()
ListPeople(address_book)
|
#! /usr/bin/python
# See README.txt for information and build instructions.
import addressbook_pb2
import sys
# Iterates through all people in the AddressBook and prints info about them.
def ListPeople(address_book):
for person in address_book.person:
print "Person ID:", person.id
print " Name:", person.name
if person.HasField('email'):
print " E-mail address:", person.email
for phone_number in person.phone:
if phone_number.type == addressbook_pb2.Person.MOBILE:
print " Mobile phone #:",
elif phone_number.type == addressbook_pb2.Person.HOME:
print " Home phone #:",
elif phone_number.type == addressbook_pb2.Person.WORK:
print " Work phone #:",
print phone_number.number
# Main procedure: Reads the entire address book from a file and prints all
# the information inside.
if len(sys.argv) != 2:
print "Usage:", sys.argv[0], "ADDRESS_BOOK_FILE"
sys.exit(-1)
address_book = addressbook_pb2.AddressBook()
# Read the existing address book.
f = open(sys.argv[1], "rb")
address_book.ParseFromString(f.read())
f.close()
ListPeople(address_book)
|
Make Python example output identical to C++ and Java by removing redundant spaces.
|
Make Python example output identical to C++ and Java by removing redundant
spaces.
|
Python
|
bsd-3-clause
|
nilavghosh/Protocol-Buffers-Fork,nilavghosh/Protocol-Buffers-Fork,nilavghosh/Protocol-Buffers-Fork,nilavghosh/Protocol-Buffers-Fork,nilavghosh/Protocol-Buffers-Fork
|
#! /usr/bin/python
# See README.txt for information and build instructions.
import addressbook_pb2
import sys
# Iterates through all people in the AddressBook and prints info about them.
def ListPeople(address_book):
for person in address_book.person:
print "Person ID:", person.id
print " Name:", person.name
if person.HasField('email'):
print " E-mail address:", person.email
for phone_number in person.phone:
if phone_number.type == addressbook_pb2.Person.MOBILE:
print " Mobile phone #: ",
elif phone_number.type == addressbook_pb2.Person.HOME:
print " Home phone #: ",
elif phone_number.type == addressbook_pb2.Person.WORK:
print " Work phone #: ",
print phone_number.number
# Main procedure: Reads the entire address book from a file and prints all
# the information inside.
if len(sys.argv) != 2:
print "Usage:", sys.argv[0], "ADDRESS_BOOK_FILE"
sys.exit(-1)
address_book = addressbook_pb2.AddressBook()
# Read the existing address book.
f = open(sys.argv[1], "rb")
address_book.ParseFromString(f.read())
f.close()
ListPeople(address_book)
Make Python example output identical to C++ and Java by removing redundant
spaces.
|
#! /usr/bin/python
# See README.txt for information and build instructions.
import addressbook_pb2
import sys
# Iterates through all people in the AddressBook and prints info about them.
def ListPeople(address_book):
for person in address_book.person:
print "Person ID:", person.id
print " Name:", person.name
if person.HasField('email'):
print " E-mail address:", person.email
for phone_number in person.phone:
if phone_number.type == addressbook_pb2.Person.MOBILE:
print " Mobile phone #:",
elif phone_number.type == addressbook_pb2.Person.HOME:
print " Home phone #:",
elif phone_number.type == addressbook_pb2.Person.WORK:
print " Work phone #:",
print phone_number.number
# Main procedure: Reads the entire address book from a file and prints all
# the information inside.
if len(sys.argv) != 2:
print "Usage:", sys.argv[0], "ADDRESS_BOOK_FILE"
sys.exit(-1)
address_book = addressbook_pb2.AddressBook()
# Read the existing address book.
f = open(sys.argv[1], "rb")
address_book.ParseFromString(f.read())
f.close()
ListPeople(address_book)
|
<commit_before>#! /usr/bin/python
# See README.txt for information and build instructions.
import addressbook_pb2
import sys
# Iterates through all people in the AddressBook and prints info about them.
def ListPeople(address_book):
for person in address_book.person:
print "Person ID:", person.id
print " Name:", person.name
if person.HasField('email'):
print " E-mail address:", person.email
for phone_number in person.phone:
if phone_number.type == addressbook_pb2.Person.MOBILE:
print " Mobile phone #: ",
elif phone_number.type == addressbook_pb2.Person.HOME:
print " Home phone #: ",
elif phone_number.type == addressbook_pb2.Person.WORK:
print " Work phone #: ",
print phone_number.number
# Main procedure: Reads the entire address book from a file and prints all
# the information inside.
if len(sys.argv) != 2:
print "Usage:", sys.argv[0], "ADDRESS_BOOK_FILE"
sys.exit(-1)
address_book = addressbook_pb2.AddressBook()
# Read the existing address book.
f = open(sys.argv[1], "rb")
address_book.ParseFromString(f.read())
f.close()
ListPeople(address_book)
<commit_msg>Make Python example output identical to C++ and Java by removing redundant
spaces.<commit_after>
|
#! /usr/bin/python
# See README.txt for information and build instructions.
import addressbook_pb2
import sys
# Iterates through all people in the AddressBook and prints info about them.
def ListPeople(address_book):
for person in address_book.person:
print "Person ID:", person.id
print " Name:", person.name
if person.HasField('email'):
print " E-mail address:", person.email
for phone_number in person.phone:
if phone_number.type == addressbook_pb2.Person.MOBILE:
print " Mobile phone #:",
elif phone_number.type == addressbook_pb2.Person.HOME:
print " Home phone #:",
elif phone_number.type == addressbook_pb2.Person.WORK:
print " Work phone #:",
print phone_number.number
# Main procedure: Reads the entire address book from a file and prints all
# the information inside.
if len(sys.argv) != 2:
print "Usage:", sys.argv[0], "ADDRESS_BOOK_FILE"
sys.exit(-1)
address_book = addressbook_pb2.AddressBook()
# Read the existing address book.
f = open(sys.argv[1], "rb")
address_book.ParseFromString(f.read())
f.close()
ListPeople(address_book)
|
#! /usr/bin/python
# See README.txt for information and build instructions.
import addressbook_pb2
import sys
# Iterates through all people in the AddressBook and prints info about them.
def ListPeople(address_book):
for person in address_book.person:
print "Person ID:", person.id
print " Name:", person.name
if person.HasField('email'):
print " E-mail address:", person.email
for phone_number in person.phone:
if phone_number.type == addressbook_pb2.Person.MOBILE:
print " Mobile phone #: ",
elif phone_number.type == addressbook_pb2.Person.HOME:
print " Home phone #: ",
elif phone_number.type == addressbook_pb2.Person.WORK:
print " Work phone #: ",
print phone_number.number
# Main procedure: Reads the entire address book from a file and prints all
# the information inside.
if len(sys.argv) != 2:
print "Usage:", sys.argv[0], "ADDRESS_BOOK_FILE"
sys.exit(-1)
address_book = addressbook_pb2.AddressBook()
# Read the existing address book.
f = open(sys.argv[1], "rb")
address_book.ParseFromString(f.read())
f.close()
ListPeople(address_book)
Make Python example output identical to C++ and Java by removing redundant
spaces.#! /usr/bin/python
# See README.txt for information and build instructions.
import addressbook_pb2
import sys
# Iterates through all people in the AddressBook and prints info about them.
def ListPeople(address_book):
for person in address_book.person:
print "Person ID:", person.id
print " Name:", person.name
if person.HasField('email'):
print " E-mail address:", person.email
for phone_number in person.phone:
if phone_number.type == addressbook_pb2.Person.MOBILE:
print " Mobile phone #:",
elif phone_number.type == addressbook_pb2.Person.HOME:
print " Home phone #:",
elif phone_number.type == addressbook_pb2.Person.WORK:
print " Work phone #:",
print phone_number.number
# Main procedure: Reads the entire address book from a file and prints all
# the information inside.
if len(sys.argv) != 2:
print "Usage:", sys.argv[0], "ADDRESS_BOOK_FILE"
sys.exit(-1)
address_book = addressbook_pb2.AddressBook()
# Read the existing address book.
f = open(sys.argv[1], "rb")
address_book.ParseFromString(f.read())
f.close()
ListPeople(address_book)
|
<commit_before>#! /usr/bin/python
# See README.txt for information and build instructions.
import addressbook_pb2
import sys
# Iterates through all people in the AddressBook and prints info about them.
def ListPeople(address_book):
for person in address_book.person:
print "Person ID:", person.id
print " Name:", person.name
if person.HasField('email'):
print " E-mail address:", person.email
for phone_number in person.phone:
if phone_number.type == addressbook_pb2.Person.MOBILE:
print " Mobile phone #: ",
elif phone_number.type == addressbook_pb2.Person.HOME:
print " Home phone #: ",
elif phone_number.type == addressbook_pb2.Person.WORK:
print " Work phone #: ",
print phone_number.number
# Main procedure: Reads the entire address book from a file and prints all
# the information inside.
if len(sys.argv) != 2:
print "Usage:", sys.argv[0], "ADDRESS_BOOK_FILE"
sys.exit(-1)
address_book = addressbook_pb2.AddressBook()
# Read the existing address book.
f = open(sys.argv[1], "rb")
address_book.ParseFromString(f.read())
f.close()
ListPeople(address_book)
<commit_msg>Make Python example output identical to C++ and Java by removing redundant
spaces.<commit_after>#! /usr/bin/python
# See README.txt for information and build instructions.
import addressbook_pb2
import sys
# Iterates though all people in the AddressBook and prints info about them.
def ListPeople(address_book):
for person in address_book.person:
print "Person ID:", person.id
print " Name:", person.name
if person.HasField('email'):
print " E-mail address:", person.email
for phone_number in person.phone:
if phone_number.type == addressbook_pb2.Person.MOBILE:
print " Mobile phone #:",
elif phone_number.type == addressbook_pb2.Person.HOME:
print " Home phone #:",
elif phone_number.type == addressbook_pb2.Person.WORK:
print " Work phone #:",
print phone_number.number
# Main procedure: Reads the entire address book from a file and prints all
# the information inside.
if len(sys.argv) != 2:
print "Usage:", sys.argv[0], "ADDRESS_BOOK_FILE"
sys.exit(-1)
address_book = addressbook_pb2.AddressBook()
# Read the existing address book.
f = open(sys.argv[1], "rb")
address_book.ParseFromString(f.read())
f.close()
ListPeople(address_book)
|
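The spacing fix above hinges on Python 2 print semantics: a statement ending in a comma suppresses the newline and already emits one separating space, so the old literals ending in "#: " printed two spaces before the number. A minimal repro (hypothetical, Python 2 only):

print "  Mobile phone #:",  # trailing comma adds exactly one space
print "555-0100"            # output: "  Mobile phone #: 555-0100"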
0ce491e39b4cb866b2b749692dbaed8cb1cf6dac
|
plots/views.py
|
plots/views.py
|
# Create your views here.
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.utils import simplejson
from django.http import HttpResponse, Http404
from .models import BenchmarkLogs, MachineInfo
def rawdata(request, type):
if type == "AvgVGRvsProcessor":
def draw(request, type):
type_dict = {'type': type}
return render_to_response("chart.html", type_dict, context_instance=RequestContext(request))
|
# Create your views here.
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.utils import simplejson
from django.http import HttpResponse, Http404
from .models import BenchmarkLogs, MachineInfo
def rawdata(request, type):
#if type == "AvgVGRvsProcessor":
return
def draw(request, type):
type_dict = {'type': type}
return render_to_response("chart.html", type_dict, context_instance=RequestContext(request))
|
Add a stub return to ensure the project builds.
|
Add a stub return to ensure the project builds.
|
Python
|
bsd-2-clause
|
ankeshanand/benchmark,ankeshanand/benchmark,ankeshanand/benchmark,ankeshanand/benchmark
|
# Create your views here.
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.utils import simplejson
from django.http import HttpResponse, Http404
from .models import BenchmarkLogs, MachineInfo
def rawdata(request, type):
if type == "AvgVGRvsProcessor":
def draw(request, type):
type_dict = {'type': type}
return render_to_response("chart.html", type_dict, context_instance=RequestContext(request))
Add a stub return to ensure the project builds.
|
# Create your views here.
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.utils import simplejson
from django.http import HttpResponse, Http404
from .models import BenchmarkLogs, MachineInfo
def rawdata(request, type):
#if type == "AvgVGRvsProcessor":
return
def draw(request, type):
type_dict = {'type': type}
return render_to_response("chart.html", type_dict, context_instance=RequestContext(request))
|
<commit_before># Create your views here.
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.utils import simplejson
from django.http import HttpResponse, Http404
from .models import BenchmarkLogs, MachineInfo
def rawdata(request, type):
if type == "AvgVGRvsProcessor":
def draw(request, type):
type_dict = {'type': type}
return render_to_response("chart.html", type_dict, context_instance=RequestContext(request))
<commit_msg>Add a stub return to ensure the project builds.<commit_after>
|
# Create your views here.
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.utils import simplejson
from django.http import HttpResponse, Http404
from .models import BenchmarkLogs, MachineInfo
def rawdata(request, type):
#if type == "AvgVGRvsProcessor":
return
def draw(request, type):
type_dict = {'type': type}
return render_to_response("chart.html", type_dict, context_instance=RequestContext(request))
|
# Create your views here.
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.utils import simplejson
from django.http import HttpResponse, Http404
from .models import BenchmarkLogs, MachineInfo
def rawdata(request, type):
if type == "AvgVGRvsProcessor":
def draw(request, type):
type_dict = {'type': type}
return render_to_response("chart.html", type_dict, context_instance=RequestContext(request))
Add a stub return to ensure the project builds.# Create your views here.
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.utils import simplejson
from django.http import HttpResponse, Http404
from .models import BenchmarkLogs, MachineInfo
def rawdata(request, type):
#if type == "AvgVGRvsProcessor":
return
def draw(request, type):
type_dict = {'type': type}
return render_to_response("chart.html", type_dict, context_instance=RequestContext(request))
|
<commit_before># Create your views here.
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.utils import simplejson
from django.http import HttpResponse, Http404
from .models import BenchmarkLogs, MachineInfo
def rawdata(request, type):
if type == "AvgVGRvsProcessor":
def draw(request, type):
type_dict = {'type': type}
return render_to_response("chart.html", type_dict, context_instance=RequestContext(request))
<commit_msg>Add a stub return to ensure the project builds.<commit_after># Create your views here.
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.utils import simplejson
from django.http import HttpResponse, Http404
from .models import BenchmarkLogs, MachineInfo
def rawdata(request, type):
#if type == "AvgVGRvsProcessor":
return
def draw(request, type):
type_dict = {'type': type}
return render_to_response("chart.html", type_dict, context_instance=RequestContext(request))
|
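For context, a sketch of how the stubbed rawdata view might eventually be filled in; the query and JSON shape below are assumptions, not part of this commit:

def rawdata(request, type):
    if type == "AvgVGRvsProcessor":
        # Hypothetical: serialize benchmark rows for the chart front end.
        ids = list(BenchmarkLogs.objects.values_list('id', flat=True))  # assumed query
        return HttpResponse(simplejson.dumps(ids), mimetype='application/json')
    raise Http404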
02f77babc6195426fdb5c495d44ede6af6fbec8f
|
mangopaysdk/entities/userlegal.py
|
mangopaysdk/entities/userlegal.py
|
from mangopaysdk.entities.entitybase import EntityBase
from mangopaysdk.entities.user import User
from mangopaysdk.tools.enums import PersonType
from mangopaysdk.tools.enums import KYCLevel
class UserLegal (User):
def __init__(self, id = None):
super(UserLegal, self).__init__(id)
self._setPersonType(PersonType.Legal)
self.Name = None
# Required LegalPersonType: BUSINESS, ORGANIZATION
self.LegalPersonType = None
self.HeadquartersAddress = None
# Required
self.LegalRepresentativeFirstName = None
# Required
self.LegalRepresentativeLastName = None
self.LegalRepresentativeAddress = None
self.LegalRepresentativeEmail = None
# Required
self.LegalRepresentativeBirthday = None
# Required
self.LegalRepresentativeNationality = None
# Required
self.LegalRepresentativeCountryOfResidence = None
self._statute = None
self._proofOfRegistration = None
self._shareholderDeclaration = None
def GetReadOnlyProperties(self):
properties = super(UserLegal, self).GetReadOnlyProperties()
properties.append('Statute' )
properties.append('ProofOfRegistration' )
properties.append('ShareholderDeclaration' )
return properties
|
from mangopaysdk.entities.entitybase import EntityBase
from mangopaysdk.entities.user import User
from mangopaysdk.tools.enums import PersonType
from mangopaysdk.tools.enums import KYCLevel
class UserLegal (User):
def __init__(self, id = None):
super(UserLegal, self).__init__(id)
self._setPersonType(PersonType.Legal)
self.Name = None
# Required LegalPersonType: BUSINESS, ORGANIZATION
self.LegalPersonType = None
self.HeadquartersAddress = None
# Required
self.LegalRepresentativeFirstName = None
# Required
self.LegalRepresentativeLastName = None
self.LegalRepresentativeAddress = None
self.LegalRepresentativeEmail = None
# Required
self.LegalRepresentativeBirthday = None
# Required
self.LegalRepresentativeNationality = None
# Required
self.LegalRepresentativeCountryOfResidence = None
self._statute = None
self._proofOfRegistration = None
self._shareholderDeclaration = None
self._legalRepresentativeProofOfIdentity = None
def GetReadOnlyProperties(self):
properties = super(UserLegal, self).GetReadOnlyProperties()
properties.append('Statute' )
properties.append('ProofOfRegistration' )
properties.append('ShareholderDeclaration' )
properties.append('LegalRepresentativeProofOfIdentity' )
return properties
|
Add LegalRepresentativeProofOfIdentity to legal user
|
Add LegalRepresentativeProofOfIdentity to legal user
|
Python
|
mit
|
chocopoche/mangopay2-python-sdk,Mangopay/mangopay2-python-sdk
|
from mangopaysdk.entities.entitybase import EntityBase
from mangopaysdk.entities.user import User
from mangopaysdk.tools.enums import PersonType
from mangopaysdk.tools.enums import KYCLevel
class UserLegal (User):
def __init__(self, id = None):
super(UserLegal, self).__init__(id)
self._setPersonType(PersonType.Legal)
self.Name = None
# Required LegalPersonType: BUSINESS, ORGANIZATION
self.LegalPersonType = None
self.HeadquartersAddress = None
# Required
self.LegalRepresentativeFirstName = None
# Required
self.LegalRepresentativeLastName = None
self.LegalRepresentativeAddress = None
self.LegalRepresentativeEmail = None
# Required
self.LegalRepresentativeBirthday = None
# Required
self.LegalRepresentativeNationality = None
# Required
self.LegalRepresentativeCountryOfResidence = None
self._statute = None
self._proofOfRegistration = None
self._shareholderDeclaration = None
def GetReadOnlyProperties(self):
properties = super(UserLegal, self).GetReadOnlyProperties()
properties.append('Statute' )
properties.append('ProofOfRegistration' )
properties.append('ShareholderDeclaration' )
return properties
Add LegalRepresentativeProofOfIdentity to legal user
|
from mangopaysdk.entities.entitybase import EntityBase
from mangopaysdk.entities.user import User
from mangopaysdk.tools.enums import PersonType
from mangopaysdk.tools.enums import KYCLevel
class UserLegal (User):
def __init__(self, id = None):
super(UserLegal, self).__init__(id)
self._setPersonType(PersonType.Legal)
self.Name = None
# Required LegalPersonType: BUSINESS, ORGANIZATION
self.LegalPersonType = None
self.HeadquartersAddress = None
# Required
self.LegalRepresentativeFirstName = None
# Required
self.LegalRepresentativeLastName = None
self.LegalRepresentativeAddress = None
self.LegalRepresentativeEmail = None
# Required
self.LegalRepresentativeBirthday = None
# Required
self.LegalRepresentativeNationality = None
# Required
self.LegalRepresentativeCountryOfResidence = None
self._statute = None
self._proofOfRegistration = None
self._shareholderDeclaration = None
self._legalRepresentativeProofOfIdentity = None
def GetReadOnlyProperties(self):
properties = super(UserLegal, self).GetReadOnlyProperties()
properties.append('Statute' )
properties.append('ProofOfRegistration' )
properties.append('ShareholderDeclaration' )
properties.append('LegalRepresentativeProofOfIdentity' )
return properties
|
<commit_before>from mangopaysdk.entities.entitybase import EntityBase
from mangopaysdk.entities.user import User
from mangopaysdk.tools.enums import PersonType
from mangopaysdk.tools.enums import KYCLevel
class UserLegal (User):
def __init__(self, id = None):
super(UserLegal, self).__init__(id)
self._setPersonType(PersonType.Legal)
self.Name = None
# Required LegalPersonType: BUSINESS, ORGANIZATION
self.LegalPersonType = None
self.HeadquartersAddress = None
# Required
self.LegalRepresentativeFirstName = None
# Required
self.LegalRepresentativeLastName = None
self.LegalRepresentativeAddress = None
self.LegalRepresentativeEmail = None
# Required
self.LegalRepresentativeBirthday = None
# Required
self.LegalRepresentativeNationality = None
# Required
self.LegalRepresentativeCountryOfResidence = None
self._statute = None
self._proofOfRegistration = None
self._shareholderDeclaration = None
def GetReadOnlyProperties(self):
properties = super(UserLegal, self).GetReadOnlyProperties()
properties.append('Statute' )
properties.append('ProofOfRegistration' )
properties.append('ShareholderDeclaration' )
return properties
<commit_msg>Add LegalRepresentativeProofOfIdentity to legal user<commit_after>
|
from mangopaysdk.entities.entitybase import EntityBase
from mangopaysdk.entities.user import User
from mangopaysdk.tools.enums import PersonType
from mangopaysdk.tools.enums import KYCLevel
class UserLegal (User):
def __init__(self, id = None):
super(UserLegal, self).__init__(id)
self._setPersonType(PersonType.Legal)
self.Name = None
# Required LegalPersonType: BUSINESS, ORGANIZATION
self.LegalPersonType = None
self.HeadquartersAddress = None
# Required
self.LegalRepresentativeFirstName = None
# Required
self.LegalRepresentativeLastName = None
self.LegalRepresentativeAddress = None
self.LegalRepresentativeEmail = None
# Required
self.LegalRepresentativeBirthday = None
# Required
self.LegalRepresentativeNationality = None
# Required
self.LegalRepresentativeCountryOfResidence = None
self._statute = None
self._proofOfRegistration = None
self._shareholderDeclaration = None
self._legalRepresentativeProofOfIdentity = None
def GetReadOnlyProperties(self):
properties = super(UserLegal, self).GetReadOnlyProperties()
properties.append('Statute' )
properties.append('ProofOfRegistration' )
properties.append('ShareholderDeclaration' )
properties.append('LegalRepresentativeProofOfIdentity' )
return properties
|
from mangopaysdk.entities.entitybase import EntityBase
from mangopaysdk.entities.user import User
from mangopaysdk.tools.enums import PersonType
from mangopaysdk.tools.enums import KYCLevel
class UserLegal (User):
def __init__(self, id = None):
super(UserLegal, self).__init__(id)
self._setPersonType(PersonType.Legal)
self.Name = None
# Required LegalPersonType: BUSINESS, ORGANIZATION
self.LegalPersonType = None
self.HeadquartersAddress = None
# Required
self.LegalRepresentativeFirstName = None
# Required
self.LegalRepresentativeLastName = None
self.LegalRepresentativeAddress = None
self.LegalRepresentativeEmail = None
# Required
self.LegalRepresentativeBirthday = None
# Required
self.LegalRepresentativeNationality = None
# Required
self.LegalRepresentativeCountryOfResidence = None
self._statute = None
self._proofOfRegistration = None
self._shareholderDeclaration = None
def GetReadOnlyProperties(self):
properties = super(UserLegal, self).GetReadOnlyProperties()
properties.append('Statute' )
properties.append('ProofOfRegistration' )
properties.append('ShareholderDeclaration' )
return properties
Add LegalRepresentativeProofOfIdentity to legal userfrom mangopaysdk.entities.entitybase import EntityBase
from mangopaysdk.entities.user import User
from mangopaysdk.tools.enums import PersonType
from mangopaysdk.tools.enums import KYCLevel
class UserLegal (User):
def __init__(self, id = None):
super(UserLegal, self).__init__(id)
self._setPersonType(PersonType.Legal)
self.Name = None
# Required LegalPersonType: BUSINESS, ORGANIZATION
self.LegalPersonType = None
self.HeadquartersAddress = None
# Required
self.LegalRepresentativeFirstName = None
# Required
self.LegalRepresentativeLastName = None
self.LegalRepresentativeAddress = None
self.LegalRepresentativeEmail = None
# Required
self.LegalRepresentativeBirthday = None
# Required
self.LegalRepresentativeNationality = None
# Required
self.LegalRepresentativeCountryOfResidence = None
self._statute = None
self._proofOfRegistration = None
self._shareholderDeclaration = None
self._legalRepresentativeProofOfIdentity = None
def GetReadOnlyProperties(self):
properties = super(UserLegal, self).GetReadOnlyProperties()
properties.append('Statute' )
properties.append('ProofOfRegistration' )
properties.append('ShareholderDeclaration' )
properties.append('LegalRepresentativeProofOfIdentity' )
return properties
|
<commit_before>from mangopaysdk.entities.entitybase import EntityBase
from mangopaysdk.entities.user import User
from mangopaysdk.tools.enums import PersonType
from mangopaysdk.tools.enums import KYCLevel
class UserLegal (User):
def __init__(self, id = None):
super(UserLegal, self).__init__(id)
self._setPersonType(PersonType.Legal)
self.Name = None
# Required LegalPersonType: BUSINESS, ORGANIZATION
self.LegalPersonType = None
self.HeadquartersAddress = None
# Required
self.LegalRepresentativeFirstName = None
# Required
self.LegalRepresentativeLastName = None
self.LegalRepresentativeAddress = None
self.LegalRepresentativeEmail = None
# Required
self.LegalRepresentativeBirthday = None
# Required
self.LegalRepresentativeNationality = None
# Required
self.LegalRepresentativeCountryOfResidence = None
self._statute = None
self._proofOfRegistration = None
self._shareholderDeclaration = None
def GetReadOnlyProperties(self):
properties = super(UserLegal, self).GetReadOnlyProperties()
properties.append('Statute' )
properties.append('ProofOfRegistration' )
properties.append('ShareholderDeclaration' )
return properties
<commit_msg>Add LegalRepresentativeProofOfIdentity to legal user<commit_after>from mangopaysdk.entities.entitybase import EntityBase
from mangopaysdk.entities.user import User
from mangopaysdk.tools.enums import PersonType
from mangopaysdk.tools.enums import KYCLevel
class UserLegal (User):
def __init__(self, id = None):
super(UserLegal, self).__init__(id)
self._setPersonType(PersonType.Legal)
self.Name = None
# Required LegalPersonType: BUSINESS, ORGANIZATION
self.LegalPersonType = None
self.HeadquartersAddress = None
# Required
self.LegalRepresentativeFirstName = None
# Required
self.LegalRepresentativeLastName = None
self.LegalRepresentativeAddress = None
self.LegalRepresentativeEmail = None
# Required
self.LegalRepresentativeBirthday = None
# Required
self.LegalRepresentativeNationality = None
# Required
self.LegalRepresentativeCountryOfResidence = None
self._statute = None
self._proofOfRegistration = None
self._shareholderDeclaration = None
self._legalRepresentativeProofOfIdentity = None
def GetReadOnlyProperties(self):
properties = super(UserLegal, self).GetReadOnlyProperties()
properties.append('Statute' )
properties.append('ProofOfRegistration' )
properties.append('ShareholderDeclaration' )
properties.append('LegalRepresentativeProofOfIdentity' )
return properties
|
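A small sanity check (hypothetical, assuming UserLegal can be constructed without arguments as shown above) confirming the new document type is reported as read-only:

user = UserLegal()
props = user.GetReadOnlyProperties()
assert 'LegalRepresentativeProofOfIdentity' in props  # added by this commit
assert 'ShareholderDeclaration' in props              # pre-existing entry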
732fb749247c94fe0dbf3b98ca691d139970bc48
|
tests/doctests/test_doctests.py
|
tests/doctests/test_doctests.py
|
import os
import doctest
import unittest
DOCTEST_FILES = []
for root, dirs, files in os.walk("."):
if ".hg%s" % os.path.sep not in root:
for f in files:
if f.endswith(".doctest"):
DOCTEST_FILES.append(f)
DOCTEST_FILES = ["../../README.txt", "../../lib/restler/__init__.py"] + DOCTEST_FILES
print "Running ", DOCTEST_FILES
#DOCTEST_FILES = []
suite = unittest.TestSuite()
for f in DOCTEST_FILES:
suite.addTest(doctest.DocFileSuite(f))
runner = unittest.TextTestRunner()
runner.run(suite)
|
import os
import doctest
import unittest
DOCTEST_FILES = []
for root, dirs, files in os.walk("."):
if ".hg%s" % os.path.sep not in root:
for f in files:
if f.endswith(".doctest"):
DOCTEST_FILES.append(f)
DOCTEST_FILES = ["../../lib/restler/__init__.py"] + DOCTEST_FILES
print "Running ", DOCTEST_FILES
#DOCTEST_FILES = []
suite = unittest.TestSuite()
for f in DOCTEST_FILES:
suite.addTest(doctest.DocFileSuite(f))
runner = unittest.TextTestRunner()
runner.run(suite)
|
Fix broken (doc) test looking for the removed README.txt
|
Fix broken (doc) test looking for the removed README.txt
|
Python
|
mit
|
xuru/substrate,xuru/substrate,xuru/substrate
|
import os
import doctest
import unittest
DOCTEST_FILES = []
for root, dirs, files in os.walk("."):
if ".hg%s" % os.path.sep not in root:
for f in files:
if f.endswith(".doctest"):
DOCTEST_FILES.append(f)
DOCTEST_FILES = ["../../README.txt", "../../lib/restler/__init__.py"] + DOCTEST_FILES
print "Running ", DOCTEST_FILES
#DOCTEST_FILES = []
suite = unittest.TestSuite()
for f in DOCTEST_FILES:
suite.addTest(doctest.DocFileSuite(f))
runner = unittest.TextTestRunner()
runner.run(suite)
Fix broken (doc) test looking for the removed README.txt
|
import os
import doctest
import unittest
DOCTEST_FILES = []
for root, dirs, files in os.walk("."):
if ".hg%s" % os.path.sep not in root:
for f in files:
if f.endswith(".doctest"):
DOCTEST_FILES.append(f)
DOCTEST_FILES = ["../../lib/restler/__init__.py"] + DOCTEST_FILES
print "Running ", DOCTEST_FILES
#DOCTEST_FILES = []
suite = unittest.TestSuite()
for f in DOCTEST_FILES:
suite.addTest(doctest.DocFileSuite(f))
runner = unittest.TextTestRunner()
runner.run(suite)
|
<commit_before>import os
import doctest
import unittest
DOCTEST_FILES = []
for root, dirs, files in os.walk("."):
if ".hg%s" % os.path.sep not in root:
for f in files:
if f.endswith(".doctest"):
DOCTEST_FILES.append(f)
DOCTEST_FILES = ["../../README.txt", "../../lib/restler/__init__.py"] + DOCTEST_FILES
print "Running ", DOCTEST_FILES
#DOCTEST_FILES = []
suite = unittest.TestSuite()
for f in DOCTEST_FILES:
suite.addTest(doctest.DocFileSuite(f))
runner = unittest.TextTestRunner()
runner.run(suite)
<commit_msg>Fix broken (doc) test looking for the removed README.txt<commit_after>
|
import os
import doctest
import unittest
DOCTEST_FILES = []
for root, dirs, files in os.walk("."):
if ".hg%s" % os.path.sep not in root:
for f in files:
if f.endswith(".doctest"):
DOCTEST_FILES.append(f)
DOCTEST_FILES = ["../../lib/restler/__init__.py"] + DOCTEST_FILES
print "Running ", DOCTEST_FILES
#DOCTEST_FILES = []
suite = unittest.TestSuite()
for f in DOCTEST_FILES:
suite.addTest(doctest.DocFileSuite(f))
runner = unittest.TextTestRunner()
runner.run(suite)
|
import os
import doctest
import unittest
DOCTEST_FILES = []
for root, dirs, files in os.walk("."):
if ".hg%s" % os.path.sep not in root:
for f in files:
if f.endswith(".doctest"):
DOCTEST_FILES.append(f)
DOCTEST_FILES = ["../../README.txt", "../../lib/restler/__init__.py"] + DOCTEST_FILES
print "Running ", DOCTEST_FILES
#DOCTEST_FILES = []
suite = unittest.TestSuite()
for f in DOCTEST_FILES:
suite.addTest(doctest.DocFileSuite(f))
runner = unittest.TextTestRunner()
runner.run(suite)
Fix broken (doc) test looking for the removed README.txtimport os
import doctest
import unittest
DOCTEST_FILES = []
for root, dirs, files in os.walk("."):
if ".hg%s" % os.path.sep not in root:
for f in files:
if f.endswith(".doctest"):
DOCTEST_FILES.append(f)
DOCTEST_FILES = ["../../lib/restler/__init__.py"] + DOCTEST_FILES
print "Running ", DOCTEST_FILES
#DOCTEST_FILES = []
suite = unittest.TestSuite()
for f in DOCTEST_FILES:
suite.addTest(doctest.DocFileSuite(f))
runner = unittest.TextTestRunner()
runner.run(suite)
|
<commit_before>import os
import doctest
import unittest
DOCTEST_FILES = []
for root, dirs, files in os.walk("."):
if ".hg%s" % os.path.sep not in root:
for f in files:
if f.endswith(".doctest"):
DOCTEST_FILES.append(f)
DOCTEST_FILES = ["../../README.txt", "../../lib/restler/__init__.py"] + DOCTEST_FILES
print "Running ", DOCTEST_FILES
#DOCTEST_FILES = []
suite = unittest.TestSuite()
for f in DOCTEST_FILES:
suite.addTest(doctest.DocFileSuite(f))
runner = unittest.TextTestRunner()
runner.run(suite)
<commit_msg>Fix broken (doc) test looking for the removed README.txt<commit_after>import os
import doctest
import unittest
DOCTEST_FILES = []
for root, dirs, files in os.walk("."):
if ".hg%s" % os.path.sep not in root:
for f in files:
if f.endswith(".doctest"):
DOCTEST_FILES.append(f)
DOCTEST_FILES = ["../../lib/restler/__init__.py"] + DOCTEST_FILES
print "Running ", DOCTEST_FILES
#DOCTEST_FILES = []
suite = unittest.TestSuite()
for f in DOCTEST_FILES:
suite.addTest(doctest.DocFileSuite(f))
runner = unittest.TextTestRunner()
runner.run(suite)
|
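One caveat in the walker above: it collects bare filenames, so a doctest file in a subdirectory would not resolve from the test's working directory. A joined-path variant (a hypothetical alternative, not part of this fix):

import os

DOCTEST_FILES = [os.path.join(root, f)
                 for root, dirs, files in os.walk(".")
                 if ".hg%s" % os.path.sep not in root
                 for f in files
                 if f.endswith(".doctest")]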
c8981212ea1d8c9e89c00135d164cde7fc53832d
|
docs/source/conf.py
|
docs/source/conf.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
]
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Mongo-Thingy'
copyright = 'numberly'
author = 'numberly'
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.viewcode",
]
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Mongo-Thingy'
copyright = 'numberly'
author = 'numberly'
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
|
Add links to source code in documentation
|
Add links to source code in documentation
|
Python
|
mit
|
numberly/mongo-thingy
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
]
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Mongo-Thingy'
copyright = 'numberly'
author = 'numberly'
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
Add links to source code in documentation
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.viewcode",
]
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Mongo-Thingy'
copyright = 'numberly'
author = 'numberly'
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
|
<commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
]
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Mongo-Thingy'
copyright = 'numberly'
author = 'numberly'
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
<commit_msg>Add links to source code in documentation<commit_after>
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.viewcode",
]
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Mongo-Thingy'
copyright = 'numberly'
author = 'numberly'
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
]
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Mongo-Thingy'
copyright = 'numberly'
author = 'numberly'
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
Add links to source code in documentation#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.viewcode",
]
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Mongo-Thingy'
copyright = 'numberly'
author = 'numberly'
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
|
<commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
]
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Mongo-Thingy'
copyright = 'numberly'
author = 'numberly'
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
<commit_msg>Add links to source code in documentation<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.viewcode",
]
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Mongo-Thingy'
copyright = 'numberly'
author = 'numberly'
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
|
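A related, optional alternative to viewcode is sphinx.ext.linkcode, which points each "[source]" link at an external repository instead of locally generated pages; the resolver and URL below are placeholder assumptions:

extensions = ["sphinx.ext.autodoc", "sphinx.ext.linkcode"]

def linkcode_resolve(domain, info):
    # Sphinx calls this for every documented object; return None to skip.
    if domain != 'py' or not info.get('module'):
        return None
    path = info['module'].replace('.', '/')
    return "https://example.com/repo/blob/master/%s.py" % path  # placeholder URL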
88ac847fd9a53e25253f72216ee2fad3fa8353a2
|
python/main.py
|
python/main.py
|
import ctypes
stringtools = ctypes.CDLL("../target/libstringtools-261cf0fc14ce408c.so")
print(stringtools.count_substrings("banana", "na"))
|
import ctypes
stringtools = ctypes.CDLL("../target/libstringtools-261cf0fc14ce408c.so")
print(stringtools.count_substrings(b"banana", b"na"))
|
Use byte strings for Python example.
|
Use byte strings for Python example.
|
Python
|
mit
|
zsiciarz/rust-ffi-stringtools,zsiciarz/rust-ffi-stringtools,zsiciarz/rust-ffi-stringtools,zsiciarz/rust-ffi-stringtools,zsiciarz/rust-ffi-stringtools,zsiciarz/rust-ffi-stringtools,zsiciarz/rust-ffi-stringtools
|
import ctypes
stringtools = ctypes.CDLL("../target/libstringtools-261cf0fc14ce408c.so")
print(stringtools.count_substrings("banana", "na"))
Use byte strings for Python example.
|
import ctypes
stringtools = ctypes.CDLL("../target/libstringtools-261cf0fc14ce408c.so")
print(stringtools.count_substrings(b"banana", b"na"))
|
<commit_before>import ctypes
stringtools = ctypes.CDLL("../target/libstringtools-261cf0fc14ce408c.so")
print(stringtools.count_substrings("banana", "na"))
<commit_msg>Use byte strings for Python example.<commit_after>
|
import ctypes
stringtools = ctypes.CDLL("../target/libstringtools-261cf0fc14ce408c.so")
print(stringtools.count_substrings(b"banana", b"na"))
|
import ctypes
stringtools = ctypes.CDLL("../target/libstringtools-261cf0fc14ce408c.so")
print(stringtools.count_substrings("banana", "na"))
Use byte strings for Python example.import ctypes
stringtools = ctypes.CDLL("../target/libstringtools-261cf0fc14ce408c.so")
print(stringtools.count_substrings(b"banana", b"na"))
|
<commit_before>import ctypes
stringtools = ctypes.CDLL("../target/libstringtools-261cf0fc14ce408c.so")
print(stringtools.count_substrings("banana", "na"))
<commit_msg>Use byte strings for Python example.<commit_after>import ctypes
stringtools = ctypes.CDLL("../target/libstringtools-261cf0fc14ce408c.so")
print(stringtools.count_substrings(b"banana", b"na"))
|
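Declaring the FFI signature makes the byte-string requirement explicit and lets ctypes reject a plain str early; the return type here is an assumption about the Rust side:

import ctypes

stringtools = ctypes.CDLL("../target/libstringtools-261cf0fc14ce408c.so")
count = stringtools.count_substrings
count.argtypes = [ctypes.c_char_p, ctypes.c_char_p]  # byte strings required
count.restype = ctypes.c_uint32                      # assumed Rust return type
print(count(b"banana", b"na"))                       # -> 2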
1830c3ba2c124b0ef1d16a0ba2e092fd3281179b
|
myuw_mobile/views/api/library.py
|
myuw_mobile/views/api/library.py
|
import logging
from django.http import HttpResponse
from django.utils import simplejson as json
from myuw_mobile.views.rest_dispatch import RESTDispatch, data_not_found
from myuw_mobile.dao.library import get_account_info_for_current_user
from myuw_mobile.logger.timer import Timer
from myuw_mobile.logger.logresp import log_data_not_found_response, log_success_response
class MyLibInfo(RESTDispatch):
"""
Performs actions on resource at /api/v1/library/.
"""
def GET(self, request):
"""
GET returns 200 with the library account balances
of the current user
"""
timer = Timer()
logger = logging.getLogger(__name__)
myaccount = get_account_info_for_current_user()
if myaccount is None:
log_data_not_found_response(logger, timer)
return data_not_found()
log_success_response(logger, timer)
resp_json = myaccount.json_data()
logger.debug(resp_json)
return HttpResponse(json.dumps(resp_json))
|
import logging
from django.http import HttpResponse
from django.utils import simplejson as json
from myuw_mobile.views.rest_dispatch import RESTDispatch, data_not_found
from myuw_mobile.dao.library import get_account_info_for_current_user
from myuw_mobile.logger.timer import Timer
from myuw_mobile.logger.logresp import log_data_not_found_response, log_success_response
class MyLibInfo(RESTDispatch):
"""
Performs actions on resource at /api/v1/library/.
"""
def GET(self, request):
"""
GET returns 200 with the library account balances
of the current user
"""
timer = Timer()
logger = logging.getLogger(__name__)
myaccount = get_account_info_for_current_user()
if myaccount is None:
log_data_not_found_response(logger, timer)
return data_not_found()
log_success_response(logger, timer)
resp_json = myaccount.json_data(full_name_format=True)
logger.debug(resp_json)
return HttpResponse(json.dumps(resp_json))
|
Add param for turning on date formatting.
|
Add param for turning on date formatting.
|
Python
|
apache-2.0
|
uw-it-aca/myuw,uw-it-aca/myuw,fanglinfang/myuw,uw-it-aca/myuw,fanglinfang/myuw,fanglinfang/myuw,uw-it-aca/myuw
|
import logging
from django.http import HttpResponse
from django.utils import simplejson as json
from myuw_mobile.views.rest_dispatch import RESTDispatch, data_not_found
from myuw_mobile.dao.library import get_account_info_for_current_user
from myuw_mobile.logger.timer import Timer
from myuw_mobile.logger.logresp import log_data_not_found_response, log_success_response
class MyLibInfo(RESTDispatch):
"""
Performs actions on resource at /api/v1/library/.
"""
def GET(self, request):
"""
GET returns 200 with the library account balances
of the current user
"""
timer = Timer()
logger = logging.getLogger(__name__)
myaccount = get_account_info_for_current_user()
if myaccount is None:
log_data_not_found_response(logger, timer)
return data_not_found()
log_success_response(logger, timer)
resp_json = myaccount.json_data()
logger.debug(resp_json)
return HttpResponse(json.dumps(resp_json))
Add param for turning on date formatting.
|
import logging
from django.http import HttpResponse
from django.utils import simplejson as json
from myuw_mobile.views.rest_dispatch import RESTDispatch, data_not_found
from myuw_mobile.dao.library import get_account_info_for_current_user
from myuw_mobile.logger.timer import Timer
from myuw_mobile.logger.logresp import log_data_not_found_response, log_success_response
class MyLibInfo(RESTDispatch):
"""
Performs actions on resource at /api/v1/library/.
"""
def GET(self, request):
"""
GET returns 200 with the library account balances
of the current user
"""
timer = Timer()
logger = logging.getLogger(__name__)
myaccount = get_account_info_for_current_user()
if myaccount is None:
log_data_not_found_response(logger, timer)
return data_not_found()
log_success_response(logger, timer)
resp_json = myaccount.json_data(full_name_format=True)
logger.debug(resp_json)
return HttpResponse(json.dumps(resp_json))
|
<commit_before>import logging
from django.http import HttpResponse
from django.utils import simplejson as json
from myuw_mobile.views.rest_dispatch import RESTDispatch, data_not_found
from myuw_mobile.dao.library import get_account_info_for_current_user
from myuw_mobile.logger.timer import Timer
from myuw_mobile.logger.logresp import log_data_not_found_response, log_success_response
class MyLibInfo(RESTDispatch):
"""
Performs actions on resource at /api/v1/library/.
"""
def GET(self, request):
"""
GET returns 200 with the library account balances
of the current user
"""
timer = Timer()
logger = logging.getLogger(__name__)
myaccount = get_account_info_for_current_user()
if myaccount is None:
log_data_not_found_response(logger, timer)
return data_not_found()
log_success_response(logger, timer)
resp_json = myaccount.json_data()
logger.debug(resp_json)
return HttpResponse(json.dumps(resp_json))
<commit_msg>Add param for turning on date formatting.<commit_after>
|
import logging
from django.http import HttpResponse
from django.utils import simplejson as json
from myuw_mobile.views.rest_dispatch import RESTDispatch, data_not_found
from myuw_mobile.dao.library import get_account_info_for_current_user
from myuw_mobile.logger.timer import Timer
from myuw_mobile.logger.logresp import log_data_not_found_response, log_success_response
class MyLibInfo(RESTDispatch):
"""
Performs actions on resource at /api/v1/library/.
"""
def GET(self, request):
"""
GET returns 200 with the library account balances
of the current user
"""
timer = Timer()
logger = logging.getLogger(__name__)
myaccount = get_account_info_for_current_user()
if myaccount is None:
log_data_not_found_response(logger, timer)
return data_not_found()
log_success_response(logger, timer)
resp_json = myaccount.json_data(full_name_format=True)
logger.debug(resp_json)
return HttpResponse(json.dumps(resp_json))
|
import logging
from django.http import HttpResponse
from django.utils import simplejson as json
from myuw_mobile.views.rest_dispatch import RESTDispatch, data_not_found
from myuw_mobile.dao.library import get_account_info_for_current_user
from myuw_mobile.logger.timer import Timer
from myuw_mobile.logger.logresp import log_data_not_found_response, log_success_response
class MyLibInfo(RESTDispatch):
"""
Performs actions on resource at /api/v1/library/.
"""
def GET(self, request):
"""
GET returns 200 with the library account balances
of the current user
"""
timer = Timer()
logger = logging.getLogger(__name__)
myaccount = get_account_info_for_current_user()
if myaccount is None:
log_data_not_found_response(logger, timer)
return data_not_found()
log_success_response(logger, timer)
resp_json = myaccount.json_data()
logger.debug(resp_json)
return HttpResponse(json.dumps(resp_json))
Add param for turning on date formatting.import logging
from django.http import HttpResponse
from django.utils import simplejson as json
from myuw_mobile.views.rest_dispatch import RESTDispatch, data_not_found
from myuw_mobile.dao.library import get_account_info_for_current_user
from myuw_mobile.logger.timer import Timer
from myuw_mobile.logger.logresp import log_data_not_found_response, log_success_response
class MyLibInfo(RESTDispatch):
"""
Performs actions on resource at /api/v1/library/.
"""
def GET(self, request):
"""
GET returns 200 with the library account balances
of the current user
"""
timer = Timer()
logger = logging.getLogger(__name__)
myaccount = get_account_info_for_current_user()
if myaccount is None:
log_data_not_found_response(logger, timer)
return data_not_found()
log_success_response(logger, timer)
resp_json = myaccount.json_data(full_name_format=True)
logger.debug(resp_json)
return HttpResponse(json.dumps(resp_json))
|
<commit_before>import logging
from django.http import HttpResponse
from django.utils import simplejson as json
from myuw_mobile.views.rest_dispatch import RESTDispatch, data_not_found
from myuw_mobile.dao.library import get_account_info_for_current_user
from myuw_mobile.logger.timer import Timer
from myuw_mobile.logger.logresp import log_data_not_found_response, log_success_response
class MyLibInfo(RESTDispatch):
"""
Performs actions on resource at /api/v1/library/.
"""
def GET(self, request):
"""
GET returns 200 with the library account balances
of the current user
"""
timer = Timer()
logger = logging.getLogger(__name__)
myaccount = get_account_info_for_current_user()
if myaccount is None:
log_data_not_found_response(logger, timer)
return data_not_found()
log_success_response(logger, timer)
resp_json = myaccount.json_data()
logger.debug(resp_json)
return HttpResponse(json.dumps(resp_json))
<commit_msg>Add param for turning on date formatting.<commit_after>import logging
from django.http import HttpResponse
from django.utils import simplejson as json
from myuw_mobile.views.rest_dispatch import RESTDispatch, data_not_found
from myuw_mobile.dao.library import get_account_info_for_current_user
from myuw_mobile.logger.timer import Timer
from myuw_mobile.logger.logresp import log_data_not_found_response, log_success_response
class MyLibInfo(RESTDispatch):
"""
Performs actions on resource at /api/v1/library/.
"""
def GET(self, request):
"""
GET returns 200 with the library account balances
of the current user
"""
timer = Timer()
logger = logging.getLogger(__name__)
myaccount = get_account_info_for_current_user()
if myaccount is None:
log_data_not_found_response(logger, timer)
return data_not_found()
log_success_response(logger, timer)
resp_json = myaccount.json_data(full_name_format=True)
logger.debug(resp_json)
return HttpResponse(json.dumps(resp_json))
|
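The full_name_format=True flag suggests the dao model renders dates with spelled-out month names instead of ISO strings. A minimal sketch of that kind of formatting using only the standard library (the flag's exact behavior lives in the myuw_mobile dao and is not shown in this record, so its meaning here is an assumption):

from datetime import date

d = date(2013, 5, 2)
d.isoformat()            # '2013-05-02' -- default machine-readable form
d.strftime('%B %d, %Y')  # 'May 02, 2013' -- spelled-out "full name" style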
c47769005ba2eccebbd8561fd4b245d6af820821
|
app/settings.py
|
app/settings.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
project_name = u'Skeleton'
class Default(object):
APP_NAME = project_name
DEBUG = False
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
DUST_LOG_LEVEL = 'INFO'
# Servers and URLs
SERVER_NAME = 'localhost:5000'
# Authentication etc
SECRET_KEY = 'some-secret-key'
CSRF_ENABLED = True
# API
API_SERVER = 'localhost:5000'
API_TOKEN = 'some-api-token'
class Dev(Default):
APP_NAME = project_name + ' dev'
DEBUG = True
JS_LOG_LEVEL = 1
DUST_LOG_LEVEL = 'DEBUG'
class Testing(Default):
TESTING = True
CSRF_ENABLED = False
class Production(Default):
pass
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
project_name = u'Ninhursag'
class Default(object):
APP_NAME = project_name
DEBUG = False
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
DUST_LOG_LEVEL = 'INFO'
# Servers and URLs
SERVER_NAME = 'localhost:5000'
# Authentication etc
SECRET_KEY = 'some-secret-key'
CSRF_ENABLED = True
# API
API_SERVER = 'localhost:5000'
API_TOKEN = 'some-api-token'
class Dev(Default):
APP_NAME = project_name + ' dev'
DEBUG = True
JS_LOG_LEVEL = 1
DUST_LOG_LEVEL = 'DEBUG'
class Testing(Default):
TESTING = True
CSRF_ENABLED = False
class Production(Default):
pass
|
Change app name to Ninhursag.
|
Change app name to Ninhursag.
|
Python
|
mit
|
peterhil/ninhursag,peterhil/ninhursag,peterhil/ninhursag,peterhil/ninhursag
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
project_name = u'Skeleton'
class Default(object):
APP_NAME = project_name
DEBUG = False
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
DUST_LOG_LEVEL = 'INFO'
# Servers and URLs
SERVER_NAME = 'localhost:5000'
# Authentication etc
SECRET_KEY = 'some-secret-key'
CSRF_ENABLED = True
# API
API_SERVER = 'localhost:5000'
API_TOKEN = 'some-api-token'
class Dev(Default):
APP_NAME = project_name + ' dev'
DEBUG = True
JS_LOG_LEVEL = 1
DUST_LOG_LEVEL = 'DEBUG'
class Testing(Default):
TESTING = True
CSRF_ENABLED = False
class Production(Default):
pass
Change app name to Ninhursag.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
project_name = u'Ninhursag'
class Default(object):
APP_NAME = project_name
DEBUG = False
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
DUST_LOG_LEVEL = 'INFO'
# Servers and URLs
SERVER_NAME = 'localhost:5000'
# Authentication etc
SECRET_KEY = 'some-secret-key'
CSRF_ENABLED = True
# API
API_SERVER = 'localhost:5000'
API_TOKEN = 'some-api-token'
class Dev(Default):
APP_NAME = project_name + ' dev'
DEBUG = True
JS_LOG_LEVEL = 1
DUST_LOG_LEVEL = 'DEBUG'
class Testing(Default):
TESTING = True
CSRF_ENABLED = False
class Production(Default):
pass
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
project_name = u'Skeleton'
class Default(object):
APP_NAME = project_name
DEBUG = False
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
DUST_LOG_LEVEL = 'INFO'
# Servers and URLs
SERVER_NAME = 'localhost:5000'
# Authentication etc
SECRET_KEY = 'some-secret-key'
CSRF_ENABLED = True
# API
API_SERVER = 'localhost:5000'
API_TOKEN = 'some-api-token'
class Dev(Default):
APP_NAME = project_name + ' dev'
DEBUG = True
JS_LOG_LEVEL = 1
DUST_LOG_LEVEL = 'DEBUG'
class Testing(Default):
TESTING = True
CSRF_ENABLED = False
class Production(Default):
pass
<commit_msg>Change app name to Ninhursag.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
project_name = u'Ninhursag'
class Default(object):
APP_NAME = project_name
DEBUG = False
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
DUST_LOG_LEVEL = 'INFO'
# Servers and URLs
SERVER_NAME = 'localhost:5000'
# Authentication etc
SECRET_KEY = 'some-secret-key'
CSRF_ENABLED = True
# API
API_SERVER = 'localhost:5000'
API_TOKEN = 'some-api-token'
class Dev(Default):
APP_NAME = project_name + ' dev'
DEBUG = True
JS_LOG_LEVEL = 1
DUST_LOG_LEVEL = 'DEBUG'
class Testing(Default):
TESTING = True
CSRF_ENABLED = False
class Production(Default):
pass
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
project_name = u'Skeleton'
class Default(object):
APP_NAME = project_name
DEBUG = False
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
DUST_LOG_LEVEL = 'INFO'
# Servers and URLs
SERVER_NAME = 'localhost:5000'
# Authentication etc
SECRET_KEY = 'some-secret-key'
CSRF_ENABLED = True
# API
API_SERVER = 'localhost:5000'
API_TOKEN = 'some-api-token'
class Dev(Default):
APP_NAME = project_name + ' dev'
DEBUG = True
JS_LOG_LEVEL = 1
DUST_LOG_LEVEL = 'DEBUG'
class Testing(Default):
TESTING = True
CSRF_ENABLED = False
class Production(Default):
pass
Change app name to Ninhursag.#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
project_name = u'Ninhursag'
class Default(object):
APP_NAME = project_name
DEBUG = False
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
DUST_LOG_LEVEL = 'INFO'
# Servers and URLs
SERVER_NAME = 'localhost:5000'
# Authentication etc
SECRET_KEY = 'some-secret-key'
CSRF_ENABLED = True
# API
API_SERVER = 'localhost:5000'
API_TOKEN = 'some-api-token'
class Dev(Default):
APP_NAME = project_name + ' dev'
DEBUG = True
JS_LOG_LEVEL = 1
DUST_LOG_LEVEL = 'DEBUG'
class Testing(Default):
TESTING = True
CSRF_ENABLED = False
class Production(Default):
pass
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
project_name = u'Skeleton'
class Default(object):
APP_NAME = project_name
DEBUG = False
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
DUST_LOG_LEVEL = 'INFO'
# Servers and URLs
SERVER_NAME = 'localhost:5000'
# Authentication etc
SECRET_KEY = 'some-secret-key'
CSRF_ENABLED = True
# API
API_SERVER = 'localhost:5000'
API_TOKEN = 'some-api-token'
class Dev(Default):
APP_NAME = project_name + ' dev'
DEBUG = True
JS_LOG_LEVEL = 1
DUST_LOG_LEVEL = 'DEBUG'
class Testing(Default):
TESTING = True
CSRF_ENABLED = False
class Production(Default):
pass
<commit_msg>Change app name to Ninhursag.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
project_name = u'Ninhursag'
class Default(object):
APP_NAME = project_name
DEBUG = False
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
DUST_LOG_LEVEL = 'INFO'
# Servers and URLs
SERVER_NAME = 'localhost:5000'
# Authentication etc
SECRET_KEY = 'some-secret-key'
CSRF_ENABLED = True
# API
API_SERVER = 'localhost:5000'
API_TOKEN = 'some-api-token'
class Dev(Default):
APP_NAME = project_name + ' dev'
DEBUG = True
JS_LOG_LEVEL = 1
DUST_LOG_LEVEL = 'DEBUG'
class Testing(Default):
TESTING = True
CSRF_ENABLED = False
class Production(Default):
pass
|
c32118b2157e6c2cfd435461ee23edfa79aa917e
|
api/__init__.py
|
api/__init__.py
|
import ConfigParser
from peewee import *
config = ConfigParser.RawConfigParser()
config.read('server.conf')
database = SqliteDatabase('gallery.db')
from collection import CollectionModel
from album import AlbumModel
from user import UserModel, UserResource
from photo import PhotoModel
database.create_tables([PhotoModel, AlbumModel, UserModel, CollectionModel], True)
if UserModel.select().count() == 0:
UserModel.create(
admin = True,
name = 'Admin',
username = 'Admin',
password = '$2a$12$pMtKl1b7h1sFKbMdBvPqbuza1tJN2ZNNAFMEs1RQmwqYTbBwrrKpy'
)
from boto.s3.connection import S3Connection
s3 = S3Connection(config.get('S3', 'AccessKey'), config.get('S3', 'SecretKey'))
if s3.lookup(config.get('S3', 'Bucket')) is None:
s3.create_bucket(config.get('S3', 'Bucket'))
from flask import Flask
from flask.ext.restful import Api
app = Flask(__name__)
api = Api(app)
|
import ConfigParser
from peewee import *
config = ConfigParser.RawConfigParser()
config.read('server.conf')
database = SqliteDatabase('gallery.db', threadlocals=True)
from collection import CollectionModel
from album import AlbumModel
from user import UserModel, UsersResource
from photo import PhotoModel
database.create_tables([PhotoModel, AlbumModel, UserModel, CollectionModel], True)
if UserModel.select().count() == 0:
UserModel.create(
admin = True,
name = 'Admin',
username = 'Admin',
password = '$2a$12$pMtKl1b7h1sFKbMdBvPqbuza1tJN2ZNNAFMEs1RQmwqYTbBwrrKpy'
)
from boto.s3.connection import S3Connection
s3 = S3Connection(config.get('S3', 'AccessKey'), config.get('S3', 'SecretKey'))
if s3.lookup(config.get('S3', 'Bucket')) is None:
s3.create_bucket(config.get('S3', 'Bucket'))
from flask import Flask
from flask.ext.restful import Api
app = Flask(__name__)
api = Api(app)
api.add_resource(UsersResource, '/users/')
|
Set local threads to true for peewee
|
Set local threads to true for peewee
|
Python
|
unlicense
|
karousel/karousel
|
import ConfigParser
from peewee import *
config = ConfigParser.RawConfigParser()
config.read('server.conf')
database = SqliteDatabase('gallery.db')
from collection import CollectionModel
from album import AlbumModel
from user import UserModel, UserResource
from photo import PhotoModel
database.create_tables([PhotoModel, AlbumModel, UserModel, CollectionModel], True)
if UserModel.select().count() == 0:
UserModel.create(
admin = True,
name = 'Admin',
username = 'Admin',
password = '$2a$12$pMtKl1b7h1sFKbMdBvPqbuza1tJN2ZNNAFMEs1RQmwqYTbBwrrKpy'
)
from boto.s3.connection import S3Connection
s3 = S3Connection(config.get('S3', 'AccessKey'), config.get('S3', 'SecretKey'))
if s3.lookup(config.get('S3', 'Bucket')) is None:
s3.create_bucket(config.get('S3', 'Bucket'))
from flask import Flask
from flask.ext.restful import Api
app = Flask(__name__)
api = Api(app)
Set local threads to true for peewee
|
import ConfigParser
from peewee import *
config = ConfigParser.RawConfigParser()
config.read('server.conf')
database = SqliteDatabase('gallery.db', threadlocals=True)
from collection import CollectionModel
from album import AlbumModel
from user import UserModel, UsersResource
from photo import PhotoModel
database.create_tables([PhotoModel, AlbumModel, UserModel, CollectionModel], True)
if UserModel.select().count() == 0:
UserModel.create(
admin = True,
name = 'Admin',
username = 'Admin',
password = '$2a$12$pMtKl1b7h1sFKbMdBvPqbuza1tJN2ZNNAFMEs1RQmwqYTbBwrrKpy'
)
from boto.s3.connection import S3Connection
s3 = S3Connection(config.get('S3', 'AccessKey'), config.get('S3', 'SecretKey'))
if s3.lookup(config.get('S3', 'Bucket')) is None:
s3.create_bucket(config.get('S3', 'Bucket'))
from flask import Flask
from flask.ext.restful import Api
app = Flask(__name__)
api = Api(app)
api.add_resource(UsersResource, '/users/')
|
<commit_before>import ConfigParser
from peewee import *
config = ConfigParser.RawConfigParser()
config.read('server.conf')
database = SqliteDatabase('gallery.db')
from collection import CollectionModel
from album import AlbumModel
from user import UserModel, UserResource
from photo import PhotoModel
database.create_tables([PhotoModel, AlbumModel, UserModel, CollectionModel], True)
if UserModel.select().count() == 0:
UserModel.create(
admin = True,
name = 'Admin',
username = 'Admin',
password = '$2a$12$pMtKl1b7h1sFKbMdBvPqbuza1tJN2ZNNAFMEs1RQmwqYTbBwrrKpy'
)
from boto.s3.connection import S3Connection
s3 = S3Connection(config.get('S3', 'AccessKey'), config.get('S3', 'SecretKey'))
if s3.lookup(config.get('S3', 'Bucket')) is None:
s3.create_bucket(config.get('S3', 'Bucket'))
from flask import Flask
from flask.ext.restful import Api
app = Flask(__name__)
api = Api(app)
<commit_msg>Set local threads to true for peewee<commit_after>
|
import ConfigParser
from peewee import *
config = ConfigParser.RawConfigParser()
config.read('server.conf')
database = SqliteDatabase('gallery.db', threadlocals=True)
from collection import CollectionModel
from album import AlbumModel
from user import UserModel, UsersResource
from photo import PhotoModel
database.create_tables([PhotoModel, AlbumModel, UserModel, CollectionModel], True)
if UserModel.select().count() == 0:
UserModel.create(
admin = True,
name = 'Admin',
username = 'Admin',
password = '$2a$12$pMtKl1b7h1sFKbMdBvPqbuza1tJN2ZNNAFMEs1RQmwqYTbBwrrKpy'
)
from boto.s3.connection import S3Connection
s3 = S3Connection(config.get('S3', 'AccessKey'), config.get('S3', 'SecretKey'))
if s3.lookup(config.get('S3', 'Bucket')) is None:
s3.create_bucket(config.get('S3', 'Bucket'))
from flask import Flask
from flask.ext.restful import Api
app = Flask(__name__)
api = Api(app)
api.add_resource(UsersResource, '/users/')
|
import ConfigParser
from peewee import *
config = ConfigParser.RawConfigParser()
config.read('server.conf')
database = SqliteDatabase('gallery.db')
from collection import CollectionModel
from album import AlbumModel
from user import UserModel, UserResource
from photo import PhotoModel
database.create_tables([PhotoModel, AlbumModel, UserModel, CollectionModel], True)
if UserModel.select().count() == 0:
UserModel.create(
admin = True,
name = 'Admin',
username = 'Admin',
password = '$2a$12$pMtKl1b7h1sFKbMdBvPqbuza1tJN2ZNNAFMEs1RQmwqYTbBwrrKpy'
)
from boto.s3.connection import S3Connection
s3 = S3Connection(config.get('S3', 'AccessKey'), config.get('S3', 'SecretKey'))
if s3.lookup(config.get('S3', 'Bucket')) is None:
s3.create_bucket(config.get('S3', 'Bucket'))
from flask import Flask
from flask.ext.restful import Api
app = Flask(__name__)
api = Api(app)
Set local threads to true for peeweeimport ConfigParser
from peewee import *
config = ConfigParser.RawConfigParser()
config.read('server.conf')
database = SqliteDatabase('gallery.db', threadlocals=True)
from collection import CollectionModel
from album import AlbumModel
from user import UserModel, UsersResource
from photo import PhotoModel
database.create_tables([PhotoModel, AlbumModel, UserModel, CollectionModel], True)
if UserModel.select().count() == 0:
UserModel.create(
admin = True,
name = 'Admin',
username = 'Admin',
password = '$2a$12$pMtKl1b7h1sFKbMdBvPqbuza1tJN2ZNNAFMEs1RQmwqYTbBwrrKpy'
)
from boto.s3.connection import S3Connection
s3 = S3Connection(config.get('S3', 'AccessKey'), config.get('S3', 'SecretKey'))
if s3.lookup(config.get('S3', 'Bucket')) is None:
s3.create_bucket(config.get('S3', 'Bucket'))
from flask import Flask
from flask.ext.restful import Api
app = Flask(__name__)
api = Api(app)
api.add_resource(UsersResource, '/users/')
|
<commit_before>import ConfigParser
from peewee import *
config = ConfigParser.RawConfigParser()
config.read('server.conf')
database = SqliteDatabase('gallery.db')
from collection import CollectionModel
from album import AlbumModel
from user import UserModel, UserResource
from photo import PhotoModel
database.create_tables([PhotoModel, AlbumModel, UserModel, CollectionModel], True)
if UserModel.select().count() == 0:
UserModel.create(
admin = True,
name = 'Admin',
username = 'Admin',
password = '$2a$12$pMtKl1b7h1sFKbMdBvPqbuza1tJN2ZNNAFMEs1RQmwqYTbBwrrKpy'
)
from boto.s3.connection import S3Connection
s3 = S3Connection(config.get('S3', 'AccessKey'), config.get('S3', 'SecretKey'))
if s3.lookup(config.get('S3', 'Bucket')) is None:
s3.create_bucket(config.get('S3', 'Bucket'))
from flask import Flask
from flask.ext.restful import Api
app = Flask(__name__)
api = Api(app)
<commit_msg>Set local threads to true for peewee<commit_after>import ConfigParser
from peewee import *
config = ConfigParser.RawConfigParser()
config.read('server.conf')
database = SqliteDatabase('gallery.db', threadlocals=True)
from collection import CollectionModel
from album import AlbumModel
from user import UserModel, UsersResource
from photo import PhotoModel
database.create_tables([PhotoModel, AlbumModel, UserModel, CollectionModel], True)
if UserModel.select().count() == 0:
UserModel.create(
admin = True,
name = 'Admin',
username = 'Admin',
password = '$2a$12$pMtKl1b7h1sFKbMdBvPqbuza1tJN2ZNNAFMEs1RQmwqYTbBwrrKpy'
)
from boto.s3.connection import S3Connection
s3 = S3Connection(config.get('S3', 'AccessKey'), config.get('S3', 'SecretKey'))
if s3.lookup(config.get('S3', 'Bucket')) is None:
s3.create_bucket(config.get('S3', 'Bucket'))
from flask import Flask
from flask.ext.restful import Api
app = Flask(__name__)
api = Api(app)
api.add_resource(UsersResource, '/users/')
|
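In peewee 2.x, threadlocals=True keeps connection state per thread, so a threaded WSGI server such as the Flask development server does not share one SQLite connection across concurrent requests (peewee 3.x later made this behavior the default and dropped the kwarg). A minimal sketch of the per-request pattern this enables, with hypothetical hook names:

from peewee import SqliteDatabase

db = SqliteDatabase('gallery.db', threadlocals=True)

def before_request():   # hypothetical request hook
    db.connect()

def after_request():    # hypothetical request hook
    if not db.is_closed():
        db.close()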
8c5f317a090a23f10adcc837645bd25a8b5626f8
|
shap/models/_model.py
|
shap/models/_model.py
|
import numpy as np
from .._serializable import Serializable, Serializer, Deserializer
class Model(Serializable):
""" This is the superclass of all models.
"""
def __init__(self, model=None):
""" Wrap a callable model as a SHAP Model object.
"""
if isinstance(model, Model):
self.inner_model = model.inner_model
else:
self.inner_model = model
if hasattr(model, "output_names"):
self.output_names = model.output_names
def __call__(self, *args):
return np.array(self.inner_model(*args))
def save(self, out_file):
""" Save the model to the given file stream.
"""
super().save(out_file)
with Serializer(out_file, "shap.Model", version=0) as s:
s.save("model", self.inner_model)
@classmethod
def load(cls, in_file, instantiate=True):
if instantiate:
return cls._instantiated_load(in_file)
kwargs = super().load(in_file, instantiate=False)
with Deserializer(in_file, "shap.Model", min_version=0, max_version=0) as s:
kwargs["model"] = s.load("model")
return kwargs
|
import numpy as np
from .._serializable import Serializable, Serializer, Deserializer
from torch import Tensor
class Model(Serializable):
""" This is the superclass of all models.
"""
def __init__(self, model=None):
""" Wrap a callable model as a SHAP Model object.
"""
if isinstance(model, Model):
self.inner_model = model.inner_model
else:
self.inner_model = model
if hasattr(model, "output_names"):
self.output_names = model.output_names
def __call__(self, *args):
out = self.inner_model(*args)
out = out.cpu().detach().numpy() if isinstance(out, Tensor) else np.array(out)
return out
def save(self, out_file):
""" Save the model to the given file stream.
"""
super().save(out_file)
with Serializer(out_file, "shap.Model", version=0) as s:
s.save("model", self.inner_model)
@classmethod
def load(cls, in_file, instantiate=True):
if instantiate:
return cls._instantiated_load(in_file)
kwargs = super().load(in_file, instantiate=False)
with Deserializer(in_file, "shap.Model", min_version=0, max_version=0) as s:
kwargs["model"] = s.load("model")
return kwargs
|
Check SHAP Model call type
|
Check SHAP Model call type
|
Python
|
mit
|
slundberg/shap,slundberg/shap,slundberg/shap,slundberg/shap
|
import numpy as np
from .._serializable import Serializable, Serializer, Deserializer
class Model(Serializable):
""" This is the superclass of all models.
"""
def __init__(self, model=None):
""" Wrap a callable model as a SHAP Model object.
"""
if isinstance(model, Model):
self.inner_model = model.inner_model
else:
self.inner_model = model
if hasattr(model, "output_names"):
self.output_names = model.output_names
def __call__(self, *args):
return np.array(self.inner_model(*args))
def save(self, out_file):
""" Save the model to the given file stream.
"""
super().save(out_file)
with Serializer(out_file, "shap.Model", version=0) as s:
s.save("model", self.inner_model)
@classmethod
def load(cls, in_file, instantiate=True):
if instantiate:
return cls._instantiated_load(in_file)
kwargs = super().load(in_file, instantiate=False)
with Deserializer(in_file, "shap.Model", min_version=0, max_version=0) as s:
kwargs["model"] = s.load("model")
return kwargs
Check SHAP Model call type
|
import numpy as np
from .._serializable import Serializable, Serializer, Deserializer
from torch import Tensor
class Model(Serializable):
""" This is the superclass of all models.
"""
def __init__(self, model=None):
""" Wrap a callable model as a SHAP Model object.
"""
if isinstance(model, Model):
self.inner_model = model.inner_model
else:
self.inner_model = model
if hasattr(model, "output_names"):
self.output_names = model.output_names
def __call__(self, *args):
out = self.inner_model(*args)
out = out.cpu().detach().numpy() if isinstance(out, Tensor) else np.array(out)
return out
def save(self, out_file):
""" Save the model to the given file stream.
"""
super().save(out_file)
with Serializer(out_file, "shap.Model", version=0) as s:
s.save("model", self.inner_model)
@classmethod
def load(cls, in_file, instantiate=True):
if instantiate:
return cls._instantiated_load(in_file)
kwargs = super().load(in_file, instantiate=False)
with Deserializer(in_file, "shap.Model", min_version=0, max_version=0) as s:
kwargs["model"] = s.load("model")
return kwargs
|
<commit_before>import numpy as np
from .._serializable import Serializable, Serializer, Deserializer
class Model(Serializable):
""" This is the superclass of all models.
"""
def __init__(self, model=None):
""" Wrap a callable model as a SHAP Model object.
"""
if isinstance(model, Model):
self.inner_model = model.inner_model
else:
self.inner_model = model
if hasattr(model, "output_names"):
self.output_names = model.output_names
def __call__(self, *args):
return np.array(self.inner_model(*args))
def save(self, out_file):
""" Save the model to the given file stream.
"""
super().save(out_file)
with Serializer(out_file, "shap.Model", version=0) as s:
s.save("model", self.inner_model)
@classmethod
def load(cls, in_file, instantiate=True):
if instantiate:
return cls._instantiated_load(in_file)
kwargs = super().load(in_file, instantiate=False)
with Deserializer(in_file, "shap.Model", min_version=0, max_version=0) as s:
kwargs["model"] = s.load("model")
return kwargs
<commit_msg>Check SHAP Model call type<commit_after>
|
import numpy as np
from .._serializable import Serializable, Serializer, Deserializer
from torch import Tensor
class Model(Serializable):
""" This is the superclass of all models.
"""
def __init__(self, model=None):
""" Wrap a callable model as a SHAP Model object.
"""
if isinstance(model, Model):
self.inner_model = model.inner_model
else:
self.inner_model = model
if hasattr(model, "output_names"):
self.output_names = model.output_names
def __call__(self, *args):
out = self.inner_model(*args)
out = out.cpu().detach().numpy() if isinstance(out, Tensor) else np.array(out)
return out
def save(self, out_file):
""" Save the model to the given file stream.
"""
super().save(out_file)
with Serializer(out_file, "shap.Model", version=0) as s:
s.save("model", self.inner_model)
@classmethod
def load(cls, in_file, instantiate=True):
if instantiate:
return cls._instantiated_load(in_file)
kwargs = super().load(in_file, instantiate=False)
with Deserializer(in_file, "shap.Model", min_version=0, max_version=0) as s:
kwargs["model"] = s.load("model")
return kwargs
|
import numpy as np
from .._serializable import Serializable, Serializer, Deserializer
class Model(Serializable):
""" This is the superclass of all models.
"""
def __init__(self, model=None):
""" Wrap a callable model as a SHAP Model object.
"""
if isinstance(model, Model):
self.inner_model = model.inner_model
else:
self.inner_model = model
if hasattr(model, "output_names"):
self.output_names = model.output_names
def __call__(self, *args):
return np.array(self.inner_model(*args))
def save(self, out_file):
""" Save the model to the given file stream.
"""
super().save(out_file)
with Serializer(out_file, "shap.Model", version=0) as s:
s.save("model", self.inner_model)
@classmethod
def load(cls, in_file, instantiate=True):
if instantiate:
return cls._instantiated_load(in_file)
kwargs = super().load(in_file, instantiate=False)
with Deserializer(in_file, "shap.Model", min_version=0, max_version=0) as s:
kwargs["model"] = s.load("model")
return kwargs
Check SHAP Model call typeimport numpy as np
from .._serializable import Serializable, Serializer, Deserializer
from torch import Tensor
class Model(Serializable):
""" This is the superclass of all models.
"""
def __init__(self, model=None):
""" Wrap a callable model as a SHAP Model object.
"""
if isinstance(model, Model):
self.inner_model = model.inner_model
else:
self.inner_model = model
if hasattr(model, "output_names"):
self.output_names = model.output_names
def __call__(self, *args):
out = self.inner_model(*args)
out = out.cpu().detach().numpy() if isinstance(out, Tensor) else np.array(out)
return out
def save(self, out_file):
""" Save the model to the given file stream.
"""
super().save(out_file)
with Serializer(out_file, "shap.Model", version=0) as s:
s.save("model", self.inner_model)
@classmethod
def load(cls, in_file, instantiate=True):
if instantiate:
return cls._instantiated_load(in_file)
kwargs = super().load(in_file, instantiate=False)
with Deserializer(in_file, "shap.Model", min_version=0, max_version=0) as s:
kwargs["model"] = s.load("model")
return kwargs
|
<commit_before>import numpy as np
from .._serializable import Serializable, Serializer, Deserializer
class Model(Serializable):
""" This is the superclass of all models.
"""
def __init__(self, model=None):
""" Wrap a callable model as a SHAP Model object.
"""
if isinstance(model, Model):
self.inner_model = model.inner_model
else:
self.inner_model = model
if hasattr(model, "output_names"):
self.output_names = model.output_names
def __call__(self, *args):
return np.array(self.inner_model(*args))
def save(self, out_file):
""" Save the model to the given file stream.
"""
super().save(out_file)
with Serializer(out_file, "shap.Model", version=0) as s:
s.save("model", self.inner_model)
@classmethod
def load(cls, in_file, instantiate=True):
if instantiate:
return cls._instantiated_load(in_file)
kwargs = super().load(in_file, instantiate=False)
with Deserializer(in_file, "shap.Model", min_version=0, max_version=0) as s:
kwargs["model"] = s.load("model")
return kwargs
<commit_msg>Check SHAP Model call type<commit_after>import numpy as np
from .._serializable import Serializable, Serializer, Deserializer
from torch import Tensor
class Model(Serializable):
""" This is the superclass of all models.
"""
def __init__(self, model=None):
""" Wrap a callable model as a SHAP Model object.
"""
if isinstance(model, Model):
self.inner_model = model.inner_model
else:
self.inner_model = model
if hasattr(model, "output_names"):
self.output_names = model.output_names
def __call__(self, *args):
out = self.inner_model(*args)
out = out.cpu().detach().numpy() if isinstance(out, Tensor) else np.array(out)
return out
def save(self, out_file):
""" Save the model to the given file stream.
"""
super().save(out_file)
with Serializer(out_file, "shap.Model", version=0) as s:
s.save("model", self.inner_model)
@classmethod
def load(cls, in_file, instantiate=True):
if instantiate:
return cls._instantiated_load(in_file)
kwargs = super().load(in_file, instantiate=False)
with Deserializer(in_file, "shap.Model", min_version=0, max_version=0) as s:
kwargs["model"] = s.load("model")
return kwargs
|
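The effect of the new __call__ is that a wrapped PyTorch model now round-trips to NumPy instead of failing inside np.array() on a grad-tracking or GPU tensor. A small usage sketch (the toy module is an assumption; shap.models.Model is the class defined above):

import numpy as np
import torch
from shap.models import Model

net = torch.nn.Linear(3, 1)          # assumed toy model
wrapped = Model(net)
out = wrapped(torch.randn(2, 3))
assert isinstance(out, np.ndarray)   # Tensor output is moved to CPU, detached, converted

One design caveat: the top-level `from torch import Tensor` makes torch a hard dependency of this module; checking the output type lazily would keep shap importable without torch installed.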
1ab4e03a0925deb57a7eafab2043d55f480988f1
|
misc/toml_to_json.py
|
misc/toml_to_json.py
|
#!/usr/bin/env python
# Takes in toml, dumps it out as json
# Run pip install toml to install the toml module
import json
import sys
import toml
fh = open(sys.argv[1])
try:
data = toml.load(fh)
except toml.TomlDecodeError, e:
print e
sys.exit(1)
print json.dumps(data, indent=4, sort_keys=True)
|
#!/usr/bin/env python3
# Takes in toml, dumps it out as json
# Run pip install toml to install the toml module
import json
import sys
import toml
if len(sys.argv) > 1:
fh = open(sys.argv[1])
else:
fh = sys.stdin
try:
data = toml.load(fh)
except toml.TomlDecodeError as e:
print(e)
sys.exit(1)
print(json.dumps(data, indent=4, sort_keys=True))
|
Switch toml to json script to python 3
|
Switch toml to json script to python 3
|
Python
|
mit
|
mivok/tools,mivok/tools,mivok/tools,mivok/tools
|
#!/usr/bin/env python
# Takes in toml, dumps it out as json
# Run pip install toml to install the toml module
import json
import sys
import toml
fh = open(sys.argv[1])
try:
data = toml.load(fh)
except toml.TomlDecodeError, e:
print e
sys.exit(1)
print json.dumps(data, indent=4, sort_keys=True)
Switch toml to json script to python 3
|
#!/usr/bin/env python3
# Takes in toml, dumps it out as json
# Run pip install toml to install the toml module
import json
import sys
import toml
if len(sys.argv) > 1:
fh = open(sys.argv[1])
else:
fh = sys.stdin
try:
data = toml.load(fh)
except toml.TomlDecodeError as e:
print(e)
sys.exit(1)
print(json.dumps(data, indent=4, sort_keys=True))
|
<commit_before>#!/usr/bin/env python
# Takes in toml, dumps it out as json
# Run pip install toml to install the toml module
import json
import sys
import toml
fh = open(sys.argv[1])
try:
data = toml.load(fh)
except toml.TomlDecodeError, e:
print e
sys.exit(1)
print json.dumps(data, indent=4, sort_keys=True)
<commit_msg>Switch toml to json script to python 3<commit_after>
|
#!/usr/bin/env python3
# Takes in toml, dumps it out as json
# Run pip install toml to install the toml module
import json
import sys
import toml
if len(sys.argv) > 1:
fh = open(sys.argv[1])
else:
fh = sys.stdin
try:
data = toml.load(fh)
except toml.TomlDecodeError as e:
print(e)
sys.exit(1)
print(json.dumps(data, indent=4, sort_keys=True))
|
#!/usr/bin/env python
# Takes in toml, dumps it out as json
# Run pip install toml to install the toml module
import json
import sys
import toml
fh = open(sys.argv[1])
try:
data = toml.load(fh)
except toml.TomlDecodeError, e:
print e
sys.exit(1)
print json.dumps(data, indent=4, sort_keys=True)
Switch toml to json script to python 3#!/usr/bin/env python3
# Takes in toml, dumps it out as json
# Run pip install toml to install the toml module
import json
import sys
import toml
if len(sys.argv) > 1:
fh = open(sys.argv[1])
else:
fh = sys.stdin
try:
data = toml.load(fh)
except toml.TomlDecodeError as e:
print(e)
sys.exit(1)
print(json.dumps(data, indent=4, sort_keys=True))
|
<commit_before>#!/usr/bin/env python
# Takes in toml, dumps it out as json
# Run pip install toml to install the toml module
import json
import sys
import toml
fh = open(sys.argv[1])
try:
data = toml.load(fh)
except toml.TomlDecodeError, e:
print e
sys.exit(1)
print json.dumps(data, indent=4, sort_keys=True)
<commit_msg>Switch toml to json script to python 3<commit_after>#!/usr/bin/env python3
# Takes in toml, dumps it out as json
# Run pip install toml to install the toml module
import json
import sys
import toml
if len(sys.argv) > 1:
fh = open(sys.argv[1])
else:
fh = sys.stdin
try:
data = toml.load(fh)
except toml.TomlDecodeError as e:
print(e)
sys.exit(1)
print(json.dumps(data, indent=4, sort_keys=True))
|
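The conversion itself is a two-liner; a round-trip sketch with an inline document (the TOML string is illustrative):

import json
import toml

data = toml.loads('title = "example"\n[owner]\nname = "Ada"')
print(json.dumps(data, indent=4, sort_keys=True))

With the stdin branch added above, the script now also works in a pipeline, e.g. `cat config.toml | python3 toml_to_json.py`, as well as with a file argument: `python3 toml_to_json.py config.toml`.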
9dbaf58fd3dc9c3c976afcbf264fa6cfff09a7a0
|
django_prometheus/migrations.py
|
django_prometheus/migrations.py
|
from django.db import connections
from django.db.backends.dummy.base import DatabaseWrapper
from django.db.migrations.executor import MigrationExecutor
from prometheus_client import Gauge
unapplied_migrations = Gauge(
'django_migrations_unapplied_total',
'Count of unapplied migrations by database connection',
['connection'])
applied_migrations = Gauge(
'django_migrations_applied_total',
'Count of applied migrations by database connection',
['connection'])
def ExportMigrationsForDatabase(alias, executor):
plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
unapplied_migrations.labels(alias).set(len(plan))
applied_migrations.labels(alias).set(len(
executor.loader.applied_migrations))
def ExportMigrations():
"""Exports counts of unapplied migrations.
This is meant to be called during app startup, ideally by
django_prometheus.apps.AppConfig.
"""
if 'default' in connections and (
type(connections['default']) == DatabaseWrapper):
# This is the case where DATABASES = {} in the configuration,
# i.e. the user is not using any databases. Django "helpfully"
# adds a dummy database and then throws when you try to
# actually use it. So we don't do anything, because trying to
# export stats would crash the app on startup.
return
for alias in connections.databases:
executor = MigrationExecutor(connections[alias])
ExportMigrationsForDatabase(alias, executor)
|
from django.db import connections
from django.db.backends.dummy.base import DatabaseWrapper
from prometheus_client import Gauge
unapplied_migrations = Gauge(
'django_migrations_unapplied_total',
'Count of unapplied migrations by database connection',
['connection'])
applied_migrations = Gauge(
'django_migrations_applied_total',
'Count of applied migrations by database connection',
['connection'])
def ExportMigrationsForDatabase(alias, executor):
plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
unapplied_migrations.labels(alias).set(len(plan))
applied_migrations.labels(alias).set(len(
executor.loader.applied_migrations))
def ExportMigrations():
"""Exports counts of unapplied migrations.
This is meant to be called during app startup, ideally by
django_prometheus.apps.AppConfig.
"""
# Import MigrationExecutor lazily. MigrationExecutor checks at
# import time that the apps are ready, and they are not when
# django_prometheus is imported. ExportMigrations() should be
# called in AppConfig.ready(), which signals that all apps are
# ready.
from django.db.migrations.executor import MigrationExecutor
if 'default' in connections and (
type(connections['default']) == DatabaseWrapper):
# This is the case where DATABASES = {} in the configuration,
# i.e. the user is not using any databases. Django "helpfully"
# adds a dummy database and then throws when you try to
# actually use it. So we don't do anything, because trying to
# export stats would crash the app on startup.
return
for alias in connections.databases:
executor = MigrationExecutor(connections[alias])
ExportMigrationsForDatabase(alias, executor)
|
Fix the import of MigrationExecutor to be lazy.
|
Fix the import of MigrationExecutor to be lazy.
MigrationExecutor checks at import time whether apps are all
loaded. This doesn't work as they are usually not if your app is
imported by adding it to INSTALLED_APPS. Importing the MigrationExecutor
locally solves this problem as ExportMigrations is only called once the
django_prometheus app is signaled that it's ready.
|
Python
|
apache-2.0
|
obytes/django-prometheus,korfuri/django-prometheus,obytes/django-prometheus,korfuri/django-prometheus
|
from django.db import connections
from django.db.backends.dummy.base import DatabaseWrapper
from django.db.migrations.executor import MigrationExecutor
from prometheus_client import Gauge
unapplied_migrations = Gauge(
'django_migrations_unapplied_total',
'Count of unapplied migrations by database connection',
['connection'])
applied_migrations = Gauge(
'django_migrations_applied_total',
'Count of applied migrations by database connection',
['connection'])
def ExportMigrationsForDatabase(alias, executor):
plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
unapplied_migrations.labels(alias).set(len(plan))
applied_migrations.labels(alias).set(len(
executor.loader.applied_migrations))
def ExportMigrations():
"""Exports counts of unapplied migrations.
This is meant to be called during app startup, ideally by
django_prometheus.apps.AppConfig.
"""
if 'default' in connections and (
type(connections['default']) == DatabaseWrapper):
# This is the case where DATABASES = {} in the configuration,
# i.e. the user is not using any databases. Django "helpfully"
# adds a dummy database and then throws when you try to
# actually use it. So we don't do anything, because trying to
# export stats would crash the app on startup.
return
for alias in connections.databases:
executor = MigrationExecutor(connections[alias])
ExportMigrationsForDatabase(alias, executor)
Fix the import of MigrationExecutor to be lazy.
MigrationExecutor checks at import time whether apps are all
loaded. This doesn't work as they are usually not if your app is
imported by adding it to INSTALLED_APPS. Importing the MigrationExecutor
locally solves this problem as ExportMigrations is only called once the
django_prometheus app is signaled that it's ready.
|
from django.db import connections
from django.db.backends.dummy.base import DatabaseWrapper
from prometheus_client import Gauge
unapplied_migrations = Gauge(
'django_migrations_unapplied_total',
'Count of unapplied migrations by database connection',
['connection'])
applied_migrations = Gauge(
'django_migrations_applied_total',
'Count of applied migrations by database connection',
['connection'])
def ExportMigrationsForDatabase(alias, executor):
plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
unapplied_migrations.labels(alias).set(len(plan))
applied_migrations.labels(alias).set(len(
executor.loader.applied_migrations))
def ExportMigrations():
"""Exports counts of unapplied migrations.
This is meant to be called during app startup, ideally by
django_prometheus.apps.AppConfig.
"""
# Import MigrationExecutor lazily. MigrationExecutor checks at
# import time that the apps are ready, and they are not when
# django_prometheus is imported. ExportMigrations() should be
# called in AppConfig.ready(), which signals that all apps are
# ready.
from django.db.migrations.executor import MigrationExecutor
if 'default' in connections and (
type(connections['default']) == DatabaseWrapper):
# This is the case where DATABASES = {} in the configuration,
# i.e. the user is not using any databases. Django "helpfully"
# adds a dummy database and then throws when you try to
# actually use it. So we don't do anything, because trying to
# export stats would crash the app on startup.
return
for alias in connections.databases:
executor = MigrationExecutor(connections[alias])
ExportMigrationsForDatabase(alias, executor)
|
<commit_before>from django.db import connections
from django.db.backends.dummy.base import DatabaseWrapper
from django.db.migrations.executor import MigrationExecutor
from prometheus_client import Gauge
unapplied_migrations = Gauge(
'django_migrations_unapplied_total',
'Count of unapplied migrations by database connection',
['connection'])
applied_migrations = Gauge(
'django_migrations_applied_total',
'Count of applied migrations by database connection',
['connection'])
def ExportMigrationsForDatabase(alias, executor):
plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
unapplied_migrations.labels(alias).set(len(plan))
applied_migrations.labels(alias).set(len(
executor.loader.applied_migrations))
def ExportMigrations():
"""Exports counts of unapplied migrations.
This is meant to be called during app startup, ideally by
django_prometheus.apps.AppConfig.
"""
if 'default' in connections and (
type(connections['default']) == DatabaseWrapper):
# This is the case where DATABASES = {} in the configuration,
# i.e. the user is not using any databases. Django "helpfully"
# adds a dummy database and then throws when you try to
# actually use it. So we don't do anything, because trying to
# export stats would crash the app on startup.
return
for alias in connections.databases:
executor = MigrationExecutor(connections[alias])
ExportMigrationsForDatabase(alias, executor)
<commit_msg>Fix the import of MigrationExecutor to be lazy.
MigrationExecutor checks at import time whether apps are all
loaded. This doesn't work as they are usually not if your app is
imported by adding it to INSTALLED_APPS. Importing the MigrationExecutor
locally solves this problem as ExportMigrations is only called once the
django_prometheus app is signaled that it's ready.<commit_after>
|
from django.db import connections
from django.db.backends.dummy.base import DatabaseWrapper
from prometheus_client import Gauge
unapplied_migrations = Gauge(
'django_migrations_unapplied_total',
'Count of unapplied migrations by database connection',
['connection'])
applied_migrations = Gauge(
'django_migrations_applied_total',
'Count of applied migrations by database connection',
['connection'])
def ExportMigrationsForDatabase(alias, executor):
plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
unapplied_migrations.labels(alias).set(len(plan))
applied_migrations.labels(alias).set(len(
executor.loader.applied_migrations))
def ExportMigrations():
"""Exports counts of unapplied migrations.
This is meant to be called during app startup, ideally by
django_prometheus.apps.AppConfig.
"""
# Import MigrationExecutor lazily. MigrationExecutor checks at
# import time that the apps are ready, and they are not when
# django_prometheus is imported. ExportMigrations() should be
# called in AppConfig.ready(), which signals that all apps are
# ready.
from django.db.migrations.executor import MigrationExecutor
if 'default' in connections and (
type(connections['default']) == DatabaseWrapper):
# This is the case where DATABASES = {} in the configuration,
# i.e. the user is not using any databases. Django "helpfully"
# adds a dummy database and then throws when you try to
# actually use it. So we don't do anything, because trying to
# export stats would crash the app on startup.
return
for alias in connections.databases:
executor = MigrationExecutor(connections[alias])
ExportMigrationsForDatabase(alias, executor)
|
from django.db import connections
from django.db.backends.dummy.base import DatabaseWrapper
from django.db.migrations.executor import MigrationExecutor
from prometheus_client import Gauge
unapplied_migrations = Gauge(
'django_migrations_unapplied_total',
'Count of unapplied migrations by database connection',
['connection'])
applied_migrations = Gauge(
'django_migrations_applied_total',
'Count of applied migrations by database connection',
['connection'])
def ExportMigrationsForDatabase(alias, executor):
plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
unapplied_migrations.labels(alias).set(len(plan))
applied_migrations.labels(alias).set(len(
executor.loader.applied_migrations))
def ExportMigrations():
"""Exports counts of unapplied migrations.
This is meant to be called during app startup, ideally by
django_prometheus.apps.AppConfig.
"""
if 'default' in connections and (
type(connections['default']) == DatabaseWrapper):
# This is the case where DATABASES = {} in the configuration,
# i.e. the user is not using any databases. Django "helpfully"
# adds a dummy database and then throws when you try to
# actually use it. So we don't do anything, because trying to
# export stats would crash the app on startup.
return
for alias in connections.databases:
executor = MigrationExecutor(connections[alias])
ExportMigrationsForDatabase(alias, executor)
Fix the import of MigrationExecutor to be lazy.
MigrationExecutor checks at import time whether apps are all
loaded. This doesn't work as they are usually not if your app is
imported by adding it to INSTALLED_APPS. Importing the MigrationExecutor
locally solves this problem as ExportMigrations is only called once the
django_prometheus app is signaled that it's ready.from django.db import connections
from django.db.backends.dummy.base import DatabaseWrapper
from prometheus_client import Gauge
unapplied_migrations = Gauge(
'django_migrations_unapplied_total',
'Count of unapplied migrations by database connection',
['connection'])
applied_migrations = Gauge(
'django_migrations_applied_total',
'Count of applied migrations by database connection',
['connection'])
def ExportMigrationsForDatabase(alias, executor):
plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
unapplied_migrations.labels(alias).set(len(plan))
applied_migrations.labels(alias).set(len(
executor.loader.applied_migrations))
def ExportMigrations():
"""Exports counts of unapplied migrations.
This is meant to be called during app startup, ideally by
django_prometheus.apps.AppConfig.
"""
# Import MigrationExecutor lazily. MigrationExecutor checks at
# import time that the apps are ready, and they are not when
# django_prometheus is imported. ExportMigrations() should be
# called in AppConfig.ready(), which signals that all apps are
# ready.
from django.db.migrations.executor import MigrationExecutor
if 'default' in connections and (
type(connections['default']) == DatabaseWrapper):
# This is the case where DATABASES = {} in the configuration,
# i.e. the user is not using any databases. Django "helpfully"
# adds a dummy database and then throws when you try to
# actually use it. So we don't do anything, because trying to
# export stats would crash the app on startup.
return
for alias in connections.databases:
executor = MigrationExecutor(connections[alias])
ExportMigrationsForDatabase(alias, executor)
|
<commit_before>from django.db import connections
from django.db.backends.dummy.base import DatabaseWrapper
from django.db.migrations.executor import MigrationExecutor
from prometheus_client import Gauge
unapplied_migrations = Gauge(
'django_migrations_unapplied_total',
'Count of unapplied migrations by database connection',
['connection'])
applied_migrations = Gauge(
'django_migrations_applied_total',
'Count of applied migrations by database connection',
['connection'])
def ExportMigrationsForDatabase(alias, executor):
plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
unapplied_migrations.labels(alias).set(len(plan))
applied_migrations.labels(alias).set(len(
executor.loader.applied_migrations))
def ExportMigrations():
"""Exports counts of unapplied migrations.
This is meant to be called during app startup, ideally by
django_prometheus.apps.AppConfig.
"""
if 'default' in connections and (
type(connections['default']) == DatabaseWrapper):
# This is the case where DATABASES = {} in the configuration,
# i.e. the user is not using any databases. Django "helpfully"
# adds a dummy database and then throws when you try to
# actually use it. So we don't do anything, because trying to
# export stats would crash the app on startup.
return
for alias in connections.databases:
executor = MigrationExecutor(connections[alias])
ExportMigrationsForDatabase(alias, executor)
<commit_msg>Fix the import of MigrationExecutor to be lazy.
MigrationExecutor checks at import time whether apps are all
loaded. This doesn't work as they are usually not if your app is
imported by adding it to INSTALLED_APPS. Importing the MigrationExecutor
locally solves this problem as ExportMigrations is only called once the
django_prometheus app is signaled that it's ready.<commit_after>from django.db import connections
from django.db.backends.dummy.base import DatabaseWrapper
from prometheus_client import Gauge
unapplied_migrations = Gauge(
'django_migrations_unapplied_total',
'Count of unapplied migrations by database connection',
['connection'])
applied_migrations = Gauge(
'django_migrations_applied_total',
'Count of applied migrations by database connection',
['connection'])
def ExportMigrationsForDatabase(alias, executor):
plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
unapplied_migrations.labels(alias).set(len(plan))
applied_migrations.labels(alias).set(len(
executor.loader.applied_migrations))
def ExportMigrations():
"""Exports counts of unapplied migrations.
This is meant to be called during app startup, ideally by
django_prometheus.apps.AppConfig.
"""
# Import MigrationExecutor lazily. MigrationExecutor checks at
# import time that the apps are ready, and they are not when
# django_prometheus is imported. ExportMigrations() should be
# called in AppConfig.ready(), which signals that all apps are
# ready.
from django.db.migrations.executor import MigrationExecutor
if 'default' in connections and (
type(connections['default']) == DatabaseWrapper):
# This is the case where DATABASES = {} in the configuration,
# i.e. the user is not using any databases. Django "helpfully"
# adds a dummy database and then throws when you try to
# actually use it. So we don't do anything, because trying to
# export stats would crash the app on startup.
return
for alias in connections.databases:
executor = MigrationExecutor(connections[alias])
ExportMigrationsForDatabase(alias, executor)
|
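The call site the message points at is AppConfig.ready(), where Django guarantees the app registry is populated; a minimal sketch of that hookup (the config class name is an assumption):

from django.apps import AppConfig

class DjangoPrometheusConfig(AppConfig):
    name = 'django_prometheus'

    def ready(self):
        # All apps are loaded by the time ready() runs, so the lazy
        # import inside ExportMigrations() is safe to trigger.
        from django_prometheus.migrations import ExportMigrations
        ExportMigrations()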
746c06ba70cd2854a86ea8bc45fc8e3e6192f67c
|
app.py
|
app.py
|
# coding: utf-8
import os
import time
from twython import Twython
import requests
APP_KEY = os.environ.get('APP_KEY')
APP_SECRET = os.environ.get('APP_SECRET')
OAUTH_TOKEN = os.environ.get('OAUTH_TOKEN')
OAUTH_TOKEN_SECRET = os.environ.get('OAUTH_TOKEN_SECRET')
twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
def post_tweet(currency):
template = """
{name} - {symbol}
Price: ${price_usd}
Change in 1h: {percent_change_1h}%
Market cap: ${market_cap_usd}
Ranking: {rank}
"""
if currency['percent_change_1h'] > 0:
currency['percent_change_1h'] = '+{}'.format(currency['percent_change_1h'])
twitter.update_status(status=template.format(**currency))
def main():
response = requests.get('https://api.coinmarketcap.com/v1/ticker/')
for currency in sorted(response.json(), key=lambda x: x['rank'])[:10]:
post_tweet(currency)
time.sleep(5)
if __name__ == '__main__':
main()
|
# coding: utf-8
import os
import time
from twython import Twython
import requests
APP_KEY = os.environ.get('APP_KEY')
APP_SECRET = os.environ.get('APP_SECRET')
OAUTH_TOKEN = os.environ.get('OAUTH_TOKEN')
OAUTH_TOKEN_SECRET = os.environ.get('OAUTH_TOKEN_SECRET')
twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
def post_tweet(currency):
template = """
{name} - {symbol}
Price: ${price_usd}
Change in 1h: {percent_change_1h}%
Market cap: ${market_cap_usd}
Ranking: {rank}
#{name} #{symbol}
"""
if currency['percent_change_1h'] > 0:
currency['percent_change_1h'] = '+{}'.format(currency['percent_change_1h'])
twitter.update_status(status=template.format(**currency))
def main():
response = requests.get('https://api.coinmarketcap.com/v1/ticker/')
for currency in sorted(response.json(), key=lambda x: x['rank'])[:10]:
post_tweet(currency)
time.sleep(5)
if __name__ == '__main__':
main()
|
Add hashtag for currency name and symbol
|
Add hashtag for currency name and symbol
|
Python
|
mit
|
erickgnavar/coinstats
|
# coding: utf-8
import os
import time
from twython import Twython
import requests
APP_KEY = os.environ.get('APP_KEY')
APP_SECRET = os.environ.get('APP_SECRET')
OAUTH_TOKEN = os.environ.get('OAUTH_TOKEN')
OAUTH_TOKEN_SECRET = os.environ.get('OAUTH_TOKEN_SECRET')
twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
def post_tweet(currency):
template = """
{name} - {symbol}
Price: ${price_usd}
Change in 1h: {percent_change_1h}%
Market cap: ${market_cap_usd}
Ranking: {rank}
"""
if currency['percent_change_1h'] > 0:
currency['percent_change_1h'] = '+{}'.format(currency['percent_change_1h'])
twitter.update_status(status=template.format(**currency))
def main():
response = requests.get('https://api.coinmarketcap.com/v1/ticker/')
for currency in sorted(response.json(), key=lambda x: x['rank'])[:10]:
post_tweet(currency)
time.sleep(5)
if __name__ == '__main__':
main()
Add hashtag for currency name and symbol
|
# coding: utf-8
import os
import time
from twython import Twython
import requests
APP_KEY = os.environ.get('APP_KEY')
APP_SECRET = os.environ.get('APP_SECRET')
OAUTH_TOKEN = os.environ.get('OAUTH_TOKEN')
OAUTH_TOKEN_SECRET = os.environ.get('OAUTH_TOKEN_SECRET')
twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
def post_tweet(currency):
template = """
{name} - {symbol}
Price: ${price_usd}
Change in 1h: {percent_change_1h}%
Market cap: ${market_cap_usd}
Ranking: {rank}
#{name} #{symbol}
"""
if currency['percent_change_1h'] > 0:
currency['percent_change_1h'] = '+{}'.format(currency['percent_change_1h'])
twitter.update_status(status=template.format(**currency))
def main():
response = requests.get('https://api.coinmarketcap.com/v1/ticker/')
for currency in sorted(response.json(), key=lambda x: x['rank'])[:10]:
post_tweet(currency)
time.sleep(5)
if __name__ == '__main__':
main()
|
<commit_before># coding: utf-8
import os
import time
from twython import Twython
import requests
APP_KEY = os.environ.get('APP_KEY')
APP_SECRET = os.environ.get('APP_SECRET')
OAUTH_TOKEN = os.environ.get('OAUTH_TOKEN')
OAUTH_TOKEN_SECRET = os.environ.get('OAUTH_TOKEN_SECRET')
twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
def post_tweet(currency):
template = """
{name} - {symbol}
Price: ${price_usd}
Change in 1h: {percent_change_1h}%
Market cap: ${market_cap_usd}
Ranking: {rank}
"""
if currency['percent_change_1h'] > 0:
currency['percent_change_1h'] = '+{}'.format(currency['percent_change_1h'])
twitter.update_status(status=template.format(**currency))
def main():
response = requests.get('https://api.coinmarketcap.com/v1/ticker/')
for currency in sorted(response.json(), key=lambda x: x['rank'])[:10]:
post_tweet(currency)
time.sleep(5)
if __name__ == '__main__':
main()
<commit_msg>Add hashtag for currency name and symbol <commit_after>
|
# coding: utf-8
import os
import time
from twython import Twython
import requests
APP_KEY = os.environ.get('APP_KEY')
APP_SECRET = os.environ.get('APP_SECRET')
OAUTH_TOKEN = os.environ.get('OAUTH_TOKEN')
OAUTH_TOKEN_SECRET = os.environ.get('OAUTH_TOKEN_SECRET')
twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
def post_tweet(currency):
template = """
{name} - {symbol}
Price: ${price_usd}
Change in 1h: {percent_change_1h}%
Market cap: ${market_cap_usd}
Ranking: {rank}
#{name} #{symbol}
"""
if currency['percent_change_1h'] > 0:
currency['percent_change_1h'] = '+{}'.format(currency['percent_change_1h'])
twitter.update_status(status=template.format(**currency))
def main():
response = requests.get('https://api.coinmarketcap.com/v1/ticker/')
for currency in sorted(response.json(), key=lambda x: x['rank'])[:10]:
post_tweet(currency)
time.sleep(5)
if __name__ == '__main__':
main()
|
# coding: utf-8
import os
import time
from twython import Twython
import requests
APP_KEY = os.environ.get('APP_KEY')
APP_SECRET = os.environ.get('APP_SECRET')
OAUTH_TOKEN = os.environ.get('OAUTH_TOKEN')
OAUTH_TOKEN_SECRET = os.environ.get('OAUTH_TOKEN_SECRET')
twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
def post_tweet(currency):
template = """
{name} - {symbol}
Price: ${price_usd}
Change in 1h: {percent_change_1h}%
Market cap: ${market_cap_usd}
Ranking: {rank}
"""
if currency['percent_change_1h'] > 0:
currency['percent_change_1h'] = '+{}'.format(currency['percent_change_1h'])
twitter.update_status(status=template.format(**currency))
def main():
response = requests.get('https://api.coinmarketcap.com/v1/ticker/')
for currency in sorted(response.json(), key=lambda x: x['rank'])[:10]:
post_tweet(currency)
time.sleep(5)
if __name__ == '__main__':
main()
Add hashtag for currency name and symbol # coding: utf-8
import os
import time
from twython import Twython
import requests
APP_KEY = os.environ.get('APP_KEY')
APP_SECRET = os.environ.get('APP_SECRET')
OAUTH_TOKEN = os.environ.get('OAUTH_TOKEN')
OAUTH_TOKEN_SECRET = os.environ.get('OAUTH_TOKEN_SECRET')
twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
def post_tweet(currency):
template = """
{name} - {symbol}
Price: ${price_usd}
Change in 1h: {percent_change_1h}%
Market cap: ${market_cap_usd}
Ranking: {rank}
#{name} #{symbol}
"""
if currency['percent_change_1h'] > 0:
currency['percent_change_1h'] = '+{}'.format(currency['percent_change_1h'])
twitter.update_status(status=template.format(**currency))
def main():
response = requests.get('https://api.coinmarketcap.com/v1/ticker/')
for currency in sorted(response.json(), key=lambda x: x['rank'])[:10]:
post_tweet(currency)
time.sleep(5)
if __name__ == '__main__':
main()
|
<commit_before># coding: utf-8
import os
import time
from twython import Twython
import requests
APP_KEY = os.environ.get('APP_KEY')
APP_SECRET = os.environ.get('APP_SECRET')
OAUTH_TOKEN = os.environ.get('OAUTH_TOKEN')
OAUTH_TOKEN_SECRET = os.environ.get('OAUTH_TOKEN_SECRET')
twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
def post_tweet(currency):
template = """
{name} - {symbol}
Price: ${price_usd}
Change in 1h: {percent_change_1h}%
Market cap: ${market_cap_usd}
Ranking: {rank}
"""
if currency['percent_change_1h'] > 0:
currency['percent_change_1h'] = '+{}'.format(currency['percent_change_1h'])
twitter.update_status(status=template.format(**currency))
def main():
response = requests.get('https://api.coinmarketcap.com/v1/ticker/')
for currency in sorted(response.json(), key=lambda x: x['rank'])[:10]:
post_tweet(currency)
time.sleep(5)
if __name__ == '__main__':
main()
<commit_msg>Add hashtag for currency name and symbol <commit_after># coding: utf-8
import os
import time
from twython import Twython
import requests
APP_KEY = os.environ.get('APP_KEY')
APP_SECRET = os.environ.get('APP_SECRET')
OAUTH_TOKEN = os.environ.get('OAUTH_TOKEN')
OAUTH_TOKEN_SECRET = os.environ.get('OAUTH_TOKEN_SECRET')
twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
def post_tweet(currency):
template = """
{name} - {symbol}
Price: ${price_usd}
Change in 1h: {percent_change_1h}%
Market cap: ${market_cap_usd}
Ranking: {rank}
#{name} #{symbol}
"""
if currency['percent_change_1h'] > 0:
currency['percent_change_1h'] = '+{}'.format(currency['percent_change_1h'])
twitter.update_status(status=template.format(**currency))
def main():
response = requests.get('https://api.coinmarketcap.com/v1/ticker/')
for currency in sorted(response.json(), key=lambda x: x['rank'])[:10]:
post_tweet(currency)
time.sleep(5)
if __name__ == '__main__':
main()
|
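A note on the sign-prefix logic in this record: it compares the raw API value with an integer, which only works by accident under Python 2 because coinmarketcap serves percent_change_1h as a string. A minimal, more defensive sketch (the helper name and the '{:+.2f}' format spec are illustrative assumptions, not part of the commit):

def format_change(raw_change):
    # Coerce the JSON string to float before comparing; the '+' flag in
    # the format spec prepends the sign for positive values automatically.
    try:
        return '{:+.2f}'.format(float(raw_change))
    except (TypeError, ValueError):
        return 'n/a'

With a helper like this, the template can interpolate format_change(currency['percent_change_1h']) and the in-place mutation of the response dict goes away.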
8c17e811dd9bf2a2b1c815fefb661260f624e83f
|
app.py
|
app.py
|
from urlparse import urlparse
import logging
import os
from flask import Flask
import pymongo
app = Flask(__name__)
_logger = logging.getLogger(__name__)
db = None
def get_connection():
global db
if db:
return db
config = urlparse(os.environ.get('MONGOHQ_URL', 'http://localhost:27017/db'))
db_name = config.path.rpartition('/')[2]
connection = pymongo.Connection(config.hostname, config.port)
db = connection[db_name]
if config.username:
db.authenticate(config.username, config.password)
return db
@app.route('/')
def hello():
db = get_connection()
emptyset = db.some_collection.find()
return 'Hello World! {0}'.format(emptyset.count())
if __name__ == '__main__':
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.debug = True
app.run(host='0.0.0.0', port=port)
|
from urlparse import urlparse
import logging
import os
from flask import Flask
import pymongo
HOST = '0.0.0.0'
PORT = int(os.environ.get('PORT', 5000))
MONGO_URL = os.environ.get('MONGOHQ_URL', 'http://localhost:27017/db')
DEBUG = True
app = Flask(__name__)
_logger = logging.getLogger(__name__)
db = None
def get_connection():
global db
if db:
return db
config = urlparse(MONGO_URL)
db_name = config.path.rpartition('/')[2]
connection = pymongo.Connection(config.hostname, config.port)
db = connection[db_name]
if config.username:
db.authenticate(config.username, config.password)
return db
@app.route('/')
def hello():
db = get_connection()
emptyset = db.some_collection.find()
return 'Hello World! {0}'.format(emptyset.count())
if __name__ == '__main__':
app.run(host=HOST, port=PORT, debug=DEBUG)
|
Move configuration variables to constants.
|
Move configuration variables to constants.
|
Python
|
apache-2.0
|
lnickers2004/mongo-web-shell,pilliq/mongo-web-shell,mongodb-labs/mongo-web-shell,pilliq/mongo-web-shell,lnickers2004/mongo-web-shell,10gen-labs/mongo-web-shell,ecbtln/mongo-web-shell,rcchan/mongo-web-shell,FuegoFro/mongo-web-shell,mcomella/mongo-web-shell,mcomella/mongo-web-shell,xl76/mongo-web-shell,10gen-labs/mongo-web-shell,10gen-labs/mongo-web-shell,FuegoFro/mongo-web-shell,lnickers2004/mongo-web-shell,xl76/mongo-web-shell,mongodb-labs/mongo-web-shell,FuegoFro/mongo-web-shell,rcchan/mongo-web-shell,rcchan/mongo-web-shell,mcomella/mongo-web-shell,lnickers2004/mongo-web-shell,ecbtln/mongo-web-shell,pilliq/mongo-web-shell,ecbtln/mongo-web-shell,mongodb-labs/mongo-web-shell
|
from urlparse import urlparse
import logging
import os
from flask import Flask
import pymongo
app = Flask(__name__)
_logger = logging.getLogger(__name__)
db = None
def get_connection():
global db
if db:
return db
config = urlparse(os.environ.get('MONGOHQ_URL', 'http://localhost:27017/db'))
db_name = config.path.rpartition('/')[2]
connection = pymongo.Connection(config.hostname, config.port)
db = connection[db_name]
if config.username:
db.authenticate(config.username, config.password)
return db
@app.route('/')
def hello():
db = get_connection()
emptyset = db.some_collection.find()
return 'Hello World! {0}'.format(emptyset.count())
if __name__ == '__main__':
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.debug = True
app.run(host='0.0.0.0', port=port)
Move configuration variables to constants.
|
from urlparse import urlparse
import logging
import os
from flask import Flask
import pymongo
HOST = '0.0.0.0'
PORT = int(os.environ.get('PORT', 5000))
MONGO_URL = os.environ.get('MONGOHQ_URL', 'http://localhost:27017/db')
DEBUG = True
app = Flask(__name__)
_logger = logging.getLogger(__name__)
db = None
def get_connection():
global db
if db:
return db
config = urlparse(MONGO_URL)
db_name = config.path.rpartition('/')[2]
connection = pymongo.Connection(config.hostname, config.port)
db = connection[db_name]
if config.username:
db.authenticate(config.username, config.password)
return db
@app.route('/')
def hello():
db = get_connection()
emptyset = db.some_collection.find()
return 'Hello World! {0}'.format(emptyset.count())
if __name__ == '__main__':
app.run(host=HOST, port=PORT, debug=DEBUG)
|
<commit_before>from urlparse import urlparse
import logging
import os
from flask import Flask
import pymongo
app = Flask(__name__)
_logger = logging.getLogger(__name__)
db = None
def get_connection():
global db
if db:
return db
config = urlparse(os.environ.get('MONGOHQ_URL', 'http://localhost:27017/db'))
db_name = config.path.rpartition('/')[2]
connection = pymongo.Connection(config.hostname, config.port)
db = connection[db_name]
if config.username:
db.authenticate(config.username, config.password)
return db
@app.route('/')
def hello():
db = get_connection()
emptyset = db.some_collection.find()
return 'Hello World! {0}'.format(emptyset.count())
if __name__ == '__main__':
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.debug = True
app.run(host='0.0.0.0', port=port)
<commit_msg>Move configuration variables to constants.<commit_after>
|
from urlparse import urlparse
import logging
import os
from flask import Flask
import pymongo
HOST = '0.0.0.0'
PORT = int(os.environ.get('PORT', 5000))
MONGO_URL = os.environ.get('MONGOHQ_URL', 'http://localhost:27017/db')
DEBUG = True
app = Flask(__name__)
_logger = logging.getLogger(__name__)
db = None
def get_connection():
global db
if db:
return db
config = urlparse(MONGO_URL)
db_name = config.path.rpartition('/')[2]
connection = pymongo.Connection(config.hostname, config.port)
db = connection[db_name]
if config.username:
db.authenticate(config.username, config.password)
return db
@app.route('/')
def hello():
db = get_connection()
emptyset = db.some_collection.find()
return 'Hello World! {0}'.format(emptyset.count())
if __name__ == '__main__':
app.run(host=HOST, port=PORT, debug=DEBUG)
|
from urlparse import urlparse
import logging
import os
from flask import Flask
import pymongo
app = Flask(__name__)
_logger = logging.getLogger(__name__)
db = None
def get_connection():
global db
if db:
return db
config = urlparse(os.environ.get('MONGOHQ_URL', 'http://localhost:27017/db'))
db_name = config.path.rpartition('/')[2]
connection = pymongo.Connection(config.hostname, config.port)
db = connection[db_name]
if config.username:
db.authenticate(config.username, config.password)
return db
@app.route('/')
def hello():
db = get_connection()
emptyset = db.some_collection.find()
return 'Hello World! {0}'.format(emptyset.count())
if __name__ == '__main__':
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.debug = True
app.run(host='0.0.0.0', port=port)
Move configuration variables to constants.
from urlparse import urlparse
import logging
import os
from flask import Flask
import pymongo
HOST = '0.0.0.0'
PORT = int(os.environ.get('PORT', 5000))
MONGO_URL = os.environ.get('MONGOHQ_URL', 'http://localhost:27017/db')
DEBUG = True
app = Flask(__name__)
_logger = logging.getLogger(__name__)
db = None
def get_connection():
global db
if db:
return db
config = urlparse(MONGO_URL)
db_name = config.path.rpartition('/')[2]
connection = pymongo.Connection(config.hostname, config.port)
db = connection[db_name]
if config.username:
db.authenticate(config.username, config.password)
return db
@app.route('/')
def hello():
db = get_connection()
emptyset = db.some_collection.find()
return 'Hello World! {0}'.format(emptyset.count())
if __name__ == '__main__':
app.run(host=HOST, port=PORT, debug=DEBUG)
|
<commit_before>from urlparse import urlparse
import logging
import os
from flask import Flask
import pymongo
app = Flask(__name__)
_logger = logging.getLogger(__name__)
db = None
def get_connection():
global db
if db:
return db
config = urlparse(os.environ.get('MONGOHQ_URL', 'http://localhost:27017/db'))
db_name = config.path.rpartition('/')[2]
connection = pymongo.Connection(config.hostname, config.port)
db = connection[db_name]
if config.username:
db.authenticate(config.username, config.password)
return db
@app.route('/')
def hello():
db = get_connection()
emptyset = db.some_collection.find()
return 'Hello World! {0}'.format(emptyset.count())
if __name__ == '__main__':
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
app.debug = True
app.run(host='0.0.0.0', port=port)
<commit_msg>Move configuration variables to constants.<commit_after>from urlparse import urlparse
import logging
import os
from flask import Flask
import pymongo
HOST = '0.0.0.0'
PORT = int(os.environ.get('PORT', 5000))
MONGO_URL = os.environ.get('MONGOHQ_URL', 'http://localhost:27017/db')
DEBUG = True
app = Flask(__name__)
_logger = logging.getLogger(__name__)
db = None
def get_connection():
global db
if db:
return db
config = urlparse(MONGO_URL)
db_name = config.path.rpartition('/')[2]
connection = pymongo.Connection(config.hostname, config.port)
db = connection[db_name]
if config.username:
db.authenticate(config.username, config.password)
return db
@app.route('/')
def hello():
db = get_connection()
emptyset = db.some_collection.find()
return 'Hello World! {0}'.format(emptyset.count())
if __name__ == '__main__':
app.run(host=HOST, port=PORT, debug=DEBUG)
|
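The commit's pattern — resolve every environment-dependent setting once, at import time, into plain module constants — scales naturally into a dedicated config module that the rest of the app imports from. A sketch under the same conventions (module and variable names are assumptions):

# config.py: the only place that touches os.environ.
import os

HOST = os.environ.get('HOST', '0.0.0.0')
PORT = int(os.environ.get('PORT', 5000))
MONGO_URL = os.environ.get('MONGOHQ_URL', 'http://localhost:27017/db')
DEBUG = os.environ.get('DEBUG', 'false').lower() == 'true'

Application code then does `from config import HOST, PORT`, which keeps environment parsing out of request handlers and makes the settings trivial to stub in tests.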
07c40f2c47c81843c8fd183f8fad7e489fb2d814
|
sirius/LI_V00/record_names.py
|
sirius/LI_V00/record_names.py
|
from . import families as _families
def get_record_names(subsystem=None):
"""Return a dictionary of record names for given subsystem
each entry is another dictionary of model families whose
values are the indices in the pyaccel model of the magnets
that belong to the family. The magnet models can be segmented,
in which case the value is a python list of lists."""
_dict = {'LIPA-MODE':{}}
return _dict
def get_family_names(family=None, prefix=''):
_dict = {}
return _dict
def get_element_names(element=None, prefix=''):
_dict = {}
return _dict
def get_magnet_names():
# return get_record_names('boma')
_dict = {}
return _dict
def get_pulsed_magnet_names():
# return get_record_names('boma')
_dict = {}
return _dict
|
from . import families as _families
def get_record_names(subsystem=None):
"""Return a dictionary of record names for given subsystem
each entry is another dictionary of model families whose
values are the indices in the pyaccel model of the magnets
that belong to the family. The magnet models can be segmented,
in which case the value is a python list of lists."""
_dict = {}
return _dict
def get_family_names(family=None, prefix=''):
_dict = {}
return _dict
def get_element_names(element=None, prefix=''):
_dict = {}
return _dict
def get_magnet_names():
# return get_record_names('boma')
_dict = {}
return _dict
def get_pulsed_magnet_names():
# return get_record_names('boma')
_dict = {}
return _dict
|
Change linac mode pv to fake pvs
|
Change linac mode pv to fake pvs
|
Python
|
mit
|
lnls-fac/sirius
|
from . import families as _families
def get_record_names(subsystem=None):
"""Return a dictionary of record names for given subsystem
each entry is another dictionary of model families whose
values are the indices in the pyaccel model of the magnets
that belong to the family. The magnet models can be segmented,
in which case the value is a python list of lists."""
_dict = {'LIPA-MODE':{}}
return _dict
def get_family_names(family=None, prefix=''):
_dict = {}
return _dict
def get_element_names(element=None, prefix=''):
_dict = {}
return _dict
def get_magnet_names():
# return get_record_names('boma')
_dict = {}
return _dict
def get_pulsed_magnet_names():
# return get_record_names('boma')
_dict = {}
return _dict
Change linac mode pv to fake pvs
|
from . import families as _families
def get_record_names(subsystem=None):
"""Return a dictionary of record names for given subsystem
each entry is another dictionary of model families whose
values are the indices in the pyaccel model of the magnets
that belong to the family. The magnet models can be segmented,
in which case the value is a python list of lists."""
_dict = {}
return _dict
def get_family_names(family=None, prefix=''):
_dict = {}
return _dict
def get_element_names(element=None, prefix=''):
_dict = {}
return _dict
def get_magnet_names():
# return get_record_names('boma')
_dict = {}
return _dict
def get_pulsed_magnet_names():
# return get_record_names('boma')
_dict = {}
return _dict
|
<commit_before>
from . import families as _families
def get_record_names(subsystem=None):
"""Return a dictionary of record names for given subsystem
each entry is another dictionary of model families whose
values are the indices in the pyaccel model of the magnets
that belong to the family. The magnet models can be segmented,
in which case the value is a python list of lists."""
_dict = {'LIPA-MODE':{}}
return _dict
def get_family_names(family=None, prefix=''):
_dict = {}
return _dict
def get_element_names(element=None, prefix=''):
_dict = {}
return _dict
def get_magnet_names():
# return get_record_names('boma')
_dict = {}
return _dict
def get_pulsed_magnet_names():
# return get_record_names('boma')
_dict = {}
return _dict
<commit_msg>Change linac mode pv to fake pvs<commit_after>
|
from . import families as _families
def get_record_names(subsystem=None):
"""Return a dictionary of record names for given subsystem
each entry is another dictionary of model families whose
values are the indices in the pyaccel model of the magnets
that belong to the family. The magnet models can be segmented,
in which case the value is a python list of lists."""
_dict = {}
return _dict
def get_family_names(family=None, prefix=''):
_dict = {}
return _dict
def get_element_names(element=None, prefix=''):
_dict = {}
return _dict
def get_magnet_names():
# return get_record_names('boma')
_dict = {}
return _dict
def get_pulsed_magnet_names():
# return get_record_names('boma')
_dict = {}
return _dict
|
from . import families as _families
def get_record_names(subsystem=None):
"""Return a dictionary of record names for given subsystem
each entry is another dictionary of model families whose
values are the indices in the pyaccel model of the magnets
that belong to the family. The magnet models can be segmented,
in which case the value is a python list of lists."""
_dict = {'LIPA-MODE':{}}
return _dict
def get_family_names(family=None, prefix=''):
_dict = {}
return _dict
def get_element_names(element=None, prefix=''):
_dict = {}
return _dict
def get_magnet_names():
# return get_record_names('boma')
_dict = {}
return _dict
def get_pulsed_magnet_names():
# return get_record_names('boma')
_dict = {}
return _dict
Change linac mode pv to fake pvs
from . import families as _families
def get_record_names(subsystem=None):
"""Return a dictionary of record names for given subsystem
each entry is another dictionary of model families whose
values are the indices in the pyaccel model of the magnets
that belong to the family. The magnet models can be segmented,
in which case the value is a python list of lists."""
_dict = {}
return _dict
def get_family_names(family=None, prefix=''):
_dict = {}
return _dict
def get_element_names(element=None, prefix=''):
_dict = {}
return _dict
def get_magnet_names():
# return get_record_names('boma')
_dict = {}
return _dict
def get_pulsed_magnet_names():
# return get_record_names('boma')
_dict = {}
return _dict
|
<commit_before>
from . import families as _families
def get_record_names(subsystem=None):
"""Return a dictionary of record names for given subsystem
each entry is another dictionary of model families whose
values are the indices in the pyaccel model of the magnets
that belong to the family. The magnet models can be segmented,
in which case the value is a python list of lists."""
_dict = {'LIPA-MODE':{}}
return _dict
def get_family_names(family=None, prefix=''):
_dict = {}
return _dict
def get_element_names(element=None, prefix=''):
_dict = {}
return _dict
def get_magnet_names():
# return get_record_names('boma')
_dict = {}
return _dict
def get_pulsed_magnet_names():
# return get_record_names('boma')
_dict = {}
return _dict
<commit_msg>Change linac mode pv to fake pvs<commit_after>
from . import families as _families
def get_record_names(subsystem=None):
"""Return a dictionary of record names for given subsystem
each entry is another dictionary of model families whose
values are the indices in the pyaccel model of the magnets
that belong to the family. The magnet models can be segmented,
in which case the value is a python list of lists."""
_dict = {}
return _dict
def get_family_names(family=None, prefix=''):
_dict = {}
return _dict
def get_element_names(element=None, prefix=''):
_dict = {}
return _dict
def get_magnet_names():
# return get_record_names('boma')
_dict = {}
return _dict
def get_pulsed_magnet_names():
# return get_record_names('boma')
_dict = {}
return _dict
|
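The docstring's dict-of-dicts shape is easier to see with a concrete value; the one below is entirely made up, since this linac model returns empty dicts:

# Hypothetical get_record_names() result for an imaginary subsystem:
{
    'SIPS-QF': {'QF': [10, 42, 74]},        # one model index per magnet
    'SIPS-B1': {'B1': [[3, 4], [35, 36]]},  # segmented magnets: list of lists
}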
26c725d3e6b1d5737a0efcbcd2371ff066a13a86
|
tests/test_utils.py
|
tests/test_utils.py
|
from expert_tourist.utils import gmaps_url_to_coords
from tests.tests import BaseTestConfig
class TestUtils(BaseTestConfig):
def test_url_to_coords(self):
url = 'http://maps.google.co.cr/maps?q=9.8757875656828,-84.03733452782035'
lat, long = gmaps_url_to_coords(url)
self.assertEqual(lat, 9.8757875656828)
self.assertEqual(long, -84.03733452782035)
|
from unittest import TestCase
from expert_tourist.utils import gmaps_url_to_coords
class TestUtils(TestCase):
def test_url_to_coords(self):
url = 'http://maps.google.co.cr/maps?q=9.8757875656828,-84.03733452782035'
lat, long = gmaps_url_to_coords(url)
self.assertEqual(lat, 9.8757875656828)
self.assertEqual(long, -84.03733452782035)
|
Refactor test to implement 87ceac3 changes
|
Refactor test to implement 87ceac3 changes
|
Python
|
mit
|
richin13/expert-tourist
|
from expert_tourist.utils import gmaps_url_to_coords
from tests.tests import BaseTestConfig
class TestUtils(BaseTestConfig):
def test_url_to_coords(self):
url = 'http://maps.google.co.cr/maps?q=9.8757875656828,-84.03733452782035'
lat, long = gmaps_url_to_coords(url)
self.assertEqual(lat, 9.8757875656828)
self.assertEqual(long, -84.03733452782035)
Refactor test to implement 87ceac3 changes
|
from unittest import TestCase
from expert_tourist.utils import gmaps_url_to_coords
class TestUtils(TestCase):
def test_url_to_coords(self):
url = 'http://maps.google.co.cr/maps?q=9.8757875656828,-84.03733452782035'
lat, long = gmaps_url_to_coords(url)
self.assertEqual(lat, 9.8757875656828)
self.assertEqual(long, -84.03733452782035)
|
<commit_before>from expert_tourist.utils import gmaps_url_to_coords
from tests.tests import BaseTestConfig
class TestUtils(BaseTestConfig):
def test_url_to_coords(self):
url = 'http://maps.google.co.cr/maps?q=9.8757875656828,-84.03733452782035'
lat, long = gmaps_url_to_coords(url)
self.assertEqual(lat, 9.8757875656828)
self.assertEqual(long, -84.03733452782035)
<commit_msg>Refactor test to implement 87ceac3 changes<commit_after>
|
from unittest import TestCase
from expert_tourist.utils import gmaps_url_to_coords
class TestUtils(TestCase):
def test_url_to_coords(self):
url = 'http://maps.google.co.cr/maps?q=9.8757875656828,-84.03733452782035'
lat, long = gmaps_url_to_coords(url)
self.assertEqual(lat, 9.8757875656828)
self.assertEqual(long, -84.03733452782035)
|
from expert_tourist.utils import gmaps_url_to_coords
from tests.tests import BaseTestConfig
class TestUtils(BaseTestConfig):
def test_url_to_coords(self):
url = 'http://maps.google.co.cr/maps?q=9.8757875656828,-84.03733452782035'
lat, long = gmaps_url_to_coords(url)
self.assertEqual(lat, 9.8757875656828)
self.assertEqual(long, -84.03733452782035)
Refactor test to implement 87ceac3 changes
from unittest import TestCase
from expert_tourist.utils import gmaps_url_to_coords
class TestUtils(TestCase):
def test_url_to_coords(self):
url = 'http://maps.google.co.cr/maps?q=9.8757875656828,-84.03733452782035'
lat, long = gmaps_url_to_coords(url)
self.assertEqual(lat, 9.8757875656828)
self.assertEqual(long, -84.03733452782035)
|
<commit_before>from expert_tourist.utils import gmaps_url_to_coords
from tests.tests import BaseTestConfig
class TestUtils(BaseTestConfig):
def test_url_to_coords(self):
url = 'http://maps.google.co.cr/maps?q=9.8757875656828,-84.03733452782035'
lat, long = gmaps_url_to_coords(url)
self.assertEqual(lat, 9.8757875656828)
self.assertEqual(long, -84.03733452782035)
<commit_msg>Refactor test to implement 87ceac3 changes<commit_after>from unittest import TestCase
from expert_tourist.utils import gmaps_url_to_coords
class TestUtils(TestCase):
def test_url_to_coords(self):
url = 'http://maps.google.co.cr/maps?q=9.8757875656828,-84.03733452782035'
lat, long = gmaps_url_to_coords(url)
self.assertEqual(lat, 9.8757875656828)
self.assertEqual(long, -84.03733452782035)
|
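For context, a gmaps_url_to_coords helper only has to pull the q query parameter out of the URL and split it on the comma. One implementation consistent with the assertions in the test (this is a sketch, not the project's actual code):

from urlparse import urlparse, parse_qs  # urllib.parse on Python 3

def gmaps_url_to_coords(url):
    # 'q' carries the coordinates as 'lat,long', e.g. maps?q=9.87,-84.03
    query = parse_qs(urlparse(url).query)
    lat, lng = query['q'][0].split(',')
    return float(lat), float(lng)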
b2aba5cbdfbe59fee7bc595298a732c9aa1f9b51
|
tests/test_utils.py
|
tests/test_utils.py
|
"""Tests for the ``utils`` module."""
from mock import patch, Mock as mock
from nose.tools import *
from facepy import *
patch = patch('requests.session')
def mock():
global mock_request
mock_request = patch.start()().request
def unmock():
patch.stop()
@with_setup(mock, unmock)
def test_get_application_access_token():
mock_request.return_value.content = 'access_token=...'
access_token = get_application_access_token('<application id>', '<application secret key>')
mock_request.assert_called_with('GET', 'https://graph.facebook.com/oauth/access_token',
allow_redirects = True,
params = {
'client_id': '<application id>',
'client_secret': '<application secret key>',
'grant_type': 'client_credentials'
}
)
assert access_token == '...'
@with_setup(mock, unmock)
def test_get_application_access_token_raises_error():
mock_request.return_value.content = 'An unknown error occurred'
with assert_raises(GraphAPI.FacebookError):
get_application_access_token('<application id>', '<application secret key>')
|
"""Tests for the ``utils`` module."""
from mock import patch, Mock as mock
from nose.tools import *
from facepy import *
patch = patch('requests.session')
def mock():
global mock_request
mock_request = patch.start()().request
def unmock():
patch.stop()
@with_setup(mock, unmock)
def test_get_application_access_token():
mock_request.return_value.content = 'access_token=...'
access_token = get_application_access_token('<application id>', '<application secret key>')
mock_request.assert_called_with('GET', 'https://graph.facebook.com/oauth/access_token',
allow_redirects = True,
params = {
'client_id': '<application id>',
'client_secret': '<application secret key>',
'grant_type': 'client_credentials'
}
)
assert access_token == '...'
@with_setup(mock, unmock)
def test_get_application_access_token_raises_error():
mock_request.return_value.content = 'An unknown error occurred'
assert_raises(
GraphAPI.FacebookError, get_application_access_token,
'<application id>', '<application secret key>'
)
|
Make test_get_application_access_token_raises_error compatible with Python < 2.7
|
Make test_get_application_access_token_raises_error compatible with Python < 2.7
|
Python
|
mit
|
merwok-forks/facepy,jwjohns/facepy,jgorset/facepy,Spockuto/facepy,liorshahverdi/facepy,jwjohns/facepy,buzzfeed/facepy
|
"""Tests for the ``utils`` module."""
from mock import patch, Mock as mock
from nose.tools import *
from facepy import *
patch = patch('requests.session')
def mock():
global mock_request
mock_request = patch.start()().request
def unmock():
patch.stop()
@with_setup(mock, unmock)
def test_get_application_access_token():
mock_request.return_value.content = 'access_token=...'
access_token = get_application_access_token('<application id>', '<application secret key>')
mock_request.assert_called_with('GET', 'https://graph.facebook.com/oauth/access_token',
allow_redirects = True,
params = {
'client_id': '<application id>',
'client_secret': '<application secret key>',
'grant_type': 'client_credentials'
}
)
assert access_token == '...'
@with_setup(mock, unmock)
def test_get_application_access_token_raises_error():
mock_request.return_value.content = 'An unknown error occurred'
with assert_raises(GraphAPI.FacebookError):
get_application_access_token('<application id>', '<application secret key>')
Make test_get_application_access_token_raises_error compatible with Python < 2.7
|
"""Tests for the ``utils`` module."""
from mock import patch, Mock as mock
from nose.tools import *
from facepy import *
patch = patch('requests.session')
def mock():
global mock_request
mock_request = patch.start()().request
def unmock():
patch.stop()
@with_setup(mock, unmock)
def test_get_application_access_token():
mock_request.return_value.content = 'access_token=...'
access_token = get_application_access_token('<application id>', '<application secret key>')
mock_request.assert_called_with('GET', 'https://graph.facebook.com/oauth/access_token',
allow_redirects = True,
params = {
'client_id': '<application id>',
'client_secret': '<application secret key>',
'grant_type': 'client_credentials'
}
)
assert access_token == '...'
@with_setup(mock, unmock)
def test_get_application_access_token_raises_error():
mock_request.return_value.content = 'An unknown error occurred'
assert_raises(
GraphAPI.FacebookError, get_application_access_token,
'<application id>', '<application secret key>'
)
|
<commit_before>"""Tests for the ``utils`` module."""
from mock import patch, Mock as mock
from nose.tools import *
from facepy import *
patch = patch('requests.session')
def mock():
global mock_request
mock_request = patch.start()().request
def unmock():
patch.stop()
@with_setup(mock, unmock)
def test_get_application_access_token():
mock_request.return_value.content = 'access_token=...'
access_token = get_application_access_token('<application id>', '<application secret key>')
mock_request.assert_called_with('GET', 'https://graph.facebook.com/oauth/access_token',
allow_redirects = True,
params = {
'client_id': '<application id>',
'client_secret': '<application secret key>',
'grant_type': 'client_credentials'
}
)
assert access_token == '...'
@with_setup(mock, unmock)
def test_get_application_access_token_raises_error():
mock_request.return_value.content = 'An unknown error occurred'
with assert_raises(GraphAPI.FacebookError):
get_application_access_token('<application id>', '<application secret key>')
<commit_msg>Make test_get_application_access_token_raises_error compatible with Python < 2.7<commit_after>
|
"""Tests for the ``utils`` module."""
from mock import patch, Mock as mock
from nose.tools import *
from facepy import *
patch = patch('requests.session')
def mock():
global mock_request
mock_request = patch.start()().request
def unmock():
patch.stop()
@with_setup(mock, unmock)
def test_get_application_access_token():
mock_request.return_value.content = 'access_token=...'
access_token = get_application_access_token('<application id>', '<application secret key>')
mock_request.assert_called_with('GET', 'https://graph.facebook.com/oauth/access_token',
allow_redirects = True,
params = {
'client_id': '<application id>',
'client_secret': '<application secret key>',
'grant_type': 'client_credentials'
}
)
assert access_token == '...'
@with_setup(mock, unmock)
def test_get_application_access_token_raises_error():
mock_request.return_value.content = 'An unknown error occurred'
assert_raises(
GraphAPI.FacebookError, get_application_access_token,
'<application id>', '<application secret key>'
)
|
"""Tests for the ``utils`` module."""
from mock import patch, Mock as mock
from nose.tools import *
from facepy import *
patch = patch('requests.session')
def mock():
global mock_request
mock_request = patch.start()().request
def unmock():
patch.stop()
@with_setup(mock, unmock)
def test_get_application_access_token():
mock_request.return_value.content = 'access_token=...'
access_token = get_application_access_token('<application id>', '<application secret key>')
mock_request.assert_called_with('GET', 'https://graph.facebook.com/oauth/access_token',
allow_redirects = True,
params = {
'client_id': '<application id>',
'client_secret': '<application secret key>',
'grant_type': 'client_credentials'
}
)
assert access_token == '...'
@with_setup(mock, unmock)
def test_get_application_access_token_raises_error():
mock_request.return_value.content = 'An unknown error occurred'
with assert_raises(GraphAPI.FacebookError):
get_application_access_token('<application id>', '<application secret key>')
Make test_get_application_access_token_raises_error compatible with Python < 2.7
"""Tests for the ``utils`` module."""
from mock import patch, Mock as mock
from nose.tools import *
from facepy import *
patch = patch('requests.session')
def mock():
global mock_request
mock_request = patch.start()().request
def unmock():
patch.stop()
@with_setup(mock, unmock)
def test_get_application_access_token():
mock_request.return_value.content = 'access_token=...'
access_token = get_application_access_token('<application id>', '<application secret key>')
mock_request.assert_called_with('GET', 'https://graph.facebook.com/oauth/access_token',
allow_redirects = True,
params = {
'client_id': '<application id>',
'client_secret': '<application secret key>',
'grant_type': 'client_credentials'
}
)
assert access_token == '...'
@with_setup(mock, unmock)
def test_get_application_access_token_raises_error():
mock_request.return_value.content = 'An unknown error occurred'
assert_raises(
GraphAPI.FacebookError, get_application_access_token,
'<application id>', '<application secret key>'
)
|
<commit_before>"""Tests for the ``utils`` module."""
from mock import patch, Mock as mock
from nose.tools import *
from facepy import *
patch = patch('requests.session')
def mock():
global mock_request
mock_request = patch.start()().request
def unmock():
patch.stop()
@with_setup(mock, unmock)
def test_get_application_access_token():
mock_request.return_value.content = 'access_token=...'
access_token = get_application_access_token('<application id>', '<application secret key>')
mock_request.assert_called_with('GET', 'https://graph.facebook.com/oauth/access_token',
allow_redirects = True,
params = {
'client_id': '<application id>',
'client_secret': '<application secret key>',
'grant_type': 'client_credentials'
}
)
assert access_token == '...'
@with_setup(mock, unmock)
def test_get_application_access_token_raises_error():
mock_request.return_value.content = 'An unknown error occurred'
with assert_raises(GraphAPI.FacebookError):
get_application_access_token('<application id>', '<application secret key>')
<commit_msg>Make test_get_application_access_token_raises_error compatible with Python < 2.7<commit_after>"""Tests for the ``utils`` module."""
from mock import patch, Mock as mock
from nose.tools import *
from facepy import *
patch = patch('requests.session')
def mock():
global mock_request
mock_request = patch.start()().request
def unmock():
patch.stop()
@with_setup(mock, unmock)
def test_get_application_access_token():
mock_request.return_value.content = 'access_token=...'
access_token = get_application_access_token('<application id>', '<application secret key>')
mock_request.assert_called_with('GET', 'https://graph.facebook.com/oauth/access_token',
allow_redirects = True,
params = {
'client_id': '<application id>',
'client_secret': '<application secret key>',
'grant_type': 'client_credentials'
}
)
assert access_token == '...'
@with_setup(mock, unmock)
def test_get_application_access_token_raises_error():
mock_request.return_value.content = 'An unknown error occurred'
assert_raises(
GraphAPI.FacebookError, get_application_access_token,
'<application id>', '<application secret key>'
)
|
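The compatibility issue is that assert_raises (nose's alias for unittest's assertRaises) only gained its context-manager form in Python 2.7; on earlier interpreters it must be called with the callable and its arguments. Both spellings side by side, with a stand-in exception for illustration:

from nose.tools import assert_raises

# Context-manager form: Python 2.7+ only.
with assert_raises(ValueError):
    int('not a number')

# Callable form: works on Python < 2.7 as well.
assert_raises(ValueError, int, 'not a number')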
34e2cf61bf686542f21bc8d840f17b13ca137fe3
|
Main.py
|
Main.py
|
"""Main Module of PDF Splitter"""
import argparse
from PyPDF2 import PdfFileWriter
from Util import all_pdf_files_in_directory, split_on_condition, concat_pdf_pages
parser = \
argparse.ArgumentParser(
description='Split all the pages of multiple PDF files in a directory by document number'
)
parser.add_argument(
'directory',
metavar='PATH',
type=str,
help='path to a directory'
)
def width_greater_than_height(page):
box = page.mediaBox
return box.getWidth() > box.getHeight()
if __name__ == '__main__':
args = parser.parse_args()
all_pdf_files = all_pdf_files_in_directory(args.directory)
opened_files = map(lambda path: open(path, 'rb'), all_pdf_files)
all_pages = concat_pdf_pages(opened_files)
for idx, pages in enumerate(split_on_condition(all_pages, predicate=width_greater_than_height), start=1):
pdf_writer = PdfFileWriter()
map(pdf_writer.addPage, pages)
output_filename = '{0:05}.pdf'.format(idx)
with open(output_filename, 'wb') as output_file:
pdf_writer.write(output_file)
map(lambda f: f.close, opened_files)
|
"""Main Module of PDF Splitter"""
import argparse
import os
from PyPDF2 import PdfFileWriter
from Util import all_pdf_files_in_directory, split_on_condition, concat_pdf_pages
parser = \
argparse.ArgumentParser(
description='Split all the pages of multiple PDF files in a directory by document number'
)
parser.add_argument(
'directory',
metavar='PATH',
type=str,
help='path to a directory'
)
def width_greater_than_height(page):
box = page.mediaBox
return box.getWidth() > box.getHeight()
if __name__ == '__main__':
args = parser.parse_args()
all_pdf_files = all_pdf_files_in_directory(args.directory)
opened_files = map(lambda path: open(path, 'rb'), all_pdf_files)
all_pages = concat_pdf_pages(opened_files)
for idx, pages in enumerate(split_on_condition(all_pages, predicate=width_greater_than_height), start=1):
pdf_writer = PdfFileWriter()
map(pdf_writer.addPage, pages)
output_filename = '{0:05}.pdf'.format(idx)
with open(output_filename, 'wb') as output_file:
pdf_writer.write(output_file)
output_file.flush()
os.fsync(output_file.fileno())
map(lambda f: f.close, opened_files)
|
Use flush and fsync to ensure data is written to disk
|
Use flush and fsync to ensure data is written to disk
|
Python
|
mit
|
shunghsiyu/pdf-processor
|
"""Main Module of PDF Splitter"""
import argparse
from PyPDF2 import PdfFileWriter
from Util import all_pdf_files_in_directory, split_on_condition, concat_pdf_pages
parser = \
argparse.ArgumentParser(
description='Split all the pages of multiple PDF files in a directory by document number'
)
parser.add_argument(
'directory',
metavar='PATH',
type=str,
help='path to a directory'
)
def width_greater_than_height(page):
box = page.mediaBox
return box.getWidth() > box.getHeight()
if __name__ == '__main__':
args = parser.parse_args()
all_pdf_files = all_pdf_files_in_directory(args.directory)
opened_files = map(lambda path: open(path, 'rb'), all_pdf_files)
all_pages = concat_pdf_pages(opened_files)
for idx, pages in enumerate(split_on_condition(all_pages, predicate=width_greater_than_height), start=1):
pdf_writer = PdfFileWriter()
map(pdf_writer.addPage, pages)
output_filename = '{0:05}.pdf'.format(idx)
with open(output_filename, 'wb') as output_file:
pdf_writer.write(output_file)
map(lambda f: f.close, opened_files)
Use flush and fsync to ensure data is written to disk
|
"""Main Module of PDF Splitter"""
import argparse
import os
from PyPDF2 import PdfFileWriter
from Util import all_pdf_files_in_directory, split_on_condition, concat_pdf_pages
parser = \
argparse.ArgumentParser(
description='Split all the pages of multiple PDF files in a directory by document number'
)
parser.add_argument(
'directory',
metavar='PATH',
type=str,
help='path to a directory'
)
def width_greater_than_height(page):
box = page.mediaBox
return box.getWidth() > box.getHeight()
if __name__ == '__main__':
args = parser.parse_args()
all_pdf_files = all_pdf_files_in_directory(args.directory)
opened_files = map(lambda path: open(path, 'rb'), all_pdf_files)
all_pages = concat_pdf_pages(opened_files)
for idx, pages in enumerate(split_on_condition(all_pages, predicate=width_greater_than_height), start=1):
pdf_writer = PdfFileWriter()
map(pdf_writer.addPage, pages)
output_filename = '{0:05}.pdf'.format(idx)
with open(output_filename, 'wb') as output_file:
pdf_writer.write(output_file)
output_file.flush()
os.fsync(output_file.fileno())
map(lambda f: f.close, opened_files)
|
<commit_before>"""Main Module of PDF Splitter"""
import argparse
from PyPDF2 import PdfFileWriter
from Util import all_pdf_files_in_directory, split_on_condition, concat_pdf_pages
parser = \
argparse.ArgumentParser(
description='Split all the pages of multiple PDF files in a directory by document number'
)
parser.add_argument(
'directory',
metavar='PATH',
type=str,
help='path to a directory'
)
def width_greater_than_height(page):
box = page.mediaBox
return box.getWidth() > box.getHeight()
if __name__ == '__main__':
args = parser.parse_args()
all_pdf_files = all_pdf_files_in_directory(args.directory)
opened_files = map(lambda path: open(path, 'rb'), all_pdf_files)
all_pages = concat_pdf_pages(opened_files)
for idx, pages in enumerate(split_on_condition(all_pages, predicate=width_greater_than_height), start=1):
pdf_writer = PdfFileWriter()
map(pdf_writer.addPage, pages)
output_filename = '{0:05}.pdf'.format(idx)
with open(output_filename, 'wb') as output_file:
pdf_writer.write(output_file)
map(lambda f: f.close, opened_files)
<commit_msg>Use flush and fsync to ensure data is written to disk<commit_after>
|
"""Main Module of PDF Splitter"""
import argparse
import os
from PyPDF2 import PdfFileWriter
from Util import all_pdf_files_in_directory, split_on_condition, concat_pdf_pages
parser = \
argparse.ArgumentParser(
description='Split all the pages of multiple PDF files in a directory by document number'
)
parser.add_argument(
'directory',
metavar='PATH',
type=str,
help='path to a directory'
)
def width_greater_than_height(page):
box = page.mediaBox
return box.getWidth() > box.getHeight()
if __name__ == '__main__':
args = parser.parse_args()
all_pdf_files = all_pdf_files_in_directory(args.directory)
opened_files = map(lambda path: open(path, 'rb'), all_pdf_files)
all_pages = concat_pdf_pages(opened_files)
for idx, pages in enumerate(split_on_condition(all_pages, predicate=width_greater_than_height), start=1):
pdf_writer = PdfFileWriter()
map(pdf_writer.addPage, pages)
output_filename = '{0:05}.pdf'.format(idx)
with open(output_filename, 'wb') as output_file:
pdf_writer.write(output_file)
output_file.flush()
os.fsync(output_file.fileno())
map(lambda f: f.close, opened_files)
|
"""Main Module of PDF Splitter"""
import argparse
from PyPDF2 import PdfFileWriter
from Util import all_pdf_files_in_directory, split_on_condition, concat_pdf_pages
parser = \
argparse.ArgumentParser(
description='Split all the pages of multiple PDF files in a directory by document number'
)
parser.add_argument(
'directory',
metavar='PATH',
type=str,
help='path to a directory'
)
def width_greater_than_height(page):
box = page.mediaBox
return box.getWidth() > box.getHeight()
if __name__ == '__main__':
args = parser.parse_args()
all_pdf_files = all_pdf_files_in_directory(args.directory)
opened_files = map(lambda path: open(path, 'rb'), all_pdf_files)
all_pages = concat_pdf_pages(opened_files)
for idx, pages in enumerate(split_on_condition(all_pages, predicate=width_greater_than_height), start=1):
pdf_writer = PdfFileWriter()
map(pdf_writer.addPage, pages)
output_filename = '{0:05}.pdf'.format(idx)
with open(output_filename, 'wb') as output_file:
pdf_writer.write(output_file)
map(lambda f: f.close, opened_files)
Use flush and fsync to ensure data is written to disk
"""Main Module of PDF Splitter"""
import argparse
import os
from PyPDF2 import PdfFileWriter
from Util import all_pdf_files_in_directory, split_on_condition, concat_pdf_pages
parser = \
argparse.ArgumentParser(
description='Split all the pages of multiple PDF files in a directory by document number'
)
parser.add_argument(
'directory',
metavar='PATH',
type=str,
help='path to a directory'
)
def width_greater_than_height(page):
box = page.mediaBox
return box.getWidth() > box.getHeight()
if __name__ == '__main__':
args = parser.parse_args()
all_pdf_files = all_pdf_files_in_directory(args.directory)
opened_files = map(lambda path: open(path, 'rb'), all_pdf_files)
all_pages = concat_pdf_pages(opened_files)
for idx, pages in enumerate(split_on_condition(all_pages, predicate=width_greater_than_height), start=1):
pdf_writer = PdfFileWriter()
map(pdf_writer.addPage, pages)
output_filename = '{0:05}.pdf'.format(idx)
with open(output_filename, 'wb') as output_file:
pdf_writer.write(output_file)
output_file.flush()
os.fsync(output_file.fileno())
map(lambda f: f.close, opened_files)
|
<commit_before>"""Main Module of PDF Splitter"""
import argparse
from PyPDF2 import PdfFileWriter
from Util import all_pdf_files_in_directory, split_on_condition, concat_pdf_pages
parser = \
argparse.ArgumentParser(
description='Split all the pages of multiple PDF files in a directory by document number'
)
parser.add_argument(
'directory',
metavar='PATH',
type=str,
help='path to a directory'
)
def width_greater_than_height(page):
box = page.mediaBox
return box.getWidth() > box.getHeight()
if __name__ == '__main__':
args = parser.parse_args()
all_pdf_files = all_pdf_files_in_directory(args.directory)
opened_files = map(lambda path: open(path, 'rb'), all_pdf_files)
all_pages = concat_pdf_pages(opened_files)
for idx, pages in enumerate(split_on_condition(all_pages, predicate=width_greater_than_height), start=1):
pdf_writer = PdfFileWriter()
map(pdf_writer.addPage, pages)
output_filename = '{0:05}.pdf'.format(idx)
with open(output_filename, 'wb') as output_file:
pdf_writer.write(output_file)
map(lambda f: f.close, opened_files)
<commit_msg>Use flush and fsync to ensure data is written to disk<commit_after>"""Main Module of PDF Splitter"""
import argparse
import os
from PyPDF2 import PdfFileWriter
from Util import all_pdf_files_in_directory, split_on_condition, concat_pdf_pages
parser = \
argparse.ArgumentParser(
description='Split all the pages of multiple PDF files in a directory by document number'
)
parser.add_argument(
'directory',
metavar='PATH',
type=str,
help='path to a directory'
)
def width_greater_than_height(page):
box = page.mediaBox
return box.getWidth() > box.getHeight()
if __name__ == '__main__':
args = parser.parse_args()
all_pdf_files = all_pdf_files_in_directory(args.directory)
opened_files = map(lambda path: open(path, 'rb'), all_pdf_files)
all_pages = concat_pdf_pages(opened_files)
for idx, pages in enumerate(split_on_condition(all_pages, predicate=width_greater_than_height), start=1):
pdf_writer = PdfFileWriter()
map(pdf_writer.addPage, pages)
output_filename = '{0:05}.pdf'.format(idx)
with open(output_filename, 'wb') as output_file:
pdf_writer.write(output_file)
output_file.flush()
os.fsync(output_file.fileno())
map(lambda f: f.close, opened_files)
|
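The reasoning behind this change: leaving the with block flushes Python's userspace buffer, but the bytes can still sit in the kernel's page cache; os.fsync is what asks the operating system to commit them to stable storage. The pattern in isolation, independent of PyPDF2:

import os

def write_durably(path, data):
    with open(path, 'wb') as f:
        f.write(data)
        f.flush()             # drain Python's internal buffer to the OS
        os.fsync(f.fileno())  # ask the kernel to push it to the device

Incidentally, the trailing map(lambda f: f.close, opened_files) in the record never closes anything — the lambda returns the bound close method without calling it — but that is the commit's code as recorded.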
727f1de69a3f2211d97a2c5ed412eacb90d11619
|
cov/templatetags/cov_tag.py
|
cov/templatetags/cov_tag.py
|
from django import template
from ..models import Covoiturage
from django.utils import timezone
register = template.Library()
@register.inclusion_tag('cov/cov_block.html', takes_context=True)
def get_cov(context, nb):
list_cov = Covoiturage.objects.select_related('poster').all().filter(good_until__gte=timezone.now()).order_by('good_until')[:nb]
print list_cov
ctx_data = {
'cov' : list_cov,
'request': context['request'],
}
return ctx_data
|
from django import template
from ..models import Covoiturage
from django.utils import timezone
register = template.Library()
@register.inclusion_tag('cov/cov_block.html', takes_context=True)
def get_cov(context, nb):
return {
'cov' : Covoiturage.objects.select_related('poster').filter(
good_until__gte=timezone.now()
).order_by('good_until')[:nb],
'request': context['request'],
}
|
Fix: cov list is fetched in one query (before it was fetched twice)
|
Fix: cov list is fetched in one query (before it was fetched twice)
|
Python
|
agpl-3.0
|
rezometz/paiji2,rezometz/paiji2,rezometz/paiji2
|
from django import template
from ..models import Covoiturage
from django.utils import timezone
register = template.Library()
@register.inclusion_tag('cov/cov_block.html', takes_context=True)
def get_cov(context, nb):
list_cov = Covoiturage.objects.select_related('poster').all().filter(good_until__gte=timezone.now()).order_by('good_until')[:nb]
print list_cov
ctx_data = {
'cov' : list_cov,
'request': context['request'],
}
return ctx_data
Fix: cov list is fetched in one query (before it was fetched twice)
|
from django import template
from ..models import Covoiturage
from django.utils import timezone
register = template.Library()
@register.inclusion_tag('cov/cov_block.html', takes_context=True)
def get_cov(context, nb):
return {
'cov' : Covoiturage.objects.select_related('poster').filter(
good_until__gte=timezone.now()
).order_by('good_until')[:nb],
'request': context['request'],
}
|
<commit_before>from django import template
from ..models import Covoiturage
from django.utils import timezone
register = template.Library()
@register.inclusion_tag('cov/cov_block.html', takes_context=True)
def get_cov(context, nb):
list_cov = Covoiturage.objects.select_related('poster').all().filter(good_until__gte=timezone.now()).order_by('good_until')[:nb]
print list_cov
ctx_data = {
'cov' : list_cov,
'request': context['request'],
}
return ctx_data
<commit_msg>Fix: cov list is fetched in one query (before it was fetched twice)<commit_after>
|
from django import template
from ..models import Covoiturage
from django.utils import timezone
register = template.Library()
@register.inclusion_tag('cov/cov_block.html', takes_context=True)
def get_cov(context, nb):
return {
'cov' : Covoiturage.objects.select_related('poster').filter(
good_until__gte=timezone.now()
).order_by('good_until')[:nb],
'request': context['request'],
}
|
from django import template
from ..models import Covoiturage
from django.utils import timezone
register = template.Library()
@register.inclusion_tag('cov/cov_block.html', takes_context=True)
def get_cov(context, nb):
list_cov = Covoiturage.objects.select_related('poster').all().filter(good_until__gte=timezone.now()).order_by('good_until')[:nb]
print list_cov
ctx_data = {
'cov' : list_cov,
'request': context['request'],
}
return ctx_data
Fix: cov list is fetched in one query (before it was fetched twice)
from django import template
from ..models import Covoiturage
from django.utils import timezone
register = template.Library()
@register.inclusion_tag('cov/cov_block.html', takes_context=True)
def get_cov(context, nb):
return {
'cov' : Covoiturage.objects.select_related('poster').filter(
good_until__gte=timezone.now()
).order_by('good_until')[:nb],
'request': context['request'],
}
|
<commit_before>from django import template
from ..models import Covoiturage
from django.utils import timezone
register = template.Library()
@register.inclusion_tag('cov/cov_block.html', takes_context=True)
def get_cov(context, nb):
list_cov = Covoiturage.objects.select_related('poster').all().filter(good_until__gte=timezone.now()).order_by('good_until')[:nb]
print list_cov
ctx_data = {
'cov' : list_cov,
'request': context['request'],
}
return ctx_data
<commit_msg>Fix: cov list is fetched in one query (before it was fetched twice)<commit_after>from django import template
from ..models import Covoiturage
from django.utils import timezone
register = template.Library()
@register.inclusion_tag('cov/cov_block.html', takes_context=True)
def get_cov(context, nb):
return {
'cov' : Covoiturage.objects.select_related('poster').filter(
good_until__gte=timezone.now()
).order_by('good_until')[:nb],
'request': context['request'],
}
|
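Why the debug print cost an extra query: Django querysets are lazy, and repr() — which print invokes — evaluates a sliced clone of the queryset rather than the queryset itself, so the result cache of the original stays empty and the template iteration triggers a second, identical query. A minimal illustration (model and field names as in the record):

qs = Covoiturage.objects.filter(good_until__gte=timezone.now())

print qs      # repr() runs the query against a clone; qs stays unevaluated
for c in qs:  # second, identical query when the template iterates
    pass

Dropping the print, as the commit does, or forcing evaluation once with list(qs) and reusing that list, keeps it to a single round trip.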
c1dfbc8e8b3ae29436c584d906636ea541dfb6a8
|
apps/storybase_asset/embedable_resource/__init__.py
|
apps/storybase_asset/embedable_resource/__init__.py
|
import re
from exceptions import UrlNotMatched
class EmbedableResource(object):
resource_providers = []
@classmethod
def register(cls, provider_cls):
cls.resource_providers.append(provider_cls)
@classmethod
def get_html(cls, url):
for provider_cls in cls.resource_providers:
provider = provider_cls()
try:
return provider.get_html(url)
except UrlNotMatched:
pass
raise UrlNotMatched
class EmbedableResourceProvider(object):
url_pattern = r''
def match(self, url):
return re.match(self.url_pattern, url) is not None
def get_html(self, url):
raise UrlNotMatched
class GoogleSpreadsheetProvider(EmbedableResourceProvider):
url_pattern = r'^https://docs.google.com/spreadsheet/pub\?key=[0-9a-zA-Z]+'
def get_html(self, url):
if not self.match(url):
raise UrlNotMatched
return "<iframe width='500' height='300' frameborder='0' src='%s&widget=true'></iframe>" % url
EmbedableResource.register(GoogleSpreadsheetProvider)
|
import re
from exceptions import UrlNotMatched
class EmbedableResource(object):
resource_providers = []
@classmethod
def register(cls, provider_cls):
cls.resource_providers.append(provider_cls)
@classmethod
def get_html(cls, url):
for provider_cls in cls.resource_providers:
provider = provider_cls()
try:
return provider.get_html(url)
except UrlNotMatched:
pass
raise UrlNotMatched
class EmbedableResourceProvider(object):
url_pattern = r''
def match(self, url):
return re.match(self.url_pattern, url) is not None
def get_html(self, url):
raise UrlNotMatched
class GoogleSpreadsheetProvider(EmbedableResourceProvider):
url_pattern = r'^https://docs.google.com/spreadsheet/pub\?key=[0-9a-zA-Z]+'
def get_html(self, url, width=500, height=300):
if not self.match(url):
raise UrlNotMatched
return "<iframe width='500' height='300' frameborder='0' src='%s&widget=true'></iframe>" % url
class GoogleMapProvider(EmbedableResourceProvider):
def get_html(self, url, width=425, height=350):
if not self.match(url):
raise UrlNotMatched
return '<iframe width="%d" height="%d" frameborder="0" scrolling="no" marginheight="0" marginwidth="0" src="%s&output=embed"></iframe><br /><small><a href="%s&source=embed" style="color:#0000FF;text-align:left">View Larger Map</a></small>' % (width, height, url, url)
EmbedableResource.register(GoogleSpreadsheetProvider)
EmbedableResource.register(GoogleMapProvider)
|
Allow embedding of Google Docs by URL
|
Allow embedding of Google Docs by URL
|
Python
|
mit
|
denverfoundation/storybase,denverfoundation/storybase,denverfoundation/storybase,denverfoundation/storybase
|
import re
from exceptions import UrlNotMatched
class EmbedableResource(object):
resource_providers = []
@classmethod
def register(cls, provider_cls):
cls.resource_providers.append(provider_cls)
@classmethod
def get_html(cls, url):
for provider_cls in cls.resource_providers:
provider = provider_cls()
try:
return provider.get_html(url)
except UrlNotMatched:
pass
raise UrlNotMatched
class EmbedableResourceProvider(object):
url_pattern = r''
def match(self, url):
return re.match(self.url_pattern, url) is not None
def get_html(self, url):
raise UrlNotMatched
class GoogleSpreadsheetProvider(EmbedableResourceProvider):
url_pattern = r'^https://docs.google.com/spreadsheet/pub\?key=[0-9a-zA-Z]+'
def get_html(self, url):
if not self.match(url):
raise UrlNotMatched
return "<iframe width='500' height='300' frameborder='0' src='%s&widget=true'></iframe>" % url
EmbedableResource.register(GoogleSpreadsheetProvider)
Allow embedding of Google Docs by URL
|
import re
from exceptions import UrlNotMatched
class EmbedableResource(object):
resource_providers = []
@classmethod
def register(cls, provider_cls):
cls.resource_providers.append(provider_cls)
@classmethod
def get_html(cls, url):
for provider_cls in cls.resource_providers:
provider = provider_cls()
try:
return provider.get_html(url)
except UrlNotMatched:
pass
raise UrlNotMatched
class EmbedableResourceProvider(object):
url_pattern = r''
def match(self, url):
return re.match(self.url_pattern, url) is not None
def get_html(self, url):
raise UrlNotMatched
class GoogleSpreadsheetProvider(EmbedableResourceProvider):
url_pattern = r'^https://docs.google.com/spreadsheet/pub\?key=[0-9a-zA-Z]+'
def get_html(self, url, width=500, height=300):
if not self.match(url):
raise UrlNotMatched
return "<iframe width='500' height='300' frameborder='0' src='%s&widget=true'></iframe>" % url
class GoogleMapProvider(EmbedableResourceProvider):
def get_html(self, url, width=425, height=350):
if not self.match(url):
raise UrlNotMatched
return '<iframe width="%d" height="%d" frameborder="0" scrolling="no" marginheight="0" marginwidth="0" src="%s&output=embed"></iframe><br /><small><a href="%s&source=embed" style="color:#0000FF;text-align:left">View Larger Map</a></small>' % (width, height, url, url)
EmbedableResource.register(GoogleSpreadsheetProvider)
EmbedableResource.register(GoogleMapProvider)
|
<commit_before>import re
from exceptions import UrlNotMatched
class EmbedableResource(object):
resource_providers = []
@classmethod
def register(cls, provider_cls):
cls.resource_providers.append(provider_cls)
@classmethod
def get_html(cls, url):
for provider_cls in cls.resource_providers:
provider = provider_cls()
try:
return provider.get_html(url)
except UrlNotMatched:
pass
raise UrlNotMatched
class EmbedableResourceProvider(object):
url_pattern = r''
def match(self, url):
return re.match(self.url_pattern, url) is not None
def get_html(self, url):
raise UrlNotMatched
class GoogleSpreadsheetProvider(EmbedableResourceProvider):
url_pattern = r'^https://docs.google.com/spreadsheet/pub\?key=[0-9a-zA-Z]+'
def get_html(self, url):
if not self.match(url):
raise UrlNotMatched
return "<iframe width='500' height='300' frameborder='0' src='%s&widget=true'></iframe>" % url
EmbedableResource.register(GoogleSpreadsheetProvider)
<commit_msg>Allow embedding of Google Docs by URL<commit_after>
|
import re
from exceptions import UrlNotMatched
class EmbedableResource(object):
resource_providers = []
@classmethod
def register(cls, provider_cls):
cls.resource_providers.append(provider_cls)
@classmethod
def get_html(cls, url):
for provider_cls in cls.resource_providers:
provider = provider_cls()
try:
return provider.get_html(url)
except UrlNotMatched:
pass
raise UrlNotMatched
class EmbedableResourceProvider(object):
url_pattern = r''
def match(self, url):
return re.match(self.url_pattern, url) is not None
def get_html(self, url):
raise UrlNotMatched
class GoogleSpreadsheetProvider(EmbedableResourceProvider):
url_pattern = r'^https://docs.google.com/spreadsheet/pub\?key=[0-9a-zA-Z]+'
def get_html(self, url, width=500, height=300):
if not self.match(url):
raise UrlNotMatched
return "<iframe width='500' height='300' frameborder='0' src='%s&widget=true'></iframe>" % url
class GoogleMapProvider(EmbedableResourceProvider):
def get_html(self, url, width=425, height=350):
if not self.match(url):
raise UrlNotMatched
return '<iframe width="%d" height="%d" frameborder="0" scrolling="no" marginheight="0" marginwidth="0" src="%s&output=embed"></iframe><br /><small><a href="%s&source=embed" style="color:#0000FF;text-align:left">View Larger Map</a></small>' % (width, height, url, url)
EmbedableResource.register(GoogleSpreadsheetProvider)
EmbedableResource.register(GoogleMapProvider)
|
import re
from exceptions import UrlNotMatched
class EmbedableResource(object):
resource_providers = []
@classmethod
def register(cls, provider_cls):
cls.resource_providers.append(provider_cls)
@classmethod
def get_html(cls, url):
for provider_cls in cls.resource_providers:
provider = provider_cls()
try:
return provider.get_html(url)
except UrlNotMatched:
pass
raise UrlNotMatched
class EmbedableResourceProvider(object):
url_pattern = r''
def match(self, url):
return re.match(self.url_pattern, url) is not None
def get_html(self, url):
raise UrlNotMatched
class GoogleSpreadsheetProvider(EmbedableResourceProvider):
url_pattern = r'^https://docs.google.com/spreadsheet/pub\?key=[0-9a-zA-Z]+'
def get_html(self, url):
if not self.match(url):
raise UrlNotMatched
return "<iframe width='500' height='300' frameborder='0' src='%s&widget=true'></iframe>" % url
EmbedableResource.register(GoogleSpreadsheetProvider)
Allow embedding of Google Docs by URL
import re
from exceptions import UrlNotMatched
class EmbedableResource(object):
resource_providers = []
@classmethod
def register(cls, provider_cls):
cls.resource_providers.append(provider_cls)
@classmethod
def get_html(cls, url):
for provider_cls in cls.resource_providers:
provider = provider_cls()
try:
return provider.get_html(url)
except UrlNotMatched:
pass
raise UrlNotMatched
class EmbedableResourceProvider(object):
url_pattern = r''
def match(self, url):
return re.match(self.url_pattern, url) is not None
def get_html(self, url):
raise UrlNotMatched
class GoogleSpreadsheetProvider(EmbedableResourceProvider):
url_pattern = r'^https://docs.google.com/spreadsheet/pub\?key=[0-9a-zA-Z]+'
def get_html(self, url, width=500, height=300):
if not self.match(url):
raise UrlNotMatched
return "<iframe width='500' height='300' frameborder='0' src='%s&widget=true'></iframe>" % url
class GoogleMapProvider(EmbedableResourceProvider):
def get_html(self, url, width=425, height=350):
if not self.match(url):
raise UrlNotMatched
return '<iframe width="%d" height="%d" frameborder="0" scrolling="no" marginheight="0" marginwidth="0" src="%s&output=embed"></iframe><br /><small><a href="%s&source=embed" style="color:#0000FF;text-align:left">View Larger Map</a></small>' % (width, height, url, url)
EmbedableResource.register(GoogleSpreadsheetProvider)
EmbedableResource.register(GoogleMapProvider)
|
<commit_before>import re
from exceptions import UrlNotMatched
class EmbedableResource(object):
resource_providers = []
@classmethod
def register(cls, provider_cls):
cls.resource_providers.append(provider_cls)
@classmethod
def get_html(cls, url):
for provider_cls in cls.resource_providers:
provider = provider_cls()
try:
return provider.get_html(url)
except UrlNotMatched:
pass
raise UrlNotMatched
class EmbedableResourceProvider(object):
url_pattern = r''
def match(self, url):
return re.match(self.url_pattern, url) is not None
def get_html(self, url):
raise UrlNotMatched
class GoogleSpreadsheetProvider(EmbedableResourceProvider):
url_pattern = r'^https://docs.google.com/spreadsheet/pub\?key=[0-9a-zA-Z]+'
def get_html(self, url):
if not self.match(url):
raise UrlNotMatched
return "<iframe width='500' height='300' frameborder='0' src='%s&widget=true'></iframe>" % url
EmbedableResource.register(GoogleSpreadsheetProvider)
<commit_msg>Allow embedding of Google Docs by URL<commit_after>import re
from exceptions import UrlNotMatched
class EmbedableResource(object):
resource_providers = []
@classmethod
def register(cls, provider_cls):
cls.resource_providers.append(provider_cls)
@classmethod
def get_html(cls, url):
for provider_cls in cls.resource_providers:
provider = provider_cls()
try:
return provider.get_html(url)
except UrlNotMatched:
pass
raise UrlNotMatched
class EmbedableResourceProvider(object):
url_pattern = r''
def match(self, url):
return re.match(self.url_pattern, url) is not None
def get_html(self, url):
raise UrlNotMatched
class GoogleSpreadsheetProvider(EmbedableResourceProvider):
url_pattern = r'^https://docs.google.com/spreadsheet/pub\?key=[0-9a-zA-Z]+'
def get_html(self, url, width=500, height=300):
if not self.match(url):
raise UrlNotMatched
return "<iframe width='500' height='300' frameborder='0' src='%s&widget=true'></iframe>" % url
class GoogleMapProvider(EmbedableResourceProvider):
def get_html(self, url, width=425, height=350):
if not self.match(url):
raise UrlNotMatched
return '<iframe width="%d" height="%d" frameborder="0" scrolling="no" marginheight="0" marginwidth="0" src="%s&output=embed"></iframe><br /><small><a href="%s&source=embed" style="color:#0000FF;text-align:left">View Larger Map</a></small>' % (width, height, url, url)
EmbedableResource.register(GoogleSpreadsheetProvider)
EmbedableResource.register(GoogleMapProvider)
|
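A minimal, self-contained sketch of how the provider registry above dispatches a URL (assumptions: Python 3; the classes are re-declared locally rather than imported, and the spreadsheet key is made up). Note that because the base class ships url_pattern = r'', a provider like the record's GoogleMapProvider that never overrides it matches every URL, so registration order decides which provider wins:

import re

class UrlNotMatched(Exception):
    pass

class EmbedableResource:
    resource_providers = []

    @classmethod
    def register(cls, provider_cls):
        cls.resource_providers.append(provider_cls)

    @classmethod
    def get_html(cls, url):
        # First registered provider whose pattern matches wins.
        for provider_cls in cls.resource_providers:
            try:
                return provider_cls().get_html(url)
            except UrlNotMatched:
                pass
        raise UrlNotMatched(url)

class GoogleSpreadsheetProvider:
    url_pattern = r'^https://docs\.google\.com/spreadsheet/pub\?key=[0-9a-zA-Z]+'

    def get_html(self, url, width=500, height=300):
        if re.match(self.url_pattern, url) is None:
            raise UrlNotMatched(url)
        return ("<iframe width='%d' height='%d' frameborder='0' "
                "src='%s&widget=true'></iframe>" % (width, height, url))

EmbedableResource.register(GoogleSpreadsheetProvider)
print(EmbedableResource.get_html(
    'https://docs.google.com/spreadsheet/pub?key=abc123'))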
8f7623c4b09d85c09327c37030fa2328e77853b1
|
qfbv.py
|
qfbv.py
|
from config import *
class QFBV:
def preamble(self):
print "(set-logic QF_BV)"
print ""
# Enumerate all the variables, for each match round.
for i in range(NUMROUNDS):
for j in range(NUMMATCHES):
for k in range(NUMSLOTS):
print "(declare-fun {0} () (_ BitVec {1}))".format(self.project(i, j, k), TEAMBITS)
def project(self, x, y, z):
return "round_{0}_match_{1}_slot_{2}".format(x, y, z)
pass
|
from config import *
import re
class QFBV:
def preamble(self):
print "(set-logic QF_BV)"
print ""
# Enumerate all the variables, for each match round.
for i in range(NUMROUNDS):
for j in range(NUMMATCHES):
for k in range(NUMSLOTS):
print "(declare-fun {0} () (_ BitVec {1}))".format(self.project(i, j, k), TEAMBITS)
def project(self, x, y, z):
return "round_{0}_match_{1}_slot_{2}".format(x, y, z)
pass
def read_variable(self, expr1):
string, = expr1
regex = "round_([0-9]+)_match_([0-9]+)_slot_([0-9]+)"
result = re.match(regex, string)
return int(result.group(1)), int(result.group(2)), int(result.group(3))
|
Read variable data for qf_bv
|
Read variable data for qf_bv
|
Python
|
bsd-2-clause
|
jmorse/numbness
|
from config import *
class QFBV:
def preamble(self):
print "(set-logic QF_BV)"
print ""
# Enumerate all the variables, for each match round.
for i in range(NUMROUNDS):
for j in range(NUMMATCHES):
for k in range(NUMSLOTS):
print "(declare-fun {0} () (_ BitVec {1}))".format(self.project(i, j, k), TEAMBITS)
def project(self, x, y, z):
return "round_{0}_match_{1}_slot_{2}".format(x, y, z)
pass
Read variable data for qf_bv
|
from config import *
import re
class QFBV:
def preamble(self):
print "(set-logic QF_BV)"
print ""
# Enumerate all the variables, for each match round.
for i in range(NUMROUNDS):
for j in range(NUMMATCHES):
for k in range(NUMSLOTS):
print "(declare-fun {0} () (_ BitVec {1}))".format(self.project(i, j, k), TEAMBITS)
def project(self, x, y, z):
return "round_{0}_match_{1}_slot_{2}".format(x, y, z)
pass
def read_variable(self, expr1):
string, = expr1
regex = "round_([0-9]+)_match_([0-9]+)_slot_([0-9]+)"
result = re.match(regex, string)
return int(result.group(1)), int(result.group(2)), int(result.group(3))
|
<commit_before>from config import *
class QFBV:
def preamble(self):
print "(set-logic QF_BV)"
print ""
# Enumerate all the variables, for each match round.
for i in range(NUMROUNDS):
for j in range(NUMMATCHES):
for k in range(NUMSLOTS):
print "(declare-fun {0} () (_ BitVec {1}))".format(self.project(i, j, k), TEAMBITS)
def project(self, x, y, z):
return "round_{0}_match_{1}_slot_{2}".format(x, y, z)
pass
<commit_msg>Read variable data for qf_bv<commit_after>
|
from config import *
import re
class QFBV:
def preamble(self):
print "(set-logic QF_BV)"
print ""
# Enumerate all the variables, for each match round.
for i in range(NUMROUNDS):
for j in range(NUMMATCHES):
for k in range(NUMSLOTS):
print "(declare-fun {0} () (_ BitVec {1}))".format(self.project(i, j, k), TEAMBITS)
def project(self, x, y, z):
return "round_{0}_match_{1}_slot_{2}".format(x, y, z)
pass
def read_variable(self, expr1):
string, = expr1
regex = "round_([0-9]+)_match_([0-9]+)_slot_([0-9]+)"
result = re.match(regex, string)
return int(result.group(1)), int(result.group(2)), int(result.group(3))
|
from config import *
class QFBV:
def preamble(self):
print "(set-logic QF_BV)"
print ""
# Enumerate all the variables, for each match round.
for i in range(NUMROUNDS):
for j in range(NUMMATCHES):
for k in range(NUMSLOTS):
print "(declare-fun {0} () (_ BitVec {1}))".format(self.project(i, j, k), TEAMBITS)
def project(self, x, y, z):
return "round_{0}_match_{1}_slot_{2}".format(x, y, z)
pass
Read variable data for qf_bv
from config import *
import re
class QFBV:
def preamble(self):
print "(set-logic QF_BV)"
print ""
# Enumerate all the variables, for each match round.
for i in range(NUMROUNDS):
for j in range(NUMMATCHES):
for k in range(NUMSLOTS):
print "(declare-fun {0} () (_ BitVec {1}))".format(self.project(i, j, k), TEAMBITS)
def project(self, x, y, z):
return "round_{0}_match_{1}_slot_{2}".format(x, y, z)
pass
def read_variable(self, expr1):
string, = expr1
regex = "round_([0-9]+)_match_([0-9]+)_slot_([0-9]+)"
result = re.match(regex, string)
return int(result.group(1)), int(result.group(2)), int(result.group(3))
|
<commit_before>from config import *
class QFBV:
def preamble(self):
print "(set-logic QF_BV)"
print ""
# Enumerate all the variables, for each match round.
for i in range(NUMROUNDS):
for j in range(NUMMATCHES):
for k in range(NUMSLOTS):
print "(declare-fun {0} () (_ BitVec {1}))".format(self.project(i, j, k), TEAMBITS)
def project(self, x, y, z):
return "round_{0}_match_{1}_slot_{2}".format(x, y, z)
pass
<commit_msg>Read variable data for qf_bv<commit_after>from config import *
import re
class QFBV:
def preamble(self):
print "(set-logic QF_BV)"
print ""
# Enumerate all the variables, for each match round.
for i in range(NUMROUNDS):
for j in range(NUMMATCHES):
for k in range(NUMSLOTS):
print "(declare-fun {0} () (_ BitVec {1}))".format(self.project(i, j, k), TEAMBITS)
def project(self, x, y, z):
return "round_{0}_match_{1}_slot_{2}".format(x, y, z)
pass
def read_variable(self, expr1):
string, = expr1
regex = "round_([0-9]+)_match_([0-9]+)_slot_([0-9]+)"
result = re.match(regex, string)
return int(result.group(1)), int(result.group(2)), int(result.group(3))
|
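The two helpers above form a round trip: project() encodes a (round, match, slot) triple into a solver variable name, and read_variable() parses it back. A small sketch of that property (assumption: Python 3; the record's read_variable unpacks a one-element s-expression node via `string, = expr1`, whereas this sketch takes the name string directly):

import re

def project(x, y, z):
    return "round_{0}_match_{1}_slot_{2}".format(x, y, z)

def read_variable(name):
    # Parse the encoded name back into its (round, match, slot) triple.
    result = re.match(r"round_([0-9]+)_match_([0-9]+)_slot_([0-9]+)", name)
    return int(result.group(1)), int(result.group(2)), int(result.group(3))

assert read_variable(project(2, 5, 1)) == (2, 5, 1)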
626c74d727140646d6123e2d86a828401d87abe0
|
spam.py
|
spam.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from sklearn.cross_validation import train_test_split
from spam.common import DATASET_META
from spam.common.utils import get_file_path_list
from spam.preprocess import Preprocess
file_path_list = get_file_path_list(DATASET_META)
# transform list of tuple into two list
# e.g. [('/path/to/file', 'spam')] ==> ['path/to/file'], ['spam']
path, classification = zip(*file_path_list)
# split the data into unlabeled labeled
unlabeled_path, labeled_path, \
unlabeled_class, labeled_class = train_test_split(
path,
classification,
test_size=0.1,
random_state=0,
)
# Preprocess
preprocess = Preprocess()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pandas as pd
from sklearn.cross_validation import train_test_split
from spam.common import DATASET_META
from spam.common.utils import get_file_path_list
from spam.preprocess import preprocess
file_path_list = get_file_path_list(DATASET_META)
# transform list of tuple into two list
# e.g. [('/path/to/file', 'spam')] ==> ['path/to/file'], ['spam']
path, classification = zip(*file_path_list)
# split the data into unlabeled and labeled data
unlabeled_path, labeled_path, \
_, labeled_class = train_test_split(
path,
classification,
test_size=0.1,
random_state=0,
)
# split data into train and test data
train_path, test_path, \
train_class, test_class = train_test_split(
labeled_path,
labeled_class,
test_size=0.2,
random_state=0,
)
unlabeled_data = pd.DataFrame(
data=[preprocess.read_email(path) for path in unlabeled_path],
columns=['email'],
)
train_data = pd.DataFrame(
data={
'email': [preprocess.read_email(path) for path in train_path],
'class': [1 if cl == 'spam' else 0 for cl in train_class]
},
columns=['email', 'class'],
)
test_data = pd.DataFrame(
data={
'email': [preprocess.read_email(path) for path in test_path],
'class': [1 if cl == 'spam' else 0 for cl in test_class]
},
columns=['email', 'class'],
)
unlabeled_data.to_csv('unlabel_data.csv')
train_data.to_csv('train_data.csv')
test_data.to_csv('test_data.csv')
|
Add pandas to generate csv.
|
Add pandas to generate csv.
|
Python
|
mit
|
benigls/spam,benigls/spam
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from sklearn.cross_validation import train_test_split
from spam.common import DATASET_META
from spam.common.utils import get_file_path_list
from spam.preprocess import Preprocess
file_path_list = get_file_path_list(DATASET_META)
# transform list of tuple into two list
# e.g. [('/path/to/file', 'spam')] ==> ['path/to/file'], ['spam']
path, classification = zip(*file_path_list)
# split the data into unlabeled labeled
unlabeled_path, labeled_path, \
unlabeled_class, labeled_class = train_test_split(
path,
classification,
test_size=0.1,
random_state=0,
)
# Preprocess
preprocess = Preprocess()
Add pandas to generate csv.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pandas as pd
from sklearn.cross_validation import train_test_split
from spam.common import DATASET_META
from spam.common.utils import get_file_path_list
from spam.preprocess import preprocess
file_path_list = get_file_path_list(DATASET_META)
# transform list of tuple into two list
# e.g. [('/path/to/file', 'spam')] ==> ['path/to/file'], ['spam']
path, classification = zip(*file_path_list)
# split the data into unlabeled and labeled data
unlabeled_path, labeled_path, \
_, labeled_class = train_test_split(
path,
classification,
test_size=0.1,
random_state=0,
)
# split data into train and test data
train_path, test_path, \
train_class, test_class = train_test_split(
labeled_path,
labeled_class,
test_size=0.2,
random_state=0,
)
unlabeled_data = pd.DataFrame(
data=[preprocess.read_email(path) for path in unlabeled_path],
columns=['email'],
)
train_data = pd.DataFrame(
data={
'email': [preprocess.read_email(path) for path in train_path],
'class': [1 if cl == 'spam' else 0 for cl in train_class]
},
columns=['email', 'class'],
)
test_data = pd.DataFrame(
data={
'email': [preprocess.read_email(path) for path in test_path],
'class': [1 if cl == 'spam' else 0 for cl in test_class]
},
columns=['email', 'class'],
)
unlabeled_data.to_csv('unlabel_data.csv')
train_data.to_csv('train_data.csv')
test_data.to_csv('test_data.csv')
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from sklearn.cross_validation import train_test_split
from spam.common import DATASET_META
from spam.common.utils import get_file_path_list
from spam.preprocess import Preprocess
file_path_list = get_file_path_list(DATASET_META)
# transform list of tuple into two list
# e.g. [('/path/to/file', 'spam')] ==> ['path/to/file'], ['spam']
path, classification = zip(*file_path_list)
# split the data into unlabeled labeled
unlabeled_path, labeled_path, \
unlabeled_class, labeled_class = train_test_split(
path,
classification,
test_size=0.1,
random_state=0,
)
# Preprocess
preprocess = Preprocess()
<commit_msg>Add pandas to generate csv.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pandas as pd
from sklearn.cross_validation import train_test_split
from spam.common import DATASET_META
from spam.common.utils import get_file_path_list
from spam.preprocess import preprocess
file_path_list = get_file_path_list(DATASET_META)
# transform list of tuple into two list
# e.g. [('/path/to/file', 'spam')] ==> ['path/to/file'], ['spam']
path, classification = zip(*file_path_list)
# split the data into unlabeled and labeled data
unlabeled_path, labeled_path, \
_, labeled_class = train_test_split(
path,
classification,
test_size=0.1,
random_state=0,
)
# split data into train and test data
train_path, test_path, \
train_class, test_class = train_test_split(
labeled_path,
labeled_class,
test_size=0.2,
random_state=0,
)
unlabeled_data = pd.DataFrame(
data=[preprocess.read_email(path) for path in unlabeled_path],
columns=['email'],
)
train_data = pd.DataFrame(
data={
'email': [preprocess.read_email(path) for path in train_path],
'class': [1 if cl == 'spam' else 0 for cl in train_class]
},
columns=['email', 'class'],
)
test_data = pd.DataFrame(
data={
'email': [preprocess.read_email(path) for path in test_path],
'class': [1 if cl == 'spam' else 0 for cl in test_class]
},
columns=['email', 'class'],
)
unlabeled_data.to_csv('unlabel_data.csv')
train_data.to_csv('train_data.csv')
test_data.to_csv('test_data.csv')
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from sklearn.cross_validation import train_test_split
from spam.common import DATASET_META
from spam.common.utils import get_file_path_list
from spam.preprocess import Preprocess
file_path_list = get_file_path_list(DATASET_META)
# transform list of tuple into two list
# e.g. [('/path/to/file', 'spam')] ==> ['path/to/file'], ['spam']
path, classification = zip(*file_path_list)
# split the data into unlabeled labeled
unlabeled_path, labeled_path, \
unlabeled_class, labeled_class = train_test_split(
path,
classification,
test_size=0.1,
random_state=0,
)
# Preprocess
preprocess = Preprocess()
Add pandas to generate csv.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pandas as pd
from sklearn.cross_validation import train_test_split
from spam.common import DATASET_META
from spam.common.utils import get_file_path_list
from spam.preprocess import preprocess
file_path_list = get_file_path_list(DATASET_META)
# transform list of tuple into two list
# e.g. [('/path/to/file', 'spam')] ==> ['path/to/file'], ['spam']
path, classification = zip(*file_path_list)
# split the data into unlabeled and labeled data
unlabeled_path, labeled_path, \
_, labeled_class = train_test_split(
path,
classification,
test_size=0.1,
random_state=0,
)
# split data into train and test data
train_path, test_path, \
train_class, test_class = train_test_split(
labeled_path,
labeled_class,
test_size=0.2,
random_state=0,
)
unlabeled_data = pd.DataFrame(
data=[preprocess.read_email(path) for path in unlabeled_path],
columns=['email'],
)
train_data = pd.DataFrame(
data={
'email': [preprocess.read_email(path) for path in train_path],
'class': [1 if cl == 'spam' else 0 for cl in train_class]
},
columns=['email', 'class'],
)
test_data = pd.DataFrame(
data={
'email': [preprocess.read_email(path) for path in test_path],
'class': [1 if cl == 'spam' else 0 for cl in test_class]
},
columns=['email', 'class'],
)
unlabeled_data.to_csv('unlabel_data.csv')
train_data.to_csv('train_data.csv')
test_data.to_csv('test_data.csv')
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from sklearn.cross_validation import train_test_split
from spam.common import DATASET_META
from spam.common.utils import get_file_path_list
from spam.preprocess import Preprocess
file_path_list = get_file_path_list(DATASET_META)
# transform list of tuple into two list
# e.g. [('/path/to/file', 'spam')] ==> ['path/to/file'], ['spam']
path, classification = zip(*file_path_list)
# split the data into unlabeled labeled
unlabeled_path, labeled_path, \
unlabeled_class, labeled_class = train_test_split(
path,
classification,
test_size=0.1,
random_state=0,
)
# Preprocess
preprocess = Preprocess()
<commit_msg>Add pandas to generate csv.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pandas as pd
from sklearn.cross_validation import train_test_split
from spam.common import DATASET_META
from spam.common.utils import get_file_path_list
from spam.preprocess import preprocess
file_path_list = get_file_path_list(DATASET_META)
# transform list of tuple into two list
# e.g. [('/path/to/file', 'spam')] ==> ['path/to/file'], ['spam']
path, classification = zip(*file_path_list)
# split the data into unlabeled and labeled data
unlabeled_path, labeled_path, \
_, labeled_class = train_test_split(
path,
classification,
test_size=0.1,
random_state=0,
)
# split data into train and test data
train_path, test_path, \
train_class, test_class = train_test_split(
labeled_path,
labeled_class,
test_size=0.2,
random_state=0,
)
unlabeled_data = pd.DataFrame(
data=[preprocess.read_email(path) for path in unlabeled_path],
columns=['email'],
)
train_data = pd.DataFrame(
data={
'email': [preprocess.read_email(path) for path in train_path],
'class': [1 if cl == 'spam' else 0 for cl in train_class]
},
columns=['email', 'class'],
)
test_data = pd.DataFrame(
data={
'email': [preprocess.read_email(path) for path in test_path],
'class': [1 if cl == 'spam' else 0 for cl in test_class]
},
columns=['email', 'class'],
)
unlabeled_data.to_csv('unlabel_data.csv')
train_data.to_csv('train_data.csv')
test_data.to_csv('test_data.csv')
|
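The record above imports train_test_split from sklearn.cross_validation, which was deprecated in scikit-learn 0.18 and later removed; the same two-stage split works with sklearn.model_selection. A toy sketch of the pattern (assumptions: Python 3; the paths and labels are made up, the first split uses test_size=0.5 only so the toy labeled set stays large enough to split again, and the 'email' column is a stand-in for preprocess.read_email(path)):

import pandas as pd
from sklearn.model_selection import train_test_split  # moved here in 0.18

# Toy stand-ins for the real file paths and spam/ham labels.
path = ['mail%02d.txt' % i for i in range(20)]
classification = ['spam' if i % 2 else 'ham' for i in range(20)]

# Stage 1: carve off a labeled subset; the rest is treated as unlabeled.
unlabeled_path, labeled_path, _, labeled_class = train_test_split(
    path, classification, test_size=0.5, random_state=0)
# Stage 2: split the labeled subset into train and test.
train_path, test_path, train_class, test_class = train_test_split(
    labeled_path, labeled_class, test_size=0.2, random_state=0)

train_data = pd.DataFrame({
    'email': train_path,  # stand-in for preprocess.read_email(path)
    'class': [1 if cl == 'spam' else 0 for cl in train_class],
}, columns=['email', 'class'])
print(train_data)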
987eb13b24fcb6b89b9bbe08a9bc73f40b85538c
|
onirim/card/_door.py
|
onirim/card/_door.py
|
from onirim.card._base import ColorCard
from onirim.card._location import LocationKind
def _openable(door_card, card):
"""Check if the door can be opened by another card."""
return card.kind == LocationKind.key and door_card.color == card.color
def _may_open(door_card, content):
"""Check if the door may be opened by agent."""
return any(_openable(door_card, card) for card in content.hand)
class _Door(ColorCard):
def drawn(self, agent, content):
# TODO discard that key card.
do_open = agent.open_door(content, self) if _may_open(self, content) else False
if do_open:
content.opened.append(self)
else:
content.deck.put_limbo(self)
def door(color):
"""Make a door card."""
return _Door(color)
|
from onirim.card._base import ColorCard
from onirim.card._location import LocationKind
def _is_openable(door_card, card):
"""Check if the door can be opened by another card."""
return card.kind == LocationKind.key and door_card.color == card.color
def _may_open(door_card, content):
"""Check if the door may be opened by agent."""
return any(_is_openable(door_card, card) for card in content.hand)
class _Door(ColorCard):
def drawn(self, agent, content):
do_open = agent.open_door(content, self) if _may_open(self, content) else False
if do_open:
content.opened.append(self)
for card in content.hand:
if _is_openable(self, card):
content.hand.remove(card)
break
else:
content.deck.put_limbo(self)
def door(color):
"""Make a door card."""
return _Door(color)
|
Remove key while opening a door
|
Remove key while opening a door
|
Python
|
mit
|
cwahbong/onirim-py
|
from onirim.card._base import ColorCard
from onirim.card._location import LocationKind
def _openable(door_card, card):
"""Check if the door can be opened by another card."""
return card.kind == LocationKind.key and door_card.color == card.color
def _may_open(door_card, content):
"""Check if the door may be opened by agent."""
return any(_openable(door_card, card) for card in content.hand)
class _Door(ColorCard):
def drawn(self, agent, content):
# TODO discard that key card.
do_open = agent.open_door(content, self) if _may_open(self, content) else False
if do_open:
content.opened.append(self)
else:
content.deck.put_limbo(self)
def door(color):
"""Make a door card."""
return _Door(color)
Remove key while opening a door
|
from onirim.card._base import ColorCard
from onirim.card._location import LocationKind
def _is_openable(door_card, card):
"""Check if the door can be opened by another card."""
return card.kind == LocationKind.key and door_card.color == card.color
def _may_open(door_card, content):
"""Check if the door may be opened by agent."""
return any(_is_openable(door_card, card) for card in content.hand)
class _Door(ColorCard):
def drawn(self, agent, content):
do_open = agent.open_door(content, self) if _may_open(self, content) else False
if do_open:
content.opened.append(self)
for card in content.hand:
if _is_openable(self, card):
content.hand.remove(card)
break
else:
content.deck.put_limbo(self)
def door(color):
"""Make a door card."""
return _Door(color)
|
<commit_before>from onirim.card._base import ColorCard
from onirim.card._location import LocationKind
def _openable(door_card, card):
"""Check if the door can be opened by another card."""
return card.kind == LocationKind.key and door_card.color == card.color
def _may_open(door_card, content):
"""Check if the door may be opened by agent."""
return any(_openable(door_card, card) for card in content.hand)
class _Door(ColorCard):
def drawn(self, agent, content):
# TODO discard that key card.
do_open = agent.open_door(content, self) if _may_open(self, content) else False
if do_open:
content.opened.append(self)
else:
content.deck.put_limbo(self)
def door(color):
"""Make a door card."""
return _Door(color)
<commit_msg>Remove key while opening a door<commit_after>
|
from onirim.card._base import ColorCard
from onirim.card._location import LocationKind
def _is_openable(door_card, card):
"""Check if the door can be opened by another card."""
return card.kind == LocationKind.key and door_card.color == card.color
def _may_open(door_card, content):
"""Check if the door may be opened by agent."""
return any(_is_openable(door_card, card) for card in content.hand)
class _Door(ColorCard):
def drawn(self, agent, content):
do_open = agent.open_door(content, self) if _may_open(self, content) else False
if do_open:
content.opened.append(self)
for card in content.hand:
if _is_openable(self, card):
content.hand.remove(card)
break
else:
content.deck.put_limbo(self)
def door(color):
"""Make a door card."""
return _Door(color)
|
from onirim.card._base import ColorCard
from onirim.card._location import LocationKind
def _openable(door_card, card):
"""Check if the door can be opened by another card."""
return card.kind == LocationKind.key and door_card.color == card.color
def _may_open(door_card, content):
"""Check if the door may be opened by agent."""
return any(_openable(door_card, card) for card in content.hand)
class _Door(ColorCard):
def drawn(self, agent, content):
# TODO discard that key card.
do_open = agent.open_door(content, self) if _may_open(self, content) else False
if do_open:
content.opened.append(self)
else:
content.deck.put_limbo(self)
def door(color):
"""Make a door card."""
return _Door(color)
Remove key while opening a door
from onirim.card._base import ColorCard
from onirim.card._location import LocationKind
def _is_openable(door_card, card):
"""Check if the door can be opened by another card."""
return card.kind == LocationKind.key and door_card.color == card.color
def _may_open(door_card, content):
"""Check if the door may be opened by agent."""
return any(_is_openable(door_card, card) for card in content.hand)
class _Door(ColorCard):
def drawn(self, agent, content):
do_open = agent.open_door(content, self) if _may_open(self, content) else False
if do_open:
content.opened.append(self)
for card in content.hand:
if _is_openable(self, card):
content.hand.remove(card)
break
else:
content.deck.put_limbo(self)
def door(color):
"""Make a door card."""
return _Door(color)
|
<commit_before>from onirim.card._base import ColorCard
from onirim.card._location import LocationKind
def _openable(door_card, card):
"""Check if the door can be opened by another card."""
return card.kind == LocationKind.key and door_card.color == card.color
def _may_open(door_card, content):
"""Check if the door may be opened by agent."""
return any(_openable(door_card, card) for card in content.hand)
class _Door(ColorCard):
def drawn(self, agent, content):
# TODO discard that key card.
do_open = agent.open_door(content, self) if _may_open(self, content) else False
if do_open:
content.opened.append(self)
else:
content.deck.put_limbo(self)
def door(color):
"""Make a door card."""
return _Door(color)
<commit_msg>Remove key while opening a door<commit_after>from onirim.card._base import ColorCard
from onirim.card._location import LocationKind
def _is_openable(door_card, card):
"""Check if the door can be opened by another card."""
return card.kind == LocationKind.key and door_card.color == card.color
def _may_open(door_card, content):
"""Check if the door may be opened by agent."""
return any(_is_openable(door_card, card) for card in content.hand)
class _Door(ColorCard):
def drawn(self, agent, content):
do_open = agent.open_door(content, self) if _may_open(self, content) else False
if do_open:
content.opened.append(self)
for card in content.hand:
if _is_openable(self, card):
content.hand.remove(card)
break
else:
content.deck.put_limbo(self)
def door(color):
"""Make a door card."""
return _Door(color)
|
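The drawn() loop above (with its call corrected to the renamed _is_openable) is the standard remove-first-match-then-break idiom for discarding exactly one matching card. An isolated sketch of that idiom (assumptions: Python 3; Card and discard_first are hypothetical stand-ins for the record's card objects, not part of the onirim codebase):

from collections import namedtuple

Card = namedtuple('Card', ['kind', 'color'])

def discard_first(hand, predicate):
    # Remove and return the first card satisfying predicate, or None.
    for card in hand:
        if predicate(card):
            hand.remove(card)
            return card
    return None

hand = [Card('sun', 'red'), Card('key', 'red'), Card('key', 'blue')]
used = discard_first(hand, lambda c: c.kind == 'key' and c.color == 'red')
assert used == Card('key', 'red') and len(hand) == 2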
d7343ac93dbfb62b3f425711b8df9929a1d6f1ad
|
malcolm/core/mapmeta.py
|
malcolm/core/mapmeta.py
|
from collections import OrderedDict
from loggable import Loggable
from malcolm.core.attributemeta import AttributeMeta
class MapMeta(Loggable):
"""A meta object to store a set of attribute metas"""
def __init__(self, name):
super(MapMeta, self).__init__(logger_name=name)
self.name = name
self.elements = OrderedDict({})
def add_element(self, attribute_meta, required=False):
"""
Add an element, stating whether it is required.
Args:
attribute_meta(AttributeMeta): Attribute instance to store
required(bool): Whether attribute is required or optional
"""
self.elements[attribute_meta.name] = (attribute_meta, required)
|
from collections import OrderedDict
from loggable import Loggable
from malcolm.core.attributemeta import AttributeMeta
class MapMeta(Loggable):
"""An object containing a set of AttributeMeta objects"""
def __init__(self, name):
super(MapMeta, self).__init__(logger_name=name)
self.name = name
self.elements = OrderedDict()
def add_element(self, attribute_meta, required=False):
"""
Add an element, stating whether it is required.
Args:
attribute_meta(AttributeMeta): Attribute instance to store
required(bool): Whether attribute is required or optional
"""
self.elements[attribute_meta.name] = (attribute_meta, required)
|
Remove unnecessary dict definition, clarify doc string
|
Remove unnecessary dict definition, clarify doc string
|
Python
|
apache-2.0
|
dls-controls/pymalcolm,dls-controls/pymalcolm,dls-controls/pymalcolm
|
from collections import OrderedDict
from loggable import Loggable
from malcolm.core.attributemeta import AttributeMeta
class MapMeta(Loggable):
"""A meta object to store a set of attribute metas"""
def __init__(self, name):
super(MapMeta, self).__init__(logger_name=name)
self.name = name
self.elements = OrderedDict({})
def add_element(self, attribute_meta, required=False):
"""
Add an element, stating whether it is required.
Args:
attribute_meta(AttributeMeta): Attribute instance to store
required(bool): Whether attribute is required or optional
"""
self.elements[attribute_meta.name] = (attribute_meta, required)
Remove unnecessary dict definition, clarify doc string
|
from collections import OrderedDict
from loggable import Loggable
from malcolm.core.attributemeta import AttributeMeta
class MapMeta(Loggable):
"""An object containing a set of AttributeMeta objects"""
def __init__(self, name):
super(MapMeta, self).__init__(logger_name=name)
self.name = name
self.elements = OrderedDict()
def add_element(self, attribute_meta, required=False):
"""
Add an element, stating whether it is required.
Args:
attribute_meta(AttributeMeta): Attribute instance to store
required(bool): Whether attribute is required or optional
"""
self.elements[attribute_meta.name] = (attribute_meta, required)
|
<commit_before>from collections import OrderedDict
from loggable import Loggable
from malcolm.core.attributemeta import AttributeMeta
class MapMeta(Loggable):
"""A meta object to store a set of attribute metas"""
def __init__(self, name):
super(MapMeta, self).__init__(logger_name=name)
self.name = name
self.elements = OrderedDict({})
def add_element(self, attribute_meta, required=False):
"""
Add an element, stating whether it is required.
Args:
attribute_meta(AttributeMeta): Attribute instance to store
required(bool): Whether attribute is required or optional
"""
self.elements[attribute_meta.name] = (attribute_meta, required)
<commit_msg>Remove unnecessary dict definition, clarify doc string<commit_after>
|
from collections import OrderedDict
from loggable import Loggable
from malcolm.core.attributemeta import AttributeMeta
class MapMeta(Loggable):
"""An object containing a set of AttributeMeta objects"""
def __init__(self, name):
super(MapMeta, self).__init__(logger_name=name)
self.name = name
self.elements = OrderedDict()
def add_element(self, attribute_meta, required=False):
"""
Add an element, stating whether it is required.
Args:
attribute_meta(AttributeMeta): Attribute instance to store
required(bool): Whether attribute is required or optional
"""
self.elements[attribute_meta.name] = (attribute_meta, required)
|
from collections import OrderedDict
from loggable import Loggable
from malcolm.core.attributemeta import AttributeMeta
class MapMeta(Loggable):
"""A meta object to store a set of attribute metas"""
def __init__(self, name):
super(MapMeta, self).__init__(logger_name=name)
self.name = name
self.elements = OrderedDict({})
def add_element(self, attribute_meta, required=False):
"""
Add an element, stating whether it is required.
Args:
attribute_meta(AttributeMeta): Attribute instance to store
required(bool): Whether attribute is required or optional
"""
self.elements[attribute_meta.name] = (attribute_meta, required)
Remove unnecessary dict definition, clarify doc string
from collections import OrderedDict
from loggable import Loggable
from malcolm.core.attributemeta import AttributeMeta
class MapMeta(Loggable):
"""An object containing a set of AttributeMeta objects"""
def __init__(self, name):
super(MapMeta, self).__init__(logger_name=name)
self.name = name
self.elements = OrderedDict()
def add_element(self, attribute_meta, required=False):
"""
Add an element, stating whether it is required.
Args:
attribute_meta(AttributeMeta): Attribute instance to store
required(bool): Whether attribute is required or optional
"""
self.elements[attribute_meta.name] = (attribute_meta, required)
|
<commit_before>from collections import OrderedDict
from loggable import Loggable
from malcolm.core.attributemeta import AttributeMeta
class MapMeta(Loggable):
"""A meta object to store a set of attribute metas"""
def __init__(self, name):
super(MapMeta, self).__init__(logger_name=name)
self.name = name
self.elements = OrderedDict({})
def add_element(self, attribute_meta, required=False):
"""
Add an element, stating whether it is required.
Args:
attribute_meta(AttributeMeta): Attribute instance to store
required(bool): Whether attribute is required or optional
"""
self.elements[attribute_meta.name] = (attribute_meta, required)
<commit_msg>Remove unnecessary dict definition, clarify doc string<commit_after>from collections import OrderedDict
from loggable import Loggable
from malcolm.core.attributemeta import AttributeMeta
class MapMeta(Loggable):
"""An object containing a set of AttributeMeta objects"""
def __init__(self, name):
super(MapMeta, self).__init__(logger_name=name)
self.name = name
self.elements = OrderedDict()
def add_element(self, attribute_meta, required=False):
"""
Add an element, stating whether it is required.
Args:
attribute_meta(AttributeMeta): Attribute instance to store
required(bool): Whether attribute is required or optional
"""
self.elements[attribute_meta.name] = (attribute_meta, required)
|
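A sketch of how the (attribute_meta, required) pairs stored above might be consumed downstream (assumptions: Python 3; MapMeta is reproduced minimally, Attr is a namedtuple stand-in for AttributeMeta, and the validate() helper is hypothetical, not part of the pymalcolm record):

from collections import OrderedDict, namedtuple

Attr = namedtuple('Attr', ['name'])  # stand-in for AttributeMeta

class MapMeta:
    def __init__(self, name):
        self.name = name
        self.elements = OrderedDict()

    def add_element(self, attribute_meta, required=False):
        self.elements[attribute_meta.name] = (attribute_meta, required)

    def validate(self, values):
        # Every element flagged required must be present in the mapping.
        missing = [name for name, (_, required) in self.elements.items()
                   if required and name not in values]
        if missing:
            raise KeyError('missing required elements: %s' % ', '.join(missing))

meta = MapMeta('demo')
meta.add_element(Attr('exposure'), required=True)
meta.add_element(Attr('gain'))
meta.validate({'exposure': 0.1})  # passes; omitting 'exposure' would raise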
8e2187d2519b4008a36f5c85910b7e7e2efc16f9
|
braid/config.py
|
braid/config.py
|
"""
Support for multiple environments based on python configuration files.
"""
from __future__ import print_function, absolute_import
import os
from twisted.python.filepath import FilePath
from fabric.api import env, task
from braid.settings import ENVIRONMENTS
CONFIG_DIRS = [
'~/.config/braid',
]
def loadEnvironmentConfig(envFile):
"""
Loads configuration directives for the specified environment into Fabric's
C{env} variable.
This function tries to load a python module from each specified directory
and stores all of its public uppercase attributes as attributes of Fabric's
environment (all attribute names will be lowercased).
"""
envName = os.path.splitext(envFile.basename())[0]
ENVIRONMENTS.setdefault(envName, {})
glob = { '__file__': envFile.path }
exec envFile.getContent() in glob
ENVIRONMENTS[envName].update(glob['ENVIRONMENT'])
def loadEnvironments(directories=CONFIG_DIRS):
for directory in directories:
confDir = FilePath(os.path.expanduser(directory))
for envFile in confDir.globChildren('*.env'):
loadEnvironmentConfig(envFile)
loadEnvironments()
def environment(envName):
"""
Load the passed environment configuration.
This task can be invoked before executing the desired Fabric action.
"""
env.update(ENVIRONMENTS[envName])
for envName in ENVIRONMENTS:
globals()[envName] = task(name=envName)(lambda: environment(envName))
|
"""
Support for multiple environments based on python configuration files.
"""
from __future__ import print_function, absolute_import
import os
from twisted.python.filepath import FilePath
from fabric.api import env, task
from braid.settings import ENVIRONMENTS
CONFIG_DIRS = [
'~/.config/braid',
]
#FIXME: How to handle module level initialization here?
def loadEnvironmentConfig(envFile):
"""
Loads configuration directives for the specified environment into Fabric's
C{env} variable.
This function tries to load a python module from each specified directory
and stores all of its public uppercase attributes as attributes of Fabric's
environment (all attribute names will be lowercased).
"""
envName = os.path.splitext(envFile.basename())[0]
ENVIRONMENTS.setdefault(envName, {})
glob = { '__file__': envFile.path }
exec envFile.getContent() in glob
ENVIRONMENTS[envName].update(glob['ENVIRONMENT'])
def loadEnvironments(directories=CONFIG_DIRS):
for directory in directories:
confDir = FilePath(os.path.expanduser(directory))
for envFile in confDir.globChildren('*.env'):
loadEnvironmentConfig(envFile)
loadEnvironments()
def environment(envName):
"""
Load the passed environment configuration.
This task can be invoked before executing the desired Fabric action.
"""
env.update(ENVIRONMENTS[envName])
env['environment'] = envName
for envName in ENVIRONMENTS:
globals()[envName] = task(name=envName)(lambda: environment(envName))
|
Put environment name into environment.
|
Put environment name into environment.
|
Python
|
mit
|
alex/braid,alex/braid
|
"""
Support for multiple environments based on python configuration files.
"""
from __future__ import print_function, absolute_import
import os
from twisted.python.filepath import FilePath
from fabric.api import env, task
from braid.settings import ENVIRONMENTS
CONFIG_DIRS = [
'~/.config/braid',
]
def loadEnvironmentConfig(envFile):
"""
Loads configuration directives for the specified environment into Fabric's
C{env} variable.
This function tries to load a python module from each specified directory
and stores all of its public uppercase attributes as attributes of Fabric's
environment (all attribute names will be lowercased).
"""
envName = os.path.splitext(envFile.basename())[0]
ENVIRONMENTS.setdefault(envName, {})
glob = { '__file__': envFile.path }
exec envFile.getContent() in glob
ENVIRONMENTS[envName].update(glob['ENVIRONMENT'])
def loadEnvironments(directories=CONFIG_DIRS):
for directory in directories:
confDir = FilePath(os.path.expanduser(directory))
for envFile in confDir.globChildren('*.env'):
loadEnvironmentConfig(envFile)
loadEnvironments()
def environment(envName):
"""
Load the passed environment configuration.
This task can be invoked before executing the desired Fabric action.
"""
env.update(ENVIRONMENTS[envName])
for envName in ENVIRONMENTS:
globals()[envName] = task(name=envName)(lambda: environment(envName))
Put environment name into environment.
|
"""
Support for multiple environments based on python configuration files.
"""
from __future__ import print_function, absolute_import
import os
from twisted.python.filepath import FilePath
from fabric.api import env, task
from braid.settings import ENVIRONMENTS
CONFIG_DIRS = [
'~/.config/braid',
]
#FIXME: How to handle module level initialization here?
def loadEnvironmentConfig(envFile):
"""
Loads configuration directives for the specified environment into Fabric's
C{env} variable.
This function tries to load a python module from each specified directory
and stores all of its public uppercase attributes as attributes of Fabric's
environment (all attribute names will be lowercased).
"""
envName = os.path.splitext(envFile.basename())[0]
ENVIRONMENTS.setdefault(envName, {})
glob = { '__file__': envFile.path }
exec envFile.getContent() in glob
ENVIRONMENTS[envName].update(glob['ENVIRONMENT'])
def loadEnvironments(directories=CONFIG_DIRS):
for directory in directories:
confDir = FilePath(os.path.expanduser(directory))
for envFile in confDir.globChildren('*.env'):
loadEnvironmentConfig(envFile)
loadEnvironments()
def environment(envName):
"""
Load the passed environment configuration.
This task can be invoked before executing the desired Fabric action.
"""
env.update(ENVIRONMENTS[envName])
env['environment'] = envName
for envName in ENVIRONMENTS:
globals()[envName] = task(name=envName)(lambda: environment(envName))
|
<commit_before>"""
Support for multiple environments based on python configuration files.
"""
from __future__ import print_function, absolute_import
import os
from twisted.python.filepath import FilePath
from fabric.api import env, task
from braid.settings import ENVIRONMENTS
CONFIG_DIRS = [
'~/.config/braid',
]
def loadEnvironmentConfig(envFile):
"""
Loads configuration directives for the specified environment into Fabric's
C{env} variable.
This function tries to load a python module from each specified directory
and stores all of its public uppercase attributes as attributes of Fabric's
environment (all attribute names will be lowercased).
"""
envName = os.path.splitext(envFile.basename())[0]
ENVIRONMENTS.setdefault(envName, {})
glob = { '__file__': envFile.path }
exec envFile.getContent() in glob
ENVIRONMENTS[envName].update(glob['ENVIRONMENT'])
def loadEnvironments(directories=CONFIG_DIRS):
for directory in directories:
confDir = FilePath(os.path.expanduser(directory))
for envFile in confDir.globChildren('*.env'):
loadEnvironmentConfig(envFile)
loadEnvironments()
def environment(envName):
"""
Load the passed environment configuration.
This task can be invoked before executing the desired Fabric action.
"""
env.update(ENVIRONMENTS[envName])
for envName in ENVIRONMENTS:
globals()[envName] = task(name=envName)(lambda: environment(envName))
<commit_msg>Put environment name into environment.<commit_after>
|
"""
Support for multiple environments based on python configuration files.
"""
from __future__ import print_function, absolute_import
import os
from twisted.python.filepath import FilePath
from fabric.api import env, task
from braid.settings import ENVIRONMENTS
CONFIG_DIRS = [
'~/.config/braid',
]
#FIXME: How to handle module level initialization here?
def loadEnvironmentConfig(envFile):
"""
Loads configuration directives for the specified environment into Fabric's
C{env} variable.
This function tries to load a python module from each specified directory
and stores all of its public uppercase attributes as attributes of Fabric's
environment (all attribute names will be lowercased).
"""
envName = os.path.splitext(envFile.basename())[0]
ENVIRONMENTS.setdefault(envName, {})
glob = { '__file__': envFile.path }
exec envFile.getContent() in glob
ENVIRONMENTS[envName].update(glob['ENVIRONMENT'])
def loadEnvironments(directories=CONFIG_DIRS):
for directory in directories:
confDir = FilePath(os.path.expanduser(directory))
for envFile in confDir.globChildren('*.env'):
loadEnvironmentConfig(envFile)
loadEnvironments()
def environment(envName):
"""
Load the passed environment configuration.
This task can be invoked before executing the desired Fabric action.
"""
env.update(ENVIRONMENTS[envName])
env['environment'] = envName
for envName in ENVIRONMENTS:
globals()[envName] = task(name=envName)(lambda: environment(envName))
|
"""
Support for multiple environments based on python configuration files.
"""
from __future__ import print_function, absolute_import
import os
from twisted.python.filepath import FilePath
from fabric.api import env, task
from braid.settings import ENVIRONMENTS
CONFIG_DIRS = [
'~/.config/braid',
]
def loadEnvironmentConfig(envFile):
"""
Loads configuration directives for the specified environment into Fabric's
C{env} variable.
This function tries to load a python module from each specified directory
and stores all of its public uppercase attributes as attributes of Fabric's
environment (all attribute names will be lowercased).
"""
envName = os.path.splitext(envFile.basename())[0]
ENVIRONMENTS.setdefault(envName, {})
glob = { '__file__': envFile.path }
exec envFile.getContent() in glob
ENVIRONMENTS[envName].update(glob['ENVIRONMENT'])
def loadEnvironments(directories=CONFIG_DIRS):
for directory in directories:
confDir = FilePath(os.path.expanduser(directory))
for envFile in confDir.globChildren('*.env'):
loadEnvironmentConfig(envFile)
loadEnvironments()
def environment(envName):
"""
Load the passed environment configuration.
This task can be invoked before executing the desired Fabric action.
"""
env.update(ENVIRONMENTS[envName])
for envName in ENVIRONMENTS:
globals()[envName] = task(name=envName)(lambda: environment(envName))
Put environment name into environment."""
Support for multiple environments based on python configuration files.
"""
from __future__ import print_function, absolute_import
import os
from twisted.python.filepath import FilePath
from fabric.api import env, task
from braid.settings import ENVIRONMENTS
CONFIG_DIRS = [
'~/.config/braid',
]
#FIXME: How to handle module level initialization here?
def loadEnvironmentConfig(envFile):
"""
Loads configuration directives for the specified environment into Fabric's
C{env} variable.
This function tries to load a python module from each specified directory
and stores all of its public uppercase attributes as attributes of Fabric's
environment (all attribute names will be lowercased).
"""
envName = os.path.splitext(envFile.basename())[0]
ENVIRONMENTS.setdefault(envName, {})
glob = { '__file__': envFile.path }
exec envFile.getContent() in glob
ENVIRONMENTS[envName].update(glob['ENVIRONMENT'])
def loadEnvironments(directories=CONFIG_DIRS):
for directory in directories:
confDir = FilePath(os.path.expanduser(directory))
for envFile in confDir.globChildren('*.env'):
loadEnvironmentConfig(envFile)
loadEnvironments()
def environment(envName):
"""
Load the passed environment configuration.
This task can be invoked before executing the desired Fabric action.
"""
env.update(ENVIRONMENTS[envName])
env['environment'] = envName
for envName in ENVIRONMENTS:
globals()[envName] = task(name=envName)(lambda: environment(envName))
|
<commit_before>"""
Support for multiple environments based on python configuration files.
"""
from __future__ import print_function, absolute_import
import os
from twisted.python.filepath import FilePath
from fabric.api import env, task
from braid.settings import ENVIRONMENTS
CONFIG_DIRS = [
'~/.config/braid',
]
def loadEnvironmentConfig(envFile):
"""
Loads configuration directives for the specified environment into Fabric's
C{env} variable.
This function tries to load a python module from each specified directory
and stores all of its public uppercase attributes as attributes of Fabric's
environment (all attribute names will be lowercased).
"""
envName = os.path.splitext(envFile.basename())[0]
ENVIRONMENTS.setdefault(envName, {})
glob = { '__file__': envFile.path }
exec envFile.getContent() in glob
ENVIRONMENTS[envName].update(glob['ENVIRONMENT'])
def loadEnvironments(directories=CONFIG_DIRS):
for directory in directories:
confDir = FilePath(os.path.expanduser(directory))
for envFile in confDir.globChildren('*.env'):
loadEnvironmentConfig(envFile)
loadEnvironments()
def environment(envName):
"""
Load the passed environment configuration.
This task can be invoked before executing the desired Fabric action.
"""
env.update(ENVIRONMENTS[envName])
for envName in ENVIRONMENTS:
globals()[envName] = task(name=envName)(lambda: environment(envName))
<commit_msg>Put environment name into environment.<commit_after>"""
Support for multiple environments based on python configuration files.
"""
from __future__ import print_function, absolute_import
import os
from twisted.python.filepath import FilePath
from fabric.api import env, task
from braid.settings import ENVIRONMENTS
CONFIG_DIRS = [
'~/.config/braid',
]
#FIXME: How to handle module level initialization here?
def loadEnvironmentConfig(envFile):
"""
Loads configuration directives for the specified environment into Fabric's
C{env} variable.
This function tries to load a python module from each specified directory
and stores all of its public uppercase attributes as attributes of Fabric's
environment (all attribute names will be lowercased).
"""
envName = os.path.splitext(envFile.basename())[0]
ENVIRONMENTS.setdefault(envName, {})
glob = { '__file__': envFile.path }
exec envFile.getContent() in glob
ENVIRONMENTS[envName].update(glob['ENVIRONMENT'])
def loadEnvironments(directories=CONFIG_DIRS):
for directory in directories:
confDir = FilePath(os.path.expanduser(directory))
for envFile in confDir.globChildren('*.env'):
loadEnvironmentConfig(envFile)
loadEnvironments()
def environment(envName):
"""
Load the passed environment configuration.
This task can be invoked before executing the desired Fabric action.
"""
env.update(ENVIRONMENTS[envName])
env['environment'] = envName
for envName in ENVIRONMENTS:
globals()[envName] = task(name=envName)(lambda: environment(envName))
|
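One thing worth noting about the registration loop above: `lambda: environment(envName)` closes over the loop variable late, so every generated task would resolve envName at call time to the final loop value and load the same environment. The standard fix binds the current value as a default argument. A self-contained sketch of the fix (assumptions: Python 3; the fabric task decorator is stubbed out, and ENVIRONMENTS/environment are simplified stand-ins):

def task(name):  # stub for fabric.api.task
    def decorator(func):
        func.task_name = name
        return func
    return decorator

ENVIRONMENTS = {'staging': {}, 'production': {}}
calls = []

def environment(env_name):
    calls.append(env_name)

tasks = {}
for envName in ENVIRONMENTS:
    # Default argument binds the *current* value of envName, not the last one.
    tasks[envName] = task(name=envName)(lambda envName=envName: environment(envName))

tasks['staging']()
tasks['production']()
assert calls == ['staging', 'production']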
6ac0598982f90b23d772d6b3cba802ad5fad5459
|
watchman/management/commands/watchman.py
|
watchman/management/commands/watchman.py
|
from __future__ import absolute_import
import json
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from watchman.utils import get_checks
class Command(BaseCommand):
help = 'Runs the default django-watchman checks'
option_list = BaseCommand.option_list + (
make_option(
'-c',
'--checks',
dest='checks',
help='A comma-separated list of watchman checks to run (full python dotted paths)'
),
make_option(
'-s',
'--skips',
dest='skips',
help='A comma-separated list of watchman checks to skip (full python dotted paths)'
),
)
def handle(self, *args, **options):
check_list = None
skip_list = None
verbosity = options['verbosity']
print_all_checks = verbosity == '2' or verbosity == '3'
checks = options['checks']
skips = options['skips']
if checks is not None:
check_list = checks.split(',')
if skips is not None:
skip_list = skips.split(',')
for check in get_checks(check_list=check_list, skip_list=skip_list):
if callable(check):
resp = json.dumps(check())
if '"ok": false' in resp:
raise CommandError(resp)
elif print_all_checks:
print resp
|
from __future__ import absolute_import
import json
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from watchman.utils import get_checks
class Command(BaseCommand):
help = 'Runs the default django-watchman checks'
option_list = BaseCommand.option_list + (
make_option(
'-c',
'--checks',
dest='checks',
help='A comma-separated list of watchman checks to run (full python dotted paths)'
),
make_option(
'-s',
'--skips',
dest='skips',
help='A comma-separated list of watchman checks to skip (full python dotted paths)'
),
)
def handle(self, *args, **options):
check_list = None
skip_list = None
verbosity = options['verbosity']
print_all_checks = verbosity == '2' or verbosity == '3'
checks = options['checks']
skips = options['skips']
if checks is not None:
check_list = checks.split(',')
if skips is not None:
skip_list = skips.split(',')
for check in get_checks(check_list=check_list, skip_list=skip_list):
if callable(check):
resp = json.dumps(check())
if '"ok": false' in resp:
raise CommandError(resp)
elif print_all_checks:
self.stdout.write(resp)
|
Use `self.stdout.write` instead of `print`
|
Use `self.stdout.write` instead of `print`
|
Python
|
bsd-3-clause
|
JBKahn/django-watchman,blag/django-watchman,mwarkentin/django-watchman,blag/django-watchman,JBKahn/django-watchman,ulope/django-watchman,mwarkentin/django-watchman,gerlachry/django-watchman,gerlachry/django-watchman,ulope/django-watchman
|
from __future__ import absolute_import
import json
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from watchman.utils import get_checks
class Command(BaseCommand):
help = 'Runs the default django-watchman checks'
option_list = BaseCommand.option_list + (
make_option(
'-c',
'--checks',
dest='checks',
help='A comma-separated list of watchman checks to run (full python dotted paths)'
),
make_option(
'-s',
'--skips',
dest='skips',
help='A comma-separated list of watchman checks to skip (full python dotted paths)'
),
)
def handle(self, *args, **options):
check_list = None
skip_list = None
verbosity = options['verbosity']
print_all_checks = verbosity == '2' or verbosity == '3'
checks = options['checks']
skips = options['skips']
if checks is not None:
check_list = checks.split(',')
if skips is not None:
skip_list = skips.split(',')
for check in get_checks(check_list=check_list, skip_list=skip_list):
if callable(check):
resp = json.dumps(check())
if '"ok": false' in resp:
raise CommandError(resp)
elif print_all_checks:
print resp
Use `self.stdout.write` instead of `print`
|
from __future__ import absolute_import
import json
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from watchman.utils import get_checks
class Command(BaseCommand):
help = 'Runs the default django-watchman checks'
option_list = BaseCommand.option_list + (
make_option(
'-c',
'--checks',
dest='checks',
help='A comma-separated list of watchman checks to run (full python dotted paths)'
),
make_option(
'-s',
'--skips',
dest='skips',
help='A comma-separated list of watchman checks to skip (full python dotted paths)'
),
)
def handle(self, *args, **options):
check_list = None
skip_list = None
verbosity = options['verbosity']
print_all_checks = verbosity == '2' or verbosity == '3'
checks = options['checks']
skips = options['skips']
if checks is not None:
check_list = checks.split(',')
if skips is not None:
skip_list = skips.split(',')
for check in get_checks(check_list=check_list, skip_list=skip_list):
if callable(check):
resp = json.dumps(check())
if '"ok": false' in resp:
raise CommandError(resp)
elif print_all_checks:
self.stdout.write(resp)
|
<commit_before>from __future__ import absolute_import
import json
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from watchman.utils import get_checks
class Command(BaseCommand):
help = 'Runs the default django-watchman checks'
option_list = BaseCommand.option_list + (
make_option(
'-c',
'--checks',
dest='checks',
help='A comma-separated list of watchman checks to run (full python dotted paths)'
),
make_option(
'-s',
'--skips',
dest='skips',
help='A comma-separated list of watchman checks to skip (full python dotted paths)'
),
)
def handle(self, *args, **options):
check_list = None
skip_list = None
verbosity = options['verbosity']
print_all_checks = verbosity == '2' or verbosity == '3'
checks = options['checks']
skips = options['skips']
if checks is not None:
check_list = checks.split(',')
if skips is not None:
skip_list = skips.split(',')
for check in get_checks(check_list=check_list, skip_list=skip_list):
if callable(check):
resp = json.dumps(check())
if '"ok": false' in resp:
raise CommandError(resp)
elif print_all_checks:
print resp
<commit_msg>Use `self.stdout.write` instead of `print`<commit_after>
|
from __future__ import absolute_import
import json
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from watchman.utils import get_checks
class Command(BaseCommand):
help = 'Runs the default django-watchman checks'
option_list = BaseCommand.option_list + (
make_option(
'-c',
'--checks',
dest='checks',
help='A comma-separated list of watchman checks to run (full python dotted paths)'
),
make_option(
'-s',
'--skips',
dest='skips',
help='A comma-separated list of watchman checks to skip (full python dotted paths)'
),
)
def handle(self, *args, **options):
check_list = None
skip_list = None
verbosity = options['verbosity']
print_all_checks = verbosity == '2' or verbosity == '3'
checks = options['checks']
skips = options['skips']
if checks is not None:
check_list = checks.split(',')
if skips is not None:
skip_list = skips.split(',')
for check in get_checks(check_list=check_list, skip_list=skip_list):
if callable(check):
resp = json.dumps(check())
if '"ok": false' in resp:
raise CommandError(resp)
elif print_all_checks:
self.stdout.write(resp)
|
from __future__ import absolute_import
import json
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from watchman.utils import get_checks
class Command(BaseCommand):
help = 'Runs the default django-watchman checks'
option_list = BaseCommand.option_list + (
make_option(
'-c',
'--checks',
dest='checks',
help='A comma-separated list of watchman checks to run (full python dotted paths)'
),
make_option(
'-s',
'--skips',
dest='skips',
help='A comma-separated list of watchman checks to skip (full python dotted paths)'
),
)
def handle(self, *args, **options):
check_list = None
skip_list = None
verbosity = options['verbosity']
print_all_checks = verbosity == '2' or verbosity == '3'
checks = options['checks']
skips = options['skips']
if checks is not None:
check_list = checks.split(',')
if skips is not None:
skip_list = skips.split(',')
for check in get_checks(check_list=check_list, skip_list=skip_list):
if callable(check):
resp = json.dumps(check())
if '"ok": false' in resp:
raise CommandError(resp)
elif print_all_checks:
print resp
Use `self.stdout.write` instead of `print`
from __future__ import absolute_import
import json
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from watchman.utils import get_checks
class Command(BaseCommand):
help = 'Runs the default django-watchman checks'
option_list = BaseCommand.option_list + (
make_option(
'-c',
'--checks',
dest='checks',
help='A comma-separated list of watchman checks to run (full python dotted paths)'
),
make_option(
'-s',
'--skips',
dest='skips',
help='A comma-separated list of watchman checks to skip (full python dotted paths)'
),
)
def handle(self, *args, **options):
check_list = None
skip_list = None
verbosity = options['verbosity']
print_all_checks = verbosity == '2' or verbosity == '3'
checks = options['checks']
skips = options['skips']
if checks is not None:
check_list = checks.split(',')
if skips is not None:
skip_list = skips.split(',')
for check in get_checks(check_list=check_list, skip_list=skip_list):
if callable(check):
resp = json.dumps(check())
if '"ok": false' in resp:
raise CommandError(resp)
elif print_all_checks:
self.stdout.write(resp)
|
<commit_before>from __future__ import absolute_import
import json
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from watchman.utils import get_checks
class Command(BaseCommand):
help = 'Runs the default django-watchman checks'
option_list = BaseCommand.option_list + (
make_option(
'-c',
'--checks',
dest='checks',
help='A comma-separated list of watchman checks to run (full python dotted paths)'
),
make_option(
'-s',
'--skips',
dest='skips',
help='A comma-separated list of watchman checks to skip (full python dotted paths)'
),
)
def handle(self, *args, **options):
check_list = None
skip_list = None
verbosity = options['verbosity']
print_all_checks = verbosity == '2' or verbosity == '3'
checks = options['checks']
skips = options['skips']
if checks is not None:
check_list = checks.split(',')
if skips is not None:
skip_list = skips.split(',')
for check in get_checks(check_list=check_list, skip_list=skip_list):
if callable(check):
resp = json.dumps(check())
if '"ok": false' in resp:
raise CommandError(resp)
elif print_all_checks:
print resp
<commit_msg>Use `self.stdout.write` instead of `print`<commit_after>from __future__ import absolute_import
import json
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from watchman.utils import get_checks
class Command(BaseCommand):
help = 'Runs the default django-watchman checks'
option_list = BaseCommand.option_list + (
make_option(
'-c',
'--checks',
dest='checks',
help='A comma-separated list of watchman checks to run (full python dotted paths)'
),
make_option(
'-s',
'--skips',
dest='skips',
help='A comma-separated list of watchman checks to skip (full python dotted paths)'
),
)
def handle(self, *args, **options):
check_list = None
skip_list = None
verbosity = options['verbosity']
print_all_checks = verbosity == '2' or verbosity == '3'
checks = options['checks']
skips = options['skips']
if checks is not None:
check_list = checks.split(',')
if skips is not None:
skip_list = skips.split(',')
for check in get_checks(check_list=check_list, skip_list=skip_list):
if callable(check):
resp = json.dumps(check())
if '"ok": false' in resp:
raise CommandError(resp)
elif print_all_checks:
self.stdout.write(resp)
|
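The record above swaps `print` for `self.stdout.write`. For reference, here is a minimal, self-contained sketch of that pattern in a modern Django management command; the command name, flag, and check logic are illustrative stand-ins, not part of the watchman codebase.

```python
# Hedged sketch of the pattern, not the watchman command itself: write
# through self.stdout so output respects the command's configured stream
# and can be captured in tests via call_command(..., stdout=buf).
from django.core.management.base import BaseCommand, CommandError


class Command(BaseCommand):
    help = 'Illustrative check runner'

    def add_arguments(self, parser):
        # add_arguments() is the modern replacement for option_list.
        parser.add_argument('-c', '--checks', dest='checks',
                            help='Comma-separated list of checks to run')

    def handle(self, *args, **options):
        names = filter(None, (options['checks'] or '').split(','))
        for name in names:
            ok = True  # stand-in for a real health check
            if not ok:
                raise CommandError('check failed: %s' % name)
            self.stdout.write('ok: %s' % name)
```

With this shape, `django.core.management.call_command('...', checks='db,cache', stdout=some_buffer)` captures the output instead of letting it leak to the process stdout.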
5d5bbcd380300c5fd786fac59bf360a287b99c3b
|
oauthenticator/__init__.py
|
oauthenticator/__init__.py
|
# include github, bitbucket, google here for backward-compatibility
# don't add new oauthenticators here.
from .oauth2 import *
from .github import *
from .bitbucket import *
from .google import *
from ._version import __version__, version_info
|
# include github, bitbucket, google here for backward-compatibility
# don't add new oauthenticators here.
from .oauth2 import *
from .github import *
from .bitbucket import *
from .google import *
from .generic import *
from ._version import __version__, version_info
|
Add the GenericHandler to the init file
|
Add the GenericHandler to the init file
|
Python
|
bsd-3-clause
|
santi81/oauthenticator,santi81/oauthenticator
|
# include github, bitbucket, google here for backward-compatibility
# don't add new oauthenticators here.
from .oauth2 import *
from .github import *
from .bitbucket import *
from .google import *
from ._version import __version__, version_info
Add the GenericHandler to the init file
|
# include github, bitbucket, google here for backward-compatibility
# don't add new oauthenticators here.
from .oauth2 import *
from .github import *
from .bitbucket import *
from .google import *
from .generic import *
from ._version import __version__, version_info
|
<commit_before># include github, bitbucket, google here for backward-compatibility
# don't add new oauthenticators here.
from .oauth2 import *
from .github import *
from .bitbucket import *
from .google import *
from ._version import __version__, version_info
<commit_msg>Add the GenericHandler to the init file<commit_after>
|
# include github, bitbucket, google here for backward-compatibility
# don't add new oauthenticators here.
from .oauth2 import *
from .github import *
from .bitbucket import *
from .google import *
from .generic import *
from ._version import __version__, version_info
|
# include github, bitbucket, google here for backward-compatibility
# don't add new oauthenticators here.
from .oauth2 import *
from .github import *
from .bitbucket import *
from .google import *
from ._version import __version__, version_info
Add the GenericHandler to the init file
# include github, bitbucket, google here for backward-compatibility
# don't add new oauthenticators here.
from .oauth2 import *
from .github import *
from .bitbucket import *
from .google import *
from .generic import *
from ._version import __version__, version_info
|
<commit_before># include github, bitbucket, google here for backward-compatibility
# don't add new oauthenticators here.
from .oauth2 import *
from .github import *
from .bitbucket import *
from .google import *
from ._version import __version__, version_info
<commit_msg>Add the GenericHandler to the init file<commit_after># include github, bitbucket, google here for backward-compatibility
# don't add new oauthenticators here.
from .oauth2 import *
from .github import *
from .bitbucket import *
from .google import *
from .generic import *
from ._version import __version__, version_info
|
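The oauthenticator change is a one-line re-export. A hedged sketch of the same idea with explicit names instead of star imports follows; the class names are assumptions chosen for illustration.

```python
# Hypothetical package __init__.py: keep old import paths working by
# re-exporting from submodules. Explicit names plus __all__ avoid the
# namespace bleed that chained star-imports can cause.
from .oauth2 import OAuthenticator            # assumed class name
from .generic import GenericOAuthenticator    # assumed class name

__all__ = ['OAuthenticator', 'GenericOAuthenticator']
```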
05cf5f3729ffbceeb2436322b2aac5285d7228de
|
wsgi.py
|
wsgi.py
|
# Copyright 2017 Jonathan Anderson
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import webapp
application = webapp.create_app()
|
# Copyright 2017 Jonathan Anderson
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import config
import webapp
application = webapp.create_app()
if config.REGISTRATION_IS_OPEN:
print(" * Registration is OPEN")
else:
print(" * Registration is NOT OPEN: pregistration code is '%s'" %
application.config['PREREGISTRATION_CODE'])
|
Print registration code in WSGI app.
|
Print registration code in WSGI app.
Otherwise, how will we know what it is?
|
Python
|
bsd-2-clause
|
trombonehero/nerf-herder,trombonehero/nerf-herder,trombonehero/nerf-herder
|
# Copyright 2017 Jonathan Anderson
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import webapp
application = webapp.create_app()
Print registration code in WSGI app.
Otherwise, how will we know what it is?
|
# Copyright 2017 Jonathan Anderson
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import config
import webapp
application = webapp.create_app()
if config.REGISTRATION_IS_OPEN:
print(" * Registration is OPEN")
else:
print(" * Registration is NOT OPEN: pregistration code is '%s'" %
application.config['PREREGISTRATION_CODE'])
|
<commit_before># Copyright 2017 Jonathan Anderson
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import webapp
application = webapp.create_app()
<commit_msg>Print registration code in WSGI app.
Otherwise, how will we know what it is?<commit_after>
|
# Copyright 2017 Jonathan Anderson
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import config
import webapp
application = webapp.create_app()
if config.REGISTRATION_IS_OPEN:
print(" * Registration is OPEN")
else:
print(" * Registration is NOT OPEN: pregistration code is '%s'" %
application.config['PREREGISTRATION_CODE'])
|
# Copyright 2017 Jonathan Anderson
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import webapp
application = webapp.create_app()
Print registration code in WSGI app.
Otherwise, how will we know what it is?
# Copyright 2017 Jonathan Anderson
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import config
import webapp
application = webapp.create_app()
if config.REGISTRATION_IS_OPEN:
print(" * Registration is OPEN")
else:
print(" * Registration is NOT OPEN: pregistration code is '%s'" %
application.config['PREREGISTRATION_CODE'])
|
<commit_before># Copyright 2017 Jonathan Anderson
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import webapp
application = webapp.create_app()
<commit_msg>Print registration code in WSGI app.
Otherwise, how will we know what it is?<commit_after># Copyright 2017 Jonathan Anderson
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import config
import webapp
application = webapp.create_app()
if config.REGISTRATION_IS_OPEN:
print(" * Registration is OPEN")
else:
print(" * Registration is NOT OPEN: pregistration code is '%s'" %
application.config['PREREGISTRATION_CODE'])
|
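Since the wsgi change only reports configuration at import time, a sketch of the same idea follows. It writes to stderr rather than using print(), since several WSGI servers capture stderr into their error log while discarding stdout; the `config` and `webapp` modules mirror the record and are assumptions here, as is the Flask-style `application.config`.

```python
# Hedged variant of the record's startup notice, not the nerf-herder code.
import sys

import config   # assumed module exposing REGISTRATION_IS_OPEN
import webapp   # assumed app factory module

application = webapp.create_app()

if config.REGISTRATION_IS_OPEN:
    sys.stderr.write(" * Registration is OPEN\n")
else:
    sys.stderr.write(" * Registration is CLOSED: preregistration code is %r\n"
                     % application.config.get('PREREGISTRATION_CODE'))
```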
beb06f3377a5e3e52f5756a1ecbf4197c7a3e99e
|
base/components/correlations/managers.py
|
base/components/correlations/managers.py
|
# -*- coding: utf-8 -*-
from datetime import date
from django.contrib.contenttypes.models import ContentType
from django.db import models
class CorrelationManager(models.Manager):
def update_or_create(self, instance, timestamp, attribute):
# Membership is a special case. Since most groups are static
# (or non-generational), the date the group is formed is the same as
# the date its members joined. So if those two values are equal, stop
# the process.
if not timestamp or (instance._meta.model_name == 'membership'
and instance.started == instance.group.started):
return
ctype = ContentType.objects.get_for_model(instance.sender)
defaults = {
'timestamp': timestamp,
'julian': timestamp.timetuple().tm_yday,
'year': timestamp.year,
'month': timestamp.month,
'day': timestamp.day,
}
correlation, created = self.get_or_create(
content_type=ctype,
object_id=instance._get_pk_val(),
identifier=instance._meta.model_name,
date_field=attribute,
defaults=defaults
)
for key, value in defaults.iteritems():
setattr(correlation, key, value)
correlation.save()
return
def get_query_set(self):
qs = super(CorrelationManager, self).get_query_set()
return qs #.prefetch_related('content_object')
def today(self):
qs = self.get_query_set()
return qs.filter(julian=date.today().timetuple().tm_yday)
|
# -*- coding: utf-8 -*-
from datetime import date
from django.contrib.contenttypes.models import ContentType
from django.db import models
class CorrelationManager(models.Manager):
def update_or_create(self, instance, timestamp, attribute):
ctype = ContentType.objects.get_for_model(instance.sender)
defaults = {
'timestamp': timestamp,
'julian': timestamp.timetuple().tm_yday,
'year': timestamp.year,
'month': timestamp.month,
'day': timestamp.day,
}
correlation, created = self.get_or_create(
content_type=ctype,
object_id=instance._get_pk_val(),
identifier=instance._meta.model_name,
date_field=attribute,
defaults=defaults
)
for key, value in defaults.iteritems():
setattr(correlation, key, value)
correlation.save()
return
def get_query_set(self):
qs = super(CorrelationManager, self).get_query_set()
return qs #.prefetch_related('content_object')
def today(self):
qs = self.get_query_set()
return qs.filter(julian=date.today().timetuple().tm_yday)
|
Remove the Membership special case. We want everything correlated.
|
Remove the Membership special case. We want everything correlated.
|
Python
|
apache-2.0
|
hello-base/web,hello-base/web,hello-base/web,hello-base/web
|
# -*- coding: utf-8 -*-
from datetime import date
from django.contrib.contenttypes.models import ContentType
from django.db import models
class CorrelationManager(models.Manager):
def update_or_create(self, instance, timestamp, attribute):
# Membership is a special case. Since most groups are static
# (or non-generational), the date the group is formed is the same as
# the date its members joined. So if those two values are equal, stop
# the process.
if not timestamp or (instance._meta.model_name == 'membership'
and instance.started == instance.group.started):
return
ctype = ContentType.objects.get_for_model(instance.sender)
defaults = {
'timestamp': timestamp,
'julian': timestamp.timetuple().tm_yday,
'year': timestamp.year,
'month': timestamp.month,
'day': timestamp.day,
}
correlation, created = self.get_or_create(
content_type=ctype,
object_id=instance._get_pk_val(),
identifier=instance._meta.model_name,
date_field=attribute,
defaults=defaults
)
for key, value in defaults.iteritems():
setattr(correlation, key, value)
correlation.save()
return
def get_query_set(self):
qs = super(CorrelationManager, self).get_query_set()
return qs #.prefetch_related('content_object')
def today(self):
qs = self.get_query_set()
return qs.filter(julian=date.today().timetuple().tm_yday)
Remove the Membership special case. We want everything correlated.
|
# -*- coding: utf-8 -*-
from datetime import date
from django.contrib.contenttypes.models import ContentType
from django.db import models
class CorrelationManager(models.Manager):
def update_or_create(self, instance, timestamp, attribute):
ctype = ContentType.objects.get_for_model(instance.sender)
defaults = {
'timestamp': timestamp,
'julian': timestamp.timetuple().tm_yday,
'year': timestamp.year,
'month': timestamp.month,
'day': timestamp.day,
}
correlation, created = self.get_or_create(
content_type=ctype,
object_id=instance._get_pk_val(),
identifier=instance._meta.model_name,
date_field=attribute,
defaults=defaults
)
for key, value in defaults.iteritems():
setattr(correlation, key, value)
correlation.save()
return
def get_query_set(self):
qs = super(CorrelationManager, self).get_query_set()
return qs #.prefetch_related('content_object')
def today(self):
qs = self.get_query_set()
return qs.filter(julian=date.today().timetuple().tm_yday)
|
<commit_before># -*- coding: utf-8 -*-
from datetime import date
from django.contrib.contenttypes.models import ContentType
from django.db import models
class CorrelationManager(models.Manager):
def update_or_create(self, instance, timestamp, attribute):
# Membership is a special case. Since most groups are static
# (or non-generational), the date the group is formed is the same as
# the date its members joined. So if those two values are equal, stop
# the process.
if not timestamp or (instance._meta.model_name == 'membership'
and instance.started == instance.group.started):
return
ctype = ContentType.objects.get_for_model(instance.sender)
defaults = {
'timestamp': timestamp,
'julian': timestamp.timetuple().tm_yday,
'year': timestamp.year,
'month': timestamp.month,
'day': timestamp.day,
}
correlation, created = self.get_or_create(
content_type=ctype,
object_id=instance._get_pk_val(),
identifier=instance._meta.model_name,
date_field=attribute,
defaults=defaults
)
for key, value in defaults.iteritems():
setattr(correlation, key, value)
correlation.save()
return
def get_query_set(self):
qs = super(CorrelationManager, self).get_query_set()
return qs #.prefetch_related('content_object')
def today(self):
qs = self.get_query_set()
return qs.filter(julian=date.today().timetuple().tm_yday)
<commit_msg>Remove the Membership special case. We want everything correlated.<commit_after>
|
# -*- coding: utf-8 -*-
from datetime import date
from django.contrib.contenttypes.models import ContentType
from django.db import models
class CorrelationManager(models.Manager):
def update_or_create(self, instance, timestamp, attribute):
ctype = ContentType.objects.get_for_model(instance.sender)
defaults = {
'timestamp': timestamp,
'julian': timestamp.timetuple().tm_yday,
'year': timestamp.year,
'month': timestamp.month,
'day': timestamp.day,
}
correlation, created = self.get_or_create(
content_type=ctype,
object_id=instance._get_pk_val(),
identifier=instance._meta.model_name,
date_field=attribute,
defaults=defaults
)
for key, value in defaults.iteritems():
setattr(correlation, key, value)
correlation.save()
return
def get_query_set(self):
qs = super(CorrelationManager, self).get_query_set()
return qs #.prefetch_related('content_object')
def today(self):
qs = self.get_query_set()
return qs.filter(julian=date.today().timetuple().tm_yday)
|
# -*- coding: utf-8 -*-
from datetime import date
from django.contrib.contenttypes.models import ContentType
from django.db import models
class CorrelationManager(models.Manager):
def update_or_create(self, instance, timestamp, attribute):
# Membership is a special case. Since most groups are static
# (or non-generational), the date the group is formed is the same as
# the date its members joined. So if those two values are equal, stop
# the process.
if not timestamp or (instance._meta.model_name == 'membership'
and instance.started == instance.group.started):
return
ctype = ContentType.objects.get_for_model(instance.sender)
defaults = {
'timestamp': timestamp,
'julian': timestamp.timetuple().tm_yday,
'year': timestamp.year,
'month': timestamp.month,
'day': timestamp.day,
}
correlation, created = self.get_or_create(
content_type=ctype,
object_id=instance._get_pk_val(),
identifier=instance._meta.model_name,
date_field=attribute,
defaults=defaults
)
for key, value in defaults.iteritems():
setattr(correlation, key, value)
correlation.save()
return
def get_query_set(self):
qs = super(CorrelationManager, self).get_query_set()
return qs #.prefetch_related('content_object')
def today(self):
qs = self.get_query_set()
return qs.filter(julian=date.today().timetuple().tm_yday)
Remove the Membership special case. We want everything correlated.
# -*- coding: utf-8 -*-
from datetime import date
from django.contrib.contenttypes.models import ContentType
from django.db import models
class CorrelationManager(models.Manager):
def update_or_create(self, instance, timestamp, attribute):
ctype = ContentType.objects.get_for_model(instance.sender)
defaults = {
'timestamp': timestamp,
'julian': timestamp.timetuple().tm_yday,
'year': timestamp.year,
'month': timestamp.month,
'day': timestamp.day,
}
correlation, created = self.get_or_create(
content_type=ctype,
object_id=instance._get_pk_val(),
identifier=instance._meta.model_name,
date_field=attribute,
defaults=defaults
)
for key, value in defaults.iteritems():
setattr(correlation, key, value)
correlation.save()
return
def get_query_set(self):
qs = super(CorrelationManager, self).get_query_set()
return qs #.prefetch_related('content_object')
def today(self):
qs = self.get_query_set()
return qs.filter(julian=date.today().timetuple().tm_yday)
|
<commit_before># -*- coding: utf-8 -*-
from datetime import date
from django.contrib.contenttypes.models import ContentType
from django.db import models
class CorrelationManager(models.Manager):
def update_or_create(self, instance, timestamp, attribute):
# Membership is a special case. Since most groups are static
# (or non-generational), the date the group is formed is the same as
# the date its members joined. So if those two values are equal, stop
# the process.
if not timestamp or (instance._meta.model_name == 'membership'
and instance.started == instance.group.started):
return
ctype = ContentType.objects.get_for_model(instance.sender)
defaults = {
'timestamp': timestamp,
'julian': timestamp.timetuple().tm_yday,
'year': timestamp.year,
'month': timestamp.month,
'day': timestamp.day,
}
correlation, created = self.get_or_create(
content_type=ctype,
object_id=instance._get_pk_val(),
identifier=instance._meta.model_name,
date_field=attribute,
defaults=defaults
)
for key, value in defaults.iteritems():
setattr(correlation, key, value)
correlation.save()
return
def get_query_set(self):
qs = super(CorrelationManager, self).get_query_set()
return qs #.prefetch_related('content_object')
def today(self):
qs = self.get_query_set()
return qs.filter(julian=date.today().timetuple().tm_yday)
<commit_msg>Remove the Membership special case. We want everything correlated.<commit_after># -*- coding: utf-8 -*-
from datetime import date
from django.contrib.contenttypes.models import ContentType
from django.db import models
class CorrelationManager(models.Manager):
def update_or_create(self, instance, timestamp, attribute):
ctype = ContentType.objects.get_for_model(instance.sender)
defaults = {
'timestamp': timestamp,
'julian': timestamp.timetuple().tm_yday,
'year': timestamp.year,
'month': timestamp.month,
'day': timestamp.day,
}
correlation, created = self.get_or_create(
content_type=ctype,
object_id=instance._get_pk_val(),
identifier=instance._meta.model_name,
date_field=attribute,
defaults=defaults
)
for key, value in defaults.iteritems():
setattr(correlation, key, value)
correlation.save()
return
def get_query_set(self):
qs = super(CorrelationManager, self).get_query_set()
return qs #.prefetch_related('content_object')
def today(self):
qs = self.get_query_set()
return qs.filter(julian=date.today().timetuple().tm_yday)
|
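The manager above emulates an upsert with `get_or_create` plus a setattr loop (and its `iteritems()` is Python 2 only; `items()` is the Python 3 spelling). Django 1.7+ ships `QuerySet.update_or_create`, which collapses that dance into one call. A hedged sketch follows, with the method renamed so it does not shadow the built-in; lookup and field names follow the record and should be treated as assumptions.

```python
# Hypothetical rewrite using Django's built-in update_or_create
# (Django 1.7+). instance.pk stands in for the record's _get_pk_val(),
# and instance.sender is mirrored as-is; all names are assumptions.
from django.contrib.contenttypes.models import ContentType
from django.db import models


class CorrelationManager(models.Manager):
    def upsert(self, instance, timestamp, attribute):
        ctype = ContentType.objects.get_for_model(instance.sender)
        defaults = {
            'timestamp': timestamp,
            'julian': timestamp.timetuple().tm_yday,
            'year': timestamp.year,
            'month': timestamp.month,
            'day': timestamp.day,
        }
        # One call replaces get_or_create + setattr loop + save().
        correlation, created = self.update_or_create(
            content_type=ctype,
            object_id=instance.pk,
            identifier=instance._meta.model_name,
            date_field=attribute,
            defaults=defaults,
        )
        return correlation
```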
1025121c19af4d1ea224abc8f37120cb9f24210d
|
Scripts/RoboFabUFO/ImportFontFromUFO.py
|
Scripts/RoboFabUFO/ImportFontFromUFO.py
|
#FLM: Import .ufo File into FontLab
from robofab.world import NewFont
from robofab.interface.all.dialogs import GetFolder
path = GetFolder("Please select a .ufo")
if path is not None:
font = NewFont()
font.readUFO(path, doProgress=True)
font.update()
print 'DONE!'
|
#FLM: Import .ufo File into FontLab
from robofab.world import NewFont
from robofab.interface.all.dialogs import GetFileOrFolder
path = GetFileOrFolder("Please select a .ufo")
if path is not None:
font = NewFont()
font.readUFO(path, doProgress=True)
font.update()
print 'DONE!'
|
Use GetFileOrFolder for the dialog.
|
Use GetFileOrFolder for the dialog.
git-svn-id: 68c5a305180a392494b23cb56ff711ec9d5bf0e2@493 b5fa9d6c-a76f-4ffd-b3cb-f825fc41095c
|
Python
|
bsd-3-clause
|
jamesgk/robofab,daltonmaag/robofab,bitforks/robofab,schriftgestalt/robofab,moyogo/robofab,schriftgestalt/robofab,miguelsousa/robofab,anthrotype/robofab,daltonmaag/robofab,anthrotype/robofab,jamesgk/robofab,miguelsousa/robofab,moyogo/robofab
|
#FLM: Import .ufo File into FontLab
from robofab.world import NewFont
from robofab.interface.all.dialogs import GetFolder
path = GetFolder("Please select a .ufo")
if path is not None:
font = NewFont()
font.readUFO(path, doProgress=True)
font.update()
print 'DONE!'
Use GetFileOrFolder for the dialog.
git-svn-id: 68c5a305180a392494b23cb56ff711ec9d5bf0e2@493 b5fa9d6c-a76f-4ffd-b3cb-f825fc41095c
|
#FLM: Import .ufo File into FontLab
from robofab.world import NewFont
from robofab.interface.all.dialogs import GetFileOrFolder
path = GetFileOrFolder("Please select a .ufo")
if path is not None:
font = NewFont()
font.readUFO(path, doProgress=True)
font.update()
print 'DONE!'
|
<commit_before>#FLM: Import .ufo File into FontLab
from robofab.world import NewFont
from robofab.interface.all.dialogs import GetFolder
path = GetFolder("Please select a .ufo")
if path is not None:
font = NewFont()
font.readUFO(path, doProgress=True)
font.update()
print 'DONE!'
<commit_msg>Use GetFileOrFolder for the dialog.
git-svn-id: 68c5a305180a392494b23cb56ff711ec9d5bf0e2@493 b5fa9d6c-a76f-4ffd-b3cb-f825fc41095c<commit_after>
|
#FLM: Import .ufo File into FontLab
from robofab.world import NewFont
from robofab.interface.all.dialogs import GetFileOrFolder
path = GetFileOrFolder("Please select a .ufo")
if path is not None:
font = NewFont()
font.readUFO(path, doProgress=True)
font.update()
print 'DONE!'
|
#FLM: Import .ufo File into FontLab
from robofab.world import NewFont
from robofab.interface.all.dialogs import GetFolder
path = GetFolder("Please select a .ufo")
if path is not None:
font = NewFont()
font.readUFO(path, doProgress=True)
font.update()
print 'DONE!'
Use GetFileOrFolder for the dialog.
git-svn-id: 68c5a305180a392494b23cb56ff711ec9d5bf0e2@493 b5fa9d6c-a76f-4ffd-b3cb-f825fc41095c
#FLM: Import .ufo File into FontLab
from robofab.world import NewFont
from robofab.interface.all.dialogs import GetFileOrFolder
path = GetFileOrFolder("Please select a .ufo")
if path is not None:
font = NewFont()
font.readUFO(path, doProgress=True)
font.update()
print 'DONE!'
|
<commit_before>#FLM: Import .ufo File into FontLab
from robofab.world import NewFont
from robofab.interface.all.dialogs import GetFolder
path = GetFolder("Please select a .ufo")
if path is not None:
font = NewFont()
font.readUFO(path, doProgress=True)
font.update()
print 'DONE!'
<commit_msg>Use GetFileOrFolder for the dialog.
git-svn-id: 68c5a305180a392494b23cb56ff711ec9d5bf0e2@493 b5fa9d6c-a76f-4ffd-b3cb-f825fc41095c<commit_after>#FLM: Import .ufo File into FontLab
from robofab.world import NewFont
from robofab.interface.all.dialogs import GetFileOrFolder
path = GetFileOrFolder("Please select a .ufo")
if path is not None:
font = NewFont()
font.readUFO(path, doProgress=True)
font.update()
print 'DONE!'
|
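The robofab record swaps one dialog helper for another, since a .ufo can present as a file-like package on some platforms and as a plain folder on others. If a script also has to run on robofab builds from before `GetFileOrFolder` existed, a guarded import keeps it working; this fallback is an addition for illustration, not something the commit does.

```python
# Hedged sketch: prefer GetFileOrFolder, falling back to GetFolder on
# older robofab builds that lack the newer dialog helper.
try:
    from robofab.interface.all.dialogs import GetFileOrFolder as ask_for_ufo
except ImportError:
    from robofab.interface.all.dialogs import GetFolder as ask_for_ufo

path = ask_for_ufo("Please select a .ufo")
if path is not None:
    print('selected: %s' % path)
```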
7eb83427b8134d5fa51357371e95b398d95a5b96
|
dayonetools/services/__init__.py
|
dayonetools/services/__init__.py
|
"""Common services code"""
AVAILABLE_SERVICES = ['habit_list', 'idonethis', 'nikeplus']
def get_service_module(service_name):
"""Import given service from dayonetools.services package"""
import importlib
services_pkg = 'dayonetools.services'
module = '%s.%s' % (services_pkg, service_name)
return importlib.import_module(module)
def convert_to_dayone_date_string(day_str):
"""
Convert given date in 'yyyy-mm-dd' format into dayone accepted format of
iso8601
The timestamp will match midnight but year, month, and day will be replaced
with given arguments.
"""
year, month, day = day_str.split('-')
from datetime import datetime
now = datetime.utcnow()
# Don't know the hour, minute, etc. so just assume midnight
date = now.replace(year=int(year),
month=int(month),
day=int(day),
minute=0,
hour=0,
second=0,
microsecond=0)
iso_string = date.isoformat()
# Very specific format for dayone, if the 'Z' is not in the
# correct positions the entries will not show up in dayone at all.
return iso_string + 'Z'
# Make all services available from this level
for service_name in AVAILABLE_SERVICES:
service = get_service_module(service_name)
|
"""Common services code"""
AVAILABLE_SERVICES = ['habit_list', 'idonethis', 'nikeplus']
def get_service_module(service_name):
"""Import given service from dayonetools.services package"""
import importlib
services_pkg = 'dayonetools.services'
module = '%s.%s' % (services_pkg, service_name)
return importlib.import_module(module)
def convert_to_dayone_date_string(day_str):
"""
Convert given date in 'yyyy-mm-dd' format into dayone accepted format of
iso8601
The timestamp will match midnight but year, month, and day will be replaced
with given arguments.
"""
year, month, day = day_str.split('-')
from datetime import datetime
now = datetime.utcnow()
# FIXME: The current version of day one does not support timezone data
# correctly. So, if we enter midnight here then every entry is off by a
# day.
# Don't know the hour, minute, etc. so just assume midnight
date = now.replace(year=int(year),
month=int(month),
day=int(day),
minute=00,
hour=10,
second=0,
microsecond=0)
iso_string = date.isoformat()
# Very specific format for dayone, if the 'Z' is not in the
# correct positions the entries will not show up in dayone at all.
return iso_string + 'Z'
# Make all services available from this level
for service_name in AVAILABLE_SERVICES:
service = get_service_module(service_name)
|
Add hack for broken timezone support in dayone
|
Add hack for broken timezone support in dayone
|
Python
|
mit
|
durden/dayonetools
|
"""Common services code"""
AVAILABLE_SERVICES = ['habit_list', 'idonethis', 'nikeplus']
def get_service_module(service_name):
"""Import given service from dayonetools.services package"""
import importlib
services_pkg = 'dayonetools.services'
module = '%s.%s' % (services_pkg, service_name)
return importlib.import_module(module)
def convert_to_dayone_date_string(day_str):
"""
Convert given date in 'yyyy-mm-dd' format into dayone accepted format of
iso8601
The timestamp will match midnight but year, month, and day will be replaced
with given arguments.
"""
year, month, day = day_str.split('-')
from datetime import datetime
now = datetime.utcnow()
# Don't know the hour, minute, etc. so just assume midnight
date = now.replace(year=int(year),
month=int(month),
day=int(day),
minute=0,
hour=0,
second=0,
microsecond=0)
iso_string = date.isoformat()
# Very specific format for dayone, if the 'Z' is not in the
# correct positions the entries will not show up in dayone at all.
return iso_string + 'Z'
# Make all services available from this level
for service_name in AVAILABLE_SERVICES:
service = get_service_module(service_name)
Add hack for broken timezone support in dayone
|
"""Common services code"""
AVAILABLE_SERVICES = ['habit_list', 'idonethis', 'nikeplus']
def get_service_module(service_name):
"""Import given service from dayonetools.services package"""
import importlib
services_pkg = 'dayonetools.services'
module = '%s.%s' % (services_pkg, service_name)
return importlib.import_module(module)
def convert_to_dayone_date_string(day_str):
"""
Convert given date in 'yyyy-mm-dd' format into dayone accepted format of
iso8601
The timestamp will match midnight but year, month, and day will be replaced
with given arguments.
"""
year, month, day = day_str.split('-')
from datetime import datetime
now = datetime.utcnow()
# FIXME: The current version of day one does not support timezone data
# correctly. So, if we enter midnight here then every entry is off by a
# day.
# Don't know the hour, minute, etc. so just assume midnight
date = now.replace(year=int(year),
month=int(month),
day=int(day),
minute=00,
hour=10,
second=0,
microsecond=0)
iso_string = date.isoformat()
# Very specific format for dayone, if the 'Z' is not in the
# correct positions the entries will not show up in dayone at all.
return iso_string + 'Z'
# Make all services available from this level
for service_name in AVAILABLE_SERVICES:
service = get_service_module(service_name)
|
<commit_before>"""Common services code"""
AVAILABLE_SERVICES = ['habit_list', 'idonethis', 'nikeplus']
def get_service_module(service_name):
"""Import given service from dayonetools.services package"""
import importlib
services_pkg = 'dayonetools.services'
module = '%s.%s' % (services_pkg, service_name)
return importlib.import_module(module)
def convert_to_dayone_date_string(day_str):
"""
Convert given date in 'yyyy-mm-dd' format into dayone accepted format of
iso8601
The timestamp will match midnight but year, month, and day will be replaced
with given arguments.
"""
year, month, day = day_str.split('-')
from datetime import datetime
now = datetime.utcnow()
# Don't know the hour, minute, etc. so just assume midnight
date = now.replace(year=int(year),
month=int(month),
day=int(day),
minute=0,
hour=0,
second=0,
microsecond=0)
iso_string = date.isoformat()
# Very specific format for dayone, if the 'Z' is not in the
# correct positions the entries will not show up in dayone at all.
return iso_string + 'Z'
# Make all services available from this level
for service_name in AVAILABLE_SERVICES:
service = get_service_module(service_name)
<commit_msg>Add hack for broken timezone support in dayone<commit_after>
|
"""Common services code"""
AVAILABLE_SERVICES = ['habit_list', 'idonethis', 'nikeplus']
def get_service_module(service_name):
"""Import given service from dayonetools.services package"""
import importlib
services_pkg = 'dayonetools.services'
module = '%s.%s' % (services_pkg, service_name)
return importlib.import_module(module)
def convert_to_dayone_date_string(day_str):
"""
Convert given date in 'yyyy-mm-dd' format into dayone accepted format of
iso8601
The timestamp will match midnight but year, month, and day will be replaced
with given arguments.
"""
year, month, day = day_str.split('-')
from datetime import datetime
now = datetime.utcnow()
# FIXME: The current version of day one does not support timezone data
# correctly. So, if we enter midnight here then every entry is off by a
# day.
# Don't know the hour, minute, etc. so just assume midnight
date = now.replace(year=int(year),
month=int(month),
day=int(day),
minute=00,
hour=10,
second=0,
microsecond=0)
iso_string = date.isoformat()
# Very specific format for dayone, if the 'Z' is not in the
# correct positions the entries will not show up in dayone at all.
return iso_string + 'Z'
# Make all services available from this level
for service_name in AVAILABLE_SERVICES:
service = get_service_module(service_name)
|
"""Common services code"""
AVAILABLE_SERVICES = ['habit_list', 'idonethis', 'nikeplus']
def get_service_module(service_name):
"""Import given service from dayonetools.services package"""
import importlib
services_pkg = 'dayonetools.services'
module = '%s.%s' % (services_pkg, service_name)
return importlib.import_module(module)
def convert_to_dayone_date_string(day_str):
"""
Convert given date in 'yyyy-mm-dd' format into dayone accepted format of
iso8601
The timestamp will match midnight but year, month, and day will be replaced
with given arguments.
"""
year, month, day = day_str.split('-')
from datetime import datetime
now = datetime.utcnow()
# Don't know the hour, minute, etc. so just assume midnight
date = now.replace(year=int(year),
month=int(month),
day=int(day),
minute=0,
hour=0,
second=0,
microsecond=0)
iso_string = date.isoformat()
# Very specific format for dayone, if the 'Z' is not in the
# correct positions the entries will not show up in dayone at all.
return iso_string + 'Z'
# Make all services available from this level
for service_name in AVAILABLE_SERVICES:
service = get_service_module(service_name)
Add hack for broken timezone support in dayone"""Common services code"""
AVAILABLE_SERVICES = ['habit_list', 'idonethis', 'nikeplus']
def get_service_module(service_name):
"""Import given service from dayonetools.services package"""
import importlib
services_pkg = 'dayonetools.services'
module = '%s.%s' % (services_pkg, service_name)
return importlib.import_module(module)
def convert_to_dayone_date_string(day_str):
"""
Convert given date in 'yyyy-mm-dd' format into dayone accepted format of
iso8601
The timestamp will match midnight but year, month, and day will be replaced
with given arguments.
"""
year, month, day = day_str.split('-')
from datetime import datetime
now = datetime.utcnow()
# FIXME: The current version of day one does not support timezone data
# correctly. So, if we enter midnight here then every entry is off by a
# day.
# Don't know the hour, minute, etc. so just assume midnight
date = now.replace(year=int(year),
month=int(month),
day=int(day),
minute=00,
hour=10,
second=0,
microsecond=0)
iso_string = date.isoformat()
# Very specific format for dayone, if the 'Z' is not in the
# correct positions the entries will not show up in dayone at all.
return iso_string + 'Z'
# Make all services available from this level
for service_name in AVAILABLE_SERVICES:
service = get_service_module(service_name)
|
<commit_before>"""Common services code"""
AVAILABLE_SERVICES = ['habit_list', 'idonethis', 'nikeplus']
def get_service_module(service_name):
"""Import given service from dayonetools.services package"""
import importlib
services_pkg = 'dayonetools.services'
module = '%s.%s' % (services_pkg, service_name)
return importlib.import_module(module)
def convert_to_dayone_date_string(day_str):
"""
Convert given date in 'yyyy-mm-dd' format into dayone accepted format of
iso8601
The timestamp will match midnight but year, month, and day will be replaced
with given arguments.
"""
year, month, day = day_str.split('-')
from datetime import datetime
now = datetime.utcnow()
# Don't know the hour, minute, etc. so just assume midnight
date = now.replace(year=int(year),
month=int(month),
day=int(day),
minute=0,
hour=0,
second=0,
microsecond=0)
iso_string = date.isoformat()
# Very specific format for dayone, if the 'Z' is not in the
# correct positions the entries will not show up in dayone at all.
return iso_string + 'Z'
# Make all services available from this level
for service_name in AVAILABLE_SERVICES:
service = get_service_module(service_name)
<commit_msg>Add hack for broken timezone support in dayone<commit_after>"""Common services code"""
AVAILABLE_SERVICES = ['habit_list', 'idonethis', 'nikeplus']
def get_service_module(service_name):
"""Import given service from dayonetools.services package"""
import importlib
services_pkg = 'dayonetools.services'
module = '%s.%s' % (services_pkg, service_name)
return importlib.import_module(module)
def convert_to_dayone_date_string(day_str):
"""
Convert given date in 'yyyy-mm-dd' format into dayone accepted format of
iso8601
The timestamp will match midnight but year, month, and day will be replaced
with given arguments.
"""
year, month, day = day_str.split('-')
from datetime import datetime
now = datetime.utcnow()
# FIXME: The current version of day one does not support timezone data
# correctly. So, if we enter midnight here then every entry is off by a
# day.
# Don't know the hour, minute, etc. so just assume midnight
date = now.replace(year=int(year),
month=int(month),
day=int(day),
minute=00,
hour=10,
second=0,
microsecond=0)
iso_string = date.isoformat()
# Very specific format for dayone, if the 'Z' is not in the
# correct positions the entries will not show up in dayone at all.
return iso_string + 'Z'
# Make all services available from this level
for service_name in AVAILABLE_SERVICES:
service = get_service_module(service_name)
|
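The dayonetools record pins the entry time to 10:00 UTC to dodge Day One's broken timezone handling. The 'Z'-suffixed string itself can be produced from an aware datetime instead of string concatenation; a hedged sketch follows, keeping the record's hour offset as a parameter.

```python
# Hypothetical construction of the Day One timestamp from an aware UTC
# datetime. The hour=10 default reproduces the record's workaround; it
# is that project's choice, not a Day One requirement vouched for here.
from datetime import datetime, timezone


def dayone_timestamp(day_str, hour=10):
    year, month, day = (int(part) for part in day_str.split('-'))
    moment = datetime(year, month, day, hour, 0, 0, tzinfo=timezone.utc)
    # Day One wants e.g. 2013-05-01T10:00:00Z, with a literal trailing Z.
    return moment.strftime('%Y-%m-%dT%H:%M:%SZ')


print(dayone_timestamp('2013-05-01'))  # 2013-05-01T10:00:00Z
```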
4a5e6373692798eb4d48c294d3262c93902b27de
|
zou/app/blueprints/crud/schedule_item.py
|
zou/app/blueprints/crud/schedule_item.py
|
from zou.app.models.schedule_item import ScheduleItem
from .base import BaseModelResource, BaseModelsResource
class ScheduleItemsResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, ScheduleItem)
class ScheduleItemResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, ScheduleItem)
def update_data(self, data, instance_id):
if isinstance(data.get("man_days", None), str):
data.pop("man_days", None)
for field in self.protected_fields:
data.pop(field, None)
return data
|
from zou.app.models.schedule_item import ScheduleItem
from .base import BaseModelResource, BaseModelsResource
from zou.app.services import user_service
class ScheduleItemsResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, ScheduleItem)
class ScheduleItemResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, ScheduleItem)
def check_update_permissions(self, instance, data):
return user_service.check_manager_project_access(instance["project_id"])
def update_data(self, data, instance_id):
if isinstance(data.get("man_days", None), str):
data.pop("man_days", None)
for field in self.protected_fields:
data.pop(field, None)
return data
|
Allow schedule edition by supervisors
|
[schedule] Allow schedule edition by supervisors
|
Python
|
agpl-3.0
|
cgwire/zou
|
from zou.app.models.schedule_item import ScheduleItem
from .base import BaseModelResource, BaseModelsResource
class ScheduleItemsResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, ScheduleItem)
class ScheduleItemResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, ScheduleItem)
def update_data(self, data, instance_id):
if isinstance(data.get("man_days", None), str):
data.pop("man_days", None)
for field in self.protected_fields:
data.pop(field, None)
return data
[schedule] Allow schedule edition by supervisors
|
from zou.app.models.schedule_item import ScheduleItem
from .base import BaseModelResource, BaseModelsResource
from zou.app.services import user_service
class ScheduleItemsResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, ScheduleItem)
class ScheduleItemResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, ScheduleItem)
def check_update_permissions(self, instance, data):
return user_service.check_manager_project_access(instance["project_id"])
def update_data(self, data, instance_id):
if isinstance(data.get("man_days", None), str):
data.pop("man_days", None)
for field in self.protected_fields:
data.pop(field, None)
return data
|
<commit_before>from zou.app.models.schedule_item import ScheduleItem
from .base import BaseModelResource, BaseModelsResource
class ScheduleItemsResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, ScheduleItem)
class ScheduleItemResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, ScheduleItem)
def update_data(self, data, instance_id):
if isinstance(data.get("man_days", None), str):
data.pop("man_days", None)
for field in self.protected_fields:
data.pop(field, None)
return data
<commit_msg>[schedule] Allow schedule edition by supervisors<commit_after>
|
from zou.app.models.schedule_item import ScheduleItem
from .base import BaseModelResource, BaseModelsResource
from zou.app.services import user_service
class ScheduleItemsResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, ScheduleItem)
class ScheduleItemResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, ScheduleItem)
def check_update_permissions(self, instance, data):
return user_service.check_manager_project_access(instance["project_id"])
def update_data(self, data, instance_id):
if isinstance(data.get("man_days", None), str):
data.pop("man_days", None)
for field in self.protected_fields:
data.pop(field, None)
return data
|
from zou.app.models.schedule_item import ScheduleItem
from .base import BaseModelResource, BaseModelsResource
class ScheduleItemsResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, ScheduleItem)
class ScheduleItemResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, ScheduleItem)
def update_data(self, data, instance_id):
if isinstance(data.get("man_days", None), str):
data.pop("man_days", None)
for field in self.protected_fields:
data.pop(field, None)
return data
[schedule] Allow schedule edition by supervisorsfrom zou.app.models.schedule_item import ScheduleItem
from .base import BaseModelResource, BaseModelsResource
from zou.app.services import user_service
class ScheduleItemsResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, ScheduleItem)
class ScheduleItemResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, ScheduleItem)
def check_update_permissions(self, instance, data):
return user_service.check_manager_project_access(instance["project_id"])
def update_data(self, data, instance_id):
if isinstance(data.get("man_days", None), str):
data.pop("man_days", None)
for field in self.protected_fields:
data.pop(field, None)
return data
|
<commit_before>from zou.app.models.schedule_item import ScheduleItem
from .base import BaseModelResource, BaseModelsResource
class ScheduleItemsResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, ScheduleItem)
class ScheduleItemResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, ScheduleItem)
def update_data(self, data, instance_id):
if isinstance(data.get("man_days", None), str):
data.pop("man_days", None)
for field in self.protected_fields:
data.pop(field, None)
return data
<commit_msg>[schedule] Allow schedule edition by supervisors<commit_after>from zou.app.models.schedule_item import ScheduleItem
from .base import BaseModelResource, BaseModelsResource
from zou.app.services import user_service
class ScheduleItemsResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, ScheduleItem)
class ScheduleItemResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, ScheduleItem)
def check_update_permissions(self, instance, data):
return user_service.check_manager_project_access(instance["project_id"])
def update_data(self, data, instance_id):
if isinstance(data.get("man_days", None), str):
data.pop("man_days", None)
for field in self.protected_fields:
data.pop(field, None)
return data
|
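A hedged sketch of where the new check_update_permissions hook fires. This
record does not show BaseModelResource, so the put() below is an assumption
about the call order, not zou's actual code:

class BaseModelResource:
    def put(self, instance_id):
        # Hypothetical flow: fetch, authorize, sanitize, persist.
        instance = self.get_model_or_404(instance_id)    # assumed helper
        data = self.get_arguments()                      # assumed helper
        self.check_update_permissions(instance, data)    # raises if denied
        data = self.update_data(data, instance_id)
        return self.update_instance(instance_id, data)   # assumed helper

The base hook presumably allows everything; overriding it, as the schedule
resource now does, is how a subclass narrows who may edit.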
fb5bc5d789986df53844d8f0620db95f349e4096
|
sch/__init__.py
|
sch/__init__.py
|
import os
if hasattr(os, 'add_dll_directory'):
os.add_dll_directory("$<TARGET_FILE_DIR:sch-core::sch-core>")
from . sch import *
|
import os
if hasattr(os, 'add_dll_directory'):
os.add_dll_directory("$<TARGET_FILE_DIR:sch-core::sch-core>")
os.add_dll_directory("$<TARGET_FILE_DIR:Boost::serialization>")
from . sch import *
|
Add the Boost DLL directory to the search path as well
|
Add the Boost DLL directory to the search path as well
|
Python
|
bsd-2-clause
|
jrl-umi3218/sch-core-python,jrl-umi3218/sch-core-python
|
import os
if hasattr(os, 'add_dll_directory'):
os.add_dll_directory("$<TARGET_FILE_DIR:sch-core::sch-core>")
from . sch import *
Add the Boost DLL directory to the search path as well
|
import os
if hasattr(os, 'add_dll_directory'):
os.add_dll_directory("$<TARGET_FILE_DIR:sch-core::sch-core>")
os.add_dll_directory("$<TARGET_FILE_DIR:Boost::serialization>")
from . sch import *
|
<commit_before>import os
if hasattr(os, 'add_dll_directory'):
os.add_dll_directory("$<TARGET_FILE_DIR:sch-core::sch-core>")
from . sch import *
<commit_msg>Add the Boost DLL directory to the search path as well<commit_after>
|
import os
if hasattr(os, 'add_dll_directory'):
os.add_dll_directory("$<TARGET_FILE_DIR:sch-core::sch-core>")
os.add_dll_directory("$<TARGET_FILE_DIR:Boost::serialization>")
from . sch import *
|
import os
if hasattr(os, 'add_dll_directory'):
os.add_dll_directory("$<TARGET_FILE_DIR:sch-core::sch-core>")
from . sch import *
Add the Boost DLL directory to the search path as wellimport os
if hasattr(os, 'add_dll_directory'):
os.add_dll_directory("$<TARGET_FILE_DIR:sch-core::sch-core>")
os.add_dll_directory("$<TARGET_FILE_DIR:Boost::serialization>")
from . sch import *
|
<commit_before>import os
if hasattr(os, 'add_dll_directory'):
os.add_dll_directory("$<TARGET_FILE_DIR:sch-core::sch-core>")
from . sch import *
<commit_msg>Add the Boost DLL directory to the search path as well<commit_after>import os
if hasattr(os, 'add_dll_directory'):
os.add_dll_directory("$<TARGET_FILE_DIR:sch-core::sch-core>")
os.add_dll_directory("$<TARGET_FILE_DIR:Boost::serialization>")
from . sch import *
|
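The generator expressions in that file are CMake placeholders expanded when
the package is configured; by runtime the calls carry plain paths. A sketch
with invented expanded paths:

import os

if hasattr(os, 'add_dll_directory'):  # only exists on Windows, Python >= 3.8
    # Every directory holding a DLL the extension links against must be
    # registered; since Python 3.8, PATH alone is no longer searched.
    for dll_dir in (r'C:\build\sch-core\bin', r'C:\local\boost\lib'):
        os.add_dll_directory(dll_dir)

(The real module then does "from . sch import *", exactly as above.)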
a2d6c32305577640bcd111fa1011bea61d7ca9e7
|
packages/mono-llvm-2-10.py
|
packages/mono-llvm-2-10.py
|
GitHubTarballPackage ('mono', 'llvm', '2.10', '943edbc1a93df204d687d82d34d2b2bdf9978f4e',
configure = 'CFLAGS="-m32" CPPFLAGS="-m32" CXXFLAGS="-m32" LDFLAGS="-m32" ./configure --prefix="%{prefix}" --enable-optimized --enable-targets="x86 x86_64" --target=i386-apple-darwin10.8.0',
override_properties = { 'make': 'make' }
)
|
GitHubTarballPackage ('mono', 'llvm', '2.10', '943edbc1a93df204d687d82d34d2b2bdf9978f4e',
configure = './configure --prefix="%{prefix}" --enable-optimized --enable-targets="x86 x86_64" --target=i386-apple-darwin10.8.0',
override_properties = { 'make': 'make' }
)
|
Fix llvm so it doesn't corrupt the env when configuring itself
|
Fix llvm so it doesn't corrupt the env when configuring itself
|
Python
|
mit
|
BansheeMediaPlayer/bockbuild,mono/bockbuild,mono/bockbuild,BansheeMediaPlayer/bockbuild,BansheeMediaPlayer/bockbuild
|
GitHubTarballPackage ('mono', 'llvm', '2.10', '943edbc1a93df204d687d82d34d2b2bdf9978f4e',
configure = 'CFLAGS="-m32" CPPFLAGS="-m32" CXXFLAGS="-m32" LDFLAGS="-m32" ./configure --prefix="%{prefix}" --enable-optimized --enable-targets="x86 x86_64" --target=i386-apple-darwin10.8.0',
override_properties = { 'make': 'make' }
)
Fix llvm so it doesn't corrupt the env when configuring itself
|
GitHubTarballPackage ('mono', 'llvm', '2.10', '943edbc1a93df204d687d82d34d2b2bdf9978f4e',
configure = './configure --prefix="%{prefix}" --enable-optimized --enable-targets="x86 x86_64" --target=i386-apple-darwin10.8.0',
override_properties = { 'make': 'make' }
)
|
<commit_before>GitHubTarballPackage ('mono', 'llvm', '2.10', '943edbc1a93df204d687d82d34d2b2bdf9978f4e',
configure = 'CFLAGS="-m32" CPPFLAGS="-m32" CXXFLAGS="-m32" LDFLAGS="-m32" ./configure --prefix="%{prefix}" --enable-optimized --enable-targets="x86 x86_64" --target=i386-apple-darwin10.8.0',
override_properties = { 'make': 'make' }
)
<commit_msg>Fix llvm so it doesn't corrupt the env when configuring itself<commit_after>
|
GitHubTarballPackage ('mono', 'llvm', '2.10', '943edbc1a93df204d687d82d34d2b2bdf9978f4e',
configure = './configure --prefix="%{prefix}" --enable-optimized --enable-targets="x86 x86_64" --target=i386-apple-darwin10.8.0',
override_properties = { 'make': 'make' }
)
|
GitHubTarballPackage ('mono', 'llvm', '2.10', '943edbc1a93df204d687d82d34d2b2bdf9978f4e',
configure = 'CFLAGS="-m32" CPPFLAGS="-m32" CXXFLAGS="-m32" LDFLAGS="-m32" ./configure --prefix="%{prefix}" --enable-optimized --enable-targets="x86 x86_64" --target=i386-apple-darwin10.8.0',
override_properties = { 'make': 'make' }
)
Fix llvm so it doesn't corrupt the env when configuring itselfGitHubTarballPackage ('mono', 'llvm', '2.10', '943edbc1a93df204d687d82d34d2b2bdf9978f4e',
configure = './configure --prefix="%{prefix}" --enable-optimized --enable-targets="x86 x86_64" --target=i386-apple-darwin10.8.0',
override_properties = { 'make': 'make' }
)
|
<commit_before>GitHubTarballPackage ('mono', 'llvm', '2.10', '943edbc1a93df204d687d82d34d2b2bdf9978f4e',
configure = 'CFLAGS="-m32" CPPFLAGS="-m32" CXXFLAGS="-m32" LDFLAGS="-m32" ./configure --prefix="%{prefix}" --enable-optimized --enable-targets="x86 x86_64" --target=i386-apple-darwin10.8.0',
override_properties = { 'make': 'make' }
)
<commit_msg>Fix llvm so it doesn't corrupt the env when configuring itself<commit_after>GitHubTarballPackage ('mono', 'llvm', '2.10', '943edbc1a93df204d687d82d34d2b2bdf9978f4e',
configure = './configure --prefix="%{prefix}" --enable-optimized --enable-targets="x86 x86_64" --target=i386-apple-darwin10.8.0',
override_properties = { 'make': 'make' }
)
|
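For contrast, autoconf-style configure scripts also accept flags as
arguments, which keeps them out of the caller's environment. Whether llvm
2.10's configure honored that form is not established by this record, and
the bockbuild fix above simply drops the -m32 flags instead; the line below
is illustrative only:

configure = './configure CFLAGS="-m32" CXXFLAGS="-m32" LDFLAGS="-m32" --prefix="%{prefix}"'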
1343ca5b426132c0a9c916abd516a69ddd5f3aa4
|
cle/__init__.py
|
cle/__init__.py
|
"""
CLE is an extensible binary loader. Its main goal is to take an executable program and any libraries it depends on and
produce an address space where that program is loaded and ready to run.
The primary interface to CLE is the Loader class.
"""
__version__ = (9, 0, "gitrolling")
if bytes is str:
raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.")
import logging
logging.getLogger(name=__name__).addHandler(logging.NullHandler())
# pylint: disable=wildcard-import
from . import utils
from .loader import *
from .memory import *
from .errors import *
from .backends import *
from .backends.tls import *
from .backends.externs import *
from .patched_stream import *
from .gdb import *
|
"""
CLE is an extensible binary loader. Its main goal is to take an executable program and any libraries it depends on and
produce an address space where that program is loaded and ready to run.
The primary interface to CLE is the Loader class.
"""
__version__ = (9, 1, "gitrolling")
if bytes is str:
raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.")
import logging
logging.getLogger(name=__name__).addHandler(logging.NullHandler())
# pylint: disable=wildcard-import
from . import utils
from .loader import *
from .memory import *
from .errors import *
from .backends import *
from .backends.tls import *
from .backends.externs import *
from .patched_stream import *
from .gdb import *
|
Fix version tuple for 9.1
|
Fix version tuple for 9.1 [ci skip]
|
Python
|
bsd-2-clause
|
angr/cle
|
"""
CLE is an extensible binary loader. Its main goal is to take an executable program and any libraries it depends on and
produce an address space where that program is loaded and ready to run.
The primary interface to CLE is the Loader class.
"""
__version__ = (9, 0, "gitrolling")
if bytes is str:
raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.")
import logging
logging.getLogger(name=__name__).addHandler(logging.NullHandler())
# pylint: disable=wildcard-import
from . import utils
from .loader import *
from .memory import *
from .errors import *
from .backends import *
from .backends.tls import *
from .backends.externs import *
from .patched_stream import *
from .gdb import *
Fix version tuple for 9.1 [ci skip]
|
"""
CLE is an extensible binary loader. Its main goal is to take an executable program and any libraries it depends on and
produce an address space where that program is loaded and ready to run.
The primary interface to CLE is the Loader class.
"""
__version__ = (9, 1, "gitrolling")
if bytes is str:
raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.")
import logging
logging.getLogger(name=__name__).addHandler(logging.NullHandler())
# pylint: disable=wildcard-import
from . import utils
from .loader import *
from .memory import *
from .errors import *
from .backends import *
from .backends.tls import *
from .backends.externs import *
from .patched_stream import *
from .gdb import *
|
<commit_before>"""
CLE is an extensible binary loader. Its main goal is to take an executable program and any libraries it depends on and
produce an address space where that program is loaded and ready to run.
The primary interface to CLE is the Loader class.
"""
__version__ = (9, 0, "gitrolling")
if bytes is str:
raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.")
import logging
logging.getLogger(name=__name__).addHandler(logging.NullHandler())
# pylint: disable=wildcard-import
from . import utils
from .loader import *
from .memory import *
from .errors import *
from .backends import *
from .backends.tls import *
from .backends.externs import *
from .patched_stream import *
from .gdb import *
<commit_msg>Fix version tuple for 9.1 [ci skip]<commit_after>
|
"""
CLE is an extensible binary loader. Its main goal is to take an executable program and any libraries it depends on and
produce an address space where that program is loaded and ready to run.
The primary interface to CLE is the Loader class.
"""
__version__ = (9, 1, "gitrolling")
if bytes is str:
raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.")
import logging
logging.getLogger(name=__name__).addHandler(logging.NullHandler())
# pylint: disable=wildcard-import
from . import utils
from .loader import *
from .memory import *
from .errors import *
from .backends import *
from .backends.tls import *
from .backends.externs import *
from .patched_stream import *
from .gdb import *
|
"""
CLE is an extensible binary loader. Its main goal is to take an executable program and any libraries it depends on and
produce an address space where that program is loaded and ready to run.
The primary interface to CLE is the Loader class.
"""
__version__ = (9, 0, "gitrolling")
if bytes is str:
raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.")
import logging
logging.getLogger(name=__name__).addHandler(logging.NullHandler())
# pylint: disable=wildcard-import
from . import utils
from .loader import *
from .memory import *
from .errors import *
from .backends import *
from .backends.tls import *
from .backends.externs import *
from .patched_stream import *
from .gdb import *
Fix version tuple for 9.1 [ci skip]"""
CLE is an extensible binary loader. Its main goal is to take an executable program and any libraries it depends on and
produce an address space where that program is loaded and ready to run.
The primary interface to CLE is the Loader class.
"""
__version__ = (9, 1, "gitrolling")
if bytes is str:
raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.")
import logging
logging.getLogger(name=__name__).addHandler(logging.NullHandler())
# pylint: disable=wildcard-import
from . import utils
from .loader import *
from .memory import *
from .errors import *
from .backends import *
from .backends.tls import *
from .backends.externs import *
from .patched_stream import *
from .gdb import *
|
<commit_before>"""
CLE is an extensible binary loader. Its main goal is to take an executable program and any libraries it depends on and
produce an address space where that program is loaded and ready to run.
The primary interface to CLE is the Loader class.
"""
__version__ = (9, 0, "gitrolling")
if bytes is str:
raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.")
import logging
logging.getLogger(name=__name__).addHandler(logging.NullHandler())
# pylint: disable=wildcard-import
from . import utils
from .loader import *
from .memory import *
from .errors import *
from .backends import *
from .backends.tls import *
from .backends.externs import *
from .patched_stream import *
from .gdb import *
<commit_msg>Fix version tuple for 9.1 [ci skip]<commit_after>"""
CLE is an extensible binary loader. Its main goal is to take an executable program and any libraries it depends on and
produce an address space where that program is loaded and ready to run.
The primary interface to CLE is the Loader class.
"""
__version__ = (9, 1, "gitrolling")
if bytes is str:
raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.")
import logging
logging.getLogger(name=__name__).addHandler(logging.NullHandler())
# pylint: disable=wildcard-import
from . import utils
from .loader import *
from .memory import *
from .errors import *
from .backends import *
from .backends.tls import *
from .backends.externs import *
from .patched_stream import *
from .gdb import *
|
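A quick illustration of how a version tuple like this typically becomes a
display string (whether cle itself formats it exactly this way is not shown
in this record):

version_tuple = (9, 1, "gitrolling")
version_string = '.'.join(str(part) for part in version_tuple)
print(version_string)  # 9.1.gitrolling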
6910fe840a2b54d7848adcaa032d4303aee0ceec
|
dedupe/convenience.py
|
dedupe/convenience.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
data_list = data.values()
random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
return tuple((data_list[k1], data_list[k2]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
data_list = data.values()
random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
return tuple((data_list[int(k1)], data_list[int(k2)]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
|
Index of the list should be an int
|
Index of the list should be an int
|
Python
|
mit
|
nmiranda/dedupe,davidkunio/dedupe,tfmorris/dedupe,datamade/dedupe,nmiranda/dedupe,01-/dedupe,pombredanne/dedupe,davidkunio/dedupe,dedupeio/dedupe,neozhangthe1/dedupe,neozhangthe1/dedupe,01-/dedupe,datamade/dedupe,pombredanne/dedupe,dedupeio/dedupe,dedupeio/dedupe-examples,tfmorris/dedupe
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
data_list = data.values()
random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
return tuple((data_list[k1], data_list[k2]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
Index of the list should be an int
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
data_list = data.values()
random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
return tuple((data_list[int(k1)], data_list[int(k2)]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
data_list = data.values()
random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
return tuple((data_list[k1], data_list[k2]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
<commit_msg>Index of the list should be an int<commit_after>
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
data_list = data.values()
random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
return tuple((data_list[int(k1)], data_list[int(k2)]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
data_list = data.values()
random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
return tuple((data_list[k1], data_list[k2]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
Index of the list should be an int#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
data_list = data.values()
random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
return tuple((data_list[int(k1)], data_list[int(k2)]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
data_list = data.values()
random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
return tuple((data_list[k1], data_list[k2]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
<commit_msg>Index of the list should be an int<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convenience functions for in memory deduplication
"""
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
data_list = data.values()
random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
return tuple((data_list[int(k1)], data_list[int(k2)]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
|
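The int() casts matter because the sampled pair indices evidently arrive as
numpy scalars — an inference from the patch, not something this record
states — and Python rejects float-like scalars as list indices:

import numpy

items = ['a', 'b', 'c']
k = numpy.float64(1)
try:
    items[k]             # numpy float scalar: not a valid list index
except TypeError as err:
    print(err)           # list indices must be integers ...
print(items[int(k)])     # 'b' -- the cast the fix adds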
1d3bd1fe50806180c8fb6889b1bed28f602608d6
|
couchdb/tests/__main__.py
|
couchdb/tests/__main__.py
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import unittest
from couchdb.tests import client, couch_tests, design, couchhttp, \
multipart, mapping, view, package, tools
def suite():
suite = unittest.TestSuite()
suite.addTest(client.suite())
suite.addTest(design.suite())
suite.addTest(couchhttp.suite())
suite.addTest(multipart.suite())
suite.addTest(mapping.suite())
suite.addTest(view.suite())
suite.addTest(couch_tests.suite())
suite.addTest(package.suite())
suite.addTest(tools.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import unittest
from couchdb.tests import client, couch_tests, design, couchhttp, \
multipart, mapping, view, package, tools, \
loader
def suite():
suite = unittest.TestSuite()
suite.addTest(client.suite())
suite.addTest(design.suite())
suite.addTest(couchhttp.suite())
suite.addTest(multipart.suite())
suite.addTest(mapping.suite())
suite.addTest(view.suite())
suite.addTest(couch_tests.suite())
suite.addTest(package.suite())
suite.addTest(tools.suite())
suite.addTest(loader.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
Include loader tests in test suite
|
Include loader tests in test suite
|
Python
|
bsd-3-clause
|
djc/couchdb-python,djc/couchdb-python
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import unittest
from couchdb.tests import client, couch_tests, design, couchhttp, \
multipart, mapping, view, package, tools
def suite():
suite = unittest.TestSuite()
suite.addTest(client.suite())
suite.addTest(design.suite())
suite.addTest(couchhttp.suite())
suite.addTest(multipart.suite())
suite.addTest(mapping.suite())
suite.addTest(view.suite())
suite.addTest(couch_tests.suite())
suite.addTest(package.suite())
suite.addTest(tools.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
Include loader tests in test suite
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import unittest
from couchdb.tests import client, couch_tests, design, couchhttp, \
multipart, mapping, view, package, tools, \
loader
def suite():
suite = unittest.TestSuite()
suite.addTest(client.suite())
suite.addTest(design.suite())
suite.addTest(couchhttp.suite())
suite.addTest(multipart.suite())
suite.addTest(mapping.suite())
suite.addTest(view.suite())
suite.addTest(couch_tests.suite())
suite.addTest(package.suite())
suite.addTest(tools.suite())
suite.addTest(loader.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
<commit_before># -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import unittest
from couchdb.tests import client, couch_tests, design, couchhttp, \
multipart, mapping, view, package, tools
def suite():
suite = unittest.TestSuite()
suite.addTest(client.suite())
suite.addTest(design.suite())
suite.addTest(couchhttp.suite())
suite.addTest(multipart.suite())
suite.addTest(mapping.suite())
suite.addTest(view.suite())
suite.addTest(couch_tests.suite())
suite.addTest(package.suite())
suite.addTest(tools.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
<commit_msg>Include loader tests in test suite<commit_after>
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import unittest
from couchdb.tests import client, couch_tests, design, couchhttp, \
multipart, mapping, view, package, tools, \
loader
def suite():
suite = unittest.TestSuite()
suite.addTest(client.suite())
suite.addTest(design.suite())
suite.addTest(couchhttp.suite())
suite.addTest(multipart.suite())
suite.addTest(mapping.suite())
suite.addTest(view.suite())
suite.addTest(couch_tests.suite())
suite.addTest(package.suite())
suite.addTest(tools.suite())
suite.addTest(loader.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import unittest
from couchdb.tests import client, couch_tests, design, couchhttp, \
multipart, mapping, view, package, tools
def suite():
suite = unittest.TestSuite()
suite.addTest(client.suite())
suite.addTest(design.suite())
suite.addTest(couchhttp.suite())
suite.addTest(multipart.suite())
suite.addTest(mapping.suite())
suite.addTest(view.suite())
suite.addTest(couch_tests.suite())
suite.addTest(package.suite())
suite.addTest(tools.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
Include loader tests in test suite# -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import unittest
from couchdb.tests import client, couch_tests, design, couchhttp, \
multipart, mapping, view, package, tools, \
loader
def suite():
suite = unittest.TestSuite()
suite.addTest(client.suite())
suite.addTest(design.suite())
suite.addTest(couchhttp.suite())
suite.addTest(multipart.suite())
suite.addTest(mapping.suite())
suite.addTest(view.suite())
suite.addTest(couch_tests.suite())
suite.addTest(package.suite())
suite.addTest(tools.suite())
suite.addTest(loader.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
<commit_before># -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import unittest
from couchdb.tests import client, couch_tests, design, couchhttp, \
multipart, mapping, view, package, tools
def suite():
suite = unittest.TestSuite()
suite.addTest(client.suite())
suite.addTest(design.suite())
suite.addTest(couchhttp.suite())
suite.addTest(multipart.suite())
suite.addTest(mapping.suite())
suite.addTest(view.suite())
suite.addTest(couch_tests.suite())
suite.addTest(package.suite())
suite.addTest(tools.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
<commit_msg>Include loader tests in test suite<commit_after># -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import unittest
from couchdb.tests import client, couch_tests, design, couchhttp, \
multipart, mapping, view, package, tools, \
loader
def suite():
suite = unittest.TestSuite()
suite.addTest(client.suite())
suite.addTest(design.suite())
suite.addTest(couchhttp.suite())
suite.addTest(multipart.suite())
suite.addTest(mapping.suite())
suite.addTest(view.suite())
suite.addTest(couch_tests.suite())
suite.addTest(package.suite())
suite.addTest(tools.suite())
suite.addTest(loader.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
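Each module named in that import list follows the package's suite()
convention, and the new loader module presumably does too. Its real contents
are not part of this record, so the stand-in below is only schematic:

import unittest

class LoaderTestCase(unittest.TestCase):
    def test_placeholder(self):
        self.assertTrue(True)

def suite():
    s = unittest.TestSuite()
    s.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(LoaderTestCase))
    return s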
bf18c698596be9de094d94cdc52d95186fc37e6a
|
configReader.py
|
configReader.py
|
class ConfigReader():
def __init__(self):
self.keys={}
#Read Keys from file
def readKeys(self):
keysFile=open("config.txt","r")
fileLines=keysFile.readlines()
keysFile.close()
self.keys.clear()
for item in fileLines:
#If last char is \n
if (item[-1]=='\n'):
item=item[:-1]
#If a commented line
if (item[0]=='#'):
continue
#If a new line is the first char
elif (item[0]=='\n'):
continue
else:
#Get Position of equal sign
pos=item.index('=')
#Name of the key is [0:pos], Value of the key is [pos+1:-1] (Stripping the \n char at the end)
self.keys[item[0:pos]]=item[pos+1:]
#Return the keys, read allows you to get the keys without re-reading the file.
def getKeys(self,read=True):
if read:
self.readKeys()
return self.keysFile
|
class ConfigReader():
def __init__(self,name="config.txt"):
self.keys={}
self.name = name
#Read Keys from file
def readKeys(self):
keysFile=open(self.name,"r")
fileLines=keysFile.readlines()
keysFile.close()
self.keys.clear()
for item in fileLines:
#If last char is \n
if (item[-1]=='\n'):
item=item[:-1]
#If a commented line
if (item[0]=='#'):
continue
#If a new line is the first char
elif (item[0]=='\n'):
continue
else:
#Get Position of equal sign
pos=item.find('=')
if pos != -1:
#Name of the key is [0:pos], Value of the key is [pos+1:-1] (Stripping the \n char at the end)
self.keys[item[0:pos]]=item[pos+1:]
#Return the keys, read allows you to get the keys without re-reading the file.
def getKeys(self,read=True):
if read:
self.readKeys()
return self.keys
|
Add mode for changing what file it is, and fix a bug where a line without an equals wouldn't work
|
Add mode for changing what file it is, and fix a bug where a line without an equals wouldn't work
|
Python
|
mit
|
ollien/PyConfigReader
|
class ConfigReader():
def __init__(self):
self.keys={}
#Read Keys from file
def readKeys(self):
keysFile=open("config.txt","r")
fileLines=keysFile.readlines()
keysFile.close()
self.keys.clear()
for item in fileLines:
#If last char is \n
if (item[-1]=='\n'):
item=item[:-1]
#If a commented line
if (item[0]=='#'):
continue
#If a new line is the first char
elif (item[0]=='\n'):
continue
else:
#Get Position of equal sign
pos=item.index('=')
#Name of the key is [0:pos], Value of the key is [pos+1:-1] (Stripping the \n char at the end)
self.keys[item[0:pos]]=item[pos+1:]
#Return the keys, read allows you to get the keys without re-reading the file.
def getKeys(self,read=True):
if read:
self.readKeys()
return self.keysFile
Add mode for changing what file it is, and fix a bug where a line without an equals wouldn't work
|
class ConfigReader():
def __init__(self,name="config.txt"):
self.keys={}
self.name = name
#Read Keys from file
def readKeys(self):
keysFile=open(self.name,"r")
fileLines=keysFile.readlines()
keysFile.close()
self.keys.clear()
for item in fileLines:
#If last char is \n
if (item[-1]=='\n'):
item=item[:-1]
#If a commented line
if (item[0]=='#'):
continue
#If a new line is the first char
elif (item[0]=='\n'):
continue
else:
#Get Position of equal sign
pos=item.find('=')
if pos != -1:
#Name of the key is [0:pos], Value of the key is [pos+1:-1] (Stripping the \n char at the end)
self.keys[item[0:pos]]=item[pos+1:]
#Return the keys, read allows you to get the keys without re-reading the file.
def getKeys(self,read=True):
if read:
self.readKeys()
return self.keys
|
<commit_before>class ConfigReader():
def __init__(self):
self.keys={}
#Read Keys from file
def readKeys(self):
keysFile=open("config.txt","r")
fileLines=keysFile.readlines()
keysFile.close()
self.keys.clear()
for item in fileLines:
#If last char is \n
if (item[-1]=='\n'):
item=item[:-1]
#If a commented line
if (item[0]=='#'):
continue
#If a new line is the first char
elif (item[0]=='\n'):
continue
else:
#Get Position of equal sign
pos=item.index('=')
#Name of the key is [0:pos], Value of the key is [pos+1:-1] (Stripping the \n char at the end)
self.keys[item[0:pos]]=item[pos+1:]
#Return the keys, read allows you to get the keys without re-reading the file.
def getKeys(self,read=True):
if read:
self.readKeys()
return self.keysFile
<commit_msg>Add mode for changing what file it is, and fix a bug where a line without an equals wouldn't work<commit_after>
|
class ConfigReader():
def __init__(self,name="config.txt"):
self.keys={}
self.name = name
#Read Keys from file
def readKeys(self):
keysFile=open(self.name,"r")
fileLines=keysFile.readlines()
keysFile.close()
self.keys.clear()
for item in fileLines:
#If last char is \n
if (item[-1]=='\n'):
item=item[:-1]
#If a commented line
if (item[0]=='#'):
continue
#If a new line is the first char
elif (item[0]=='\n'):
continue
else:
#Get Position of equal sign
pos=item.find('=')
if pos != -1:
#Name of the key is [0:pos], Value of the key is [pos+1:-1] (Stripping the \n char at the end)
self.keys[item[0:pos]]=item[pos+1:]
#Return the keys, read allows you to get the keys without re-reading the file.
def getKeys(self,read=True):
if read:
self.readKeys()
return self.keys
|
class ConfigReader():
def __init__(self):
self.keys={}
#Read Keys from file
def readKeys(self):
keysFile=open("config.txt","r")
fileLines=keysFile.readlines()
keysFile.close()
self.keys.clear()
for item in fileLines:
#If last char is \n
if (item[-1]=='\n'):
item=item[:-1]
#If a commented line
if (item[0]=='#'):
continue
#If a new line is the first char
elif (item[0]=='\n'):
continue
else:
#Get Position of equal sign
pos=item.index('=')
#Name of the key is [0:pos], Value of the key is [pos+1:-1] (Stripping the \n char at the end)
self.keys[item[0:pos]]=item[pos+1:]
#Return the keys, read allows you to get the keys without re-reading the file.
def getKeys(self,read=True):
if read:
self.readKeys()
return self.keysFile
Add mode for changing what file it is, and fix a bug where a line without an equals wouldn't workclass ConfigReader():
def __init__(self,name="config.txt"):
self.keys={}
self.name = name
#Read Keys from file
def readKeys(self):
keysFile=open(self.name,"r")
fileLines=keysFile.readlines()
keysFile.close()
self.keys.clear()
for item in fileLines:
#If last char is \n
if (item[-1]=='\n'):
item=item[:-1]
#If a commented line
if (item[0]=='#'):
continue
#If a new line is the first char
elif (item[0]=='\n'):
continue
else:
#Get Position of equal sign
pos=item.find('=')
if pos != -1:
#Name of the key is [0:pos], Value of the key is [pos+1:-1] (Stripping the \n char at the end)
self.keys[item[0:pos]]=item[pos+1:]
#Return the keys, read allows you to get the keys without re-reading the file.
def getKeys(self,read=True):
if read:
self.readKeys()
return self.keys
|
<commit_before>class ConfigReader():
def __init__(self):
self.keys={}
#Read Keys from file
def readKeys(self):
keysFile=open("config.txt","r")
fileLines=keysFile.readlines()
keysFile.close()
self.keys.clear()
for item in fileLines:
#If last char is \n
if (item[-1]=='\n'):
item=item[:-1]
#If a commented line
if (item[0]=='#'):
continue
#If a new line is the first char
elif (item[0]=='\n'):
continue
else:
#Get Position of equal sign
pos=item.index('=')
#Name of the key is [0:pos], Value of the key is [pos+1:-1] (Stripping the \n char at the end)
self.keys[item[0:pos]]=item[pos+1:]
#Return the keys, read allows you to get the keys without re-reading the file.
def getKeys(self,read=True):
if read:
self.readKeys()
return self.keysFile
<commit_msg>Add mode for changing what file it is, and fix a bug where a line without an equals wouldn't work<commit_after>class ConfigReader():
def __init__(self,name="config.txt"):
self.keys={}
self.name = name
#Read Keys from file
def readKeys(self):
keysFile=open(self.name,"r")
fileLines=keysFile.readlines()
keysFile.close()
self.keys.clear()
for item in fileLines:
#If last char is \n
if (item[-1]=='\n'):
item=item[:-1]
#If a commented line
if (item[0]=='#'):
continue
#If a new line is the first char
elif (item[0]=='\n'):
continue
else:
#Get Position of equal sign
pos=item.find('=')
if pos != -1:
#Name of the key is [0:pos], Value of the key is [pos+1:-1] (Stripping the \n char at the end)
self.keys[item[0:pos]]=item[pos+1:]
#Return the keys, read allows you to get the keys without re-reading the file.
def getKeys(self,read=True):
if read:
self.readKeys()
return self.keys
|
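Two points worth spelling out, with a small usage sketch (settings.txt is an
invented file name). str.index raises ValueError when '=' is missing, so a
bare flag line used to abort the whole parse; str.find returns -1, which the
new guard simply skips over.

reader = ConfigReader('settings.txt')
keys = reader.getKeys()              # re-reads the file each call
print(keys.get('hostname'))
cached = reader.getKeys(read=False)  # reuse the already-parsed dict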
4e30a58386afb5b34bd83c8115c55e5d09b8f631
|
common/views.py
|
common/views.py
|
from django.shortcuts import render
from common.models.Furniture import Furniture
from common.models.Plan import Plan
def overlay(request, floor=1):
edit_rooms = False
if request.method == 'POST':
if 'floor' in request.POST:
floor = request.POST['floor']
if 'edit_rooms' in request.POST:
edit_rooms = True
rooms = Plan.objects.filter(floor=floor)
furnitures = Furniture.objects.filter(floor=floor)
radious=10
return render(request, 'common/floor.svg', {'rooms':rooms, 'furnitures':furnitures, 'radious':radious, 'edit_rooms':edit_rooms})
|
from django.shortcuts import render
from common.models.Furniture import Furniture
from common.models.Plan import Plan
def overlay(request, floor=1):
edit_rooms = False
if request.method == 'POST':
if 'floor' in request.POST:
floor = request.POST['floor']
if 'edit_rooms' in request.POST:
edit_rooms = True
rooms = Plan.objects.select_related('room__id').filter(floor=floor)
furnitures = Furniture.objects.select_related('device').filter(floor=floor)
radious=10
return render(request, 'common/floor.svg', {'rooms':rooms, 'furnitures':furnitures, 'radious':radious, 'edit_rooms':edit_rooms})
|
Improve performance by prefetching where needed
|
Improve performance by prefetching where needed
|
Python
|
agpl-3.0
|
Pajn/RAXA-Django,Pajn/RAXA-Django
|
from django.shortcuts import render
from common.models.Furniture import Furniture
from common.models.Plan import Plan
def overlay(request, floor=1):
edit_rooms = False
if request.method == 'POST':
if 'floor' in request.POST:
floor = request.POST['floor']
if 'edit_rooms' in request.POST:
edit_rooms = True
rooms = Plan.objects.filter(floor=floor)
furnitures = Furniture.objects.filter(floor=floor)
radious=10
return render(request, 'common/floor.svg', {'rooms':rooms, 'furnitures':furnitures, 'radious':radious, 'edit_rooms':edit_rooms})Improve performance by prefetching where needed
|
from django.shortcuts import render
from common.models.Furniture import Furniture
from common.models.Plan import Plan
def overlay(request, floor=1):
edit_rooms = False
if request.method == 'POST':
if 'floor' in request.POST:
floor = request.POST['floor']
if 'edit_rooms' in request.POST:
edit_rooms = True
rooms = Plan.objects.select_related('room__id').filter(floor=floor)
furnitures = Furniture.objects.select_related('device').filter(floor=floor)
radious=10
return render(request, 'common/floor.svg', {'rooms':rooms, 'furnitures':furnitures, 'radious':radious, 'edit_rooms':edit_rooms})
|
<commit_before>from django.shortcuts import render
from common.models.Furniture import Furniture
from common.models.Plan import Plan
def overlay(request, floor=1):
edit_rooms = False
if request.method == 'POST':
if 'floor' in request.POST:
floor = request.POST['floor']
if 'edit_rooms' in request.POST:
edit_rooms = True
rooms = Plan.objects.filter(floor=floor)
furnitures = Furniture.objects.filter(floor=floor)
radious=10
return render(request, 'common/floor.svg', {'rooms':rooms, 'furnitures':furnitures, 'radious':radious, 'edit_rooms':edit_rooms})<commit_msg>Improve performance by prefetching where needed<commit_after>
|
from django.shortcuts import render
from common.models.Furniture import Furniture
from common.models.Plan import Plan
def overlay(request, floor=1):
edit_rooms = False
if request.method == 'POST':
if 'floor' in request.POST:
floor = request.POST['floor']
if 'edit_rooms' in request.POST:
edit_rooms = True
rooms = Plan.objects.select_related('room__id').filter(floor=floor)
furnitures = Furniture.objects.select_related('device').filter(floor=floor)
radious=10
return render(request, 'common/floor.svg', {'rooms':rooms, 'furnitures':furnitures, 'radious':radious, 'edit_rooms':edit_rooms})
|
from django.shortcuts import render
from common.models.Furniture import Furniture
from common.models.Plan import Plan
def overlay(request, floor=1):
edit_rooms = False
if request.method == 'POST':
if 'floor' in request.POST:
floor = request.POST['floor']
if 'edit_rooms' in request.POST:
edit_rooms = True
rooms = Plan.objects.filter(floor=floor)
furnitures = Furniture.objects.filter(floor=floor)
radious=10
return render(request, 'common/floor.svg', {'rooms':rooms, 'furnitures':furnitures, 'radious':radious, 'edit_rooms':edit_rooms})Improve performance by prefetching where neededfrom django.shortcuts import render
from common.models.Furniture import Furniture
from common.models.Plan import Plan
def overlay(request, floor=1):
edit_rooms = False
if request.method == 'POST':
if 'floor' in request.POST:
floor = request.POST['floor']
if 'edit_rooms' in request.POST:
edit_rooms = True
rooms = Plan.objects.select_related('room__id').filter(floor=floor)
furnitures = Furniture.objects.select_related('device').filter(floor=floor)
radious=10
return render(request, 'common/floor.svg', {'rooms':rooms, 'furnitures':furnitures, 'radious':radious, 'edit_rooms':edit_rooms})
|
<commit_before>from django.shortcuts import render
from common.models.Furniture import Furniture
from common.models.Plan import Plan
def overlay(request, floor=1):
edit_rooms = False
if request.method == 'POST':
if 'floor' in request.POST:
floor = request.POST['floor']
if 'edit_rooms' in request.POST:
edit_rooms = True
rooms = Plan.objects.filter(floor=floor)
furnitures = Furniture.objects.filter(floor=floor)
radious=10
return render(request, 'common/floor.svg', {'rooms':rooms, 'furnitures':furnitures, 'radious':radious, 'edit_rooms':edit_rooms})<commit_msg>Improve performance by prefetching where needed<commit_after>from django.shortcuts import render
from common.models.Furniture import Furniture
from common.models.Plan import Plan
def overlay(request, floor=1):
edit_rooms = False
if request.method == 'POST':
if 'floor' in request.POST:
floor = request.POST['floor']
if 'edit_rooms' in request.POST:
edit_rooms = True
rooms = Plan.objects.select_related('room__id').filter(floor=floor)
furnitures = Furniture.objects.select_related('device').filter(floor=floor)
radious=10
return render(request, 'common/floor.svg', {'rooms':rooms, 'furnitures':furnitures, 'radious':radious, 'edit_rooms':edit_rooms})
|
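What the select_related calls buy, in miniature (device is a foreign key per
the patch; its printable attributes are assumed):

# Before: one query for the furniture plus one extra query per row
for f in Furniture.objects.filter(floor=1):
    print(f.device)

# After: the related rows arrive in the same JOINed query
for f in Furniture.objects.select_related('device').filter(floor=1):
    print(f.device)

(select_related('room__id') is an odd spelling — select_related normally
takes relation names such as 'room' — but it is reproduced here as
committed.)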
e7cbed6df650851b2d44bf48f2b94291822f0b91
|
recipes/sos-notebook/run_test.py
|
recipes/sos-notebook/run_test.py
|
# Test that sos kernel is installed
import jupyter_client
try:
jupyter_client.kernelspec.get_kernel_spec('sos')
except jupyter_client.kernelspec.NoSuchKernel:
print('sos kernel was not installed')
print('The following kernels are installed:')
print('jupyter_client.kernelspec.find_kernel_specs()')
print(jupyter_client.kernelspec.find_kernel_specs())
|
# Test that sos kernel is installed
import jupyter_client
try:
jupyter_client.kernelspec.get_kernel_spec('sos')
except jupyter_client.kernelspec.NoSuchKernel:
print('sos kernel was not installed')
print('The following kernels are installed:')
print('jupyter_client.kernelspec.find_kernel_specs()')
print(jupyter_client.kernelspec.find_kernel_specs())
# Test that sos kernel is functional
import unittest
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output
from sos_notebook.test_utils import sos_kernel
class TestSoSKernel(unittest.TestCase):
def testKernel(self):
with sos_kernel() as kc:
execute(kc=kc, code='a = 1\n')
wait_for_idle(kc)
execute(kc=kc, code='%use Python3\n%get a\nb = a + 1')
wait_for_idle(kc)
execute(kc=kc, code='%use SoS\n%get b --from Python3\nprint(b)')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stderr, '')
self.assertEqual(stdout.strip(), '2')
if __name__ == '__main__':
unittest.main()
|
Add test of kernel switch.
|
Add test of kernel switch.
|
Python
|
bsd-3-clause
|
kwilcox/staged-recipes,scopatz/staged-recipes,patricksnape/staged-recipes,asmeurer/staged-recipes,mcs07/staged-recipes,patricksnape/staged-recipes,birdsarah/staged-recipes,mcs07/staged-recipes,hadim/staged-recipes,basnijholt/staged-recipes,goanpeca/staged-recipes,synapticarbors/staged-recipes,kwilcox/staged-recipes,asmeurer/staged-recipes,jjhelmus/staged-recipes,chrisburr/staged-recipes,chrisburr/staged-recipes,jjhelmus/staged-recipes,stuertz/staged-recipes,hadim/staged-recipes,jochym/staged-recipes,basnijholt/staged-recipes,ReimarBauer/staged-recipes,conda-forge/staged-recipes,synapticarbors/staged-recipes,petrushy/staged-recipes,scopatz/staged-recipes,ceholden/staged-recipes,dschreij/staged-recipes,igortg/staged-recipes,mariusvniekerk/staged-recipes,ceholden/staged-recipes,isuruf/staged-recipes,stuertz/staged-recipes,SylvainCorlay/staged-recipes,ReimarBauer/staged-recipes,jakirkham/staged-recipes,ocefpaf/staged-recipes,jakirkham/staged-recipes,jochym/staged-recipes,cpaulik/staged-recipes,Juanlu001/staged-recipes,cpaulik/staged-recipes,conda-forge/staged-recipes,johanneskoester/staged-recipes,ocefpaf/staged-recipes,igortg/staged-recipes,petrushy/staged-recipes,dschreij/staged-recipes,mariusvniekerk/staged-recipes,goanpeca/staged-recipes,johanneskoester/staged-recipes,birdsarah/staged-recipes,SylvainCorlay/staged-recipes,Juanlu001/staged-recipes,isuruf/staged-recipes
|
# Test that sos kernel is installed
import jupyter_client
try:
jupyter_client.kernelspec.get_kernel_spec('sos')
except jupyter_client.kernelspec.NoSuchKernel:
print('sos kernel was not installed')
print('The following kernels are installed:')
print('jupyter_client.kernelspec.find_kernel_specs()')
print(jupyter_client.kernelspec.find_kernel_specs())
Add test of kernel switch.
|
# Test that sos kernel is installed
import jupyter_client
try:
jupyter_client.kernelspec.get_kernel_spec('sos')
except jupyter_client.kernelspec.NoSuchKernel:
print('sos kernel was not installed')
print('The following kernels are installed:')
print('jupyter_client.kernelspec.find_kernel_specs()')
print(jupyter_client.kernelspec.find_kernel_specs())
# Test that sos kernel is functional
import unittest
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output
from sos_notebook.test_utils import sos_kernel
class TestSoSKernel(unittest.TestCase):
def testKernel(self):
with sos_kernel() as kc:
execute(kc=kc, code='a = 1\n')
wait_for_idle(kc)
execute(kc=kc, code='%use Python3\n%get a\nb = a + 1')
wait_for_idle(kc)
execute(kc=kc, code='%use SoS\n%get b --from Python3\nprint(b)')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stderr, '')
self.assertEqual(stdout.strip(), '2')
if __name__ == '__main__':
unittest.main()
|
<commit_before># Test that sos kernel is installed
import jupyter_client
try:
jupyter_client.kernelspec.get_kernel_spec('sos')
except jupyter_client.kernelspec.NoSuchKernel:
print('sos kernel was not installed')
print('The following kernels are installed:')
print('jupyter_client.kernelspec.find_kernel_specs()')
print(jupyter_client.kernelspec.find_kernel_specs())
<commit_msg>Add test of kernel switch.<commit_after>
|
# Test that sos kernel is installed
import jupyter_client
try:
jupyter_client.kernelspec.get_kernel_spec('sos')
except jupyter_client.kernelspec.NoSuchKernel:
print('sos kernel was not installed')
print('The following kernels are installed:')
print('jupyter_client.kernelspec.find_kernel_specs()')
print(jupyter_client.kernelspec.find_kernel_specs())
# Test that sos kernel is functional
import unittest
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output
from sos_notebook.test_utils import sos_kernel
class TestSoSKernel(unittest.TestCase):
def testKernel(self):
with sos_kernel() as kc:
execute(kc=kc, code='a = 1\n')
wait_for_idle(kc)
execute(kc=kc, code='%use Python3\n%get a\nb = a + 1')
wait_for_idle(kc)
execute(kc=kc, code='%use SoS\n%get b --from Python3\nprint(b)')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stderr, '')
self.assertEqual(stdout.strip(), '2')
if __name__ == '__main__':
unittest.main()
|
# Test that sos kernel is installed
import jupyter_client
try:
jupyter_client.kernelspec.get_kernel_spec('sos')
except jupyter_client.kernelspec.NoSuchKernel:
print('sos kernel was not installed')
print('The following kernels are installed:')
print('jupyter_client.kernelspec.find_kernel_specs()')
print(jupyter_client.kernelspec.find_kernel_specs())
Add test of kernel switch.# Test that sos kernel is installed
import jupyter_client
try:
jupyter_client.kernelspec.get_kernel_spec('sos')
except jupyter_client.kernelspec.NoSuchKernel:
print('sos kernel was not installed')
print('The following kernels are installed:')
print('jupyter_client.kernelspec.find_kernel_specs()')
print(jupyter_client.kernelspec.find_kernel_specs())
# Test that sos kernel is functional
import unittest
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output
from sos_notebook.test_utils import sos_kernel
class TestSoSKernel(unittest.TestCase):
def testKernel(self):
with sos_kernel() as kc:
execute(kc=kc, code='a = 1\n')
wait_for_idle(kc)
execute(kc=kc, code='%use Python3\n%get a\nb = a + 1')
wait_for_idle(kc)
execute(kc=kc, code='%use SoS\n%get b --from Python3\nprint(b)')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stderr, '')
self.assertEqual(stdout.strip(), '2')
if __name__ == '__main__':
unittest.main()
|
<commit_before># Test that sos kernel is installed
import jupyter_client
try:
jupyter_client.kernelspec.get_kernel_spec('sos')
except jupyter_client.kernelspec.NoSuchKernel:
print('sos kernel was not installed')
print('The following kernels are installed:')
print('jupyter_client.kernelspec.find_kernel_specs()')
print(jupyter_client.kernelspec.find_kernel_specs())
<commit_msg>Add test of kernel switch.<commit_after># Test that sos kernel is installed
import jupyter_client
try:
jupyter_client.kernelspec.get_kernel_spec('sos')
except jupyter_client.kernelspec.NoSuchKernel:
print('sos kernel was not installed')
print('The following kernels are installed:')
print('jupyter_client.kernelspec.find_kernel_specs()')
print(jupyter_client.kernelspec.find_kernel_specs())
# Test that sos kernel is functional
import unittest
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output
from sos_notebook.test_utils import sos_kernel
class TestSoSKernel(unittest.TestCase):
def testKernel(self):
with sos_kernel() as kc:
execute(kc=kc, code='a = 1\n')
wait_for_idle(kc)
execute(kc=kc, code='%use Python3\n%get a\nb = a + 1')
wait_for_idle(kc)
execute(kc=kc, code='%use SoS\n%get b --from Python3\nprint(b)')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stderr, '')
self.assertEqual(stdout.strip(), '2')
if __name__ == '__main__':
unittest.main()
|
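The availability check in the record above generalizes to any Jupyter kernel. A minimal sketch, assuming only that jupyter_client is importable; the 'python3' kernel name is purely illustrative:

import jupyter_client

def kernel_available(name):
    # get_kernel_spec raises NoSuchKernel when the name is not registered.
    try:
        jupyter_client.kernelspec.get_kernel_spec(name)
        return True
    except jupyter_client.kernelspec.NoSuchKernel:
        return False

if not kernel_available('python3'):
    # find_kernel_specs() maps each installed kernel name to its directory.
    print(jupyter_client.kernelspec.find_kernel_specs())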
49fd5a646f329be1147b43b8c2c611f1424abe22
|
spacy/about.py
|
spacy/about.py
|
# inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy'
__version__ = '2.0.2.dev1'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__release__ = False
__docs_models__ = 'https://spacy.io/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts-v2.json'
|
# inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy'
__version__ = '2.0.2'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__release__ = True
__docs_models__ = 'https://spacy.io/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts-v2.json'
|
Set version for 2.0.2 release
|
Set version for 2.0.2 release
|
Python
|
mit
|
honnibal/spaCy,spacy-io/spaCy,explosion/spaCy,spacy-io/spaCy,aikramer2/spaCy,explosion/spaCy,explosion/spaCy,honnibal/spaCy,honnibal/spaCy,explosion/spaCy,explosion/spaCy,explosion/spaCy,spacy-io/spaCy,spacy-io/spaCy,recognai/spaCy,honnibal/spaCy,spacy-io/spaCy,recognai/spaCy,aikramer2/spaCy,recognai/spaCy,recognai/spaCy,recognai/spaCy,aikramer2/spaCy,aikramer2/spaCy,aikramer2/spaCy,recognai/spaCy,spacy-io/spaCy,aikramer2/spaCy
|
# inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy'
__version__ = '2.0.2.dev1'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__release__ = False
__docs_models__ = 'https://spacy.io/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts-v2.json'
Set version for 2.0.2 release
|
# inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy'
__version__ = '2.0.2'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__release__ = True
__docs_models__ = 'https://spacy.io/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts-v2.json'
|
<commit_before># inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy'
__version__ = '2.0.2.dev1'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__release__ = False
__docs_models__ = 'https://spacy.io/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts-v2.json'
<commit_msg>Set version for 2.0.2 release<commit_after>
|
# inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy'
__version__ = '2.0.2'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__release__ = True
__docs_models__ = 'https://spacy.io/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts-v2.json'
|
# inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy'
__version__ = '2.0.2.dev1'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__release__ = False
__docs_models__ = 'https://spacy.io/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts-v2.json'
Set version for 2.0.2 release# inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy'
__version__ = '2.0.2'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__release__ = True
__docs_models__ = 'https://spacy.io/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts-v2.json'
|
<commit_before># inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy'
__version__ = '2.0.2.dev1'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__release__ = False
__docs_models__ = 'https://spacy.io/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts-v2.json'
<commit_msg>Set version for 2.0.2 release<commit_after># inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy'
__version__ = '2.0.2'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__release__ = True
__docs_models__ = 'https://spacy.io/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts-v2.json'
|
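The comment block cites the single-source-version pattern: build tooling reads the version out of about.py as plain text instead of importing the package. A sketch of one way a setup script could do that, offered as an assumption rather than spaCy's actual setup code:

import os
import re

def read_about_version(path=os.path.join('spacy', 'about.py')):
    # Scan the file as text for a line like: __version__ = '2.0.2'
    with open(path) as f:
        match = re.search(r"^__version__ = '([^']+)'", f.read(), re.MULTILINE)
    if match is None:
        raise RuntimeError('no __version__ found in %s' % path)
    return match.group(1)

print(read_about_version())  # '2.0.2' for the release commit above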
d101b7f023db1583ca7b65899bfdef296f838ad2
|
openspending/ui/validation/source.py
|
openspending/ui/validation/source.py
|
from urlparse import urlparse
from openspending.validation.model.common import mapping
from openspending.validation.model.common import key
from openspending.validation.model.predicates import chained, \
nonempty_string
def valid_url(url):
parsed = urlparse(url)
if parsed.scheme.lower() not in ('http', 'https'):
return "Only HTTP/HTTPS web addresses are supported " \
"at the moment."
return True
def source_schema():
schema = mapping('source')
schema.add(key('url', validator=chained(
nonempty_string,
valid_url
)))
return schema
|
from urlparse import urlparse
from openspending.validation.model.common import mapping
from openspending.validation.model.common import key
from openspending.validation.model.predicates import chained, \
nonempty_string
def valid_url(url):
parsed = urlparse(url)
if parsed.scheme.lower() not in ('http', 'https'):
return "Only HTTP/HTTPS web addresses are supported " \
"at the moment."
return True
def source_schema():
schema = mapping('source')
schema.add(key('url', validator=chained(
nonempty_string,
valid_url
)))
return schema
|
Fix PEP8 issues in openspending/ui/validation.
|
Fix PEP8 issues in openspending/ui/validation.
|
Python
|
agpl-3.0
|
CivicVision/datahub,openspending/spendb,CivicVision/datahub,spendb/spendb,spendb/spendb,johnjohndoe/spendb,USStateDept/FPA_Core,nathanhilbert/FPA_Core,openspending/spendb,spendb/spendb,USStateDept/FPA_Core,USStateDept/FPA_Core,johnjohndoe/spendb,openspending/spendb,nathanhilbert/FPA_Core,johnjohndoe/spendb,pudo/spendb,nathanhilbert/FPA_Core,CivicVision/datahub,pudo/spendb,pudo/spendb
|
from urlparse import urlparse
from openspending.validation.model.common import mapping
from openspending.validation.model.common import key
from openspending.validation.model.predicates import chained, \
nonempty_string
def valid_url(url):
parsed = urlparse(url)
if parsed.scheme.lower() not in ('http', 'https'):
return "Only HTTP/HTTPS web addresses are supported " \
"at the moment."
return True
def source_schema():
schema = mapping('source')
schema.add(key('url', validator=chained(
nonempty_string,
valid_url
)))
return schema
Fix PEP8 issues in openspending/ui/validation.
|
from urlparse import urlparse
from openspending.validation.model.common import mapping
from openspending.validation.model.common import key
from openspending.validation.model.predicates import chained, \
nonempty_string
def valid_url(url):
parsed = urlparse(url)
if parsed.scheme.lower() not in ('http', 'https'):
return "Only HTTP/HTTPS web addresses are supported " \
"at the moment."
return True
def source_schema():
schema = mapping('source')
schema.add(key('url', validator=chained(
nonempty_string,
valid_url
)))
return schema
|
<commit_before>from urlparse import urlparse
from openspending.validation.model.common import mapping
from openspending.validation.model.common import key
from openspending.validation.model.predicates import chained, \
nonempty_string
def valid_url(url):
parsed = urlparse(url)
if parsed.scheme.lower() not in ('http', 'https'):
return "Only HTTP/HTTPS web addresses are supported " \
"at the moment."
return True
def source_schema():
schema = mapping('source')
schema.add(key('url', validator=chained(
nonempty_string,
valid_url
)))
return schema
<commit_msg>Fix PEP8 issues in openspending/ui/validation.<commit_after>
|
from urlparse import urlparse
from openspending.validation.model.common import mapping
from openspending.validation.model.common import key
from openspending.validation.model.predicates import chained, \
nonempty_string
def valid_url(url):
parsed = urlparse(url)
if parsed.scheme.lower() not in ('http', 'https'):
return "Only HTTP/HTTPS web addresses are supported " \
"at the moment."
return True
def source_schema():
schema = mapping('source')
schema.add(key('url', validator=chained(
nonempty_string,
valid_url
)))
return schema
|
from urlparse import urlparse
from openspending.validation.model.common import mapping
from openspending.validation.model.common import key
from openspending.validation.model.predicates import chained, \
nonempty_string
def valid_url(url):
parsed = urlparse(url)
if parsed.scheme.lower() not in ('http', 'https'):
return "Only HTTP/HTTPS web addresses are supported " \
"at the moment."
return True
def source_schema():
schema = mapping('source')
schema.add(key('url', validator=chained(
nonempty_string,
valid_url
)))
return schema
Fix PEP8 issues in openspending/ui/validation.from urlparse import urlparse
from openspending.validation.model.common import mapping
from openspending.validation.model.common import key
from openspending.validation.model.predicates import chained, \
nonempty_string
def valid_url(url):
parsed = urlparse(url)
if parsed.scheme.lower() not in ('http', 'https'):
return "Only HTTP/HTTPS web addresses are supported " \
"at the moment."
return True
def source_schema():
schema = mapping('source')
schema.add(key('url', validator=chained(
nonempty_string,
valid_url
)))
return schema
|
<commit_before>from urlparse import urlparse
from openspending.validation.model.common import mapping
from openspending.validation.model.common import key
from openspending.validation.model.predicates import chained, \
nonempty_string
def valid_url(url):
parsed = urlparse(url)
if parsed.scheme.lower() not in ('http', 'https'):
return "Only HTTP/HTTPS web addresses are supported " \
"at the moment."
return True
def source_schema():
schema = mapping('source')
schema.add(key('url', validator=chained(
nonempty_string,
valid_url
)))
return schema
<commit_msg>Fix PEP8 issues in openspending/ui/validation.<commit_after>from urlparse import urlparse
from openspending.validation.model.common import mapping
from openspending.validation.model.common import key
from openspending.validation.model.predicates import chained, \
nonempty_string
def valid_url(url):
parsed = urlparse(url)
if parsed.scheme.lower() not in ('http', 'https'):
return "Only HTTP/HTTPS web addresses are supported " \
"at the moment."
return True
def source_schema():
schema = mapping('source')
schema.add(key('url', validator=chained(
nonempty_string,
valid_url
)))
return schema
|
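Both branches of the predicate are easy to exercise. The calls below assume the module is importable at the path shown in the record:

from openspending.ui.validation.source import source_schema, valid_url

print(valid_url('https://example.org/spending.csv'))  # True
print(valid_url('ftp://example.org/spending.csv'))    # returns the error string
schema = source_schema()  # mapping whose 'url' key chains both validators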
fb8921f17d1cecfc1c61612092709c526b70e0ab
|
dsub/_dsub_version.py
|
dsub/_dsub_version.py
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.0.dev0'
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.0'
|
Update dsub version to 0.3.0
|
Update dsub version to 0.3.0
PiperOrigin-RevId: 241766455
|
Python
|
apache-2.0
|
DataBiosphere/dsub,DataBiosphere/dsub
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.0.dev0'
Update dsub version to 0.3.0
PiperOrigin-RevId: 241766455
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.0'
|
<commit_before># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.0.dev0'
<commit_msg>Update dsub version to 0.3.0
PiperOrigin-RevId: 241766455<commit_after>
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.0'
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.0.dev0'
Update dsub version to 0.3.0
PiperOrigin-RevId: 241766455# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.0'
|
<commit_before># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.0.dev0'
<commit_msg>Update dsub version to 0.3.0
PiperOrigin-RevId: 241766455<commit_after># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.0'
|
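The docstring's release sequence can be sanity-checked against PEP 440 ordering. This sketch assumes the third-party packaging library is available; it is not a stated dsub dependency:

from packaging.version import Version

sequence = ['0.1.3.dev0', '0.1.3', '0.1.4.dev0', '0.3.0.dev0', '0.3.0']
# .dev0 pre-releases sort before their corresponding final release.
assert all(Version(a) < Version(b) for a, b in zip(sequence, sequence[1:]))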
2769f038a70e1003c23908b1e917abd08058512b
|
aldryn_newsblog/admin.py
|
aldryn_newsblog/admin.py
|
from django.contrib import admin
from aldryn_apphooks_config.admin import BaseAppHookConfig
from cms.admin.placeholderadmin import PlaceholderAdmin, FrontendEditableAdmin
from parler.admin import TranslatableAdmin
from .models import Article, MockCategory, MockTag, NewsBlogConfig
class ArticleAdmin(TranslatableAdmin, PlaceholderAdmin, FrontendEditableAdmin):
# TODO: make possible to edit placeholder
def add_view(self, request, *args, **kwargs):
data = request.GET.copy()
data['author'] = request.user.id # default author is logged-in user
request.GET = data
return super(ArticleAdmin, self).add_view(request, *args, **kwargs)
class MockCategoryAdmin(admin.ModelAdmin):
pass
class MockTagAdmin(admin.ModelAdmin):
pass
class NewsBlogConfigAdmin(BaseAppHookConfig):
def get_config_fields(self):
return []
admin.site.register(Article, ArticleAdmin)
admin.site.register(MockTag, MockCategoryAdmin)
admin.site.register(MockCategory, MockTagAdmin)
admin.site.register(NewsBlogConfig, NewsBlogConfigAdmin)
|
from django.contrib import admin
from aldryn_apphooks_config.admin import BaseAppHookConfig
from aldryn_people.models import Person
from cms.admin.placeholderadmin import PlaceholderAdmin, FrontendEditableAdmin
from parler.admin import TranslatableAdmin
from .models import Article, MockCategory, MockTag, NewsBlogConfig
class ArticleAdmin(TranslatableAdmin, PlaceholderAdmin, FrontendEditableAdmin):
# TODO: make possible to edit placeholder
def add_view(self, request, *args, **kwargs):
data = request.GET.copy()
try:
person = Person.objects.get(user=request.user)
data['author'] = person.pk
request.GET = data
except Person.DoesNotExist:
pass
return super(ArticleAdmin, self).add_view(request, *args, **kwargs)
class MockCategoryAdmin(admin.ModelAdmin):
pass
class MockTagAdmin(admin.ModelAdmin):
pass
class NewsBlogConfigAdmin(BaseAppHookConfig):
def get_config_fields(self):
return []
admin.site.register(Article, ArticleAdmin)
admin.site.register(MockTag, MockCategoryAdmin)
admin.site.register(MockCategory, MockTagAdmin)
admin.site.register(NewsBlogConfig, NewsBlogConfigAdmin)
|
Set Article.author to a Person instance (if it exists for current user)
|
Set Article.author to a Person instance (if it exists for current user)
|
Python
|
bsd-3-clause
|
czpython/aldryn-newsblog,mkoistinen/aldryn-newsblog,czpython/aldryn-newsblog,mkoistinen/aldryn-newsblog,czpython/aldryn-newsblog,mkoistinen/aldryn-newsblog,czpython/aldryn-newsblog
|
from django.contrib import admin
from aldryn_apphooks_config.admin import BaseAppHookConfig
from cms.admin.placeholderadmin import PlaceholderAdmin, FrontendEditableAdmin
from parler.admin import TranslatableAdmin
from .models import Article, MockCategory, MockTag, NewsBlogConfig
class ArticleAdmin(TranslatableAdmin, PlaceholderAdmin, FrontendEditableAdmin):
# TODO: make possible to edit placeholder
def add_view(self, request, *args, **kwargs):
data = request.GET.copy()
data['author'] = request.user.id # default author is logged-in user
request.GET = data
return super(ArticleAdmin, self).add_view(request, *args, **kwargs)
class MockCategoryAdmin(admin.ModelAdmin):
pass
class MockTagAdmin(admin.ModelAdmin):
pass
class NewsBlogConfigAdmin(BaseAppHookConfig):
def get_config_fields(self):
return []
admin.site.register(Article, ArticleAdmin)
admin.site.register(MockTag, MockCategoryAdmin)
admin.site.register(MockCategory, MockTagAdmin)
admin.site.register(NewsBlogConfig, NewsBlogConfigAdmin)
Set Article.author to a Person instance (if it exists for current user)
|
from django.contrib import admin
from aldryn_apphooks_config.admin import BaseAppHookConfig
from aldryn_people.models import Person
from cms.admin.placeholderadmin import PlaceholderAdmin, FrontendEditableAdmin
from parler.admin import TranslatableAdmin
from .models import Article, MockCategory, MockTag, NewsBlogConfig
class ArticleAdmin(TranslatableAdmin, PlaceholderAdmin, FrontendEditableAdmin):
# TODO: make possible to edit placeholder
def add_view(self, request, *args, **kwargs):
data = request.GET.copy()
try:
person = Person.objects.get(user=request.user)
data['author'] = person.pk
request.GET = data
except Person.DoesNotExist:
pass
return super(ArticleAdmin, self).add_view(request, *args, **kwargs)
class MockCategoryAdmin(admin.ModelAdmin):
pass
class MockTagAdmin(admin.ModelAdmin):
pass
class NewsBlogConfigAdmin(BaseAppHookConfig):
def get_config_fields(self):
return []
admin.site.register(Article, ArticleAdmin)
admin.site.register(MockTag, MockCategoryAdmin)
admin.site.register(MockCategory, MockTagAdmin)
admin.site.register(NewsBlogConfig, NewsBlogConfigAdmin)
|
<commit_before>from django.contrib import admin
from aldryn_apphooks_config.admin import BaseAppHookConfig
from cms.admin.placeholderadmin import PlaceholderAdmin, FrontendEditableAdmin
from parler.admin import TranslatableAdmin
from .models import Article, MockCategory, MockTag, NewsBlogConfig
class ArticleAdmin(TranslatableAdmin, PlaceholderAdmin, FrontendEditableAdmin):
# TODO: make possible to edit placeholder
def add_view(self, request, *args, **kwargs):
data = request.GET.copy()
data['author'] = request.user.id # default author is logged-in user
request.GET = data
return super(ArticleAdmin, self).add_view(request, *args, **kwargs)
class MockCategoryAdmin(admin.ModelAdmin):
pass
class MockTagAdmin(admin.ModelAdmin):
pass
class NewsBlogConfigAdmin(BaseAppHookConfig):
def get_config_fields(self):
return []
admin.site.register(Article, ArticleAdmin)
admin.site.register(MockTag, MockCategoryAdmin)
admin.site.register(MockCategory, MockTagAdmin)
admin.site.register(NewsBlogConfig, NewsBlogConfigAdmin)
<commit_msg>Set Article.author to a Person instance (if it exists for current user)<commit_after>
|
from django.contrib import admin
from aldryn_apphooks_config.admin import BaseAppHookConfig
from aldryn_people.models import Person
from cms.admin.placeholderadmin import PlaceholderAdmin, FrontendEditableAdmin
from parler.admin import TranslatableAdmin
from .models import Article, MockCategory, MockTag, NewsBlogConfig
class ArticleAdmin(TranslatableAdmin, PlaceholderAdmin, FrontendEditableAdmin):
# TODO: make possible to edit placeholder
def add_view(self, request, *args, **kwargs):
data = request.GET.copy()
try:
person = Person.objects.get(user=request.user)
data['author'] = person.pk
request.GET = data
except Person.DoesNotExist:
pass
return super(ArticleAdmin, self).add_view(request, *args, **kwargs)
class MockCategoryAdmin(admin.ModelAdmin):
pass
class MockTagAdmin(admin.ModelAdmin):
pass
class NewsBlogConfigAdmin(BaseAppHookConfig):
def get_config_fields(self):
return []
admin.site.register(Article, ArticleAdmin)
admin.site.register(MockTag, MockCategoryAdmin)
admin.site.register(MockCategory, MockTagAdmin)
admin.site.register(NewsBlogConfig, NewsBlogConfigAdmin)
|
from django.contrib import admin
from aldryn_apphooks_config.admin import BaseAppHookConfig
from cms.admin.placeholderadmin import PlaceholderAdmin, FrontendEditableAdmin
from parler.admin import TranslatableAdmin
from .models import Article, MockCategory, MockTag, NewsBlogConfig
class ArticleAdmin(TranslatableAdmin, PlaceholderAdmin, FrontendEditableAdmin):
# TODO: make possible to edit placeholder
def add_view(self, request, *args, **kwargs):
data = request.GET.copy()
data['author'] = request.user.id # default author is logged-in user
request.GET = data
return super(ArticleAdmin, self).add_view(request, *args, **kwargs)
class MockCategoryAdmin(admin.ModelAdmin):
pass
class MockTagAdmin(admin.ModelAdmin):
pass
class NewsBlogConfigAdmin(BaseAppHookConfig):
def get_config_fields(self):
return []
admin.site.register(Article, ArticleAdmin)
admin.site.register(MockTag, MockCategoryAdmin)
admin.site.register(MockCategory, MockTagAdmin)
admin.site.register(NewsBlogConfig, NewsBlogConfigAdmin)
Set Article.author to a Person instance (if it exists for current user)from django.contrib import admin
from aldryn_apphooks_config.admin import BaseAppHookConfig
from aldryn_people.models import Person
from cms.admin.placeholderadmin import PlaceholderAdmin, FrontendEditableAdmin
from parler.admin import TranslatableAdmin
from .models import Article, MockCategory, MockTag, NewsBlogConfig
class ArticleAdmin(TranslatableAdmin, PlaceholderAdmin, FrontendEditableAdmin):
# TODO: make possible to edit placeholder
def add_view(self, request, *args, **kwargs):
data = request.GET.copy()
try:
person = Person.objects.get(user=request.user)
data['author'] = person.pk
request.GET = data
except Person.DoesNotExist:
pass
return super(ArticleAdmin, self).add_view(request, *args, **kwargs)
class MockCategoryAdmin(admin.ModelAdmin):
pass
class MockTagAdmin(admin.ModelAdmin):
pass
class NewsBlogConfigAdmin(BaseAppHookConfig):
def get_config_fields(self):
return []
admin.site.register(Article, ArticleAdmin)
admin.site.register(MockTag, MockCategoryAdmin)
admin.site.register(MockCategory, MockTagAdmin)
admin.site.register(NewsBlogConfig, NewsBlogConfigAdmin)
|
<commit_before>from django.contrib import admin
from aldryn_apphooks_config.admin import BaseAppHookConfig
from cms.admin.placeholderadmin import PlaceholderAdmin, FrontendEditableAdmin
from parler.admin import TranslatableAdmin
from .models import Article, MockCategory, MockTag, NewsBlogConfig
class ArticleAdmin(TranslatableAdmin, PlaceholderAdmin, FrontendEditableAdmin):
# TODO: make possible to edit placeholder
def add_view(self, request, *args, **kwargs):
data = request.GET.copy()
data['author'] = request.user.id # default author is logged-in user
request.GET = data
return super(ArticleAdmin, self).add_view(request, *args, **kwargs)
class MockCategoryAdmin(admin.ModelAdmin):
pass
class MockTagAdmin(admin.ModelAdmin):
pass
class NewsBlogConfigAdmin(BaseAppHookConfig):
def get_config_fields(self):
return []
admin.site.register(Article, ArticleAdmin)
admin.site.register(MockTag, MockCategoryAdmin)
admin.site.register(MockCategory, MockTagAdmin)
admin.site.register(NewsBlogConfig, NewsBlogConfigAdmin)
<commit_msg>Set Article.author to a Person instance (if it exists for current user)<commit_after>from django.contrib import admin
from aldryn_apphooks_config.admin import BaseAppHookConfig
from aldryn_people.models import Person
from cms.admin.placeholderadmin import PlaceholderAdmin, FrontendEditableAdmin
from parler.admin import TranslatableAdmin
from .models import Article, MockCategory, MockTag, NewsBlogConfig
class ArticleAdmin(TranslatableAdmin, PlaceholderAdmin, FrontendEditableAdmin):
# TODO: make possible to edit placeholder
def add_view(self, request, *args, **kwargs):
data = request.GET.copy()
try:
person = Person.objects.get(user=request.user)
data['author'] = person.pk
request.GET = data
except Person.DoesNotExist:
pass
return super(ArticleAdmin, self).add_view(request, *args, **kwargs)
class MockCategoryAdmin(admin.ModelAdmin):
pass
class MockTagAdmin(admin.ModelAdmin):
pass
class NewsBlogConfigAdmin(BaseAppHookConfig):
def get_config_fields(self):
return []
admin.site.register(Article, ArticleAdmin)
admin.site.register(MockTag, MockCategoryAdmin)
admin.site.register(MockCategory, MockTagAdmin)
admin.site.register(NewsBlogConfig, NewsBlogConfigAdmin)
|
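Django 1.7+ also exposes get_changeform_initial_data for exactly this kind of prefill. The sketch below is a hypothetical alternative to mutating request.GET, not the project's code:

from django.contrib import admin
from aldryn_people.models import Person

class ArticleAdminAlt(admin.ModelAdmin):
    def get_changeform_initial_data(self, request):
        initial = super(ArticleAdminAlt, self).get_changeform_initial_data(request)
        try:
            # Default the author to the Person tied to the logged-in user.
            initial['author'] = Person.objects.get(user=request.user).pk
        except Person.DoesNotExist:
            pass
        return initial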
23fc2bbd22aa8d45301207b0608634df4414707f
|
panoptes_client/classification.py
|
panoptes_client/classification.py
|
from panoptes_client.panoptes import LinkResolver, PanoptesObject
class Classification(PanoptesObject):
_api_slug = 'classifications'
_link_slug = 'classification'
_edit_attributes = ( )
LinkResolver.register(Classification)
|
from panoptes_client.panoptes import LinkResolver, PanoptesObject
class Classification(PanoptesObject):
_api_slug = 'classifications'
_link_slug = 'classification'
_edit_attributes = ( )
@classmethod
def where(cls, **kwargs):
scope = kwargs.pop('scope', None)
if not scope:
return super(Classification, cls).where(**kwargs)
return cls.paginated_results(*cls.get(scope, params=kwargs))
LinkResolver.register(Classification)
|
Add scope kwarg to Classification.where()
|
Add scope kwarg to Classification.where()
|
Python
|
apache-2.0
|
zooniverse/panoptes-python-client
|
from panoptes_client.panoptes import LinkResolver, PanoptesObject
class Classification(PanoptesObject):
_api_slug = 'classifications'
_link_slug = 'classification'
_edit_attributes = ( )
LinkResolver.register(Classification)
Add scope kwarg to Classification.where()
|
from panoptes_client.panoptes import LinkResolver, PanoptesObject
class Classification(PanoptesObject):
_api_slug = 'classifications'
_link_slug = 'classification'
_edit_attributes = ( )
@classmethod
def where(cls, **kwargs):
scope = kwargs.pop('scope', None)
if not scope:
return super(Classification, cls).where(**kwargs)
return cls.paginated_results(*cls.get(scope, params=kwargs))
LinkResolver.register(Classification)
|
<commit_before>from panoptes_client.panoptes import LinkResolver, PanoptesObject
class Classification(PanoptesObject):
_api_slug = 'classifications'
_link_slug = 'classification'
_edit_attributes = ( )
LinkResolver.register(Classification)
<commit_msg>Add scope kwarg to Classification.where()<commit_after>
|
from panoptes_client.panoptes import LinkResolver, PanoptesObject
class Classification(PanoptesObject):
_api_slug = 'classifications'
_link_slug = 'classification'
_edit_attributes = ( )
@classmethod
def where(cls, **kwargs):
scope = kwargs.pop('scope', None)
if not scope:
return super(Classification, cls).where(**kwargs)
return cls.paginated_results(*cls.get(scope, params=kwargs))
LinkResolver.register(Classification)
|
from panoptes_client.panoptes import LinkResolver, PanoptesObject
class Classification(PanoptesObject):
_api_slug = 'classifications'
_link_slug = 'classification'
_edit_attributes = ( )
LinkResolver.register(Classification)
Add scope kwarg to Classification.where()from panoptes_client.panoptes import LinkResolver, PanoptesObject
class Classification(PanoptesObject):
_api_slug = 'classifications'
_link_slug = 'classification'
_edit_attributes = ( )
@classmethod
def where(cls, **kwargs):
scope = kwargs.pop('scope', None)
if not scope:
return super(Classification, cls).where(**kwargs)
return cls.paginated_results(*cls.get(scope, params=kwargs))
LinkResolver.register(Classification)
|
<commit_before>from panoptes_client.panoptes import LinkResolver, PanoptesObject
class Classification(PanoptesObject):
_api_slug = 'classifications'
_link_slug = 'classification'
_edit_attributes = ( )
LinkResolver.register(Classification)
<commit_msg>Add scope kwarg to Classification.where()<commit_after>from panoptes_client.panoptes import LinkResolver, PanoptesObject
class Classification(PanoptesObject):
_api_slug = 'classifications'
_link_slug = 'classification'
_edit_attributes = ( )
@classmethod
def where(cls, **kwargs):
scope = kwargs.pop('scope', None)
if not scope:
return super(Classification, cls).where(**kwargs)
return cls.paginated_results(*cls.get(scope, params=kwargs))
LinkResolver.register(Classification)
|
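Read against cls.get and paginated_results, the scope kwarg becomes a path segment while the remaining kwargs become query params. Illustrative calls; the project id is a placeholder:

from panoptes_client.classification import Classification

# No scope: behaves exactly like the inherited where().
recent = Classification.where(page_size=10)

# scope='project' requests /classifications/project?project_id=1234.
for c in Classification.where(scope='project', project_id=1234):
    print(c.id)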
24b52024b76206b59a650cee477773ad5836b175
|
labonneboite/importer/conf/lbbdev.py
|
labonneboite/importer/conf/lbbdev.py
|
import os
# --- importer input directory of DPAE and ETABLISSEMENT exports
INPUT_SOURCE_FOLDER = '/srv/lbb/data'
# --- job 1/8 & 2/8 : check_etablissements & extract_etablissements
JENKINS_ETAB_PROPERTIES_FILENAME = os.path.join(os.environ["WORKSPACE"], "properties.jenkins")
MINIMUM_OFFICES_TO_BE_EXTRACTED_PER_DEPARTEMENT = 10000
# --- job 3/8 & 4/8 : check_dpae & extract_dpae
JENKINS_DPAE_PROPERTIES_FILENAME = os.path.join(os.environ["WORKSPACE"], "properties_dpae.jenkins")
MAXIMUM_ZIPCODE_ERRORS = 100
MAXIMUM_INVALID_ROWS = 100
# --- job 5/8 : compute_scores
SCORE_COEFFICIENT_OF_VARIATION_MAX = 2.0
RMSE_MAX = 1500 # On 2017.03.15 departement 52 reached RMSE=1141
HIGH_SCORE_COMPANIES_DIFF_MAX = 70
# --- job 6/8 : validate_scores
MINIMUM_OFFICES_PER_DEPARTEMENT_FOR_DPAE = 500
MINIMUM_OFFICES_PER_DEPARTEMENT_FOR_ALTERNANCE = 0
# --- job 8/8 : populate_flags
|
import os
# --- importer input directory of DPAE and ETABLISSEMENT exports
INPUT_SOURCE_FOLDER = '/srv/lbb/data'
# --- job 1/8 & 2/8 : check_etablissements & extract_etablissements
JENKINS_ETAB_PROPERTIES_FILENAME = os.path.join(os.environ["WORKSPACE"], "properties.jenkins")
MINIMUM_OFFICES_TO_BE_EXTRACTED_PER_DEPARTEMENT = 10000
# --- job 3/8 & 4/8 : check_dpae & extract_dpae
JENKINS_DPAE_PROPERTIES_FILENAME = os.path.join(os.environ["WORKSPACE"], "properties_dpae.jenkins")
MAXIMUM_ZIPCODE_ERRORS = 100
MAXIMUM_INVALID_ROWS = 100
# --- job 5/8 : compute_scores
SCORE_COEFFICIENT_OF_VARIATION_MAX = 2.0
RMSE_MAX = 1500 # On 2017.03.15 departement 52 reached RMSE=1141
HIGH_SCORE_COMPANIES_DIFF_MAX = 75 # On 2019.05.03 departement 77 reached HIGH_SCORE_COMPANIES_DIFF_MAX~=73
# --- job 6/8 : validate_scores
MINIMUM_OFFICES_PER_DEPARTEMENT_FOR_DPAE = 500
MINIMUM_OFFICES_PER_DEPARTEMENT_FOR_ALTERNANCE = 0
# --- job 8/8 : populate_flags
|
Fix evolution threshold for dpt 77
|
Fix evolution threshold for dpt 77
The threshold HIGH_SCORE_COMPANIES_DIFF_MAX which is the max percentage
of difference between number of companies in lbb from one importer cycle
to another was set to 70, and it reached 73 for the department 77. This
threshold has now been set to 75.
|
Python
|
agpl-3.0
|
StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite
|
import os
# --- importer input directory of DPAE and ETABLISSEMENT exports
INPUT_SOURCE_FOLDER = '/srv/lbb/data'
# --- job 1/8 & 2/8 : check_etablissements & extract_etablissements
JENKINS_ETAB_PROPERTIES_FILENAME = os.path.join(os.environ["WORKSPACE"], "properties.jenkins")
MINIMUM_OFFICES_TO_BE_EXTRACTED_PER_DEPARTEMENT = 10000
# --- job 3/8 & 4/8 : check_dpae & extract_dpae
JENKINS_DPAE_PROPERTIES_FILENAME = os.path.join(os.environ["WORKSPACE"], "properties_dpae.jenkins")
MAXIMUM_ZIPCODE_ERRORS = 100
MAXIMUM_INVALID_ROWS = 100
# --- job 5/8 : compute_scores
SCORE_COEFFICIENT_OF_VARIATION_MAX = 2.0
RMSE_MAX = 1500 # On 2017.03.15 departement 52 reached RMSE=1141
HIGH_SCORE_COMPANIES_DIFF_MAX = 70
# --- job 6/8 : validate_scores
MINIMUM_OFFICES_PER_DEPARTEMENT_FOR_DPAE = 500
MINIMUM_OFFICES_PER_DEPARTEMENT_FOR_ALTERNANCE = 0
# --- job 8/8 : populate_flags
Fix evolution threshold for dpt 77
The threshold HIGH_SCORE_COMPANIES_DIFF_MAX, which is the maximum percentage
of difference in the number of companies in lbb from one importer cycle
to the next, was set to 70, and it reached 73 for department 77. The
threshold has now been raised to 75.
|
import os
# --- importer input directory of DPAE and ETABLISSEMENT exports
INPUT_SOURCE_FOLDER = '/srv/lbb/data'
# --- job 1/8 & 2/8 : check_etablissements & extract_etablissements
JENKINS_ETAB_PROPERTIES_FILENAME = os.path.join(os.environ["WORKSPACE"], "properties.jenkins")
MINIMUM_OFFICES_TO_BE_EXTRACTED_PER_DEPARTEMENT = 10000
# --- job 3/8 & 4/8 : check_dpae & extract_dpae
JENKINS_DPAE_PROPERTIES_FILENAME = os.path.join(os.environ["WORKSPACE"], "properties_dpae.jenkins")
MAXIMUM_ZIPCODE_ERRORS = 100
MAXIMUM_INVALID_ROWS = 100
# --- job 5/8 : compute_scores
SCORE_COEFFICIENT_OF_VARIATION_MAX = 2.0
RMSE_MAX = 1500 # On 2017.03.15 departement 52 reached RMSE=1141
HIGH_SCORE_COMPANIES_DIFF_MAX = 75 # On 2019.05.03 departement 77 reached HIGH_SCORE_COMPANIES_DIFF_MAX~=73
# --- job 6/8 : validate_scores
MINIMUM_OFFICES_PER_DEPARTEMENT_FOR_DPAE = 500
MINIMUM_OFFICES_PER_DEPARTEMENT_FOR_ALTERNANCE = 0
# --- job 8/8 : populate_flags
|
<commit_before>import os
# --- importer input directory of DPAE and ETABLISSEMENT exports
INPUT_SOURCE_FOLDER = '/srv/lbb/data'
# --- job 1/8 & 2/8 : check_etablissements & extract_etablissements
JENKINS_ETAB_PROPERTIES_FILENAME = os.path.join(os.environ["WORKSPACE"], "properties.jenkins")
MINIMUM_OFFICES_TO_BE_EXTRACTED_PER_DEPARTEMENT = 10000
# --- job 3/8 & 4/8 : check_dpae & extract_dpae
JENKINS_DPAE_PROPERTIES_FILENAME = os.path.join(os.environ["WORKSPACE"], "properties_dpae.jenkins")
MAXIMUM_ZIPCODE_ERRORS = 100
MAXIMUM_INVALID_ROWS = 100
# --- job 5/8 : compute_scores
SCORE_COEFFICIENT_OF_VARIATION_MAX = 2.0
RMSE_MAX = 1500 # On 2017.03.15 departement 52 reached RMSE=1141
HIGH_SCORE_COMPANIES_DIFF_MAX = 70
# --- job 6/8 : validate_scores
MINIMUM_OFFICES_PER_DEPARTEMENT_FOR_DPAE = 500
MINIMUM_OFFICES_PER_DEPARTEMENT_FOR_ALTERNANCE = 0
# --- job 8/8 : populate_flags
<commit_msg>Fix evolution threshold for dpt 77
The threshold HIGH_SCORE_COMPANIES_DIFF_MAX, which is the maximum percentage
of difference in the number of companies in lbb from one importer cycle
to the next, was set to 70, and it reached 73 for department 77. The
threshold has now been raised to 75.<commit_after>
|
import os
# --- importer input directory of DPAE and ETABLISSEMENT exports
INPUT_SOURCE_FOLDER = '/srv/lbb/data'
# --- job 1/8 & 2/8 : check_etablissements & extract_etablissements
JENKINS_ETAB_PROPERTIES_FILENAME = os.path.join(os.environ["WORKSPACE"], "properties.jenkins")
MINIMUM_OFFICES_TO_BE_EXTRACTED_PER_DEPARTEMENT = 10000
# --- job 3/8 & 4/8 : check_dpae & extract_dpae
JENKINS_DPAE_PROPERTIES_FILENAME = os.path.join(os.environ["WORKSPACE"], "properties_dpae.jenkins")
MAXIMUM_ZIPCODE_ERRORS = 100
MAXIMUM_INVALID_ROWS = 100
# --- job 5/8 : compute_scores
SCORE_COEFFICIENT_OF_VARIATION_MAX = 2.0
RMSE_MAX = 1500 # On 2017.03.15 departement 52 reached RMSE=1141
HIGH_SCORE_COMPANIES_DIFF_MAX = 75 # On 2019.05.03 departement 77 reached HIGH_SCORE_COMPANIES_DIFF_MAX~=73
# --- job 6/8 : validate_scores
MINIMUM_OFFICES_PER_DEPARTEMENT_FOR_DPAE = 500
MINIMUM_OFFICES_PER_DEPARTEMENT_FOR_ALTERNANCE = 0
# --- job 8/8 : populate_flags
|
import os
# --- importer input directory of DPAE and ETABLISSEMENT exports
INPUT_SOURCE_FOLDER = '/srv/lbb/data'
# --- job 1/8 & 2/8 : check_etablissements & extract_etablissements
JENKINS_ETAB_PROPERTIES_FILENAME = os.path.join(os.environ["WORKSPACE"], "properties.jenkins")
MINIMUM_OFFICES_TO_BE_EXTRACTED_PER_DEPARTEMENT = 10000
# --- job 3/8 & 4/8 : check_dpae & extract_dpae
JENKINS_DPAE_PROPERTIES_FILENAME = os.path.join(os.environ["WORKSPACE"], "properties_dpae.jenkins")
MAXIMUM_ZIPCODE_ERRORS = 100
MAXIMUM_INVALID_ROWS = 100
# --- job 5/8 : compute_scores
SCORE_COEFFICIENT_OF_VARIATION_MAX = 2.0
RMSE_MAX = 1500 # On 2017.03.15 departement 52 reached RMSE=1141
HIGH_SCORE_COMPANIES_DIFF_MAX = 70
# --- job 6/8 : validate_scores
MINIMUM_OFFICES_PER_DEPARTEMENT_FOR_DPAE = 500
MINIMUM_OFFICES_PER_DEPARTEMENT_FOR_ALTERNANCE = 0
# --- job 8/8 : populate_flags
Fix evolution threshold for dpt 77
The threshold HIGH_SCORE_COMPANIES_DIFF_MAX, which is the maximum percentage
of difference in the number of companies in lbb from one importer cycle
to the next, was set to 70, and it reached 73 for department 77. The
threshold has now been raised to 75.import os
# --- importer input directory of DPAE and ETABLISSEMENT exports
INPUT_SOURCE_FOLDER = '/srv/lbb/data'
# --- job 1/8 & 2/8 : check_etablissements & extract_etablissements
JENKINS_ETAB_PROPERTIES_FILENAME = os.path.join(os.environ["WORKSPACE"], "properties.jenkins")
MINIMUM_OFFICES_TO_BE_EXTRACTED_PER_DEPARTEMENT = 10000
# --- job 3/8 & 4/8 : check_dpae & extract_dpae
JENKINS_DPAE_PROPERTIES_FILENAME = os.path.join(os.environ["WORKSPACE"], "properties_dpae.jenkins")
MAXIMUM_ZIPCODE_ERRORS = 100
MAXIMUM_INVALID_ROWS = 100
# --- job 5/8 : compute_scores
SCORE_COEFFICIENT_OF_VARIATION_MAX = 2.0
RMSE_MAX = 1500 # On 2017.03.15 departement 52 reached RMSE=1141
HIGH_SCORE_COMPANIES_DIFF_MAX = 75 # On 2019.05.03 departement 77 reached HIGH_SCORE_COMPANIES_DIFF_MAX~=73
# --- job 6/8 : validate_scores
MINIMUM_OFFICES_PER_DEPARTEMENT_FOR_DPAE = 500
MINIMUM_OFFICES_PER_DEPARTEMENT_FOR_ALTERNANCE = 0
# --- job 8/8 : populate_flags
|
<commit_before>import os
# --- importer input directory of DPAE and ETABLISSEMENT exports
INPUT_SOURCE_FOLDER = '/srv/lbb/data'
# --- job 1/8 & 2/8 : check_etablissements & extract_etablissements
JENKINS_ETAB_PROPERTIES_FILENAME = os.path.join(os.environ["WORKSPACE"], "properties.jenkins")
MINIMUM_OFFICES_TO_BE_EXTRACTED_PER_DEPARTEMENT = 10000
# --- job 3/8 & 4/8 : check_dpae & extract_dpae
JENKINS_DPAE_PROPERTIES_FILENAME = os.path.join(os.environ["WORKSPACE"], "properties_dpae.jenkins")
MAXIMUM_ZIPCODE_ERRORS = 100
MAXIMUM_INVALID_ROWS = 100
# --- job 5/8 : compute_scores
SCORE_COEFFICIENT_OF_VARIATION_MAX = 2.0
RMSE_MAX = 1500 # On 2017.03.15 departement 52 reached RMSE=1141
HIGH_SCORE_COMPANIES_DIFF_MAX = 70
# --- job 6/8 : validate_scores
MINIMUM_OFFICES_PER_DEPARTEMENT_FOR_DPAE = 500
MINIMUM_OFFICES_PER_DEPARTEMENT_FOR_ALTERNANCE = 0
# --- job 8/8 : populate_flags
<commit_msg>Fix evolution threshold for dpt 77
The threshold HIGH_SCORE_COMPANIES_DIFF_MAX, which is the maximum percentage
of difference in the number of companies in lbb from one importer cycle
to the next, was set to 70, and it reached 73 for department 77. The
threshold has now been raised to 75.<commit_after>import os
# --- importer input directory of DPAE and ETABLISSEMENT exports
INPUT_SOURCE_FOLDER = '/srv/lbb/data'
# --- job 1/8 & 2/8 : check_etablissements & extract_etablissements
JENKINS_ETAB_PROPERTIES_FILENAME = os.path.join(os.environ["WORKSPACE"], "properties.jenkins")
MINIMUM_OFFICES_TO_BE_EXTRACTED_PER_DEPARTEMENT = 10000
# --- job 3/8 & 4/8 : check_dpae & extract_dpae
JENKINS_DPAE_PROPERTIES_FILENAME = os.path.join(os.environ["WORKSPACE"], "properties_dpae.jenkins")
MAXIMUM_ZIPCODE_ERRORS = 100
MAXIMUM_INVALID_ROWS = 100
# --- job 5/8 : compute_scores
SCORE_COEFFICIENT_OF_VARIATION_MAX = 2.0
RMSE_MAX = 1500 # On 2017.03.15 departement 52 reached RMSE=1141
HIGH_SCORE_COMPANIES_DIFF_MAX = 75 # On 2019.05.03 departement 77 reached HIGH_SCORE_COMPANIES_DIFF_MAX~=73
# --- job 6/8 : validate_scores
MINIMUM_OFFICES_PER_DEPARTEMENT_FOR_DPAE = 500
MINIMUM_OFFICES_PER_DEPARTEMENT_FOR_ALTERNANCE = 0
# --- job 8/8 : populate_flags
|
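The commit message describes the check this constant guards: the cycle-to-cycle change in high-score company counts, expressed as a percentage. A toy sketch (everything except the constant name is invented) showing why a ~73% swing now passes:

HIGH_SCORE_COMPANIES_DIFF_MAX = 75

def diff_percentage(previous_count, current_count):
    # Percentage change in company counts between two importer cycles.
    return 100.0 * abs(current_count - previous_count) / previous_count

# Departement 77's observed ~73% swing fits under the raised ceiling.
assert diff_percentage(1000, 1730) <= HIGH_SCORE_COMPANIES_DIFF_MAX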
b9882cc9d12aef06091727c76263039b30f0c4ce
|
numscons/tools/ifort.py
|
numscons/tools/ifort.py
|
import sys
import warnings
from SCons.Util import \
WhereIs
from SCons.Tool.ifort import \
generate as old_generate
def generate_linux(env):
ifort = WhereIs('ifort')
if not ifort:
warnings.warn("ifort not found")
return old_generate(env)
def generate(env):
if sys.platform.startswith('linux'):
return generate_linux(env)
else:
raise RuntimeError('Intel fortran on %s not supported' % sys.platform)
def exists(env):
pass
|
import sys
import warnings
from SCons.Util import \
WhereIs
from SCons.Tool.ifort import \
generate as old_generate
from numscons.tools.intel_common import get_abi
def generate_linux(env):
ifort = WhereIs('ifort')
if not ifort:
warnings.warn("ifort not found")
return old_generate(env)
def generate_win32(env):
# Import here to avoid importing msvc tool on every platform
from SCons.Tool.MSCommon.common import get_output, parse_output
abi = get_abi(env, lang='FORTRAN')
# Set up environment
# XXX: detect this properly
batfile = r"C:\Program Files\Intel\Compiler\11.1\038\bin\ifortvars.bat"
out = get_output(batfile, args=abi)
d = parse_output(out)
for k, v in d.items():
env.PrependENVPath(k, v, delete_existing=True)
return old_generate(env)
def generate(env):
if sys.platform.startswith('linux'):
return generate_linux(env)
elif sys.platform == 'win32':
return generate_win32(env)
else:
raise RuntimeError('Intel fortran on %s not supported' % sys.platform)
def exists(env):
pass
|
Add initial support for the win32 Fortran compiler.
|
Add initial support for the win32 Fortran compiler.
|
Python
|
bsd-3-clause
|
cournape/numscons,cournape/numscons,cournape/numscons
|
import sys
import warnings
from SCons.Util import \
WhereIs
from SCons.Tool.ifort import \
generate as old_generate
def generate_linux(env):
ifort = WhereIs('ifort')
if not ifort:
warnings.warn("ifort not found")
return old_generate(env)
def generate(env):
if sys.platform.startswith('linux'):
return generate_linux(env)
else:
raise RuntimeError('Intel fortran on %s not supported' % sys.platform)
def exists(env):
pass
Add initial support for the win32 Fortran compiler.
|
import sys
import warnings
from SCons.Util import \
WhereIs
from SCons.Tool.ifort import \
generate as old_generate
from numscons.tools.intel_common import get_abi
def generate_linux(env):
ifort = WhereIs('ifort')
if not ifort:
warnings.warn("ifort not found")
return old_generate(env)
def generate_win32(env):
# Import here to avoid importing msvc tool on every platform
from SCons.Tool.MSCommon.common import get_output, parse_output
abi = get_abi(env, lang='FORTRAN')
# Set up environment
# XXX: detect this properly
batfile = r"C:\Program Files\Intel\Compiler\11.1\038\bin\ifortvars.bat"
out = get_output(batfile, args=abi)
d = parse_output(out)
for k, v in d.items():
env.PrependENVPath(k, v, delete_existing=True)
return old_generate(env)
def generate(env):
if sys.platform.startswith('linux'):
return generate_linux(env)
elif sys.platform == 'win32':
return generate_win32(env)
else:
raise RuntimeError('Intel fortran on %s not supported' % sys.platform)
def exists(env):
pass
|
<commit_before>import sys
import warnings
from SCons.Util import \
WhereIs
from SCons.Tool.ifort import \
generate as old_generate
def generate_linux(env):
ifort = WhereIs('ifort')
if not ifort:
warnings.warn("ifort not found")
return old_generate(env)
def generate(env):
if sys.platform.startswith('linux'):
return generate_linux(env)
else:
raise RuntimeError('Intel fortran on %s not supported' % sys.platform)
def exists(env):
pass
<commit_msg>Add initial support for the win32 Fortran compiler.<commit_after>
|
import sys
import warnings
from SCons.Util import \
WhereIs
from SCons.Tool.ifort import \
generate as old_generate
from numscons.tools.intel_common import get_abi
def generate_linux(env):
ifort = WhereIs('ifort')
if not ifort:
warnings.warn("ifort not found")
return old_generate(env)
def generate_win32(env):
# Import here to avoid importing msvc tool on every platform
from SCons.Tool.MSCommon.common import get_output, parse_output
abi = get_abi(env, lang='FORTRAN')
# Set up environment
# XXX: detect this properly
batfile = r"C:\Program Files\Intel\Compiler\11.1\038\bin\ifortvars.bat"
out = get_output(batfile, args=abi)
d = parse_output(out)
for k, v in d.items():
env.PrependENVPath(k, v, delete_existing=True)
return old_generate(env)
def generate(env):
if sys.platform.startswith('linux'):
return generate_linux(env)
elif sys.platform == 'win32':
return generate_win32(env)
else:
raise RuntimeError('Intel fortran on %s not supported' % sys.platform)
def exists(env):
pass
|
import sys
import warnings
from SCons.Util import \
WhereIs
from SCons.Tool.ifort import \
generate as old_generate
def generate_linux(env):
ifort = WhereIs('ifort')
if not ifort:
warnings.warn("ifort not found")
return old_generate(env)
def generate(env):
if sys.platform.startswith('linux'):
return generate_linux(env)
else:
raise RuntimeError('Intel fortran on %s not supported' % sys.platform)
def exists(env):
pass
Add initial support for the win32 Intel Fortran compiler.import sys
import warnings
from SCons.Util import \
WhereIs
from SCons.Tool.ifort import \
generate as old_generate
from numscons.tools.intel_common import get_abi
def generate_linux(env):
ifort = WhereIs('ifort')
if not ifort:
warnings.warn("ifort not found")
return old_generate(env)
def generate_win32(env):
# Import here to avoid importing msvc tool on every platform
from SCons.Tool.MSCommon.common import get_output, parse_output
abi = get_abi(env, lang='FORTRAN')
# Set up environment
# XXX: detect this properly
batfile = r"C:\Program Files\Intel\Compiler\11.1\038\bin\ifortvars.bat"
out = get_output(batfile, args=abi)
d = parse_output(out)
for k, v in d.items():
env.PrependENVPath(k, v, delete_existing=True)
return old_generate(env)
def generate(env):
if sys.platform.startswith('linux'):
return generate_linux(env)
elif sys.platform == 'win32':
return generate_win32(env)
else:
raise RuntimeError('Intel fortran on %s not supported' % sys.platform)
def exists(env):
pass
|
<commit_before>import sys
import warnings
from SCons.Util import \
WhereIs
from SCons.Tool.ifort import \
generate as old_generate
def generate_linux(env):
ifort = WhereIs('ifort')
if not ifort:
warnings.warn("ifort not found")
return old_generate(env)
def generate(env):
if sys.platform.startswith('linux'):
return generate_linux(env)
else:
raise RuntimeError('Intel fortran on %s not supported' % sys.platform)
def exists(env):
pass
<commit_msg>Add initial support for the win32 Intel Fortran compiler.<commit_after>import sys
import warnings
from SCons.Util import \
WhereIs
from SCons.Tool.ifort import \
generate as old_generate
from numscons.tools.intel_common import get_abi
def generate_linux(env):
ifort = WhereIs('ifort')
if not ifort:
warnings.warn("ifort not found")
return old_generate(env)
def generate_win32(env):
# Import here to avoid importing msvc tool on every platform
from SCons.Tool.MSCommon.common import get_output, parse_output
abi = get_abi(env, lang='FORTRAN')
# Set up environment
# XXX: detect this properly
batfile = r"C:\Program Files\Intel\Compiler\11.1\038\bin\ifortvars.bat"
out = get_output(batfile, args=abi)
d = parse_output(out)
for k, v in d.items():
env.PrependENVPath(k, v, delete_existing=True)
return old_generate(env)
def generate(env):
if sys.platform.startswith('linux'):
return generate_linux(env)
elif sys.platform == 'win32':
return generate_win32(env)
else:
raise RuntimeError('Intel fortran on %s not supported' % sys.platform)
def exists(env):
pass
|
8fd451b266f3441184220061cb25227530c0d256
|
collector/classes/service.py
|
collector/classes/service.py
|
# -*- coding: utf-8 -*-
import string
def sanitise_string(messy_str):
"""Whitelist characters in a string"""
valid_chars = ' {0}{1}'.format(string.ascii_letters, string.digits)
return u''.join(char for char in messy_str if char in valid_chars).strip()
class Service(object):
def __init__(self, numeric_id, detailed_data):
self.numeric_id = numeric_id
self.detailed_data = detailed_data
def attribute_exists(self, key):
return key in self.detailed_data
def get_datum(self, key):
datum = self.handle_bad_data(self.get(key))
return datum
def get(self, key):
return self.detailed_data[key]
def identifier(self):
"""Return a unique identifier for the service"""
return self.get('Slug')
def service_title(self):
return self.get('Name of service')
def abbreviated_department(self):
return self.get('Abbr')
def handle_bad_data(self, datum):
# TODO: Should we be more explicit about non-requested (***) data?
if datum == '' or datum == '-' or datum == '***':
return None
elif not isinstance(datum, (int, long, float, complex)):
# If the value we get from the spreadsheet is not numeric, send
# that to Backdrop as a null data point
print "Data from the spreadsheet doesn't look numeric: <{0}> (from {1})".format(datum, self.identifier())
return None
else:
return datum
|
# -*- coding: utf-8 -*-
import string
def sanitise_string(messy_str):
"""Whitelist characters in a string"""
valid_chars = ' {0}{1}'.format(string.ascii_letters, string.digits)
return u''.join(char for char in messy_str if char in valid_chars).strip()
class Service(object):
def __init__(self, numeric_id, detailed_data):
self.numeric_id = numeric_id
self.detailed_data = detailed_data
def attribute_exists(self, key):
return key in self.detailed_data
def get_datum(self, key):
datum = self.handle_bad_data(self.get(key))
return datum
def get(self, key):
return self.detailed_data[key]
def identifier(self):
"""Return a unique identifier for the service"""
return self.get('Slug')
def service_title(self):
return self.get('Name of service')
def abbreviated_department(self):
return self.get('Abbr')
def handle_bad_data(self, datum):
# TODO: Should we be more explicit about non-requested (***) data?
if datum == '' or datum == '-' or datum == '***' or datum == None:
return None
elif not isinstance(datum, (int, long, float, complex)):
# If the value we get from the spreadsheet is not numeric, send
# that to Backdrop as a null data point
print "Data from the spreadsheet doesn't look numeric: <{0}> (from {1})".format(datum, self.identifier())
return None
else:
return datum
|
Handle empty cells in the spreadsheet
|
Handle empty cells in the spreadsheet
|
Python
|
mit
|
alphagov/backdrop-transactions-explorer-collector,alphagov/backdrop-transactions-explorer-collector
|
# -*- coding: utf-8 -*-
import string
def sanitise_string(messy_str):
"""Whitelist characters in a string"""
valid_chars = ' {0}{1}'.format(string.ascii_letters, string.digits)
return u''.join(char for char in messy_str if char in valid_chars).strip()
class Service(object):
def __init__(self, numeric_id, detailed_data):
self.numeric_id = numeric_id
self.detailed_data = detailed_data
def attribute_exists(self, key):
return key in self.detailed_data
def get_datum(self, key):
datum = self.handle_bad_data(self.get(key))
return datum
def get(self, key):
return self.detailed_data[key]
def identifier(self):
"""Return a unique identifier for the service"""
return self.get('Slug')
def service_title(self):
return self.get('Name of service')
def abbreviated_department(self):
return self.get('Abbr')
def handle_bad_data(self, datum):
# TODO: Should we be more explicit about non-requested (***) data?
if datum == '' or datum == '-' or datum == '***':
return None
elif not isinstance(datum, (int, long, float, complex)):
# If the value we get from the spreadsheet is not numeric, send
# that to Backdrop as a null data point
print "Data from the spreadsheet doesn't look numeric: <{0}> (from {1})".format(datum, self.identifier())
return None
else:
return datum
Handle empty cells in the spreadsheet
|
# -*- coding: utf-8 -*-
import string
def sanitise_string(messy_str):
"""Whitelist characters in a string"""
valid_chars = ' {0}{1}'.format(string.ascii_letters, string.digits)
return u''.join(char for char in messy_str if char in valid_chars).strip()
class Service(object):
def __init__(self, numeric_id, detailed_data):
self.numeric_id = numeric_id
self.detailed_data = detailed_data
def attribute_exists(self, key):
return key in self.detailed_data
def get_datum(self, key):
datum = self.handle_bad_data(self.get(key))
return datum
def get(self, key):
return self.detailed_data[key]
def identifier(self):
"""Return a unique identifier for the service"""
return self.get('Slug')
def service_title(self):
return self.get('Name of service')
def abbreviated_department(self):
return self.get('Abbr')
def handle_bad_data(self, datum):
# TODO: Should we be more explicit about non-requested (***) data?
if datum == '' or datum == '-' or datum == '***' or datum == None:
return None
elif not isinstance(datum, (int, long, float, complex)):
# If the value we get from the spreadsheet is not numeric, send
# that to Backdrop as a null data point
print "Data from the spreadsheet doesn't look numeric: <{0}> (from {1})".format(datum, self.identifier())
return None
else:
return datum
|
<commit_before># -*- coding: utf-8 -*-
import string
def sanitise_string(messy_str):
"""Whitelist characters in a string"""
valid_chars = ' {0}{1}'.format(string.ascii_letters, string.digits)
return u''.join(char for char in messy_str if char in valid_chars).strip()
class Service(object):
def __init__(self, numeric_id, detailed_data):
self.numeric_id = numeric_id
self.detailed_data = detailed_data
def attribute_exists(self, key):
return key in self.detailed_data
def get_datum(self, key):
datum = self.handle_bad_data(self.get(key))
return datum
def get(self, key):
return self.detailed_data[key]
def identifier(self):
"""Return a unique identifier for the service"""
return self.get('Slug')
def service_title(self):
return self.get('Name of service')
def abbreviated_department(self):
return self.get('Abbr')
def handle_bad_data(self, datum):
# TODO: Should we be more explicit about non-requested (***) data?
if datum == '' or datum == '-' or datum == '***':
return None
elif not isinstance(datum, (int, long, float, complex)):
# If the value we get from the spreadsheet is not numeric, send
# that to Backdrop as a null data point
print "Data from the spreadsheet doesn't look numeric: <{0}> (from {1})".format(datum, self.identifier())
return None
else:
return datum
<commit_msg>Handle empty cells in the spreadsheet<commit_after>
|
# -*- coding: utf-8 -*-
import string
def sanitise_string(messy_str):
"""Whitelist characters in a string"""
valid_chars = ' {0}{1}'.format(string.ascii_letters, string.digits)
return u''.join(char for char in messy_str if char in valid_chars).strip()
class Service(object):
def __init__(self, numeric_id, detailed_data):
self.numeric_id = numeric_id
self.detailed_data = detailed_data
def attribute_exists(self, key):
return key in self.detailed_data
def get_datum(self, key):
datum = self.handle_bad_data(self.get(key))
return datum
def get(self, key):
return self.detailed_data[key]
def identifier(self):
"""Return a unique identifier for the service"""
return self.get('Slug')
def service_title(self):
return self.get('Name of service')
def abbreviated_department(self):
return self.get('Abbr')
def handle_bad_data(self, datum):
# TODO: Should we be more explicit about non-requested (***) data?
if datum == '' or datum == '-' or datum == '***' or datum == None:
return None
elif not isinstance(datum, (int, long, float, complex)):
# If the value we get from the spreadsheet is not numeric, send
# that to Backdrop as a null data point
print "Data from the spreadsheet doesn't look numeric: <{0}> (from {1})".format(datum, self.identifier())
return None
else:
return datum
|
# -*- coding: utf-8 -*-
import string
def sanitise_string(messy_str):
"""Whitelist characters in a string"""
valid_chars = ' {0}{1}'.format(string.ascii_letters, string.digits)
return u''.join(char for char in messy_str if char in valid_chars).strip()
class Service(object):
def __init__(self, numeric_id, detailed_data):
self.numeric_id = numeric_id
self.detailed_data = detailed_data
def attribute_exists(self, key):
return key in self.detailed_data
def get_datum(self, key):
datum = self.handle_bad_data(self.get(key))
return datum
def get(self, key):
return self.detailed_data[key]
def identifier(self):
"""Return a unique identifier for the service"""
return self.get('Slug')
def service_title(self):
return self.get('Name of service')
def abbreviated_department(self):
return self.get('Abbr')
def handle_bad_data(self, datum):
# TODO: Should we be more explicit about non-requested (***) data?
if datum == '' or datum == '-' or datum == '***':
return None
elif not isinstance(datum, (int, long, float, complex)):
# If the value we get from the spreadsheet is not numeric, send
# that to Backdrop as a null data point
print "Data from the spreadsheet doesn't look numeric: <{0}> (from {1})".format(datum, self.identifier())
return None
else:
return datum
Handle empty cells in the spreadsheet# -*- coding: utf-8 -*-
import string
def sanitise_string(messy_str):
"""Whitelist characters in a string"""
valid_chars = ' {0}{1}'.format(string.ascii_letters, string.digits)
return u''.join(char for char in messy_str if char in valid_chars).strip()
class Service(object):
def __init__(self, numeric_id, detailed_data):
self.numeric_id = numeric_id
self.detailed_data = detailed_data
def attribute_exists(self, key):
return key in self.detailed_data
def get_datum(self, key):
datum = self.handle_bad_data(self.get(key))
return datum
def get(self, key):
return self.detailed_data[key]
def identifier(self):
"""Return a unique identifier for the service"""
return self.get('Slug')
def service_title(self):
return self.get('Name of service')
def abbreviated_department(self):
return self.get('Abbr')
def handle_bad_data(self, datum):
# TODO: Should we be more explicit about non-requested (***) data?
if datum == '' or datum == '-' or datum == '***' or datum == None:
return None
elif not isinstance(datum, (int, long, float, complex)):
# If the value we get from the spreadsheet is not numeric, send
# that to Backdrop as a null data point
print "Data from the spreadsheet doesn't look numeric: <{0}> (from {1})".format(datum, self.identifier())
return None
else:
return datum
|
<commit_before># -*- coding: utf-8 -*-
import string
def sanitise_string(messy_str):
"""Whitelist characters in a string"""
valid_chars = ' {0}{1}'.format(string.ascii_letters, string.digits)
return u''.join(char for char in messy_str if char in valid_chars).strip()
class Service(object):
def __init__(self, numeric_id, detailed_data):
self.numeric_id = numeric_id
self.detailed_data = detailed_data
def attribute_exists(self, key):
return key in self.detailed_data
def get_datum(self, key):
datum = self.handle_bad_data(self.get(key))
return datum
def get(self, key):
return self.detailed_data[key]
def identifier(self):
"""Return a unique identifier for the service"""
return self.get('Slug')
def service_title(self):
return self.get('Name of service')
def abbreviated_department(self):
return self.get('Abbr')
def handle_bad_data(self, datum):
# TODO: Should we be more explicit about non-requested (***) data?
if datum == '' or datum == '-' or datum == '***':
return None
elif not isinstance(datum, (int, long, float, complex)):
# If the value we get from the spreadsheet is not numeric, send
# that to Backdrop as a null data point
print "Data from the spreadsheet doesn't look numeric: <{0}> (from {1})".format(datum, self.identifier())
return None
else:
return datum
<commit_msg>Handle empty cells in the spreadsheet<commit_after># -*- coding: utf-8 -*-
import string
def sanitise_string(messy_str):
"""Whitelist characters in a string"""
valid_chars = ' {0}{1}'.format(string.ascii_letters, string.digits)
return u''.join(char for char in messy_str if char in valid_chars).strip()
class Service(object):
def __init__(self, numeric_id, detailed_data):
self.numeric_id = numeric_id
self.detailed_data = detailed_data
def attribute_exists(self, key):
return key in self.detailed_data
def get_datum(self, key):
datum = self.handle_bad_data(self.get(key))
return datum
def get(self, key):
return self.detailed_data[key]
def identifier(self):
"""Return a unique identifier for the service"""
return self.get('Slug')
def service_title(self):
return self.get('Name of service')
def abbreviated_department(self):
return self.get('Abbr')
def handle_bad_data(self, datum):
# TODO: Should we be more explicit about non-requested (***) data?
if datum == '' or datum == '-' or datum == '***' or datum == None:
return None
elif not isinstance(datum, (int, long, float, complex)):
# If the value we get from the spreadsheet is not numeric, send
# that to Backdrop as a null data point
print "Data from the spreadsheet doesn't look numeric: <{0}> (from {1})".format(datum, self.identifier())
return None
else:
return datum
|
fffbf30ab4f64cbfad939529dde416280a68b125
|
addons/osfstorage/settings/defaults.py
|
addons/osfstorage/settings/defaults.py
|
# encoding: utf-8
import importlib
import os
import logging
from website import settings
logger = logging.getLogger(__name__)
DEFAULT_REGION_NAME = 'N. Virginia'
DEFAULT_REGION_ID = 'us-east-1'
WATERBUTLER_CREDENTIALS = {
'storage': {}
}
WATERBUTLER_SETTINGS = {
'storage': {
'provider': 'filesystem',
'folder': os.path.join(settings.BASE_PATH, 'osfstoragecache'),
}
}
WATERBUTLER_RESOURCE = 'folder'
DISK_SAVING_MODE = settings.DISK_SAVING_MODE
try:
mod = importlib.import_module('.{}'.format(settings.MIGRATION_ENV), package='addons.osfstorage.settings')
globals().update({k: getattr(mod, k) for k in dir(mod)})
except Exception as ex:
logger.warn('No migration settings loaded for OSFStorage, falling back to local dev. {}'.format(ex))
|
# encoding: utf-8
import importlib
import os
import logging
from website import settings
logger = logging.getLogger(__name__)
DEFAULT_REGION_NAME = 'United States'
DEFAULT_REGION_ID = 'us'
WATERBUTLER_CREDENTIALS = {
'storage': {}
}
WATERBUTLER_SETTINGS = {
'storage': {
'provider': 'filesystem',
'folder': os.path.join(settings.BASE_PATH, 'osfstoragecache'),
}
}
WATERBUTLER_RESOURCE = 'folder'
DISK_SAVING_MODE = settings.DISK_SAVING_MODE
try:
mod = importlib.import_module('.{}'.format(settings.MIGRATION_ENV), package='addons.osfstorage.settings')
globals().update({k: getattr(mod, k) for k in dir(mod)})
except Exception as ex:
logger.warn('No migration settings loaded for OSFStorage, falling back to local dev. {}'.format(ex))
|
Adjust OSF Storage default region names
|
Adjust OSF Storage default region names
|
Python
|
apache-2.0
|
pattisdr/osf.io,pattisdr/osf.io,brianjgeiger/osf.io,mattclark/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,adlius/osf.io,aaxelb/osf.io,saradbowman/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,felliott/osf.io,saradbowman/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,erinspace/osf.io,adlius/osf.io,baylee-d/osf.io,CenterForOpenScience/osf.io,baylee-d/osf.io,adlius/osf.io,Johnetordoff/osf.io,mattclark/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,mfraezz/osf.io,caseyrollins/osf.io,cslzchen/osf.io,baylee-d/osf.io,erinspace/osf.io,mattclark/osf.io,mfraezz/osf.io,brianjgeiger/osf.io,HalcyonChimera/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,adlius/osf.io,pattisdr/osf.io,felliott/osf.io,felliott/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,erinspace/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,caseyrollins/osf.io,mfraezz/osf.io,aaxelb/osf.io,CenterForOpenScience/osf.io,felliott/osf.io,caseyrollins/osf.io,cslzchen/osf.io
|
# encoding: utf-8
import importlib
import os
import logging
from website import settings
logger = logging.getLogger(__name__)
DEFAULT_REGION_NAME = 'N. Virginia'
DEFAULT_REGION_ID = 'us-east-1'
WATERBUTLER_CREDENTIALS = {
'storage': {}
}
WATERBUTLER_SETTINGS = {
'storage': {
'provider': 'filesystem',
'folder': os.path.join(settings.BASE_PATH, 'osfstoragecache'),
}
}
WATERBUTLER_RESOURCE = 'folder'
DISK_SAVING_MODE = settings.DISK_SAVING_MODE
try:
mod = importlib.import_module('.{}'.format(settings.MIGRATION_ENV), package='addons.osfstorage.settings')
globals().update({k: getattr(mod, k) for k in dir(mod)})
except Exception as ex:
logger.warn('No migration settings loaded for OSFStorage, falling back to local dev. {}'.format(ex))
Adjust OSF Storage default region names
|
# encoding: utf-8
import importlib
import os
import logging
from website import settings
logger = logging.getLogger(__name__)
DEFAULT_REGION_NAME = 'United States'
DEFAULT_REGION_ID = 'us'
WATERBUTLER_CREDENTIALS = {
'storage': {}
}
WATERBUTLER_SETTINGS = {
'storage': {
'provider': 'filesystem',
'folder': os.path.join(settings.BASE_PATH, 'osfstoragecache'),
}
}
WATERBUTLER_RESOURCE = 'folder'
DISK_SAVING_MODE = settings.DISK_SAVING_MODE
try:
mod = importlib.import_module('.{}'.format(settings.MIGRATION_ENV), package='addons.osfstorage.settings')
globals().update({k: getattr(mod, k) for k in dir(mod)})
except Exception as ex:
logger.warn('No migration settings loaded for OSFStorage, falling back to local dev. {}'.format(ex))
|
<commit_before># encoding: utf-8
import importlib
import os
import logging
from website import settings
logger = logging.getLogger(__name__)
DEFAULT_REGION_NAME = 'N. Virginia'
DEFAULT_REGION_ID = 'us-east-1'
WATERBUTLER_CREDENTIALS = {
'storage': {}
}
WATERBUTLER_SETTINGS = {
'storage': {
'provider': 'filesystem',
'folder': os.path.join(settings.BASE_PATH, 'osfstoragecache'),
}
}
WATERBUTLER_RESOURCE = 'folder'
DISK_SAVING_MODE = settings.DISK_SAVING_MODE
try:
mod = importlib.import_module('.{}'.format(settings.MIGRATION_ENV), package='addons.osfstorage.settings')
globals().update({k: getattr(mod, k) for k in dir(mod)})
except Exception as ex:
logger.warn('No migration settings loaded for OSFStorage, falling back to local dev. {}'.format(ex))
<commit_msg>Adjust OSF Storage default region names<commit_after>
|
# encoding: utf-8
import importlib
import os
import logging
from website import settings
logger = logging.getLogger(__name__)
DEFAULT_REGION_NAME = 'United States'
DEFAULT_REGION_ID = 'us'
WATERBUTLER_CREDENTIALS = {
'storage': {}
}
WATERBUTLER_SETTINGS = {
'storage': {
'provider': 'filesystem',
'folder': os.path.join(settings.BASE_PATH, 'osfstoragecache'),
}
}
WATERBUTLER_RESOURCE = 'folder'
DISK_SAVING_MODE = settings.DISK_SAVING_MODE
try:
mod = importlib.import_module('.{}'.format(settings.MIGRATION_ENV), package='addons.osfstorage.settings')
globals().update({k: getattr(mod, k) for k in dir(mod)})
except Exception as ex:
logger.warn('No migration settings loaded for OSFStorage, falling back to local dev. {}'.format(ex))
|
# encoding: utf-8
import importlib
import os
import logging
from website import settings
logger = logging.getLogger(__name__)
DEFAULT_REGION_NAME = 'N. Virginia'
DEFAULT_REGION_ID = 'us-east-1'
WATERBUTLER_CREDENTIALS = {
'storage': {}
}
WATERBUTLER_SETTINGS = {
'storage': {
'provider': 'filesystem',
'folder': os.path.join(settings.BASE_PATH, 'osfstoragecache'),
}
}
WATERBUTLER_RESOURCE = 'folder'
DISK_SAVING_MODE = settings.DISK_SAVING_MODE
try:
mod = importlib.import_module('.{}'.format(settings.MIGRATION_ENV), package='addons.osfstorage.settings')
globals().update({k: getattr(mod, k) for k in dir(mod)})
except Exception as ex:
logger.warn('No migration settings loaded for OSFStorage, falling back to local dev. {}'.format(ex))
Adjust OSF Storage default region names# encoding: utf-8
import importlib
import os
import logging
from website import settings
logger = logging.getLogger(__name__)
DEFAULT_REGION_NAME = 'United States'
DEFAULT_REGION_ID = 'us'
WATERBUTLER_CREDENTIALS = {
'storage': {}
}
WATERBUTLER_SETTINGS = {
'storage': {
'provider': 'filesystem',
'folder': os.path.join(settings.BASE_PATH, 'osfstoragecache'),
}
}
WATERBUTLER_RESOURCE = 'folder'
DISK_SAVING_MODE = settings.DISK_SAVING_MODE
try:
mod = importlib.import_module('.{}'.format(settings.MIGRATION_ENV), package='addons.osfstorage.settings')
globals().update({k: getattr(mod, k) for k in dir(mod)})
except Exception as ex:
logger.warn('No migration settings loaded for OSFStorage, falling back to local dev. {}'.format(ex))
|
<commit_before># encoding: utf-8
import importlib
import os
import logging
from website import settings
logger = logging.getLogger(__name__)
DEFAULT_REGION_NAME = 'N. Virginia'
DEFAULT_REGION_ID = 'us-east-1'
WATERBUTLER_CREDENTIALS = {
'storage': {}
}
WATERBUTLER_SETTINGS = {
'storage': {
'provider': 'filesystem',
'folder': os.path.join(settings.BASE_PATH, 'osfstoragecache'),
}
}
WATERBUTLER_RESOURCE = 'folder'
DISK_SAVING_MODE = settings.DISK_SAVING_MODE
try:
mod = importlib.import_module('.{}'.format(settings.MIGRATION_ENV), package='addons.osfstorage.settings')
globals().update({k: getattr(mod, k) for k in dir(mod)})
except Exception as ex:
logger.warn('No migration settings loaded for OSFStorage, falling back to local dev. {}'.format(ex))
<commit_msg>Adjust OSF Storage default region names<commit_after># encoding: utf-8
import importlib
import os
import logging
from website import settings
logger = logging.getLogger(__name__)
DEFAULT_REGION_NAME = 'United States'
DEFAULT_REGION_ID = 'us'
WATERBUTLER_CREDENTIALS = {
'storage': {}
}
WATERBUTLER_SETTINGS = {
'storage': {
'provider': 'filesystem',
'folder': os.path.join(settings.BASE_PATH, 'osfstoragecache'),
}
}
WATERBUTLER_RESOURCE = 'folder'
DISK_SAVING_MODE = settings.DISK_SAVING_MODE
try:
mod = importlib.import_module('.{}'.format(settings.MIGRATION_ENV), package='addons.osfstorage.settings')
globals().update({k: getattr(mod, k) for k in dir(mod)})
except Exception as ex:
logger.warn('No migration settings loaded for OSFStorage, falling back to local dev. {}'.format(ex))
|
944db8255306a666f290775fec01f5ab33de2eb0
|
test/integration/022_bigquery_test/test_bigquery_adapter_specific.py
|
test/integration/022_bigquery_test/test_bigquery_adapter_specific.py
|
""""Test adapter specific config options."""
from pprint import pprint
from test.integration.base import DBTIntegrationTest, use_profile
import textwrap
import yaml
class TestBigqueryAdapterSpecific(DBTIntegrationTest):
@property
def schema(self):
return "bigquery_test_022"
@property
def models(self):
return "adapter-specific-models"
@property
def profile_config(self):
return self.bigquery_profile()
@property
def project_config(self):
return yaml.safe_load(textwrap.dedent('''\
config-version: 2
models:
test:
materialized: table
expiring_table:
hours_to_expiration: 4
'''))
@use_profile('bigquery')
def test_bigquery_hours_to_expiration(self):
_, stdout = self.run_dbt_and_capture(['--debug', 'run'])
pprint(stdout)
self.assertIn(
'expiration_timestamp=TIMESTAMP_ADD(CURRENT_TIMESTAMP(), INTERVAL '
'4 hour)', stdout)
|
""""Test adapter specific config options."""
from pprint import pprint
from test.integration.base import DBTIntegrationTest, use_profile
import textwrap
import yaml
class TestBigqueryAdapterSpecific(DBTIntegrationTest):
@property
def schema(self):
return "bigquery_test_022"
@property
def models(self):
return "adapter-specific-models"
@property
def profile_config(self):
return self.bigquery_profile()
@property
def project_config(self):
return yaml.safe_load(textwrap.dedent('''\
config-version: 2
models:
test:
materialized: table
expiring_table:
hours_to_expiration: 4
'''))
@use_profile('bigquery')
def test_bigquery_hours_to_expiration(self):
_, stdout = self.run_dbt_and_capture(['--debug', 'run'])
self.assertIn(
'expiration_timestamp=TIMESTAMP_ADD(CURRENT_TIMESTAMP(), INTERVAL '
'4 hour)', stdout)
|
Remove unnecessary print debug code
|
Remove unnecessary print debug code
|
Python
|
apache-2.0
|
analyst-collective/dbt,analyst-collective/dbt
|
""""Test adapter specific config options."""
from pprint import pprint
from test.integration.base import DBTIntegrationTest, use_profile
import textwrap
import yaml
class TestBigqueryAdapterSpecific(DBTIntegrationTest):
@property
def schema(self):
return "bigquery_test_022"
@property
def models(self):
return "adapter-specific-models"
@property
def profile_config(self):
return self.bigquery_profile()
@property
def project_config(self):
return yaml.safe_load(textwrap.dedent('''\
config-version: 2
models:
test:
materialized: table
expiring_table:
hours_to_expiration: 4
'''))
@use_profile('bigquery')
def test_bigquery_hours_to_expiration(self):
_, stdout = self.run_dbt_and_capture(['--debug', 'run'])
pprint(stdout)
self.assertIn(
'expiration_timestamp=TIMESTAMP_ADD(CURRENT_TIMESTAMP(), INTERVAL '
'4 hour)', stdout)
Remove unnecessary print debug code
|
""""Test adapter specific config options."""
from pprint import pprint
from test.integration.base import DBTIntegrationTest, use_profile
import textwrap
import yaml
class TestBigqueryAdapterSpecific(DBTIntegrationTest):
@property
def schema(self):
return "bigquery_test_022"
@property
def models(self):
return "adapter-specific-models"
@property
def profile_config(self):
return self.bigquery_profile()
@property
def project_config(self):
return yaml.safe_load(textwrap.dedent('''\
config-version: 2
models:
test:
materialized: table
expiring_table:
hours_to_expiration: 4
'''))
@use_profile('bigquery')
def test_bigquery_hours_to_expiration(self):
_, stdout = self.run_dbt_and_capture(['--debug', 'run'])
self.assertIn(
'expiration_timestamp=TIMESTAMP_ADD(CURRENT_TIMESTAMP(), INTERVAL '
'4 hour)', stdout)
|
<commit_before>""""Test adapter specific config options."""
from pprint import pprint
from test.integration.base import DBTIntegrationTest, use_profile
import textwrap
import yaml
class TestBigqueryAdapterSpecific(DBTIntegrationTest):
@property
def schema(self):
return "bigquery_test_022"
@property
def models(self):
return "adapter-specific-models"
@property
def profile_config(self):
return self.bigquery_profile()
@property
def project_config(self):
return yaml.safe_load(textwrap.dedent('''\
config-version: 2
models:
test:
materialized: table
expiring_table:
hours_to_expiration: 4
'''))
@use_profile('bigquery')
def test_bigquery_hours_to_expiration(self):
_, stdout = self.run_dbt_and_capture(['--debug', 'run'])
pprint(stdout)
self.assertIn(
'expiration_timestamp=TIMESTAMP_ADD(CURRENT_TIMESTAMP(), INTERVAL '
'4 hour)', stdout)
<commit_msg>Remove unnecessary print debug code<commit_after>
|
""""Test adapter specific config options."""
from pprint import pprint
from test.integration.base import DBTIntegrationTest, use_profile
import textwrap
import yaml
class TestBigqueryAdapterSpecific(DBTIntegrationTest):
@property
def schema(self):
return "bigquery_test_022"
@property
def models(self):
return "adapter-specific-models"
@property
def profile_config(self):
return self.bigquery_profile()
@property
def project_config(self):
return yaml.safe_load(textwrap.dedent('''\
config-version: 2
models:
test:
materialized: table
expiring_table:
hours_to_expiration: 4
'''))
@use_profile('bigquery')
def test_bigquery_hours_to_expiration(self):
_, stdout = self.run_dbt_and_capture(['--debug', 'run'])
self.assertIn(
'expiration_timestamp=TIMESTAMP_ADD(CURRENT_TIMESTAMP(), INTERVAL '
'4 hour)', stdout)
|
""""Test adapter specific config options."""
from pprint import pprint
from test.integration.base import DBTIntegrationTest, use_profile
import textwrap
import yaml
class TestBigqueryAdapterSpecific(DBTIntegrationTest):
@property
def schema(self):
return "bigquery_test_022"
@property
def models(self):
return "adapter-specific-models"
@property
def profile_config(self):
return self.bigquery_profile()
@property
def project_config(self):
return yaml.safe_load(textwrap.dedent('''\
config-version: 2
models:
test:
materialized: table
expiring_table:
hours_to_expiration: 4
'''))
@use_profile('bigquery')
def test_bigquery_hours_to_expiration(self):
_, stdout = self.run_dbt_and_capture(['--debug', 'run'])
pprint(stdout)
self.assertIn(
'expiration_timestamp=TIMESTAMP_ADD(CURRENT_TIMESTAMP(), INTERVAL '
'4 hour)', stdout)
Remove unnecessary print debug code"""Test adapter specific config options."""
from pprint import pprint
from test.integration.base import DBTIntegrationTest, use_profile
import textwrap
import yaml
class TestBigqueryAdapterSpecific(DBTIntegrationTest):
@property
def schema(self):
return "bigquery_test_022"
@property
def models(self):
return "adapter-specific-models"
@property
def profile_config(self):
return self.bigquery_profile()
@property
def project_config(self):
return yaml.safe_load(textwrap.dedent('''\
config-version: 2
models:
test:
materialized: table
expiring_table:
hours_to_expiration: 4
'''))
@use_profile('bigquery')
def test_bigquery_hours_to_expiration(self):
_, stdout = self.run_dbt_and_capture(['--debug', 'run'])
self.assertIn(
'expiration_timestamp=TIMESTAMP_ADD(CURRENT_TIMESTAMP(), INTERVAL '
'4 hour)', stdout)
|
<commit_before>""""Test adapter specific config options."""
from pprint import pprint
from test.integration.base import DBTIntegrationTest, use_profile
import textwrap
import yaml
class TestBigqueryAdapterSpecific(DBTIntegrationTest):
@property
def schema(self):
return "bigquery_test_022"
@property
def models(self):
return "adapter-specific-models"
@property
def profile_config(self):
return self.bigquery_profile()
@property
def project_config(self):
return yaml.safe_load(textwrap.dedent('''\
config-version: 2
models:
test:
materialized: table
expiring_table:
hours_to_expiration: 4
'''))
@use_profile('bigquery')
def test_bigquery_hours_to_expiration(self):
_, stdout = self.run_dbt_and_capture(['--debug', 'run'])
pprint(stdout)
self.assertIn(
'expiration_timestamp=TIMESTAMP_ADD(CURRENT_TIMESTAMP(), INTERVAL '
'4 hour)', stdout)
<commit_msg>Remove unnecessary print debug code<commit_after>"""Test adapter specific config options."""
from pprint import pprint
from test.integration.base import DBTIntegrationTest, use_profile
import textwrap
import yaml
class TestBigqueryAdapterSpecific(DBTIntegrationTest):
@property
def schema(self):
return "bigquery_test_022"
@property
def models(self):
return "adapter-specific-models"
@property
def profile_config(self):
return self.bigquery_profile()
@property
def project_config(self):
return yaml.safe_load(textwrap.dedent('''\
config-version: 2
models:
test:
materialized: table
expiring_table:
hours_to_expiration: 4
'''))
@use_profile('bigquery')
def test_bigquery_hours_to_expiration(self):
_, stdout = self.run_dbt_and_capture(['--debug', 'run'])
self.assertIn(
'expiration_timestamp=TIMESTAMP_ADD(CURRENT_TIMESTAMP(), INTERVAL '
'4 hour)', stdout)
|
2aa81349f2fae2f782c5174720ea7d2ab406c946
|
ditto/multitenancy/middleware.py
|
ditto/multitenancy/middleware.py
|
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
class FakeTenant(object):
is_main = lambda self: False
is_configured = lambda self: True
id = 'di'
def chat_host(self):
if settings.DEBUG:
return 'network1.localhost'
return 'network1.ditto.technology'
# NOTE: this MUST come first in the middleware order
class CurrentTenantMiddleware(object):
def process_request(self, request):
request.tenant = FakeTenant()
request.urlconf = _get_urls('di')
def _get_urls(tenant_slug):
# Note, need 'tuple' here otherwise url stuff blows up
return tuple(
patterns(
'',
url(r'^%s/' % tenant_slug, include('network_urls')),
url(r'^main/', include('multitenancy.urls', namespace="ditto")),
) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT))
|
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.views.generic import RedirectView
class FakeTenant(object):
is_main = lambda self: False
is_configured = lambda self: True
id = 'di'
def chat_host(self):
if settings.DEBUG:
return 'network1.localhost'
return 'network1.ditto.technology'
# NOTE: this MUST come first in the middleware order
class CurrentTenantMiddleware(object):
def process_request(self, request):
request.tenant = FakeTenant()
request.urlconf = _get_urls('di')
def _get_urls(tenant_slug):
# Note, need 'tuple' here otherwise url stuff blows up
return tuple(
patterns(
'',
(r'^$', RedirectView.as_view(
pattern_name='ditto:home',
permanent=True,
)),
url(r'^%s/' % tenant_slug, include('network_urls')),
url(r'^main/', include('multitenancy.urls', namespace="ditto")),
) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT))
|
Fix 404 when no path in URL
|
Fix 404 when no path in URL
|
Python
|
bsd-3-clause
|
Kvoti/ditto,Kvoti/ditto,Kvoti/ditto,Kvoti/ditto,Kvoti/ditto
|
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
class FakeTenant(object):
is_main = lambda self: False
is_configured = lambda self: True
id = 'di'
def chat_host(self):
if settings.DEBUG:
return 'network1.localhost'
return 'network1.ditto.technology'
# NOTE: this MUST come first in the middleware order
class CurrentTenantMiddleware(object):
def process_request(self, request):
request.tenant = FakeTenant()
request.urlconf = _get_urls('di')
def _get_urls(tenant_slug):
# Note, need 'tuple' here otherwise url stuff blows up
return tuple(
patterns(
'',
url(r'^%s/' % tenant_slug, include('network_urls')),
url(r'^main/', include('multitenancy.urls', namespace="ditto")),
) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT))
Fix 404 when no path in URL
|
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.views.generic import RedirectView
class FakeTenant(object):
is_main = lambda self: False
is_configured = lambda self: True
id = 'di'
def chat_host(self):
if settings.DEBUG:
return 'network1.localhost'
return 'network1.ditto.technology'
# NOTE: this MUST come first in the middleware order
class CurrentTenantMiddleware(object):
def process_request(self, request):
request.tenant = FakeTenant()
request.urlconf = _get_urls('di')
def _get_urls(tenant_slug):
# Note, need 'tuple' here otherwise url stuff blows up
return tuple(
patterns(
'',
(r'^$', RedirectView.as_view(
pattern_name='ditto:home',
permanent=True,
)),
url(r'^%s/' % tenant_slug, include('network_urls')),
url(r'^main/', include('multitenancy.urls', namespace="ditto")),
) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT))
|
<commit_before>from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
class FakeTenant(object):
is_main = lambda self: False
is_configured = lambda self: True
id = 'di'
def chat_host(self):
if settings.DEBUG:
return 'network1.localhost'
return 'network1.ditto.technology'
# NOTE: this MUST come first in the middleware order
class CurrentTenantMiddleware(object):
def process_request(self, request):
request.tenant = FakeTenant()
request.urlconf = _get_urls('di')
def _get_urls(tenant_slug):
# Note, need 'tuple' here otherwise url stuff blows up
return tuple(
patterns(
'',
url(r'^%s/' % tenant_slug, include('network_urls')),
url(r'^main/', include('multitenancy.urls', namespace="ditto")),
) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT))
<commit_msg>Fix 404 when no path in URL<commit_after>
|
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.views.generic import RedirectView
class FakeTenant(object):
is_main = lambda self: False
is_configured = lambda self: True
id = 'di'
def chat_host(self):
if settings.DEBUG:
return 'network1.localhost'
return 'network1.ditto.technology'
# NOTE: this MUST come first in the middleware order
class CurrentTenantMiddleware(object):
def process_request(self, request):
request.tenant = FakeTenant()
request.urlconf = _get_urls('di')
def _get_urls(tenant_slug):
# Note, need 'tuple' here otherwise url stuff blows up
return tuple(
patterns(
'',
(r'^$', RedirectView.as_view(
pattern_name='ditto:home',
permanent=True,
)),
url(r'^%s/' % tenant_slug, include('network_urls')),
url(r'^main/', include('multitenancy.urls', namespace="ditto")),
) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT))
|
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
class FakeTenant(object):
is_main = lambda self: False
is_configured = lambda self: True
id = 'di'
def chat_host(self):
if settings.DEBUG:
return 'network1.localhost'
return 'network1.ditto.technology'
# NOTE: this MUST come first in the middleware order
class CurrentTenantMiddleware(object):
def process_request(self, request):
request.tenant = FakeTenant()
request.urlconf = _get_urls('di')
def _get_urls(tenant_slug):
# Note, need 'tuple' here otherwise url stuff blows up
return tuple(
patterns(
'',
url(r'^%s/' % tenant_slug, include('network_urls')),
url(r'^main/', include('multitenancy.urls', namespace="ditto")),
) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT))
Fix 404 when no path in URLfrom django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.views.generic import RedirectView
class FakeTenant(object):
is_main = lambda self: False
is_configured = lambda self: True
id = 'di'
def chat_host(self):
if settings.DEBUG:
return 'network1.localhost'
return 'network1.ditto.technology'
# NOTE: this MUST come first in the middleware order
class CurrentTenantMiddleware(object):
def process_request(self, request):
request.tenant = FakeTenant()
request.urlconf = _get_urls('di')
def _get_urls(tenant_slug):
# Note, need 'tuple' here otherwise url stuff blows up
return tuple(
patterns(
'',
(r'^$', RedirectView.as_view(
pattern_name='ditto:home',
permanent=True,
)),
url(r'^%s/' % tenant_slug, include('network_urls')),
url(r'^main/', include('multitenancy.urls', namespace="ditto")),
) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT))
|
<commit_before>from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
class FakeTenant(object):
is_main = lambda self: False
is_configured = lambda self: True
id = 'di'
def chat_host(self):
if settings.DEBUG:
return 'network1.localhost'
return 'network1.ditto.technology'
# NOTE: this MUST come first in the middleware order
class CurrentTenantMiddleware(object):
def process_request(self, request):
request.tenant = FakeTenant()
request.urlconf = _get_urls('di')
def _get_urls(tenant_slug):
# Note, need 'tuple' here otherwise url stuff blows up
return tuple(
patterns(
'',
url(r'^%s/' % tenant_slug, include('network_urls')),
url(r'^main/', include('multitenancy.urls', namespace="ditto")),
) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT))
<commit_msg>Fix 404 when no path in URL<commit_after>from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.views.generic import RedirectView
class FakeTenant(object):
is_main = lambda self: False
is_configured = lambda self: True
id = 'di'
def chat_host(self):
if settings.DEBUG:
return 'network1.localhost'
return 'network1.ditto.technology'
# NOTE: this MUST come first in the middleware order
class CurrentTenantMiddleware(object):
def process_request(self, request):
request.tenant = FakeTenant()
request.urlconf = _get_urls('di')
def _get_urls(tenant_slug):
# Note, need 'tuple' here otherwise url stuff blows up
return tuple(
patterns(
'',
(r'^$', RedirectView.as_view(
pattern_name='ditto:home',
permanent=True,
)),
url(r'^%s/' % tenant_slug, include('network_urls')),
url(r'^main/', include('multitenancy.urls', namespace="ditto")),
) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT))
|
721c4d0cd4e99b4c45eeee813375e7d0050ef970
|
doc/pyplots/plot_qualitative2.py
|
doc/pyplots/plot_qualitative2.py
|
# -*- coding: utf-8 -*-
"""Plot to demonstrate the qualitative2 colormap.
"""
import numpy as np
import matplotlib.pyplot as plt
from typhon.plots import (figsize, mpl_colors)
fig, ax = plt.subplots(figsize=figsize(10))
ax.set_prop_cycle(color=mpl_colors('qualitative2', 7))
for c in np.arange(7):
X = np.random.randn(100)/2
Y = np.random.randn(100)/2
ax.plot(X+c, Y+c, linestyle='none', marker='.', markersize=20)
fig.tight_layout()
plt.show()
|
# -*- coding: utf-8 -*-
"""Plot to demonstrate the qualitative2 colormap.
"""
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.lines import Line2D
from typhon.plots import (figsize, mpl_colors)
# Create an iterator to conveniently change the marker in the following plot.
markers = (m for m in Line2D.filled_markers)
fig, ax = plt.subplots(figsize=figsize(10))
ax.set_prop_cycle(color=mpl_colors('qualitative2', 7))
for c in np.arange(7):
X = np.random.randn(100) / 2
Y = np.random.randn(100) / 2
ax.plot(X+c, Y+c, linestyle='none', marker=next(markers), markersize=10)
fig.tight_layout()
plt.show()
|
Change marker as an example.
|
Change marker as an example.
|
Python
|
mit
|
atmtools/typhon,atmtools/typhon
|
# -*- coding: utf-8 -*-
"""Plot to demonstrate the qualitative2 colormap.
"""
import numpy as np
import matplotlib.pyplot as plt
from typhon.plots import (figsize, mpl_colors)
fig, ax = plt.subplots(figsize=figsize(10))
ax.set_prop_cycle(color=mpl_colors('qualitative2', 7))
for c in np.arange(7):
X = np.random.randn(100)/2
Y = np.random.randn(100)/2
ax.plot(X+c, Y+c, linestyle='none', marker='.', markersize=20)
fig.tight_layout()
plt.show()
Change marker as an example.
|
# -*- coding: utf-8 -*-
"""Plot to demonstrate the qualitative2 colormap.
"""
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.lines import Line2D
from typhon.plots import (figsize, mpl_colors)
# Create an iterator to conveniently change the marker in the following plot.
markers = (m for m in Line2D.filled_markers)
fig, ax = plt.subplots(figsize=figsize(10))
ax.set_prop_cycle(color=mpl_colors('qualitative2', 7))
for c in np.arange(7):
X = np.random.randn(100) / 2
Y = np.random.randn(100) / 2
ax.plot(X+c, Y+c, linestyle='none', marker=next(markers), markersize=10)
fig.tight_layout()
plt.show()
|
<commit_before># -*- coding: utf-8 -*-
"""Plot to demonstrate the qualitative2 colormap.
"""
import numpy as np
import matplotlib.pyplot as plt
from typhon.plots import (figsize, mpl_colors)
fig, ax = plt.subplots(figsize=figsize(10))
ax.set_prop_cycle(color=mpl_colors('qualitative2', 7))
for c in np.arange(7):
X = np.random.randn(100)/2
Y = np.random.randn(100)/2
ax.plot(X+c, Y+c, linestyle='none', marker='.', markersize=20)
fig.tight_layout()
plt.show()
<commit_msg>Change marker as an example.<commit_after>
|
# -*- coding: utf-8 -*-
"""Plot to demonstrate the qualitative2 colormap.
"""
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.lines import Line2D
from typhon.plots import (figsize, mpl_colors)
# Create an iterator to conveniently change the marker in the following plot.
markers = (m for m in Line2D.filled_markers)
fig, ax = plt.subplots(figsize=figsize(10))
ax.set_prop_cycle(color=mpl_colors('qualitative2', 7))
for c in np.arange(7):
X = np.random.randn(100) / 2
Y = np.random.randn(100) / 2
ax.plot(X+c, Y+c, linestyle='none', marker=next(markers), markersize=10)
fig.tight_layout()
plt.show()
|
# -*- coding: utf-8 -*-
"""Plot to demonstrate the qualitative2 colormap.
"""
import numpy as np
import matplotlib.pyplot as plt
from typhon.plots import (figsize, mpl_colors)
fig, ax = plt.subplots(figsize=figsize(10))
ax.set_prop_cycle(color=mpl_colors('qualitative2', 7))
for c in np.arange(7):
X = np.random.randn(100)/2
Y = np.random.randn(100)/2
ax.plot(X+c, Y+c, linestyle='none', marker='.', markersize=20)
fig.tight_layout()
plt.show()
Change marker as an example.# -*- coding: utf-8 -*-
"""Plot to demonstrate the qualitative2 colormap.
"""
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.lines import Line2D
from typhon.plots import (figsize, mpl_colors)
# Create an iterator to conveniently change the marker in the following plot.
markers = (m for m in Line2D.filled_markers)
fig, ax = plt.subplots(figsize=figsize(10))
ax.set_prop_cycle(color=mpl_colors('qualitative2', 7))
for c in np.arange(7):
X = np.random.randn(100) / 2
Y = np.random.randn(100) / 2
ax.plot(X+c, Y+c, linestyle='none', marker=next(markers), markersize=10)
fig.tight_layout()
plt.show()
|
<commit_before># -*- coding: utf-8 -*-
"""Plot to demonstrate the qualitative2 colormap.
"""
import numpy as np
import matplotlib.pyplot as plt
from typhon.plots import (figsize, mpl_colors)
fig, ax = plt.subplots(figsize=figsize(10))
ax.set_prop_cycle(color=mpl_colors('qualitative2', 7))
for c in np.arange(7):
X = np.random.randn(100)/2
Y = np.random.randn(100)/2
ax.plot(X+c, Y+c, linestyle='none', marker='.', markersize=20)
fig.tight_layout()
plt.show()
<commit_msg>Change marker as an example.<commit_after># -*- coding: utf-8 -*-
"""Plot to demonstrate the qualitative2 colormap.
"""
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.lines import Line2D
from typhon.plots import (figsize, mpl_colors)
# Create an iterator to conveniently change the marker in the following plot.
markers = (m for m in Line2D.filled_markers)
fig, ax = plt.subplots(figsize=figsize(10))
ax.set_prop_cycle(color=mpl_colors('qualitative2', 7))
for c in np.arange(7):
X = np.random.randn(100) / 2
Y = np.random.randn(100) / 2
ax.plot(X+c, Y+c, linestyle='none', marker=next(markers), markersize=10)
fig.tight_layout()
plt.show()
|
fac512af9a65cb07e7f43ad167d32fe9934c1c78
|
books/CrackingCodesWithPython/Chapter20/vigenereDictionaryHacker.py
|
books/CrackingCodesWithPython/Chapter20/vigenereDictionaryHacker.py
|
# Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
    for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
|
# Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
    for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
|
Update vigenereDictionaryHacker: fixed PEP8 spacing
|
Update vigenereDictionaryHacker: fixed PEP8 spacing
|
Python
|
mit
|
JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials
|
# Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
    for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
    main()Update vigenereDictionaryHacker: fixed PEP8 spacing
|
# Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
    for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
|
<commit_before># Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
    for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
    main()<commit_msg>Update vigenereDictionaryHacker: fixed PEP8 spacing<commit_after>
|
# Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
    for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
|
# Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
    for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
    main()Update vigenereDictionaryHacker: fixed PEP8 spacing# Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
    for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
|
<commit_before># Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
    for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
    main()<commit_msg>Update vigenereDictionaryHacker: fixed PEP8 spacing<commit_after># Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
    for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
|
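A minimal, self-contained sketch of the same dictionary attack as in the record above, for readers skimming the dump. The decrypt callable and the wordlist path are hypothetical stand-ins for vigenereCipher.decryptMessage and dictionary.txt, and the English check is reduced to a crude letter ratio:

def hack_with_wordlist(ciphertext, decrypt, path='wordlist.txt'):
    """Try every word in a wordlist as the Vigenere key; return candidates."""
    with open(path) as fo:
        words = [line.strip() for line in fo]
    candidates = []
    for word in words:
        plaintext = decrypt(word, ciphertext)
        # crude stand-in for detectEnglish.isEnglish: mostly letters/spaces
        letterish = sum(ch.isalpha() or ch.isspace() for ch in plaintext)
        if letterish >= 0.9 * max(len(plaintext), 1):
            candidates.append((word, plaintext))
    return candidates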
197aa546b2043602a622340bdec220bdc67e13dd
|
prince/plot/mpl/util.py
|
prince/plot/mpl/util.py
|
import matplotlib.cm as cm
import matplotlib.colors as clr
import matplotlib.pyplot as plt
from ..palettes import SEABORN
def create_discrete_cmap(n):
"""Create an n-bin discrete colormap."""
if n <= len(SEABORN):
colors = list(SEABORN.values())[:n]
else:
base = plt.cm.get_cmap('Paired')
color_list = base([(i + 1) / (n + 1) for i in range(n)])
cmap_name = base.name + str(n)
return base.from_list(cmap_name, color_list, n)
return clr.ListedColormap(colors)
def add_color_bar(ax, cmap, labels):
"""Add a colorbar to an axis.
Args:
ax (AxesSubplot)
        cmap (Colormap): A prepared colormap of size n.
labels (list of str): A list of strings of size n.
"""
norm = clr.BoundaryNorm(list(range(cmap.N+1)), cmap.N)
smap = cm.ScalarMappable(norm=norm, cmap=cmap)
smap.set_array([])
cbar = plt.colorbar(smap, ax=ax)
cbar.set_ticks([i + 0.5 for i in range(cmap.N)])
cbar.set_ticklabels(labels)
|
import matplotlib.cm as cm
import matplotlib.colors as clr
import matplotlib.pyplot as plt
from ..palettes import SEABORN
def create_discrete_cmap(n):
"""Create an n-bin discrete colormap."""
if n <= len(SEABORN):
colors = list(SEABORN.values())[:n]
else:
base = plt.cm.get_cmap('Paired')
color_list = base([(i + 1) / (n + 1) for i in range(n)])
cmap_name = base.name + str(n)
return clr.LinearSegmentedColormap.from_list(cmap_name, color_list, n)
return clr.ListedColormap(colors)
def add_color_bar(ax, cmap, labels):
"""Add a colorbar to an axis.
Args:
ax (AxesSubplot)
        cmap (Colormap): A prepared colormap of size n.
labels (list of str): A list of strings of size n.
"""
norm = clr.BoundaryNorm(list(range(cmap.N+1)), cmap.N)
smap = cm.ScalarMappable(norm=norm, cmap=cmap)
smap.set_array([])
cbar = plt.colorbar(smap, ax=ax)
cbar.set_ticks([i + 0.5 for i in range(cmap.N)])
cbar.set_ticklabels(labels)
|
Fix AttributeError on ListedColormap from_list method
|
Fix AttributeError on ListedColormap from_list method
|
Python
|
mit
|
MaxHalford/Prince
|
import matplotlib.cm as cm
import matplotlib.colors as clr
import matplotlib.pyplot as plt
from ..palettes import SEABORN
def create_discrete_cmap(n):
"""Create an n-bin discrete colormap."""
if n <= len(SEABORN):
colors = list(SEABORN.values())[:n]
else:
base = plt.cm.get_cmap('Paired')
color_list = base([(i + 1) / (n + 1) for i in range(n)])
cmap_name = base.name + str(n)
return base.from_list(cmap_name, color_list, n)
return clr.ListedColormap(colors)
def add_color_bar(ax, cmap, labels):
"""Add a colorbar to an axis.
Args:
ax (AxesSubplot)
        cmap (Colormap): A prepared colormap of size n.
labels (list of str): A list of strings of size n.
"""
norm = clr.BoundaryNorm(list(range(cmap.N+1)), cmap.N)
smap = cm.ScalarMappable(norm=norm, cmap=cmap)
smap.set_array([])
cbar = plt.colorbar(smap, ax=ax)
cbar.set_ticks([i + 0.5 for i in range(cmap.N)])
cbar.set_ticklabels(labels)
Fix AttributeError on ListedColormap from_list method
|
import matplotlib.cm as cm
import matplotlib.colors as clr
import matplotlib.pyplot as plt
from ..palettes import SEABORN
def create_discrete_cmap(n):
"""Create an n-bin discrete colormap."""
if n <= len(SEABORN):
colors = list(SEABORN.values())[:n]
else:
base = plt.cm.get_cmap('Paired')
color_list = base([(i + 1) / (n + 1) for i in range(n)])
cmap_name = base.name + str(n)
return clr.LinearSegmentedColormap.from_list(cmap_name, color_list, n)
return clr.ListedColormap(colors)
def add_color_bar(ax, cmap, labels):
"""Add a colorbar to an axis.
Args:
ax (AxesSubplot)
        cmap (Colormap): A prepared colormap of size n.
labels (list of str): A list of strings of size n.
"""
norm = clr.BoundaryNorm(list(range(cmap.N+1)), cmap.N)
smap = cm.ScalarMappable(norm=norm, cmap=cmap)
smap.set_array([])
cbar = plt.colorbar(smap, ax=ax)
cbar.set_ticks([i + 0.5 for i in range(cmap.N)])
cbar.set_ticklabels(labels)
|
<commit_before>import matplotlib.cm as cm
import matplotlib.colors as clr
import matplotlib.pyplot as plt
from ..palettes import SEABORN
def create_discrete_cmap(n):
"""Create an n-bin discrete colormap."""
if n <= len(SEABORN):
colors = list(SEABORN.values())[:n]
else:
base = plt.cm.get_cmap('Paired')
color_list = base([(i + 1) / (n + 1) for i in range(n)])
cmap_name = base.name + str(n)
return base.from_list(cmap_name, color_list, n)
return clr.ListedColormap(colors)
def add_color_bar(ax, cmap, labels):
"""Add a colorbar to an axis.
Args:
ax (AxesSubplot)
        cmap (Colormap): A prepared colormap of size n.
labels (list of str): A list of strings of size n.
"""
norm = clr.BoundaryNorm(list(range(cmap.N+1)), cmap.N)
smap = cm.ScalarMappable(norm=norm, cmap=cmap)
smap.set_array([])
cbar = plt.colorbar(smap, ax=ax)
cbar.set_ticks([i + 0.5 for i in range(cmap.N)])
cbar.set_ticklabels(labels)
<commit_msg>Fix AttributeError on ListedColormap from_list method<commit_after>
|
import matplotlib.cm as cm
import matplotlib.colors as clr
import matplotlib.pyplot as plt
from ..palettes import SEABORN
def create_discrete_cmap(n):
"""Create an n-bin discrete colormap."""
if n <= len(SEABORN):
colors = list(SEABORN.values())[:n]
else:
base = plt.cm.get_cmap('Paired')
color_list = base([(i + 1) / (n + 1) for i in range(n)])
cmap_name = base.name + str(n)
return clr.LinearSegmentedColormap.from_list(cmap_name, color_list, n)
return clr.ListedColormap(colors)
def add_color_bar(ax, cmap, labels):
"""Add a colorbar to an axis.
Args:
ax (AxesSubplot)
        cmap (Colormap): A prepared colormap of size n.
labels (list of str): A list of strings of size n.
"""
norm = clr.BoundaryNorm(list(range(cmap.N+1)), cmap.N)
smap = cm.ScalarMappable(norm=norm, cmap=cmap)
smap.set_array([])
cbar = plt.colorbar(smap, ax=ax)
cbar.set_ticks([i + 0.5 for i in range(cmap.N)])
cbar.set_ticklabels(labels)
|
import matplotlib.cm as cm
import matplotlib.colors as clr
import matplotlib.pyplot as plt
from ..palettes import SEABORN
def create_discrete_cmap(n):
"""Create an n-bin discrete colormap."""
if n <= len(SEABORN):
colors = list(SEABORN.values())[:n]
else:
base = plt.cm.get_cmap('Paired')
color_list = base([(i + 1) / (n + 1) for i in range(n)])
cmap_name = base.name + str(n)
return base.from_list(cmap_name, color_list, n)
return clr.ListedColormap(colors)
def add_color_bar(ax, cmap, labels):
"""Add a colorbar to an axis.
Args:
ax (AxesSubplot)
        cmap (Colormap): A prepared colormap of size n.
labels (list of str): A list of strings of size n.
"""
norm = clr.BoundaryNorm(list(range(cmap.N+1)), cmap.N)
smap = cm.ScalarMappable(norm=norm, cmap=cmap)
smap.set_array([])
cbar = plt.colorbar(smap, ax=ax)
cbar.set_ticks([i + 0.5 for i in range(cmap.N)])
cbar.set_ticklabels(labels)
Fix AttributeError on ListedColormap from_list methodimport matplotlib.cm as cm
import matplotlib.colors as clr
import matplotlib.pyplot as plt
from ..palettes import SEABORN
def create_discrete_cmap(n):
"""Create an n-bin discrete colormap."""
if n <= len(SEABORN):
colors = list(SEABORN.values())[:n]
else:
base = plt.cm.get_cmap('Paired')
color_list = base([(i + 1) / (n + 1) for i in range(n)])
cmap_name = base.name + str(n)
return clr.LinearSegmentedColormap.from_list(cmap_name, color_list, n)
return clr.ListedColormap(colors)
def add_color_bar(ax, cmap, labels):
"""Add a colorbar to an axis.
Args:
ax (AxesSubplot)
        cmap (Colormap): A prepared colormap of size n.
labels (list of str): A list of strings of size n.
"""
norm = clr.BoundaryNorm(list(range(cmap.N+1)), cmap.N)
smap = cm.ScalarMappable(norm=norm, cmap=cmap)
smap.set_array([])
cbar = plt.colorbar(smap, ax=ax)
cbar.set_ticks([i + 0.5 for i in range(cmap.N)])
cbar.set_ticklabels(labels)
|
<commit_before>import matplotlib.cm as cm
import matplotlib.colors as clr
import matplotlib.pyplot as plt
from ..palettes import SEABORN
def create_discrete_cmap(n):
"""Create an n-bin discrete colormap."""
if n <= len(SEABORN):
colors = list(SEABORN.values())[:n]
else:
base = plt.cm.get_cmap('Paired')
color_list = base([(i + 1) / (n + 1) for i in range(n)])
cmap_name = base.name + str(n)
return base.from_list(cmap_name, color_list, n)
return clr.ListedColormap(colors)
def add_color_bar(ax, cmap, labels):
"""Add a colorbar to an axis.
Args:
ax (AxesSubplot)
        cmap (Colormap): A prepared colormap of size n.
labels (list of str): A list of strings of size n.
"""
norm = clr.BoundaryNorm(list(range(cmap.N+1)), cmap.N)
smap = cm.ScalarMappable(norm=norm, cmap=cmap)
smap.set_array([])
cbar = plt.colorbar(smap, ax=ax)
cbar.set_ticks([i + 0.5 for i in range(cmap.N)])
cbar.set_ticklabels(labels)
<commit_msg>Fix AttributeError on ListedColormap from_list method<commit_after>import matplotlib.cm as cm
import matplotlib.colors as clr
import matplotlib.pyplot as plt
from ..palettes import SEABORN
def create_discrete_cmap(n):
"""Create an n-bin discrete colormap."""
if n <= len(SEABORN):
colors = list(SEABORN.values())[:n]
else:
base = plt.cm.get_cmap('Paired')
color_list = base([(i + 1) / (n + 1) for i in range(n)])
cmap_name = base.name + str(n)
return clr.LinearSegmentedColormap.from_list(cmap_name, color_list, n)
return clr.ListedColormap(colors)
def add_color_bar(ax, cmap, labels):
"""Add a colorbar to an axis.
Args:
ax (AxesSubplot)
        cmap (Colormap): A prepared colormap of size n.
labels (list of str): A list of strings of size n.
"""
norm = clr.BoundaryNorm(list(range(cmap.N+1)), cmap.N)
smap = cm.ScalarMappable(norm=norm, cmap=cmap)
smap.set_array([])
cbar = plt.colorbar(smap, ax=ax)
cbar.set_ticks([i + 0.5 for i in range(cmap.N)])
cbar.set_ticklabels(labels)
|
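For context on the AttributeError being fixed: 'Paired' is a ListedColormap, and ListedColormap has no from_list method; from_list is a classmethod of LinearSegmentedColormap. A short sketch of the failure mode and the fix, assuming a matplotlib version contemporary with this commit:

import matplotlib.colors as clr
import matplotlib.pyplot as plt

n = 5
base = plt.cm.get_cmap('Paired')  # a ListedColormap instance
color_list = base([(i + 1) / (n + 1) for i in range(n)])
# base.from_list(...) raises AttributeError: ListedColormap does not
# define from_list; the classmethod lives on LinearSegmentedColormap.
cmap = clr.LinearSegmentedColormap.from_list('Paired' + str(n), color_list, n)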
c4b7532987958573dafe01621cdd254db63bf8ea
|
bfg9000/builtins/hooks.py
|
bfg9000/builtins/hooks.py
|
import functools
from six import iteritems
_all_builtins = {}
class _Binder(object):
def __init__(self, args, fn):
self._args = args
self._fn = fn
class _FunctionBinder(_Binder):
def bind(self, **kwargs):
# XXX: partial doesn't forward the docstring of the function.
return functools.partial(self._fn, *[kwargs[i] for i in self._args])
class _VariableBinder(_Binder):
def bind(self, **kwargs):
return self._fn(*[kwargs[i] for i in self._args])
class _BuiltinDecorator(object):
def __init__(self, binder):
self.__binder = binder
def __call__(self, *args):
def wrapper(fn):
bound = self.__binder(args, fn)
_all_builtins[fn.__name__] = bound
return bound
return wrapper
builtin = _BuiltinDecorator(_FunctionBinder)()
builtin.globals = _BuiltinDecorator(_FunctionBinder)
builtin.variable = _BuiltinDecorator(_VariableBinder)
def bind(**kwargs):
builtins = {}
for k, v in iteritems(_all_builtins):
builtins[k] = v.bind(builtins=builtins, **kwargs)
return builtins
@builtin.variable('env')
def env(this_env):
return this_env
|
import functools
import inspect
import sys
from six import iteritems
_all_builtins = {}
class _Binder(object):
def __init__(self, args, fn):
self._args = args
self._fn = fn
class _FunctionBinder(_Binder):
def bind(self, **kwargs):
pre_args = tuple(kwargs[i] for i in self._args)
@functools.wraps(self._fn)
def wrapped(*args, **kwargs):
return self._fn(*(pre_args + args), **kwargs)
if sys.version_info >= (3, 3):
sig = inspect.signature(wrapped)
params = list(sig.parameters.values())[len(kwargs):]
wrapped.__signature__ = inspect.Signature(params)
return wrapped
class _VariableBinder(_Binder):
def bind(self, **kwargs):
return self._fn(*[kwargs[i] for i in self._args])
class _BuiltinDecorator(object):
def __init__(self, binder):
self.__binder = binder
def __call__(self, *args):
def wrapper(fn):
bound = self.__binder(args, fn)
_all_builtins[fn.__name__] = bound
return bound
return wrapper
builtin = _BuiltinDecorator(_FunctionBinder)()
builtin.globals = _BuiltinDecorator(_FunctionBinder)
builtin.variable = _BuiltinDecorator(_VariableBinder)
def bind(**kwargs):
builtins = {}
for k, v in iteritems(_all_builtins):
builtins[k] = v.bind(builtins=builtins, **kwargs)
return builtins
@builtin.variable('env')
def env(this_env):
return this_env
|
Change how the wrappers work for builtin functions so that docs get forwarded correctly
|
Change how the wrappers work for builtin functions so that docs get forwarded correctly
|
Python
|
bsd-3-clause
|
jimporter/bfg9000,jimporter/bfg9000,jimporter/bfg9000,jimporter/bfg9000
|
import functools
from six import iteritems
_all_builtins = {}
class _Binder(object):
def __init__(self, args, fn):
self._args = args
self._fn = fn
class _FunctionBinder(_Binder):
def bind(self, **kwargs):
# XXX: partial doesn't forward the docstring of the function.
return functools.partial(self._fn, *[kwargs[i] for i in self._args])
class _VariableBinder(_Binder):
def bind(self, **kwargs):
return self._fn(*[kwargs[i] for i in self._args])
class _BuiltinDecorator(object):
def __init__(self, binder):
self.__binder = binder
def __call__(self, *args):
def wrapper(fn):
bound = self.__binder(args, fn)
_all_builtins[fn.__name__] = bound
return bound
return wrapper
builtin = _BuiltinDecorator(_FunctionBinder)()
builtin.globals = _BuiltinDecorator(_FunctionBinder)
builtin.variable = _BuiltinDecorator(_VariableBinder)
def bind(**kwargs):
builtins = {}
for k, v in iteritems(_all_builtins):
builtins[k] = v.bind(builtins=builtins, **kwargs)
return builtins
@builtin.variable('env')
def env(this_env):
return this_env
Change how the wrappers work for builtin functions so that docs get forwarded correctly
|
import functools
import inspect
import sys
from six import iteritems
_all_builtins = {}
class _Binder(object):
def __init__(self, args, fn):
self._args = args
self._fn = fn
class _FunctionBinder(_Binder):
def bind(self, **kwargs):
pre_args = tuple(kwargs[i] for i in self._args)
@functools.wraps(self._fn)
def wrapped(*args, **kwargs):
return self._fn(*(pre_args + args), **kwargs)
if sys.version_info >= (3, 3):
sig = inspect.signature(wrapped)
params = list(sig.parameters.values())[len(kwargs):]
wrapped.__signature__ = inspect.Signature(params)
return wrapped
class _VariableBinder(_Binder):
def bind(self, **kwargs):
return self._fn(*[kwargs[i] for i in self._args])
class _BuiltinDecorator(object):
def __init__(self, binder):
self.__binder = binder
def __call__(self, *args):
def wrapper(fn):
bound = self.__binder(args, fn)
_all_builtins[fn.__name__] = bound
return bound
return wrapper
builtin = _BuiltinDecorator(_FunctionBinder)()
builtin.globals = _BuiltinDecorator(_FunctionBinder)
builtin.variable = _BuiltinDecorator(_VariableBinder)
def bind(**kwargs):
builtins = {}
for k, v in iteritems(_all_builtins):
builtins[k] = v.bind(builtins=builtins, **kwargs)
return builtins
@builtin.variable('env')
def env(this_env):
return this_env
|
<commit_before>import functools
from six import iteritems
_all_builtins = {}
class _Binder(object):
def __init__(self, args, fn):
self._args = args
self._fn = fn
class _FunctionBinder(_Binder):
def bind(self, **kwargs):
# XXX: partial doesn't forward the docstring of the function.
return functools.partial(self._fn, *[kwargs[i] for i in self._args])
class _VariableBinder(_Binder):
def bind(self, **kwargs):
return self._fn(*[kwargs[i] for i in self._args])
class _BuiltinDecorator(object):
def __init__(self, binder):
self.__binder = binder
def __call__(self, *args):
def wrapper(fn):
bound = self.__binder(args, fn)
_all_builtins[fn.__name__] = bound
return bound
return wrapper
builtin = _BuiltinDecorator(_FunctionBinder)()
builtin.globals = _BuiltinDecorator(_FunctionBinder)
builtin.variable = _BuiltinDecorator(_VariableBinder)
def bind(**kwargs):
builtins = {}
for k, v in iteritems(_all_builtins):
builtins[k] = v.bind(builtins=builtins, **kwargs)
return builtins
@builtin.variable('env')
def env(this_env):
return this_env
<commit_msg>Change how the wrappers work for builtin functions so that docs get forwarded correctly<commit_after>
|
import functools
import inspect
import sys
from six import iteritems
_all_builtins = {}
class _Binder(object):
def __init__(self, args, fn):
self._args = args
self._fn = fn
class _FunctionBinder(_Binder):
def bind(self, **kwargs):
pre_args = tuple(kwargs[i] for i in self._args)
@functools.wraps(self._fn)
def wrapped(*args, **kwargs):
return self._fn(*(pre_args + args), **kwargs)
if sys.version_info >= (3, 3):
sig = inspect.signature(wrapped)
params = list(sig.parameters.values())[len(kwargs):]
wrapped.__signature__ = inspect.Signature(params)
return wrapped
class _VariableBinder(_Binder):
def bind(self, **kwargs):
return self._fn(*[kwargs[i] for i in self._args])
class _BuiltinDecorator(object):
def __init__(self, binder):
self.__binder = binder
def __call__(self, *args):
def wrapper(fn):
bound = self.__binder(args, fn)
_all_builtins[fn.__name__] = bound
return bound
return wrapper
builtin = _BuiltinDecorator(_FunctionBinder)()
builtin.globals = _BuiltinDecorator(_FunctionBinder)
builtin.variable = _BuiltinDecorator(_VariableBinder)
def bind(**kwargs):
builtins = {}
for k, v in iteritems(_all_builtins):
builtins[k] = v.bind(builtins=builtins, **kwargs)
return builtins
@builtin.variable('env')
def env(this_env):
return this_env
|
import functools
from six import iteritems
_all_builtins = {}
class _Binder(object):
def __init__(self, args, fn):
self._args = args
self._fn = fn
class _FunctionBinder(_Binder):
def bind(self, **kwargs):
# XXX: partial doesn't forward the docstring of the function.
return functools.partial(self._fn, *[kwargs[i] for i in self._args])
class _VariableBinder(_Binder):
def bind(self, **kwargs):
return self._fn(*[kwargs[i] for i in self._args])
class _BuiltinDecorator(object):
def __init__(self, binder):
self.__binder = binder
def __call__(self, *args):
def wrapper(fn):
bound = self.__binder(args, fn)
_all_builtins[fn.__name__] = bound
return bound
return wrapper
builtin = _BuiltinDecorator(_FunctionBinder)()
builtin.globals = _BuiltinDecorator(_FunctionBinder)
builtin.variable = _BuiltinDecorator(_VariableBinder)
def bind(**kwargs):
builtins = {}
for k, v in iteritems(_all_builtins):
builtins[k] = v.bind(builtins=builtins, **kwargs)
return builtins
@builtin.variable('env')
def env(this_env):
return this_env
Change how the wrappers work for builtin functions so that docs get forwarded correctlyimport functools
import inspect
import sys
from six import iteritems
_all_builtins = {}
class _Binder(object):
def __init__(self, args, fn):
self._args = args
self._fn = fn
class _FunctionBinder(_Binder):
def bind(self, **kwargs):
pre_args = tuple(kwargs[i] for i in self._args)
@functools.wraps(self._fn)
def wrapped(*args, **kwargs):
return self._fn(*(pre_args + args), **kwargs)
if sys.version_info >= (3, 3):
sig = inspect.signature(wrapped)
params = list(sig.parameters.values())[len(kwargs):]
wrapped.__signature__ = inspect.Signature(params)
return wrapped
class _VariableBinder(_Binder):
def bind(self, **kwargs):
return self._fn(*[kwargs[i] for i in self._args])
class _BuiltinDecorator(object):
def __init__(self, binder):
self.__binder = binder
def __call__(self, *args):
def wrapper(fn):
bound = self.__binder(args, fn)
_all_builtins[fn.__name__] = bound
return bound
return wrapper
builtin = _BuiltinDecorator(_FunctionBinder)()
builtin.globals = _BuiltinDecorator(_FunctionBinder)
builtin.variable = _BuiltinDecorator(_VariableBinder)
def bind(**kwargs):
builtins = {}
for k, v in iteritems(_all_builtins):
builtins[k] = v.bind(builtins=builtins, **kwargs)
return builtins
@builtin.variable('env')
def env(this_env):
return this_env
|
<commit_before>import functools
from six import iteritems
_all_builtins = {}
class _Binder(object):
def __init__(self, args, fn):
self._args = args
self._fn = fn
class _FunctionBinder(_Binder):
def bind(self, **kwargs):
# XXX: partial doesn't forward the docstring of the function.
return functools.partial(self._fn, *[kwargs[i] for i in self._args])
class _VariableBinder(_Binder):
def bind(self, **kwargs):
return self._fn(*[kwargs[i] for i in self._args])
class _BuiltinDecorator(object):
def __init__(self, binder):
self.__binder = binder
def __call__(self, *args):
def wrapper(fn):
bound = self.__binder(args, fn)
_all_builtins[fn.__name__] = bound
return bound
return wrapper
builtin = _BuiltinDecorator(_FunctionBinder)()
builtin.globals = _BuiltinDecorator(_FunctionBinder)
builtin.variable = _BuiltinDecorator(_VariableBinder)
def bind(**kwargs):
builtins = {}
for k, v in iteritems(_all_builtins):
builtins[k] = v.bind(builtins=builtins, **kwargs)
return builtins
@builtin.variable('env')
def env(this_env):
return this_env
<commit_msg>Change how the wrappers work for builtin functions so that docs get forwarded correctly<commit_after>import functools
import inspect
import sys
from six import iteritems
_all_builtins = {}
class _Binder(object):
def __init__(self, args, fn):
self._args = args
self._fn = fn
class _FunctionBinder(_Binder):
def bind(self, **kwargs):
pre_args = tuple(kwargs[i] for i in self._args)
@functools.wraps(self._fn)
def wrapped(*args, **kwargs):
return self._fn(*(pre_args + args), **kwargs)
if sys.version_info >= (3, 3):
sig = inspect.signature(wrapped)
params = list(sig.parameters.values())[len(kwargs):]
wrapped.__signature__ = inspect.Signature(params)
return wrapped
class _VariableBinder(_Binder):
def bind(self, **kwargs):
return self._fn(*[kwargs[i] for i in self._args])
class _BuiltinDecorator(object):
def __init__(self, binder):
self.__binder = binder
def __call__(self, *args):
def wrapper(fn):
bound = self.__binder(args, fn)
_all_builtins[fn.__name__] = bound
return bound
return wrapper
builtin = _BuiltinDecorator(_FunctionBinder)()
builtin.globals = _BuiltinDecorator(_FunctionBinder)
builtin.variable = _BuiltinDecorator(_VariableBinder)
def bind(**kwargs):
builtins = {}
for k, v in iteritems(_all_builtins):
builtins[k] = v.bind(builtins=builtins, **kwargs)
return builtins
@builtin.variable('env')
def env(this_env):
return this_env
|
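An illustration of the metadata problem this commit fixes: functools.partial objects expose partial's own generic __doc__, while a functools.wraps wrapper copies the wrapped function's name and docstring. The builtin below is a hypothetical stand-in for one of bfg9000's builtins:

import functools

def resolve(env, path):
    """Resolve a path against the environment."""  # hypothetical builtin
    return (env, path)

p = functools.partial(resolve, 'fake-env')
print(p.__doc__)        # partial's generic docstring, not resolve's

@functools.wraps(resolve)
def wrapped(*args, **kwargs):
    return resolve('fake-env', *args, **kwargs)

print(wrapped.__doc__)  # 'Resolve a path against the environment.'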
b3b281c9bf789c44a9a2b2d750e4dd8cf789dd1a
|
playserver/webserver.py
|
playserver/webserver.py
|
import flask
from . import track
app = flask.Flask(__name__)
@app.route("/")
def root():
return "{} by {} - {}"
|
import flask
from . import track
app = flask.Flask(__name__)
@app.route("/")
def root():
song = track.getCurrentSong()
artist = track.getCurrentArtist()
album = track.getCurrentAlbum()
return "{} by {} - {}".format(song, artist, album)
|
Add song display to root page
|
Add song display to root page
|
Python
|
mit
|
ollien/playserver,ollien/playserver,ollien/playserver
|
import flask
from . import track
app = flask.Flask(__name__)
@app.route("/")
def root():
return "{} by {} - {}"
Add song display to root page
|
import flask
from . import track
app = flask.Flask(__name__)
@app.route("/")
def root():
song = track.getCurrentSong()
artist = track.getCurrentArtist()
album = track.getCurrentAlbum()
return "{} by {} - {}".format(song, artist, album)
|
<commit_before>import flask
from . import track
app = flask.Flask(__name__)
@app.route("/")
def root():
return "{} by {} - {}"
<commit_msg>Add song display to root page<commit_after>
|
import flask
from . import track
app = flask.Flask(__name__)
@app.route("/")
def root():
song = track.getCurrentSong()
artist = track.getCurrentArtist()
album = track.getCurrentAlbum()
return "{} by {} - {}".format(song, artist, album)
|
import flask
from . import track
app = flask.Flask(__name__)
@app.route("/")
def root():
return "{} by {} - {}"
Add song display to root pageimport flask
from . import track
app = flask.Flask(__name__)
@app.route("/")
def root():
song = track.getCurrentSong()
artist = track.getCurrentArtist()
album = track.getCurrentAlbum()
return "{} by {} - {}".format(song, artist, album)
|
<commit_before>import flask
from . import track
app = flask.Flask(__name__)
@app.route("/")
def root():
return "{} by {} - {}"
<commit_msg>Add song display to root page<commit_after>import flask
from . import track
app = flask.Flask(__name__)
@app.route("/")
def root():
song = track.getCurrentSong()
artist = track.getCurrentArtist()
album = track.getCurrentAlbum()
return "{} by {} - {}".format(song, artist, album)
|
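A hedged sketch of how the new root route could be exercised with Flask's test client, stubbing the three track getters used above (the stub values are arbitrary, and the import assumes the playserver package is importable):

from unittest import mock

from playserver import track, webserver  # assumes the package layout above

with mock.patch.object(track, 'getCurrentSong', return_value='Song'), \
     mock.patch.object(track, 'getCurrentArtist', return_value='Artist'), \
     mock.patch.object(track, 'getCurrentAlbum', return_value='Album'):
    client = webserver.app.test_client()
    assert client.get('/').data == b'Song by Artist - Album'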
6ccc85832aeff2ca9800cd9e2af8461515ff680d
|
cartography/midi_utils.py
|
cartography/midi_utils.py
|
import mido
def open_output():
return open_steinberg_output()
def get_steinberg_device_name():
output_names = [n for n in mido.get_output_names() if 'steinberg' in n.lower()]
if len(output_names) != 1:
raise Exception(f"Found the following steinberg MIDI devices: {output_names}. Expected only one")
return output_names[0]
def open_steinberg_output():
return mido.open_output(get_steinberg_device_name())
def open_steinberg_input():
return mido.open_input(get_steinberg_device_name())
|
import mido
def open_output():
return open_steinberg_output()
def get_steinberg_device_name():
output_names = [n for n in mido.get_output_names() if 'steinberg' in n.lower()]
if len(output_names) != 1:
raise Exception(f"Found the following steinberg MIDI devices: {output_names}. Expected only one")
return output_names[0]
def open_steinberg_output():
return mido.open_output(get_steinberg_device_name(), autoreset=True)
def open_steinberg_input():
return mido.open_input(get_steinberg_device_name())
|
Add dump presets and utils
|
Add dump presets and utils
|
Python
|
mit
|
tingled/synthetic-cartography,tingled/synthetic-cartography
|
import mido
def open_output():
return open_steinberg_output()
def get_steinberg_device_name():
output_names = [n for n in mido.get_output_names() if 'steinberg' in n.lower()]
if len(output_names) != 1:
raise Exception(f"Found the following steinberg MIDI devices: {output_names}. Expected only one")
return output_names[0]
def open_steinberg_output():
return mido.open_output(get_steinberg_device_name())
def open_steinberg_input():
return mido.open_input(get_steinberg_device_name())
Add dump presets and utils
|
import mido
def open_output():
return open_steinberg_output()
def get_steinberg_device_name():
output_names = [n for n in mido.get_output_names() if 'steinberg' in n.lower()]
if len(output_names) != 1:
raise Exception(f"Found the following steinberg MIDI devices: {output_names}. Expected only one")
return output_names[0]
def open_steinberg_output():
return mido.open_output(get_steinberg_device_name(), autoreset=True)
def open_steinberg_input():
return mido.open_input(get_steinberg_device_name())
|
<commit_before>import mido
def open_output():
return open_steinberg_output()
def get_steinberg_device_name():
output_names = [n for n in mido.get_output_names() if 'steinberg' in n.lower()]
if len(output_names) != 1:
raise Exception(f"Found the following steinberg MIDI devices: {output_names}. Expected only one")
return output_names[0]
def open_steinberg_output():
return mido.open_output(get_steinberg_device_name())
def open_steinberg_input():
return mido.open_input(get_steinberg_device_name())
<commit_msg>Add dump presets and utils<commit_after>
|
import mido
def open_output():
return open_steinberg_output()
def get_steinberg_device_name():
output_names = [n for n in mido.get_output_names() if 'steinberg' in n.lower()]
if len(output_names) != 1:
raise Exception(f"Found the following steinberg MIDI devices: {output_names}. Expected only one")
return output_names[0]
def open_steinberg_output():
return mido.open_output(get_steinberg_device_name(), autoreset=True)
def open_steinberg_input():
return mido.open_input(get_steinberg_device_name())
|
import mido
def open_output():
return open_steinberg_output()
def get_steinberg_device_name():
output_names = [n for n in mido.get_output_names() if 'steinberg' in n.lower()]
if len(output_names) != 1:
raise Exception(f"Found the following steinberg MIDI devices: {output_names}. Expected only one")
return output_names[0]
def open_steinberg_output():
return mido.open_output(get_steinberg_device_name())
def open_steinberg_input():
return mido.open_input(get_steinberg_device_name())
Add dump presets and utilsimport mido
def open_output():
return open_steinberg_output()
def get_steinberg_device_name():
output_names = [n for n in mido.get_output_names() if 'steinberg' in n.lower()]
if len(output_names) != 1:
raise Exception(f"Found the following steinberg MIDI devices: {output_names}. Expected only one")
return output_names[0]
def open_steinberg_output():
return mido.open_output(get_steinberg_device_name(), autoreset=True)
def open_steinberg_input():
return mido.open_input(get_steinberg_device_name())
|
<commit_before>import mido
def open_output():
return open_steinberg_output()
def get_steinberg_device_name():
output_names = [n for n in mido.get_output_names() if 'steinberg' in n.lower()]
if len(output_names) != 1:
raise Exception(f"Found the following steinberg MIDI devices: {output_names}. Expected only one")
return output_names[0]
def open_steinberg_output():
return mido.open_output(get_steinberg_device_name())
def open_steinberg_input():
return mido.open_input(get_steinberg_device_name())
<commit_msg>Add dump presets and utils<commit_after>import mido
def open_output():
return open_steinberg_output()
def get_steinberg_device_name():
output_names = [n for n in mido.get_output_names() if 'steinberg' in n.lower()]
if len(output_names) != 1:
raise Exception(f"Found the following steinberg MIDI devices: {output_names}. Expected only one")
return output_names[0]
def open_steinberg_output():
return mido.open_output(get_steinberg_device_name(), autoreset=True)
def open_steinberg_input():
return mido.open_input(get_steinberg_device_name())
|
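For context on the autoreset flag added above: mido sends all-notes-off and reset-all-controllers on every channel when an autoreset port is closed (or when reset() is called), so notes left sounding when a script exits are silenced. A small hedged usage sketch against the default output device:

import mido

# Opens the default output; pass a device name (as in the code above)
# to target a specific interface.
with mido.open_output(autoreset=True) as port:
    port.send(mido.Message('note_on', note=60, velocity=64))
    # Leaving the with-block closes the port; because autoreset=True,
    # the close also resets controllers and silences note 60 even
    # though no note_off was ever sent.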
883b8d3ebdf006bb6c9b28b234936231f0eac442
|
l10n_br_nfse/__manifest__.py
|
l10n_br_nfse/__manifest__.py
|
# Copyright 2019 KMEE INFORMATICA LTDA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "NFS-e",
"summary": """
NFS-e""",
"version": "14.0.1.7.0",
"license": "AGPL-3",
"author": "KMEE, Odoo Community Association (OCA)",
"maintainers": ["gabrielcardoso21", "mileo", "luismalta", "marcelsavegnago"],
"website": "https://github.com/OCA/l10n-brazil",
"external_dependencies": {
"python": [
"erpbrasil.edoc",
"erpbrasil.assinatura-nopyopenssl",
"erpbrasil.transmissao",
"erpbrasil.base",
],
},
"depends": [
"l10n_br_fiscal",
],
"data": [
"security/ir.model.access.csv",
"views/document_view.xml",
"views/product_template_view.xml",
"views/product_product_view.xml",
"views/document_line_view.xml",
"views/res_company_view.xml",
"report/danfse.xml",
],
"demo": [
"demo/product_demo.xml",
"demo/fiscal_document_demo.xml",
],
}
|
# Copyright 2019 KMEE INFORMATICA LTDA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "NFS-e",
"summary": """
NFS-e""",
"version": "14.0.1.7.0",
"license": "AGPL-3",
"author": "KMEE, Odoo Community Association (OCA)",
"maintainers": ["gabrielcardoso21", "mileo", "luismalta", "marcelsavegnago"],
"website": "https://github.com/OCA/l10n-brazil",
"external_dependencies": {
"python": [
"erpbrasil.edoc",
"erpbrasil.assinatura",
"erpbrasil.transmissao",
"erpbrasil.base",
],
},
"depends": [
"l10n_br_fiscal",
],
"data": [
"security/ir.model.access.csv",
"views/document_view.xml",
"views/product_template_view.xml",
"views/product_product_view.xml",
"views/document_line_view.xml",
"views/res_company_view.xml",
"report/danfse.xml",
],
"demo": [
"demo/product_demo.xml",
"demo/fiscal_document_demo.xml",
],
}
|
Update python lib erpbrasil.assinatura version 1.4.0
|
l10n_br_nfse: Update python lib erpbrasil.assinatura version 1.4.0
|
Python
|
agpl-3.0
|
akretion/l10n-brazil,akretion/l10n-brazil,OCA/l10n-brazil,OCA/l10n-brazil,akretion/l10n-brazil,OCA/l10n-brazil
|
# Copyright 2019 KMEE INFORMATICA LTDA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "NFS-e",
"summary": """
NFS-e""",
"version": "14.0.1.7.0",
"license": "AGPL-3",
"author": "KMEE, Odoo Community Association (OCA)",
"maintainers": ["gabrielcardoso21", "mileo", "luismalta", "marcelsavegnago"],
"website": "https://github.com/OCA/l10n-brazil",
"external_dependencies": {
"python": [
"erpbrasil.edoc",
"erpbrasil.assinatura-nopyopenssl",
"erpbrasil.transmissao",
"erpbrasil.base",
],
},
"depends": [
"l10n_br_fiscal",
],
"data": [
"security/ir.model.access.csv",
"views/document_view.xml",
"views/product_template_view.xml",
"views/product_product_view.xml",
"views/document_line_view.xml",
"views/res_company_view.xml",
"report/danfse.xml",
],
"demo": [
"demo/product_demo.xml",
"demo/fiscal_document_demo.xml",
],
}
l10n_br_nfse: Update python lib erpbrasil.assinatura version 1.4.0
|
# Copyright 2019 KMEE INFORMATICA LTDA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "NFS-e",
"summary": """
NFS-e""",
"version": "14.0.1.7.0",
"license": "AGPL-3",
"author": "KMEE, Odoo Community Association (OCA)",
"maintainers": ["gabrielcardoso21", "mileo", "luismalta", "marcelsavegnago"],
"website": "https://github.com/OCA/l10n-brazil",
"external_dependencies": {
"python": [
"erpbrasil.edoc",
"erpbrasil.assinatura",
"erpbrasil.transmissao",
"erpbrasil.base",
],
},
"depends": [
"l10n_br_fiscal",
],
"data": [
"security/ir.model.access.csv",
"views/document_view.xml",
"views/product_template_view.xml",
"views/product_product_view.xml",
"views/document_line_view.xml",
"views/res_company_view.xml",
"report/danfse.xml",
],
"demo": [
"demo/product_demo.xml",
"demo/fiscal_document_demo.xml",
],
}
|
<commit_before># Copyright 2019 KMEE INFORMATICA LTDA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "NFS-e",
"summary": """
NFS-e""",
"version": "14.0.1.7.0",
"license": "AGPL-3",
"author": "KMEE, Odoo Community Association (OCA)",
"maintainers": ["gabrielcardoso21", "mileo", "luismalta", "marcelsavegnago"],
"website": "https://github.com/OCA/l10n-brazil",
"external_dependencies": {
"python": [
"erpbrasil.edoc",
"erpbrasil.assinatura-nopyopenssl",
"erpbrasil.transmissao",
"erpbrasil.base",
],
},
"depends": [
"l10n_br_fiscal",
],
"data": [
"security/ir.model.access.csv",
"views/document_view.xml",
"views/product_template_view.xml",
"views/product_product_view.xml",
"views/document_line_view.xml",
"views/res_company_view.xml",
"report/danfse.xml",
],
"demo": [
"demo/product_demo.xml",
"demo/fiscal_document_demo.xml",
],
}
<commit_msg>l10n_br_nfse: Update python lib erpbrasil.assinatura version 1.4.0<commit_after>
|
# Copyright 2019 KMEE INFORMATICA LTDA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "NFS-e",
"summary": """
NFS-e""",
"version": "14.0.1.7.0",
"license": "AGPL-3",
"author": "KMEE, Odoo Community Association (OCA)",
"maintainers": ["gabrielcardoso21", "mileo", "luismalta", "marcelsavegnago"],
"website": "https://github.com/OCA/l10n-brazil",
"external_dependencies": {
"python": [
"erpbrasil.edoc",
"erpbrasil.assinatura",
"erpbrasil.transmissao",
"erpbrasil.base",
],
},
"depends": [
"l10n_br_fiscal",
],
"data": [
"security/ir.model.access.csv",
"views/document_view.xml",
"views/product_template_view.xml",
"views/product_product_view.xml",
"views/document_line_view.xml",
"views/res_company_view.xml",
"report/danfse.xml",
],
"demo": [
"demo/product_demo.xml",
"demo/fiscal_document_demo.xml",
],
}
|
# Copyright 2019 KMEE INFORMATICA LTDA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "NFS-e",
"summary": """
NFS-e""",
"version": "14.0.1.7.0",
"license": "AGPL-3",
"author": "KMEE, Odoo Community Association (OCA)",
"maintainers": ["gabrielcardoso21", "mileo", "luismalta", "marcelsavegnago"],
"website": "https://github.com/OCA/l10n-brazil",
"external_dependencies": {
"python": [
"erpbrasil.edoc",
"erpbrasil.assinatura-nopyopenssl",
"erpbrasil.transmissao",
"erpbrasil.base",
],
},
"depends": [
"l10n_br_fiscal",
],
"data": [
"security/ir.model.access.csv",
"views/document_view.xml",
"views/product_template_view.xml",
"views/product_product_view.xml",
"views/document_line_view.xml",
"views/res_company_view.xml",
"report/danfse.xml",
],
"demo": [
"demo/product_demo.xml",
"demo/fiscal_document_demo.xml",
],
}
l10n_br_nfse: Update python lib erpbrasil.assinatura version 1.4.0# Copyright 2019 KMEE INFORMATICA LTDA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "NFS-e",
"summary": """
NFS-e""",
"version": "14.0.1.7.0",
"license": "AGPL-3",
"author": "KMEE, Odoo Community Association (OCA)",
"maintainers": ["gabrielcardoso21", "mileo", "luismalta", "marcelsavegnago"],
"website": "https://github.com/OCA/l10n-brazil",
"external_dependencies": {
"python": [
"erpbrasil.edoc",
"erpbrasil.assinatura",
"erpbrasil.transmissao",
"erpbrasil.base",
],
},
"depends": [
"l10n_br_fiscal",
],
"data": [
"security/ir.model.access.csv",
"views/document_view.xml",
"views/product_template_view.xml",
"views/product_product_view.xml",
"views/document_line_view.xml",
"views/res_company_view.xml",
"report/danfse.xml",
],
"demo": [
"demo/product_demo.xml",
"demo/fiscal_document_demo.xml",
],
}
|
<commit_before># Copyright 2019 KMEE INFORMATICA LTDA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "NFS-e",
"summary": """
NFS-e""",
"version": "14.0.1.7.0",
"license": "AGPL-3",
"author": "KMEE, Odoo Community Association (OCA)",
"maintainers": ["gabrielcardoso21", "mileo", "luismalta", "marcelsavegnago"],
"website": "https://github.com/OCA/l10n-brazil",
"external_dependencies": {
"python": [
"erpbrasil.edoc",
"erpbrasil.assinatura-nopyopenssl",
"erpbrasil.transmissao",
"erpbrasil.base",
],
},
"depends": [
"l10n_br_fiscal",
],
"data": [
"security/ir.model.access.csv",
"views/document_view.xml",
"views/product_template_view.xml",
"views/product_product_view.xml",
"views/document_line_view.xml",
"views/res_company_view.xml",
"report/danfse.xml",
],
"demo": [
"demo/product_demo.xml",
"demo/fiscal_document_demo.xml",
],
}
<commit_msg>l10n_br_nfse: Update python lib erpbrasil.assinatura version 1.4.0<commit_after># Copyright 2019 KMEE INFORMATICA LTDA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "NFS-e",
"summary": """
NFS-e""",
"version": "14.0.1.7.0",
"license": "AGPL-3",
"author": "KMEE, Odoo Community Association (OCA)",
"maintainers": ["gabrielcardoso21", "mileo", "luismalta", "marcelsavegnago"],
"website": "https://github.com/OCA/l10n-brazil",
"external_dependencies": {
"python": [
"erpbrasil.edoc",
"erpbrasil.assinatura",
"erpbrasil.transmissao",
"erpbrasil.base",
],
},
"depends": [
"l10n_br_fiscal",
],
"data": [
"security/ir.model.access.csv",
"views/document_view.xml",
"views/product_template_view.xml",
"views/product_product_view.xml",
"views/document_line_view.xml",
"views/res_company_view.xml",
"report/danfse.xml",
],
"demo": [
"demo/product_demo.xml",
"demo/fiscal_document_demo.xml",
],
}
|
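For context: the python entries under external_dependencies are names Odoo verifies before installing the module, and the commit swaps the nopyopenssl fork of erpbrasil.assinatura for the mainline distribution. A rough sketch of an import-based check (an assumption; the exact mechanism varies across Odoo versions):

import importlib

for dep in ("erpbrasil.edoc", "erpbrasil.assinatura",
            "erpbrasil.transmissao", "erpbrasil.base"):
    try:
        importlib.import_module(dep)
    except ImportError as exc:
        print(f"unmet external dependency {dep}: {exc}")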
0eabc95105fecfd4b960b1c135f589f0eea9de2a
|
flaskrst/modules/staticpages/__init__.py
|
flaskrst/modules/staticpages/__init__.py
|
# -*- coding: utf-8 -*-
"""
flask-rst.modules.staticfiles
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
import os
from flask import current_app, render_template
from flaskrst.parsers import rstDocument
from flaskrst.modules import Blueprint
static_pages = Blueprint('static_pages', __name__, \
template_folder='templates')
@static_pages.route('/', defaults={'file_name': 'index'})
@static_pages.route('/<file_name>')
def show(file_name):
rst_file = os.path.join(current_app.config['SOURCE'], file_name + '.rst')
rst = rstDocument(rst_file)
return render_template("static_page.html", page=rst)
def setup(app, cfg):
app.register_blueprint(static_pages)
|
# -*- coding: utf-8 -*-
"""
flask-rst.modules.staticfiles
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
import os
from flask import current_app, render_template
from flaskrst.parsers import rstDocument
from flaskrst.modules import Blueprint
static_pages = Blueprint('static_pages', __name__, \
template_folder='templates')
@static_pages.route('/', defaults={'file_path': 'index'})
@static_pages.route('/<path:file_path>')
def show(file_path):
if file_path.endswith('/'):
file_path += "index"
rst_file = os.path.join(current_app.config['SOURCE'], file_path + '.rst')
rst = rstDocument(rst_file)
return render_template("static_page.html", page=rst)
def setup(app, cfg):
app.register_blueprint(static_pages)
|
Support of static pages inside of a directory
|
Support of static pages inside of a directory
|
Python
|
bsd-3-clause
|
jarus/flask-rst
|
# -*- coding: utf-8 -*-
"""
flask-rst.modules.staticfiles
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
import os
from flask import current_app, render_template
from flaskrst.parsers import rstDocument
from flaskrst.modules import Blueprint
static_pages = Blueprint('static_pages', __name__, \
template_folder='templates')
@static_pages.route('/', defaults={'file_name': 'index'})
@static_pages.route('/<file_name>')
def show(file_name):
rst_file = os.path.join(current_app.config['SOURCE'], file_name + '.rst')
rst = rstDocument(rst_file)
return render_template("static_page.html", page=rst)
def setup(app, cfg):
app.register_blueprint(static_pages)Support of static pages inside of a directory
|
# -*- coding: utf-8 -*-
"""
flask-rst.modules.staticfiles
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
import os
from flask import current_app, render_template
from flaskrst.parsers import rstDocument
from flaskrst.modules import Blueprint
static_pages = Blueprint('static_pages', __name__, \
template_folder='templates')
@static_pages.route('/', defaults={'file_path': 'index'})
@static_pages.route('/<path:file_path>')
def show(file_path):
if file_path.endswith('/'):
file_path += "index"
rst_file = os.path.join(current_app.config['SOURCE'], file_path + '.rst')
rst = rstDocument(rst_file)
return render_template("static_page.html", page=rst)
def setup(app, cfg):
app.register_blueprint(static_pages)
|
<commit_before># -*- coding: utf-8 -*-
"""
flask-rst.modules.staticfiles
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
import os
from flask import current_app, render_template
from flaskrst.parsers import rstDocument
from flaskrst.modules import Blueprint
static_pages = Blueprint('static_pages', __name__, \
template_folder='templates')
@static_pages.route('/', defaults={'file_name': 'index'})
@static_pages.route('/<file_name>')
def show(file_name):
rst_file = os.path.join(current_app.config['SOURCE'], file_name + '.rst')
rst = rstDocument(rst_file)
return render_template("static_page.html", page=rst)
def setup(app, cfg):
app.register_blueprint(static_pages)<commit_msg>Support of static pages inside of a directory<commit_after>
|
# -*- coding: utf-8 -*-
"""
flask-rst.modules.staticfiles
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
import os
from flask import current_app, render_template
from flaskrst.parsers import rstDocument
from flaskrst.modules import Blueprint
static_pages = Blueprint('static_pages', __name__, \
template_folder='templates')
@static_pages.route('/', defaults={'file_path': 'index'})
@static_pages.route('/<path:file_path>')
def show(file_path):
if file_path.endswith('/'):
file_path += "index"
rst_file = os.path.join(current_app.config['SOURCE'], file_path + '.rst')
rst = rstDocument(rst_file)
return render_template("static_page.html", page=rst)
def setup(app, cfg):
app.register_blueprint(static_pages)
|
# -*- coding: utf-8 -*-
"""
flask-rst.modules.staticfiles
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
import os
from flask import current_app, render_template
from flaskrst.parsers import rstDocument
from flaskrst.modules import Blueprint
static_pages = Blueprint('static_pages', __name__, \
template_folder='templates')
@static_pages.route('/', defaults={'file_name': 'index'})
@static_pages.route('/<file_name>')
def show(file_name):
rst_file = os.path.join(current_app.config['SOURCE'], file_name + '.rst')
rst = rstDocument(rst_file)
return render_template("static_page.html", page=rst)
def setup(app, cfg):
app.register_blueprint(static_pages)Support of static pages inside of a directory# -*- coding: utf-8 -*-
"""
flask-rst.modules.staticfiles
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
import os
from flask import current_app, render_template
from flaskrst.parsers import rstDocument
from flaskrst.modules import Blueprint
static_pages = Blueprint('static_pages', __name__, \
template_folder='templates')
@static_pages.route('/', defaults={'file_path': 'index'})
@static_pages.route('/<path:file_path>')
def show(file_path):
if file_path.endswith('/'):
file_path += "index"
rst_file = os.path.join(current_app.config['SOURCE'], file_path + '.rst')
rst = rstDocument(rst_file)
return render_template("static_page.html", page=rst)
def setup(app, cfg):
app.register_blueprint(static_pages)
|
<commit_before># -*- coding: utf-8 -*-
"""
flask-rst.modules.staticfiles
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
import os
from flask import current_app, render_template
from flaskrst.parsers import rstDocument
from flaskrst.modules import Blueprint
static_pages = Blueprint('static_pages', __name__, \
template_folder='templates')
@static_pages.route('/', defaults={'file_name': 'index'})
@static_pages.route('/<file_name>')
def show(file_name):
rst_file = os.path.join(current_app.config['SOURCE'], file_name + '.rst')
rst = rstDocument(rst_file)
return render_template("static_page.html", page=rst)
def setup(app, cfg):
app.register_blueprint(static_pages)<commit_msg>Support of static pages inside of a directory<commit_after># -*- coding: utf-8 -*-
"""
flask-rst.modules.staticfiles
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
import os
from flask import current_app, render_template
from flaskrst.parsers import rstDocument
from flaskrst.modules import Blueprint
static_pages = Blueprint('static_pages', __name__, \
template_folder='templates')
@static_pages.route('/', defaults={'file_path': 'index'})
@static_pages.route('/<path:file_path>')
def show(file_path):
if file_path.endswith('/'):
file_path += "index"
rst_file = os.path.join(current_app.config['SOURCE'], file_path + '.rst')
rst = rstDocument(rst_file)
return render_template("static_page.html", page=rst)
def setup(app, cfg):
app.register_blueprint(static_pages)
|
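The substantive change here is the switch from <file_name> to <path:file_path>: the default converter stops at slashes, while the path converter matches across them, so nested pages resolve, and the endswith('/') guard maps directory-style URLs onto their index page. A standalone sketch of the routing behavior, independent of flask-rst:

from flask import Flask

app = Flask(__name__)

@app.route('/', defaults={'file_path': 'index'})
@app.route('/<path:file_path>')
def show(file_path):
    if file_path.endswith('/'):
        file_path += 'index'
    return file_path

client = app.test_client()
print(client.get('/').data)              # expected: b'index'
print(client.get('/docs/install').data)  # expected: b'docs/install'
print(client.get('/docs/').data)         # expected: b'docs/index'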