commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
358b40875647734bb25d2b9e25506d13ca60a740
|
neuroimaging/utils/tests/data/__init__.py
|
neuroimaging/utils/tests/data/__init__.py
|
"""Information used for locating nipy test data.
Nipy uses a set of test data that is installed separately. The test
data should be located in the directory ``~/.nipy/tests/data``.
Install the data in your home directory from the data repository::
$ mkdir -p .nipy/tests/data
$ svn co http://neuroimaging.scipy.org/svn/ni/data/trunk/fmri .nipy/tests/data
"""
from os.path import expanduser, exists, join
from neuroimaging.io.datasource import Repository
# data directory should be: $HOME/.nipy/tests/data
datapath = expanduser(join('~', '.nipy', 'tests', 'data'))
if not exists(datapath):
raise IOError, 'Nipy data directory is not found!'
repository = Repository(datapath)
|
"""
Nipy uses a set of test data that is installed separately. The test
data should be located in the directory ``~/.nipy/tests/data``.
Install the data in your home directory from the data repository::
$ mkdir -p .nipy/tests/data
$ svn co http://neuroimaging.scipy.org/svn/ni/data/trunk/fmri .nipy/tests/data
"""
# Fernando pointed out that we should wrap the test data into a
# tarball and write a pure python function to grab the data for people
# instead of using svn. Users may not have svn and the mkdir may not
# work on Windows.
from os.path import expanduser, exists, join
from neuroimaging.io.datasource import Repository
# data directory should be: $HOME/.nipy/tests/data
datapath = expanduser(join('~', '.nipy', 'tests', 'data'))
if not exists(datapath):
msg = 'Nipy data directory is not found!\n%s' % __doc__
raise IOError(msg)
repository = Repository(datapath)
|
Extend error message regarding missing test data.
|
Extend error message regarding missing test data.
|
Python
|
bsd-3-clause
|
yarikoptic/NiPy-OLD,yarikoptic/NiPy-OLD
|
"""Information used for locating nipy test data.
Nipy uses a set of test data that is installed separately. The test
data should be located in the directory ``~/.nipy/tests/data``.
Install the data in your home directory from the data repository::
$ mkdir -p .nipy/tests/data
$ svn co http://neuroimaging.scipy.org/svn/ni/data/trunk/fmri .nipy/tests/data
"""
from os.path import expanduser, exists, join
from neuroimaging.io.datasource import Repository
# data directory should be: $HOME/.nipy/tests/data
datapath = expanduser(join('~', '.nipy', 'tests', 'data'))
if not exists(datapath):
raise IOError, 'Nipy data directory is not found!'
repository = Repository(datapath)
Extend error message regarding missing test data.
|
"""
Nipy uses a set of test data that is installed separately. The test
data should be located in the directory ``~/.nipy/tests/data``.
Install the data in your home directory from the data repository::
$ mkdir -p .nipy/tests/data
$ svn co http://neuroimaging.scipy.org/svn/ni/data/trunk/fmri .nipy/tests/data
"""
# Fernando pointed out that we should wrap the test data into a
# tarball and write a pure python function to grab the data for people
# instead of using svn. Users may not have svn and the mkdir may not
# work on Windows.
from os.path import expanduser, exists, join
from neuroimaging.io.datasource import Repository
# data directory should be: $HOME/.nipy/tests/data
datapath = expanduser(join('~', '.nipy', 'tests', 'data'))
if not exists(datapath):
msg = 'Nipy data directory is not found!\n%s' % __doc__
raise IOError(msg)
repository = Repository(datapath)
|
<commit_before>"""Information used for locating nipy test data.
Nipy uses a set of test data that is installed separately. The test
data should be located in the directory ``~/.nipy/tests/data``.
Install the data in your home directory from the data repository::
$ mkdir -p .nipy/tests/data
$ svn co http://neuroimaging.scipy.org/svn/ni/data/trunk/fmri .nipy/tests/data
"""
from os.path import expanduser, exists, join
from neuroimaging.io.datasource import Repository
# data directory should be: $HOME/.nipy/tests/data
datapath = expanduser(join('~', '.nipy', 'tests', 'data'))
if not exists(datapath):
raise IOError, 'Nipy data directory is not found!'
repository = Repository(datapath)
<commit_msg>Extend error message regarding missing test data.<commit_after>
|
"""
Nipy uses a set of test data that is installed separately. The test
data should be located in the directory ``~/.nipy/tests/data``.
Install the data in your home directory from the data repository::
$ mkdir -p .nipy/tests/data
$ svn co http://neuroimaging.scipy.org/svn/ni/data/trunk/fmri .nipy/tests/data
"""
# Fernando pointed out that we should wrap the test data into a
# tarball and write a pure python function to grab the data for people
# instead of using svn. Users may not have svn and the mkdir may not
# work on Windows.
from os.path import expanduser, exists, join
from neuroimaging.io.datasource import Repository
# data directory should be: $HOME/.nipy/tests/data
datapath = expanduser(join('~', '.nipy', 'tests', 'data'))
if not exists(datapath):
msg = 'Nipy data directory is not found!\n%s' % __doc__
raise IOError(msg)
repository = Repository(datapath)
|
"""Information used for locating nipy test data.
Nipy uses a set of test data that is installed separately. The test
data should be located in the directory ``~/.nipy/tests/data``.
Install the data in your home directory from the data repository::
$ mkdir -p .nipy/tests/data
$ svn co http://neuroimaging.scipy.org/svn/ni/data/trunk/fmri .nipy/tests/data
"""
from os.path import expanduser, exists, join
from neuroimaging.io.datasource import Repository
# data directory should be: $HOME/.nipy/tests/data
datapath = expanduser(join('~', '.nipy', 'tests', 'data'))
if not exists(datapath):
raise IOError, 'Nipy data directory is not found!'
repository = Repository(datapath)
Extend error message regarding missing test data."""
Nipy uses a set of test data that is installed separately. The test
data should be located in the directory ``~/.nipy/tests/data``.
Install the data in your home directory from the data repository::
$ mkdir -p .nipy/tests/data
$ svn co http://neuroimaging.scipy.org/svn/ni/data/trunk/fmri .nipy/tests/data
"""
# Fernando pointed out that we should wrap the test data into a
# tarball and write a pure python function to grab the data for people
# instead of using svn. Users may not have svn and the mkdir may not
# work on Windows.
from os.path import expanduser, exists, join
from neuroimaging.io.datasource import Repository
# data directory should be: $HOME/.nipy/tests/data
datapath = expanduser(join('~', '.nipy', 'tests', 'data'))
if not exists(datapath):
msg = 'Nipy data directory is not found!\n%s' % __doc__
raise IOError(msg)
repository = Repository(datapath)
|
<commit_before>"""Information used for locating nipy test data.
Nipy uses a set of test data that is installed separately. The test
data should be located in the directory ``~/.nipy/tests/data``.
Install the data in your home directory from the data repository::
$ mkdir -p .nipy/tests/data
$ svn co http://neuroimaging.scipy.org/svn/ni/data/trunk/fmri .nipy/tests/data
"""
from os.path import expanduser, exists, join
from neuroimaging.io.datasource import Repository
# data directory should be: $HOME/.nipy/tests/data
datapath = expanduser(join('~', '.nipy', 'tests', 'data'))
if not exists(datapath):
raise IOError, 'Nipy data directory is not found!'
repository = Repository(datapath)
<commit_msg>Extend error message regarding missing test data.<commit_after>"""
Nipy uses a set of test data that is installed separately. The test
data should be located in the directory ``~/.nipy/tests/data``.
Install the data in your home directory from the data repository::
$ mkdir -p .nipy/tests/data
$ svn co http://neuroimaging.scipy.org/svn/ni/data/trunk/fmri .nipy/tests/data
"""
# Fernando pointed out that we should wrap the test data into a
# tarball and write a pure python function to grab the data for people
# instead of using svn. Users may not have svn and the mkdir may not
# work on Windows.
from os.path import expanduser, exists, join
from neuroimaging.io.datasource import Repository
# data directory should be: $HOME/.nipy/tests/data
datapath = expanduser(join('~', '.nipy', 'tests', 'data'))
if not exists(datapath):
msg = 'Nipy data directory is not found!\n%s' % __doc__
raise IOError(msg)
repository = Repository(datapath)
|
2430d4ae362ca22ebff83b405355d60343b3a0c1
|
non_iterable_example/_5_context.py
|
non_iterable_example/_5_context.py
|
def print_numbers(numbers):
for n in numbers:
print(n)
if random:
numbers = 1
print_numbers(numbers)
else:
numbers = 1, 2, 3
print_numbers(numbers)
|
def print_numbers(flag, numbers):
if flag:
for n in numbers:
print(n)
if random:
numbers = 1
print_numbers(False, numbers)
else:
numbers = 1, 2, 3
print_numbers(True, numbers)
|
Modify example to emphasise importance of context.
|
Modify example to emphasise importance of context.
|
Python
|
unlicense
|
markshannon/buggy_code
|
def print_numbers(numbers):
for n in numbers:
print(n)
if random:
numbers = 1
print_numbers(numbers)
else:
numbers = 1, 2, 3
print_numbers(numbers)
Modify example to emphasise importance of context.
|
def print_numbers(flag, numbers):
if flag:
for n in numbers:
print(n)
if random:
numbers = 1
print_numbers(False, numbers)
else:
numbers = 1, 2, 3
print_numbers(True, numbers)
|
<commit_before>
def print_numbers(numbers):
for n in numbers:
print(n)
if random:
numbers = 1
print_numbers(numbers)
else:
numbers = 1, 2, 3
print_numbers(numbers)
<commit_msg>Modify example to emphasise importance of context.<commit_after>
|
def print_numbers(flag, numbers):
if flag:
for n in numbers:
print(n)
if random:
numbers = 1
print_numbers(False, numbers)
else:
numbers = 1, 2, 3
print_numbers(True, numbers)
|
def print_numbers(numbers):
for n in numbers:
print(n)
if random:
numbers = 1
print_numbers(numbers)
else:
numbers = 1, 2, 3
print_numbers(numbers)
Modify example to emphasise importance of context.
def print_numbers(flag, numbers):
if flag:
for n in numbers:
print(n)
if random:
numbers = 1
print_numbers(False, numbers)
else:
numbers = 1, 2, 3
print_numbers(True, numbers)
|
<commit_before>
def print_numbers(numbers):
for n in numbers:
print(n)
if random:
numbers = 1
print_numbers(numbers)
else:
numbers = 1, 2, 3
print_numbers(numbers)
<commit_msg>Modify example to emphasise importance of context.<commit_after>
def print_numbers(flag, numbers):
if flag:
for n in numbers:
print(n)
if random:
numbers = 1
print_numbers(False, numbers)
else:
numbers = 1, 2, 3
print_numbers(True, numbers)
|
13c6a1527bb5d241989c7b7beb11a48eacc4d69c
|
tests/unit/http_tests.py
|
tests/unit/http_tests.py
|
import unittest, os, sys
current_dir = os.path.dirname(__file__)
base_dir = os.path.join(current_dir, os.pardir, os.pardir)
sys.path.append(base_dir)
from requests.exceptions import ConnectionError
from pycrawler.http import HttpRequest
class HttpRequestTests(unittest.TestCase):
def test_response_not_empty(self):
url = 'http://www.pasarpanda.com'
http = HttpRequest.get(url)
self.assertIsNotNone(http)
def test_raise_error(self):
url = 'http://www.fake-url-that-not-exist-on-the-internet.com'
with self.assertRaises(ConnectionError):
HttpRequest.get(url)
|
import unittest, os, sys
current_dir = os.path.dirname(__file__)
base_dir = os.path.join(current_dir, os.pardir, os.pardir)
sys.path.append(base_dir)
from pycrawler.http import HttpRequest, UrlNotValidException
class HttpRequestTests(unittest.TestCase):
def test_response_not_empty(self):
url = 'http://www.pasarpanda.com'
http = HttpRequest.get(url)
self.assertIsNotNone(http)
def test_raise_error(self):
url = 'http://www.fake-url-that-not-exist-on-the-internet.com'
with self.assertRaises(UrlNotValidException):
HttpRequest.get(url)
|
Change raises class to UrlNotValidException
|
Change raises class to UrlNotValidException
|
Python
|
mit
|
slaveofcode/pycrawler,slaveofcode/pycrawler
|
import unittest, os, sys
current_dir = os.path.dirname(__file__)
base_dir = os.path.join(current_dir, os.pardir, os.pardir)
sys.path.append(base_dir)
from requests.exceptions import ConnectionError
from pycrawler.http import HttpRequest
class HttpRequestTests(unittest.TestCase):
def test_response_not_empty(self):
url = 'http://www.pasarpanda.com'
http = HttpRequest.get(url)
self.assertIsNotNone(http)
def test_raise_error(self):
url = 'http://www.fake-url-that-not-exist-on-the-internet.com'
with self.assertRaises(ConnectionError):
HttpRequest.get(url)
Change raises class to UrlNotValidException
|
import unittest, os, sys
current_dir = os.path.dirname(__file__)
base_dir = os.path.join(current_dir, os.pardir, os.pardir)
sys.path.append(base_dir)
from pycrawler.http import HttpRequest, UrlNotValidException
class HttpRequestTests(unittest.TestCase):
def test_response_not_empty(self):
url = 'http://www.pasarpanda.com'
http = HttpRequest.get(url)
self.assertIsNotNone(http)
def test_raise_error(self):
url = 'http://www.fake-url-that-not-exist-on-the-internet.com'
with self.assertRaises(UrlNotValidException):
HttpRequest.get(url)
|
<commit_before>import unittest, os, sys
current_dir = os.path.dirname(__file__)
base_dir = os.path.join(current_dir, os.pardir, os.pardir)
sys.path.append(base_dir)
from requests.exceptions import ConnectionError
from pycrawler.http import HttpRequest
class HttpRequestTests(unittest.TestCase):
def test_response_not_empty(self):
url = 'http://www.pasarpanda.com'
http = HttpRequest.get(url)
self.assertIsNotNone(http)
def test_raise_error(self):
url = 'http://www.fake-url-that-not-exist-on-the-internet.com'
with self.assertRaises(ConnectionError):
HttpRequest.get(url)
<commit_msg>Change raises class to UrlNotValidException<commit_after>
|
import unittest, os, sys
current_dir = os.path.dirname(__file__)
base_dir = os.path.join(current_dir, os.pardir, os.pardir)
sys.path.append(base_dir)
from pycrawler.http import HttpRequest, UrlNotValidException
class HttpRequestTests(unittest.TestCase):
def test_response_not_empty(self):
url = 'http://www.pasarpanda.com'
http = HttpRequest.get(url)
self.assertIsNotNone(http)
def test_raise_error(self):
url = 'http://www.fake-url-that-not-exist-on-the-internet.com'
with self.assertRaises(UrlNotValidException):
HttpRequest.get(url)
|
import unittest, os, sys
current_dir = os.path.dirname(__file__)
base_dir = os.path.join(current_dir, os.pardir, os.pardir)
sys.path.append(base_dir)
from requests.exceptions import ConnectionError
from pycrawler.http import HttpRequest
class HttpRequestTests(unittest.TestCase):
def test_response_not_empty(self):
url = 'http://www.pasarpanda.com'
http = HttpRequest.get(url)
self.assertIsNotNone(http)
def test_raise_error(self):
url = 'http://www.fake-url-that-not-exist-on-the-internet.com'
with self.assertRaises(ConnectionError):
HttpRequest.get(url)
Change raises class to UrlNotValidExceptionimport unittest, os, sys
current_dir = os.path.dirname(__file__)
base_dir = os.path.join(current_dir, os.pardir, os.pardir)
sys.path.append(base_dir)
from pycrawler.http import HttpRequest, UrlNotValidException
class HttpRequestTests(unittest.TestCase):
def test_response_not_empty(self):
url = 'http://www.pasarpanda.com'
http = HttpRequest.get(url)
self.assertIsNotNone(http)
def test_raise_error(self):
url = 'http://www.fake-url-that-not-exist-on-the-internet.com'
with self.assertRaises(UrlNotValidException):
HttpRequest.get(url)
|
<commit_before>import unittest, os, sys
current_dir = os.path.dirname(__file__)
base_dir = os.path.join(current_dir, os.pardir, os.pardir)
sys.path.append(base_dir)
from requests.exceptions import ConnectionError
from pycrawler.http import HttpRequest
class HttpRequestTests(unittest.TestCase):
def test_response_not_empty(self):
url = 'http://www.pasarpanda.com'
http = HttpRequest.get(url)
self.assertIsNotNone(http)
def test_raise_error(self):
url = 'http://www.fake-url-that-not-exist-on-the-internet.com'
with self.assertRaises(ConnectionError):
HttpRequest.get(url)
<commit_msg>Change raises class to UrlNotValidException<commit_after>import unittest, os, sys
current_dir = os.path.dirname(__file__)
base_dir = os.path.join(current_dir, os.pardir, os.pardir)
sys.path.append(base_dir)
from pycrawler.http import HttpRequest, UrlNotValidException
class HttpRequestTests(unittest.TestCase):
def test_response_not_empty(self):
url = 'http://www.pasarpanda.com'
http = HttpRequest.get(url)
self.assertIsNotNone(http)
def test_raise_error(self):
url = 'http://www.fake-url-that-not-exist-on-the-internet.com'
with self.assertRaises(UrlNotValidException):
HttpRequest.get(url)
|
2ac5befcfe04be0d8406c539f6900c079b561dfd
|
tests/test_iati_standard.py
|
tests/test_iati_standard.py
|
from web_test_base import *
class TestIATIStandard(WebTestBase):
"""
TODO: Add tests to assert that:
- the number of activities and publishers roughly matches those displayed on the Registry
- a key string appears on the homepage
"""
requests_to_load = {
'IATI Standard Homepage - no www': {
'url': 'http://iatistandard.org'
},
'IATI Standard Homepage - with www': {
'url': 'http://www.iatistandard.org'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
# Selection of header links
assert "/en/news/" in result
assert "/en/about/" in result
assert "/en/iati-standard/" in result
assert "/en/using-data/" in result
# Selection of footer links
assert "/en/contact/" in result
assert "/en/terms-and-conditions/" in result
assert "/en/privacy-policy/" in result
def test_newsletter_signup_form(self, loaded_request):
"""
Tests to confirm that there is always a form to subscribe to the newsletter within the footer.
"""
xpath = '//*[@id="mc-embedded-subscribe-form"]'
result = utility.locate_xpath_result(loaded_request, xpath)
assert len(result) == 1
|
from web_test_base import *
class TestIATIStandard(WebTestBase):
"""
TODO: Add tests to assert that:
- the number of activities and publishers roughly matches those displayed on the Registry
- a key string appears on the homepage
"""
requests_to_load = {
'IATI Standard Homepage - no www': {
'url': 'http://iatistandard.org'
},
'IATI Standard Homepage - with www': {
'url': 'http://www.iatistandard.org'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
# Selection of header links
assert "/en/news/" in result
assert "/en/about/" in result
assert "/en/iati-standard/" in result
assert "/en/using-data/" in result
# Selection of footer links
assert "/en/contact/" in result
assert "/en/terms-and-conditions/" in result
assert "/en/privacy-policy/" in result
def test_contains_newsletter_signup_form(self, loaded_request):
"""
Tests to confirm that there is always a form to subscribe to the newsletter within the footer.
"""
xpath = '//*[@id="mc-embedded-subscribe-form"]'
result = utility.locate_xpath_result(loaded_request, xpath)
assert len(result) == 1
|
Change test name to be more descriptive
|
Change test name to be more descriptive
Also better conforms to the naming conventions for other tests in this module.
|
Python
|
mit
|
IATI/IATI-Website-Tests
|
from web_test_base import *
class TestIATIStandard(WebTestBase):
"""
TODO: Add tests to assert that:
- the number of activities and publishers roughly matches those displayed on the Registry
- a key string appears on the homepage
"""
requests_to_load = {
'IATI Standard Homepage - no www': {
'url': 'http://iatistandard.org'
},
'IATI Standard Homepage - with www': {
'url': 'http://www.iatistandard.org'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
# Selection of header links
assert "/en/news/" in result
assert "/en/about/" in result
assert "/en/iati-standard/" in result
assert "/en/using-data/" in result
# Selection of footer links
assert "/en/contact/" in result
assert "/en/terms-and-conditions/" in result
assert "/en/privacy-policy/" in result
def test_newsletter_signup_form(self, loaded_request):
"""
Tests to confirm that there is always a form to subscribe to the newsletter within the footer.
"""
xpath = '//*[@id="mc-embedded-subscribe-form"]'
result = utility.locate_xpath_result(loaded_request, xpath)
assert len(result) == 1
Change test name to be more descriptive
Also better conforms to the naming conventions for other tests in this module.
|
from web_test_base import *
class TestIATIStandard(WebTestBase):
"""
TODO: Add tests to assert that:
- the number of activities and publishers roughly matches those displayed on the Registry
- a key string appears on the homepage
"""
requests_to_load = {
'IATI Standard Homepage - no www': {
'url': 'http://iatistandard.org'
},
'IATI Standard Homepage - with www': {
'url': 'http://www.iatistandard.org'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
# Selection of header links
assert "/en/news/" in result
assert "/en/about/" in result
assert "/en/iati-standard/" in result
assert "/en/using-data/" in result
# Selection of footer links
assert "/en/contact/" in result
assert "/en/terms-and-conditions/" in result
assert "/en/privacy-policy/" in result
def test_contains_newsletter_signup_form(self, loaded_request):
"""
Tests to confirm that there is always a form to subscribe to the newsletter within the footer.
"""
xpath = '//*[@id="mc-embedded-subscribe-form"]'
result = utility.locate_xpath_result(loaded_request, xpath)
assert len(result) == 1
|
<commit_before>from web_test_base import *
class TestIATIStandard(WebTestBase):
"""
TODO: Add tests to assert that:
- the number of activities and publishers roughly matches those displayed on the Registry
- a key string appears on the homepage
"""
requests_to_load = {
'IATI Standard Homepage - no www': {
'url': 'http://iatistandard.org'
},
'IATI Standard Homepage - with www': {
'url': 'http://www.iatistandard.org'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
# Selection of header links
assert "/en/news/" in result
assert "/en/about/" in result
assert "/en/iati-standard/" in result
assert "/en/using-data/" in result
# Selection of footer links
assert "/en/contact/" in result
assert "/en/terms-and-conditions/" in result
assert "/en/privacy-policy/" in result
def test_newsletter_signup_form(self, loaded_request):
"""
Tests to confirm that there is always a form to subscribe to the newsletter within the footer.
"""
xpath = '//*[@id="mc-embedded-subscribe-form"]'
result = utility.locate_xpath_result(loaded_request, xpath)
assert len(result) == 1
<commit_msg>Change test name to be more descriptive
Also better conforms to the naming conventions for other tests in this module.<commit_after>
|
from web_test_base import *
class TestIATIStandard(WebTestBase):
"""
TODO: Add tests to assert that:
- the number of activities and publishers roughly matches those displayed on the Registry
- a key string appears on the homepage
"""
requests_to_load = {
'IATI Standard Homepage - no www': {
'url': 'http://iatistandard.org'
},
'IATI Standard Homepage - with www': {
'url': 'http://www.iatistandard.org'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
# Selection of header links
assert "/en/news/" in result
assert "/en/about/" in result
assert "/en/iati-standard/" in result
assert "/en/using-data/" in result
# Selection of footer links
assert "/en/contact/" in result
assert "/en/terms-and-conditions/" in result
assert "/en/privacy-policy/" in result
def test_contains_newsletter_signup_form(self, loaded_request):
"""
Tests to confirm that there is always a form to subscribe to the newsletter within the footer.
"""
xpath = '//*[@id="mc-embedded-subscribe-form"]'
result = utility.locate_xpath_result(loaded_request, xpath)
assert len(result) == 1
|
from web_test_base import *
class TestIATIStandard(WebTestBase):
"""
TODO: Add tests to assert that:
- the number of activities and publishers roughly matches those displayed on the Registry
- a key string appears on the homepage
"""
requests_to_load = {
'IATI Standard Homepage - no www': {
'url': 'http://iatistandard.org'
},
'IATI Standard Homepage - with www': {
'url': 'http://www.iatistandard.org'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
# Selection of header links
assert "/en/news/" in result
assert "/en/about/" in result
assert "/en/iati-standard/" in result
assert "/en/using-data/" in result
# Selection of footer links
assert "/en/contact/" in result
assert "/en/terms-and-conditions/" in result
assert "/en/privacy-policy/" in result
def test_newsletter_signup_form(self, loaded_request):
"""
Tests to confirm that there is always a form to subscribe to the newsletter within the footer.
"""
xpath = '//*[@id="mc-embedded-subscribe-form"]'
result = utility.locate_xpath_result(loaded_request, xpath)
assert len(result) == 1
Change test name to be more descriptive
Also better conforms to the naming conventions for other tests in this module.from web_test_base import *
class TestIATIStandard(WebTestBase):
"""
TODO: Add tests to assert that:
- the number of activities and publishers roughly matches those displayed on the Registry
- a key string appears on the homepage
"""
requests_to_load = {
'IATI Standard Homepage - no www': {
'url': 'http://iatistandard.org'
},
'IATI Standard Homepage - with www': {
'url': 'http://www.iatistandard.org'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
# Selection of header links
assert "/en/news/" in result
assert "/en/about/" in result
assert "/en/iati-standard/" in result
assert "/en/using-data/" in result
# Selection of footer links
assert "/en/contact/" in result
assert "/en/terms-and-conditions/" in result
assert "/en/privacy-policy/" in result
def test_contains_newsletter_signup_form(self, loaded_request):
"""
Tests to confirm that there is always a form to subscribe to the newsletter within the footer.
"""
xpath = '//*[@id="mc-embedded-subscribe-form"]'
result = utility.locate_xpath_result(loaded_request, xpath)
assert len(result) == 1
|
<commit_before>from web_test_base import *
class TestIATIStandard(WebTestBase):
"""
TODO: Add tests to assert that:
- the number of activities and publishers roughly matches those displayed on the Registry
- a key string appears on the homepage
"""
requests_to_load = {
'IATI Standard Homepage - no www': {
'url': 'http://iatistandard.org'
},
'IATI Standard Homepage - with www': {
'url': 'http://www.iatistandard.org'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
# Selection of header links
assert "/en/news/" in result
assert "/en/about/" in result
assert "/en/iati-standard/" in result
assert "/en/using-data/" in result
# Selection of footer links
assert "/en/contact/" in result
assert "/en/terms-and-conditions/" in result
assert "/en/privacy-policy/" in result
def test_newsletter_signup_form(self, loaded_request):
"""
Tests to confirm that there is always a form to subscribe to the newsletter within the footer.
"""
xpath = '//*[@id="mc-embedded-subscribe-form"]'
result = utility.locate_xpath_result(loaded_request, xpath)
assert len(result) == 1
<commit_msg>Change test name to be more descriptive
Also better conforms to the naming conventions for other tests in this module.<commit_after>from web_test_base import *
class TestIATIStandard(WebTestBase):
"""
TODO: Add tests to assert that:
- the number of activities and publishers roughly matches those displayed on the Registry
- a key string appears on the homepage
"""
requests_to_load = {
'IATI Standard Homepage - no www': {
'url': 'http://iatistandard.org'
},
'IATI Standard Homepage - with www': {
'url': 'http://www.iatistandard.org'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
# Selection of header links
assert "/en/news/" in result
assert "/en/about/" in result
assert "/en/iati-standard/" in result
assert "/en/using-data/" in result
# Selection of footer links
assert "/en/contact/" in result
assert "/en/terms-and-conditions/" in result
assert "/en/privacy-policy/" in result
def test_contains_newsletter_signup_form(self, loaded_request):
"""
Tests to confirm that there is always a form to subscribe to the newsletter within the footer.
"""
xpath = '//*[@id="mc-embedded-subscribe-form"]'
result = utility.locate_xpath_result(loaded_request, xpath)
assert len(result) == 1
|
53cb3adb97bb434a896938c2c7f78109e5b5566f
|
tests/test_identify_repo.py
|
tests/test_identify_repo.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_identify_repo
------------------
"""
import pytest
from cookiecutter import exceptions, vcs
def test_identify_git_github():
repo_url = "https://github.com/audreyr/cookiecutter-pypackage.git"
assert vcs.identify_repo(repo_url) == "git"
def test_identify_git_github_no_extension():
repo_url = "https://github.com/audreyr/cookiecutter-pypackage"
assert vcs.identify_repo(repo_url) == "git"
def test_identify_git_gitorious():
repo_url = (
"git@gitorious.org:cookiecutter-gitorious/cookiecutter-gitorious.git"
)
assert vcs.identify_repo(repo_url) == "git"
def test_identify_hg_mercurial():
repo_url = "https://audreyr@bitbucket.org/audreyr/cookiecutter-bitbucket"
assert vcs.identify_repo(repo_url) == "hg"
def test_unknown_repo_type():
repo_url = "http://norepotypespecified.com"
with pytest.raises(exceptions.UnknownRepoType):
vcs.identify_repo(repo_url)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_identify_repo
------------------
"""
import pytest
from cookiecutter import exceptions, vcs
def test_identify_git_github():
repo_url = 'https://github.com/audreyr/cookiecutter-pypackage.git'
assert vcs.identify_repo(repo_url) == 'git'
def test_identify_git_github_no_extension():
repo_url = 'https://github.com/audreyr/cookiecutter-pypackage'
assert vcs.identify_repo(repo_url) == 'git'
def test_identify_git_gitorious():
repo_url = (
'git@gitorious.org:cookiecutter-gitorious/cookiecutter-gitorious.git'
)
assert vcs.identify_repo(repo_url) == 'git'
def test_identify_hg_mercurial():
repo_url = 'https://audreyr@bitbucket.org/audreyr/cookiecutter-bitbucket'
assert vcs.identify_repo(repo_url) == 'hg'
def test_unknown_repo_type():
repo_url = 'http://norepotypespecified.com'
with pytest.raises(exceptions.UnknownRepoType):
vcs.identify_repo(repo_url)
|
Use single quotes instead of double quotes
|
Use single quotes instead of double quotes
|
Python
|
bsd-3-clause
|
audreyr/cookiecutter,audreyr/cookiecutter,sp1rs/cookiecutter,lucius-feng/cookiecutter,luzfcb/cookiecutter,christabor/cookiecutter,jhermann/cookiecutter,terryjbates/cookiecutter,cichm/cookiecutter,terryjbates/cookiecutter,Springerle/cookiecutter,venumech/cookiecutter,moi65/cookiecutter,0k/cookiecutter,ramiroluz/cookiecutter,agconti/cookiecutter,nhomar/cookiecutter,benthomasson/cookiecutter,Vauxoo/cookiecutter,kkujawinski/cookiecutter,vincentbernat/cookiecutter,atlassian/cookiecutter,michaeljoseph/cookiecutter,vintasoftware/cookiecutter,takeflight/cookiecutter,stevepiercy/cookiecutter,nhomar/cookiecutter,christabor/cookiecutter,jhermann/cookiecutter,drgarcia1986/cookiecutter,venumech/cookiecutter,benthomasson/cookiecutter,janusnic/cookiecutter,janusnic/cookiecutter,willingc/cookiecutter,tylerdave/cookiecutter,foodszhang/cookiecutter,dajose/cookiecutter,vintasoftware/cookiecutter,stevepiercy/cookiecutter,sp1rs/cookiecutter,takeflight/cookiecutter,pjbull/cookiecutter,0k/cookiecutter,hackebrot/cookiecutter,moi65/cookiecutter,kkujawinski/cookiecutter,hackebrot/cookiecutter,pjbull/cookiecutter,ramiroluz/cookiecutter,cguardia/cookiecutter,tylerdave/cookiecutter,lgp171188/cookiecutter,ionelmc/cookiecutter,foodszhang/cookiecutter,vincentbernat/cookiecutter,cichm/cookiecutter,dajose/cookiecutter,lgp171188/cookiecutter,Springerle/cookiecutter,michaeljoseph/cookiecutter,ionelmc/cookiecutter,agconti/cookiecutter,cguardia/cookiecutter,lucius-feng/cookiecutter,luzfcb/cookiecutter,atlassian/cookiecutter,Vauxoo/cookiecutter,drgarcia1986/cookiecutter,willingc/cookiecutter
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_identify_repo
------------------
"""
import pytest
from cookiecutter import exceptions, vcs
def test_identify_git_github():
repo_url = "https://github.com/audreyr/cookiecutter-pypackage.git"
assert vcs.identify_repo(repo_url) == "git"
def test_identify_git_github_no_extension():
repo_url = "https://github.com/audreyr/cookiecutter-pypackage"
assert vcs.identify_repo(repo_url) == "git"
def test_identify_git_gitorious():
repo_url = (
"git@gitorious.org:cookiecutter-gitorious/cookiecutter-gitorious.git"
)
assert vcs.identify_repo(repo_url) == "git"
def test_identify_hg_mercurial():
repo_url = "https://audreyr@bitbucket.org/audreyr/cookiecutter-bitbucket"
assert vcs.identify_repo(repo_url) == "hg"
def test_unknown_repo_type():
repo_url = "http://norepotypespecified.com"
with pytest.raises(exceptions.UnknownRepoType):
vcs.identify_repo(repo_url)
Use single quotes instead of double quotes
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_identify_repo
------------------
"""
import pytest
from cookiecutter import exceptions, vcs
def test_identify_git_github():
repo_url = 'https://github.com/audreyr/cookiecutter-pypackage.git'
assert vcs.identify_repo(repo_url) == 'git'
def test_identify_git_github_no_extension():
repo_url = 'https://github.com/audreyr/cookiecutter-pypackage'
assert vcs.identify_repo(repo_url) == 'git'
def test_identify_git_gitorious():
repo_url = (
'git@gitorious.org:cookiecutter-gitorious/cookiecutter-gitorious.git'
)
assert vcs.identify_repo(repo_url) == 'git'
def test_identify_hg_mercurial():
repo_url = 'https://audreyr@bitbucket.org/audreyr/cookiecutter-bitbucket'
assert vcs.identify_repo(repo_url) == 'hg'
def test_unknown_repo_type():
repo_url = 'http://norepotypespecified.com'
with pytest.raises(exceptions.UnknownRepoType):
vcs.identify_repo(repo_url)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_identify_repo
------------------
"""
import pytest
from cookiecutter import exceptions, vcs
def test_identify_git_github():
repo_url = "https://github.com/audreyr/cookiecutter-pypackage.git"
assert vcs.identify_repo(repo_url) == "git"
def test_identify_git_github_no_extension():
repo_url = "https://github.com/audreyr/cookiecutter-pypackage"
assert vcs.identify_repo(repo_url) == "git"
def test_identify_git_gitorious():
repo_url = (
"git@gitorious.org:cookiecutter-gitorious/cookiecutter-gitorious.git"
)
assert vcs.identify_repo(repo_url) == "git"
def test_identify_hg_mercurial():
repo_url = "https://audreyr@bitbucket.org/audreyr/cookiecutter-bitbucket"
assert vcs.identify_repo(repo_url) == "hg"
def test_unknown_repo_type():
repo_url = "http://norepotypespecified.com"
with pytest.raises(exceptions.UnknownRepoType):
vcs.identify_repo(repo_url)
<commit_msg>Use single quotes instead of double quotes<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_identify_repo
------------------
"""
import pytest
from cookiecutter import exceptions, vcs
def test_identify_git_github():
repo_url = 'https://github.com/audreyr/cookiecutter-pypackage.git'
assert vcs.identify_repo(repo_url) == 'git'
def test_identify_git_github_no_extension():
repo_url = 'https://github.com/audreyr/cookiecutter-pypackage'
assert vcs.identify_repo(repo_url) == 'git'
def test_identify_git_gitorious():
repo_url = (
'git@gitorious.org:cookiecutter-gitorious/cookiecutter-gitorious.git'
)
assert vcs.identify_repo(repo_url) == 'git'
def test_identify_hg_mercurial():
repo_url = 'https://audreyr@bitbucket.org/audreyr/cookiecutter-bitbucket'
assert vcs.identify_repo(repo_url) == 'hg'
def test_unknown_repo_type():
repo_url = 'http://norepotypespecified.com'
with pytest.raises(exceptions.UnknownRepoType):
vcs.identify_repo(repo_url)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_identify_repo
------------------
"""
import pytest
from cookiecutter import exceptions, vcs
def test_identify_git_github():
repo_url = "https://github.com/audreyr/cookiecutter-pypackage.git"
assert vcs.identify_repo(repo_url) == "git"
def test_identify_git_github_no_extension():
repo_url = "https://github.com/audreyr/cookiecutter-pypackage"
assert vcs.identify_repo(repo_url) == "git"
def test_identify_git_gitorious():
repo_url = (
"git@gitorious.org:cookiecutter-gitorious/cookiecutter-gitorious.git"
)
assert vcs.identify_repo(repo_url) == "git"
def test_identify_hg_mercurial():
repo_url = "https://audreyr@bitbucket.org/audreyr/cookiecutter-bitbucket"
assert vcs.identify_repo(repo_url) == "hg"
def test_unknown_repo_type():
repo_url = "http://norepotypespecified.com"
with pytest.raises(exceptions.UnknownRepoType):
vcs.identify_repo(repo_url)
Use single quotes instead of double quotes#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_identify_repo
------------------
"""
import pytest
from cookiecutter import exceptions, vcs
def test_identify_git_github():
repo_url = 'https://github.com/audreyr/cookiecutter-pypackage.git'
assert vcs.identify_repo(repo_url) == 'git'
def test_identify_git_github_no_extension():
repo_url = 'https://github.com/audreyr/cookiecutter-pypackage'
assert vcs.identify_repo(repo_url) == 'git'
def test_identify_git_gitorious():
repo_url = (
'git@gitorious.org:cookiecutter-gitorious/cookiecutter-gitorious.git'
)
assert vcs.identify_repo(repo_url) == 'git'
def test_identify_hg_mercurial():
repo_url = 'https://audreyr@bitbucket.org/audreyr/cookiecutter-bitbucket'
assert vcs.identify_repo(repo_url) == 'hg'
def test_unknown_repo_type():
repo_url = 'http://norepotypespecified.com'
with pytest.raises(exceptions.UnknownRepoType):
vcs.identify_repo(repo_url)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_identify_repo
------------------
"""
import pytest
from cookiecutter import exceptions, vcs
def test_identify_git_github():
repo_url = "https://github.com/audreyr/cookiecutter-pypackage.git"
assert vcs.identify_repo(repo_url) == "git"
def test_identify_git_github_no_extension():
repo_url = "https://github.com/audreyr/cookiecutter-pypackage"
assert vcs.identify_repo(repo_url) == "git"
def test_identify_git_gitorious():
repo_url = (
"git@gitorious.org:cookiecutter-gitorious/cookiecutter-gitorious.git"
)
assert vcs.identify_repo(repo_url) == "git"
def test_identify_hg_mercurial():
repo_url = "https://audreyr@bitbucket.org/audreyr/cookiecutter-bitbucket"
assert vcs.identify_repo(repo_url) == "hg"
def test_unknown_repo_type():
repo_url = "http://norepotypespecified.com"
with pytest.raises(exceptions.UnknownRepoType):
vcs.identify_repo(repo_url)
<commit_msg>Use single quotes instead of double quotes<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_identify_repo
------------------
"""
import pytest
from cookiecutter import exceptions, vcs
def test_identify_git_github():
repo_url = 'https://github.com/audreyr/cookiecutter-pypackage.git'
assert vcs.identify_repo(repo_url) == 'git'
def test_identify_git_github_no_extension():
repo_url = 'https://github.com/audreyr/cookiecutter-pypackage'
assert vcs.identify_repo(repo_url) == 'git'
def test_identify_git_gitorious():
repo_url = (
'git@gitorious.org:cookiecutter-gitorious/cookiecutter-gitorious.git'
)
assert vcs.identify_repo(repo_url) == 'git'
def test_identify_hg_mercurial():
repo_url = 'https://audreyr@bitbucket.org/audreyr/cookiecutter-bitbucket'
assert vcs.identify_repo(repo_url) == 'hg'
def test_unknown_repo_type():
repo_url = 'http://norepotypespecified.com'
with pytest.raises(exceptions.UnknownRepoType):
vcs.identify_repo(repo_url)
|
6722f037783580f30e94317e1eb8e5c34b0e7719
|
runtests.py
|
runtests.py
|
import os
from django.conf import settings
os.environ['REUSE_DB'] = '1'
if not settings.configured:
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django_nose',
'markitup',
'sorl.thumbnail',
'pgallery',
)
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'pgallery',
'USER': 'pgallery',
'PASSWORD': 'pgallery',
'HOST': 'localhost',
}
},
INSTALLED_APPS=INSTALLED_APPS,
MARKITUP_FILTER=('markdown.markdown', {'safe_mode': False, 'extensions': ['codehilite']}),
MARKITUP_SET='markitup/sets/markdown',
)
from django_nose import NoseTestSuiteRunner
test_runner = NoseTestSuiteRunner()
test_runner.run_tests(['pgallery'])
|
import os
from django.conf import settings
os.environ['REUSE_DB'] = '1'
if not settings.configured:
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django_nose',
'markitup',
'sorl.thumbnail',
'pgallery',
)
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'pgallery',
'USER': os.environ.get('DATABASE_USER', 'pgallery'),
'PASSWORD': os.environ.get('DATABASE_PASSWORD', 'pgallery'),
'HOST': 'localhost',
}
},
INSTALLED_APPS=INSTALLED_APPS,
MARKITUP_FILTER=('markdown.markdown', {'safe_mode': False, 'extensions': ['codehilite']}),
MARKITUP_SET='markitup/sets/markdown',
)
from django_nose import NoseTestSuiteRunner
test_runner = NoseTestSuiteRunner()
test_runner.run_tests(['pgallery'])
|
Read database user and password from environment.
|
Read database user and password from environment.
|
Python
|
mit
|
zsiciarz/django-pgallery,zsiciarz/django-pgallery
|
import os
from django.conf import settings
os.environ['REUSE_DB'] = '1'
if not settings.configured:
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django_nose',
'markitup',
'sorl.thumbnail',
'pgallery',
)
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'pgallery',
'USER': 'pgallery',
'PASSWORD': 'pgallery',
'HOST': 'localhost',
}
},
INSTALLED_APPS=INSTALLED_APPS,
MARKITUP_FILTER=('markdown.markdown', {'safe_mode': False, 'extensions': ['codehilite']}),
MARKITUP_SET='markitup/sets/markdown',
)
from django_nose import NoseTestSuiteRunner
test_runner = NoseTestSuiteRunner()
test_runner.run_tests(['pgallery'])
Read database user and password from environment.
|
import os
from django.conf import settings
os.environ['REUSE_DB'] = '1'
if not settings.configured:
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django_nose',
'markitup',
'sorl.thumbnail',
'pgallery',
)
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'pgallery',
'USER': os.environ.get('DATABASE_USER', 'pgallery'),
'PASSWORD': os.environ.get('DATABASE_PASSWORD', 'pgallery'),
'HOST': 'localhost',
}
},
INSTALLED_APPS=INSTALLED_APPS,
MARKITUP_FILTER=('markdown.markdown', {'safe_mode': False, 'extensions': ['codehilite']}),
MARKITUP_SET='markitup/sets/markdown',
)
from django_nose import NoseTestSuiteRunner
test_runner = NoseTestSuiteRunner()
test_runner.run_tests(['pgallery'])
|
<commit_before>import os
from django.conf import settings
os.environ['REUSE_DB'] = '1'
if not settings.configured:
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django_nose',
'markitup',
'sorl.thumbnail',
'pgallery',
)
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'pgallery',
'USER': 'pgallery',
'PASSWORD': 'pgallery',
'HOST': 'localhost',
}
},
INSTALLED_APPS=INSTALLED_APPS,
MARKITUP_FILTER=('markdown.markdown', {'safe_mode': False, 'extensions': ['codehilite']}),
MARKITUP_SET='markitup/sets/markdown',
)
from django_nose import NoseTestSuiteRunner
test_runner = NoseTestSuiteRunner()
test_runner.run_tests(['pgallery'])
<commit_msg>Read database user and password from environment.<commit_after>
|
import os
from django.conf import settings
os.environ['REUSE_DB'] = '1'
if not settings.configured:
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django_nose',
'markitup',
'sorl.thumbnail',
'pgallery',
)
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'pgallery',
'USER': os.environ.get('DATABASE_USER', 'pgallery'),
'PASSWORD': os.environ.get('DATABASE_PASSWORD', 'pgallery'),
'HOST': 'localhost',
}
},
INSTALLED_APPS=INSTALLED_APPS,
MARKITUP_FILTER=('markdown.markdown', {'safe_mode': False, 'extensions': ['codehilite']}),
MARKITUP_SET='markitup/sets/markdown',
)
from django_nose import NoseTestSuiteRunner
test_runner = NoseTestSuiteRunner()
test_runner.run_tests(['pgallery'])
|
import os
from django.conf import settings
os.environ['REUSE_DB'] = '1'
if not settings.configured:
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django_nose',
'markitup',
'sorl.thumbnail',
'pgallery',
)
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'pgallery',
'USER': 'pgallery',
'PASSWORD': 'pgallery',
'HOST': 'localhost',
}
},
INSTALLED_APPS=INSTALLED_APPS,
MARKITUP_FILTER=('markdown.markdown', {'safe_mode': False, 'extensions': ['codehilite']}),
MARKITUP_SET='markitup/sets/markdown',
)
from django_nose import NoseTestSuiteRunner
test_runner = NoseTestSuiteRunner()
test_runner.run_tests(['pgallery'])
Read database user and password from environment.import os
from django.conf import settings
os.environ['REUSE_DB'] = '1'
if not settings.configured:
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django_nose',
'markitup',
'sorl.thumbnail',
'pgallery',
)
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'pgallery',
'USER': os.environ.get('DATABASE_USER', 'pgallery'),
'PASSWORD': os.environ.get('DATABASE_PASSWORD', 'pgallery'),
'HOST': 'localhost',
}
},
INSTALLED_APPS=INSTALLED_APPS,
MARKITUP_FILTER=('markdown.markdown', {'safe_mode': False, 'extensions': ['codehilite']}),
MARKITUP_SET='markitup/sets/markdown',
)
from django_nose import NoseTestSuiteRunner
test_runner = NoseTestSuiteRunner()
test_runner.run_tests(['pgallery'])
|
<commit_before>import os
from django.conf import settings
os.environ['REUSE_DB'] = '1'
if not settings.configured:
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django_nose',
'markitup',
'sorl.thumbnail',
'pgallery',
)
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'pgallery',
'USER': 'pgallery',
'PASSWORD': 'pgallery',
'HOST': 'localhost',
}
},
INSTALLED_APPS=INSTALLED_APPS,
MARKITUP_FILTER=('markdown.markdown', {'safe_mode': False, 'extensions': ['codehilite']}),
MARKITUP_SET='markitup/sets/markdown',
)
from django_nose import NoseTestSuiteRunner
test_runner = NoseTestSuiteRunner()
test_runner.run_tests(['pgallery'])
<commit_msg>Read database user and password from environment.<commit_after>import os
from django.conf import settings
os.environ['REUSE_DB'] = '1'
if not settings.configured:
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django_nose',
'markitup',
'sorl.thumbnail',
'pgallery',
)
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'pgallery',
'USER': os.environ.get('DATABASE_USER', 'pgallery'),
'PASSWORD': os.environ.get('DATABASE_PASSWORD', 'pgallery'),
'HOST': 'localhost',
}
},
INSTALLED_APPS=INSTALLED_APPS,
MARKITUP_FILTER=('markdown.markdown', {'safe_mode': False, 'extensions': ['codehilite']}),
MARKITUP_SET='markitup/sets/markdown',
)
from django_nose import NoseTestSuiteRunner
test_runner = NoseTestSuiteRunner()
test_runner.run_tests(['pgallery'])
|
29041cdaf3beca926f1dff1d3f147b7dc07ad8dd
|
pylp/cli/run.py
|
pylp/cli/run.py
|
"""
Run a pylpfile.
Copyright (C) 2017 The Pylp Authors.
This file is under the MIT License.
"""
import runpy, os, sys
import traceback
import asyncio
import pylp, pylp.cli.logger as logger
# Run a pylpfile
def run(path, tasks):
# Test if the pylpfile exists
if not os.path.isfile(path):
logger.log(logger.red("Can't read pylpfile "), logger.magenta(path))
sys.exit(-1)
else:
logger.log("Using pylpfile ", logger.magenta(path))
# Run the pylpfile
try:
runpy.run_path(path, None, "pylpfile")
except Exception as e:
traceback.print_exc(file=sys.stdout)
logger.log(logger.red("\nAn error has occurred during the execution of the pylpfile"))
sys.exit(-1)
# Start the tasks
for name in tasks:
pylp.start(name)
# Wait until all task are executed
loop = asyncio.get_event_loop()
loop.run_until_complete(wait_and_quit(loop))
# Wait until all task are executed
async def wait_and_quit(loop):
from pylp.lib.tasks import running
if running:
await asyncio.wait(map(lambda runner: runner.future, running))
|
"""
Run a pylpfile.
Copyright (C) 2017 The Pylp Authors.
This file is under the MIT License.
"""
import runpy, os, sys
import traceback
import asyncio
import pylp
import pylp.cli.logger as logger
from pylp.utils.paths import make_readable_path
# Run a pylpfile
def run(path, tasks):
# Test if the pylpfile exists
readable_path = make_readable_path(path)
if not os.path.isfile(path):
logger.log(logger.red("Can't read pylpfile "), logger.magenta(readable_path))
sys.exit(-1)
else:
logger.log("Using pylpfile ", logger.magenta(readable_path))
# Run the pylpfile
try:
runpy.run_path(path, None, "pylpfile")
except Exception as e:
traceback.print_exc(file=sys.stdout)
logger.log(logger.red("\nAn error has occurred during the execution of the pylpfile"))
sys.exit(-1)
# Start the tasks
for name in tasks:
pylp.start(name)
# Wait until all task are executed
loop = asyncio.get_event_loop()
loop.run_until_complete(wait_and_quit(loop))
# Wait until all task are executed
async def wait_and_quit(loop):
from pylp.lib.tasks import running
if running:
await asyncio.wait(map(lambda runner: runner.future, running))
|
Make pylpfile path more readable
|
Make pylpfile path more readable
|
Python
|
mit
|
pylp/pylp
|
"""
Run a pylpfile.
Copyright (C) 2017 The Pylp Authors.
This file is under the MIT License.
"""
import runpy, os, sys
import traceback
import asyncio
import pylp, pylp.cli.logger as logger
# Run a pylpfile
def run(path, tasks):
# Test if the pylpfile exists
if not os.path.isfile(path):
logger.log(logger.red("Can't read pylpfile "), logger.magenta(path))
sys.exit(-1)
else:
logger.log("Using pylpfile ", logger.magenta(path))
# Run the pylpfile
try:
runpy.run_path(path, None, "pylpfile")
except Exception as e:
traceback.print_exc(file=sys.stdout)
logger.log(logger.red("\nAn error has occurred during the execution of the pylpfile"))
sys.exit(-1)
# Start the tasks
for name in tasks:
pylp.start(name)
# Wait until all task are executed
loop = asyncio.get_event_loop()
loop.run_until_complete(wait_and_quit(loop))
# Wait until all task are executed
async def wait_and_quit(loop):
from pylp.lib.tasks import running
if running:
await asyncio.wait(map(lambda runner: runner.future, running))
Make pylpfile path more readable
|
"""
Run a pylpfile.
Copyright (C) 2017 The Pylp Authors.
This file is under the MIT License.
"""
import runpy, os, sys
import traceback
import asyncio
import pylp
import pylp.cli.logger as logger
from pylp.utils.paths import make_readable_path
# Run a pylpfile
def run(path, tasks):
# Test if the pylpfile exists
readable_path = make_readable_path(path)
if not os.path.isfile(path):
logger.log(logger.red("Can't read pylpfile "), logger.magenta(readable_path))
sys.exit(-1)
else:
logger.log("Using pylpfile ", logger.magenta(readable_path))
# Run the pylpfile
try:
runpy.run_path(path, None, "pylpfile")
except Exception as e:
traceback.print_exc(file=sys.stdout)
logger.log(logger.red("\nAn error has occurred during the execution of the pylpfile"))
sys.exit(-1)
# Start the tasks
for name in tasks:
pylp.start(name)
# Wait until all task are executed
loop = asyncio.get_event_loop()
loop.run_until_complete(wait_and_quit(loop))
# Wait until all task are executed
async def wait_and_quit(loop):
from pylp.lib.tasks import running
if running:
await asyncio.wait(map(lambda runner: runner.future, running))
|
<commit_before>"""
Run a pylpfile.
Copyright (C) 2017 The Pylp Authors.
This file is under the MIT License.
"""
import runpy, os, sys
import traceback
import asyncio
import pylp, pylp.cli.logger as logger
# Run a pylpfile
def run(path, tasks):
# Test if the pylpfile exists
if not os.path.isfile(path):
logger.log(logger.red("Can't read pylpfile "), logger.magenta(path))
sys.exit(-1)
else:
logger.log("Using pylpfile ", logger.magenta(path))
# Run the pylpfile
try:
runpy.run_path(path, None, "pylpfile")
except Exception as e:
traceback.print_exc(file=sys.stdout)
logger.log(logger.red("\nAn error has occurred during the execution of the pylpfile"))
sys.exit(-1)
# Start the tasks
for name in tasks:
pylp.start(name)
# Wait until all task are executed
loop = asyncio.get_event_loop()
loop.run_until_complete(wait_and_quit(loop))
# Wait until all task are executed
async def wait_and_quit(loop):
from pylp.lib.tasks import running
if running:
await asyncio.wait(map(lambda runner: runner.future, running))
<commit_msg>Make pylpfile path more readable<commit_after>
|
"""
Run a pylpfile.
Copyright (C) 2017 The Pylp Authors.
This file is under the MIT License.
"""
import runpy, os, sys
import traceback
import asyncio
import pylp
import pylp.cli.logger as logger
from pylp.utils.paths import make_readable_path
# Run a pylpfile
def run(path, tasks):
# Test if the pylpfile exists
readable_path = make_readable_path(path)
if not os.path.isfile(path):
logger.log(logger.red("Can't read pylpfile "), logger.magenta(readable_path))
sys.exit(-1)
else:
logger.log("Using pylpfile ", logger.magenta(readable_path))
# Run the pylpfile
try:
runpy.run_path(path, None, "pylpfile")
except Exception as e:
traceback.print_exc(file=sys.stdout)
logger.log(logger.red("\nAn error has occurred during the execution of the pylpfile"))
sys.exit(-1)
# Start the tasks
for name in tasks:
pylp.start(name)
# Wait until all task are executed
loop = asyncio.get_event_loop()
loop.run_until_complete(wait_and_quit(loop))
# Wait until all task are executed
async def wait_and_quit(loop):
from pylp.lib.tasks import running
if running:
await asyncio.wait(map(lambda runner: runner.future, running))
|
"""
Run a pylpfile.
Copyright (C) 2017 The Pylp Authors.
This file is under the MIT License.
"""
import runpy, os, sys
import traceback
import asyncio
import pylp, pylp.cli.logger as logger
# Run a pylpfile
def run(path, tasks):
# Test if the pylpfile exists
if not os.path.isfile(path):
logger.log(logger.red("Can't read pylpfile "), logger.magenta(path))
sys.exit(-1)
else:
logger.log("Using pylpfile ", logger.magenta(path))
# Run the pylpfile
try:
runpy.run_path(path, None, "pylpfile")
except Exception as e:
traceback.print_exc(file=sys.stdout)
logger.log(logger.red("\nAn error has occurred during the execution of the pylpfile"))
sys.exit(-1)
# Start the tasks
for name in tasks:
pylp.start(name)
# Wait until all task are executed
loop = asyncio.get_event_loop()
loop.run_until_complete(wait_and_quit(loop))
# Wait until all task are executed
async def wait_and_quit(loop):
from pylp.lib.tasks import running
if running:
await asyncio.wait(map(lambda runner: runner.future, running))
Make pylpfile path more readable"""
Run a pylpfile.
Copyright (C) 2017 The Pylp Authors.
This file is under the MIT License.
"""
import runpy, os, sys
import traceback
import asyncio
import pylp
import pylp.cli.logger as logger
from pylp.utils.paths import make_readable_path
# Run a pylpfile
def run(path, tasks):
# Test if the pylpfile exists
readable_path = make_readable_path(path)
if not os.path.isfile(path):
logger.log(logger.red("Can't read pylpfile "), logger.magenta(readable_path))
sys.exit(-1)
else:
logger.log("Using pylpfile ", logger.magenta(readable_path))
# Run the pylpfile
try:
runpy.run_path(path, None, "pylpfile")
except Exception as e:
traceback.print_exc(file=sys.stdout)
logger.log(logger.red("\nAn error has occurred during the execution of the pylpfile"))
sys.exit(-1)
# Start the tasks
for name in tasks:
pylp.start(name)
# Wait until all task are executed
loop = asyncio.get_event_loop()
loop.run_until_complete(wait_and_quit(loop))
# Wait until all task are executed
async def wait_and_quit(loop):
from pylp.lib.tasks import running
if running:
await asyncio.wait(map(lambda runner: runner.future, running))
|
<commit_before>"""
Run a pylpfile.
Copyright (C) 2017 The Pylp Authors.
This file is under the MIT License.
"""
import runpy, os, sys
import traceback
import asyncio
import pylp, pylp.cli.logger as logger
# Run a pylpfile
def run(path, tasks):
# Test if the pylpfile exists
if not os.path.isfile(path):
logger.log(logger.red("Can't read pylpfile "), logger.magenta(path))
sys.exit(-1)
else:
logger.log("Using pylpfile ", logger.magenta(path))
# Run the pylpfile
try:
runpy.run_path(path, None, "pylpfile")
except Exception as e:
traceback.print_exc(file=sys.stdout)
logger.log(logger.red("\nAn error has occurred during the execution of the pylpfile"))
sys.exit(-1)
# Start the tasks
for name in tasks:
pylp.start(name)
# Wait until all task are executed
loop = asyncio.get_event_loop()
loop.run_until_complete(wait_and_quit(loop))
# Wait until all task are executed
async def wait_and_quit(loop):
from pylp.lib.tasks import running
if running:
await asyncio.wait(map(lambda runner: runner.future, running))
<commit_msg>Make pylpfile path more readable<commit_after>"""
Run a pylpfile.
Copyright (C) 2017 The Pylp Authors.
This file is under the MIT License.
"""
import runpy, os, sys
import traceback
import asyncio
import pylp
import pylp.cli.logger as logger
from pylp.utils.paths import make_readable_path
# Run a pylpfile
def run(path, tasks):
# Test if the pylpfile exists
readable_path = make_readable_path(path)
if not os.path.isfile(path):
logger.log(logger.red("Can't read pylpfile "), logger.magenta(readable_path))
sys.exit(-1)
else:
logger.log("Using pylpfile ", logger.magenta(readable_path))
# Run the pylpfile
try:
runpy.run_path(path, None, "pylpfile")
except Exception as e:
traceback.print_exc(file=sys.stdout)
logger.log(logger.red("\nAn error has occurred during the execution of the pylpfile"))
sys.exit(-1)
# Start the tasks
for name in tasks:
pylp.start(name)
# Wait until all task are executed
loop = asyncio.get_event_loop()
loop.run_until_complete(wait_and_quit(loop))
# Wait until all task are executed
async def wait_and_quit(loop):
from pylp.lib.tasks import running
if running:
await asyncio.wait(map(lambda runner: runner.future, running))
|
90f851b6af1f558cdfb8d5d69b65742effdbdb81
|
uchicagohvz/production_settings.py
|
uchicagohvz/production_settings.py
|
from local_settings import *
DEBUG = False
ALLOWED_HOSTS = ['uchicagohvz.org']
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = True
|
from local_settings import *
DEBUG = False
ALLOWED_HOSTS = ['uchicagohvz.org']
ADMINS = (
('Administrator', 'admin@uchicagohvz.org'),
)
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = True
|
Add ADMINS to production settings
|
Add ADMINS to production settings
|
Python
|
mit
|
kz26/uchicago-hvz,kz26/uchicago-hvz,kz26/uchicago-hvz
|
from local_settings import *
DEBUG = False
ALLOWED_HOSTS = ['uchicagohvz.org']
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = TrueAdd ADMINS to production settings
|
from local_settings import *
DEBUG = False
ALLOWED_HOSTS = ['uchicagohvz.org']
ADMINS = (
('Administrator', 'admin@uchicagohvz.org'),
)
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = True
|
<commit_before>from local_settings import *
DEBUG = False
ALLOWED_HOSTS = ['uchicagohvz.org']
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = True<commit_msg>Add ADMINS to production settings<commit_after>
|
from local_settings import *
DEBUG = False
ALLOWED_HOSTS = ['uchicagohvz.org']
ADMINS = (
('Administrator', 'admin@uchicagohvz.org'),
)
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = True
|
from local_settings import *
DEBUG = False
ALLOWED_HOSTS = ['uchicagohvz.org']
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = TrueAdd ADMINS to production settingsfrom local_settings import *
DEBUG = False
ALLOWED_HOSTS = ['uchicagohvz.org']
ADMINS = (
('Administrator', 'admin@uchicagohvz.org'),
)
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = True
|
<commit_before>from local_settings import *
DEBUG = False
ALLOWED_HOSTS = ['uchicagohvz.org']
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = True<commit_msg>Add ADMINS to production settings<commit_after>from local_settings import *
DEBUG = False
ALLOWED_HOSTS = ['uchicagohvz.org']
ADMINS = (
('Administrator', 'admin@uchicagohvz.org'),
)
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = True
|
0db7179ac218d9f84298ce3277b13f591d2a4f07
|
troposphere/datapipeline.py
|
troposphere/datapipeline.py
|
from . import AWSObject, AWSProperty, Ref
from .validators import boolean
class ParameterObjectAttribute(AWSProperty):
props = {
'Key': (basestring, True),
'StringValue': (basestring, False),
}
class ParameterObject(AWSProperty):
props = {
'Attributes': ([ParameterObjectAttribute], True),
'Id': (basestring, True),
}
class ParameterValue(AWSProperty):
props = {
'Id': (basestring, True),
'StringValue': (basestring, True),
}
class ObjectField(AWSProperty):
props = {
'Key': (basestring, True),
'RefValue': ([basestring, Ref], False),
'StringValue': ([basestring, Ref], False),
}
class PipelineObject(AWSProperty):
props = {
'Fields': ([ObjectField], True),
'Id': (basestring, True),
'Name': (basestring, True),
}
class PipelineTag(AWSProperty):
props = {
'Key': (basestring, True),
'Value': (basestring, True),
}
class Pipeline(AWSObject):
resource_type = "AWS::DataPipeline::Pipeline"
props = {
'Activate': (boolean, False),
'Description': (basestring, False),
'Name': (basestring, True),
'ParameterObjects': ([ParameterObject], False),
'ParameterValues': ([ParameterValue], False),
'PipelineObjects': ([PipelineObject], True),
'PipelineTags': ([PipelineTag], False),
}
|
from . import AWSObject, AWSProperty, Ref
from .validators import boolean
class ParameterObjectAttribute(AWSProperty):
props = {
'Key': (basestring, True),
'StringValue': (basestring, False),
}
class ParameterObject(AWSProperty):
props = {
'Attributes': ([ParameterObjectAttribute], True),
'Id': (basestring, True),
}
class ParameterValue(AWSProperty):
props = {
'Id': (basestring, True),
'StringValue': (basestring, True),
}
class ObjectField(AWSProperty):
props = {
'Key': (basestring, True),
'RefValue': ([basestring, Ref], False),
'StringValue': (basestring, False),
}
class PipelineObject(AWSProperty):
props = {
'Fields': ([ObjectField], True),
'Id': (basestring, True),
'Name': (basestring, True),
}
class PipelineTag(AWSProperty):
props = {
'Key': (basestring, True),
'Value': (basestring, True),
}
class Pipeline(AWSObject):
resource_type = "AWS::DataPipeline::Pipeline"
props = {
'Activate': (boolean, False),
'Description': (basestring, False),
'Name': (basestring, True),
'ParameterObjects': ([ParameterObject], False),
'ParameterValues': ([ParameterValue], False),
'PipelineObjects': ([PipelineObject], True),
'PipelineTags': ([PipelineTag], False),
}
|
Revert "Allow Ref in StringValue"
|
Revert "Allow Ref in StringValue"
This reverts commit a1957fb04118ce13f2cb37a52bc93718eed0ae41.
|
Python
|
bsd-2-clause
|
kid/troposphere,dmm92/troposphere,7digital/troposphere,inetCatapult/troposphere,mannytoledo/troposphere,Yipit/troposphere,ccortezb/troposphere,alonsodomin/troposphere,wangqiang8511/troposphere,ptoraskar/troposphere,alonsodomin/troposphere,amosshapira/troposphere,cloudtools/troposphere,xxxVxxx/troposphere,johnctitus/troposphere,jdc0589/troposphere,dmm92/troposphere,yxd-hde/troposphere,7digital/troposphere,garnaat/troposphere,WeAreCloudar/troposphere,craigbruce/troposphere,cloudtools/troposphere,johnctitus/troposphere,cryptickp/troposphere,horacio3/troposphere,pas256/troposphere,ikben/troposphere,mhahn/troposphere,pas256/troposphere,LouTheBrew/troposphere,unravelin/troposphere,horacio3/troposphere,micahhausler/troposphere,samcrang/troposphere,ikben/troposphere,nicolaka/troposphere
|
from . import AWSObject, AWSProperty, Ref
from .validators import boolean
class ParameterObjectAttribute(AWSProperty):
props = {
'Key': (basestring, True),
'StringValue': (basestring, False),
}
class ParameterObject(AWSProperty):
props = {
'Attributes': ([ParameterObjectAttribute], True),
'Id': (basestring, True),
}
class ParameterValue(AWSProperty):
props = {
'Id': (basestring, True),
'StringValue': (basestring, True),
}
class ObjectField(AWSProperty):
props = {
'Key': (basestring, True),
'RefValue': ([basestring, Ref], False),
'StringValue': ([basestring, Ref], False),
}
class PipelineObject(AWSProperty):
props = {
'Fields': ([ObjectField], True),
'Id': (basestring, True),
'Name': (basestring, True),
}
class PipelineTag(AWSProperty):
props = {
'Key': (basestring, True),
'Value': (basestring, True),
}
class Pipeline(AWSObject):
resource_type = "AWS::DataPipeline::Pipeline"
props = {
'Activate': (boolean, False),
'Description': (basestring, False),
'Name': (basestring, True),
'ParameterObjects': ([ParameterObject], False),
'ParameterValues': ([ParameterValue], False),
'PipelineObjects': ([PipelineObject], True),
'PipelineTags': ([PipelineTag], False),
}
Revert "Allow Ref in StringValue"
This reverts commit a1957fb04118ce13f2cb37a52bc93718eed0ae41.
|
from . import AWSObject, AWSProperty, Ref
from .validators import boolean
class ParameterObjectAttribute(AWSProperty):
props = {
'Key': (basestring, True),
'StringValue': (basestring, False),
}
class ParameterObject(AWSProperty):
props = {
'Attributes': ([ParameterObjectAttribute], True),
'Id': (basestring, True),
}
class ParameterValue(AWSProperty):
props = {
'Id': (basestring, True),
'StringValue': (basestring, True),
}
class ObjectField(AWSProperty):
props = {
'Key': (basestring, True),
'RefValue': ([basestring, Ref], False),
'StringValue': (basestring, False),
}
class PipelineObject(AWSProperty):
props = {
'Fields': ([ObjectField], True),
'Id': (basestring, True),
'Name': (basestring, True),
}
class PipelineTag(AWSProperty):
props = {
'Key': (basestring, True),
'Value': (basestring, True),
}
class Pipeline(AWSObject):
resource_type = "AWS::DataPipeline::Pipeline"
props = {
'Activate': (boolean, False),
'Description': (basestring, False),
'Name': (basestring, True),
'ParameterObjects': ([ParameterObject], False),
'ParameterValues': ([ParameterValue], False),
'PipelineObjects': ([PipelineObject], True),
'PipelineTags': ([PipelineTag], False),
}
|
<commit_before>from . import AWSObject, AWSProperty, Ref
from .validators import boolean
class ParameterObjectAttribute(AWSProperty):
props = {
'Key': (basestring, True),
'StringValue': (basestring, False),
}
class ParameterObject(AWSProperty):
props = {
'Attributes': ([ParameterObjectAttribute], True),
'Id': (basestring, True),
}
class ParameterValue(AWSProperty):
props = {
'Id': (basestring, True),
'StringValue': (basestring, True),
}
class ObjectField(AWSProperty):
props = {
'Key': (basestring, True),
'RefValue': ([basestring, Ref], False),
'StringValue': ([basestring, Ref], False),
}
class PipelineObject(AWSProperty):
props = {
'Fields': ([ObjectField], True),
'Id': (basestring, True),
'Name': (basestring, True),
}
class PipelineTag(AWSProperty):
props = {
'Key': (basestring, True),
'Value': (basestring, True),
}
class Pipeline(AWSObject):
resource_type = "AWS::DataPipeline::Pipeline"
props = {
'Activate': (boolean, False),
'Description': (basestring, False),
'Name': (basestring, True),
'ParameterObjects': ([ParameterObject], False),
'ParameterValues': ([ParameterValue], False),
'PipelineObjects': ([PipelineObject], True),
'PipelineTags': ([PipelineTag], False),
}
<commit_msg>Revert "Allow Ref in StringValue"
This reverts commit a1957fb04118ce13f2cb37a52bc93718eed0ae41.<commit_after>
|
from . import AWSObject, AWSProperty, Ref
from .validators import boolean
class ParameterObjectAttribute(AWSProperty):
props = {
'Key': (basestring, True),
'StringValue': (basestring, False),
}
class ParameterObject(AWSProperty):
props = {
'Attributes': ([ParameterObjectAttribute], True),
'Id': (basestring, True),
}
class ParameterValue(AWSProperty):
props = {
'Id': (basestring, True),
'StringValue': (basestring, True),
}
class ObjectField(AWSProperty):
props = {
'Key': (basestring, True),
'RefValue': ([basestring, Ref], False),
'StringValue': (basestring, False),
}
class PipelineObject(AWSProperty):
props = {
'Fields': ([ObjectField], True),
'Id': (basestring, True),
'Name': (basestring, True),
}
class PipelineTag(AWSProperty):
props = {
'Key': (basestring, True),
'Value': (basestring, True),
}
class Pipeline(AWSObject):
resource_type = "AWS::DataPipeline::Pipeline"
props = {
'Activate': (boolean, False),
'Description': (basestring, False),
'Name': (basestring, True),
'ParameterObjects': ([ParameterObject], False),
'ParameterValues': ([ParameterValue], False),
'PipelineObjects': ([PipelineObject], True),
'PipelineTags': ([PipelineTag], False),
}
|
from . import AWSObject, AWSProperty, Ref
from .validators import boolean
class ParameterObjectAttribute(AWSProperty):
props = {
'Key': (basestring, True),
'StringValue': (basestring, False),
}
class ParameterObject(AWSProperty):
props = {
'Attributes': ([ParameterObjectAttribute], True),
'Id': (basestring, True),
}
class ParameterValue(AWSProperty):
props = {
'Id': (basestring, True),
'StringValue': (basestring, True),
}
class ObjectField(AWSProperty):
props = {
'Key': (basestring, True),
'RefValue': ([basestring, Ref], False),
'StringValue': ([basestring, Ref], False),
}
class PipelineObject(AWSProperty):
props = {
'Fields': ([ObjectField], True),
'Id': (basestring, True),
'Name': (basestring, True),
}
class PipelineTag(AWSProperty):
props = {
'Key': (basestring, True),
'Value': (basestring, True),
}
class Pipeline(AWSObject):
resource_type = "AWS::DataPipeline::Pipeline"
props = {
'Activate': (boolean, False),
'Description': (basestring, False),
'Name': (basestring, True),
'ParameterObjects': ([ParameterObject], False),
'ParameterValues': ([ParameterValue], False),
'PipelineObjects': ([PipelineObject], True),
'PipelineTags': ([PipelineTag], False),
}
Revert "Allow Ref in StringValue"
This reverts commit a1957fb04118ce13f2cb37a52bc93718eed0ae41.from . import AWSObject, AWSProperty, Ref
from .validators import boolean
class ParameterObjectAttribute(AWSProperty):
props = {
'Key': (basestring, True),
'StringValue': (basestring, False),
}
class ParameterObject(AWSProperty):
props = {
'Attributes': ([ParameterObjectAttribute], True),
'Id': (basestring, True),
}
class ParameterValue(AWSProperty):
props = {
'Id': (basestring, True),
'StringValue': (basestring, True),
}
class ObjectField(AWSProperty):
props = {
'Key': (basestring, True),
'RefValue': ([basestring, Ref], False),
'StringValue': (basestring, False),
}
class PipelineObject(AWSProperty):
props = {
'Fields': ([ObjectField], True),
'Id': (basestring, True),
'Name': (basestring, True),
}
class PipelineTag(AWSProperty):
props = {
'Key': (basestring, True),
'Value': (basestring, True),
}
class Pipeline(AWSObject):
resource_type = "AWS::DataPipeline::Pipeline"
props = {
'Activate': (boolean, False),
'Description': (basestring, False),
'Name': (basestring, True),
'ParameterObjects': ([ParameterObject], False),
'ParameterValues': ([ParameterValue], False),
'PipelineObjects': ([PipelineObject], True),
'PipelineTags': ([PipelineTag], False),
}
|
<commit_before>from . import AWSObject, AWSProperty, Ref
from .validators import boolean
class ParameterObjectAttribute(AWSProperty):
props = {
'Key': (basestring, True),
'StringValue': (basestring, False),
}
class ParameterObject(AWSProperty):
props = {
'Attributes': ([ParameterObjectAttribute], True),
'Id': (basestring, True),
}
class ParameterValue(AWSProperty):
props = {
'Id': (basestring, True),
'StringValue': (basestring, True),
}
class ObjectField(AWSProperty):
props = {
'Key': (basestring, True),
'RefValue': ([basestring, Ref], False),
'StringValue': ([basestring, Ref], False),
}
class PipelineObject(AWSProperty):
props = {
'Fields': ([ObjectField], True),
'Id': (basestring, True),
'Name': (basestring, True),
}
class PipelineTag(AWSProperty):
props = {
'Key': (basestring, True),
'Value': (basestring, True),
}
class Pipeline(AWSObject):
resource_type = "AWS::DataPipeline::Pipeline"
props = {
'Activate': (boolean, False),
'Description': (basestring, False),
'Name': (basestring, True),
'ParameterObjects': ([ParameterObject], False),
'ParameterValues': ([ParameterValue], False),
'PipelineObjects': ([PipelineObject], True),
'PipelineTags': ([PipelineTag], False),
}
<commit_msg>Revert "Allow Ref in StringValue"
This reverts commit a1957fb04118ce13f2cb37a52bc93718eed0ae41.<commit_after>from . import AWSObject, AWSProperty, Ref
from .validators import boolean
class ParameterObjectAttribute(AWSProperty):
props = {
'Key': (basestring, True),
'StringValue': (basestring, False),
}
class ParameterObject(AWSProperty):
props = {
'Attributes': ([ParameterObjectAttribute], True),
'Id': (basestring, True),
}
class ParameterValue(AWSProperty):
props = {
'Id': (basestring, True),
'StringValue': (basestring, True),
}
class ObjectField(AWSProperty):
props = {
'Key': (basestring, True),
'RefValue': ([basestring, Ref], False),
'StringValue': (basestring, False),
}
class PipelineObject(AWSProperty):
props = {
'Fields': ([ObjectField], True),
'Id': (basestring, True),
'Name': (basestring, True),
}
class PipelineTag(AWSProperty):
props = {
'Key': (basestring, True),
'Value': (basestring, True),
}
class Pipeline(AWSObject):
resource_type = "AWS::DataPipeline::Pipeline"
props = {
'Activate': (boolean, False),
'Description': (basestring, False),
'Name': (basestring, True),
'ParameterObjects': ([ParameterObject], False),
'ParameterValues': ([ParameterValue], False),
'PipelineObjects': ([PipelineObject], True),
'PipelineTags': ([PipelineTag], False),
}
|
4dcb0a9860b654a08839a61f5e67af69771de39c
|
tests/test_slow_requests.py
|
tests/test_slow_requests.py
|
import datetime
import dnstwister.tools
def test2():
"""Looooong domain names highlighted that the idna decoding is slooooow.
This is a basic benchmark for performance.
"""
domain = 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.zzzzzzzzzzzzzzzzzzzzzzzzzppieo.com'
start = datetime.datetime.now()
dnstwister.tools.fuzzy_domains(domain)
duration = (datetime.datetime.now() - start).total_seconds()
assert duration < 5, 'duration too long: {} secs'.format(duration)
|
import datetime
import dnstwister.tools
def test2():
"""Looooong domain names highlighted that the idna decoding is slooooow.
This is a basic benchmark for performance, based on a bot's behaviour
recently.
"""
domain = 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.zzzzzzzzzzzzzzzzzzzzzzzzzppieo.com'
start = datetime.datetime.now()
dnstwister.tools.fuzzy_domains(domain)
duration = (datetime.datetime.now() - start).total_seconds()
assert duration < 7, 'duration too long: {} secs'.format(duration)
|
Test threshold increased because the Travis server is a bit slower :)
|
Test threshold increased because the Travis server is a bit slower :)
|
Python
|
unlicense
|
thisismyrobot/dnstwister,thisismyrobot/dnstwister,thisismyrobot/dnstwister
|
import datetime
import dnstwister.tools
def test2():
"""Looooong domain names highlighted that the idna decoding is slooooow.
This is a basic benchmark for performance.
"""
domain = 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.zzzzzzzzzzzzzzzzzzzzzzzzzppieo.com'
start = datetime.datetime.now()
dnstwister.tools.fuzzy_domains(domain)
duration = (datetime.datetime.now() - start).total_seconds()
assert duration < 5, 'duration too long: {} secs'.format(duration)
Test threshold increased because the Travis server is a bit slower :)
|
import datetime
import dnstwister.tools
def test2():
"""Looooong domain names highlighted that the idna decoding is slooooow.
This is a basic benchmark for performance, based on a bot's behaviour
recently.
"""
domain = 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.zzzzzzzzzzzzzzzzzzzzzzzzzppieo.com'
start = datetime.datetime.now()
dnstwister.tools.fuzzy_domains(domain)
duration = (datetime.datetime.now() - start).total_seconds()
assert duration < 7, 'duration too long: {} secs'.format(duration)
|
<commit_before>import datetime
import dnstwister.tools
def test2():
"""Looooong domain names highlighted that the idna decoding is slooooow.
This is a basic benchmark for performance.
"""
domain = 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.zzzzzzzzzzzzzzzzzzzzzzzzzppieo.com'
start = datetime.datetime.now()
dnstwister.tools.fuzzy_domains(domain)
duration = (datetime.datetime.now() - start).total_seconds()
assert duration < 5, 'duration too long: {} secs'.format(duration)
<commit_msg>Test threshold increased because the Travis server is a bit slower :)<commit_after>
|
import datetime
import dnstwister.tools
def test2():
"""Looooong domain names highlighted that the idna decoding is slooooow.
This is a basic benchmark for performance, based on a bot's behaviour
recently.
"""
domain = 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.zzzzzzzzzzzzzzzzzzzzzzzzzppieo.com'
start = datetime.datetime.now()
dnstwister.tools.fuzzy_domains(domain)
duration = (datetime.datetime.now() - start).total_seconds()
assert duration < 7, 'duration too long: {} secs'.format(duration)
|
import datetime
import dnstwister.tools
def test2():
"""Looooong domain names highlighted that the idna decoding is slooooow.
This is a basic benchmark for performance.
"""
domain = 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.zzzzzzzzzzzzzzzzzzzzzzzzzppieo.com'
start = datetime.datetime.now()
dnstwister.tools.fuzzy_domains(domain)
duration = (datetime.datetime.now() - start).total_seconds()
assert duration < 5, 'duration too long: {} secs'.format(duration)
Test threshold increased because the Travis server is a bit slower :)import datetime
import dnstwister.tools
def test2():
"""Looooong domain names highlighted that the idna decoding is slooooow.
This is a basic benchmark for performance, based on a bot's behaviour
recently.
"""
domain = 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.zzzzzzzzzzzzzzzzzzzzzzzzzppieo.com'
start = datetime.datetime.now()
dnstwister.tools.fuzzy_domains(domain)
duration = (datetime.datetime.now() - start).total_seconds()
assert duration < 7, 'duration too long: {} secs'.format(duration)
|
<commit_before>import datetime
import dnstwister.tools
def test2():
"""Looooong domain names highlighted that the idna decoding is slooooow.
This is a basic benchmark for performance.
"""
domain = 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.zzzzzzzzzzzzzzzzzzzzzzzzzppieo.com'
start = datetime.datetime.now()
dnstwister.tools.fuzzy_domains(domain)
duration = (datetime.datetime.now() - start).total_seconds()
assert duration < 5, 'duration too long: {} secs'.format(duration)
<commit_msg>Test threshold increased because the Travis server is a bit slower :)<commit_after>import datetime
import dnstwister.tools
def test2():
"""Looooong domain names highlighted that the idna decoding is slooooow.
This is a basic benchmark for performance, based on a bot's behaviour
recently.
"""
domain = 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.zzzzzzzzzzzzzzzzzzzzzzzzzppieo.com'
start = datetime.datetime.now()
dnstwister.tools.fuzzy_domains(domain)
duration = (datetime.datetime.now() - start).total_seconds()
assert duration < 7, 'duration too long: {} secs'.format(duration)
|
7aab7b17858fb307e8e4f136038e4448be449f9e
|
runtests.py
|
runtests.py
|
# Unit test driver.
import os
import sys
from unittest import TestLoader, TestSuite, TextTestRunner
topdir = os.path.split(os.path.abspath(__file__))[0]
os.chdir(topdir)
loader = TestLoader()
if sys.version_info[:2] < (3, 0):
tests = loader.discover('.', 'test_*.py')
elif sys.version_info[:2] > (3, 2):
tests = TestSuite()
tests.addTests(loader.discover('.', 'test_marshal.py'))
tests.addTests(loader.discover('.', 'test_message.py'))
else:
tests = TestSuite()
runner = TextTestRunner(verbosity=1, buffer=True)
runner.run(tests)
|
# Unit test driver.
import os
import sys
from unittest import TestLoader, TestSuite, TextTestRunner
topdir = os.path.split(os.path.abspath(__file__))[0]
os.chdir(topdir)
loader = TestLoader()
if sys.version_info < (3, 0):
tests = loader.discover('.', 'test_*.py')
elif sys.version_info > (3, 2):
tests = TestSuite()
tests.addTests(loader.discover('.', 'test_marshal.py'))
tests.addTests(loader.discover('.', 'test_message.py'))
else:
tests = TestSuite()
runner = TextTestRunner(verbosity=2, buffer=True)
result = runner.run(tests)
sys.exit(not result.wasSuccessful())
|
Exit non-zero if tests fail
|
Exit non-zero if tests fail
runtests.py was exiting 0 if test either failed or passed (which
confused tox, travis, etc).
Determine the status code based on the success of the test suite run.
|
Python
|
mit
|
cocagne/txdbus
|
# Unit test driver.
import os
import sys
from unittest import TestLoader, TestSuite, TextTestRunner
topdir = os.path.split(os.path.abspath(__file__))[0]
os.chdir(topdir)
loader = TestLoader()
if sys.version_info[:2] < (3, 0):
tests = loader.discover('.', 'test_*.py')
elif sys.version_info[:2] > (3, 2):
tests = TestSuite()
tests.addTests(loader.discover('.', 'test_marshal.py'))
tests.addTests(loader.discover('.', 'test_message.py'))
else:
tests = TestSuite()
runner = TextTestRunner(verbosity=1, buffer=True)
runner.run(tests)
Exit non-zero if tests fail
runtests.py was exiting 0 if test either failed or passed (which
confused tox, travis, etc).
Determine the status code based on the success of the test suite run.
|
# Unit test driver.
import os
import sys
from unittest import TestLoader, TestSuite, TextTestRunner
topdir = os.path.split(os.path.abspath(__file__))[0]
os.chdir(topdir)
loader = TestLoader()
if sys.version_info < (3, 0):
tests = loader.discover('.', 'test_*.py')
elif sys.version_info > (3, 2):
tests = TestSuite()
tests.addTests(loader.discover('.', 'test_marshal.py'))
tests.addTests(loader.discover('.', 'test_message.py'))
else:
tests = TestSuite()
runner = TextTestRunner(verbosity=2, buffer=True)
result = runner.run(tests)
sys.exit(not result.wasSuccessful())
|
<commit_before># Unit test driver.
import os
import sys
from unittest import TestLoader, TestSuite, TextTestRunner
topdir = os.path.split(os.path.abspath(__file__))[0]
os.chdir(topdir)
loader = TestLoader()
if sys.version_info[:2] < (3, 0):
tests = loader.discover('.', 'test_*.py')
elif sys.version_info[:2] > (3, 2):
tests = TestSuite()
tests.addTests(loader.discover('.', 'test_marshal.py'))
tests.addTests(loader.discover('.', 'test_message.py'))
else:
tests = TestSuite()
runner = TextTestRunner(verbosity=1, buffer=True)
runner.run(tests)
<commit_msg>Exit non-zero if tests fail
runtests.py was exiting 0 if test either failed or passed (which
confused tox, travis, etc).
Determine the status code based on the success of the test suite run.<commit_after>
|
# Unit test driver.
import os
import sys
from unittest import TestLoader, TestSuite, TextTestRunner
topdir = os.path.split(os.path.abspath(__file__))[0]
os.chdir(topdir)
loader = TestLoader()
if sys.version_info < (3, 0):
tests = loader.discover('.', 'test_*.py')
elif sys.version_info > (3, 2):
tests = TestSuite()
tests.addTests(loader.discover('.', 'test_marshal.py'))
tests.addTests(loader.discover('.', 'test_message.py'))
else:
tests = TestSuite()
runner = TextTestRunner(verbosity=2, buffer=True)
result = runner.run(tests)
sys.exit(not result.wasSuccessful())
|
# Unit test driver.
import os
import sys
from unittest import TestLoader, TestSuite, TextTestRunner
topdir = os.path.split(os.path.abspath(__file__))[0]
os.chdir(topdir)
loader = TestLoader()
if sys.version_info[:2] < (3, 0):
tests = loader.discover('.', 'test_*.py')
elif sys.version_info[:2] > (3, 2):
tests = TestSuite()
tests.addTests(loader.discover('.', 'test_marshal.py'))
tests.addTests(loader.discover('.', 'test_message.py'))
else:
tests = TestSuite()
runner = TextTestRunner(verbosity=1, buffer=True)
runner.run(tests)
Exit non-zero if tests fail
runtests.py was exiting 0 if test either failed or passed (which
confused tox, travis, etc).
Determine the status code based on the success of the test suite run.# Unit test driver.
import os
import sys
from unittest import TestLoader, TestSuite, TextTestRunner
topdir = os.path.split(os.path.abspath(__file__))[0]
os.chdir(topdir)
loader = TestLoader()
if sys.version_info < (3, 0):
tests = loader.discover('.', 'test_*.py')
elif sys.version_info > (3, 2):
tests = TestSuite()
tests.addTests(loader.discover('.', 'test_marshal.py'))
tests.addTests(loader.discover('.', 'test_message.py'))
else:
tests = TestSuite()
runner = TextTestRunner(verbosity=2, buffer=True)
result = runner.run(tests)
sys.exit(not result.wasSuccessful())
|
<commit_before># Unit test driver.
import os
import sys
from unittest import TestLoader, TestSuite, TextTestRunner
topdir = os.path.split(os.path.abspath(__file__))[0]
os.chdir(topdir)
loader = TestLoader()
if sys.version_info[:2] < (3, 0):
tests = loader.discover('.', 'test_*.py')
elif sys.version_info[:2] > (3, 2):
tests = TestSuite()
tests.addTests(loader.discover('.', 'test_marshal.py'))
tests.addTests(loader.discover('.', 'test_message.py'))
else:
tests = TestSuite()
runner = TextTestRunner(verbosity=1, buffer=True)
runner.run(tests)
<commit_msg>Exit non-zero if tests fail
runtests.py was exiting 0 if test either failed or passed (which
confused tox, travis, etc).
Determine the status code based on the success of the test suite run.<commit_after># Unit test driver.
import os
import sys
from unittest import TestLoader, TestSuite, TextTestRunner
topdir = os.path.split(os.path.abspath(__file__))[0]
os.chdir(topdir)
loader = TestLoader()
if sys.version_info < (3, 0):
tests = loader.discover('.', 'test_*.py')
elif sys.version_info > (3, 2):
tests = TestSuite()
tests.addTests(loader.discover('.', 'test_marshal.py'))
tests.addTests(loader.discover('.', 'test_message.py'))
else:
tests = TestSuite()
runner = TextTestRunner(verbosity=2, buffer=True)
result = runner.run(tests)
sys.exit(not result.wasSuccessful())
|
6ac70bb24b7fab272adb9805fa0509aa2282add4
|
pysswords/db.py
|
pysswords/db.py
|
from glob import glob
import os
from .credential import Credential
from .crypt import create_gpg, load_gpg
class Database(object):
def __init__(self, path, gpg):
self.path = path
self.gpg = gpg
@classmethod
def create(cls, path, passphrase, gpg_bin="gpg"):
gpg = create_gpg(gpg_bin, path, passphrase)
return Database(path, gpg)
@classmethod
def from_path(cls, path, gpg_bin="gpg"):
gpg = load_gpg(binary=gpg_bin, database_path=path)
return Database(path, gpg)
@property
def gpg_key(self, secret=False):
return self.gpg.list_keys(secret=secret)[0]
def add(self, credential):
encrypted_password = self.gpg.encrypt(
credential.password,
self.gpg_key
)
credential.password = str(encrypted_password)
credential.save(database_path=self.path)
def credential(self, name):
credential_path = os.path.join(self.path, name)
credential = Credential.from_path(credential_path)
return credential
@property
def credentials(self):
return [self.credential(os.path.basename(c))
for c in glob(self.path + "/**")]
|
from glob import glob
import os
from .credential import Credential
from .crypt import create_gpg, load_gpg
class Database(object):
def __init__(self, path, gpg):
self.path = path
self.gpg = gpg
@classmethod
def create(cls, path, passphrase, gpg_bin="gpg"):
gpg = create_gpg(gpg_bin, path, passphrase)
return Database(path, gpg)
@classmethod
def from_path(cls, path, gpg_bin="gpg"):
gpg = load_gpg(binary=gpg_bin, database_path=path)
return Database(path, gpg)
@property
def gpg_key(self):
return self.gpg.list_keys(secret=True)[0]["fingerprint"]
def add(self, credential):
encrypted_password = self.gpg.encrypt(
credential.password,
self.gpg_key
)
credential.password = str(encrypted_password)
credential.save(database_path=self.path)
def credential(self, name):
credential_path = os.path.join(self.path, name)
credential = Credential.from_path(credential_path)
return credential
@property
def credentials(self):
return [self.credential(os.path.basename(c))
for c in glob(self.path + "/**")]
|
Fix get gpg key from database
|
Fix get gpg key from database
|
Python
|
mit
|
scorphus/passpie,marcwebbie/pysswords,marcwebbie/passpie,scorphus/passpie,eiginn/passpie,eiginn/passpie,marcwebbie/passpie
|
from glob import glob
import os
from .credential import Credential
from .crypt import create_gpg, load_gpg
class Database(object):
def __init__(self, path, gpg):
self.path = path
self.gpg = gpg
@classmethod
def create(cls, path, passphrase, gpg_bin="gpg"):
gpg = create_gpg(gpg_bin, path, passphrase)
return Database(path, gpg)
@classmethod
def from_path(cls, path, gpg_bin="gpg"):
gpg = load_gpg(binary=gpg_bin, database_path=path)
return Database(path, gpg)
@property
def gpg_key(self, secret=False):
return self.gpg.list_keys(secret=secret)[0]
def add(self, credential):
encrypted_password = self.gpg.encrypt(
credential.password,
self.gpg_key
)
credential.password = str(encrypted_password)
credential.save(database_path=self.path)
def credential(self, name):
credential_path = os.path.join(self.path, name)
credential = Credential.from_path(credential_path)
return credential
@property
def credentials(self):
return [self.credential(os.path.basename(c))
for c in glob(self.path + "/**")]
Fix get gpg key from database
|
from glob import glob
import os
from .credential import Credential
from .crypt import create_gpg, load_gpg
class Database(object):
def __init__(self, path, gpg):
self.path = path
self.gpg = gpg
@classmethod
def create(cls, path, passphrase, gpg_bin="gpg"):
gpg = create_gpg(gpg_bin, path, passphrase)
return Database(path, gpg)
@classmethod
def from_path(cls, path, gpg_bin="gpg"):
gpg = load_gpg(binary=gpg_bin, database_path=path)
return Database(path, gpg)
@property
def gpg_key(self):
return self.gpg.list_keys(secret=True)[0]["fingerprint"]
def add(self, credential):
encrypted_password = self.gpg.encrypt(
credential.password,
self.gpg_key
)
credential.password = str(encrypted_password)
credential.save(database_path=self.path)
def credential(self, name):
credential_path = os.path.join(self.path, name)
credential = Credential.from_path(credential_path)
return credential
@property
def credentials(self):
return [self.credential(os.path.basename(c))
for c in glob(self.path + "/**")]
|
<commit_before>from glob import glob
import os
from .credential import Credential
from .crypt import create_gpg, load_gpg
class Database(object):
def __init__(self, path, gpg):
self.path = path
self.gpg = gpg
@classmethod
def create(cls, path, passphrase, gpg_bin="gpg"):
gpg = create_gpg(gpg_bin, path, passphrase)
return Database(path, gpg)
@classmethod
def from_path(cls, path, gpg_bin="gpg"):
gpg = load_gpg(binary=gpg_bin, database_path=path)
return Database(path, gpg)
@property
def gpg_key(self, secret=False):
return self.gpg.list_keys(secret=secret)[0]
def add(self, credential):
encrypted_password = self.gpg.encrypt(
credential.password,
self.gpg_key
)
credential.password = str(encrypted_password)
credential.save(database_path=self.path)
def credential(self, name):
credential_path = os.path.join(self.path, name)
credential = Credential.from_path(credential_path)
return credential
@property
def credentials(self):
return [self.credential(os.path.basename(c))
for c in glob(self.path + "/**")]
<commit_msg>Fix get gpg key from database<commit_after>
|
from glob import glob
import os
from .credential import Credential
from .crypt import create_gpg, load_gpg
class Database(object):
def __init__(self, path, gpg):
self.path = path
self.gpg = gpg
@classmethod
def create(cls, path, passphrase, gpg_bin="gpg"):
gpg = create_gpg(gpg_bin, path, passphrase)
return Database(path, gpg)
@classmethod
def from_path(cls, path, gpg_bin="gpg"):
gpg = load_gpg(binary=gpg_bin, database_path=path)
return Database(path, gpg)
@property
def gpg_key(self):
return self.gpg.list_keys(secret=True)[0]["fingerprint"]
def add(self, credential):
encrypted_password = self.gpg.encrypt(
credential.password,
self.gpg_key
)
credential.password = str(encrypted_password)
credential.save(database_path=self.path)
def credential(self, name):
credential_path = os.path.join(self.path, name)
credential = Credential.from_path(credential_path)
return credential
@property
def credentials(self):
return [self.credential(os.path.basename(c))
for c in glob(self.path + "/**")]
|
from glob import glob
import os
from .credential import Credential
from .crypt import create_gpg, load_gpg
class Database(object):
def __init__(self, path, gpg):
self.path = path
self.gpg = gpg
@classmethod
def create(cls, path, passphrase, gpg_bin="gpg"):
gpg = create_gpg(gpg_bin, path, passphrase)
return Database(path, gpg)
@classmethod
def from_path(cls, path, gpg_bin="gpg"):
gpg = load_gpg(binary=gpg_bin, database_path=path)
return Database(path, gpg)
@property
def gpg_key(self, secret=False):
return self.gpg.list_keys(secret=secret)[0]
def add(self, credential):
encrypted_password = self.gpg.encrypt(
credential.password,
self.gpg_key
)
credential.password = str(encrypted_password)
credential.save(database_path=self.path)
def credential(self, name):
credential_path = os.path.join(self.path, name)
credential = Credential.from_path(credential_path)
return credential
@property
def credentials(self):
return [self.credential(os.path.basename(c))
for c in glob(self.path + "/**")]
Fix get gpg key from databasefrom glob import glob
import os
from .credential import Credential
from .crypt import create_gpg, load_gpg
class Database(object):
def __init__(self, path, gpg):
self.path = path
self.gpg = gpg
@classmethod
def create(cls, path, passphrase, gpg_bin="gpg"):
gpg = create_gpg(gpg_bin, path, passphrase)
return Database(path, gpg)
@classmethod
def from_path(cls, path, gpg_bin="gpg"):
gpg = load_gpg(binary=gpg_bin, database_path=path)
return Database(path, gpg)
@property
def gpg_key(self):
return self.gpg.list_keys(secret=True)[0]["fingerprint"]
def add(self, credential):
encrypted_password = self.gpg.encrypt(
credential.password,
self.gpg_key
)
credential.password = str(encrypted_password)
credential.save(database_path=self.path)
def credential(self, name):
credential_path = os.path.join(self.path, name)
credential = Credential.from_path(credential_path)
return credential
@property
def credentials(self):
return [self.credential(os.path.basename(c))
for c in glob(self.path + "/**")]
|
<commit_before>from glob import glob
import os
from .credential import Credential
from .crypt import create_gpg, load_gpg
class Database(object):
def __init__(self, path, gpg):
self.path = path
self.gpg = gpg
@classmethod
def create(cls, path, passphrase, gpg_bin="gpg"):
gpg = create_gpg(gpg_bin, path, passphrase)
return Database(path, gpg)
@classmethod
def from_path(cls, path, gpg_bin="gpg"):
gpg = load_gpg(binary=gpg_bin, database_path=path)
return Database(path, gpg)
@property
def gpg_key(self, secret=False):
return self.gpg.list_keys(secret=secret)[0]
def add(self, credential):
encrypted_password = self.gpg.encrypt(
credential.password,
self.gpg_key
)
credential.password = str(encrypted_password)
credential.save(database_path=self.path)
def credential(self, name):
credential_path = os.path.join(self.path, name)
credential = Credential.from_path(credential_path)
return credential
@property
def credentials(self):
return [self.credential(os.path.basename(c))
for c in glob(self.path + "/**")]
<commit_msg>Fix get gpg key from database<commit_after>from glob import glob
import os
from .credential import Credential
from .crypt import create_gpg, load_gpg
class Database(object):
def __init__(self, path, gpg):
self.path = path
self.gpg = gpg
@classmethod
def create(cls, path, passphrase, gpg_bin="gpg"):
gpg = create_gpg(gpg_bin, path, passphrase)
return Database(path, gpg)
@classmethod
def from_path(cls, path, gpg_bin="gpg"):
gpg = load_gpg(binary=gpg_bin, database_path=path)
return Database(path, gpg)
@property
def gpg_key(self):
return self.gpg.list_keys(secret=True)[0]["fingerprint"]
def add(self, credential):
encrypted_password = self.gpg.encrypt(
credential.password,
self.gpg_key
)
credential.password = str(encrypted_password)
credential.save(database_path=self.path)
def credential(self, name):
credential_path = os.path.join(self.path, name)
credential = Credential.from_path(credential_path)
return credential
@property
def credentials(self):
return [self.credential(os.path.basename(c))
for c in glob(self.path + "/**")]
|
4d88162d7c1d596f87f0fb1cc18dd5509bc92330
|
i18n.py
|
i18n.py
|
# -*- coding: utf-8 -*-
import os
import config
import gettext
# Change this variable to your app name!
# The translation files will be under
# @LOCALE_DIR@/@LANGUAGE@/LC_MESSAGES/@APP_NAME@.mo
APP_NAME = "simpleValidator"
LOCALE_DIR = os.path.abspath('lang') # .mo files will then be located in APP_Dir/i18n/LANGUAGECODE/LC_MESSAGES/
DEFAULT_LANGUAGE = config.LOCALE
#lc, encoding = locale.getdefaultlocale()
#if lc:
# languages = [lc]
defaultlang = gettext.translation(APP_NAME, LOCALE_DIR, languages=DEFAULT_LANGUAGE, fallback=False)
def switch_language(lang):
global defaultlang
defaultlang = gettext.translation(APP_NAME, LOCALE_DIR, languages=[lang], fallback=False)
|
# -*- coding: utf-8 -*-
import os
import config
import gettext
# Change this variable to your app name!
# The translation files will be under
# @LOCALE_DIR@/@LANGUAGE@/LC_MESSAGES/@APP_NAME@.mo
APP_NAME = "simpleValidator"
LOCALE_DIR = os.path.abspath('lang') # .mo files will then be located in APP_Dir/i18n/LANGUAGECODE/LC_MESSAGES/
DEFAULT_LANGUAGE = config.LOCALE
#lc, encoding = locale.getdefaultlocale()
#if lc:
# languages = [lc]
defaultlang = gettext.translation(APP_NAME, LOCALE_DIR, languages=DEFAULT_LANGUAGE, fallback=True)
def switch_language(lang):
global defaultlang
defaultlang = gettext.translation(APP_NAME, LOCALE_DIR, languages=[lang], fallback=True)
|
Set fallback to true on get text to avoid crash on missing language file
|
Set fallback to true on get text to avoid crash on missing language file
|
Python
|
mit
|
markleent/simpleValidator
|
# -*- coding: utf-8 -*-
import os
import config
import gettext
# Change this variable to your app name!
# The translation files will be under
# @LOCALE_DIR@/@LANGUAGE@/LC_MESSAGES/@APP_NAME@.mo
APP_NAME = "simpleValidator"
LOCALE_DIR = os.path.abspath('lang') # .mo files will then be located in APP_Dir/i18n/LANGUAGECODE/LC_MESSAGES/
DEFAULT_LANGUAGE = config.LOCALE
#lc, encoding = locale.getdefaultlocale()
#if lc:
# languages = [lc]
defaultlang = gettext.translation(APP_NAME, LOCALE_DIR, languages=DEFAULT_LANGUAGE, fallback=False)
def switch_language(lang):
global defaultlang
defaultlang = gettext.translation(APP_NAME, LOCALE_DIR, languages=[lang], fallback=False)
Set fallback to true on get text to avoid crash on missing language file
|
# -*- coding: utf-8 -*-
import os
import config
import gettext
# Change this variable to your app name!
# The translation files will be under
# @LOCALE_DIR@/@LANGUAGE@/LC_MESSAGES/@APP_NAME@.mo
APP_NAME = "simpleValidator"
LOCALE_DIR = os.path.abspath('lang') # .mo files will then be located in APP_Dir/i18n/LANGUAGECODE/LC_MESSAGES/
DEFAULT_LANGUAGE = config.LOCALE
#lc, encoding = locale.getdefaultlocale()
#if lc:
# languages = [lc]
defaultlang = gettext.translation(APP_NAME, LOCALE_DIR, languages=DEFAULT_LANGUAGE, fallback=True)
def switch_language(lang):
global defaultlang
defaultlang = gettext.translation(APP_NAME, LOCALE_DIR, languages=[lang], fallback=True)
|
<commit_before># -*- coding: utf-8 -*-
import os
import config
import gettext
# Change this variable to your app name!
# The translation files will be under
# @LOCALE_DIR@/@LANGUAGE@/LC_MESSAGES/@APP_NAME@.mo
APP_NAME = "simpleValidator"
LOCALE_DIR = os.path.abspath('lang') # .mo files will then be located in APP_Dir/i18n/LANGUAGECODE/LC_MESSAGES/
DEFAULT_LANGUAGE = config.LOCALE
#lc, encoding = locale.getdefaultlocale()
#if lc:
# languages = [lc]
defaultlang = gettext.translation(APP_NAME, LOCALE_DIR, languages=DEFAULT_LANGUAGE, fallback=False)
def switch_language(lang):
global defaultlang
defaultlang = gettext.translation(APP_NAME, LOCALE_DIR, languages=[lang], fallback=False)
<commit_msg>Set fallback to true on get text to avoid crash on missing language file<commit_after>
|
# -*- coding: utf-8 -*-
import os
import config
import gettext
# Change this variable to your app name!
# The translation files will be under
# @LOCALE_DIR@/@LANGUAGE@/LC_MESSAGES/@APP_NAME@.mo
APP_NAME = "simpleValidator"
LOCALE_DIR = os.path.abspath('lang') # .mo files will then be located in APP_Dir/i18n/LANGUAGECODE/LC_MESSAGES/
DEFAULT_LANGUAGE = config.LOCALE
#lc, encoding = locale.getdefaultlocale()
#if lc:
# languages = [lc]
defaultlang = gettext.translation(APP_NAME, LOCALE_DIR, languages=DEFAULT_LANGUAGE, fallback=True)
def switch_language(lang):
global defaultlang
defaultlang = gettext.translation(APP_NAME, LOCALE_DIR, languages=[lang], fallback=True)
|
# -*- coding: utf-8 -*-
import os
import config
import gettext
# Change this variable to your app name!
# The translation files will be under
# @LOCALE_DIR@/@LANGUAGE@/LC_MESSAGES/@APP_NAME@.mo
APP_NAME = "simpleValidator"
LOCALE_DIR = os.path.abspath('lang') # .mo files will then be located in APP_Dir/i18n/LANGUAGECODE/LC_MESSAGES/
DEFAULT_LANGUAGE = config.LOCALE
#lc, encoding = locale.getdefaultlocale()
#if lc:
# languages = [lc]
defaultlang = gettext.translation(APP_NAME, LOCALE_DIR, languages=DEFAULT_LANGUAGE, fallback=False)
def switch_language(lang):
global defaultlang
defaultlang = gettext.translation(APP_NAME, LOCALE_DIR, languages=[lang], fallback=False)
Set fallback to true on get text to avoid crash on missing language file# -*- coding: utf-8 -*-
import os
import config
import gettext
# Change this variable to your app name!
# The translation files will be under
# @LOCALE_DIR@/@LANGUAGE@/LC_MESSAGES/@APP_NAME@.mo
APP_NAME = "simpleValidator"
LOCALE_DIR = os.path.abspath('lang') # .mo files will then be located in APP_Dir/i18n/LANGUAGECODE/LC_MESSAGES/
DEFAULT_LANGUAGE = config.LOCALE
#lc, encoding = locale.getdefaultlocale()
#if lc:
# languages = [lc]
defaultlang = gettext.translation(APP_NAME, LOCALE_DIR, languages=DEFAULT_LANGUAGE, fallback=True)
def switch_language(lang):
global defaultlang
defaultlang = gettext.translation(APP_NAME, LOCALE_DIR, languages=[lang], fallback=True)
|
<commit_before># -*- coding: utf-8 -*-
import os
import config
import gettext
# Change this variable to your app name!
# The translation files will be under
# @LOCALE_DIR@/@LANGUAGE@/LC_MESSAGES/@APP_NAME@.mo
APP_NAME = "simpleValidator"
LOCALE_DIR = os.path.abspath('lang') # .mo files will then be located in APP_Dir/i18n/LANGUAGECODE/LC_MESSAGES/
DEFAULT_LANGUAGE = config.LOCALE
#lc, encoding = locale.getdefaultlocale()
#if lc:
# languages = [lc]
defaultlang = gettext.translation(APP_NAME, LOCALE_DIR, languages=DEFAULT_LANGUAGE, fallback=False)
def switch_language(lang):
global defaultlang
defaultlang = gettext.translation(APP_NAME, LOCALE_DIR, languages=[lang], fallback=False)
<commit_msg>Set fallback to true on get text to avoid crash on missing language file<commit_after># -*- coding: utf-8 -*-
import os
import config
import gettext
# Change this variable to your app name!
# The translation files will be under
# @LOCALE_DIR@/@LANGUAGE@/LC_MESSAGES/@APP_NAME@.mo
APP_NAME = "simpleValidator"
LOCALE_DIR = os.path.abspath('lang') # .mo files will then be located in APP_Dir/i18n/LANGUAGECODE/LC_MESSAGES/
DEFAULT_LANGUAGE = config.LOCALE
#lc, encoding = locale.getdefaultlocale()
#if lc:
# languages = [lc]
defaultlang = gettext.translation(APP_NAME, LOCALE_DIR, languages=DEFAULT_LANGUAGE, fallback=True)
def switch_language(lang):
global defaultlang
defaultlang = gettext.translation(APP_NAME, LOCALE_DIR, languages=[lang], fallback=True)
|
850464de61237a7fae64219a39e9c937f7d40c01
|
randcat.py
|
randcat.py
|
#! /usr/bin/python3
import random
random.seed() # this initializes with the Date, which I think is a novel enough seed
while True: # if we're going with a mimicing of cat /dev/random, it'll pretty much just go until it's killed
print(chr(random.getrandbits(8)), end='')
|
#! /usr/bin/python3
import calendar
import time
seed = calendar.timegm(time.gmtime()) # We'll use the epoch time as a seed.
def random (seed):
seed2 = (seed*297642 + 83782)/70000
return int(seed2) % 70000;
p = seed
while True: # if we're going with a mimicing of cat /dev/random, it'll pretty much just go until it's killed
p = random(p)
print(chr(p % 256), end="")
p = p % 4000
|
Make it so we're using our own seed.
|
Make it so we're using our own seed.
|
Python
|
apache-2.0
|
Tombert/RandCat
|
#! /usr/bin/python3
import random
random.seed() # this initializes with the Date, which I think is a novel enough seed
while True: # if we're going with a mimicing of cat /dev/random, it'll pretty much just go until it's killed
print(chr(random.getrandbits(8)), end='')
Make it so we're using our own seed.
|
#! /usr/bin/python3
import calendar
import time
seed = calendar.timegm(time.gmtime()) # We'll use the epoch time as a seed.
def random (seed):
seed2 = (seed*297642 + 83782)/70000
return int(seed2) % 70000;
p = seed
while True: # if we're going with a mimicing of cat /dev/random, it'll pretty much just go until it's killed
p = random(p)
print(chr(p % 256), end="")
p = p % 4000
|
<commit_before>#! /usr/bin/python3
import random
random.seed() # this initializes with the Date, which I think is a novel enough seed
while True: # if we're going with a mimicing of cat /dev/random, it'll pretty much just go until it's killed
print(chr(random.getrandbits(8)), end='')
<commit_msg>Make it so we're using our own seed.<commit_after>
|
#! /usr/bin/python3
import calendar
import time
seed = calendar.timegm(time.gmtime()) # We'll use the epoch time as a seed.
def random (seed):
seed2 = (seed*297642 + 83782)/70000
return int(seed2) % 70000;
p = seed
while True: # if we're going with a mimicing of cat /dev/random, it'll pretty much just go until it's killed
p = random(p)
print(chr(p % 256), end="")
p = p % 4000
|
#! /usr/bin/python3
import random
random.seed() # this initializes with the Date, which I think is a novel enough seed
while True: # if we're going with a mimicing of cat /dev/random, it'll pretty much just go until it's killed
print(chr(random.getrandbits(8)), end='')
Make it so we're using our own seed.#! /usr/bin/python3
import calendar
import time
seed = calendar.timegm(time.gmtime()) # We'll use the epoch time as a seed.
def random (seed):
seed2 = (seed*297642 + 83782)/70000
return int(seed2) % 70000;
p = seed
while True: # if we're going with a mimicing of cat /dev/random, it'll pretty much just go until it's killed
p = random(p)
print(chr(p % 256), end="")
p = p % 4000
|
<commit_before>#! /usr/bin/python3
import random
random.seed() # this initializes with the Date, which I think is a novel enough seed
while True: # if we're going with a mimicing of cat /dev/random, it'll pretty much just go until it's killed
print(chr(random.getrandbits(8)), end='')
<commit_msg>Make it so we're using our own seed.<commit_after>#! /usr/bin/python3
import calendar
import time
seed = calendar.timegm(time.gmtime()) # We'll use the epoch time as a seed.
def random (seed):
seed2 = (seed*297642 + 83782)/70000
return int(seed2) % 70000;
p = seed
while True: # if we're going with a mimicing of cat /dev/random, it'll pretty much just go until it's killed
p = random(p)
print(chr(p % 256), end="")
p = p % 4000
|
bbd8ed9fe5679d6eb85ddd183515e6bf5c95d5fc
|
filter-test.py
|
filter-test.py
|
#!/usr/bin/env python
#
# Test hook to launch an irker instance (if it doesn't already exist)
# just before shipping the notification. We start it in in another terminal
# so you can watch the debug messages. Probably only of interest only to
# developers
#
# To use it, set up irkerhook.py to file on each commit.
# Then set the filtercmd variable in your repo config as follows:
#
# [irker]
# filtercmd = filter-test.py
#
# This is rather antisocial - imagine thousands of irkerds holding open
# connections to IRCDs. It's better to go through an instance running
# at your forge or set up for shared use by your intranet administrator.
import os, sys, json, subprocess, time
metadata = json.loads(sys.argv[1])
ps = subprocess.Popen("ps -U %s uh" % os.getenv("LOGNAME"),
shell=True,
stdout=subprocess.PIPE)
data = ps.stdout.read()
irkerd_count = len([x for x in data.split("\n") if x.find("irkerd") != -1])
if not irkerd_count:
os.system("gnome-terminal --title 'irkerd' -e 'irkerd -d 2' &")
time.sleep(0.1) # Avoid a race condition
print json.dumps(metadata)
# end
|
#!/usr/bin/env python
#
# Test hook to launch an irker instance (if it doesn't already exist)
# just before shipping the notification. We start it in in another terminal
# so you can watch the debug messages. Intended to be used in the root
# directory of the irker repo. Probably only of interest only to irker
# developers
#
# To use this, set up irkerhook.py to fire on each commit. Creating a
# .git/hooks/post-commit file containing the line "irkerhook.py"; be
# sure to make the opos-commit file executable. Then set the
# filtercmd variable in your repo config as follows:
#
# [irker]
# filtercmd = filter-test.py
import os, sys, json, subprocess, time
metadata = json.loads(sys.argv[1])
ps = subprocess.Popen("ps -U %s uh" % os.getenv("LOGNAME"),
shell=True,
stdout=subprocess.PIPE)
data = ps.stdout.read()
irkerd_count = len([x for x in data.split("\n") if x.find("irkerd") != -1])
if not irkerd_count:
os.system("gnome-terminal --title 'irkerd' -e 'irkerd -d 2' &")
time.sleep(0.1) # Avoid a race condition
print json.dumps(metadata)
# end
|
Document the test filter better.
|
Document the test filter better.
|
Python
|
bsd-3-clause
|
Trellmor/irker,boklm/irker,dak180/irker,Southen/irker
|
#!/usr/bin/env python
#
# Test hook to launch an irker instance (if it doesn't already exist)
# just before shipping the notification. We start it in in another terminal
# so you can watch the debug messages. Probably only of interest only to
# developers
#
# To use it, set up irkerhook.py to file on each commit.
# Then set the filtercmd variable in your repo config as follows:
#
# [irker]
# filtercmd = filter-test.py
#
# This is rather antisocial - imagine thousands of irkerds holding open
# connections to IRCDs. It's better to go through an instance running
# at your forge or set up for shared use by your intranet administrator.
import os, sys, json, subprocess, time
metadata = json.loads(sys.argv[1])
ps = subprocess.Popen("ps -U %s uh" % os.getenv("LOGNAME"),
shell=True,
stdout=subprocess.PIPE)
data = ps.stdout.read()
irkerd_count = len([x for x in data.split("\n") if x.find("irkerd") != -1])
if not irkerd_count:
os.system("gnome-terminal --title 'irkerd' -e 'irkerd -d 2' &")
time.sleep(0.1) # Avoid a race condition
print json.dumps(metadata)
# end
Document the test filter better.
|
#!/usr/bin/env python
#
# Test hook to launch an irker instance (if it doesn't already exist)
# just before shipping the notification. We start it in in another terminal
# so you can watch the debug messages. Intended to be used in the root
# directory of the irker repo. Probably only of interest only to irker
# developers
#
# To use this, set up irkerhook.py to fire on each commit. Creating a
# .git/hooks/post-commit file containing the line "irkerhook.py"; be
# sure to make the opos-commit file executable. Then set the
# filtercmd variable in your repo config as follows:
#
# [irker]
# filtercmd = filter-test.py
import os, sys, json, subprocess, time
metadata = json.loads(sys.argv[1])
ps = subprocess.Popen("ps -U %s uh" % os.getenv("LOGNAME"),
shell=True,
stdout=subprocess.PIPE)
data = ps.stdout.read()
irkerd_count = len([x for x in data.split("\n") if x.find("irkerd") != -1])
if not irkerd_count:
os.system("gnome-terminal --title 'irkerd' -e 'irkerd -d 2' &")
time.sleep(0.1) # Avoid a race condition
print json.dumps(metadata)
# end
|
<commit_before>#!/usr/bin/env python
#
# Test hook to launch an irker instance (if it doesn't already exist)
# just before shipping the notification. We start it in in another terminal
# so you can watch the debug messages. Probably only of interest only to
# developers
#
# To use it, set up irkerhook.py to file on each commit.
# Then set the filtercmd variable in your repo config as follows:
#
# [irker]
# filtercmd = filter-test.py
#
# This is rather antisocial - imagine thousands of irkerds holding open
# connections to IRCDs. It's better to go through an instance running
# at your forge or set up for shared use by your intranet administrator.
import os, sys, json, subprocess, time
metadata = json.loads(sys.argv[1])
ps = subprocess.Popen("ps -U %s uh" % os.getenv("LOGNAME"),
shell=True,
stdout=subprocess.PIPE)
data = ps.stdout.read()
irkerd_count = len([x for x in data.split("\n") if x.find("irkerd") != -1])
if not irkerd_count:
os.system("gnome-terminal --title 'irkerd' -e 'irkerd -d 2' &")
time.sleep(0.1) # Avoid a race condition
print json.dumps(metadata)
# end
<commit_msg>Document the test filter better.<commit_after>
|
#!/usr/bin/env python
#
# Test hook to launch an irker instance (if it doesn't already exist)
# just before shipping the notification. We start it in in another terminal
# so you can watch the debug messages. Intended to be used in the root
# directory of the irker repo. Probably only of interest only to irker
# developers
#
# To use this, set up irkerhook.py to fire on each commit. Creating a
# .git/hooks/post-commit file containing the line "irkerhook.py"; be
# sure to make the opos-commit file executable. Then set the
# filtercmd variable in your repo config as follows:
#
# [irker]
# filtercmd = filter-test.py
import os, sys, json, subprocess, time
metadata = json.loads(sys.argv[1])
ps = subprocess.Popen("ps -U %s uh" % os.getenv("LOGNAME"),
shell=True,
stdout=subprocess.PIPE)
data = ps.stdout.read()
irkerd_count = len([x for x in data.split("\n") if x.find("irkerd") != -1])
if not irkerd_count:
os.system("gnome-terminal --title 'irkerd' -e 'irkerd -d 2' &")
time.sleep(0.1) # Avoid a race condition
print json.dumps(metadata)
# end
|
#!/usr/bin/env python
#
# Test hook to launch an irker instance (if it doesn't already exist)
# just before shipping the notification. We start it in in another terminal
# so you can watch the debug messages. Probably only of interest only to
# developers
#
# To use it, set up irkerhook.py to file on each commit.
# Then set the filtercmd variable in your repo config as follows:
#
# [irker]
# filtercmd = filter-test.py
#
# This is rather antisocial - imagine thousands of irkerds holding open
# connections to IRCDs. It's better to go through an instance running
# at your forge or set up for shared use by your intranet administrator.
import os, sys, json, subprocess, time
metadata = json.loads(sys.argv[1])
ps = subprocess.Popen("ps -U %s uh" % os.getenv("LOGNAME"),
shell=True,
stdout=subprocess.PIPE)
data = ps.stdout.read()
irkerd_count = len([x for x in data.split("\n") if x.find("irkerd") != -1])
if not irkerd_count:
os.system("gnome-terminal --title 'irkerd' -e 'irkerd -d 2' &")
time.sleep(0.1) # Avoid a race condition
print json.dumps(metadata)
# end
Document the test filter better.#!/usr/bin/env python
#
# Test hook to launch an irker instance (if it doesn't already exist)
# just before shipping the notification. We start it in in another terminal
# so you can watch the debug messages. Intended to be used in the root
# directory of the irker repo. Probably only of interest only to irker
# developers
#
# To use this, set up irkerhook.py to fire on each commit. Creating a
# .git/hooks/post-commit file containing the line "irkerhook.py"; be
# sure to make the opos-commit file executable. Then set the
# filtercmd variable in your repo config as follows:
#
# [irker]
# filtercmd = filter-test.py
import os, sys, json, subprocess, time
metadata = json.loads(sys.argv[1])
ps = subprocess.Popen("ps -U %s uh" % os.getenv("LOGNAME"),
shell=True,
stdout=subprocess.PIPE)
data = ps.stdout.read()
irkerd_count = len([x for x in data.split("\n") if x.find("irkerd") != -1])
if not irkerd_count:
os.system("gnome-terminal --title 'irkerd' -e 'irkerd -d 2' &")
time.sleep(0.1) # Avoid a race condition
print json.dumps(metadata)
# end
|
<commit_before>#!/usr/bin/env python
#
# Test hook to launch an irker instance (if it doesn't already exist)
# just before shipping the notification. We start it in in another terminal
# so you can watch the debug messages. Probably only of interest only to
# developers
#
# To use it, set up irkerhook.py to file on each commit.
# Then set the filtercmd variable in your repo config as follows:
#
# [irker]
# filtercmd = filter-test.py
#
# This is rather antisocial - imagine thousands of irkerds holding open
# connections to IRCDs. It's better to go through an instance running
# at your forge or set up for shared use by your intranet administrator.
import os, sys, json, subprocess, time
metadata = json.loads(sys.argv[1])
ps = subprocess.Popen("ps -U %s uh" % os.getenv("LOGNAME"),
shell=True,
stdout=subprocess.PIPE)
data = ps.stdout.read()
irkerd_count = len([x for x in data.split("\n") if x.find("irkerd") != -1])
if not irkerd_count:
os.system("gnome-terminal --title 'irkerd' -e 'irkerd -d 2' &")
time.sleep(0.1) # Avoid a race condition
print json.dumps(metadata)
# end
<commit_msg>Document the test filter better.<commit_after>#!/usr/bin/env python
#
# Test hook to launch an irker instance (if it doesn't already exist)
# just before shipping the notification. We start it in in another terminal
# so you can watch the debug messages. Intended to be used in the root
# directory of the irker repo. Probably only of interest only to irker
# developers
#
# To use this, set up irkerhook.py to fire on each commit. Creating a
# .git/hooks/post-commit file containing the line "irkerhook.py"; be
# sure to make the opos-commit file executable. Then set the
# filtercmd variable in your repo config as follows:
#
# [irker]
# filtercmd = filter-test.py
import os, sys, json, subprocess, time
metadata = json.loads(sys.argv[1])
ps = subprocess.Popen("ps -U %s uh" % os.getenv("LOGNAME"),
shell=True,
stdout=subprocess.PIPE)
data = ps.stdout.read()
irkerd_count = len([x for x in data.split("\n") if x.find("irkerd") != -1])
if not irkerd_count:
os.system("gnome-terminal --title 'irkerd' -e 'irkerd -d 2' &")
time.sleep(0.1) # Avoid a race condition
print json.dumps(metadata)
# end
|
74983020db5cfcd3e81e258837979522f2d1b639
|
flac_errors.py
|
flac_errors.py
|
import re
errorFile = open('samples/foobar2000-errors.txt', 'r')
errors = errorFile.readlines()
index = -1
for i in range(len(errors)):
line = errors[i]
if re.search(r'List of undecodable items:', line):
index = i
nbErrorsFoobar = 0
for i in range(index, len(errors)):
line = errors[i]
match = re.search(r'"(.*.flac)"', line)
if match:
nbErrorsFoobar += 1
print(match.group(1))
errorFile.close()
errorFile = open('samples/dbpoweramp-errors.txt', 'r')
errors = errorFile.readlines()
index = -1
nbErrorsDbPowerAmp = 0
for i in range(len(errors)):
line = errors[i]
match = re.search(r'/.+/[^/]+.flac', line)
if match:
print(match.group())
if (re.search(r'Encountered', line) or re.search(r'md5 did not match decoded data, file is corrupt.', line)):
nbErrorsDbPowerAmp += 1
print(nbErrorsFoobar, 'FLAC errors from foobar2000')
print(nbErrorsDbPowerAmp, 'FLAC errors from dbPowerAmp')
|
import re
errorFile = open('samples/foobar2000-errors.txt', 'r')
errors = errorFile.readlines()
errorsSet = set()
index = -1
for i in range(len(errors)):
line = errors[i]
if re.search(r'List of undecodable items:', line):
index = i
nbErrorsFoobar = 0
for i in range(index, len(errors)):
line = errors[i]
match = re.search(r'\\.+\\([^\\]+.flac)', line)
if match:
errorsSet.add(match.group(1))
nbErrorsFoobar += 1
# print(match.group(1))
errorFile.close()
errorFile = open('samples/dbpoweramp-errors.txt', 'r')
errors = errorFile.readlines()
index = -1
nbErrorsDbPowerAmp = 0
for i in range(len(errors)):
line = errors[i]
match = re.search(r'/.+/([^/]+.flac)', line)
if match:
errorsSet.add(match.group(1))
# print(match.group(1))
if (re.search(r'Encountered', line) or re.search(r'md5 did not match decoded data, file is corrupt.', line)):
nbErrorsDbPowerAmp += 1
print(nbErrorsFoobar, 'FLAC errors from foobar2000')
print(nbErrorsDbPowerAmp, 'FLAC errors from dbPowerAmp')
print(len(errorsSet))
for item in errorsSet:
print(item)
|
Use a Set to avoid duplicates
|
Use a Set to avoid duplicates
|
Python
|
mit
|
derekhendrickx/find-my-flac-errors
|
import re
errorFile = open('samples/foobar2000-errors.txt', 'r')
errors = errorFile.readlines()
index = -1
for i in range(len(errors)):
line = errors[i]
if re.search(r'List of undecodable items:', line):
index = i
nbErrorsFoobar = 0
for i in range(index, len(errors)):
line = errors[i]
match = re.search(r'"(.*.flac)"', line)
if match:
nbErrorsFoobar += 1
print(match.group(1))
errorFile.close()
errorFile = open('samples/dbpoweramp-errors.txt', 'r')
errors = errorFile.readlines()
index = -1
nbErrorsDbPowerAmp = 0
for i in range(len(errors)):
line = errors[i]
match = re.search(r'/.+/[^/]+.flac', line)
if match:
print(match.group())
if (re.search(r'Encountered', line) or re.search(r'md5 did not match decoded data, file is corrupt.', line)):
nbErrorsDbPowerAmp += 1
print(nbErrorsFoobar, 'FLAC errors from foobar2000')
print(nbErrorsDbPowerAmp, 'FLAC errors from dbPowerAmp')Use a Set to avoid duplicates
|
import re
errorFile = open('samples/foobar2000-errors.txt', 'r')
errors = errorFile.readlines()
errorsSet = set()
index = -1
for i in range(len(errors)):
line = errors[i]
if re.search(r'List of undecodable items:', line):
index = i
nbErrorsFoobar = 0
for i in range(index, len(errors)):
line = errors[i]
match = re.search(r'\\.+\\([^\\]+.flac)', line)
if match:
errorsSet.add(match.group(1))
nbErrorsFoobar += 1
# print(match.group(1))
errorFile.close()
errorFile = open('samples/dbpoweramp-errors.txt', 'r')
errors = errorFile.readlines()
index = -1
nbErrorsDbPowerAmp = 0
for i in range(len(errors)):
line = errors[i]
match = re.search(r'/.+/([^/]+.flac)', line)
if match:
errorsSet.add(match.group(1))
# print(match.group(1))
if (re.search(r'Encountered', line) or re.search(r'md5 did not match decoded data, file is corrupt.', line)):
nbErrorsDbPowerAmp += 1
print(nbErrorsFoobar, 'FLAC errors from foobar2000')
print(nbErrorsDbPowerAmp, 'FLAC errors from dbPowerAmp')
print(len(errorsSet))
for item in errorsSet:
print(item)
|
<commit_before>import re
errorFile = open('samples/foobar2000-errors.txt', 'r')
errors = errorFile.readlines()
index = -1
for i in range(len(errors)):
line = errors[i]
if re.search(r'List of undecodable items:', line):
index = i
nbErrorsFoobar = 0
for i in range(index, len(errors)):
line = errors[i]
match = re.search(r'"(.*.flac)"', line)
if match:
nbErrorsFoobar += 1
print(match.group(1))
errorFile.close()
errorFile = open('samples/dbpoweramp-errors.txt', 'r')
errors = errorFile.readlines()
index = -1
nbErrorsDbPowerAmp = 0
for i in range(len(errors)):
line = errors[i]
match = re.search(r'/.+/[^/]+.flac', line)
if match:
print(match.group())
if (re.search(r'Encountered', line) or re.search(r'md5 did not match decoded data, file is corrupt.', line)):
nbErrorsDbPowerAmp += 1
print(nbErrorsFoobar, 'FLAC errors from foobar2000')
print(nbErrorsDbPowerAmp, 'FLAC errors from dbPowerAmp')<commit_msg>Use a Set to avoid duplicates<commit_after>
|
import re
errorFile = open('samples/foobar2000-errors.txt', 'r')
errors = errorFile.readlines()
errorsSet = set()
index = -1
for i in range(len(errors)):
line = errors[i]
if re.search(r'List of undecodable items:', line):
index = i
nbErrorsFoobar = 0
for i in range(index, len(errors)):
line = errors[i]
match = re.search(r'\\.+\\([^\\]+.flac)', line)
if match:
errorsSet.add(match.group(1))
nbErrorsFoobar += 1
# print(match.group(1))
errorFile.close()
errorFile = open('samples/dbpoweramp-errors.txt', 'r')
errors = errorFile.readlines()
index = -1
nbErrorsDbPowerAmp = 0
for i in range(len(errors)):
line = errors[i]
match = re.search(r'/.+/([^/]+.flac)', line)
if match:
errorsSet.add(match.group(1))
# print(match.group(1))
if (re.search(r'Encountered', line) or re.search(r'md5 did not match decoded data, file is corrupt.', line)):
nbErrorsDbPowerAmp += 1
print(nbErrorsFoobar, 'FLAC errors from foobar2000')
print(nbErrorsDbPowerAmp, 'FLAC errors from dbPowerAmp')
print(len(errorsSet))
for item in errorsSet:
print(item)
|
import re
errorFile = open('samples/foobar2000-errors.txt', 'r')
errors = errorFile.readlines()
index = -1
for i in range(len(errors)):
line = errors[i]
if re.search(r'List of undecodable items:', line):
index = i
nbErrorsFoobar = 0
for i in range(index, len(errors)):
line = errors[i]
match = re.search(r'"(.*.flac)"', line)
if match:
nbErrorsFoobar += 1
print(match.group(1))
errorFile.close()
errorFile = open('samples/dbpoweramp-errors.txt', 'r')
errors = errorFile.readlines()
index = -1
nbErrorsDbPowerAmp = 0
for i in range(len(errors)):
line = errors[i]
match = re.search(r'/.+/[^/]+.flac', line)
if match:
print(match.group())
if (re.search(r'Encountered', line) or re.search(r'md5 did not match decoded data, file is corrupt.', line)):
nbErrorsDbPowerAmp += 1
print(nbErrorsFoobar, 'FLAC errors from foobar2000')
print(nbErrorsDbPowerAmp, 'FLAC errors from dbPowerAmp')Use a Set to avoid duplicatesimport re
errorFile = open('samples/foobar2000-errors.txt', 'r')
errors = errorFile.readlines()
errorsSet = set()
index = -1
for i in range(len(errors)):
line = errors[i]
if re.search(r'List of undecodable items:', line):
index = i
nbErrorsFoobar = 0
for i in range(index, len(errors)):
line = errors[i]
match = re.search(r'\\.+\\([^\\]+.flac)', line)
if match:
errorsSet.add(match.group(1))
nbErrorsFoobar += 1
# print(match.group(1))
errorFile.close()
errorFile = open('samples/dbpoweramp-errors.txt', 'r')
errors = errorFile.readlines()
index = -1
nbErrorsDbPowerAmp = 0
for i in range(len(errors)):
line = errors[i]
match = re.search(r'/.+/([^/]+.flac)', line)
if match:
errorsSet.add(match.group(1))
# print(match.group(1))
if (re.search(r'Encountered', line) or re.search(r'md5 did not match decoded data, file is corrupt.', line)):
nbErrorsDbPowerAmp += 1
print(nbErrorsFoobar, 'FLAC errors from foobar2000')
print(nbErrorsDbPowerAmp, 'FLAC errors from dbPowerAmp')
print(len(errorsSet))
for item in errorsSet:
print(item)
|
<commit_before>import re
errorFile = open('samples/foobar2000-errors.txt', 'r')
errors = errorFile.readlines()
index = -1
for i in range(len(errors)):
line = errors[i]
if re.search(r'List of undecodable items:', line):
index = i
nbErrorsFoobar = 0
for i in range(index, len(errors)):
line = errors[i]
match = re.search(r'"(.*.flac)"', line)
if match:
nbErrorsFoobar += 1
print(match.group(1))
errorFile.close()
errorFile = open('samples/dbpoweramp-errors.txt', 'r')
errors = errorFile.readlines()
index = -1
nbErrorsDbPowerAmp = 0
for i in range(len(errors)):
line = errors[i]
match = re.search(r'/.+/[^/]+.flac', line)
if match:
print(match.group())
if (re.search(r'Encountered', line) or re.search(r'md5 did not match decoded data, file is corrupt.', line)):
nbErrorsDbPowerAmp += 1
print(nbErrorsFoobar, 'FLAC errors from foobar2000')
print(nbErrorsDbPowerAmp, 'FLAC errors from dbPowerAmp')<commit_msg>Use a Set to avoid duplicates<commit_after>import re
errorFile = open('samples/foobar2000-errors.txt', 'r')
errors = errorFile.readlines()
errorsSet = set()
index = -1
for i in range(len(errors)):
line = errors[i]
if re.search(r'List of undecodable items:', line):
index = i
nbErrorsFoobar = 0
for i in range(index, len(errors)):
line = errors[i]
match = re.search(r'\\.+\\([^\\]+.flac)', line)
if match:
errorsSet.add(match.group(1))
nbErrorsFoobar += 1
# print(match.group(1))
errorFile.close()
errorFile = open('samples/dbpoweramp-errors.txt', 'r')
errors = errorFile.readlines()
index = -1
nbErrorsDbPowerAmp = 0
for i in range(len(errors)):
line = errors[i]
match = re.search(r'/.+/([^/]+.flac)', line)
if match:
errorsSet.add(match.group(1))
# print(match.group(1))
if (re.search(r'Encountered', line) or re.search(r'md5 did not match decoded data, file is corrupt.', line)):
nbErrorsDbPowerAmp += 1
print(nbErrorsFoobar, 'FLAC errors from foobar2000')
print(nbErrorsDbPowerAmp, 'FLAC errors from dbPowerAmp')
print(len(errorsSet))
for item in errorsSet:
print(item)
|
42c6d252084fa9336cf5c5d1766de29bc31bf082
|
dbaas/workflow/steps/util/resize/start_database.py
|
dbaas/workflow/steps/util/resize/start_database.py
|
# -*- coding: utf-8 -*-
import logging
from util import full_stack
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0022
from workflow.steps.util.restore_snapshot import use_database_initialization_script
LOG = logging.getLogger(__name__)
class StartDatabase(BaseStep):
def __unicode__(self):
return "Starting database..."
def do(self, workflow_dict):
try:
databaseinfra = workflow_dict['databaseinfra']
instance = workflow_dict['instance']
if databaseinfra.plan.is_ha:
driver = databaseinfra.get_driver()
driver.start_slave(instance=instance)
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0022)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Running undo...")
try:
databaseinfra = workflow_dict['databaseinfra']
host = workflow_dict['host']
return_code, output = use_database_initialization_script(databaseinfra=databaseinfra,
host=host,
option='stop')
if return_code != 0:
raise Exception(str(output))
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0022)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
|
# -*- coding: utf-8 -*-
import logging
from util import full_stack
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0022
from workflow.steps.util.restore_snapshot import use_database_initialization_script
from time import sleep
LOG = logging.getLogger(__name__)
class StartDatabase(BaseStep):
def __unicode__(self):
return "Starting database..."
def do(self, workflow_dict):
try:
databaseinfra = workflow_dict['databaseinfra']
instance = workflow_dict['instance']
if databaseinfra.plan.is_ha:
sleep(60)
driver = databaseinfra.get_driver()
driver.start_slave(instance=instance)
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0022)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Running undo...")
try:
databaseinfra = workflow_dict['databaseinfra']
host = workflow_dict['host']
return_code, output = use_database_initialization_script(databaseinfra=databaseinfra,
host=host,
option='stop')
if return_code != 0:
raise Exception(str(output))
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0022)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
|
Add sleep on start database
|
Add sleep on start database
|
Python
|
bsd-3-clause
|
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
|
# -*- coding: utf-8 -*-
import logging
from util import full_stack
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0022
from workflow.steps.util.restore_snapshot import use_database_initialization_script
LOG = logging.getLogger(__name__)
class StartDatabase(BaseStep):
def __unicode__(self):
return "Starting database..."
def do(self, workflow_dict):
try:
databaseinfra = workflow_dict['databaseinfra']
instance = workflow_dict['instance']
if databaseinfra.plan.is_ha:
driver = databaseinfra.get_driver()
driver.start_slave(instance=instance)
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0022)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Running undo...")
try:
databaseinfra = workflow_dict['databaseinfra']
host = workflow_dict['host']
return_code, output = use_database_initialization_script(databaseinfra=databaseinfra,
host=host,
option='stop')
if return_code != 0:
raise Exception(str(output))
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0022)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
Add sleep on start database
|
# -*- coding: utf-8 -*-
import logging
from util import full_stack
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0022
from workflow.steps.util.restore_snapshot import use_database_initialization_script
from time import sleep
LOG = logging.getLogger(__name__)
class StartDatabase(BaseStep):
def __unicode__(self):
return "Starting database..."
def do(self, workflow_dict):
try:
databaseinfra = workflow_dict['databaseinfra']
instance = workflow_dict['instance']
if databaseinfra.plan.is_ha:
sleep(60)
driver = databaseinfra.get_driver()
driver.start_slave(instance=instance)
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0022)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Running undo...")
try:
databaseinfra = workflow_dict['databaseinfra']
host = workflow_dict['host']
return_code, output = use_database_initialization_script(databaseinfra=databaseinfra,
host=host,
option='stop')
if return_code != 0:
raise Exception(str(output))
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0022)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
|
<commit_before># -*- coding: utf-8 -*-
import logging
from util import full_stack
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0022
from workflow.steps.util.restore_snapshot import use_database_initialization_script
LOG = logging.getLogger(__name__)
class StartDatabase(BaseStep):
def __unicode__(self):
return "Starting database..."
def do(self, workflow_dict):
try:
databaseinfra = workflow_dict['databaseinfra']
instance = workflow_dict['instance']
if databaseinfra.plan.is_ha:
driver = databaseinfra.get_driver()
driver.start_slave(instance=instance)
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0022)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Running undo...")
try:
databaseinfra = workflow_dict['databaseinfra']
host = workflow_dict['host']
return_code, output = use_database_initialization_script(databaseinfra=databaseinfra,
host=host,
option='stop')
if return_code != 0:
raise Exception(str(output))
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0022)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
<commit_msg>Add sleep on start database<commit_after>
|
# -*- coding: utf-8 -*-
import logging
from util import full_stack
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0022
from workflow.steps.util.restore_snapshot import use_database_initialization_script
from time import sleep
LOG = logging.getLogger(__name__)
class StartDatabase(BaseStep):
def __unicode__(self):
return "Starting database..."
def do(self, workflow_dict):
try:
databaseinfra = workflow_dict['databaseinfra']
instance = workflow_dict['instance']
if databaseinfra.plan.is_ha:
sleep(60)
driver = databaseinfra.get_driver()
driver.start_slave(instance=instance)
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0022)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Running undo...")
try:
databaseinfra = workflow_dict['databaseinfra']
host = workflow_dict['host']
return_code, output = use_database_initialization_script(databaseinfra=databaseinfra,
host=host,
option='stop')
if return_code != 0:
raise Exception(str(output))
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0022)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
|
# -*- coding: utf-8 -*-
import logging
from util import full_stack
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0022
from workflow.steps.util.restore_snapshot import use_database_initialization_script
LOG = logging.getLogger(__name__)
class StartDatabase(BaseStep):
def __unicode__(self):
return "Starting database..."
def do(self, workflow_dict):
try:
databaseinfra = workflow_dict['databaseinfra']
instance = workflow_dict['instance']
if databaseinfra.plan.is_ha:
driver = databaseinfra.get_driver()
driver.start_slave(instance=instance)
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0022)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Running undo...")
try:
databaseinfra = workflow_dict['databaseinfra']
host = workflow_dict['host']
return_code, output = use_database_initialization_script(databaseinfra=databaseinfra,
host=host,
option='stop')
if return_code != 0:
raise Exception(str(output))
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0022)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
Add sleep on start database# -*- coding: utf-8 -*-
import logging
from util import full_stack
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0022
from workflow.steps.util.restore_snapshot import use_database_initialization_script
from time import sleep
LOG = logging.getLogger(__name__)
class StartDatabase(BaseStep):
def __unicode__(self):
return "Starting database..."
def do(self, workflow_dict):
try:
databaseinfra = workflow_dict['databaseinfra']
instance = workflow_dict['instance']
if databaseinfra.plan.is_ha:
sleep(60)
driver = databaseinfra.get_driver()
driver.start_slave(instance=instance)
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0022)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Running undo...")
try:
databaseinfra = workflow_dict['databaseinfra']
host = workflow_dict['host']
return_code, output = use_database_initialization_script(databaseinfra=databaseinfra,
host=host,
option='stop')
if return_code != 0:
raise Exception(str(output))
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0022)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
|
<commit_before># -*- coding: utf-8 -*-
import logging
from util import full_stack
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0022
from workflow.steps.util.restore_snapshot import use_database_initialization_script
LOG = logging.getLogger(__name__)
class StartDatabase(BaseStep):
def __unicode__(self):
return "Starting database..."
def do(self, workflow_dict):
try:
databaseinfra = workflow_dict['databaseinfra']
instance = workflow_dict['instance']
if databaseinfra.plan.is_ha:
driver = databaseinfra.get_driver()
driver.start_slave(instance=instance)
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0022)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Running undo...")
try:
databaseinfra = workflow_dict['databaseinfra']
host = workflow_dict['host']
return_code, output = use_database_initialization_script(databaseinfra=databaseinfra,
host=host,
option='stop')
if return_code != 0:
raise Exception(str(output))
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0022)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
<commit_msg>Add sleep on start database<commit_after># -*- coding: utf-8 -*-
import logging
from util import full_stack
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0022
from workflow.steps.util.restore_snapshot import use_database_initialization_script
from time import sleep
LOG = logging.getLogger(__name__)
class StartDatabase(BaseStep):
def __unicode__(self):
return "Starting database..."
def do(self, workflow_dict):
try:
databaseinfra = workflow_dict['databaseinfra']
instance = workflow_dict['instance']
if databaseinfra.plan.is_ha:
sleep(60)
driver = databaseinfra.get_driver()
driver.start_slave(instance=instance)
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0022)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Running undo...")
try:
databaseinfra = workflow_dict['databaseinfra']
host = workflow_dict['host']
return_code, output = use_database_initialization_script(databaseinfra=databaseinfra,
host=host,
option='stop')
if return_code != 0:
raise Exception(str(output))
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0022)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
|
6e31eb9e1049d75ad4e7e1031c0dfa4d6617c48f
|
csaps/__init__.py
|
csaps/__init__.py
|
# -*- coding: utf-8 -*-
"""
Cubic spline approximation (smoothing)
"""
from csaps._version import __version__ # noqa
from csaps._base import (
SplinePPForm,
UnivariateCubicSmoothingSpline,
MultivariateCubicSmoothingSpline,
NdGridCubicSmoothingSpline,
)
from csaps._types import (
UnivariateDataType,
UnivariateVectorizedDataType,
MultivariateDataType,
NdGridDataType,
)
__all__ = [
'SplinePPForm',
'UnivariateCubicSmoothingSpline',
'MultivariateCubicSmoothingSpline',
'NdGridCubicSmoothingSpline',
# Type-hints
'UnivariateDataType',
'UnivariateVectorizedDataType',
'MultivariateDataType',
'NdGridDataType',
]
|
# -*- coding: utf-8 -*-
"""
Cubic spline approximation (smoothing)
"""
from csaps._version import __version__ # noqa
from csaps._base import (
SplinePPForm,
NdGridSplinePPForm,
UnivariateCubicSmoothingSpline,
MultivariateCubicSmoothingSpline,
NdGridCubicSmoothingSpline,
)
from csaps._types import (
UnivariateDataType,
UnivariateVectorizedDataType,
MultivariateDataType,
NdGridDataType,
)
__all__ = [
'SplinePPForm',
'NdGridSplinePPForm',
'UnivariateCubicSmoothingSpline',
'MultivariateCubicSmoothingSpline',
'NdGridCubicSmoothingSpline',
# Type-hints
'UnivariateDataType',
'UnivariateVectorizedDataType',
'MultivariateDataType',
'NdGridDataType',
]
|
Add NdGridSplinePPForm to csaps imports
|
Add NdGridSplinePPForm to csaps imports
|
Python
|
mit
|
espdev/csaps
|
# -*- coding: utf-8 -*-
"""
Cubic spline approximation (smoothing)
"""
from csaps._version import __version__ # noqa
from csaps._base import (
SplinePPForm,
UnivariateCubicSmoothingSpline,
MultivariateCubicSmoothingSpline,
NdGridCubicSmoothingSpline,
)
from csaps._types import (
UnivariateDataType,
UnivariateVectorizedDataType,
MultivariateDataType,
NdGridDataType,
)
__all__ = [
'SplinePPForm',
'UnivariateCubicSmoothingSpline',
'MultivariateCubicSmoothingSpline',
'NdGridCubicSmoothingSpline',
# Type-hints
'UnivariateDataType',
'UnivariateVectorizedDataType',
'MultivariateDataType',
'NdGridDataType',
]
Add NdGridSplinePPForm to csaps imports
|
# -*- coding: utf-8 -*-
"""
Cubic spline approximation (smoothing)
"""
from csaps._version import __version__ # noqa
from csaps._base import (
SplinePPForm,
NdGridSplinePPForm,
UnivariateCubicSmoothingSpline,
MultivariateCubicSmoothingSpline,
NdGridCubicSmoothingSpline,
)
from csaps._types import (
UnivariateDataType,
UnivariateVectorizedDataType,
MultivariateDataType,
NdGridDataType,
)
__all__ = [
'SplinePPForm',
'NdGridSplinePPForm',
'UnivariateCubicSmoothingSpline',
'MultivariateCubicSmoothingSpline',
'NdGridCubicSmoothingSpline',
# Type-hints
'UnivariateDataType',
'UnivariateVectorizedDataType',
'MultivariateDataType',
'NdGridDataType',
]
|
<commit_before># -*- coding: utf-8 -*-
"""
Cubic spline approximation (smoothing)
"""
from csaps._version import __version__ # noqa
from csaps._base import (
SplinePPForm,
UnivariateCubicSmoothingSpline,
MultivariateCubicSmoothingSpline,
NdGridCubicSmoothingSpline,
)
from csaps._types import (
UnivariateDataType,
UnivariateVectorizedDataType,
MultivariateDataType,
NdGridDataType,
)
__all__ = [
'SplinePPForm',
'UnivariateCubicSmoothingSpline',
'MultivariateCubicSmoothingSpline',
'NdGridCubicSmoothingSpline',
# Type-hints
'UnivariateDataType',
'UnivariateVectorizedDataType',
'MultivariateDataType',
'NdGridDataType',
]
<commit_msg>Add NdGridSplinePPForm to csaps imports<commit_after>
|
# -*- coding: utf-8 -*-
"""
Cubic spline approximation (smoothing)
"""
from csaps._version import __version__ # noqa
from csaps._base import (
SplinePPForm,
NdGridSplinePPForm,
UnivariateCubicSmoothingSpline,
MultivariateCubicSmoothingSpline,
NdGridCubicSmoothingSpline,
)
from csaps._types import (
UnivariateDataType,
UnivariateVectorizedDataType,
MultivariateDataType,
NdGridDataType,
)
__all__ = [
'SplinePPForm',
'NdGridSplinePPForm',
'UnivariateCubicSmoothingSpline',
'MultivariateCubicSmoothingSpline',
'NdGridCubicSmoothingSpline',
# Type-hints
'UnivariateDataType',
'UnivariateVectorizedDataType',
'MultivariateDataType',
'NdGridDataType',
]
|
# -*- coding: utf-8 -*-
"""
Cubic spline approximation (smoothing)
"""
from csaps._version import __version__ # noqa
from csaps._base import (
SplinePPForm,
UnivariateCubicSmoothingSpline,
MultivariateCubicSmoothingSpline,
NdGridCubicSmoothingSpline,
)
from csaps._types import (
UnivariateDataType,
UnivariateVectorizedDataType,
MultivariateDataType,
NdGridDataType,
)
__all__ = [
'SplinePPForm',
'UnivariateCubicSmoothingSpline',
'MultivariateCubicSmoothingSpline',
'NdGridCubicSmoothingSpline',
# Type-hints
'UnivariateDataType',
'UnivariateVectorizedDataType',
'MultivariateDataType',
'NdGridDataType',
]
Add NdGridSplinePPForm to csaps imports# -*- coding: utf-8 -*-
"""
Cubic spline approximation (smoothing)
"""
from csaps._version import __version__ # noqa
from csaps._base import (
SplinePPForm,
NdGridSplinePPForm,
UnivariateCubicSmoothingSpline,
MultivariateCubicSmoothingSpline,
NdGridCubicSmoothingSpline,
)
from csaps._types import (
UnivariateDataType,
UnivariateVectorizedDataType,
MultivariateDataType,
NdGridDataType,
)
__all__ = [
'SplinePPForm',
'NdGridSplinePPForm',
'UnivariateCubicSmoothingSpline',
'MultivariateCubicSmoothingSpline',
'NdGridCubicSmoothingSpline',
# Type-hints
'UnivariateDataType',
'UnivariateVectorizedDataType',
'MultivariateDataType',
'NdGridDataType',
]
|
<commit_before># -*- coding: utf-8 -*-
"""
Cubic spline approximation (smoothing)
"""
from csaps._version import __version__ # noqa
from csaps._base import (
SplinePPForm,
UnivariateCubicSmoothingSpline,
MultivariateCubicSmoothingSpline,
NdGridCubicSmoothingSpline,
)
from csaps._types import (
UnivariateDataType,
UnivariateVectorizedDataType,
MultivariateDataType,
NdGridDataType,
)
__all__ = [
'SplinePPForm',
'UnivariateCubicSmoothingSpline',
'MultivariateCubicSmoothingSpline',
'NdGridCubicSmoothingSpline',
# Type-hints
'UnivariateDataType',
'UnivariateVectorizedDataType',
'MultivariateDataType',
'NdGridDataType',
]
<commit_msg>Add NdGridSplinePPForm to csaps imports<commit_after># -*- coding: utf-8 -*-
"""
Cubic spline approximation (smoothing)
"""
from csaps._version import __version__ # noqa
from csaps._base import (
SplinePPForm,
NdGridSplinePPForm,
UnivariateCubicSmoothingSpline,
MultivariateCubicSmoothingSpline,
NdGridCubicSmoothingSpline,
)
from csaps._types import (
UnivariateDataType,
UnivariateVectorizedDataType,
MultivariateDataType,
NdGridDataType,
)
__all__ = [
'SplinePPForm',
'NdGridSplinePPForm',
'UnivariateCubicSmoothingSpline',
'MultivariateCubicSmoothingSpline',
'NdGridCubicSmoothingSpline',
# Type-hints
'UnivariateDataType',
'UnivariateVectorizedDataType',
'MultivariateDataType',
'NdGridDataType',
]
|
6aa8f148b3b3975363d5d4a763f5abb45ea6cbd8
|
databin/parsers/__init__.py
|
databin/parsers/__init__.py
|
from databin.parsers.util import ParseException
from databin.parsers.simple import parse_csv, parse_tsv
from databin.parsers.psql import parse_psql
PARSERS = [
('Comma-Separated Values', 'csv', parse_csv),
('Tab-Separated Values', 'tsv', parse_tsv),
('Excel copy & paste', 'excel', parse_tsv),
('psql Shell', 'psql', parse_psql),
]
def parse(format, data):
for name, key, func in PARSERS:
if key == format:
return func(data)
raise ParseException()
def get_parsers():
for name, key, func in PARSERS:
yield (key, name)
|
from databin.parsers.util import ParseException
from databin.parsers.simple import parse_csv, parse_tsv
from databin.parsers.psql import parse_psql
PARSERS = [
('Excel copy & paste', 'excel', parse_tsv),
('Comma-Separated Values', 'csv', parse_csv),
('Tab-Separated Values', 'tsv', parse_tsv),
('psql Shell', 'psql', parse_psql),
]
def parse(format, data):
for name, key, func in PARSERS:
if key == format:
return func(data)
raise ParseException()
def get_parsers():
for name, key, func in PARSERS:
yield (key, name)
|
Make excel format the default
|
Make excel format the default
|
Python
|
mit
|
LeTristanB/Pastable,pudo/databin,LeTristanB/Pastable
|
from databin.parsers.util import ParseException
from databin.parsers.simple import parse_csv, parse_tsv
from databin.parsers.psql import parse_psql
PARSERS = [
('Comma-Separated Values', 'csv', parse_csv),
('Tab-Separated Values', 'tsv', parse_tsv),
('Excel copy & paste', 'excel', parse_tsv),
('psql Shell', 'psql', parse_psql),
]
def parse(format, data):
for name, key, func in PARSERS:
if key == format:
return func(data)
raise ParseException()
def get_parsers():
for name, key, func in PARSERS:
yield (key, name)
Make excel format the default
|
from databin.parsers.util import ParseException
from databin.parsers.simple import parse_csv, parse_tsv
from databin.parsers.psql import parse_psql
PARSERS = [
('Excel copy & paste', 'excel', parse_tsv),
('Comma-Separated Values', 'csv', parse_csv),
('Tab-Separated Values', 'tsv', parse_tsv),
('psql Shell', 'psql', parse_psql),
]
def parse(format, data):
for name, key, func in PARSERS:
if key == format:
return func(data)
raise ParseException()
def get_parsers():
for name, key, func in PARSERS:
yield (key, name)
|
<commit_before>from databin.parsers.util import ParseException
from databin.parsers.simple import parse_csv, parse_tsv
from databin.parsers.psql import parse_psql
PARSERS = [
('Comma-Separated Values', 'csv', parse_csv),
('Tab-Separated Values', 'tsv', parse_tsv),
('Excel copy & paste', 'excel', parse_tsv),
('psql Shell', 'psql', parse_psql),
]
def parse(format, data):
for name, key, func in PARSERS:
if key == format:
return func(data)
raise ParseException()
def get_parsers():
for name, key, func in PARSERS:
yield (key, name)
<commit_msg>Make excel format the default<commit_after>
|
from databin.parsers.util import ParseException
from databin.parsers.simple import parse_csv, parse_tsv
from databin.parsers.psql import parse_psql
PARSERS = [
('Excel copy & paste', 'excel', parse_tsv),
('Comma-Separated Values', 'csv', parse_csv),
('Tab-Separated Values', 'tsv', parse_tsv),
('psql Shell', 'psql', parse_psql),
]
def parse(format, data):
for name, key, func in PARSERS:
if key == format:
return func(data)
raise ParseException()
def get_parsers():
for name, key, func in PARSERS:
yield (key, name)
|
from databin.parsers.util import ParseException
from databin.parsers.simple import parse_csv, parse_tsv
from databin.parsers.psql import parse_psql
PARSERS = [
('Comma-Separated Values', 'csv', parse_csv),
('Tab-Separated Values', 'tsv', parse_tsv),
('Excel copy & paste', 'excel', parse_tsv),
('psql Shell', 'psql', parse_psql),
]
def parse(format, data):
for name, key, func in PARSERS:
if key == format:
return func(data)
raise ParseException()
def get_parsers():
for name, key, func in PARSERS:
yield (key, name)
Make excel format the defaultfrom databin.parsers.util import ParseException
from databin.parsers.simple import parse_csv, parse_tsv
from databin.parsers.psql import parse_psql
PARSERS = [
('Excel copy & paste', 'excel', parse_tsv),
('Comma-Separated Values', 'csv', parse_csv),
('Tab-Separated Values', 'tsv', parse_tsv),
('psql Shell', 'psql', parse_psql),
]
def parse(format, data):
for name, key, func in PARSERS:
if key == format:
return func(data)
raise ParseException()
def get_parsers():
for name, key, func in PARSERS:
yield (key, name)
|
<commit_before>from databin.parsers.util import ParseException
from databin.parsers.simple import parse_csv, parse_tsv
from databin.parsers.psql import parse_psql
PARSERS = [
('Comma-Separated Values', 'csv', parse_csv),
('Tab-Separated Values', 'tsv', parse_tsv),
('Excel copy & paste', 'excel', parse_tsv),
('psql Shell', 'psql', parse_psql),
]
def parse(format, data):
for name, key, func in PARSERS:
if key == format:
return func(data)
raise ParseException()
def get_parsers():
for name, key, func in PARSERS:
yield (key, name)
<commit_msg>Make excel format the default<commit_after>from databin.parsers.util import ParseException
from databin.parsers.simple import parse_csv, parse_tsv
from databin.parsers.psql import parse_psql
PARSERS = [
('Excel copy & paste', 'excel', parse_tsv),
('Comma-Separated Values', 'csv', parse_csv),
('Tab-Separated Values', 'tsv', parse_tsv),
('psql Shell', 'psql', parse_psql),
]
def parse(format, data):
for name, key, func in PARSERS:
if key == format:
return func(data)
raise ParseException()
def get_parsers():
for name, key, func in PARSERS:
yield (key, name)
|
301f62a80140c319735d37fdab80b66712722de0
|
h2o-bindings/bin/custom/R/gen_isolationforest.py
|
h2o-bindings/bin/custom/R/gen_isolationforest.py
|
def update_param(name, param):
if name == 'stopping_metric':
param['values'] = ['AUTO', 'anomaly_score']
return param
return None # param untouched
extensions = dict(
required_params=['training_frame', 'x'],
validate_required_params="",
set_required_params="""
parms$training_frame <- training_frame
if(!missing(x))
parms$ignored_columns <- .verify_datacols(training_frame, x)$cols_ignore
""",
)
doc = dict(
preamble="""
Trains an Isolation Forest model
""",
params=dict(
x="""A vector containing the \code{character} names of the predictors in the model."""
),
examples="""
library(h2o)
h2o.init()
# Import the cars dataset
f <- "https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv"
cars <- h2o.importFile(f)
# Set the predictors
predictors <- c("displacement", "power", "weight", "acceleration", "year")
# Train the IF model
cars_if <- h2o.isolationForest(x = predictors, training_frame = cars,
seed = 1234, stopping_metric = "anomaly_score",
stopping_rounds = 3, stopping_tolerance = 0.1)
"""
)
|
def update_param(name, param):
if name == 'validation_response_column':
param['name'] = None
return param
if name == 'stopping_metric':
param['values'] = ['AUTO', 'anomaly_score']
return param
return None # param untouched
extensions = dict(
required_params=['training_frame', 'x'],
validate_required_params="",
set_required_params="""
parms$training_frame <- training_frame
if(!missing(x))
parms$ignored_columns <- .verify_datacols(training_frame, x)$cols_ignore
""",
skip_default_set_params_for=['validation_response_column', 'training_frame', 'ignored_columns'],
)
doc = dict(
preamble="""
Trains an Isolation Forest model
""",
params=dict(
x="""A vector containing the \code{character} names of the predictors in the model."""
),
examples="""
library(h2o)
h2o.init()
# Import the cars dataset
f <- "https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv"
cars <- h2o.importFile(f)
# Set the predictors
predictors <- c("displacement", "power", "weight", "acceleration", "year")
# Train the IF model
cars_if <- h2o.isolationForest(x = predictors, training_frame = cars,
seed = 1234, stopping_metric = "anomaly_score",
stopping_rounds = 3, stopping_tolerance = 0.1)
"""
)
|
Disable validation_response_column in R (only Python supported at first)
|
Disable validation_response_column in R (only Python supported at first)
|
Python
|
apache-2.0
|
michalkurka/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3
|
def update_param(name, param):
if name == 'stopping_metric':
param['values'] = ['AUTO', 'anomaly_score']
return param
return None # param untouched
extensions = dict(
required_params=['training_frame', 'x'],
validate_required_params="",
set_required_params="""
parms$training_frame <- training_frame
if(!missing(x))
parms$ignored_columns <- .verify_datacols(training_frame, x)$cols_ignore
""",
)
doc = dict(
preamble="""
Trains an Isolation Forest model
""",
params=dict(
x="""A vector containing the \code{character} names of the predictors in the model."""
),
examples="""
library(h2o)
h2o.init()
# Import the cars dataset
f <- "https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv"
cars <- h2o.importFile(f)
# Set the predictors
predictors <- c("displacement", "power", "weight", "acceleration", "year")
# Train the IF model
cars_if <- h2o.isolationForest(x = predictors, training_frame = cars,
seed = 1234, stopping_metric = "anomaly_score",
stopping_rounds = 3, stopping_tolerance = 0.1)
"""
)
Disable validation_response_column in R (only Python supported at first)
|
def update_param(name, param):
if name == 'validation_response_column':
param['name'] = None
return param
if name == 'stopping_metric':
param['values'] = ['AUTO', 'anomaly_score']
return param
return None # param untouched
extensions = dict(
required_params=['training_frame', 'x'],
validate_required_params="",
set_required_params="""
parms$training_frame <- training_frame
if(!missing(x))
parms$ignored_columns <- .verify_datacols(training_frame, x)$cols_ignore
""",
skip_default_set_params_for=['validation_response_column', 'training_frame', 'ignored_columns'],
)
doc = dict(
preamble="""
Trains an Isolation Forest model
""",
params=dict(
x="""A vector containing the \code{character} names of the predictors in the model."""
),
examples="""
library(h2o)
h2o.init()
# Import the cars dataset
f <- "https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv"
cars <- h2o.importFile(f)
# Set the predictors
predictors <- c("displacement", "power", "weight", "acceleration", "year")
# Train the IF model
cars_if <- h2o.isolationForest(x = predictors, training_frame = cars,
seed = 1234, stopping_metric = "anomaly_score",
stopping_rounds = 3, stopping_tolerance = 0.1)
"""
)
|
<commit_before>def update_param(name, param):
if name == 'stopping_metric':
param['values'] = ['AUTO', 'anomaly_score']
return param
return None # param untouched
extensions = dict(
required_params=['training_frame', 'x'],
validate_required_params="",
set_required_params="""
parms$training_frame <- training_frame
if(!missing(x))
parms$ignored_columns <- .verify_datacols(training_frame, x)$cols_ignore
""",
)
doc = dict(
preamble="""
Trains an Isolation Forest model
""",
params=dict(
x="""A vector containing the \code{character} names of the predictors in the model."""
),
examples="""
library(h2o)
h2o.init()
# Import the cars dataset
f <- "https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv"
cars <- h2o.importFile(f)
# Set the predictors
predictors <- c("displacement", "power", "weight", "acceleration", "year")
# Train the IF model
cars_if <- h2o.isolationForest(x = predictors, training_frame = cars,
seed = 1234, stopping_metric = "anomaly_score",
stopping_rounds = 3, stopping_tolerance = 0.1)
"""
)
<commit_msg>Disable validation_response_column in R (only Python supported at first)<commit_after>
|
def update_param(name, param):
if name == 'validation_response_column':
param['name'] = None
return param
if name == 'stopping_metric':
param['values'] = ['AUTO', 'anomaly_score']
return param
return None # param untouched
extensions = dict(
required_params=['training_frame', 'x'],
validate_required_params="",
set_required_params="""
parms$training_frame <- training_frame
if(!missing(x))
parms$ignored_columns <- .verify_datacols(training_frame, x)$cols_ignore
""",
skip_default_set_params_for=['validation_response_column', 'training_frame', 'ignored_columns'],
)
doc = dict(
preamble="""
Trains an Isolation Forest model
""",
params=dict(
x="""A vector containing the \code{character} names of the predictors in the model."""
),
examples="""
library(h2o)
h2o.init()
# Import the cars dataset
f <- "https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv"
cars <- h2o.importFile(f)
# Set the predictors
predictors <- c("displacement", "power", "weight", "acceleration", "year")
# Train the IF model
cars_if <- h2o.isolationForest(x = predictors, training_frame = cars,
seed = 1234, stopping_metric = "anomaly_score",
stopping_rounds = 3, stopping_tolerance = 0.1)
"""
)
|
def update_param(name, param):
if name == 'stopping_metric':
param['values'] = ['AUTO', 'anomaly_score']
return param
return None # param untouched
extensions = dict(
required_params=['training_frame', 'x'],
validate_required_params="",
set_required_params="""
parms$training_frame <- training_frame
if(!missing(x))
parms$ignored_columns <- .verify_datacols(training_frame, x)$cols_ignore
""",
)
doc = dict(
preamble="""
Trains an Isolation Forest model
""",
params=dict(
x="""A vector containing the \code{character} names of the predictors in the model."""
),
examples="""
library(h2o)
h2o.init()
# Import the cars dataset
f <- "https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv"
cars <- h2o.importFile(f)
# Set the predictors
predictors <- c("displacement", "power", "weight", "acceleration", "year")
# Train the IF model
cars_if <- h2o.isolationForest(x = predictors, training_frame = cars,
seed = 1234, stopping_metric = "anomaly_score",
stopping_rounds = 3, stopping_tolerance = 0.1)
"""
)
Disable validation_response_column in R (only Python supported at first)def update_param(name, param):
if name == 'validation_response_column':
param['name'] = None
return param
if name == 'stopping_metric':
param['values'] = ['AUTO', 'anomaly_score']
return param
return None # param untouched
extensions = dict(
required_params=['training_frame', 'x'],
validate_required_params="",
set_required_params="""
parms$training_frame <- training_frame
if(!missing(x))
parms$ignored_columns <- .verify_datacols(training_frame, x)$cols_ignore
""",
skip_default_set_params_for=['validation_response_column', 'training_frame', 'ignored_columns'],
)
doc = dict(
preamble="""
Trains an Isolation Forest model
""",
params=dict(
x="""A vector containing the \code{character} names of the predictors in the model."""
),
examples="""
library(h2o)
h2o.init()
# Import the cars dataset
f <- "https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv"
cars <- h2o.importFile(f)
# Set the predictors
predictors <- c("displacement", "power", "weight", "acceleration", "year")
# Train the IF model
cars_if <- h2o.isolationForest(x = predictors, training_frame = cars,
seed = 1234, stopping_metric = "anomaly_score",
stopping_rounds = 3, stopping_tolerance = 0.1)
"""
)
|
<commit_before>def update_param(name, param):
if name == 'stopping_metric':
param['values'] = ['AUTO', 'anomaly_score']
return param
return None # param untouched
extensions = dict(
required_params=['training_frame', 'x'],
validate_required_params="",
set_required_params="""
parms$training_frame <- training_frame
if(!missing(x))
parms$ignored_columns <- .verify_datacols(training_frame, x)$cols_ignore
""",
)
doc = dict(
preamble="""
Trains an Isolation Forest model
""",
params=dict(
x="""A vector containing the \code{character} names of the predictors in the model."""
),
examples="""
library(h2o)
h2o.init()
# Import the cars dataset
f <- "https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv"
cars <- h2o.importFile(f)
# Set the predictors
predictors <- c("displacement", "power", "weight", "acceleration", "year")
# Train the IF model
cars_if <- h2o.isolationForest(x = predictors, training_frame = cars,
seed = 1234, stopping_metric = "anomaly_score",
stopping_rounds = 3, stopping_tolerance = 0.1)
"""
)
<commit_msg>Disable validation_response_column in R (only Python supported at first)<commit_after>def update_param(name, param):
if name == 'validation_response_column':
param['name'] = None
return param
if name == 'stopping_metric':
param['values'] = ['AUTO', 'anomaly_score']
return param
return None # param untouched
extensions = dict(
required_params=['training_frame', 'x'],
validate_required_params="",
set_required_params="""
parms$training_frame <- training_frame
if(!missing(x))
parms$ignored_columns <- .verify_datacols(training_frame, x)$cols_ignore
""",
skip_default_set_params_for=['validation_response_column', 'training_frame', 'ignored_columns'],
)
doc = dict(
preamble="""
Trains an Isolation Forest model
""",
params=dict(
x="""A vector containing the \code{character} names of the predictors in the model."""
),
examples="""
library(h2o)
h2o.init()
# Import the cars dataset
f <- "https://s3.amazonaws.com/h2o-public-test-data/smalldata/junit/cars_20mpg.csv"
cars <- h2o.importFile(f)
# Set the predictors
predictors <- c("displacement", "power", "weight", "acceleration", "year")
# Train the IF model
cars_if <- h2o.isolationForest(x = predictors, training_frame = cars,
seed = 1234, stopping_metric = "anomaly_score",
stopping_rounds = 3, stopping_tolerance = 0.1)
"""
)
|
ed0fcadbcfe3316bd5e997a36155b1847504685a
|
dcsh/main.py
|
dcsh/main.py
|
from __future__ import absolute_import, print_function
import os
import sys
import time
import requests
def main():
script = open(sys.argv[1], 'rb').read()
resp = requests.post(
url="http://localhost:5000/run",
data=script,
headers={'Content-Type': 'application/octet-stream'})
_id = resp.text
info = {}
running = False
while not running:
time.sleep(1)
resp = requests.get(
url="http://localhost:5000/job/{0}".format(_id))
info = resp.json()
running = info["running"]
cmd = [
'nc',
info["ip"],
str(info["port"])
]
os.execvp("nc", cmd)
|
from __future__ import absolute_import, print_function
import argparse
import json
import os
import subprocess
import sys
import time
import requests
DISPATCHER = "localhost:5000"
def parse_args():
"""
Parse arguments provided at the command line
returns an ordered pair: (script, public_key) where script is a string with
the contents of the script file to be executed and public_key is a string
with the contents of the public key file to be used for authentication
"""
parser = argparse.ArgumentParser(description=main.__doc__)
parser.add_argument('script', help="Path of the script file to be executed")
parser.add_argument('--key-file', required=False,
help=("Path to the public key file dcsh should use to "
"identify itself -- omitting this parameter means "
"dcsh will extract the required identity from the "
"running ssh agent"))
args = parser.parse_args()
# TODO positional arguments should be collected and passed to the dispatcher
with open(sys.argv[1], 'rb') as f:
script = f.read()
if args.key_file:
public_key = open(args.key_file, 'rb').read()
else:
public_key = subprocess.check_output(["ssh-add", "-L"])
return script, public_key
def main():
"""
Run a shell script on a datacenter node
"""
script, public_key = parse_args()
resp = requests.post(
url="http://%s/run" % DISPATCHER,
data=json.dumps([script, public_key]),
headers={'Content-Type': 'application/octet-stream'})
_id = resp.text
info = {}
running = False
while not running:
time.sleep(1)
resp = requests.get(
url=("http://%s/job/{0}" % DISPATCHER).format(_id))
info = resp.json()
running = info["running"]
cmd = [
'ssh',
"%s@%s" % ("root", info["ip"]),
"-p",
str(info["port"]),
"-o", "UserKnownHostsFile=/dev/null",
"-o", "StrictHostKeyChecking=no",
"-q",
]
os.execvp("ssh", cmd)
|
Encrypt client connection to DC node using SSH
|
Encrypt client connection to DC node using SSH
|
Python
|
apache-2.0
|
mesosphere/dcsh
|
from __future__ import absolute_import, print_function
import os
import sys
import time
import requests
def main():
script = open(sys.argv[1], 'rb').read()
resp = requests.post(
url="http://localhost:5000/run",
data=script,
headers={'Content-Type': 'application/octet-stream'})
_id = resp.text
info = {}
running = False
while not running:
time.sleep(1)
resp = requests.get(
url="http://localhost:5000/job/{0}".format(_id))
info = resp.json()
running = info["running"]
cmd = [
'nc',
info["ip"],
str(info["port"])
]
os.execvp("nc", cmd)
Encrypt client connection to DC node using SSH
|
from __future__ import absolute_import, print_function
import argparse
import json
import os
import subprocess
import sys
import time
import requests
DISPATCHER = "localhost:5000"
def parse_args():
"""
Parse arguments provided at the command line
returns an ordered pair: (script, public_key) where script is a string with
the contents of the script file to be executed and public_key is a string
with the contents of the public key file to be used for authentication
"""
parser = argparse.ArgumentParser(description=main.__doc__)
parser.add_argument('script', help="Path of the script file to be executed")
parser.add_argument('--key-file', required=False,
help=("Path to the public key file dcsh should use to "
"identify itself -- omitting this parameter means "
"dcsh will extract the required identity from the "
"running ssh agent"))
args = parser.parse_args()
# TODO positional arguments should be collected and passed to the dispatcher
with open(sys.argv[1], 'rb') as f:
script = f.read()
if args.key_file:
public_key = open(args.key_file, 'rb').read()
else:
public_key = subprocess.check_output(["ssh-add", "-L"])
return script, public_key
def main():
"""
Run a shell script on a datacenter node
"""
script, public_key = parse_args()
resp = requests.post(
url="http://%s/run" % DISPATCHER,
data=json.dumps([script, public_key]),
headers={'Content-Type': 'application/octet-stream'})
_id = resp.text
info = {}
running = False
while not running:
time.sleep(1)
resp = requests.get(
url=("http://%s/job/{0}" % DISPATCHER).format(_id))
info = resp.json()
running = info["running"]
cmd = [
'ssh',
"%s@%s" % ("root", info["ip"]),
"-p",
str(info["port"]),
"-o", "UserKnownHostsFile=/dev/null",
"-o", "StrictHostKeyChecking=no",
"-q",
]
os.execvp("ssh", cmd)
|
<commit_before>
from __future__ import absolute_import, print_function
import os
import sys
import time
import requests
def main():
script = open(sys.argv[1], 'rb').read()
resp = requests.post(
url="http://localhost:5000/run",
data=script,
headers={'Content-Type': 'application/octet-stream'})
_id = resp.text
info = {}
running = False
while not running:
time.sleep(1)
resp = requests.get(
url="http://localhost:5000/job/{0}".format(_id))
info = resp.json()
running = info["running"]
cmd = [
'nc',
info["ip"],
str(info["port"])
]
os.execvp("nc", cmd)
<commit_msg>Encrypt client connection to DC node using SSH<commit_after>
|
from __future__ import absolute_import, print_function
import argparse
import json
import os
import subprocess
import sys
import time
import requests
DISPATCHER = "localhost:5000"
def parse_args():
"""
Parse arguments provided at the command line
returns an ordered pair: (script, public_key) where script is a string with
the contents of the script file to be executed and public_key is a string
with the contents of the public key file to be used for authentication
"""
parser = argparse.ArgumentParser(description=main.__doc__)
parser.add_argument('script', help="Path of the script file to be executed")
parser.add_argument('--key-file', required=False,
help=("Path to the public key file dcsh should use to "
"identify itself -- omitting this parameter means "
"dcsh will extract the required identity from the "
"running ssh agent"))
args = parser.parse_args()
# TODO positional arguments should be collected and passed to the dispatcher
with open(sys.argv[1], 'rb') as f:
script = f.read()
if args.key_file:
public_key = open(args.key_file, 'rb').read()
else:
public_key = subprocess.check_output(["ssh-add", "-L"])
return script, public_key
def main():
"""
Run a shell script on a datacenter node
"""
script, public_key = parse_args()
resp = requests.post(
url="http://%s/run" % DISPATCHER,
data=json.dumps([script, public_key]),
headers={'Content-Type': 'application/octet-stream'})
_id = resp.text
info = {}
running = False
while not running:
time.sleep(1)
resp = requests.get(
url=("http://%s/job/{0}" % DISPATCHER).format(_id))
info = resp.json()
running = info["running"]
cmd = [
'ssh',
"%s@%s" % ("root", info["ip"]),
"-p",
str(info["port"]),
"-o", "UserKnownHostsFile=/dev/null",
"-o", "StrictHostKeyChecking=no",
"-q",
]
os.execvp("ssh", cmd)
|
from __future__ import absolute_import, print_function
import os
import sys
import time
import requests
def main():
script = open(sys.argv[1], 'rb').read()
resp = requests.post(
url="http://localhost:5000/run",
data=script,
headers={'Content-Type': 'application/octet-stream'})
_id = resp.text
info = {}
running = False
while not running:
time.sleep(1)
resp = requests.get(
url="http://localhost:5000/job/{0}".format(_id))
info = resp.json()
running = info["running"]
cmd = [
'nc',
info["ip"],
str(info["port"])
]
os.execvp("nc", cmd)
Encrypt client connection to DC node using SSH
from __future__ import absolute_import, print_function
import argparse
import json
import os
import subprocess
import sys
import time
import requests
DISPATCHER = "localhost:5000"
def parse_args():
"""
Parse arguments provided at the command line
returns an ordered pair: (script, public_key) where script is a string with
the contents of the script file to be executed and public_key is a string
with the contents of the public key file to be used for authentication
"""
parser = argparse.ArgumentParser(description=main.__doc__)
parser.add_argument('script', help="Path of the script file to be executed")
parser.add_argument('--key-file', required=False,
help=("Path to the public key file dcsh should use to "
"identify itself -- omitting this parameter means "
"dcsh will extract the required identity from the "
"running ssh agent"))
args = parser.parse_args()
# TODO positional arguments should be collected and passed to the dispatcher
with open(sys.argv[1], 'rb') as f:
script = f.read()
if args.key_file:
public_key = open(args.key_file, 'rb').read()
else:
public_key = subprocess.check_output(["ssh-add", "-L"])
return script, public_key
def main():
"""
Run a shell script on a datacenter node
"""
script, public_key = parse_args()
resp = requests.post(
url="http://%s/run" % DISPATCHER,
data=json.dumps([script, public_key]),
headers={'Content-Type': 'application/octet-stream'})
_id = resp.text
info = {}
running = False
while not running:
time.sleep(1)
resp = requests.get(
url=("http://%s/job/{0}" % DISPATCHER).format(_id))
info = resp.json()
running = info["running"]
cmd = [
'ssh',
"%s@%s" % ("root", info["ip"]),
"-p",
str(info["port"]),
"-o", "UserKnownHostsFile=/dev/null",
"-o", "StrictHostKeyChecking=no",
"-q",
]
os.execvp("ssh", cmd)
|
<commit_before>
from __future__ import absolute_import, print_function
import os
import sys
import time
import requests
def main():
script = open(sys.argv[1], 'rb').read()
resp = requests.post(
url="http://localhost:5000/run",
data=script,
headers={'Content-Type': 'application/octet-stream'})
_id = resp.text
info = {}
running = False
while not running:
time.sleep(1)
resp = requests.get(
url="http://localhost:5000/job/{0}".format(_id))
info = resp.json()
running = info["running"]
cmd = [
'nc',
info["ip"],
str(info["port"])
]
os.execvp("nc", cmd)
<commit_msg>Encrypt client connection to DC node using SSH<commit_after>
from __future__ import absolute_import, print_function
import argparse
import json
import os
import subprocess
import sys
import time
import requests
DISPATCHER = "localhost:5000"
def parse_args():
"""
Parse arguments provided at the command line
returns an ordered pair: (script, public_key) where script is a string with
the contents of the script file to be executed and public_key is a string
with the contents of the public key file to be used for authentication
"""
parser = argparse.ArgumentParser(description=main.__doc__)
parser.add_argument('script', help="Path of the script file to be executed")
parser.add_argument('--key-file', required=False,
help=("Path to the public key file dcsh should use to "
"identify itself -- omitting this parameter means "
"dcsh will extract the required identity from the "
"running ssh agent"))
args = parser.parse_args()
# TODO positional arguments should be collected and passed to the dispatcher
with open(sys.argv[1], 'rb') as f:
script = f.read()
if args.key_file:
public_key = open(args.key_file, 'rb').read()
else:
public_key = subprocess.check_output(["ssh-add", "-L"])
return script, public_key
def main():
"""
Run a shell script on a datacenter node
"""
script, public_key = parse_args()
resp = requests.post(
url="http://%s/run" % DISPATCHER,
data=json.dumps([script, public_key]),
headers={'Content-Type': 'application/octet-stream'})
_id = resp.text
info = {}
running = False
while not running:
time.sleep(1)
resp = requests.get(
url=("http://%s/job/{0}" % DISPATCHER).format(_id))
info = resp.json()
running = info["running"]
cmd = [
'ssh',
"%s@%s" % ("root", info["ip"]),
"-p",
str(info["port"]),
"-o", "UserKnownHostsFile=/dev/null",
"-o", "StrictHostKeyChecking=no",
"-q",
]
os.execvp("ssh", cmd)
|
5ae12aa12cef04704ff90071acf098fdfdc7a91a
|
utils/tests/test_pipeline.py
|
utils/tests/test_pipeline.py
|
from io import StringIO
from django.core.management import call_command
from django.test import TestCase
class PipelineTestCase(TestCase):
def test_success(self):
call_command('collectstatic', '--noinput', stdout=StringIO())
call_command('clean_staticfilesjson', stdout=StringIO())
|
import os
from io import StringIO
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import CommandError
from django.test import TestCase
class PipelineTestCase(TestCase):
def setUp(self):
file_path = os.path.join(settings.STATIC_ROOT, 'staticfiles.json')
if os.path.isfile(file_path):
os.remove(file_path)
def test_success(self):
call_command('collectstatic', '--noinput', stdout=StringIO())
call_command('clean_staticfilesjson', stdout=StringIO())
def test_missing_staticfilesjson(self):
with self.assertRaises(CommandError):
call_command('clean_staticfilesjson', stdout=StringIO())
|
Add test for missing staticfiles.json
|
Add test for missing staticfiles.json
|
Python
|
mit
|
bulv1ne/django-utils,bulv1ne/django-utils
|
from io import StringIO
from django.core.management import call_command
from django.test import TestCase
class PipelineTestCase(TestCase):
def test_success(self):
call_command('collectstatic', '--noinput', stdout=StringIO())
call_command('clean_staticfilesjson', stdout=StringIO())
Add test for missing staticfiles.json
|
import os
from io import StringIO
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import CommandError
from django.test import TestCase
class PipelineTestCase(TestCase):
def setUp(self):
file_path = os.path.join(settings.STATIC_ROOT, 'staticfiles.json')
if os.path.isfile(file_path):
os.remove(file_path)
def test_success(self):
call_command('collectstatic', '--noinput', stdout=StringIO())
call_command('clean_staticfilesjson', stdout=StringIO())
def test_missing_staticfilesjson(self):
with self.assertRaises(CommandError):
call_command('clean_staticfilesjson', stdout=StringIO())
|
<commit_before>from io import StringIO
from django.core.management import call_command
from django.test import TestCase
class PipelineTestCase(TestCase):
def test_success(self):
call_command('collectstatic', '--noinput', stdout=StringIO())
call_command('clean_staticfilesjson', stdout=StringIO())
<commit_msg>Add test for missing staticfiles.json<commit_after>
|
import os
from io import StringIO
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import CommandError
from django.test import TestCase
class PipelineTestCase(TestCase):
def setUp(self):
file_path = os.path.join(settings.STATIC_ROOT, 'staticfiles.json')
if os.path.isfile(file_path):
os.remove(file_path)
def test_success(self):
call_command('collectstatic', '--noinput', stdout=StringIO())
call_command('clean_staticfilesjson', stdout=StringIO())
def test_missing_staticfilesjson(self):
with self.assertRaises(CommandError):
call_command('clean_staticfilesjson', stdout=StringIO())
|
from io import StringIO
from django.core.management import call_command
from django.test import TestCase
class PipelineTestCase(TestCase):
def test_success(self):
call_command('collectstatic', '--noinput', stdout=StringIO())
call_command('clean_staticfilesjson', stdout=StringIO())
Add test for missing staticfiles.jsonimport os
from io import StringIO
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import CommandError
from django.test import TestCase
class PipelineTestCase(TestCase):
def setUp(self):
file_path = os.path.join(settings.STATIC_ROOT, 'staticfiles.json')
if os.path.isfile(file_path):
os.remove(file_path)
def test_success(self):
call_command('collectstatic', '--noinput', stdout=StringIO())
call_command('clean_staticfilesjson', stdout=StringIO())
def test_missing_staticfilesjson(self):
with self.assertRaises(CommandError):
call_command('clean_staticfilesjson', stdout=StringIO())
|
<commit_before>from io import StringIO
from django.core.management import call_command
from django.test import TestCase
class PipelineTestCase(TestCase):
def test_success(self):
call_command('collectstatic', '--noinput', stdout=StringIO())
call_command('clean_staticfilesjson', stdout=StringIO())
<commit_msg>Add test for missing staticfiles.json<commit_after>import os
from io import StringIO
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import CommandError
from django.test import TestCase
class PipelineTestCase(TestCase):
def setUp(self):
file_path = os.path.join(settings.STATIC_ROOT, 'staticfiles.json')
if os.path.isfile(file_path):
os.remove(file_path)
def test_success(self):
call_command('collectstatic', '--noinput', stdout=StringIO())
call_command('clean_staticfilesjson', stdout=StringIO())
def test_missing_staticfilesjson(self):
with self.assertRaises(CommandError):
call_command('clean_staticfilesjson', stdout=StringIO())
|
d0046ae1dfc6c3cd86477180c3175562834c8f41
|
test/test_datapath.py
|
test/test_datapath.py
|
# coding=UTF8
import pytest
from core import hybra
class TestUM:
def setup(self):
hybra.set_data_path('./data2/')
self.g = hybra.data( 'news', folder = '', terms = ['yle.json'] )
def test_is_changed( self ):
assert( hybra.data_path() == './data2/' )
|
# coding=UTF8
import pytest
from core import hybra
class TestUM:
def setup(self):
hybra.set_data_path('./data_empty/')
def test_is_changed( self ):
assert( hybra.data_path() == './data_empty/' )
|
Remove data load from test
|
Remove data load from test
|
Python
|
mit
|
HIIT/hybra-core,HIIT/hybra-core,HIIT/hybra-core,HIIT/hybra-core,HIIT/hybra-core
|
# coding=UTF8
import pytest
from core import hybra
class TestUM:
def setup(self):
hybra.set_data_path('./data2/')
self.g = hybra.data( 'news', folder = '', terms = ['yle.json'] )
def test_is_changed( self ):
assert( hybra.data_path() == './data2/' )
Remove data load from test
|
# coding=UTF8
import pytest
from core import hybra
class TestUM:
def setup(self):
hybra.set_data_path('./data_empty/')
def test_is_changed( self ):
assert( hybra.data_path() == './data_empty/' )
|
<commit_before># coding=UTF8
import pytest
from core import hybra
class TestUM:
def setup(self):
hybra.set_data_path('./data2/')
self.g = hybra.data( 'news', folder = '', terms = ['yle.json'] )
def test_is_changed( self ):
assert( hybra.data_path() == './data2/' )
<commit_msg>Remove data load from test<commit_after>
|
# coding=UTF8
import pytest
from core import hybra
class TestUM:
def setup(self):
hybra.set_data_path('./data_empty/')
def test_is_changed( self ):
assert( hybra.data_path() == './data_empty/' )
|
# coding=UTF8
import pytest
from core import hybra
class TestUM:
def setup(self):
hybra.set_data_path('./data2/')
self.g = hybra.data( 'news', folder = '', terms = ['yle.json'] )
def test_is_changed( self ):
assert( hybra.data_path() == './data2/' )
Remove data load from test# coding=UTF8
import pytest
from core import hybra
class TestUM:
def setup(self):
hybra.set_data_path('./data_empty/')
def test_is_changed( self ):
assert( hybra.data_path() == './data_empty/' )
|
<commit_before># coding=UTF8
import pytest
from core import hybra
class TestUM:
def setup(self):
hybra.set_data_path('./data2/')
self.g = hybra.data( 'news', folder = '', terms = ['yle.json'] )
def test_is_changed( self ):
assert( hybra.data_path() == './data2/' )
<commit_msg>Remove data load from test<commit_after># coding=UTF8
import pytest
from core import hybra
class TestUM:
def setup(self):
hybra.set_data_path('./data_empty/')
def test_is_changed( self ):
assert( hybra.data_path() == './data_empty/' )
|
fd19236999eccd9cbf049bc5b8917cd603974f97
|
centerline/__init__.py
|
centerline/__init__.py
|
from .centerline import Centerline
__all__ = ['Centerline']
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from . import utils
from .centerline import Centerline
__all__ = ['utils', 'Centerline']
|
Add the utils module to the package index
|
Add the utils module to the package index
|
Python
|
mit
|
fitodic/polygon-centerline,fitodic/centerline,fitodic/centerline
|
from .centerline import Centerline
__all__ = ['Centerline']
Add the utils module to the package index
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from . import utils
from .centerline import Centerline
__all__ = ['utils', 'Centerline']
|
<commit_before>from .centerline import Centerline
__all__ = ['Centerline']
<commit_msg>Add the utils module to the package index<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from . import utils
from .centerline import Centerline
__all__ = ['utils', 'Centerline']
|
from .centerline import Centerline
__all__ = ['Centerline']
Add the utils module to the package index# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from . import utils
from .centerline import Centerline
__all__ = ['utils', 'Centerline']
|
<commit_before>from .centerline import Centerline
__all__ = ['Centerline']
<commit_msg>Add the utils module to the package index<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from . import utils
from .centerline import Centerline
__all__ = ['utils', 'Centerline']
|
7488988934a5370b372eed0f5245518ab612fa89
|
utils/mail_utils.py
|
utils/mail_utils.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Utils for sending emails via SMTP on localhost.
"""
from email.utils import formatdate
from email.mime.text import MIMEText
import smtplib
import textwrap
from config import MAILSERVER_HOST
def wrap_message(message, chars_in_line=80):
"""Wraps an unformatted block of text to 80 characters
"""
return_text = []
for paragraph in message.split('\n'):
lines = textwrap.wrap(paragraph, chars_in_line)
if not lines:
return_text.append('')
else:
return_text.extend(lines)
return '\n'.join(return_text)
def send_mail(sender, receipient, subject, message):
"""Send a MIME text mail from sender to receipient with subject and message.
The message will be wrapped to 80 characters and encoded to UTF8.
Returns False, if sending from localhost:25 fails.
Else returns True.
"""
message = wrap_message(message)
mail = MIMEText(message, _charset='utf-8')
mail['From'] = sender
mail['To'] = receipient
mail['Subject'] = subject
mail['Date'] = formatdate(localtime=True)
try:
smtp = smtplib.SMTP()
smtp.connect(host=MAILSERVER_HOST, port=25)
smtp.sendmail(sender, receipient, mail.as_string(0))
smtp.close()
return True
except IOError:
# smtp.connect failed to connect
return False
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Utils for sending emails via SMTP on localhost.
"""
from email.utils import formatdate
from email.mime.text import MIMEText
import smtplib
import textwrap
from config import MAILSERVER_HOST, MAILSERVER_PORT
def wrap_message(message, chars_in_line=80):
"""Wraps an unformatted block of text to 80 characters
"""
return_text = []
for paragraph in message.split('\n'):
lines = textwrap.wrap(paragraph, chars_in_line)
if not lines:
return_text.append('')
else:
return_text.extend(lines)
return '\n'.join(return_text)
def send_mail(sender, receipient, subject, message):
"""Send a MIME text mail from sender to receipient with subject and message.
The message will be wrapped to 80 characters and encoded to UTF8.
Returns False, if sending from localhost:25 fails.
Else returns True.
"""
message = wrap_message(message)
mail = MIMEText(message, _charset='utf-8')
mail['From'] = sender
mail['To'] = receipient
mail['Subject'] = subject
mail['Date'] = formatdate(localtime=True)
try:
smtp = smtplib.SMTP()
smtp.connect(host=MAILSERVER_HOST, port=MAILSERVER_PORT)
smtp.sendmail(sender, receipient, mail.as_string(0))
smtp.close()
return True
except IOError:
# smtp.connect failed to connect
return False
|
Add possibility to locally forward the mail server
|
Add possibility to locally forward the mail server
|
Python
|
mit
|
MarauderXtreme/sipa,lukasjuhrich/sipa,lukasjuhrich/sipa,MarauderXtreme/sipa,agdsn/sipa,agdsn/sipa,agdsn/sipa,agdsn/sipa,lukasjuhrich/sipa,fgrsnau/sipa,lukasjuhrich/sipa,MarauderXtreme/sipa,fgrsnau/sipa,fgrsnau/sipa
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Utils for sending emails via SMTP on localhost.
"""
from email.utils import formatdate
from email.mime.text import MIMEText
import smtplib
import textwrap
from config import MAILSERVER_HOST
def wrap_message(message, chars_in_line=80):
"""Wraps an unformatted block of text to 80 characters
"""
return_text = []
for paragraph in message.split('\n'):
lines = textwrap.wrap(paragraph, chars_in_line)
if not lines:
return_text.append('')
else:
return_text.extend(lines)
return '\n'.join(return_text)
def send_mail(sender, receipient, subject, message):
"""Send a MIME text mail from sender to receipient with subject and message.
The message will be wrapped to 80 characters and encoded to UTF8.
Returns False, if sending from localhost:25 fails.
Else returns True.
"""
message = wrap_message(message)
mail = MIMEText(message, _charset='utf-8')
mail['From'] = sender
mail['To'] = receipient
mail['Subject'] = subject
mail['Date'] = formatdate(localtime=True)
try:
smtp = smtplib.SMTP()
smtp.connect(host=MAILSERVER_HOST, port=25)
smtp.sendmail(sender, receipient, mail.as_string(0))
smtp.close()
return True
except IOError:
# smtp.connect failed to connect
return FalseAdd possibility to locally forward the mail server
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Utils for sending emails via SMTP on localhost.
"""
from email.utils import formatdate
from email.mime.text import MIMEText
import smtplib
import textwrap
from config import MAILSERVER_HOST, MAILSERVER_PORT
def wrap_message(message, chars_in_line=80):
"""Wraps an unformatted block of text to 80 characters
"""
return_text = []
for paragraph in message.split('\n'):
lines = textwrap.wrap(paragraph, chars_in_line)
if not lines:
return_text.append('')
else:
return_text.extend(lines)
return '\n'.join(return_text)
def send_mail(sender, receipient, subject, message):
"""Send a MIME text mail from sender to receipient with subject and message.
The message will be wrapped to 80 characters and encoded to UTF8.
Returns False, if sending from localhost:25 fails.
Else returns True.
"""
message = wrap_message(message)
mail = MIMEText(message, _charset='utf-8')
mail['From'] = sender
mail['To'] = receipient
mail['Subject'] = subject
mail['Date'] = formatdate(localtime=True)
try:
smtp = smtplib.SMTP()
smtp.connect(host=MAILSERVER_HOST, port=MAILSERVER_PORT)
smtp.sendmail(sender, receipient, mail.as_string(0))
smtp.close()
return True
except IOError:
# smtp.connect failed to connect
return False
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Utils for sending emails via SMTP on localhost.
"""
from email.utils import formatdate
from email.mime.text import MIMEText
import smtplib
import textwrap
from config import MAILSERVER_HOST
def wrap_message(message, chars_in_line=80):
"""Wraps an unformatted block of text to 80 characters
"""
return_text = []
for paragraph in message.split('\n'):
lines = textwrap.wrap(paragraph, chars_in_line)
if not lines:
return_text.append('')
else:
return_text.extend(lines)
return '\n'.join(return_text)
def send_mail(sender, receipient, subject, message):
"""Send a MIME text mail from sender to receipient with subject and message.
The message will be wrapped to 80 characters and encoded to UTF8.
Returns False, if sending from localhost:25 fails.
Else returns True.
"""
message = wrap_message(message)
mail = MIMEText(message, _charset='utf-8')
mail['From'] = sender
mail['To'] = receipient
mail['Subject'] = subject
mail['Date'] = formatdate(localtime=True)
try:
smtp = smtplib.SMTP()
smtp.connect(host=MAILSERVER_HOST, port=25)
smtp.sendmail(sender, receipient, mail.as_string(0))
smtp.close()
return True
except IOError:
# smtp.connect failed to connect
return False<commit_msg>Add possibility to locally forward the mail server<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Utils for sending emails via SMTP on localhost.
"""
from email.utils import formatdate
from email.mime.text import MIMEText
import smtplib
import textwrap
from config import MAILSERVER_HOST, MAILSERVER_PORT
def wrap_message(message, chars_in_line=80):
"""Wraps an unformatted block of text to 80 characters
"""
return_text = []
for paragraph in message.split('\n'):
lines = textwrap.wrap(paragraph, chars_in_line)
if not lines:
return_text.append('')
else:
return_text.extend(lines)
return '\n'.join(return_text)
def send_mail(sender, receipient, subject, message):
"""Send a MIME text mail from sender to receipient with subject and message.
The message will be wrapped to 80 characters and encoded to UTF8.
Returns False, if sending from localhost:25 fails.
Else returns True.
"""
message = wrap_message(message)
mail = MIMEText(message, _charset='utf-8')
mail['From'] = sender
mail['To'] = receipient
mail['Subject'] = subject
mail['Date'] = formatdate(localtime=True)
try:
smtp = smtplib.SMTP()
smtp.connect(host=MAILSERVER_HOST, port=MAILSERVER_PORT)
smtp.sendmail(sender, receipient, mail.as_string(0))
smtp.close()
return True
except IOError:
# smtp.connect failed to connect
return False
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Utils for sending emails via SMTP on localhost.
"""
from email.utils import formatdate
from email.mime.text import MIMEText
import smtplib
import textwrap
from config import MAILSERVER_HOST
def wrap_message(message, chars_in_line=80):
"""Wraps an unformatted block of text to 80 characters
"""
return_text = []
for paragraph in message.split('\n'):
lines = textwrap.wrap(paragraph, chars_in_line)
if not lines:
return_text.append('')
else:
return_text.extend(lines)
return '\n'.join(return_text)
def send_mail(sender, receipient, subject, message):
"""Send a MIME text mail from sender to receipient with subject and message.
The message will be wrapped to 80 characters and encoded to UTF8.
Returns False, if sending from localhost:25 fails.
Else returns True.
"""
message = wrap_message(message)
mail = MIMEText(message, _charset='utf-8')
mail['From'] = sender
mail['To'] = receipient
mail['Subject'] = subject
mail['Date'] = formatdate(localtime=True)
try:
smtp = smtplib.SMTP()
smtp.connect(host=MAILSERVER_HOST, port=25)
smtp.sendmail(sender, receipient, mail.as_string(0))
smtp.close()
return True
except IOError:
# smtp.connect failed to connect
return FalseAdd possibility to locally forward the mail server#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Utils for sending emails via SMTP on localhost.
"""
from email.utils import formatdate
from email.mime.text import MIMEText
import smtplib
import textwrap
from config import MAILSERVER_HOST, MAILSERVER_PORT
def wrap_message(message, chars_in_line=80):
"""Wraps an unformatted block of text to 80 characters
"""
return_text = []
for paragraph in message.split('\n'):
lines = textwrap.wrap(paragraph, chars_in_line)
if not lines:
return_text.append('')
else:
return_text.extend(lines)
return '\n'.join(return_text)
def send_mail(sender, receipient, subject, message):
"""Send a MIME text mail from sender to receipient with subject and message.
The message will be wrapped to 80 characters and encoded to UTF8.
Returns False, if sending from localhost:25 fails.
Else returns True.
"""
message = wrap_message(message)
mail = MIMEText(message, _charset='utf-8')
mail['From'] = sender
mail['To'] = receipient
mail['Subject'] = subject
mail['Date'] = formatdate(localtime=True)
try:
smtp = smtplib.SMTP()
smtp.connect(host=MAILSERVER_HOST, port=MAILSERVER_PORT)
smtp.sendmail(sender, receipient, mail.as_string(0))
smtp.close()
return True
except IOError:
# smtp.connect failed to connect
return False
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Utils for sending emails via SMTP on localhost.
"""
from email.utils import formatdate
from email.mime.text import MIMEText
import smtplib
import textwrap
from config import MAILSERVER_HOST
def wrap_message(message, chars_in_line=80):
"""Wraps an unformatted block of text to 80 characters
"""
return_text = []
for paragraph in message.split('\n'):
lines = textwrap.wrap(paragraph, chars_in_line)
if not lines:
return_text.append('')
else:
return_text.extend(lines)
return '\n'.join(return_text)
def send_mail(sender, receipient, subject, message):
"""Send a MIME text mail from sender to receipient with subject and message.
The message will be wrapped to 80 characters and encoded to UTF8.
Returns False, if sending from localhost:25 fails.
Else returns True.
"""
message = wrap_message(message)
mail = MIMEText(message, _charset='utf-8')
mail['From'] = sender
mail['To'] = receipient
mail['Subject'] = subject
mail['Date'] = formatdate(localtime=True)
try:
smtp = smtplib.SMTP()
smtp.connect(host=MAILSERVER_HOST, port=25)
smtp.sendmail(sender, receipient, mail.as_string(0))
smtp.close()
return True
except IOError:
# smtp.connect failed to connect
return False<commit_msg>Add possibility to locally forward the mail server<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Utils for sending emails via SMTP on localhost.
"""
from email.utils import formatdate
from email.mime.text import MIMEText
import smtplib
import textwrap
from config import MAILSERVER_HOST, MAILSERVER_PORT
def wrap_message(message, chars_in_line=80):
"""Wraps an unformatted block of text to 80 characters
"""
return_text = []
for paragraph in message.split('\n'):
lines = textwrap.wrap(paragraph, chars_in_line)
if not lines:
return_text.append('')
else:
return_text.extend(lines)
return '\n'.join(return_text)
def send_mail(sender, receipient, subject, message):
"""Send a MIME text mail from sender to receipient with subject and message.
The message will be wrapped to 80 characters and encoded to UTF8.
Returns False, if sending from localhost:25 fails.
Else returns True.
"""
message = wrap_message(message)
mail = MIMEText(message, _charset='utf-8')
mail['From'] = sender
mail['To'] = receipient
mail['Subject'] = subject
mail['Date'] = formatdate(localtime=True)
try:
smtp = smtplib.SMTP()
smtp.connect(host=MAILSERVER_HOST, port=MAILSERVER_PORT)
smtp.sendmail(sender, receipient, mail.as_string(0))
smtp.close()
return True
except IOError:
# smtp.connect failed to connect
return False
|
4fd5fd238d4c8353e131e5399a184edbd6de159d
|
ibmcnx/test/loadFunction.py
|
ibmcnx/test/loadFunction.py
|
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
locdict = locals()
def loadFilesService():
global globdict
global locdict
execfile("filesAdmin.py",globdict,locdict)
|
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
locdict = locals()
def loadFilesService():
global globdict
global locdict
ldict(globals(),locals())
execfile("filesAdmin.py",globdict,locdict)
|
Customize scripts to work with menu
|
Customize scripts to work with menu
|
Python
|
apache-2.0
|
stoeps13/ibmcnx2,stoeps13/ibmcnx2
|
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
locdict = locals()
def loadFilesService():
global globdict
global locdict
execfile("filesAdmin.py",globdict,locdict)
Customize scripts to work with menu
|
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
locdict = locals()
def loadFilesService():
global globdict
global locdict
ldict(globals(),locals())
execfile("filesAdmin.py",globdict,locdict)
|
<commit_before>
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
locdict = locals()
def loadFilesService():
global globdict
global locdict
execfile("filesAdmin.py",globdict,locdict)
<commit_msg>Customize scripts to work with menu<commit_after>
|
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
locdict = locals()
def loadFilesService():
global globdict
global locdict
ldict(globals(),locals())
execfile("filesAdmin.py",globdict,locdict)
|
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
locdict = locals()
def loadFilesService():
global globdict
global locdict
execfile("filesAdmin.py",globdict,locdict)
Customize scripts to work with menu
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
locdict = locals()
def loadFilesService():
global globdict
global locdict
ldict(globals(),locals())
execfile("filesAdmin.py",globdict,locdict)
|
<commit_before>
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
locdict = locals()
def loadFilesService():
global globdict
global locdict
execfile("filesAdmin.py",globdict,locdict)
<commit_msg>Customize scripts to work with menu<commit_after>
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
import lotusConnectionsCommonAdmin
globdict = globals()
locdict = locals()
def loadFilesService():
global globdict
global locdict
ldict(globals(),locals())
execfile("filesAdmin.py",globdict,locdict)
|
76252224293f3b54dafa1cf2356dcc9a2991cf39
|
externaldata/adsbedata/RetrieveHistoricalADSBEdata.py
|
externaldata/adsbedata/RetrieveHistoricalADSBEdata.py
|
"""
Utilities for downloading historical data in a given AOI.
Python 3.5
"""
import requests
import io
import zipfile
import os
from time import strftime
import logging
import yaml
from model import aircraft_report
from model import report_receiver
from utils import postgres as pg_utils
logger = logging.getLogger(__name__)
# current_date_stamp = strftime('%y-%m-%d')
current_date_stamp = '2017-10-01'
# temporary config load while testing
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'config.yml'), 'r') as yaml_config_file:
local_config = yaml.load(yaml_config_file)
adsbe_download_base_url = local_config['adsbe_url']
def get_archive_zip(zip_url):
req = requests.get(zip_url)
res_zip = zipfile.ZipFile(io.BytesIO(req.content))
if not os.path.exists('output'):
os.makedirs('output')
return res_zip.extractall('output')
aircraft_report.get_aircraft_data_from_files(os.path.join(os.getcwd(), 'output'))
# dl_url = adsbe_download_base_url + '{}.zip'.format(current_date_stamp)
# print(dl_url)
# get_archive_zip(dl_url)
|
"""
Utilities for downloading historical data in a given AOI.
Python 3.5
"""
import requests
import io
import zipfile
import os
from time import strftime
import logging
import yaml
from model import aircraft_report
from model import report_receiver
from utils import postgres as pg_utils
logger = logging.getLogger(__name__)
zip_dir = 'output'
# current_date_stamp = strftime('%y-%m-%d')
current_date_stamp = '2017-10-01'
# temporary config load while testing
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'config.yml'), 'r') as yaml_config_file:
local_config = yaml.load(yaml_config_file)
adsbe_download_base_url = local_config['adsbe_url']
def get_and_load_archive_data_by_date(zip_url):
req = requests.get(zip_url)
res_zip = zipfile.ZipFile(io.BytesIO(req.content))
if not os.path.exists(zip_dir):
os.makedirs(zip_dir)
res_zip.extractall(zip_dir)
aircraft_report.get_aircraft_data_from_files(os.path.join(os.path.dirname(os.path.realpath(__file__)), zip_dir))
# dl_url = adsbe_download_base_url + '{}.zip'.format(current_date_stamp)
# print(dl_url)
# get_archive_zip(dl_url)
|
Refactor output for zip archive and download
|
Refactor output for zip archive and download
|
Python
|
apache-2.0
|
GISDev01/adsbpostgis
|
"""
Utilities for downloading historical data in a given AOI.
Python 3.5
"""
import requests
import io
import zipfile
import os
from time import strftime
import logging
import yaml
from model import aircraft_report
from model import report_receiver
from utils import postgres as pg_utils
logger = logging.getLogger(__name__)
# current_date_stamp = strftime('%y-%m-%d')
current_date_stamp = '2017-10-01'
# temporary config load while testing
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'config.yml'), 'r') as yaml_config_file:
local_config = yaml.load(yaml_config_file)
adsbe_download_base_url = local_config['adsbe_url']
def get_archive_zip(zip_url):
req = requests.get(zip_url)
res_zip = zipfile.ZipFile(io.BytesIO(req.content))
if not os.path.exists('output'):
os.makedirs('output')
return res_zip.extractall('output')
aircraft_report.get_aircraft_data_from_files(os.path.join(os.getcwd(), 'output'))
# dl_url = adsbe_download_base_url + '{}.zip'.format(current_date_stamp)
# print(dl_url)
# get_archive_zip(dl_url)
Refactor output for zip archive and download
|
"""
Utilities for downloading historical data in a given AOI.
Python 3.5
"""
import requests
import io
import zipfile
import os
from time import strftime
import logging
import yaml
from model import aircraft_report
from model import report_receiver
from utils import postgres as pg_utils
logger = logging.getLogger(__name__)
zip_dir = 'output'
# current_date_stamp = strftime('%y-%m-%d')
current_date_stamp = '2017-10-01'
# temporary config load while testing
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'config.yml'), 'r') as yaml_config_file:
local_config = yaml.load(yaml_config_file)
adsbe_download_base_url = local_config['adsbe_url']
def get_and_load_archive_data_by_date(zip_url):
req = requests.get(zip_url)
res_zip = zipfile.ZipFile(io.BytesIO(req.content))
if not os.path.exists(zip_dir):
os.makedirs(zip_dir)
res_zip.extractall(zip_dir)
aircraft_report.get_aircraft_data_from_files(os.path.join(os.path.dirname(os.path.realpath(__file__)), zip_dir))
# dl_url = adsbe_download_base_url + '{}.zip'.format(current_date_stamp)
# print(dl_url)
# get_archive_zip(dl_url)
|
<commit_before>"""
Utilities for downloading historical data in a given AOI.
Python 3.5
"""
import requests
import io
import zipfile
import os
from time import strftime
import logging
import yaml
from model import aircraft_report
from model import report_receiver
from utils import postgres as pg_utils
logger = logging.getLogger(__name__)
# current_date_stamp = strftime('%y-%m-%d')
current_date_stamp = '2017-10-01'
# temporary config load while testing
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'config.yml'), 'r') as yaml_config_file:
local_config = yaml.load(yaml_config_file)
adsbe_download_base_url = local_config['adsbe_url']
def get_archive_zip(zip_url):
req = requests.get(zip_url)
res_zip = zipfile.ZipFile(io.BytesIO(req.content))
if not os.path.exists('output'):
os.makedirs('output')
return res_zip.extractall('output')
aircraft_report.get_aircraft_data_from_files(os.path.join(os.getcwd(), 'output'))
# dl_url = adsbe_download_base_url + '{}.zip'.format(current_date_stamp)
# print(dl_url)
# get_archive_zip(dl_url)
<commit_msg>Refactor output for zip archive and download<commit_after>
|
"""
Utilities for downloading historical data in a given AOI.
Python 3.5
"""
import requests
import io
import zipfile
import os
from time import strftime
import logging
import yaml
from model import aircraft_report
from model import report_receiver
from utils import postgres as pg_utils
logger = logging.getLogger(__name__)
zip_dir = 'output'
# current_date_stamp = strftime('%y-%m-%d')
current_date_stamp = '2017-10-01'
# temporary config load while testing
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'config.yml'), 'r') as yaml_config_file:
local_config = yaml.load(yaml_config_file)
adsbe_download_base_url = local_config['adsbe_url']
def get_and_load_archive_data_by_date(zip_url):
req = requests.get(zip_url)
res_zip = zipfile.ZipFile(io.BytesIO(req.content))
if not os.path.exists(zip_dir):
os.makedirs(zip_dir)
res_zip.extractall(zip_dir)
aircraft_report.get_aircraft_data_from_files(os.path.join(os.path.dirname(os.path.realpath(__file__)), zip_dir))
# dl_url = adsbe_download_base_url + '{}.zip'.format(current_date_stamp)
# print(dl_url)
# get_archive_zip(dl_url)
|
"""
Utilities for downloading historical data in a given AOI.
Python 3.5
"""
import requests
import io
import zipfile
import os
from time import strftime
import logging
import yaml
from model import aircraft_report
from model import report_receiver
from utils import postgres as pg_utils
logger = logging.getLogger(__name__)
# current_date_stamp = strftime('%y-%m-%d')
current_date_stamp = '2017-10-01'
# temporary config load while testing
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'config.yml'), 'r') as yaml_config_file:
local_config = yaml.load(yaml_config_file)
adsbe_download_base_url = local_config['adsbe_url']
def get_archive_zip(zip_url):
req = requests.get(zip_url)
res_zip = zipfile.ZipFile(io.BytesIO(req.content))
if not os.path.exists('output'):
os.makedirs('output')
return res_zip.extractall('output')
aircraft_report.get_aircraft_data_from_files(os.path.join(os.getcwd(), 'output'))
# dl_url = adsbe_download_base_url + '{}.zip'.format(current_date_stamp)
# print(dl_url)
# get_archive_zip(dl_url)
Refactor output for zip archive and download"""
Utilities for downloading historical data in a given AOI.
Python 3.5
"""
import requests
import io
import zipfile
import os
from time import strftime
import logging
import yaml
from model import aircraft_report
from model import report_receiver
from utils import postgres as pg_utils
logger = logging.getLogger(__name__)
zip_dir = 'output'
# current_date_stamp = strftime('%y-%m-%d')
current_date_stamp = '2017-10-01'
# temporary config load while testing
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'config.yml'), 'r') as yaml_config_file:
local_config = yaml.load(yaml_config_file)
adsbe_download_base_url = local_config['adsbe_url']
def get_and_load_archive_data_by_date(zip_url):
req = requests.get(zip_url)
res_zip = zipfile.ZipFile(io.BytesIO(req.content))
if not os.path.exists(zip_dir):
os.makedirs(zip_dir)
res_zip.extractall(zip_dir)
aircraft_report.get_aircraft_data_from_files(os.path.join(os.path.dirname(os.path.realpath(__file__)), zip_dir))
# dl_url = adsbe_download_base_url + '{}.zip'.format(current_date_stamp)
# print(dl_url)
# get_archive_zip(dl_url)
|
<commit_before>"""
Utilities for downloading historical data in a given AOI.
Python 3.5
"""
import requests
import io
import zipfile
import os
from time import strftime
import logging
import yaml
from model import aircraft_report
from model import report_receiver
from utils import postgres as pg_utils
logger = logging.getLogger(__name__)
# current_date_stamp = strftime('%y-%m-%d')
current_date_stamp = '2017-10-01'
# temporary config load while testing
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'config.yml'), 'r') as yaml_config_file:
local_config = yaml.load(yaml_config_file)
adsbe_download_base_url = local_config['adsbe_url']
def get_archive_zip(zip_url):
req = requests.get(zip_url)
res_zip = zipfile.ZipFile(io.BytesIO(req.content))
if not os.path.exists('output'):
os.makedirs('output')
return res_zip.extractall('output')
aircraft_report.get_aircraft_data_from_files(os.path.join(os.getcwd(), 'output'))
# dl_url = adsbe_download_base_url + '{}.zip'.format(current_date_stamp)
# print(dl_url)
# get_archive_zip(dl_url)
<commit_msg>Refactor output for zip archive and download<commit_after>"""
Utilities for downloading historical data in a given AOI.
Python 3.5
"""
import requests
import io
import zipfile
import os
from time import strftime
import logging
import yaml
from model import aircraft_report
from model import report_receiver
from utils import postgres as pg_utils
logger = logging.getLogger(__name__)
zip_dir = 'output'
# current_date_stamp = strftime('%y-%m-%d')
current_date_stamp = '2017-10-01'
# temporary config load while testing
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'config.yml'), 'r') as yaml_config_file:
local_config = yaml.load(yaml_config_file)
adsbe_download_base_url = local_config['adsbe_url']
def get_and_load_archive_data_by_date(zip_url):
req = requests.get(zip_url)
res_zip = zipfile.ZipFile(io.BytesIO(req.content))
if not os.path.exists(zip_dir):
os.makedirs(zip_dir)
res_zip.extractall(zip_dir)
aircraft_report.get_aircraft_data_from_files(os.path.join(os.path.dirname(os.path.realpath(__file__)), zip_dir))
# dl_url = adsbe_download_base_url + '{}.zip'.format(current_date_stamp)
# print(dl_url)
# get_archive_zip(dl_url)
|
53ee8d6a3fd773b08003ca6e7a371ac787eab622
|
polling_stations/apps/data_collection/management/commands/import_watford.py
|
polling_stations/apps/data_collection/management/commands/import_watford.py
|
"""
Import Watford
"""
from data_collection.management.commands import BaseShpShpImporter
class Command(BaseShpShpImporter):
    """
    Imports the Polling Station data from Watford
    """
    # GSS code for Watford Borough Council.
    council_id = 'E07000103'
    # Base names of the shapefiles holding districts and stations.
    districts_name = 'Watford_Polling_Districts'
    stations_name = 'Watford_Polling_Stations.shp'
    def district_record_to_dict(self, record):
        # record is a positional shapefile attribute row; presumably index 0 is
        # the district id and index 2 the district name — TODO confirm schema.
        return {
            'internal_council_id': record[0],
            'name': record[2],
        }
    def station_record_to_dict(self, record):
        # presumably index 0 = station id, 3/4 = address lines, 5 = postcode
        # — TODO confirm against the shapefile's field order.
        return {
            'internal_council_id': record[0],
            'postcode' : record[5],
            'address' : "\n".join([record[3], record[4]])
        }
|
"""
Import Watford
"""
from data_collection.management.commands import BaseShpShpImporter
class Command(BaseShpShpImporter):
    """
    Imports the Polling Station data from Watford
    """
    # GSS code for Watford Borough Council.
    council_id = 'E07000103'
    # Base names of the shapefiles holding districts and stations.
    districts_name = 'Watford_Polling_Districts'
    stations_name = 'Watford_Polling_Stations.shp'
    def district_record_to_dict(self, record):
        # record is a positional shapefile attribute row; presumably index 2
        # holds the district code, used as both id and name — TODO confirm.
        return {
            'internal_council_id': record[2],
            'name': record[2],
        }
    def station_record_to_dict(self, record):
        # presumably index 0 = station id, 3/4 = address lines, 5 = postcode,
        # 2 = owning polling district code — TODO confirm field order.
        return {
            'internal_council_id': record[0],
            'postcode' : record[5],
            'address' : "\n".join([record[3], record[4]]),
            'polling_district_id': record[2]
        }
|
Add polling_district_id in Watford import script
|
Add polling_district_id in Watford import script
|
Python
|
bsd-3-clause
|
chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,andylolz/UK-Polling-Stations,andylolz/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,andylolz/UK-Polling-Stations
|
"""
Import Watford
"""
from data_collection.management.commands import BaseShpShpImporter
class Command(BaseShpShpImporter):
    """
    Imports the Polling Station data from Watford
    """
    # GSS code for Watford Borough Council.
    council_id = 'E07000103'
    # Base names of the shapefiles holding districts and stations.
    districts_name = 'Watford_Polling_Districts'
    stations_name = 'Watford_Polling_Stations.shp'
    def district_record_to_dict(self, record):
        # record is a positional shapefile attribute row; presumably index 0 is
        # the district id and index 2 the district name — TODO confirm schema.
        return {
            'internal_council_id': record[0],
            'name': record[2],
        }
    def station_record_to_dict(self, record):
        # presumably index 0 = station id, 3/4 = address lines, 5 = postcode
        # — TODO confirm against the shapefile's field order.
        return {
            'internal_council_id': record[0],
            'postcode' : record[5],
            'address' : "\n".join([record[3], record[4]])
        }
Add polling_district_id in Watford import script
|
"""
Import Watford
"""
from data_collection.management.commands import BaseShpShpImporter
class Command(BaseShpShpImporter):
    """Shapefile importer for Watford's polling districts and stations."""
    council_id = 'E07000103'
    districts_name = 'Watford_Polling_Districts'
    stations_name = 'Watford_Polling_Stations.shp'
    def district_record_to_dict(self, record):
        # The district code doubles as both the internal id and the name.
        district_code = record[2]
        return {
            'internal_council_id': district_code,
            'name': district_code,
        }
    def station_record_to_dict(self, record):
        # Join the two address fields into a single multi-line address.
        address_lines = [record[3], record[4]]
        return {
            'internal_council_id': record[0],
            'postcode': record[5],
            'address': "\n".join(address_lines),
            'polling_district_id': record[2],
        }
|
<commit_before>"""
Import Watford
"""
from data_collection.management.commands import BaseShpShpImporter
class Command(BaseShpShpImporter):
    """
    Imports the Polling Station data from Watford
    """
    # GSS code for Watford Borough Council.
    council_id = 'E07000103'
    # Base names of the shapefiles holding districts and stations.
    districts_name = 'Watford_Polling_Districts'
    stations_name = 'Watford_Polling_Stations.shp'
    def district_record_to_dict(self, record):
        # record is a positional shapefile attribute row; presumably index 0 is
        # the district id and index 2 the district name — TODO confirm schema.
        return {
            'internal_council_id': record[0],
            'name': record[2],
        }
    def station_record_to_dict(self, record):
        # presumably index 0 = station id, 3/4 = address lines, 5 = postcode
        # — TODO confirm against the shapefile's field order.
        return {
            'internal_council_id': record[0],
            'postcode' : record[5],
            'address' : "\n".join([record[3], record[4]])
        }
<commit_msg>Add polling_district_id in Watford import script<commit_after>
|
"""
Import Watford
"""
from data_collection.management.commands import BaseShpShpImporter
class Command(BaseShpShpImporter):
    """
    Imports the Polling Station data from Watford
    """
    # GSS code for Watford Borough Council.
    council_id = 'E07000103'
    # Base names of the shapefiles holding districts and stations.
    districts_name = 'Watford_Polling_Districts'
    stations_name = 'Watford_Polling_Stations.shp'
    def district_record_to_dict(self, record):
        # record is a positional shapefile attribute row; presumably index 2
        # holds the district code, used as both id and name — TODO confirm.
        return {
            'internal_council_id': record[2],
            'name': record[2],
        }
    def station_record_to_dict(self, record):
        # presumably index 0 = station id, 3/4 = address lines, 5 = postcode,
        # 2 = owning polling district code — TODO confirm field order.
        return {
            'internal_council_id': record[0],
            'postcode' : record[5],
            'address' : "\n".join([record[3], record[4]]),
            'polling_district_id': record[2]
        }
|
"""
Import Watford
"""
from data_collection.management.commands import BaseShpShpImporter
class Command(BaseShpShpImporter):
    """
    Imports the Polling Station data from Watford
    """
    # GSS code for Watford Borough Council.
    council_id = 'E07000103'
    # Base names of the shapefiles holding districts and stations.
    districts_name = 'Watford_Polling_Districts'
    stations_name = 'Watford_Polling_Stations.shp'
    def district_record_to_dict(self, record):
        # record is a positional shapefile attribute row; presumably index 0 is
        # the district id and index 2 the district name — TODO confirm schema.
        return {
            'internal_council_id': record[0],
            'name': record[2],
        }
    def station_record_to_dict(self, record):
        # presumably index 0 = station id, 3/4 = address lines, 5 = postcode
        # — TODO confirm against the shapefile's field order.
        return {
            'internal_council_id': record[0],
            'postcode' : record[5],
            'address' : "\n".join([record[3], record[4]])
        }
Add polling_district_id in Watford import script"""
Import Watford
"""
from data_collection.management.commands import BaseShpShpImporter
class Command(BaseShpShpImporter):
    """
    Imports the Polling Station data from Watford
    """
    # GSS code for Watford Borough Council.
    council_id = 'E07000103'
    # Base names of the shapefiles holding districts and stations.
    districts_name = 'Watford_Polling_Districts'
    stations_name = 'Watford_Polling_Stations.shp'
    def district_record_to_dict(self, record):
        # record is a positional shapefile attribute row; presumably index 2
        # holds the district code, used as both id and name — TODO confirm.
        return {
            'internal_council_id': record[2],
            'name': record[2],
        }
    def station_record_to_dict(self, record):
        # presumably index 0 = station id, 3/4 = address lines, 5 = postcode,
        # 2 = owning polling district code — TODO confirm field order.
        return {
            'internal_council_id': record[0],
            'postcode' : record[5],
            'address' : "\n".join([record[3], record[4]]),
            'polling_district_id': record[2]
        }
|
<commit_before>"""
Import Watford
"""
from data_collection.management.commands import BaseShpShpImporter
class Command(BaseShpShpImporter):
    """
    Imports the Polling Station data from Watford
    """
    # GSS code for Watford Borough Council.
    council_id = 'E07000103'
    # Base names of the shapefiles holding districts and stations.
    districts_name = 'Watford_Polling_Districts'
    stations_name = 'Watford_Polling_Stations.shp'
    def district_record_to_dict(self, record):
        # record is a positional shapefile attribute row; presumably index 0 is
        # the district id and index 2 the district name — TODO confirm schema.
        return {
            'internal_council_id': record[0],
            'name': record[2],
        }
    def station_record_to_dict(self, record):
        # presumably index 0 = station id, 3/4 = address lines, 5 = postcode
        # — TODO confirm against the shapefile's field order.
        return {
            'internal_council_id': record[0],
            'postcode' : record[5],
            'address' : "\n".join([record[3], record[4]])
        }
<commit_msg>Add polling_district_id in Watford import script<commit_after>"""
Import Watford
"""
from data_collection.management.commands import BaseShpShpImporter
class Command(BaseShpShpImporter):
    """
    Imports the Polling Station data from Watford
    """
    # GSS code for Watford Borough Council.
    council_id = 'E07000103'
    # Base names of the shapefiles holding districts and stations.
    districts_name = 'Watford_Polling_Districts'
    stations_name = 'Watford_Polling_Stations.shp'
    def district_record_to_dict(self, record):
        # record is a positional shapefile attribute row; presumably index 2
        # holds the district code, used as both id and name — TODO confirm.
        return {
            'internal_council_id': record[2],
            'name': record[2],
        }
    def station_record_to_dict(self, record):
        # presumably index 0 = station id, 3/4 = address lines, 5 = postcode,
        # 2 = owning polling district code — TODO confirm field order.
        return {
            'internal_council_id': record[0],
            'postcode' : record[5],
            'address' : "\n".join([record[3], record[4]]),
            'polling_district_id': record[2]
        }
|
761b0a959882499b629d9bc3fbd1b971beaf66a5
|
mymodule/blueprints/api_v1/__init__.py
|
mymodule/blueprints/api_v1/__init__.py
|
import flask
from flask import current_app as app
from werkzeug import exceptions
from mymodule.blueprints.api_v1 import upload
# All v1 API routes hang off /api/v1.
blueprint = flask.Blueprint('v1', __name__, url_prefix='/api/v1')
blueprint.add_url_rule('/upload', view_func=upload.post, methods=['POST'])
@blueprint.route('/test')
def index():
    # Smoke-test endpoint: log a line and return a fixed JSON payload.
    app.logger.info('TEST %d', 1)
    return flask.jsonify(key='value')
@blueprint.route('/error')
def error():
    # Deliberately raises so the app's error handling can be exercised.
    raise Exception('oh, oh!')
@blueprint.route('/keys')
def keys():
    # Echo the required 'code' query parameter. A missing parameter raises
    # BadRequestKeyError, handled by bad_request_key_error below.
    code = flask.request.args['code']
    return flask.jsonify(key=code)
@blueprint.route('/config')
def config():
    # Expose the MYMODULE_* settings namespace as JSON.
    ns = app.config.get_namespace('MYMODULE_')
    return flask.jsonify(ns)
@blueprint.route('/crud')
def crud_get():
    # GET half of the CRUD demo: report the HTTP method used.
    return flask.jsonify(method=flask.request.method)
@blueprint.route('/crud', methods=['POST'])
def crud_post():
    # POST half: echo the JSON body, or 400 when the body is not JSON.
    payload = flask.request.get_json()
    if payload is None:
        raise exceptions.BadRequest('no post data')
    return flask.jsonify(args=payload)
@blueprint.errorhandler(exceptions.BadRequestKeyError)
def bad_request_key_error(e):
    # Convert a missing-request-parameter error into a JSON error response.
    message = 'missing \'%s\' parameter' % e.args[0]
    return flask.jsonify(error=message), e.code
|
import flask
from flask import current_app as app
from werkzeug import exceptions
from . import upload
# All v1 API routes hang off /api/v1.
blueprint = flask.Blueprint('v1', __name__, url_prefix='/api/v1')
blueprint.add_url_rule('/upload', view_func=upload.post, methods=['POST'])
@blueprint.route('/test')
def index():
    # Smoke-test endpoint: log a line and return a fixed JSON payload.
    app.logger.info('TEST %d', 1)
    return flask.jsonify(key='value')
@blueprint.route('/error')
def error():
    # Deliberately raises so the app's error handling can be exercised.
    raise Exception('oh, oh!')
@blueprint.route('/keys')
def keys():
    # Echo the required 'code' query parameter. A missing parameter raises
    # BadRequestKeyError, handled by bad_request_key_error below.
    code = flask.request.args['code']
    return flask.jsonify(key=code)
@blueprint.route('/config')
def config():
    # Expose the MYMODULE_* settings namespace as JSON.
    ns = app.config.get_namespace('MYMODULE_')
    return flask.jsonify(ns)
@blueprint.route('/crud')
def crud_get():
    # GET half of the CRUD demo: report the HTTP method used.
    return flask.jsonify(method=flask.request.method)
@blueprint.route('/crud', methods=['POST'])
def crud_post():
    # POST half: echo the JSON body, or 400 when the body is not JSON.
    payload = flask.request.get_json()
    if payload is None:
        raise exceptions.BadRequest('no post data')
    return flask.jsonify(args=payload)
@blueprint.errorhandler(exceptions.BadRequestKeyError)
def bad_request_key_error(e):
    # Convert a missing-request-parameter error into a JSON error response.
    message = 'missing \'%s\' parameter' % e.args[0]
    return flask.jsonify(error=message), e.code
|
Simplify import of api_v1 subpackage.
|
Simplify import of api_v1 subpackage.
|
Python
|
mit
|
eduardobmc/flask-test
|
import flask
from flask import current_app as app
from werkzeug import exceptions
from mymodule.blueprints.api_v1 import upload
blueprint = flask.Blueprint('v1', __name__, url_prefix='/api/v1')
blueprint.add_url_rule('/upload', view_func=upload.post, methods=['POST'])
@blueprint.route('/test')
def index():
app.logger.info('TEST %d', 1)
return flask.jsonify(key='value')
@blueprint.route('/error')
def error():
raise Exception('oh, oh!')
@blueprint.route('/keys')
def keys():
code = flask.request.args['code']
return flask.jsonify(key=code)
@blueprint.route('/config')
def config():
ns = app.config.get_namespace('MYMODULE_')
return flask.jsonify(ns)
@blueprint.route('/crud')
def crud_get():
return flask.jsonify(method=flask.request.method)
@blueprint.route('/crud', methods=['POST'])
def crud_post():
payload = flask.request.get_json()
if payload is None:
raise exceptions.BadRequest('no post data')
return flask.jsonify(args=payload)
@blueprint.errorhandler(exceptions.BadRequestKeyError)
def bad_request_key_error(e):
message = 'missing \'%s\' parameter' % e.args[0]
return flask.jsonify(error=message), e.code
Simplify import of api_v1 subpackage.
|
import flask
from flask import current_app as app
from werkzeug import exceptions
from . import upload
blueprint = flask.Blueprint('v1', __name__, url_prefix='/api/v1')
blueprint.add_url_rule('/upload', view_func=upload.post, methods=['POST'])
@blueprint.route('/test')
def index():
app.logger.info('TEST %d', 1)
return flask.jsonify(key='value')
@blueprint.route('/error')
def error():
raise Exception('oh, oh!')
@blueprint.route('/keys')
def keys():
code = flask.request.args['code']
return flask.jsonify(key=code)
@blueprint.route('/config')
def config():
ns = app.config.get_namespace('MYMODULE_')
return flask.jsonify(ns)
@blueprint.route('/crud')
def crud_get():
return flask.jsonify(method=flask.request.method)
@blueprint.route('/crud', methods=['POST'])
def crud_post():
payload = flask.request.get_json()
if payload is None:
raise exceptions.BadRequest('no post data')
return flask.jsonify(args=payload)
@blueprint.errorhandler(exceptions.BadRequestKeyError)
def bad_request_key_error(e):
message = 'missing \'%s\' parameter' % e.args[0]
return flask.jsonify(error=message), e.code
|
<commit_before>import flask
from flask import current_app as app
from werkzeug import exceptions
from mymodule.blueprints.api_v1 import upload
blueprint = flask.Blueprint('v1', __name__, url_prefix='/api/v1')
blueprint.add_url_rule('/upload', view_func=upload.post, methods=['POST'])
@blueprint.route('/test')
def index():
app.logger.info('TEST %d', 1)
return flask.jsonify(key='value')
@blueprint.route('/error')
def error():
raise Exception('oh, oh!')
@blueprint.route('/keys')
def keys():
code = flask.request.args['code']
return flask.jsonify(key=code)
@blueprint.route('/config')
def config():
ns = app.config.get_namespace('MYMODULE_')
return flask.jsonify(ns)
@blueprint.route('/crud')
def crud_get():
return flask.jsonify(method=flask.request.method)
@blueprint.route('/crud', methods=['POST'])
def crud_post():
payload = flask.request.get_json()
if payload is None:
raise exceptions.BadRequest('no post data')
return flask.jsonify(args=payload)
@blueprint.errorhandler(exceptions.BadRequestKeyError)
def bad_request_key_error(e):
message = 'missing \'%s\' parameter' % e.args[0]
return flask.jsonify(error=message), e.code
<commit_msg>Simplify import of api_v1 subpackage.<commit_after>
|
import flask
from flask import current_app as app
from werkzeug import exceptions
from . import upload
blueprint = flask.Blueprint('v1', __name__, url_prefix='/api/v1')
blueprint.add_url_rule('/upload', view_func=upload.post, methods=['POST'])
@blueprint.route('/test')
def index():
app.logger.info('TEST %d', 1)
return flask.jsonify(key='value')
@blueprint.route('/error')
def error():
raise Exception('oh, oh!')
@blueprint.route('/keys')
def keys():
code = flask.request.args['code']
return flask.jsonify(key=code)
@blueprint.route('/config')
def config():
ns = app.config.get_namespace('MYMODULE_')
return flask.jsonify(ns)
@blueprint.route('/crud')
def crud_get():
return flask.jsonify(method=flask.request.method)
@blueprint.route('/crud', methods=['POST'])
def crud_post():
payload = flask.request.get_json()
if payload is None:
raise exceptions.BadRequest('no post data')
return flask.jsonify(args=payload)
@blueprint.errorhandler(exceptions.BadRequestKeyError)
def bad_request_key_error(e):
message = 'missing \'%s\' parameter' % e.args[0]
return flask.jsonify(error=message), e.code
|
import flask
from flask import current_app as app
from werkzeug import exceptions
from mymodule.blueprints.api_v1 import upload
blueprint = flask.Blueprint('v1', __name__, url_prefix='/api/v1')
blueprint.add_url_rule('/upload', view_func=upload.post, methods=['POST'])
@blueprint.route('/test')
def index():
app.logger.info('TEST %d', 1)
return flask.jsonify(key='value')
@blueprint.route('/error')
def error():
raise Exception('oh, oh!')
@blueprint.route('/keys')
def keys():
code = flask.request.args['code']
return flask.jsonify(key=code)
@blueprint.route('/config')
def config():
ns = app.config.get_namespace('MYMODULE_')
return flask.jsonify(ns)
@blueprint.route('/crud')
def crud_get():
return flask.jsonify(method=flask.request.method)
@blueprint.route('/crud', methods=['POST'])
def crud_post():
payload = flask.request.get_json()
if payload is None:
raise exceptions.BadRequest('no post data')
return flask.jsonify(args=payload)
@blueprint.errorhandler(exceptions.BadRequestKeyError)
def bad_request_key_error(e):
message = 'missing \'%s\' parameter' % e.args[0]
return flask.jsonify(error=message), e.code
Simplify import of api_v1 subpackage.import flask
from flask import current_app as app
from werkzeug import exceptions
from . import upload
blueprint = flask.Blueprint('v1', __name__, url_prefix='/api/v1')
blueprint.add_url_rule('/upload', view_func=upload.post, methods=['POST'])
@blueprint.route('/test')
def index():
app.logger.info('TEST %d', 1)
return flask.jsonify(key='value')
@blueprint.route('/error')
def error():
raise Exception('oh, oh!')
@blueprint.route('/keys')
def keys():
code = flask.request.args['code']
return flask.jsonify(key=code)
@blueprint.route('/config')
def config():
ns = app.config.get_namespace('MYMODULE_')
return flask.jsonify(ns)
@blueprint.route('/crud')
def crud_get():
return flask.jsonify(method=flask.request.method)
@blueprint.route('/crud', methods=['POST'])
def crud_post():
payload = flask.request.get_json()
if payload is None:
raise exceptions.BadRequest('no post data')
return flask.jsonify(args=payload)
@blueprint.errorhandler(exceptions.BadRequestKeyError)
def bad_request_key_error(e):
message = 'missing \'%s\' parameter' % e.args[0]
return flask.jsonify(error=message), e.code
|
<commit_before>import flask
from flask import current_app as app
from werkzeug import exceptions
from mymodule.blueprints.api_v1 import upload
blueprint = flask.Blueprint('v1', __name__, url_prefix='/api/v1')
blueprint.add_url_rule('/upload', view_func=upload.post, methods=['POST'])
@blueprint.route('/test')
def index():
app.logger.info('TEST %d', 1)
return flask.jsonify(key='value')
@blueprint.route('/error')
def error():
raise Exception('oh, oh!')
@blueprint.route('/keys')
def keys():
code = flask.request.args['code']
return flask.jsonify(key=code)
@blueprint.route('/config')
def config():
ns = app.config.get_namespace('MYMODULE_')
return flask.jsonify(ns)
@blueprint.route('/crud')
def crud_get():
return flask.jsonify(method=flask.request.method)
@blueprint.route('/crud', methods=['POST'])
def crud_post():
payload = flask.request.get_json()
if payload is None:
raise exceptions.BadRequest('no post data')
return flask.jsonify(args=payload)
@blueprint.errorhandler(exceptions.BadRequestKeyError)
def bad_request_key_error(e):
message = 'missing \'%s\' parameter' % e.args[0]
return flask.jsonify(error=message), e.code
<commit_msg>Simplify import of api_v1 subpackage.<commit_after>import flask
from flask import current_app as app
from werkzeug import exceptions
from . import upload
blueprint = flask.Blueprint('v1', __name__, url_prefix='/api/v1')
blueprint.add_url_rule('/upload', view_func=upload.post, methods=['POST'])
@blueprint.route('/test')
def index():
app.logger.info('TEST %d', 1)
return flask.jsonify(key='value')
@blueprint.route('/error')
def error():
raise Exception('oh, oh!')
@blueprint.route('/keys')
def keys():
code = flask.request.args['code']
return flask.jsonify(key=code)
@blueprint.route('/config')
def config():
ns = app.config.get_namespace('MYMODULE_')
return flask.jsonify(ns)
@blueprint.route('/crud')
def crud_get():
return flask.jsonify(method=flask.request.method)
@blueprint.route('/crud', methods=['POST'])
def crud_post():
payload = flask.request.get_json()
if payload is None:
raise exceptions.BadRequest('no post data')
return flask.jsonify(args=payload)
@blueprint.errorhandler(exceptions.BadRequestKeyError)
def bad_request_key_error(e):
message = 'missing \'%s\' parameter' % e.args[0]
return flask.jsonify(error=message), e.code
|
48c20cdd866299a8b7495038ecb7ec9ea831657e
|
ibmcnx/doc/DataSources.py
|
ibmcnx/doc/DataSources.py
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
print AdminControl.getCell()
# Build the WebSphere containment path for the current cell and resolve it to
# a configuration id (Jython/wsadmin scripting objects, hence print statement).
cell = "/Cell:" + AdminControl.getCell() + "/"
cellid = AdminConfig.getid( cell )
# AdminConfig.list returns one DataSource entry per line; dump each one's
# full attribute list.
dbs = AdminConfig.list( 'DataSource', str(cellid) )
for db in dbs.splitlines():
    t1 = ibmcnx.functions.getDSId( db )
    AdminConfig.list( t1 )
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
print AdminControl.getCell()
cell = "/Cell:" + AdminControl.getCell() + "/"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
for db in dbs.splitlines().split('('):
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )
|
Create documentation of DataSource Settings
|
: Create documentation of DataSource Settings
Task-Url:
|
Python
|
apache-2.0
|
stoeps13/ibmcnx2,stoeps13/ibmcnx2
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
print AdminControl.getCell()
cell = "/Cell:" + AdminControl.getCell() + "/"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
for db in dbs.splitlines():
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ): Create documentation of DataSource Settings
Task-Url:
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
print AdminControl.getCell()
cell = "/Cell:" + AdminControl.getCell() + "/"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
for db in dbs.splitlines().split('('):
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )
|
<commit_before>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
print AdminControl.getCell()
cell = "/Cell:" + AdminControl.getCell() + "/"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
for db in dbs.splitlines():
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )<commit_msg>: Create documentation of DataSource Settings
Task-Url: <commit_after>
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
print AdminControl.getCell()
cell = "/Cell:" + AdminControl.getCell() + "/"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
for db in dbs.splitlines().split('('):
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
print AdminControl.getCell()
cell = "/Cell:" + AdminControl.getCell() + "/"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
for db in dbs.splitlines():
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ): Create documentation of DataSource Settings
Task-Url: ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
print AdminControl.getCell()
cell = "/Cell:" + AdminControl.getCell() + "/"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
for db in dbs.splitlines().split('('):
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )
|
<commit_before>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
print AdminControl.getCell()
cell = "/Cell:" + AdminControl.getCell() + "/"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
for db in dbs.splitlines():
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )<commit_msg>: Create documentation of DataSource Settings
Task-Url: <commit_after>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
print AdminControl.getCell()
cell = "/Cell:" + AdminControl.getCell() + "/"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
for db in dbs.splitlines().split('('):
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )
|
0b4a210031af065f9e8c98d98242283660e2fe7e
|
runtests.py
|
runtests.py
|
# python setup.py test
# or
# python runtests.py
import sys
from django.conf import settings
APP = 'djrill'
# Minimal in-memory Django configuration so the app's test suite can run
# without a full project (invoked via `python setup.py test` or runtests.py).
settings.configure(
    DEBUG=True,
    DATABASES={
        'default': {
            'ENGINE': 'django.db.backends.sqlite3',
        }
    },
    ROOT_URLCONF=APP+'.urls',
    INSTALLED_APPS=(
        'django.contrib.auth',
        'django.contrib.contenttypes',
        'django.contrib.sessions',
        'django.contrib.admin',
        APP,
    ),
    MIDDLEWARE_CLASSES=(
        'django.middleware.common.CommonMiddleware',
        'django.contrib.sessions.middleware.SessionMiddleware',
        'django.middleware.csrf.CsrfViewMiddleware',
        'django.contrib.auth.middleware.AuthenticationMiddleware',
    )
)
try:
    # Django 1.7+ initialize app registry
    from django import setup
    setup()
except ImportError:
    pass
try:
    from django.test.runner import DiscoverRunner as TestRunner  # Django 1.6+
except ImportError:
    from django.test.simple import DjangoTestSuiteRunner as TestRunner  # Django -1.5
def runtests():
    # Run the app's tests and exit with the failure count as the status code.
    test_runner = TestRunner(verbosity=1)
    failures = test_runner.run_tests([APP])
    sys.exit(failures)
if __name__ == '__main__':
    runtests()
|
# python setup.py test
# or
# python runtests.py
import sys
from django import VERSION as django_version
from django.conf import settings
APP = 'djrill'
# On Django >= 1.7 use SimpleAdminConfig so the admin app registers without
# autodiscovery — matching the admin setup recommended to users.
ADMIN = 'django.contrib.admin'
if django_version >= (1, 7):
    ADMIN = 'django.contrib.admin.apps.SimpleAdminConfig'
# Minimal in-memory Django configuration so the app's test suite can run
# without a full project (invoked via `python setup.py test` or runtests.py).
settings.configure(
    DEBUG=True,
    DATABASES={
        'default': {
            'ENGINE': 'django.db.backends.sqlite3',
        }
    },
    ROOT_URLCONF=APP+'.urls',
    INSTALLED_APPS=(
        'django.contrib.auth',
        'django.contrib.contenttypes',
        'django.contrib.sessions',
        ADMIN,
        APP,
    ),
    MIDDLEWARE_CLASSES=(
        'django.middleware.common.CommonMiddleware',
        'django.contrib.sessions.middleware.SessionMiddleware',
        'django.middleware.csrf.CsrfViewMiddleware',
        'django.contrib.auth.middleware.AuthenticationMiddleware',
    )
)
try:
    # Django 1.7+ initialize app registry
    from django import setup
    setup()
except ImportError:
    pass
try:
    from django.test.runner import DiscoverRunner as TestRunner  # Django 1.6+
except ImportError:
    from django.test.simple import DjangoTestSuiteRunner as TestRunner  # Django -1.5
def runtests():
    # Run the app's tests and exit with the failure count as the status code.
    test_runner = TestRunner(verbosity=1)
    failures = test_runner.run_tests([APP])
    sys.exit(failures)
if __name__ == '__main__':
    runtests()
|
Test against SimpleAdminConfig on Django>=1.7.
|
Test against SimpleAdminConfig on Django>=1.7.
In test cases, use the same admin setup we recommend to users.
|
Python
|
bsd-3-clause
|
idlweb/Djrill,barseghyanartur/Djrill,janusnic/Djrill,brack3t/Djrill,janusnic/Djrill,idlweb/Djrill,barseghyanartur/Djrill
|
# python setup.py test
# or
# python runtests.py
import sys
from django.conf import settings
APP = 'djrill'
settings.configure(
DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
ROOT_URLCONF=APP+'.urls',
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
APP,
),
MIDDLEWARE_CLASSES=(
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
)
try:
# Django 1.7+ initialize app registry
from django import setup
setup()
except ImportError:
pass
try:
from django.test.runner import DiscoverRunner as TestRunner # Django 1.6+
except ImportError:
from django.test.simple import DjangoTestSuiteRunner as TestRunner # Django -1.5
def runtests():
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests([APP])
sys.exit(failures)
if __name__ == '__main__':
runtests()
Test against SimpleAdminConfig on Django>=1.7.
In test cases, use the same admin setup we recommend to users.
|
# python setup.py test
# or
# python runtests.py
import sys
from django import VERSION as django_version
from django.conf import settings
APP = 'djrill'
ADMIN = 'django.contrib.admin'
if django_version >= (1, 7):
ADMIN = 'django.contrib.admin.apps.SimpleAdminConfig'
settings.configure(
DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
ROOT_URLCONF=APP+'.urls',
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
ADMIN,
APP,
),
MIDDLEWARE_CLASSES=(
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
)
try:
# Django 1.7+ initialize app registry
from django import setup
setup()
except ImportError:
pass
try:
from django.test.runner import DiscoverRunner as TestRunner # Django 1.6+
except ImportError:
from django.test.simple import DjangoTestSuiteRunner as TestRunner # Django -1.5
def runtests():
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests([APP])
sys.exit(failures)
if __name__ == '__main__':
runtests()
|
<commit_before># python setup.py test
# or
# python runtests.py
import sys
from django.conf import settings
APP = 'djrill'
settings.configure(
DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
ROOT_URLCONF=APP+'.urls',
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
APP,
),
MIDDLEWARE_CLASSES=(
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
)
try:
# Django 1.7+ initialize app registry
from django import setup
setup()
except ImportError:
pass
try:
from django.test.runner import DiscoverRunner as TestRunner # Django 1.6+
except ImportError:
from django.test.simple import DjangoTestSuiteRunner as TestRunner # Django -1.5
def runtests():
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests([APP])
sys.exit(failures)
if __name__ == '__main__':
runtests()
<commit_msg>Test against SimpleAdminConfig on Django>=1.7.
In test cases, use the same admin setup we recommend to users.<commit_after>
|
# python setup.py test
# or
# python runtests.py
import sys
from django import VERSION as django_version
from django.conf import settings
APP = 'djrill'
ADMIN = 'django.contrib.admin'
if django_version >= (1, 7):
ADMIN = 'django.contrib.admin.apps.SimpleAdminConfig'
settings.configure(
DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
ROOT_URLCONF=APP+'.urls',
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
ADMIN,
APP,
),
MIDDLEWARE_CLASSES=(
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
)
try:
# Django 1.7+ initialize app registry
from django import setup
setup()
except ImportError:
pass
try:
from django.test.runner import DiscoverRunner as TestRunner # Django 1.6+
except ImportError:
from django.test.simple import DjangoTestSuiteRunner as TestRunner # Django -1.5
def runtests():
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests([APP])
sys.exit(failures)
if __name__ == '__main__':
runtests()
|
# python setup.py test
# or
# python runtests.py
import sys
from django.conf import settings
APP = 'djrill'
settings.configure(
DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
ROOT_URLCONF=APP+'.urls',
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
APP,
),
MIDDLEWARE_CLASSES=(
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
)
try:
# Django 1.7+ initialize app registry
from django import setup
setup()
except ImportError:
pass
try:
from django.test.runner import DiscoverRunner as TestRunner # Django 1.6+
except ImportError:
from django.test.simple import DjangoTestSuiteRunner as TestRunner # Django -1.5
def runtests():
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests([APP])
sys.exit(failures)
if __name__ == '__main__':
runtests()
Test against SimpleAdminConfig on Django>=1.7.
In test cases, use the same admin setup we recommend to users.# python setup.py test
# or
# python runtests.py
import sys
from django import VERSION as django_version
from django.conf import settings
APP = 'djrill'
ADMIN = 'django.contrib.admin'
if django_version >= (1, 7):
ADMIN = 'django.contrib.admin.apps.SimpleAdminConfig'
settings.configure(
DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
ROOT_URLCONF=APP+'.urls',
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
ADMIN,
APP,
),
MIDDLEWARE_CLASSES=(
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
)
try:
# Django 1.7+ initialize app registry
from django import setup
setup()
except ImportError:
pass
try:
from django.test.runner import DiscoverRunner as TestRunner # Django 1.6+
except ImportError:
from django.test.simple import DjangoTestSuiteRunner as TestRunner # Django -1.5
def runtests():
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests([APP])
sys.exit(failures)
if __name__ == '__main__':
runtests()
|
<commit_before># python setup.py test
# or
# python runtests.py
import sys
from django.conf import settings
APP = 'djrill'
settings.configure(
DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
ROOT_URLCONF=APP+'.urls',
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
APP,
),
MIDDLEWARE_CLASSES=(
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
)
try:
# Django 1.7+ initialize app registry
from django import setup
setup()
except ImportError:
pass
try:
from django.test.runner import DiscoverRunner as TestRunner # Django 1.6+
except ImportError:
from django.test.simple import DjangoTestSuiteRunner as TestRunner # Django -1.5
def runtests():
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests([APP])
sys.exit(failures)
if __name__ == '__main__':
runtests()
<commit_msg>Test against SimpleAdminConfig on Django>=1.7.
In test cases, use the same admin setup we recommend to users.<commit_after># python setup.py test
# or
# python runtests.py
import sys
from django import VERSION as django_version
from django.conf import settings
APP = 'djrill'
ADMIN = 'django.contrib.admin'
if django_version >= (1, 7):
ADMIN = 'django.contrib.admin.apps.SimpleAdminConfig'
settings.configure(
DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
ROOT_URLCONF=APP+'.urls',
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
ADMIN,
APP,
),
MIDDLEWARE_CLASSES=(
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
)
try:
# Django 1.7+ initialize app registry
from django import setup
setup()
except ImportError:
pass
try:
from django.test.runner import DiscoverRunner as TestRunner # Django 1.6+
except ImportError:
from django.test.simple import DjangoTestSuiteRunner as TestRunner # Django -1.5
def runtests():
test_runner = TestRunner(verbosity=1)
failures = test_runner.run_tests([APP])
sys.exit(failures)
if __name__ == '__main__':
runtests()
|
cc2e96a6030840c5221a2cce5042bedb69f8fc55
|
templates/openwisp2/urls.py
|
templates/openwisp2/urls.py
|
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
admin.autodiscover()
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('django_netjsonconfig.controller.urls', namespace='controller')),
]
urlpatterns += staticfiles_urlpatterns()
|
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
admin.autodiscover()
admin.site.site_url = None
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('django_netjsonconfig.controller.urls', namespace='controller')),
]
urlpatterns += staticfiles_urlpatterns()
|
Hide "view site" link in admin
|
Hide "view site" link in admin
|
Python
|
bsd-3-clause
|
nemesisdesign/ansible-openwisp2,openwisp/ansible-openwisp2,ritwickdsouza/ansible-openwisp2
|
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
admin.autodiscover()
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('django_netjsonconfig.controller.urls', namespace='controller')),
]
urlpatterns += staticfiles_urlpatterns()
Hide "view site" link in admin
|
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
admin.autodiscover()
admin.site.site_url = None
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('django_netjsonconfig.controller.urls', namespace='controller')),
]
urlpatterns += staticfiles_urlpatterns()
|
<commit_before>from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
admin.autodiscover()
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('django_netjsonconfig.controller.urls', namespace='controller')),
]
urlpatterns += staticfiles_urlpatterns()
<commit_msg>Hide "view site" link in admin<commit_after>
|
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
admin.autodiscover()
admin.site.site_url = None
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('django_netjsonconfig.controller.urls', namespace='controller')),
]
urlpatterns += staticfiles_urlpatterns()
|
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
admin.autodiscover()
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('django_netjsonconfig.controller.urls', namespace='controller')),
]
urlpatterns += staticfiles_urlpatterns()
Hide "view site" link in adminfrom django.conf.urls import include, url
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
admin.autodiscover()
admin.site.site_url = None
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('django_netjsonconfig.controller.urls', namespace='controller')),
]
urlpatterns += staticfiles_urlpatterns()
|
<commit_before>from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
admin.autodiscover()
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('django_netjsonconfig.controller.urls', namespace='controller')),
]
urlpatterns += staticfiles_urlpatterns()
<commit_msg>Hide "view site" link in admin<commit_after>from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
admin.autodiscover()
admin.site.site_url = None
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('django_netjsonconfig.controller.urls', namespace='controller')),
]
urlpatterns += staticfiles_urlpatterns()
|
dd19919923b5265b913089b46cbcb60d4bec0841
|
tests/unit/common/storage/test_utils.py
|
tests/unit/common/storage/test_utils.py
|
# Copyright (c) 2014 Rackspace Hosting, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ddt
from marconi.queues.storage import utils
from marconi import tests as testing
@ddt.ddt
class TestUtils(testing.TestBase):
@testing.requires_mongodb
def test_can_connect_suceeds_if_good_uri_mongo(self):
self.assertTrue(utils.can_connect('mongodb://localhost:27017'))
def test_can_connect_suceeds_if_good_uri_sqlite(self):
self.assertTrue(utils.can_connect('sqlite://memory'))
@ddt.data(
'mongodb://localhost:27018', # wrong port
'localhost:27017', # missing scheme
'redis://localhost:6379' # not supported with default install
)
@testing.requires_mongodb
def test_can_connect_fails_if_bad_uri(self, uri):
self.assertFalse(utils.can_connect(uri))
|
# Copyright (c) 2014 Rackspace Hosting, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ddt
from marconi.queues.storage import utils
from marconi import tests as testing
@ddt.ddt
class TestUtils(testing.TestBase):
@testing.requires_mongodb
def test_can_connect_suceeds_if_good_uri_mongo(self):
self.assertTrue(utils.can_connect('mongodb://localhost:27017'))
def test_can_connect_suceeds_if_good_uri_sqlite(self):
self.assertTrue(utils.can_connect('sqlite://:memory:'))
@ddt.data(
'mongodb://localhost:27018', # wrong port
'localhost:27017', # missing scheme
'redis://localhost:6379' # not supported with default install
)
@testing.requires_mongodb
def test_can_connect_fails_if_bad_uri(self, uri):
self.assertFalse(utils.can_connect(uri))
|
Fix assertion for Testutils to check on sqlite://:memory:
|
Fix assertion for Testutils to check on sqlite://:memory:
The current uses sqlite://memory to test a connection. This is a faulty path to
memory. Corrent path is sqlite://:memory:
Change-Id: I950521f9b9c6aa8ae73be24121a836e84e409ca2
|
Python
|
apache-2.0
|
rackerlabs/marconi,openstack/zaqar,openstack/zaqar,openstack/zaqar,openstack/zaqar
|
# Copyright (c) 2014 Rackspace Hosting, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ddt
from marconi.queues.storage import utils
from marconi import tests as testing
@ddt.ddt
class TestUtils(testing.TestBase):
@testing.requires_mongodb
def test_can_connect_suceeds_if_good_uri_mongo(self):
self.assertTrue(utils.can_connect('mongodb://localhost:27017'))
def test_can_connect_suceeds_if_good_uri_sqlite(self):
self.assertTrue(utils.can_connect('sqlite://memory'))
@ddt.data(
'mongodb://localhost:27018', # wrong port
'localhost:27017', # missing scheme
'redis://localhost:6379' # not supported with default install
)
@testing.requires_mongodb
def test_can_connect_fails_if_bad_uri(self, uri):
self.assertFalse(utils.can_connect(uri))
Fix assertion for Testutils to check on sqlite://:memory:
The current uses sqlite://memory to test a connection. This is a faulty path to
memory. Corrent path is sqlite://:memory:
Change-Id: I950521f9b9c6aa8ae73be24121a836e84e409ca2
|
# Copyright (c) 2014 Rackspace Hosting, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ddt
from marconi.queues.storage import utils
from marconi import tests as testing
@ddt.ddt
class TestUtils(testing.TestBase):
@testing.requires_mongodb
def test_can_connect_suceeds_if_good_uri_mongo(self):
self.assertTrue(utils.can_connect('mongodb://localhost:27017'))
def test_can_connect_suceeds_if_good_uri_sqlite(self):
self.assertTrue(utils.can_connect('sqlite://:memory:'))
@ddt.data(
'mongodb://localhost:27018', # wrong port
'localhost:27017', # missing scheme
'redis://localhost:6379' # not supported with default install
)
@testing.requires_mongodb
def test_can_connect_fails_if_bad_uri(self, uri):
self.assertFalse(utils.can_connect(uri))
|
<commit_before># Copyright (c) 2014 Rackspace Hosting, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ddt
from marconi.queues.storage import utils
from marconi import tests as testing
@ddt.ddt
class TestUtils(testing.TestBase):
@testing.requires_mongodb
def test_can_connect_suceeds_if_good_uri_mongo(self):
self.assertTrue(utils.can_connect('mongodb://localhost:27017'))
def test_can_connect_suceeds_if_good_uri_sqlite(self):
self.assertTrue(utils.can_connect('sqlite://memory'))
@ddt.data(
'mongodb://localhost:27018', # wrong port
'localhost:27017', # missing scheme
'redis://localhost:6379' # not supported with default install
)
@testing.requires_mongodb
def test_can_connect_fails_if_bad_uri(self, uri):
self.assertFalse(utils.can_connect(uri))
<commit_msg>Fix assertion for Testutils to check on sqlite://:memory:
The current uses sqlite://memory to test a connection. This is a faulty path to
memory. Corrent path is sqlite://:memory:
Change-Id: I950521f9b9c6aa8ae73be24121a836e84e409ca2<commit_after>
|
# Copyright (c) 2014 Rackspace Hosting, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ddt
from marconi.queues.storage import utils
from marconi import tests as testing
@ddt.ddt
class TestUtils(testing.TestBase):
@testing.requires_mongodb
def test_can_connect_suceeds_if_good_uri_mongo(self):
self.assertTrue(utils.can_connect('mongodb://localhost:27017'))
def test_can_connect_suceeds_if_good_uri_sqlite(self):
self.assertTrue(utils.can_connect('sqlite://:memory:'))
@ddt.data(
'mongodb://localhost:27018', # wrong port
'localhost:27017', # missing scheme
'redis://localhost:6379' # not supported with default install
)
@testing.requires_mongodb
def test_can_connect_fails_if_bad_uri(self, uri):
self.assertFalse(utils.can_connect(uri))
|
# Copyright (c) 2014 Rackspace Hosting, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ddt
from marconi.queues.storage import utils
from marconi import tests as testing
@ddt.ddt
class TestUtils(testing.TestBase):
@testing.requires_mongodb
def test_can_connect_suceeds_if_good_uri_mongo(self):
self.assertTrue(utils.can_connect('mongodb://localhost:27017'))
def test_can_connect_suceeds_if_good_uri_sqlite(self):
self.assertTrue(utils.can_connect('sqlite://memory'))
@ddt.data(
'mongodb://localhost:27018', # wrong port
'localhost:27017', # missing scheme
'redis://localhost:6379' # not supported with default install
)
@testing.requires_mongodb
def test_can_connect_fails_if_bad_uri(self, uri):
self.assertFalse(utils.can_connect(uri))
Fix assertion for Testutils to check on sqlite://:memory:
The current uses sqlite://memory to test a connection. This is a faulty path to
memory. Corrent path is sqlite://:memory:
Change-Id: I950521f9b9c6aa8ae73be24121a836e84e409ca2# Copyright (c) 2014 Rackspace Hosting, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ddt
from marconi.queues.storage import utils
from marconi import tests as testing
@ddt.ddt
class TestUtils(testing.TestBase):
@testing.requires_mongodb
def test_can_connect_suceeds_if_good_uri_mongo(self):
self.assertTrue(utils.can_connect('mongodb://localhost:27017'))
def test_can_connect_suceeds_if_good_uri_sqlite(self):
self.assertTrue(utils.can_connect('sqlite://:memory:'))
@ddt.data(
'mongodb://localhost:27018', # wrong port
'localhost:27017', # missing scheme
'redis://localhost:6379' # not supported with default install
)
@testing.requires_mongodb
def test_can_connect_fails_if_bad_uri(self, uri):
self.assertFalse(utils.can_connect(uri))
|
<commit_before># Copyright (c) 2014 Rackspace Hosting, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ddt
from marconi.queues.storage import utils
from marconi import tests as testing
@ddt.ddt
class TestUtils(testing.TestBase):
@testing.requires_mongodb
def test_can_connect_suceeds_if_good_uri_mongo(self):
self.assertTrue(utils.can_connect('mongodb://localhost:27017'))
def test_can_connect_suceeds_if_good_uri_sqlite(self):
self.assertTrue(utils.can_connect('sqlite://memory'))
@ddt.data(
'mongodb://localhost:27018', # wrong port
'localhost:27017', # missing scheme
'redis://localhost:6379' # not supported with default install
)
@testing.requires_mongodb
def test_can_connect_fails_if_bad_uri(self, uri):
self.assertFalse(utils.can_connect(uri))
<commit_msg>Fix assertion for Testutils to check on sqlite://:memory:
The current uses sqlite://memory to test a connection. This is a faulty path to
memory. Corrent path is sqlite://:memory:
Change-Id: I950521f9b9c6aa8ae73be24121a836e84e409ca2<commit_after># Copyright (c) 2014 Rackspace Hosting, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ddt
from marconi.queues.storage import utils
from marconi import tests as testing
@ddt.ddt
class TestUtils(testing.TestBase):
@testing.requires_mongodb
def test_can_connect_suceeds_if_good_uri_mongo(self):
self.assertTrue(utils.can_connect('mongodb://localhost:27017'))
def test_can_connect_suceeds_if_good_uri_sqlite(self):
self.assertTrue(utils.can_connect('sqlite://:memory:'))
@ddt.data(
'mongodb://localhost:27018', # wrong port
'localhost:27017', # missing scheme
'redis://localhost:6379' # not supported with default install
)
@testing.requires_mongodb
def test_can_connect_fails_if_bad_uri(self, uri):
self.assertFalse(utils.can_connect(uri))
|
d0dfd2c9055092f64e396177275dbe285ad41efb
|
blo/DBControl.py
|
blo/DBControl.py
|
import sqlite3
class DBControl:
def __init__(self, db_name=":memory:"):
self.db_conn = sqlite3.connect(db_name)
|
import sqlite3
class DBControl:
def __init__(self, db_name=":memory:"):
self.db_conn = sqlite3.connect(db_name)
def create_tables(self):
self.db_conn.execute("""CREATE TABLE IF NOT EXISTS Articles ("
id INTEGER PRIMARY KEY AUTOINCREMENT,
text TEXT,
digest TEXT UNIQUE,
updatedate TEXT)""")
self.db_conn.execute("CREATE VIRTUAL TABLE Articles_fts USING fts4( words TEXT )")
self.db_conn.commit()
def close_connect(self):
self.db_conn.close()
|
Add Create tables method and close db connection method.
|
Add Create tables method and close db connection method.
create_table method create article table (if not exists in db) and virtual table for full text search.
|
Python
|
mit
|
10nin/blo,10nin/blo
|
import sqlite3
class DBControl:
def __init__(self, db_name=":memory:"):
self.db_conn = sqlite3.connect(db_name)
Add Create tables method and close db connection method.
create_table method create article table (if not exists in db) and virtual table for full text search.
|
import sqlite3
class DBControl:
def __init__(self, db_name=":memory:"):
self.db_conn = sqlite3.connect(db_name)
def create_tables(self):
self.db_conn.execute("""CREATE TABLE IF NOT EXISTS Articles ("
id INTEGER PRIMARY KEY AUTOINCREMENT,
text TEXT,
digest TEXT UNIQUE,
updatedate TEXT)""")
self.db_conn.execute("CREATE VIRTUAL TABLE Articles_fts USING fts4( words TEXT )")
self.db_conn.commit()
def close_connect(self):
self.db_conn.close()
|
<commit_before>import sqlite3
class DBControl:
def __init__(self, db_name=":memory:"):
self.db_conn = sqlite3.connect(db_name)
<commit_msg>Add Create tables method and close db connection method.
create_table method create article table (if not exists in db) and virtual table for full text search.<commit_after>
|
import sqlite3
class DBControl:
def __init__(self, db_name=":memory:"):
self.db_conn = sqlite3.connect(db_name)
def create_tables(self):
self.db_conn.execute("""CREATE TABLE IF NOT EXISTS Articles ("
id INTEGER PRIMARY KEY AUTOINCREMENT,
text TEXT,
digest TEXT UNIQUE,
updatedate TEXT)""")
self.db_conn.execute("CREATE VIRTUAL TABLE Articles_fts USING fts4( words TEXT )")
self.db_conn.commit()
def close_connect(self):
self.db_conn.close()
|
import sqlite3
class DBControl:
def __init__(self, db_name=":memory:"):
self.db_conn = sqlite3.connect(db_name)
Add Create tables method and close db connection method.
create_table method create article table (if not exists in db) and virtual table for full text search.import sqlite3
class DBControl:
def __init__(self, db_name=":memory:"):
self.db_conn = sqlite3.connect(db_name)
def create_tables(self):
self.db_conn.execute("""CREATE TABLE IF NOT EXISTS Articles ("
id INTEGER PRIMARY KEY AUTOINCREMENT,
text TEXT,
digest TEXT UNIQUE,
updatedate TEXT)""")
self.db_conn.execute("CREATE VIRTUAL TABLE Articles_fts USING fts4( words TEXT )")
self.db_conn.commit()
def close_connect(self):
self.db_conn.close()
|
<commit_before>import sqlite3
class DBControl:
def __init__(self, db_name=":memory:"):
self.db_conn = sqlite3.connect(db_name)
<commit_msg>Add Create tables method and close db connection method.
create_table method create article table (if not exists in db) and virtual table for full text search.<commit_after>import sqlite3
class DBControl:
def __init__(self, db_name=":memory:"):
self.db_conn = sqlite3.connect(db_name)
def create_tables(self):
self.db_conn.execute("""CREATE TABLE IF NOT EXISTS Articles ("
id INTEGER PRIMARY KEY AUTOINCREMENT,
text TEXT,
digest TEXT UNIQUE,
updatedate TEXT)""")
self.db_conn.execute("CREATE VIRTUAL TABLE Articles_fts USING fts4( words TEXT )")
self.db_conn.commit()
def close_connect(self):
self.db_conn.close()
|
b2ed8de7302cbea0a80b87f3dfe370ca0a60d75a
|
kawasemi/backends/github.py
|
kawasemi/backends/github.py
|
# -*- coding: utf-8 -*-
import json
import requests
from .base import BaseChannel
from ..exceptions import HttpError
class GitHubChannel(BaseChannel):
def __init__(self, token, owner, repository,
base_url="https://api.github.com", *args, **kwargs):
self.token = token
self.url = base_url + "/repos/" + owner + "/" + repository + "/issues"
def send(self, message, fail_silently=False, options=None):
headers = {
"Authorization": "token " + self.token,
"Content-Type": "application/json"
}
payload = {
"title": message
}
self._set_payload_from_options(payload, options, "github", [
"body", "milestone", "labels", "assignees"])
try:
response = requests.post(self.url,
headers=headers,
data=json.dumps(payload))
if response.status_code != requests.codes.created:
raise HttpError(response.status_code, response.text)
except:
if not fail_silently:
raise
|
# -*- coding: utf-8 -*-
import json
import requests
from .base import BaseChannel
from ..exceptions import HttpError
class GitHubChannel(BaseChannel):
def __init__(self, token, owner, repository,
base_url="https://api.github.com", *args, **kwargs):
self.token = token
self.url = base_url + "/repos/" + owner + "/" + repository + "/issues"
def send(self, message, fail_silently=False, options=None):
headers = {
"Accept": "application/vnd.github.v3+json",
"Authorization": "token " + self.token,
"Content-Type": "application/json"
}
payload = {
"title": message
}
self._set_payload_from_options(payload, options, "github", [
"body", "milestone", "labels", "assignees"])
try:
response = requests.post(self.url,
headers=headers,
data=json.dumps(payload))
if response.status_code != requests.codes.created:
raise HttpError(response.status_code, response.text)
except:
if not fail_silently:
raise
|
Set Accept header explicitly in GitHubChannel
|
Set Accept header explicitly in GitHubChannel
|
Python
|
mit
|
ymyzk/kawasemi,ymyzk/django-channels
|
# -*- coding: utf-8 -*-
import json
import requests
from .base import BaseChannel
from ..exceptions import HttpError
class GitHubChannel(BaseChannel):
def __init__(self, token, owner, repository,
base_url="https://api.github.com", *args, **kwargs):
self.token = token
self.url = base_url + "/repos/" + owner + "/" + repository + "/issues"
def send(self, message, fail_silently=False, options=None):
headers = {
"Authorization": "token " + self.token,
"Content-Type": "application/json"
}
payload = {
"title": message
}
self._set_payload_from_options(payload, options, "github", [
"body", "milestone", "labels", "assignees"])
try:
response = requests.post(self.url,
headers=headers,
data=json.dumps(payload))
if response.status_code != requests.codes.created:
raise HttpError(response.status_code, response.text)
except:
if not fail_silently:
raise
Set Accept header explicitly in GitHubChannel
|
# -*- coding: utf-8 -*-
import json
import requests
from .base import BaseChannel
from ..exceptions import HttpError
class GitHubChannel(BaseChannel):
def __init__(self, token, owner, repository,
base_url="https://api.github.com", *args, **kwargs):
self.token = token
self.url = base_url + "/repos/" + owner + "/" + repository + "/issues"
def send(self, message, fail_silently=False, options=None):
headers = {
"Accept": "application/vnd.github.v3+json",
"Authorization": "token " + self.token,
"Content-Type": "application/json"
}
payload = {
"title": message
}
self._set_payload_from_options(payload, options, "github", [
"body", "milestone", "labels", "assignees"])
try:
response = requests.post(self.url,
headers=headers,
data=json.dumps(payload))
if response.status_code != requests.codes.created:
raise HttpError(response.status_code, response.text)
except:
if not fail_silently:
raise
|
<commit_before># -*- coding: utf-8 -*-
import json
import requests
from .base import BaseChannel
from ..exceptions import HttpError
class GitHubChannel(BaseChannel):
def __init__(self, token, owner, repository,
base_url="https://api.github.com", *args, **kwargs):
self.token = token
self.url = base_url + "/repos/" + owner + "/" + repository + "/issues"
def send(self, message, fail_silently=False, options=None):
headers = {
"Authorization": "token " + self.token,
"Content-Type": "application/json"
}
payload = {
"title": message
}
self._set_payload_from_options(payload, options, "github", [
"body", "milestone", "labels", "assignees"])
try:
response = requests.post(self.url,
headers=headers,
data=json.dumps(payload))
if response.status_code != requests.codes.created:
raise HttpError(response.status_code, response.text)
except:
if not fail_silently:
raise
<commit_msg>Set Accept header explicitly in GitHubChannel<commit_after>
|
# -*- coding: utf-8 -*-
import json
import requests
from .base import BaseChannel
from ..exceptions import HttpError
class GitHubChannel(BaseChannel):
def __init__(self, token, owner, repository,
base_url="https://api.github.com", *args, **kwargs):
self.token = token
self.url = base_url + "/repos/" + owner + "/" + repository + "/issues"
def send(self, message, fail_silently=False, options=None):
headers = {
"Accept": "application/vnd.github.v3+json",
"Authorization": "token " + self.token,
"Content-Type": "application/json"
}
payload = {
"title": message
}
self._set_payload_from_options(payload, options, "github", [
"body", "milestone", "labels", "assignees"])
try:
response = requests.post(self.url,
headers=headers,
data=json.dumps(payload))
if response.status_code != requests.codes.created:
raise HttpError(response.status_code, response.text)
except:
if not fail_silently:
raise
|
# -*- coding: utf-8 -*-
import json
import requests
from .base import BaseChannel
from ..exceptions import HttpError
class GitHubChannel(BaseChannel):
def __init__(self, token, owner, repository,
base_url="https://api.github.com", *args, **kwargs):
self.token = token
self.url = base_url + "/repos/" + owner + "/" + repository + "/issues"
def send(self, message, fail_silently=False, options=None):
headers = {
"Authorization": "token " + self.token,
"Content-Type": "application/json"
}
payload = {
"title": message
}
self._set_payload_from_options(payload, options, "github", [
"body", "milestone", "labels", "assignees"])
try:
response = requests.post(self.url,
headers=headers,
data=json.dumps(payload))
if response.status_code != requests.codes.created:
raise HttpError(response.status_code, response.text)
except:
if not fail_silently:
raise
Set Accept header explicitly in GitHubChannel# -*- coding: utf-8 -*-
import json
import requests
from .base import BaseChannel
from ..exceptions import HttpError
class GitHubChannel(BaseChannel):
def __init__(self, token, owner, repository,
base_url="https://api.github.com", *args, **kwargs):
self.token = token
self.url = base_url + "/repos/" + owner + "/" + repository + "/issues"
def send(self, message, fail_silently=False, options=None):
headers = {
"Accept": "application/vnd.github.v3+json",
"Authorization": "token " + self.token,
"Content-Type": "application/json"
}
payload = {
"title": message
}
self._set_payload_from_options(payload, options, "github", [
"body", "milestone", "labels", "assignees"])
try:
response = requests.post(self.url,
headers=headers,
data=json.dumps(payload))
if response.status_code != requests.codes.created:
raise HttpError(response.status_code, response.text)
except:
if not fail_silently:
raise
|
<commit_before># -*- coding: utf-8 -*-
import json
import requests
from .base import BaseChannel
from ..exceptions import HttpError
class GitHubChannel(BaseChannel):
def __init__(self, token, owner, repository,
base_url="https://api.github.com", *args, **kwargs):
self.token = token
self.url = base_url + "/repos/" + owner + "/" + repository + "/issues"
def send(self, message, fail_silently=False, options=None):
headers = {
"Authorization": "token " + self.token,
"Content-Type": "application/json"
}
payload = {
"title": message
}
self._set_payload_from_options(payload, options, "github", [
"body", "milestone", "labels", "assignees"])
try:
response = requests.post(self.url,
headers=headers,
data=json.dumps(payload))
if response.status_code != requests.codes.created:
raise HttpError(response.status_code, response.text)
except:
if not fail_silently:
raise
<commit_msg>Set Accept header explicitly in GitHubChannel<commit_after># -*- coding: utf-8 -*-
import json
import requests
from .base import BaseChannel
from ..exceptions import HttpError
class GitHubChannel(BaseChannel):
def __init__(self, token, owner, repository,
base_url="https://api.github.com", *args, **kwargs):
self.token = token
self.url = base_url + "/repos/" + owner + "/" + repository + "/issues"
def send(self, message, fail_silently=False, options=None):
headers = {
"Accept": "application/vnd.github.v3+json",
"Authorization": "token " + self.token,
"Content-Type": "application/json"
}
payload = {
"title": message
}
self._set_payload_from_options(payload, options, "github", [
"body", "milestone", "labels", "assignees"])
try:
response = requests.post(self.url,
headers=headers,
data=json.dumps(payload))
if response.status_code != requests.codes.created:
raise HttpError(response.status_code, response.text)
except:
if not fail_silently:
raise
|
c04c9dcfdc2be2368ac2c705ce4852039573767b
|
datac/main.py
|
datac/main.py
|
# -*- coding: utf-8 -*-
import copy
def init_abscissa(params, abscissae, abscissa_name):
"""
List of dicts to initialize object w/ calc method
This method generates a list of dicts; each dict is sufficient to initialize an object featuring a calculator method of interest. This list can be thought of as the abscissae of a set of data. Each dict will contain data which remains constant for each calculation, but it nonetheless required to initialize the object. Each dict will also contain a datum which is the abscissa for the calculation and is also required to initialize the object.
:param dict params: Static parameters required to initialize the object featuring the ordinate calculator method.
:param list abscissae: Independent variable also required to initialize object featuring the ordinate calculator method.
:param str abscissa_name: Dictionary key for the abscissa name.
"""
dict_list = []
for abscissa in abscissae:
param_dict = copy.copy(params)
param_dict[abscissa_name] = abscissa
param_dict["abscissa_name"] = abscissa_name
dict_list.append(param_dict)
return dict_list
|
# -*- coding: utf-8 -*-
import copy
def init_abscissa(abscissae, abscissa_name, params = {}):
"""
List of dicts to initialize object w/ calc method
This method generates a list of dicts; each dict is sufficient to initialize an object featuring a calculator method of interest. This list can be thought of as the abscissae of a set of data. Each dict will contain data which remains constant for each calculation, but it nonetheless required to initialize the object. Each dict will also contain a datum which is the abscissa for the calculation and is also required to initialize the object.
:param dict params: Static parameters required to initialize the object featuring the ordinate calculator method.
:param list abscissae: Independent variable also required to initialize object featuring the ordinate calculator method.
:param str abscissa_name: Dictionary key for the abscissa name.
"""
dict_list = []
for abscissa in abscissae:
param_dict = copy.copy(params)
param_dict[abscissa_name] = abscissa
param_dict["abscissa_name"] = abscissa_name
dict_list.append(param_dict)
return dict_list
|
Reorder arg list, set default for static params
|
Reorder arg list, set default for static params
|
Python
|
mit
|
jrsmith3/datac,jrsmith3/datac
|
# -*- coding: utf-8 -*-
import copy
def init_abscissa(params, abscissae, abscissa_name):
"""
List of dicts to initialize object w/ calc method
This method generates a list of dicts; each dict is sufficient to initialize an object featuring a calculator method of interest. This list can be thought of as the abscissae of a set of data. Each dict will contain data which remains constant for each calculation, but it nonetheless required to initialize the object. Each dict will also contain a datum which is the abscissa for the calculation and is also required to initialize the object.
:param dict params: Static parameters required to initialize the object featuring the ordinate calculator method.
:param list abscissae: Independent variable also required to initialize object featuring the ordinate calculator method.
:param str abscissa_name: Dictionary key for the abscissa name.
"""
dict_list = []
for abscissa in abscissae:
param_dict = copy.copy(params)
param_dict[abscissa_name] = abscissa
param_dict["abscissa_name"] = abscissa_name
dict_list.append(param_dict)
return dict_list
Reorder arg list, set default for static params
|
# -*- coding: utf-8 -*-
import copy
def init_abscissa(abscissae, abscissa_name, params = {}):
"""
List of dicts to initialize object w/ calc method
This method generates a list of dicts; each dict is sufficient to initialize an object featuring a calculator method of interest. This list can be thought of as the abscissae of a set of data. Each dict will contain data which remains constant for each calculation, but it nonetheless required to initialize the object. Each dict will also contain a datum which is the abscissa for the calculation and is also required to initialize the object.
:param dict params: Static parameters required to initialize the object featuring the ordinate calculator method.
:param list abscissae: Independent variable also required to initialize object featuring the ordinate calculator method.
:param str abscissa_name: Dictionary key for the abscissa name.
"""
dict_list = []
for abscissa in abscissae:
param_dict = copy.copy(params)
param_dict[abscissa_name] = abscissa
param_dict["abscissa_name"] = abscissa_name
dict_list.append(param_dict)
return dict_list
|
<commit_before># -*- coding: utf-8 -*-
import copy
def init_abscissa(params, abscissae, abscissa_name):
"""
List of dicts to initialize object w/ calc method
This method generates a list of dicts; each dict is sufficient to initialize an object featuring a calculator method of interest. This list can be thought of as the abscissae of a set of data. Each dict will contain data which remains constant for each calculation, but it nonetheless required to initialize the object. Each dict will also contain a datum which is the abscissa for the calculation and is also required to initialize the object.
:param dict params: Static parameters required to initialize the object featuring the ordinate calculator method.
:param list abscissae: Independent variable also required to initialize object featuring the ordinate calculator method.
:param str abscissa_name: Dictionary key for the abscissa name.
"""
dict_list = []
for abscissa in abscissae:
param_dict = copy.copy(params)
param_dict[abscissa_name] = abscissa
param_dict["abscissa_name"] = abscissa_name
dict_list.append(param_dict)
return dict_list
<commit_msg>Reorder arg list, set default for static params<commit_after>
|
# -*- coding: utf-8 -*-
import copy
def init_abscissa(abscissae, abscissa_name, params = {}):
"""
List of dicts to initialize object w/ calc method
This method generates a list of dicts; each dict is sufficient to initialize an object featuring a calculator method of interest. This list can be thought of as the abscissae of a set of data. Each dict will contain data which remains constant for each calculation, but it nonetheless required to initialize the object. Each dict will also contain a datum which is the abscissa for the calculation and is also required to initialize the object.
:param dict params: Static parameters required to initialize the object featuring the ordinate calculator method.
:param list abscissae: Independent variable also required to initialize object featuring the ordinate calculator method.
:param str abscissa_name: Dictionary key for the abscissa name.
"""
dict_list = []
for abscissa in abscissae:
param_dict = copy.copy(params)
param_dict[abscissa_name] = abscissa
param_dict["abscissa_name"] = abscissa_name
dict_list.append(param_dict)
return dict_list
|
# -*- coding: utf-8 -*-
import copy
def init_abscissa(params, abscissae, abscissa_name):
"""
List of dicts to initialize object w/ calc method
This method generates a list of dicts; each dict is sufficient to initialize an object featuring a calculator method of interest. This list can be thought of as the abscissae of a set of data. Each dict will contain data which remains constant for each calculation, but it nonetheless required to initialize the object. Each dict will also contain a datum which is the abscissa for the calculation and is also required to initialize the object.
:param dict params: Static parameters required to initialize the object featuring the ordinate calculator method.
:param list abscissae: Independent variable also required to initialize object featuring the ordinate calculator method.
:param str abscissa_name: Dictionary key for the abscissa name.
"""
dict_list = []
for abscissa in abscissae:
param_dict = copy.copy(params)
param_dict[abscissa_name] = abscissa
param_dict["abscissa_name"] = abscissa_name
dict_list.append(param_dict)
return dict_list
Reorder arg list, set default for static params# -*- coding: utf-8 -*-
import copy
def init_abscissa(abscissae, abscissa_name, params = {}):
"""
List of dicts to initialize object w/ calc method
This method generates a list of dicts; each dict is sufficient to initialize an object featuring a calculator method of interest. This list can be thought of as the abscissae of a set of data. Each dict will contain data which remains constant for each calculation, but it nonetheless required to initialize the object. Each dict will also contain a datum which is the abscissa for the calculation and is also required to initialize the object.
:param dict params: Static parameters required to initialize the object featuring the ordinate calculator method.
:param list abscissae: Independent variable also required to initialize object featuring the ordinate calculator method.
:param str abscissa_name: Dictionary key for the abscissa name.
"""
dict_list = []
for abscissa in abscissae:
param_dict = copy.copy(params)
param_dict[abscissa_name] = abscissa
param_dict["abscissa_name"] = abscissa_name
dict_list.append(param_dict)
return dict_list
|
<commit_before># -*- coding: utf-8 -*-
import copy
def init_abscissa(params, abscissae, abscissa_name):
"""
List of dicts to initialize object w/ calc method
This method generates a list of dicts; each dict is sufficient to initialize an object featuring a calculator method of interest. This list can be thought of as the abscissae of a set of data. Each dict will contain data which remains constant for each calculation, but it nonetheless required to initialize the object. Each dict will also contain a datum which is the abscissa for the calculation and is also required to initialize the object.
:param dict params: Static parameters required to initialize the object featuring the ordinate calculator method.
:param list abscissae: Independent variable also required to initialize object featuring the ordinate calculator method.
:param str abscissa_name: Dictionary key for the abscissa name.
"""
dict_list = []
for abscissa in abscissae:
param_dict = copy.copy(params)
param_dict[abscissa_name] = abscissa
param_dict["abscissa_name"] = abscissa_name
dict_list.append(param_dict)
return dict_list
<commit_msg>Reorder arg list, set default for static params<commit_after># -*- coding: utf-8 -*-
import copy
def init_abscissa(abscissae, abscissa_name, params = {}):
"""
List of dicts to initialize object w/ calc method
This method generates a list of dicts; each dict is sufficient to initialize an object featuring a calculator method of interest. This list can be thought of as the abscissae of a set of data. Each dict will contain data which remains constant for each calculation, but it nonetheless required to initialize the object. Each dict will also contain a datum which is the abscissa for the calculation and is also required to initialize the object.
:param dict params: Static parameters required to initialize the object featuring the ordinate calculator method.
:param list abscissae: Independent variable also required to initialize object featuring the ordinate calculator method.
:param str abscissa_name: Dictionary key for the abscissa name.
"""
dict_list = []
for abscissa in abscissae:
param_dict = copy.copy(params)
param_dict[abscissa_name] = abscissa
param_dict["abscissa_name"] = abscissa_name
dict_list.append(param_dict)
return dict_list
|
746510dc0b939fe11a2b025805678a0829cf814a
|
handler/minion_server.py
|
handler/minion_server.py
|
#!/usr/bin/env python
import server
import supervisor
class MinionServer(server.Server):
def __init__(self, ip, port):
super(MinionServer, self).__init__(ip, port)
def handle(self, data):
supervisor.start(
'worker.conf',
target='worker_{}'.format(data['image']),
image=data['image'],
numprocs=data.get('numprocs', 1),
args=data.get('args', ''))
return {'status': 'ok'}
def main():
server = MinionServer('*', 1234)
server.start()
server.join()
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import server
import supervisor
class MinionServer(server.Server):
def __init__(self, ip, port):
super(MinionServer, self).__init__(ip, port)
def handle(self, data):
"""Start a worker.
Message format:
{
'image': 'image name'
'numprocs': number of workers,
'args': 'extra arguments for "docker run -d image ..."'
}
"""
supervisor.start(
'worker.conf',
target='worker_{}'.format(data['image']),
image=data['image'],
numprocs=data.get('numprocs', 1),
args=data.get('args', ''))
return {'status': 'ok'}
def main():
server = MinionServer('*', 1234)
server.start()
server.join()
if __name__ == '__main__':
main()
|
Document message format for minion server
|
Document message format for minion server
|
Python
|
mit
|
waltermoreira/adama-minion
|
#!/usr/bin/env python
import server
import supervisor
class MinionServer(server.Server):
def __init__(self, ip, port):
super(MinionServer, self).__init__(ip, port)
def handle(self, data):
supervisor.start(
'worker.conf',
target='worker_{}'.format(data['image']),
image=data['image'],
numprocs=data.get('numprocs', 1),
args=data.get('args', ''))
return {'status': 'ok'}
def main():
server = MinionServer('*', 1234)
server.start()
server.join()
if __name__ == '__main__':
main()Document message format for minion server
|
#!/usr/bin/env python
import server
import supervisor
class MinionServer(server.Server):
def __init__(self, ip, port):
super(MinionServer, self).__init__(ip, port)
def handle(self, data):
"""Start a worker.
Message format:
{
'image': 'image name'
'numprocs': number of workers,
'args': 'extra arguments for "docker run -d image ..."'
}
"""
supervisor.start(
'worker.conf',
target='worker_{}'.format(data['image']),
image=data['image'],
numprocs=data.get('numprocs', 1),
args=data.get('args', ''))
return {'status': 'ok'}
def main():
server = MinionServer('*', 1234)
server.start()
server.join()
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
import server
import supervisor
class MinionServer(server.Server):
def __init__(self, ip, port):
super(MinionServer, self).__init__(ip, port)
def handle(self, data):
supervisor.start(
'worker.conf',
target='worker_{}'.format(data['image']),
image=data['image'],
numprocs=data.get('numprocs', 1),
args=data.get('args', ''))
return {'status': 'ok'}
def main():
server = MinionServer('*', 1234)
server.start()
server.join()
if __name__ == '__main__':
main()<commit_msg>Document message format for minion server<commit_after>
|
#!/usr/bin/env python
import server
import supervisor
class MinionServer(server.Server):
def __init__(self, ip, port):
super(MinionServer, self).__init__(ip, port)
def handle(self, data):
"""Start a worker.
Message format:
{
'image': 'image name'
'numprocs': number of workers,
'args': 'extra arguments for "docker run -d image ..."'
}
"""
supervisor.start(
'worker.conf',
target='worker_{}'.format(data['image']),
image=data['image'],
numprocs=data.get('numprocs', 1),
args=data.get('args', ''))
return {'status': 'ok'}
def main():
server = MinionServer('*', 1234)
server.start()
server.join()
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import server
import supervisor
class MinionServer(server.Server):
def __init__(self, ip, port):
super(MinionServer, self).__init__(ip, port)
def handle(self, data):
supervisor.start(
'worker.conf',
target='worker_{}'.format(data['image']),
image=data['image'],
numprocs=data.get('numprocs', 1),
args=data.get('args', ''))
return {'status': 'ok'}
def main():
server = MinionServer('*', 1234)
server.start()
server.join()
if __name__ == '__main__':
main()Document message format for minion server#!/usr/bin/env python
import server
import supervisor
class MinionServer(server.Server):
def __init__(self, ip, port):
super(MinionServer, self).__init__(ip, port)
def handle(self, data):
"""Start a worker.
Message format:
{
'image': 'image name'
'numprocs': number of workers,
'args': 'extra arguments for "docker run -d image ..."'
}
"""
supervisor.start(
'worker.conf',
target='worker_{}'.format(data['image']),
image=data['image'],
numprocs=data.get('numprocs', 1),
args=data.get('args', ''))
return {'status': 'ok'}
def main():
server = MinionServer('*', 1234)
server.start()
server.join()
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
import server
import supervisor
class MinionServer(server.Server):
def __init__(self, ip, port):
super(MinionServer, self).__init__(ip, port)
def handle(self, data):
supervisor.start(
'worker.conf',
target='worker_{}'.format(data['image']),
image=data['image'],
numprocs=data.get('numprocs', 1),
args=data.get('args', ''))
return {'status': 'ok'}
def main():
server = MinionServer('*', 1234)
server.start()
server.join()
if __name__ == '__main__':
main()<commit_msg>Document message format for minion server<commit_after>#!/usr/bin/env python
import server
import supervisor
class MinionServer(server.Server):
def __init__(self, ip, port):
super(MinionServer, self).__init__(ip, port)
def handle(self, data):
"""Start a worker.
Message format:
{
'image': 'image name'
'numprocs': number of workers,
'args': 'extra arguments for "docker run -d image ..."'
}
"""
supervisor.start(
'worker.conf',
target='worker_{}'.format(data['image']),
image=data['image'],
numprocs=data.get('numprocs', 1),
args=data.get('args', ''))
return {'status': 'ok'}
def main():
server = MinionServer('*', 1234)
server.start()
server.join()
if __name__ == '__main__':
main()
|
fbb78c22de50274c2fa937929799259042810bac
|
src/sas/sasview/__init__.py
|
src/sas/sasview/__init__.py
|
from distutils.version import StrictVersion
__version__ = "5.0"
StrictVersion(__version__)
__DOI__ = "Zenodo, DOI:10.5281/zenodo.3011184"
__release_date__ = "2019"
__build__ = "GIT_COMMIT"
|
from distutils.version import StrictVersion
__version__ = "5.0.1"
StrictVersion(__version__)
__DOI__ = "Zenodo, DOI:10.5281/zenodo.3011184"
__release_date__ = "2019"
__build__ = "GIT_COMMIT"
|
Update dot version number to enable separate installation space. GH-1412
|
Update dot version number to enable separate installation space.
GH-1412
|
Python
|
bsd-3-clause
|
SasView/sasview,SasView/sasview,SasView/sasview,SasView/sasview,SasView/sasview,SasView/sasview
|
from distutils.version import StrictVersion
__version__ = "5.0"
StrictVersion(__version__)
__DOI__ = "Zenodo, DOI:10.5281/zenodo.3011184"
__release_date__ = "2019"
__build__ = "GIT_COMMIT"Update dot version number to enable separate installation space.
GH-1412
|
from distutils.version import StrictVersion
__version__ = "5.0.1"
StrictVersion(__version__)
__DOI__ = "Zenodo, DOI:10.5281/zenodo.3011184"
__release_date__ = "2019"
__build__ = "GIT_COMMIT"
|
<commit_before>from distutils.version import StrictVersion
__version__ = "5.0"
StrictVersion(__version__)
__DOI__ = "Zenodo, DOI:10.5281/zenodo.3011184"
__release_date__ = "2019"
__build__ = "GIT_COMMIT"<commit_msg>Update dot version number to enable separate installation space.
GH-1412<commit_after>
|
from distutils.version import StrictVersion
__version__ = "5.0.1"
StrictVersion(__version__)
__DOI__ = "Zenodo, DOI:10.5281/zenodo.3011184"
__release_date__ = "2019"
__build__ = "GIT_COMMIT"
|
from distutils.version import StrictVersion
__version__ = "5.0"
StrictVersion(__version__)
__DOI__ = "Zenodo, DOI:10.5281/zenodo.3011184"
__release_date__ = "2019"
__build__ = "GIT_COMMIT"Update dot version number to enable separate installation space.
GH-1412from distutils.version import StrictVersion
__version__ = "5.0.1"
StrictVersion(__version__)
__DOI__ = "Zenodo, DOI:10.5281/zenodo.3011184"
__release_date__ = "2019"
__build__ = "GIT_COMMIT"
|
<commit_before>from distutils.version import StrictVersion
__version__ = "5.0"
StrictVersion(__version__)
__DOI__ = "Zenodo, DOI:10.5281/zenodo.3011184"
__release_date__ = "2019"
__build__ = "GIT_COMMIT"<commit_msg>Update dot version number to enable separate installation space.
GH-1412<commit_after>from distutils.version import StrictVersion
__version__ = "5.0.1"
StrictVersion(__version__)
__DOI__ = "Zenodo, DOI:10.5281/zenodo.3011184"
__release_date__ = "2019"
__build__ = "GIT_COMMIT"
|
75eed75ee9c70368100d7ce8f3fdcc8169912062
|
lfc/context_processors.py
|
lfc/context_processors.py
|
# lfc imports
import lfc.utils
from django.conf import settings
from django.utils import translation
def main(request):
"""context processor for LFC.
"""
current_language = translation.get_language()
default_language = settings.LANGUAGE_CODE
return {
"PORTAL" : lfc.utils.get_portal(),
"LFC_MULTILANGUAGE" : settings.LFC_MULTILANGUAGE,
"DEFAULT_LANGUAGE" : default_language,
"CURRENT_LANGUAGE" : current_language,
"IS_DEFAULT_LANGUAGE" : default_language == current_language,
}
|
# lfc imports
import lfc.utils
from django.conf import settings
from django.utils import translation
def main(request):
"""context processor for LFC.
"""
current_language = translation.get_language()
default_language = settings.LANGUAGE_CODE
is_default_language = default_language == current_language
if current_language == "0" or is_default_language:
link_language = ""
else:
link_language = current_language
return {
"PORTAL" : lfc.utils.get_portal(),
"LFC_MULTILANGUAGE" : settings.LFC_MULTILANGUAGE,
"DEFAULT_LANGUAGE" : default_language,
"CURRENT_LANGUAGE" : current_language,
"IS_DEFAULT_LANGUAGE" : is_default_language,
"LINK_LANGUAGE" : link_language,
}
|
Return correct language for using within links
|
Improvement: Return correct language for using within links
|
Python
|
bsd-3-clause
|
natea/django-lfc,natea/django-lfc
|
# lfc imports
import lfc.utils
from django.conf import settings
from django.utils import translation
def main(request):
"""context processor for LFC.
"""
current_language = translation.get_language()
default_language = settings.LANGUAGE_CODE
return {
"PORTAL" : lfc.utils.get_portal(),
"LFC_MULTILANGUAGE" : settings.LFC_MULTILANGUAGE,
"DEFAULT_LANGUAGE" : default_language,
"CURRENT_LANGUAGE" : current_language,
"IS_DEFAULT_LANGUAGE" : default_language == current_language,
}
Improvement: Return correct language for using within links
|
# lfc imports
import lfc.utils
from django.conf import settings
from django.utils import translation
def main(request):
"""context processor for LFC.
"""
current_language = translation.get_language()
default_language = settings.LANGUAGE_CODE
is_default_language = default_language == current_language
if current_language == "0" or is_default_language:
link_language = ""
else:
link_language = current_language
return {
"PORTAL" : lfc.utils.get_portal(),
"LFC_MULTILANGUAGE" : settings.LFC_MULTILANGUAGE,
"DEFAULT_LANGUAGE" : default_language,
"CURRENT_LANGUAGE" : current_language,
"IS_DEFAULT_LANGUAGE" : is_default_language,
"LINK_LANGUAGE" : link_language,
}
|
<commit_before># lfc imports
import lfc.utils
from django.conf import settings
from django.utils import translation
def main(request):
"""context processor for LFC.
"""
current_language = translation.get_language()
default_language = settings.LANGUAGE_CODE
return {
"PORTAL" : lfc.utils.get_portal(),
"LFC_MULTILANGUAGE" : settings.LFC_MULTILANGUAGE,
"DEFAULT_LANGUAGE" : default_language,
"CURRENT_LANGUAGE" : current_language,
"IS_DEFAULT_LANGUAGE" : default_language == current_language,
}
<commit_msg>Improvement: Return correct language for using within links<commit_after>
|
# lfc imports
import lfc.utils
from django.conf import settings
from django.utils import translation
def main(request):
"""context processor for LFC.
"""
current_language = translation.get_language()
default_language = settings.LANGUAGE_CODE
is_default_language = default_language == current_language
if current_language == "0" or is_default_language:
link_language = ""
else:
link_language = current_language
return {
"PORTAL" : lfc.utils.get_portal(),
"LFC_MULTILANGUAGE" : settings.LFC_MULTILANGUAGE,
"DEFAULT_LANGUAGE" : default_language,
"CURRENT_LANGUAGE" : current_language,
"IS_DEFAULT_LANGUAGE" : is_default_language,
"LINK_LANGUAGE" : link_language,
}
|
# lfc imports
import lfc.utils
from django.conf import settings
from django.utils import translation
def main(request):
"""context processor for LFC.
"""
current_language = translation.get_language()
default_language = settings.LANGUAGE_CODE
return {
"PORTAL" : lfc.utils.get_portal(),
"LFC_MULTILANGUAGE" : settings.LFC_MULTILANGUAGE,
"DEFAULT_LANGUAGE" : default_language,
"CURRENT_LANGUAGE" : current_language,
"IS_DEFAULT_LANGUAGE" : default_language == current_language,
}
Improvement: Return correct language for using within links# lfc imports
import lfc.utils
from django.conf import settings
from django.utils import translation
def main(request):
"""context processor for LFC.
"""
current_language = translation.get_language()
default_language = settings.LANGUAGE_CODE
is_default_language = default_language == current_language
if current_language == "0" or is_default_language:
link_language = ""
else:
link_language = current_language
return {
"PORTAL" : lfc.utils.get_portal(),
"LFC_MULTILANGUAGE" : settings.LFC_MULTILANGUAGE,
"DEFAULT_LANGUAGE" : default_language,
"CURRENT_LANGUAGE" : current_language,
"IS_DEFAULT_LANGUAGE" : is_default_language,
"LINK_LANGUAGE" : link_language,
}
|
<commit_before># lfc imports
import lfc.utils
from django.conf import settings
from django.utils import translation
def main(request):
"""context processor for LFC.
"""
current_language = translation.get_language()
default_language = settings.LANGUAGE_CODE
return {
"PORTAL" : lfc.utils.get_portal(),
"LFC_MULTILANGUAGE" : settings.LFC_MULTILANGUAGE,
"DEFAULT_LANGUAGE" : default_language,
"CURRENT_LANGUAGE" : current_language,
"IS_DEFAULT_LANGUAGE" : default_language == current_language,
}
<commit_msg>Improvement: Return correct language for using within links<commit_after># lfc imports
import lfc.utils
from django.conf import settings
from django.utils import translation
def main(request):
"""context processor for LFC.
"""
current_language = translation.get_language()
default_language = settings.LANGUAGE_CODE
is_default_language = default_language == current_language
if current_language == "0" or is_default_language:
link_language = ""
else:
link_language = current_language
return {
"PORTAL" : lfc.utils.get_portal(),
"LFC_MULTILANGUAGE" : settings.LFC_MULTILANGUAGE,
"DEFAULT_LANGUAGE" : default_language,
"CURRENT_LANGUAGE" : current_language,
"IS_DEFAULT_LANGUAGE" : is_default_language,
"LINK_LANGUAGE" : link_language,
}
|
fb34c787c1d0050436698929d200202329a27bf3
|
app/settings.py
|
app/settings.py
|
import os
SECURE_MESSAGING_DATABASE_URL = os.getenv('SECURE_MESSAGING_DATABASE_URL', 'sqlite:////tmp/messages.db')
# postgres://rasmessage:rasmessage@localhost:5432/messages
|
import os
''' This file is the main configuration for the Secure Messaging Service.
It contains a full default configuration
All configuration may be overridden by setting the appropriate environment variable name. '''
SECURE_MESSAGING_DATABASE_URL = os.getenv('SECURE_MESSAGING_DATABASE_URL', 'sqlite:////tmp/messages.db')
|
Add docstring to main configuration.
|
Add docstring to main configuration.
|
Python
|
mit
|
qateam123/secure-messaging-api
|
import os
SECURE_MESSAGING_DATABASE_URL = os.getenv('SECURE_MESSAGING_DATABASE_URL', 'sqlite:////tmp/messages.db')
# postgres://rasmessage:rasmessage@localhost:5432/messages
Add docstring to main configuration.
|
import os
''' This file is the main configuration for the Secure Messaging Service.
It contains a full default configuration
All configuration may be overridden by setting the appropriate environment variable name. '''
SECURE_MESSAGING_DATABASE_URL = os.getenv('SECURE_MESSAGING_DATABASE_URL', 'sqlite:////tmp/messages.db')
|
<commit_before>import os
SECURE_MESSAGING_DATABASE_URL = os.getenv('SECURE_MESSAGING_DATABASE_URL', 'sqlite:////tmp/messages.db')
# postgres://rasmessage:rasmessage@localhost:5432/messages
<commit_msg>Add docstring to main configuration.<commit_after>
|
import os
''' This file is the main configuration for the Secure Messaging Service.
It contains a full default configuration
All configuration may be overridden by setting the appropriate environment variable name. '''
SECURE_MESSAGING_DATABASE_URL = os.getenv('SECURE_MESSAGING_DATABASE_URL', 'sqlite:////tmp/messages.db')
|
import os
SECURE_MESSAGING_DATABASE_URL = os.getenv('SECURE_MESSAGING_DATABASE_URL', 'sqlite:////tmp/messages.db')
# postgres://rasmessage:rasmessage@localhost:5432/messages
Add docstring to main configuration.import os
''' This file is the main configuration for the Secure Messaging Service.
It contains a full default configuration
All configuration may be overridden by setting the appropriate environment variable name. '''
SECURE_MESSAGING_DATABASE_URL = os.getenv('SECURE_MESSAGING_DATABASE_URL', 'sqlite:////tmp/messages.db')
|
<commit_before>import os
SECURE_MESSAGING_DATABASE_URL = os.getenv('SECURE_MESSAGING_DATABASE_URL', 'sqlite:////tmp/messages.db')
# postgres://rasmessage:rasmessage@localhost:5432/messages
<commit_msg>Add docstring to main configuration.<commit_after>import os
''' This file is the main configuration for the Secure Messaging Service.
It contains a full default configuration
All configuration may be overridden by setting the appropriate environment variable name. '''
SECURE_MESSAGING_DATABASE_URL = os.getenv('SECURE_MESSAGING_DATABASE_URL', 'sqlite:////tmp/messages.db')
|
8570efd42f35b89d9a97d9aa5a5aa47765cd21f6
|
diary/logthread.py
|
diary/logthread.py
|
from threading import Thread
try:
from queue import Queue
except ImportError: # python 2
from Queue import Queue
class ElemThread(Thread):
"""A thread for logging as to not disrupt the logged application"""
def __init__(self, elem, name="Elementary Logger"):
"""Construct a thread for logging
:param elem: An Elementary instance to handle logging
:param name: A string to represent this thread
"""
Thread.__init__(self, name=name)
self.daemon = True # py2 constructor requires explicit
self.elem = elem
self.queue = Queue()
self.start()
def add(self, event):
"""Add a logged event to queue for logging"""
self.queue.put(event)
def run(self):
"""Main for thread to run"""
while True:
self.elem.write(self.queue.get())
|
from threading import Thread
try:
from queue import Queue
except ImportError: # python 2
from Queue import Queue
class DiaryThread(Thread):
"""A thread for logging as to not disrupt the logged application"""
def __init__(self, diary, name="Diary Logger"):
"""Construct a thread for logging
:param diary: An Diary instance to handle logging
:param name: A string to represent this thread
"""
Thread.__init__(self, name=name)
self.daemon = True # py2 constructor requires explicit
self.diary = diary
self.queue = Queue()
self.start()
def add(self, event):
"""Add a logged event to queue for logging"""
self.queue.put(event)
def run(self):
"""Main for thread to run"""
while True:
self.diary.write(self.queue.get())
|
Make last changes over to diary name
|
Make last changes over to diary name
|
Python
|
mit
|
GreenVars/diary
|
from threading import Thread
try:
from queue import Queue
except ImportError: # python 2
from Queue import Queue
class ElemThread(Thread):
"""A thread for logging as to not disrupt the logged application"""
def __init__(self, elem, name="Elementary Logger"):
"""Construct a thread for logging
:param elem: An Elementary instance to handle logging
:param name: A string to represent this thread
"""
Thread.__init__(self, name=name)
self.daemon = True # py2 constructor requires explicit
self.elem = elem
self.queue = Queue()
self.start()
def add(self, event):
"""Add a logged event to queue for logging"""
self.queue.put(event)
def run(self):
"""Main for thread to run"""
while True:
self.elem.write(self.queue.get())
Make last changes over to diary name
|
from threading import Thread
try:
from queue import Queue
except ImportError: # python 2
from Queue import Queue
class DiaryThread(Thread):
"""A thread for logging as to not disrupt the logged application"""
def __init__(self, diary, name="Diary Logger"):
"""Construct a thread for logging
:param diary: An Diary instance to handle logging
:param name: A string to represent this thread
"""
Thread.__init__(self, name=name)
self.daemon = True # py2 constructor requires explicit
self.diary = diary
self.queue = Queue()
self.start()
def add(self, event):
"""Add a logged event to queue for logging"""
self.queue.put(event)
def run(self):
"""Main for thread to run"""
while True:
self.diary.write(self.queue.get())
|
<commit_before>from threading import Thread
try:
from queue import Queue
except ImportError: # python 2
from Queue import Queue
class ElemThread(Thread):
"""A thread for logging as to not disrupt the logged application"""
def __init__(self, elem, name="Elementary Logger"):
"""Construct a thread for logging
:param elem: An Elementary instance to handle logging
:param name: A string to represent this thread
"""
Thread.__init__(self, name=name)
self.daemon = True # py2 constructor requires explicit
self.elem = elem
self.queue = Queue()
self.start()
def add(self, event):
"""Add a logged event to queue for logging"""
self.queue.put(event)
def run(self):
"""Main for thread to run"""
while True:
self.elem.write(self.queue.get())
<commit_msg>Make last changes over to diary name<commit_after>
|
from threading import Thread
try:
from queue import Queue
except ImportError: # python 2
from Queue import Queue
class DiaryThread(Thread):
"""A thread for logging as to not disrupt the logged application"""
def __init__(self, diary, name="Diary Logger"):
"""Construct a thread for logging
:param diary: An Diary instance to handle logging
:param name: A string to represent this thread
"""
Thread.__init__(self, name=name)
self.daemon = True # py2 constructor requires explicit
self.diary = diary
self.queue = Queue()
self.start()
def add(self, event):
"""Add a logged event to queue for logging"""
self.queue.put(event)
def run(self):
"""Main for thread to run"""
while True:
self.diary.write(self.queue.get())
|
from threading import Thread
try:
from queue import Queue
except ImportError: # python 2
from Queue import Queue
class ElemThread(Thread):
"""A thread for logging as to not disrupt the logged application"""
def __init__(self, elem, name="Elementary Logger"):
"""Construct a thread for logging
:param elem: An Elementary instance to handle logging
:param name: A string to represent this thread
"""
Thread.__init__(self, name=name)
self.daemon = True # py2 constructor requires explicit
self.elem = elem
self.queue = Queue()
self.start()
def add(self, event):
"""Add a logged event to queue for logging"""
self.queue.put(event)
def run(self):
"""Main for thread to run"""
while True:
self.elem.write(self.queue.get())
Make last changes over to diary namefrom threading import Thread
try:
from queue import Queue
except ImportError: # python 2
from Queue import Queue
class DiaryThread(Thread):
"""A thread for logging as to not disrupt the logged application"""
def __init__(self, diary, name="Diary Logger"):
"""Construct a thread for logging
:param diary: An Diary instance to handle logging
:param name: A string to represent this thread
"""
Thread.__init__(self, name=name)
self.daemon = True # py2 constructor requires explicit
self.diary = diary
self.queue = Queue()
self.start()
def add(self, event):
"""Add a logged event to queue for logging"""
self.queue.put(event)
def run(self):
"""Main for thread to run"""
while True:
self.diary.write(self.queue.get())
|
<commit_before>from threading import Thread
try:
from queue import Queue
except ImportError: # python 2
from Queue import Queue
class ElemThread(Thread):
"""A thread for logging as to not disrupt the logged application"""
def __init__(self, elem, name="Elementary Logger"):
"""Construct a thread for logging
:param elem: An Elementary instance to handle logging
:param name: A string to represent this thread
"""
Thread.__init__(self, name=name)
self.daemon = True # py2 constructor requires explicit
self.elem = elem
self.queue = Queue()
self.start()
def add(self, event):
"""Add a logged event to queue for logging"""
self.queue.put(event)
def run(self):
"""Main for thread to run"""
while True:
self.elem.write(self.queue.get())
<commit_msg>Make last changes over to diary name<commit_after>from threading import Thread
try:
from queue import Queue
except ImportError: # python 2
from Queue import Queue
class DiaryThread(Thread):
"""A thread for logging as to not disrupt the logged application"""
def __init__(self, diary, name="Diary Logger"):
"""Construct a thread for logging
:param diary: An Diary instance to handle logging
:param name: A string to represent this thread
"""
Thread.__init__(self, name=name)
self.daemon = True # py2 constructor requires explicit
self.diary = diary
self.queue = Queue()
self.start()
def add(self, event):
"""Add a logged event to queue for logging"""
self.queue.put(event)
def run(self):
"""Main for thread to run"""
while True:
self.diary.write(self.queue.get())
|
ac8ec95b5d9f3f0bca2f6c1e367a08a5fd0ee787
|
bdp/runtests.py
|
bdp/runtests.py
|
"""
Module runtests.py -
Runs all unittests configured in different buildout projects.
"""
import os
import subprocess, shlex
PYPROJECTS = 'platform/frontend'
def run():
"""
Main eintry point
"""
projects = PYPROJECTS.split(',')
for project in projects:
cmd = shlex.split('bin/django jenkins')
proc = subprocess.Popen(cmd, stderr=subprocess.STDOUT)
proc.communicate()
if proc.returncode != 0:
print "Error while testing"
else:
print "Success in testing"
if __name__ == "__main__":
run()
|
"""
Module runtests.py -
Runs all unittests configured in different buildout projects.
"""
import os
import subprocess, shlex
PYPROJECTS = 'platform/frontend'
def run():
"""
Main eintry point
"""
projects = PYPROJECTS.split(',')
projects_root = os.getcwd()
for project in projects:
os.chdir(project)
cmd = shlex.split('bin/django jenkins')
proc = subprocess.Popen(cmd, stderr=subprocess.STDOUT)
proc.communicate()
if proc.returncode != 0:
print "Error while testing"
else:
print "Success in testing"
os.chdir(projects_root)
if __name__ == "__main__":
run()
|
Change to correct directory when testing
|
Change to correct directory when testing
|
Python
|
apache-2.0
|
telefonicaid/fiware-cosmos-platform,telefonicaid/fiware-cosmos-platform,telefonicaid/fiware-cosmos-platform,telefonicaid/fiware-cosmos-platform,telefonicaid/fiware-cosmos-platform
|
"""
Module runtests.py -
Runs all unittests configured in different buildout projects.
"""
import os
import subprocess, shlex
PYPROJECTS = 'platform/frontend'
def run():
"""
Main eintry point
"""
projects = PYPROJECTS.split(',')
for project in projects:
cmd = shlex.split('bin/django jenkins')
proc = subprocess.Popen(cmd, stderr=subprocess.STDOUT)
proc.communicate()
if proc.returncode != 0:
print "Error while testing"
else:
print "Success in testing"
if __name__ == "__main__":
run()
Change to correct directory when testing
|
"""
Module runtests.py -
Runs all unittests configured in different buildout projects.
"""
import os
import subprocess, shlex
PYPROJECTS = 'platform/frontend'
def run():
"""
Main eintry point
"""
projects = PYPROJECTS.split(',')
projects_root = os.getcwd()
for project in projects:
os.chdir(project)
cmd = shlex.split('bin/django jenkins')
proc = subprocess.Popen(cmd, stderr=subprocess.STDOUT)
proc.communicate()
if proc.returncode != 0:
print "Error while testing"
else:
print "Success in testing"
os.chdir(projects_root)
if __name__ == "__main__":
run()
|
<commit_before>"""
Module runtests.py -
Runs all unittests configured in different buildout projects.
"""
import os
import subprocess, shlex
PYPROJECTS = 'platform/frontend'
def run():
"""
Main eintry point
"""
projects = PYPROJECTS.split(',')
for project in projects:
cmd = shlex.split('bin/django jenkins')
proc = subprocess.Popen(cmd, stderr=subprocess.STDOUT)
proc.communicate()
if proc.returncode != 0:
print "Error while testing"
else:
print "Success in testing"
if __name__ == "__main__":
run()
<commit_msg>Change to correct directory when testing<commit_after>
|
"""
Module runtests.py -
Runs all unittests configured in different buildout projects.
"""
import os
import subprocess, shlex
PYPROJECTS = 'platform/frontend'
def run():
"""
Main eintry point
"""
projects = PYPROJECTS.split(',')
projects_root = os.getcwd()
for project in projects:
os.chdir(project)
cmd = shlex.split('bin/django jenkins')
proc = subprocess.Popen(cmd, stderr=subprocess.STDOUT)
proc.communicate()
if proc.returncode != 0:
print "Error while testing"
else:
print "Success in testing"
os.chdir(projects_root)
if __name__ == "__main__":
run()
|
"""
Module runtests.py -
Runs all unittests configured in different buildout projects.
"""
import os
import subprocess, shlex
PYPROJECTS = 'platform/frontend'
def run():
"""
Main eintry point
"""
projects = PYPROJECTS.split(',')
for project in projects:
cmd = shlex.split('bin/django jenkins')
proc = subprocess.Popen(cmd, stderr=subprocess.STDOUT)
proc.communicate()
if proc.returncode != 0:
print "Error while testing"
else:
print "Success in testing"
if __name__ == "__main__":
run()
Change to correct directory when testing"""
Module runtests.py -
Runs all unittests configured in different buildout projects.
"""
import os
import subprocess, shlex
PYPROJECTS = 'platform/frontend'
def run():
"""
Main eintry point
"""
projects = PYPROJECTS.split(',')
projects_root = os.getcwd()
for project in projects:
os.chdir(project)
cmd = shlex.split('bin/django jenkins')
proc = subprocess.Popen(cmd, stderr=subprocess.STDOUT)
proc.communicate()
if proc.returncode != 0:
print "Error while testing"
else:
print "Success in testing"
os.chdir(projects_root)
if __name__ == "__main__":
run()
|
<commit_before>"""
Module runtests.py -
Runs all unittests configured in different buildout projects.
"""
import os
import subprocess, shlex
PYPROJECTS = 'platform/frontend'
def run():
"""
Main eintry point
"""
projects = PYPROJECTS.split(',')
for project in projects:
cmd = shlex.split('bin/django jenkins')
proc = subprocess.Popen(cmd, stderr=subprocess.STDOUT)
proc.communicate()
if proc.returncode != 0:
print "Error while testing"
else:
print "Success in testing"
if __name__ == "__main__":
run()
<commit_msg>Change to correct directory when testing<commit_after>"""
Module runtests.py -
Runs all unittests configured in different buildout projects.
"""
import os
import subprocess, shlex
PYPROJECTS = 'platform/frontend'
def run():
"""
Main eintry point
"""
projects = PYPROJECTS.split(',')
projects_root = os.getcwd()
for project in projects:
os.chdir(project)
cmd = shlex.split('bin/django jenkins')
proc = subprocess.Popen(cmd, stderr=subprocess.STDOUT)
proc.communicate()
if proc.returncode != 0:
print "Error while testing"
else:
print "Success in testing"
os.chdir(projects_root)
if __name__ == "__main__":
run()
|
43abdf7610ba1ca16eb82f282d754d8b4033b834
|
test/symbols/show_glyphs.py
|
test/symbols/show_glyphs.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
devicons_start = "e700"
devicons_end = "e7c5"
print "Devicons"
for ii in xrange(int(devicons_start, 16), int(devicons_end, 16) + 1):
print unichr(ii),
custom_start = "e600"
custom_end = "e62b"
print "\nCustom"
for ii in xrange(int(custom_start, 16), int(custom_end, 16) + 1):
print unichr(ii),
font_awesome_start = "f000"
font_awesome_end = "f280"
print "\nFont Awesome"
for ii in xrange(int(font_awesome_start, 16), int(font_awesome_end, 16) + 1):
print unichr(ii),
powerline_start = "e0a0"
powerline_end = "e0d4"
print "\nPowerline"
for ii in xrange(int(powerline_start, 16), int(powerline_end, 16) + 1):
print unichr(ii),
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
devicons_start = "e700"
devicons_end = "e7c5"
print "Devicons"
for ii in xrange(int(devicons_start, 16), int(devicons_end, 16) + 1):
print unichr(ii),
custom_start = "e5fa"
custom_end = "e62b"
print "\nCustom"
for ii in xrange(int(custom_start, 16), int(custom_end, 16) + 1):
print unichr(ii),
font_awesome_start = "f000"
font_awesome_end = "f295"
print "\nFont Awesome"
for ii in xrange(int(font_awesome_start, 16), int(font_awesome_end, 16) + 1):
print unichr(ii),
powerline_start = "e0a0"
powerline_end = "e0d4"
print "\nPowerline"
for ii in xrange(int(powerline_start, 16), int(powerline_end, 16) + 1):
print unichr(ii),
|
Extend vim-devicons display script range
|
Extend vim-devicons display script range
|
Python
|
mit
|
mkofinas/prompt-support,mkofinas/prompt-support
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
devicons_start = "e700"
devicons_end = "e7c5"
print "Devicons"
for ii in xrange(int(devicons_start, 16), int(devicons_end, 16) + 1):
print unichr(ii),
custom_start = "e600"
custom_end = "e62b"
print "\nCustom"
for ii in xrange(int(custom_start, 16), int(custom_end, 16) + 1):
print unichr(ii),
font_awesome_start = "f000"
font_awesome_end = "f280"
print "\nFont Awesome"
for ii in xrange(int(font_awesome_start, 16), int(font_awesome_end, 16) + 1):
print unichr(ii),
powerline_start = "e0a0"
powerline_end = "e0d4"
print "\nPowerline"
for ii in xrange(int(powerline_start, 16), int(powerline_end, 16) + 1):
print unichr(ii),
Extend vim-devicons display script range
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
devicons_start = "e700"
devicons_end = "e7c5"
print "Devicons"
for ii in xrange(int(devicons_start, 16), int(devicons_end, 16) + 1):
print unichr(ii),
custom_start = "e5fa"
custom_end = "e62b"
print "\nCustom"
for ii in xrange(int(custom_start, 16), int(custom_end, 16) + 1):
print unichr(ii),
font_awesome_start = "f000"
font_awesome_end = "f295"
print "\nFont Awesome"
for ii in xrange(int(font_awesome_start, 16), int(font_awesome_end, 16) + 1):
print unichr(ii),
powerline_start = "e0a0"
powerline_end = "e0d4"
print "\nPowerline"
for ii in xrange(int(powerline_start, 16), int(powerline_end, 16) + 1):
print unichr(ii),
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
devicons_start = "e700"
devicons_end = "e7c5"
print "Devicons"
for ii in xrange(int(devicons_start, 16), int(devicons_end, 16) + 1):
print unichr(ii),
custom_start = "e600"
custom_end = "e62b"
print "\nCustom"
for ii in xrange(int(custom_start, 16), int(custom_end, 16) + 1):
print unichr(ii),
font_awesome_start = "f000"
font_awesome_end = "f280"
print "\nFont Awesome"
for ii in xrange(int(font_awesome_start, 16), int(font_awesome_end, 16) + 1):
print unichr(ii),
powerline_start = "e0a0"
powerline_end = "e0d4"
print "\nPowerline"
for ii in xrange(int(powerline_start, 16), int(powerline_end, 16) + 1):
print unichr(ii),
<commit_msg>Extend vim-devicons display script range<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
devicons_start = "e700"
devicons_end = "e7c5"
print "Devicons"
for ii in xrange(int(devicons_start, 16), int(devicons_end, 16) + 1):
print unichr(ii),
custom_start = "e5fa"
custom_end = "e62b"
print "\nCustom"
for ii in xrange(int(custom_start, 16), int(custom_end, 16) + 1):
print unichr(ii),
font_awesome_start = "f000"
font_awesome_end = "f295"
print "\nFont Awesome"
for ii in xrange(int(font_awesome_start, 16), int(font_awesome_end, 16) + 1):
print unichr(ii),
powerline_start = "e0a0"
powerline_end = "e0d4"
print "\nPowerline"
for ii in xrange(int(powerline_start, 16), int(powerline_end, 16) + 1):
print unichr(ii),
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
devicons_start = "e700"
devicons_end = "e7c5"
print "Devicons"
for ii in xrange(int(devicons_start, 16), int(devicons_end, 16) + 1):
print unichr(ii),
custom_start = "e600"
custom_end = "e62b"
print "\nCustom"
for ii in xrange(int(custom_start, 16), int(custom_end, 16) + 1):
print unichr(ii),
font_awesome_start = "f000"
font_awesome_end = "f280"
print "\nFont Awesome"
for ii in xrange(int(font_awesome_start, 16), int(font_awesome_end, 16) + 1):
print unichr(ii),
powerline_start = "e0a0"
powerline_end = "e0d4"
print "\nPowerline"
for ii in xrange(int(powerline_start, 16), int(powerline_end, 16) + 1):
print unichr(ii),
Extend vim-devicons display script range#!/usr/bin/env python
# -*- coding: utf-8 -*-
devicons_start = "e700"
devicons_end = "e7c5"
print "Devicons"
for ii in xrange(int(devicons_start, 16), int(devicons_end, 16) + 1):
print unichr(ii),
custom_start = "e5fa"
custom_end = "e62b"
print "\nCustom"
for ii in xrange(int(custom_start, 16), int(custom_end, 16) + 1):
print unichr(ii),
font_awesome_start = "f000"
font_awesome_end = "f295"
print "\nFont Awesome"
for ii in xrange(int(font_awesome_start, 16), int(font_awesome_end, 16) + 1):
print unichr(ii),
powerline_start = "e0a0"
powerline_end = "e0d4"
print "\nPowerline"
for ii in xrange(int(powerline_start, 16), int(powerline_end, 16) + 1):
print unichr(ii),
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
devicons_start = "e700"
devicons_end = "e7c5"
print "Devicons"
for ii in xrange(int(devicons_start, 16), int(devicons_end, 16) + 1):
print unichr(ii),
custom_start = "e600"
custom_end = "e62b"
print "\nCustom"
for ii in xrange(int(custom_start, 16), int(custom_end, 16) + 1):
print unichr(ii),
font_awesome_start = "f000"
font_awesome_end = "f280"
print "\nFont Awesome"
for ii in xrange(int(font_awesome_start, 16), int(font_awesome_end, 16) + 1):
print unichr(ii),
powerline_start = "e0a0"
powerline_end = "e0d4"
print "\nPowerline"
for ii in xrange(int(powerline_start, 16), int(powerline_end, 16) + 1):
print unichr(ii),
<commit_msg>Extend vim-devicons display script range<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
devicons_start = "e700"
devicons_end = "e7c5"
print "Devicons"
for ii in xrange(int(devicons_start, 16), int(devicons_end, 16) + 1):
print unichr(ii),
custom_start = "e5fa"
custom_end = "e62b"
print "\nCustom"
for ii in xrange(int(custom_start, 16), int(custom_end, 16) + 1):
print unichr(ii),
font_awesome_start = "f000"
font_awesome_end = "f295"
print "\nFont Awesome"
for ii in xrange(int(font_awesome_start, 16), int(font_awesome_end, 16) + 1):
print unichr(ii),
powerline_start = "e0a0"
powerline_end = "e0d4"
print "\nPowerline"
for ii in xrange(int(powerline_start, 16), int(powerline_end, 16) + 1):
print unichr(ii),
|
4f20f940102d353b6200f44ae5aaa51a85b89aba
|
pxe_manager/tests/test_pxemanager.py
|
pxe_manager/tests/test_pxemanager.py
|
from pxe_manager.pxemanager import PxeManager
from resource_manager.client import ResourceManagerClient
import httpretty
@httpretty.activate
def test_defaults():
client = ResourceManagerClient()
cobbler_url = "http://cobbler.example.com/cobbler_api"
cobbler_user = "user"
cobbler_password = "password"
response_body = '''<?xml version="1.0"?>
<methodResponse>
<params>
<param>
<value><string>Some Value</string></value>
</param>
</params>
</methodResponse>
'''
distro_map = {'esxi51': 'qa-vmwareesxi51u0-x86_64',
'esxi50': 'qa-vmwareesxi50u1-x86_64',
'centos': 'qa-centos6-x86_64-striped-drives',
'rhel': 'qa-rhel6u5-x86_64-striped-drives'}
httpretty.register_uri(httpretty.POST, cobbler_url,
body=response_body)
pxe_manager = PxeManager(cobbler_url, cobbler_user, cobbler_password, client)
for key, value in distro_map.iteritems():
assert pxe_manager.distro[key] == value
|
from pxe_manager.pxemanager import PxeManager
from resource_manager.client import ResourceManagerClient
import httpretty
@httpretty.activate
def test_defaults():
host_client = ResourceManagerClient()
pub_ip_client = ResourceManagerClient(resource_type='public-addresses')
priv_ip_client = ResourceManagerClient(resource_type='private-addresses')
cobbler_url = "http://cobbler.example.com/cobbler_api"
cobbler_user = "user"
cobbler_password = "password"
response_body = '''<?xml version="1.0"?>
<methodResponse>
<params>
<param>
<value><string>Some Value</string></value>
</param>
</params>
</methodResponse>
'''
distro_map = {'esxi51': 'qa-vmwareesxi51u0-x86_64',
'esxi50': 'qa-vmwareesxi50u1-x86_64',
'centos': 'qa-centos6-x86_64-striped-drives',
'rhel': 'qa-rhel6u5-x86_64-striped-drives'}
httpretty.register_uri(httpretty.POST, cobbler_url,
body=response_body)
pxe_manager = PxeManager(cobbler_url, cobbler_user, cobbler_password, host_client, pub_ip_client, priv_ip_client)
for key, value in distro_map.iteritems():
assert pxe_manager.distro[key] == value
|
Update unit test with new pxemanager signature
|
Update unit test with new pxemanager signature
|
Python
|
apache-2.0
|
tbeckham/DeploymentManager,tbeckham/DeploymentManager,ccassler/DeploymentManager,tbeckham/DeploymentManager,ccassler/DeploymentManager,ccassler/DeploymentManager
|
from pxe_manager.pxemanager import PxeManager
from resource_manager.client import ResourceManagerClient
import httpretty
@httpretty.activate
def test_defaults():
client = ResourceManagerClient()
cobbler_url = "http://cobbler.example.com/cobbler_api"
cobbler_user = "user"
cobbler_password = "password"
response_body = '''<?xml version="1.0"?>
<methodResponse>
<params>
<param>
<value><string>Some Value</string></value>
</param>
</params>
</methodResponse>
'''
distro_map = {'esxi51': 'qa-vmwareesxi51u0-x86_64',
'esxi50': 'qa-vmwareesxi50u1-x86_64',
'centos': 'qa-centos6-x86_64-striped-drives',
'rhel': 'qa-rhel6u5-x86_64-striped-drives'}
httpretty.register_uri(httpretty.POST, cobbler_url,
body=response_body)
pxe_manager = PxeManager(cobbler_url, cobbler_user, cobbler_password, client)
for key, value in distro_map.iteritems():
assert pxe_manager.distro[key] == value
Update unit test with new pxemanager signature
|
from pxe_manager.pxemanager import PxeManager
from resource_manager.client import ResourceManagerClient
import httpretty
@httpretty.activate
def test_defaults():
host_client = ResourceManagerClient()
pub_ip_client = ResourceManagerClient(resource_type='public-addresses')
priv_ip_client = ResourceManagerClient(resource_type='private-addresses')
cobbler_url = "http://cobbler.example.com/cobbler_api"
cobbler_user = "user"
cobbler_password = "password"
response_body = '''<?xml version="1.0"?>
<methodResponse>
<params>
<param>
<value><string>Some Value</string></value>
</param>
</params>
</methodResponse>
'''
distro_map = {'esxi51': 'qa-vmwareesxi51u0-x86_64',
'esxi50': 'qa-vmwareesxi50u1-x86_64',
'centos': 'qa-centos6-x86_64-striped-drives',
'rhel': 'qa-rhel6u5-x86_64-striped-drives'}
httpretty.register_uri(httpretty.POST, cobbler_url,
body=response_body)
pxe_manager = PxeManager(cobbler_url, cobbler_user, cobbler_password, host_client, pub_ip_client, priv_ip_client)
for key, value in distro_map.iteritems():
assert pxe_manager.distro[key] == value
|
<commit_before>from pxe_manager.pxemanager import PxeManager
from resource_manager.client import ResourceManagerClient
import httpretty
@httpretty.activate
def test_defaults():
client = ResourceManagerClient()
cobbler_url = "http://cobbler.example.com/cobbler_api"
cobbler_user = "user"
cobbler_password = "password"
response_body = '''<?xml version="1.0"?>
<methodResponse>
<params>
<param>
<value><string>Some Value</string></value>
</param>
</params>
</methodResponse>
'''
distro_map = {'esxi51': 'qa-vmwareesxi51u0-x86_64',
'esxi50': 'qa-vmwareesxi50u1-x86_64',
'centos': 'qa-centos6-x86_64-striped-drives',
'rhel': 'qa-rhel6u5-x86_64-striped-drives'}
httpretty.register_uri(httpretty.POST, cobbler_url,
body=response_body)
pxe_manager = PxeManager(cobbler_url, cobbler_user, cobbler_password, client)
for key, value in distro_map.iteritems():
assert pxe_manager.distro[key] == value
<commit_msg>Update unit test with new pxemanager signature<commit_after>
|
from pxe_manager.pxemanager import PxeManager
from resource_manager.client import ResourceManagerClient
import httpretty
@httpretty.activate
def test_defaults():
host_client = ResourceManagerClient()
pub_ip_client = ResourceManagerClient(resource_type='public-addresses')
priv_ip_client = ResourceManagerClient(resource_type='private-addresses')
cobbler_url = "http://cobbler.example.com/cobbler_api"
cobbler_user = "user"
cobbler_password = "password"
response_body = '''<?xml version="1.0"?>
<methodResponse>
<params>
<param>
<value><string>Some Value</string></value>
</param>
</params>
</methodResponse>
'''
distro_map = {'esxi51': 'qa-vmwareesxi51u0-x86_64',
'esxi50': 'qa-vmwareesxi50u1-x86_64',
'centos': 'qa-centos6-x86_64-striped-drives',
'rhel': 'qa-rhel6u5-x86_64-striped-drives'}
httpretty.register_uri(httpretty.POST, cobbler_url,
body=response_body)
pxe_manager = PxeManager(cobbler_url, cobbler_user, cobbler_password, host_client, pub_ip_client, priv_ip_client)
for key, value in distro_map.iteritems():
assert pxe_manager.distro[key] == value
|
from pxe_manager.pxemanager import PxeManager
from resource_manager.client import ResourceManagerClient
import httpretty
@httpretty.activate
def test_defaults():
client = ResourceManagerClient()
cobbler_url = "http://cobbler.example.com/cobbler_api"
cobbler_user = "user"
cobbler_password = "password"
response_body = '''<?xml version="1.0"?>
<methodResponse>
<params>
<param>
<value><string>Some Value</string></value>
</param>
</params>
</methodResponse>
'''
distro_map = {'esxi51': 'qa-vmwareesxi51u0-x86_64',
'esxi50': 'qa-vmwareesxi50u1-x86_64',
'centos': 'qa-centos6-x86_64-striped-drives',
'rhel': 'qa-rhel6u5-x86_64-striped-drives'}
httpretty.register_uri(httpretty.POST, cobbler_url,
body=response_body)
pxe_manager = PxeManager(cobbler_url, cobbler_user, cobbler_password, client)
for key, value in distro_map.iteritems():
assert pxe_manager.distro[key] == value
Update unit test with new pxemanager signaturefrom pxe_manager.pxemanager import PxeManager
from resource_manager.client import ResourceManagerClient
import httpretty
@httpretty.activate
def test_defaults():
host_client = ResourceManagerClient()
pub_ip_client = ResourceManagerClient(resource_type='public-addresses')
priv_ip_client = ResourceManagerClient(resource_type='private-addresses')
cobbler_url = "http://cobbler.example.com/cobbler_api"
cobbler_user = "user"
cobbler_password = "password"
response_body = '''<?xml version="1.0"?>
<methodResponse>
<params>
<param>
<value><string>Some Value</string></value>
</param>
</params>
</methodResponse>
'''
distro_map = {'esxi51': 'qa-vmwareesxi51u0-x86_64',
'esxi50': 'qa-vmwareesxi50u1-x86_64',
'centos': 'qa-centos6-x86_64-striped-drives',
'rhel': 'qa-rhel6u5-x86_64-striped-drives'}
httpretty.register_uri(httpretty.POST, cobbler_url,
body=response_body)
pxe_manager = PxeManager(cobbler_url, cobbler_user, cobbler_password, host_client, pub_ip_client, priv_ip_client)
for key, value in distro_map.iteritems():
assert pxe_manager.distro[key] == value
|
<commit_before>from pxe_manager.pxemanager import PxeManager
from resource_manager.client import ResourceManagerClient
import httpretty
@httpretty.activate
def test_defaults():
client = ResourceManagerClient()
cobbler_url = "http://cobbler.example.com/cobbler_api"
cobbler_user = "user"
cobbler_password = "password"
response_body = '''<?xml version="1.0"?>
<methodResponse>
<params>
<param>
<value><string>Some Value</string></value>
</param>
</params>
</methodResponse>
'''
distro_map = {'esxi51': 'qa-vmwareesxi51u0-x86_64',
'esxi50': 'qa-vmwareesxi50u1-x86_64',
'centos': 'qa-centos6-x86_64-striped-drives',
'rhel': 'qa-rhel6u5-x86_64-striped-drives'}
httpretty.register_uri(httpretty.POST, cobbler_url,
body=response_body)
pxe_manager = PxeManager(cobbler_url, cobbler_user, cobbler_password, client)
for key, value in distro_map.iteritems():
assert pxe_manager.distro[key] == value
<commit_msg>Update unit test with new pxemanager signature<commit_after>from pxe_manager.pxemanager import PxeManager
from resource_manager.client import ResourceManagerClient
import httpretty
@httpretty.activate
def test_defaults():
host_client = ResourceManagerClient()
pub_ip_client = ResourceManagerClient(resource_type='public-addresses')
priv_ip_client = ResourceManagerClient(resource_type='private-addresses')
cobbler_url = "http://cobbler.example.com/cobbler_api"
cobbler_user = "user"
cobbler_password = "password"
response_body = '''<?xml version="1.0"?>
<methodResponse>
<params>
<param>
<value><string>Some Value</string></value>
</param>
</params>
</methodResponse>
'''
distro_map = {'esxi51': 'qa-vmwareesxi51u0-x86_64',
'esxi50': 'qa-vmwareesxi50u1-x86_64',
'centos': 'qa-centos6-x86_64-striped-drives',
'rhel': 'qa-rhel6u5-x86_64-striped-drives'}
httpretty.register_uri(httpretty.POST, cobbler_url,
body=response_body)
pxe_manager = PxeManager(cobbler_url, cobbler_user, cobbler_password, host_client, pub_ip_client, priv_ip_client)
for key, value in distro_map.iteritems():
assert pxe_manager.distro[key] == value
|
e307d4e5586ac08559cf607e484e55d70bd4b0ae
|
tests/people/test_models.py
|
tests/people/test_models.py
|
import pytest
from components.people.models import Group, Idol, Membership, Staff
from components.people.factories import (GroupFactory, IdolFactory,
MembershipFactory, StaffFactory)
@pytest.mark.django_db
def test_group_factory():
factory = GroupFactory()
assert isinstance(factory, Group)
assert 'group' in factory.romanized_name
@pytest.mark.django_db
def test_group_get_absolute_url(client):
factory = GroupFactory()
response = client.get(factory.get_absolute_url())
assert response.status_code == 200
@pytest.mark.django_db
def test_idol_factory():
factory = IdolFactory()
assert isinstance(factory, Idol)
assert 'family' in factory.romanized_family_name
assert 'given' in factory.romanized_given_name
@pytest.mark.django_db
def test_idol_get_absolute_url(client):
factory = IdolFactory()
response = client.get(factory.get_absolute_url())
assert response.status_code == 200
@pytest.mark.django_db
def test_staff_factory():
factory = StaffFactory()
assert isinstance(factory, Staff)
assert 'family' in factory.romanized_family_name
assert 'given' in factory.romanized_given_name
@pytest.mark.django_db
def test_membership_factory():
factory = MembershipFactory()
assert isinstance(factory, Membership)
assert isinstance(factory.group, Group)
assert isinstance(factory.idol, Idol)
|
import pytest
from components.people.models import Group, Idol, Membership, Staff
from components.people.factories import (GroupFactory, IdolFactory,
MembershipFactory, StaffFactory)
pytestmark = pytest.mark.django_db
def test_group_factory():
factory = GroupFactory()
assert isinstance(factory, Group)
assert 'group' in factory.romanized_name
def test_group_get_absolute_url(client):
factory = GroupFactory()
response = client.get(factory.get_absolute_url())
assert response.status_code == 200
def test_idol_factory():
factory = IdolFactory()
assert isinstance(factory, Idol)
assert 'family' in factory.romanized_family_name
assert 'given' in factory.romanized_given_name
def test_idol_get_absolute_url(client):
factory = IdolFactory()
response = client.get(factory.get_absolute_url())
assert response.status_code == 200
def test_staff_factory():
factory = StaffFactory()
assert isinstance(factory, Staff)
assert 'family' in factory.romanized_family_name
assert 'given' in factory.romanized_given_name
def test_membership_factory():
factory = MembershipFactory()
assert isinstance(factory, Membership)
assert isinstance(factory.group, Group)
assert isinstance(factory.idol, Idol)
|
Mark all of these tests too.
|
Mark all of these tests too.
|
Python
|
apache-2.0
|
hello-base/web,hello-base/web,hello-base/web,hello-base/web
|
import pytest
from components.people.models import Group, Idol, Membership, Staff
from components.people.factories import (GroupFactory, IdolFactory,
MembershipFactory, StaffFactory)
@pytest.mark.django_db
def test_group_factory():
factory = GroupFactory()
assert isinstance(factory, Group)
assert 'group' in factory.romanized_name
@pytest.mark.django_db
def test_group_get_absolute_url(client):
factory = GroupFactory()
response = client.get(factory.get_absolute_url())
assert response.status_code == 200
@pytest.mark.django_db
def test_idol_factory():
factory = IdolFactory()
assert isinstance(factory, Idol)
assert 'family' in factory.romanized_family_name
assert 'given' in factory.romanized_given_name
@pytest.mark.django_db
def test_idol_get_absolute_url(client):
factory = IdolFactory()
response = client.get(factory.get_absolute_url())
assert response.status_code == 200
@pytest.mark.django_db
def test_staff_factory():
factory = StaffFactory()
assert isinstance(factory, Staff)
assert 'family' in factory.romanized_family_name
assert 'given' in factory.romanized_given_name
@pytest.mark.django_db
def test_membership_factory():
factory = MembershipFactory()
assert isinstance(factory, Membership)
assert isinstance(factory.group, Group)
assert isinstance(factory.idol, Idol)
Mark all of these tests too.
|
import pytest
from components.people.models import Group, Idol, Membership, Staff
from components.people.factories import (GroupFactory, IdolFactory,
MembershipFactory, StaffFactory)
pytestmark = pytest.mark.django_db
def test_group_factory():
factory = GroupFactory()
assert isinstance(factory, Group)
assert 'group' in factory.romanized_name
def test_group_get_absolute_url(client):
factory = GroupFactory()
response = client.get(factory.get_absolute_url())
assert response.status_code == 200
def test_idol_factory():
factory = IdolFactory()
assert isinstance(factory, Idol)
assert 'family' in factory.romanized_family_name
assert 'given' in factory.romanized_given_name
def test_idol_get_absolute_url(client):
factory = IdolFactory()
response = client.get(factory.get_absolute_url())
assert response.status_code == 200
def test_staff_factory():
factory = StaffFactory()
assert isinstance(factory, Staff)
assert 'family' in factory.romanized_family_name
assert 'given' in factory.romanized_given_name
def test_membership_factory():
factory = MembershipFactory()
assert isinstance(factory, Membership)
assert isinstance(factory.group, Group)
assert isinstance(factory.idol, Idol)
|
<commit_before>import pytest
from components.people.models import Group, Idol, Membership, Staff
from components.people.factories import (GroupFactory, IdolFactory,
MembershipFactory, StaffFactory)
@pytest.mark.django_db
def test_group_factory():
factory = GroupFactory()
assert isinstance(factory, Group)
assert 'group' in factory.romanized_name
@pytest.mark.django_db
def test_group_get_absolute_url(client):
factory = GroupFactory()
response = client.get(factory.get_absolute_url())
assert response.status_code == 200
@pytest.mark.django_db
def test_idol_factory():
factory = IdolFactory()
assert isinstance(factory, Idol)
assert 'family' in factory.romanized_family_name
assert 'given' in factory.romanized_given_name
@pytest.mark.django_db
def test_idol_get_absolute_url(client):
factory = IdolFactory()
response = client.get(factory.get_absolute_url())
assert response.status_code == 200
@pytest.mark.django_db
def test_staff_factory():
factory = StaffFactory()
assert isinstance(factory, Staff)
assert 'family' in factory.romanized_family_name
assert 'given' in factory.romanized_given_name
@pytest.mark.django_db
def test_membership_factory():
factory = MembershipFactory()
assert isinstance(factory, Membership)
assert isinstance(factory.group, Group)
assert isinstance(factory.idol, Idol)
<commit_msg>Mark all of these tests too.<commit_after>
|
import pytest
from components.people.models import Group, Idol, Membership, Staff
from components.people.factories import (GroupFactory, IdolFactory,
MembershipFactory, StaffFactory)
pytestmark = pytest.mark.django_db
def test_group_factory():
factory = GroupFactory()
assert isinstance(factory, Group)
assert 'group' in factory.romanized_name
def test_group_get_absolute_url(client):
factory = GroupFactory()
response = client.get(factory.get_absolute_url())
assert response.status_code == 200
def test_idol_factory():
factory = IdolFactory()
assert isinstance(factory, Idol)
assert 'family' in factory.romanized_family_name
assert 'given' in factory.romanized_given_name
def test_idol_get_absolute_url(client):
factory = IdolFactory()
response = client.get(factory.get_absolute_url())
assert response.status_code == 200
def test_staff_factory():
factory = StaffFactory()
assert isinstance(factory, Staff)
assert 'family' in factory.romanized_family_name
assert 'given' in factory.romanized_given_name
def test_membership_factory():
factory = MembershipFactory()
assert isinstance(factory, Membership)
assert isinstance(factory.group, Group)
assert isinstance(factory.idol, Idol)
|
import pytest
from components.people.models import Group, Idol, Membership, Staff
from components.people.factories import (GroupFactory, IdolFactory,
MembershipFactory, StaffFactory)
@pytest.mark.django_db
def test_group_factory():
factory = GroupFactory()
assert isinstance(factory, Group)
assert 'group' in factory.romanized_name
@pytest.mark.django_db
def test_group_get_absolute_url(client):
factory = GroupFactory()
response = client.get(factory.get_absolute_url())
assert response.status_code == 200
@pytest.mark.django_db
def test_idol_factory():
factory = IdolFactory()
assert isinstance(factory, Idol)
assert 'family' in factory.romanized_family_name
assert 'given' in factory.romanized_given_name
@pytest.mark.django_db
def test_idol_get_absolute_url(client):
factory = IdolFactory()
response = client.get(factory.get_absolute_url())
assert response.status_code == 200
@pytest.mark.django_db
def test_staff_factory():
factory = StaffFactory()
assert isinstance(factory, Staff)
assert 'family' in factory.romanized_family_name
assert 'given' in factory.romanized_given_name
@pytest.mark.django_db
def test_membership_factory():
factory = MembershipFactory()
assert isinstance(factory, Membership)
assert isinstance(factory.group, Group)
assert isinstance(factory.idol, Idol)
Mark all of these tests too.import pytest
from components.people.models import Group, Idol, Membership, Staff
from components.people.factories import (GroupFactory, IdolFactory,
MembershipFactory, StaffFactory)
pytestmark = pytest.mark.django_db
def test_group_factory():
factory = GroupFactory()
assert isinstance(factory, Group)
assert 'group' in factory.romanized_name
def test_group_get_absolute_url(client):
factory = GroupFactory()
response = client.get(factory.get_absolute_url())
assert response.status_code == 200
def test_idol_factory():
factory = IdolFactory()
assert isinstance(factory, Idol)
assert 'family' in factory.romanized_family_name
assert 'given' in factory.romanized_given_name
def test_idol_get_absolute_url(client):
factory = IdolFactory()
response = client.get(factory.get_absolute_url())
assert response.status_code == 200
def test_staff_factory():
factory = StaffFactory()
assert isinstance(factory, Staff)
assert 'family' in factory.romanized_family_name
assert 'given' in factory.romanized_given_name
def test_membership_factory():
factory = MembershipFactory()
assert isinstance(factory, Membership)
assert isinstance(factory.group, Group)
assert isinstance(factory.idol, Idol)
|
<commit_before>import pytest
from components.people.models import Group, Idol, Membership, Staff
from components.people.factories import (GroupFactory, IdolFactory,
MembershipFactory, StaffFactory)
@pytest.mark.django_db
def test_group_factory():
factory = GroupFactory()
assert isinstance(factory, Group)
assert 'group' in factory.romanized_name
@pytest.mark.django_db
def test_group_get_absolute_url(client):
factory = GroupFactory()
response = client.get(factory.get_absolute_url())
assert response.status_code == 200
@pytest.mark.django_db
def test_idol_factory():
factory = IdolFactory()
assert isinstance(factory, Idol)
assert 'family' in factory.romanized_family_name
assert 'given' in factory.romanized_given_name
@pytest.mark.django_db
def test_idol_get_absolute_url(client):
factory = IdolFactory()
response = client.get(factory.get_absolute_url())
assert response.status_code == 200
@pytest.mark.django_db
def test_staff_factory():
factory = StaffFactory()
assert isinstance(factory, Staff)
assert 'family' in factory.romanized_family_name
assert 'given' in factory.romanized_given_name
@pytest.mark.django_db
def test_membership_factory():
factory = MembershipFactory()
assert isinstance(factory, Membership)
assert isinstance(factory.group, Group)
assert isinstance(factory.idol, Idol)
<commit_msg>Mark all of these tests too.<commit_after>import pytest
from components.people.models import Group, Idol, Membership, Staff
from components.people.factories import (GroupFactory, IdolFactory,
MembershipFactory, StaffFactory)
pytestmark = pytest.mark.django_db
def test_group_factory():
factory = GroupFactory()
assert isinstance(factory, Group)
assert 'group' in factory.romanized_name
def test_group_get_absolute_url(client):
factory = GroupFactory()
response = client.get(factory.get_absolute_url())
assert response.status_code == 200
def test_idol_factory():
factory = IdolFactory()
assert isinstance(factory, Idol)
assert 'family' in factory.romanized_family_name
assert 'given' in factory.romanized_given_name
def test_idol_get_absolute_url(client):
factory = IdolFactory()
response = client.get(factory.get_absolute_url())
assert response.status_code == 200
def test_staff_factory():
factory = StaffFactory()
assert isinstance(factory, Staff)
assert 'family' in factory.romanized_family_name
assert 'given' in factory.romanized_given_name
def test_membership_factory():
factory = MembershipFactory()
assert isinstance(factory, Membership)
assert isinstance(factory.group, Group)
assert isinstance(factory.idol, Idol)
|
5cd5f89c7973fc3e5e3e7b4aabc4992050c3643f
|
tests/selenium/test_home.py
|
tests/selenium/test_home.py
|
"""
Module to hold basic home Selenium tests.
"""
from selenium import selenium
import unittest, time, re, os, sys, subprocess
def rel_to_abs(path):
"""
Function to take relative path and make absolute
"""
current_dir = os.path.abspath(os.path.dirname(__file__))
return os.path.join(current_dir, path)
class test_home(unittest.TestCase):
"""
Tests that the home page can be reached and home link can be used.
"""
def setUp(self):
self.verificationErrors = []
# Database
# Start application
config = rel_to_abs("../../lighttpd.conf")
self.server_proc = subprocess.Popen(["lighttpd -D -f %s" % config], shell = True)
# Start selenium
self.selenium = selenium("localhost", 4443, "*chrome", "http://localhost:8080/")
self.selenium.start()
def test_test_home(self):
"""
Actual test that checks the home page can be reached and home link can be used.
"""
sel = self.selenium
sel.open("/")
sel.click("link=Change by Us NYC")
sel.wait_for_page_to_load("20000")
def tearDown(self):
self.selenium.stop()
self.assertEqual([], self.verificationErrors)
self.server_proc.kill()
if __name__ == "__main__":
unittest.main()
|
"""
Module to hold basic home Selenium tests.
"""
from selenium import selenium
import unittest, time, re, os, sys, subprocess
def rel_to_abs(path):
"""
Function to take relative path and make absolute
"""
current_dir = os.path.abspath(os.path.dirname(__file__))
return os.path.join(current_dir, path)
class test_home(unittest.TestCase):
"""
Tests that the home page can be reached and home link can be used.
"""
def setUp(self):
self.verificationErrors = []
# Database
# Start application
config = rel_to_abs("../../lighttpd.conf")
self.server_proc = subprocess.Popen(["lighttpd -D -f %s" % config], shell = True)
# Start selenium
self.selenium = selenium("localhost", 4443, "*firefox", "http://localhost:8080/")
self.selenium.start()
def test_test_home(self):
"""
Actual test that checks the home page can be reached and home link can be used.
"""
sel = self.selenium
sel.open("/")
sel.click("link=Change by Us NYC")
sel.wait_for_page_to_load("20000")
def tearDown(self):
self.selenium.stop()
self.assertEqual([], self.verificationErrors)
self.server_proc.kill()
if __name__ == "__main__":
unittest.main()
|
Change selenium browser to firefox.
|
Change selenium browser to firefox.
|
Python
|
agpl-3.0
|
watchcat/cbu-rotterdam,codeforeurope/Change-By-Us,watchcat/cbu-rotterdam,localprojects/Change-By-Us,localprojects/Change-By-Us,codeforamerica/Change-By-Us,codeforeurope/Change-By-Us,codeforamerica/Change-By-Us,localprojects/Change-By-Us,watchcat/cbu-rotterdam,codeforamerica/Change-By-Us,watchcat/cbu-rotterdam,codeforeurope/Change-By-Us,watchcat/cbu-rotterdam,codeforeurope/Change-By-Us,codeforamerica/Change-By-Us,localprojects/Change-By-Us
|
"""
Module to hold basic home Selenium tests.
"""
from selenium import selenium
import unittest, time, re, os, sys, subprocess
def rel_to_abs(path):
"""
Function to take relative path and make absolute
"""
current_dir = os.path.abspath(os.path.dirname(__file__))
return os.path.join(current_dir, path)
class test_home(unittest.TestCase):
"""
Tests that the home page can be reached and home link can be used.
"""
def setUp(self):
self.verificationErrors = []
# Database
# Start application
config = rel_to_abs("../../lighttpd.conf")
self.server_proc = subprocess.Popen(["lighttpd -D -f %s" % config], shell = True)
# Start selenium
self.selenium = selenium("localhost", 4443, "*chrome", "http://localhost:8080/")
self.selenium.start()
def test_test_home(self):
"""
Actual test that checks the home page can be reached and home link can be used.
"""
sel = self.selenium
sel.open("/")
sel.click("link=Change by Us NYC")
sel.wait_for_page_to_load("20000")
def tearDown(self):
self.selenium.stop()
self.assertEqual([], self.verificationErrors)
self.server_proc.kill()
if __name__ == "__main__":
unittest.main()
Change selenium browser to firefox.
|
"""
Module to hold basic home Selenium tests.
"""
from selenium import selenium
import unittest, time, re, os, sys, subprocess
def rel_to_abs(path):
"""
Function to take relative path and make absolute
"""
current_dir = os.path.abspath(os.path.dirname(__file__))
return os.path.join(current_dir, path)
class test_home(unittest.TestCase):
"""
Tests that the home page can be reached and home link can be used.
"""
def setUp(self):
self.verificationErrors = []
# Database
# Start application
config = rel_to_abs("../../lighttpd.conf")
self.server_proc = subprocess.Popen(["lighttpd -D -f %s" % config], shell = True)
# Start selenium
self.selenium = selenium("localhost", 4443, "*firefox", "http://localhost:8080/")
self.selenium.start()
def test_test_home(self):
"""
Actual test that checks the home page can be reached and home link can be used.
"""
sel = self.selenium
sel.open("/")
sel.click("link=Change by Us NYC")
sel.wait_for_page_to_load("20000")
def tearDown(self):
self.selenium.stop()
self.assertEqual([], self.verificationErrors)
self.server_proc.kill()
if __name__ == "__main__":
unittest.main()
|
<commit_before>"""
Module to hold basic home Selenium tests.
"""
from selenium import selenium
import unittest, time, re, os, sys, subprocess
def rel_to_abs(path):
"""
Function to take relative path and make absolute
"""
current_dir = os.path.abspath(os.path.dirname(__file__))
return os.path.join(current_dir, path)
class test_home(unittest.TestCase):
"""
Tests that the home page can be reached and home link can be used.
"""
def setUp(self):
self.verificationErrors = []
# Database
# Start application
config = rel_to_abs("../../lighttpd.conf")
self.server_proc = subprocess.Popen(["lighttpd -D -f %s" % config], shell = True)
# Start selenium
self.selenium = selenium("localhost", 4443, "*chrome", "http://localhost:8080/")
self.selenium.start()
def test_test_home(self):
"""
Actual test that checks the home page can be reached and home link can be used.
"""
sel = self.selenium
sel.open("/")
sel.click("link=Change by Us NYC")
sel.wait_for_page_to_load("20000")
def tearDown(self):
self.selenium.stop()
self.assertEqual([], self.verificationErrors)
self.server_proc.kill()
if __name__ == "__main__":
unittest.main()
<commit_msg>Change selenium browser to firefox.<commit_after>
|
"""
Module to hold basic home Selenium tests.
"""
from selenium import selenium
import unittest, time, re, os, sys, subprocess
def rel_to_abs(path):
"""
Function to take relative path and make absolute
"""
current_dir = os.path.abspath(os.path.dirname(__file__))
return os.path.join(current_dir, path)
class test_home(unittest.TestCase):
"""
Tests that the home page can be reached and home link can be used.
"""
def setUp(self):
self.verificationErrors = []
# Database
# Start application
config = rel_to_abs("../../lighttpd.conf")
self.server_proc = subprocess.Popen(["lighttpd -D -f %s" % config], shell = True)
# Start selenium
self.selenium = selenium("localhost", 4443, "*firefox", "http://localhost:8080/")
self.selenium.start()
def test_test_home(self):
"""
Actual test that checks the home page can be reached and home link can be used.
"""
sel = self.selenium
sel.open("/")
sel.click("link=Change by Us NYC")
sel.wait_for_page_to_load("20000")
def tearDown(self):
self.selenium.stop()
self.assertEqual([], self.verificationErrors)
self.server_proc.kill()
if __name__ == "__main__":
unittest.main()
|
"""
Module to hold basic home Selenium tests.
"""
from selenium import selenium
import unittest, time, re, os, sys, subprocess
def rel_to_abs(path):
"""
Function to take relative path and make absolute
"""
current_dir = os.path.abspath(os.path.dirname(__file__))
return os.path.join(current_dir, path)
class test_home(unittest.TestCase):
"""
Tests that the home page can be reached and home link can be used.
"""
def setUp(self):
self.verificationErrors = []
# Database
# Start application
config = rel_to_abs("../../lighttpd.conf")
self.server_proc = subprocess.Popen(["lighttpd -D -f %s" % config], shell = True)
# Start selenium
self.selenium = selenium("localhost", 4443, "*chrome", "http://localhost:8080/")
self.selenium.start()
def test_test_home(self):
"""
Actual test that checks the home page can be reached and home link can be used.
"""
sel = self.selenium
sel.open("/")
sel.click("link=Change by Us NYC")
sel.wait_for_page_to_load("20000")
def tearDown(self):
self.selenium.stop()
self.assertEqual([], self.verificationErrors)
self.server_proc.kill()
if __name__ == "__main__":
unittest.main()
Change selenium browser to firefox."""
Module to hold basic home Selenium tests.
"""
from selenium import selenium
import unittest, time, re, os, sys, subprocess
def rel_to_abs(path):
"""
Function to take relative path and make absolute
"""
current_dir = os.path.abspath(os.path.dirname(__file__))
return os.path.join(current_dir, path)
class test_home(unittest.TestCase):
"""
Tests that the home page can be reached and home link can be used.
"""
def setUp(self):
self.verificationErrors = []
# Database
# Start application
config = rel_to_abs("../../lighttpd.conf")
self.server_proc = subprocess.Popen(["lighttpd -D -f %s" % config], shell = True)
# Start selenium
self.selenium = selenium("localhost", 4443, "*firefox", "http://localhost:8080/")
self.selenium.start()
def test_test_home(self):
"""
Actual test that checks the home page can be reached and home link can be used.
"""
sel = self.selenium
sel.open("/")
sel.click("link=Change by Us NYC")
sel.wait_for_page_to_load("20000")
def tearDown(self):
self.selenium.stop()
self.assertEqual([], self.verificationErrors)
self.server_proc.kill()
if __name__ == "__main__":
unittest.main()
|
<commit_before>"""
Module to hold basic home Selenium tests.
"""
from selenium import selenium
import unittest, time, re, os, sys, subprocess
def rel_to_abs(path):
"""
Function to take relative path and make absolute
"""
current_dir = os.path.abspath(os.path.dirname(__file__))
return os.path.join(current_dir, path)
class test_home(unittest.TestCase):
"""
Tests that the home page can be reached and home link can be used.
"""
def setUp(self):
self.verificationErrors = []
# Database
# Start application
config = rel_to_abs("../../lighttpd.conf")
self.server_proc = subprocess.Popen(["lighttpd -D -f %s" % config], shell = True)
# Start selenium
self.selenium = selenium("localhost", 4443, "*chrome", "http://localhost:8080/")
self.selenium.start()
def test_test_home(self):
"""
Actual test that checks the home page can be reached and home link can be used.
"""
sel = self.selenium
sel.open("/")
sel.click("link=Change by Us NYC")
sel.wait_for_page_to_load("20000")
def tearDown(self):
self.selenium.stop()
self.assertEqual([], self.verificationErrors)
self.server_proc.kill()
if __name__ == "__main__":
unittest.main()
<commit_msg>Change selenium browser to firefox.<commit_after>"""
Module to hold basic home Selenium tests.
"""
from selenium import selenium
import unittest, time, re, os, sys, subprocess
def rel_to_abs(path):
"""
Function to take relative path and make absolute
"""
current_dir = os.path.abspath(os.path.dirname(__file__))
return os.path.join(current_dir, path)
class test_home(unittest.TestCase):
"""
Tests that the home page can be reached and home link can be used.
"""
def setUp(self):
self.verificationErrors = []
# Database
# Start application
config = rel_to_abs("../../lighttpd.conf")
self.server_proc = subprocess.Popen(["lighttpd -D -f %s" % config], shell = True)
# Start selenium
self.selenium = selenium("localhost", 4443, "*firefox", "http://localhost:8080/")
self.selenium.start()
def test_test_home(self):
"""
Actual test that checks the home page can be reached and home link can be used.
"""
sel = self.selenium
sel.open("/")
sel.click("link=Change by Us NYC")
sel.wait_for_page_to_load("20000")
def tearDown(self):
self.selenium.stop()
self.assertEqual([], self.verificationErrors)
self.server_proc.kill()
if __name__ == "__main__":
unittest.main()
|
61c4c634807b4adfe9e08152543eba396e256ab9
|
conllu/tree_helpers.py
|
conllu/tree_helpers.py
|
from __future__ import print_function, unicode_literals
from collections import namedtuple
TreeNode = namedtuple('TreeNode', ['data', 'children'])
def create_tree(node_children_mapping, start=0):
subtree = [
TreeNode(child, create_tree(node_children_mapping, child["id"]))
for child in node_children_mapping[start]
]
return subtree
def print_tree(node, depth=0):
assert isinstance(node, TreeNode), "node not TreeNode %s" % type(node)
print("\t" * depth + "(deprel:{deprel}) form:{form}, tag:{tag} [{idx}]".format(
deprel=node.data["deprel"],
form=node.data["form"],
tag=node.data["upostag"],
idx=node.data["id"],
))
for child in node.children:
print_tree(child, depth + 1)
|
from __future__ import print_function, unicode_literals
from collections import namedtuple
TreeNode = namedtuple('TreeNode', ['data', 'children'])
def create_tree(node_children_mapping, start=0):
subtree = [
TreeNode(child, create_tree(node_children_mapping, child["id"]))
for child in node_children_mapping[start]
]
return subtree
def print_tree(node, depth=0, indent=4, exclude_fields=["id", "deprel", "xpostag", "feats", "head", "deps", "misc"]):
assert isinstance(node, TreeNode), "node not TreeNode %s" % type(node)
relevant_data = node.data.copy()
map(lambda x: relevant_data.pop(x, None), exclude_fields)
node_repr = " ".join([
"{key}:{value}".format(key=key, value=value)
for key, value in relevant_data.items()
])
print(" " * indent * depth + "(deprel:{deprel}) {node_repr} [{idx}]".format(
deprel=node.data["deprel"],
node_repr=node_repr,
idx=node.data["id"],
))
for child in node.children:
print_tree(child, depth + 1, indent=indent, exclude_fields=exclude_fields)
|
Generalize print_tree to work with different number of columns.
|
Generalize print_tree to work with different number of columns.
|
Python
|
mit
|
EmilStenstrom/conllu
|
from __future__ import print_function, unicode_literals
from collections import namedtuple
TreeNode = namedtuple('TreeNode', ['data', 'children'])
def create_tree(node_children_mapping, start=0):
subtree = [
TreeNode(child, create_tree(node_children_mapping, child["id"]))
for child in node_children_mapping[start]
]
return subtree
def print_tree(node, depth=0):
assert isinstance(node, TreeNode), "node not TreeNode %s" % type(node)
print("\t" * depth + "(deprel:{deprel}) form:{form}, tag:{tag} [{idx}]".format(
deprel=node.data["deprel"],
form=node.data["form"],
tag=node.data["upostag"],
idx=node.data["id"],
))
for child in node.children:
print_tree(child, depth + 1)
Generalize print_tree to work with different number of columns.
|
from __future__ import print_function, unicode_literals
from collections import namedtuple
TreeNode = namedtuple('TreeNode', ['data', 'children'])
def create_tree(node_children_mapping, start=0):
subtree = [
TreeNode(child, create_tree(node_children_mapping, child["id"]))
for child in node_children_mapping[start]
]
return subtree
def print_tree(node, depth=0, indent=4, exclude_fields=["id", "deprel", "xpostag", "feats", "head", "deps", "misc"]):
assert isinstance(node, TreeNode), "node not TreeNode %s" % type(node)
relevant_data = node.data.copy()
map(lambda x: relevant_data.pop(x, None), exclude_fields)
node_repr = " ".join([
"{key}:{value}".format(key=key, value=value)
for key, value in relevant_data.items()
])
print(" " * indent * depth + "(deprel:{deprel}) {node_repr} [{idx}]".format(
deprel=node.data["deprel"],
node_repr=node_repr,
idx=node.data["id"],
))
for child in node.children:
print_tree(child, depth + 1, indent=indent, exclude_fields=exclude_fields)
|
<commit_before>from __future__ import print_function, unicode_literals
from collections import namedtuple
TreeNode = namedtuple('TreeNode', ['data', 'children'])
def create_tree(node_children_mapping, start=0):
subtree = [
TreeNode(child, create_tree(node_children_mapping, child["id"]))
for child in node_children_mapping[start]
]
return subtree
def print_tree(node, depth=0):
assert isinstance(node, TreeNode), "node not TreeNode %s" % type(node)
print("\t" * depth + "(deprel:{deprel}) form:{form}, tag:{tag} [{idx}]".format(
deprel=node.data["deprel"],
form=node.data["form"],
tag=node.data["upostag"],
idx=node.data["id"],
))
for child in node.children:
print_tree(child, depth + 1)
<commit_msg>Generalize print_tree to work with different number of columns.<commit_after>
|
from __future__ import print_function, unicode_literals
from collections import namedtuple
TreeNode = namedtuple('TreeNode', ['data', 'children'])
def create_tree(node_children_mapping, start=0):
subtree = [
TreeNode(child, create_tree(node_children_mapping, child["id"]))
for child in node_children_mapping[start]
]
return subtree
def print_tree(node, depth=0, indent=4, exclude_fields=["id", "deprel", "xpostag", "feats", "head", "deps", "misc"]):
assert isinstance(node, TreeNode), "node not TreeNode %s" % type(node)
relevant_data = node.data.copy()
map(lambda x: relevant_data.pop(x, None), exclude_fields)
node_repr = " ".join([
"{key}:{value}".format(key=key, value=value)
for key, value in relevant_data.items()
])
print(" " * indent * depth + "(deprel:{deprel}) {node_repr} [{idx}]".format(
deprel=node.data["deprel"],
node_repr=node_repr,
idx=node.data["id"],
))
for child in node.children:
print_tree(child, depth + 1, indent=indent, exclude_fields=exclude_fields)
|
from __future__ import print_function, unicode_literals
from collections import namedtuple
TreeNode = namedtuple('TreeNode', ['data', 'children'])
def create_tree(node_children_mapping, start=0):
subtree = [
TreeNode(child, create_tree(node_children_mapping, child["id"]))
for child in node_children_mapping[start]
]
return subtree
def print_tree(node, depth=0):
assert isinstance(node, TreeNode), "node not TreeNode %s" % type(node)
print("\t" * depth + "(deprel:{deprel}) form:{form}, tag:{tag} [{idx}]".format(
deprel=node.data["deprel"],
form=node.data["form"],
tag=node.data["upostag"],
idx=node.data["id"],
))
for child in node.children:
print_tree(child, depth + 1)
Generalize print_tree to work with different number of columns.from __future__ import print_function, unicode_literals
from collections import namedtuple
TreeNode = namedtuple('TreeNode', ['data', 'children'])
def create_tree(node_children_mapping, start=0):
subtree = [
TreeNode(child, create_tree(node_children_mapping, child["id"]))
for child in node_children_mapping[start]
]
return subtree
def print_tree(node, depth=0, indent=4, exclude_fields=["id", "deprel", "xpostag", "feats", "head", "deps", "misc"]):
assert isinstance(node, TreeNode), "node not TreeNode %s" % type(node)
relevant_data = node.data.copy()
map(lambda x: relevant_data.pop(x, None), exclude_fields)
node_repr = " ".join([
"{key}:{value}".format(key=key, value=value)
for key, value in relevant_data.items()
])
print(" " * indent * depth + "(deprel:{deprel}) {node_repr} [{idx}]".format(
deprel=node.data["deprel"],
node_repr=node_repr,
idx=node.data["id"],
))
for child in node.children:
print_tree(child, depth + 1, indent=indent, exclude_fields=exclude_fields)
|
<commit_before>from __future__ import print_function, unicode_literals
from collections import namedtuple
TreeNode = namedtuple('TreeNode', ['data', 'children'])
def create_tree(node_children_mapping, start=0):
subtree = [
TreeNode(child, create_tree(node_children_mapping, child["id"]))
for child in node_children_mapping[start]
]
return subtree
def print_tree(node, depth=0):
assert isinstance(node, TreeNode), "node not TreeNode %s" % type(node)
print("\t" * depth + "(deprel:{deprel}) form:{form}, tag:{tag} [{idx}]".format(
deprel=node.data["deprel"],
form=node.data["form"],
tag=node.data["upostag"],
idx=node.data["id"],
))
for child in node.children:
print_tree(child, depth + 1)
<commit_msg>Generalize print_tree to work with different number of columns.<commit_after>from __future__ import print_function, unicode_literals
from collections import namedtuple
TreeNode = namedtuple('TreeNode', ['data', 'children'])
def create_tree(node_children_mapping, start=0):
subtree = [
TreeNode(child, create_tree(node_children_mapping, child["id"]))
for child in node_children_mapping[start]
]
return subtree
def print_tree(node, depth=0, indent=4, exclude_fields=["id", "deprel", "xpostag", "feats", "head", "deps", "misc"]):
assert isinstance(node, TreeNode), "node not TreeNode %s" % type(node)
relevant_data = node.data.copy()
map(lambda x: relevant_data.pop(x, None), exclude_fields)
node_repr = " ".join([
"{key}:{value}".format(key=key, value=value)
for key, value in relevant_data.items()
])
print(" " * indent * depth + "(deprel:{deprel}) {node_repr} [{idx}]".format(
deprel=node.data["deprel"],
node_repr=node_repr,
idx=node.data["id"],
))
for child in node.children:
print_tree(child, depth + 1, indent=indent, exclude_fields=exclude_fields)
|
c7deb065bc51661501289b1f695aa0c21260266d
|
python/saliweb/backend/process_jobs.py
|
python/saliweb/backend/process_jobs.py
|
def main(webservice):
web = webservice.get_web_service(webservice.config)
web.do_all_processing()
|
from optparse import OptionParser
import saliweb.backend
import sys
def get_options():
parser = OptionParser()
parser.set_usage("""
%prog [-h] [-v]
Do any necessary processing of incoming, completed, or old jobs.
""")
parser.add_option('-v', '--verbose', action="store_true", dest="verbose",
help="""Print verbose output""")
opts, args = parser.parse_args()
if len(args) != 0:
parser.error("Extra arguments given")
return opts
def main(webservice):
opts = get_options()
web = webservice.get_web_service(webservice.config)
try:
web.do_all_processing()
except saliweb.backend.StateFileError, detail:
if opts.verbose:
raise
# else ignore the exception
|
Add a verbose option; by default, swallow any statefile exception.
|
Add a verbose option; by default, swallow any statefile exception.
|
Python
|
lgpl-2.1
|
salilab/saliweb,salilab/saliweb,salilab/saliweb,salilab/saliweb,salilab/saliweb
|
def main(webservice):
web = webservice.get_web_service(webservice.config)
web.do_all_processing()
Add a verbose option; by default, swallow any statefile exception.
|
from optparse import OptionParser
import saliweb.backend
import sys
def get_options():
parser = OptionParser()
parser.set_usage("""
%prog [-h] [-v]
Do any necessary processing of incoming, completed, or old jobs.
""")
parser.add_option('-v', '--verbose', action="store_true", dest="verbose",
help="""Print verbose output""")
opts, args = parser.parse_args()
if len(args) != 0:
parser.error("Extra arguments given")
return opts
def main(webservice):
opts = get_options()
web = webservice.get_web_service(webservice.config)
try:
web.do_all_processing()
except saliweb.backend.StateFileError, detail:
if opts.verbose:
raise
# else ignore the exception
|
<commit_before>def main(webservice):
web = webservice.get_web_service(webservice.config)
web.do_all_processing()
<commit_msg>Add a verbose option; by default, swallow any statefile exception.<commit_after>
|
from optparse import OptionParser
import saliweb.backend
import sys
def get_options():
parser = OptionParser()
parser.set_usage("""
%prog [-h] [-v]
Do any necessary processing of incoming, completed, or old jobs.
""")
parser.add_option('-v', '--verbose', action="store_true", dest="verbose",
help="""Print verbose output""")
opts, args = parser.parse_args()
if len(args) != 0:
parser.error("Extra arguments given")
return opts
def main(webservice):
opts = get_options()
web = webservice.get_web_service(webservice.config)
try:
web.do_all_processing()
except saliweb.backend.StateFileError, detail:
if opts.verbose:
raise
# else ignore the exception
|
def main(webservice):
web = webservice.get_web_service(webservice.config)
web.do_all_processing()
Add a verbose option; by default, swallow any statefile exception.from optparse import OptionParser
import saliweb.backend
import sys
def get_options():
parser = OptionParser()
parser.set_usage("""
%prog [-h] [-v]
Do any necessary processing of incoming, completed, or old jobs.
""")
parser.add_option('-v', '--verbose', action="store_true", dest="verbose",
help="""Print verbose output""")
opts, args = parser.parse_args()
if len(args) != 0:
parser.error("Extra arguments given")
return opts
def main(webservice):
opts = get_options()
web = webservice.get_web_service(webservice.config)
try:
web.do_all_processing()
except saliweb.backend.StateFileError, detail:
if opts.verbose:
raise
# else ignore the exception
|
<commit_before>def main(webservice):
web = webservice.get_web_service(webservice.config)
web.do_all_processing()
<commit_msg>Add a verbose option; by default, swallow any statefile exception.<commit_after>from optparse import OptionParser
import saliweb.backend
import sys
def get_options():
parser = OptionParser()
parser.set_usage("""
%prog [-h] [-v]
Do any necessary processing of incoming, completed, or old jobs.
""")
parser.add_option('-v', '--verbose', action="store_true", dest="verbose",
help="""Print verbose output""")
opts, args = parser.parse_args()
if len(args) != 0:
parser.error("Extra arguments given")
return opts
def main(webservice):
opts = get_options()
web = webservice.get_web_service(webservice.config)
try:
web.do_all_processing()
except saliweb.backend.StateFileError, detail:
if opts.verbose:
raise
# else ignore the exception
|
5c296cdb60f448c6dc15720d5ec7a5310a09f1ae
|
troposphere/eventschemas.py
|
troposphere/eventschemas.py
|
# Copyright (c) 2012-2019, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 10.0.0
from . import AWSObject
from troposphere import Tags
class Discoverer(AWSObject):
resource_type = "AWS::EventSchemas::Discoverer"
props = {
'Description': (basestring, False),
'SourceArn': (basestring, True),
'Tags': (Tags, False),
}
class Registry(AWSObject):
resource_type = "AWS::EventSchemas::Registry"
props = {
'Description': (basestring, False),
'RegistryName': (basestring, False),
'Tags': (Tags, False),
}
class Schema(AWSObject):
resource_type = "AWS::EventSchemas::Schema"
props = {
'Content': (basestring, True),
'Description': (basestring, False),
'RegistryName': (basestring, True),
'SchemaName': (basestring, False),
'Tags': (Tags, False),
'Type': (basestring, True),
}
|
# Copyright (c) 2012-2019, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 14.1.0
from . import AWSObject
from troposphere import Tags
class Discoverer(AWSObject):
resource_type = "AWS::EventSchemas::Discoverer"
props = {
'Description': (basestring, False),
'SourceArn': (basestring, True),
'Tags': (Tags, False),
}
class Registry(AWSObject):
resource_type = "AWS::EventSchemas::Registry"
props = {
'Description': (basestring, False),
'RegistryName': (basestring, False),
'Tags': (Tags, False),
}
class RegistryPolicy(AWSObject):
resource_type = "AWS::EventSchemas::RegistryPolicy"
props = {
'Policy': (dict, True),
'RegistryName': (basestring, True),
'RevisionId': (basestring, False),
}
class Schema(AWSObject):
resource_type = "AWS::EventSchemas::Schema"
props = {
'Content': (basestring, True),
'Description': (basestring, False),
'RegistryName': (basestring, True),
'SchemaName': (basestring, False),
'Tags': (Tags, False),
'Type': (basestring, True),
}
|
Update EventSchemas per 2020-04-30 changes
|
Update EventSchemas per 2020-04-30 changes
|
Python
|
bsd-2-clause
|
cloudtools/troposphere,cloudtools/troposphere
|
# Copyright (c) 2012-2019, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 10.0.0
from . import AWSObject
from troposphere import Tags
class Discoverer(AWSObject):
resource_type = "AWS::EventSchemas::Discoverer"
props = {
'Description': (basestring, False),
'SourceArn': (basestring, True),
'Tags': (Tags, False),
}
class Registry(AWSObject):
resource_type = "AWS::EventSchemas::Registry"
props = {
'Description': (basestring, False),
'RegistryName': (basestring, False),
'Tags': (Tags, False),
}
class Schema(AWSObject):
resource_type = "AWS::EventSchemas::Schema"
props = {
'Content': (basestring, True),
'Description': (basestring, False),
'RegistryName': (basestring, True),
'SchemaName': (basestring, False),
'Tags': (Tags, False),
'Type': (basestring, True),
}
Update EventSchemas per 2020-04-30 changes
|
# Copyright (c) 2012-2019, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 14.1.0
from . import AWSObject
from troposphere import Tags
class Discoverer(AWSObject):
resource_type = "AWS::EventSchemas::Discoverer"
props = {
'Description': (basestring, False),
'SourceArn': (basestring, True),
'Tags': (Tags, False),
}
class Registry(AWSObject):
resource_type = "AWS::EventSchemas::Registry"
props = {
'Description': (basestring, False),
'RegistryName': (basestring, False),
'Tags': (Tags, False),
}
class RegistryPolicy(AWSObject):
resource_type = "AWS::EventSchemas::RegistryPolicy"
props = {
'Policy': (dict, True),
'RegistryName': (basestring, True),
'RevisionId': (basestring, False),
}
class Schema(AWSObject):
resource_type = "AWS::EventSchemas::Schema"
props = {
'Content': (basestring, True),
'Description': (basestring, False),
'RegistryName': (basestring, True),
'SchemaName': (basestring, False),
'Tags': (Tags, False),
'Type': (basestring, True),
}
|
<commit_before># Copyright (c) 2012-2019, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 10.0.0
from . import AWSObject
from troposphere import Tags
class Discoverer(AWSObject):
resource_type = "AWS::EventSchemas::Discoverer"
props = {
'Description': (basestring, False),
'SourceArn': (basestring, True),
'Tags': (Tags, False),
}
class Registry(AWSObject):
resource_type = "AWS::EventSchemas::Registry"
props = {
'Description': (basestring, False),
'RegistryName': (basestring, False),
'Tags': (Tags, False),
}
class Schema(AWSObject):
resource_type = "AWS::EventSchemas::Schema"
props = {
'Content': (basestring, True),
'Description': (basestring, False),
'RegistryName': (basestring, True),
'SchemaName': (basestring, False),
'Tags': (Tags, False),
'Type': (basestring, True),
}
<commit_msg>Update EventSchemas per 2020-04-30 changes<commit_after>
|
# Copyright (c) 2012-2019, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 14.1.0
from . import AWSObject
from troposphere import Tags
class Discoverer(AWSObject):
resource_type = "AWS::EventSchemas::Discoverer"
props = {
'Description': (basestring, False),
'SourceArn': (basestring, True),
'Tags': (Tags, False),
}
class Registry(AWSObject):
resource_type = "AWS::EventSchemas::Registry"
props = {
'Description': (basestring, False),
'RegistryName': (basestring, False),
'Tags': (Tags, False),
}
class RegistryPolicy(AWSObject):
resource_type = "AWS::EventSchemas::RegistryPolicy"
props = {
'Policy': (dict, True),
'RegistryName': (basestring, True),
'RevisionId': (basestring, False),
}
class Schema(AWSObject):
resource_type = "AWS::EventSchemas::Schema"
props = {
'Content': (basestring, True),
'Description': (basestring, False),
'RegistryName': (basestring, True),
'SchemaName': (basestring, False),
'Tags': (Tags, False),
'Type': (basestring, True),
}
|
# Copyright (c) 2012-2019, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 10.0.0
from . import AWSObject
from troposphere import Tags
class Discoverer(AWSObject):
resource_type = "AWS::EventSchemas::Discoverer"
props = {
'Description': (basestring, False),
'SourceArn': (basestring, True),
'Tags': (Tags, False),
}
class Registry(AWSObject):
resource_type = "AWS::EventSchemas::Registry"
props = {
'Description': (basestring, False),
'RegistryName': (basestring, False),
'Tags': (Tags, False),
}
class Schema(AWSObject):
resource_type = "AWS::EventSchemas::Schema"
props = {
'Content': (basestring, True),
'Description': (basestring, False),
'RegistryName': (basestring, True),
'SchemaName': (basestring, False),
'Tags': (Tags, False),
'Type': (basestring, True),
}
Update EventSchemas per 2020-04-30 changes# Copyright (c) 2012-2019, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 14.1.0
from . import AWSObject
from troposphere import Tags
class Discoverer(AWSObject):
resource_type = "AWS::EventSchemas::Discoverer"
props = {
'Description': (basestring, False),
'SourceArn': (basestring, True),
'Tags': (Tags, False),
}
class Registry(AWSObject):
resource_type = "AWS::EventSchemas::Registry"
props = {
'Description': (basestring, False),
'RegistryName': (basestring, False),
'Tags': (Tags, False),
}
class RegistryPolicy(AWSObject):
resource_type = "AWS::EventSchemas::RegistryPolicy"
props = {
'Policy': (dict, True),
'RegistryName': (basestring, True),
'RevisionId': (basestring, False),
}
class Schema(AWSObject):
resource_type = "AWS::EventSchemas::Schema"
props = {
'Content': (basestring, True),
'Description': (basestring, False),
'RegistryName': (basestring, True),
'SchemaName': (basestring, False),
'Tags': (Tags, False),
'Type': (basestring, True),
}
|
<commit_before># Copyright (c) 2012-2019, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 10.0.0
from . import AWSObject
from troposphere import Tags
class Discoverer(AWSObject):
resource_type = "AWS::EventSchemas::Discoverer"
props = {
'Description': (basestring, False),
'SourceArn': (basestring, True),
'Tags': (Tags, False),
}
class Registry(AWSObject):
resource_type = "AWS::EventSchemas::Registry"
props = {
'Description': (basestring, False),
'RegistryName': (basestring, False),
'Tags': (Tags, False),
}
class Schema(AWSObject):
resource_type = "AWS::EventSchemas::Schema"
props = {
'Content': (basestring, True),
'Description': (basestring, False),
'RegistryName': (basestring, True),
'SchemaName': (basestring, False),
'Tags': (Tags, False),
'Type': (basestring, True),
}
<commit_msg>Update EventSchemas per 2020-04-30 changes<commit_after># Copyright (c) 2012-2019, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 14.1.0
from . import AWSObject
from troposphere import Tags
class Discoverer(AWSObject):
resource_type = "AWS::EventSchemas::Discoverer"
props = {
'Description': (basestring, False),
'SourceArn': (basestring, True),
'Tags': (Tags, False),
}
class Registry(AWSObject):
resource_type = "AWS::EventSchemas::Registry"
props = {
'Description': (basestring, False),
'RegistryName': (basestring, False),
'Tags': (Tags, False),
}
class RegistryPolicy(AWSObject):
resource_type = "AWS::EventSchemas::RegistryPolicy"
props = {
'Policy': (dict, True),
'RegistryName': (basestring, True),
'RevisionId': (basestring, False),
}
class Schema(AWSObject):
resource_type = "AWS::EventSchemas::Schema"
props = {
'Content': (basestring, True),
'Description': (basestring, False),
'RegistryName': (basestring, True),
'SchemaName': (basestring, False),
'Tags': (Tags, False),
'Type': (basestring, True),
}
|
c30534ae95dd5d8ffbe449a842538fafd808c773
|
python/tests/test_none.py
|
python/tests/test_none.py
|
import types
def test_none_singleton(py2):
if py2:
assert isinstance(None, types.NoneType)
else:
# https://stackoverflow.com/questions/21706609
assert type(None)() is None
class Negator(object):
def __eq__(self, other):
return not other # doesn't make sense
def test_none_identity():
none = None
thing = Negator()
assert none is None # singleton
assert thing is not None
def test_none_equality():
none = None
thing = Negator()
assert none == None
assert not (none != None)
assert thing == None # dependes on __eq__
assert thing != None
|
import types
def test_singleton(py2):
if py2:
assert isinstance(None, types.NoneType)
else:
# https://stackoverflow.com/questions/21706609
assert type(None)() is None
class Negator(object):
def __eq__(self, other):
return not other # doesn't make sense
def __ne__(self, other): # requried for py2
return not self.__eq__(other)
def test_comparison__use_identity():
none = None
thing = Negator()
assert none is None # singleton
assert thing is not None
def test_comparison__donot_use_equality():
none = None
thing = Negator()
assert none == None
assert not (none != None)
# weird? the result dependes on thing.__eq__()
assert thing == None
assert not (thing != None)
|
Improve tests relevant to None comparison
|
[python] Improve tests relevant to None comparison
|
Python
|
mit
|
imsardine/learning,imsardine/learning,imsardine/learning,imsardine/learning,imsardine/learning,imsardine/learning,imsardine/learning
|
import types
def test_none_singleton(py2):
if py2:
assert isinstance(None, types.NoneType)
else:
# https://stackoverflow.com/questions/21706609
assert type(None)() is None
class Negator(object):
def __eq__(self, other):
return not other # doesn't make sense
def test_none_identity():
none = None
thing = Negator()
assert none is None # singleton
assert thing is not None
def test_none_equality():
none = None
thing = Negator()
assert none == None
assert not (none != None)
assert thing == None # dependes on __eq__
assert thing != None
[python] Improve tests relevant to None comparison
|
import types
def test_singleton(py2):
if py2:
assert isinstance(None, types.NoneType)
else:
# https://stackoverflow.com/questions/21706609
assert type(None)() is None
class Negator(object):
def __eq__(self, other):
return not other # doesn't make sense
def __ne__(self, other): # requried for py2
return not self.__eq__(other)
def test_comparison__use_identity():
none = None
thing = Negator()
assert none is None # singleton
assert thing is not None
def test_comparison__donot_use_equality():
none = None
thing = Negator()
assert none == None
assert not (none != None)
# weird? the result dependes on thing.__eq__()
assert thing == None
assert not (thing != None)
|
<commit_before>import types
def test_none_singleton(py2):
if py2:
assert isinstance(None, types.NoneType)
else:
# https://stackoverflow.com/questions/21706609
assert type(None)() is None
class Negator(object):
def __eq__(self, other):
return not other # doesn't make sense
def test_none_identity():
none = None
thing = Negator()
assert none is None # singleton
assert thing is not None
def test_none_equality():
none = None
thing = Negator()
assert none == None
assert not (none != None)
assert thing == None # dependes on __eq__
assert thing != None
<commit_msg>[python] Improve tests relevant to None comparison<commit_after>
|
import types
def test_singleton(py2):
if py2:
assert isinstance(None, types.NoneType)
else:
# https://stackoverflow.com/questions/21706609
assert type(None)() is None
class Negator(object):
def __eq__(self, other):
return not other # doesn't make sense
def __ne__(self, other): # requried for py2
return not self.__eq__(other)
def test_comparison__use_identity():
none = None
thing = Negator()
assert none is None # singleton
assert thing is not None
def test_comparison__donot_use_equality():
none = None
thing = Negator()
assert none == None
assert not (none != None)
# weird? the result dependes on thing.__eq__()
assert thing == None
assert not (thing != None)
|
import types
def test_none_singleton(py2):
if py2:
assert isinstance(None, types.NoneType)
else:
# https://stackoverflow.com/questions/21706609
assert type(None)() is None
class Negator(object):
def __eq__(self, other):
return not other # doesn't make sense
def test_none_identity():
none = None
thing = Negator()
assert none is None # singleton
assert thing is not None
def test_none_equality():
none = None
thing = Negator()
assert none == None
assert not (none != None)
assert thing == None # dependes on __eq__
assert thing != None
[python] Improve tests relevant to None comparisonimport types
def test_singleton(py2):
if py2:
assert isinstance(None, types.NoneType)
else:
# https://stackoverflow.com/questions/21706609
assert type(None)() is None
class Negator(object):
def __eq__(self, other):
return not other # doesn't make sense
def __ne__(self, other): # requried for py2
return not self.__eq__(other)
def test_comparison__use_identity():
none = None
thing = Negator()
assert none is None # singleton
assert thing is not None
def test_comparison__donot_use_equality():
none = None
thing = Negator()
assert none == None
assert not (none != None)
# weird? the result dependes on thing.__eq__()
assert thing == None
assert not (thing != None)
|
<commit_before>import types
def test_none_singleton(py2):
if py2:
assert isinstance(None, types.NoneType)
else:
# https://stackoverflow.com/questions/21706609
assert type(None)() is None
class Negator(object):
def __eq__(self, other):
return not other # doesn't make sense
def test_none_identity():
none = None
thing = Negator()
assert none is None # singleton
assert thing is not None
def test_none_equality():
none = None
thing = Negator()
assert none == None
assert not (none != None)
assert thing == None # dependes on __eq__
assert thing != None
<commit_msg>[python] Improve tests relevant to None comparison<commit_after>import types
def test_singleton(py2):
if py2:
assert isinstance(None, types.NoneType)
else:
# https://stackoverflow.com/questions/21706609
assert type(None)() is None
class Negator(object):
def __eq__(self, other):
return not other # doesn't make sense
def __ne__(self, other): # requried for py2
return not self.__eq__(other)
def test_comparison__use_identity():
none = None
thing = Negator()
assert none is None # singleton
assert thing is not None
def test_comparison__donot_use_equality():
none = None
thing = Negator()
assert none == None
assert not (none != None)
# weird? the result dependes on thing.__eq__()
assert thing == None
assert not (thing != None)
|
b6b627cb4c5d6b7dc1636794de870a2bf6da262b
|
cookiecutter/replay.py
|
cookiecutter/replay.py
|
# -*- coding: utf-8 -*-
"""
cookiecutter.replay
-------------------
"""
from __future__ import unicode_literals
from .compat import is_string
def dump(template_name, context):
if not is_string(template_name):
raise TypeError('Template name is required to be of type str')
|
# -*- coding: utf-8 -*-
"""
cookiecutter.replay
-------------------
"""
from __future__ import unicode_literals
from .compat import is_string
def dump(template_name, context):
if not is_string(template_name):
raise TypeError('Template name is required to be of type str')
if not isinstance(context, dict):
raise TypeError('Context is required to be of type dict')
|
Raise a TypeError if context is not a dict
|
Raise a TypeError if context is not a dict
|
Python
|
bsd-3-clause
|
pjbull/cookiecutter,hackebrot/cookiecutter,cguardia/cookiecutter,luzfcb/cookiecutter,stevepiercy/cookiecutter,agconti/cookiecutter,michaeljoseph/cookiecutter,venumech/cookiecutter,christabor/cookiecutter,michaeljoseph/cookiecutter,terryjbates/cookiecutter,takeflight/cookiecutter,dajose/cookiecutter,pjbull/cookiecutter,willingc/cookiecutter,audreyr/cookiecutter,ramiroluz/cookiecutter,benthomasson/cookiecutter,moi65/cookiecutter,benthomasson/cookiecutter,audreyr/cookiecutter,takeflight/cookiecutter,willingc/cookiecutter,venumech/cookiecutter,agconti/cookiecutter,stevepiercy/cookiecutter,terryjbates/cookiecutter,cguardia/cookiecutter,dajose/cookiecutter,ramiroluz/cookiecutter,luzfcb/cookiecutter,hackebrot/cookiecutter,Springerle/cookiecutter,Springerle/cookiecutter,christabor/cookiecutter,moi65/cookiecutter
|
# -*- coding: utf-8 -*-
"""
cookiecutter.replay
-------------------
"""
from __future__ import unicode_literals
from .compat import is_string
def dump(template_name, context):
if not is_string(template_name):
raise TypeError('Template name is required to be of type str')
Raise a TypeError if context is not a dict
|
# -*- coding: utf-8 -*-
"""
cookiecutter.replay
-------------------
"""
from __future__ import unicode_literals
from .compat import is_string
def dump(template_name, context):
if not is_string(template_name):
raise TypeError('Template name is required to be of type str')
if not isinstance(context, dict):
raise TypeError('Context is required to be of type dict')
|
<commit_before># -*- coding: utf-8 -*-
"""
cookiecutter.replay
-------------------
"""
from __future__ import unicode_literals
from .compat import is_string
def dump(template_name, context):
if not is_string(template_name):
raise TypeError('Template name is required to be of type str')
<commit_msg>Raise a TypeError if context is not a dict<commit_after>
|
# -*- coding: utf-8 -*-
"""
cookiecutter.replay
-------------------
"""
from __future__ import unicode_literals
from .compat import is_string
def dump(template_name, context):
if not is_string(template_name):
raise TypeError('Template name is required to be of type str')
if not isinstance(context, dict):
raise TypeError('Context is required to be of type dict')
|
# -*- coding: utf-8 -*-
"""
cookiecutter.replay
-------------------
"""
from __future__ import unicode_literals
from .compat import is_string
def dump(template_name, context):
if not is_string(template_name):
raise TypeError('Template name is required to be of type str')
Raise a TypeError if context is not a dict# -*- coding: utf-8 -*-
"""
cookiecutter.replay
-------------------
"""
from __future__ import unicode_literals
from .compat import is_string
def dump(template_name, context):
if not is_string(template_name):
raise TypeError('Template name is required to be of type str')
if not isinstance(context, dict):
raise TypeError('Context is required to be of type dict')
|
<commit_before># -*- coding: utf-8 -*-
"""
cookiecutter.replay
-------------------
"""
from __future__ import unicode_literals
from .compat import is_string
def dump(template_name, context):
if not is_string(template_name):
raise TypeError('Template name is required to be of type str')
<commit_msg>Raise a TypeError if context is not a dict<commit_after># -*- coding: utf-8 -*-
"""
cookiecutter.replay
-------------------
"""
from __future__ import unicode_literals
from .compat import is_string
def dump(template_name, context):
if not is_string(template_name):
raise TypeError('Template name is required to be of type str')
if not isinstance(context, dict):
raise TypeError('Context is required to be of type dict')
|
40afa196ec94bbe7a2600fc18e612cf5ff267dc0
|
scrapi/harvesters/shareok.py
|
scrapi/harvesters/shareok.py
|
"""
Harvester for the SHAREOK Repository Repository for the SHARE project
Example API call: https://shareok.org/oai/request?verb=ListRecords&metadataPrefix=oai_dc
"""
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class ShareOKHarvester(OAIHarvester):
short_name = 'shareok'
long_name = 'SHAREOK Repository'
url = 'https://shareok.org'
timezone_granularity = True
base_url = 'https://shareok.org/oai/request'
# TODO - add date back in once we fix elasticsearch mapping
property_list = [
'type', 'source', 'format',
'description', 'setSpec'
]
|
"""
Harvester for the SHAREOK Repository Repository for the SHARE project
Example API call: https://shareok.org/oai/request?verb=ListRecords&metadataPrefix=oai_dc
"""
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class ShareOKHarvester(OAIHarvester):
short_name = 'shareok'
long_name = 'SHAREOK Repository'
url = 'https://shareok.org'
timezone_granularity = True
base_url = 'https://shareok.org/oai/request'
# TODO - add date back in once we fix elasticsearch mapping
property_list = [
'type', 'source', 'format',
'description', 'setSpec'
]
approved_sets = [
'com_11244_14447',
'com_11244_1',
'col_11244_14248',
'com_11244_6231',
'col_11244_7929',
'col_11244_7920',
'col_11244_10476',
'com_11244_10465',
'com_11244_10460',
'col_11244_10466',
'col_11244_10464',
'col_11244_10462'
]
|
Add approves sets to SHAREOK harvester
|
Add approves sets to SHAREOK harvester
|
Python
|
apache-2.0
|
erinspace/scrapi,fabianvf/scrapi,felliott/scrapi,fabianvf/scrapi,mehanig/scrapi,jeffreyliu3230/scrapi,felliott/scrapi,CenterForOpenScience/scrapi,erinspace/scrapi,ostwald/scrapi,mehanig/scrapi,icereval/scrapi,alexgarciac/scrapi,CenterForOpenScience/scrapi
|
"""
Harvester for the SHAREOK Repository Repository for the SHARE project
Example API call: https://shareok.org/oai/request?verb=ListRecords&metadataPrefix=oai_dc
"""
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class ShareOKHarvester(OAIHarvester):
short_name = 'shareok'
long_name = 'SHAREOK Repository'
url = 'https://shareok.org'
timezone_granularity = True
base_url = 'https://shareok.org/oai/request'
# TODO - add date back in once we fix elasticsearch mapping
property_list = [
'type', 'source', 'format',
'description', 'setSpec'
]
Add approves sets to SHAREOK harvester
|
"""
Harvester for the SHAREOK Repository Repository for the SHARE project
Example API call: https://shareok.org/oai/request?verb=ListRecords&metadataPrefix=oai_dc
"""
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class ShareOKHarvester(OAIHarvester):
short_name = 'shareok'
long_name = 'SHAREOK Repository'
url = 'https://shareok.org'
timezone_granularity = True
base_url = 'https://shareok.org/oai/request'
# TODO - add date back in once we fix elasticsearch mapping
property_list = [
'type', 'source', 'format',
'description', 'setSpec'
]
approved_sets = [
'com_11244_14447',
'com_11244_1',
'col_11244_14248',
'com_11244_6231',
'col_11244_7929',
'col_11244_7920',
'col_11244_10476',
'com_11244_10465',
'com_11244_10460',
'col_11244_10466',
'col_11244_10464',
'col_11244_10462'
]
|
<commit_before>"""
Harvester for the SHAREOK Repository Repository for the SHARE project
Example API call: https://shareok.org/oai/request?verb=ListRecords&metadataPrefix=oai_dc
"""
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class ShareOKHarvester(OAIHarvester):
short_name = 'shareok'
long_name = 'SHAREOK Repository'
url = 'https://shareok.org'
timezone_granularity = True
base_url = 'https://shareok.org/oai/request'
# TODO - add date back in once we fix elasticsearch mapping
property_list = [
'type', 'source', 'format',
'description', 'setSpec'
]
<commit_msg>Add approves sets to SHAREOK harvester<commit_after>
|
"""
Harvester for the SHAREOK Repository Repository for the SHARE project
Example API call: https://shareok.org/oai/request?verb=ListRecords&metadataPrefix=oai_dc
"""
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class ShareOKHarvester(OAIHarvester):
short_name = 'shareok'
long_name = 'SHAREOK Repository'
url = 'https://shareok.org'
timezone_granularity = True
base_url = 'https://shareok.org/oai/request'
# TODO - add date back in once we fix elasticsearch mapping
property_list = [
'type', 'source', 'format',
'description', 'setSpec'
]
approved_sets = [
'com_11244_14447',
'com_11244_1',
'col_11244_14248',
'com_11244_6231',
'col_11244_7929',
'col_11244_7920',
'col_11244_10476',
'com_11244_10465',
'com_11244_10460',
'col_11244_10466',
'col_11244_10464',
'col_11244_10462'
]
|
"""
Harvester for the SHAREOK Repository Repository for the SHARE project
Example API call: https://shareok.org/oai/request?verb=ListRecords&metadataPrefix=oai_dc
"""
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class ShareOKHarvester(OAIHarvester):
short_name = 'shareok'
long_name = 'SHAREOK Repository'
url = 'https://shareok.org'
timezone_granularity = True
base_url = 'https://shareok.org/oai/request'
# TODO - add date back in once we fix elasticsearch mapping
property_list = [
'type', 'source', 'format',
'description', 'setSpec'
]
Add approves sets to SHAREOK harvester"""
Harvester for the SHAREOK Repository Repository for the SHARE project
Example API call: https://shareok.org/oai/request?verb=ListRecords&metadataPrefix=oai_dc
"""
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class ShareOKHarvester(OAIHarvester):
short_name = 'shareok'
long_name = 'SHAREOK Repository'
url = 'https://shareok.org'
timezone_granularity = True
base_url = 'https://shareok.org/oai/request'
# TODO - add date back in once we fix elasticsearch mapping
property_list = [
'type', 'source', 'format',
'description', 'setSpec'
]
approved_sets = [
'com_11244_14447',
'com_11244_1',
'col_11244_14248',
'com_11244_6231',
'col_11244_7929',
'col_11244_7920',
'col_11244_10476',
'com_11244_10465',
'com_11244_10460',
'col_11244_10466',
'col_11244_10464',
'col_11244_10462'
]
|
<commit_before>"""
Harvester for the SHAREOK Repository Repository for the SHARE project
Example API call: https://shareok.org/oai/request?verb=ListRecords&metadataPrefix=oai_dc
"""
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class ShareOKHarvester(OAIHarvester):
short_name = 'shareok'
long_name = 'SHAREOK Repository'
url = 'https://shareok.org'
timezone_granularity = True
base_url = 'https://shareok.org/oai/request'
# TODO - add date back in once we fix elasticsearch mapping
property_list = [
'type', 'source', 'format',
'description', 'setSpec'
]
<commit_msg>Add approves sets to SHAREOK harvester<commit_after>"""
Harvester for the SHAREOK Repository Repository for the SHARE project
Example API call: https://shareok.org/oai/request?verb=ListRecords&metadataPrefix=oai_dc
"""
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class ShareOKHarvester(OAIHarvester):
short_name = 'shareok'
long_name = 'SHAREOK Repository'
url = 'https://shareok.org'
timezone_granularity = True
base_url = 'https://shareok.org/oai/request'
# TODO - add date back in once we fix elasticsearch mapping
property_list = [
'type', 'source', 'format',
'description', 'setSpec'
]
approved_sets = [
'com_11244_14447',
'com_11244_1',
'col_11244_14248',
'com_11244_6231',
'col_11244_7929',
'col_11244_7920',
'col_11244_10476',
'com_11244_10465',
'com_11244_10460',
'col_11244_10466',
'col_11244_10464',
'col_11244_10462'
]
|
0c17311f7fd511f5dae8f8e4acc2dce1a2de3cf5
|
main.py
|
main.py
|
import numpy as np
import matplotlib.pyplot as plt
# generate sample data
x_data = np.linspace(-5, 5, 20)
y_data = np.random.normal(0.0, 1.0, x_data.size)
plt.plot(x_data, y_data, 'o')
plt.show()
|
import math
import numpy as np
import matplotlib.pyplot as plt
# generate sample data
x_data = np.linspace(-math.pi, math.pi, 30)
y_data = np.sin(x_data) + np.random.normal(0.0, 0.1, x_data.size)
plt.plot(x_data, y_data, 'o')
plt.show()
|
Change to sin() function with noise
|
Change to sin() function with noise
|
Python
|
mit
|
MorganR/basic-gaussian-process
|
import numpy as np
import matplotlib.pyplot as plt
# generate sample data
x_data = np.linspace(-5, 5, 20)
y_data = np.random.normal(0.0, 1.0, x_data.size)
plt.plot(x_data, y_data, 'o')
plt.show()
Change to sin() function with noise
|
import math
import numpy as np
import matplotlib.pyplot as plt
# generate sample data
x_data = np.linspace(-math.pi, math.pi, 30)
y_data = np.sin(x_data) + np.random.normal(0.0, 0.1, x_data.size)
plt.plot(x_data, y_data, 'o')
plt.show()
|
<commit_before>import numpy as np
import matplotlib.pyplot as plt
# generate sample data
x_data = np.linspace(-5, 5, 20)
y_data = np.random.normal(0.0, 1.0, x_data.size)
plt.plot(x_data, y_data, 'o')
plt.show()
<commit_msg>Change to sin() function with noise<commit_after>
|
import math
import numpy as np
import matplotlib.pyplot as plt
# generate sample data
x_data = np.linspace(-math.pi, math.pi, 30)
y_data = np.sin(x_data) + np.random.normal(0.0, 0.1, x_data.size)
plt.plot(x_data, y_data, 'o')
plt.show()
|
import numpy as np
import matplotlib.pyplot as plt
# generate sample data
x_data = np.linspace(-5, 5, 20)
y_data = np.random.normal(0.0, 1.0, x_data.size)
plt.plot(x_data, y_data, 'o')
plt.show()
Change to sin() function with noiseimport math
import numpy as np
import matplotlib.pyplot as plt
# generate sample data
x_data = np.linspace(-math.pi, math.pi, 30)
y_data = np.sin(x_data) + np.random.normal(0.0, 0.1, x_data.size)
plt.plot(x_data, y_data, 'o')
plt.show()
|
<commit_before>import numpy as np
import matplotlib.pyplot as plt
# generate sample data
x_data = np.linspace(-5, 5, 20)
y_data = np.random.normal(0.0, 1.0, x_data.size)
plt.plot(x_data, y_data, 'o')
plt.show()
<commit_msg>Change to sin() function with noise<commit_after>import math
import numpy as np
import matplotlib.pyplot as plt
# generate sample data
x_data = np.linspace(-math.pi, math.pi, 30)
y_data = np.sin(x_data) + np.random.normal(0.0, 0.1, x_data.size)
plt.plot(x_data, y_data, 'o')
plt.show()
|
03b0f1fafd813afd928ce1e665373837105369f3
|
app.py
|
app.py
|
from flask import Flask, request, jsonify, send_from_directory
from werkzeug import secure_filename
import os
import uuid
app = Flask(__name__)
UPLOAD_FOLDER = "uploads/"
@app.route("/")
def index():
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
projects = os.listdir(UPLOAD_FOLDER)
return projects.__repr__() + "\n"
@app.route("/sounds/<path:path>")
def serve_static(path):
return send_from_directory(UPLOAD_FOLDER, path)
@app.route("/upload", methods=["POST"])
def upload_file():
file = request.files["sound"]
if file:
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
filename = secure_filename(file.filename)
file.save(os.path.join(UPLOAD_FOLDER, uuid.uuid4().__str__() + ".wav"))
return filename
if __name__ == "__main__":
app.run(host = "0.0.0.0", debug=True)
|
from flask import Flask, request, jsonify, send_from_directory
import os
import uuid
app = Flask(__name__)
UPLOAD_FOLDER = "uploads/"
@app.route("/")
def index():
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
projects = os.listdir(UPLOAD_FOLDER)
return projects.__repr__() + "\n"
@app.route("/sounds/<path:path>")
def serve_static(path):
return send_from_directory(UPLOAD_FOLDER, path)
@app.route("/upload", methods=["POST"])
def upload_file():
file = request.files["sound"]
if file:
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
filename = uuid.uuid4().__str__() + ".wav"
file.save(os.path.join(UPLOAD_FOLDER, filename))
return filename
if __name__ == "__main__":
app.run(host = "0.0.0.0", debug=True)
|
Return new file name when uploading
|
Return new file name when uploading
|
Python
|
mit
|
spb201/turbulent-octo-rutabaga-api,spb201/turbulent-octo-rutabaga-api,spb201/turbulent-octo-rutabaga-api
|
from flask import Flask, request, jsonify, send_from_directory
from werkzeug import secure_filename
import os
import uuid
app = Flask(__name__)
UPLOAD_FOLDER = "uploads/"
@app.route("/")
def index():
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
projects = os.listdir(UPLOAD_FOLDER)
return projects.__repr__() + "\n"
@app.route("/sounds/<path:path>")
def serve_static(path):
return send_from_directory(UPLOAD_FOLDER, path)
@app.route("/upload", methods=["POST"])
def upload_file():
file = request.files["sound"]
if file:
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
filename = secure_filename(file.filename)
file.save(os.path.join(UPLOAD_FOLDER, uuid.uuid4().__str__() + ".wav"))
return filename
if __name__ == "__main__":
app.run(host = "0.0.0.0", debug=True)
Return new file name when uploading
|
from flask import Flask, request, jsonify, send_from_directory
import os
import uuid
app = Flask(__name__)
UPLOAD_FOLDER = "uploads/"
@app.route("/")
def index():
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
projects = os.listdir(UPLOAD_FOLDER)
return projects.__repr__() + "\n"
@app.route("/sounds/<path:path>")
def serve_static(path):
return send_from_directory(UPLOAD_FOLDER, path)
@app.route("/upload", methods=["POST"])
def upload_file():
file = request.files["sound"]
if file:
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
filename = uuid.uuid4().__str__() + ".wav"
file.save(os.path.join(UPLOAD_FOLDER, filename))
return filename
if __name__ == "__main__":
app.run(host = "0.0.0.0", debug=True)
|
<commit_before>from flask import Flask, request, jsonify, send_from_directory
from werkzeug import secure_filename
import os
import uuid
app = Flask(__name__)
UPLOAD_FOLDER = "uploads/"
@app.route("/")
def index():
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
projects = os.listdir(UPLOAD_FOLDER)
return projects.__repr__() + "\n"
@app.route("/sounds/<path:path>")
def serve_static(path):
return send_from_directory(UPLOAD_FOLDER, path)
@app.route("/upload", methods=["POST"])
def upload_file():
file = request.files["sound"]
if file:
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
filename = secure_filename(file.filename)
file.save(os.path.join(UPLOAD_FOLDER, uuid.uuid4().__str__() + ".wav"))
return filename
if __name__ == "__main__":
app.run(host = "0.0.0.0", debug=True)
<commit_msg>Return new file name when uploading<commit_after>
|
from flask import Flask, request, jsonify, send_from_directory
import os
import uuid
app = Flask(__name__)
UPLOAD_FOLDER = "uploads/"
@app.route("/")
def index():
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
projects = os.listdir(UPLOAD_FOLDER)
return projects.__repr__() + "\n"
@app.route("/sounds/<path:path>")
def serve_static(path):
return send_from_directory(UPLOAD_FOLDER, path)
@app.route("/upload", methods=["POST"])
def upload_file():
file = request.files["sound"]
if file:
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
filename = uuid.uuid4().__str__() + ".wav"
file.save(os.path.join(UPLOAD_FOLDER, filename))
return filename
if __name__ == "__main__":
app.run(host = "0.0.0.0", debug=True)
|
from flask import Flask, request, jsonify, send_from_directory
from werkzeug import secure_filename
import os
import uuid
app = Flask(__name__)
UPLOAD_FOLDER = "uploads/"
@app.route("/")
def index():
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
projects = os.listdir(UPLOAD_FOLDER)
return projects.__repr__() + "\n"
@app.route("/sounds/<path:path>")
def serve_static(path):
return send_from_directory(UPLOAD_FOLDER, path)
@app.route("/upload", methods=["POST"])
def upload_file():
file = request.files["sound"]
if file:
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
filename = secure_filename(file.filename)
file.save(os.path.join(UPLOAD_FOLDER, uuid.uuid4().__str__() + ".wav"))
return filename
if __name__ == "__main__":
app.run(host = "0.0.0.0", debug=True)
Return new file name when uploadingfrom flask import Flask, request, jsonify, send_from_directory
import os
import uuid
app = Flask(__name__)
UPLOAD_FOLDER = "uploads/"
@app.route("/")
def index():
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
projects = os.listdir(UPLOAD_FOLDER)
return projects.__repr__() + "\n"
@app.route("/sounds/<path:path>")
def serve_static(path):
return send_from_directory(UPLOAD_FOLDER, path)
@app.route("/upload", methods=["POST"])
def upload_file():
file = request.files["sound"]
if file:
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
filename = uuid.uuid4().__str__() + ".wav"
file.save(os.path.join(UPLOAD_FOLDER, filename))
return filename
if __name__ == "__main__":
app.run(host = "0.0.0.0", debug=True)
|
<commit_before>from flask import Flask, request, jsonify, send_from_directory
from werkzeug import secure_filename
import os
import uuid
app = Flask(__name__)
UPLOAD_FOLDER = "uploads/"
@app.route("/")
def index():
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
projects = os.listdir(UPLOAD_FOLDER)
return projects.__repr__() + "\n"
@app.route("/sounds/<path:path>")
def serve_static(path):
return send_from_directory(UPLOAD_FOLDER, path)
@app.route("/upload", methods=["POST"])
def upload_file():
file = request.files["sound"]
if file:
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
filename = secure_filename(file.filename)
file.save(os.path.join(UPLOAD_FOLDER, uuid.uuid4().__str__() + ".wav"))
return filename
if __name__ == "__main__":
app.run(host = "0.0.0.0", debug=True)
<commit_msg>Return new file name when uploading<commit_after>from flask import Flask, request, jsonify, send_from_directory
import os
import uuid
app = Flask(__name__)
UPLOAD_FOLDER = "uploads/"
@app.route("/")
def index():
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
projects = os.listdir(UPLOAD_FOLDER)
return projects.__repr__() + "\n"
@app.route("/sounds/<path:path>")
def serve_static(path):
return send_from_directory(UPLOAD_FOLDER, path)
@app.route("/upload", methods=["POST"])
def upload_file():
file = request.files["sound"]
if file:
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
filename = uuid.uuid4().__str__() + ".wav"
file.save(os.path.join(UPLOAD_FOLDER, filename))
return filename
if __name__ == "__main__":
app.run(host = "0.0.0.0", debug=True)
|
41477996e16e2d8b917bfc9f4428e64f1fa560dc
|
app.py
|
app.py
|
import json
from flask import Flask, request, abort
from flask.ext.restful import Resource, Api
from mongoengine import connect
from models import Tweet, Topic
from models.routing import register_api_model, ModelResource
from collecting.routes import CollectorListResource, CollectorResource
from helpers import get_request_json
import config
app = Flask(__name__, static_url_path='')
api = Api(app, prefix='/api')
connect(config.db_name, host=config.db_host, port=config.db_port,
username=config.db_user, password=config.db_pass)
@app.route('/')
def index():
return app.send_static_file('index.html')
# register api models
register_api_model(api, Topic)
register_api_model(api, Tweet)
# collectors resource
api.add_resource(CollectorListResource, '/collectors')
api.add_resource(CollectorResource, '/collectors/<topic_pk>')
if __name__ == '__main__':
import sys
app.run(debug='--debug' in sys.argv)
|
from flask import Flask
from flask.ext.restful import Api
from mongoengine import connect
from models import Tweet, Topic
from models.routing import register_api_model
from collecting.routes import CollectorListResource, CollectorResource
import config
app = Flask(__name__, static_url_path='')
api = Api(app, prefix='/api')
connect(config.db_name, host=config.db_host, port=config.db_port,
username=config.db_user, password=config.db_pass)
@app.route('/')
def index():
return app.send_static_file('index.html')
# register api models
register_api_model(api, Topic)
register_api_model(api, Tweet)
# collectors resource
api.add_resource(CollectorListResource, '/collectors')
api.add_resource(CollectorResource, '/collectors/<topic_pk>')
if __name__ == '__main__':
import sys
app.run(debug='--debug' in sys.argv)
|
Remove unused imports (and try out tmux)
|
Remove unused imports (and try out tmux)
|
Python
|
apache-2.0
|
jmcomets/twitto-feels,jmcomets/twitto-feels
|
import json
from flask import Flask, request, abort
from flask.ext.restful import Resource, Api
from mongoengine import connect
from models import Tweet, Topic
from models.routing import register_api_model, ModelResource
from collecting.routes import CollectorListResource, CollectorResource
from helpers import get_request_json
import config
app = Flask(__name__, static_url_path='')
api = Api(app, prefix='/api')
connect(config.db_name, host=config.db_host, port=config.db_port,
username=config.db_user, password=config.db_pass)
@app.route('/')
def index():
return app.send_static_file('index.html')
# register api models
register_api_model(api, Topic)
register_api_model(api, Tweet)
# collectors resource
api.add_resource(CollectorListResource, '/collectors')
api.add_resource(CollectorResource, '/collectors/<topic_pk>')
if __name__ == '__main__':
import sys
app.run(debug='--debug' in sys.argv)
Remove unused imports (and try out tmux)
|
from flask import Flask
from flask.ext.restful import Api
from mongoengine import connect
from models import Tweet, Topic
from models.routing import register_api_model
from collecting.routes import CollectorListResource, CollectorResource
import config
app = Flask(__name__, static_url_path='')
api = Api(app, prefix='/api')
connect(config.db_name, host=config.db_host, port=config.db_port,
username=config.db_user, password=config.db_pass)
@app.route('/')
def index():
return app.send_static_file('index.html')
# register api models
register_api_model(api, Topic)
register_api_model(api, Tweet)
# collectors resource
api.add_resource(CollectorListResource, '/collectors')
api.add_resource(CollectorResource, '/collectors/<topic_pk>')
if __name__ == '__main__':
import sys
app.run(debug='--debug' in sys.argv)
|
<commit_before>import json
from flask import Flask, request, abort
from flask.ext.restful import Resource, Api
from mongoengine import connect
from models import Tweet, Topic
from models.routing import register_api_model, ModelResource
from collecting.routes import CollectorListResource, CollectorResource
from helpers import get_request_json
import config
app = Flask(__name__, static_url_path='')
api = Api(app, prefix='/api')
connect(config.db_name, host=config.db_host, port=config.db_port,
username=config.db_user, password=config.db_pass)
@app.route('/')
def index():
return app.send_static_file('index.html')
# register api models
register_api_model(api, Topic)
register_api_model(api, Tweet)
# collectors resource
api.add_resource(CollectorListResource, '/collectors')
api.add_resource(CollectorResource, '/collectors/<topic_pk>')
if __name__ == '__main__':
import sys
app.run(debug='--debug' in sys.argv)
<commit_msg>Remove unused imports (and try out tmux)<commit_after>
|
from flask import Flask
from flask.ext.restful import Api
from mongoengine import connect
from models import Tweet, Topic
from models.routing import register_api_model
from collecting.routes import CollectorListResource, CollectorResource
import config
app = Flask(__name__, static_url_path='')
api = Api(app, prefix='/api')
connect(config.db_name, host=config.db_host, port=config.db_port,
username=config.db_user, password=config.db_pass)
@app.route('/')
def index():
return app.send_static_file('index.html')
# register api models
register_api_model(api, Topic)
register_api_model(api, Tweet)
# collectors resource
api.add_resource(CollectorListResource, '/collectors')
api.add_resource(CollectorResource, '/collectors/<topic_pk>')
if __name__ == '__main__':
import sys
app.run(debug='--debug' in sys.argv)
|
import json
from flask import Flask, request, abort
from flask.ext.restful import Resource, Api
from mongoengine import connect
from models import Tweet, Topic
from models.routing import register_api_model, ModelResource
from collecting.routes import CollectorListResource, CollectorResource
from helpers import get_request_json
import config
app = Flask(__name__, static_url_path='')
api = Api(app, prefix='/api')
connect(config.db_name, host=config.db_host, port=config.db_port,
username=config.db_user, password=config.db_pass)
@app.route('/')
def index():
return app.send_static_file('index.html')
# register api models
register_api_model(api, Topic)
register_api_model(api, Tweet)
# collectors resource
api.add_resource(CollectorListResource, '/collectors')
api.add_resource(CollectorResource, '/collectors/<topic_pk>')
if __name__ == '__main__':
import sys
app.run(debug='--debug' in sys.argv)
Remove unused imports (and try out tmux)from flask import Flask
from flask.ext.restful import Api
from mongoengine import connect
from models import Tweet, Topic
from models.routing import register_api_model
from collecting.routes import CollectorListResource, CollectorResource
import config
app = Flask(__name__, static_url_path='')
api = Api(app, prefix='/api')
connect(config.db_name, host=config.db_host, port=config.db_port,
username=config.db_user, password=config.db_pass)
@app.route('/')
def index():
return app.send_static_file('index.html')
# register api models
register_api_model(api, Topic)
register_api_model(api, Tweet)
# collectors resource
api.add_resource(CollectorListResource, '/collectors')
api.add_resource(CollectorResource, '/collectors/<topic_pk>')
if __name__ == '__main__':
import sys
app.run(debug='--debug' in sys.argv)
|
<commit_before>import json
from flask import Flask, request, abort
from flask.ext.restful import Resource, Api
from mongoengine import connect
from models import Tweet, Topic
from models.routing import register_api_model, ModelResource
from collecting.routes import CollectorListResource, CollectorResource
from helpers import get_request_json
import config
app = Flask(__name__, static_url_path='')
api = Api(app, prefix='/api')
connect(config.db_name, host=config.db_host, port=config.db_port,
username=config.db_user, password=config.db_pass)
@app.route('/')
def index():
return app.send_static_file('index.html')
# register api models
register_api_model(api, Topic)
register_api_model(api, Tweet)
# collectors resource
api.add_resource(CollectorListResource, '/collectors')
api.add_resource(CollectorResource, '/collectors/<topic_pk>')
if __name__ == '__main__':
import sys
app.run(debug='--debug' in sys.argv)
<commit_msg>Remove unused imports (and try out tmux)<commit_after>from flask import Flask
from flask.ext.restful import Api
from mongoengine import connect
from models import Tweet, Topic
from models.routing import register_api_model
from collecting.routes import CollectorListResource, CollectorResource
import config
app = Flask(__name__, static_url_path='')
api = Api(app, prefix='/api')
connect(config.db_name, host=config.db_host, port=config.db_port,
username=config.db_user, password=config.db_pass)
@app.route('/')
def index():
return app.send_static_file('index.html')
# register api models
register_api_model(api, Topic)
register_api_model(api, Tweet)
# collectors resource
api.add_resource(CollectorListResource, '/collectors')
api.add_resource(CollectorResource, '/collectors/<topic_pk>')
if __name__ == '__main__':
import sys
app.run(debug='--debug' in sys.argv)
|
65074720eee3f86f819046607e986d293c7d400f
|
api/commands.py
|
api/commands.py
|
import json
import requests
from Suchary.local_settings import GCM_API_KEY
from api.models import Device
URL = 'https://android.googleapis.com/gcm/send'
HEADER = {'Authorization': 'key=' + GCM_API_KEY, 'Content-Type': 'application/json'}
def get_reg_ids():
reg_ids = [device.registration_id for device in Device.objects.filter(active=True)]
return reg_ids
def send(data, collapse_key=None):
reg_ids = get_reg_ids()
payload = {'registration_ids': reg_ids, 'data': data}
if collapse_key is not None:
payload.update({'collapse_key': collapse_key})
r = requests.post(URL, data=json.dumps(payload), headers=HEADER)
def edit_joke(key):
data = {
'type': 'edit',
'key': key
}
send(data)
def new_jokes():
data = {
'type': 'new'
}
send(data, 'new')
def delete_joke(key):
data = {
'type': 'delete',
'key': key
}
send(data)
def send_message(title, body):
data = {
'type': 'message',
'title': title,
'text': body
}
send(data)
|
import json
import requests
from Suchary.local_settings import GCM_API_KEY
from api.models import Device
URL = 'https://android.googleapis.com/gcm/send'
HEADER = {'Authorization': 'key=' + GCM_API_KEY, 'Content-Type': 'application/json'}
def get_reg_ids(alias):
devices = Device.objects.filter(active=True)
if alias:
devices = devices.filter(alias=alias)
reg_ids = [device.registration_id for device in devices]
return reg_ids
def send(data, collapse_key=None, to=None):
reg_ids = get_reg_ids(to)
payload = {'registration_ids': reg_ids, 'data': data}
if collapse_key is not None:
payload.update({'collapse_key': collapse_key})
r = requests.post(URL, data=json.dumps(payload), headers=HEADER)
def edit_joke(key):
data = {
'type': 'edit',
'key': key
}
send(data)
def new_jokes():
data = {
'type': 'new'
}
send(data, 'new')
def delete_joke(key):
data = {
'type': 'delete',
'key': key
}
send(data)
def send_message(title, body, alias=None):
data = {
'type': 'message',
'title': title,
'text': body
}
send(data, to=alias)
|
Send message only to one device if its alias was specified.
|
Send message only to one device if its alias was specified.
|
Python
|
mit
|
jchmura/suchary-django,jchmura/suchary-django,jchmura/suchary-django
|
import json
import requests
from Suchary.local_settings import GCM_API_KEY
from api.models import Device
URL = 'https://android.googleapis.com/gcm/send'
HEADER = {'Authorization': 'key=' + GCM_API_KEY, 'Content-Type': 'application/json'}
def get_reg_ids():
reg_ids = [device.registration_id for device in Device.objects.filter(active=True)]
return reg_ids
def send(data, collapse_key=None):
reg_ids = get_reg_ids()
payload = {'registration_ids': reg_ids, 'data': data}
if collapse_key is not None:
payload.update({'collapse_key': collapse_key})
r = requests.post(URL, data=json.dumps(payload), headers=HEADER)
def edit_joke(key):
data = {
'type': 'edit',
'key': key
}
send(data)
def new_jokes():
data = {
'type': 'new'
}
send(data, 'new')
def delete_joke(key):
data = {
'type': 'delete',
'key': key
}
send(data)
def send_message(title, body):
data = {
'type': 'message',
'title': title,
'text': body
}
send(data)Send message only to one device if its alias was specified.
|
import json
import requests
from Suchary.local_settings import GCM_API_KEY
from api.models import Device
URL = 'https://android.googleapis.com/gcm/send'
HEADER = {'Authorization': 'key=' + GCM_API_KEY, 'Content-Type': 'application/json'}
def get_reg_ids(alias):
devices = Device.objects.filter(active=True)
if alias:
devices = devices.filter(alias=alias)
reg_ids = [device.registration_id for device in devices]
return reg_ids
def send(data, collapse_key=None, to=None):
reg_ids = get_reg_ids(to)
payload = {'registration_ids': reg_ids, 'data': data}
if collapse_key is not None:
payload.update({'collapse_key': collapse_key})
r = requests.post(URL, data=json.dumps(payload), headers=HEADER)
def edit_joke(key):
data = {
'type': 'edit',
'key': key
}
send(data)
def new_jokes():
data = {
'type': 'new'
}
send(data, 'new')
def delete_joke(key):
data = {
'type': 'delete',
'key': key
}
send(data)
def send_message(title, body, alias=None):
data = {
'type': 'message',
'title': title,
'text': body
}
send(data, to=alias)
|
<commit_before>import json
import requests
from Suchary.local_settings import GCM_API_KEY
from api.models import Device
URL = 'https://android.googleapis.com/gcm/send'
HEADER = {'Authorization': 'key=' + GCM_API_KEY, 'Content-Type': 'application/json'}
def get_reg_ids():
reg_ids = [device.registration_id for device in Device.objects.filter(active=True)]
return reg_ids
def send(data, collapse_key=None):
reg_ids = get_reg_ids()
payload = {'registration_ids': reg_ids, 'data': data}
if collapse_key is not None:
payload.update({'collapse_key': collapse_key})
r = requests.post(URL, data=json.dumps(payload), headers=HEADER)
def edit_joke(key):
data = {
'type': 'edit',
'key': key
}
send(data)
def new_jokes():
data = {
'type': 'new'
}
send(data, 'new')
def delete_joke(key):
data = {
'type': 'delete',
'key': key
}
send(data)
def send_message(title, body):
data = {
'type': 'message',
'title': title,
'text': body
}
send(data)<commit_msg>Send message only to one device if its alias was specified.<commit_after>
|
import json
import requests
from Suchary.local_settings import GCM_API_KEY
from api.models import Device
URL = 'https://android.googleapis.com/gcm/send'
HEADER = {'Authorization': 'key=' + GCM_API_KEY, 'Content-Type': 'application/json'}
def get_reg_ids(alias):
devices = Device.objects.filter(active=True)
if alias:
devices = devices.filter(alias=alias)
reg_ids = [device.registration_id for device in devices]
return reg_ids
def send(data, collapse_key=None, to=None):
reg_ids = get_reg_ids(to)
payload = {'registration_ids': reg_ids, 'data': data}
if collapse_key is not None:
payload.update({'collapse_key': collapse_key})
r = requests.post(URL, data=json.dumps(payload), headers=HEADER)
def edit_joke(key):
data = {
'type': 'edit',
'key': key
}
send(data)
def new_jokes():
data = {
'type': 'new'
}
send(data, 'new')
def delete_joke(key):
data = {
'type': 'delete',
'key': key
}
send(data)
def send_message(title, body, alias=None):
data = {
'type': 'message',
'title': title,
'text': body
}
send(data, to=alias)
|
import json
import requests
from Suchary.local_settings import GCM_API_KEY
from api.models import Device
URL = 'https://android.googleapis.com/gcm/send'
HEADER = {'Authorization': 'key=' + GCM_API_KEY, 'Content-Type': 'application/json'}
def get_reg_ids():
reg_ids = [device.registration_id for device in Device.objects.filter(active=True)]
return reg_ids
def send(data, collapse_key=None):
reg_ids = get_reg_ids()
payload = {'registration_ids': reg_ids, 'data': data}
if collapse_key is not None:
payload.update({'collapse_key': collapse_key})
r = requests.post(URL, data=json.dumps(payload), headers=HEADER)
def edit_joke(key):
data = {
'type': 'edit',
'key': key
}
send(data)
def new_jokes():
data = {
'type': 'new'
}
send(data, 'new')
def delete_joke(key):
data = {
'type': 'delete',
'key': key
}
send(data)
def send_message(title, body):
data = {
'type': 'message',
'title': title,
'text': body
}
send(data)Send message only to one device if its alias was specified.import json
import requests
from Suchary.local_settings import GCM_API_KEY
from api.models import Device
URL = 'https://android.googleapis.com/gcm/send'
HEADER = {'Authorization': 'key=' + GCM_API_KEY, 'Content-Type': 'application/json'}
def get_reg_ids(alias):
devices = Device.objects.filter(active=True)
if alias:
devices = devices.filter(alias=alias)
reg_ids = [device.registration_id for device in devices]
return reg_ids
def send(data, collapse_key=None, to=None):
reg_ids = get_reg_ids(to)
payload = {'registration_ids': reg_ids, 'data': data}
if collapse_key is not None:
payload.update({'collapse_key': collapse_key})
r = requests.post(URL, data=json.dumps(payload), headers=HEADER)
def edit_joke(key):
data = {
'type': 'edit',
'key': key
}
send(data)
def new_jokes():
data = {
'type': 'new'
}
send(data, 'new')
def delete_joke(key):
data = {
'type': 'delete',
'key': key
}
send(data)
def send_message(title, body, alias=None):
data = {
'type': 'message',
'title': title,
'text': body
}
send(data, to=alias)
|
<commit_before>import json
import requests
from Suchary.local_settings import GCM_API_KEY
from api.models import Device
URL = 'https://android.googleapis.com/gcm/send'
HEADER = {'Authorization': 'key=' + GCM_API_KEY, 'Content-Type': 'application/json'}
def get_reg_ids():
reg_ids = [device.registration_id for device in Device.objects.filter(active=True)]
return reg_ids
def send(data, collapse_key=None):
reg_ids = get_reg_ids()
payload = {'registration_ids': reg_ids, 'data': data}
if collapse_key is not None:
payload.update({'collapse_key': collapse_key})
r = requests.post(URL, data=json.dumps(payload), headers=HEADER)
def edit_joke(key):
data = {
'type': 'edit',
'key': key
}
send(data)
def new_jokes():
data = {
'type': 'new'
}
send(data, 'new')
def delete_joke(key):
data = {
'type': 'delete',
'key': key
}
send(data)
def send_message(title, body):
data = {
'type': 'message',
'title': title,
'text': body
}
send(data)<commit_msg>Send message only to one device if its alias was specified.<commit_after>import json
import requests
from Suchary.local_settings import GCM_API_KEY
from api.models import Device
URL = 'https://android.googleapis.com/gcm/send'
HEADER = {'Authorization': 'key=' + GCM_API_KEY, 'Content-Type': 'application/json'}
def get_reg_ids(alias):
devices = Device.objects.filter(active=True)
if alias:
devices = devices.filter(alias=alias)
reg_ids = [device.registration_id for device in devices]
return reg_ids
def send(data, collapse_key=None, to=None):
reg_ids = get_reg_ids(to)
payload = {'registration_ids': reg_ids, 'data': data}
if collapse_key is not None:
payload.update({'collapse_key': collapse_key})
r = requests.post(URL, data=json.dumps(payload), headers=HEADER)
def edit_joke(key):
data = {
'type': 'edit',
'key': key
}
send(data)
def new_jokes():
data = {
'type': 'new'
}
send(data, 'new')
def delete_joke(key):
data = {
'type': 'delete',
'key': key
}
send(data)
def send_message(title, body, alias=None):
data = {
'type': 'message',
'title': title,
'text': body
}
send(data, to=alias)
|
08adcf2402f46dfc3332146cac1705e149b18e32
|
tree/108.py
|
tree/108.py
|
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
#recursive solution
class Solution:
    def sortedArrayToBST(self, nums: List[int]) -> TreeNode:
        """Build a height-balanced BST from the sorted list *nums*.

        The middle element becomes the root; the elements on either
        side are converted recursively into the left and right
        subtrees. Returns ``None`` for an empty input.
        """
        if not nums:
            return None
        mid = len(nums) // 2
        node = TreeNode(nums[mid])
        node.left = self.sortedArrayToBST(nums[:mid])
        node.right = self.sortedArrayToBST(nums[mid + 1:])
        return node
|
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
#recursive solution
class Solution:
def sortedArrayToBST(self, nums: List[int]) -> TreeNode:
if not nums:
return None
pivot = len(nums) // 2
root = TreeNode(nums[pivot])
root.left = self.sortedArrayToBST(nums[:pivot])
root.right = self.sortedArrayToBST(nums[pivot+1:])
return root
# non-recursive solution
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    def sortedArrayToBST(self, nums: List[int]) -> TreeNode:
        """Iteratively build a height-balanced BST from sorted *nums*.

        A stack of ``[parent, left, right]`` work items replaces the
        recursion: each item describes the slice of *nums* that still
        has to be attached somewhere below ``parent``.
        """
        if not nums:
            return None
        # Root is the middle of the whole array (left-biased for even length).
        pivot = (len(nums)-1) // 2
        l = []  # work stack: [parent node, left index, right index]
        root = TreeNode(nums[pivot])
        l.append([root, 0, pivot-1])
        l.append([root, pivot+1, len(nums)-1])
        while l:
            cur, left, right = l.pop()
            if left <= right:
                pivot = left + (right - left) // 2 # midpoint of [left, right]; note this is NOT the same as (right - left) // 2 -- the offset from `left` matters
                node = TreeNode(nums[pivot])
                # Input is sorted, so values <= parent belong in its left subtree.
                if nums[pivot] <= cur.val:
                    cur.left = node
                else:
                    cur.right = node
                l.append([node, left, pivot-1])
                l.append([node, pivot+1, right])
        return root
|
Convert Sorted Array to Binary Search Tree
|
Convert Sorted Array to Binary Search Tree
|
Python
|
apache-2.0
|
MingfeiPan/leetcode,MingfeiPan/leetcode,MingfeiPan/leetcode,MingfeiPan/leetcode,MingfeiPan/leetcode
|
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
#recursive solution
class Solution:
def sortedArrayToBST(self, nums: List[int]) -> TreeNode:
if not nums:
return None
pivot = len(nums) // 2
root = TreeNode(nums[pivot])
root.left = self.sortedArrayToBST(nums[:pivot])
root.right = self.sortedArrayToBST(nums[pivot+1:])
return root
Convert Sorted Array to Binary Search Tree
|
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
#recursive solution
class Solution:
def sortedArrayToBST(self, nums: List[int]) -> TreeNode:
if not nums:
return None
pivot = len(nums) // 2
root = TreeNode(nums[pivot])
root.left = self.sortedArrayToBST(nums[:pivot])
root.right = self.sortedArrayToBST(nums[pivot+1:])
return root
#non-ercursive solution
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
def sortedArrayToBST(self, nums: List[int]) -> TreeNode:
if not nums:
return None
pivot = (len(nums)-1) // 2
l = []
root = TreeNode(nums[pivot])
l.append([root, 0, pivot-1])
l.append([root, pivot+1, len(nums)-1])
while l:
cur, left, right = l.pop()
if left <= right:
pivot = left + (right - left) // 2 #注意与(right - left) // 2的区别
node = TreeNode(nums[pivot])
if nums[pivot] <= cur.val:
cur.left = node
else:
cur.right = node
l.append([node, left, pivot-1])
l.append([node, pivot+1, right])
return root
|
<commit_before># Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
#recursive solution
class Solution:
def sortedArrayToBST(self, nums: List[int]) -> TreeNode:
if not nums:
return None
pivot = len(nums) // 2
root = TreeNode(nums[pivot])
root.left = self.sortedArrayToBST(nums[:pivot])
root.right = self.sortedArrayToBST(nums[pivot+1:])
return root
<commit_msg>Convert Sorted Array to Binary Search Tree<commit_after>
|
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
#recursive solution
class Solution:
def sortedArrayToBST(self, nums: List[int]) -> TreeNode:
if not nums:
return None
pivot = len(nums) // 2
root = TreeNode(nums[pivot])
root.left = self.sortedArrayToBST(nums[:pivot])
root.right = self.sortedArrayToBST(nums[pivot+1:])
return root
# non-recursive solution
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
def sortedArrayToBST(self, nums: List[int]) -> TreeNode:
if not nums:
return None
pivot = (len(nums)-1) // 2
l = []
root = TreeNode(nums[pivot])
l.append([root, 0, pivot-1])
l.append([root, pivot+1, len(nums)-1])
while l:
cur, left, right = l.pop()
if left <= right:
pivot = left + (right - left) // 2 #注意与(right - left) // 2的区别
node = TreeNode(nums[pivot])
if nums[pivot] <= cur.val:
cur.left = node
else:
cur.right = node
l.append([node, left, pivot-1])
l.append([node, pivot+1, right])
return root
|
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
#recursive solution
class Solution:
def sortedArrayToBST(self, nums: List[int]) -> TreeNode:
if not nums:
return None
pivot = len(nums) // 2
root = TreeNode(nums[pivot])
root.left = self.sortedArrayToBST(nums[:pivot])
root.right = self.sortedArrayToBST(nums[pivot+1:])
return root
Convert Sorted Array to Binary Search Tree# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
#recursive solution
class Solution:
def sortedArrayToBST(self, nums: List[int]) -> TreeNode:
if not nums:
return None
pivot = len(nums) // 2
root = TreeNode(nums[pivot])
root.left = self.sortedArrayToBST(nums[:pivot])
root.right = self.sortedArrayToBST(nums[pivot+1:])
return root
#non-ercursive solution
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
def sortedArrayToBST(self, nums: List[int]) -> TreeNode:
if not nums:
return None
pivot = (len(nums)-1) // 2
l = []
root = TreeNode(nums[pivot])
l.append([root, 0, pivot-1])
l.append([root, pivot+1, len(nums)-1])
while l:
cur, left, right = l.pop()
if left <= right:
pivot = left + (right - left) // 2 #注意与(right - left) // 2的区别
node = TreeNode(nums[pivot])
if nums[pivot] <= cur.val:
cur.left = node
else:
cur.right = node
l.append([node, left, pivot-1])
l.append([node, pivot+1, right])
return root
|
<commit_before># Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
#recursive solution
class Solution:
def sortedArrayToBST(self, nums: List[int]) -> TreeNode:
if not nums:
return None
pivot = len(nums) // 2
root = TreeNode(nums[pivot])
root.left = self.sortedArrayToBST(nums[:pivot])
root.right = self.sortedArrayToBST(nums[pivot+1:])
return root
<commit_msg>Convert Sorted Array to Binary Search Tree<commit_after># Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
#recursive solution
class Solution:
def sortedArrayToBST(self, nums: List[int]) -> TreeNode:
if not nums:
return None
pivot = len(nums) // 2
root = TreeNode(nums[pivot])
root.left = self.sortedArrayToBST(nums[:pivot])
root.right = self.sortedArrayToBST(nums[pivot+1:])
return root
#non-ercursive solution
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
def sortedArrayToBST(self, nums: List[int]) -> TreeNode:
if not nums:
return None
pivot = (len(nums)-1) // 2
l = []
root = TreeNode(nums[pivot])
l.append([root, 0, pivot-1])
l.append([root, pivot+1, len(nums)-1])
while l:
cur, left, right = l.pop()
if left <= right:
pivot = left + (right - left) // 2 #注意与(right - left) // 2的区别
node = TreeNode(nums[pivot])
if nums[pivot] <= cur.val:
cur.left = node
else:
cur.right = node
l.append([node, left, pivot-1])
l.append([node, pivot+1, right])
return root
|
39bc88808d9286f7d6a74120b8d8bade9888e41c
|
example_app/app.py
|
example_app/app.py
|
"""This is an example app, demonstrating usage."""
import os
from flask import Flask
from flask_jsondash.charts_builder import charts
app = Flask(__name__)
app.config['SECRET_KEY'] = 'NOTSECURELOL'
app.config.update(
JSONDASH_FILTERUSERS=False,
JSONDASH_GLOBALDASH=False,
JSONDASH_GLOBAL_USER='global',
)
app.debug = True
app.register_blueprint(charts)
def _can_edit_global():
return True
def _can_delete():
return True
def _can_clone():
return True
def _get_username():
return 'anonymous'
# Config examples.
app.config['JSONDASH'] = dict(
metadata=dict(
created_by=_get_username,
username=_get_username,
),
static=dict(
js_path='js/vendor/',
css_path='css/vendor/',
),
auth=dict(
edit_global=_can_edit_global,
clone=_can_clone,
delete=_can_delete,
)
)
@app.route('/', methods=['GET'])
def index():
"""Sample index."""
return '<a href="/charts">Visit the charts blueprint.</a>'
if __name__ == '__main__':
PORT = int(os.getenv('PORT', 5002))
app.run(debug=True, port=PORT)
|
"""This is an example app, demonstrating usage."""
import os
from flask import Flask
from flask_jsondash.charts_builder import charts
app = Flask(__name__)
app.config['SECRET_KEY'] = 'NOTSECURELOL'
app.config.update(
JSONDASH_FILTERUSERS=False,
JSONDASH_GLOBALDASH=True,
JSONDASH_GLOBAL_USER='global',
)
app.debug = True
app.register_blueprint(charts)
def _can_edit_global():
return True
def _can_delete():
return True
def _can_clone():
return True
def _get_username():
return 'anonymous'
# Config examples: wire the hooks above into flask_jsondash's config.
app.config['JSONDASH'] = dict(
    # Callables used to stamp ownership metadata on saved dashboards.
    metadata=dict(
        created_by=_get_username,
        username=_get_username,
    ),
    # Where the blueprint serves its vendored JS/CSS assets from.
    static=dict(
        js_path='js/vendor/',
        css_path='css/vendor/',
    ),
    # Permission callables; each returns a bool per request.
    auth=dict(
        edit_global=_can_edit_global,
        clone=_can_clone,
        delete=_can_delete,
    )
)
@app.route('/', methods=['GET'])
def index():
    """Landing page: a link to the charts blueprint."""
    return '<a href="/charts">Visit the charts blueprint.</a>'
if __name__ == '__main__':
    # Port is configurable via the PORT environment variable (default 5002).
    PORT = int(os.getenv('PORT', 5002))
    app.run(debug=True, port=PORT)
|
Enable global by default in example.
|
Enable global by default in example.
|
Python
|
mit
|
christabor/flask_jsondash,christabor/flask_jsondash,christabor/flask_jsondash
|
"""This is an example app, demonstrating usage."""
import os
from flask import Flask
from flask_jsondash.charts_builder import charts
app = Flask(__name__)
app.config['SECRET_KEY'] = 'NOTSECURELOL'
app.config.update(
JSONDASH_FILTERUSERS=False,
JSONDASH_GLOBALDASH=False,
JSONDASH_GLOBAL_USER='global',
)
app.debug = True
app.register_blueprint(charts)
def _can_edit_global():
return True
def _can_delete():
return True
def _can_clone():
return True
def _get_username():
return 'anonymous'
# Config examples.
app.config['JSONDASH'] = dict(
metadata=dict(
created_by=_get_username,
username=_get_username,
),
static=dict(
js_path='js/vendor/',
css_path='css/vendor/',
),
auth=dict(
edit_global=_can_edit_global,
clone=_can_clone,
delete=_can_delete,
)
)
@app.route('/', methods=['GET'])
def index():
"""Sample index."""
return '<a href="/charts">Visit the charts blueprint.</a>'
if __name__ == '__main__':
PORT = int(os.getenv('PORT', 5002))
app.run(debug=True, port=PORT)
Enable global by default in example.
|
"""This is an example app, demonstrating usage."""
import os
from flask import Flask
from flask_jsondash.charts_builder import charts
app = Flask(__name__)
app.config['SECRET_KEY'] = 'NOTSECURELOL'
app.config.update(
JSONDASH_FILTERUSERS=False,
JSONDASH_GLOBALDASH=True,
JSONDASH_GLOBAL_USER='global',
)
app.debug = True
app.register_blueprint(charts)
def _can_edit_global():
return True
def _can_delete():
return True
def _can_clone():
return True
def _get_username():
return 'anonymous'
# Config examples.
app.config['JSONDASH'] = dict(
metadata=dict(
created_by=_get_username,
username=_get_username,
),
static=dict(
js_path='js/vendor/',
css_path='css/vendor/',
),
auth=dict(
edit_global=_can_edit_global,
clone=_can_clone,
delete=_can_delete,
)
)
@app.route('/', methods=['GET'])
def index():
"""Sample index."""
return '<a href="/charts">Visit the charts blueprint.</a>'
if __name__ == '__main__':
PORT = int(os.getenv('PORT', 5002))
app.run(debug=True, port=PORT)
|
<commit_before>"""This is an example app, demonstrating usage."""
import os
from flask import Flask
from flask_jsondash.charts_builder import charts
app = Flask(__name__)
app.config['SECRET_KEY'] = 'NOTSECURELOL'
app.config.update(
JSONDASH_FILTERUSERS=False,
JSONDASH_GLOBALDASH=False,
JSONDASH_GLOBAL_USER='global',
)
app.debug = True
app.register_blueprint(charts)
def _can_edit_global():
return True
def _can_delete():
return True
def _can_clone():
return True
def _get_username():
return 'anonymous'
# Config examples.
app.config['JSONDASH'] = dict(
metadata=dict(
created_by=_get_username,
username=_get_username,
),
static=dict(
js_path='js/vendor/',
css_path='css/vendor/',
),
auth=dict(
edit_global=_can_edit_global,
clone=_can_clone,
delete=_can_delete,
)
)
@app.route('/', methods=['GET'])
def index():
"""Sample index."""
return '<a href="/charts">Visit the charts blueprint.</a>'
if __name__ == '__main__':
PORT = int(os.getenv('PORT', 5002))
app.run(debug=True, port=PORT)
<commit_msg>Enable global by default in example.<commit_after>
|
"""This is an example app, demonstrating usage."""
import os
from flask import Flask
from flask_jsondash.charts_builder import charts
app = Flask(__name__)
app.config['SECRET_KEY'] = 'NOTSECURELOL'
app.config.update(
JSONDASH_FILTERUSERS=False,
JSONDASH_GLOBALDASH=True,
JSONDASH_GLOBAL_USER='global',
)
app.debug = True
app.register_blueprint(charts)
def _can_edit_global():
return True
def _can_delete():
return True
def _can_clone():
return True
def _get_username():
return 'anonymous'
# Config examples.
app.config['JSONDASH'] = dict(
metadata=dict(
created_by=_get_username,
username=_get_username,
),
static=dict(
js_path='js/vendor/',
css_path='css/vendor/',
),
auth=dict(
edit_global=_can_edit_global,
clone=_can_clone,
delete=_can_delete,
)
)
@app.route('/', methods=['GET'])
def index():
"""Sample index."""
return '<a href="/charts">Visit the charts blueprint.</a>'
if __name__ == '__main__':
PORT = int(os.getenv('PORT', 5002))
app.run(debug=True, port=PORT)
|
"""This is an example app, demonstrating usage."""
import os
from flask import Flask
from flask_jsondash.charts_builder import charts
app = Flask(__name__)
app.config['SECRET_KEY'] = 'NOTSECURELOL'
app.config.update(
JSONDASH_FILTERUSERS=False,
JSONDASH_GLOBALDASH=False,
JSONDASH_GLOBAL_USER='global',
)
app.debug = True
app.register_blueprint(charts)
def _can_edit_global():
return True
def _can_delete():
return True
def _can_clone():
return True
def _get_username():
return 'anonymous'
# Config examples.
app.config['JSONDASH'] = dict(
metadata=dict(
created_by=_get_username,
username=_get_username,
),
static=dict(
js_path='js/vendor/',
css_path='css/vendor/',
),
auth=dict(
edit_global=_can_edit_global,
clone=_can_clone,
delete=_can_delete,
)
)
@app.route('/', methods=['GET'])
def index():
"""Sample index."""
return '<a href="/charts">Visit the charts blueprint.</a>'
if __name__ == '__main__':
PORT = int(os.getenv('PORT', 5002))
app.run(debug=True, port=PORT)
Enable global by default in example."""This is an example app, demonstrating usage."""
import os
from flask import Flask
from flask_jsondash.charts_builder import charts
app = Flask(__name__)
app.config['SECRET_KEY'] = 'NOTSECURELOL'
app.config.update(
JSONDASH_FILTERUSERS=False,
JSONDASH_GLOBALDASH=True,
JSONDASH_GLOBAL_USER='global',
)
app.debug = True
app.register_blueprint(charts)
def _can_edit_global():
return True
def _can_delete():
return True
def _can_clone():
return True
def _get_username():
return 'anonymous'
# Config examples.
app.config['JSONDASH'] = dict(
metadata=dict(
created_by=_get_username,
username=_get_username,
),
static=dict(
js_path='js/vendor/',
css_path='css/vendor/',
),
auth=dict(
edit_global=_can_edit_global,
clone=_can_clone,
delete=_can_delete,
)
)
@app.route('/', methods=['GET'])
def index():
"""Sample index."""
return '<a href="/charts">Visit the charts blueprint.</a>'
if __name__ == '__main__':
PORT = int(os.getenv('PORT', 5002))
app.run(debug=True, port=PORT)
|
<commit_before>"""This is an example app, demonstrating usage."""
import os
from flask import Flask
from flask_jsondash.charts_builder import charts
app = Flask(__name__)
app.config['SECRET_KEY'] = 'NOTSECURELOL'
app.config.update(
JSONDASH_FILTERUSERS=False,
JSONDASH_GLOBALDASH=False,
JSONDASH_GLOBAL_USER='global',
)
app.debug = True
app.register_blueprint(charts)
def _can_edit_global():
return True
def _can_delete():
return True
def _can_clone():
return True
def _get_username():
return 'anonymous'
# Config examples.
app.config['JSONDASH'] = dict(
metadata=dict(
created_by=_get_username,
username=_get_username,
),
static=dict(
js_path='js/vendor/',
css_path='css/vendor/',
),
auth=dict(
edit_global=_can_edit_global,
clone=_can_clone,
delete=_can_delete,
)
)
@app.route('/', methods=['GET'])
def index():
"""Sample index."""
return '<a href="/charts">Visit the charts blueprint.</a>'
if __name__ == '__main__':
PORT = int(os.getenv('PORT', 5002))
app.run(debug=True, port=PORT)
<commit_msg>Enable global by default in example.<commit_after>"""This is an example app, demonstrating usage."""
import os
from flask import Flask
from flask_jsondash.charts_builder import charts
app = Flask(__name__)
app.config['SECRET_KEY'] = 'NOTSECURELOL'
app.config.update(
JSONDASH_FILTERUSERS=False,
JSONDASH_GLOBALDASH=True,
JSONDASH_GLOBAL_USER='global',
)
app.debug = True
app.register_blueprint(charts)
def _can_edit_global():
return True
def _can_delete():
return True
def _can_clone():
return True
def _get_username():
return 'anonymous'
# Config examples.
app.config['JSONDASH'] = dict(
metadata=dict(
created_by=_get_username,
username=_get_username,
),
static=dict(
js_path='js/vendor/',
css_path='css/vendor/',
),
auth=dict(
edit_global=_can_edit_global,
clone=_can_clone,
delete=_can_delete,
)
)
@app.route('/', methods=['GET'])
def index():
"""Sample index."""
return '<a href="/charts">Visit the charts blueprint.</a>'
if __name__ == '__main__':
PORT = int(os.getenv('PORT', 5002))
app.run(debug=True, port=PORT)
|
221d672368f8989508aaf5b36f6a4f9f5bd5425a
|
winthrop/books/migrations/0008_add-digital-edition.py
|
winthrop/books/migrations/0008_add-digital-edition.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-17 18:01
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('djiffy', '0002_add-digital-edition'),
('books', '0007_title-length'),
]
operations = [
migrations.AlterModelOptions(
name='book',
options={'ordering': ['title']},
),
migrations.AddField(
model_name='book',
name='digital_edition',
field=models.ForeignKey(blank=True, null=True, default=None, help_text='Digitized edition of this book, if available', on_delete=django.db.models.deletion.CASCADE, to='djiffy.Manifest'),
preserve_default=False,
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-17 18:19
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add an optional ``digital_edition`` FK from Book to djiffy's Manifest."""

    dependencies = [
        # Depends on djiffy's initial migration (the actually-existing one).
        ('djiffy', '0001_initial'),
        ('books', '0007_title-length'),
    ]

    operations = [
        # Order books alphabetically by title by default.
        migrations.AlterModelOptions(
            name='book',
            options={'ordering': ['title']},
        ),
        # Nullable link to a digitized edition, when one is available.
        migrations.AddField(
            model_name='book',
            name='digital_edition',
            field=models.ForeignKey(blank=True, help_text='Digitized edition of this book, if available', null=True, on_delete=django.db.models.deletion.CASCADE, to='djiffy.Manifest'),
        ),
    ]
|
Fix migration so it works with actual existing djiffy migrations
|
Fix migration so it works with actual existing djiffy migrations
|
Python
|
apache-2.0
|
Princeton-CDH/winthrop-django,Princeton-CDH/winthrop-django,Princeton-CDH/winthrop-django
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-17 18:01
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('djiffy', '0002_add-digital-edition'),
('books', '0007_title-length'),
]
operations = [
migrations.AlterModelOptions(
name='book',
options={'ordering': ['title']},
),
migrations.AddField(
model_name='book',
name='digital_edition',
field=models.ForeignKey(blank=True, null=True, default=None, help_text='Digitized edition of this book, if available', on_delete=django.db.models.deletion.CASCADE, to='djiffy.Manifest'),
preserve_default=False,
),
]
Fix migration so it works with actual existing djiffy migrations
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-17 18:19
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('djiffy', '0001_initial'),
('books', '0007_title-length'),
]
operations = [
migrations.AlterModelOptions(
name='book',
options={'ordering': ['title']},
),
migrations.AddField(
model_name='book',
name='digital_edition',
field=models.ForeignKey(blank=True, help_text='Digitized edition of this book, if available', null=True, on_delete=django.db.models.deletion.CASCADE, to='djiffy.Manifest'),
),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-17 18:01
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('djiffy', '0002_add-digital-edition'),
('books', '0007_title-length'),
]
operations = [
migrations.AlterModelOptions(
name='book',
options={'ordering': ['title']},
),
migrations.AddField(
model_name='book',
name='digital_edition',
field=models.ForeignKey(blank=True, null=True, default=None, help_text='Digitized edition of this book, if available', on_delete=django.db.models.deletion.CASCADE, to='djiffy.Manifest'),
preserve_default=False,
),
]
<commit_msg>Fix migration so it works with actual existing djiffy migrations<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-17 18:19
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('djiffy', '0001_initial'),
('books', '0007_title-length'),
]
operations = [
migrations.AlterModelOptions(
name='book',
options={'ordering': ['title']},
),
migrations.AddField(
model_name='book',
name='digital_edition',
field=models.ForeignKey(blank=True, help_text='Digitized edition of this book, if available', null=True, on_delete=django.db.models.deletion.CASCADE, to='djiffy.Manifest'),
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-17 18:01
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('djiffy', '0002_add-digital-edition'),
('books', '0007_title-length'),
]
operations = [
migrations.AlterModelOptions(
name='book',
options={'ordering': ['title']},
),
migrations.AddField(
model_name='book',
name='digital_edition',
field=models.ForeignKey(blank=True, null=True, default=None, help_text='Digitized edition of this book, if available', on_delete=django.db.models.deletion.CASCADE, to='djiffy.Manifest'),
preserve_default=False,
),
]
Fix migration so it works with actual existing djiffy migrations# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-17 18:19
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('djiffy', '0001_initial'),
('books', '0007_title-length'),
]
operations = [
migrations.AlterModelOptions(
name='book',
options={'ordering': ['title']},
),
migrations.AddField(
model_name='book',
name='digital_edition',
field=models.ForeignKey(blank=True, help_text='Digitized edition of this book, if available', null=True, on_delete=django.db.models.deletion.CASCADE, to='djiffy.Manifest'),
),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-17 18:01
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('djiffy', '0002_add-digital-edition'),
('books', '0007_title-length'),
]
operations = [
migrations.AlterModelOptions(
name='book',
options={'ordering': ['title']},
),
migrations.AddField(
model_name='book',
name='digital_edition',
field=models.ForeignKey(blank=True, null=True, default=None, help_text='Digitized edition of this book, if available', on_delete=django.db.models.deletion.CASCADE, to='djiffy.Manifest'),
preserve_default=False,
),
]
<commit_msg>Fix migration so it works with actual existing djiffy migrations<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-17 18:19
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('djiffy', '0001_initial'),
('books', '0007_title-length'),
]
operations = [
migrations.AlterModelOptions(
name='book',
options={'ordering': ['title']},
),
migrations.AddField(
model_name='book',
name='digital_edition',
field=models.ForeignKey(blank=True, help_text='Digitized edition of this book, if available', null=True, on_delete=django.db.models.deletion.CASCADE, to='djiffy.Manifest'),
),
]
|
0f77c9a48e84a3185794f97c5f15c7b13ae1d505
|
tests/test_vector2_angle.py
|
tests/test_vector2_angle.py
|
from ppb_vector import Vector2
from math import isclose
import pytest
@pytest.mark.parametrize("left, right, expected", [
(Vector2(1, 1), Vector2(0, -1), 135),
(Vector2(1, 1), Vector2(-1, 0), 135),
(Vector2(0, 1), Vector2(0, -1), 180),
(Vector2(-1, -1), Vector2(1, 0), 135),
(Vector2(-1, -1), Vector2(-1, 0), 45)
])
def test_angle(left, right, expected):
assert isclose(left.angle(right), expected)
assert isclose(right.angle(left), expected)
|
from ppb_vector import Vector2
from math import isclose
import pytest
@pytest.mark.parametrize("left, right, expected", [
(Vector2(1, 1), Vector2(0, -1), 135),
(Vector2(1, 1), Vector2(-1, 0), 135),
(Vector2(0, 1), Vector2(0, -1), 180),
(Vector2(-1, -1), Vector2(1, 0), 135),
(Vector2(-1, -1), Vector2(-1, 0), 45),
(Vector2(1, 0), Vector2(0, 1), 90),
(Vector2(1, 0), Vector2(1, 0), 0),
])
def test_angle(left, right, expected):
assert isclose(left.angle(right), expected)
assert isclose(right.angle(left), expected)
|
Add some additional test cases
|
Add some additional test cases
|
Python
|
artistic-2.0
|
ppb/ppb-vector,ppb/ppb-vector
|
from ppb_vector import Vector2
from math import isclose
import pytest
@pytest.mark.parametrize("left, right, expected", [
(Vector2(1, 1), Vector2(0, -1), 135),
(Vector2(1, 1), Vector2(-1, 0), 135),
(Vector2(0, 1), Vector2(0, -1), 180),
(Vector2(-1, -1), Vector2(1, 0), 135),
(Vector2(-1, -1), Vector2(-1, 0), 45)
])
def test_angle(left, right, expected):
assert isclose(left.angle(right), expected)
assert isclose(right.angle(left), expected)
Add some additional test cases
|
from ppb_vector import Vector2
from math import isclose
import pytest
@pytest.mark.parametrize("left, right, expected", [
(Vector2(1, 1), Vector2(0, -1), 135),
(Vector2(1, 1), Vector2(-1, 0), 135),
(Vector2(0, 1), Vector2(0, -1), 180),
(Vector2(-1, -1), Vector2(1, 0), 135),
(Vector2(-1, -1), Vector2(-1, 0), 45),
(Vector2(1, 0), Vector2(0, 1), 90),
(Vector2(1, 0), Vector2(1, 0), 0),
])
def test_angle(left, right, expected):
assert isclose(left.angle(right), expected)
assert isclose(right.angle(left), expected)
|
<commit_before>from ppb_vector import Vector2
from math import isclose
import pytest
@pytest.mark.parametrize("left, right, expected", [
(Vector2(1, 1), Vector2(0, -1), 135),
(Vector2(1, 1), Vector2(-1, 0), 135),
(Vector2(0, 1), Vector2(0, -1), 180),
(Vector2(-1, -1), Vector2(1, 0), 135),
(Vector2(-1, -1), Vector2(-1, 0), 45)
])
def test_angle(left, right, expected):
assert isclose(left.angle(right), expected)
assert isclose(right.angle(left), expected)
<commit_msg>Add some additional test cases<commit_after>
|
from ppb_vector import Vector2
from math import isclose
import pytest
@pytest.mark.parametrize("left, right, expected", [
(Vector2(1, 1), Vector2(0, -1), 135),
(Vector2(1, 1), Vector2(-1, 0), 135),
(Vector2(0, 1), Vector2(0, -1), 180),
(Vector2(-1, -1), Vector2(1, 0), 135),
(Vector2(-1, -1), Vector2(-1, 0), 45),
(Vector2(1, 0), Vector2(0, 1), 90),
(Vector2(1, 0), Vector2(1, 0), 0),
])
def test_angle(left, right, expected):
assert isclose(left.angle(right), expected)
assert isclose(right.angle(left), expected)
|
from ppb_vector import Vector2
from math import isclose
import pytest
@pytest.mark.parametrize("left, right, expected", [
(Vector2(1, 1), Vector2(0, -1), 135),
(Vector2(1, 1), Vector2(-1, 0), 135),
(Vector2(0, 1), Vector2(0, -1), 180),
(Vector2(-1, -1), Vector2(1, 0), 135),
(Vector2(-1, -1), Vector2(-1, 0), 45)
])
def test_angle(left, right, expected):
assert isclose(left.angle(right), expected)
assert isclose(right.angle(left), expected)
Add some additional test casesfrom ppb_vector import Vector2
from math import isclose
import pytest
@pytest.mark.parametrize("left, right, expected", [
(Vector2(1, 1), Vector2(0, -1), 135),
(Vector2(1, 1), Vector2(-1, 0), 135),
(Vector2(0, 1), Vector2(0, -1), 180),
(Vector2(-1, -1), Vector2(1, 0), 135),
(Vector2(-1, -1), Vector2(-1, 0), 45),
(Vector2(1, 0), Vector2(0, 1), 90),
(Vector2(1, 0), Vector2(1, 0), 0),
])
def test_angle(left, right, expected):
assert isclose(left.angle(right), expected)
assert isclose(right.angle(left), expected)
|
<commit_before>from ppb_vector import Vector2
from math import isclose
import pytest
@pytest.mark.parametrize("left, right, expected", [
(Vector2(1, 1), Vector2(0, -1), 135),
(Vector2(1, 1), Vector2(-1, 0), 135),
(Vector2(0, 1), Vector2(0, -1), 180),
(Vector2(-1, -1), Vector2(1, 0), 135),
(Vector2(-1, -1), Vector2(-1, 0), 45)
])
def test_angle(left, right, expected):
assert isclose(left.angle(right), expected)
assert isclose(right.angle(left), expected)
<commit_msg>Add some additional test cases<commit_after>from ppb_vector import Vector2
from math import isclose
import pytest
@pytest.mark.parametrize("left, right, expected", [
(Vector2(1, 1), Vector2(0, -1), 135),
(Vector2(1, 1), Vector2(-1, 0), 135),
(Vector2(0, 1), Vector2(0, -1), 180),
(Vector2(-1, -1), Vector2(1, 0), 135),
(Vector2(-1, -1), Vector2(-1, 0), 45),
(Vector2(1, 0), Vector2(0, 1), 90),
(Vector2(1, 0), Vector2(1, 0), 0),
])
def test_angle(left, right, expected):
assert isclose(left.angle(right), expected)
assert isclose(right.angle(left), expected)
|
44990f83fc9f68486dc54999fc038c564e516f95
|
tests/test_vector2_angle.py
|
tests/test_vector2_angle.py
|
from ppb_vector import Vector2
from math import isclose
import pytest
@pytest.mark.parametrize("left, right, expected", [
(Vector2(1, 1), Vector2(0, -1), 135),
(Vector2(1, 1), Vector2(-1, 0), 135),
(Vector2(0, 1), Vector2(0, -1), 180),
(Vector2(-1, -1), Vector2(1, 0), 135),
(Vector2(-1, -1), Vector2(-1, 0), 45),
(Vector2(1, 0), Vector2(0, 1), 90),
(Vector2(1, 0), Vector2(1, 0), 0),
])
def test_angle(left, right, expected):
assert isclose(left.angle(right), expected)
assert isclose(right.angle(left), expected)
|
from ppb_vector import Vector2
from math import isclose
import pytest
@pytest.mark.parametrize("left, right, expected", [
(Vector2(1, 1), Vector2(0, -1), -135),
(Vector2(1, 1), Vector2(-1, 0), 135),
(Vector2(0, 1), Vector2(0, -1), 180),
(Vector2(-1, -1), Vector2(1, 0), 135),
(Vector2(-1, -1), Vector2(-1, 0), 45),
(Vector2(1, 0), Vector2(0, 1), 90),
(Vector2(1, 0), Vector2(1, 0), 0),
])
def test_angle(left, right, expected):
assert isclose(left.angle(right), expected)
assert isclose(right.angle(left), expected)
|
Fix a clearly wrong sign
|
Fix a clearly wrong sign
|
Python
|
artistic-2.0
|
ppb/ppb-vector,ppb/ppb-vector
|
from ppb_vector import Vector2
from math import isclose
import pytest
@pytest.mark.parametrize("left, right, expected", [
(Vector2(1, 1), Vector2(0, -1), 135),
(Vector2(1, 1), Vector2(-1, 0), 135),
(Vector2(0, 1), Vector2(0, -1), 180),
(Vector2(-1, -1), Vector2(1, 0), 135),
(Vector2(-1, -1), Vector2(-1, 0), 45),
(Vector2(1, 0), Vector2(0, 1), 90),
(Vector2(1, 0), Vector2(1, 0), 0),
])
def test_angle(left, right, expected):
assert isclose(left.angle(right), expected)
assert isclose(right.angle(left), expected)
Fix a clearly wrong sign
|
from ppb_vector import Vector2
from math import isclose
import pytest
@pytest.mark.parametrize("left, right, expected", [
(Vector2(1, 1), Vector2(0, -1), -135),
(Vector2(1, 1), Vector2(-1, 0), 135),
(Vector2(0, 1), Vector2(0, -1), 180),
(Vector2(-1, -1), Vector2(1, 0), 135),
(Vector2(-1, -1), Vector2(-1, 0), 45),
(Vector2(1, 0), Vector2(0, 1), 90),
(Vector2(1, 0), Vector2(1, 0), 0),
])
def test_angle(left, right, expected):
assert isclose(left.angle(right), expected)
assert isclose(right.angle(left), expected)
|
<commit_before>from ppb_vector import Vector2
from math import isclose
import pytest
@pytest.mark.parametrize("left, right, expected", [
(Vector2(1, 1), Vector2(0, -1), 135),
(Vector2(1, 1), Vector2(-1, 0), 135),
(Vector2(0, 1), Vector2(0, -1), 180),
(Vector2(-1, -1), Vector2(1, 0), 135),
(Vector2(-1, -1), Vector2(-1, 0), 45),
(Vector2(1, 0), Vector2(0, 1), 90),
(Vector2(1, 0), Vector2(1, 0), 0),
])
def test_angle(left, right, expected):
assert isclose(left.angle(right), expected)
assert isclose(right.angle(left), expected)
<commit_msg>Fix a clearly wrong sign<commit_after>
|
from ppb_vector import Vector2
from math import isclose
import pytest
@pytest.mark.parametrize("left, right, expected", [
(Vector2(1, 1), Vector2(0, -1), -135),
(Vector2(1, 1), Vector2(-1, 0), 135),
(Vector2(0, 1), Vector2(0, -1), 180),
(Vector2(-1, -1), Vector2(1, 0), 135),
(Vector2(-1, -1), Vector2(-1, 0), 45),
(Vector2(1, 0), Vector2(0, 1), 90),
(Vector2(1, 0), Vector2(1, 0), 0),
])
def test_angle(left, right, expected):
assert isclose(left.angle(right), expected)
assert isclose(right.angle(left), expected)
|
from ppb_vector import Vector2
from math import isclose
import pytest
@pytest.mark.parametrize("left, right, expected", [
(Vector2(1, 1), Vector2(0, -1), 135),
(Vector2(1, 1), Vector2(-1, 0), 135),
(Vector2(0, 1), Vector2(0, -1), 180),
(Vector2(-1, -1), Vector2(1, 0), 135),
(Vector2(-1, -1), Vector2(-1, 0), 45),
(Vector2(1, 0), Vector2(0, 1), 90),
(Vector2(1, 0), Vector2(1, 0), 0),
])
def test_angle(left, right, expected):
assert isclose(left.angle(right), expected)
assert isclose(right.angle(left), expected)
Fix a clearly wrong signfrom ppb_vector import Vector2
from math import isclose
import pytest
@pytest.mark.parametrize("left, right, expected", [
(Vector2(1, 1), Vector2(0, -1), -135),
(Vector2(1, 1), Vector2(-1, 0), 135),
(Vector2(0, 1), Vector2(0, -1), 180),
(Vector2(-1, -1), Vector2(1, 0), 135),
(Vector2(-1, -1), Vector2(-1, 0), 45),
(Vector2(1, 0), Vector2(0, 1), 90),
(Vector2(1, 0), Vector2(1, 0), 0),
])
def test_angle(left, right, expected):
assert isclose(left.angle(right), expected)
assert isclose(right.angle(left), expected)
|
<commit_before>from ppb_vector import Vector2
from math import isclose
import pytest
@pytest.mark.parametrize("left, right, expected", [
(Vector2(1, 1), Vector2(0, -1), 135),
(Vector2(1, 1), Vector2(-1, 0), 135),
(Vector2(0, 1), Vector2(0, -1), 180),
(Vector2(-1, -1), Vector2(1, 0), 135),
(Vector2(-1, -1), Vector2(-1, 0), 45),
(Vector2(1, 0), Vector2(0, 1), 90),
(Vector2(1, 0), Vector2(1, 0), 0),
])
def test_angle(left, right, expected):
assert isclose(left.angle(right), expected)
assert isclose(right.angle(left), expected)
<commit_msg>Fix a clearly wrong sign<commit_after>from ppb_vector import Vector2
from math import isclose
import pytest
@pytest.mark.parametrize("left, right, expected", [
(Vector2(1, 1), Vector2(0, -1), -135),
(Vector2(1, 1), Vector2(-1, 0), 135),
(Vector2(0, 1), Vector2(0, -1), 180),
(Vector2(-1, -1), Vector2(1, 0), 135),
(Vector2(-1, -1), Vector2(-1, 0), 45),
(Vector2(1, 0), Vector2(0, 1), 90),
(Vector2(1, 0), Vector2(1, 0), 0),
])
def test_angle(left, right, expected):
assert isclose(left.angle(right), expected)
assert isclose(right.angle(left), expected)
|
11022b79ded961bdd2e9a6bff0c4f4a03097084c
|
scripts/install_new_database.py
|
scripts/install_new_database.py
|
#!/usr/bin/env python3
"""Promote the scratch database by calling chdb.install_scratch_db()."""
import os
import sys

# Make the package root (one level up from scripts/) importable when this
# file is run directly as a script.
_project_root = os.path.abspath(
    os.path.join(os.path.dirname(__file__), os.pardir))
if _project_root not in sys.path:
    sys.path.append(_project_root)

import chdb

if __name__ == '__main__':
    chdb.install_scratch_db()
|
#!/usr/bin/env python3
"""Sanity-check the scratch database, then install it as the live one."""
import os
import sys

_upper_dir = os.path.abspath(
    os.path.join(os.path.dirname(__file__), '..'))
if _upper_dir not in sys.path:
    sys.path.append(_upper_dir)

import chdb

# Minimum plausible row count: a scratch DB smaller than this is assumed to
# be the product of a broken build and must not be installed.
_MIN_ROWS = 100

def sanity_check():
    """Raise AssertionError if the scratch database looks implausibly small."""
    sdb = chdb.init_scratch_db()
    snippet_count = sdb.execute_with_retry_s(
        '''SELECT COUNT(*) FROM snippets''')[0]
    # Include the offending count in the failure so breakage is diagnosable
    # (the original bare asserts reported nothing).
    assert snippet_count > _MIN_ROWS, 'too few snippets: %r' % (snippet_count,)
    article_count = sdb.execute_with_retry_s(
        '''SELECT COUNT(*) FROM articles''')[0]
    assert article_count > _MIN_ROWS, 'too few articles: %r' % (article_count,)

if __name__ == '__main__':
    sanity_check()
    chdb.install_scratch_db()
|
Add a couple of sanity checks so we don't break the database.
|
Add a couple of sanity checks so we don't break the database.
Part of #139.
|
Python
|
mit
|
guilherme-pg/citationhunt,eggpi/citationhunt,guilherme-pg/citationhunt,guilherme-pg/citationhunt,guilherme-pg/citationhunt,eggpi/citationhunt,eggpi/citationhunt,eggpi/citationhunt
|
#!/usr/bin/env python3
import os
import sys
_upper_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..'))
if _upper_dir not in sys.path:
sys.path.append(_upper_dir)
import chdb
if __name__ == '__main__':
chdb.install_scratch_db()
Add a couple of sanity checks so we don't break the database.
Part of #139.
|
#!/usr/bin/env python3
import os
import sys
_upper_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..'))
if _upper_dir not in sys.path:
sys.path.append(_upper_dir)
import chdb
def sanity_check():
sdb = chdb.init_scratch_db()
snippet_count = sdb.execute_with_retry_s(
'''SELECT COUNT(*) FROM snippets''')[0]
assert snippet_count > 100
article_count = sdb.execute_with_retry_s(
'''SELECT COUNT(*) FROM articles''')[0]
assert article_count > 100
if __name__ == '__main__':
sanity_check()
chdb.install_scratch_db()
|
<commit_before>#!/usr/bin/env python3
import os
import sys
_upper_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..'))
if _upper_dir not in sys.path:
sys.path.append(_upper_dir)
import chdb
if __name__ == '__main__':
chdb.install_scratch_db()
<commit_msg>Add a couple of sanity checks so we don't break the database.
Part of #139.<commit_after>
|
#!/usr/bin/env python3
import os
import sys
_upper_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..'))
if _upper_dir not in sys.path:
sys.path.append(_upper_dir)
import chdb
def sanity_check():
sdb = chdb.init_scratch_db()
snippet_count = sdb.execute_with_retry_s(
'''SELECT COUNT(*) FROM snippets''')[0]
assert snippet_count > 100
article_count = sdb.execute_with_retry_s(
'''SELECT COUNT(*) FROM articles''')[0]
assert article_count > 100
if __name__ == '__main__':
sanity_check()
chdb.install_scratch_db()
|
#!/usr/bin/env python3
import os
import sys
_upper_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..'))
if _upper_dir not in sys.path:
sys.path.append(_upper_dir)
import chdb
if __name__ == '__main__':
chdb.install_scratch_db()
Add a couple of sanity checks so we don't break the database.
Part of #139.#!/usr/bin/env python3
import os
import sys
_upper_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..'))
if _upper_dir not in sys.path:
sys.path.append(_upper_dir)
import chdb
def sanity_check():
sdb = chdb.init_scratch_db()
snippet_count = sdb.execute_with_retry_s(
'''SELECT COUNT(*) FROM snippets''')[0]
assert snippet_count > 100
article_count = sdb.execute_with_retry_s(
'''SELECT COUNT(*) FROM articles''')[0]
assert article_count > 100
if __name__ == '__main__':
sanity_check()
chdb.install_scratch_db()
|
<commit_before>#!/usr/bin/env python3
import os
import sys
_upper_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..'))
if _upper_dir not in sys.path:
sys.path.append(_upper_dir)
import chdb
if __name__ == '__main__':
chdb.install_scratch_db()
<commit_msg>Add a couple of sanity checks so we don't break the database.
Part of #139.<commit_after>#!/usr/bin/env python3
import os
import sys
_upper_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..'))
if _upper_dir not in sys.path:
sys.path.append(_upper_dir)
import chdb
def sanity_check():
sdb = chdb.init_scratch_db()
snippet_count = sdb.execute_with_retry_s(
'''SELECT COUNT(*) FROM snippets''')[0]
assert snippet_count > 100
article_count = sdb.execute_with_retry_s(
'''SELECT COUNT(*) FROM articles''')[0]
assert article_count > 100
if __name__ == '__main__':
sanity_check()
chdb.install_scratch_db()
|
4836cb93b03eae38c0e1eebeee831f9b4fc012eb
|
cozify/config.py
|
cozify/config.py
|
import configparser
import os

def ephemeralWrite():
    """Persist the in-memory state to ephemeralFile."""
    with open(ephemeralFile, 'w') as configfile:
        ephemeral.write(configfile)

# prime ephemeral storage
ephemeralFile = "%s/.config/python-cozify.cfg" % os.path.expanduser('~')
# NOTE(review): the handle bound to 'file' below is never closed — resource
# leak; consider open(...).close() since only existence/creation matters.
try:
    file = open(ephemeralFile, 'r')
except IOError:
    # State file missing: create it with restrictive permissions.
    file = open(ephemeralFile, 'w+')
    os.chmod(ephemeralFile, 0o600) # set to user readwrite only to protect tokens
ephemeral = configparser.ConfigParser()
ephemeral.read(ephemeralFile)
# make sure config is in roughly a valid state
for key in [ 'Cloud', 'Hubs' ]:
    if key not in ephemeral:
        ephemeral[key] = {}
ephemeralWrite()
|
import configparser
import os

# Location of the on-disk state file; may be overridden via setStatePath().
ephemeralFile = "%s/.config/python-cozify.cfg" % os.path.expanduser('~')
ephemeral = None  # module-wide ConfigParser state, populated by _initState()

def ephemeralWrite():
    """Persist the in-memory state to ephemeralFile."""
    with open(ephemeralFile, 'w') as configfile:
        ephemeral.write(configfile)

# allow setting the ephemeral storage location.
# Useful especially for testing without affecting your normal state
def setStatePath(filepath):
    global ephemeralFile
    ephemeralFile = filepath
    _initState()

def _initState():
    """Create the state file if needed, then load and normalise it."""
    global ephemeral
    # Robustness fix: create the parent directory when missing; previously a
    # missing ~/.config made the 'w+' open below raise an uncaught IOError.
    parent = os.path.dirname(ephemeralFile)
    if parent and not os.path.isdir(parent):
        os.makedirs(parent)
    # prime ephemeral storage; close the handles promptly instead of leaking
    # them for the lifetime of the module (bug in the original).
    try:
        open(ephemeralFile, 'r').close()
    except IOError:
        open(ephemeralFile, 'w+').close()
        os.chmod(ephemeralFile, 0o600)  # user read/write only, to protect tokens
    ephemeral = configparser.ConfigParser()
    ephemeral.read(ephemeralFile)
    # make sure config is in roughly a valid state
    for key in ['Cloud', 'Hubs']:
        if key not in ephemeral:
            ephemeral[key] = {}
    ephemeralWrite()

_initState()
|
Support for changing ephemeral state storage mid-run. Mostly useful for debugging and testing without hosing your main state
|
Support for changing ephemeral state storage mid-run.
Mostly useful for debugging and testing without hosing your main state
|
Python
|
mit
|
Artanicus/python-cozify,Artanicus/python-cozify
|
import configparser
import os
def ephemeralWrite():
with open(ephemeralFile, 'w') as configfile:
ephemeral.write(configfile)
# prime ephemeral storage
ephemeralFile = "%s/.config/python-cozify.cfg" % os.path.expanduser('~')
try:
file = open(ephemeralFile, 'r')
except IOError:
file = open(ephemeralFile, 'w+')
os.chmod(ephemeralFile, 0o600) # set to user readwrite only to protect tokens
ephemeral = configparser.ConfigParser()
ephemeral.read(ephemeralFile)
# make sure config is in roughly a valid state
for key in [ 'Cloud', 'Hubs' ]:
if key not in ephemeral:
ephemeral[key] = {}
ephemeralWrite()
Support for changing ephemeral state storage mid-run.
Mostly useful for debugging and testing without hosing your main state
|
import configparser
import os
ephemeralFile = "%s/.config/python-cozify.cfg" % os.path.expanduser('~')
ephemeral = None
def ephemeralWrite():
with open(ephemeralFile, 'w') as configfile:
ephemeral.write(configfile)
# allow setting the ephemeral storage location.
# Useful especially for testing without affecting your normal state
def setStatePath(filepath):
global ephemeralFile
ephemeralFile = filepath
_initState()
def _initState():
global ephemeral
# prime ephemeral storage
try:
file = open(ephemeralFile, 'r')
except IOError:
file = open(ephemeralFile, 'w+')
os.chmod(ephemeralFile, 0o600) # set to user readwrite only to protect tokens
ephemeral = configparser.ConfigParser()
ephemeral.read(ephemeralFile)
# make sure config is in roughly a valid state
for key in [ 'Cloud', 'Hubs' ]:
if key not in ephemeral:
ephemeral[key] = {}
ephemeralWrite()
_initState()
|
<commit_before>import configparser
import os
def ephemeralWrite():
with open(ephemeralFile, 'w') as configfile:
ephemeral.write(configfile)
# prime ephemeral storage
ephemeralFile = "%s/.config/python-cozify.cfg" % os.path.expanduser('~')
try:
file = open(ephemeralFile, 'r')
except IOError:
file = open(ephemeralFile, 'w+')
os.chmod(ephemeralFile, 0o600) # set to user readwrite only to protect tokens
ephemeral = configparser.ConfigParser()
ephemeral.read(ephemeralFile)
# make sure config is in roughly a valid state
for key in [ 'Cloud', 'Hubs' ]:
if key not in ephemeral:
ephemeral[key] = {}
ephemeralWrite()
<commit_msg>Support for changing ephemeral state storage mid-run.
Mostly useful for debugging and testing without hosing your main state<commit_after>
|
import configparser
import os
ephemeralFile = "%s/.config/python-cozify.cfg" % os.path.expanduser('~')
ephemeral = None
def ephemeralWrite():
with open(ephemeralFile, 'w') as configfile:
ephemeral.write(configfile)
# allow setting the ephemeral storage location.
# Useful especially for testing without affecting your normal state
def setStatePath(filepath):
global ephemeralFile
ephemeralFile = filepath
_initState()
def _initState():
global ephemeral
# prime ephemeral storage
try:
file = open(ephemeralFile, 'r')
except IOError:
file = open(ephemeralFile, 'w+')
os.chmod(ephemeralFile, 0o600) # set to user readwrite only to protect tokens
ephemeral = configparser.ConfigParser()
ephemeral.read(ephemeralFile)
# make sure config is in roughly a valid state
for key in [ 'Cloud', 'Hubs' ]:
if key not in ephemeral:
ephemeral[key] = {}
ephemeralWrite()
_initState()
|
import configparser
import os
def ephemeralWrite():
with open(ephemeralFile, 'w') as configfile:
ephemeral.write(configfile)
# prime ephemeral storage
ephemeralFile = "%s/.config/python-cozify.cfg" % os.path.expanduser('~')
try:
file = open(ephemeralFile, 'r')
except IOError:
file = open(ephemeralFile, 'w+')
os.chmod(ephemeralFile, 0o600) # set to user readwrite only to protect tokens
ephemeral = configparser.ConfigParser()
ephemeral.read(ephemeralFile)
# make sure config is in roughly a valid state
for key in [ 'Cloud', 'Hubs' ]:
if key not in ephemeral:
ephemeral[key] = {}
ephemeralWrite()
Support for changing ephemeral state storage mid-run.
Mostly useful for debugging and testing without hosing your main stateimport configparser
import os
ephemeralFile = "%s/.config/python-cozify.cfg" % os.path.expanduser('~')
ephemeral = None
def ephemeralWrite():
with open(ephemeralFile, 'w') as configfile:
ephemeral.write(configfile)
# allow setting the ephemeral storage location.
# Useful especially for testing without affecting your normal state
def setStatePath(filepath):
global ephemeralFile
ephemeralFile = filepath
_initState()
def _initState():
global ephemeral
# prime ephemeral storage
try:
file = open(ephemeralFile, 'r')
except IOError:
file = open(ephemeralFile, 'w+')
os.chmod(ephemeralFile, 0o600) # set to user readwrite only to protect tokens
ephemeral = configparser.ConfigParser()
ephemeral.read(ephemeralFile)
# make sure config is in roughly a valid state
for key in [ 'Cloud', 'Hubs' ]:
if key not in ephemeral:
ephemeral[key] = {}
ephemeralWrite()
_initState()
|
<commit_before>import configparser
import os
def ephemeralWrite():
with open(ephemeralFile, 'w') as configfile:
ephemeral.write(configfile)
# prime ephemeral storage
ephemeralFile = "%s/.config/python-cozify.cfg" % os.path.expanduser('~')
try:
file = open(ephemeralFile, 'r')
except IOError:
file = open(ephemeralFile, 'w+')
os.chmod(ephemeralFile, 0o600) # set to user readwrite only to protect tokens
ephemeral = configparser.ConfigParser()
ephemeral.read(ephemeralFile)
# make sure config is in roughly a valid state
for key in [ 'Cloud', 'Hubs' ]:
if key not in ephemeral:
ephemeral[key] = {}
ephemeralWrite()
<commit_msg>Support for changing ephemeral state storage mid-run.
Mostly useful for debugging and testing without hosing your main state<commit_after>import configparser
import os
ephemeralFile = "%s/.config/python-cozify.cfg" % os.path.expanduser('~')
ephemeral = None
def ephemeralWrite():
with open(ephemeralFile, 'w') as configfile:
ephemeral.write(configfile)
# allow setting the ephemeral storage location.
# Useful especially for testing without affecting your normal state
def setStatePath(filepath):
global ephemeralFile
ephemeralFile = filepath
_initState()
def _initState():
global ephemeral
# prime ephemeral storage
try:
file = open(ephemeralFile, 'r')
except IOError:
file = open(ephemeralFile, 'w+')
os.chmod(ephemeralFile, 0o600) # set to user readwrite only to protect tokens
ephemeral = configparser.ConfigParser()
ephemeral.read(ephemeralFile)
# make sure config is in roughly a valid state
for key in [ 'Cloud', 'Hubs' ]:
if key not in ephemeral:
ephemeral[key] = {}
ephemeralWrite()
_initState()
|
9cccd1de4c9b9a6ca101c36edf288f0f4efec842
|
mea.py
|
mea.py
|
# MEA stands for model execution agent.
# Its task is to simulate models and interpret
# the simulation output.
import pysb
from pysb.integrate import Solver

class MEA:
    """Model execution agent (skeleton; no behaviour implemented yet)."""
    def __init__(self):
        # Nothing to initialise yet.
        pass

if __name__ == '__main__':
    pass
|
# MEA stands for model execution agent.
# Its task is to simulate models and interpret
# the simulation output.
import warnings
import numpy
import matplotlib.pyplot as plt
import pysb
from pysb.integrate import Solver

class MEA:
    """Model execution agent: simulates PySB models and reports the
    dynamics of an observable derived from a target agent."""
    def __init__(self):
        pass

    def get_monomer(self, model, agent):
        '''
        Return the monomer from a model corresponding to a given
        agent.
        '''
        # EAFP lookup; a missing monomer is reported via warning, not fatal.
        try:
            monomer = model.monomers[agent.name]
        except KeyError:
            warnings.warn('Monomer of interest %s could not be '
                'found in model.' % agent.name)
            monomer = None
        return monomer

    def get_create_observable(self, model, obs_name, obs_pattern):
        '''
        Try to create an observable with the given name and pattern or
        if it already exists in the model then return it.
        '''
        try:
            obs = pysb.Observable(obs_name, obs_pattern)
            model.add_component(obs)
        except pysb.ComponentDuplicateNameError:
            # An observable of this name already exists; reuse it.
            return model.observables[obs_name]
        return obs

    def get_obs_name(self, model, monomer):
        # TODO: how do we know that we are looking for an active species?
        return monomer.name + '_act'

    def simulate_model(self, model, agent_target):
        '''
        Simulate a model and return the observed dynamics of
        a given target agent.
        '''
        monomer = self.get_monomer(model, agent_target)
        # NOTE(review): get_monomer may return None, which would make the
        # next two lines raise — confirm callers guarantee the target agent
        # exists in the model.
        obs_name = self.get_obs_name(model, monomer)
        obs_pattern = monomer(act='active')
        self.get_create_observable(model, obs_name, obs_pattern)
        # TODO: where does the maximal time point come from?
        ts = numpy.linspace(0, 100, 100)
        try:
            solver = Solver(model, ts)
        except pysb.bng.GenerateNetworkError:
            warnings.warn('Could not generate network')
            return None
        solver.run()
        yobs_target = solver.yobs[obs_name]
        # Interactive plot of the target observable's trajectory.
        plt.ion()
        plt.plot(ts, yobs_target, label=obs_name)
        plt.show()
        plt.legend()
        return yobs_target

if __name__ == '__main__':
    pass
|
Add model simulation with target observation to MEA
|
Add model simulation with target observation to MEA
|
Python
|
bsd-2-clause
|
sorgerlab/bioagents,bgyori/bioagents
|
# MEA stands for model execution agent.
# Its task is to simulate models and interpret
# the simulation output.
import pysb
from pysb.integrate import Solver
class MEA:
def __init__(self):
pass
if __name__ == '__main__':
pass
Add model simulation with target observation to MEA
|
# MEA stands for model execution agent.
# Its task is to simulate models and interpret
# the simulation output.
import warnings
import numpy
import matplotlib.pyplot as plt
import pysb
from pysb.integrate import Solver
class MEA:
def __init__(self):
pass
def get_monomer(self, model, agent):
'''
Return the monomer from a model corresponding to a given
agent.
'''
try:
monomer = model.monomers[agent.name]
except KeyError:
warnings.warn('Monomer of interest %s could not be '
'found in model.' % agent.name)
monomer = None
return monomer
def get_create_observable(self, model, obs_name, obs_pattern):
'''
Try to create an observable with the given name and pattern or
if it already exists in the model then return it.
'''
try:
obs = pysb.Observable(obs_name, obs_pattern)
model.add_component(obs)
except pysb.ComponentDuplicateNameError:
return model.observables[obs_name]
return obs
def get_obs_name(self, model, monomer):
# TODO: how do we know that we are looking for an active species?
return monomer.name + '_act'
def simulate_model(self, model, agent_target):
'''
Simulate a model and return the observed dynamics of
a given target agent.
'''
monomer = self.get_monomer(model, agent_target)
obs_name = self.get_obs_name(model, monomer)
obs_pattern = monomer(act='active')
self.get_create_observable(model, obs_name, obs_pattern)
# TODO: where does the maximal time point come from?
ts = numpy.linspace(0, 100, 100)
try:
solver = Solver(model, ts)
except pysb.bng.GenerateNetworkError:
warnings.warn('Could not generate network')
return None
solver.run()
yobs_target = solver.yobs[obs_name]
plt.ion()
plt.plot(ts, yobs_target, label=obs_name)
plt.show()
plt.legend()
return yobs_target
if __name__ == '__main__':
pass
|
<commit_before># MEA stands for model execution agent.
# Its task is to simulate models and interpret
# the simulation output.
import pysb
from pysb.integrate import Solver
class MEA:
def __init__(self):
pass
if __name__ == '__main__':
pass
<commit_msg>Add model simulation with target observation to MEA<commit_after>
|
# MEA stands for model execution agent.
# Its task is to simulate models and interpret
# the simulation output.
import warnings
import numpy
import matplotlib.pyplot as plt
import pysb
from pysb.integrate import Solver
class MEA:
def __init__(self):
pass
def get_monomer(self, model, agent):
'''
Return the monomer from a model corresponding to a given
agent.
'''
try:
monomer = model.monomers[agent.name]
except KeyError:
warnings.warn('Monomer of interest %s could not be '
'found in model.' % agent.name)
monomer = None
return monomer
def get_create_observable(self, model, obs_name, obs_pattern):
'''
Try to create an observable with the given name and pattern or
if it already exists in the model then return it.
'''
try:
obs = pysb.Observable(obs_name, obs_pattern)
model.add_component(obs)
except pysb.ComponentDuplicateNameError:
return model.observables[obs_name]
return obs
def get_obs_name(self, model, monomer):
# TODO: how do we know that we are looking for an active species?
return monomer.name + '_act'
def simulate_model(self, model, agent_target):
'''
Simulate a model and return the observed dynamics of
a given target agent.
'''
monomer = self.get_monomer(model, agent_target)
obs_name = self.get_obs_name(model, monomer)
obs_pattern = monomer(act='active')
self.get_create_observable(model, obs_name, obs_pattern)
# TODO: where does the maximal time point come from?
ts = numpy.linspace(0, 100, 100)
try:
solver = Solver(model, ts)
except pysb.bng.GenerateNetworkError:
warnings.warn('Could not generate network')
return None
solver.run()
yobs_target = solver.yobs[obs_name]
plt.ion()
plt.plot(ts, yobs_target, label=obs_name)
plt.show()
plt.legend()
return yobs_target
if __name__ == '__main__':
pass
|
# MEA stands for model execution agent.
# Its task is to simulate models and interpret
# the simulation output.
import pysb
from pysb.integrate import Solver
class MEA:
def __init__(self):
pass
if __name__ == '__main__':
pass
Add model simulation with target observation to MEA# MEA stands for model execution agent.
# Its task is to simulate models and interpret
# the simulation output.
import warnings
import numpy
import matplotlib.pyplot as plt
import pysb
from pysb.integrate import Solver
class MEA:
def __init__(self):
pass
def get_monomer(self, model, agent):
'''
Return the monomer from a model corresponding to a given
agent.
'''
try:
monomer = model.monomers[agent.name]
except KeyError:
warnings.warn('Monomer of interest %s could not be '
'found in model.' % agent.name)
monomer = None
return monomer
def get_create_observable(self, model, obs_name, obs_pattern):
'''
Try to create an observable with the given name and pattern or
if it already exists in the model then return it.
'''
try:
obs = pysb.Observable(obs_name, obs_pattern)
model.add_component(obs)
except pysb.ComponentDuplicateNameError:
return model.observables[obs_name]
return obs
def get_obs_name(self, model, monomer):
# TODO: how do we know that we are looking for an active species?
return monomer.name + '_act'
def simulate_model(self, model, agent_target):
'''
Simulate a model and return the observed dynamics of
a given target agent.
'''
monomer = self.get_monomer(model, agent_target)
obs_name = self.get_obs_name(model, monomer)
obs_pattern = monomer(act='active')
self.get_create_observable(model, obs_name, obs_pattern)
# TODO: where does the maximal time point come from?
ts = numpy.linspace(0, 100, 100)
try:
solver = Solver(model, ts)
except pysb.bng.GenerateNetworkError:
warnings.warn('Could not generate network')
return None
solver.run()
yobs_target = solver.yobs[obs_name]
plt.ion()
plt.plot(ts, yobs_target, label=obs_name)
plt.show()
plt.legend()
return yobs_target
if __name__ == '__main__':
pass
|
<commit_before># MEA stands for model execution agent.
# Its task is to simulate models and interpret
# the simulation output.
import pysb
from pysb.integrate import Solver
class MEA:
def __init__(self):
pass
if __name__ == '__main__':
pass
<commit_msg>Add model simulation with target observation to MEA<commit_after># MEA stands for model execution agent.
# Its task is to simulate models and interpret
# the simulation output.
import warnings
import numpy
import matplotlib.pyplot as plt
import pysb
from pysb.integrate import Solver
class MEA:
def __init__(self):
pass
def get_monomer(self, model, agent):
'''
Return the monomer from a model corresponding to a given
agent.
'''
try:
monomer = model.monomers[agent.name]
except KeyError:
warnings.warn('Monomer of interest %s could not be '
'found in model.' % agent.name)
monomer = None
return monomer
def get_create_observable(self, model, obs_name, obs_pattern):
'''
Try to create an observable with the given name and pattern or
if it already exists in the model then return it.
'''
try:
obs = pysb.Observable(obs_name, obs_pattern)
model.add_component(obs)
except pysb.ComponentDuplicateNameError:
return model.observables[obs_name]
return obs
def get_obs_name(self, model, monomer):
# TODO: how do we know that we are looking for an active species?
return monomer.name + '_act'
def simulate_model(self, model, agent_target):
'''
Simulate a model and return the observed dynamics of
a given target agent.
'''
monomer = self.get_monomer(model, agent_target)
obs_name = self.get_obs_name(model, monomer)
obs_pattern = monomer(act='active')
self.get_create_observable(model, obs_name, obs_pattern)
# TODO: where does the maximal time point come from?
ts = numpy.linspace(0, 100, 100)
try:
solver = Solver(model, ts)
except pysb.bng.GenerateNetworkError:
warnings.warn('Could not generate network')
return None
solver.run()
yobs_target = solver.yobs[obs_name]
plt.ion()
plt.plot(ts, yobs_target, label=obs_name)
plt.show()
plt.legend()
return yobs_target
if __name__ == '__main__':
pass
|
4a3da350105314310cb0a44f11b50c9c6c6617ee
|
integration-test/1387-business-and-spur-routes.py
|
integration-test/1387-business-and-spur-routes.py
|
from . import FixtureTest

class BusinessAndSpurRoutes(FixtureTest):
    def test_first_capitol_dr_i70_business(self):
        """First Capitol Dr should carry the I-70 'Business' network."""
        self.load_fixtures([
            'https://www.openstreetmap.org/relation/1933234',
        ])
        # check that First Capitol Dr, part of the above relation, is given
        # a network that includes the "business" extension.
        self.assert_has_feature(
            16, 16294, 25097, 'roads',
            {'id': 12276055, 'shield_text': '70', 'network': 'US:I:Business'})
|
from . import FixtureTest

class BusinessAndSpurRoutes(FixtureTest):
    def _check_route_relation(
            self, rel_id, way_id, tile, shield_text, network):
        """Load the OSM relation fixture `rel_id` clipped to the z/x/y
        `tile`, and assert that member way `way_id` appears in the roads
        layer with the expected shield text and network value."""
        z, x, y = map(int, tile.split('/'))
        self.load_fixtures([
            'https://www.openstreetmap.org/relation/%d' % (rel_id,),
        ], clip=self.tile_bbox(z, x, y))
        # check that the member way of the relation is given a network that
        # includes the expected modifier (e.g. "Business", "Spur", "Loop").
        self.assert_has_feature(
            z, x, y, 'roads',
            {'id': way_id, 'shield_text': shield_text, 'network': network})

    def test_first_capitol_dr_i70_business(self):
        # First Capitol Dr, a member of the I-70 business route relation.
        self._check_route_relation(
            1933234, 12276055, '16/16294/25097', '70', 'US:I:Business')

    def test_business_loop(self):
        self._check_route_relation(
            1935116, 5807439, '16/12285/23316', '15', 'US:I:Business:Loop')

    def test_nj_essex(self):
        self._check_route_relation(
            945855, 221295008, '16/19267/24623', '672', 'US:NJ:Essex:Spur')

    def test_nj_cr(self):
        self._check_route_relation(
            941526, 60523740, '16/19192/24767', '526', 'US:NJ:CR:Spur')
|
Add more test cases for spur / business route modifiers.
|
Add more test cases for spur / business route modifiers.
|
Python
|
mit
|
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
|
from . import FixtureTest
class BusinessAndSpurRoutes(FixtureTest):
def test_first_capitol_dr_i70_business(self):
self.load_fixtures([
'https://www.openstreetmap.org/relation/1933234',
])
# check that First Capitol Dr, part of the above relation, is given
# a network that includes the "business" extension.
self.assert_has_feature(
16, 16294, 25097, 'roads',
{'id': 12276055, 'shield_text': '70', 'network': 'US:I:Business'})
Add more test cases for spur / business route modifiers.
|
from . import FixtureTest
class BusinessAndSpurRoutes(FixtureTest):
    """Checks that route-relation modifiers (Business, Loop, Spur) end
    up in the road feature's network value."""

    def _check_route_relation(
            self, rel_id, way_id, tile, shield_text, network):
        # Tile coordinates arrive encoded as a "z/x/y" string.
        zoom, col, row = (int(part) for part in tile.split('/'))
        url = 'https://www.openstreetmap.org/relation/{}'.format(rel_id)
        self.load_fixtures([url], clip=self.tile_bbox(zoom, col, row))
        # The member way should carry the expected shield text and a
        # network string that includes the modifier suffix.
        props = {'id': way_id, 'shield_text': shield_text,
                 'network': network}
        self.assert_has_feature(zoom, col, row, 'roads', props)

    def test_first_capitol_dr_i70_business(self):
        self._check_route_relation(
            rel_id=1933234, way_id=12276055, tile='16/16294/25097',
            shield_text='70', network='US:I:Business')

    def test_business_loop(self):
        self._check_route_relation(
            rel_id=1935116, way_id=5807439, tile='16/12285/23316',
            shield_text='15', network='US:I:Business:Loop')

    def test_nj_essex(self):
        self._check_route_relation(
            rel_id=945855, way_id=221295008, tile='16/19267/24623',
            shield_text='672', network='US:NJ:Essex:Spur')

    def test_nj_cr(self):
        self._check_route_relation(
            rel_id=941526, way_id=60523740, tile='16/19192/24767',
            shield_text='526', network='US:NJ:CR:Spur')
|
<commit_before>from . import FixtureTest
class BusinessAndSpurRoutes(FixtureTest):
def test_first_capitol_dr_i70_business(self):
self.load_fixtures([
'https://www.openstreetmap.org/relation/1933234',
])
# check that First Capitol Dr, part of the above relation, is given
# a network that includes the "business" extension.
self.assert_has_feature(
16, 16294, 25097, 'roads',
{'id': 12276055, 'shield_text': '70', 'network': 'US:I:Business'})
<commit_msg>Add more test cases for spur / business route modifiers.<commit_after>
|
from . import FixtureTest
class BusinessAndSpurRoutes(FixtureTest):
def _check_route_relation(
self, rel_id, way_id, tile, shield_text, network):
z, x, y = map(int, tile.split('/'))
self.load_fixtures([
'https://www.openstreetmap.org/relation/%d' % (rel_id,),
], clip=self.tile_bbox(z, x, y))
# check that First Capitol Dr, part of the above relation, is given
# a network that includes the "business" extension.
self.assert_has_feature(
z, x, y, 'roads',
{'id': way_id, 'shield_text': shield_text, 'network': network})
def test_first_capitol_dr_i70_business(self):
self._check_route_relation(
1933234, 12276055, '16/16294/25097', '70', 'US:I:Business')
def test_business_loop(self):
self._check_route_relation(
1935116, 5807439, '16/12285/23316', '15', 'US:I:Business:Loop')
def test_nj_essex(self):
self._check_route_relation(
945855, 221295008, '16/19267/24623', '672', 'US:NJ:Essex:Spur')
def test_nj_cr(self):
self._check_route_relation(
941526, 60523740, '16/19192/24767', '526', 'US:NJ:CR:Spur')
|
from . import FixtureTest
class BusinessAndSpurRoutes(FixtureTest):
def test_first_capitol_dr_i70_business(self):
self.load_fixtures([
'https://www.openstreetmap.org/relation/1933234',
])
# check that First Capitol Dr, part of the above relation, is given
# a network that includes the "business" extension.
self.assert_has_feature(
16, 16294, 25097, 'roads',
{'id': 12276055, 'shield_text': '70', 'network': 'US:I:Business'})
Add more test cases for spur / business route modifiers.from . import FixtureTest
class BusinessAndSpurRoutes(FixtureTest):
def _check_route_relation(
self, rel_id, way_id, tile, shield_text, network):
z, x, y = map(int, tile.split('/'))
self.load_fixtures([
'https://www.openstreetmap.org/relation/%d' % (rel_id,),
], clip=self.tile_bbox(z, x, y))
# check that First Capitol Dr, part of the above relation, is given
# a network that includes the "business" extension.
self.assert_has_feature(
z, x, y, 'roads',
{'id': way_id, 'shield_text': shield_text, 'network': network})
def test_first_capitol_dr_i70_business(self):
self._check_route_relation(
1933234, 12276055, '16/16294/25097', '70', 'US:I:Business')
def test_business_loop(self):
self._check_route_relation(
1935116, 5807439, '16/12285/23316', '15', 'US:I:Business:Loop')
def test_nj_essex(self):
self._check_route_relation(
945855, 221295008, '16/19267/24623', '672', 'US:NJ:Essex:Spur')
def test_nj_cr(self):
self._check_route_relation(
941526, 60523740, '16/19192/24767', '526', 'US:NJ:CR:Spur')
|
<commit_before>from . import FixtureTest
class BusinessAndSpurRoutes(FixtureTest):
def test_first_capitol_dr_i70_business(self):
self.load_fixtures([
'https://www.openstreetmap.org/relation/1933234',
])
# check that First Capitol Dr, part of the above relation, is given
# a network that includes the "business" extension.
self.assert_has_feature(
16, 16294, 25097, 'roads',
{'id': 12276055, 'shield_text': '70', 'network': 'US:I:Business'})
<commit_msg>Add more test cases for spur / business route modifiers.<commit_after>from . import FixtureTest
class BusinessAndSpurRoutes(FixtureTest):
def _check_route_relation(
self, rel_id, way_id, tile, shield_text, network):
z, x, y = map(int, tile.split('/'))
self.load_fixtures([
'https://www.openstreetmap.org/relation/%d' % (rel_id,),
], clip=self.tile_bbox(z, x, y))
# check that First Capitol Dr, part of the above relation, is given
# a network that includes the "business" extension.
self.assert_has_feature(
z, x, y, 'roads',
{'id': way_id, 'shield_text': shield_text, 'network': network})
def test_first_capitol_dr_i70_business(self):
self._check_route_relation(
1933234, 12276055, '16/16294/25097', '70', 'US:I:Business')
def test_business_loop(self):
self._check_route_relation(
1935116, 5807439, '16/12285/23316', '15', 'US:I:Business:Loop')
def test_nj_essex(self):
self._check_route_relation(
945855, 221295008, '16/19267/24623', '672', 'US:NJ:Essex:Spur')
def test_nj_cr(self):
self._check_route_relation(
941526, 60523740, '16/19192/24767', '526', 'US:NJ:CR:Spur')
|
f7d83caae3264d86420ce654f3669175c284a82d
|
ocradmin/core/decorators.py
|
ocradmin/core/decorators.py
|
# Miscellaneos functions relating the projects app
import os
from datetime import datetime
from django.http import HttpResponseRedirect
from django.utils.http import urlquote
from django.conf import settings
def project_required(func):
    """
    Decorator for views that require a project to be open in the
    session.

    Redirects to the project list — preserving the original URL in the
    ``next`` query parameter — when no project is active.
    """
    from functools import wraps

    @wraps(func)  # keep the wrapped view's name/docstring for URL resolvers
    def wrapper(request, *args, **kwargs):
        path = urlquote(request.get_full_path())
        if not request.session.get("project"):
            return HttpResponseRedirect("/projects/list/?next=%s" % path)
        return func(request, *args, **kwargs)
    return wrapper
def saves_files(func):
    """
    Decorator for views that write output files.

    Computes an output directory — a timestamped per-user temp dir for
    transient requests, otherwise the project's file area — and exposes
    it to the view as ``output_path`` on the request.
    """
    from functools import wraps

    @wraps(func)
    def wrapper(request, *args, **kwargs):
        # Node-library requests always write to temporary storage.
        temp = request.path.startswith("/nodelib/")
        project = request.session.get("project")
        if project is None:
            temp = True
        if temp:
            # Timestamped per-user scratch directory.
            output_path = os.path.join(
                settings.MEDIA_ROOT,
                settings.TEMP_PATH,
                request.user.username,
                datetime.now().strftime("%Y%m%d%H%M%S")
            )
        else:
            # Durable storage under the project's slug.
            output_path = os.path.join(
                settings.MEDIA_ROOT,
                settings.USER_FILES_PATH,
                project.slug
            )
        # NOTE(review): this sets the attribute on the request *class*,
        # so it is shared by every request of that class, not just this
        # one — presumably intentional, but verify under concurrency.
        request.__class__.output_path = output_path
        return func(request, *args, **kwargs)
    return wrapper
|
# Miscellaneos functions relating the projects app
import os
from datetime import datetime
from django.http import HttpResponseRedirect
from django.utils.http import urlquote
from django.conf import settings
def project_required(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
path = urlquote(request.get_full_path())
if not request.session.get("project"):
return HttpResponseRedirect("/projects/list/?next=%s" % path)
request.project = request.session.get("project")
return func(request, *args, **kwargs)
return wrapper
def saves_files(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
temp = request.path.startswith(("/nodelib/"))
project = request.session.get("project")
output_path = None
if project is None:
temp = True
if temp:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.TEMP_PATH,
request.user.username,
datetime.now().strftime("%Y%m%d%H%M%S")
)
else:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.USER_FILES_PATH,
project.slug
)
request.__class__.output_path = output_path
return func(request, *args, **kwargs)
return wrapper
|
Add project as request attribute to save a little boilerplate
|
Add project as request attribute to save a little boilerplate
|
Python
|
apache-2.0
|
vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium
|
# Miscellaneos functions relating the projects app
import os
from datetime import datetime
from django.http import HttpResponseRedirect
from django.utils.http import urlquote
from django.conf import settings
def project_required(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
path = urlquote(request.get_full_path())
if not request.session.get("project"):
return HttpResponseRedirect("/projects/list/?next=%s" % path)
return func(request, *args, **kwargs)
return wrapper
def saves_files(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
temp = request.path.startswith(("/nodelib/"))
project = request.session.get("project")
output_path = None
if project is None:
temp = True
if temp:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.TEMP_PATH,
request.user.username,
datetime.now().strftime("%Y%m%d%H%M%S")
)
else:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.USER_FILES_PATH,
project.slug
)
request.__class__.output_path = output_path
return func(request, *args, **kwargs)
return wrapper
Add project as request attribute to save a little boilerplate
|
# Miscellaneos functions relating the projects app
import os
from datetime import datetime
from django.http import HttpResponseRedirect
from django.utils.http import urlquote
from django.conf import settings
def project_required(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
path = urlquote(request.get_full_path())
if not request.session.get("project"):
return HttpResponseRedirect("/projects/list/?next=%s" % path)
request.project = request.session.get("project")
return func(request, *args, **kwargs)
return wrapper
def saves_files(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
temp = request.path.startswith(("/nodelib/"))
project = request.session.get("project")
output_path = None
if project is None:
temp = True
if temp:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.TEMP_PATH,
request.user.username,
datetime.now().strftime("%Y%m%d%H%M%S")
)
else:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.USER_FILES_PATH,
project.slug
)
request.__class__.output_path = output_path
return func(request, *args, **kwargs)
return wrapper
|
<commit_before># Miscellaneos functions relating the projects app
import os
from datetime import datetime
from django.http import HttpResponseRedirect
from django.utils.http import urlquote
from django.conf import settings
def project_required(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
path = urlquote(request.get_full_path())
if not request.session.get("project"):
return HttpResponseRedirect("/projects/list/?next=%s" % path)
return func(request, *args, **kwargs)
return wrapper
def saves_files(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
temp = request.path.startswith(("/nodelib/"))
project = request.session.get("project")
output_path = None
if project is None:
temp = True
if temp:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.TEMP_PATH,
request.user.username,
datetime.now().strftime("%Y%m%d%H%M%S")
)
else:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.USER_FILES_PATH,
project.slug
)
request.__class__.output_path = output_path
return func(request, *args, **kwargs)
return wrapper
<commit_msg>Add project as request attribute to save a little boilerplate<commit_after>
|
# Miscellaneos functions relating the projects app
import os
from datetime import datetime
from django.http import HttpResponseRedirect
from django.utils.http import urlquote
from django.conf import settings
def project_required(func):
    """
    Decorator for views that require a project to be open in the
    session.

    Redirects to the project list — preserving the original URL in the
    ``next`` query parameter — when no project is active; otherwise
    attaches the project to the request as ``request.project``.
    """
    from functools import wraps

    @wraps(func)  # keep the wrapped view's name/docstring for URL resolvers
    def wrapper(request, *args, **kwargs):
        path = urlquote(request.get_full_path())
        # Single session lookup instead of one per check/assignment.
        project = request.session.get("project")
        if not project:
            return HttpResponseRedirect("/projects/list/?next=%s" % path)
        request.project = project
        return func(request, *args, **kwargs)
    return wrapper
def saves_files(func):
    """
    Decorator for views that write output files.

    Computes an output directory — a timestamped per-user temp dir for
    transient requests, otherwise the project's file area — and exposes
    it to the view as ``output_path`` on the request.
    """
    from functools import wraps

    @wraps(func)
    def wrapper(request, *args, **kwargs):
        # Node-library requests always write to temporary storage.
        temp = request.path.startswith("/nodelib/")
        project = request.session.get("project")
        if project is None:
            temp = True
        if temp:
            # Timestamped per-user scratch directory.
            output_path = os.path.join(
                settings.MEDIA_ROOT,
                settings.TEMP_PATH,
                request.user.username,
                datetime.now().strftime("%Y%m%d%H%M%S")
            )
        else:
            # Durable storage under the project's slug.
            output_path = os.path.join(
                settings.MEDIA_ROOT,
                settings.USER_FILES_PATH,
                project.slug
            )
        # NOTE(review): this sets the attribute on the request *class*,
        # so it is shared by every request of that class, not just this
        # one — presumably intentional, but verify under concurrency.
        request.__class__.output_path = output_path
        return func(request, *args, **kwargs)
    return wrapper
|
# Miscellaneos functions relating the projects app
import os
from datetime import datetime
from django.http import HttpResponseRedirect
from django.utils.http import urlquote
from django.conf import settings
def project_required(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
path = urlquote(request.get_full_path())
if not request.session.get("project"):
return HttpResponseRedirect("/projects/list/?next=%s" % path)
return func(request, *args, **kwargs)
return wrapper
def saves_files(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
temp = request.path.startswith(("/nodelib/"))
project = request.session.get("project")
output_path = None
if project is None:
temp = True
if temp:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.TEMP_PATH,
request.user.username,
datetime.now().strftime("%Y%m%d%H%M%S")
)
else:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.USER_FILES_PATH,
project.slug
)
request.__class__.output_path = output_path
return func(request, *args, **kwargs)
return wrapper
Add project as request attribute to save a little boilerplate# Miscellaneos functions relating the projects app
import os
from datetime import datetime
from django.http import HttpResponseRedirect
from django.utils.http import urlquote
from django.conf import settings
def project_required(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
path = urlquote(request.get_full_path())
if not request.session.get("project"):
return HttpResponseRedirect("/projects/list/?next=%s" % path)
request.project = request.session.get("project")
return func(request, *args, **kwargs)
return wrapper
def saves_files(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
temp = request.path.startswith(("/nodelib/"))
project = request.session.get("project")
output_path = None
if project is None:
temp = True
if temp:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.TEMP_PATH,
request.user.username,
datetime.now().strftime("%Y%m%d%H%M%S")
)
else:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.USER_FILES_PATH,
project.slug
)
request.__class__.output_path = output_path
return func(request, *args, **kwargs)
return wrapper
|
<commit_before># Miscellaneos functions relating the projects app
import os
from datetime import datetime
from django.http import HttpResponseRedirect
from django.utils.http import urlquote
from django.conf import settings
def project_required(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
path = urlquote(request.get_full_path())
if not request.session.get("project"):
return HttpResponseRedirect("/projects/list/?next=%s" % path)
return func(request, *args, **kwargs)
return wrapper
def saves_files(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
temp = request.path.startswith(("/nodelib/"))
project = request.session.get("project")
output_path = None
if project is None:
temp = True
if temp:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.TEMP_PATH,
request.user.username,
datetime.now().strftime("%Y%m%d%H%M%S")
)
else:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.USER_FILES_PATH,
project.slug
)
request.__class__.output_path = output_path
return func(request, *args, **kwargs)
return wrapper
<commit_msg>Add project as request attribute to save a little boilerplate<commit_after># Miscellaneos functions relating the projects app
import os
from datetime import datetime
from django.http import HttpResponseRedirect
from django.utils.http import urlquote
from django.conf import settings
def project_required(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
path = urlquote(request.get_full_path())
if not request.session.get("project"):
return HttpResponseRedirect("/projects/list/?next=%s" % path)
request.project = request.session.get("project")
return func(request, *args, **kwargs)
return wrapper
def saves_files(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
temp = request.path.startswith(("/nodelib/"))
project = request.session.get("project")
output_path = None
if project is None:
temp = True
if temp:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.TEMP_PATH,
request.user.username,
datetime.now().strftime("%Y%m%d%H%M%S")
)
else:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.USER_FILES_PATH,
project.slug
)
request.__class__.output_path = output_path
return func(request, *args, **kwargs)
return wrapper
|
abfe1e291d0c9963cb91d0e95996c8fe72167107
|
src/Figures/FigureHelpers.py
|
src/Figures/FigureHelpers.py
|
"""
Created on Jun 12, 2013
@author: agross
"""
import matplotlib.pyplot as plt
def init_ax(ax, figsize=None):
"""
Helper to initialize an axis. If the axis is not specified (eg is None),
this fill create a new figure.
ax: matplotlib axis object, or None
figsize: size of figure if we have to create a new axis.
"""
if ax is None:
if figsize is None:
fig, ax = plt.subplots(1, 1)
else:
fig, ax = plt.subplots(1, 1, figsize=figsize)
else:
fig = plt.gcf()
return fig, ax
def prettify_ax(ax):
ax.grid(b=False)
ax.yaxis.set_ticks_position('left')
ax.xaxis.set_ticks_position('bottom')
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
def latex_float(f):
"""
http://stackoverflow.com/questions/13490292/
format-number-using-latex-notation-in-python
"""
float_str = "{0:.2g}".format(f)
if "e" in float_str:
base, exponent = float_str.split("e")
return r"{0} \times 10^{{{1}}}".format(base, int(exponent))
else:
return float_str
|
"""
Created on Jun 12, 2013
@author: agross
"""
import matplotlib.pyplot as plt
def init_ax(ax, figsize=None):
"""
Helper to initialize an axis. If the axis is not specified (eg is None),
this fill create a new figure.
ax: matplotlib axis object, or None
figsize: size of figure if we have to create a new axis.
"""
if ax is None:
if figsize is None:
fig, ax = plt.subplots(1, 1)
else:
fig, ax = plt.subplots(1, 1, figsize=figsize)
else:
fig = plt.gcf()
return fig, ax
def prettify_ax(ax, top=False):
ax.grid(b=False)
ax.yaxis.set_ticks_position('left')
ax.spines['right'].set_visible(False)
if top:
ax.spines['bottom'].set_visible(False)
ax.xaxis.set_ticks_position('top')
else:
ax.spines['top'].set_visible(False)
ax.xaxis.set_ticks_position('bottom')
def latex_float(f):
"""
http://stackoverflow.com/questions/13490292/
format-number-using-latex-notation-in-python
"""
float_str = "{0:.2g}".format(f)
if "e" in float_str:
base, exponent = float_str.split("e")
return r"{0} \times 10^{{{1}}}".format(base, int(exponent))
else:
return float_str
|
Add non-default option to despline top of axis.
|
Add non-default option to despline top of axis.
|
Python
|
mit
|
theandygross/Figures
|
"""
Created on Jun 12, 2013
@author: agross
"""
import matplotlib.pyplot as plt
def init_ax(ax, figsize=None):
    """
    Return a ``(figure, axis)`` pair, creating a fresh figure when
    *ax* is None.

    ax: matplotlib axis object, or None to create a new one.
    figsize: size of the figure when a new one must be created.
    """
    if ax is not None:
        # Caller supplied an axis; pair it with the current figure.
        return plt.gcf(), ax
    if figsize is None:
        return plt.subplots(1, 1)
    return plt.subplots(1, 1, figsize=figsize)
def prettify_ax(ax):
    """Strip the grid and the top/right spines from *ax*, keeping
    ticks on the left and bottom only."""
    ax.grid(b=False)
    ax.spines['top'].set_visible(False)
    ax.spines['right'].set_visible(False)
    ax.yaxis.set_ticks_position('left')
    ax.xaxis.set_ticks_position('bottom')
def latex_float(f):
    """
    Format *f* to two significant figures as a LaTeX-ready string,
    rendering any exponent as ``\\times 10^{e}``.

    http://stackoverflow.com/questions/13490292/
    format-number-using-latex-notation-in-python
    """
    text = "{0:.2g}".format(f)
    if "e" not in text:
        return text
    base, exponent = text.split("e")
    return r"{0} \times 10^{{{1}}}".format(base, int(exponent))
Add non-default option to despline top of axis.
|
"""
Created on Jun 12, 2013
@author: agross
"""
import matplotlib.pyplot as plt
def init_ax(ax, figsize=None):
    """
    Return a ``(figure, axis)`` pair, creating a fresh figure when
    *ax* is None.

    ax: matplotlib axis object, or None to create a new one.
    figsize: size of the figure when a new one must be created.
    """
    if ax is not None:
        # Caller supplied an axis; pair it with the current figure.
        return plt.gcf(), ax
    if figsize is None:
        return plt.subplots(1, 1)
    return plt.subplots(1, 1, figsize=figsize)
def prettify_ax(ax, top=False):
    """
    Strip the grid and despine *ax*, keeping y-ticks on the left and
    x-ticks on the bottom (or on the top when *top* is True).
    """
    ax.grid(b=False)
    ax.yaxis.set_ticks_position('left')
    ax.spines['right'].set_visible(False)
    # Hide whichever horizontal spine the ticks are moving away from.
    hidden_spine, tick_side = ('bottom', 'top') if top else ('top', 'bottom')
    ax.spines[hidden_spine].set_visible(False)
    ax.xaxis.set_ticks_position(tick_side)
def latex_float(f):
    """
    Format *f* to two significant figures as a LaTeX-ready string,
    rendering any exponent as ``\\times 10^{e}``.

    http://stackoverflow.com/questions/13490292/
    format-number-using-latex-notation-in-python
    """
    text = "{0:.2g}".format(f)
    if "e" not in text:
        return text
    base, exponent = text.split("e")
    return r"{0} \times 10^{{{1}}}".format(base, int(exponent))
|
<commit_before>"""
Created on Jun 12, 2013
@author: agross
"""
import matplotlib.pyplot as plt
def init_ax(ax, figsize=None):
"""
Helper to initialize an axis. If the axis is not specified (eg is None),
this fill create a new figure.
ax: matplotlib axis object, or None
figsize: size of figure if we have to create a new axis.
"""
if ax is None:
if figsize is None:
fig, ax = plt.subplots(1, 1)
else:
fig, ax = plt.subplots(1, 1, figsize=figsize)
else:
fig = plt.gcf()
return fig, ax
def prettify_ax(ax):
ax.grid(b=False)
ax.yaxis.set_ticks_position('left')
ax.xaxis.set_ticks_position('bottom')
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
def latex_float(f):
"""
http://stackoverflow.com/questions/13490292/
format-number-using-latex-notation-in-python
"""
float_str = "{0:.2g}".format(f)
if "e" in float_str:
base, exponent = float_str.split("e")
return r"{0} \times 10^{{{1}}}".format(base, int(exponent))
else:
return float_str
<commit_msg>Add non-default option to despline top of axis.<commit_after>
|
"""
Created on Jun 12, 2013
@author: agross
"""
import matplotlib.pyplot as plt
def init_ax(ax, figsize=None):
"""
Helper to initialize an axis. If the axis is not specified (eg is None),
this fill create a new figure.
ax: matplotlib axis object, or None
figsize: size of figure if we have to create a new axis.
"""
if ax is None:
if figsize is None:
fig, ax = plt.subplots(1, 1)
else:
fig, ax = plt.subplots(1, 1, figsize=figsize)
else:
fig = plt.gcf()
return fig, ax
def prettify_ax(ax, top=False):
ax.grid(b=False)
ax.yaxis.set_ticks_position('left')
ax.spines['right'].set_visible(False)
if top:
ax.spines['bottom'].set_visible(False)
ax.xaxis.set_ticks_position('top')
else:
ax.spines['top'].set_visible(False)
ax.xaxis.set_ticks_position('bottom')
def latex_float(f):
"""
http://stackoverflow.com/questions/13490292/
format-number-using-latex-notation-in-python
"""
float_str = "{0:.2g}".format(f)
if "e" in float_str:
base, exponent = float_str.split("e")
return r"{0} \times 10^{{{1}}}".format(base, int(exponent))
else:
return float_str
|
"""
Created on Jun 12, 2013
@author: agross
"""
import matplotlib.pyplot as plt
def init_ax(ax, figsize=None):
"""
Helper to initialize an axis. If the axis is not specified (eg is None),
this fill create a new figure.
ax: matplotlib axis object, or None
figsize: size of figure if we have to create a new axis.
"""
if ax is None:
if figsize is None:
fig, ax = plt.subplots(1, 1)
else:
fig, ax = plt.subplots(1, 1, figsize=figsize)
else:
fig = plt.gcf()
return fig, ax
def prettify_ax(ax):
ax.grid(b=False)
ax.yaxis.set_ticks_position('left')
ax.xaxis.set_ticks_position('bottom')
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
def latex_float(f):
"""
http://stackoverflow.com/questions/13490292/
format-number-using-latex-notation-in-python
"""
float_str = "{0:.2g}".format(f)
if "e" in float_str:
base, exponent = float_str.split("e")
return r"{0} \times 10^{{{1}}}".format(base, int(exponent))
else:
return float_str
Add non-default option to despline top of axis."""
Created on Jun 12, 2013
@author: agross
"""
import matplotlib.pyplot as plt
def init_ax(ax, figsize=None):
"""
Helper to initialize an axis. If the axis is not specified (eg is None),
this fill create a new figure.
ax: matplotlib axis object, or None
figsize: size of figure if we have to create a new axis.
"""
if ax is None:
if figsize is None:
fig, ax = plt.subplots(1, 1)
else:
fig, ax = plt.subplots(1, 1, figsize=figsize)
else:
fig = plt.gcf()
return fig, ax
def prettify_ax(ax, top=False):
ax.grid(b=False)
ax.yaxis.set_ticks_position('left')
ax.spines['right'].set_visible(False)
if top:
ax.spines['bottom'].set_visible(False)
ax.xaxis.set_ticks_position('top')
else:
ax.spines['top'].set_visible(False)
ax.xaxis.set_ticks_position('bottom')
def latex_float(f):
"""
http://stackoverflow.com/questions/13490292/
format-number-using-latex-notation-in-python
"""
float_str = "{0:.2g}".format(f)
if "e" in float_str:
base, exponent = float_str.split("e")
return r"{0} \times 10^{{{1}}}".format(base, int(exponent))
else:
return float_str
|
<commit_before>"""
Created on Jun 12, 2013
@author: agross
"""
import matplotlib.pyplot as plt
def init_ax(ax, figsize=None):
"""
Helper to initialize an axis. If the axis is not specified (eg is None),
this fill create a new figure.
ax: matplotlib axis object, or None
figsize: size of figure if we have to create a new axis.
"""
if ax is None:
if figsize is None:
fig, ax = plt.subplots(1, 1)
else:
fig, ax = plt.subplots(1, 1, figsize=figsize)
else:
fig = plt.gcf()
return fig, ax
def prettify_ax(ax):
ax.grid(b=False)
ax.yaxis.set_ticks_position('left')
ax.xaxis.set_ticks_position('bottom')
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
def latex_float(f):
"""
http://stackoverflow.com/questions/13490292/
format-number-using-latex-notation-in-python
"""
float_str = "{0:.2g}".format(f)
if "e" in float_str:
base, exponent = float_str.split("e")
return r"{0} \times 10^{{{1}}}".format(base, int(exponent))
else:
return float_str
<commit_msg>Add non-default option to despline top of axis.<commit_after>"""
Created on Jun 12, 2013
@author: agross
"""
import matplotlib.pyplot as plt
def init_ax(ax, figsize=None):
"""
Helper to initialize an axis. If the axis is not specified (eg is None),
this fill create a new figure.
ax: matplotlib axis object, or None
figsize: size of figure if we have to create a new axis.
"""
if ax is None:
if figsize is None:
fig, ax = plt.subplots(1, 1)
else:
fig, ax = plt.subplots(1, 1, figsize=figsize)
else:
fig = plt.gcf()
return fig, ax
def prettify_ax(ax, top=False):
ax.grid(b=False)
ax.yaxis.set_ticks_position('left')
ax.spines['right'].set_visible(False)
if top:
ax.spines['bottom'].set_visible(False)
ax.xaxis.set_ticks_position('top')
else:
ax.spines['top'].set_visible(False)
ax.xaxis.set_ticks_position('bottom')
def latex_float(f):
"""
http://stackoverflow.com/questions/13490292/
format-number-using-latex-notation-in-python
"""
float_str = "{0:.2g}".format(f)
if "e" in float_str:
base, exponent = float_str.split("e")
return r"{0} \times 10^{{{1}}}".format(base, int(exponent))
else:
return float_str
|
203642a879fb934b99e4d55025eede171390a4d4
|
mopidy_dleyna/__init__.py
|
mopidy_dleyna/__init__.py
|
import pathlib
from mopidy import config, exceptions, ext
__version__ = "1.2.2"
class Extension(ext.Extension):
    """Mopidy extension wiring up the dLeyna (DLNA/UPnP) backend."""

    dist_name = "Mopidy-dLeyna"
    ext_name = "dleyna"
    version = __version__

    def get_default_config(self):
        # The default configuration file ships alongside this module.
        return config.read(pathlib.Path(__file__).parent / "ext.conf")

    def get_config_schema(self):
        schema = super().get_config_schema()
        for limit_key in ("upnp_browse_limit",
                          "upnp_lookup_limit",
                          "upnp_search_limit"):
            schema[limit_key] = config.Integer(minimum=0)
        schema["dbus_start_session"] = config.String()
        return schema

    def setup(self, registry):
        from .backend import dLeynaBackend
        registry.add("backend", dLeynaBackend)

    def validate_environment(self):
        # dbus-python cannot always be installed via pip, so verify it
        # is importable before the backend starts.
        try:
            import dbus  # noqa
        except ImportError:
            raise exceptions.ExtensionError("Cannot import dbus")
|
import pathlib
import pkg_resources
from mopidy import config, exceptions, ext
__version__ = pkg_resources.get_distribution("Mopidy-dLeyna").version
class Extension(ext.Extension):
dist_name = "Mopidy-dLeyna"
ext_name = "dleyna"
version = __version__
def get_default_config(self):
return config.read(pathlib.Path(__file__).parent / "ext.conf")
def get_config_schema(self):
schema = super().get_config_schema()
schema["upnp_browse_limit"] = config.Integer(minimum=0)
schema["upnp_lookup_limit"] = config.Integer(minimum=0)
schema["upnp_search_limit"] = config.Integer(minimum=0)
schema["dbus_start_session"] = config.String()
return schema
def setup(self, registry):
from .backend import dLeynaBackend
registry.add("backend", dLeynaBackend)
def validate_environment(self):
try:
import dbus # noqa
except ImportError:
raise exceptions.ExtensionError("Cannot import dbus")
|
Use pkg_resources to read version
|
Use pkg_resources to read version
|
Python
|
apache-2.0
|
tkem/mopidy-dleyna
|
import pathlib
from mopidy import config, exceptions, ext
__version__ = "1.2.2"
class Extension(ext.Extension):
dist_name = "Mopidy-dLeyna"
ext_name = "dleyna"
version = __version__
def get_default_config(self):
return config.read(pathlib.Path(__file__).parent / "ext.conf")
def get_config_schema(self):
schema = super().get_config_schema()
schema["upnp_browse_limit"] = config.Integer(minimum=0)
schema["upnp_lookup_limit"] = config.Integer(minimum=0)
schema["upnp_search_limit"] = config.Integer(minimum=0)
schema["dbus_start_session"] = config.String()
return schema
def setup(self, registry):
from .backend import dLeynaBackend
registry.add("backend", dLeynaBackend)
def validate_environment(self):
try:
import dbus # noqa
except ImportError:
raise exceptions.ExtensionError("Cannot import dbus")
Use pkg_resources to read version
|
import pathlib
import pkg_resources
from mopidy import config, exceptions, ext
__version__ = pkg_resources.get_distribution("Mopidy-dLeyna").version
class Extension(ext.Extension):
dist_name = "Mopidy-dLeyna"
ext_name = "dleyna"
version = __version__
def get_default_config(self):
return config.read(pathlib.Path(__file__).parent / "ext.conf")
def get_config_schema(self):
schema = super().get_config_schema()
schema["upnp_browse_limit"] = config.Integer(minimum=0)
schema["upnp_lookup_limit"] = config.Integer(minimum=0)
schema["upnp_search_limit"] = config.Integer(minimum=0)
schema["dbus_start_session"] = config.String()
return schema
def setup(self, registry):
from .backend import dLeynaBackend
registry.add("backend", dLeynaBackend)
def validate_environment(self):
try:
import dbus # noqa
except ImportError:
raise exceptions.ExtensionError("Cannot import dbus")
|
<commit_before>import pathlib
from mopidy import config, exceptions, ext
__version__ = "1.2.2"
class Extension(ext.Extension):
dist_name = "Mopidy-dLeyna"
ext_name = "dleyna"
version = __version__
def get_default_config(self):
return config.read(pathlib.Path(__file__).parent / "ext.conf")
def get_config_schema(self):
schema = super().get_config_schema()
schema["upnp_browse_limit"] = config.Integer(minimum=0)
schema["upnp_lookup_limit"] = config.Integer(minimum=0)
schema["upnp_search_limit"] = config.Integer(minimum=0)
schema["dbus_start_session"] = config.String()
return schema
def setup(self, registry):
from .backend import dLeynaBackend
registry.add("backend", dLeynaBackend)
def validate_environment(self):
try:
import dbus # noqa
except ImportError:
raise exceptions.ExtensionError("Cannot import dbus")
<commit_msg>Use pkg_resources to read version<commit_after>
|
import pathlib
import pkg_resources
from mopidy import config, exceptions, ext
__version__ = pkg_resources.get_distribution("Mopidy-dLeyna").version
class Extension(ext.Extension):
dist_name = "Mopidy-dLeyna"
ext_name = "dleyna"
version = __version__
def get_default_config(self):
return config.read(pathlib.Path(__file__).parent / "ext.conf")
def get_config_schema(self):
schema = super().get_config_schema()
schema["upnp_browse_limit"] = config.Integer(minimum=0)
schema["upnp_lookup_limit"] = config.Integer(minimum=0)
schema["upnp_search_limit"] = config.Integer(minimum=0)
schema["dbus_start_session"] = config.String()
return schema
def setup(self, registry):
from .backend import dLeynaBackend
registry.add("backend", dLeynaBackend)
def validate_environment(self):
try:
import dbus # noqa
except ImportError:
raise exceptions.ExtensionError("Cannot import dbus")
|
import pathlib
from mopidy import config, exceptions, ext
__version__ = "1.2.2"
class Extension(ext.Extension):
dist_name = "Mopidy-dLeyna"
ext_name = "dleyna"
version = __version__
def get_default_config(self):
return config.read(pathlib.Path(__file__).parent / "ext.conf")
def get_config_schema(self):
schema = super().get_config_schema()
schema["upnp_browse_limit"] = config.Integer(minimum=0)
schema["upnp_lookup_limit"] = config.Integer(minimum=0)
schema["upnp_search_limit"] = config.Integer(minimum=0)
schema["dbus_start_session"] = config.String()
return schema
def setup(self, registry):
from .backend import dLeynaBackend
registry.add("backend", dLeynaBackend)
def validate_environment(self):
try:
import dbus # noqa
except ImportError:
raise exceptions.ExtensionError("Cannot import dbus")
Use pkg_resources to read versionimport pathlib
import pkg_resources
from mopidy import config, exceptions, ext
__version__ = pkg_resources.get_distribution("Mopidy-dLeyna").version
class Extension(ext.Extension):
dist_name = "Mopidy-dLeyna"
ext_name = "dleyna"
version = __version__
def get_default_config(self):
return config.read(pathlib.Path(__file__).parent / "ext.conf")
def get_config_schema(self):
schema = super().get_config_schema()
schema["upnp_browse_limit"] = config.Integer(minimum=0)
schema["upnp_lookup_limit"] = config.Integer(minimum=0)
schema["upnp_search_limit"] = config.Integer(minimum=0)
schema["dbus_start_session"] = config.String()
return schema
def setup(self, registry):
from .backend import dLeynaBackend
registry.add("backend", dLeynaBackend)
def validate_environment(self):
try:
import dbus # noqa
except ImportError:
raise exceptions.ExtensionError("Cannot import dbus")
|
<commit_before>import pathlib
from mopidy import config, exceptions, ext
__version__ = "1.2.2"
class Extension(ext.Extension):
dist_name = "Mopidy-dLeyna"
ext_name = "dleyna"
version = __version__
def get_default_config(self):
return config.read(pathlib.Path(__file__).parent / "ext.conf")
def get_config_schema(self):
schema = super().get_config_schema()
schema["upnp_browse_limit"] = config.Integer(minimum=0)
schema["upnp_lookup_limit"] = config.Integer(minimum=0)
schema["upnp_search_limit"] = config.Integer(minimum=0)
schema["dbus_start_session"] = config.String()
return schema
def setup(self, registry):
from .backend import dLeynaBackend
registry.add("backend", dLeynaBackend)
def validate_environment(self):
try:
import dbus # noqa
except ImportError:
raise exceptions.ExtensionError("Cannot import dbus")
<commit_msg>Use pkg_resources to read version<commit_after>import pathlib
import pkg_resources
from mopidy import config, exceptions, ext
__version__ = pkg_resources.get_distribution("Mopidy-dLeyna").version
class Extension(ext.Extension):
dist_name = "Mopidy-dLeyna"
ext_name = "dleyna"
version = __version__
def get_default_config(self):
return config.read(pathlib.Path(__file__).parent / "ext.conf")
def get_config_schema(self):
schema = super().get_config_schema()
schema["upnp_browse_limit"] = config.Integer(minimum=0)
schema["upnp_lookup_limit"] = config.Integer(minimum=0)
schema["upnp_search_limit"] = config.Integer(minimum=0)
schema["dbus_start_session"] = config.String()
return schema
def setup(self, registry):
from .backend import dLeynaBackend
registry.add("backend", dLeynaBackend)
def validate_environment(self):
try:
import dbus # noqa
except ImportError:
raise exceptions.ExtensionError("Cannot import dbus")
|
49194edaff3766cc3853c6b561f5d20571492f74
|
asp/__init__.py
|
asp/__init__.py
|
# From http://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package
# Author: James Antill (http://stackoverflow.com/users/10314/james-antill)
__version__ = '0.1.2.1'
__version_info__ = tuple([ int(num) for num in __version__.split('.')])
class SpecializationError(Exception):
"""
Exception that caused specialization not to occur.
Attributes:
msg -- the message/explanation to the user
phase -- which phase of specialization caused the error
"""
def __init__(self, msg, phase="Unknown phase"):
self.msg = msg
|
# From http://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package
# Author: James Antill (http://stackoverflow.com/users/10314/james-antill)
__version__ = '0.1.2.2'
__version_info__ = tuple([ int(num) for num in __version__.split('.')])
class SpecializationError(Exception):
"""
Exception that caused specialization not to occur.
Attributes:
msg -- the message/explanation to the user
phase -- which phase of specialization caused the error
"""
def __init__(self, msg, phase="Unknown phase"):
self.msg = msg
|
Increment version number because of config file interface change.
|
Increment version number because of config file interface change.
|
Python
|
bsd-3-clause
|
pbirsinger/aspNew,pbirsinger/aspNew,shoaibkamil/asp,richardxia/asp-multilevel-debug,pbirsinger/aspNew,shoaibkamil/asp,richardxia/asp-multilevel-debug,shoaibkamil/asp,richardxia/asp-multilevel-debug
|
# From http://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package
# Author: James Antill (http://stackoverflow.com/users/10314/james-antill)
__version__ = '0.1.2.1'
__version_info__ = tuple([ int(num) for num in __version__.split('.')])
class SpecializationError(Exception):
"""
Exception that caused specialization not to occur.
Attributes:
msg -- the message/explanation to the user
phase -- which phase of specialization caused the error
"""
def __init__(self, msg, phase="Unknown phase"):
self.msg = msg
Increment version number because of config file interface change.
|
# From http://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package
# Author: James Antill (http://stackoverflow.com/users/10314/james-antill)
__version__ = '0.1.2.2'
__version_info__ = tuple([ int(num) for num in __version__.split('.')])
class SpecializationError(Exception):
"""
Exception that caused specialization not to occur.
Attributes:
msg -- the message/explanation to the user
phase -- which phase of specialization caused the error
"""
def __init__(self, msg, phase="Unknown phase"):
self.msg = msg
|
<commit_before># From http://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package
# Author: James Antill (http://stackoverflow.com/users/10314/james-antill)
__version__ = '0.1.2.1'
__version_info__ = tuple([ int(num) for num in __version__.split('.')])
class SpecializationError(Exception):
"""
Exception that caused specialization not to occur.
Attributes:
msg -- the message/explanation to the user
phase -- which phase of specialization caused the error
"""
def __init__(self, msg, phase="Unknown phase"):
self.msg = msg
<commit_msg>Increment version number because of config file interface change.<commit_after>
|
# From http://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package
# Author: James Antill (http://stackoverflow.com/users/10314/james-antill)
__version__ = '0.1.2.2'
__version_info__ = tuple([ int(num) for num in __version__.split('.')])
class SpecializationError(Exception):
"""
Exception that caused specialization not to occur.
Attributes:
msg -- the message/explanation to the user
phase -- which phase of specialization caused the error
"""
def __init__(self, msg, phase="Unknown phase"):
self.msg = msg
|
# From http://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package
# Author: James Antill (http://stackoverflow.com/users/10314/james-antill)
__version__ = '0.1.2.1'
__version_info__ = tuple([ int(num) for num in __version__.split('.')])
class SpecializationError(Exception):
"""
Exception that caused specialization not to occur.
Attributes:
msg -- the message/explanation to the user
phase -- which phase of specialization caused the error
"""
def __init__(self, msg, phase="Unknown phase"):
self.msg = msg
Increment version number because of config file interface change.# From http://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package
# Author: James Antill (http://stackoverflow.com/users/10314/james-antill)
__version__ = '0.1.2.2'
__version_info__ = tuple([ int(num) for num in __version__.split('.')])
class SpecializationError(Exception):
"""
Exception that caused specialization not to occur.
Attributes:
msg -- the message/explanation to the user
phase -- which phase of specialization caused the error
"""
def __init__(self, msg, phase="Unknown phase"):
self.msg = msg
|
<commit_before># From http://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package
# Author: James Antill (http://stackoverflow.com/users/10314/james-antill)
__version__ = '0.1.2.1'
__version_info__ = tuple([ int(num) for num in __version__.split('.')])
class SpecializationError(Exception):
"""
Exception that caused specialization not to occur.
Attributes:
msg -- the message/explanation to the user
phase -- which phase of specialization caused the error
"""
def __init__(self, msg, phase="Unknown phase"):
self.msg = msg
<commit_msg>Increment version number because of config file interface change.<commit_after># From http://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package
# Author: James Antill (http://stackoverflow.com/users/10314/james-antill)
__version__ = '0.1.2.2'
__version_info__ = tuple([ int(num) for num in __version__.split('.')])
class SpecializationError(Exception):
"""
Exception that caused specialization not to occur.
Attributes:
msg -- the message/explanation to the user
phase -- which phase of specialization caused the error
"""
def __init__(self, msg, phase="Unknown phase"):
self.msg = msg
|
a044d33c1e29a1d283baa6bd24b1c63676b061df
|
install-qt.py
|
install-qt.py
|
'''
Simple script to install PyQt or PySide based on PYTEST_QT_FORCE_PYQT
and python version. Meant to be used in travis-ci.
'''
import os
import sys
def run(cmd):
print(cmd)
r = os.system(cmd)
if r != 0:
sys.exit('command %s failed with status %s' % (cmd, r))
py3k = sys.version_info[0] == 3
if os.environ['PYTEST_QT_API'] in ('pyqt4', 'pyqt5'):
pyqt_ver = os.environ['PYTEST_QT_API'][-1]
if py3k:
run('sudo apt-get install -qq python3-pyqt%s' % pyqt_ver)
else:
run('sudo apt-get install -qq python-qt%s' % pyqt_ver)
else:
if py3k:
run('sudo apt-get install -qq python3-pyside')
else:
run('sudo apt-get install -qq python-pyside')
|
'''
Simple script to install PyQt or PySide based on PYTEST_QT_FORCE_PYQT
and python version. Meant to be used in travis-ci.
'''
import os
import sys
def run(cmd):
print(cmd)
r = os.system(cmd)
if r != 0:
sys.exit('command %s failed with status %s' % (cmd, r))
py3k = sys.version_info[0] == 3
if os.environ['PYTEST_QT_API'] in ('pyqt4', 'pyqt5'):
pyqt_ver = os.environ['PYTEST_QT_API'][-1]
if py3k:
run('sudo apt-get install -qq python3-pyqt%s{,-dbg}' % pyqt_ver)
else:
run('sudo apt-get install -qq python-qt%s{,-dbg}' % pyqt_ver)
else:
if py3k:
run('sudo apt-get install -qq python3-pyside{,-dbg}')
else:
run('sudo apt-get install -qq python-pyside')
|
Install debugging symbols on Travis
|
Install debugging symbols on Travis
|
Python
|
mit
|
pytest-dev/pytest-qt,The-Compiler/pytest-qt
|
'''
Simple script to install PyQt or PySide based on PYTEST_QT_FORCE_PYQT
and python version. Meant to be used in travis-ci.
'''
import os
import sys
def run(cmd):
print(cmd)
r = os.system(cmd)
if r != 0:
sys.exit('command %s failed with status %s' % (cmd, r))
py3k = sys.version_info[0] == 3
if os.environ['PYTEST_QT_API'] in ('pyqt4', 'pyqt5'):
pyqt_ver = os.environ['PYTEST_QT_API'][-1]
if py3k:
run('sudo apt-get install -qq python3-pyqt%s' % pyqt_ver)
else:
run('sudo apt-get install -qq python-qt%s' % pyqt_ver)
else:
if py3k:
run('sudo apt-get install -qq python3-pyside')
else:
run('sudo apt-get install -qq python-pyside')
Install debugging symbols on Travis
|
'''
Simple script to install PyQt or PySide based on PYTEST_QT_FORCE_PYQT
and python version. Meant to be used in travis-ci.
'''
import os
import sys
def run(cmd):
print(cmd)
r = os.system(cmd)
if r != 0:
sys.exit('command %s failed with status %s' % (cmd, r))
py3k = sys.version_info[0] == 3
if os.environ['PYTEST_QT_API'] in ('pyqt4', 'pyqt5'):
pyqt_ver = os.environ['PYTEST_QT_API'][-1]
if py3k:
run('sudo apt-get install -qq python3-pyqt%s{,-dbg}' % pyqt_ver)
else:
run('sudo apt-get install -qq python-qt%s{,-dbg}' % pyqt_ver)
else:
if py3k:
run('sudo apt-get install -qq python3-pyside{,-dbg}')
else:
run('sudo apt-get install -qq python-pyside')
|
<commit_before>'''
Simple script to install PyQt or PySide based on PYTEST_QT_FORCE_PYQT
and python version. Meant to be used in travis-ci.
'''
import os
import sys
def run(cmd):
print(cmd)
r = os.system(cmd)
if r != 0:
sys.exit('command %s failed with status %s' % (cmd, r))
py3k = sys.version_info[0] == 3
if os.environ['PYTEST_QT_API'] in ('pyqt4', 'pyqt5'):
pyqt_ver = os.environ['PYTEST_QT_API'][-1]
if py3k:
run('sudo apt-get install -qq python3-pyqt%s' % pyqt_ver)
else:
run('sudo apt-get install -qq python-qt%s' % pyqt_ver)
else:
if py3k:
run('sudo apt-get install -qq python3-pyside')
else:
run('sudo apt-get install -qq python-pyside')
<commit_msg>Install debugging symbols on Travis<commit_after>
|
'''
Simple script to install PyQt or PySide based on PYTEST_QT_FORCE_PYQT
and python version. Meant to be used in travis-ci.
'''
import os
import sys
def run(cmd):
print(cmd)
r = os.system(cmd)
if r != 0:
sys.exit('command %s failed with status %s' % (cmd, r))
py3k = sys.version_info[0] == 3
if os.environ['PYTEST_QT_API'] in ('pyqt4', 'pyqt5'):
pyqt_ver = os.environ['PYTEST_QT_API'][-1]
if py3k:
run('sudo apt-get install -qq python3-pyqt%s{,-dbg}' % pyqt_ver)
else:
run('sudo apt-get install -qq python-qt%s{,-dbg}' % pyqt_ver)
else:
if py3k:
run('sudo apt-get install -qq python3-pyside{,-dbg}')
else:
run('sudo apt-get install -qq python-pyside')
|
'''
Simple script to install PyQt or PySide based on PYTEST_QT_FORCE_PYQT
and python version. Meant to be used in travis-ci.
'''
import os
import sys
def run(cmd):
print(cmd)
r = os.system(cmd)
if r != 0:
sys.exit('command %s failed with status %s' % (cmd, r))
py3k = sys.version_info[0] == 3
if os.environ['PYTEST_QT_API'] in ('pyqt4', 'pyqt5'):
pyqt_ver = os.environ['PYTEST_QT_API'][-1]
if py3k:
run('sudo apt-get install -qq python3-pyqt%s' % pyqt_ver)
else:
run('sudo apt-get install -qq python-qt%s' % pyqt_ver)
else:
if py3k:
run('sudo apt-get install -qq python3-pyside')
else:
run('sudo apt-get install -qq python-pyside')
Install debugging symbols on Travis'''
Simple script to install PyQt or PySide based on PYTEST_QT_FORCE_PYQT
and python version. Meant to be used in travis-ci.
'''
import os
import sys
def run(cmd):
print(cmd)
r = os.system(cmd)
if r != 0:
sys.exit('command %s failed with status %s' % (cmd, r))
py3k = sys.version_info[0] == 3
if os.environ['PYTEST_QT_API'] in ('pyqt4', 'pyqt5'):
pyqt_ver = os.environ['PYTEST_QT_API'][-1]
if py3k:
run('sudo apt-get install -qq python3-pyqt%s{,-dbg}' % pyqt_ver)
else:
run('sudo apt-get install -qq python-qt%s{,-dbg}' % pyqt_ver)
else:
if py3k:
run('sudo apt-get install -qq python3-pyside{,-dbg}')
else:
run('sudo apt-get install -qq python-pyside')
|
<commit_before>'''
Simple script to install PyQt or PySide based on PYTEST_QT_FORCE_PYQT
and python version. Meant to be used in travis-ci.
'''
import os
import sys
def run(cmd):
print(cmd)
r = os.system(cmd)
if r != 0:
sys.exit('command %s failed with status %s' % (cmd, r))
py3k = sys.version_info[0] == 3
if os.environ['PYTEST_QT_API'] in ('pyqt4', 'pyqt5'):
pyqt_ver = os.environ['PYTEST_QT_API'][-1]
if py3k:
run('sudo apt-get install -qq python3-pyqt%s' % pyqt_ver)
else:
run('sudo apt-get install -qq python-qt%s' % pyqt_ver)
else:
if py3k:
run('sudo apt-get install -qq python3-pyside')
else:
run('sudo apt-get install -qq python-pyside')
<commit_msg>Install debugging symbols on Travis<commit_after>'''
Simple script to install PyQt or PySide based on PYTEST_QT_FORCE_PYQT
and python version. Meant to be used in travis-ci.
'''
import os
import sys
def run(cmd):
print(cmd)
r = os.system(cmd)
if r != 0:
sys.exit('command %s failed with status %s' % (cmd, r))
py3k = sys.version_info[0] == 3
if os.environ['PYTEST_QT_API'] in ('pyqt4', 'pyqt5'):
pyqt_ver = os.environ['PYTEST_QT_API'][-1]
if py3k:
run('sudo apt-get install -qq python3-pyqt%s{,-dbg}' % pyqt_ver)
else:
run('sudo apt-get install -qq python-qt%s{,-dbg}' % pyqt_ver)
else:
if py3k:
run('sudo apt-get install -qq python3-pyside{,-dbg}')
else:
run('sudo apt-get install -qq python-pyside')
|
b744498b2308748dacf9947b78386f10f2072061
|
beetle/utils.py
|
beetle/utils.py
|
import os
def read_folder(folder, mode):
for folder, __, files in os.walk(folder):
for file_name in files:
path = os.path.join(folder, file_name)
with open(path, mode) as fo:
yield path, fo.read()
def remove_leading_folder(path):
__, partial_path = path.split(os.sep, 1)
return partial_path
|
import os
def read_folder(folder, mode):
if 'b' in mode:
encoding = None
else:
encoding = 'utf-8'
for folder, __, files in os.walk(folder):
for file_name in files:
path = os.path.join(folder, file_name)
with open(path, mode, encoding=encoding) as fo:
yield path, fo.read()
def remove_leading_folder(path):
__, partial_path = path.split(os.sep, 1)
return partial_path
|
Define encoding when reading files if they aren't opened in binary mode
|
Define encoding when reading files if they aren't opened in binary mode
|
Python
|
mit
|
cknv/beetle
|
import os
def read_folder(folder, mode):
for folder, __, files in os.walk(folder):
for file_name in files:
path = os.path.join(folder, file_name)
with open(path, mode) as fo:
yield path, fo.read()
def remove_leading_folder(path):
__, partial_path = path.split(os.sep, 1)
return partial_path
Define encoding when reading files if they aren't opened in binary mode
|
import os
def read_folder(folder, mode):
if 'b' in mode:
encoding = None
else:
encoding = 'utf-8'
for folder, __, files in os.walk(folder):
for file_name in files:
path = os.path.join(folder, file_name)
with open(path, mode, encoding=encoding) as fo:
yield path, fo.read()
def remove_leading_folder(path):
__, partial_path = path.split(os.sep, 1)
return partial_path
|
<commit_before>import os
def read_folder(folder, mode):
for folder, __, files in os.walk(folder):
for file_name in files:
path = os.path.join(folder, file_name)
with open(path, mode) as fo:
yield path, fo.read()
def remove_leading_folder(path):
__, partial_path = path.split(os.sep, 1)
return partial_path
<commit_msg>Define encoding when reading files if they aren't opened in binary mode<commit_after>
|
import os
def read_folder(folder, mode):
if 'b' in mode:
encoding = None
else:
encoding = 'utf-8'
for folder, __, files in os.walk(folder):
for file_name in files:
path = os.path.join(folder, file_name)
with open(path, mode, encoding=encoding) as fo:
yield path, fo.read()
def remove_leading_folder(path):
__, partial_path = path.split(os.sep, 1)
return partial_path
|
import os
def read_folder(folder, mode):
for folder, __, files in os.walk(folder):
for file_name in files:
path = os.path.join(folder, file_name)
with open(path, mode) as fo:
yield path, fo.read()
def remove_leading_folder(path):
__, partial_path = path.split(os.sep, 1)
return partial_path
Define encoding when reading files if they aren't opened in binary modeimport os
def read_folder(folder, mode):
if 'b' in mode:
encoding = None
else:
encoding = 'utf-8'
for folder, __, files in os.walk(folder):
for file_name in files:
path = os.path.join(folder, file_name)
with open(path, mode, encoding=encoding) as fo:
yield path, fo.read()
def remove_leading_folder(path):
__, partial_path = path.split(os.sep, 1)
return partial_path
|
<commit_before>import os
def read_folder(folder, mode):
for folder, __, files in os.walk(folder):
for file_name in files:
path = os.path.join(folder, file_name)
with open(path, mode) as fo:
yield path, fo.read()
def remove_leading_folder(path):
__, partial_path = path.split(os.sep, 1)
return partial_path
<commit_msg>Define encoding when reading files if they aren't opened in binary mode<commit_after>import os
def read_folder(folder, mode):
if 'b' in mode:
encoding = None
else:
encoding = 'utf-8'
for folder, __, files in os.walk(folder):
for file_name in files:
path = os.path.join(folder, file_name)
with open(path, mode, encoding=encoding) as fo:
yield path, fo.read()
def remove_leading_folder(path):
__, partial_path = path.split(os.sep, 1)
return partial_path
|
65b1f849cbf02320992e3ef9db86c71e564cc826
|
src/mountebank/exceptions.py
|
src/mountebank/exceptions.py
|
class ImposterException(StandardError):
def __init__(self, response):
self._response = response
|
import sys
if sys.version_info.major == 2:
error_base_class = StandardError
elif sys.version_info.major == 3:
error_base_class = Exception
else:
raise RuntimeError('Unsupported Python version: {}'.format(sys.version))
class ImposterException(error_base_class):
def __init__(self, response):
self._response = response
|
Make Python 2 and 3 compatible
|
Make Python 2 and 3 compatible
|
Python
|
bsd-2-clause
|
kevinjqiu/py-mountebank
|
class ImposterException(StandardError):
def __init__(self, response):
self._response = response
Make Python 2 and 3 compatible
|
import sys
if sys.version_info.major == 2:
error_base_class = StandardError
elif sys.version_info.major == 3:
error_base_class = Exception
else:
raise RuntimeError('Unsupported Python version: {}'.format(sys.version))
class ImposterException(error_base_class):
def __init__(self, response):
self._response = response
|
<commit_before>class ImposterException(StandardError):
def __init__(self, response):
self._response = response
<commit_msg>Make Python 2 and 3 compatible<commit_after>
|
import sys
if sys.version_info.major == 2:
error_base_class = StandardError
elif sys.version_info.major == 3:
error_base_class = Exception
else:
raise RuntimeError('Unsupported Python version: {}'.format(sys.version))
class ImposterException(error_base_class):
def __init__(self, response):
self._response = response
|
class ImposterException(StandardError):
def __init__(self, response):
self._response = response
Make Python 2 and 3 compatibleimport sys
if sys.version_info.major == 2:
error_base_class = StandardError
elif sys.version_info.major == 3:
error_base_class = Exception
else:
raise RuntimeError('Unsupported Python version: {}'.format(sys.version))
class ImposterException(error_base_class):
def __init__(self, response):
self._response = response
|
<commit_before>class ImposterException(StandardError):
def __init__(self, response):
self._response = response
<commit_msg>Make Python 2 and 3 compatible<commit_after>import sys
if sys.version_info.major == 2:
error_base_class = StandardError
elif sys.version_info.major == 3:
error_base_class = Exception
else:
raise RuntimeError('Unsupported Python version: {}'.format(sys.version))
class ImposterException(error_base_class):
def __init__(self, response):
self._response = response
|
e9d62c12448822246ad0ed79a90b36dd27429615
|
echidna/demo/server.py
|
echidna/demo/server.py
|
"""
Echidna demo server.
"""
import os
from cyclone.web import RequestHandler
from echidna.server import EchidnaServer
class DemoServer(EchidnaServer):
"""
A server to demo Echidna.
"""
def __init__(self, **settings):
defaults = {
"template_path": (
os.path.join(os.path.dirname(__file__), "templates")),
"static_path": (
os.path.join(os.path.dirname(__file__), "static")),
"static_url_prefix": "/static/",
"autoescape": None,
}
defaults.update(settings)
EchidnaServer.__init__(self, DemoPageHandler, **defaults)
class DemoPageHandler(RequestHandler):
"""
Render the demo page.
"""
def get(self):
self.render("demo.html", api_server="localhost:8888")
|
"""
Echidna demo server.
"""
import os
from cyclone.web import RequestHandler
from echidna.server import EchidnaServer
class DemoServer(EchidnaServer):
"""
A server to demo Echidna.
"""
def __init__(self, **settings):
defaults = {
"template_path": (
os.path.join(os.path.dirname(__file__), "templates")),
"static_path": (
os.path.join(os.path.dirname(__file__), "static")),
"static_url_prefix": "/static/",
"autoescape": None,
}
defaults.update(settings)
EchidnaServer.__init__(self, DemoPageHandler, **defaults)
class DemoPageHandler(RequestHandler):
"""
Render the demo page.
"""
def get(self):
self.render("demo.html",
api_server="localhost:8888",
channels=[
("radio_ga_ga", "Radio Ga Ga"),
("channel_x", "Channel X"),
("major_tom", "Major Tom"),
])
|
Add list of channels to demo.html template context.
|
Add list of channels to demo.html template context.
|
Python
|
bsd-3-clause
|
praekelt/echidna,praekelt/echidna,praekelt/echidna,praekelt/echidna
|
"""
Echidna demo server.
"""
import os
from cyclone.web import RequestHandler
from echidna.server import EchidnaServer
class DemoServer(EchidnaServer):
"""
A server to demo Echidna.
"""
def __init__(self, **settings):
defaults = {
"template_path": (
os.path.join(os.path.dirname(__file__), "templates")),
"static_path": (
os.path.join(os.path.dirname(__file__), "static")),
"static_url_prefix": "/static/",
"autoescape": None,
}
defaults.update(settings)
EchidnaServer.__init__(self, DemoPageHandler, **defaults)
class DemoPageHandler(RequestHandler):
"""
Render the demo page.
"""
def get(self):
self.render("demo.html", api_server="localhost:8888")
Add list of channels to demo.html template context.
|
"""
Echidna demo server.
"""
import os
from cyclone.web import RequestHandler
from echidna.server import EchidnaServer
class DemoServer(EchidnaServer):
"""
A server to demo Echidna.
"""
def __init__(self, **settings):
defaults = {
"template_path": (
os.path.join(os.path.dirname(__file__), "templates")),
"static_path": (
os.path.join(os.path.dirname(__file__), "static")),
"static_url_prefix": "/static/",
"autoescape": None,
}
defaults.update(settings)
EchidnaServer.__init__(self, DemoPageHandler, **defaults)
class DemoPageHandler(RequestHandler):
"""
Render the demo page.
"""
def get(self):
self.render("demo.html",
api_server="localhost:8888",
channels=[
("radio_ga_ga", "Radio Ga Ga"),
("channel_x", "Channel X"),
("major_tom", "Major Tom"),
])
|
<commit_before>"""
Echidna demo server.
"""
import os
from cyclone.web import RequestHandler
from echidna.server import EchidnaServer
class DemoServer(EchidnaServer):
"""
A server to demo Echidna.
"""
def __init__(self, **settings):
defaults = {
"template_path": (
os.path.join(os.path.dirname(__file__), "templates")),
"static_path": (
os.path.join(os.path.dirname(__file__), "static")),
"static_url_prefix": "/static/",
"autoescape": None,
}
defaults.update(settings)
EchidnaServer.__init__(self, DemoPageHandler, **defaults)
class DemoPageHandler(RequestHandler):
"""
Render the demo page.
"""
def get(self):
self.render("demo.html", api_server="localhost:8888")
<commit_msg>Add list of channels to demo.html template context.<commit_after>
|
"""
Echidna demo server.
"""
import os
from cyclone.web import RequestHandler
from echidna.server import EchidnaServer
class DemoServer(EchidnaServer):
"""
A server to demo Echidna.
"""
def __init__(self, **settings):
defaults = {
"template_path": (
os.path.join(os.path.dirname(__file__), "templates")),
"static_path": (
os.path.join(os.path.dirname(__file__), "static")),
"static_url_prefix": "/static/",
"autoescape": None,
}
defaults.update(settings)
EchidnaServer.__init__(self, DemoPageHandler, **defaults)
class DemoPageHandler(RequestHandler):
"""
Render the demo page.
"""
def get(self):
self.render("demo.html",
api_server="localhost:8888",
channels=[
("radio_ga_ga", "Radio Ga Ga"),
("channel_x", "Channel X"),
("major_tom", "Major Tom"),
])
|
"""
Echidna demo server.
"""
import os
from cyclone.web import RequestHandler
from echidna.server import EchidnaServer
class DemoServer(EchidnaServer):
"""
A server to demo Echidna.
"""
def __init__(self, **settings):
defaults = {
"template_path": (
os.path.join(os.path.dirname(__file__), "templates")),
"static_path": (
os.path.join(os.path.dirname(__file__), "static")),
"static_url_prefix": "/static/",
"autoescape": None,
}
defaults.update(settings)
EchidnaServer.__init__(self, DemoPageHandler, **defaults)
class DemoPageHandler(RequestHandler):
"""
Render the demo page.
"""
def get(self):
self.render("demo.html", api_server="localhost:8888")
Add list of channels to demo.html template context."""
Echidna demo server.
"""
import os
from cyclone.web import RequestHandler
from echidna.server import EchidnaServer
class DemoServer(EchidnaServer):
"""
A server to demo Echidna.
"""
def __init__(self, **settings):
defaults = {
"template_path": (
os.path.join(os.path.dirname(__file__), "templates")),
"static_path": (
os.path.join(os.path.dirname(__file__), "static")),
"static_url_prefix": "/static/",
"autoescape": None,
}
defaults.update(settings)
EchidnaServer.__init__(self, DemoPageHandler, **defaults)
class DemoPageHandler(RequestHandler):
"""
Render the demo page.
"""
def get(self):
self.render("demo.html",
api_server="localhost:8888",
channels=[
("radio_ga_ga", "Radio Ga Ga"),
("channel_x", "Channel X"),
("major_tom", "Major Tom"),
])
|
<commit_before>"""
Echidna demo server.
"""
import os
from cyclone.web import RequestHandler
from echidna.server import EchidnaServer
class DemoServer(EchidnaServer):
"""
A server to demo Echidna.
"""
def __init__(self, **settings):
defaults = {
"template_path": (
os.path.join(os.path.dirname(__file__), "templates")),
"static_path": (
os.path.join(os.path.dirname(__file__), "static")),
"static_url_prefix": "/static/",
"autoescape": None,
}
defaults.update(settings)
EchidnaServer.__init__(self, DemoPageHandler, **defaults)
class DemoPageHandler(RequestHandler):
"""
Render the demo page.
"""
def get(self):
self.render("demo.html", api_server="localhost:8888")
<commit_msg>Add list of channels to demo.html template context.<commit_after>"""
Echidna demo server.
"""
import os
from cyclone.web import RequestHandler
from echidna.server import EchidnaServer
class DemoServer(EchidnaServer):
"""
A server to demo Echidna.
"""
def __init__(self, **settings):
defaults = {
"template_path": (
os.path.join(os.path.dirname(__file__), "templates")),
"static_path": (
os.path.join(os.path.dirname(__file__), "static")),
"static_url_prefix": "/static/",
"autoescape": None,
}
defaults.update(settings)
EchidnaServer.__init__(self, DemoPageHandler, **defaults)
class DemoPageHandler(RequestHandler):
"""
Render the demo page.
"""
def get(self):
self.render("demo.html",
api_server="localhost:8888",
channels=[
("radio_ga_ga", "Radio Ga Ga"),
("channel_x", "Channel X"),
("major_tom", "Major Tom"),
])
|
396df5eac473fccc16e103d3d3316aefd653789a
|
changeling/models.py
|
changeling/models.py
|
import uuid
import jsonschema
import changeling.exception
class Change(object):
schema = {
'name': 'change',
'properties': {
'id': {'type': 'string'},
'name': {'type': 'string'},
'description': {'type': 'string'},
},
'additionalProperties': False,
}
def __init__(self, id=None, name=None, description=None):
self.id = id or str(uuid.uuid4())
self.name = name
self.description = description
@classmethod
def from_dict(self, data):
self.validate(data)
return Change(**data)
def to_dict(self):
def _generate_set_attributes():
for k in Change.schema['properties'].keys():
val = getattr(self, k)
if val is not None:
yield (k, val)
return dict(_generate_set_attributes())
def __str__(self):
return "<Change id=%s name=%s>" % (self.id, self.name)
@classmethod
def validate(cls, data):
try:
jsonschema.validate(data, cls.schema)
except jsonschema.ValidationError as exc:
raise changeling.exception.ValidationError(exc)
def is_valid(self):
try:
self.validate(self.to_dict())
except changeling.exception.ValidationError:
return False
else:
return True
|
import uuid
import jsonschema
import changeling.exception
class Change(object):
schema = {
'name': 'change',
'properties': {
'id': {'type': 'string'},
'name': {'type': 'string'},
'description': {'type': 'string'},
'tags': {'type': 'array'},
},
'additionalProperties': False,
}
def __init__(self, id=None, name=None, description=None, tags=None):
self.id = id or str(uuid.uuid4())
self.name = name
self.description = description
self.tags = tags or []
@classmethod
def from_dict(self, data):
self.validate(data)
return Change(**data)
def to_dict(self):
def _generate_set_attributes():
for k in Change.schema['properties'].keys():
val = getattr(self, k)
if val is not None:
yield (k, val)
return dict(_generate_set_attributes())
def __str__(self):
return "<Change id=%s name=%s>" % (self.id, self.name)
@classmethod
def validate(cls, data):
try:
jsonschema.validate(data, cls.schema)
except jsonschema.ValidationError as exc:
raise changeling.exception.ValidationError(exc)
def is_valid(self):
try:
self.validate(self.to_dict())
except changeling.exception.ValidationError:
return False
else:
return True
|
Add tags - that was too easy
|
Add tags - that was too easy
|
Python
|
apache-2.0
|
bcwaldon/changeling,bcwaldon/changeling
|
import uuid
import jsonschema
import changeling.exception
class Change(object):
schema = {
'name': 'change',
'properties': {
'id': {'type': 'string'},
'name': {'type': 'string'},
'description': {'type': 'string'},
},
'additionalProperties': False,
}
def __init__(self, id=None, name=None, description=None):
self.id = id or str(uuid.uuid4())
self.name = name
self.description = description
@classmethod
def from_dict(self, data):
self.validate(data)
return Change(**data)
def to_dict(self):
def _generate_set_attributes():
for k in Change.schema['properties'].keys():
val = getattr(self, k)
if val is not None:
yield (k, val)
return dict(_generate_set_attributes())
def __str__(self):
return "<Change id=%s name=%s>" % (self.id, self.name)
@classmethod
def validate(cls, data):
try:
jsonschema.validate(data, cls.schema)
except jsonschema.ValidationError as exc:
raise changeling.exception.ValidationError(exc)
def is_valid(self):
try:
self.validate(self.to_dict())
except changeling.exception.ValidationError:
return False
else:
return True
Add tags - that was too easy
|
import uuid
import jsonschema
import changeling.exception
class Change(object):
schema = {
'name': 'change',
'properties': {
'id': {'type': 'string'},
'name': {'type': 'string'},
'description': {'type': 'string'},
'tags': {'type': 'array'},
},
'additionalProperties': False,
}
def __init__(self, id=None, name=None, description=None, tags=None):
self.id = id or str(uuid.uuid4())
self.name = name
self.description = description
self.tags = tags or []
@classmethod
def from_dict(self, data):
self.validate(data)
return Change(**data)
def to_dict(self):
def _generate_set_attributes():
for k in Change.schema['properties'].keys():
val = getattr(self, k)
if val is not None:
yield (k, val)
return dict(_generate_set_attributes())
def __str__(self):
return "<Change id=%s name=%s>" % (self.id, self.name)
@classmethod
def validate(cls, data):
try:
jsonschema.validate(data, cls.schema)
except jsonschema.ValidationError as exc:
raise changeling.exception.ValidationError(exc)
def is_valid(self):
try:
self.validate(self.to_dict())
except changeling.exception.ValidationError:
return False
else:
return True
|
<commit_before>import uuid
import jsonschema
import changeling.exception
class Change(object):
schema = {
'name': 'change',
'properties': {
'id': {'type': 'string'},
'name': {'type': 'string'},
'description': {'type': 'string'},
},
'additionalProperties': False,
}
def __init__(self, id=None, name=None, description=None):
self.id = id or str(uuid.uuid4())
self.name = name
self.description = description
@classmethod
def from_dict(self, data):
self.validate(data)
return Change(**data)
def to_dict(self):
def _generate_set_attributes():
for k in Change.schema['properties'].keys():
val = getattr(self, k)
if val is not None:
yield (k, val)
return dict(_generate_set_attributes())
def __str__(self):
return "<Change id=%s name=%s>" % (self.id, self.name)
@classmethod
def validate(cls, data):
try:
jsonschema.validate(data, cls.schema)
except jsonschema.ValidationError as exc:
raise changeling.exception.ValidationError(exc)
def is_valid(self):
try:
self.validate(self.to_dict())
except changeling.exception.ValidationError:
return False
else:
return True
<commit_msg>Add tags - that was too easy<commit_after>
|
import uuid
import jsonschema
import changeling.exception
class Change(object):
schema = {
'name': 'change',
'properties': {
'id': {'type': 'string'},
'name': {'type': 'string'},
'description': {'type': 'string'},
'tags': {'type': 'array'},
},
'additionalProperties': False,
}
def __init__(self, id=None, name=None, description=None, tags=None):
self.id = id or str(uuid.uuid4())
self.name = name
self.description = description
self.tags = tags or []
@classmethod
def from_dict(self, data):
self.validate(data)
return Change(**data)
def to_dict(self):
def _generate_set_attributes():
for k in Change.schema['properties'].keys():
val = getattr(self, k)
if val is not None:
yield (k, val)
return dict(_generate_set_attributes())
def __str__(self):
return "<Change id=%s name=%s>" % (self.id, self.name)
@classmethod
def validate(cls, data):
try:
jsonschema.validate(data, cls.schema)
except jsonschema.ValidationError as exc:
raise changeling.exception.ValidationError(exc)
def is_valid(self):
try:
self.validate(self.to_dict())
except changeling.exception.ValidationError:
return False
else:
return True
|
import uuid
import jsonschema
import changeling.exception
class Change(object):
schema = {
'name': 'change',
'properties': {
'id': {'type': 'string'},
'name': {'type': 'string'},
'description': {'type': 'string'},
},
'additionalProperties': False,
}
def __init__(self, id=None, name=None, description=None):
self.id = id or str(uuid.uuid4())
self.name = name
self.description = description
@classmethod
def from_dict(self, data):
self.validate(data)
return Change(**data)
def to_dict(self):
def _generate_set_attributes():
for k in Change.schema['properties'].keys():
val = getattr(self, k)
if val is not None:
yield (k, val)
return dict(_generate_set_attributes())
def __str__(self):
return "<Change id=%s name=%s>" % (self.id, self.name)
@classmethod
def validate(cls, data):
try:
jsonschema.validate(data, cls.schema)
except jsonschema.ValidationError as exc:
raise changeling.exception.ValidationError(exc)
def is_valid(self):
try:
self.validate(self.to_dict())
except changeling.exception.ValidationError:
return False
else:
return True
Add tags - that was too easyimport uuid
import jsonschema
import changeling.exception
class Change(object):
schema = {
'name': 'change',
'properties': {
'id': {'type': 'string'},
'name': {'type': 'string'},
'description': {'type': 'string'},
'tags': {'type': 'array'},
},
'additionalProperties': False,
}
def __init__(self, id=None, name=None, description=None, tags=None):
self.id = id or str(uuid.uuid4())
self.name = name
self.description = description
self.tags = tags or []
@classmethod
def from_dict(self, data):
self.validate(data)
return Change(**data)
def to_dict(self):
def _generate_set_attributes():
for k in Change.schema['properties'].keys():
val = getattr(self, k)
if val is not None:
yield (k, val)
return dict(_generate_set_attributes())
def __str__(self):
return "<Change id=%s name=%s>" % (self.id, self.name)
@classmethod
def validate(cls, data):
try:
jsonschema.validate(data, cls.schema)
except jsonschema.ValidationError as exc:
raise changeling.exception.ValidationError(exc)
def is_valid(self):
try:
self.validate(self.to_dict())
except changeling.exception.ValidationError:
return False
else:
return True
|
<commit_before>import uuid
import jsonschema
import changeling.exception
class Change(object):
schema = {
'name': 'change',
'properties': {
'id': {'type': 'string'},
'name': {'type': 'string'},
'description': {'type': 'string'},
},
'additionalProperties': False,
}
def __init__(self, id=None, name=None, description=None):
self.id = id or str(uuid.uuid4())
self.name = name
self.description = description
@classmethod
def from_dict(self, data):
self.validate(data)
return Change(**data)
def to_dict(self):
def _generate_set_attributes():
for k in Change.schema['properties'].keys():
val = getattr(self, k)
if val is not None:
yield (k, val)
return dict(_generate_set_attributes())
def __str__(self):
return "<Change id=%s name=%s>" % (self.id, self.name)
@classmethod
def validate(cls, data):
try:
jsonschema.validate(data, cls.schema)
except jsonschema.ValidationError as exc:
raise changeling.exception.ValidationError(exc)
def is_valid(self):
try:
self.validate(self.to_dict())
except changeling.exception.ValidationError:
return False
else:
return True
<commit_msg>Add tags - that was too easy<commit_after>import uuid
import jsonschema
import changeling.exception
class Change(object):
schema = {
'name': 'change',
'properties': {
'id': {'type': 'string'},
'name': {'type': 'string'},
'description': {'type': 'string'},
'tags': {'type': 'array'},
},
'additionalProperties': False,
}
def __init__(self, id=None, name=None, description=None, tags=None):
self.id = id or str(uuid.uuid4())
self.name = name
self.description = description
self.tags = tags or []
@classmethod
def from_dict(self, data):
self.validate(data)
return Change(**data)
def to_dict(self):
def _generate_set_attributes():
for k in Change.schema['properties'].keys():
val = getattr(self, k)
if val is not None:
yield (k, val)
return dict(_generate_set_attributes())
def __str__(self):
return "<Change id=%s name=%s>" % (self.id, self.name)
@classmethod
def validate(cls, data):
try:
jsonschema.validate(data, cls.schema)
except jsonschema.ValidationError as exc:
raise changeling.exception.ValidationError(exc)
def is_valid(self):
try:
self.validate(self.to_dict())
except changeling.exception.ValidationError:
return False
else:
return True
|
4a24e19c160535c7a65c7f3f11748e6048386038
|
examples/gst/wavenc.py
|
examples/gst/wavenc.py
|
#!/usr/bin/env python
import sys
import gst
def decode(filename):
output = filename + '.wav'
pipeline = ('filesrc location="%s" ! spider ! audio/x-raw-int,rate=44100,stereo=2 ! wavenc ! '
'filesink location="%s"') % (filename, output)
bin = gst.parse_launch(pipeline)
bin.set_state(gst.STATE_PLAYING)
while bin.iterate():
pass
bin.set_state(gst.STATE_NULL)
def main(args):
for arg in args[1:]:
decode(arg)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
#!/usr/bin/env python
import sys
import gst
def decode(filename):
output = filename + '.wav'
pipeline = ('{ filesrc location="%s" ! spider ! audio/x-raw-int,rate=44100,stereo=2 ! wavenc ! '
'filesink location="%s" }') % (filename, output)
bin = gst.parse_launch(pipeline)
bin.set_state(gst.STATE_PLAYING)
bin.connect('eos', lambda bin: gst.main_quit())
gst.main()
def main(args):
for arg in args[1:]:
decode(arg)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
Put it in a thread and run it in a mainloop
|
Put it in a thread and run it in a mainloop
Original commit message from CVS:
Put it in a thread and run it in a mainloop
|
Python
|
lgpl-2.1
|
lubosz/gst-python,freedesktop-unofficial-mirror/gstreamer__gst-python,lubosz/gst-python,alessandrod/gst-python,freedesktop-unofficial-mirror/gstreamer__gst-python,GStreamer/gst-python,pexip/gst-python,freedesktop-unofficial-mirror/gstreamer-sdk__gst-python,GStreamer/gst-python,pexip/gst-python,freedesktop-unofficial-mirror/gstreamer-sdk__gst-python,alessandrod/gst-python,freedesktop-unofficial-mirror/gstreamer-sdk__gst-python,GStreamer/gst-python,lubosz/gst-python,freedesktop-unofficial-mirror/gstreamer__gst-python,pexip/gst-python,freedesktop-unofficial-mirror/gstreamer-sdk__gst-python,alessandrod/gst-python
|
#!/usr/bin/env python
import sys
import gst
def decode(filename):
output = filename + '.wav'
pipeline = ('filesrc location="%s" ! spider ! audio/x-raw-int,rate=44100,stereo=2 ! wavenc ! '
'filesink location="%s"') % (filename, output)
bin = gst.parse_launch(pipeline)
bin.set_state(gst.STATE_PLAYING)
while bin.iterate():
pass
bin.set_state(gst.STATE_NULL)
def main(args):
for arg in args[1:]:
decode(arg)
if __name__ == '__main__':
sys.exit(main(sys.argv))
Put it in a thread and run it in a mainloop
Original commit message from CVS:
Put it in a thread and run it in a mainloop
|
#!/usr/bin/env python
import sys
import gst
def decode(filename):
output = filename + '.wav'
pipeline = ('{ filesrc location="%s" ! spider ! audio/x-raw-int,rate=44100,stereo=2 ! wavenc ! '
'filesink location="%s" }') % (filename, output)
bin = gst.parse_launch(pipeline)
bin.set_state(gst.STATE_PLAYING)
bin.connect('eos', lambda bin: gst.main_quit())
gst.main()
def main(args):
for arg in args[1:]:
decode(arg)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
<commit_before>#!/usr/bin/env python
import sys
import gst
def decode(filename):
output = filename + '.wav'
pipeline = ('filesrc location="%s" ! spider ! audio/x-raw-int,rate=44100,stereo=2 ! wavenc ! '
'filesink location="%s"') % (filename, output)
bin = gst.parse_launch(pipeline)
bin.set_state(gst.STATE_PLAYING)
while bin.iterate():
pass
bin.set_state(gst.STATE_NULL)
def main(args):
for arg in args[1:]:
decode(arg)
if __name__ == '__main__':
sys.exit(main(sys.argv))
<commit_msg>Put it in a thread and run it in a mainloop
Original commit message from CVS:
Put it in a thread and run it in a mainloop<commit_after>
|
#!/usr/bin/env python
import sys
import gst
def decode(filename):
output = filename + '.wav'
pipeline = ('{ filesrc location="%s" ! spider ! audio/x-raw-int,rate=44100,stereo=2 ! wavenc ! '
'filesink location="%s" }') % (filename, output)
bin = gst.parse_launch(pipeline)
bin.set_state(gst.STATE_PLAYING)
bin.connect('eos', lambda bin: gst.main_quit())
gst.main()
def main(args):
for arg in args[1:]:
decode(arg)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
#!/usr/bin/env python
import sys
import gst
def decode(filename):
output = filename + '.wav'
pipeline = ('filesrc location="%s" ! spider ! audio/x-raw-int,rate=44100,stereo=2 ! wavenc ! '
'filesink location="%s"') % (filename, output)
bin = gst.parse_launch(pipeline)
bin.set_state(gst.STATE_PLAYING)
while bin.iterate():
pass
bin.set_state(gst.STATE_NULL)
def main(args):
for arg in args[1:]:
decode(arg)
if __name__ == '__main__':
sys.exit(main(sys.argv))
Put it in a thread and run it in a mainloop
Original commit message from CVS:
Put it in a thread and run it in a mainloop#!/usr/bin/env python
import sys
import gst
def decode(filename):
output = filename + '.wav'
pipeline = ('{ filesrc location="%s" ! spider ! audio/x-raw-int,rate=44100,stereo=2 ! wavenc ! '
'filesink location="%s" }') % (filename, output)
bin = gst.parse_launch(pipeline)
bin.set_state(gst.STATE_PLAYING)
bin.connect('eos', lambda bin: gst.main_quit())
gst.main()
def main(args):
for arg in args[1:]:
decode(arg)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
<commit_before>#!/usr/bin/env python
import sys
import gst
def decode(filename):
output = filename + '.wav'
pipeline = ('filesrc location="%s" ! spider ! audio/x-raw-int,rate=44100,stereo=2 ! wavenc ! '
'filesink location="%s"') % (filename, output)
bin = gst.parse_launch(pipeline)
bin.set_state(gst.STATE_PLAYING)
while bin.iterate():
pass
bin.set_state(gst.STATE_NULL)
def main(args):
for arg in args[1:]:
decode(arg)
if __name__ == '__main__':
sys.exit(main(sys.argv))
<commit_msg>Put it in a thread and run it in a mainloop
Original commit message from CVS:
Put it in a thread and run it in a mainloop<commit_after>#!/usr/bin/env python
import sys
import gst
def decode(filename):
output = filename + '.wav'
pipeline = ('{ filesrc location="%s" ! spider ! audio/x-raw-int,rate=44100,stereo=2 ! wavenc ! '
'filesink location="%s" }') % (filename, output)
bin = gst.parse_launch(pipeline)
bin.set_state(gst.STATE_PLAYING)
bin.connect('eos', lambda bin: gst.main_quit())
gst.main()
def main(args):
for arg in args[1:]:
decode(arg)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
fc523d543392b9ef7d5b8b6c8ec962b151552e42
|
tests/test_fields/test_uuid_field.py
|
tests/test_fields/test_uuid_field.py
|
from __future__ import unicode_literals
import uuid
from django.core.exceptions import ValidationError
from django.test import TestCase
from model_utils.fields import UUIDField
class UUIDFieldTests(TestCase):
def test_uuid_version_default(self):
instance = UUIDField()
self.assertEqual(instance.default, uuid.uuid4)
def test_uuid_version_1(self):
instance = UUIDField(version=1)
self.assertEqual(instance.default, uuid.uuid1)
def test_uuid_version_2_error(self):
self.assertRaises(ValidationError, UUIDField, 'version', 2)
def test_uuid_version_3(self):
instance = UUIDField(version=3)
self.assertEqual(instance.default, uuid.uuid3)
def test_uuid_version_4(self):
instance = UUIDField(version=4)
self.assertEqual(instance.default, uuid.uuid4)
def test_uuid_version_5(self):
instance = UUIDField(version=5)
self.assertEqual(instance.default, uuid.uuid5)
|
from __future__ import unicode_literals
import uuid
from django.core.exceptions import ValidationError
from django.test import TestCase
from model_utils.fields import UUIDField
class UUIDFieldTests(TestCase):
def test_uuid_version_default(self):
instance = UUIDField()
self.assertEqual(instance.default, uuid.uuid4)
def test_uuid_version_1(self):
instance = UUIDField(version=1)
self.assertEqual(instance.default, uuid.uuid1)
def test_uuid_version_2_error(self):
self.assertRaises(ValidationError, UUIDField, 'version', 2)
def test_uuid_version_3(self):
instance = UUIDField(version=3)
self.assertEqual(instance.default, uuid.uuid3)
def test_uuid_version_4(self):
instance = UUIDField(version=4)
self.assertEqual(instance.default, uuid.uuid4)
def test_uuid_version_5(self):
instance = UUIDField(version=5)
self.assertEqual(instance.default, uuid.uuid5)
def test_uuid_version_bellow_min(self):
self.assertRaises(ValidationError, UUIDField, 'version', 0)
def test_uuid_version_above_max(self):
self.assertRaises(ValidationError, UUIDField, 'version', 6)
|
Add tdd to increase coverage
|
Add tdd to increase coverage
|
Python
|
bsd-3-clause
|
carljm/django-model-utils,carljm/django-model-utils
|
from __future__ import unicode_literals
import uuid
from django.core.exceptions import ValidationError
from django.test import TestCase
from model_utils.fields import UUIDField
class UUIDFieldTests(TestCase):
def test_uuid_version_default(self):
instance = UUIDField()
self.assertEqual(instance.default, uuid.uuid4)
def test_uuid_version_1(self):
instance = UUIDField(version=1)
self.assertEqual(instance.default, uuid.uuid1)
def test_uuid_version_2_error(self):
self.assertRaises(ValidationError, UUIDField, 'version', 2)
def test_uuid_version_3(self):
instance = UUIDField(version=3)
self.assertEqual(instance.default, uuid.uuid3)
def test_uuid_version_4(self):
instance = UUIDField(version=4)
self.assertEqual(instance.default, uuid.uuid4)
def test_uuid_version_5(self):
instance = UUIDField(version=5)
self.assertEqual(instance.default, uuid.uuid5)
Add tdd to increase coverage
|
from __future__ import unicode_literals
import uuid
from django.core.exceptions import ValidationError
from django.test import TestCase
from model_utils.fields import UUIDField
class UUIDFieldTests(TestCase):
def test_uuid_version_default(self):
instance = UUIDField()
self.assertEqual(instance.default, uuid.uuid4)
def test_uuid_version_1(self):
instance = UUIDField(version=1)
self.assertEqual(instance.default, uuid.uuid1)
def test_uuid_version_2_error(self):
self.assertRaises(ValidationError, UUIDField, 'version', 2)
def test_uuid_version_3(self):
instance = UUIDField(version=3)
self.assertEqual(instance.default, uuid.uuid3)
def test_uuid_version_4(self):
instance = UUIDField(version=4)
self.assertEqual(instance.default, uuid.uuid4)
def test_uuid_version_5(self):
instance = UUIDField(version=5)
self.assertEqual(instance.default, uuid.uuid5)
def test_uuid_version_bellow_min(self):
self.assertRaises(ValidationError, UUIDField, 'version', 0)
def test_uuid_version_above_max(self):
self.assertRaises(ValidationError, UUIDField, 'version', 6)
|
<commit_before>from __future__ import unicode_literals
import uuid
from django.core.exceptions import ValidationError
from django.test import TestCase
from model_utils.fields import UUIDField
class UUIDFieldTests(TestCase):
def test_uuid_version_default(self):
instance = UUIDField()
self.assertEqual(instance.default, uuid.uuid4)
def test_uuid_version_1(self):
instance = UUIDField(version=1)
self.assertEqual(instance.default, uuid.uuid1)
def test_uuid_version_2_error(self):
self.assertRaises(ValidationError, UUIDField, 'version', 2)
def test_uuid_version_3(self):
instance = UUIDField(version=3)
self.assertEqual(instance.default, uuid.uuid3)
def test_uuid_version_4(self):
instance = UUIDField(version=4)
self.assertEqual(instance.default, uuid.uuid4)
def test_uuid_version_5(self):
instance = UUIDField(version=5)
self.assertEqual(instance.default, uuid.uuid5)
<commit_msg>Add tdd to increase coverage<commit_after>
|
from __future__ import unicode_literals
import uuid
from django.core.exceptions import ValidationError
from django.test import TestCase
from model_utils.fields import UUIDField
class UUIDFieldTests(TestCase):
def test_uuid_version_default(self):
instance = UUIDField()
self.assertEqual(instance.default, uuid.uuid4)
def test_uuid_version_1(self):
instance = UUIDField(version=1)
self.assertEqual(instance.default, uuid.uuid1)
def test_uuid_version_2_error(self):
self.assertRaises(ValidationError, UUIDField, 'version', 2)
def test_uuid_version_3(self):
instance = UUIDField(version=3)
self.assertEqual(instance.default, uuid.uuid3)
def test_uuid_version_4(self):
instance = UUIDField(version=4)
self.assertEqual(instance.default, uuid.uuid4)
def test_uuid_version_5(self):
instance = UUIDField(version=5)
self.assertEqual(instance.default, uuid.uuid5)
def test_uuid_version_bellow_min(self):
self.assertRaises(ValidationError, UUIDField, 'version', 0)
def test_uuid_version_above_max(self):
self.assertRaises(ValidationError, UUIDField, 'version', 6)
|
from __future__ import unicode_literals
import uuid
from django.core.exceptions import ValidationError
from django.test import TestCase
from model_utils.fields import UUIDField
class UUIDFieldTests(TestCase):
def test_uuid_version_default(self):
instance = UUIDField()
self.assertEqual(instance.default, uuid.uuid4)
def test_uuid_version_1(self):
instance = UUIDField(version=1)
self.assertEqual(instance.default, uuid.uuid1)
def test_uuid_version_2_error(self):
self.assertRaises(ValidationError, UUIDField, 'version', 2)
def test_uuid_version_3(self):
instance = UUIDField(version=3)
self.assertEqual(instance.default, uuid.uuid3)
def test_uuid_version_4(self):
instance = UUIDField(version=4)
self.assertEqual(instance.default, uuid.uuid4)
def test_uuid_version_5(self):
instance = UUIDField(version=5)
self.assertEqual(instance.default, uuid.uuid5)
Add tdd to increase coveragefrom __future__ import unicode_literals
import uuid
from django.core.exceptions import ValidationError
from django.test import TestCase
from model_utils.fields import UUIDField
class UUIDFieldTests(TestCase):
def test_uuid_version_default(self):
instance = UUIDField()
self.assertEqual(instance.default, uuid.uuid4)
def test_uuid_version_1(self):
instance = UUIDField(version=1)
self.assertEqual(instance.default, uuid.uuid1)
def test_uuid_version_2_error(self):
self.assertRaises(ValidationError, UUIDField, 'version', 2)
def test_uuid_version_3(self):
instance = UUIDField(version=3)
self.assertEqual(instance.default, uuid.uuid3)
def test_uuid_version_4(self):
instance = UUIDField(version=4)
self.assertEqual(instance.default, uuid.uuid4)
def test_uuid_version_5(self):
instance = UUIDField(version=5)
self.assertEqual(instance.default, uuid.uuid5)
def test_uuid_version_bellow_min(self):
self.assertRaises(ValidationError, UUIDField, 'version', 0)
def test_uuid_version_above_max(self):
self.assertRaises(ValidationError, UUIDField, 'version', 6)
|
<commit_before>from __future__ import unicode_literals
import uuid
from django.core.exceptions import ValidationError
from django.test import TestCase
from model_utils.fields import UUIDField
class UUIDFieldTests(TestCase):
def test_uuid_version_default(self):
instance = UUIDField()
self.assertEqual(instance.default, uuid.uuid4)
def test_uuid_version_1(self):
instance = UUIDField(version=1)
self.assertEqual(instance.default, uuid.uuid1)
def test_uuid_version_2_error(self):
self.assertRaises(ValidationError, UUIDField, 'version', 2)
def test_uuid_version_3(self):
instance = UUIDField(version=3)
self.assertEqual(instance.default, uuid.uuid3)
def test_uuid_version_4(self):
instance = UUIDField(version=4)
self.assertEqual(instance.default, uuid.uuid4)
def test_uuid_version_5(self):
instance = UUIDField(version=5)
self.assertEqual(instance.default, uuid.uuid5)
<commit_msg>Add tdd to increase coverage<commit_after>from __future__ import unicode_literals
import uuid
from django.core.exceptions import ValidationError
from django.test import TestCase
from model_utils.fields import UUIDField
class UUIDFieldTests(TestCase):
def test_uuid_version_default(self):
instance = UUIDField()
self.assertEqual(instance.default, uuid.uuid4)
def test_uuid_version_1(self):
instance = UUIDField(version=1)
self.assertEqual(instance.default, uuid.uuid1)
def test_uuid_version_2_error(self):
self.assertRaises(ValidationError, UUIDField, 'version', 2)
def test_uuid_version_3(self):
instance = UUIDField(version=3)
self.assertEqual(instance.default, uuid.uuid3)
def test_uuid_version_4(self):
instance = UUIDField(version=4)
self.assertEqual(instance.default, uuid.uuid4)
def test_uuid_version_5(self):
instance = UUIDField(version=5)
self.assertEqual(instance.default, uuid.uuid5)
def test_uuid_version_bellow_min(self):
self.assertRaises(ValidationError, UUIDField, 'version', 0)
def test_uuid_version_above_max(self):
self.assertRaises(ValidationError, UUIDField, 'version', 6)
|
61018e88f6ef7e24665fca8b336493ff254fa61b
|
examples/irrev_rxns.py
|
examples/irrev_rxns.py
|
"""Example of irreversible reaction."""
import os
from chemkinlib.utils import Parser
from chemkinlib.reactions import ReactionSystems
from chemkinlib.config import DATA_DIRECTORY
import numpy
# USER INPUT: reaction (xml) file
xml_filename = os.path.join(DATA_DIRECTORY, "rxnset_long.xml")
parser = Parser.ReactionParser(xml_filename)
# USER INPUTS (concentrations and temperatures)
concentration = ({'H':1, 'H2':1, 'H2O':1, 'H2O2':1, 'HO2':1, 'O':1, "O2":1, "OH":1})
temperature = 1000
# Set up reaction system
rxnsys = ReactionSystems.ReactionSystem(parser.reaction_list,
parser.NASA_poly_coefs,
temperature,
concentration)
for i in range(10):
dt = 0.001
rxnsys.step(dt)
# Compute and sort reaction rates
rxnrates_dict = rxnsys.sort_reaction_rates()
# display reaction rates by species
for k, v in rxnrates_dict.items():
print("d[{0}]/dt : \t {1:e}".format(k, v))
|
"""Example of irreversible reaction."""
import os
from chemkinlib.utils import Parser
from chemkinlib.reactions import ReactionSystems
from chemkinlib.config import DATA_DIRECTORY
import numpy
# USER INPUT: reaction (xml) file
xml_filename = os.path.join(DATA_DIRECTORY, "rxnset_long.xml")
parser = Parser.ReactionParser(xml_filename)
# USER INPUTS (concentrations and temperatures)
concentration = ({'H':1, 'H2':1, 'H2O':0, 'H2O2':1, 'HO2':1, 'O':1, "O2":1, "OH":1})
temperature = 1000
# Set up reaction system
rxnsys = ReactionSystems.ReactionSystem(parser.reaction_list,
parser.NASA_poly_coefs,
temperature,
concentration)
#compute the concentration change with timestep
for i in range(10):
dt = 0.001
print(rxnsys.step(dt))
# Compute and sort reaction rates
rxnrates_dict = rxnsys.sort_reaction_rates()
# display reaction rates by species
for k, v in rxnrates_dict.items():
print("d[{0}]/dt : \t {1:e}".format(k, v))
|
Add documentation of the new functions
|
Add documentation of the new functions
|
Python
|
mit
|
cs207-group11/cs207-FinalProject,krmotwani/cs207-FinalProject,hsim13372/cs207-FinalProject
|
"""Example of irreversible reaction."""
import os
from chemkinlib.utils import Parser
from chemkinlib.reactions import ReactionSystems
from chemkinlib.config import DATA_DIRECTORY
import numpy
# USER INPUT: reaction (xml) file
xml_filename = os.path.join(DATA_DIRECTORY, "rxnset_long.xml")
parser = Parser.ReactionParser(xml_filename)
# USER INPUTS (concentrations and temperatures)
concentration = ({'H':1, 'H2':1, 'H2O':1, 'H2O2':1, 'HO2':1, 'O':1, "O2":1, "OH":1})
temperature = 1000
# Set up reaction system
rxnsys = ReactionSystems.ReactionSystem(parser.reaction_list,
parser.NASA_poly_coefs,
temperature,
concentration)
for i in range(10):
dt = 0.001
rxnsys.step(dt)
# Compute and sort reaction rates
rxnrates_dict = rxnsys.sort_reaction_rates()
# display reaction rates by species
for k, v in rxnrates_dict.items():
print("d[{0}]/dt : \t {1:e}".format(k, v))
Add documentation of the new functions
|
"""Example of irreversible reaction."""
import os
from chemkinlib.utils import Parser
from chemkinlib.reactions import ReactionSystems
from chemkinlib.config import DATA_DIRECTORY
import numpy
# USER INPUT: reaction (xml) file
xml_filename = os.path.join(DATA_DIRECTORY, "rxnset_long.xml")
parser = Parser.ReactionParser(xml_filename)
# USER INPUTS (concentrations and temperatures)
concentration = ({'H':1, 'H2':1, 'H2O':0, 'H2O2':1, 'HO2':1, 'O':1, "O2":1, "OH":1})
temperature = 1000
# Set up reaction system
rxnsys = ReactionSystems.ReactionSystem(parser.reaction_list,
parser.NASA_poly_coefs,
temperature,
concentration)
#compute the concentration change with timestep
for i in range(10):
dt = 0.001
print(rxnsys.step(dt))
# Compute and sort reaction rates
rxnrates_dict = rxnsys.sort_reaction_rates()
# display reaction rates by species
for k, v in rxnrates_dict.items():
print("d[{0}]/dt : \t {1:e}".format(k, v))
|
<commit_before>
"""Example of irreversible reaction."""
import os
from chemkinlib.utils import Parser
from chemkinlib.reactions import ReactionSystems
from chemkinlib.config import DATA_DIRECTORY
import numpy
# USER INPUT: reaction (xml) file
xml_filename = os.path.join(DATA_DIRECTORY, "rxnset_long.xml")
parser = Parser.ReactionParser(xml_filename)
# USER INPUTS (concentrations and temperatures)
concentration = ({'H':1, 'H2':1, 'H2O':1, 'H2O2':1, 'HO2':1, 'O':1, "O2":1, "OH":1})
temperature = 1000
# Set up reaction system
rxnsys = ReactionSystems.ReactionSystem(parser.reaction_list,
parser.NASA_poly_coefs,
temperature,
concentration)
for i in range(10):
dt = 0.001
rxnsys.step(dt)
# Compute and sort reaction rates
rxnrates_dict = rxnsys.sort_reaction_rates()
# display reaction rates by species
for k, v in rxnrates_dict.items():
print("d[{0}]/dt : \t {1:e}".format(k, v))
<commit_msg>Add documentation of the new functions<commit_after>
|
"""Example of irreversible reaction."""
import os
from chemkinlib.utils import Parser
from chemkinlib.reactions import ReactionSystems
from chemkinlib.config import DATA_DIRECTORY
import numpy
# USER INPUT: reaction (xml) file
xml_filename = os.path.join(DATA_DIRECTORY, "rxnset_long.xml")
parser = Parser.ReactionParser(xml_filename)
# USER INPUTS (concentrations and temperatures)
concentration = ({'H':1, 'H2':1, 'H2O':0, 'H2O2':1, 'HO2':1, 'O':1, "O2":1, "OH":1})
temperature = 1000
# Set up reaction system
rxnsys = ReactionSystems.ReactionSystem(parser.reaction_list,
parser.NASA_poly_coefs,
temperature,
concentration)
#compute the concentration change with timestep
for i in range(10):
dt = 0.001
print(rxnsys.step(dt))
# Compute and sort reaction rates
rxnrates_dict = rxnsys.sort_reaction_rates()
# display reaction rates by species
for k, v in rxnrates_dict.items():
print("d[{0}]/dt : \t {1:e}".format(k, v))
|
"""Example of irreversible reaction."""
import os
from chemkinlib.utils import Parser
from chemkinlib.reactions import ReactionSystems
from chemkinlib.config import DATA_DIRECTORY
import numpy
# USER INPUT: reaction (xml) file
xml_filename = os.path.join(DATA_DIRECTORY, "rxnset_long.xml")
parser = Parser.ReactionParser(xml_filename)
# USER INPUTS (concentrations and temperatures)
concentration = ({'H':1, 'H2':1, 'H2O':1, 'H2O2':1, 'HO2':1, 'O':1, "O2":1, "OH":1})
temperature = 1000
# Set up reaction system
rxnsys = ReactionSystems.ReactionSystem(parser.reaction_list,
parser.NASA_poly_coefs,
temperature,
concentration)
for i in range(10):
dt = 0.001
rxnsys.step(dt)
# Compute and sort reaction rates
rxnrates_dict = rxnsys.sort_reaction_rates()
# display reaction rates by species
for k, v in rxnrates_dict.items():
print("d[{0}]/dt : \t {1:e}".format(k, v))
Add documentation of the new functions
"""Example of irreversible reaction."""
import os
from chemkinlib.utils import Parser
from chemkinlib.reactions import ReactionSystems
from chemkinlib.config import DATA_DIRECTORY
import numpy
# USER INPUT: reaction (xml) file
xml_filename = os.path.join(DATA_DIRECTORY, "rxnset_long.xml")
parser = Parser.ReactionParser(xml_filename)
# USER INPUTS (concentrations and temperatures)
concentration = ({'H':1, 'H2':1, 'H2O':0, 'H2O2':1, 'HO2':1, 'O':1, "O2":1, "OH":1})
temperature = 1000
# Set up reaction system
rxnsys = ReactionSystems.ReactionSystem(parser.reaction_list,
parser.NASA_poly_coefs,
temperature,
concentration)
#compute the concentration change with timestep
for i in range(10):
dt = 0.001
print(rxnsys.step(dt))
# Compute and sort reaction rates
rxnrates_dict = rxnsys.sort_reaction_rates()
# display reaction rates by species
for k, v in rxnrates_dict.items():
print("d[{0}]/dt : \t {1:e}".format(k, v))
|
<commit_before>
"""Example of irreversible reaction."""
import os
from chemkinlib.utils import Parser
from chemkinlib.reactions import ReactionSystems
from chemkinlib.config import DATA_DIRECTORY
import numpy
# USER INPUT: reaction (xml) file
xml_filename = os.path.join(DATA_DIRECTORY, "rxnset_long.xml")
parser = Parser.ReactionParser(xml_filename)
# USER INPUTS (concentrations and temperatures)
concentration = ({'H':1, 'H2':1, 'H2O':1, 'H2O2':1, 'HO2':1, 'O':1, "O2":1, "OH":1})
temperature = 1000
# Set up reaction system
rxnsys = ReactionSystems.ReactionSystem(parser.reaction_list,
parser.NASA_poly_coefs,
temperature,
concentration)
for i in range(10):
dt = 0.001
rxnsys.step(dt)
# Compute and sort reaction rates
rxnrates_dict = rxnsys.sort_reaction_rates()
# display reaction rates by species
for k, v in rxnrates_dict.items():
print("d[{0}]/dt : \t {1:e}".format(k, v))
<commit_msg>Add documentation of the new functions<commit_after>
"""Example of irreversible reaction."""
import os
from chemkinlib.utils import Parser
from chemkinlib.reactions import ReactionSystems
from chemkinlib.config import DATA_DIRECTORY
import numpy
# USER INPUT: reaction (xml) file
xml_filename = os.path.join(DATA_DIRECTORY, "rxnset_long.xml")
parser = Parser.ReactionParser(xml_filename)
# USER INPUTS (concentrations and temperatures)
concentration = ({'H':1, 'H2':1, 'H2O':0, 'H2O2':1, 'HO2':1, 'O':1, "O2":1, "OH":1})
temperature = 1000
# Set up reaction system
rxnsys = ReactionSystems.ReactionSystem(parser.reaction_list,
parser.NASA_poly_coefs,
temperature,
concentration)
#compute the concentration change with timestep
for i in range(10):
dt = 0.001
print(rxnsys.step(dt))
# Compute and sort reaction rates
rxnrates_dict = rxnsys.sort_reaction_rates()
# display reaction rates by species
for k, v in rxnrates_dict.items():
print("d[{0}]/dt : \t {1:e}".format(k, v))
|
6d43879608a3f218120b88da911c8bacf8177d82
|
owebunit/tests/simple.py
|
owebunit/tests/simple.py
|
import BaseHTTPServer
import threading
import time
import owebunit
import bottle
app = bottle.Bottle()
@app.route('/ok')
def ok():
return 'ok'
@app.route('/internal_server_error')
def internal_error():
bottle.abort(500, 'internal server error')
def run_server():
app.run(host='localhost', port=8041)
class ServerThread(threading.Thread):
def run(self):
run_server()
def start_server():
server_thread = ServerThread()
server_thread.daemon = True
server_thread.start()
start_server()
time.sleep(0.1)
class Case(owebunit.WebTestCase):
def test_simple(self):
self.get('http://127.0.0.1:8041/ok')
self.assert_code(200)
def test_session(self):
with self.session() as s:
s.get('http://127.0.0.1:8041/ok')
s.assert_code(200)
if __name__ == '__main__':
import unittest
unittest.main()
|
import BaseHTTPServer
import threading
import time
import owebunit
import bottle
app = bottle.Bottle()
@app.route('/ok')
def ok():
return 'ok'
@app.route('/internal_server_error')
def internal_error():
bottle.abort(500, 'internal server error')
def run_server():
app.run(host='localhost', port=8041)
class ServerThread(threading.Thread):
def run(self):
run_server()
def start_server():
server_thread = ServerThread()
server_thread.daemon = True
server_thread.start()
start_server()
time.sleep(0.1)
class Case(owebunit.WebTestCase):
def test_simple(self):
self.get('http://127.0.0.1:8041/ok')
self.assert_code(200)
def test_session(self):
with self.session() as s:
s.get('http://127.0.0.1:8041/ok')
s.assert_code(200)
def test_multiple_sessions(self):
one = self.session()
one.get('http://127.0.0.1:8041/ok')
two = self.session()
two.get('http://127.0.0.1:8041/internal_server_error')
one.assert_code(200)
two.assert_code(500)
if __name__ == '__main__':
import unittest
unittest.main()
|
Test using multiple concurrent sessions
|
Test using multiple concurrent sessions
|
Python
|
bsd-2-clause
|
p/webracer
|
import BaseHTTPServer
import threading
import time
import owebunit
import bottle
app = bottle.Bottle()
@app.route('/ok')
def ok():
return 'ok'
@app.route('/internal_server_error')
def internal_error():
bottle.abort(500, 'internal server error')
def run_server():
app.run(host='localhost', port=8041)
class ServerThread(threading.Thread):
def run(self):
run_server()
def start_server():
server_thread = ServerThread()
server_thread.daemon = True
server_thread.start()
start_server()
time.sleep(0.1)
class Case(owebunit.WebTestCase):
def test_simple(self):
self.get('http://127.0.0.1:8041/ok')
self.assert_code(200)
def test_session(self):
with self.session() as s:
s.get('http://127.0.0.1:8041/ok')
s.assert_code(200)
if __name__ == '__main__':
import unittest
unittest.main()
Test using multiple concurrent sessions
|
import BaseHTTPServer
import threading
import time
import owebunit
import bottle
app = bottle.Bottle()
@app.route('/ok')
def ok():
return 'ok'
@app.route('/internal_server_error')
def internal_error():
bottle.abort(500, 'internal server error')
def run_server():
app.run(host='localhost', port=8041)
class ServerThread(threading.Thread):
def run(self):
run_server()
def start_server():
server_thread = ServerThread()
server_thread.daemon = True
server_thread.start()
start_server()
time.sleep(0.1)
class Case(owebunit.WebTestCase):
def test_simple(self):
self.get('http://127.0.0.1:8041/ok')
self.assert_code(200)
def test_session(self):
with self.session() as s:
s.get('http://127.0.0.1:8041/ok')
s.assert_code(200)
def test_multiple_sessions(self):
one = self.session()
one.get('http://127.0.0.1:8041/ok')
two = self.session()
two.get('http://127.0.0.1:8041/internal_server_error')
one.assert_code(200)
two.assert_code(500)
if __name__ == '__main__':
import unittest
unittest.main()
|
<commit_before>import BaseHTTPServer
import threading
import time
import owebunit
import bottle
app = bottle.Bottle()
@app.route('/ok')
def ok():
return 'ok'
@app.route('/internal_server_error')
def internal_error():
bottle.abort(500, 'internal server error')
def run_server():
app.run(host='localhost', port=8041)
class ServerThread(threading.Thread):
def run(self):
run_server()
def start_server():
server_thread = ServerThread()
server_thread.daemon = True
server_thread.start()
start_server()
time.sleep(0.1)
class Case(owebunit.WebTestCase):
def test_simple(self):
self.get('http://127.0.0.1:8041/ok')
self.assert_code(200)
def test_session(self):
with self.session() as s:
s.get('http://127.0.0.1:8041/ok')
s.assert_code(200)
if __name__ == '__main__':
import unittest
unittest.main()
<commit_msg>Test using multiple concurrent sessions<commit_after>
|
import BaseHTTPServer
import threading
import time
import owebunit
import bottle
app = bottle.Bottle()
@app.route('/ok')
def ok():
return 'ok'
@app.route('/internal_server_error')
def internal_error():
bottle.abort(500, 'internal server error')
def run_server():
app.run(host='localhost', port=8041)
class ServerThread(threading.Thread):
def run(self):
run_server()
def start_server():
server_thread = ServerThread()
server_thread.daemon = True
server_thread.start()
start_server()
time.sleep(0.1)
class Case(owebunit.WebTestCase):
def test_simple(self):
self.get('http://127.0.0.1:8041/ok')
self.assert_code(200)
def test_session(self):
with self.session() as s:
s.get('http://127.0.0.1:8041/ok')
s.assert_code(200)
def test_multiple_sessions(self):
one = self.session()
one.get('http://127.0.0.1:8041/ok')
two = self.session()
two.get('http://127.0.0.1:8041/internal_server_error')
one.assert_code(200)
two.assert_code(500)
if __name__ == '__main__':
import unittest
unittest.main()
|
import BaseHTTPServer
import threading
import time
import owebunit
import bottle
app = bottle.Bottle()
@app.route('/ok')
def ok():
return 'ok'
@app.route('/internal_server_error')
def internal_error():
bottle.abort(500, 'internal server error')
def run_server():
app.run(host='localhost', port=8041)
class ServerThread(threading.Thread):
def run(self):
run_server()
def start_server():
server_thread = ServerThread()
server_thread.daemon = True
server_thread.start()
start_server()
time.sleep(0.1)
class Case(owebunit.WebTestCase):
def test_simple(self):
self.get('http://127.0.0.1:8041/ok')
self.assert_code(200)
def test_session(self):
with self.session() as s:
s.get('http://127.0.0.1:8041/ok')
s.assert_code(200)
if __name__ == '__main__':
import unittest
unittest.main()
Test using multiple concurrent sessionsimport BaseHTTPServer
import threading
import time
import owebunit
import bottle
app = bottle.Bottle()
@app.route('/ok')
def ok():
return 'ok'
@app.route('/internal_server_error')
def internal_error():
bottle.abort(500, 'internal server error')
def run_server():
app.run(host='localhost', port=8041)
class ServerThread(threading.Thread):
def run(self):
run_server()
def start_server():
server_thread = ServerThread()
server_thread.daemon = True
server_thread.start()
start_server()
time.sleep(0.1)
class Case(owebunit.WebTestCase):
def test_simple(self):
self.get('http://127.0.0.1:8041/ok')
self.assert_code(200)
def test_session(self):
with self.session() as s:
s.get('http://127.0.0.1:8041/ok')
s.assert_code(200)
def test_multiple_sessions(self):
one = self.session()
one.get('http://127.0.0.1:8041/ok')
two = self.session()
two.get('http://127.0.0.1:8041/internal_server_error')
one.assert_code(200)
two.assert_code(500)
if __name__ == '__main__':
import unittest
unittest.main()
|
<commit_before>import BaseHTTPServer
import threading
import time
import owebunit
import bottle
app = bottle.Bottle()
@app.route('/ok')
def ok():
return 'ok'
@app.route('/internal_server_error')
def internal_error():
bottle.abort(500, 'internal server error')
def run_server():
app.run(host='localhost', port=8041)
class ServerThread(threading.Thread):
def run(self):
run_server()
def start_server():
server_thread = ServerThread()
server_thread.daemon = True
server_thread.start()
start_server()
time.sleep(0.1)
class Case(owebunit.WebTestCase):
def test_simple(self):
self.get('http://127.0.0.1:8041/ok')
self.assert_code(200)
def test_session(self):
with self.session() as s:
s.get('http://127.0.0.1:8041/ok')
s.assert_code(200)
if __name__ == '__main__':
import unittest
unittest.main()
<commit_msg>Test using multiple concurrent sessions<commit_after>import BaseHTTPServer
import threading
import time
import owebunit
import bottle
app = bottle.Bottle()
@app.route('/ok')
def ok():
return 'ok'
@app.route('/internal_server_error')
def internal_error():
bottle.abort(500, 'internal server error')
def run_server():
app.run(host='localhost', port=8041)
class ServerThread(threading.Thread):
def run(self):
run_server()
def start_server():
server_thread = ServerThread()
server_thread.daemon = True
server_thread.start()
start_server()
time.sleep(0.1)
class Case(owebunit.WebTestCase):
def test_simple(self):
self.get('http://127.0.0.1:8041/ok')
self.assert_code(200)
def test_session(self):
with self.session() as s:
s.get('http://127.0.0.1:8041/ok')
s.assert_code(200)
def test_multiple_sessions(self):
one = self.session()
one.get('http://127.0.0.1:8041/ok')
two = self.session()
two.get('http://127.0.0.1:8041/internal_server_error')
one.assert_code(200)
two.assert_code(500)
if __name__ == '__main__':
import unittest
unittest.main()
|
0e8504c8ad81076f190918b17d9c46710875fe8f
|
common/fields.py
|
common/fields.py
|
from django.db.models import AutoField
class SequenceField(AutoField):
"""Overrides the parts of AutoField that force it to be a PK"""
def __init__(self, *args, **kwargs):
super(SequenceField, self).__init__(*args, **kwargs)
def check(self, **kwargs):
"""Shut up '(fields.E100) AutoFields must set primary_key=True.'"""
errors = super(AutoField, self).check(**kwargs)
return errors
def deconstruct(self):
name, path, args, kwargs = super(AutoField, self).deconstruct()
# lacks 'kwargs['primary_key'] = True', unlike AutoField
return name, path, args, kwargs
def get_internal_type(self):
return "SequenceField"
def get_db_prep_value(self, value, connection, prepared=False):
if not prepared:
value = self.get_prep_value(value)
# avoid the PK validation of AutoField
return value
def contribute_to_class(self, cls, name, **kwargs):
"""Stop enforcing the 'one autofield per class' validation"""
super(AutoField, self).contribute_to_class(cls, name, **kwargs)
|
from django.db.models import Field
class SequenceField(Field):
def __init__(self, *args, **kwargs):
kwargs['blank'] = True
super(SequenceField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(SequenceField, self).deconstruct()
# lacks 'kwargs['primary_key'] = True', unlike AutoField
return name, path, args, kwargs
def get_internal_type(self):
return "SequenceField"
def to_python(self, value):
return int(value)
def db_type(self, connection):
return 'serial'
def get_db_prep_value(self, value, connection, prepared=False):
if not prepared:
value = self.get_prep_value(value)
# avoid the PK validation of AutoField
return value
def get_prep_value(self, value):
value = super(SequenceField, self).get_prep_value(value)
if value is None or value == '':
return None
return int(value)
def contribute_to_class(self, cls, name):
# TODO Cleaner way to call Field's version
self.set_attributes_from_name(name)
self.model = cls
cls._meta.add_field(self)
|
Revert "Experiment with SequenceField that inherits from AutoField"
|
Revert "Experiment with SequenceField that inherits from AutoField"
This reverts commit 726c1d31e353e6c1a079fd06c3008c0714f95b86.
|
Python
|
mit
|
MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,urandu/mfl_api,urandu/mfl_api,urandu/mfl_api,MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,urandu/mfl_api,MasterFacilityList/mfl_api
|
from django.db.models import AutoField
class SequenceField(AutoField):
"""Overrides the parts of AutoField that force it to be a PK"""
def __init__(self, *args, **kwargs):
super(SequenceField, self).__init__(*args, **kwargs)
def check(self, **kwargs):
"""Shut up '(fields.E100) AutoFields must set primary_key=True.'"""
errors = super(AutoField, self).check(**kwargs)
return errors
def deconstruct(self):
name, path, args, kwargs = super(AutoField, self).deconstruct()
# lacks 'kwargs['primary_key'] = True', unlike AutoField
return name, path, args, kwargs
def get_internal_type(self):
return "SequenceField"
def get_db_prep_value(self, value, connection, prepared=False):
if not prepared:
value = self.get_prep_value(value)
# avoid the PK validation of AutoField
return value
def contribute_to_class(self, cls, name, **kwargs):
"""Stop enforcing the 'one autofield per class' validation"""
super(AutoField, self).contribute_to_class(cls, name, **kwargs)
Revert "Experiment with SequenceField that inherits from AutoField"
This reverts commit 726c1d31e353e6c1a079fd06c3008c0714f95b86.
|
from django.db.models import Field
class SequenceField(Field):
def __init__(self, *args, **kwargs):
kwargs['blank'] = True
super(SequenceField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(SequenceField, self).deconstruct()
# lacks 'kwargs['primary_key'] = True', unlike AutoField
return name, path, args, kwargs
def get_internal_type(self):
return "SequenceField"
def to_python(self, value):
return int(value)
def db_type(self, connection):
return 'serial'
def get_db_prep_value(self, value, connection, prepared=False):
if not prepared:
value = self.get_prep_value(value)
# avoid the PK validation of AutoField
return value
def get_prep_value(self, value):
value = super(SequenceField, self).get_prep_value(value)
if value is None or value == '':
return None
return int(value)
def contribute_to_class(self, cls, name):
# TODO Cleaner way to call Field's version
self.set_attributes_from_name(name)
self.model = cls
cls._meta.add_field(self)
|
<commit_before>from django.db.models import AutoField
class SequenceField(AutoField):
"""Overrides the parts of AutoField that force it to be a PK"""
def __init__(self, *args, **kwargs):
super(SequenceField, self).__init__(*args, **kwargs)
def check(self, **kwargs):
"""Shut up '(fields.E100) AutoFields must set primary_key=True.'"""
errors = super(AutoField, self).check(**kwargs)
return errors
def deconstruct(self):
name, path, args, kwargs = super(AutoField, self).deconstruct()
# lacks 'kwargs['primary_key'] = True', unlike AutoField
return name, path, args, kwargs
def get_internal_type(self):
return "SequenceField"
def get_db_prep_value(self, value, connection, prepared=False):
if not prepared:
value = self.get_prep_value(value)
# avoid the PK validation of AutoField
return value
def contribute_to_class(self, cls, name, **kwargs):
"""Stop enforcing the 'one autofield per class' validation"""
super(AutoField, self).contribute_to_class(cls, name, **kwargs)
<commit_msg>Revert "Experiment with SequenceField that inherits from AutoField"
This reverts commit 726c1d31e353e6c1a079fd06c3008c0714f95b86.<commit_after>
|
from django.db.models import Field
class SequenceField(Field):
def __init__(self, *args, **kwargs):
kwargs['blank'] = True
super(SequenceField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(SequenceField, self).deconstruct()
# lacks 'kwargs['primary_key'] = True', unlike AutoField
return name, path, args, kwargs
def get_internal_type(self):
return "SequenceField"
def to_python(self, value):
return int(value)
def db_type(self, connection):
return 'serial'
def get_db_prep_value(self, value, connection, prepared=False):
if not prepared:
value = self.get_prep_value(value)
# avoid the PK validation of AutoField
return value
def get_prep_value(self, value):
value = super(SequenceField, self).get_prep_value(value)
if value is None or value == '':
return None
return int(value)
def contribute_to_class(self, cls, name):
# TODO Cleaner way to call Field's version
self.set_attributes_from_name(name)
self.model = cls
cls._meta.add_field(self)
|
from django.db.models import AutoField
class SequenceField(AutoField):
"""Overrides the parts of AutoField that force it to be a PK"""
def __init__(self, *args, **kwargs):
super(SequenceField, self).__init__(*args, **kwargs)
def check(self, **kwargs):
"""Shut up '(fields.E100) AutoFields must set primary_key=True.'"""
errors = super(AutoField, self).check(**kwargs)
return errors
def deconstruct(self):
name, path, args, kwargs = super(AutoField, self).deconstruct()
# lacks 'kwargs['primary_key'] = True', unlike AutoField
return name, path, args, kwargs
def get_internal_type(self):
return "SequenceField"
def get_db_prep_value(self, value, connection, prepared=False):
if not prepared:
value = self.get_prep_value(value)
# avoid the PK validation of AutoField
return value
def contribute_to_class(self, cls, name, **kwargs):
"""Stop enforcing the 'one autofield per class' validation"""
super(AutoField, self).contribute_to_class(cls, name, **kwargs)
Revert "Experiment with SequenceField that inherits from AutoField"
This reverts commit 726c1d31e353e6c1a079fd06c3008c0714f95b86.from django.db.models import Field
class SequenceField(Field):
def __init__(self, *args, **kwargs):
kwargs['blank'] = True
super(SequenceField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(SequenceField, self).deconstruct()
# lacks 'kwargs['primary_key'] = True', unlike AutoField
return name, path, args, kwargs
def get_internal_type(self):
return "SequenceField"
def to_python(self, value):
return int(value)
def db_type(self, connection):
return 'serial'
def get_db_prep_value(self, value, connection, prepared=False):
if not prepared:
value = self.get_prep_value(value)
# avoid the PK validation of AutoField
return value
def get_prep_value(self, value):
value = super(SequenceField, self).get_prep_value(value)
if value is None or value == '':
return None
return int(value)
def contribute_to_class(self, cls, name):
# TODO Cleaner way to call Field's version
self.set_attributes_from_name(name)
self.model = cls
cls._meta.add_field(self)
|
<commit_before>from django.db.models import AutoField
class SequenceField(AutoField):
"""Overrides the parts of AutoField that force it to be a PK"""
def __init__(self, *args, **kwargs):
super(SequenceField, self).__init__(*args, **kwargs)
def check(self, **kwargs):
"""Shut up '(fields.E100) AutoFields must set primary_key=True.'"""
errors = super(AutoField, self).check(**kwargs)
return errors
def deconstruct(self):
name, path, args, kwargs = super(AutoField, self).deconstruct()
# lacks 'kwargs['primary_key'] = True', unlike AutoField
return name, path, args, kwargs
def get_internal_type(self):
return "SequenceField"
def get_db_prep_value(self, value, connection, prepared=False):
if not prepared:
value = self.get_prep_value(value)
# avoid the PK validation of AutoField
return value
def contribute_to_class(self, cls, name, **kwargs):
"""Stop enforcing the 'one autofield per class' validation"""
super(AutoField, self).contribute_to_class(cls, name, **kwargs)
<commit_msg>Revert "Experiment with SequenceField that inherits from AutoField"
This reverts commit 726c1d31e353e6c1a079fd06c3008c0714f95b86.<commit_after>from django.db.models import Field
class SequenceField(Field):
def __init__(self, *args, **kwargs):
kwargs['blank'] = True
super(SequenceField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(SequenceField, self).deconstruct()
# lacks 'kwargs['primary_key'] = True', unlike AutoField
return name, path, args, kwargs
def get_internal_type(self):
return "SequenceField"
def to_python(self, value):
return int(value)
def db_type(self, connection):
return 'serial'
def get_db_prep_value(self, value, connection, prepared=False):
if not prepared:
value = self.get_prep_value(value)
# avoid the PK validation of AutoField
return value
def get_prep_value(self, value):
value = super(SequenceField, self).get_prep_value(value)
if value is None or value == '':
return None
return int(value)
def contribute_to_class(self, cls, name):
# TODO Cleaner way to call Field's version
self.set_attributes_from_name(name)
self.model = cls
cls._meta.add_field(self)
|
5b8482aa7851f11df81e8a457c85b53dbcbeeddf
|
f8a_jobs/graph_sync.py
|
f8a_jobs/graph_sync.py
|
"""Functions to retrieve pending list and invoke Graph Sync."""
import f8a_jobs.defaults as configuration
import requests
import traceback
import logging
logger = logging.getLogger(__name__)
def _api_call(url, params={}):
try:
logger.info("API Call for url: %s, params: %s" % (url, params))
r = requests.get(url, params=params)
if r is None:
logger.error("Returned response is: %s" % r)
raise Exception("Empty response found")
result = {"data": r.json()}
except Exception:
logger.error(traceback.format_exc())
result = {"error": "Failed to retrieve data from Data Model Importer backend"}
return result
def fetch_pending(params={}):
"""Invoke Pending Graph Sync APIs for given parameters."""
url = "%s%s" % (configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/pending")
return _api_call(url, params)
def invoke_sync(params={}):
"""Invoke Graph Sync APIs to sync for given parameters."""
url = "%s%s" % (configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/sync_all")
return _api_call(url, params)
|
"""Functions to retrieve pending list and invoke Graph Sync."""
import f8a_jobs.defaults as configuration
import requests
import traceback
import logging
from urllib.parse import urljoin
logger = logging.getLogger(__name__)
def _api_call(url, params=None):
params = params or {}
try:
logger.info("API Call for url: %s, params: %s" % (url, params))
r = requests.get(url, params=params)
r.raise_for_status()
result = {"data": r.json()}
except requests.exceptions.HTTPError:
logger.error(traceback.format_exc())
result = {"error": "Failed to retrieve data from Data Model Importer backend"}
return result
def fetch_pending(params=None):
params = params or {}
"""Invoke Pending Graph Sync APIs for given parameters."""
url = urljoin(configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/pending")
return _api_call(url, params)
def invoke_sync(params=None):
params = params or {}
"""Invoke Graph Sync APIs to sync for given parameters."""
url = urljoin(configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/sync_all")
return _api_call(url, params)
|
Fix code for review comments
|
Fix code for review comments
|
Python
|
apache-2.0
|
fabric8-analytics/fabric8-analytics-jobs,fabric8-analytics/fabric8-analytics-jobs
|
"""Functions to retrieve pending list and invoke Graph Sync."""
import f8a_jobs.defaults as configuration
import requests
import traceback
import logging
logger = logging.getLogger(__name__)
def _api_call(url, params={}):
try:
logger.info("API Call for url: %s, params: %s" % (url, params))
r = requests.get(url, params=params)
if r is None:
logger.error("Returned response is: %s" % r)
raise Exception("Empty response found")
result = {"data": r.json()}
except Exception:
logger.error(traceback.format_exc())
result = {"error": "Failed to retrieve data from Data Model Importer backend"}
return result
def fetch_pending(params={}):
"""Invoke Pending Graph Sync APIs for given parameters."""
url = "%s%s" % (configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/pending")
return _api_call(url, params)
def invoke_sync(params={}):
"""Invoke Graph Sync APIs to sync for given parameters."""
url = "%s%s" % (configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/sync_all")
return _api_call(url, params)
Fix code for review comments
|
"""Functions to retrieve pending list and invoke Graph Sync."""
import f8a_jobs.defaults as configuration
import requests
import traceback
import logging
from urllib.parse import urljoin
logger = logging.getLogger(__name__)
def _api_call(url, params=None):
params = params or {}
try:
logger.info("API Call for url: %s, params: %s" % (url, params))
r = requests.get(url, params=params)
r.raise_for_status()
result = {"data": r.json()}
except requests.exceptions.HTTPError:
logger.error(traceback.format_exc())
result = {"error": "Failed to retrieve data from Data Model Importer backend"}
return result
def fetch_pending(params=None):
params = params or {}
"""Invoke Pending Graph Sync APIs for given parameters."""
url = urljoin(configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/pending")
return _api_call(url, params)
def invoke_sync(params=None):
params = params or {}
"""Invoke Graph Sync APIs to sync for given parameters."""
url = urljoin(configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/sync_all")
return _api_call(url, params)
|
<commit_before>"""Functions to retrieve pending list and invoke Graph Sync."""
import f8a_jobs.defaults as configuration
import requests
import traceback
import logging
logger = logging.getLogger(__name__)
def _api_call(url, params={}):
try:
logger.info("API Call for url: %s, params: %s" % (url, params))
r = requests.get(url, params=params)
if r is None:
logger.error("Returned response is: %s" % r)
raise Exception("Empty response found")
result = {"data": r.json()}
except Exception:
logger.error(traceback.format_exc())
result = {"error": "Failed to retrieve data from Data Model Importer backend"}
return result
def fetch_pending(params={}):
"""Invoke Pending Graph Sync APIs for given parameters."""
url = "%s%s" % (configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/pending")
return _api_call(url, params)
def invoke_sync(params={}):
"""Invoke Graph Sync APIs to sync for given parameters."""
url = "%s%s" % (configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/sync_all")
return _api_call(url, params)
<commit_msg>Fix code for review comments<commit_after>
|
"""Functions to retrieve pending list and invoke Graph Sync."""
import f8a_jobs.defaults as configuration
import requests
import traceback
import logging
from urllib.parse import urljoin
logger = logging.getLogger(__name__)
def _api_call(url, params=None):
params = params or {}
try:
logger.info("API Call for url: %s, params: %s" % (url, params))
r = requests.get(url, params=params)
r.raise_for_status()
result = {"data": r.json()}
except requests.exceptions.HTTPError:
logger.error(traceback.format_exc())
result = {"error": "Failed to retrieve data from Data Model Importer backend"}
return result
def fetch_pending(params=None):
params = params or {}
"""Invoke Pending Graph Sync APIs for given parameters."""
url = urljoin(configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/pending")
return _api_call(url, params)
def invoke_sync(params=None):
params = params or {}
"""Invoke Graph Sync APIs to sync for given parameters."""
url = urljoin(configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/sync_all")
return _api_call(url, params)
|
"""Functions to retrieve pending list and invoke Graph Sync."""
import f8a_jobs.defaults as configuration
import requests
import traceback
import logging
logger = logging.getLogger(__name__)
def _api_call(url, params={}):
    # NOTE(review): mutable default argument -- the same dict object is
    # shared across calls; safer as params=None with a local fallback.
    """Issue a GET to *url* and wrap the JSON reply as {"data": ...}."""
    try:
        logger.info("API Call for url: %s, params: %s" % (url, params))
        r = requests.get(url, params=params)
        if r is None:
            # Defensive guard: requests.get normally never returns None.
            logger.error("Returned response is: %s" % r)
            raise Exception("Empty response found")
        result = {"data": r.json()}
    except Exception:
        # Any failure is reported back to the caller as an error payload.
        logger.error(traceback.format_exc())
        result = {"error": "Failed to retrieve data from Data Model Importer backend"}
    return result
def fetch_pending(params={}):
    """Invoke Pending Graph Sync APIs for given parameters."""
    url = "%s%s" % (configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/pending")
    return _api_call(url, params)
def invoke_sync(params={}):
    """Invoke Graph Sync APIs to sync for given parameters."""
    url = "%s%s" % (configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/sync_all")
    return _api_call(url, params)
Fix code for review comments"""Functions to retrieve pending list and invoke Graph Sync."""
import f8a_jobs.defaults as configuration
import requests
import traceback
import logging
from urllib.parse import urljoin
logger = logging.getLogger(__name__)
def _api_call(url, params=None):
    """GET *url*; return {"data": <json>} or {"error": <message>}."""
    params = params or {}  # fresh dict per call; avoids a mutable default
    try:
        logger.info("API Call for url: %s, params: %s" % (url, params))
        r = requests.get(url, params=params)
        r.raise_for_status()
        result = {"data": r.json()}
    except requests.exceptions.HTTPError:
        # NOTE(review): connection errors and timeouts are not HTTPError
        # subclasses and would propagate past this handler -- confirm.
        logger.error(traceback.format_exc())
        result = {"error": "Failed to retrieve data from Data Model Importer backend"}
    return result
def fetch_pending(params=None):
    params = params or {}
    # NOTE(review): the string below is not a real docstring (it is not
    # the first statement of the body); it executes as a no-op.
    """Invoke Pending Graph Sync APIs for given parameters."""
    url = urljoin(configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/pending")
    return _api_call(url, params)
def invoke_sync(params=None):
    params = params or {}
    # NOTE(review): same dead-string issue as fetch_pending above.
    """Invoke Graph Sync APIs to sync for given parameters."""
    url = urljoin(configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/sync_all")
    return _api_call(url, params)
|
<commit_before>"""Functions to retrieve pending list and invoke Graph Sync."""
import f8a_jobs.defaults as configuration
import requests
import traceback
import logging
logger = logging.getLogger(__name__)
def _api_call(url, params={}):
try:
logger.info("API Call for url: %s, params: %s" % (url, params))
r = requests.get(url, params=params)
if r is None:
logger.error("Returned response is: %s" % r)
raise Exception("Empty response found")
result = {"data": r.json()}
except Exception:
logger.error(traceback.format_exc())
result = {"error": "Failed to retrieve data from Data Model Importer backend"}
return result
def fetch_pending(params={}):
"""Invoke Pending Graph Sync APIs for given parameters."""
url = "%s%s" % (configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/pending")
return _api_call(url, params)
def invoke_sync(params={}):
"""Invoke Graph Sync APIs to sync for given parameters."""
url = "%s%s" % (configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/sync_all")
return _api_call(url, params)
<commit_msg>Fix code for review comments<commit_after>"""Functions to retrieve pending list and invoke Graph Sync."""
import f8a_jobs.defaults as configuration
import requests
import traceback
import logging
from urllib.parse import urljoin
logger = logging.getLogger(__name__)
def _api_call(url, params=None):
params = params or {}
try:
logger.info("API Call for url: %s, params: %s" % (url, params))
r = requests.get(url, params=params)
r.raise_for_status()
result = {"data": r.json()}
except requests.exceptions.HTTPError:
logger.error(traceback.format_exc())
result = {"error": "Failed to retrieve data from Data Model Importer backend"}
return result
def fetch_pending(params=None):
params = params or {}
"""Invoke Pending Graph Sync APIs for given parameters."""
url = urljoin(configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/pending")
return _api_call(url, params)
def invoke_sync(params=None):
params = params or {}
"""Invoke Graph Sync APIs to sync for given parameters."""
url = urljoin(configuration.DATA_IMPORTER_ENDPOINT, "/api/v1/sync_all")
return _api_call(url, params)
|
079e7cbbd59266e1dc8b161989c90202caa4c5a8
|
flaskbb/utils/views.py
|
flaskbb/utils/views.py
|
from flask import render_template
from flask.views import View
class RenderableView(View):
    """Pluggable Flask view that renders *template* with the mapping
    produced by calling *view*."""

    def __init__(self, template, view):
        self.template = template
        self.view = view

    def dispatch_request(self, *args, **kwargs):
        # The wrapped callable returns the template context as a dict.
        return render_template(self.template, **self.view(*args, **kwargs))
|
from flaskbb.utils.helpers import render_template
from flask.views import View
class RenderableView(View):
    """Pluggable view rendering ``template`` with the dict ``view`` returns."""
    def __init__(self, template, view):
        self.template = template  # template name handed to render_template
        self.view = view  # callable that builds the template context
    def dispatch_request(self, *args, **kwargs):
        # Delegate to the wrapped callable, then render its result.
        view_model = self.view(*args, **kwargs)
        return render_template(self.template, **view_model)
|
Use local render_template rather than Flask's native
|
Use local render_template rather than Flask's native
TODO: Provide a renderer argument at instantiation?
|
Python
|
bsd-3-clause
|
realityone/flaskbb,realityone/flaskbb,dromanow/flaskbb,dromanow/flaskbb,realityone/flaskbb,dromanow/flaskbb
|
from flask import render_template
from flask.views import View
class RenderableView(View):
    """Pluggable view rendering ``template`` with the dict ``view`` returns."""
    def __init__(self, template, view):
        self.template = template  # template name handed to render_template
        self.view = view  # callable that builds the template context
    def dispatch_request(self, *args, **kwargs):
        # Delegate to the wrapped callable, then render its result.
        view_model = self.view(*args, **kwargs)
        return render_template(self.template, **view_model)
Use local render_template than Flask's native
TODO: Provide a renderer argument at instantation?
|
from flaskbb.utils.helpers import render_template
from flask.views import View
class RenderableView(View):
    """Pluggable view rendering ``template`` with the dict ``view`` returns."""
    def __init__(self, template, view):
        self.template = template  # template name handed to render_template
        self.view = view  # callable that builds the template context
    def dispatch_request(self, *args, **kwargs):
        # Delegate to the wrapped callable, then render its result.
        view_model = self.view(*args, **kwargs)
        return render_template(self.template, **view_model)
|
<commit_before>from flask import render_template
from flask.views import View
class RenderableView(View):
def __init__(self, template, view):
self.template = template
self.view = view
def dispatch_request(self, *args, **kwargs):
view_model = self.view(*args, **kwargs)
return render_template(self.template, **view_model)
<commit_msg>Use local render_template than Flask's native
TODO: Provide a renderer argument at instantation?<commit_after>
|
from flaskbb.utils.helpers import render_template
from flask.views import View
class RenderableView(View):
def __init__(self, template, view):
self.template = template
self.view = view
def dispatch_request(self, *args, **kwargs):
view_model = self.view(*args, **kwargs)
return render_template(self.template, **view_model)
|
from flask import render_template
from flask.views import View
class RenderableView(View):
def __init__(self, template, view):
self.template = template
self.view = view
def dispatch_request(self, *args, **kwargs):
view_model = self.view(*args, **kwargs)
return render_template(self.template, **view_model)
Use local render_template than Flask's native
TODO: Provide a renderer argument at instantation?from flaskbb.utils.helpers import render_template
from flask.views import View
class RenderableView(View):
def __init__(self, template, view):
self.template = template
self.view = view
def dispatch_request(self, *args, **kwargs):
view_model = self.view(*args, **kwargs)
return render_template(self.template, **view_model)
|
<commit_before>from flask import render_template
from flask.views import View
class RenderableView(View):
def __init__(self, template, view):
self.template = template
self.view = view
def dispatch_request(self, *args, **kwargs):
view_model = self.view(*args, **kwargs)
return render_template(self.template, **view_model)
<commit_msg>Use local render_template than Flask's native
TODO: Provide a renderer argument at instantation?<commit_after>from flaskbb.utils.helpers import render_template
from flask.views import View
class RenderableView(View):
def __init__(self, template, view):
self.template = template
self.view = view
def dispatch_request(self, *args, **kwargs):
view_model = self.view(*args, **kwargs)
return render_template(self.template, **view_model)
|
e4967c60c172ee85c6050744b487156daee13c23
|
Dice.py
|
Dice.py
|
import random
class Die(object):
    """A single die with a configurable number of sides.

    A die may be "held", in which case rolling it leaves its face
    unchanged.
    """

    def __init__(self, sides=6):
        self.sides = sides      # number of faces
        self.held = False       # held dice ignore rolls
        self.die_face = 1       # face currently showing

    def change_held(self, held):
        """Set whether this die is held (exempt from rolls)."""
        self.held = held

    def roll_die(self):
        """Randomize the face in [1, sides] unless the die is held."""
        if not self.held:
            self.die_face = random.randint(1, self.sides)

    def get_die_face(self):
        """Return the face currently showing."""
        return self.die_face
|
Add base Die functionality(roll, hold, get)
|
Add base Die functionality(roll, hold, get)
|
Python
|
mit
|
achyutreddy24/DiceGame
|
Add base Die functionality(roll, hold, get)
|
import random
class Die(object):
    """A die with ``sides`` faces that can be held to ignore rolls."""
    def __init__(self, sides = 6):
        self.sides = sides
        self.held = False
        self.die_face = 1
    def change_held(self, held):
        """Mark the die as held (True) or free to roll (False)."""
        self.held = held
    def roll_die(self):
        """Randomize die_face in [1, sides] unless the die is held."""
        if (self.held == False):
            self.die_face = random.randint(1, self.sides)
        else:
            pass
    def get_die_face(self):
        """Return the face currently showing."""
        return self.die_face
|
<commit_before><commit_msg>Add base Die functionality(roll, hold, get)<commit_after>
|
import random
class Die(object):
    """A die with ``sides`` faces that can be held to ignore rolls."""
    def __init__(self, sides = 6):
        self.sides = sides
        self.held = False
        self.die_face = 1
    def change_held(self, held):
        """Mark the die as held (True) or free to roll (False)."""
        self.held = held
    def roll_die(self):
        """Randomize die_face in [1, sides] unless the die is held."""
        if (self.held == False):
            self.die_face = random.randint(1, self.sides)
        else:
            pass
    def get_die_face(self):
        """Return the face currently showing."""
        return self.die_face
|
Add base Die functionality(roll, hold, get)import random
class Die(object):
def __init__(self, sides = 6):
self.sides = sides
self.held = False
self.die_face = 1
def change_held(self, held):
self.held = held
def roll_die(self):
if (self.held == False):
self.die_face = random.randint(1, self.sides)
else:
pass
def get_die_face(self):
return self.die_face
|
<commit_before><commit_msg>Add base Die functionality(roll, hold, get)<commit_after>import random
class Die(object):
def __init__(self, sides = 6):
self.sides = sides
self.held = False
self.die_face = 1
def change_held(self, held):
self.held = held
def roll_die(self):
if (self.held == False):
self.die_face = random.randint(1, self.sides)
else:
pass
def get_die_face(self):
return self.die_face
|
|
e0771e601c7429f7929077c5397ec9fb228fafb1
|
ide/__init__.py
|
ide/__init__.py
|
from flask import Flask
from ide.assets import assets
from ide.util import root_relative_path

app = Flask(__name__)
app.config.from_object('ide.secret')
app.config.update({
    'DEBUG': True,
    'ASSETS_DEBUG': True,
    # Coerce to str: the LESS filter expects the compiler binary path as
    # plain text, while root_relative_path apparently returns a non-string
    # path object; passing it through unconverted crashes less compilation.
    'LESS_BIN': str(root_relative_path('node_modules', 'less', 'bin', 'lessc')),
})
assets.init_app(app)

# Imported last, presumably for its side effect of registering routes.
import ide.views
|
from flask import Flask
from ide.assets import assets
from ide.util import root_relative_path
# Application setup: settings loaded from the ide.secret module.
app = Flask(__name__)
app.config.from_object('ide.secret')
app.config.update({
    'DEBUG': True,
    'ASSETS_DEBUG': True,
    # str() because the LESS filter wants the binary path as plain text;
    # root_relative_path presumably returns a path object -- confirm.
    'LESS_BIN': str(root_relative_path('node_modules', 'less', 'bin', 'lessc')),
})
assets.init_app(app)
# Imported last, presumably for route-registration side effects.
import ide.views
|
Fix crash when compiling less files.
|
Fix crash when compiling less files.
|
Python
|
apache-2.0
|
Sable/mclab-ide,Sable/mclab-ide,Sable/mclab-ide,Sable/mclab-ide,Sable/mclab-ide,Sable/mclab-ide
|
from flask import Flask
from ide.assets import assets
from ide.util import root_relative_path
app = Flask(__name__)
app.config.from_object('ide.secret')
app.config.update({
'DEBUG': True,
'ASSETS_DEBUG': True,
'LESS_BIN': root_relative_path('node_modules', 'less', 'bin', 'lessc'),
})
assets.init_app(app)
import ide.views
Fix crash when compiling less files.
|
from flask import Flask
from ide.assets import assets
from ide.util import root_relative_path
app = Flask(__name__)
app.config.from_object('ide.secret')
app.config.update({
'DEBUG': True,
'ASSETS_DEBUG': True,
'LESS_BIN': str(root_relative_path('node_modules', 'less', 'bin', 'lessc')),
})
assets.init_app(app)
import ide.views
|
<commit_before>from flask import Flask
from ide.assets import assets
from ide.util import root_relative_path
app = Flask(__name__)
app.config.from_object('ide.secret')
app.config.update({
'DEBUG': True,
'ASSETS_DEBUG': True,
'LESS_BIN': root_relative_path('node_modules', 'less', 'bin', 'lessc'),
})
assets.init_app(app)
import ide.views
<commit_msg>Fix crash when compiling less files.<commit_after>
|
from flask import Flask
from ide.assets import assets
from ide.util import root_relative_path
app = Flask(__name__)
app.config.from_object('ide.secret')
app.config.update({
'DEBUG': True,
'ASSETS_DEBUG': True,
'LESS_BIN': str(root_relative_path('node_modules', 'less', 'bin', 'lessc')),
})
assets.init_app(app)
import ide.views
|
from flask import Flask
from ide.assets import assets
from ide.util import root_relative_path
app = Flask(__name__)
app.config.from_object('ide.secret')
app.config.update({
'DEBUG': True,
'ASSETS_DEBUG': True,
'LESS_BIN': root_relative_path('node_modules', 'less', 'bin', 'lessc'),
})
assets.init_app(app)
import ide.views
Fix crash when compiling less files.from flask import Flask
from ide.assets import assets
from ide.util import root_relative_path
app = Flask(__name__)
app.config.from_object('ide.secret')
app.config.update({
'DEBUG': True,
'ASSETS_DEBUG': True,
'LESS_BIN': str(root_relative_path('node_modules', 'less', 'bin', 'lessc')),
})
assets.init_app(app)
import ide.views
|
<commit_before>from flask import Flask
from ide.assets import assets
from ide.util import root_relative_path
app = Flask(__name__)
app.config.from_object('ide.secret')
app.config.update({
'DEBUG': True,
'ASSETS_DEBUG': True,
'LESS_BIN': root_relative_path('node_modules', 'less', 'bin', 'lessc'),
})
assets.init_app(app)
import ide.views
<commit_msg>Fix crash when compiling less files.<commit_after>from flask import Flask
from ide.assets import assets
from ide.util import root_relative_path
app = Flask(__name__)
app.config.from_object('ide.secret')
app.config.update({
'DEBUG': True,
'ASSETS_DEBUG': True,
'LESS_BIN': str(root_relative_path('node_modules', 'less', 'bin', 'lessc')),
})
assets.init_app(app)
import ide.views
|
3dcf879c7188f61d43d3a3b11dc74b8de431037a
|
pyethapp/tests/test_jsonrpc.py
|
pyethapp/tests/test_jsonrpc.py
|
solidity_code = "contract test { function multiply(uint a) returns(uint d) { return a * 7; } }"


def test_compileSolidity():
    """Compile a trivial contract and compare the full jsonrpc compiler
    output structure against the expected dict."""
    from pyethapp.jsonrpc import Compilers, data_encoder
    import ethereum._solidity
    s = ethereum._solidity.get_solidity()
    if s is None:
        # get_solidity() returns None when the solc binary is absent;
        # previously this fell through and crashed with AttributeError.
        return
    c = Compilers()
    assert 'solidity' in c.getCompilers()
    bc = s.compile(solidity_code)
    abi = s.mk_full_signature(solidity_code)
    r = dict(code=data_encoder(bc),
             info=dict(source=solidity_code,
                       language='Solidity',
                       languageVersion='0',
                       compilerVersion='0',
                       abiDefinition=abi,
                       userDoc=dict(methods=dict()),
                       developerDoc=dict(methods=dict()),
                       )
             )
    assert r == c.compileSolidity(solidity_code)
|
import pytest

solidity_code = "contract test { function multiply(uint a) returns(uint d) { return a * 7; } }"


def test_compileSolidity():
    """Compile a trivial contract and compare the full jsonrpc compiler
    output structure; xfail when solc is not installed."""
    from pyethapp.jsonrpc import Compilers, data_encoder
    import ethereum._solidity
    s = ethereum._solidity.get_solidity()
    if s is None:
        # Identity comparison with None per PEP 8 (was `s == None`).
        # pytest.xfail raises, so no else-branch indentation is needed.
        pytest.xfail("solidity not installed, not tested")
    c = Compilers()
    bc = s.compile(solidity_code)
    abi = s.mk_full_signature(solidity_code)
    r = dict(code=data_encoder(bc),
             info=dict(source=solidity_code,
                       language='Solidity',
                       languageVersion='0',
                       compilerVersion='0',
                       abiDefinition=abi,
                       userDoc=dict(methods=dict()),
                       developerDoc=dict(methods=dict()),
                       )
             )
    assert r == c.compileSolidity(solidity_code)
|
Fix test to xfail when Solidity is not present
|
Fix test to xfail when Solidity is not present
|
Python
|
mit
|
ethereum/pyethapp,changwu-tw/pyethapp,gsalgado/pyethapp,RomanZacharia/pyethapp,changwu-tw/pyethapp,ethereum/pyethapp,d-das/pyethapp,vaporry/pyethapp,RomanZacharia/pyethapp,gsalgado/pyethapp
|
solidity_code = "contract test { function multiply(uint a) returns(uint d) { return a * 7; } }"
def test_compileSolidity():
from pyethapp.jsonrpc import Compilers, data_encoder
import ethereum._solidity
s = ethereum._solidity.get_solidity()
c = Compilers()
assert 'solidity' in c.getCompilers()
bc = s.compile(solidity_code)
abi = s.mk_full_signature(solidity_code)
r = dict(code=data_encoder(bc),
info=dict(source=solidity_code,
language='Solidity',
languageVersion='0',
compilerVersion='0',
abiDefinition=abi,
userDoc=dict(methods=dict()),
developerDoc=dict(methods=dict()),
)
)
assert r == c.compileSolidity(solidity_code)
Fix test to xfail when Solidity is not present
|
import pytest
solidity_code = "contract test { function multiply(uint a) returns(uint d) { return a * 7; } }"
def test_compileSolidity():
from pyethapp.jsonrpc import Compilers, data_encoder
import ethereum._solidity
s = ethereum._solidity.get_solidity()
if s == None:
pytest.xfail("solidity not installed, not tested")
else:
c = Compilers()
bc = s.compile(solidity_code)
abi = s.mk_full_signature(solidity_code)
r = dict(code=data_encoder(bc),
info=dict(source=solidity_code,
language='Solidity',
languageVersion='0',
compilerVersion='0',
abiDefinition=abi,
userDoc=dict(methods=dict()),
developerDoc=dict(methods=dict()),
)
)
assert r == c.compileSolidity(solidity_code)
|
<commit_before>
solidity_code = "contract test { function multiply(uint a) returns(uint d) { return a * 7; } }"
def test_compileSolidity():
from pyethapp.jsonrpc import Compilers, data_encoder
import ethereum._solidity
s = ethereum._solidity.get_solidity()
c = Compilers()
assert 'solidity' in c.getCompilers()
bc = s.compile(solidity_code)
abi = s.mk_full_signature(solidity_code)
r = dict(code=data_encoder(bc),
info=dict(source=solidity_code,
language='Solidity',
languageVersion='0',
compilerVersion='0',
abiDefinition=abi,
userDoc=dict(methods=dict()),
developerDoc=dict(methods=dict()),
)
)
assert r == c.compileSolidity(solidity_code)
<commit_msg>Fix test to xfail when Solidity is not present<commit_after>
|
import pytest
solidity_code = "contract test { function multiply(uint a) returns(uint d) { return a * 7; } }"
def test_compileSolidity():
from pyethapp.jsonrpc import Compilers, data_encoder
import ethereum._solidity
s = ethereum._solidity.get_solidity()
if s == None:
pytest.xfail("solidity not installed, not tested")
else:
c = Compilers()
bc = s.compile(solidity_code)
abi = s.mk_full_signature(solidity_code)
r = dict(code=data_encoder(bc),
info=dict(source=solidity_code,
language='Solidity',
languageVersion='0',
compilerVersion='0',
abiDefinition=abi,
userDoc=dict(methods=dict()),
developerDoc=dict(methods=dict()),
)
)
assert r == c.compileSolidity(solidity_code)
|
solidity_code = "contract test { function multiply(uint a) returns(uint d) { return a * 7; } }"
def test_compileSolidity():
from pyethapp.jsonrpc import Compilers, data_encoder
import ethereum._solidity
s = ethereum._solidity.get_solidity()
c = Compilers()
assert 'solidity' in c.getCompilers()
bc = s.compile(solidity_code)
abi = s.mk_full_signature(solidity_code)
r = dict(code=data_encoder(bc),
info=dict(source=solidity_code,
language='Solidity',
languageVersion='0',
compilerVersion='0',
abiDefinition=abi,
userDoc=dict(methods=dict()),
developerDoc=dict(methods=dict()),
)
)
assert r == c.compileSolidity(solidity_code)
Fix test to xfail when Solidity is not presentimport pytest
solidity_code = "contract test { function multiply(uint a) returns(uint d) { return a * 7; } }"
def test_compileSolidity():
from pyethapp.jsonrpc import Compilers, data_encoder
import ethereum._solidity
s = ethereum._solidity.get_solidity()
if s == None:
pytest.xfail("solidity not installed, not tested")
else:
c = Compilers()
bc = s.compile(solidity_code)
abi = s.mk_full_signature(solidity_code)
r = dict(code=data_encoder(bc),
info=dict(source=solidity_code,
language='Solidity',
languageVersion='0',
compilerVersion='0',
abiDefinition=abi,
userDoc=dict(methods=dict()),
developerDoc=dict(methods=dict()),
)
)
assert r == c.compileSolidity(solidity_code)
|
<commit_before>
solidity_code = "contract test { function multiply(uint a) returns(uint d) { return a * 7; } }"
def test_compileSolidity():
from pyethapp.jsonrpc import Compilers, data_encoder
import ethereum._solidity
s = ethereum._solidity.get_solidity()
c = Compilers()
assert 'solidity' in c.getCompilers()
bc = s.compile(solidity_code)
abi = s.mk_full_signature(solidity_code)
r = dict(code=data_encoder(bc),
info=dict(source=solidity_code,
language='Solidity',
languageVersion='0',
compilerVersion='0',
abiDefinition=abi,
userDoc=dict(methods=dict()),
developerDoc=dict(methods=dict()),
)
)
assert r == c.compileSolidity(solidity_code)
<commit_msg>Fix test to xfail when Solidity is not present<commit_after>import pytest
solidity_code = "contract test { function multiply(uint a) returns(uint d) { return a * 7; } }"
def test_compileSolidity():
from pyethapp.jsonrpc import Compilers, data_encoder
import ethereum._solidity
s = ethereum._solidity.get_solidity()
if s == None:
pytest.xfail("solidity not installed, not tested")
else:
c = Compilers()
bc = s.compile(solidity_code)
abi = s.mk_full_signature(solidity_code)
r = dict(code=data_encoder(bc),
info=dict(source=solidity_code,
language='Solidity',
languageVersion='0',
compilerVersion='0',
abiDefinition=abi,
userDoc=dict(methods=dict()),
developerDoc=dict(methods=dict()),
)
)
assert r == c.compileSolidity(solidity_code)
|
e77ed1e555f363c23734ec80a0daf6fa740b78ee
|
scripts/cache/dm/combine.py
|
scripts/cache/dm/combine.py
|
# Combine DM layers into one shapefile!
# Daryl Herzmann 4 Nov 2005
# (Python 2 script: merges the five Drought Monitor category layers
# D0..D4 into one polygon shapefile dm_<ts>.shp with a DCAT attribute.)
import mapscript, dbflib, sys
ts = sys.argv[1]
outshp = mapscript.shapefileObj('dm_%s.shp'%ts, mapscript.MS_SHAPEFILE_POLYGON )
dbf = dbflib.create("dm_%s"%ts)
dbf.add_field("DCAT", dbflib.FTInteger, 1, 0)
counter = 0
for d in range(5):
    # NOTE(review): no existence check -- a missing per-category
    # shapefile makes this open call fail.
    shp = mapscript.shapefileObj('Drought_Areas_US_D%s.shp' %d)
    for i in range( shp.numshapes ):
        shpObj = shp.getShape(i)
        outshp.add( shpObj )
        dbf.write_record(counter, [d,])
        del shpObj
        counter += 1
# Deleting the objects flushes and closes the output files.
del outshp
del dbf
|
# Combine DM layers into one shapefile!
# Daryl Herzmann 4 Nov 2005
# (Python 2 script: merges the five Drought Monitor category layers
# D0..D4 into one polygon shapefile dm_<ts>.shp with a DCAT attribute.)
import mapscript, dbflib, sys, os
ts = sys.argv[1]
outshp = mapscript.shapefileObj('dm_%s.shp'%ts, mapscript.MS_SHAPEFILE_POLYGON )
dbf = dbflib.create("dm_%s"%ts)
dbf.add_field("DCAT", dbflib.FTInteger, 1, 0)
counter = 0
for d in range(5):
    # Skip categories whose shapefile was not produced for this period.
    if not os.path.isfile("Drought_Areas_US_D%s.shp" %d):
        print "No Shapefile for D %s" % (d,)
        continue
    shp = mapscript.shapefileObj('Drought_Areas_US_D%s.shp' %d)
    for i in range( shp.numshapes ):
        shpObj = shp.getShape(i)
        outshp.add( shpObj )
        dbf.write_record(counter, [d,])
        del shpObj
        counter += 1
# Deleting the objects flushes and closes the output files.
del outshp
del dbf
|
Check for shapefile existence before attempting to open
|
Check for shapefile existence before attempting to open
|
Python
|
mit
|
akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem
|
# Combine DM layers into one shapefile!
# Daryl Herzmann 4 Nov 2005
import mapscript, dbflib, sys
ts = sys.argv[1]
outshp = mapscript.shapefileObj('dm_%s.shp'%ts, mapscript.MS_SHAPEFILE_POLYGON )
dbf = dbflib.create("dm_%s"%ts)
dbf.add_field("DCAT", dbflib.FTInteger, 1, 0)
counter = 0
for d in range(5):
shp = mapscript.shapefileObj('Drought_Areas_US_D%s.shp' %d)
for i in range( shp.numshapes ):
shpObj = shp.getShape(i)
outshp.add( shpObj )
dbf.write_record(counter, [d,])
del shpObj
counter += 1
del outshp
del dbf
Check for shapefile existance before attempting to open
|
# Combine DM layers into one shapefile!
# Daryl Herzmann 4 Nov 2005
import mapscript, dbflib, sys, os
ts = sys.argv[1]
outshp = mapscript.shapefileObj('dm_%s.shp'%ts, mapscript.MS_SHAPEFILE_POLYGON )
dbf = dbflib.create("dm_%s"%ts)
dbf.add_field("DCAT", dbflib.FTInteger, 1, 0)
counter = 0
for d in range(5):
if not os.path.isfile("Drought_Areas_US_D%s.shp" %d):
print "No Shapefile for D %s" % (d,)
continue
shp = mapscript.shapefileObj('Drought_Areas_US_D%s.shp' %d)
for i in range( shp.numshapes ):
shpObj = shp.getShape(i)
outshp.add( shpObj )
dbf.write_record(counter, [d,])
del shpObj
counter += 1
del outshp
del dbf
|
<commit_before># Combine DM layers into one shapefile!
# Daryl Herzmann 4 Nov 2005
import mapscript, dbflib, sys
ts = sys.argv[1]
outshp = mapscript.shapefileObj('dm_%s.shp'%ts, mapscript.MS_SHAPEFILE_POLYGON )
dbf = dbflib.create("dm_%s"%ts)
dbf.add_field("DCAT", dbflib.FTInteger, 1, 0)
counter = 0
for d in range(5):
shp = mapscript.shapefileObj('Drought_Areas_US_D%s.shp' %d)
for i in range( shp.numshapes ):
shpObj = shp.getShape(i)
outshp.add( shpObj )
dbf.write_record(counter, [d,])
del shpObj
counter += 1
del outshp
del dbf
<commit_msg>Check for shapefile existance before attempting to open<commit_after>
|
# Combine DM layers into one shapefile!
# Daryl Herzmann 4 Nov 2005
import mapscript, dbflib, sys, os
ts = sys.argv[1]
outshp = mapscript.shapefileObj('dm_%s.shp'%ts, mapscript.MS_SHAPEFILE_POLYGON )
dbf = dbflib.create("dm_%s"%ts)
dbf.add_field("DCAT", dbflib.FTInteger, 1, 0)
counter = 0
for d in range(5):
if not os.path.isfile("Drought_Areas_US_D%s.shp" %d):
print "No Shapefile for D %s" % (d,)
continue
shp = mapscript.shapefileObj('Drought_Areas_US_D%s.shp' %d)
for i in range( shp.numshapes ):
shpObj = shp.getShape(i)
outshp.add( shpObj )
dbf.write_record(counter, [d,])
del shpObj
counter += 1
del outshp
del dbf
|
# Combine DM layers into one shapefile!
# Daryl Herzmann 4 Nov 2005
import mapscript, dbflib, sys
ts = sys.argv[1]
outshp = mapscript.shapefileObj('dm_%s.shp'%ts, mapscript.MS_SHAPEFILE_POLYGON )
dbf = dbflib.create("dm_%s"%ts)
dbf.add_field("DCAT", dbflib.FTInteger, 1, 0)
counter = 0
for d in range(5):
shp = mapscript.shapefileObj('Drought_Areas_US_D%s.shp' %d)
for i in range( shp.numshapes ):
shpObj = shp.getShape(i)
outshp.add( shpObj )
dbf.write_record(counter, [d,])
del shpObj
counter += 1
del outshp
del dbf
Check for shapefile existance before attempting to open# Combine DM layers into one shapefile!
# Daryl Herzmann 4 Nov 2005
import mapscript, dbflib, sys, os
ts = sys.argv[1]
outshp = mapscript.shapefileObj('dm_%s.shp'%ts, mapscript.MS_SHAPEFILE_POLYGON )
dbf = dbflib.create("dm_%s"%ts)
dbf.add_field("DCAT", dbflib.FTInteger, 1, 0)
counter = 0
for d in range(5):
if not os.path.isfile("Drought_Areas_US_D%s.shp" %d):
print "No Shapefile for D %s" % (d,)
continue
shp = mapscript.shapefileObj('Drought_Areas_US_D%s.shp' %d)
for i in range( shp.numshapes ):
shpObj = shp.getShape(i)
outshp.add( shpObj )
dbf.write_record(counter, [d,])
del shpObj
counter += 1
del outshp
del dbf
|
<commit_before># Combine DM layers into one shapefile!
# Daryl Herzmann 4 Nov 2005
import mapscript, dbflib, sys
ts = sys.argv[1]
outshp = mapscript.shapefileObj('dm_%s.shp'%ts, mapscript.MS_SHAPEFILE_POLYGON )
dbf = dbflib.create("dm_%s"%ts)
dbf.add_field("DCAT", dbflib.FTInteger, 1, 0)
counter = 0
for d in range(5):
shp = mapscript.shapefileObj('Drought_Areas_US_D%s.shp' %d)
for i in range( shp.numshapes ):
shpObj = shp.getShape(i)
outshp.add( shpObj )
dbf.write_record(counter, [d,])
del shpObj
counter += 1
del outshp
del dbf
<commit_msg>Check for shapefile existance before attempting to open<commit_after># Combine DM layers into one shapefile!
# Daryl Herzmann 4 Nov 2005
import mapscript, dbflib, sys, os
ts = sys.argv[1]
outshp = mapscript.shapefileObj('dm_%s.shp'%ts, mapscript.MS_SHAPEFILE_POLYGON )
dbf = dbflib.create("dm_%s"%ts)
dbf.add_field("DCAT", dbflib.FTInteger, 1, 0)
counter = 0
for d in range(5):
if not os.path.isfile("Drought_Areas_US_D%s.shp" %d):
print "No Shapefile for D %s" % (d,)
continue
shp = mapscript.shapefileObj('Drought_Areas_US_D%s.shp' %d)
for i in range( shp.numshapes ):
shpObj = shp.getShape(i)
outshp.add( shpObj )
dbf.write_record(counter, [d,])
del shpObj
counter += 1
del outshp
del dbf
|
713e720bd3e4029273d72ab58aa79fbd79f0bafa
|
unit_tests/test_analyse_idynomics.py
|
unit_tests/test_analyse_idynomics.py
|
from nose.tools import *
from analyse_idynomics import *
from os.path import join, dirname, realpath
class TestAnalyseiDynomics:
    """Nose tests for AnalyseiDynomics against the bundled test_data run."""
    # Expected values baked into the fixture data under test_data/.
    expected_solutes = ['MyAtmos', 'pressure']
    expected_species = ['MyBact']
    expected_timesteps = 2
    expected_dimensions = (20.0, 20.0, 2.0)
    def setUp(self):
        # Point the analyser at the test_data directory next to this file.
        self.directory = join(dirname(realpath(__file__)), 'test_data')
        self.analysis = AnalyseiDynomics(self.directory)
    def test_init(self):
        assert_is(self.directory, self.analysis.directory)
    def test_solute_names(self):
        actual_solutes = self.analysis.solute_names
        assert_list_equal(self.expected_solutes, actual_solutes)
    def test_species_names(self):
        actual_species = self.analysis.species_names
        assert_list_equal(self.expected_species, actual_species)
    def test_total_timesteps(self):
        actual_timesteps = self.analysis.total_timesteps
        assert_equals(self.expected_timesteps, actual_timesteps)
    def test_world_dimensions(self):
        actual_dimensions = self.analysis.world_dimensions
        assert_equal(self.expected_dimensions, actual_dimensions)
|
from nose.tools import *
from analyse_idynomics import *
from os.path import join, dirname, realpath
class TestAnalyseiDynomics:
    """Nose tests for AnalyseiDynomics against the bundled test_data run."""
    # Expected values baked into the fixture data under test_data/.
    expected_solutes = ['MyAtmos', 'pressure']
    expected_species = ['MyBact']
    expected_reaction_rates = ['MyGrowth-rate']
    expected_timesteps = 2
    expected_dimensions = (20.0, 20.0, 2.0)
    def setUp(self):
        # Point the analyser at the test_data directory next to this file.
        self.directory = join(dirname(realpath(__file__)), 'test_data')
        self.analysis = AnalyseiDynomics(self.directory)
    def test_init(self):
        assert_is(self.directory, self.analysis.directory)
    def test_solute_names(self):
        actual_solutes = self.analysis.solute_names
        assert_list_equal(self.expected_solutes, actual_solutes)
    def test_species_names(self):
        actual_species = self.analysis.species_names
        assert_list_equal(self.expected_species, actual_species)
    def test_reaction_rate_names(self):
        actual_reaction_rates = self.analysis.reaction_rate_names
        assert_list_equal(self.expected_reaction_rates, actual_reaction_rates)
    def test_total_timesteps(self):
        actual_timesteps = self.analysis.total_timesteps
        assert_equals(self.expected_timesteps, actual_timesteps)
    def test_world_dimensions(self):
        actual_dimensions = self.analysis.world_dimensions
        assert_equal(self.expected_dimensions, actual_dimensions)
|
Add unit test for getting reaction rate names
|
Add unit test for getting reaction rate names
|
Python
|
mit
|
fophillips/pyDynoMiCS
|
from nose.tools import *
from analyse_idynomics import *
from os.path import join, dirname, realpath
class TestAnalyseiDynomics:
expected_solutes = ['MyAtmos', 'pressure']
expected_species = ['MyBact']
expected_timesteps = 2
expected_dimensions = (20.0, 20.0, 2.0)
def setUp(self):
self.directory = join(dirname(realpath(__file__)), 'test_data')
self.analysis = AnalyseiDynomics(self.directory)
def test_init(self):
assert_is(self.directory, self.analysis.directory)
def test_solute_names(self):
actual_solutes = self.analysis.solute_names
assert_list_equal(self.expected_solutes, actual_solutes)
def test_species_names(self):
actual_species = self.analysis.species_names
assert_list_equal(self.expected_species, actual_species)
def test_total_timesteps(self):
actual_timesteps = self.analysis.total_timesteps
assert_equals(self.expected_timesteps, actual_timesteps)
def test_world_dimensions(self):
actual_dimensions = self.analysis.world_dimensions
assert_equal(self.expected_dimensions, actual_dimensions)
Add unit test for getting reaction rate names
|
from nose.tools import *
from analyse_idynomics import *
from os.path import join, dirname, realpath
class TestAnalyseiDynomics:
expected_solutes = ['MyAtmos', 'pressure']
expected_species = ['MyBact']
expected_reaction_rates = ['MyGrowth-rate']
expected_timesteps = 2
expected_dimensions = (20.0, 20.0, 2.0)
def setUp(self):
self.directory = join(dirname(realpath(__file__)), 'test_data')
self.analysis = AnalyseiDynomics(self.directory)
def test_init(self):
assert_is(self.directory, self.analysis.directory)
def test_solute_names(self):
actual_solutes = self.analysis.solute_names
assert_list_equal(self.expected_solutes, actual_solutes)
def test_species_names(self):
actual_species = self.analysis.species_names
assert_list_equal(self.expected_species, actual_species)
def test_reaction_rate_names(self):
actual_reaction_rates = self.analysis.reaction_rate_names
assert_list_equal(self.expected_reaction_rates, actual_reaction_rates)
def test_total_timesteps(self):
actual_timesteps = self.analysis.total_timesteps
assert_equals(self.expected_timesteps, actual_timesteps)
def test_world_dimensions(self):
actual_dimensions = self.analysis.world_dimensions
assert_equal(self.expected_dimensions, actual_dimensions)
|
<commit_before>from nose.tools import *
from analyse_idynomics import *
from os.path import join, dirname, realpath
class TestAnalyseiDynomics:
expected_solutes = ['MyAtmos', 'pressure']
expected_species = ['MyBact']
expected_timesteps = 2
expected_dimensions = (20.0, 20.0, 2.0)
def setUp(self):
self.directory = join(dirname(realpath(__file__)), 'test_data')
self.analysis = AnalyseiDynomics(self.directory)
def test_init(self):
assert_is(self.directory, self.analysis.directory)
def test_solute_names(self):
actual_solutes = self.analysis.solute_names
assert_list_equal(self.expected_solutes, actual_solutes)
def test_species_names(self):
actual_species = self.analysis.species_names
assert_list_equal(self.expected_species, actual_species)
def test_total_timesteps(self):
actual_timesteps = self.analysis.total_timesteps
assert_equals(self.expected_timesteps, actual_timesteps)
def test_world_dimensions(self):
actual_dimensions = self.analysis.world_dimensions
assert_equal(self.expected_dimensions, actual_dimensions)
<commit_msg>Add unit test for getting reaction rate names<commit_after>
|
from nose.tools import *
from analyse_idynomics import *
from os.path import join, dirname, realpath
class TestAnalyseiDynomics:
expected_solutes = ['MyAtmos', 'pressure']
expected_species = ['MyBact']
expected_reaction_rates = ['MyGrowth-rate']
expected_timesteps = 2
expected_dimensions = (20.0, 20.0, 2.0)
def setUp(self):
self.directory = join(dirname(realpath(__file__)), 'test_data')
self.analysis = AnalyseiDynomics(self.directory)
def test_init(self):
assert_is(self.directory, self.analysis.directory)
def test_solute_names(self):
actual_solutes = self.analysis.solute_names
assert_list_equal(self.expected_solutes, actual_solutes)
def test_species_names(self):
actual_species = self.analysis.species_names
assert_list_equal(self.expected_species, actual_species)
def test_reaction_rate_names(self):
actual_reaction_rates = self.analysis.reaction_rate_names
assert_list_equal(self.expected_reaction_rates, actual_reaction_rates)
def test_total_timesteps(self):
actual_timesteps = self.analysis.total_timesteps
assert_equals(self.expected_timesteps, actual_timesteps)
def test_world_dimensions(self):
actual_dimensions = self.analysis.world_dimensions
assert_equal(self.expected_dimensions, actual_dimensions)
|
from nose.tools import *
from analyse_idynomics import *
from os.path import join, dirname, realpath
class TestAnalyseiDynomics:
expected_solutes = ['MyAtmos', 'pressure']
expected_species = ['MyBact']
expected_timesteps = 2
expected_dimensions = (20.0, 20.0, 2.0)
def setUp(self):
self.directory = join(dirname(realpath(__file__)), 'test_data')
self.analysis = AnalyseiDynomics(self.directory)
def test_init(self):
assert_is(self.directory, self.analysis.directory)
def test_solute_names(self):
actual_solutes = self.analysis.solute_names
assert_list_equal(self.expected_solutes, actual_solutes)
def test_species_names(self):
actual_species = self.analysis.species_names
assert_list_equal(self.expected_species, actual_species)
def test_total_timesteps(self):
actual_timesteps = self.analysis.total_timesteps
assert_equals(self.expected_timesteps, actual_timesteps)
def test_world_dimensions(self):
actual_dimensions = self.analysis.world_dimensions
assert_equal(self.expected_dimensions, actual_dimensions)
Add unit test for getting reaction rate namesfrom nose.tools import *
from analyse_idynomics import *
from os.path import join, dirname, realpath
class TestAnalyseiDynomics:
expected_solutes = ['MyAtmos', 'pressure']
expected_species = ['MyBact']
expected_reaction_rates = ['MyGrowth-rate']
expected_timesteps = 2
expected_dimensions = (20.0, 20.0, 2.0)
def setUp(self):
self.directory = join(dirname(realpath(__file__)), 'test_data')
self.analysis = AnalyseiDynomics(self.directory)
def test_init(self):
assert_is(self.directory, self.analysis.directory)
def test_solute_names(self):
actual_solutes = self.analysis.solute_names
assert_list_equal(self.expected_solutes, actual_solutes)
def test_species_names(self):
actual_species = self.analysis.species_names
assert_list_equal(self.expected_species, actual_species)
def test_reaction_rate_names(self):
actual_reaction_rates = self.analysis.reaction_rate_names
assert_list_equal(self.expected_reaction_rates, actual_reaction_rates)
def test_total_timesteps(self):
actual_timesteps = self.analysis.total_timesteps
assert_equals(self.expected_timesteps, actual_timesteps)
def test_world_dimensions(self):
actual_dimensions = self.analysis.world_dimensions
assert_equal(self.expected_dimensions, actual_dimensions)
|
<commit_before>from nose.tools import *
from analyse_idynomics import *
from os.path import join, dirname, realpath
class TestAnalyseiDynomics:
expected_solutes = ['MyAtmos', 'pressure']
expected_species = ['MyBact']
expected_timesteps = 2
expected_dimensions = (20.0, 20.0, 2.0)
def setUp(self):
self.directory = join(dirname(realpath(__file__)), 'test_data')
self.analysis = AnalyseiDynomics(self.directory)
def test_init(self):
assert_is(self.directory, self.analysis.directory)
def test_solute_names(self):
actual_solutes = self.analysis.solute_names
assert_list_equal(self.expected_solutes, actual_solutes)
def test_species_names(self):
actual_species = self.analysis.species_names
assert_list_equal(self.expected_species, actual_species)
def test_total_timesteps(self):
actual_timesteps = self.analysis.total_timesteps
assert_equals(self.expected_timesteps, actual_timesteps)
def test_world_dimensions(self):
actual_dimensions = self.analysis.world_dimensions
assert_equal(self.expected_dimensions, actual_dimensions)
<commit_msg>Add unit test for getting reaction rate names<commit_after>from nose.tools import *
from analyse_idynomics import *
from os.path import join, dirname, realpath
class TestAnalyseiDynomics:
expected_solutes = ['MyAtmos', 'pressure']
expected_species = ['MyBact']
expected_reaction_rates = ['MyGrowth-rate']
expected_timesteps = 2
expected_dimensions = (20.0, 20.0, 2.0)
def setUp(self):
self.directory = join(dirname(realpath(__file__)), 'test_data')
self.analysis = AnalyseiDynomics(self.directory)
def test_init(self):
assert_is(self.directory, self.analysis.directory)
def test_solute_names(self):
actual_solutes = self.analysis.solute_names
assert_list_equal(self.expected_solutes, actual_solutes)
def test_species_names(self):
actual_species = self.analysis.species_names
assert_list_equal(self.expected_species, actual_species)
def test_reaction_rate_names(self):
actual_reaction_rates = self.analysis.reaction_rate_names
assert_list_equal(self.expected_reaction_rates, actual_reaction_rates)
def test_total_timesteps(self):
actual_timesteps = self.analysis.total_timesteps
assert_equals(self.expected_timesteps, actual_timesteps)
def test_world_dimensions(self):
actual_dimensions = self.analysis.world_dimensions
assert_equal(self.expected_dimensions, actual_dimensions)
|
284d750d7da25b1d3db17ca4d5931e1b6d1d7319
|
tests/browser/test_editor.py
|
tests/browser/test_editor.py
|
from fancypages.test.testcases import SplinterTestCase
class TestEditingFancyPage(SplinterTestCase):
is_staff = True
is_logged_in = True
def test_moving_a_block(self):
pass
|
from django.core.urlresolvers import reverse
from fancypages.test.testcases import SplinterTestCase
class TestTheEditorPanel(SplinterTestCase):
is_staff = True
is_logged_in = True
def _get_cookie_names(self):
return [c.get('name') for c in self.browser.cookies.all()]
def test_can_be_opened_by_clicking_the_handle(self):
self.goto(reverse('home'))
body_tag = self.browser.find_by_css('body').first
self.assertTrue(body_tag.has_class('editor-hidden'))
self.browser.find_by_css('#editor-handle').click()
self.assertFalse(body_tag.has_class('editor-hidden'))
self.assertIn('fpEditorOpened', self._get_cookie_names())
def test_can_be_closed_by_clicking_the_x(self):
self.goto(reverse('home'))
self.browser.find_by_css('#editor-handle').click()
body_tag = self.browser.find_by_css('body').first
self.assertFalse(body_tag.has_class('editor-hidden'))
self.browser.find_by_css('#editor-close').click()
body_tag = self.browser.find_by_css('body').first
self.assertTrue(body_tag.has_class('editor-hidden'))
self.assertNotIn('fpEditorOpened', self._get_cookie_names())
def test_remains_opened_when_reloading_the_page(self):
self.goto(reverse('home'))
self.browser.find_by_css('#editor-handle').click()
body_tag = self.browser.find_by_css('body').first
self.assertFalse(body_tag.has_class('editor-hidden'))
self.goto(reverse('home'))
body_tag = self.browser.find_by_css('body').first
self.assertFalse(body_tag.has_class('editor-hidden'))
|
Add tests for editor panel JS
|
Add tests for editor panel JS
|
Python
|
bsd-3-clause
|
tangentlabs/django-fancypages,socradev/django-fancypages,socradev/django-fancypages,tangentlabs/django-fancypages,tangentlabs/django-fancypages,socradev/django-fancypages
|
from fancypages.test.testcases import SplinterTestCase
class TestEditingFancyPage(SplinterTestCase):
is_staff = True
is_logged_in = True
def test_moving_a_block(self):
pass
Add tests for editor panel JS
|
from django.core.urlresolvers import reverse
from fancypages.test.testcases import SplinterTestCase
class TestTheEditorPanel(SplinterTestCase):
is_staff = True
is_logged_in = True
def _get_cookie_names(self):
return [c.get('name') for c in self.browser.cookies.all()]
def test_can_be_opened_by_clicking_the_handle(self):
self.goto(reverse('home'))
body_tag = self.browser.find_by_css('body').first
self.assertTrue(body_tag.has_class('editor-hidden'))
self.browser.find_by_css('#editor-handle').click()
self.assertFalse(body_tag.has_class('editor-hidden'))
self.assertIn('fpEditorOpened', self._get_cookie_names())
def test_can_be_closed_by_clicking_the_x(self):
self.goto(reverse('home'))
self.browser.find_by_css('#editor-handle').click()
body_tag = self.browser.find_by_css('body').first
self.assertFalse(body_tag.has_class('editor-hidden'))
self.browser.find_by_css('#editor-close').click()
body_tag = self.browser.find_by_css('body').first
self.assertTrue(body_tag.has_class('editor-hidden'))
self.assertNotIn('fpEditorOpened', self._get_cookie_names())
def test_remains_opened_when_reloading_the_page(self):
self.goto(reverse('home'))
self.browser.find_by_css('#editor-handle').click()
body_tag = self.browser.find_by_css('body').first
self.assertFalse(body_tag.has_class('editor-hidden'))
self.goto(reverse('home'))
body_tag = self.browser.find_by_css('body').first
self.assertFalse(body_tag.has_class('editor-hidden'))
|
<commit_before>from fancypages.test.testcases import SplinterTestCase
class TestEditingFancyPage(SplinterTestCase):
is_staff = True
is_logged_in = True
def test_moving_a_block(self):
pass
<commit_msg>Add tests for editor panel JS<commit_after>
|
from django.core.urlresolvers import reverse
from fancypages.test.testcases import SplinterTestCase
class TestTheEditorPanel(SplinterTestCase):
is_staff = True
is_logged_in = True
def _get_cookie_names(self):
return [c.get('name') for c in self.browser.cookies.all()]
def test_can_be_opened_by_clicking_the_handle(self):
self.goto(reverse('home'))
body_tag = self.browser.find_by_css('body').first
self.assertTrue(body_tag.has_class('editor-hidden'))
self.browser.find_by_css('#editor-handle').click()
self.assertFalse(body_tag.has_class('editor-hidden'))
self.assertIn('fpEditorOpened', self._get_cookie_names())
def test_can_be_closed_by_clicking_the_x(self):
self.goto(reverse('home'))
self.browser.find_by_css('#editor-handle').click()
body_tag = self.browser.find_by_css('body').first
self.assertFalse(body_tag.has_class('editor-hidden'))
self.browser.find_by_css('#editor-close').click()
body_tag = self.browser.find_by_css('body').first
self.assertTrue(body_tag.has_class('editor-hidden'))
self.assertNotIn('fpEditorOpened', self._get_cookie_names())
def test_remains_opened_when_reloading_the_page(self):
self.goto(reverse('home'))
self.browser.find_by_css('#editor-handle').click()
body_tag = self.browser.find_by_css('body').first
self.assertFalse(body_tag.has_class('editor-hidden'))
self.goto(reverse('home'))
body_tag = self.browser.find_by_css('body').first
self.assertFalse(body_tag.has_class('editor-hidden'))
|
from fancypages.test.testcases import SplinterTestCase
class TestEditingFancyPage(SplinterTestCase):
is_staff = True
is_logged_in = True
def test_moving_a_block(self):
pass
Add tests for editor panel JSfrom django.core.urlresolvers import reverse
from fancypages.test.testcases import SplinterTestCase
class TestTheEditorPanel(SplinterTestCase):
is_staff = True
is_logged_in = True
def _get_cookie_names(self):
return [c.get('name') for c in self.browser.cookies.all()]
def test_can_be_opened_by_clicking_the_handle(self):
self.goto(reverse('home'))
body_tag = self.browser.find_by_css('body').first
self.assertTrue(body_tag.has_class('editor-hidden'))
self.browser.find_by_css('#editor-handle').click()
self.assertFalse(body_tag.has_class('editor-hidden'))
self.assertIn('fpEditorOpened', self._get_cookie_names())
def test_can_be_closed_by_clicking_the_x(self):
self.goto(reverse('home'))
self.browser.find_by_css('#editor-handle').click()
body_tag = self.browser.find_by_css('body').first
self.assertFalse(body_tag.has_class('editor-hidden'))
self.browser.find_by_css('#editor-close').click()
body_tag = self.browser.find_by_css('body').first
self.assertTrue(body_tag.has_class('editor-hidden'))
self.assertNotIn('fpEditorOpened', self._get_cookie_names())
def test_remains_opened_when_reloading_the_page(self):
self.goto(reverse('home'))
self.browser.find_by_css('#editor-handle').click()
body_tag = self.browser.find_by_css('body').first
self.assertFalse(body_tag.has_class('editor-hidden'))
self.goto(reverse('home'))
body_tag = self.browser.find_by_css('body').first
self.assertFalse(body_tag.has_class('editor-hidden'))
|
<commit_before>from fancypages.test.testcases import SplinterTestCase
class TestEditingFancyPage(SplinterTestCase):
is_staff = True
is_logged_in = True
def test_moving_a_block(self):
pass
<commit_msg>Add tests for editor panel JS<commit_after>from django.core.urlresolvers import reverse
from fancypages.test.testcases import SplinterTestCase
class TestTheEditorPanel(SplinterTestCase):
is_staff = True
is_logged_in = True
def _get_cookie_names(self):
return [c.get('name') for c in self.browser.cookies.all()]
def test_can_be_opened_by_clicking_the_handle(self):
self.goto(reverse('home'))
body_tag = self.browser.find_by_css('body').first
self.assertTrue(body_tag.has_class('editor-hidden'))
self.browser.find_by_css('#editor-handle').click()
self.assertFalse(body_tag.has_class('editor-hidden'))
self.assertIn('fpEditorOpened', self._get_cookie_names())
def test_can_be_closed_by_clicking_the_x(self):
self.goto(reverse('home'))
self.browser.find_by_css('#editor-handle').click()
body_tag = self.browser.find_by_css('body').first
self.assertFalse(body_tag.has_class('editor-hidden'))
self.browser.find_by_css('#editor-close').click()
body_tag = self.browser.find_by_css('body').first
self.assertTrue(body_tag.has_class('editor-hidden'))
self.assertNotIn('fpEditorOpened', self._get_cookie_names())
def test_remains_opened_when_reloading_the_page(self):
self.goto(reverse('home'))
self.browser.find_by_css('#editor-handle').click()
body_tag = self.browser.find_by_css('body').first
self.assertFalse(body_tag.has_class('editor-hidden'))
self.goto(reverse('home'))
body_tag = self.browser.find_by_css('body').first
self.assertFalse(body_tag.has_class('editor-hidden'))
|
53cf4a078a072c9510e389295c19a0391b1eeef8
|
grab/tools/encoding.py
|
grab/tools/encoding.py
|
import re
RE_SPECIAL_ENTITY = re.compile('&#(1[2-6][0-9]);')
def smart_str(value, encoding='utf-8'):
"""
Normalize unicode/byte string to byte string.
"""
if isinstance(value, unicode):
value = value.encode(encoding)
return value
def smart_unicode(value, encoding='utf-8'):
"""
Normalize unicode/btye string to unicode string.
"""
if not isinstance(value, unicode):
value = value.decode(encoding)
return value
def special_entity_handler(match):
num = int(match.group(1))
if 128 <= num <= 160:
return '&#%d;' % ord(chr(num).decode('cp1252'))
else:
return match.group(0)
def fix_special_entities(body):
return RE_SPECIAL_ENTITY.sub(special_entity_handler, body)
|
import re
RE_SPECIAL_ENTITY = re.compile('&#(1[2-6][0-9]);')
def smart_str(value, encoding='utf-8'):
"""
Normalize unicode/byte string to byte string.
"""
if isinstance(value, unicode):
value = value.encode(encoding)
return value
def smart_unicode(value, encoding='utf-8'):
"""
Normalize unicode/btye string to unicode string.
"""
if not isinstance(value, unicode):
value = value.decode(encoding)
return value
def special_entity_handler(match):
num = int(match.group(1))
if 128 <= num <= 160:
try:
return '&#%d;' % ord(chr(num).decode('cp1252'))
except UnicodeDecodeError:
return match.group(0)
else:
return match.group(0)
def fix_special_entities(body):
return RE_SPECIAL_ENTITY.sub(special_entity_handler, body)
|
Fix issue in special_entity_handler function
|
Fix issue in special_entity_handler function
|
Python
|
mit
|
alihalabyah/grab,pombredanne/grab-1,SpaceAppsXploration/grab,alihalabyah/grab,codevlabs/grab,DDShadoww/grab,lorien/grab,kevinlondon/grab,raybuhr/grab,maurobaraldi/grab,huiyi1990/grab,istinspring/grab,giserh/grab,DDShadoww/grab,maurobaraldi/grab,pombredanne/grab-1,shaunstanislaus/grab,subeax/grab,huiyi1990/grab,liorvh/grab,raybuhr/grab,liorvh/grab,codevlabs/grab,subeax/grab,shaunstanislaus/grab,subeax/grab,giserh/grab,lorien/grab,SpaceAppsXploration/grab,istinspring/grab,kevinlondon/grab
|
import re
RE_SPECIAL_ENTITY = re.compile('&#(1[2-6][0-9]);')
def smart_str(value, encoding='utf-8'):
"""
Normalize unicode/byte string to byte string.
"""
if isinstance(value, unicode):
value = value.encode(encoding)
return value
def smart_unicode(value, encoding='utf-8'):
"""
Normalize unicode/btye string to unicode string.
"""
if not isinstance(value, unicode):
value = value.decode(encoding)
return value
def special_entity_handler(match):
num = int(match.group(1))
if 128 <= num <= 160:
return '&#%d;' % ord(chr(num).decode('cp1252'))
else:
return match.group(0)
def fix_special_entities(body):
return RE_SPECIAL_ENTITY.sub(special_entity_handler, body)
Fix issue in special_entity_handler function
|
import re
RE_SPECIAL_ENTITY = re.compile('&#(1[2-6][0-9]);')
def smart_str(value, encoding='utf-8'):
"""
Normalize unicode/byte string to byte string.
"""
if isinstance(value, unicode):
value = value.encode(encoding)
return value
def smart_unicode(value, encoding='utf-8'):
"""
Normalize unicode/btye string to unicode string.
"""
if not isinstance(value, unicode):
value = value.decode(encoding)
return value
def special_entity_handler(match):
num = int(match.group(1))
if 128 <= num <= 160:
try:
return '&#%d;' % ord(chr(num).decode('cp1252'))
except UnicodeDecodeError:
return match.group(0)
else:
return match.group(0)
def fix_special_entities(body):
return RE_SPECIAL_ENTITY.sub(special_entity_handler, body)
|
<commit_before>import re
RE_SPECIAL_ENTITY = re.compile('&#(1[2-6][0-9]);')
def smart_str(value, encoding='utf-8'):
"""
Normalize unicode/byte string to byte string.
"""
if isinstance(value, unicode):
value = value.encode(encoding)
return value
def smart_unicode(value, encoding='utf-8'):
"""
Normalize unicode/btye string to unicode string.
"""
if not isinstance(value, unicode):
value = value.decode(encoding)
return value
def special_entity_handler(match):
num = int(match.group(1))
if 128 <= num <= 160:
return '&#%d;' % ord(chr(num).decode('cp1252'))
else:
return match.group(0)
def fix_special_entities(body):
return RE_SPECIAL_ENTITY.sub(special_entity_handler, body)
<commit_msg>Fix issue in special_entity_handler function<commit_after>
|
import re
RE_SPECIAL_ENTITY = re.compile('&#(1[2-6][0-9]);')
def smart_str(value, encoding='utf-8'):
"""
Normalize unicode/byte string to byte string.
"""
if isinstance(value, unicode):
value = value.encode(encoding)
return value
def smart_unicode(value, encoding='utf-8'):
"""
Normalize unicode/btye string to unicode string.
"""
if not isinstance(value, unicode):
value = value.decode(encoding)
return value
def special_entity_handler(match):
num = int(match.group(1))
if 128 <= num <= 160:
try:
return '&#%d;' % ord(chr(num).decode('cp1252'))
except UnicodeDecodeError:
return match.group(0)
else:
return match.group(0)
def fix_special_entities(body):
return RE_SPECIAL_ENTITY.sub(special_entity_handler, body)
|
import re
RE_SPECIAL_ENTITY = re.compile('&#(1[2-6][0-9]);')
def smart_str(value, encoding='utf-8'):
"""
Normalize unicode/byte string to byte string.
"""
if isinstance(value, unicode):
value = value.encode(encoding)
return value
def smart_unicode(value, encoding='utf-8'):
"""
Normalize unicode/btye string to unicode string.
"""
if not isinstance(value, unicode):
value = value.decode(encoding)
return value
def special_entity_handler(match):
num = int(match.group(1))
if 128 <= num <= 160:
return '&#%d;' % ord(chr(num).decode('cp1252'))
else:
return match.group(0)
def fix_special_entities(body):
return RE_SPECIAL_ENTITY.sub(special_entity_handler, body)
Fix issue in special_entity_handler functionimport re
RE_SPECIAL_ENTITY = re.compile('&#(1[2-6][0-9]);')
def smart_str(value, encoding='utf-8'):
"""
Normalize unicode/byte string to byte string.
"""
if isinstance(value, unicode):
value = value.encode(encoding)
return value
def smart_unicode(value, encoding='utf-8'):
"""
Normalize unicode/btye string to unicode string.
"""
if not isinstance(value, unicode):
value = value.decode(encoding)
return value
def special_entity_handler(match):
num = int(match.group(1))
if 128 <= num <= 160:
try:
return '&#%d;' % ord(chr(num).decode('cp1252'))
except UnicodeDecodeError:
return match.group(0)
else:
return match.group(0)
def fix_special_entities(body):
return RE_SPECIAL_ENTITY.sub(special_entity_handler, body)
|
<commit_before>import re
RE_SPECIAL_ENTITY = re.compile('&#(1[2-6][0-9]);')
def smart_str(value, encoding='utf-8'):
"""
Normalize unicode/byte string to byte string.
"""
if isinstance(value, unicode):
value = value.encode(encoding)
return value
def smart_unicode(value, encoding='utf-8'):
"""
Normalize unicode/btye string to unicode string.
"""
if not isinstance(value, unicode):
value = value.decode(encoding)
return value
def special_entity_handler(match):
num = int(match.group(1))
if 128 <= num <= 160:
return '&#%d;' % ord(chr(num).decode('cp1252'))
else:
return match.group(0)
def fix_special_entities(body):
return RE_SPECIAL_ENTITY.sub(special_entity_handler, body)
<commit_msg>Fix issue in special_entity_handler function<commit_after>import re
RE_SPECIAL_ENTITY = re.compile('&#(1[2-6][0-9]);')
def smart_str(value, encoding='utf-8'):
"""
Normalize unicode/byte string to byte string.
"""
if isinstance(value, unicode):
value = value.encode(encoding)
return value
def smart_unicode(value, encoding='utf-8'):
"""
Normalize unicode/btye string to unicode string.
"""
if not isinstance(value, unicode):
value = value.decode(encoding)
return value
def special_entity_handler(match):
num = int(match.group(1))
if 128 <= num <= 160:
try:
return '&#%d;' % ord(chr(num).decode('cp1252'))
except UnicodeDecodeError:
return match.group(0)
else:
return match.group(0)
def fix_special_entities(body):
return RE_SPECIAL_ENTITY.sub(special_entity_handler, body)
|
9f4ffd065dc13ba16d8d9839f39f901c0c111155
|
backdrop/core/log_handler.py
|
backdrop/core/log_handler.py
|
from logging import FileHandler
from logstash_formatter import LogstashFormatter
import logging
from flask import request
def get_log_file_handler(path, log_level=logging.DEBUG):
handler = FileHandler(path)
handler.setFormatter(logging.Formatter(
"%(asctime)s [%(levelname)s] -> %(message)s"))
handler.setLevel(log_level)
return handler
def get_json_log_handler(path):
handler = FileHandler(path)
formatter = LogstashFormatter()
formatter.defaults['@tags'] = ['application']
handler.setFormatter(formatter)
return handler
def set_up_logging(app, name, env):
log_level = logging._levelNames[app.config['LOG_LEVEL']]
app.logger.addHandler(
get_log_file_handler("log/%s.log" % env, log_level)
)
app.logger.addHandler(
get_json_log_handler("log/%s.log.json" % env)
)
app.logger.setLevel(log_level)
app.logger.info("Backdrop %s API logging started" % name)
def create_request_logger(app):
def log_request():
app.logger.info("request: %s - %s" % (request.method, request.url))
return log_request
def create_response_logger(app):
def log_response(response):
app.logger.info("response: %s" % response.status)
return response
return log_response
|
from logging import FileHandler
from logstash_formatter import LogstashFormatter
import logging
from flask import request
def get_log_file_handler(path, log_level=logging.DEBUG):
handler = FileHandler(path)
handler.setFormatter(logging.Formatter(
"%(asctime)s [%(levelname)s] -> %(message)s"))
handler.setLevel(log_level)
return handler
def get_json_log_handler(path):
handler = FileHandler(path)
formatter = LogstashFormatter()
formatter.defaults['@tags'] = ['application']
handler.setFormatter(formatter)
return handler
def set_up_logging(app, name, env):
log_level = logging._levelNames[app.config['LOG_LEVEL']]
app.logger.addHandler(
get_log_file_handler("log/%s.log" % env, log_level)
)
app.logger.addHandler(
get_json_log_handler("log/%s.log.json" % env)
)
app.logger.setLevel(log_level)
app.logger.info("Backdrop %s API logging started" % name)
def create_request_logger(app):
def log_request():
app.logger.info("request: %s - %s" % (request.method, request.url))
return log_request
def create_response_logger(app):
def log_response(response):
app.logger.info(
"response: %s - %s - %s" % (
request.method, request.url, response.status
)
)
return response
return log_response
|
Add request method and url to response log
|
Add request method and url to response log
At the moment we can't relate the response to a request.
|
Python
|
mit
|
alphagov/backdrop,alphagov/backdrop,alphagov/backdrop
|
from logging import FileHandler
from logstash_formatter import LogstashFormatter
import logging
from flask import request
def get_log_file_handler(path, log_level=logging.DEBUG):
handler = FileHandler(path)
handler.setFormatter(logging.Formatter(
"%(asctime)s [%(levelname)s] -> %(message)s"))
handler.setLevel(log_level)
return handler
def get_json_log_handler(path):
handler = FileHandler(path)
formatter = LogstashFormatter()
formatter.defaults['@tags'] = ['application']
handler.setFormatter(formatter)
return handler
def set_up_logging(app, name, env):
log_level = logging._levelNames[app.config['LOG_LEVEL']]
app.logger.addHandler(
get_log_file_handler("log/%s.log" % env, log_level)
)
app.logger.addHandler(
get_json_log_handler("log/%s.log.json" % env)
)
app.logger.setLevel(log_level)
app.logger.info("Backdrop %s API logging started" % name)
def create_request_logger(app):
def log_request():
app.logger.info("request: %s - %s" % (request.method, request.url))
return log_request
def create_response_logger(app):
def log_response(response):
app.logger.info("response: %s" % response.status)
return response
return log_response
Add request method and url to response log
At the moment we can't relate the response to a request.
|
from logging import FileHandler
from logstash_formatter import LogstashFormatter
import logging
from flask import request
def get_log_file_handler(path, log_level=logging.DEBUG):
handler = FileHandler(path)
handler.setFormatter(logging.Formatter(
"%(asctime)s [%(levelname)s] -> %(message)s"))
handler.setLevel(log_level)
return handler
def get_json_log_handler(path):
handler = FileHandler(path)
formatter = LogstashFormatter()
formatter.defaults['@tags'] = ['application']
handler.setFormatter(formatter)
return handler
def set_up_logging(app, name, env):
log_level = logging._levelNames[app.config['LOG_LEVEL']]
app.logger.addHandler(
get_log_file_handler("log/%s.log" % env, log_level)
)
app.logger.addHandler(
get_json_log_handler("log/%s.log.json" % env)
)
app.logger.setLevel(log_level)
app.logger.info("Backdrop %s API logging started" % name)
def create_request_logger(app):
def log_request():
app.logger.info("request: %s - %s" % (request.method, request.url))
return log_request
def create_response_logger(app):
def log_response(response):
app.logger.info(
"response: %s - %s - %s" % (
request.method, request.url, response.status
)
)
return response
return log_response
|
<commit_before>from logging import FileHandler
from logstash_formatter import LogstashFormatter
import logging
from flask import request
def get_log_file_handler(path, log_level=logging.DEBUG):
handler = FileHandler(path)
handler.setFormatter(logging.Formatter(
"%(asctime)s [%(levelname)s] -> %(message)s"))
handler.setLevel(log_level)
return handler
def get_json_log_handler(path):
handler = FileHandler(path)
formatter = LogstashFormatter()
formatter.defaults['@tags'] = ['application']
handler.setFormatter(formatter)
return handler
def set_up_logging(app, name, env):
log_level = logging._levelNames[app.config['LOG_LEVEL']]
app.logger.addHandler(
get_log_file_handler("log/%s.log" % env, log_level)
)
app.logger.addHandler(
get_json_log_handler("log/%s.log.json" % env)
)
app.logger.setLevel(log_level)
app.logger.info("Backdrop %s API logging started" % name)
def create_request_logger(app):
def log_request():
app.logger.info("request: %s - %s" % (request.method, request.url))
return log_request
def create_response_logger(app):
def log_response(response):
app.logger.info("response: %s" % response.status)
return response
return log_response
<commit_msg>Add request method and url to response log
At the moment we can't relate the response to a request.<commit_after>
|
from logging import FileHandler
from logstash_formatter import LogstashFormatter
import logging
from flask import request
def get_log_file_handler(path, log_level=logging.DEBUG):
handler = FileHandler(path)
handler.setFormatter(logging.Formatter(
"%(asctime)s [%(levelname)s] -> %(message)s"))
handler.setLevel(log_level)
return handler
def get_json_log_handler(path):
handler = FileHandler(path)
formatter = LogstashFormatter()
formatter.defaults['@tags'] = ['application']
handler.setFormatter(formatter)
return handler
def set_up_logging(app, name, env):
log_level = logging._levelNames[app.config['LOG_LEVEL']]
app.logger.addHandler(
get_log_file_handler("log/%s.log" % env, log_level)
)
app.logger.addHandler(
get_json_log_handler("log/%s.log.json" % env)
)
app.logger.setLevel(log_level)
app.logger.info("Backdrop %s API logging started" % name)
def create_request_logger(app):
def log_request():
app.logger.info("request: %s - %s" % (request.method, request.url))
return log_request
def create_response_logger(app):
def log_response(response):
app.logger.info(
"response: %s - %s - %s" % (
request.method, request.url, response.status
)
)
return response
return log_response
|
from logging import FileHandler
from logstash_formatter import LogstashFormatter
import logging
from flask import request
def get_log_file_handler(path, log_level=logging.DEBUG):
handler = FileHandler(path)
handler.setFormatter(logging.Formatter(
"%(asctime)s [%(levelname)s] -> %(message)s"))
handler.setLevel(log_level)
return handler
def get_json_log_handler(path):
handler = FileHandler(path)
formatter = LogstashFormatter()
formatter.defaults['@tags'] = ['application']
handler.setFormatter(formatter)
return handler
def set_up_logging(app, name, env):
log_level = logging._levelNames[app.config['LOG_LEVEL']]
app.logger.addHandler(
get_log_file_handler("log/%s.log" % env, log_level)
)
app.logger.addHandler(
get_json_log_handler("log/%s.log.json" % env)
)
app.logger.setLevel(log_level)
app.logger.info("Backdrop %s API logging started" % name)
def create_request_logger(app):
def log_request():
app.logger.info("request: %s - %s" % (request.method, request.url))
return log_request
def create_response_logger(app):
def log_response(response):
app.logger.info("response: %s" % response.status)
return response
return log_response
Add request method and url to response log
At the moment we can't relate the response to a request.from logging import FileHandler
from logstash_formatter import LogstashFormatter
import logging
from flask import request
def get_log_file_handler(path, log_level=logging.DEBUG):
handler = FileHandler(path)
handler.setFormatter(logging.Formatter(
"%(asctime)s [%(levelname)s] -> %(message)s"))
handler.setLevel(log_level)
return handler
def get_json_log_handler(path):
handler = FileHandler(path)
formatter = LogstashFormatter()
formatter.defaults['@tags'] = ['application']
handler.setFormatter(formatter)
return handler
def set_up_logging(app, name, env):
log_level = logging._levelNames[app.config['LOG_LEVEL']]
app.logger.addHandler(
get_log_file_handler("log/%s.log" % env, log_level)
)
app.logger.addHandler(
get_json_log_handler("log/%s.log.json" % env)
)
app.logger.setLevel(log_level)
app.logger.info("Backdrop %s API logging started" % name)
def create_request_logger(app):
def log_request():
app.logger.info("request: %s - %s" % (request.method, request.url))
return log_request
def create_response_logger(app):
def log_response(response):
app.logger.info(
"response: %s - %s - %s" % (
request.method, request.url, response.status
)
)
return response
return log_response
|
<commit_before>from logging import FileHandler
from logstash_formatter import LogstashFormatter
import logging
from flask import request
def get_log_file_handler(path, log_level=logging.DEBUG):
handler = FileHandler(path)
handler.setFormatter(logging.Formatter(
"%(asctime)s [%(levelname)s] -> %(message)s"))
handler.setLevel(log_level)
return handler
def get_json_log_handler(path):
handler = FileHandler(path)
formatter = LogstashFormatter()
formatter.defaults['@tags'] = ['application']
handler.setFormatter(formatter)
return handler
def set_up_logging(app, name, env):
log_level = logging._levelNames[app.config['LOG_LEVEL']]
app.logger.addHandler(
get_log_file_handler("log/%s.log" % env, log_level)
)
app.logger.addHandler(
get_json_log_handler("log/%s.log.json" % env)
)
app.logger.setLevel(log_level)
app.logger.info("Backdrop %s API logging started" % name)
def create_request_logger(app):
def log_request():
app.logger.info("request: %s - %s" % (request.method, request.url))
return log_request
def create_response_logger(app):
def log_response(response):
app.logger.info("response: %s" % response.status)
return response
return log_response
<commit_msg>Add request method and url to response log
At the moment we can't relate the response to a request.<commit_after>from logging import FileHandler
from logstash_formatter import LogstashFormatter
import logging
from flask import request
def get_log_file_handler(path, log_level=logging.DEBUG):
handler = FileHandler(path)
handler.setFormatter(logging.Formatter(
"%(asctime)s [%(levelname)s] -> %(message)s"))
handler.setLevel(log_level)
return handler
def get_json_log_handler(path):
handler = FileHandler(path)
formatter = LogstashFormatter()
formatter.defaults['@tags'] = ['application']
handler.setFormatter(formatter)
return handler
def set_up_logging(app, name, env):
log_level = logging._levelNames[app.config['LOG_LEVEL']]
app.logger.addHandler(
get_log_file_handler("log/%s.log" % env, log_level)
)
app.logger.addHandler(
get_json_log_handler("log/%s.log.json" % env)
)
app.logger.setLevel(log_level)
app.logger.info("Backdrop %s API logging started" % name)
def create_request_logger(app):
def log_request():
app.logger.info("request: %s - %s" % (request.method, request.url))
return log_request
def create_response_logger(app):
def log_response(response):
app.logger.info(
"response: %s - %s - %s" % (
request.method, request.url, response.status
)
)
return response
return log_response
|
a0a90c7a4be2b419af0d745753b83f11f63916d2
|
registration/__init__.py
|
registration/__init__.py
|
VERSION = (0, 9, 0, 'beta', 1)
def get_version():
from django.utils.version import get_version as django_get_version
return django_get_version(VERSION) # pragma: no cover
|
VERSION = (1, 0, 0, 'final', 0)
def get_version():
"Returns a PEP 386-compliant version number from VERSION."
assert len(VERSION) == 5
assert VERSION[3] in ('alpha', 'beta', 'rc', 'final')
# Now build the two parts of the version number:
# main = X.Y[.Z]
# sub = .devN - for pre-alpha releases
# | {a|b|c}N - for alpha, beta and rc releases
parts = 2 if VERSION[2] == 0 else 3
main = '.'.join(str(x) for x in VERSION[:parts])
sub = ''
if VERSION[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
sub = mapping[VERSION[3]] + str(VERSION[4])
return str(main + sub)
|
Fix version number reporting so we can be installed before Django.
|
Fix version number reporting so we can be installed before Django.
|
Python
|
bsd-3-clause
|
lubosz/django-registration,lubosz/django-registration
|
VERSION = (0, 9, 0, 'beta', 1)
def get_version():
from django.utils.version import get_version as django_get_version
return django_get_version(VERSION) # pragma: no cover
Fix version number reporting so we can be installed before Django.
|
VERSION = (1, 0, 0, 'final', 0)
def get_version():
"Returns a PEP 386-compliant version number from VERSION."
assert len(VERSION) == 5
assert VERSION[3] in ('alpha', 'beta', 'rc', 'final')
# Now build the two parts of the version number:
# main = X.Y[.Z]
# sub = .devN - for pre-alpha releases
# | {a|b|c}N - for alpha, beta and rc releases
parts = 2 if VERSION[2] == 0 else 3
main = '.'.join(str(x) for x in VERSION[:parts])
sub = ''
if VERSION[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
sub = mapping[VERSION[3]] + str(VERSION[4])
return str(main + sub)
|
<commit_before>VERSION = (0, 9, 0, 'beta', 1)
def get_version():
from django.utils.version import get_version as django_get_version
return django_get_version(VERSION) # pragma: no cover
<commit_msg>Fix version number reporting so we can be installed before Django.<commit_after>
|
VERSION = (1, 0, 0, 'final', 0)
def get_version():
"Returns a PEP 386-compliant version number from VERSION."
assert len(VERSION) == 5
assert VERSION[3] in ('alpha', 'beta', 'rc', 'final')
# Now build the two parts of the version number:
# main = X.Y[.Z]
# sub = .devN - for pre-alpha releases
# | {a|b|c}N - for alpha, beta and rc releases
parts = 2 if VERSION[2] == 0 else 3
main = '.'.join(str(x) for x in VERSION[:parts])
sub = ''
if VERSION[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
sub = mapping[VERSION[3]] + str(VERSION[4])
return str(main + sub)
|
VERSION = (0, 9, 0, 'beta', 1)
def get_version():
from django.utils.version import get_version as django_get_version
return django_get_version(VERSION) # pragma: no cover
Fix version number reporting so we can be installed before Django.VERSION = (1, 0, 0, 'final', 0)
def get_version():
"Returns a PEP 386-compliant version number from VERSION."
assert len(VERSION) == 5
assert VERSION[3] in ('alpha', 'beta', 'rc', 'final')
# Now build the two parts of the version number:
# main = X.Y[.Z]
# sub = .devN - for pre-alpha releases
# | {a|b|c}N - for alpha, beta and rc releases
parts = 2 if VERSION[2] == 0 else 3
main = '.'.join(str(x) for x in VERSION[:parts])
sub = ''
if VERSION[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
sub = mapping[VERSION[3]] + str(VERSION[4])
return str(main + sub)
|
<commit_before>VERSION = (0, 9, 0, 'beta', 1)
def get_version():
from django.utils.version import get_version as django_get_version
return django_get_version(VERSION) # pragma: no cover
<commit_msg>Fix version number reporting so we can be installed before Django.<commit_after>VERSION = (1, 0, 0, 'final', 0)
def get_version():
"Returns a PEP 386-compliant version number from VERSION."
assert len(VERSION) == 5
assert VERSION[3] in ('alpha', 'beta', 'rc', 'final')
# Now build the two parts of the version number:
# main = X.Y[.Z]
# sub = .devN - for pre-alpha releases
# | {a|b|c}N - for alpha, beta and rc releases
parts = 2 if VERSION[2] == 0 else 3
main = '.'.join(str(x) for x in VERSION[:parts])
sub = ''
if VERSION[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
sub = mapping[VERSION[3]] + str(VERSION[4])
return str(main + sub)
|
66dc90fd88f3d70b166c5bb69a4b4e2bed743848
|
synapse/tests/test_config.py
|
synapse/tests/test_config.py
|
from synapse.tests.common import *
import synapse.lib.config as s_config
class ConfTest(SynTest):
def test_conf_base(self):
defs = (
('fooval',{'type':'int','doc':'what is foo val?','defval':99}),
('enabled',{'type':'bool','doc':'is thing enabled?','defval':0}),
)
data = {}
def callback(v):
data['woot'] = v
with s_config.Config(defs=defs) as conf:
conf.onConfOptSet('enabled',callback)
conf.setConfOpt('enabled','true')
self.eq(data.get('woot'), 1)
conf.setConfOpts({'fooval':'0x20'})
self.eq(conf.getConfOpt('fooval'), 0x20)
conf.setConfOpts({'fooval':0x30})
self.eq(conf.getConfOpt('fooval'), 0x30)
self.assertRaises( NoSuchOpt, conf.setConfOpts, {'newp':'hehe'} )
def test_conf_asloc(self):
with s_config.Config() as conf:
conf.addConfDef('foo',type='int',defval=0,asloc='_foo_valu')
self.eq( conf._foo_valu, 0 )
conf.setConfOpt('foo','0x20')
self.eq( conf._foo_valu, 0x20)
|
from synapse.tests.common import *
import synapse.lib.config as s_config
class ConfTest(SynTest):
def test_conf_base(self):
defs = (
('fooval',{'type':'int','doc':'what is foo val?','defval':99}),
('enabled',{'type':'bool','doc':'is thing enabled?','defval':0}),
)
data = {}
def callback(v):
data['woot'] = v
with s_config.Config(defs=defs) as conf:
self.eq(conf.getConfOpt('enabled'), 0)
self.eq(conf.getConfOpt('fooval'), 99)
conf.onConfOptSet('enabled',callback)
conf.setConfOpt('enabled','true')
self.eq(data.get('woot'), 1)
conf.setConfOpts({'fooval':'0x20'})
self.eq(conf.getConfOpt('fooval'), 0x20)
conf.setConfOpts({'fooval':0x30})
self.eq(conf.getConfOpt('fooval'), 0x30)
self.assertRaises( NoSuchOpt, conf.setConfOpts, {'newp':'hehe'} )
def test_conf_asloc(self):
with s_config.Config() as conf:
conf.addConfDef('foo',type='int',defval=0,asloc='_foo_valu')
self.eq( conf._foo_valu, 0 )
conf.setConfOpt('foo','0x20')
self.eq( conf._foo_valu, 0x20)
|
Update test to ensure that default configuration values are available via getConfOpt
|
Update test to ensure that default configuration values are available via getConfOpt
|
Python
|
apache-2.0
|
vivisect/synapse,vertexproject/synapse,vertexproject/synapse,vertexproject/synapse
|
from synapse.tests.common import *
import synapse.lib.config as s_config
class ConfTest(SynTest):
def test_conf_base(self):
defs = (
('fooval',{'type':'int','doc':'what is foo val?','defval':99}),
('enabled',{'type':'bool','doc':'is thing enabled?','defval':0}),
)
data = {}
def callback(v):
data['woot'] = v
with s_config.Config(defs=defs) as conf:
conf.onConfOptSet('enabled',callback)
conf.setConfOpt('enabled','true')
self.eq(data.get('woot'), 1)
conf.setConfOpts({'fooval':'0x20'})
self.eq(conf.getConfOpt('fooval'), 0x20)
conf.setConfOpts({'fooval':0x30})
self.eq(conf.getConfOpt('fooval'), 0x30)
self.assertRaises( NoSuchOpt, conf.setConfOpts, {'newp':'hehe'} )
def test_conf_asloc(self):
with s_config.Config() as conf:
conf.addConfDef('foo',type='int',defval=0,asloc='_foo_valu')
self.eq( conf._foo_valu, 0 )
conf.setConfOpt('foo','0x20')
self.eq( conf._foo_valu, 0x20)
Update test to ensure that default configuration values are available via getConfOpt
|
from synapse.tests.common import *
import synapse.lib.config as s_config
class ConfTest(SynTest):
def test_conf_base(self):
defs = (
('fooval',{'type':'int','doc':'what is foo val?','defval':99}),
('enabled',{'type':'bool','doc':'is thing enabled?','defval':0}),
)
data = {}
def callback(v):
data['woot'] = v
with s_config.Config(defs=defs) as conf:
self.eq(conf.getConfOpt('enabled'), 0)
self.eq(conf.getConfOpt('fooval'), 99)
conf.onConfOptSet('enabled',callback)
conf.setConfOpt('enabled','true')
self.eq(data.get('woot'), 1)
conf.setConfOpts({'fooval':'0x20'})
self.eq(conf.getConfOpt('fooval'), 0x20)
conf.setConfOpts({'fooval':0x30})
self.eq(conf.getConfOpt('fooval'), 0x30)
self.assertRaises( NoSuchOpt, conf.setConfOpts, {'newp':'hehe'} )
def test_conf_asloc(self):
with s_config.Config() as conf:
conf.addConfDef('foo',type='int',defval=0,asloc='_foo_valu')
self.eq( conf._foo_valu, 0 )
conf.setConfOpt('foo','0x20')
self.eq( conf._foo_valu, 0x20)
|
<commit_before>from synapse.tests.common import *
import synapse.lib.config as s_config
class ConfTest(SynTest):
def test_conf_base(self):
defs = (
('fooval',{'type':'int','doc':'what is foo val?','defval':99}),
('enabled',{'type':'bool','doc':'is thing enabled?','defval':0}),
)
data = {}
def callback(v):
data['woot'] = v
with s_config.Config(defs=defs) as conf:
conf.onConfOptSet('enabled',callback)
conf.setConfOpt('enabled','true')
self.eq(data.get('woot'), 1)
conf.setConfOpts({'fooval':'0x20'})
self.eq(conf.getConfOpt('fooval'), 0x20)
conf.setConfOpts({'fooval':0x30})
self.eq(conf.getConfOpt('fooval'), 0x30)
self.assertRaises( NoSuchOpt, conf.setConfOpts, {'newp':'hehe'} )
def test_conf_asloc(self):
with s_config.Config() as conf:
conf.addConfDef('foo',type='int',defval=0,asloc='_foo_valu')
self.eq( conf._foo_valu, 0 )
conf.setConfOpt('foo','0x20')
self.eq( conf._foo_valu, 0x20)
<commit_msg>Update test to ensure that default configuration values are available via getConfOpt<commit_after>
|
from synapse.tests.common import *
import synapse.lib.config as s_config
class ConfTest(SynTest):
def test_conf_base(self):
defs = (
('fooval',{'type':'int','doc':'what is foo val?','defval':99}),
('enabled',{'type':'bool','doc':'is thing enabled?','defval':0}),
)
data = {}
def callback(v):
data['woot'] = v
with s_config.Config(defs=defs) as conf:
self.eq(conf.getConfOpt('enabled'), 0)
self.eq(conf.getConfOpt('fooval'), 99)
conf.onConfOptSet('enabled',callback)
conf.setConfOpt('enabled','true')
self.eq(data.get('woot'), 1)
conf.setConfOpts({'fooval':'0x20'})
self.eq(conf.getConfOpt('fooval'), 0x20)
conf.setConfOpts({'fooval':0x30})
self.eq(conf.getConfOpt('fooval'), 0x30)
self.assertRaises( NoSuchOpt, conf.setConfOpts, {'newp':'hehe'} )
def test_conf_asloc(self):
with s_config.Config() as conf:
conf.addConfDef('foo',type='int',defval=0,asloc='_foo_valu')
self.eq( conf._foo_valu, 0 )
conf.setConfOpt('foo','0x20')
self.eq( conf._foo_valu, 0x20)
|
from synapse.tests.common import *
import synapse.lib.config as s_config
class ConfTest(SynTest):
def test_conf_base(self):
defs = (
('fooval',{'type':'int','doc':'what is foo val?','defval':99}),
('enabled',{'type':'bool','doc':'is thing enabled?','defval':0}),
)
data = {}
def callback(v):
data['woot'] = v
with s_config.Config(defs=defs) as conf:
conf.onConfOptSet('enabled',callback)
conf.setConfOpt('enabled','true')
self.eq(data.get('woot'), 1)
conf.setConfOpts({'fooval':'0x20'})
self.eq(conf.getConfOpt('fooval'), 0x20)
conf.setConfOpts({'fooval':0x30})
self.eq(conf.getConfOpt('fooval'), 0x30)
self.assertRaises( NoSuchOpt, conf.setConfOpts, {'newp':'hehe'} )
def test_conf_asloc(self):
with s_config.Config() as conf:
conf.addConfDef('foo',type='int',defval=0,asloc='_foo_valu')
self.eq( conf._foo_valu, 0 )
conf.setConfOpt('foo','0x20')
self.eq( conf._foo_valu, 0x20)
Update test to ensure that default configuration values are available via getConfOptfrom synapse.tests.common import *
import synapse.lib.config as s_config
class ConfTest(SynTest):
def test_conf_base(self):
defs = (
('fooval',{'type':'int','doc':'what is foo val?','defval':99}),
('enabled',{'type':'bool','doc':'is thing enabled?','defval':0}),
)
data = {}
def callback(v):
data['woot'] = v
with s_config.Config(defs=defs) as conf:
self.eq(conf.getConfOpt('enabled'), 0)
self.eq(conf.getConfOpt('fooval'), 99)
conf.onConfOptSet('enabled',callback)
conf.setConfOpt('enabled','true')
self.eq(data.get('woot'), 1)
conf.setConfOpts({'fooval':'0x20'})
self.eq(conf.getConfOpt('fooval'), 0x20)
conf.setConfOpts({'fooval':0x30})
self.eq(conf.getConfOpt('fooval'), 0x30)
self.assertRaises( NoSuchOpt, conf.setConfOpts, {'newp':'hehe'} )
def test_conf_asloc(self):
with s_config.Config() as conf:
conf.addConfDef('foo',type='int',defval=0,asloc='_foo_valu')
self.eq( conf._foo_valu, 0 )
conf.setConfOpt('foo','0x20')
self.eq( conf._foo_valu, 0x20)
|
<commit_before>from synapse.tests.common import *
import synapse.lib.config as s_config
class ConfTest(SynTest):
def test_conf_base(self):
defs = (
('fooval',{'type':'int','doc':'what is foo val?','defval':99}),
('enabled',{'type':'bool','doc':'is thing enabled?','defval':0}),
)
data = {}
def callback(v):
data['woot'] = v
with s_config.Config(defs=defs) as conf:
conf.onConfOptSet('enabled',callback)
conf.setConfOpt('enabled','true')
self.eq(data.get('woot'), 1)
conf.setConfOpts({'fooval':'0x20'})
self.eq(conf.getConfOpt('fooval'), 0x20)
conf.setConfOpts({'fooval':0x30})
self.eq(conf.getConfOpt('fooval'), 0x30)
self.assertRaises( NoSuchOpt, conf.setConfOpts, {'newp':'hehe'} )
def test_conf_asloc(self):
with s_config.Config() as conf:
conf.addConfDef('foo',type='int',defval=0,asloc='_foo_valu')
self.eq( conf._foo_valu, 0 )
conf.setConfOpt('foo','0x20')
self.eq( conf._foo_valu, 0x20)
<commit_msg>Update test to ensure that default configuration values are available via getConfOpt<commit_after>from synapse.tests.common import *
import synapse.lib.config as s_config
class ConfTest(SynTest):
def test_conf_base(self):
defs = (
('fooval',{'type':'int','doc':'what is foo val?','defval':99}),
('enabled',{'type':'bool','doc':'is thing enabled?','defval':0}),
)
data = {}
def callback(v):
data['woot'] = v
with s_config.Config(defs=defs) as conf:
self.eq(conf.getConfOpt('enabled'), 0)
self.eq(conf.getConfOpt('fooval'), 99)
conf.onConfOptSet('enabled',callback)
conf.setConfOpt('enabled','true')
self.eq(data.get('woot'), 1)
conf.setConfOpts({'fooval':'0x20'})
self.eq(conf.getConfOpt('fooval'), 0x20)
conf.setConfOpts({'fooval':0x30})
self.eq(conf.getConfOpt('fooval'), 0x30)
self.assertRaises( NoSuchOpt, conf.setConfOpts, {'newp':'hehe'} )
def test_conf_asloc(self):
with s_config.Config() as conf:
conf.addConfDef('foo',type='int',defval=0,asloc='_foo_valu')
self.eq( conf._foo_valu, 0 )
conf.setConfOpt('foo','0x20')
self.eq( conf._foo_valu, 0x20)
|
e77acf1c6144619ed12faa9ab9feb02e27c2f3fe
|
test/functionalities/backticks/TestBackticksWithoutATarget.py
|
test/functionalities/backticks/TestBackticksWithoutATarget.py
|
"""
Test that backticks without a target should work (not infinite looping).
"""
import os, time
import unittest2
import lldb
from lldbtest import *
class BackticksWithNoTargetTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
def test_backticks_no_target(self):
"""A simple test of backticks without a target."""
self.expect("print `1+2-3`",
substrs = [' = 0'])
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
|
"""
Test that backticks without a target should work (not infinite looping).
"""
import os, time
import unittest2
import lldb
from lldbtest import *
class BackticksWithNoTargetTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
@unittest2.expectedFailure # llvm.org/pr19241 IRInterpreter does not handle metadata
def test_backticks_no_target(self):
"""A simple test of backticks without a target."""
self.expect("print `1+2-3`",
substrs = [' = 0'])
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
|
Add expected failure annotation for llvm.org/pr19241
|
Add expected failure annotation for llvm.org/pr19241
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@204718 91177308-0d34-0410-b5e6-96231b3b80d8
|
Python
|
apache-2.0
|
llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb
|
"""
Test that backticks without a target should work (not infinite looping).
"""
import os, time
import unittest2
import lldb
from lldbtest import *
class BackticksWithNoTargetTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
def test_backticks_no_target(self):
"""A simple test of backticks without a target."""
self.expect("print `1+2-3`",
substrs = [' = 0'])
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
Add expected failure annotation for llvm.org/pr19241
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@204718 91177308-0d34-0410-b5e6-96231b3b80d8
|
"""
Test that backticks without a target should work (not infinite looping).
"""
import os, time
import unittest2
import lldb
from lldbtest import *
class BackticksWithNoTargetTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
@unittest2.expectedFailure # llvm.org/pr19241 IRInterpreter does not handle metadata
def test_backticks_no_target(self):
"""A simple test of backticks without a target."""
self.expect("print `1+2-3`",
substrs = [' = 0'])
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
|
<commit_before>"""
Test that backticks without a target should work (not infinite looping).
"""
import os, time
import unittest2
import lldb
from lldbtest import *
class BackticksWithNoTargetTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
def test_backticks_no_target(self):
"""A simple test of backticks without a target."""
self.expect("print `1+2-3`",
substrs = [' = 0'])
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
<commit_msg>Add expected failure annotation for llvm.org/pr19241
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@204718 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>
|
"""
Test that backticks without a target should work (not infinite looping).
"""
import os, time
import unittest2
import lldb
from lldbtest import *
class BackticksWithNoTargetTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
@unittest2.expectedFailure # llvm.org/pr19241 IRInterpreter does not handle metadata
def test_backticks_no_target(self):
"""A simple test of backticks without a target."""
self.expect("print `1+2-3`",
substrs = [' = 0'])
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
|
"""
Test that backticks without a target should work (not infinite looping).
"""
import os, time
import unittest2
import lldb
from lldbtest import *
class BackticksWithNoTargetTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
def test_backticks_no_target(self):
"""A simple test of backticks without a target."""
self.expect("print `1+2-3`",
substrs = [' = 0'])
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
Add expected failure annotation for llvm.org/pr19241
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@204718 91177308-0d34-0410-b5e6-96231b3b80d8"""
Test that backticks without a target should work (not infinite looping).
"""
import os, time
import unittest2
import lldb
from lldbtest import *
class BackticksWithNoTargetTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
@unittest2.expectedFailure # llvm.org/pr19241 IRInterpreter does not handle metadata
def test_backticks_no_target(self):
"""A simple test of backticks without a target."""
self.expect("print `1+2-3`",
substrs = [' = 0'])
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
|
<commit_before>"""
Test that backticks without a target should work (not infinite looping).
"""
import os, time
import unittest2
import lldb
from lldbtest import *
class BackticksWithNoTargetTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
def test_backticks_no_target(self):
"""A simple test of backticks without a target."""
self.expect("print `1+2-3`",
substrs = [' = 0'])
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
<commit_msg>Add expected failure annotation for llvm.org/pr19241
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@204718 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>"""
Test that backticks without a target should work (not infinite looping).
"""
import os, time
import unittest2
import lldb
from lldbtest import *
class BackticksWithNoTargetTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
@unittest2.expectedFailure # llvm.org/pr19241 IRInterpreter does not handle metadata
def test_backticks_no_target(self):
"""A simple test of backticks without a target."""
self.expect("print `1+2-3`",
substrs = [' = 0'])
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
|
658e0a3dbd260f2d27756ed5605794b2320ba728
|
backend/src/pox/ext/gini/samples/packet_loss.py
|
backend/src/pox/ext/gini/samples/packet_loss.py
|
#!/usr/bin/python2
"""
packet_loss.py: Simulates packet loss by dropping all packets with a
probability of 25%.
"""
import random
from pox.core import core
import pox.openflow.libopenflow_01 as of
def packet_in(event):.
if random.random() >= 0.25:
msg = of.ofp_packet_out(data = event.ofp)
msg.actions.append(of.ofp_action_output(port = of.OFPP_FLOOD))
conn.send(event.connection)
def launch():
core.openflow.addListenerByName("PacketIn", packet_in)
|
#!/usr/bin/python2
"""
packet_loss.py: Simulates packet loss by dropping all packets with a
probability of 25%.
"""
import random
from pox.core import core
import pox.openflow.libopenflow_01 as of
def packet_in(event):
if random.random() >= 0.25:
msg = of.ofp_packet_out(data = event.ofp)
msg.actions.append(of.ofp_action_output(port = of.OFPP_FLOOD))
event.connection.send(msg)
def launch():
core.openflow.addListenerByName("PacketIn", packet_in)
|
Fix bug in packet loss
|
Fix bug in packet loss
|
Python
|
mit
|
anrl/gini3,michaelkourlas/gini,michaelkourlas/gini,anrl/gini3,michaelkourlas/gini,anrl/gini3,michaelkourlas/gini,anrl/gini3,anrl/gini3,michaelkourlas/gini
|
#!/usr/bin/python2
"""
packet_loss.py: Simulates packet loss by dropping all packets with a
probability of 25%.
"""
import random
from pox.core import core
import pox.openflow.libopenflow_01 as of
def packet_in(event):.
if random.random() >= 0.25:
msg = of.ofp_packet_out(data = event.ofp)
msg.actions.append(of.ofp_action_output(port = of.OFPP_FLOOD))
conn.send(event.connection)
def launch():
core.openflow.addListenerByName("PacketIn", packet_in)
Fix bug in packet loss
|
#!/usr/bin/python2
"""
packet_loss.py: Simulates packet loss by dropping all packets with a
probability of 25%.
"""
import random
from pox.core import core
import pox.openflow.libopenflow_01 as of
def packet_in(event):
if random.random() >= 0.25:
msg = of.ofp_packet_out(data = event.ofp)
msg.actions.append(of.ofp_action_output(port = of.OFPP_FLOOD))
event.connection.send(msg)
def launch():
core.openflow.addListenerByName("PacketIn", packet_in)
|
<commit_before>#!/usr/bin/python2
"""
packet_loss.py: Simulates packet loss by dropping all packets with a
probability of 25%.
"""
import random
from pox.core import core
import pox.openflow.libopenflow_01 as of
def packet_in(event):.
if random.random() >= 0.25:
msg = of.ofp_packet_out(data = event.ofp)
msg.actions.append(of.ofp_action_output(port = of.OFPP_FLOOD))
conn.send(event.connection)
def launch():
core.openflow.addListenerByName("PacketIn", packet_in)
<commit_msg>Fix bug in packet loss<commit_after>
|
#!/usr/bin/python2
"""
packet_loss.py: Simulates packet loss by dropping all packets with a
probability of 25%.
"""
import random
from pox.core import core
import pox.openflow.libopenflow_01 as of
def packet_in(event):
if random.random() >= 0.25:
msg = of.ofp_packet_out(data = event.ofp)
msg.actions.append(of.ofp_action_output(port = of.OFPP_FLOOD))
event.connection.send(msg)
def launch():
core.openflow.addListenerByName("PacketIn", packet_in)
|
#!/usr/bin/python2
"""
packet_loss.py: Simulates packet loss by dropping all packets with a
probability of 25%.
"""
import random
from pox.core import core
import pox.openflow.libopenflow_01 as of
def packet_in(event):.
if random.random() >= 0.25:
msg = of.ofp_packet_out(data = event.ofp)
msg.actions.append(of.ofp_action_output(port = of.OFPP_FLOOD))
conn.send(event.connection)
def launch():
core.openflow.addListenerByName("PacketIn", packet_in)
Fix bug in packet loss#!/usr/bin/python2
"""
packet_loss.py: Simulates packet loss by dropping all packets with a
probability of 25%.
"""
import random
from pox.core import core
import pox.openflow.libopenflow_01 as of
def packet_in(event):
if random.random() >= 0.25:
msg = of.ofp_packet_out(data = event.ofp)
msg.actions.append(of.ofp_action_output(port = of.OFPP_FLOOD))
event.connection.send(msg)
def launch():
core.openflow.addListenerByName("PacketIn", packet_in)
|
<commit_before>#!/usr/bin/python2
"""
packet_loss.py: Simulates packet loss by dropping all packets with a
probability of 25%.
"""
import random
from pox.core import core
import pox.openflow.libopenflow_01 as of
def packet_in(event):.
if random.random() >= 0.25:
msg = of.ofp_packet_out(data = event.ofp)
msg.actions.append(of.ofp_action_output(port = of.OFPP_FLOOD))
conn.send(event.connection)
def launch():
core.openflow.addListenerByName("PacketIn", packet_in)
<commit_msg>Fix bug in packet loss<commit_after>#!/usr/bin/python2
"""
packet_loss.py: Simulates packet loss by dropping all packets with a
probability of 25%.
"""
import random
from pox.core import core
import pox.openflow.libopenflow_01 as of
def packet_in(event):
if random.random() >= 0.25:
msg = of.ofp_packet_out(data = event.ofp)
msg.actions.append(of.ofp_action_output(port = of.OFPP_FLOOD))
event.connection.send(msg)
def launch():
core.openflow.addListenerByName("PacketIn", packet_in)
|
9b859b4dfc0f215b61e05662f0d0af435227e932
|
src/adhocracy_sample/adhocracy_sample/__init__.py
|
src/adhocracy_sample/adhocracy_sample/__init__.py
|
"""Simple sample app using the Adhocracy core."""
from adhocracy import root_factory
from pyramid.config import Configurator
def includeme(config):
"""Setup sample app."""
config.include('adhocracy')
# include default resource types
config.include('adhocracy.resources.tag')
config.include('adhocracy.resources.pool')
# include custom resource types
config.include('adhocracy_sample.resources')
def main(global_config, **settings):
""" Return a Pyramid WSGI application. """
config = Configurator(settings=settings, root_factory=root_factory)
includeme(config)
return config.make_wsgi_app()
|
"""Simple sample app using the Adhocracy core."""
from adhocracy import root_factory
from pyramid.config import Configurator
def includeme(config):
"""Setup sample app."""
config.include('adhocracy')
# include additional default resource types
config.include('adhocracy.resources.tag')
config.include('adhocracy.resources.pool')
config.include('adhocracy.resources.principal')
# include additional default sheet types
config.include('adhocracy.sheets.user')
# include custom resource types
config.include('adhocracy_sample.resources')
def main(global_config, **settings):
""" Return a Pyramid WSGI application. """
config = Configurator(settings=settings, root_factory=root_factory)
includeme(config)
return config.make_wsgi_app()
|
Include additional resources + sheets for login test.
|
Include additional resources + sheets for login test.
|
Python
|
agpl-3.0
|
fhartwig/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,liqd/adhocracy3.mercator,liqd/adhocracy3.mercator,liqd/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,xs2maverick/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,xs2maverick/adhocracy3.mercator
|
"""Simple sample app using the Adhocracy core."""
from adhocracy import root_factory
from pyramid.config import Configurator
def includeme(config):
"""Setup sample app."""
config.include('adhocracy')
# include default resource types
config.include('adhocracy.resources.tag')
config.include('adhocracy.resources.pool')
# include custom resource types
config.include('adhocracy_sample.resources')
def main(global_config, **settings):
""" Return a Pyramid WSGI application. """
config = Configurator(settings=settings, root_factory=root_factory)
includeme(config)
return config.make_wsgi_app()
Include additional resources + sheets for login test.
|
"""Simple sample app using the Adhocracy core."""
from adhocracy import root_factory
from pyramid.config import Configurator
def includeme(config):
"""Setup sample app."""
config.include('adhocracy')
# include additional default resource types
config.include('adhocracy.resources.tag')
config.include('adhocracy.resources.pool')
config.include('adhocracy.resources.principal')
# include additional default sheet types
config.include('adhocracy.sheets.user')
# include custom resource types
config.include('adhocracy_sample.resources')
def main(global_config, **settings):
""" Return a Pyramid WSGI application. """
config = Configurator(settings=settings, root_factory=root_factory)
includeme(config)
return config.make_wsgi_app()
|
<commit_before>"""Simple sample app using the Adhocracy core."""
from adhocracy import root_factory
from pyramid.config import Configurator
def includeme(config):
"""Setup sample app."""
config.include('adhocracy')
# include default resource types
config.include('adhocracy.resources.tag')
config.include('adhocracy.resources.pool')
# include custom resource types
config.include('adhocracy_sample.resources')
def main(global_config, **settings):
""" Return a Pyramid WSGI application. """
config = Configurator(settings=settings, root_factory=root_factory)
includeme(config)
return config.make_wsgi_app()
<commit_msg>Include additional resources + sheets for login test.<commit_after>
|
"""Simple sample app using the Adhocracy core."""
from adhocracy import root_factory
from pyramid.config import Configurator
def includeme(config):
"""Setup sample app."""
config.include('adhocracy')
# include additional default resource types
config.include('adhocracy.resources.tag')
config.include('adhocracy.resources.pool')
config.include('adhocracy.resources.principal')
# include additional default sheet types
config.include('adhocracy.sheets.user')
# include custom resource types
config.include('adhocracy_sample.resources')
def main(global_config, **settings):
""" Return a Pyramid WSGI application. """
config = Configurator(settings=settings, root_factory=root_factory)
includeme(config)
return config.make_wsgi_app()
|
"""Simple sample app using the Adhocracy core."""
from adhocracy import root_factory
from pyramid.config import Configurator
def includeme(config):
"""Setup sample app."""
config.include('adhocracy')
# include default resource types
config.include('adhocracy.resources.tag')
config.include('adhocracy.resources.pool')
# include custom resource types
config.include('adhocracy_sample.resources')
def main(global_config, **settings):
""" Return a Pyramid WSGI application. """
config = Configurator(settings=settings, root_factory=root_factory)
includeme(config)
return config.make_wsgi_app()
Include additional resources + sheets for login test."""Simple sample app using the Adhocracy core."""
from adhocracy import root_factory
from pyramid.config import Configurator
def includeme(config):
"""Setup sample app."""
config.include('adhocracy')
# include additional default resource types
config.include('adhocracy.resources.tag')
config.include('adhocracy.resources.pool')
config.include('adhocracy.resources.principal')
# include additional default sheet types
config.include('adhocracy.sheets.user')
# include custom resource types
config.include('adhocracy_sample.resources')
def main(global_config, **settings):
""" Return a Pyramid WSGI application. """
config = Configurator(settings=settings, root_factory=root_factory)
includeme(config)
return config.make_wsgi_app()
|
<commit_before>"""Simple sample app using the Adhocracy core."""
from adhocracy import root_factory
from pyramid.config import Configurator
def includeme(config):
"""Setup sample app."""
config.include('adhocracy')
# include default resource types
config.include('adhocracy.resources.tag')
config.include('adhocracy.resources.pool')
# include custom resource types
config.include('adhocracy_sample.resources')
def main(global_config, **settings):
""" Return a Pyramid WSGI application. """
config = Configurator(settings=settings, root_factory=root_factory)
includeme(config)
return config.make_wsgi_app()
<commit_msg>Include additional resources + sheets for login test.<commit_after>"""Simple sample app using the Adhocracy core."""
from adhocracy import root_factory
from pyramid.config import Configurator
def includeme(config):
"""Setup sample app."""
config.include('adhocracy')
# include additional default resource types
config.include('adhocracy.resources.tag')
config.include('adhocracy.resources.pool')
config.include('adhocracy.resources.principal')
# include additional default sheet types
config.include('adhocracy.sheets.user')
# include custom resource types
config.include('adhocracy_sample.resources')
def main(global_config, **settings):
""" Return a Pyramid WSGI application. """
config = Configurator(settings=settings, root_factory=root_factory)
includeme(config)
return config.make_wsgi_app()
|
b7eff5f52801fb066b975ce1726a76bcfa64987a
|
injectors/tty.py
|
injectors/tty.py
|
# vim: set fileencoding=utf-8
# Pavel Odvody <podvody@redhat.com>
#
# HICA - Host integrated container applications
#
# MIT License (C) 2015
from base.hica_base import *
class TtyInjector(HicaInjector):
def get_description(self):
return 'Allocates a TTY for the process'
def get_config_key(self):
return 'io.hica.tty'
def get_injected_args(self):
return ((None, None, None),)
def inject_config(self, config, args):
super(TtyInjector, self).inject_config(config, args)
config.append('-t')
|
# vim: set fileencoding=utf-8
# Pavel Odvody <podvody@redhat.com>
#
# HICA - Host integrated container applications
#
# MIT License (C) 2015
from base.hica_base import *
class TtyInjector(HicaInjector):
def get_description(self):
return 'Allocates a TTY for the process'
def get_config_key(self):
return 'io.hica.tty'
def get_injected_args(self):
return ((None, HicaValueType.NONE, None),)
def inject_config(self, config, args):
super(TtyInjector, self).inject_config(config, args)
config.append('-t')
|
Use the proper none value
|
Use the proper none value
|
Python
|
mit
|
shaded-enmity/docker-hica
|
# vim: set fileencoding=utf-8
# Pavel Odvody <podvody@redhat.com>
#
# HICA - Host integrated container applications
#
# MIT License (C) 2015
from base.hica_base import *
class TtyInjector(HicaInjector):
def get_description(self):
return 'Allocates a TTY for the process'
def get_config_key(self):
return 'io.hica.tty'
def get_injected_args(self):
return ((None, None, None),)
def inject_config(self, config, args):
super(TtyInjector, self).inject_config(config, args)
config.append('-t')
Use the proper none value
|
# vim: set fileencoding=utf-8
# Pavel Odvody <podvody@redhat.com>
#
# HICA - Host integrated container applications
#
# MIT License (C) 2015
from base.hica_base import *
class TtyInjector(HicaInjector):
def get_description(self):
return 'Allocates a TTY for the process'
def get_config_key(self):
return 'io.hica.tty'
def get_injected_args(self):
return ((None, HicaValueType.NONE, None),)
def inject_config(self, config, args):
super(TtyInjector, self).inject_config(config, args)
config.append('-t')
|
<commit_before># vim: set fileencoding=utf-8
# Pavel Odvody <podvody@redhat.com>
#
# HICA - Host integrated container applications
#
# MIT License (C) 2015
from base.hica_base import *
class TtyInjector(HicaInjector):
def get_description(self):
return 'Allocates a TTY for the process'
def get_config_key(self):
return 'io.hica.tty'
def get_injected_args(self):
return ((None, None, None),)
def inject_config(self, config, args):
super(TtyInjector, self).inject_config(config, args)
config.append('-t')
<commit_msg>Use the proper none value<commit_after>
|
# vim: set fileencoding=utf-8
# Pavel Odvody <podvody@redhat.com>
#
# HICA - Host integrated container applications
#
# MIT License (C) 2015
from base.hica_base import *
class TtyInjector(HicaInjector):
def get_description(self):
return 'Allocates a TTY for the process'
def get_config_key(self):
return 'io.hica.tty'
def get_injected_args(self):
return ((None, HicaValueType.NONE, None),)
def inject_config(self, config, args):
super(TtyInjector, self).inject_config(config, args)
config.append('-t')
|
# vim: set fileencoding=utf-8
# Pavel Odvody <podvody@redhat.com>
#
# HICA - Host integrated container applications
#
# MIT License (C) 2015
from base.hica_base import *
class TtyInjector(HicaInjector):
def get_description(self):
return 'Allocates a TTY for the process'
def get_config_key(self):
return 'io.hica.tty'
def get_injected_args(self):
return ((None, None, None),)
def inject_config(self, config, args):
super(TtyInjector, self).inject_config(config, args)
config.append('-t')
Use the proper none value# vim: set fileencoding=utf-8
# Pavel Odvody <podvody@redhat.com>
#
# HICA - Host integrated container applications
#
# MIT License (C) 2015
from base.hica_base import *
class TtyInjector(HicaInjector):
def get_description(self):
return 'Allocates a TTY for the process'
def get_config_key(self):
return 'io.hica.tty'
def get_injected_args(self):
return ((None, HicaValueType.NONE, None),)
def inject_config(self, config, args):
super(TtyInjector, self).inject_config(config, args)
config.append('-t')
|
<commit_before># vim: set fileencoding=utf-8
# Pavel Odvody <podvody@redhat.com>
#
# HICA - Host integrated container applications
#
# MIT License (C) 2015
from base.hica_base import *
class TtyInjector(HicaInjector):
def get_description(self):
return 'Allocates a TTY for the process'
def get_config_key(self):
return 'io.hica.tty'
def get_injected_args(self):
return ((None, None, None),)
def inject_config(self, config, args):
super(TtyInjector, self).inject_config(config, args)
config.append('-t')
<commit_msg>Use the proper none value<commit_after># vim: set fileencoding=utf-8
# Pavel Odvody <podvody@redhat.com>
#
# HICA - Host integrated container applications
#
# MIT License (C) 2015
from base.hica_base import *
class TtyInjector(HicaInjector):
def get_description(self):
return 'Allocates a TTY for the process'
def get_config_key(self):
return 'io.hica.tty'
def get_injected_args(self):
return ((None, HicaValueType.NONE, None),)
def inject_config(self, config, args):
super(TtyInjector, self).inject_config(config, args)
config.append('-t')
|
c30d9685239607883aeaee73618651f694f7d1b2
|
server/lib/slack/add_player.py
|
server/lib/slack/add_player.py
|
#!/usr/bin/python2.7
import re
import lib.webutil as webutil
from lib.data import players as player_data
def handle(command_text):
player_components = command_text.split(' ')
new_player_id = re.sub('[<@]', '', player_components[2].split('|')[0])
player = player_data.get_player(new_player_id)
if player is not None:
return webutil.respond_success('A player record for you already exists.')
player_data.add_player(new_player_id)
return webutil.respond_success('Alright, <@' + new_player_id + '>, you\'re ready to play!')
|
#!/usr/bin/python2.7
import re
import lib.webutil as webutil
import slack.util as slackutil
from lib.data import players as player_data
def handle(command_text):
player_components = command_text.split(' ')
new_player_id = re.sub('[<@]', '', player_components[2].split('|')[0])
player = player_data.get_player(new_player_id)
if player is not None:
return webutil.respond_success('A player record for <@' + new_player_id + '> already exists.')
player_data.add_player(new_player_id)
return webutil.respond_success(slackutil.in_channel_response('Alright, <@' + new_player_id + '> is ready to play!'))
|
Update response for adding a player
|
Update response for adding a player
|
Python
|
mit
|
groppe/mario
|
#!/usr/bin/python2.7
import re
import lib.webutil as webutil
from lib.data import players as player_data
def handle(command_text):
player_components = command_text.split(' ')
new_player_id = re.sub('[<@]', '', player_components[2].split('|')[0])
player = player_data.get_player(new_player_id)
if player is not None:
return webutil.respond_success('A player record for you already exists.')
player_data.add_player(new_player_id)
return webutil.respond_success('Alright, <@' + new_player_id + '>, you\'re ready to play!')
Update response for adding a player
|
#!/usr/bin/python2.7
import re
import lib.webutil as webutil
import slack.util as slackutil
from lib.data import players as player_data
def handle(command_text):
player_components = command_text.split(' ')
new_player_id = re.sub('[<@]', '', player_components[2].split('|')[0])
player = player_data.get_player(new_player_id)
if player is not None:
return webutil.respond_success('A player record for <@' + new_player_id + '> already exists.')
player_data.add_player(new_player_id)
return webutil.respond_success(slackutil.in_channel_response('Alright, <@' + new_player_id + '> is ready to play!'))
|
<commit_before>#!/usr/bin/python2.7
import re
import lib.webutil as webutil
from lib.data import players as player_data
def handle(command_text):
player_components = command_text.split(' ')
new_player_id = re.sub('[<@]', '', player_components[2].split('|')[0])
player = player_data.get_player(new_player_id)
if player is not None:
return webutil.respond_success('A player record for you already exists.')
player_data.add_player(new_player_id)
return webutil.respond_success('Alright, <@' + new_player_id + '>, you\'re ready to play!')
<commit_msg>Update response for adding a player<commit_after>
|
#!/usr/bin/python2.7
import re
import lib.webutil as webutil
import slack.util as slackutil
from lib.data import players as player_data
def handle(command_text):
player_components = command_text.split(' ')
new_player_id = re.sub('[<@]', '', player_components[2].split('|')[0])
player = player_data.get_player(new_player_id)
if player is not None:
return webutil.respond_success('A player record for <@' + new_player_id + '> already exists.')
player_data.add_player(new_player_id)
return webutil.respond_success(slackutil.in_channel_response('Alright, <@' + new_player_id + '> is ready to play!'))
|
#!/usr/bin/python2.7
import re
import lib.webutil as webutil
from lib.data import players as player_data
def handle(command_text):
player_components = command_text.split(' ')
new_player_id = re.sub('[<@]', '', player_components[2].split('|')[0])
player = player_data.get_player(new_player_id)
if player is not None:
return webutil.respond_success('A player record for you already exists.')
player_data.add_player(new_player_id)
return webutil.respond_success('Alright, <@' + new_player_id + '>, you\'re ready to play!')
Update response for adding a player#!/usr/bin/python2.7
import re
import lib.webutil as webutil
import slack.util as slackutil
from lib.data import players as player_data
def handle(command_text):
player_components = command_text.split(' ')
new_player_id = re.sub('[<@]', '', player_components[2].split('|')[0])
player = player_data.get_player(new_player_id)
if player is not None:
return webutil.respond_success('A player record for <@' + new_player_id + '> already exists.')
player_data.add_player(new_player_id)
return webutil.respond_success(slackutil.in_channel_response('Alright, <@' + new_player_id + '> is ready to play!'))
|
<commit_before>#!/usr/bin/python2.7
import re
import lib.webutil as webutil
from lib.data import players as player_data
def handle(command_text):
player_components = command_text.split(' ')
new_player_id = re.sub('[<@]', '', player_components[2].split('|')[0])
player = player_data.get_player(new_player_id)
if player is not None:
return webutil.respond_success('A player record for you already exists.')
player_data.add_player(new_player_id)
return webutil.respond_success('Alright, <@' + new_player_id + '>, you\'re ready to play!')
<commit_msg>Update response for adding a player<commit_after>#!/usr/bin/python2.7
import re
import lib.webutil as webutil
import slack.util as slackutil
from lib.data import players as player_data
def handle(command_text):
player_components = command_text.split(' ')
new_player_id = re.sub('[<@]', '', player_components[2].split('|')[0])
player = player_data.get_player(new_player_id)
if player is not None:
return webutil.respond_success('A player record for <@' + new_player_id + '> already exists.')
player_data.add_player(new_player_id)
return webutil.respond_success(slackutil.in_channel_response('Alright, <@' + new_player_id + '> is ready to play!'))
|
d34fb5a386eae07f57c78125d13664aa7965c487
|
demo/__init__.py
|
demo/__init__.py
|
#!/usr/bin/env python
"""Package for PythonTemplateDemo."""
__project__ = 'PythonTemplateDemo'
__version__ = '0.0.0'
VERSION = __project__ + '-' + __version__
|
"""Package for PythonTemplateDemo."""
__project__ = 'PythonTemplateDemo'
__version__ = '0.0.0'
VERSION = __project__ + '-' + __version__
|
Deploy Travis CI build 352 to GitHub
|
Deploy Travis CI build 352 to GitHub
|
Python
|
mit
|
jacebrowning/template-python-demo
|
#!/usr/bin/env python
"""Package for PythonTemplateDemo."""
__project__ = 'PythonTemplateDemo'
__version__ = '0.0.0'
VERSION = __project__ + '-' + __version__
Deploy Travis CI build 352 to GitHub
|
"""Package for PythonTemplateDemo."""
__project__ = 'PythonTemplateDemo'
__version__ = '0.0.0'
VERSION = __project__ + '-' + __version__
|
<commit_before>#!/usr/bin/env python
"""Package for PythonTemplateDemo."""
__project__ = 'PythonTemplateDemo'
__version__ = '0.0.0'
VERSION = __project__ + '-' + __version__
<commit_msg>Deploy Travis CI build 352 to GitHub<commit_after>
|
"""Package for PythonTemplateDemo."""
__project__ = 'PythonTemplateDemo'
__version__ = '0.0.0'
VERSION = __project__ + '-' + __version__
|
#!/usr/bin/env python
"""Package for PythonTemplateDemo."""
__project__ = 'PythonTemplateDemo'
__version__ = '0.0.0'
VERSION = __project__ + '-' + __version__
Deploy Travis CI build 352 to GitHub"""Package for PythonTemplateDemo."""
__project__ = 'PythonTemplateDemo'
__version__ = '0.0.0'
VERSION = __project__ + '-' + __version__
|
<commit_before>#!/usr/bin/env python
"""Package for PythonTemplateDemo."""
__project__ = 'PythonTemplateDemo'
__version__ = '0.0.0'
VERSION = __project__ + '-' + __version__
<commit_msg>Deploy Travis CI build 352 to GitHub<commit_after>"""Package for PythonTemplateDemo."""
__project__ = 'PythonTemplateDemo'
__version__ = '0.0.0'
VERSION = __project__ + '-' + __version__
|
3643edcd02d0b16c53fe07f9de0be1d13564237f
|
opps/db/conf.py
|
opps/db/conf.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
from appconf import AppConf
class OppsDataBaseConf(AppConf):
HOST = getattr(settings, 'OPPS_DB_HOSR', None)
USER = getattr(settings, 'OPPS_DB_USER', None)
PASSWORD = getattr(settings, 'OPPS_DB_PASSWORD', None)
PORT = getattr(settings, 'OPPS_DB_PORT', None)
NAME = getattr(settings, 'OPPS_DB_NAME', None)
TYPE = getattr(settings, 'OPPS_DB_TYPE', None)
OPTION = getattr(settings, 'OPPS_BD_OPTION', None)
class Meta:
prefix = 'opps_db'
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
from appconf import AppConf
class OppsDataBaseConf(AppConf):
HOST = getattr(settings, 'OPPS_DB_HOSR', None)
USER = getattr(settings, 'OPPS_DB_USER', None)
PASSWORD = getattr(settings, 'OPPS_DB_PASSWORD', None)
PORT = getattr(settings, 'OPPS_DB_PORT', None)
NAME = getattr(settings, 'OPPS_DB_NAME', 'opps_db')
TYPE = getattr(settings, 'OPPS_DB_TYPE', None)
OPTION = getattr(settings, 'OPPS_BD_OPTION', None)
class Meta:
prefix = 'opps_db'
|
Add default database name, used in opps db
|
Add default database name, used in opps db
|
Python
|
mit
|
opps/opps,YACOWS/opps,williamroot/opps,williamroot/opps,opps/opps,YACOWS/opps,YACOWS/opps,opps/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,jeanmask/opps,jeanmask/opps,opps/opps
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
from appconf import AppConf
class OppsDataBaseConf(AppConf):
HOST = getattr(settings, 'OPPS_DB_HOSR', None)
USER = getattr(settings, 'OPPS_DB_USER', None)
PASSWORD = getattr(settings, 'OPPS_DB_PASSWORD', None)
PORT = getattr(settings, 'OPPS_DB_PORT', None)
NAME = getattr(settings, 'OPPS_DB_NAME', None)
TYPE = getattr(settings, 'OPPS_DB_TYPE', None)
OPTION = getattr(settings, 'OPPS_BD_OPTION', None)
class Meta:
prefix = 'opps_db'
Add default database name, used in opps db
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
from appconf import AppConf
class OppsDataBaseConf(AppConf):
HOST = getattr(settings, 'OPPS_DB_HOSR', None)
USER = getattr(settings, 'OPPS_DB_USER', None)
PASSWORD = getattr(settings, 'OPPS_DB_PASSWORD', None)
PORT = getattr(settings, 'OPPS_DB_PORT', None)
NAME = getattr(settings, 'OPPS_DB_NAME', 'opps_db')
TYPE = getattr(settings, 'OPPS_DB_TYPE', None)
OPTION = getattr(settings, 'OPPS_BD_OPTION', None)
class Meta:
prefix = 'opps_db'
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
from appconf import AppConf
class OppsDataBaseConf(AppConf):
HOST = getattr(settings, 'OPPS_DB_HOSR', None)
USER = getattr(settings, 'OPPS_DB_USER', None)
PASSWORD = getattr(settings, 'OPPS_DB_PASSWORD', None)
PORT = getattr(settings, 'OPPS_DB_PORT', None)
NAME = getattr(settings, 'OPPS_DB_NAME', None)
TYPE = getattr(settings, 'OPPS_DB_TYPE', None)
OPTION = getattr(settings, 'OPPS_BD_OPTION', None)
class Meta:
prefix = 'opps_db'
<commit_msg>Add default database name, used in opps db<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
from appconf import AppConf
class OppsDataBaseConf(AppConf):
HOST = getattr(settings, 'OPPS_DB_HOSR', None)
USER = getattr(settings, 'OPPS_DB_USER', None)
PASSWORD = getattr(settings, 'OPPS_DB_PASSWORD', None)
PORT = getattr(settings, 'OPPS_DB_PORT', None)
NAME = getattr(settings, 'OPPS_DB_NAME', 'opps_db')
TYPE = getattr(settings, 'OPPS_DB_TYPE', None)
OPTION = getattr(settings, 'OPPS_BD_OPTION', None)
class Meta:
prefix = 'opps_db'
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
from appconf import AppConf
class OppsDataBaseConf(AppConf):
HOST = getattr(settings, 'OPPS_DB_HOSR', None)
USER = getattr(settings, 'OPPS_DB_USER', None)
PASSWORD = getattr(settings, 'OPPS_DB_PASSWORD', None)
PORT = getattr(settings, 'OPPS_DB_PORT', None)
NAME = getattr(settings, 'OPPS_DB_NAME', None)
TYPE = getattr(settings, 'OPPS_DB_TYPE', None)
OPTION = getattr(settings, 'OPPS_BD_OPTION', None)
class Meta:
prefix = 'opps_db'
Add default database name, used in opps db#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
from appconf import AppConf
class OppsDataBaseConf(AppConf):
HOST = getattr(settings, 'OPPS_DB_HOSR', None)
USER = getattr(settings, 'OPPS_DB_USER', None)
PASSWORD = getattr(settings, 'OPPS_DB_PASSWORD', None)
PORT = getattr(settings, 'OPPS_DB_PORT', None)
NAME = getattr(settings, 'OPPS_DB_NAME', 'opps_db')
TYPE = getattr(settings, 'OPPS_DB_TYPE', None)
OPTION = getattr(settings, 'OPPS_BD_OPTION', None)
class Meta:
prefix = 'opps_db'
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
from appconf import AppConf
class OppsDataBaseConf(AppConf):
HOST = getattr(settings, 'OPPS_DB_HOSR', None)
USER = getattr(settings, 'OPPS_DB_USER', None)
PASSWORD = getattr(settings, 'OPPS_DB_PASSWORD', None)
PORT = getattr(settings, 'OPPS_DB_PORT', None)
NAME = getattr(settings, 'OPPS_DB_NAME', None)
TYPE = getattr(settings, 'OPPS_DB_TYPE', None)
OPTION = getattr(settings, 'OPPS_BD_OPTION', None)
class Meta:
prefix = 'opps_db'
<commit_msg>Add default database name, used in opps db<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
from appconf import AppConf
class OppsDataBaseConf(AppConf):
HOST = getattr(settings, 'OPPS_DB_HOSR', None)
USER = getattr(settings, 'OPPS_DB_USER', None)
PASSWORD = getattr(settings, 'OPPS_DB_PASSWORD', None)
PORT = getattr(settings, 'OPPS_DB_PORT', None)
NAME = getattr(settings, 'OPPS_DB_NAME', 'opps_db')
TYPE = getattr(settings, 'OPPS_DB_TYPE', None)
OPTION = getattr(settings, 'OPPS_BD_OPTION', None)
class Meta:
prefix = 'opps_db'
|
a5b57601da6e9b85eca18d61e3784addd1863fa4
|
i3pystatus/__init__.py
|
i3pystatus/__init__.py
|
#!/usr/bin/env python
from i3pystatus.core import Status
from i3pystatus.core.modules import Module, IntervalModule
from i3pystatus.core.settings import SettingsBase
from i3pystatus.core.util import formatp
|
from pkgutil import extend_path
from i3pystatus.core import Status
from i3pystatus.core.modules import Module, IntervalModule
from i3pystatus.core.settings import SettingsBase
from i3pystatus.core.util import formatp
__path__ = extend_path(__path__, __name__)
|
Make i3pystatus a namespace package
|
Make i3pystatus a namespace package
|
Python
|
mit
|
Arvedui/i3pystatus,schroeji/i3pystatus,opatut/i3pystatus,MaicoTimmerman/i3pystatus,eBrnd/i3pystatus,m45t3r/i3pystatus,teto/i3pystatus,drwahl/i3pystatus,Arvedui/i3pystatus,teto/i3pystatus,fmarchenko/i3pystatus,plumps/i3pystatus,paulollivier/i3pystatus,juliushaertl/i3pystatus,ismaelpuerto/i3pystatus,schroeji/i3pystatus,ncoop/i3pystatus,asmikhailov/i3pystatus,enkore/i3pystatus,drwahl/i3pystatus,claria/i3pystatus,ismaelpuerto/i3pystatus,richese/i3pystatus,onkelpit/i3pystatus,plumps/i3pystatus,claria/i3pystatus,yang-ling/i3pystatus,opatut/i3pystatus,MaicoTimmerman/i3pystatus,facetoe/i3pystatus,onkelpit/i3pystatus,eBrnd/i3pystatus,facetoe/i3pystatus,asmikhailov/i3pystatus,richese/i3pystatus,juliushaertl/i3pystatus,yang-ling/i3pystatus,fmarchenko/i3pystatus,m45t3r/i3pystatus,paulollivier/i3pystatus,enkore/i3pystatus,Elder-of-Ozone/i3pystatus,Elder-of-Ozone/i3pystatus,ncoop/i3pystatus
|
#!/usr/bin/env python
from i3pystatus.core import Status
from i3pystatus.core.modules import Module, IntervalModule
from i3pystatus.core.settings import SettingsBase
from i3pystatus.core.util import formatp
Make i3pystatus a namespace package
|
from pkgutil import extend_path
from i3pystatus.core import Status
from i3pystatus.core.modules import Module, IntervalModule
from i3pystatus.core.settings import SettingsBase
from i3pystatus.core.util import formatp
__path__ = extend_path(__path__, __name__)
|
<commit_before>#!/usr/bin/env python
from i3pystatus.core import Status
from i3pystatus.core.modules import Module, IntervalModule
from i3pystatus.core.settings import SettingsBase
from i3pystatus.core.util import formatp
<commit_msg>Make i3pystatus a namespace package<commit_after>
|
from pkgutil import extend_path
from i3pystatus.core import Status
from i3pystatus.core.modules import Module, IntervalModule
from i3pystatus.core.settings import SettingsBase
from i3pystatus.core.util import formatp
__path__ = extend_path(__path__, __name__)
|
#!/usr/bin/env python
from i3pystatus.core import Status
from i3pystatus.core.modules import Module, IntervalModule
from i3pystatus.core.settings import SettingsBase
from i3pystatus.core.util import formatp
Make i3pystatus a namespace package
from pkgutil import extend_path
from i3pystatus.core import Status
from i3pystatus.core.modules import Module, IntervalModule
from i3pystatus.core.settings import SettingsBase
from i3pystatus.core.util import formatp
__path__ = extend_path(__path__, __name__)
|
<commit_before>#!/usr/bin/env python
from i3pystatus.core import Status
from i3pystatus.core.modules import Module, IntervalModule
from i3pystatus.core.settings import SettingsBase
from i3pystatus.core.util import formatp
<commit_msg>Make i3pystatus a namespace package<commit_after>
from pkgutil import extend_path
from i3pystatus.core import Status
from i3pystatus.core.modules import Module, IntervalModule
from i3pystatus.core.settings import SettingsBase
from i3pystatus.core.util import formatp
__path__ = extend_path(__path__, __name__)
|
79d2a4824607048a79396731072d18636c7e69f3
|
jqsh/__main__.py
|
jqsh/__main__.py
|
#!/usr/bin/env python3
import sys
import jqsh.parser
import json
while True: # a simple repl
try:
for value in jqsh.parser.parse(input('jqsh> ')).start():
json.dump(value, sys.stdout, sort_keys=True, indent=2, separators=(',', ': '))
print() # add a newline because json.dump doesn't end its values with newlines
except EOFError:
print('^D')
break
except KeyboardInterrupt:
print() # add a newline after the Python-provided '^C'
continue
except SyntaxError as e:
print('jqsh: syntax error: ' + str(e))
|
#!/usr/bin/env python3
"""A shell based on jq.
Usage:
jqsh
jqsh -c <filter> | --filter=<filter>
jqsh -h | --help
Options:
-c, --filter=<filter> Apply this filter to the standard input instead of starting interactive mode.
-h, --help Print this message and exit.
"""
import sys
import jqsh.parser
import json
arguments = sys.argv[1:]
filter_argument = None
while len(arguments):
if arguments[0].startswith('-c') or arguments[0].startswith('--filter=') or arguments[0] == '--filter':
if arguments[0] == '-c' and len(arguments) > 1:
filter_argument = arguments[1]
arguments = arguments[2:]
elif arguments[0].startswith('-c'):
filter_argument = arguments[0][len('-c'):]
arguments = arguments[1:]
elif arguments[0].startswith('--filter='):
filter_argument = arguments[0][len('--filter='):]
arguments = arguments[1:]
elif arguments[0] == '--filter':
filter_argument = arguments[1]
arguments = arguments[2:]
elif arguments[0] == '--help' or arguments[0].startswith('-h'):
print('jqsh:', __doc__)
sys.exit()
else:
sys.exit('[!!!!] invalid argument: ' + arguments[0])
if filter_argument is not None:
#TODO parse stdin
for value in jqsh.parser.parse(filter_argument).start():
json.dump(value, sys.stdout, sort_keys=True, indent=2, separators=(',', ': '))
print() # add a newline because json.dump doesn't end its values with newlines
sys.exit()
while True: # a simple repl
try:
for value in jqsh.parser.parse(input('jqsh> ')).start():
json.dump(value, sys.stdout, sort_keys=True, indent=2, separators=(',', ': '))
print() # add a newline because json.dump doesn't end its values with newlines
except EOFError:
print('^D')
break
except KeyboardInterrupt:
print() # add a newline after the Python-provided '^C'
continue
except SyntaxError as e:
print('jqsh: syntax error: ' + str(e))
|
Add argument parsing and --filter option
|
Add argument parsing and --filter option
|
Python
|
mit
|
jq-shell/python-jqsh
|
#!/usr/bin/env python3
import sys
import jqsh.parser
import json
while True: # a simple repl
try:
for value in jqsh.parser.parse(input('jqsh> ')).start():
json.dump(value, sys.stdout, sort_keys=True, indent=2, separators=(',', ': '))
print() # add a newline because json.dump doesn't end its values with newlines
except EOFError:
print('^D')
break
except KeyboardInterrupt:
print() # add a newline after the Python-provided '^C'
continue
except SyntaxError as e:
print('jqsh: syntax error: ' + str(e))
Add argument parsing and --filter option
|
#!/usr/bin/env python3
"""A shell based on jq.
Usage:
jqsh
jqsh -c <filter> | --filter=<filter>
jqsh -h | --help
Options:
-c, --filter=<filter> Apply this filter to the standard input instead of starting interactive mode.
-h, --help Print this message and exit.
"""
import sys
import jqsh.parser
import json
arguments = sys.argv[1:]
filter_argument = None
while len(arguments):
if arguments[0].startswith('-c') or arguments[0].startswith('--filter=') or arguments[0] == '--filter':
if arguments[0] == '-c' and len(arguments) > 1:
filter_argument = arguments[1]
arguments = arguments[2:]
elif arguments[0].startswith('-c'):
filter_argument = arguments[0][len('-c'):]
arguments = arguments[1:]
elif arguments[0].startswith('--filter='):
filter_argument = arguments[0][len('--filter='):]
arguments = arguments[1:]
elif arguments[0] == '--filter':
filter_argument = arguments[1]
arguments = arguments[2:]
elif arguments[0] == '--help' or arguments[0].startswith('-h'):
print('jqsh:', __doc__)
sys.exit()
else:
sys.exit('[!!!!] invalid argument: ' + arguments[0])
if filter_argument is not None:
#TODO parse stdin
for value in jqsh.parser.parse(filter_argument).start():
json.dump(value, sys.stdout, sort_keys=True, indent=2, separators=(',', ': '))
print() # add a newline because json.dump doesn't end its values with newlines
sys.exit()
while True: # a simple repl
try:
for value in jqsh.parser.parse(input('jqsh> ')).start():
json.dump(value, sys.stdout, sort_keys=True, indent=2, separators=(',', ': '))
print() # add a newline because json.dump doesn't end its values with newlines
except EOFError:
print('^D')
break
except KeyboardInterrupt:
print() # add a newline after the Python-provided '^C'
continue
except SyntaxError as e:
print('jqsh: syntax error: ' + str(e))
|
<commit_before>#!/usr/bin/env python3
import sys
import jqsh.parser
import json
while True: # a simple repl
try:
for value in jqsh.parser.parse(input('jqsh> ')).start():
json.dump(value, sys.stdout, sort_keys=True, indent=2, separators=(',', ': '))
print() # add a newline because json.dump doesn't end its values with newlines
except EOFError:
print('^D')
break
except KeyboardInterrupt:
print() # add a newline after the Python-provided '^C'
continue
except SyntaxError as e:
print('jqsh: syntax error: ' + str(e))
<commit_msg>Add argument parsing and --filter option<commit_after>
|
#!/usr/bin/env python3
"""A shell based on jq.
Usage:
jqsh
jqsh -c <filter> | --filter=<filter>
jqsh -h | --help
Options:
-c, --filter=<filter> Apply this filter to the standard input instead of starting interactive mode.
-h, --help Print this message and exit.
"""
import sys
import jqsh.parser
import json
arguments = sys.argv[1:]
filter_argument = None
while len(arguments):
if arguments[0].startswith('-c') or arguments[0].startswith('--filter=') or arguments[0] == '--filter':
if arguments[0] == '-c' and len(arguments) > 1:
filter_argument = arguments[1]
arguments = arguments[2:]
elif arguments[0].startswith('-c'):
filter_argument = arguments[0][len('-c'):]
arguments = arguments[1:]
elif arguments[0].startswith('--filter='):
filter_argument = arguments[0][len('--filter='):]
arguments = arguments[1:]
elif arguments[0] == '--filter':
filter_argument = arguments[1]
arguments = arguments[2:]
elif arguments[0] == '--help' or arguments[0].startswith('-h'):
print('jqsh:', __doc__)
sys.exit()
else:
sys.exit('[!!!!] invalid argument: ' + arguments[0])
if filter_argument is not None:
#TODO parse stdin
for value in jqsh.parser.parse(filter_argument).start():
json.dump(value, sys.stdout, sort_keys=True, indent=2, separators=(',', ': '))
print() # add a newline because json.dump doesn't end its values with newlines
sys.exit()
while True: # a simple repl
try:
for value in jqsh.parser.parse(input('jqsh> ')).start():
json.dump(value, sys.stdout, sort_keys=True, indent=2, separators=(',', ': '))
print() # add a newline because json.dump doesn't end its values with newlines
except EOFError:
print('^D')
break
except KeyboardInterrupt:
print() # add a newline after the Python-provided '^C'
continue
except SyntaxError as e:
print('jqsh: syntax error: ' + str(e))
|
#!/usr/bin/env python3
import sys
import jqsh.parser
import json
while True: # a simple repl
try:
for value in jqsh.parser.parse(input('jqsh> ')).start():
json.dump(value, sys.stdout, sort_keys=True, indent=2, separators=(',', ': '))
print() # add a newline because json.dump doesn't end its values with newlines
except EOFError:
print('^D')
break
except KeyboardInterrupt:
print() # add a newline after the Python-provided '^C'
continue
except SyntaxError as e:
print('jqsh: syntax error: ' + str(e))
Add argument parsing and --filter option#!/usr/bin/env python3
"""A shell based on jq.
Usage:
jqsh
jqsh -c <filter> | --filter=<filter>
jqsh -h | --help
Options:
-c, --filter=<filter> Apply this filter to the standard input instead of starting interactive mode.
-h, --help Print this message and exit.
"""
import sys
import jqsh.parser
import json
arguments = sys.argv[1:]
filter_argument = None
while len(arguments):
if arguments[0].startswith('-c') or arguments[0].startswith('--filter=') or arguments[0] == '--filter':
if arguments[0] == '-c' and len(arguments) > 1:
filter_argument = arguments[1]
arguments = arguments[2:]
elif arguments[0].startswith('-c'):
filter_argument = arguments[0][len('-c'):]
arguments = arguments[1:]
elif arguments[0].startswith('--filter='):
filter_argument = arguments[0][len('--filter='):]
arguments = arguments[1:]
elif arguments[0] == '--filter':
filter_argument = arguments[1]
arguments = arguments[2:]
elif arguments[0] == '--help' or arguments[0].startswith('-h'):
print('jqsh:', __doc__)
sys.exit()
else:
sys.exit('[!!!!] invalid argument: ' + arguments[0])
if filter_argument is not None:
#TODO parse stdin
for value in jqsh.parser.parse(filter_argument).start():
json.dump(value, sys.stdout, sort_keys=True, indent=2, separators=(',', ': '))
print() # add a newline because json.dump doesn't end its values with newlines
sys.exit()
while True: # a simple repl
try:
for value in jqsh.parser.parse(input('jqsh> ')).start():
json.dump(value, sys.stdout, sort_keys=True, indent=2, separators=(',', ': '))
print() # add a newline because json.dump doesn't end its values with newlines
except EOFError:
print('^D')
break
except KeyboardInterrupt:
print() # add a newline after the Python-provided '^C'
continue
except SyntaxError as e:
print('jqsh: syntax error: ' + str(e))
|
<commit_before>#!/usr/bin/env python3
import sys
import jqsh.parser
import json
while True: # a simple repl
try:
for value in jqsh.parser.parse(input('jqsh> ')).start():
json.dump(value, sys.stdout, sort_keys=True, indent=2, separators=(',', ': '))
print() # add a newline because json.dump doesn't end its values with newlines
except EOFError:
print('^D')
break
except KeyboardInterrupt:
print() # add a newline after the Python-provided '^C'
continue
except SyntaxError as e:
print('jqsh: syntax error: ' + str(e))
<commit_msg>Add argument parsing and --filter option<commit_after>#!/usr/bin/env python3
"""A shell based on jq.
Usage:
jqsh
jqsh -c <filter> | --filter=<filter>
jqsh -h | --help
Options:
-c, --filter=<filter> Apply this filter to the standard input instead of starting interactive mode.
-h, --help Print this message and exit.
"""
import sys
import jqsh.parser
import json
arguments = sys.argv[1:]
filter_argument = None
while len(arguments):
if arguments[0].startswith('-c') or arguments[0].startswith('--filter=') or arguments[0] == '--filter':
if arguments[0] == '-c' and len(arguments) > 1:
filter_argument = arguments[1]
arguments = arguments[2:]
elif arguments[0].startswith('-c'):
filter_argument = arguments[0][len('-c'):]
arguments = arguments[1:]
elif arguments[0].startswith('--filter='):
filter_argument = arguments[0][len('--filter='):]
arguments = arguments[1:]
elif arguments[0] == '--filter':
filter_argument = arguments[1]
arguments = arguments[2:]
elif arguments[0] == '--help' or arguments[0].startswith('-h'):
print('jqsh:', __doc__)
sys.exit()
else:
sys.exit('[!!!!] invalid argument: ' + arguments[0])
if filter_argument is not None:
#TODO parse stdin
for value in jqsh.parser.parse(filter_argument).start():
json.dump(value, sys.stdout, sort_keys=True, indent=2, separators=(',', ': '))
print() # add a newline because json.dump doesn't end its values with newlines
sys.exit()
while True: # a simple repl
try:
for value in jqsh.parser.parse(input('jqsh> ')).start():
json.dump(value, sys.stdout, sort_keys=True, indent=2, separators=(',', ': '))
print() # add a newline because json.dump doesn't end its values with newlines
except EOFError:
print('^D')
break
except KeyboardInterrupt:
print() # add a newline after the Python-provided '^C'
continue
except SyntaxError as e:
print('jqsh: syntax error: ' + str(e))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.